diff --git a/.gitattributes b/.gitattributes
index 8fd242703e39553fb45b1b75749fc59bbd96d593..fcfae1707dfec1763d00b3e09ae8e6adf2513eed 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -35,3 +35,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
 data/WHU/annotations/WHU_building_test.json filter=lfs diff=lfs merge=lfs -text
 data/WHU/annotations/WHU_building_train.json filter=lfs diff=lfs merge=lfs -text
+mmpretrain/annotations/WHU_building_test.json filter=lfs diff=lfs merge=lfs -text
+mmpretrain/annotations/WHU_building_train.json filter=lfs diff=lfs merge=lfs -text
diff --git a/mmpretrain/__init__.py b/mmpretrain/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..6262d2c67b99d221e9bb00c2c795540473cf1fae
--- /dev/null
+++ b/mmpretrain/__init__.py
@@ -0,0 +1,28 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import mmcv
+import mmengine
+from mmengine.utils import digit_version
+
+from .apis import *  # noqa: F401, F403
+from .version import __version__
+
+mmcv_minimum_version = '2.0.0rc4'
+mmcv_maximum_version = '2.1.0'
+mmcv_version = digit_version(mmcv.__version__)
+
+mmengine_minimum_version = '0.7.1'
+mmengine_maximum_version = '1.0.0'
+mmengine_version = digit_version(mmengine.__version__)
+
+assert (mmcv_version >= digit_version(mmcv_minimum_version)
+        and mmcv_version < digit_version(mmcv_maximum_version)), \
+    f'MMCV=={mmcv.__version__} is used but incompatible. ' \
+    f'Please install mmcv>={mmcv_minimum_version}, <{mmcv_maximum_version}.'
+
+assert (mmengine_version >= digit_version(mmengine_minimum_version)
+        and mmengine_version < digit_version(mmengine_maximum_version)), \
+    f'MMEngine=={mmengine.__version__} is used but incompatible. ' \
+    f'Please install mmengine>={mmengine_minimum_version}, ' \
+    f'<{mmengine_maximum_version}.'
+
+__all__ = ['__version__']
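Note on the version guard in mmpretrain/__init__.py above: digit_version (a real mmengine.utils helper used in the diff) converts a version string into a comparable tuple, so the two asserts enforce a half-open range [minimum, maximum). Below is a minimal sketch of the same check under that assumption; the helper name check_version_range is illustrative and not part of this diff.

# Sketch of the [min, max) version-range check performed by the asserts above.
# `digit_version` comes from mmengine; `check_version_range` is a hypothetical name.
from mmengine.utils import digit_version


def check_version_range(installed: str, minimum: str, maximum: str) -> bool:
    """Return True if `installed` lies in the half-open range [minimum, maximum)."""
    v = digit_version(installed)
    return digit_version(minimum) <= v < digit_version(maximum)


# With the mmcv bounds from the diff:
#   check_version_range('2.0.0', '2.0.0rc4', '2.1.0')  -> True
#   check_version_range('2.1.0', '2.0.0rc4', '2.1.0')  -> False (upper bound excluded)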
diff --git a/mmpretrain/annotations/WHU_building_test.json b/mmpretrain/annotations/WHU_building_test.json
new file mode 100644
index 0000000000000000000000000000000000000000..6edcd903e05e4725d412fe4f4ab02c0d32877412
--- /dev/null
+++ b/mmpretrain/annotations/WHU_building_test.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c5845dd19a3ec84aa3bc978ad5dc8066b43569c4ac9ff12c954d96208ec13432
+size 13511169
diff --git a/mmpretrain/annotations/WHU_building_train.json b/mmpretrain/annotations/WHU_building_train.json
new file mode 100644
index 0000000000000000000000000000000000000000..d3e1ed6e5087f39646991adeb7efa43e59c670ef
--- /dev/null
+++ b/mmpretrain/annotations/WHU_building_train.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:28c490b7c80e6900a5b4da522faee91c6251589a0c9ebb258e79221c2586d2fa
+size 42910976
diff --git a/mmpretrain/annotations/WHU_building_val.json b/mmpretrain/annotations/WHU_building_val.json
new file mode 100644
index 0000000000000000000000000000000000000000..e501baa78b89b3df7fa39a5fa79ca1e4d07bfb71
--- /dev/null
+++ b/mmpretrain/annotations/WHU_building_val.json
@@ -0,0 +1 @@
+{"images": [{"file_name": "val_667.tif", "height": 512, "width": 512, "segm_file": "val_667.tif", "id": 0}, {"file_name": "val_101.tif", "height": 512, "width": 512, "segm_file": "val_101.tif", "id": 1}, {"file_name": "val_115.tif", "height": 512, "width": 512, "segm_file": "val_115.tif", "id": 2}, {"file_name": "val_673.tif", "height": 512, "width": 512, "segm_file": "val_673.tif", "id": 3}, {"file_name": "val_883.tif", "height": 512, "width": 512, "segm_file": "val_883.tif", "id": 4}, {"file_name": "val_77.tif", "height": 512, "width": 512, "segm_file": "val_77.tif", "id": 5}, {"file_name": "val_63.tif", "height": 512, "width": 512, "segm_file": "val_63.tif", "id": 6}, {"file_name": "val_129.tif", "height": 512, "width": 512, "segm_file": "val_129.tif", "id": 7}, {"file_name": "val_897.tif", "height": 512, "width": 512, "segm_file": "val_897.tif", "id": 8}, {"file_name": "val_88.tif", "height": 512, "width": 512, "segm_file": "val_88.tif", "id": 9}, {"file_name": "val_868.tif", "height": 512, "width": 512, "segm_file": "val_868.tif", "id": 10}, {"file_name": "val_840.tif", "height": 512, "width": 512, "segm_file": "val_840.tif", "id": 11}, {"file_name": "val_698.tif", "height": 512, "width": 512, "segm_file": "val_698.tif", "id": 12}, {"file_name": "val_854.tif", "height": 512, "width": 512, "segm_file": "val_854.tif", "id": 13}, {"file_name": "val_303.tif", "height": 512, "width": 512, "segm_file": "val_303.tif", "id": 14}, {"file_name": "val_465.tif", "height": 512, "width": 512, "segm_file": "val_465.tif", "id": 15}, {"file_name": "val_471.tif", "height": 512, "width": 512, "segm_file": "val_471.tif", "id": 16}, {"file_name": "val_317.tif", "height": 512, "width": 512, "segm_file": "val_317.tif", "id": 17}, {"file_name": "val_459.tif", "height": 512, "width": 512, "segm_file": "val_459.tif", "id": 18}, {"file_name": "val_1016.tif", "height": 512, "width": 512, "segm_file": "val_1016.tif", "id": 19}, {"file_name": "val_1002.tif", "height": 512, "width": 512, "segm_file": "val_1002.tif", "id": 20}, {"file_name": "val_277.tif", "height": 512, "width": 512, "segm_file": "val_277.tif", "id": 21}, {"file_name": "val_511.tif", "height": 512, "width": 512, "segm_file": "val_511.tif", "id": 22}, {"file_name": "val_505.tif", "height": 512, "width": 512, "segm_file": "val_505.tif", "id": 23}, {"file_name": "val_263.tif",
"height": 512, "width": 512, "segm_file": "val_263.tif", "id": 24}, {"file_name": "val_539.tif", "height": 512, "width": 512, "segm_file": "val_539.tif", "id": 25}, {"file_name": "val_288.tif", "height": 512, "width": 512, "segm_file": "val_288.tif", "id": 26}, {"file_name": "val_713.tif", "height": 512, "width": 512, "segm_file": "val_713.tif", "id": 27}, {"file_name": "val_707.tif", "height": 512, "width": 512, "segm_file": "val_707.tif", "id": 28}, {"file_name": "val_908.tif", "height": 512, "width": 512, "segm_file": "val_908.tif", "id": 29}, {"file_name": "val_934.tif", "height": 512, "width": 512, "segm_file": "val_934.tif", "id": 30}, {"file_name": "val_920.tif", "height": 512, "width": 512, "segm_file": "val_920.tif", "id": 31}, {"file_name": "val_921.tif", "height": 512, "width": 512, "segm_file": "val_921.tif", "id": 32}, {"file_name": "val_935.tif", "height": 512, "width": 512, "segm_file": "val_935.tif", "id": 33}, {"file_name": "val_909.tif", "height": 512, "width": 512, "segm_file": "val_909.tif", "id": 34}, {"file_name": "val_706.tif", "height": 512, "width": 512, "segm_file": "val_706.tif", "id": 35}, {"file_name": "val_712.tif", "height": 512, "width": 512, "segm_file": "val_712.tif", "id": 36}, {"file_name": "val_289.tif", "height": 512, "width": 512, "segm_file": "val_289.tif", "id": 37}, {"file_name": "val_538.tif", "height": 512, "width": 512, "segm_file": "val_538.tif", "id": 38}, {"file_name": "val_504.tif", "height": 512, "width": 512, "segm_file": "val_504.tif", "id": 39}, {"file_name": "val_262.tif", "height": 512, "width": 512, "segm_file": "val_262.tif", "id": 40}, {"file_name": "val_276.tif", "height": 512, "width": 512, "segm_file": "val_276.tif", "id": 41}, {"file_name": "val_510.tif", "height": 512, "width": 512, "segm_file": "val_510.tif", "id": 42}, {"file_name": "val_1003.tif", "height": 512, "width": 512, "segm_file": "val_1003.tif", "id": 43}, {"file_name": "val_1017.tif", "height": 512, "width": 512, "segm_file": "val_1017.tif", "id": 44}, {"file_name": "val_458.tif", "height": 512, "width": 512, "segm_file": "val_458.tif", "id": 45}, {"file_name": "val_470.tif", "height": 512, "width": 512, "segm_file": "val_470.tif", "id": 46}, {"file_name": "val_316.tif", "height": 512, "width": 512, "segm_file": "val_316.tif", "id": 47}, {"file_name": "val_302.tif", "height": 512, "width": 512, "segm_file": "val_302.tif", "id": 48}, {"file_name": "val_464.tif", "height": 512, "width": 512, "segm_file": "val_464.tif", "id": 49}, {"file_name": "val_855.tif", "height": 512, "width": 512, "segm_file": "val_855.tif", "id": 50}, {"file_name": "val_699.tif", "height": 512, "width": 512, "segm_file": "val_699.tif", "id": 51}, {"file_name": "val_841.tif", "height": 512, "width": 512, "segm_file": "val_841.tif", "id": 52}, {"file_name": "val_869.tif", "height": 512, "width": 512, "segm_file": "val_869.tif", "id": 53}, {"file_name": "val_89.tif", "height": 512, "width": 512, "segm_file": "val_89.tif", "id": 54}, {"file_name": "val_128.tif", "height": 512, "width": 512, "segm_file": "val_128.tif", "id": 55}, {"file_name": "val_62.tif", "height": 512, "width": 512, "segm_file": "val_62.tif", "id": 56}, {"file_name": "val_896.tif", "height": 512, "width": 512, "segm_file": "val_896.tif", "id": 57}, {"file_name": "val_882.tif", "height": 512, "width": 512, "segm_file": "val_882.tif", "id": 58}, {"file_name": "val_76.tif", "height": 512, "width": 512, "segm_file": "val_76.tif", "id": 59}, {"file_name": "val_114.tif", "height": 512, "width": 512, "segm_file": "val_114.tif", "id": 
60}, {"file_name": "val_672.tif", "height": 512, "width": 512, "segm_file": "val_672.tif", "id": 61}, {"file_name": "val_666.tif", "height": 512, "width": 512, "segm_file": "val_666.tif", "id": 62}, {"file_name": "val_100.tif", "height": 512, "width": 512, "segm_file": "val_100.tif", "id": 63}, {"file_name": "val_670.tif", "height": 512, "width": 512, "segm_file": "val_670.tif", "id": 64}, {"file_name": "val_116.tif", "height": 512, "width": 512, "segm_file": "val_116.tif", "id": 65}, {"file_name": "val_48.tif", "height": 512, "width": 512, "segm_file": "val_48.tif", "id": 66}, {"file_name": "val_102.tif", "height": 512, "width": 512, "segm_file": "val_102.tif", "id": 67}, {"file_name": "val_664.tif", "height": 512, "width": 512, "segm_file": "val_664.tif", "id": 68}, {"file_name": "val_894.tif", "height": 512, "width": 512, "segm_file": "val_894.tif", "id": 69}, {"file_name": "val_60.tif", "height": 512, "width": 512, "segm_file": "val_60.tif", "id": 70}, {"file_name": "val_74.tif", "height": 512, "width": 512, "segm_file": "val_74.tif", "id": 71}, {"file_name": "val_658.tif", "height": 512, "width": 512, "segm_file": "val_658.tif", "id": 72}, {"file_name": "val_880.tif", "height": 512, "width": 512, "segm_file": "val_880.tif", "id": 73}, {"file_name": "val_857.tif", "height": 512, "width": 512, "segm_file": "val_857.tif", "id": 74}, {"file_name": "val_843.tif", "height": 512, "width": 512, "segm_file": "val_843.tif", "id": 75}, {"file_name": "val_314.tif", "height": 512, "width": 512, "segm_file": "val_314.tif", "id": 76}, {"file_name": "val_472.tif", "height": 512, "width": 512, "segm_file": "val_472.tif", "id": 77}, {"file_name": "val_466.tif", "height": 512, "width": 512, "segm_file": "val_466.tif", "id": 78}, {"file_name": "val_300.tif", "height": 512, "width": 512, "segm_file": "val_300.tif", "id": 79}, {"file_name": "val_328.tif", "height": 512, "width": 512, "segm_file": "val_328.tif", "id": 80}, {"file_name": "val_1001.tif", "height": 512, "width": 512, "segm_file": "val_1001.tif", "id": 81}, {"file_name": "val_1015.tif", "height": 512, "width": 512, "segm_file": "val_1015.tif", "id": 82}, {"file_name": "val_1029.tif", "height": 512, "width": 512, "segm_file": "val_1029.tif", "id": 83}, {"file_name": "val_499.tif", "height": 512, "width": 512, "segm_file": "val_499.tif", "id": 84}, {"file_name": "val_260.tif", "height": 512, "width": 512, "segm_file": "val_260.tif", "id": 85}, {"file_name": "val_506.tif", "height": 512, "width": 512, "segm_file": "val_506.tif", "id": 86}, {"file_name": "val_512.tif", "height": 512, "width": 512, "segm_file": "val_512.tif", "id": 87}, {"file_name": "val_274.tif", "height": 512, "width": 512, "segm_file": "val_274.tif", "id": 88}, {"file_name": "val_248.tif", "height": 512, "width": 512, "segm_file": "val_248.tif", "id": 89}, {"file_name": "val_704.tif", "height": 512, "width": 512, "segm_file": "val_704.tif", "id": 90}, {"file_name": "val_710.tif", "height": 512, "width": 512, "segm_file": "val_710.tif", "id": 91}, {"file_name": "val_738.tif", "height": 512, "width": 512, "segm_file": "val_738.tif", "id": 92}, {"file_name": "val_923.tif", "height": 512, "width": 512, "segm_file": "val_923.tif", "id": 93}, {"file_name": "val_937.tif", "height": 512, "width": 512, "segm_file": "val_937.tif", "id": 94}, {"file_name": "val_936.tif", "height": 512, "width": 512, "segm_file": "val_936.tif", "id": 95}, {"file_name": "val_922.tif", "height": 512, "width": 512, "segm_file": "val_922.tif", "id": 96}, {"file_name": "val_739.tif", "height": 512, "width": 512, 
"segm_file": "val_739.tif", "id": 97}, {"file_name": "val_711.tif", "height": 512, "width": 512, "segm_file": "val_711.tif", "id": 98}, {"file_name": "val_705.tif", "height": 512, "width": 512, "segm_file": "val_705.tif", "id": 99}, {"file_name": "val_249.tif", "height": 512, "width": 512, "segm_file": "val_249.tif", "id": 100}, {"file_name": "val_513.tif", "height": 512, "width": 512, "segm_file": "val_513.tif", "id": 101}, {"file_name": "val_275.tif", "height": 512, "width": 512, "segm_file": "val_275.tif", "id": 102}, {"file_name": "val_261.tif", "height": 512, "width": 512, "segm_file": "val_261.tif", "id": 103}, {"file_name": "val_507.tif", "height": 512, "width": 512, "segm_file": "val_507.tif", "id": 104}, {"file_name": "val_498.tif", "height": 512, "width": 512, "segm_file": "val_498.tif", "id": 105}, {"file_name": "val_1028.tif", "height": 512, "width": 512, "segm_file": "val_1028.tif", "id": 106}, {"file_name": "val_1014.tif", "height": 512, "width": 512, "segm_file": "val_1014.tif", "id": 107}, {"file_name": "val_1000.tif", "height": 512, "width": 512, "segm_file": "val_1000.tif", "id": 108}, {"file_name": "val_329.tif", "height": 512, "width": 512, "segm_file": "val_329.tif", "id": 109}, {"file_name": "val_467.tif", "height": 512, "width": 512, "segm_file": "val_467.tif", "id": 110}, {"file_name": "val_301.tif", "height": 512, "width": 512, "segm_file": "val_301.tif", "id": 111}, {"file_name": "val_315.tif", "height": 512, "width": 512, "segm_file": "val_315.tif", "id": 112}, {"file_name": "val_473.tif", "height": 512, "width": 512, "segm_file": "val_473.tif", "id": 113}, {"file_name": "val_842.tif", "height": 512, "width": 512, "segm_file": "val_842.tif", "id": 114}, {"file_name": "val_856.tif", "height": 512, "width": 512, "segm_file": "val_856.tif", "id": 115}, {"file_name": "val_75.tif", "height": 512, "width": 512, "segm_file": "val_75.tif", "id": 116}, {"file_name": "val_881.tif", "height": 512, "width": 512, "segm_file": "val_881.tif", "id": 117}, {"file_name": "val_659.tif", "height": 512, "width": 512, "segm_file": "val_659.tif", "id": 118}, {"file_name": "val_895.tif", "height": 512, "width": 512, "segm_file": "val_895.tif", "id": 119}, {"file_name": "val_61.tif", "height": 512, "width": 512, "segm_file": "val_61.tif", "id": 120}, {"file_name": "val_103.tif", "height": 512, "width": 512, "segm_file": "val_103.tif", "id": 121}, {"file_name": "val_49.tif", "height": 512, "width": 512, "segm_file": "val_49.tif", "id": 122}, {"file_name": "val_665.tif", "height": 512, "width": 512, "segm_file": "val_665.tif", "id": 123}, {"file_name": "val_671.tif", "height": 512, "width": 512, "segm_file": "val_671.tif", "id": 124}, {"file_name": "val_117.tif", "height": 512, "width": 512, "segm_file": "val_117.tif", "id": 125}, {"file_name": "val_65.tif", "height": 512, "width": 512, "segm_file": "val_65.tif", "id": 126}, {"file_name": "val_649.tif", "height": 512, "width": 512, "segm_file": "val_649.tif", "id": 127}, {"file_name": "val_891.tif", "height": 512, "width": 512, "segm_file": "val_891.tif", "id": 128}, {"file_name": "val_885.tif", "height": 512, "width": 512, "segm_file": "val_885.tif", "id": 129}, {"file_name": "val_71.tif", "height": 512, "width": 512, "segm_file": "val_71.tif", "id": 130}, {"file_name": "val_59.tif", "height": 512, "width": 512, "segm_file": "val_59.tif", "id": 131}, {"file_name": "val_113.tif", "height": 512, "width": 512, "segm_file": "val_113.tif", "id": 132}, {"file_name": "val_675.tif", "height": 512, "width": 512, "segm_file": "val_675.tif", "id": 
133}, {"file_name": "val_661.tif", "height": 512, "width": 512, "segm_file": "val_661.tif", "id": 134}, {"file_name": "val_107.tif", "height": 512, "width": 512, "segm_file": "val_107.tif", "id": 135}, {"file_name": "val_852.tif", "height": 512, "width": 512, "segm_file": "val_852.tif", "id": 136}, {"file_name": "val_846.tif", "height": 512, "width": 512, "segm_file": "val_846.tif", "id": 137}, {"file_name": "val_339.tif", "height": 512, "width": 512, "segm_file": "val_339.tif", "id": 138}, {"file_name": "val_477.tif", "height": 512, "width": 512, "segm_file": "val_477.tif", "id": 139}, {"file_name": "val_311.tif", "height": 512, "width": 512, "segm_file": "val_311.tif", "id": 140}, {"file_name": "val_305.tif", "height": 512, "width": 512, "segm_file": "val_305.tif", "id": 141}, {"file_name": "val_463.tif", "height": 512, "width": 512, "segm_file": "val_463.tif", "id": 142}, {"file_name": "val_488.tif", "height": 512, "width": 512, "segm_file": "val_488.tif", "id": 143}, {"file_name": "val_1004.tif", "height": 512, "width": 512, "segm_file": "val_1004.tif", "id": 144}, {"file_name": "val_1010.tif", "height": 512, "width": 512, "segm_file": "val_1010.tif", "id": 145}, {"file_name": "val_259.tif", "height": 512, "width": 512, "segm_file": "val_259.tif", "id": 146}, {"file_name": "val_503.tif", "height": 512, "width": 512, "segm_file": "val_503.tif", "id": 147}, {"file_name": "val_265.tif", "height": 512, "width": 512, "segm_file": "val_265.tif", "id": 148}, {"file_name": "val_271.tif", "height": 512, "width": 512, "segm_file": "val_271.tif", "id": 149}, {"file_name": "val_517.tif", "height": 512, "width": 512, "segm_file": "val_517.tif", "id": 150}, {"file_name": "val_729.tif", "height": 512, "width": 512, "segm_file": "val_729.tif", "id": 151}, {"file_name": "val_701.tif", "height": 512, "width": 512, "segm_file": "val_701.tif", "id": 152}, {"file_name": "val_715.tif", "height": 512, "width": 512, "segm_file": "val_715.tif", "id": 153}, {"file_name": "val_926.tif", "height": 512, "width": 512, "segm_file": "val_926.tif", "id": 154}, {"file_name": "val_932.tif", "height": 512, "width": 512, "segm_file": "val_932.tif", "id": 155}, {"file_name": "val_933.tif", "height": 512, "width": 512, "segm_file": "val_933.tif", "id": 156}, {"file_name": "val_927.tif", "height": 512, "width": 512, "segm_file": "val_927.tif", "id": 157}, {"file_name": "val_714.tif", "height": 512, "width": 512, "segm_file": "val_714.tif", "id": 158}, {"file_name": "val_700.tif", "height": 512, "width": 512, "segm_file": "val_700.tif", "id": 159}, {"file_name": "val_728.tif", "height": 512, "width": 512, "segm_file": "val_728.tif", "id": 160}, {"file_name": "val_270.tif", "height": 512, "width": 512, "segm_file": "val_270.tif", "id": 161}, {"file_name": "val_516.tif", "height": 512, "width": 512, "segm_file": "val_516.tif", "id": 162}, {"file_name": "val_502.tif", "height": 512, "width": 512, "segm_file": "val_502.tif", "id": 163}, {"file_name": "val_264.tif", "height": 512, "width": 512, "segm_file": "val_264.tif", "id": 164}, {"file_name": "val_258.tif", "height": 512, "width": 512, "segm_file": "val_258.tif", "id": 165}, {"file_name": "val_1011.tif", "height": 512, "width": 512, "segm_file": "val_1011.tif", "id": 166}, {"file_name": "val_1005.tif", "height": 512, "width": 512, "segm_file": "val_1005.tif", "id": 167}, {"file_name": "val_489.tif", "height": 512, "width": 512, "segm_file": "val_489.tif", "id": 168}, {"file_name": "val_304.tif", "height": 512, "width": 512, "segm_file": "val_304.tif", "id": 169}, 
{"file_name": "val_462.tif", "height": 512, "width": 512, "segm_file": "val_462.tif", "id": 170}, {"file_name": "val_476.tif", "height": 512, "width": 512, "segm_file": "val_476.tif", "id": 171}, {"file_name": "val_310.tif", "height": 512, "width": 512, "segm_file": "val_310.tif", "id": 172}, {"file_name": "val_338.tif", "height": 512, "width": 512, "segm_file": "val_338.tif", "id": 173}, {"file_name": "val_847.tif", "height": 512, "width": 512, "segm_file": "val_847.tif", "id": 174}, {"file_name": "val_853.tif", "height": 512, "width": 512, "segm_file": "val_853.tif", "id": 175}, {"file_name": "val_660.tif", "height": 512, "width": 512, "segm_file": "val_660.tif", "id": 176}, {"file_name": "val_106.tif", "height": 512, "width": 512, "segm_file": "val_106.tif", "id": 177}, {"file_name": "val_112.tif", "height": 512, "width": 512, "segm_file": "val_112.tif", "id": 178}, {"file_name": "val_58.tif", "height": 512, "width": 512, "segm_file": "val_58.tif", "id": 179}, {"file_name": "val_674.tif", "height": 512, "width": 512, "segm_file": "val_674.tif", "id": 180}, {"file_name": "val_884.tif", "height": 512, "width": 512, "segm_file": "val_884.tif", "id": 181}, {"file_name": "val_70.tif", "height": 512, "width": 512, "segm_file": "val_70.tif", "id": 182}, {"file_name": "val_64.tif", "height": 512, "width": 512, "segm_file": "val_64.tif", "id": 183}, {"file_name": "val_890.tif", "height": 512, "width": 512, "segm_file": "val_890.tif", "id": 184}, {"file_name": "val_648.tif", "height": 512, "width": 512, "segm_file": "val_648.tif", "id": 185}, {"file_name": "val_72.tif", "height": 512, "width": 512, "segm_file": "val_72.tif", "id": 186}, {"file_name": "val_138.tif", "height": 512, "width": 512, "segm_file": "val_138.tif", "id": 187}, {"file_name": "val_886.tif", "height": 512, "width": 512, "segm_file": "val_886.tif", "id": 188}, {"file_name": "val_892.tif", "height": 512, "width": 512, "segm_file": "val_892.tif", "id": 189}, {"file_name": "val_66.tif", "height": 512, "width": 512, "segm_file": "val_66.tif", "id": 190}, {"file_name": "val_104.tif", "height": 512, "width": 512, "segm_file": "val_104.tif", "id": 191}, {"file_name": "val_662.tif", "height": 512, "width": 512, "segm_file": "val_662.tif", "id": 192}, {"file_name": "val_676.tif", "height": 512, "width": 512, "segm_file": "val_676.tif", "id": 193}, {"file_name": "val_110.tif", "height": 512, "width": 512, "segm_file": "val_110.tif", "id": 194}, {"file_name": "val_845.tif", "height": 512, "width": 512, "segm_file": "val_845.tif", "id": 195}, {"file_name": "val_851.tif", "height": 512, "width": 512, "segm_file": "val_851.tif", "id": 196}, {"file_name": "val_689.tif", "height": 512, "width": 512, "segm_file": "val_689.tif", "id": 197}, {"file_name": "val_879.tif", "height": 512, "width": 512, "segm_file": "val_879.tif", "id": 198}, {"file_name": "val_99.tif", "height": 512, "width": 512, "segm_file": "val_99.tif", "id": 199}, {"file_name": "val_448.tif", "height": 512, "width": 512, "segm_file": "val_448.tif", "id": 200}, {"file_name": "val_460.tif", "height": 512, "width": 512, "segm_file": "val_460.tif", "id": 201}, {"file_name": "val_306.tif", "height": 512, "width": 512, "segm_file": "val_306.tif", "id": 202}, {"file_name": "val_312.tif", "height": 512, "width": 512, "segm_file": "val_312.tif", "id": 203}, {"file_name": "val_474.tif", "height": 512, "width": 512, "segm_file": "val_474.tif", "id": 204}, {"file_name": "val_1013.tif", "height": 512, "width": 512, "segm_file": "val_1013.tif", "id": 205}, {"file_name": "val_1007.tif", 
"height": 512, "width": 512, "segm_file": "val_1007.tif", "id": 206}, {"file_name": "val_528.tif", "height": 512, "width": 512, "segm_file": "val_528.tif", "id": 207}, {"file_name": "val_514.tif", "height": 512, "width": 512, "segm_file": "val_514.tif", "id": 208}, {"file_name": "val_272.tif", "height": 512, "width": 512, "segm_file": "val_272.tif", "id": 209}, {"file_name": "val_266.tif", "height": 512, "width": 512, "segm_file": "val_266.tif", "id": 210}, {"file_name": "val_500.tif", "height": 512, "width": 512, "segm_file": "val_500.tif", "id": 211}, {"file_name": "val_299.tif", "height": 512, "width": 512, "segm_file": "val_299.tif", "id": 212}, {"file_name": "val_716.tif", "height": 512, "width": 512, "segm_file": "val_716.tif", "id": 213}, {"file_name": "val_702.tif", "height": 512, "width": 512, "segm_file": "val_702.tif", "id": 214}, {"file_name": "val_931.tif", "height": 512, "width": 512, "segm_file": "val_931.tif", "id": 215}, {"file_name": "val_925.tif", "height": 512, "width": 512, "segm_file": "val_925.tif", "id": 216}, {"file_name": "val_919.tif", "height": 512, "width": 512, "segm_file": "val_919.tif", "id": 217}, {"file_name": "val_918.tif", "height": 512, "width": 512, "segm_file": "val_918.tif", "id": 218}, {"file_name": "val_924.tif", "height": 512, "width": 512, "segm_file": "val_924.tif", "id": 219}, {"file_name": "val_930.tif", "height": 512, "width": 512, "segm_file": "val_930.tif", "id": 220}, {"file_name": "val_703.tif", "height": 512, "width": 512, "segm_file": "val_703.tif", "id": 221}, {"file_name": "val_717.tif", "height": 512, "width": 512, "segm_file": "val_717.tif", "id": 222}, {"file_name": "val_298.tif", "height": 512, "width": 512, "segm_file": "val_298.tif", "id": 223}, {"file_name": "val_267.tif", "height": 512, "width": 512, "segm_file": "val_267.tif", "id": 224}, {"file_name": "val_501.tif", "height": 512, "width": 512, "segm_file": "val_501.tif", "id": 225}, {"file_name": "val_515.tif", "height": 512, "width": 512, "segm_file": "val_515.tif", "id": 226}, {"file_name": "val_273.tif", "height": 512, "width": 512, "segm_file": "val_273.tif", "id": 227}, {"file_name": "val_529.tif", "height": 512, "width": 512, "segm_file": "val_529.tif", "id": 228}, {"file_name": "val_1006.tif", "height": 512, "width": 512, "segm_file": "val_1006.tif", "id": 229}, {"file_name": "val_1012.tif", "height": 512, "width": 512, "segm_file": "val_1012.tif", "id": 230}, {"file_name": "val_313.tif", "height": 512, "width": 512, "segm_file": "val_313.tif", "id": 231}, {"file_name": "val_475.tif", "height": 512, "width": 512, "segm_file": "val_475.tif", "id": 232}, {"file_name": "val_461.tif", "height": 512, "width": 512, "segm_file": "val_461.tif", "id": 233}, {"file_name": "val_307.tif", "height": 512, "width": 512, "segm_file": "val_307.tif", "id": 234}, {"file_name": "val_449.tif", "height": 512, "width": 512, "segm_file": "val_449.tif", "id": 235}, {"file_name": "val_98.tif", "height": 512, "width": 512, "segm_file": "val_98.tif", "id": 236}, {"file_name": "val_878.tif", "height": 512, "width": 512, "segm_file": "val_878.tif", "id": 237}, {"file_name": "val_688.tif", "height": 512, "width": 512, "segm_file": "val_688.tif", "id": 238}, {"file_name": "val_850.tif", "height": 512, "width": 512, "segm_file": "val_850.tif", "id": 239}, {"file_name": "val_844.tif", "height": 512, "width": 512, "segm_file": "val_844.tif", "id": 240}, {"file_name": "val_677.tif", "height": 512, "width": 512, "segm_file": "val_677.tif", "id": 241}, {"file_name": "val_111.tif", "height": 512, "width": 
512, "segm_file": "val_111.tif", "id": 242}, {"file_name": "val_105.tif", "height": 512, "width": 512, "segm_file": "val_105.tif", "id": 243}, {"file_name": "val_663.tif", "height": 512, "width": 512, "segm_file": "val_663.tif", "id": 244}, {"file_name": "val_893.tif", "height": 512, "width": 512, "segm_file": "val_893.tif", "id": 245}, {"file_name": "val_67.tif", "height": 512, "width": 512, "segm_file": "val_67.tif", "id": 246}, {"file_name": "val_139.tif", "height": 512, "width": 512, "segm_file": "val_139.tif", "id": 247}, {"file_name": "val_73.tif", "height": 512, "width": 512, "segm_file": "val_73.tif", "id": 248}, {"file_name": "val_887.tif", "height": 512, "width": 512, "segm_file": "val_887.tif", "id": 249}, {"file_name": "val_28.tif", "height": 512, "width": 512, "segm_file": "val_28.tif", "id": 250}, {"file_name": "val_162.tif", "height": 512, "width": 512, "segm_file": "val_162.tif", "id": 251}, {"file_name": "val_604.tif", "height": 512, "width": 512, "segm_file": "val_604.tif", "id": 252}, {"file_name": "val_610.tif", "height": 512, "width": 512, "segm_file": "val_610.tif", "id": 253}, {"file_name": "val_176.tif", "height": 512, "width": 512, "segm_file": "val_176.tif", "id": 254}, {"file_name": "val_14.tif", "height": 512, "width": 512, "segm_file": "val_14.tif", "id": 255}, {"file_name": "val_638.tif", "height": 512, "width": 512, "segm_file": "val_638.tif", "id": 256}, {"file_name": "val_823.tif", "height": 512, "width": 512, "segm_file": "val_823.tif", "id": 257}, {"file_name": "val_837.tif", "height": 512, "width": 512, "segm_file": "val_837.tif", "id": 258}, {"file_name": "val_189.tif", "height": 512, "width": 512, "segm_file": "val_189.tif", "id": 259}, {"file_name": "val_406.tif", "height": 512, "width": 512, "segm_file": "val_406.tif", "id": 260}, {"file_name": "val_360.tif", "height": 512, "width": 512, "segm_file": "val_360.tif", "id": 261}, {"file_name": "val_374.tif", "height": 512, "width": 512, "segm_file": "val_374.tif", "id": 262}, {"file_name": "val_412.tif", "height": 512, "width": 512, "segm_file": "val_412.tif", "id": 263}, {"file_name": "val_348.tif", "height": 512, "width": 512, "segm_file": "val_348.tif", "id": 264}, {"file_name": "val_572.tif", "height": 512, "width": 512, "segm_file": "val_572.tif", "id": 265}, {"file_name": "val_214.tif", "height": 512, "width": 512, "segm_file": "val_214.tif", "id": 266}, {"file_name": "val_200.tif", "height": 512, "width": 512, "segm_file": "val_200.tif", "id": 267}, {"file_name": "val_566.tif", "height": 512, "width": 512, "segm_file": "val_566.tif", "id": 268}, {"file_name": "val_228.tif", "height": 512, "width": 512, "segm_file": "val_228.tif", "id": 269}, {"file_name": "val_599.tif", "height": 512, "width": 512, "segm_file": "val_599.tif", "id": 270}, {"file_name": "val_770.tif", "height": 512, "width": 512, "segm_file": "val_770.tif", "id": 271}, {"file_name": "val_764.tif", "height": 512, "width": 512, "segm_file": "val_764.tif", "id": 272}, {"file_name": "val_994.tif", "height": 512, "width": 512, "segm_file": "val_994.tif", "id": 273}, {"file_name": "val_980.tif", "height": 512, "width": 512, "segm_file": "val_980.tif", "id": 274}, {"file_name": "val_758.tif", "height": 512, "width": 512, "segm_file": "val_758.tif", "id": 275}, {"file_name": "val_957.tif", "height": 512, "width": 512, "segm_file": "val_957.tif", "id": 276}, {"file_name": "val_943.tif", "height": 512, "width": 512, "segm_file": "val_943.tif", "id": 277}, {"file_name": "val_942.tif", "height": 512, "width": 512, "segm_file": "val_942.tif", 
"id": 278}, {"file_name": "val_956.tif", "height": 512, "width": 512, "segm_file": "val_956.tif", "id": 279}, {"file_name": "val_759.tif", "height": 512, "width": 512, "segm_file": "val_759.tif", "id": 280}, {"file_name": "val_981.tif", "height": 512, "width": 512, "segm_file": "val_981.tif", "id": 281}, {"file_name": "val_995.tif", "height": 512, "width": 512, "segm_file": "val_995.tif", "id": 282}, {"file_name": "val_765.tif", "height": 512, "width": 512, "segm_file": "val_765.tif", "id": 283}, {"file_name": "val_771.tif", "height": 512, "width": 512, "segm_file": "val_771.tif", "id": 284}, {"file_name": "val_598.tif", "height": 512, "width": 512, "segm_file": "val_598.tif", "id": 285}, {"file_name": "val_229.tif", "height": 512, "width": 512, "segm_file": "val_229.tif", "id": 286}, {"file_name": "val_201.tif", "height": 512, "width": 512, "segm_file": "val_201.tif", "id": 287}, {"file_name": "val_567.tif", "height": 512, "width": 512, "segm_file": "val_567.tif", "id": 288}, {"file_name": "val_573.tif", "height": 512, "width": 512, "segm_file": "val_573.tif", "id": 289}, {"file_name": "val_215.tif", "height": 512, "width": 512, "segm_file": "val_215.tif", "id": 290}, {"file_name": "val_349.tif", "height": 512, "width": 512, "segm_file": "val_349.tif", "id": 291}, {"file_name": "val_375.tif", "height": 512, "width": 512, "segm_file": "val_375.tif", "id": 292}, {"file_name": "val_413.tif", "height": 512, "width": 512, "segm_file": "val_413.tif", "id": 293}, {"file_name": "val_407.tif", "height": 512, "width": 512, "segm_file": "val_407.tif", "id": 294}, {"file_name": "val_361.tif", "height": 512, "width": 512, "segm_file": "val_361.tif", "id": 295}, {"file_name": "val_836.tif", "height": 512, "width": 512, "segm_file": "val_836.tif", "id": 296}, {"file_name": "val_188.tif", "height": 512, "width": 512, "segm_file": "val_188.tif", "id": 297}, {"file_name": "val_822.tif", "height": 512, "width": 512, "segm_file": "val_822.tif", "id": 298}, {"file_name": "val_15.tif", "height": 512, "width": 512, "segm_file": "val_15.tif", "id": 299}, {"file_name": "val_639.tif", "height": 512, "width": 512, "segm_file": "val_639.tif", "id": 300}, {"file_name": "val_611.tif", "height": 512, "width": 512, "segm_file": "val_611.tif", "id": 301}, {"file_name": "val_177.tif", "height": 512, "width": 512, "segm_file": "val_177.tif", "id": 302}, {"file_name": "val_163.tif", "height": 512, "width": 512, "segm_file": "val_163.tif", "id": 303}, {"file_name": "val_29.tif", "height": 512, "width": 512, "segm_file": "val_29.tif", "id": 304}, {"file_name": "val_605.tif", "height": 512, "width": 512, "segm_file": "val_605.tif", "id": 305}, {"file_name": "val_175.tif", "height": 512, "width": 512, "segm_file": "val_175.tif", "id": 306}, {"file_name": "val_613.tif", "height": 512, "width": 512, "segm_file": "val_613.tif", "id": 307}, {"file_name": "val_607.tif", "height": 512, "width": 512, "segm_file": "val_607.tif", "id": 308}, {"file_name": "val_161.tif", "height": 512, "width": 512, "segm_file": "val_161.tif", "id": 309}, {"file_name": "val_149.tif", "height": 512, "width": 512, "segm_file": "val_149.tif", "id": 310}, {"file_name": "val_17.tif", "height": 512, "width": 512, "segm_file": "val_17.tif", "id": 311}, {"file_name": "val_808.tif", "height": 512, "width": 512, "segm_file": "val_808.tif", "id": 312}, {"file_name": "val_834.tif", "height": 512, "width": 512, "segm_file": "val_834.tif", "id": 313}, {"file_name": "val_820.tif", "height": 512, "width": 512, "segm_file": "val_820.tif", "id": 314}, {"file_name": 
"val_411.tif", "height": 512, "width": 512, "segm_file": "val_411.tif", "id": 315}, {"file_name": "val_377.tif", "height": 512, "width": 512, "segm_file": "val_377.tif", "id": 316}, {"file_name": "val_363.tif", "height": 512, "width": 512, "segm_file": "val_363.tif", "id": 317}, {"file_name": "val_405.tif", "height": 512, "width": 512, "segm_file": "val_405.tif", "id": 318}, {"file_name": "val_439.tif", "height": 512, "width": 512, "segm_file": "val_439.tif", "id": 319}, {"file_name": "val_388.tif", "height": 512, "width": 512, "segm_file": "val_388.tif", "id": 320}, {"file_name": "val_565.tif", "height": 512, "width": 512, "segm_file": "val_565.tif", "id": 321}, {"file_name": "val_203.tif", "height": 512, "width": 512, "segm_file": "val_203.tif", "id": 322}, {"file_name": "val_217.tif", "height": 512, "width": 512, "segm_file": "val_217.tif", "id": 323}, {"file_name": "val_571.tif", "height": 512, "width": 512, "segm_file": "val_571.tif", "id": 324}, {"file_name": "val_559.tif", "height": 512, "width": 512, "segm_file": "val_559.tif", "id": 325}, {"file_name": "val_767.tif", "height": 512, "width": 512, "segm_file": "val_767.tif", "id": 326}, {"file_name": "val_773.tif", "height": 512, "width": 512, "segm_file": "val_773.tif", "id": 327}, {"file_name": "val_983.tif", "height": 512, "width": 512, "segm_file": "val_983.tif", "id": 328}, {"file_name": "val_997.tif", "height": 512, "width": 512, "segm_file": "val_997.tif", "id": 329}, {"file_name": "val_968.tif", "height": 512, "width": 512, "segm_file": "val_968.tif", "id": 330}, {"file_name": "val_798.tif", "height": 512, "width": 512, "segm_file": "val_798.tif", "id": 331}, {"file_name": "val_940.tif", "height": 512, "width": 512, "segm_file": "val_940.tif", "id": 332}, {"file_name": "val_954.tif", "height": 512, "width": 512, "segm_file": "val_954.tif", "id": 333}, {"file_name": "val_955.tif", "height": 512, "width": 512, "segm_file": "val_955.tif", "id": 334}, {"file_name": "val_941.tif", "height": 512, "width": 512, "segm_file": "val_941.tif", "id": 335}, {"file_name": "val_799.tif", "height": 512, "width": 512, "segm_file": "val_799.tif", "id": 336}, {"file_name": "val_969.tif", "height": 512, "width": 512, "segm_file": "val_969.tif", "id": 337}, {"file_name": "val_996.tif", "height": 512, "width": 512, "segm_file": "val_996.tif", "id": 338}, {"file_name": "val_982.tif", "height": 512, "width": 512, "segm_file": "val_982.tif", "id": 339}, {"file_name": "val_772.tif", "height": 512, "width": 512, "segm_file": "val_772.tif", "id": 340}, {"file_name": "val_766.tif", "height": 512, "width": 512, "segm_file": "val_766.tif", "id": 341}, {"file_name": "val_558.tif", "height": 512, "width": 512, "segm_file": "val_558.tif", "id": 342}, {"file_name": "val_216.tif", "height": 512, "width": 512, "segm_file": "val_216.tif", "id": 343}, {"file_name": "val_570.tif", "height": 512, "width": 512, "segm_file": "val_570.tif", "id": 344}, {"file_name": "val_564.tif", "height": 512, "width": 512, "segm_file": "val_564.tif", "id": 345}, {"file_name": "val_202.tif", "height": 512, "width": 512, "segm_file": "val_202.tif", "id": 346}, {"file_name": "val_389.tif", "height": 512, "width": 512, "segm_file": "val_389.tif", "id": 347}, {"file_name": "val_438.tif", "height": 512, "width": 512, "segm_file": "val_438.tif", "id": 348}, {"file_name": "val_362.tif", "height": 512, "width": 512, "segm_file": "val_362.tif", "id": 349}, {"file_name": "val_404.tif", "height": 512, "width": 512, "segm_file": "val_404.tif", "id": 350}, {"file_name": "val_410.tif", "height": 
512, "width": 512, "segm_file": "val_410.tif", "id": 351}, {"file_name": "val_376.tif", "height": 512, "width": 512, "segm_file": "val_376.tif", "id": 352}, {"file_name": "val_821.tif", "height": 512, "width": 512, "segm_file": "val_821.tif", "id": 353}, {"file_name": "val_835.tif", "height": 512, "width": 512, "segm_file": "val_835.tif", "id": 354}, {"file_name": "val_809.tif", "height": 512, "width": 512, "segm_file": "val_809.tif", "id": 355}, {"file_name": "val_16.tif", "height": 512, "width": 512, "segm_file": "val_16.tif", "id": 356}, {"file_name": "val_148.tif", "height": 512, "width": 512, "segm_file": "val_148.tif", "id": 357}, {"file_name": "val_606.tif", "height": 512, "width": 512, "segm_file": "val_606.tif", "id": 358}, {"file_name": "val_160.tif", "height": 512, "width": 512, "segm_file": "val_160.tif", "id": 359}, {"file_name": "val_174.tif", "height": 512, "width": 512, "segm_file": "val_174.tif", "id": 360}, {"file_name": "val_612.tif", "height": 512, "width": 512, "segm_file": "val_612.tif", "id": 361}, {"file_name": "val_12.tif", "height": 512, "width": 512, "segm_file": "val_12.tif", "id": 362}, {"file_name": "val_158.tif", "height": 512, "width": 512, "segm_file": "val_158.tif", "id": 363}, {"file_name": "val_616.tif", "height": 512, "width": 512, "segm_file": "val_616.tif", "id": 364}, {"file_name": "val_170.tif", "height": 512, "width": 512, "segm_file": "val_170.tif", "id": 365}, {"file_name": "val_164.tif", "height": 512, "width": 512, "segm_file": "val_164.tif", "id": 366}, {"file_name": "val_602.tif", "height": 512, "width": 512, "segm_file": "val_602.tif", "id": 367}, {"file_name": "val_831.tif", "height": 512, "width": 512, "segm_file": "val_831.tif", "id": 368}, {"file_name": "val_825.tif", "height": 512, "width": 512, "segm_file": "val_825.tif", "id": 369}, {"file_name": "val_819.tif", "height": 512, "width": 512, "segm_file": "val_819.tif", "id": 370}, {"file_name": "val_428.tif", "height": 512, "width": 512, "segm_file": "val_428.tif", "id": 371}, {"file_name": "val_372.tif", "height": 512, "width": 512, "segm_file": "val_372.tif", "id": 372}, {"file_name": "val_414.tif", "height": 512, "width": 512, "segm_file": "val_414.tif", "id": 373}, {"file_name": "val_400.tif", "height": 512, "width": 512, "segm_file": "val_400.tif", "id": 374}, {"file_name": "val_366.tif", "height": 512, "width": 512, "segm_file": "val_366.tif", "id": 375}, {"file_name": "val_399.tif", "height": 512, "width": 512, "segm_file": "val_399.tif", "id": 376}, {"file_name": "val_548.tif", "height": 512, "width": 512, "segm_file": "val_548.tif", "id": 377}, {"file_name": "val_206.tif", "height": 512, "width": 512, "segm_file": "val_206.tif", "id": 378}, {"file_name": "val_560.tif", "height": 512, "width": 512, "segm_file": "val_560.tif", "id": 379}, {"file_name": "val_574.tif", "height": 512, "width": 512, "segm_file": "val_574.tif", "id": 380}, {"file_name": "val_212.tif", "height": 512, "width": 512, "segm_file": "val_212.tif", "id": 381}, {"file_name": "val_986.tif", "height": 512, "width": 512, "segm_file": "val_986.tif", "id": 382}, {"file_name": "val_992.tif", "height": 512, "width": 512, "segm_file": "val_992.tif", "id": 383}, {"file_name": "val_762.tif", "height": 512, "width": 512, "segm_file": "val_762.tif", "id": 384}, {"file_name": "val_8.tif", "height": 512, "width": 512, "segm_file": "val_8.tif", "id": 385}, {"file_name": "val_776.tif", "height": 512, "width": 512, "segm_file": "val_776.tif", "id": 386}, {"file_name": "val_945.tif", "height": 512, "width": 512, "segm_file": 
"val_945.tif", "id": 387}, {"file_name": "val_789.tif", "height": 512, "width": 512, "segm_file": "val_789.tif", "id": 388}, {"file_name": "val_951.tif", "height": 512, "width": 512, "segm_file": "val_951.tif", "id": 389}, {"file_name": "val_979.tif", "height": 512, "width": 512, "segm_file": "val_979.tif", "id": 390}, {"file_name": "val_978.tif", "height": 512, "width": 512, "segm_file": "val_978.tif", "id": 391}, {"file_name": "val_950.tif", "height": 512, "width": 512, "segm_file": "val_950.tif", "id": 392}, {"file_name": "val_788.tif", "height": 512, "width": 512, "segm_file": "val_788.tif", "id": 393}, {"file_name": "val_944.tif", "height": 512, "width": 512, "segm_file": "val_944.tif", "id": 394}, {"file_name": "val_777.tif", "height": 512, "width": 512, "segm_file": "val_777.tif", "id": 395}, {"file_name": "val_9.tif", "height": 512, "width": 512, "segm_file": "val_9.tif", "id": 396}, {"file_name": "val_763.tif", "height": 512, "width": 512, "segm_file": "val_763.tif", "id": 397}, {"file_name": "val_993.tif", "height": 512, "width": 512, "segm_file": "val_993.tif", "id": 398}, {"file_name": "val_987.tif", "height": 512, "width": 512, "segm_file": "val_987.tif", "id": 399}, {"file_name": "val_575.tif", "height": 512, "width": 512, "segm_file": "val_575.tif", "id": 400}, {"file_name": "val_213.tif", "height": 512, "width": 512, "segm_file": "val_213.tif", "id": 401}, {"file_name": "val_207.tif", "height": 512, "width": 512, "segm_file": "val_207.tif", "id": 402}, {"file_name": "val_561.tif", "height": 512, "width": 512, "segm_file": "val_561.tif", "id": 403}, {"file_name": "val_549.tif", "height": 512, "width": 512, "segm_file": "val_549.tif", "id": 404}, {"file_name": "val_398.tif", "height": 512, "width": 512, "segm_file": "val_398.tif", "id": 405}, {"file_name": "val_401.tif", "height": 512, "width": 512, "segm_file": "val_401.tif", "id": 406}, {"file_name": "val_367.tif", "height": 512, "width": 512, "segm_file": "val_367.tif", "id": 407}, {"file_name": "val_373.tif", "height": 512, "width": 512, "segm_file": "val_373.tif", "id": 408}, {"file_name": "val_415.tif", "height": 512, "width": 512, "segm_file": "val_415.tif", "id": 409}, {"file_name": "val_429.tif", "height": 512, "width": 512, "segm_file": "val_429.tif", "id": 410}, {"file_name": "val_818.tif", "height": 512, "width": 512, "segm_file": "val_818.tif", "id": 411}, {"file_name": "val_824.tif", "height": 512, "width": 512, "segm_file": "val_824.tif", "id": 412}, {"file_name": "val_830.tif", "height": 512, "width": 512, "segm_file": "val_830.tif", "id": 413}, {"file_name": "val_165.tif", "height": 512, "width": 512, "segm_file": "val_165.tif", "id": 414}, {"file_name": "val_603.tif", "height": 512, "width": 512, "segm_file": "val_603.tif", "id": 415}, {"file_name": "val_617.tif", "height": 512, "width": 512, "segm_file": "val_617.tif", "id": 416}, {"file_name": "val_171.tif", "height": 512, "width": 512, "segm_file": "val_171.tif", "id": 417}, {"file_name": "val_159.tif", "height": 512, "width": 512, "segm_file": "val_159.tif", "id": 418}, {"file_name": "val_13.tif", "height": 512, "width": 512, "segm_file": "val_13.tif", "id": 419}, {"file_name": "val_11.tif", "height": 512, "width": 512, "segm_file": "val_11.tif", "id": 420}, {"file_name": "val_629.tif", "height": 512, "width": 512, "segm_file": "val_629.tif", "id": 421}, {"file_name": "val_601.tif", "height": 512, "width": 512, "segm_file": "val_601.tif", "id": 422}, {"file_name": "val_167.tif", "height": 512, "width": 512, "segm_file": "val_167.tif", "id": 423}, 
{"file_name": "val_39.tif", "height": 512, "width": 512, "segm_file": "val_39.tif", "id": 424}, {"file_name": "val_173.tif", "height": 512, "width": 512, "segm_file": "val_173.tif", "id": 425}, {"file_name": "val_615.tif", "height": 512, "width": 512, "segm_file": "val_615.tif", "id": 426}, {"file_name": "val_826.tif", "height": 512, "width": 512, "segm_file": "val_826.tif", "id": 427}, {"file_name": "val_198.tif", "height": 512, "width": 512, "segm_file": "val_198.tif", "id": 428}, {"file_name": "val_832.tif", "height": 512, "width": 512, "segm_file": "val_832.tif", "id": 429}, {"file_name": "val_359.tif", "height": 512, "width": 512, "segm_file": "val_359.tif", "id": 430}, {"file_name": "val_365.tif", "height": 512, "width": 512, "segm_file": "val_365.tif", "id": 431}, {"file_name": "val_403.tif", "height": 512, "width": 512, "segm_file": "val_403.tif", "id": 432}, {"file_name": "val_417.tif", "height": 512, "width": 512, "segm_file": "val_417.tif", "id": 433}, {"file_name": "val_371.tif", "height": 512, "width": 512, "segm_file": "val_371.tif", "id": 434}, {"file_name": "val_239.tif", "height": 512, "width": 512, "segm_file": "val_239.tif", "id": 435}, {"file_name": "val_211.tif", "height": 512, "width": 512, "segm_file": "val_211.tif", "id": 436}, {"file_name": "val_577.tif", "height": 512, "width": 512, "segm_file": "val_577.tif", "id": 437}, {"file_name": "val_563.tif", "height": 512, "width": 512, "segm_file": "val_563.tif", "id": 438}, {"file_name": "val_205.tif", "height": 512, "width": 512, "segm_file": "val_205.tif", "id": 439}, {"file_name": "val_588.tif", "height": 512, "width": 512, "segm_file": "val_588.tif", "id": 440}, {"file_name": "val_991.tif", "height": 512, "width": 512, "segm_file": "val_991.tif", "id": 441}, {"file_name": "val_749.tif", "height": 512, "width": 512, "segm_file": "val_749.tif", "id": 442}, {"file_name": "val_985.tif", "height": 512, "width": 512, "segm_file": "val_985.tif", "id": 443}, {"file_name": "val_775.tif", "height": 512, "width": 512, "segm_file": "val_775.tif", "id": 444}, {"file_name": "val_761.tif", "height": 512, "width": 512, "segm_file": "val_761.tif", "id": 445}, {"file_name": "val_952.tif", "height": 512, "width": 512, "segm_file": "val_952.tif", "id": 446}, {"file_name": "val_946.tif", "height": 512, "width": 512, "segm_file": "val_946.tif", "id": 447}, {"file_name": "val_947.tif", "height": 512, "width": 512, "segm_file": "val_947.tif", "id": 448}, {"file_name": "val_953.tif", "height": 512, "width": 512, "segm_file": "val_953.tif", "id": 449}, {"file_name": "val_760.tif", "height": 512, "width": 512, "segm_file": "val_760.tif", "id": 450}, {"file_name": "val_774.tif", "height": 512, "width": 512, "segm_file": "val_774.tif", "id": 451}, {"file_name": "val_984.tif", "height": 512, "width": 512, "segm_file": "val_984.tif", "id": 452}, {"file_name": "val_748.tif", "height": 512, "width": 512, "segm_file": "val_748.tif", "id": 453}, {"file_name": "val_990.tif", "height": 512, "width": 512, "segm_file": "val_990.tif", "id": 454}, {"file_name": "val_589.tif", "height": 512, "width": 512, "segm_file": "val_589.tif", "id": 455}, {"file_name": "val_562.tif", "height": 512, "width": 512, "segm_file": "val_562.tif", "id": 456}, {"file_name": "val_204.tif", "height": 512, "width": 512, "segm_file": "val_204.tif", "id": 457}, {"file_name": "val_210.tif", "height": 512, "width": 512, "segm_file": "val_210.tif", "id": 458}, {"file_name": "val_576.tif", "height": 512, "width": 512, "segm_file": "val_576.tif", "id": 459}, {"file_name": "val_238.tif", 
"height": 512, "width": 512, "segm_file": "val_238.tif", "id": 460}, {"file_name": "val_416.tif", "height": 512, "width": 512, "segm_file": "val_416.tif", "id": 461}, {"file_name": "val_370.tif", "height": 512, "width": 512, "segm_file": "val_370.tif", "id": 462}, {"file_name": "val_364.tif", "height": 512, "width": 512, "segm_file": "val_364.tif", "id": 463}, {"file_name": "val_402.tif", "height": 512, "width": 512, "segm_file": "val_402.tif", "id": 464}, {"file_name": "val_358.tif", "height": 512, "width": 512, "segm_file": "val_358.tif", "id": 465}, {"file_name": "val_833.tif", "height": 512, "width": 512, "segm_file": "val_833.tif", "id": 466}, {"file_name": "val_827.tif", "height": 512, "width": 512, "segm_file": "val_827.tif", "id": 467}, {"file_name": "val_199.tif", "height": 512, "width": 512, "segm_file": "val_199.tif", "id": 468}, {"file_name": "val_172.tif", "height": 512, "width": 512, "segm_file": "val_172.tif", "id": 469}, {"file_name": "val_38.tif", "height": 512, "width": 512, "segm_file": "val_38.tif", "id": 470}, {"file_name": "val_614.tif", "height": 512, "width": 512, "segm_file": "val_614.tif", "id": 471}, {"file_name": "val_600.tif", "height": 512, "width": 512, "segm_file": "val_600.tif", "id": 472}, {"file_name": "val_166.tif", "height": 512, "width": 512, "segm_file": "val_166.tif", "id": 473}, {"file_name": "val_628.tif", "height": 512, "width": 512, "segm_file": "val_628.tif", "id": 474}, {"file_name": "val_10.tif", "height": 512, "width": 512, "segm_file": "val_10.tif", "id": 475}, {"file_name": "val_143.tif", "height": 512, "width": 512, "segm_file": "val_143.tif", "id": 476}, {"file_name": "val_625.tif", "height": 512, "width": 512, "segm_file": "val_625.tif", "id": 477}, {"file_name": "val_631.tif", "height": 512, "width": 512, "segm_file": "val_631.tif", "id": 478}, {"file_name": "val_157.tif", "height": 512, "width": 512, "segm_file": "val_157.tif", "id": 479}, {"file_name": "val_35.tif", "height": 512, "width": 512, "segm_file": "val_35.tif", "id": 480}, {"file_name": "val_619.tif", "height": 512, "width": 512, "segm_file": "val_619.tif", "id": 481}, {"file_name": "val_21.tif", "height": 512, "width": 512, "segm_file": "val_21.tif", "id": 482}, {"file_name": "val_180.tif", "height": 512, "width": 512, "segm_file": "val_180.tif", "id": 483}, {"file_name": "val_194.tif", "height": 512, "width": 512, "segm_file": "val_194.tif", "id": 484}, {"file_name": "val_802.tif", "height": 512, "width": 512, "segm_file": "val_802.tif", "id": 485}, {"file_name": "val_816.tif", "height": 512, "width": 512, "segm_file": "val_816.tif", "id": 486}, {"file_name": "val_427.tif", "height": 512, "width": 512, "segm_file": "val_427.tif", "id": 487}, {"file_name": "val_341.tif", "height": 512, "width": 512, "segm_file": "val_341.tif", "id": 488}, {"file_name": "val_355.tif", "height": 512, "width": 512, "segm_file": "val_355.tif", "id": 489}, {"file_name": "val_433.tif", "height": 512, "width": 512, "segm_file": "val_433.tif", "id": 490}, {"file_name": "val_369.tif", "height": 512, "width": 512, "segm_file": "val_369.tif", "id": 491}, {"file_name": "val_382.tif", "height": 512, "width": 512, "segm_file": "val_382.tif", "id": 492}, {"file_name": "val_396.tif", "height": 512, "width": 512, "segm_file": "val_396.tif", "id": 493}, {"file_name": "val_553.tif", "height": 512, "width": 512, "segm_file": "val_553.tif", "id": 494}, {"file_name": "val_235.tif", "height": 512, "width": 512, "segm_file": "val_235.tif", "id": 495}, {"file_name": "val_221.tif", "height": 512, "width": 512, 
"segm_file": "val_221.tif", "id": 496}, {"file_name": "val_547.tif", "height": 512, "width": 512, "segm_file": "val_547.tif", "id": 497}, {"file_name": "val_209.tif", "height": 512, "width": 512, "segm_file": "val_209.tif", "id": 498}, {"file_name": "val_590.tif", "height": 512, "width": 512, "segm_file": "val_590.tif", "id": 499}, {"file_name": "val_584.tif", "height": 512, "width": 512, "segm_file": "val_584.tif", "id": 500}, {"file_name": "val_989.tif", "height": 512, "width": 512, "segm_file": "val_989.tif", "id": 501}, {"file_name": "val_751.tif", "height": 512, "width": 512, "segm_file": "val_751.tif", "id": 502}, {"file_name": "val_745.tif", "height": 512, "width": 512, "segm_file": "val_745.tif", "id": 503}, {"file_name": "val_779.tif", "height": 512, "width": 512, "segm_file": "val_779.tif", "id": 504}, {"file_name": "val_7.tif", "height": 512, "width": 512, "segm_file": "val_7.tif", "id": 505}, {"file_name": "val_792.tif", "height": 512, "width": 512, "segm_file": "val_792.tif", "id": 506}, {"file_name": "val_786.tif", "height": 512, "width": 512, "segm_file": "val_786.tif", "id": 507}, {"file_name": "val_976.tif", "height": 512, "width": 512, "segm_file": "val_976.tif", "id": 508}, {"file_name": "val_962.tif", "height": 512, "width": 512, "segm_file": "val_962.tif", "id": 509}, {"file_name": "val_963.tif", "height": 512, "width": 512, "segm_file": "val_963.tif", "id": 510}, {"file_name": "val_977.tif", "height": 512, "width": 512, "segm_file": "val_977.tif", "id": 511}, {"file_name": "val_787.tif", "height": 512, "width": 512, "segm_file": "val_787.tif", "id": 512}, {"file_name": "val_793.tif", "height": 512, "width": 512, "segm_file": "val_793.tif", "id": 513}, {"file_name": "val_6.tif", "height": 512, "width": 512, "segm_file": "val_6.tif", "id": 514}, {"file_name": "val_778.tif", "height": 512, "width": 512, "segm_file": "val_778.tif", "id": 515}, {"file_name": "val_744.tif", "height": 512, "width": 512, "segm_file": "val_744.tif", "id": 516}, {"file_name": "val_750.tif", "height": 512, "width": 512, "segm_file": "val_750.tif", "id": 517}, {"file_name": "val_988.tif", "height": 512, "width": 512, "segm_file": "val_988.tif", "id": 518}, {"file_name": "val_585.tif", "height": 512, "width": 512, "segm_file": "val_585.tif", "id": 519}, {"file_name": "val_591.tif", "height": 512, "width": 512, "segm_file": "val_591.tif", "id": 520}, {"file_name": "val_208.tif", "height": 512, "width": 512, "segm_file": "val_208.tif", "id": 521}, {"file_name": "val_220.tif", "height": 512, "width": 512, "segm_file": "val_220.tif", "id": 522}, {"file_name": "val_546.tif", "height": 512, "width": 512, "segm_file": "val_546.tif", "id": 523}, {"file_name": "val_552.tif", "height": 512, "width": 512, "segm_file": "val_552.tif", "id": 524}, {"file_name": "val_234.tif", "height": 512, "width": 512, "segm_file": "val_234.tif", "id": 525}, {"file_name": "val_397.tif", "height": 512, "width": 512, "segm_file": "val_397.tif", "id": 526}, {"file_name": "val_383.tif", "height": 512, "width": 512, "segm_file": "val_383.tif", "id": 527}, {"file_name": "val_368.tif", "height": 512, "width": 512, "segm_file": "val_368.tif", "id": 528}, {"file_name": "val_354.tif", "height": 512, "width": 512, "segm_file": "val_354.tif", "id": 529}, {"file_name": "val_432.tif", "height": 512, "width": 512, "segm_file": "val_432.tif", "id": 530}, {"file_name": "val_426.tif", "height": 512, "width": 512, "segm_file": "val_426.tif", "id": 531}, {"file_name": "val_340.tif", "height": 512, "width": 512, "segm_file": "val_340.tif", "id": 
532}, {"file_name": "val_817.tif", "height": 512, "width": 512, "segm_file": "val_817.tif", "id": 533}, {"file_name": "val_803.tif", "height": 512, "width": 512, "segm_file": "val_803.tif", "id": 534}, {"file_name": "val_195.tif", "height": 512, "width": 512, "segm_file": "val_195.tif", "id": 535}, {"file_name": "val_181.tif", "height": 512, "width": 512, "segm_file": "val_181.tif", "id": 536}, {"file_name": "val_20.tif", "height": 512, "width": 512, "segm_file": "val_20.tif", "id": 537}, {"file_name": "val_34.tif", "height": 512, "width": 512, "segm_file": "val_34.tif", "id": 538}, {"file_name": "val_618.tif", "height": 512, "width": 512, "segm_file": "val_618.tif", "id": 539}, {"file_name": "val_630.tif", "height": 512, "width": 512, "segm_file": "val_630.tif", "id": 540}, {"file_name": "val_156.tif", "height": 512, "width": 512, "segm_file": "val_156.tif", "id": 541}, {"file_name": "val_142.tif", "height": 512, "width": 512, "segm_file": "val_142.tif", "id": 542}, {"file_name": "val_624.tif", "height": 512, "width": 512, "segm_file": "val_624.tif", "id": 543}, {"file_name": "val_154.tif", "height": 512, "width": 512, "segm_file": "val_154.tif", "id": 544}, {"file_name": "val_632.tif", "height": 512, "width": 512, "segm_file": "val_632.tif", "id": 545}, {"file_name": "val_626.tif", "height": 512, "width": 512, "segm_file": "val_626.tif", "id": 546}, {"file_name": "val_140.tif", "height": 512, "width": 512, "segm_file": "val_140.tif", "id": 547}, {"file_name": "val_22.tif", "height": 512, "width": 512, "segm_file": "val_22.tif", "id": 548}, {"file_name": "val_168.tif", "height": 512, "width": 512, "segm_file": "val_168.tif", "id": 549}, {"file_name": "val_36.tif", "height": 512, "width": 512, "segm_file": "val_36.tif", "id": 550}, {"file_name": "val_197.tif", "height": 512, "width": 512, "segm_file": "val_197.tif", "id": 551}, {"file_name": "val_829.tif", "height": 512, "width": 512, "segm_file": "val_829.tif", "id": 552}, {"file_name": "val_183.tif", "height": 512, "width": 512, "segm_file": "val_183.tif", "id": 553}, {"file_name": "val_815.tif", "height": 512, "width": 512, "segm_file": "val_815.tif", "id": 554}, {"file_name": "val_801.tif", "height": 512, "width": 512, "segm_file": "val_801.tif", "id": 555}, {"file_name": "val_430.tif", "height": 512, "width": 512, "segm_file": "val_430.tif", "id": 556}, {"file_name": "val_356.tif", "height": 512, "width": 512, "segm_file": "val_356.tif", "id": 557}, {"file_name": "val_342.tif", "height": 512, "width": 512, "segm_file": "val_342.tif", "id": 558}, {"file_name": "val_424.tif", "height": 512, "width": 512, "segm_file": "val_424.tif", "id": 559}, {"file_name": "val_418.tif", "height": 512, "width": 512, "segm_file": "val_418.tif", "id": 560}, {"file_name": "val_395.tif", "height": 512, "width": 512, "segm_file": "val_395.tif", "id": 561}, {"file_name": "val_381.tif", "height": 512, "width": 512, "segm_file": "val_381.tif", "id": 562}, {"file_name": "val_544.tif", "height": 512, "width": 512, "segm_file": "val_544.tif", "id": 563}, {"file_name": "val_222.tif", "height": 512, "width": 512, "segm_file": "val_222.tif", "id": 564}, {"file_name": "val_236.tif", "height": 512, "width": 512, "segm_file": "val_236.tif", "id": 565}, {"file_name": "val_550.tif", "height": 512, "width": 512, "segm_file": "val_550.tif", "id": 566}, {"file_name": "val_578.tif", "height": 512, "width": 512, "segm_file": "val_578.tif", "id": 567}, {"file_name": "val_587.tif", "height": 512, "width": 512, "segm_file": "val_587.tif", "id": 568}, {"file_name": "val_593.tif", 
"height": 512, "width": 512, "segm_file": "val_593.tif", "id": 569}, {"file_name": "val_746.tif", "height": 512, "width": 512, "segm_file": "val_746.tif", "id": 570}, {"file_name": "val_752.tif", "height": 512, "width": 512, "segm_file": "val_752.tif", "id": 571}, {"file_name": "val_4.tif", "height": 512, "width": 512, "segm_file": "val_4.tif", "id": 572}, {"file_name": "val_785.tif", "height": 512, "width": 512, "segm_file": "val_785.tif", "id": 573}, {"file_name": "val_791.tif", "height": 512, "width": 512, "segm_file": "val_791.tif", "id": 574}, {"file_name": "val_949.tif", "height": 512, "width": 512, "segm_file": "val_949.tif", "id": 575}, {"file_name": "val_961.tif", "height": 512, "width": 512, "segm_file": "val_961.tif", "id": 576}, {"file_name": "val_975.tif", "height": 512, "width": 512, "segm_file": "val_975.tif", "id": 577}, {"file_name": "val_974.tif", "height": 512, "width": 512, "segm_file": "val_974.tif", "id": 578}, {"file_name": "val_960.tif", "height": 512, "width": 512, "segm_file": "val_960.tif", "id": 579}, {"file_name": "val_948.tif", "height": 512, "width": 512, "segm_file": "val_948.tif", "id": 580}, {"file_name": "val_790.tif", "height": 512, "width": 512, "segm_file": "val_790.tif", "id": 581}, {"file_name": "val_784.tif", "height": 512, "width": 512, "segm_file": "val_784.tif", "id": 582}, {"file_name": "val_5.tif", "height": 512, "width": 512, "segm_file": "val_5.tif", "id": 583}, {"file_name": "val_753.tif", "height": 512, "width": 512, "segm_file": "val_753.tif", "id": 584}, {"file_name": "val_747.tif", "height": 512, "width": 512, "segm_file": "val_747.tif", "id": 585}, {"file_name": "val_592.tif", "height": 512, "width": 512, "segm_file": "val_592.tif", "id": 586}, {"file_name": "val_586.tif", "height": 512, "width": 512, "segm_file": "val_586.tif", "id": 587}, {"file_name": "val_579.tif", "height": 512, "width": 512, "segm_file": "val_579.tif", "id": 588}, {"file_name": "val_237.tif", "height": 512, "width": 512, "segm_file": "val_237.tif", "id": 589}, {"file_name": "val_551.tif", "height": 512, "width": 512, "segm_file": "val_551.tif", "id": 590}, {"file_name": "val_545.tif", "height": 512, "width": 512, "segm_file": "val_545.tif", "id": 591}, {"file_name": "val_223.tif", "height": 512, "width": 512, "segm_file": "val_223.tif", "id": 592}, {"file_name": "val_380.tif", "height": 512, "width": 512, "segm_file": "val_380.tif", "id": 593}, {"file_name": "val_394.tif", "height": 512, "width": 512, "segm_file": "val_394.tif", "id": 594}, {"file_name": "val_419.tif", "height": 512, "width": 512, "segm_file": "val_419.tif", "id": 595}, {"file_name": "val_343.tif", "height": 512, "width": 512, "segm_file": "val_343.tif", "id": 596}, {"file_name": "val_425.tif", "height": 512, "width": 512, "segm_file": "val_425.tif", "id": 597}, {"file_name": "val_431.tif", "height": 512, "width": 512, "segm_file": "val_431.tif", "id": 598}, {"file_name": "val_357.tif", "height": 512, "width": 512, "segm_file": "val_357.tif", "id": 599}, {"file_name": "val_800.tif", "height": 512, "width": 512, "segm_file": "val_800.tif", "id": 600}, {"file_name": "val_814.tif", "height": 512, "width": 512, "segm_file": "val_814.tif", "id": 601}, {"file_name": "val_182.tif", "height": 512, "width": 512, "segm_file": "val_182.tif", "id": 602}, {"file_name": "val_196.tif", "height": 512, "width": 512, "segm_file": "val_196.tif", "id": 603}, {"file_name": "val_828.tif", "height": 512, "width": 512, "segm_file": "val_828.tif", "id": 604}, {"file_name": "val_37.tif", "height": 512, "width": 512, 
"segm_file": "val_37.tif", "id": 605}, {"file_name": "val_169.tif", "height": 512, "width": 512, "segm_file": "val_169.tif", "id": 606}, {"file_name": "val_23.tif", "height": 512, "width": 512, "segm_file": "val_23.tif", "id": 607}, {"file_name": "val_627.tif", "height": 512, "width": 512, "segm_file": "val_627.tif", "id": 608}, {"file_name": "val_141.tif", "height": 512, "width": 512, "segm_file": "val_141.tif", "id": 609}, {"file_name": "val_155.tif", "height": 512, "width": 512, "segm_file": "val_155.tif", "id": 610}, {"file_name": "val_633.tif", "height": 512, "width": 512, "segm_file": "val_633.tif", "id": 611}, {"file_name": "val_27.tif", "height": 512, "width": 512, "segm_file": "val_27.tif", "id": 612}, {"file_name": "val_33.tif", "height": 512, "width": 512, "segm_file": "val_33.tif", "id": 613}, {"file_name": "val_179.tif", "height": 512, "width": 512, "segm_file": "val_179.tif", "id": 614}, {"file_name": "val_637.tif", "height": 512, "width": 512, "segm_file": "val_637.tif", "id": 615}, {"file_name": "val_151.tif", "height": 512, "width": 512, "segm_file": "val_151.tif", "id": 616}, {"file_name": "val_145.tif", "height": 512, "width": 512, "segm_file": "val_145.tif", "id": 617}, {"file_name": "val_623.tif", "height": 512, "width": 512, "segm_file": "val_623.tif", "id": 618}, {"file_name": "val_810.tif", "height": 512, "width": 512, "segm_file": "val_810.tif", "id": 619}, {"file_name": "val_804.tif", "height": 512, "width": 512, "segm_file": "val_804.tif", "id": 620}, {"file_name": "val_192.tif", "height": 512, "width": 512, "segm_file": "val_192.tif", "id": 621}, {"file_name": "val_186.tif", "height": 512, "width": 512, "segm_file": "val_186.tif", "id": 622}, {"file_name": "val_838.tif", "height": 512, "width": 512, "segm_file": "val_838.tif", "id": 623}, {"file_name": "val_409.tif", "height": 512, "width": 512, "segm_file": "val_409.tif", "id": 624}, {"file_name": "val_353.tif", "height": 512, "width": 512, "segm_file": "val_353.tif", "id": 625}, {"file_name": "val_435.tif", "height": 512, "width": 512, "segm_file": "val_435.tif", "id": 626}, {"file_name": "val_421.tif", "height": 512, "width": 512, "segm_file": "val_421.tif", "id": 627}, {"file_name": "val_347.tif", "height": 512, "width": 512, "segm_file": "val_347.tif", "id": 628}, {"file_name": "val_390.tif", "height": 512, "width": 512, "segm_file": "val_390.tif", "id": 629}, {"file_name": "val_384.tif", "height": 512, "width": 512, "segm_file": "val_384.tif", "id": 630}, {"file_name": "val_569.tif", "height": 512, "width": 512, "segm_file": "val_569.tif", "id": 631}, {"file_name": "val_227.tif", "height": 512, "width": 512, "segm_file": "val_227.tif", "id": 632}, {"file_name": "val_541.tif", "height": 512, "width": 512, "segm_file": "val_541.tif", "id": 633}, {"file_name": "val_555.tif", "height": 512, "width": 512, "segm_file": "val_555.tif", "id": 634}, {"file_name": "val_233.tif", "height": 512, "width": 512, "segm_file": "val_233.tif", "id": 635}, {"file_name": "val_582.tif", "height": 512, "width": 512, "segm_file": "val_582.tif", "id": 636}, {"file_name": "val_596.tif", "height": 512, "width": 512, "segm_file": "val_596.tif", "id": 637}, {"file_name": "val_1.tif", "height": 512, "width": 512, "segm_file": "val_1.tif", "id": 638}, {"file_name": "val_743.tif", "height": 512, "width": 512, "segm_file": "val_743.tif", "id": 639}, {"file_name": "val_757.tif", "height": 512, "width": 512, "segm_file": "val_757.tif", "id": 640}, {"file_name": "val_964.tif", "height": 512, "width": 512, "segm_file": "val_964.tif", "id": 
641}, {"file_name": "val_970.tif", "height": 512, "width": 512, "segm_file": "val_970.tif", "id": 642}, {"file_name": "val_780.tif", "height": 512, "width": 512, "segm_file": "val_780.tif", "id": 643}, {"file_name": "val_958.tif", "height": 512, "width": 512, "segm_file": "val_958.tif", "id": 644}, {"file_name": "val_794.tif", "height": 512, "width": 512, "segm_file": "val_794.tif", "id": 645}, {"file_name": "val_795.tif", "height": 512, "width": 512, "segm_file": "val_795.tif", "id": 646}, {"file_name": "val_959.tif", "height": 512, "width": 512, "segm_file": "val_959.tif", "id": 647}, {"file_name": "val_781.tif", "height": 512, "width": 512, "segm_file": "val_781.tif", "id": 648}, {"file_name": "val_971.tif", "height": 512, "width": 512, "segm_file": "val_971.tif", "id": 649}, {"file_name": "val_965.tif", "height": 512, "width": 512, "segm_file": "val_965.tif", "id": 650}, {"file_name": "val_756.tif", "height": 512, "width": 512, "segm_file": "val_756.tif", "id": 651}, {"file_name": "val_742.tif", "height": 512, "width": 512, "segm_file": "val_742.tif", "id": 652}, {"file_name": "val_0.tif", "height": 512, "width": 512, "segm_file": "val_0.tif", "id": 653}, {"file_name": "val_597.tif", "height": 512, "width": 512, "segm_file": "val_597.tif", "id": 654}, {"file_name": "val_583.tif", "height": 512, "width": 512, "segm_file": "val_583.tif", "id": 655}, {"file_name": "val_554.tif", "height": 512, "width": 512, "segm_file": "val_554.tif", "id": 656}, {"file_name": "val_232.tif", "height": 512, "width": 512, "segm_file": "val_232.tif", "id": 657}, {"file_name": "val_226.tif", "height": 512, "width": 512, "segm_file": "val_226.tif", "id": 658}, {"file_name": "val_540.tif", "height": 512, "width": 512, "segm_file": "val_540.tif", "id": 659}, {"file_name": "val_568.tif", "height": 512, "width": 512, "segm_file": "val_568.tif", "id": 660}, {"file_name": "val_385.tif", "height": 512, "width": 512, "segm_file": "val_385.tif", "id": 661}, {"file_name": "val_391.tif", "height": 512, "width": 512, "segm_file": "val_391.tif", "id": 662}, {"file_name": "val_420.tif", "height": 512, "width": 512, "segm_file": "val_420.tif", "id": 663}, {"file_name": "val_346.tif", "height": 512, "width": 512, "segm_file": "val_346.tif", "id": 664}, {"file_name": "val_352.tif", "height": 512, "width": 512, "segm_file": "val_352.tif", "id": 665}, {"file_name": "val_434.tif", "height": 512, "width": 512, "segm_file": "val_434.tif", "id": 666}, {"file_name": "val_408.tif", "height": 512, "width": 512, "segm_file": "val_408.tif", "id": 667}, {"file_name": "val_187.tif", "height": 512, "width": 512, "segm_file": "val_187.tif", "id": 668}, {"file_name": "val_839.tif", "height": 512, "width": 512, "segm_file": "val_839.tif", "id": 669}, {"file_name": "val_193.tif", "height": 512, "width": 512, "segm_file": "val_193.tif", "id": 670}, {"file_name": "val_805.tif", "height": 512, "width": 512, "segm_file": "val_805.tif", "id": 671}, {"file_name": "val_811.tif", "height": 512, "width": 512, "segm_file": "val_811.tif", "id": 672}, {"file_name": "val_144.tif", "height": 512, "width": 512, "segm_file": "val_144.tif", "id": 673}, {"file_name": "val_622.tif", "height": 512, "width": 512, "segm_file": "val_622.tif", "id": 674}, {"file_name": "val_636.tif", "height": 512, "width": 512, "segm_file": "val_636.tif", "id": 675}, {"file_name": "val_150.tif", "height": 512, "width": 512, "segm_file": "val_150.tif", "id": 676}, {"file_name": "val_178.tif", "height": 512, "width": 512, "segm_file": "val_178.tif", "id": 677}, {"file_name": 
"val_32.tif", "height": 512, "width": 512, "segm_file": "val_32.tif", "id": 678}, {"file_name": "val_26.tif", "height": 512, "width": 512, "segm_file": "val_26.tif", "id": 679}, {"file_name": "val_30.tif", "height": 512, "width": 512, "segm_file": "val_30.tif", "id": 680}, {"file_name": "val_24.tif", "height": 512, "width": 512, "segm_file": "val_24.tif", "id": 681}, {"file_name": "val_608.tif", "height": 512, "width": 512, "segm_file": "val_608.tif", "id": 682}, {"file_name": "val_620.tif", "height": 512, "width": 512, "segm_file": "val_620.tif", "id": 683}, {"file_name": "val_146.tif", "height": 512, "width": 512, "segm_file": "val_146.tif", "id": 684}, {"file_name": "val_18.tif", "height": 512, "width": 512, "segm_file": "val_18.tif", "id": 685}, {"file_name": "val_152.tif", "height": 512, "width": 512, "segm_file": "val_152.tif", "id": 686}, {"file_name": "val_634.tif", "height": 512, "width": 512, "segm_file": "val_634.tif", "id": 687}, {"file_name": "val_807.tif", "height": 512, "width": 512, "segm_file": "val_807.tif", "id": 688}, {"file_name": "val_813.tif", "height": 512, "width": 512, "segm_file": "val_813.tif", "id": 689}, {"file_name": "val_185.tif", "height": 512, "width": 512, "segm_file": "val_185.tif", "id": 690}, {"file_name": "val_191.tif", "height": 512, "width": 512, "segm_file": "val_191.tif", "id": 691}, {"file_name": "val_378.tif", "height": 512, "width": 512, "segm_file": "val_378.tif", "id": 692}, {"file_name": "val_344.tif", "height": 512, "width": 512, "segm_file": "val_344.tif", "id": 693}, {"file_name": "val_422.tif", "height": 512, "width": 512, "segm_file": "val_422.tif", "id": 694}, {"file_name": "val_436.tif", "height": 512, "width": 512, "segm_file": "val_436.tif", "id": 695}, {"file_name": "val_350.tif", "height": 512, "width": 512, "segm_file": "val_350.tif", "id": 696}, {"file_name": "val_387.tif", "height": 512, "width": 512, "segm_file": "val_387.tif", "id": 697}, {"file_name": "val_393.tif", "height": 512, "width": 512, "segm_file": "val_393.tif", "id": 698}, {"file_name": "val_218.tif", "height": 512, "width": 512, "segm_file": "val_218.tif", "id": 699}, {"file_name": "val_230.tif", "height": 512, "width": 512, "segm_file": "val_230.tif", "id": 700}, {"file_name": "val_556.tif", "height": 512, "width": 512, "segm_file": "val_556.tif", "id": 701}, {"file_name": "val_542.tif", "height": 512, "width": 512, "segm_file": "val_542.tif", "id": 702}, {"file_name": "val_224.tif", "height": 512, "width": 512, "segm_file": "val_224.tif", "id": 703}, {"file_name": "val_595.tif", "height": 512, "width": 512, "segm_file": "val_595.tif", "id": 704}, {"file_name": "val_581.tif", "height": 512, "width": 512, "segm_file": "val_581.tif", "id": 705}, {"file_name": "val_768.tif", "height": 512, "width": 512, "segm_file": "val_768.tif", "id": 706}, {"file_name": "val_2.tif", "height": 512, "width": 512, "segm_file": "val_2.tif", "id": 707}, {"file_name": "val_754.tif", "height": 512, "width": 512, "segm_file": "val_754.tif", "id": 708}, {"file_name": "val_998.tif", "height": 512, "width": 512, "segm_file": "val_998.tif", "id": 709}, {"file_name": "val_740.tif", "height": 512, "width": 512, "segm_file": "val_740.tif", "id": 710}, {"file_name": "val_973.tif", "height": 512, "width": 512, "segm_file": "val_973.tif", "id": 711}, {"file_name": "val_967.tif", "height": 512, "width": 512, "segm_file": "val_967.tif", "id": 712}, {"file_name": "val_797.tif", "height": 512, "width": 512, "segm_file": "val_797.tif", "id": 713}, {"file_name": "val_783.tif", "height": 512, "width": 
512, "segm_file": "val_783.tif", "id": 714}, {"file_name": "val_782.tif", "height": 512, "width": 512, "segm_file": "val_782.tif", "id": 715}, {"file_name": "val_796.tif", "height": 512, "width": 512, "segm_file": "val_796.tif", "id": 716}, {"file_name": "val_966.tif", "height": 512, "width": 512, "segm_file": "val_966.tif", "id": 717}, {"file_name": "val_972.tif", "height": 512, "width": 512, "segm_file": "val_972.tif", "id": 718}, {"file_name": "val_741.tif", "height": 512, "width": 512, "segm_file": "val_741.tif", "id": 719}, {"file_name": "val_999.tif", "height": 512, "width": 512, "segm_file": "val_999.tif", "id": 720}, {"file_name": "val_755.tif", "height": 512, "width": 512, "segm_file": "val_755.tif", "id": 721}, {"file_name": "val_3.tif", "height": 512, "width": 512, "segm_file": "val_3.tif", "id": 722}, {"file_name": "val_769.tif", "height": 512, "width": 512, "segm_file": "val_769.tif", "id": 723}, {"file_name": "val_580.tif", "height": 512, "width": 512, "segm_file": "val_580.tif", "id": 724}, {"file_name": "val_594.tif", "height": 512, "width": 512, "segm_file": "val_594.tif", "id": 725}, {"file_name": "val_543.tif", "height": 512, "width": 512, "segm_file": "val_543.tif", "id": 726}, {"file_name": "val_225.tif", "height": 512, "width": 512, "segm_file": "val_225.tif", "id": 727}, {"file_name": "val_231.tif", "height": 512, "width": 512, "segm_file": "val_231.tif", "id": 728}, {"file_name": "val_557.tif", "height": 512, "width": 512, "segm_file": "val_557.tif", "id": 729}, {"file_name": "val_219.tif", "height": 512, "width": 512, "segm_file": "val_219.tif", "id": 730}, {"file_name": "val_392.tif", "height": 512, "width": 512, "segm_file": "val_392.tif", "id": 731}, {"file_name": "val_386.tif", "height": 512, "width": 512, "segm_file": "val_386.tif", "id": 732}, {"file_name": "val_437.tif", "height": 512, "width": 512, "segm_file": "val_437.tif", "id": 733}, {"file_name": "val_351.tif", "height": 512, "width": 512, "segm_file": "val_351.tif", "id": 734}, {"file_name": "val_345.tif", "height": 512, "width": 512, "segm_file": "val_345.tif", "id": 735}, {"file_name": "val_423.tif", "height": 512, "width": 512, "segm_file": "val_423.tif", "id": 736}, {"file_name": "val_379.tif", "height": 512, "width": 512, "segm_file": "val_379.tif", "id": 737}, {"file_name": "val_190.tif", "height": 512, "width": 512, "segm_file": "val_190.tif", "id": 738}, {"file_name": "val_184.tif", "height": 512, "width": 512, "segm_file": "val_184.tif", "id": 739}, {"file_name": "val_812.tif", "height": 512, "width": 512, "segm_file": "val_812.tif", "id": 740}, {"file_name": "val_806.tif", "height": 512, "width": 512, "segm_file": "val_806.tif", "id": 741}, {"file_name": "val_153.tif", "height": 512, "width": 512, "segm_file": "val_153.tif", "id": 742}, {"file_name": "val_19.tif", "height": 512, "width": 512, "segm_file": "val_19.tif", "id": 743}, {"file_name": "val_635.tif", "height": 512, "width": 512, "segm_file": "val_635.tif", "id": 744}, {"file_name": "val_621.tif", "height": 512, "width": 512, "segm_file": "val_621.tif", "id": 745}, {"file_name": "val_147.tif", "height": 512, "width": 512, "segm_file": "val_147.tif", "id": 746}, {"file_name": "val_25.tif", "height": 512, "width": 512, "segm_file": "val_25.tif", "id": 747}, {"file_name": "val_609.tif", "height": 512, "width": 512, "segm_file": "val_609.tif", "id": 748}, {"file_name": "val_31.tif", "height": 512, "width": 512, "segm_file": "val_31.tif", "id": 749}, {"file_name": "val_646.tif", "height": 512, "width": 512, "segm_file": "val_646.tif", 
"id": 750}, {"file_name": "val_120.tif", "height": 512, "width": 512, "segm_file": "val_120.tif", "id": 751}, {"file_name": "val_134.tif", "height": 512, "width": 512, "segm_file": "val_134.tif", "id": 752}, {"file_name": "val_652.tif", "height": 512, "width": 512, "segm_file": "val_652.tif", "id": 753}, {"file_name": "val_56.tif", "height": 512, "width": 512, "segm_file": "val_56.tif", "id": 754}, {"file_name": "val_42.tif", "height": 512, "width": 512, "segm_file": "val_42.tif", "id": 755}, {"file_name": "val_108.tif", "height": 512, "width": 512, "segm_file": "val_108.tif", "id": 756}, {"file_name": "val_685.tif", "height": 512, "width": 512, "segm_file": "val_685.tif", "id": 757}, {"file_name": "val_849.tif", "height": 512, "width": 512, "segm_file": "val_849.tif", "id": 758}, {"file_name": "val_691.tif", "height": 512, "width": 512, "segm_file": "val_691.tif", "id": 759}, {"file_name": "val_861.tif", "height": 512, "width": 512, "segm_file": "val_861.tif", "id": 760}, {"file_name": "val_95.tif", "height": 512, "width": 512, "segm_file": "val_95.tif", "id": 761}, {"file_name": "val_81.tif", "height": 512, "width": 512, "segm_file": "val_81.tif", "id": 762}, {"file_name": "val_875.tif", "height": 512, "width": 512, "segm_file": "val_875.tif", "id": 763}, {"file_name": "val_322.tif", "height": 512, "width": 512, "segm_file": "val_322.tif", "id": 764}, {"file_name": "val_444.tif", "height": 512, "width": 512, "segm_file": "val_444.tif", "id": 765}, {"file_name": "val_450.tif", "height": 512, "width": 512, "segm_file": "val_450.tif", "id": 766}, {"file_name": "val_336.tif", "height": 512, "width": 512, "segm_file": "val_336.tif", "id": 767}, {"file_name": "val_478.tif", "height": 512, "width": 512, "segm_file": "val_478.tif", "id": 768}, {"file_name": "val_487.tif", "height": 512, "width": 512, "segm_file": "val_487.tif", "id": 769}, {"file_name": "val_493.tif", "height": 512, "width": 512, "segm_file": "val_493.tif", "id": 770}, {"file_name": "val_1023.tif", "height": 512, "width": 512, "segm_file": "val_1023.tif", "id": 771}, {"file_name": "val_256.tif", "height": 512, "width": 512, "segm_file": "val_256.tif", "id": 772}, {"file_name": "val_530.tif", "height": 512, "width": 512, "segm_file": "val_530.tif", "id": 773}, {"file_name": "val_524.tif", "height": 512, "width": 512, "segm_file": "val_524.tif", "id": 774}, {"file_name": "val_242.tif", "height": 512, "width": 512, "segm_file": "val_242.tif", "id": 775}, {"file_name": "val_518.tif", "height": 512, "width": 512, "segm_file": "val_518.tif", "id": 776}, {"file_name": "val_295.tif", "height": 512, "width": 512, "segm_file": "val_295.tif", "id": 777}, {"file_name": "val_281.tif", "height": 512, "width": 512, "segm_file": "val_281.tif", "id": 778}, {"file_name": "val_732.tif", "height": 512, "width": 512, "segm_file": "val_732.tif", "id": 779}, {"file_name": "val_726.tif", "height": 512, "width": 512, "segm_file": "val_726.tif", "id": 780}, {"file_name": "val_929.tif", "height": 512, "width": 512, "segm_file": "val_929.tif", "id": 781}, {"file_name": "val_915.tif", "height": 512, "width": 512, "segm_file": "val_915.tif", "id": 782}, {"file_name": "val_901.tif", "height": 512, "width": 512, "segm_file": "val_901.tif", "id": 783}, {"file_name": "val_900.tif", "height": 512, "width": 512, "segm_file": "val_900.tif", "id": 784}, {"file_name": "val_914.tif", "height": 512, "width": 512, "segm_file": "val_914.tif", "id": 785}, {"file_name": "val_928.tif", "height": 512, "width": 512, "segm_file": "val_928.tif", "id": 786}, {"file_name": 
"val_727.tif", "height": 512, "width": 512, "segm_file": "val_727.tif", "id": 787}, {"file_name": "val_733.tif", "height": 512, "width": 512, "segm_file": "val_733.tif", "id": 788}, {"file_name": "val_280.tif", "height": 512, "width": 512, "segm_file": "val_280.tif", "id": 789}, {"file_name": "val_294.tif", "height": 512, "width": 512, "segm_file": "val_294.tif", "id": 790}, {"file_name": "val_519.tif", "height": 512, "width": 512, "segm_file": "val_519.tif", "id": 791}, {"file_name": "val_525.tif", "height": 512, "width": 512, "segm_file": "val_525.tif", "id": 792}, {"file_name": "val_243.tif", "height": 512, "width": 512, "segm_file": "val_243.tif", "id": 793}, {"file_name": "val_257.tif", "height": 512, "width": 512, "segm_file": "val_257.tif", "id": 794}, {"file_name": "val_531.tif", "height": 512, "width": 512, "segm_file": "val_531.tif", "id": 795}, {"file_name": "val_1022.tif", "height": 512, "width": 512, "segm_file": "val_1022.tif", "id": 796}, {"file_name": "val_492.tif", "height": 512, "width": 512, "segm_file": "val_492.tif", "id": 797}, {"file_name": "val_486.tif", "height": 512, "width": 512, "segm_file": "val_486.tif", "id": 798}, {"file_name": "val_479.tif", "height": 512, "width": 512, "segm_file": "val_479.tif", "id": 799}, {"file_name": "val_451.tif", "height": 512, "width": 512, "segm_file": "val_451.tif", "id": 800}, {"file_name": "val_337.tif", "height": 512, "width": 512, "segm_file": "val_337.tif", "id": 801}, {"file_name": "val_323.tif", "height": 512, "width": 512, "segm_file": "val_323.tif", "id": 802}, {"file_name": "val_445.tif", "height": 512, "width": 512, "segm_file": "val_445.tif", "id": 803}, {"file_name": "val_80.tif", "height": 512, "width": 512, "segm_file": "val_80.tif", "id": 804}, {"file_name": "val_874.tif", "height": 512, "width": 512, "segm_file": "val_874.tif", "id": 805}, {"file_name": "val_860.tif", "height": 512, "width": 512, "segm_file": "val_860.tif", "id": 806}, {"file_name": "val_94.tif", "height": 512, "width": 512, "segm_file": "val_94.tif", "id": 807}, {"file_name": "val_690.tif", "height": 512, "width": 512, "segm_file": "val_690.tif", "id": 808}, {"file_name": "val_848.tif", "height": 512, "width": 512, "segm_file": "val_848.tif", "id": 809}, {"file_name": "val_684.tif", "height": 512, "width": 512, "segm_file": "val_684.tif", "id": 810}, {"file_name": "val_109.tif", "height": 512, "width": 512, "segm_file": "val_109.tif", "id": 811}, {"file_name": "val_43.tif", "height": 512, "width": 512, "segm_file": "val_43.tif", "id": 812}, {"file_name": "val_57.tif", "height": 512, "width": 512, "segm_file": "val_57.tif", "id": 813}, {"file_name": "val_135.tif", "height": 512, "width": 512, "segm_file": "val_135.tif", "id": 814}, {"file_name": "val_653.tif", "height": 512, "width": 512, "segm_file": "val_653.tif", "id": 815}, {"file_name": "val_647.tif", "height": 512, "width": 512, "segm_file": "val_647.tif", "id": 816}, {"file_name": "val_121.tif", "height": 512, "width": 512, "segm_file": "val_121.tif", "id": 817}, {"file_name": "val_651.tif", "height": 512, "width": 512, "segm_file": "val_651.tif", "id": 818}, {"file_name": "val_889.tif", "height": 512, "width": 512, "segm_file": "val_889.tif", "id": 819}, {"file_name": "val_137.tif", "height": 512, "width": 512, "segm_file": "val_137.tif", "id": 820}, {"file_name": "val_69.tif", "height": 512, "width": 512, "segm_file": "val_69.tif", "id": 821}, {"file_name": "val_123.tif", "height": 512, "width": 512, "segm_file": "val_123.tif", "id": 822}, {"file_name": "val_645.tif", "height": 512, 
"width": 512, "segm_file": "val_645.tif", "id": 823}, {"file_name": "val_41.tif", "height": 512, "width": 512, "segm_file": "val_41.tif", "id": 824}, {"file_name": "val_55.tif", "height": 512, "width": 512, "segm_file": "val_55.tif", "id": 825}, {"file_name": "val_679.tif", "height": 512, "width": 512, "segm_file": "val_679.tif", "id": 826}, {"file_name": "val_692.tif", "height": 512, "width": 512, "segm_file": "val_692.tif", "id": 827}, {"file_name": "val_686.tif", "height": 512, "width": 512, "segm_file": "val_686.tif", "id": 828}, {"file_name": "val_876.tif", "height": 512, "width": 512, "segm_file": "val_876.tif", "id": 829}, {"file_name": "val_82.tif", "height": 512, "width": 512, "segm_file": "val_82.tif", "id": 830}, {"file_name": "val_96.tif", "height": 512, "width": 512, "segm_file": "val_96.tif", "id": 831}, {"file_name": "val_862.tif", "height": 512, "width": 512, "segm_file": "val_862.tif", "id": 832}, {"file_name": "val_335.tif", "height": 512, "width": 512, "segm_file": "val_335.tif", "id": 833}, {"file_name": "val_453.tif", "height": 512, "width": 512, "segm_file": "val_453.tif", "id": 834}, {"file_name": "val_447.tif", "height": 512, "width": 512, "segm_file": "val_447.tif", "id": 835}, {"file_name": "val_321.tif", "height": 512, "width": 512, "segm_file": "val_321.tif", "id": 836}, {"file_name": "val_309.tif", "height": 512, "width": 512, "segm_file": "val_309.tif", "id": 837}, {"file_name": "val_490.tif", "height": 512, "width": 512, "segm_file": "val_490.tif", "id": 838}, {"file_name": "val_1020.tif", "height": 512, "width": 512, "segm_file": "val_1020.tif", "id": 839}, {"file_name": "val_1034.tif", "height": 512, "width": 512, "segm_file": "val_1034.tif", "id": 840}, {"file_name": "val_484.tif", "height": 512, "width": 512, "segm_file": "val_484.tif", "id": 841}, {"file_name": "val_1008.tif", "height": 512, "width": 512, "segm_file": "val_1008.tif", "id": 842}, {"file_name": "val_241.tif", "height": 512, "width": 512, "segm_file": "val_241.tif", "id": 843}, {"file_name": "val_527.tif", "height": 512, "width": 512, "segm_file": "val_527.tif", "id": 844}, {"file_name": "val_533.tif", "height": 512, "width": 512, "segm_file": "val_533.tif", "id": 845}, {"file_name": "val_255.tif", "height": 512, "width": 512, "segm_file": "val_255.tif", "id": 846}, {"file_name": "val_269.tif", "height": 512, "width": 512, "segm_file": "val_269.tif", "id": 847}, {"file_name": "val_282.tif", "height": 512, "width": 512, "segm_file": "val_282.tif", "id": 848}, {"file_name": "val_296.tif", "height": 512, "width": 512, "segm_file": "val_296.tif", "id": 849}, {"file_name": "val_725.tif", "height": 512, "width": 512, "segm_file": "val_725.tif", "id": 850}, {"file_name": "val_731.tif", "height": 512, "width": 512, "segm_file": "val_731.tif", "id": 851}, {"file_name": "val_719.tif", "height": 512, "width": 512, "segm_file": "val_719.tif", "id": 852}, {"file_name": "val_902.tif", "height": 512, "width": 512, "segm_file": "val_902.tif", "id": 853}, {"file_name": "val_916.tif", "height": 512, "width": 512, "segm_file": "val_916.tif", "id": 854}, {"file_name": "val_917.tif", "height": 512, "width": 512, "segm_file": "val_917.tif", "id": 855}, {"file_name": "val_903.tif", "height": 512, "width": 512, "segm_file": "val_903.tif", "id": 856}, {"file_name": "val_718.tif", "height": 512, "width": 512, "segm_file": "val_718.tif", "id": 857}, {"file_name": "val_730.tif", "height": 512, "width": 512, "segm_file": "val_730.tif", "id": 858}, {"file_name": "val_724.tif", "height": 512, "width": 512, "segm_file": 
"val_724.tif", "id": 859}, {"file_name": "val_297.tif", "height": 512, "width": 512, "segm_file": "val_297.tif", "id": 860}, {"file_name": "val_283.tif", "height": 512, "width": 512, "segm_file": "val_283.tif", "id": 861}, {"file_name": "val_268.tif", "height": 512, "width": 512, "segm_file": "val_268.tif", "id": 862}, {"file_name": "val_532.tif", "height": 512, "width": 512, "segm_file": "val_532.tif", "id": 863}, {"file_name": "val_254.tif", "height": 512, "width": 512, "segm_file": "val_254.tif", "id": 864}, {"file_name": "val_240.tif", "height": 512, "width": 512, "segm_file": "val_240.tif", "id": 865}, {"file_name": "val_526.tif", "height": 512, "width": 512, "segm_file": "val_526.tif", "id": 866}, {"file_name": "val_1009.tif", "height": 512, "width": 512, "segm_file": "val_1009.tif", "id": 867}, {"file_name": "val_485.tif", "height": 512, "width": 512, "segm_file": "val_485.tif", "id": 868}, {"file_name": "val_1035.tif", "height": 512, "width": 512, "segm_file": "val_1035.tif", "id": 869}, {"file_name": "val_1021.tif", "height": 512, "width": 512, "segm_file": "val_1021.tif", "id": 870}, {"file_name": "val_491.tif", "height": 512, "width": 512, "segm_file": "val_491.tif", "id": 871}, {"file_name": "val_308.tif", "height": 512, "width": 512, "segm_file": "val_308.tif", "id": 872}, {"file_name": "val_446.tif", "height": 512, "width": 512, "segm_file": "val_446.tif", "id": 873}, {"file_name": "val_320.tif", "height": 512, "width": 512, "segm_file": "val_320.tif", "id": 874}, {"file_name": "val_334.tif", "height": 512, "width": 512, "segm_file": "val_334.tif", "id": 875}, {"file_name": "val_452.tif", "height": 512, "width": 512, "segm_file": "val_452.tif", "id": 876}, {"file_name": "val_97.tif", "height": 512, "width": 512, "segm_file": "val_97.tif", "id": 877}, {"file_name": "val_863.tif", "height": 512, "width": 512, "segm_file": "val_863.tif", "id": 878}, {"file_name": "val_877.tif", "height": 512, "width": 512, "segm_file": "val_877.tif", "id": 879}, {"file_name": "val_83.tif", "height": 512, "width": 512, "segm_file": "val_83.tif", "id": 880}, {"file_name": "val_687.tif", "height": 512, "width": 512, "segm_file": "val_687.tif", "id": 881}, {"file_name": "val_693.tif", "height": 512, "width": 512, "segm_file": "val_693.tif", "id": 882}, {"file_name": "val_54.tif", "height": 512, "width": 512, "segm_file": "val_54.tif", "id": 883}, {"file_name": "val_678.tif", "height": 512, "width": 512, "segm_file": "val_678.tif", "id": 884}, {"file_name": "val_40.tif", "height": 512, "width": 512, "segm_file": "val_40.tif", "id": 885}, {"file_name": "val_122.tif", "height": 512, "width": 512, "segm_file": "val_122.tif", "id": 886}, {"file_name": "val_68.tif", "height": 512, "width": 512, "segm_file": "val_68.tif", "id": 887}, {"file_name": "val_644.tif", "height": 512, "width": 512, "segm_file": "val_644.tif", "id": 888}, {"file_name": "val_888.tif", "height": 512, "width": 512, "segm_file": "val_888.tif", "id": 889}, {"file_name": "val_650.tif", "height": 512, "width": 512, "segm_file": "val_650.tif", "id": 890}, {"file_name": "val_136.tif", "height": 512, "width": 512, "segm_file": "val_136.tif", "id": 891}, {"file_name": "val_44.tif", "height": 512, "width": 512, "segm_file": "val_44.tif", "id": 892}, {"file_name": "val_668.tif", "height": 512, "width": 512, "segm_file": "val_668.tif", "id": 893}, {"file_name": "val_50.tif", "height": 512, "width": 512, "segm_file": "val_50.tif", "id": 894}, {"file_name": "val_78.tif", "height": 512, "width": 512, "segm_file": "val_78.tif", "id": 895}, 
{"file_name": "val_132.tif", "height": 512, "width": 512, "segm_file": "val_132.tif", "id": 896}, {"file_name": "val_654.tif", "height": 512, "width": 512, "segm_file": "val_654.tif", "id": 897}, {"file_name": "val_640.tif", "height": 512, "width": 512, "segm_file": "val_640.tif", "id": 898}, {"file_name": "val_898.tif", "height": 512, "width": 512, "segm_file": "val_898.tif", "id": 899}, {"file_name": "val_126.tif", "height": 512, "width": 512, "segm_file": "val_126.tif", "id": 900}, {"file_name": "val_87.tif", "height": 512, "width": 512, "segm_file": "val_87.tif", "id": 901}, {"file_name": "val_873.tif", "height": 512, "width": 512, "segm_file": "val_873.tif", "id": 902}, {"file_name": "val_867.tif", "height": 512, "width": 512, "segm_file": "val_867.tif", "id": 903}, {"file_name": "val_93.tif", "height": 512, "width": 512, "segm_file": "val_93.tif", "id": 904}, {"file_name": "val_697.tif", "height": 512, "width": 512, "segm_file": "val_697.tif", "id": 905}, {"file_name": "val_683.tif", "height": 512, "width": 512, "segm_file": "val_683.tif", "id": 906}, {"file_name": "val_318.tif", "height": 512, "width": 512, "segm_file": "val_318.tif", "id": 907}, {"file_name": "val_456.tif", "height": 512, "width": 512, "segm_file": "val_456.tif", "id": 908}, {"file_name": "val_330.tif", "height": 512, "width": 512, "segm_file": "val_330.tif", "id": 909}, {"file_name": "val_324.tif", "height": 512, "width": 512, "segm_file": "val_324.tif", "id": 910}, {"file_name": "val_442.tif", "height": 512, "width": 512, "segm_file": "val_442.tif", "id": 911}, {"file_name": "val_1019.tif", "height": 512, "width": 512, "segm_file": "val_1019.tif", "id": 912}, {"file_name": "val_495.tif", "height": 512, "width": 512, "segm_file": "val_495.tif", "id": 913}, {"file_name": "val_1025.tif", "height": 512, "width": 512, "segm_file": "val_1025.tif", "id": 914}, {"file_name": "val_1031.tif", "height": 512, "width": 512, "segm_file": "val_1031.tif", "id": 915}, {"file_name": "val_481.tif", "height": 512, "width": 512, "segm_file": "val_481.tif", "id": 916}, {"file_name": "val_278.tif", "height": 512, "width": 512, "segm_file": "val_278.tif", "id": 917}, {"file_name": "val_522.tif", "height": 512, "width": 512, "segm_file": "val_522.tif", "id": 918}, {"file_name": "val_244.tif", "height": 512, "width": 512, "segm_file": "val_244.tif", "id": 919}, {"file_name": "val_250.tif", "height": 512, "width": 512, "segm_file": "val_250.tif", "id": 920}, {"file_name": "val_536.tif", "height": 512, "width": 512, "segm_file": "val_536.tif", "id": 921}, {"file_name": "val_287.tif", "height": 512, "width": 512, "segm_file": "val_287.tif", "id": 922}, {"file_name": "val_293.tif", "height": 512, "width": 512, "segm_file": "val_293.tif", "id": 923}, {"file_name": "val_708.tif", "height": 512, "width": 512, "segm_file": "val_708.tif", "id": 924}, {"file_name": "val_720.tif", "height": 512, "width": 512, "segm_file": "val_720.tif", "id": 925}, {"file_name": "val_734.tif", "height": 512, "width": 512, "segm_file": "val_734.tif", "id": 926}, {"file_name": "val_907.tif", "height": 512, "width": 512, "segm_file": "val_907.tif", "id": 927}, {"file_name": "val_913.tif", "height": 512, "width": 512, "segm_file": "val_913.tif", "id": 928}, {"file_name": "val_912.tif", "height": 512, "width": 512, "segm_file": "val_912.tif", "id": 929}, {"file_name": "val_906.tif", "height": 512, "width": 512, "segm_file": "val_906.tif", "id": 930}, {"file_name": "val_735.tif", "height": 512, "width": 512, "segm_file": "val_735.tif", "id": 931}, {"file_name": 
"val_721.tif", "height": 512, "width": 512, "segm_file": "val_721.tif", "id": 932}, {"file_name": "val_709.tif", "height": 512, "width": 512, "segm_file": "val_709.tif", "id": 933}, {"file_name": "val_292.tif", "height": 512, "width": 512, "segm_file": "val_292.tif", "id": 934}, {"file_name": "val_286.tif", "height": 512, "width": 512, "segm_file": "val_286.tif", "id": 935}, {"file_name": "val_251.tif", "height": 512, "width": 512, "segm_file": "val_251.tif", "id": 936}, {"file_name": "val_537.tif", "height": 512, "width": 512, "segm_file": "val_537.tif", "id": 937}, {"file_name": "val_523.tif", "height": 512, "width": 512, "segm_file": "val_523.tif", "id": 938}, {"file_name": "val_245.tif", "height": 512, "width": 512, "segm_file": "val_245.tif", "id": 939}, {"file_name": "val_279.tif", "height": 512, "width": 512, "segm_file": "val_279.tif", "id": 940}, {"file_name": "val_480.tif", "height": 512, "width": 512, "segm_file": "val_480.tif", "id": 941}, {"file_name": "val_1030.tif", "height": 512, "width": 512, "segm_file": "val_1030.tif", "id": 942}, {"file_name": "val_1024.tif", "height": 512, "width": 512, "segm_file": "val_1024.tif", "id": 943}, {"file_name": "val_494.tif", "height": 512, "width": 512, "segm_file": "val_494.tif", "id": 944}, {"file_name": "val_1018.tif", "height": 512, "width": 512, "segm_file": "val_1018.tif", "id": 945}, {"file_name": "val_325.tif", "height": 512, "width": 512, "segm_file": "val_325.tif", "id": 946}, {"file_name": "val_443.tif", "height": 512, "width": 512, "segm_file": "val_443.tif", "id": 947}, {"file_name": "val_457.tif", "height": 512, "width": 512, "segm_file": "val_457.tif", "id": 948}, {"file_name": "val_331.tif", "height": 512, "width": 512, "segm_file": "val_331.tif", "id": 949}, {"file_name": "val_319.tif", "height": 512, "width": 512, "segm_file": "val_319.tif", "id": 950}, {"file_name": "val_682.tif", "height": 512, "width": 512, "segm_file": "val_682.tif", "id": 951}, {"file_name": "val_696.tif", "height": 512, "width": 512, "segm_file": "val_696.tif", "id": 952}, {"file_name": "val_866.tif", "height": 512, "width": 512, "segm_file": "val_866.tif", "id": 953}, {"file_name": "val_92.tif", "height": 512, "width": 512, "segm_file": "val_92.tif", "id": 954}, {"file_name": "val_86.tif", "height": 512, "width": 512, "segm_file": "val_86.tif", "id": 955}, {"file_name": "val_872.tif", "height": 512, "width": 512, "segm_file": "val_872.tif", "id": 956}, {"file_name": "val_899.tif", "height": 512, "width": 512, "segm_file": "val_899.tif", "id": 957}, {"file_name": "val_641.tif", "height": 512, "width": 512, "segm_file": "val_641.tif", "id": 958}, {"file_name": "val_127.tif", "height": 512, "width": 512, "segm_file": "val_127.tif", "id": 959}, {"file_name": "val_133.tif", "height": 512, "width": 512, "segm_file": "val_133.tif", "id": 960}, {"file_name": "val_79.tif", "height": 512, "width": 512, "segm_file": "val_79.tif", "id": 961}, {"file_name": "val_655.tif", "height": 512, "width": 512, "segm_file": "val_655.tif", "id": 962}, {"file_name": "val_51.tif", "height": 512, "width": 512, "segm_file": "val_51.tif", "id": 963}, {"file_name": "val_45.tif", "height": 512, "width": 512, "segm_file": "val_45.tif", "id": 964}, {"file_name": "val_669.tif", "height": 512, "width": 512, "segm_file": "val_669.tif", "id": 965}, {"file_name": "val_53.tif", "height": 512, "width": 512, "segm_file": "val_53.tif", "id": 966}, {"file_name": "val_119.tif", "height": 512, "width": 512, "segm_file": "val_119.tif", "id": 967}, {"file_name": "val_47.tif", "height": 512, 
"width": 512, "segm_file": "val_47.tif", "id": 968}, {"file_name": "val_125.tif", "height": 512, "width": 512, "segm_file": "val_125.tif", "id": 969}, {"file_name": "val_643.tif", "height": 512, "width": 512, "segm_file": "val_643.tif", "id": 970}, {"file_name": "val_657.tif", "height": 512, "width": 512, "segm_file": "val_657.tif", "id": 971}, {"file_name": "val_131.tif", "height": 512, "width": 512, "segm_file": "val_131.tif", "id": 972}, {"file_name": "val_90.tif", "height": 512, "width": 512, "segm_file": "val_90.tif", "id": 973}, {"file_name": "val_864.tif", "height": 512, "width": 512, "segm_file": "val_864.tif", "id": 974}, {"file_name": "val_870.tif", "height": 512, "width": 512, "segm_file": "val_870.tif", "id": 975}, {"file_name": "val_84.tif", "height": 512, "width": 512, "segm_file": "val_84.tif", "id": 976}, {"file_name": "val_858.tif", "height": 512, "width": 512, "segm_file": "val_858.tif", "id": 977}, {"file_name": "val_680.tif", "height": 512, "width": 512, "segm_file": "val_680.tif", "id": 978}, {"file_name": "val_694.tif", "height": 512, "width": 512, "segm_file": "val_694.tif", "id": 979}, {"file_name": "val_469.tif", "height": 512, "width": 512, "segm_file": "val_469.tif", "id": 980}, {"file_name": "val_441.tif", "height": 512, "width": 512, "segm_file": "val_441.tif", "id": 981}, {"file_name": "val_327.tif", "height": 512, "width": 512, "segm_file": "val_327.tif", "id": 982}, {"file_name": "val_333.tif", "height": 512, "width": 512, "segm_file": "val_333.tif", "id": 983}, {"file_name": "val_455.tif", "height": 512, "width": 512, "segm_file": "val_455.tif", "id": 984}, {"file_name": "val_1032.tif", "height": 512, "width": 512, "segm_file": "val_1032.tif", "id": 985}, {"file_name": "val_482.tif", "height": 512, "width": 512, "segm_file": "val_482.tif", "id": 986}, {"file_name": "val_496.tif", "height": 512, "width": 512, "segm_file": "val_496.tif", "id": 987}, {"file_name": "val_1026.tif", "height": 512, "width": 512, "segm_file": "val_1026.tif", "id": 988}, {"file_name": "val_509.tif", "height": 512, "width": 512, "segm_file": "val_509.tif", "id": 989}, {"file_name": "val_535.tif", "height": 512, "width": 512, "segm_file": "val_535.tif", "id": 990}, {"file_name": "val_253.tif", "height": 512, "width": 512, "segm_file": "val_253.tif", "id": 991}, {"file_name": "val_247.tif", "height": 512, "width": 512, "segm_file": "val_247.tif", "id": 992}, {"file_name": "val_521.tif", "height": 512, "width": 512, "segm_file": "val_521.tif", "id": 993}, {"file_name": "val_290.tif", "height": 512, "width": 512, "segm_file": "val_290.tif", "id": 994}, {"file_name": "val_284.tif", "height": 512, "width": 512, "segm_file": "val_284.tif", "id": 995}, {"file_name": "val_737.tif", "height": 512, "width": 512, "segm_file": "val_737.tif", "id": 996}, {"file_name": "val_723.tif", "height": 512, "width": 512, "segm_file": "val_723.tif", "id": 997}, {"file_name": "val_910.tif", "height": 512, "width": 512, "segm_file": "val_910.tif", "id": 998}, {"file_name": "val_904.tif", "height": 512, "width": 512, "segm_file": "val_904.tif", "id": 999}, {"file_name": "val_938.tif", "height": 512, "width": 512, "segm_file": "val_938.tif", "id": 1000}, {"file_name": "val_939.tif", "height": 512, "width": 512, "segm_file": "val_939.tif", "id": 1001}, {"file_name": "val_905.tif", "height": 512, "width": 512, "segm_file": "val_905.tif", "id": 1002}, {"file_name": "val_911.tif", "height": 512, "width": 512, "segm_file": "val_911.tif", "id": 1003}, {"file_name": "val_722.tif", "height": 512, "width": 512, 
"segm_file": "val_722.tif", "id": 1004}, {"file_name": "val_736.tif", "height": 512, "width": 512, "segm_file": "val_736.tif", "id": 1005}, {"file_name": "val_285.tif", "height": 512, "width": 512, "segm_file": "val_285.tif", "id": 1006}, {"file_name": "val_291.tif", "height": 512, "width": 512, "segm_file": "val_291.tif", "id": 1007}, {"file_name": "val_246.tif", "height": 512, "width": 512, "segm_file": "val_246.tif", "id": 1008}, {"file_name": "val_520.tif", "height": 512, "width": 512, "segm_file": "val_520.tif", "id": 1009}, {"file_name": "val_534.tif", "height": 512, "width": 512, "segm_file": "val_534.tif", "id": 1010}, {"file_name": "val_252.tif", "height": 512, "width": 512, "segm_file": "val_252.tif", "id": 1011}, {"file_name": "val_508.tif", "height": 512, "width": 512, "segm_file": "val_508.tif", "id": 1012}, {"file_name": "val_1027.tif", "height": 512, "width": 512, "segm_file": "val_1027.tif", "id": 1013}, {"file_name": "val_497.tif", "height": 512, "width": 512, "segm_file": "val_497.tif", "id": 1014}, {"file_name": "val_483.tif", "height": 512, "width": 512, "segm_file": "val_483.tif", "id": 1015}, {"file_name": "val_1033.tif", "height": 512, "width": 512, "segm_file": "val_1033.tif", "id": 1016}, {"file_name": "val_332.tif", "height": 512, "width": 512, "segm_file": "val_332.tif", "id": 1017}, {"file_name": "val_454.tif", "height": 512, "width": 512, "segm_file": "val_454.tif", "id": 1018}, {"file_name": "val_440.tif", "height": 512, "width": 512, "segm_file": "val_440.tif", "id": 1019}, {"file_name": "val_326.tif", "height": 512, "width": 512, "segm_file": "val_326.tif", "id": 1020}, {"file_name": "val_468.tif", "height": 512, "width": 512, "segm_file": "val_468.tif", "id": 1021}, {"file_name": "val_695.tif", "height": 512, "width": 512, "segm_file": "val_695.tif", "id": 1022}, {"file_name": "val_681.tif", "height": 512, "width": 512, "segm_file": "val_681.tif", "id": 1023}, {"file_name": "val_859.tif", "height": 512, "width": 512, "segm_file": "val_859.tif", "id": 1024}, {"file_name": "val_871.tif", "height": 512, "width": 512, "segm_file": "val_871.tif", "id": 1025}, {"file_name": "val_85.tif", "height": 512, "width": 512, "segm_file": "val_85.tif", "id": 1026}, {"file_name": "val_91.tif", "height": 512, "width": 512, "segm_file": "val_91.tif", "id": 1027}, {"file_name": "val_865.tif", "height": 512, "width": 512, "segm_file": "val_865.tif", "id": 1028}, {"file_name": "val_656.tif", "height": 512, "width": 512, "segm_file": "val_656.tif", "id": 1029}, {"file_name": "val_130.tif", "height": 512, "width": 512, "segm_file": "val_130.tif", "id": 1030}, {"file_name": "val_124.tif", "height": 512, "width": 512, "segm_file": "val_124.tif", "id": 1031}, {"file_name": "val_642.tif", "height": 512, "width": 512, "segm_file": "val_642.tif", "id": 1032}, {"file_name": "val_46.tif", "height": 512, "width": 512, "segm_file": "val_46.tif", "id": 1033}, {"file_name": "val_118.tif", "height": 512, "width": 512, "segm_file": "val_118.tif", "id": 1034}, {"file_name": "val_52.tif", "height": 512, "width": 512, "segm_file": "val_52.tif", "id": 1035}], "categories": [{"id": 1, "name": "building"}], "annotations": [{"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 12.0, 8.0], "area": 55, "segmentation": {"size": [512, 512], "counts": "06j?1O1OO1O1O100O1O1O1O10PPj7"}, "image_id": 0, "id": 0}, {"iscrowd": 0, "category_id": 1, "bbox": [58.0, 0.0, 20.0, 11.0], "area": 128, "segmentation": {"size": [512, 512], "counts": "RPm01m?3O001O001O001O1O001O001O001ON2N2N2N2NRPi6"}, "image_id": 0, "id": 
1}, {"iscrowd": 0, "category_id": 1, "bbox": [164.0, 0.0, 32.0, 18.0], "area": 325, "segmentation": {"size": [512, 512], "counts": "PPb21o?2N1O2N1O1O2N1O2N1O1O2N1OO1O100O1O1O100O1O100O1O1O100O1O101N2Noom4"}, "image_id": 0, "id": 2}, {"iscrowd": 0, "category_id": 1, "bbox": [230.0, 0.0, 64.0, 49.0], "area": 1590, "segmentation": {"size": [512, 512], "counts": "bPc31n?3M2N2O2M2N3N1N2N3N1N2N3M2O20O00N3M2N3N1N2N2OO01O0001O01O000100O1O1O3N1N3M2O2M2N2N3N1N3M00010O00010O000010O0003M2O1N3M2O2M2N2N_o\\3"}, "image_id": 0, "id": 3}, {"iscrowd": 0, "category_id": 1, "bbox": [449.0, 0.0, 51.0, 31.0], "area": 911, "segmentation": {"size": [512, 512], "counts": "P`P72n?1O2N1O1O2N1O2N1`@DZ?>c@D\\?b0N1O1O2N1O2N1O2N00O1O100O1O100O1O1O100O1O100O1O1O100O1O100O1O100O1O1O102M2N3N1N2Njo5"}, "image_id": 0, "id": 4}, {"iscrowd": 0, "category_id": 1, "bbox": [106.0, 7.0, 51.0, 40.0], "area": 1290, "segmentation": {"size": [512, 512], "counts": "PQe12l?3L3N3L3N2M4M2N3j@YOn>o0M2O1010O01O01O010O01ON3M2010O00010O010O00010OM4M2M3N3L3O01M4M2N3O01O010O0N2Bi@0[?Lh@2Z?Li@0e?N[_a5"}, "image_id": 0, "id": 5}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 12.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "\\`o71c?"}, "image_id": 0, "id": 6}, {"iscrowd": 0, "category_id": 1, "bbox": [160.0, 27.0, 21.0, 27.0], "area": 343, "segmentation": {"size": [512, 512], "counts": "]Q`22l?2M4M2N3M2N3M2N3N1010O010O0010OBj@2W?Kk@3W?Kl@2W?Kk@3c?Mf^U5"}, "image_id": 0, "id": 7}, {"iscrowd": 0, "category_id": 1, "bbox": [249.0, 41.0, 33.0, 40.0], "area": 681, "segmentation": {"size": [512, 512], "counts": "jal32n?1N2N3N1N2N3M2O2M2N2O2M2N2N3N1N20N01O01O0001OZOSA:n>EUA8k>IVA6i>JZA3g>L[A2g>L[A2h>LZA2g>L[A2h>K[A2[?Olmb3"}, "image_id": 0, "id": 8}, {"iscrowd": 0, "category_id": 1, "bbox": [205.0, 45.0, 33.0, 33.0], "area": 584, "segmentation": {"size": [512, 512], "counts": "jaV32m?3M2O1N3M2O2M2N3N1N2N3M2O1N1O01O01O01O3M2O2M2N2O2M2N3N1N3M2N2O2MQnX4"}, "image_id": 0, "id": 9}, {"iscrowd": 0, "category_id": 1, "bbox": [498.0, 46.0, 14.0, 43.0], "area": 348, "segmentation": {"size": [512, 512], "counts": "PRi72n?2DNe@4Y?Od@3[?Nc@5Z?Me@4Y?;i@\\OP?k0N3M2N2O2M2N2aN"}, "image_id": 0, "id": 10}, {"iscrowd": 0, "category_id": 1, "bbox": [280.0, 48.0, 62.0, 58.0], "area": 1577, "segmentation": {"size": [512, 512], "counts": "iR\\42m?2O1N2N3M2O1N3M2N2N3N1N2GXOWAi0h>ZOUAf0k>\\OTAd0l>7000010O0000010O0000010O000010O0000010O0000010O000010O000002O1N2N3M2O1N3M2N2O2M2N2N2O2M2N2Nmmd2"}, "image_id": 0, "id": 11}, {"iscrowd": 0, "category_id": 1, "bbox": [331.0, 87.0, 47.0, 50.0], "area": 1181, "segmentation": {"size": [512, 512], "counts": "XcU51n?2N2O2M2N3N1N2N3M2O2M2N2N3N1N2N3N1N3M2N2O2M2N3ON2N2N3N1N2N3N1N3M2N2O2M2N3N1N2N3M2O1N3M2N3N1N[lR2"}, "image_id": 0, "id": 12}, {"iscrowd": 0, "category_id": 1, "bbox": [259.0, 89.0, 21.0, 20.0], "area": 205, "segmentation": {"size": [512, 512], "counts": "PcQ41n?2N2N1N3N2N2O100O10O10000000OO2N2M3N2N2Nmlc3"}, "image_id": 0, "id": 13}, {"iscrowd": 0, "category_id": 1, "bbox": [261.0, 110.0, 15.0, 9.0], "area": 112, "segmentation": {"size": [512, 512], "counts": "_cR47h?1000000001O0000000001O0000Ncle3"}, "image_id": 0, "id": 14}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 129.0, 38.0, 60.0], "area": 1385, "segmentation": {"size": [512, 512], "counts": "f4i0U?2N2M40O01WAPO]>S1`APO^>R1`AQO\\>[1M4M2N201O010OO2L3N2O2O001M2N2M4M2M4L3N2M4M2M4M2M3N3L3N2M4Mgk\\7"}, "image_id": 0, "id": 15}, {"iscrowd": 0, "category_id": 1, "bbox": [356.0, 129.0, 29.0, 31.0], "area": 446, "segmentation": {"size": [512, 512], 
"counts": "[Tb52m?2O2M2N2N3N1N2N3M2O1N3M2N2010M2O1N3M2N2O2M2N2N2O2M2N2N3N[[o1"}, "image_id": 0, "id": 16}, {"iscrowd": 0, "category_id": 1, "bbox": [393.0, 136.0, 24.0, 24.0], "area": 301, "segmentation": {"size": [512, 512], "counts": "`dT62m?2N2N3N1N2N3M2O1N2N3N10O1N3M2O1N3M2O1N2N3M2O1NZ[_1"}, "image_id": 0, "id": 17}, {"iscrowd": 0, "category_id": 1, "bbox": [35.0, 138.0, 61.0, 52.0], "area": 1674, "segmentation": {"size": [512, 512], "counts": "\\ea02l?2M3N3L3010O00O2L3N3L3N2M4M2M4M1O00O3N210O0010O0010O010O0010O0010O00ZAPO]>P1`ARO`>n0]AUOd>T1O0010O0O2M2M4M2N2010O010O01L3N2M4M2M4M2N2M4M2MXk_6"}, "image_id": 0, "id": 18}, {"iscrowd": 0, "category_id": 1, "bbox": [132.0, 173.0, 51.0, 53.0], "area": 1656, "segmentation": {"size": [512, 512], "counts": "TVR25k?3M4L4K5L4L3L4M00000O010000O010000O010CQOiAo0W>TOfAl0Z>XOaAh0`>PBDP>9hAKW>5gAMY>4cAO^>0`A3_>M^A6b>e00O0010O0010O0010O0010O0010O0001N1N2M4L3O20O00010O010O00010O0010O0010O0010O0010O0001N1M4M2M3N3L3N2M4L3N3LPYi3"}, "image_id": 0, "id": 21}, {"iscrowd": 0, "category_id": 1, "bbox": [5.0, 224.0, 48.0, 53.0], "area": 1415, "segmentation": {"size": [512, 512], "counts": "Rh21m?2M3N3M2M40O010O0010O001O0N3L3N2N3L3N3M2M4M2N3M21O010O010O01O01O010O010O01O01O010O0QO_A`0a>]ObA`0`>^ObA?a>^OcA3G1i>IbA3I0X?Nj@0^gU7"}, "image_id": 0, "id": 22}, {"iscrowd": 0, "category_id": 1, "bbox": [166.0, 244.0, 33.0, 31.0], "area": 653, "segmentation": {"size": [512, 512], "counts": "WXc24i?3M3M4L3M3M4N10010O00010O00010O00010O00010O00010O00M4L3M3M4L3M3MWXl4"}, "image_id": 0, "id": 23}, {"iscrowd": 0, "category_id": 1, "bbox": [280.0, 259.0, 57.0, 64.0], "area": 2177, "segmentation": {"size": [512, 512], "counts": "bY\\43k?2M3N3L3M4M2M3N3L3M3N3L3N3L3M3N3L31OO2L310O00010O00010O010O00010O01O01O010OO1M4M2M4L3N2M4M2M4L3N2M4M2M3M4M2M4M2MbWg2"}, "image_id": 0, "id": 24}, {"iscrowd": 0, "category_id": 1, "bbox": [69.0, 261.0, 59.0, 48.0], "area": 1627, "segmentation": {"size": [512, 512], "counts": "ohR13k?3M2N2N3M2N3L3N3M2N2N3M2O2O0010O0010O0010O0010O010O0010O0010O0010O010O00010O010O0010O0010O001L3N2M4M2N3L3N3L3N2N3L3N[go5"}, "image_id": 0, "id": 25}, {"iscrowd": 0, "category_id": 1, "bbox": [206.0, 262.0, 15.0, 21.0], "area": 230, "segmentation": {"size": [512, 512], "counts": "XXW34l?4L5J6K1O0000O0100001N5L4L5K\\Wa4"}, "image_id": 0, "id": 26}, {"iscrowd": 0, "category_id": 1, "bbox": [229.0, 266.0, 24.0, 30.0], "area": 539, "segmentation": {"size": [512, 512], "counts": "]hb34l?4L5K4K6K4L1O0O100000O10O1000O10001O000[Om@0O01O010O01O01O010O01O010O01O01O010O01O010O01O01O010O01O010O01O010O01O01O010O01O01O010O01O010O01O010O01O01O010O01O010O01O01O010O01O010O01O01O010O01O010aBRNjf0lA[OT>a0nABP>2M2N2O2M2N3N11O01mE"}, "image_id": 0, "id": 30}, {"iscrowd": 0, "category_id": 1, "bbox": [364.0, 289.0, 57.0, 58.0], "area": 1694, "segmentation": {"size": [512, 512], "counts": "QZf5290Y?2e@0Y?3e@NY?4e@NY?4e@NZ?=N2N3M2N2O1N2N2N1O0010O000000000010O000N2N2N2N2100O1O2N1O1O2N100O1O2N1O1O00011N1O2N1O1N3M2N3M2M4M2N3M2MkV]1"}, "image_id": 0, "id": 31}, {"iscrowd": 0, "category_id": 1, "bbox": [131.0, 298.0, 27.0, 28.0], "area": 452, "segmentation": {"size": [512, 512], "counts": "kiQ24j?2M3N3M2M4M2M40O00010O010O0010O0010O0N3M2M3N3M2M4M2Maf`5"}, "image_id": 0, "id": 32}, {"iscrowd": 0, "category_id": 1, "bbox": [223.0, 300.0, 70.0, 94.0], "area": 3431, "segmentation": {"size": [512, 512], "counts": "ei_31o?3M4K4M4L3M4K4M4L4L3L5L3M4L3L5L3M4L3L5L2N001N10001O0O2O001O0O101O001N1103L3N3L3M4M2M3N3L3N3L3M4M2M4M1N010O0010OM3N3M2N3L3N3M2M3N3M2M4M2N3LkT]3"}, "image_id": 0, "id": 33}, {"iscrowd": 0, 
"category_id": 1, "bbox": [417.0, 311.0, 50.0, 51.0], "area": 1564, "segmentation": {"size": [512, 512], "counts": "bj`62k?3N2M4M2N3L3o@^Ob>d0[A@a>d0[A_Oc>c0[A_Oc>Q1M201O01O01O01O010O00010O01ON3L3M3N3N101O01M2M3M4O00O2L3M3M4M2M4M21ON3M2M3M4G]@1e?M\\Vf0"}, "image_id": 0, "id": 34}, {"iscrowd": 0, "category_id": 1, "bbox": [369.0, 354.0, 25.0, 27.0], "area": 423, "segmentation": {"size": [512, 512], "counts": "dkh53j?3N3M2M3N3L3N3N110O01O010O01O01O001M2N2M4M2M4M2Nidj1"}, "image_id": 0, "id": 35}, {"iscrowd": 0, "category_id": 1, "bbox": [397.0, 358.0, 14.0, 16.0], "area": 137, "segmentation": {"size": [512, 512], "counts": "bkV61l?4L3M3N3N100010O0O1M4L3M3MkTb1"}, "image_id": 0, "id": 36}, {"iscrowd": 0, "category_id": 1, "bbox": [380.0, 372.0, 35.0, 44.0], "area": 920, "segmentation": {"size": [512, 512], "counts": "d\\n54j?2M3N4K3M3N3L3N2M4M2M4M20001O01O01O010O00M4M2010OITAYOm>e0UAXOm>f050103L3M4M2G`@Oc?NcT`1"}, "image_id": 0, "id": 37}, {"iscrowd": 0, "category_id": 1, "bbox": [492.0, 396.0, 20.0, 41.0], "area": 441, "segmentation": {"size": [512, 512], "counts": "i\\f72m?2O2M2N3N1N2N3N1N3M2h@[OR?g0m@ZOR?j0O1O101N3M2N3N1cC"}, "image_id": 0, "id": 38}, {"iscrowd": 0, "category_id": 1, "bbox": [343.0, 400.0, 23.0, 17.0], "area": 210, "segmentation": {"size": [512, 512], "counts": "fl[54j?2N3M21O010O010O01O01O010O010O01O01O001M2M4MYcX2"}, "image_id": 0, "id": 39}, {"iscrowd": 0, "category_id": 1, "bbox": [370.0, 416.0, 14.0, 15.0], "area": 136, "segmentation": {"size": [512, 512], "counts": "X]i53k?2N3M2N3O001O01O010M2N3M2MPco1"}, "image_id": 0, "id": 40}, {"iscrowd": 0, "category_id": 1, "bbox": [387.0, 416.0, 14.0, 16.0], "area": 135, "segmentation": {"size": [512, 512], "counts": "YmQ63k?2N3M2M40O010O0010ON2N3M2MoRg1"}, "image_id": 0, "id": 41}, {"iscrowd": 0, "category_id": 1, "bbox": [423.0, 418.0, 24.0, 24.0], "area": 245, "segmentation": {"size": [512, 512], "counts": "dmc62m?2O1N3GId@7\\?Kc@4^?N_@3`?60010O00O110O00010O00101N3M2N2O2MhRP1"}, "image_id": 0, "id": 42}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 430.0, 60.0, 52.0], "area": 1476, "segmentation": {"size": [512, 512], "counts": "X^i61n?2S@Oh?7M2O2M2N2O2M2N30O00O2M2N3M2O1N0001O01O01O01O0001O01O01O01O00010O02N1O01O01O01O01O0001O02N2O2M2N2O2M2N3M2O2M2N2O2M2NPb8"}, "image_id": 0, "id": 43}, {"iscrowd": 0, "category_id": 1, "bbox": [483.0, 459.0, 27.0, 26.0], "area": 378, "segmentation": {"size": [512, 512], "counts": "fna72m?2N2N2O2M2N2N2N3N1N2N2N1O01O1O2N2O1N3M2N2N2O2M2N2N2NYa0"}, "image_id": 0, "id": 44}, {"iscrowd": 0, "category_id": 1, "bbox": [470.0, 480.0, 42.0, 32.0], "area": 897, "segmentation": {"size": [512, 512], "counts": "g_[72m?2N2N2N2O2M2N1O1O1O100O1O1O1O1O1O100O1O1O11O2N1O1O1OO100O1O1O1O1O1O100O1O1O1O1001O"}, "image_id": 0, "id": 45}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 13.0, 13.0], "area": 120, "segmentation": {"size": [512, 512], "counts": "0:f?001O001O00001ON2M3N2M3NR`i7"}, "image_id": 3, "id": 46}, {"iscrowd": 0, "category_id": 1, "bbox": [211.0, 0.0, 301.0, 180.0], "area": 30992, "segmentation": {"size": [512, 512], "counts": 
"QbY31m?2N3L3N2M4M2M4M2M3N3M2M4M210O00010O010O00010O010O00010O010O00010O010L3N3L3N2M4M3M3L3N3L3N2M4M2M4M2N2M4N101O00001O001O00001O001O001O000000M3N2M31O00001O001O00001O00ROkBROT=l0nBTOR=i0RCVOnZ;@iD?W;^OlDc0S;[OPEd0P;YOSEh0l:VOVEj0j:TOYEk0n<10O01O01O010O01O010O01O01O010O01O010O01O01OO2M2mNQOSCR1ch0L4M4M2N3M2\\D[Nk8g1SG[Nl8g1PG]Nm8e1QG]Nn8e1nF_No8c1oF_NP9b1nF`NQ9b1kFbNS9_1kFcNT9_1hFeNV9\\1hFfNW9\\1eFgNZ9Z1dFiN[9W1bFlN]9V1`FmN^9T1`FnN_9T1]FPOa9Q1]FQO`9R1]FQOa9R1\\FQOa9Q1\\FROa9R1\\FQOa9Q1\\FROb9P1\\FROd9o0XFUOh9j0VFXOj9i0SFZOm9e0PF^OP:b0nE@R:a0jECV:S3010O00010O010O00010O010O010O00010O010O00010O010O00010O010O00010O01O0N3M2M3N3L3N3L3N2M4M2M3N2N2M3N2M3N2M3N2M3N2N2M3N2M3N2M3N2M3N2M3N2O11O001O00001O001O00001O001O00001O0000N2M3N2N2M3N2M3N2N2M3N2N2M3N2M3N2N2M3N2M3N2N2N2001O001O00001O001O00001O001O001O00001O001O00001O001O001O0]HcIc5]6[JeIe5\\6WJhIh5X6VJjIj5W6RJmIm5S6QJoIo5Q6nIRJR6o5kITJT6l5jIVJV6k5fIYJY6g5eI[J[6f5aI]J_6c5_I`J`6`5^IbJb6_5ZIeJe6[5YIgJg6Z5UIiJk6W5SIlJl6T5QIoJo6R5nHQKQ7o4mHSKS7m601O001O001O00001O001O00001O001O00001O001O00001O001O00001O001O001O00001O001O00001O001O00001O001O00001O001O001O00001O001O00001O001O00001O001O00001O001O000000O100001O001O001O00001O001O00001O001O00001O001O00001O001O00001O001O001O"}, "image_id": 3, "id": 48}, {"iscrowd": 0, "category_id": 1, "bbox": [28.0, 352.0, 134.0, 160.0], "area": 11621, "segmentation": {"size": [512, 512], "counts": "n_>2l?2N2M3N2M3N2M3N2N2M3N2M3N2M3N2N2M3N2M3N2M3N2M3N2N2M3N2M3N2M3N2N2M3N2M3N2M3N2N2M3N2M3N2M3N2N2M3N2M3N2M3N2M3N2N2M3N2M3N2M3N2N2M3N2M31O001O00001O001O001O00001O001M2N2M4M2M4M2M3N3M2M4M2M4M2M3N3M2M4M2M3N3L3N3L3N2N3L3N3L3N2M4M2N3L3N3L3N2M4M2N3L3N2M4M2M4M2N2M4M2M4M2M4M2M3NVd^5"}, "image_id": 3, "id": 49}, {"iscrowd": 0, "category_id": 1, "bbox": [172.0, 494.0, 55.0, 18.0], "area": 525, "segmentation": {"size": [512, 512], "counts": "n_f22l?2M3N2M3M3N2O100001O001O00001O001O00001O001O00001O00001O001O00001O001O00001O001O00001O001O00001ON2N21O00001LWP^4"}, "image_id": 3, "id": 50}, {"iscrowd": 0, "category_id": 1, "bbox": [39.0, 0.0, 174.0, 215.0], "area": 23583, "segmentation": {"size": [512, 512], "counts": "fec03i?4M3L4L5L3L4M3L5L3L4M3L5K4M3L5L3L4N201O01O00010O0000010O000010O00010O0N2M3L5K4L4M3L5K4M3L4L5K4M3L4L5L3L4L5K4M3L4L5L3L4L4L5L3L4L4M4K4L4M3L5O00001O0000001O0`FdI\\9`6000001O0000001O00001O0000N200000000001O0000001O0000001O0000001O0000001O00000000O100O1O100O100O1O100O1O100O1O1YN^FPMb9l2dFQM]9k2iFSMW9j2nFSMS9i2RGVMn8f2XGXMh8e2]GYMc8c2cG\\M\\8`2jG^MV8^2PH`MP8]2THbMl7Z2ZHeMe7W2`HhM`7T2eHhM^7U2eHhM^7T2fHhM^7T2gHgM]7V2fHfM^7V2fHgM]7U2gHgM]7U2hHfM\\7W2X3L4L4L4M3L4L4M3L4L4L4M3L4L4M3L4L4L4M3LT`e4"}, "image_id": 4, "id": 51}, {"iscrowd": 0, "category_id": 1, "bbox": [341.0, 0.0, 11.0, 6.0], "area": 35, "segmentation": {"size": [512, 512], "counts": "P`Z51o?1O1O001O1O1OO1N2O1OQP`2"}, "image_id": 4, "id": 52}, {"iscrowd": 0, "category_id": 1, "bbox": [497.0, 259.0, 15.0, 54.0], "area": 429, "segmentation": {"size": [512, 512], "counts": "bih73j?3L5L3M3L4M4K4M3M4K4M3L4M4L3mG"}, "image_id": 4, "id": 53}, {"iscrowd": 0, "category_id": 1, "bbox": [98.0, 349.0, 59.0, 36.0], "area": 1204, "segmentation": {"size": [512, 512], "counts": "_[a13i?4M4K4M3L4010O00010O0000010O00010O0001O01O00010O0001O01O00010O0001O01O00010O0001O01O00010O0001O01O00010O000L4M4K4M3LhTa5"}, "image_id": 4, "id": 54}, {"iscrowd": 0, "category_id": 1, "bbox": [89.0, 366.0, 149.0, 146.0], "area": 12814, "segmentation": {"size": [512, 512], "counts": 
"Vo\\14i?4K4L4M4K4L4M3L5K4M3L5K4M3L4L5O01O01O0001O01O01O0001O01O01O01O0001O01O01O01O0001O01O01O01O0001O01O0001O01O01M2L4M3L5K4M3L5L3L4M3L5L3L4L5L3L4M3L5L3L4L4N30O0000001O00001O0000001O00001O0000001O00001O0000001O0000001O00001O00000O2O0O1FYDZLi;b3[D]Le;_3_D`Lb;]3aDbL`;[3dDdL];W3h0M3L5L3M3L4M4K4M3M3L5L3L4M4L3L4M3L5L3M3L5L3L4M3M4K4M3L5LPdX4"}, "image_id": 4, "id": 55}, {"iscrowd": 0, "category_id": 1, "bbox": [77.0, 440.0, 19.0, 42.0], "area": 395, "segmentation": {"size": [512, 512], "counts": "lnV14h?4M3L5K4L4M3L5K4L11O3L4L4L5L3L4L4L5LYb_6"}, "image_id": 4, "id": 56}, {"iscrowd": 0, "category_id": 1, "bbox": [286.0, 304.0, 41.0, 40.0], "area": 821, "segmentation": {"size": [512, 512], "counts": "RZ_42m?1O2N2M3N2N2N2N1O2M3N2N2N2N1N3O1000000000O0100000N2N2N1O2N2M3N2N2N2N1O2M3N2N2N2NlUl2"}, "image_id": 5, "id": 57}, {"iscrowd": 0, "category_id": 1, "bbox": [443.0, 0.0, 20.0, 5.0], "area": 58, "segmentation": {"size": [512, 512], "counts": "P`m61o?0000001O0000001O0000001O0000001O0000NR`h0"}, "image_id": 6, "id": 58}, {"iscrowd": 0, "category_id": 1, "bbox": [463.0, 4.0, 49.0, 43.0], "area": 1658, "segmentation": {"size": [512, 512], "counts": "\\`W71o?6I6K6J6J6J5K0O100000O1000O100000O1000O1000O1000O100000O1000O100000O1000O100000O1000O100000O106JF"}, "image_id": 6, "id": 59}, {"iscrowd": 0, "category_id": 1, "bbox": [404.0, 90.0, 97.0, 73.0], "area": 2418, "segmentation": {"size": [512, 512], "counts": "XTZ6;b?30000001O00000001O000000002OO000001O0O1E;F:L4000^O]AGc>OgA1R>MWB3e=2ZBOe=1[BOe=1[BOe=1[BOe=1[BOe=1[BOe=1[BOe=1\\BNd=2\\BNd=2\\BNe=2ZBOe=1[BOe=1[BO\\=:dBF\\=:dBF\\=:dBF]=9cBG]=9cBG]=9dBF\\=:dBF\\=;cBE]=;cBF\\=:dBF\\=:dBF\\=:dBF\\=:dBF]=9cBG]=9cBG]=9dBF\\=:dBF\\=;cBE]=;cBF\\=5iBKW=JTC6[>00000001Oa@FX?a000000000000000010L3L40000000000001O0001O00000000000001O00Ii[5"}, "image_id": 6, "id": 60}, {"iscrowd": 0, "category_id": 1, "bbox": [498.0, 123.0, 14.0, 10.0], "area": 106, "segmentation": {"size": [512, 512], "counts": "RTi71i?6O1010O00000000010O000000SL"}, "image_id": 6, "id": 61}, {"iscrowd": 0, "category_id": 1, "bbox": [303.0, 168.0, 64.0, 44.0], "area": 2655, "segmentation": {"size": [512, 512], "counts": "Sfg4?f>k00000000000000000000000000000000000000001O000001O000000000000000000000000000000000000000000000001O00000001O00000000000000000AU[X2"}, "image_id": 6, "id": 62}, {"iscrowd": 0, "category_id": 1, "bbox": [426.0, 208.0, 86.0, 43.0], "area": 2225, "segmentation": {"size": [512, 512], "counts": "QWe67a?8I7N3O000000000000010O0000000000010O000000000000010O000000000000010O00000QA\\Ob>Q100000010O0000000000O2H7J61O0000000001O0001O0000000001O0001O00000001O0001O0000000001O0001O00000VI"}, "image_id": 6, "id": 63}, {"iscrowd": 0, "category_id": 1, "bbox": [168.0, 0.0, 31.0, 10.0], "area": 128, "segmentation": {"size": [512, 512], "counts": "PPd21o?0000001O00001O0000001O00001O0X@Mc?80O100O1O100O1O100O100O1O100OQ`l4"}, "image_id": 8, "id": 64}, {"iscrowd": 0, "category_id": 1, "bbox": [483.0, 3.0, 20.0, 14.0], "area": 138, "segmentation": {"size": [512, 512], "counts": "X`a73j?3N3O00010O00010O00010OI[@3i?0010O00010N1Mg_4"}, "image_id": 8, "id": 65}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 12.0, 204.0, 134.0], "area": 16007, "segmentation": {"size": [512, 512], "counts": 
">T2j=200010O0000010O000010O0000010O000010O0000010PNWBg1i=UN\\Bj1m=001O01O01O0001O01O0001O01O01O0001O01O01O0001O01O0001O01O01O0001O01O01O0001O01O0001O01O01O01O0001O01O0001O01O01O0001O01O01O0001O01O0001O01O0\\BUNT=j1hBZNX=f1dB^N\\=c1`BaN_=P21O0001O01O01O0001O01O01O0001O0M3L5L3L4L4L5K4M3L5O0001O01O01O0001O01O01O01O0001O01O01O01O0001O01O0001O01O01O01O0001O01O01O01O0001O01O0001O01O01O01O0001O01O0O1M4K4M3L4L5TOnBeNU=W1oBeNU=X1oBdNT=X1PCdNU=W1oBeNU=X1oBdNT=\\1i0O01O00N3L3L4L5L3L4M3L5K4M3L4Mlni4"}, "image_id": 8, "id": 66}, {"iscrowd": 0, "category_id": 1, "bbox": [472.0, 112.0, 40.0, 41.0], "area": 1259, "segmentation": {"size": [512, 512], "counts": "dS\\733Ne?;L4L4L5J5L4L4L1N01000000O5L1O000O10O1000O10O1000O10O1000O10O1000O10O1000O10O1`L"}, "image_id": 8, "id": 67}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 121.0, 44.0, 41.0], "area": 1251, "segmentation": {"size": [512, 512], "counts": "i3o0Q?00010O0001O01O00010O0001O01O00010O0000010O00010O0000010O0001O01O00010O00L5L3L4L4M4K4MnkY7"}, "image_id": 8, "id": 68}, {"iscrowd": 0, "category_id": 1, "bbox": [419.0, 167.0, 18.0, 18.0], "area": 204, "segmentation": {"size": [512, 512], "counts": "aea63j?3N2M4M201O01O010O00010O01OM4M2M4MfZU1"}, "image_id": 8, "id": 69}, {"iscrowd": 0, "category_id": 1, "bbox": [466.0, 0.0, 46.0, 33.0], "area": 987, "segmentation": {"size": [512, 512], "counts": "YPY72l?2N3M2N3N101O001O001O001O001O001O001O001O001O001O001O001O001O001O001O001O001O001O001O001O00"}, "image_id": 10, "id": 70}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 33.0, 17.0], "area": 290, "segmentation": {"size": [512, 512], "counts": "01o?2N1O1O1O2N1O1O1O1O2N1O1O1OO1O1O1O1O100O1O1O1O100O1O1O1O1O100O1OQ`_7"}, "image_id": 11, "id": 71}, {"iscrowd": 0, "category_id": 1, "bbox": [28.0, 0.0, 58.0, 57.0], "area": 1617, "segmentation": {"size": [512, 512], "counts": "PQ>2n?1N2N2N2N3M2N2N2N2N2O1N2N2N2N3M2N2N2N2N2N2OO0000000000000000010O000000001O2N2N2N2O1N2N2N2N2N3M2N2N2N2O1N2N2N2N3M2N[od6"}, "image_id": 11, "id": 72}, {"iscrowd": 0, "category_id": 1, "bbox": [194.0, 0.0, 32.0, 24.0], "area": 456, "segmentation": {"size": [512, 512], "counts": "UPQ31n?2N2N3M2N2O1O1O1O1O1O2N1O1O1O1O1OO1O1O1O100O1O2N2N2N2N3M2N2O1Nho^4"}, "image_id": 11, "id": 73}, {"iscrowd": 0, "category_id": 1, "bbox": [257.0, 0.0, 64.0, 47.0], "area": 1556, "segmentation": {"size": [512, 512], "counts": "P`P42n?1O1O1O1O1O2N1O1O1O1O1O1O1O1O2N1O1O1O1O1O1O1O2N1O1O1O1O1O1O1O2N1O1O1O1O1O1O1O2N1OO1O3M2N2N2O1N2N2N2N2N3M2N2N2N2O1N2N2N3M2N2N2NY_o2"}, "image_id": 11, "id": 74}, {"iscrowd": 0, "category_id": 1, "bbox": [329.0, 0.0, 20.0, 10.0], "area": 103, "segmentation": {"size": [512, 512], "counts": "P`T51o?1O1O1O1O1O1O1O1O1OO1O1O1O1O1O1O100O1OQ`a2"}, "image_id": 11, "id": 75}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 9.0, 9.0, 9.0], "area": 41, "segmentation": {"size": [512, 512], "counts": "^`m21n?2N2N2N1O03M2N2Ndom4"}, "image_id": 11, "id": 76}, {"iscrowd": 0, "category_id": 1, "bbox": [236.0, 19.0, 58.0, 58.0], "area": 1570, "segmentation": {"size": [512, 512], "counts": "XQf32m?2O1N3M2N2O1N3M2N2O2M00000010O0001O2O1N3M2N2N2YAoN]>S1aAoN\\>S1bAoN\\>S1bAoN\\>\\1O2O000010M2O1N3M2N2O1N3M2N2N3N1N2N3M2O1N2N3M2O1N3M2N2N2O2M2NYn\\3"}, "image_id": 11, "id": 77}, {"iscrowd": 0, "category_id": 1, "bbox": [507.0, 20.0, 5.0, 10.0], "area": 27, "segmentation": {"size": [512, 512], "counts": "i`m71n?2N2M3N2[O"}, "image_id": 11, "id": 78}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 27.0, 3.0, 4.0], "area": 7, "segmentation": {"size": [512, 512], "counts": "k04m?N2OS_n7"}, "image_id": 
11, "id": 79}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 31.0, 24.0, 33.0], "area": 470, "segmentation": {"size": [512, 512], "counts": "S1e0Z?3N1N2N1O0102M2001O01O000N3N1N2N3M2N2O2M2N2N3NZnc7"}, "image_id": 11, "id": 80}, {"iscrowd": 0, "category_id": 1, "bbox": [59.0, 36.0, 64.0, 63.0], "area": 1895, "segmentation": {"size": [512, 512], "counts": "]bm01o?2M2N2N2N2KFa@<]?5N2N2N2N2N3M2N2N2N2N2O1N2N1O000000000000000010O00000000000000000000010O2N2N2N2N2N2N002N2N2N2XOi@e0Z?N2N2N2N2N2N2N3M2N2NW^R6"}, "image_id": 11, "id": 81}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 47.0, 50.0, 48.0], "area": 1400, "segmentation": {"size": [512, 512], "counts": "RRW71n?2N2N2N2N2N2N2N2N2N2O2O0O1N2N2N2LSORAo0l>3O2N1O1100O1OO2N100100O1O100O1POUAi0l>UOUAk0l>SOVAm0n>01O01O00010O00010O00010O000oM"}, "image_id": 11, "id": 82}, {"iscrowd": 0, "category_id": 1, "bbox": [30.0, 63.0, 13.0, 12.0], "area": 85, "segmentation": {"size": [512, 512], "counts": "VR?2m?1O2N2N2O0O1O0002N2N2N2Nn]Z7"}, "image_id": 11, "id": 83}, {"iscrowd": 0, "category_id": 1, "bbox": [107.0, 70.0, 59.0, 54.0], "area": 1608, "segmentation": {"size": [512, 512], "counts": "ibe12m?2O1N2N2N2N2N2N3M2N2N2N2N2N2N2N2N2N2N2N2O10000000001O00000001O0O1O0O0000002N2N2N3M2N2N1O00002N2N2N2N2N2N2O1N3M2N2N2Nol\\5"}, "image_id": 11, "id": 84}, {"iscrowd": 0, "category_id": 1, "bbox": [419.0, 86.0, 62.0, 54.0], "area": 1710, "segmentation": {"size": [512, 512], "counts": "Xca62m?2N2N2N2N2O1N2N2N2N2N2N2N2N2N2N2N3M2N2O1000000000000001O000000N2N2N1O000001O3M2N2N2N2N2N2O1N1O002N2N2N2N2N2N2N2O1N2N2N2N2N`\\?"}, "image_id": 11, "id": 85}, {"iscrowd": 0, "category_id": 1, "bbox": [3.0, 95.0, 29.0, 29.0], "area": 443, "segmentation": {"size": [512, 512], "counts": "[c12m?2O2M2N2N2N2N2N2N2N3N1N2N2N01O2N2O1N2N2N2N3M2N2N2N2O1N2Ndl_7"}, "image_id": 11, "id": 86}, {"iscrowd": 0, "category_id": 1, "bbox": [60.0, 114.0, 70.0, 45.0], "area": 1684, "segmentation": {"size": [512, 512], "counts": "RTn02m?2N2N2N2N2O1N2`@B[?`0d@AZ?d0N2N2N2N2O1000000000001O000000OO00000000000001O000000000001O01O3O0000000000000001O0000000000O1N2N3M2N2N2N2N2N2N2O1N2N2N2N`kn5"}, "image_id": 11, "id": 87}, {"iscrowd": 0, "category_id": 1, "bbox": [485.0, 115.0, 27.0, 31.0], "area": 490, "segmentation": {"size": [512, 512], "counts": "Rdb72m?2N2N2N3M2N2N2O1N2N2N2N2N2N1O0001O3M2O1N2N2N2N2N2N2NTL"}, "image_id": 11, "id": 88}, {"iscrowd": 0, "category_id": 1, "bbox": [113.0, 116.0, 13.0, 13.0], "area": 88, "segmentation": {"size": [512, 512], "counts": "hch11n?3M2O1N2N2O1000N3M2N2N2NVlP6"}, "image_id": 11, "id": 89}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 119.0, 16.0, 25.0], "area": 248, "segmentation": {"size": [512, 512], "counts": "k3e0Z?10O0010O0011N3N1N3N2M3M3N1N3Njkg7"}, "image_id": 11, "id": 90}, {"iscrowd": 0, "category_id": 1, "bbox": [131.0, 120.0, 80.0, 78.0], "area": 2888, "segmentation": {"size": [512, 512], "counts": "ZeQ21n?2N2N2N2O1@HUA;h>HUA:i>HUA:i>HUA:i>HUA:i>HUA:j>?O101O0000000O1O1N2N3M2N2N000000010O0000000000000001O01O00000000000000010O000000000003M2N2O1N2N2N2N2N2N3M2N2O1N2N2N2N2N3M2N2N2O1N2N2N2N][f4"}, "image_id": 11, "id": 91}, {"iscrowd": 0, "category_id": 1, "bbox": [375.0, 124.0, 58.0, 59.0], "area": 1626, "segmentation": {"size": [512, 512], "counts": "`dk52m?2O1N2N2N2N2N2N2N2N3M2N2N2N2N2N2N2O1N2N2N2N2000001O0000000001O000000000N2N2O1N2N2N2N2N2N2N3M2N2N2N2N2N2N2N2O1N2N2NP[W1"}, "image_id": 11, "id": 92}, {"iscrowd": 0, "category_id": 1, "bbox": [14.0, 137.0, 18.0, 16.0], "area": 145, "segmentation": {"size": [512, 512], "counts": 
"`T72m?2N2N2N3N1N2N0010O2N2G\\@3f?K\\@3h?010010O1N`k_7"}, "image_id": 11, "id": 93}, {"iscrowd": 0, "category_id": 1, "bbox": [451.0, 143.0, 10.0, 10.0], "area": 50, "segmentation": {"size": [512, 512], "counts": "bdQ72m?2N2N200000N2N2O1N\\[i0"}, "image_id": 11, "id": 94}, {"iscrowd": 0, "category_id": 1, "bbox": [14.0, 147.0, 44.0, 44.0], "area": 909, "segmentation": {"size": [512, 512], "counts": "TU72m?2N2N2N2N2N00002N2O1N2N2N2N3M2N2N2N2O100000001O02N00000N2N2N3M2O1N2N2N2N2N2N2N3M2N2O1NbjR7"}, "image_id": 11, "id": 95}, {"iscrowd": 0, "category_id": 1, "bbox": [433.0, 162.0, 30.0, 29.0], "area": 465, "segmentation": {"size": [512, 512], "counts": "`eh62m?2N3M2N2N2N2O1N2N2N2N2N2N1O1O01O2N2N2N2N2N2N2N2N3M2O1N2N2NaZh0"}, "image_id": 11, "id": 96}, {"iscrowd": 0, "category_id": 1, "bbox": [344.0, 163.0, 53.0, 57.0], "area": 1514, "segmentation": {"size": [512, 512], "counts": "cU\\52m?2N2Z@L]?6a@L]?6a@L]?=N2N2N2N2N2N2N2N2N2N2N3N10000O1N200000000000000001O000fN]AU1d>iN^AU1h>N2N2N2N2N2N2N2N2N2O1N2N2N2N2N2N2N2N2NiYi1"}, "image_id": 11, "id": 97}, {"iscrowd": 0, "category_id": 1, "bbox": [77.0, 167.0, 59.0, 57.0], "area": 1693, "segmentation": {"size": [512, 512], "counts": "leV12m?2DMh@5W?Lg@6W?Lg@6W?Lg@6W?Lg@6W?;N2N2N2N20010O00000N2N2N2N2N2N2N2O1000001O00000000dNaAV1_>hNcAW1^>hNcAV1e>N2N2N2N2O1N2N3M1O000000001O2N2N3M2N2N2O1N2N2N2Njik5"}, "image_id": 11, "id": 98}, {"iscrowd": 0, "category_id": 1, "bbox": [496.0, 172.0, 16.0, 32.0], "area": 268, "segmentation": {"size": [512, 512], "counts": "iUh72m?2N2O1N2N3M2N2N2N2N2N2O1N2N3M2cJ"}, "image_id": 11, "id": 99}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 194.0, 16.0, 30.0], "area": 255, "segmentation": {"size": [512, 512], "counts": "R6n0S?O1N2N2N2N2N2N2N2N2N2N2N2N3N1N_ig7"}, "image_id": 11, "id": 100}, {"iscrowd": 0, "category_id": 1, "bbox": [302.0, 194.0, 56.0, 58.0], "area": 1587, "segmentation": {"size": [512, 512], "counts": "fVg43l?2N2N2N2N2N2O1N2N2N2N2N2j@XOQ?m0N2N2N2N2N002N2N20000000000000010O00000000O1N2N2N2N2O1N2N2N2N2N2N3M2N2N2N2N2N2N2N2N2Nkh\\2"}, "image_id": 11, "id": 101}, {"iscrowd": 0, "category_id": 1, "bbox": [470.0, 194.0, 29.0, 30.0], "area": 418, "segmentation": {"size": [512, 512], "counts": "\\V[71n?3M2N2N2N2N2O1N2N2N2N3O00000001O000N2N2N2O1N2N2N3M2N2N2N\\Y6"}, "image_id": 11, "id": 102}, {"iscrowd": 0, "category_id": 1, "bbox": [403.0, 202.0, 30.0, 30.0], "area": 477, "segmentation": {"size": [512, 512], "counts": "ifY62m?2N3M2N2N2O1N3M2N2N2N2OO01O00000001O2N2O1N2N2N3M2N2O1N2N3MZYW1"}, "image_id": 11, "id": 103}, {"iscrowd": 0, "category_id": 1, "bbox": [37.0, 204.0, 62.0, 53.0], "area": 1724, "segmentation": {"size": [512, 512], "counts": "mfb01n?2N2N2N2N2N2N2N3M2N2N2N2O1N2N2N2N2N2000001O00000000000000000N2N2N2N1O00000001O002N2N2N2O1N3M2N2N2N2N2N2N2N2N2N2N2N2O2M2N2NkX^6"}, "image_id": 11, "id": 104}, {"iscrowd": 0, "category_id": 1, "bbox": [452.0, 225.0, 53.0, 64.0], "area": 1741, "segmentation": {"size": [512, 512], "counts": "lWR72m?2N2O1N2N2N3M2N2N2O1E\\OWAf0g>\\OWAg0f>[OXAg0f>[OXAg0g>:N2N2N3M2N2N2001O01O0N2N2N20001OO1N2N3M2N2aNbAY1`>fNbAX1d>N2N2N2N2VOSA>P?@QA>Q?@RA>o>@SA>Y?N2N2N2N3M2O1NdW3"}, "image_id": 11, "id": 105}, {"iscrowd": 0, "category_id": 1, "bbox": [262.0, 229.0, 63.0, 56.0], "area": 1671, "segmentation": {"size": [512, 512], "counts": "hWS41n?2N2N2N3M2N2N2N2N2N2O1N2N2N2N2N2N2N2N2N2O10001O000001O000000000000000N2N2N2N2N2N2N000000000000001O2N2N2N2O2M2N2N2N2N2N2N2N2NPXm2"}, "image_id": 11, "id": 106}, {"iscrowd": 0, "category_id": 1, "bbox": [361.0, 232.0, 40.0, 41.0], "area": 942, "segmentation": 
{"size": [512, 512], "counts": "kgd52m?3M2N2N2N2N2N2N2O1N2N3M2N2N2N2N2O1O1N00000002N2O1N3M20000001OO1N2N2Ci@NY?0i@NY?1h@M[?0g@N[?0g@N[?0`Xg1"}, "image_id": 11, "id": 107}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 235.0, 58.0, 58.0], "area": 1638, "segmentation": {"size": [512, 512], "counts": "n73l?2N2N3N1N2N2N2N2N2N2N3N1N2N2N2N2N2N2N3M2O1N200000000010O000000000N2N2N3N1N2N2N2N2N00002N2O2M2N2N2N2N2N2N2N3N1N2N2NcgR7"}, "image_id": 11, "id": 108}, {"iscrowd": 0, "category_id": 1, "bbox": [225.0, 262.0, 57.0, 58.0], "area": 1596, "segmentation": {"size": [512, 512], "counts": "hh`31n?2N2O1N3M2N2N2N2N2N2O101O0N2N2O1N2N2N2N3M2N200O1N2N2N2N3O00000001O00000N2N2N2N2N3M2QOUAf0m>XOUAf0m>XOUAg0S?N2N2N2N2N3M2N2N2O1N2N2Nefb3"}, "image_id": 11, "id": 109}, {"iscrowd": 0, "category_id": 1, "bbox": [327.0, 268.0, 28.0, 28.0], "area": 400, "segmentation": {"size": [512, 512], "counts": "ihS52m?2O2M2N2N2N2N2N2N2N3N1N00000001O2N2O1N3M2N2N2N2N2N2N2OXW^2"}, "image_id": 11, "id": 110}, {"iscrowd": 0, "category_id": 1, "bbox": [390.0, 274.0, 62.0, 54.0], "area": 1501, "segmentation": {"size": [512, 512], "counts": "SYS61n?2N2N3M2N2N2N2N2N2N2N2O1N2N3M2N2N2N2001O0000000001O0000000000000000N2N3M2N2N2N1O0000010O00000000001O3M2N2N2N2N2N2O1N2N2N2Ndfm0"}, "image_id": 11, "id": 111}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 277.0, 15.0, 54.0], "area": 461, "segmentation": {"size": [512, 512], "counts": "e8f1[>N2N3M2N2mN]Ag0d>WO^Ag0e>WO]Af0e>XO]A9JOk>F]A:JNV?Ol@OV?Ol@OV?Ol@OV?Om@NV?0kVh7"}, "image_id": 11, "id": 112}, {"iscrowd": 0, "category_id": 1, "bbox": [505.0, 281.0, 7.0, 13.0], "area": 58, "segmentation": {"size": [512, 512], "counts": "khl71o?3M2M4M2M10OXG"}, "image_id": 11, "id": 113}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 297.0, 53.0, 55.0], "area": 1448, "segmentation": {"size": [512, 512], "counts": "kim22m?3M2N2N2N2N2N2N2N2N2N2N3M2N2N2N2N2N200000001O000000000O10000O1N2N1O02N2N2N2oNYAf0i>XOYAf0i>XOYAf0i>XOYAf0R?N2N2O1N2N2N3M2N2N2N2NdeW4"}, "image_id": 11, "id": 114}, {"iscrowd": 0, "category_id": 1, "bbox": [288.0, 298.0, 26.0, 26.0], "area": 358, "segmentation": {"size": [512, 512], "counts": "eY`41n?2N2N2N2N3M2N2N2N2N2O1N2000O1N2N2N2N2O1N3M2N2N2N2NYfR3"}, "image_id": 11, "id": 115}, {"iscrowd": 0, "category_id": 1, "bbox": [269.0, 326.0, 20.0, 23.0], "area": 201, "segmentation": {"size": [512, 512], "counts": "ZjV42m?2N2N2N3O00000\\@G`?=000000000O0O2N2N2N3M2N\\U_3"}, "image_id": 11, "id": 116}, {"iscrowd": 0, "category_id": 1, "bbox": [315.0, 330.0, 35.0, 29.0], "area": 496, "segmentation": {"size": [512, 512], "counts": "fjm43l?2N2N2N2N2N2O1N2N2N2N3M2N2O01N2N2N2N2O1N3M2N2N2N2N200000O1O10O1N2N2NTe`2"}, "image_id": 11, "id": 117}, {"iscrowd": 0, "category_id": 1, "bbox": [499.0, 330.0, 13.0, 26.0], "area": 186, "segmentation": {"size": [512, 512], "counts": "eji72m?2N2N2N2N2N3M2N2N2O1N2N2eE"}, "image_id": 11, "id": 118}, {"iscrowd": 0, "category_id": 1, "bbox": [246.0, 331.0, 29.0, 29.0], "area": 429, "segmentation": {"size": [512, 512], "counts": "iZk31n?2N2N2N2N2N2N2N3M2N2O1N2N2N0011N2N2N2N2N2N3M2N2N2N2N2N2NXUf3"}, "image_id": 11, "id": 119}, {"iscrowd": 0, "category_id": 1, "bbox": [147.0, 332.0, 68.0, 71.0], "area": 2245, "segmentation": {"size": [512, 512], "counts": "mjY21n?2N2O1N3M2N2N2N2N2N2O1N2N2N2N3M2N2N2O1N2O100001O01O0000000001O0000O1N2N3O000001O000000000001N1N2N00000001O2N2N3M2N2YOWA4k>JWA4k>JWA4k>KVA4k>JWA4k>JWA4l>IWA4k>JWA4\\?N2NRTd4"}, "image_id": 11, "id": 120}, {"iscrowd": 0, "category_id": 1, "bbox": [441.0, 346.0, 32.0, 42.0], "area": 644, "segmentation": {"size": 
[512, 512], "counts": "ckl62l?3N2N2N2N2N2N2N2N2N001O2N2M3N2N2N1O2N2N]OYANg>2[ALe>4]AJc>6_AHc>6_AHc>6_AHc>5_AId>5^AId>5^AId>5e0N2NoTc0"}, "image_id": 11, "id": 121}, {"iscrowd": 0, "category_id": 1, "bbox": [358.0, 348.0, 27.0, 36.0], "area": 583, "segmentation": {"size": [512, 512], "counts": "Q[c51o?3L5L3M3M3L4M4L3L4M1OO01000O0100003L4M4L3L4M3M3L5L3MWTo1"}, "image_id": 11, "id": 122}, {"iscrowd": 0, "category_id": 1, "bbox": [80.0, 351.0, 80.0, 73.0], "area": 2568, "segmentation": {"size": [512, 512], "counts": "`\\X11n?3M2N2N2N2N2N2N2N2N2N2O1N2N3M2N2N2N2N2N2N2N1O1O0001O00000000000000000001O0001O0000000000000000000001O0001O1O2N2N2N2N3M2N2N2N2O1N2N1O000000001010000N2Fd@M_?1b@M`?1b@M`?19Nad_5"}, "image_id": 11, "id": 123}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 353.0, 46.0, 59.0], "area": 1673, "segmentation": {"size": [512, 512], "counts": "`\\R61T?1cA1U>OXA3?2V>6\\AF46^>7[AF46]>?aAC]>?`AD^>Q1N110O0010O010O0010O010O0010O010O0010O00O2M2N3M2M4M2N3L3N2N3M2M4M2N3L3N2N3M2MbdV1"}, "image_id": 11, "id": 124}, {"iscrowd": 0, "category_id": 1, "bbox": [288.0, 362.0, 12.0, 14.0], "area": 107, "segmentation": {"size": [512, 512], "counts": "][`43m?2M3N2M3N0O0101N3N2M4M^dY3"}, "image_id": 11, "id": 125}, {"iscrowd": 0, "category_id": 1, "bbox": [322.0, 362.0, 6.0, 6.0], "area": 23, "segmentation": {"size": [512, 512], "counts": "][Q52l?2O2O01M2Ofdk2"}, "image_id": 11, "id": 126}, {"iscrowd": 0, "category_id": 1, "bbox": [214.0, 367.0, 32.0, 33.0], "area": 487, "segmentation": {"size": [512, 512], "counts": "i[[31n?2N3M2N2N2N2N2N2O1N2N20000010O00000000000O1N2N2N2N2N3M2N2N2N2OkcT4"}, "image_id": 11, "id": 127}, {"iscrowd": 0, "category_id": 1, "bbox": [320.0, 368.0, 50.0, 57.0], "area": 1830, "segmentation": {"size": [512, 512], "counts": "k[P52m?4M3M3[@F]?b0M3M3L5L3M3L1000O11N5L3M3L3N000O01000O01000O010000O01000O0100003L4M4L3L4M3M3L5L3M3L4M3M3L5LScV2"}, "image_id": 11, "id": 128}, {"iscrowd": 0, "category_id": 1, "bbox": [501.0, 369.0, 11.0, 21.0], "area": 119, "segmentation": {"size": [512, 512], "counts": "jkj71n?2N2N2N2N2N2N2O1N2N3]D"}, "image_id": 11, "id": 129}, {"iscrowd": 0, "category_id": 1, "bbox": [494.0, 371.0, 7.0, 8.0], "area": 30, "segmentation": {"size": [512, 512], "counts": "e[g72m?2O1N3ON2N3MZT5"}, "image_id": 11, "id": 130}, {"iscrowd": 0, "category_id": 1, "bbox": [153.0, 378.0, 31.0, 37.0], "area": 579, "segmentation": {"size": [512, 512], "counts": "`l\\21n?2N2N2O1N2N2AFUAFUAFUAETA=j>ETA>j>CTA?l>;0O1N2N2N002N2N2N2N2O2M2N2N2N2K[@Lg?25NfcS5"}, "image_id": 11, "id": 131}, {"iscrowd": 0, "category_id": 1, "bbox": [262.0, 380.0, 19.0, 18.0], "area": 174, "segmentation": {"size": [512, 512], "counts": "V\\S41n?2N2N2N2N3N1N1O00001O01O2N3M2N2N2N2OmSc3"}, "image_id": 11, "id": 132}, {"iscrowd": 0, "category_id": 1, "bbox": [443.0, 393.0, 62.0, 54.0], "area": 1606, "segmentation": {"size": [512, 512], "counts": "\\mm63m?3M3L3N3L4M2N0O10O10O01000O01000O01000O010I]OPAd0P?_Om@`0S?700O0100O0100EVO`Ai0a>ZO[Ag0e>\\OXAd0g>:10O10O10O10O10O01000O01000O010O13L4M3M3L3N3M3L4M3L3N3MhR3"}, "image_id": 11, "id": 133}, {"iscrowd": 0, "category_id": 1, "bbox": [123.0, 409.0, 15.0, 14.0], "area": 104, "segmentation": {"size": [512, 512], "counts": "Qmm11n?2N2N2N2N2N000001O1O2N2N2N2NTcj5"}, "image_id": 11, "id": 134}, {"iscrowd": 0, "category_id": 1, "bbox": [211.0, 411.0, 46.0, 45.0], "area": 1031, "segmentation": {"size": [512, 512], "counts": "hmY32m?1N3N2N2M2O2N2M3N1O2M3O01000O1N1O2N2M3NO10O10O0102N2M2000O2N2M3N1N3Kn@YOT?d05O2M3N2N1N3N2N2M2O2NjRo3"}, "image_id": 11, "id": 135}, {"iscrowd": 0, 
"category_id": 1, "bbox": [134.0, 418.0, 53.0, 54.0], "area": 1673, "segmentation": {"size": [512, 512], "counts": "j]S24l?3M3M3L4M3M3L4M3M2M01000GRO_An0b>UO[Ak0e>8O01000O10O10O10O10O10O10O10O10O10OMkN]AU1c>4O01000O0101O3L4M3M3L4M3M4L3L4M3M3L4MhQR5"}, "image_id": 11, "id": 136}, {"iscrowd": 0, "category_id": 1, "bbox": [267.0, 428.0, 60.0, 51.0], "area": 1566, "segmentation": {"size": [512, 512], "counts": "RnU43l?1N3N2N2M2O2N2M3N1O2M3N2N1N3N2N2N10100000O0100000O0100000O01000O10O1000O1O0O2N1N100O01001N2O2N2M3N1O2M3N2N1N3N2N2M2O2NPRl2"}, "image_id": 11, "id": 137}, {"iscrowd": 0, "category_id": 1, "bbox": [249.0, 438.0, 15.0, 14.0], "area": 101, "segmentation": {"size": [512, 512], "counts": "jml32m?2N2M2010000O01000000N1O2N2MTbk3"}, "image_id": 11, "id": 138}, {"iscrowd": 0, "category_id": 1, "bbox": [457.0, 453.0, 53.0, 53.0], "area": 1594, "segmentation": {"size": [512, 512], "counts": "[oT71l?3M3M4L3M3O2O01O010O00O2L3M4L3N2010O0010O000N3L3XAlNa>[1M3N3N11O01O01O0N2O2O010O000N3M2M3M4L3M3M4L3N2M4L3M3M4Lea0"}, "image_id": 11, "id": 139}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 461.0, 64.0, 51.0], "area": 1711, "segmentation": {"size": [512, 512], "counts": "_om21n?2N1N3N2M3N1N3N2N2M3N11000O10O10O1000O01N2M2O1O1N2O1N2O1N2O1O1N2O1N2O1O1N12N1N3N2M2O2N2M2O2N2M010O10O10O102N2M2O2M3N2Ic@H^?77M3N1N^QR4"}, "image_id": 11, "id": 140}, {"iscrowd": 0, "category_id": 1, "bbox": [282.0, 480.0, 34.0, 29.0], "area": 505, "segmentation": {"size": [512, 512], "counts": "\\_]42m?1N3N2N2M2O2M3N1O2O10O1000O10O10O10O10O10O01N11N1O2M3N2M2O2N2M2O2Md`Q3"}, "image_id": 11, "id": 141}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 483.0, 13.0, 26.0], "area": 178, "segmentation": {"size": [512, 512], "counts": "S?j0W?N3M2O1N2N2N3M2N2O1N2N3M_Pi7"}, "image_id": 11, "id": 142}, {"iscrowd": 0, "category_id": 1, "bbox": [254.0, 487.0, 41.0, 25.0], "area": 557, "segmentation": {"size": [512, 512], "counts": "d_o32m?2N1N3N2N2M2O2N2d@_OU?g0O1001O1O1O001O1O001O1O1O001O1O1O001O1O1O001O1O1O001O1O1O001O1NTP\\3"}, "image_id": 11, "id": 143}, {"iscrowd": 0, "category_id": 1, "bbox": [36.0, 0.0, 133.0, 63.0], "area": 5517, "segmentation": {"size": [512, 512], "counts": "PPb02n?1O1O2N1O2N1O1O2N1O2N1O1O2N1O1O2N1O2N1O1O2N1O2N1O1O2N1O2N1O1O2N1O2N1O1O2N1O1O2N1O2N1O1O2N00O1O100O1O100O1O1O100O1O1O100O1O100O1O1O100O1O100O1O1O100O1O100O1O1002N1O1O2N1O2N1O1O2N1O2N1O1O2N1O2N1OO100O1O1O100O1O100O1O1O100O1O100O1O1O2kNaAg0`>XOaAf0b>WO`Ag0b>WOaAg0a>VOaAh0a>WOaAf0n>N2O2M2N2O2M2N3M2O1N3Mm^[5"}, "image_id": 12, "id": 144}, {"iscrowd": 0, "category_id": 1, "bbox": [386.0, 0.0, 126.0, 74.0], "area": 4814, "segmentation": {"size": [512, 512], "counts": "VQQ61n?2N3M2@KSA7j>KTA7k>KRA7l>KSA7j>KTA7j>LSA7k>`0N3M2N3N1N2N3N1N3M2N3N1N2N3N1N2N1O01O00010O00100O1O1O100O1O100O1O1O100O1O100O1O1O100O1O100O1O1O100O1O1O100O1O100O1O1O100O1O100O1O1O100O1O100O1O1O100O1O100O1O1O100O1O100O1O1O100O1O100O1O1O100O1O100O1O1O100O1O100O1O1O100O1O1O100O1O100"}, "image_id": 12, "id": 145}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 69.0, 25.0, 26.0], "area": 336, "segmentation": {"size": [512, 512], "counts": "_Ri61o?2M2N3N1N2N3N1N3M2O2M00010O0003N1N3M2O2M2N3M2O2M^]j0"}, "image_id": 12, "id": 146}, {"iscrowd": 0, "category_id": 1, "bbox": [199.0, 82.0, 168.0, 151.0], "area": 12979, "segmentation": {"size": [512, 512], "counts": "UcS31n?2N3M2O2M2N2O2M2N3N1N2N3M2O1aBTOg;o0VDSOh;o0VDTOh;n0VDSOh;o0VDTOh;m0VDUOk;k0SDVOm;j0QDYOo;f0oC\\OQO2M2N3M2O1N3D]AVOe>i0\\AVOf>g0]AVOe>9VA27Cf>8UA4U?Kl@3V?Km@2V?Kl@3V?Km@3a?NY[X2"}, "image_id": 12, "id": 147}, {"iscrowd": 0, 
"category_id": 1, "bbox": [510.0, 98.0, 2.0, 5.0], "area": 8, "segmentation": {"size": [512, 512], "counts": "SSo73l?2mL"}, "image_id": 12, "id": 148}, {"iscrowd": 0, "category_id": 1, "bbox": [37.0, 104.0, 475.0, 375.0], "area": 60414, "segmentation": {"size": [512, 512], "counts": "dmb01n?1O101N1O100O1O2N100O1O1O2O0O1O100O2N1O100O1O2N100O1O2O0O1O1O101N1O1O100O2N100O1O00010O0000012M2N0101N3M2N2O1JRAWOo>h041O01O01O0001O01O0001O03M2O1N3M2N2O2M2N3M2O1N3M2O1N3M2N3N1N2N1O100O2N100O1FhMkBY2T=iMjBX2U=jMjBV2V=lMgBV2W=:O1O100O1O10O0001O02N2O2M2N2N010O000010O000010O0000010O000010O000010O0000010O000010O000010O0000010O000010O000010O0000010O000010O000010O0000010O00010O0000010O0000010O00010O0000010O0000010O00010O0000010O0000010O00010O0000010O0000010O00010OF`MUC`2k4[KZNX4b1?1YK]NX4b1a00VK^NY4b1d0MSKbNX4a1g0KRKcNX4b1g0ISKcNV4d1j0GQKcNU4f1l0EQKcNT4h1m0BRKdNP4j1P1AQKcNo3l1R1_OQKcNm3n1U1\\OQKcNk3Q2U1ZO3f00XOOh03VOMj05TOLl06QOJo08POGP1kNCU1>iNBW1a0gN^OY1d0eN]O[1d0cN\\O]1g0aNXO_1j0_NVOa1m0\\NTOd1m0ZNSOf1o0YNPOg1S1VNoNh1S1VNPOh1R1UNPOi1R1VNPOh1e7000lAYNo=g1oAZNQ>k1001M[L"}, "image_id": 12, "id": 149}, {"iscrowd": 0, "category_id": 1, "bbox": [158.0, 205.0, 14.0, 15.0], "area": 117, "segmentation": {"size": [512, 512], "counts": "bV_23m?2M2N3N1N2N01O012M2N3N1N2N]iY5"}, "image_id": 12, "id": 150}, {"iscrowd": 0, "category_id": 1, "bbox": [209.0, 0.0, 14.0, 7.0], "area": 57, "segmentation": {"size": [512, 512], "counts": "P`X31o?1O001O1O1O001O1O00O1N2O1OQ``4"}, "image_id": 13, "id": 151}, {"iscrowd": 0, "category_id": 1, "bbox": [245.0, 0.0, 49.0, 38.0], "area": 974, "segmentation": {"size": [512, 512], "counts": "f`j31m?2O2M3N2N1N3N2N2M2O2N2M3N1O2M3N2O001O1O1OO1N2O1O1N2O1O1N2O1O1N2O1O1N2O1O1N2O1O1N2O1O1N2O1O1N2O1OQP]3"}, "image_id": 13, "id": 152}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 2.0, 54.0, 53.0], "area": 1540, "segmentation": {"size": [512, 512], "counts": "i0:e?2N2N1O2N2M3N2N2N2N2N2N2N2N2N2N20000000000N000000O100000000000001O2N2N2N1O2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N\\oT7"}, "image_id": 13, "id": 153}, {"iscrowd": 0, "category_id": 1, "bbox": [424.0, 2.0, 60.0, 56.0], "area": 1655, "segmentation": {"size": [512, 512], "counts": "jPd61m?3N2M3N1O2M3N2N1N3N2N2M2O2N2M2O2N2O1O0O2O1O10O1000O10O1000O10O1000O10O10OiN[AT1i>O10O1000N2N1O2M3N2N1N3N2N2M2O2M3N1O2M3N2N1NQo="}, "image_id": 13, "id": 154}, {"iscrowd": 0, "category_id": 1, "bbox": [491.0, 16.0, 21.0, 31.0], "area": 383, "segmentation": {"size": [512, 512], "counts": "Rae71n?2N1N3N2M3N1O2M3N1O2M3N2M210O1000N2M2O20ZO"}, "image_id": 13, "id": 155}, {"iscrowd": 0, "category_id": 1, "bbox": [363.0, 37.0, 60.0, 49.0], "area": 1803, "segmentation": {"size": [512, 512], "counts": "nae52l?3M2N3M2N3M2N3M3M2N3M2N3N1010O010O010O010O0100O010O0100ON3M2N3N1010O010O10O10O010O0N30N1N30O0N3M2DZA\\Oi>a0YA]Oi>a0ZA\\Oi>a01m?2O2N2N2M3N2N1O2M3N2N2O10O100000O1000O10O1N2M3N2N1O2N2M3N2N1O0O0100000O0100000O03N2N2N1N3N2N000O012N2N1O2M3^Oh@9Z?Eg@:[?Dg@:a?M3N2N1O[nb6"}, "image_id": 13, "id": 157}, {"iscrowd": 0, "category_id": 1, "bbox": [428.0, 48.0, 32.0, 33.0], "area": 559, "segmentation": {"size": [512, 512], "counts": "RRf61n?2M3N2N1N3N2M3N1O2M3N1O2M3O10O1000O10N2N2N1N3N2M3N1O2M3N1O2M3NTni0"}, "image_id": 13, "id": 158}, {"iscrowd": 0, "category_id": 1, "bbox": [4.0, 52.0, 15.0, 14.0], "area": 96, "segmentation": {"size": [512, 512], "counts": "iQ21n?2N2M3N20O10O100000N2N2N2N1OU^f7"}, "image_id": 13, "id": 159}, {"iscrowd": 0, "category_id": 1, "bbox": [510.0, 55.0, 2.0, 3.0], "area": 4, "segmentation": {"size": [512, 512], "counts": "iQo71m?2YN"}, "image_id": 13, "id": 
160}, {"iscrowd": 0, "category_id": 1, "bbox": [74.0, 67.0, 56.0, 61.0], "area": 1669, "segmentation": {"size": [512, 512], "counts": "\\SU12m?2M2O2N2M3N1N3N2N2M2O2N2M2O2N2M3N1O2M3N2N1N3N2N2M10000O010000O3N2N1N3N2N2M2O2M3N2N1N3N2N2M2O2N2M2O2N2M3N1O2M3N\\mn5"}, "image_id": 13, "id": 161}, {"iscrowd": 0, "category_id": 1, "bbox": [497.0, 67.0, 15.0, 27.0], "area": 237, "segmentation": {"size": [512, 512], "counts": "cbh71m?3N2M2O2N2M3N1N3N2N1N3O100O0lM"}, "image_id": 13, "id": 162}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 71.0, 27.0, 37.0], "area": 569, "segmentation": {"size": [512, 512], "counts": "d2b0]?2N2N2N2N2N2N0O1000000002N2N2N2M3N2N2N2N2N2N1O2N2N2M^]b7"}, "image_id": 13, "id": 163}, {"iscrowd": 0, "category_id": 1, "bbox": [440.0, 71.0, 30.0, 37.0], "area": 523, "segmentation": {"size": [512, 512], "counts": "nRl62l?3N2N1N3N2N2M2O2N2M2MZOk@h0S?4M3N1O2N2OROXAc0j>[OXAc0j>[OXAc0S?N3N2M3N1O2M3N2N1N3NWmd0"}, "image_id": 13, "id": 164}, {"iscrowd": 0, "category_id": 1, "bbox": [325.0, 82.0, 63.0, 59.0], "area": 1892, "segmentation": {"size": [512, 512], "counts": "YcR51n?1O2M3N2M2O2N2M3N1O2M3N2N1N3N2N1\\AQOU>R1hAQOW>P1gAROY>n0eASO\\>l0bAWO]>W1000O0100000O010000N1O000001N3N2N2M2O2N2M3N1O2M3N1OO01000O03N2M2O2N2M3N1O2M3N2N1N3Nolm1"}, "image_id": 13, "id": 165}, {"iscrowd": 0, "category_id": 1, "bbox": [386.0, 89.0, 17.0, 16.0], "area": 152, "segmentation": {"size": [512, 512], "counts": "kRQ61n?2O1U@Nf?6O2O10000N2N110000M2O2M3N2NR]f1"}, "image_id": 13, "id": 166}, {"iscrowd": 0, "category_id": 1, "bbox": [111.0, 100.0, 61.0, 56.0], "area": 1621, "segmentation": {"size": [512, 512], "counts": "Tdg11n?2M3N2N1N3N2M2O2N2M3N110O10O1000O10O1000N2N1N3N2N1N3N2M3N1O2M3N1O0O010O010002M2O2N2M3N1N3N2N1N3N2M3N1O2M3N1N3N2N2M2O2N2M^lY5"}, "image_id": 13, "id": 167}, {"iscrowd": 0, "category_id": 1, "bbox": [385.0, 101.0, 39.0, 41.0], "area": 791, "segmentation": {"size": [512, 512], "counts": "PdP62m?2N2M2O2N2M3N1O2M3N2N1N3N2N2M2O2N0O1000O101O1N3N2N2N1N3N2N2M2O2N2M3N1O2M3N2N`l[1"}, "image_id": 13, "id": 168}, {"iscrowd": 0, "category_id": 1, "bbox": [41.0, 104.0, 39.0, 38.0], "area": 767, "segmentation": {"size": [512, 512], "counts": "icd03l?2N1N3N2M3N1O2M3N1O2M3N2N110O1000O01000O10O1000O10ON3N2M2O2N2M3N1O2M3N2M2O2NWlg6"}, "image_id": 13, "id": 169}, {"iscrowd": 0, "category_id": 1, "bbox": [484.0, 113.0, 22.0, 25.0], "area": 282, "segmentation": {"size": [512, 512], "counts": "PTb73l?2N1N3N2M2O2M3N2M2O2NO3N1N3N2M3N1N3N2N1N3NYl2"}, "image_id": 13, "id": 170}, {"iscrowd": 0, "category_id": 1, "bbox": [416.0, 115.0, 55.0, 58.0], "area": 1517, "segmentation": {"size": [512, 512], "counts": "jT`61n?2N2N2M2O2N2M3N1O2M3N2N1O2M3N2N1N3N2N2M3N1O2M3N1O00O01000O012N2M2O2N2N2M2O2N2M3N1O2M3N2N2M2O2N2N2M2O2N2M3N1Ol[d0"}, "image_id": 13, "id": 171}, {"iscrowd": 0, "category_id": 1, "bbox": [158.0, 129.0, 55.0, 60.0], "area": 1528, "segmentation": {"size": [512, 512], "counts": "ZU_23l?2M2O2N2M3N1O2M3N2N1N3N2M2010000N1N3N2M3N1O2M1000O010O10O10O10O10O011O2M2O2N2M3N1N3N2N2M2O2N2M3N1O2M3N1N3N2Nb[e4"}, "image_id": 13, "id": 172}, {"iscrowd": 0, "category_id": 1, "bbox": [463.0, 142.0, 49.0, 60.0], "area": 1493, "segmentation": {"size": [512, 512], "counts": "jeW71n?2M2O2M3N2M2O2N2M2^O_OdAc0Z>_OcAd0Z>^OeAc0Z>_OcAd0[>^OcAd0Z>_OdAc0Z>^OdAd0Z>c0N2M20100O01M3N1O2M3N1N3N2M3N1O2M3N1N3N2M2O2N2M3N1N3N2M2O2N2M2OPK"}, "image_id": 13, "id": 173}, {"iscrowd": 0, "category_id": 1, "bbox": [75.0, 150.0, 25.0, 27.0], "area": 365, "segmentation": {"size": [512, 512], "counts": 
"VeU13l?2N2M2O2N2M3N1O2M3N2N1N21N2N1N3N2N2N1N3N2N2M2O2NRk]6"}, "image_id": 13, "id": 174}, {"iscrowd": 0, "category_id": 1, "bbox": [196.0, 161.0, 58.0, 53.0], "area": 1520, "segmentation": {"size": [512, 512], "counts": "PVR32l?3N1N3N2N1N3N2M2O2N2000O0100M2O2M3N2N110O10O1000N1O2M3N2M2O1O0O010O01N11100O2N2N2M2O2M3N1N3N2N2M2O2M3N1O2M3N1N3N2McjP4"}, "image_id": 13, "id": 175}, {"iscrowd": 0, "category_id": 1, "bbox": [120.0, 185.0, 23.0, 23.0], "area": 289, "segmentation": {"size": [512, 512], "counts": "TVl13l?2N1N3N2M2O2M3N1010000O0100N1O2M3N1N3N2N2M2OoYh5"}, "image_id": 13, "id": 176}, {"iscrowd": 0, "category_id": 1, "bbox": [231.0, 195.0, 63.0, 55.0], "area": 1715, "segmentation": {"size": [512, 512], "counts": "Rgc32m?2N1N3N2M3N1O2M3N1O2M3N2N1N3N2M3N1O2N2000O01000O10O1000O10O10O1N1O1N1000O010O01000O01000O0102M3N1O2M3N2N1N3N2N1N3N2M3N1O2M3Nbi\\3"}, "image_id": 13, "id": 177}, {"iscrowd": 0, "category_id": 1, "bbox": [170.0, 199.0, 23.0, 25.0], "area": 299, "segmentation": {"size": [512, 512], "counts": "fVe22m?2M2O2N2M2O2M3N2N1N3N20N1N3N2N1N3N2M3N1O2M3NbYo4"}, "image_id": 13, "id": 178}, {"iscrowd": 0, "category_id": 1, "bbox": [499.0, 200.0, 13.0, 26.0], "area": 176, "segmentation": {"size": [512, 512], "counts": "hfi71n?2N2M3N1O2M3N2N1N3N2N2M2hI"}, "image_id": 13, "id": 179}, {"iscrowd": 0, "category_id": 1, "bbox": [135.0, 207.0, 16.0, 16.0], "area": 145, "segmentation": {"size": [512, 512], "counts": "ffS22m?2M3N1N3N2O1O010000N1O2N2M3N1N]Yd5"}, "image_id": 13, "id": 180}, {"iscrowd": 0, "category_id": 1, "bbox": [276.0, 222.0, 57.0, 55.0], "area": 1502, "segmentation": {"size": [512, 512], "counts": "mWZ42l?3N2M2O2M3N2N1N3N200O01000O10O10M3N2N1N3N2M2O2M3N2N1N3N2M3NO010O3N2N1N3N2N2M2O2N2M2O2N2M2O2N2M3N1N3N2N1N3N2N1N3NbXi2"}, "image_id": 13, "id": 181}, {"iscrowd": 0, "category_id": 1, "bbox": [196.0, 246.0, 58.0, 43.0], "area": 973, "segmentation": {"size": [512, 512], "counts": "UXR31n?2M3N1N3N2N2M2O2N2M3N10100O10O1000O10O1000O10O1000O10O1000O01000O10O1000O01N2M3]Od@?`?N2M3N1O2M3N2OO2N2N0O100O0102N2M2OegP4"}, "image_id": 13, "id": 182}, {"iscrowd": 0, "category_id": 1, "bbox": [316.0, 254.0, 58.0, 64.0], "area": 1710, "segmentation": {"size": [512, 512], "counts": "PYn42l?3N2N1N3N2M2O2M3m@]Of>d0YA^Oe>d0YA^Od>e0YA]Of>e0XA]Og>o00O1000O10O10O10OO2M3N00O010O010O01000O010O010O011O2M3N1N3N2M3N1O2M3N1N3N2M2O2N2M3N1N3N2MfgT2"}, "image_id": 13, "id": 183}, {"iscrowd": 0, "category_id": 1, "bbox": [239.0, 270.0, 29.0, 31.0], "area": 449, "segmentation": {"size": [512, 512], "counts": "Qig32m?2M2O2N2M3N1O2M3N2N1N3N2N2M12N2N1N3N2N2M2O2N2M3N2N1N3N2NXgi3"}, "image_id": 13, "id": 184}, {"iscrowd": 0, "category_id": 1, "bbox": [354.0, 286.0, 57.0, 56.0], "area": 1607, "segmentation": {"size": [512, 512], "counts": "nYa51n?2N2N3M2N2N2N2N3M2O1N2N2N3M2N2N2N2N3M2OOJjNcAV1]>lNaAT1_>7O01O000000001O2N2O0O1O00001O2N2N2N2N3N1N2N2N2N2N2N2N2N3M2O1N2N2N2N^Vb1"}, "image_id": 13, "id": 185}, {"iscrowd": 0, "category_id": 1, "bbox": [277.0, 299.0, 34.0, 43.0], "area": 699, "segmentation": {"size": [512, 512], "counts": "RjZ42m?2M3N1O2M3N2N1N3N2N2M2O2M3N2N1SAPOh>U1O20000OFZAZOg>c0\\A]Od>a0^A^Oc>`0^A_Oc>?`A_Ob>?`A^Oc>`0`0M2O2N2M3N1O2M[VT3"}, "image_id": 13, "id": 186}, {"iscrowd": 0, "category_id": 1, "bbox": [404.0, 311.0, 54.0, 61.0], "area": 1524, "segmentation": {"size": [512, 512], "counts": "S[Z61n?1N3N2N2M2O2M3N1N3N2N1N3N2M3N1O2M3N1N3N2M2O2N2M10O010O010O10O03N2M2O2M3N1N3N2M2O2M2O2M3N1N3N2M2O2M3N1N3N2Mlej0"}, "image_id": 13, "id": 187}, {"iscrowd": 0, "category_id": 1, "bbox": [324.0, 327.0, 32.0, 31.0], 
"area": 525, "segmentation": {"size": [512, 512], "counts": "eZR51n?1O2M3W@Je?:N2M2O2N2N2M3N1010000O0100000O01O1M3N1O2M3N2N1N3N2N2M2O\\e]2"}, "image_id": 13, "id": 188}, {"iscrowd": 0, "category_id": 1, "bbox": [450.0, 340.0, 55.0, 61.0], "area": 1657, "segmentation": {"size": [512, 512], "counts": "n[Q71n?1N3N1N3Y@Ia?=N1N3N1N3N2M2N3N2M2O2M3N1N3M2O2M3O001O1N01O1N2N10O0100O010O2N3N2M2O2M3N1N3M3N1N3N1N3N2M2N3N2M2O2M3MQU3"}, "image_id": 13, "id": 189}, {"iscrowd": 0, "category_id": 1, "bbox": [374.0, 353.0, 24.0, 25.0], "area": 316, "segmentation": {"size": [512, 512], "counts": "a[k52l?2O2M3N2N1N3N2M2O2N2M21O0N3N2N1N3N2M2O2N2M3N1Nidh1"}, "image_id": 13, "id": 190}, {"iscrowd": 0, "category_id": 1, "bbox": [492.0, 377.0, 20.0, 47.0], "area": 485, "segmentation": {"size": [512, 512], "counts": "l\\f72l?2N3M2N3N1N3M2N3M2O2M3M2N3M2O2M2N3M2N3VD"}, "image_id": 13, "id": 191}, {"iscrowd": 0, "category_id": 1, "bbox": [401.0, 401.0, 30.0, 29.0], "area": 450, "segmentation": {"size": [512, 512], "counts": "PmX61n?2M3N1O2M3N2N1O2M3N2N2N110O1000O10OO2N2N2M3N1O2M3N2N1N3N2NSSX1"}, "image_id": 13, "id": 192}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 421.0, 26.0, 25.0], "area": 376, "segmentation": {"size": [512, 512], "counts": "c]W72l?2O2M2N3M3N1N3M201O10O010O10O010NO101N3M2N3N4K2N3Meb;"}, "image_id": 13, "id": 193}, {"iscrowd": 0, "category_id": 1, "bbox": [426.0, 430.0, 16.0, 14.0], "area": 112, "segmentation": {"size": [512, 512], "counts": "e]e61m?2N3N2M21000O01000O01O0N3N2M2O\\bR1"}, "image_id": 13, "id": 194}, {"iscrowd": 0, "category_id": 1, "bbox": [451.0, 462.0, 28.0, 31.0], "area": 521, "segmentation": {"size": [512, 512], "counts": "RoQ74j?2M3N3L3N3L3N2M40O010O00010O010O00010N1N3L3N2M4L3N3L3N\\Q`0"}, "image_id": 13, "id": 195}, {"iscrowd": 0, "category_id": 1, "bbox": [471.0, 490.0, 41.0, 22.0], "area": 525, "segmentation": {"size": [512, 512], "counts": "oo[71m?2N2N2N2N2N2N2N200O1N2N21O001O001O001O001O001O001O001O001O001O001O0J]@Od?N^@2i?O001O001O"}, "image_id": 13, "id": 196}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 350.0, 18.0, 30.0], "area": 344, "segmentation": {"size": [512, 512], "counts": "n:k0V?O010O01O010O01M2N3M2N3M2N3M2N3M2Nldf7"}, "image_id": 15, "id": 197}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 358.0, 118.0, 124.0], "area": 5612, "segmentation": {"size": [512, 512], "counts": "foNbAP1^>QOaAo0_>QObAo0]>QOcAo0]>RObAo0]>QOcAo0]>;O00001O001OO1N2M3N2N2M3N2M3N2"}, "image_id": 16, "id": 205}, {"iscrowd": 0, "category_id": 1, "bbox": [133.0, 3.0, 51.0, 73.0], "area": 1956, "segmentation": {"size": [512, 512], "counts": "maR21m?2N3M2N3N2M2N3M2N3M2N3M2N3M2N3M2N3DhNlAZ1R>hNlA[1R>gNkA[1S>;M2N3M2N3N1010M2N3M2N3M2N3M2N3M2N3M2N3M2O2M2N3[ORA4P?JSA3P?JRA4P?JSA3P?JRA4P?JSA3^?NUoS5"}, "image_id": 16, "id": 206}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 13.0, 27.0, 30.0], "area": 519, "segmentation": {"size": [512, 512], "counts": "i0`0]?10O03N3M21O010O01O010O01O010O01O010ON3M2M4M2N2M4M2N\\_b7"}, "image_id": 16, "id": 207}, {"iscrowd": 0, "category_id": 1, "bbox": [178.0, 40.0, 57.0, 60.0], "area": 1690, "segmentation": {"size": [512, 512], "counts": "aRi21m?3N2N2M2O2N2M3N1O2M3N2N1IWOUAl0i>UOVAm0h>UOVAm0g>7O2N2M3N1O2M3N2N0O010002M2O1O1N3N1O2M3N2N1N3N2N2N1N10O12M3N2N1N3N2N2M2O2N2M3N1O2M[^Z4"}, "image_id": 16, "id": 208}, {"iscrowd": 0, "category_id": 1, "bbox": [39.0, 41.0, 39.0, 54.0], "area": 1071, "segmentation": {"size": [512, 512], "counts": 
"cbc04j?4K4M2M3N3L3N3M2N2LUOSAl0j>6NSOXAf0e>=MSO^A`0_>CaA>\\>d0MTOgA7Y>f03SOdA2^>NbAOb>e02\\O\\AIh>e02BVACm>=SAAP?d03Km@[OV?e020010M2N3L3N2M4M2M4M`nh6"}, "image_id": 16, "id": 209}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 50.0, 18.0, 27.0], "area": 321, "segmentation": {"size": [512, 512], "counts": "b1g0Z?0O01O01O01O010O01OO2M2M3N3L3N3L3MZnf7"}, "image_id": 16, "id": 210}, {"iscrowd": 0, "category_id": 1, "bbox": [234.0, 51.0, 55.0, 67.0], "area": 2063, "segmentation": {"size": [512, 512], "counts": "iRe33j?3N2M4M2M4M2M3M4M2M4M2M3N3L3N3O01O010O01O01OgAdNn=]1oAfNQ>Y1lAjNT>a1010O010O00010O0ZNkAb1Y>00010O010O0001M2M4L3jN^Al0d>RO_Aj0m>N3L3N2M4M2M4M2M3Nc]_3"}, "image_id": 16, "id": 211}, {"iscrowd": 0, "category_id": 1, "bbox": [103.0, 63.0, 29.0, 28.0], "area": 497, "segmentation": {"size": [512, 512], "counts": "bbc12k?3N2M4L3N3L3M3010O00010O00010O010O00010O000M4L3M4M2M3M4Mkmm5"}, "image_id": 16, "id": 212}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 84.0, 30.0, 65.0], "area": 1173, "segmentation": {"size": [512, 512], "counts": "d2n1R>01O01O010O010OO2L3N2M4M2N3L3N2M4M2N3L3N3L3N2M4M2N3L3N2M4MRm`7"}, "image_id": 16, "id": 213}, {"iscrowd": 0, "category_id": 1, "bbox": [298.0, 85.0, 47.0, 60.0], "area": 1542, "segmentation": {"size": [512, 512], "counts": "VTe41l?3N2N3L3N3L3N2M4M2N3L3N2M4M2N3L3N3L3N2M4O001O01O010O0N3M2M3N3L3N3M2M3N3L3N3L3N2N3L3N3L3N3M2MT]c2"}, "image_id": 16, "id": 214}, {"iscrowd": 0, "category_id": 1, "bbox": [155.0, 86.0, 29.0, 30.0], "area": 538, "segmentation": {"size": [512, 512], "counts": "]c]24j?2M3N3L3M4M2M3N3L03M300010O01O01O010O00010ON3L3M3N3L3N3LVmS5"}, "image_id": 16, "id": 215}, {"iscrowd": 0, "category_id": 1, "bbox": [85.0, 87.0, 33.0, 41.0], "area": 782, "segmentation": {"size": [512, 512], "counts": "nbZ12l?2M4m@JX>8fAJZ>7bAM]>3aAO_>1^A3b>M[A5e>KXA8i>HTA;k>=010O0010O0010O0010O01N1M3N3L3N3M2M3N3L3N3L3NPmT6"}, "image_id": 16, "id": 216}, {"iscrowd": 0, "category_id": 1, "bbox": [352.0, 101.0, 50.0, 59.0], "area": 1711, "segmentation": {"size": [512, 512], "counts": "aT`52l?2N3M2M4[ODbA>\\>EaA>\\>DbA>\\>EaA>\\>DbA>\\>EaA>\\>DbA>\\>e0N1010O010O010OO2O0010O0010O010O01M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M]lf1"}, "image_id": 16, "id": 217}, {"iscrowd": 0, "category_id": 1, "bbox": [33.0, 107.0, 51.0, 65.0], "area": 1921, "segmentation": {"size": [512, 512], "counts": "nd`04j?2M3N3L3N3L3^O^OdAd0Z>_ObAe0Z>^OdAd0Z>^OcAf0Y>^OdAd0Z>b0M4O010O00010O010O00010O010O0001M000O010O3N3L3N3L3N2M4M2N3L3N2M4M2M4M2M3N3L3N`le6"}, "image_id": 16, "id": 218}, {"iscrowd": 0, "category_id": 1, "bbox": [212.0, 110.0, 28.0, 29.0], "area": 506, "segmentation": {"size": [512, 512], "counts": "PTZ33k?3L3N2M4M2M4M2O1010O010O01O01O010O01O01N1N3L3N2M4M2M4M\\lW4"}, "image_id": 16, "id": 219}, {"iscrowd": 0, "category_id": 1, "bbox": [413.0, 113.0, 98.0, 66.0], "area": 3091, "segmentation": {"size": [512, 512], "counts": "Td^63j?3N3_@IU?9h@JU?:h@HV?c0M201O0010O00O2M2M40O010O00010O010O010O00010O010O010O00010O010M210O01O01OZAPO\\>Q1aARO_>m0^AVOb>U10O0hN^AR1b>lN`AU1f>N1N3M2O2O01O010O01O010O01O010M2N2O2O010O0010O0010O010O00010O010O010O0001O0N3M2M4M2N2M4M2N3L3N3M2M3N^;"}, "image_id": 16, "id": 220}, {"iscrowd": 0, "category_id": 1, "bbox": [146.0, 115.0, 28.0, 28.0], "area": 480, "segmentation": {"size": [512, 512], "counts": "UTY23k?2M4M2M3N3L3N3N11O01O010O01O01O010O01O0O1M4M2M4M2M3N3LXlX5"}, "image_id": 16, "id": 221}, {"iscrowd": 0, "category_id": 1, "bbox": [264.0, 130.0, 32.0, 31.0], "area": 579, "segmentation": {"size": [512, 512], "counts": 
"gTT41m?2N3L3N2N3M2M4M2N3N11O010O010O01O010O01O010O0O2M2N2M4M2N3M2M4Mfk[3"}, "image_id": 16, "id": 222}, {"iscrowd": 0, "category_id": 1, "bbox": [77.0, 135.0, 59.0, 58.0], "area": 2069, "segmentation": {"size": [512, 512], "counts": "ceV11l?4L3N3L3N2M4M2M4N100O2M2M3N3L3N3L3NO04N100010O010O01O01O01O010O010O00010O010O00O2L3N3L10O03N2M4L3N2M4M2M3N3L3N3L3N2Mdkk5"}, "image_id": 16, "id": 223}, {"iscrowd": 0, "category_id": 1, "bbox": [384.0, 146.0, 30.0, 29.0], "area": 521, "segmentation": {"size": [512, 512], "counts": "UUP61l?4M2N3L3N2N3L3N3O0010O00010O010O010O01O01N1N3M2N3L3N3M2M3NXk`1"}, "image_id": 16, "id": 224}, {"iscrowd": 0, "category_id": 1, "bbox": [322.0, 147.0, 28.0, 35.0], "area": 510, "segmentation": {"size": [512, 512], "counts": "\\UQ52l?3L3N3BGQA;m>GPAGo@Eo@=Q?:0O0WOQAb0o>[OSAe0U?O01O010O01O01O01O0N3L3N3L3N2N3LPk`2"}, "image_id": 16, "id": 225}, {"iscrowd": 0, "category_id": 1, "bbox": [142.0, 148.0, 52.0, 70.0], "area": 1962, "segmentation": {"size": [512, 512], "counts": "]VW23j?3N2M4M2M4M2M3N3L3N3M2M3N3L3N3L3N2M4M2M4M2M3N3O001O01O0O2L3N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N2NTkn4"}, "image_id": 16, "id": 226}, {"iscrowd": 0, "category_id": 1, "bbox": [196.0, 152.0, 29.0, 32.0], "area": 573, "segmentation": {"size": [512, 512], "counts": "YUR31m?3V@Nb?9M4O010O0M4M2N3M2N201O010O010O010ON2N3M2N30O01Ge@LZ?2h@K\\?1h@LZ?2\\[_4"}, "image_id": 16, "id": 227}, {"iscrowd": 0, "category_id": 1, "bbox": [212.0, 165.0, 49.0, 60.0], "area": 1574, "segmentation": {"size": [512, 512], "counts": "cVZ32l?2M4M2N3M2M3N3M2N3L3N3M2N3L3N2N3M2M4M2N3N110O01O010O01OM4M2N3M2M4M2N3M2N2M4M2N3M2M4M2N3M2M3N3M2NaZm3"}, "image_id": 16, "id": 228}, {"iscrowd": 0, "category_id": 1, "bbox": [405.0, 174.0, 72.0, 55.0], "area": 2107, "segmentation": {"size": [512, 512], "counts": "oeZ62l?2M3N3L310d@FP?:n@IQ?7l@LU??010O01O000M4M201O01O01O010O010O000O2L3N3M21O010O01O01O010O01O01O010O01O01lN\\Ak0c>SO_Am0k>O010O01O01O010O010O00010O010M2N2M4M2M4M2M3N3M2M4MiYa0"}, "image_id": 16, "id": 229}, {"iscrowd": 0, "category_id": 1, "bbox": [248.0, 176.0, 52.0, 57.0], "area": 1506, "segmentation": {"size": [512, 512], "counts": "kVl32l?3M2O2M2N3M3N1N3M2O2M3M2O2M2N3N2M2N3N1N3M2O1N000100O2N3N1N3M3N1010O01O1M2O2M2N3N2M2N3N1N3M3N1N3M2O2M3MSjY3"}, "image_id": 16, "id": 230}, {"iscrowd": 0, "category_id": 1, "bbox": [340.0, 190.0, 29.0, 19.0], "area": 247, "segmentation": {"size": [512, 512], "counts": "TVZ51l?3N3N11O0101N1O010O01O01O01O01O010O01O01O010O01O01OO2L3NjYW2"}, "image_id": 16, "id": 231}, {"iscrowd": 0, "category_id": 1, "bbox": [285.0, 194.0, 52.0, 43.0], "area": 1230, "segmentation": {"size": [512, 512], "counts": "mf^42l?2N3M3N1N3M2N3M2O2M2N3M3N1N3M210O010O01M2N3N2O0100O010O010O01000O010O010O0100N1O2M2N3M2N3N1N3M3M2O2M2N\\Yg2"}, "image_id": 16, "id": 232}, {"iscrowd": 0, "category_id": 1, "bbox": [363.0, 218.0, 26.0, 26.0], "area": 411, "segmentation": {"size": [512, 512], "counts": "\\ge51l?3N3L3M3N3L3N3O00010O0010O0010O00010ON3L3N2M4M2M3MSYm1"}, "image_id": 16, "id": 233}, {"iscrowd": 0, "category_id": 1, "bbox": [392.0, 219.0, 63.0, 55.0], "area": 1965, "segmentation": {"size": [512, 512], "counts": "`WT61l?3HN`@5\\?Na@57Ji>`0TABj>`0SACk>k00O010O00010O00010O010O00010ON3N11O01O010O01O01O010O01O01O01O0oNYAj0f>SO]Am0l>0O00010O010O00010O010O00010O00010O01M2M3Ck@LY?1j@LX?1k@LY?1fXl0"}, "image_id": 16, "id": 234}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 274.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "b81]go7"}, "image_id": 16, "id": 235}, {"iscrowd": 0, "category_id": 1, "bbox": 
[0.0, 283.0, 53.0, 68.0], "area": 1856, "segmentation": {"size": [512, 512], "counts": "U:=a?3N2M2O2M3N1N3N2M2O2M2N3N2M2O2M3N1N3N2M010O010O01O01O03N2M2O2N110O10O10EbAPOa>n0`AQOa>m0bAPOa>n0:M2O2M3N1N3M3N1N3N2M2O2M3NeVU7"}, "image_id": 16, "id": 236}, {"iscrowd": 0, "category_id": 1, "bbox": [48.0, 307.0, 57.0, 62.0], "area": 1921, "segmentation": {"size": [512, 512], "counts": "Q[h02k?3N2M4M2M4M2O101O010O00010L3N3L3N2M4M2M4M2M3N3L3M4N100010O010O00010O0010OO1M4M2M4M2M3N3L3N3L3N2M4L3N3L3N2M4M2M4MSV[6"}, "image_id": 16, "id": 237}, {"iscrowd": 0, "category_id": 1, "bbox": [103.0, 323.0, 59.0, 69.0], "area": 2109, "segmentation": {"size": [512, 512], "counts": "hkc12l?2M3N3M2M4M2N30O01M2M3N3M2M4M2N3L3N2O2O010O010N1N2M4M2N3L3N3M2O110ON2N3M2M4M2N3M2M3N3M2M4M2N3L3N2N3L3N3M2M4M2N2M4M2Nde^5"}, "image_id": 16, "id": 238}, {"iscrowd": 0, "category_id": 1, "bbox": [230.0, 326.0, 63.0, 77.0], "area": 2675, "segmentation": {"size": [512, 512], "counts": "R\\c31m?3L3N2[@K\\?>N3L3N2010O010ON2M4M1N101N3AlNPBW1m=kNPBX1m=kNQBX1l=kNPBX1m=?N3L301O01O01O010O01O010O01O01O010M2M4M2M4M2M3N3L3N3L3N3L3N3L3N2M4M2M4M2M4M2M4M2M]U]3"}, "image_id": 16, "id": 239}, {"iscrowd": 0, "category_id": 1, "bbox": [151.0, 327.0, 64.0, 94.0], "area": 2795, "segmentation": {"size": [512, 512], "counts": "gl[22l?3L3N3L3N2M4L3N3L3N2M4M201O00010O00010O0\\OkN^BU1^=oN^BT1`=nN^BT1_=PO]BT1_=oN_BS1_=oN^BU1^=oN_BS1_=h0M4M2M3N0O02O2M4L3N3L3N2M4M2M3N3L310EkAiNU>T1nAlNR>R1QBjNS>S1oAkNS>U1=M2M3N3L3N3L3N2M4L3N3L3N2MaUd4"}, "image_id": 16, "id": 240}, {"iscrowd": 0, "category_id": 1, "bbox": [297.0, 346.0, 66.0, 71.0], "area": 2589, "segmentation": {"size": [512, 512], "counts": "jkd43k?2M3N3M2M4M2M4M2N2M4M2N3L3O1010O010O01O01O010OjAiNc=X1YBkNGMk=W1\\BoNENn=T1YBWOg=i0WBYOj=f0TB]Ok=\\11O0M4N10010O010O01O0N2N1N0102N2M3N3M2M4M2N3L3N2M4M2N3L3N3M2M3N3M2M4M2M3NoTZ2"}, "image_id": 16, "id": 241}, {"iscrowd": 0, "category_id": 1, "bbox": [359.0, 350.0, 66.0, 85.0], "area": 3190, "segmentation": {"size": [512, 512], "counts": "Pmc54j?2N3L3N2M4M2M4M2M3N3L3N3L3N2M4M2M4C]NYBe1e=]NXBf1e=]NYBf1d=Y10010O010O0010O0010O01gN]AT1h>N1N3N11O01O010M2N2N3O010O01O01O01o@WOj>1QAd02_Oo>i00010O010O01O01O0O2L3O2O00010O010O00010O010O00010O010O01O01N101O01O010O01O010O01O0O2M2XOSA:P?DSA9P?CSA:P?DRA:\\?M2M4M]ao1"}, "image_id": 16, "id": 246}, {"iscrowd": 0, "category_id": 1, "bbox": [198.0, 425.0, 60.0, 73.0], "area": 2085, "segmentation": {"size": [512, 512], "counts": "T_S32l?2M4L3N3L3M3N3L3M3O2O010O00010N1M3M4M2M4L3N2M4L30010O010O00010O0001N1N3L3M3N1N1O01O01O01O02N3N2M4L3N3L3M3N3L3M3N3L3M4Mdbn3"}, "image_id": 16, "id": 247}, {"iscrowd": 0, "category_id": 1, "bbox": [488.0, 427.0, 24.0, 55.0], "area": 928, "segmentation": {"size": [512, 512], "counts": "i^d72a?1f@2X?0f@3V?0g@3W?=L3N2N3L3N3L3N2M4M2M4N11O01O01O0M40O000bB"}, "image_id": 16, "id": 248}, {"iscrowd": 0, "category_id": 1, "bbox": [252.0, 430.0, 26.0, 34.0], "area": 466, "segmentation": {"size": [512, 512], "counts": "W^n32k?4M2M3N3L3N3L3N2M4M201O01XOPA>P?_OSAb0m>[OVAd0S?10O0O1N3L3N3L3N2M4MUbd3"}, "image_id": 16, "id": 249}, {"iscrowd": 0, "category_id": 1, "bbox": [266.0, 464.0, 40.0, 29.0], "area": 494, "segmentation": {"size": [512, 512], "counts": "k^U41m?2N3L3N3M210O01O01O010O010O01O01O010O010O00010O010O0010O02O0O010O1ON3M2M4M2N2MUaV3"}, "image_id": 16, "id": 250}, {"iscrowd": 0, "category_id": 1, "bbox": [251.0, 485.0, 45.0, 27.0], "area": 704, "segmentation": {"size": [512, 512], "counts": 
"oom31m?2M3N2IKa@7\\?8N2M3N2N2001O001O00001O001O00001O001O001O000Ce@5\\?Gg@9Y?Ej@:^?0001O001O001O00001O001O000NW@Mj?0X@0mo[3"}, "image_id": 16, "id": 251}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 487.0, 17.0, 17.0], "area": 182, "segmentation": {"size": [512, 512], "counts": "boo22k?3N2M4M2O20O00010O010O000N3M2M4Lg`g4"}, "image_id": 16, "id": 252}, {"iscrowd": 0, "category_id": 1, "bbox": [500.0, 509.0, 12.0, 3.0], "area": 21, "segmentation": {"size": [512, 512], "counts": "o_j71o?0000000O100000000O100"}, "image_id": 16, "id": 253}, {"iscrowd": 0, "category_id": 1, "bbox": [442.0, 6.0, 68.0, 52.0], "area": 2055, "segmentation": {"size": [512, 512], "counts": "PQm61l?4M2M4M2N3L3N2N3L3N3N1010O0010O0010N1N3M210O01O01O010O010O01O01O01O0m@YOj>P1N3L3N3O000010O010O0010O0010O010O00010O010O010]O^AFb>7bAH^>6dAJ]>3fAJ\\>3gAK\\>2gAJ\\>4fAJ]>2fAL\\>2j0Mbo0"}, "image_id": 17, "id": 254}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 44.0, 41.0], "area": 1080, "segmentation": {"size": [512, 512], "counts": "02n?1O001O001O0a@LR?4l@NT?3h@0X?0f@3Y?:01O000n@_Oe>b0WAAi>?UADj>i00001O001O00001O001O00001O001O00001O00TOZA=g>@]AB\\AA]AB]A;U?M4M2M3NaoY7"}, "image_id": 18, "id": 255}, {"iscrowd": 0, "category_id": 1, "bbox": [49.0, 0.0, 64.0, 76.0], "area": 2800, "segmentation": {"size": [512, 512], "counts": "n`h03j?3M3N3L3M4M2M3O2O00010O0O2L3N2M3PBnNP=R1PCQOoQ16M3M3N2M3M3N2M3M3N2M3M3NR`W6"}, "image_id": 18, "id": 256}, {"iscrowd": 0, "category_id": 1, "bbox": [221.0, 0.0, 102.0, 60.0], "area": 3442, "segmentation": {"size": [512, 512], "counts": "X`^32l?3M2O2M2O2O001O1O001O001O001O001O1O001O001O001O1O001O001O001O001O1O001O001O001O001O1XAPO^>P1aAQO_>P1^ASOa>m0]AUOc>U1O0010O01N2O001O001O001O010O10O010O10O010N2N1N3N101O001OO1N2O1N2N2N2N2O1N2N2O20O10O010N2M2CXA@k>=XAAi>=ZA@i>>XA@j>>>M2N3N2M2N3Mh_n2"}, "image_id": 18, "id": 257}, {"iscrowd": 0, "category_id": 1, "bbox": [344.0, 0.0, 67.0, 39.0], "area": 1483, "segmentation": {"size": [512, 512], "counts": "PQ\\54d?8H8H8H9O01O000000000001N100000000000001O00000000N2H8G9H8M300001O0000000000004L001O000000000000001O000000000000001O00000000000000O1HX`b1"}, "image_id": 18, "id": 258}, {"iscrowd": 0, "category_id": 1, "bbox": [417.0, 0.0, 49.0, 45.0], "area": 1753, "segmentation": {"size": [512, 512], "counts": "Ra`61g?8I7H8M30001O0o@UOm>o0000000K5M301O0000000000001O000000000000001O0000000000001O00000000000000RObA;]>^OkAa0j>M3I7Hcof0"}, "image_id": 18, "id": 259}, {"iscrowd": 0, "category_id": 1, "bbox": [118.0, 15.0, 66.0, 87.0], "area": 2776, "segmentation": {"size": [512, 512], "counts": "URk12k?4L3M3M4M2M3M4L3M4M20010O01OaAQOm=P1oASOQ>m0lAWOT>h0iA[OW>f0fA\\OZ>V10O00010O010O00010O0001O0M3M4L3M3N3L3M2N0001O010O4L3M3M4M2M3M4L3M3N3L3M3M4L3N3L3M3M4L3N2M4L3M\\oS5"}, "image_id": 18, "id": 260}, {"iscrowd": 0, "category_id": 1, "bbox": [176.0, 42.0, 69.0, 85.0], "area": 2823, "segmentation": {"size": [512, 512], "counts": "dRh22l?2O2M2N3M2N3N1N3M3M2N3O010O010`AVOh=j0VBXOh=k0UBXOh=k0UBWOj=j0UBXOj=i0SBYOn=f0PB]Oo=d0nA^OS>a0kABT>U110O1M2O20O01N1N3M2O2M1O01O00001O00003M2JTBVNo=g1SBXNn=f18M3M2N2N101N1O2N2N3M2N2N3M3M3M4L5L2M3M2N3M2NZ^U4"}, "image_id": 18, "id": 261}, {"iscrowd": 0, "category_id": 1, "bbox": [335.0, 58.0, 78.0, 73.0], "area": 2849, "segmentation": {"size": [512, 512], "counts": "_bW5240b?3\\@O]?Nd@;Z?Gd@;Z?7N2N2N11N2N0O12M3M2N3N2O1O0O2O1O1O1O0100000005J100000O10O100000O0O2N2N2N2N1100000O10O100000O010N2N2M3N1O2N2M3N2NTOoAKP>4RBMl=3VBMj=1XBOh=N[B1f=M\\B1f=M[B2f=N[B0g=OZBNi=1XBMj=2WBLk=3UBKm=5TBIn=5TBIn=5TBIn=4TBJo=4SBJU>NmA0`ka1"}, "image_id": 18, "id": 
262}, {"iscrowd": 0, "category_id": 1, "bbox": [228.0, 74.0, 73.0, 86.0], "area": 3007, "segmentation": {"size": [512, 512], "counts": "gSb32l?2N3M3M2O2M2N3M2N3M2N3M2N3fAoNb=R1\\BPOb=S1[BPOc=Q1\\BPOd=Q1YBROg=m0WBUOi=l0TBWOl=i0QBYOo=g0oA\\OQ>Y1010O0100O010O001M2N3M2OO0000001O2N3M2O2M2N3M2N3M0000010O00001O3M2N3M2N3N1N3M3M2N3M2N3M2N3N1N3M2N3M2N^]Y3"}, "image_id": 18, "id": 263}, {"iscrowd": 0, "category_id": 1, "bbox": [291.0, 110.0, 80.0, 87.0], "area": 3242, "segmentation": {"size": [512, 512], "counts": "eea41g?1^@1a?0]@3n>3XA5f>NWA4g>NWA4g>NWA5f>d0N2O2M2N2N2N1O10O000001O0001O0001O0M]NjAc1V>3010JkA`NU>a1lA]NT>c14000001O01O00000001O01O00000001O01O00000001O101N2N3M2N2N2O1N2N001O2N2O1N2N2N3M2N2O1N2N3M2N2N2O1N3Mh[V2"}, "image_id": 18, "id": 264}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 126.0, 41.0, 91.0], "area": 2258, "segmentation": {"size": [512, 512], "counts": "g4R2a=QNfBR2X=QNeBn1[=UNbBl1^=901002M3N2O2O0010O0010O0010O000ON2O3M2M4M2M3EkAiNX>S1lAiNW>U1kAiNX>S1]1fAeNZ>[1dAgN\\>_11ON3N1N2N2N2N3M2O1N2N2N3M2N2O1N2OkNbBJ]=4eBK\\=3fBK\\=3fBL\\=3eBJ]=4eBJ]=4eBJ]=4eBJ]=5dBI_=4dBJ]=2gBL[=2gBL[=2gBL[=2gBL\\=2fBK\\=3fBL[=2gBL[=2gBL[=2gBL\\=1[1O1NTjW1"}, "image_id": 18, "id": 266}, {"iscrowd": 0, "category_id": 1, "bbox": [100.0, 165.0, 55.0, 67.0], "area": 1987, "segmentation": {"size": [512, 512], "counts": "nVb11m?2M4M2M3N3L3[O@hAc0U>_OhAd0U>@hAb0U>AhAc0U>@hAb0U>AhAc0U>e01O010O01O01O01M2N3L0102M4M2M3N3M210O00010O0O2M2M3N3L3N3L3N2O2O001YOi@d0Z?O0N2N3L3N3L3N2M_Zb5"}, "image_id": 18, "id": 267}, {"iscrowd": 0, "category_id": 1, "bbox": [308.0, 187.0, 62.0, 58.0], "area": 1868, "segmentation": {"size": [512, 512], "counts": "cVj43l?2N2N2N2N2O1N3M2N2N2N2N2N2O2M2N2O1000000O1N3M2N2N2N2N2O1N0010O2N3M001O000001O00000002N2N2GZAUOh>i0ZAVOg>h0[AVOh>g0ZAWOh>g0:O1N2N2N2N3M2N2N2N2O1N2NaiV2"}, "image_id": 18, "id": 268}, {"iscrowd": 0, "category_id": 1, "bbox": [132.0, 190.0, 73.0, 58.0], "area": 2071, "segmentation": {"size": [512, 512], "counts": "\\WR23j?3N3L31O010O01ON3L310OO2L3N2M4M2010O00010ON3M2M3N3L3N3L3N3L3N2M4M2M4N11O001N100O2N1O2N1O1O2N1O2N1O2N1O1100O10kNYAP1f>nN]AQ1j>N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2NTYi4"}, "image_id": 18, "id": 269}, {"iscrowd": 0, "category_id": 1, "bbox": [339.0, 225.0, 66.0, 58.0], "area": 1879, "segmentation": {"size": [512, 512], "counts": "ngY51n?2N2O1N3II`@9^?6N2N2O2N11ON2N3N1N2N2N2N3O00000O1N3M1O000001O2O1N1O000001O01O1O1O0001O0001O000001O0001O002O1N2JQAXOQ?f0QAXOQ?f07N1N2N2N2N3N1N2N2N3M^Xe1"}, "image_id": 18, "id": 270}, {"iscrowd": 0, "category_id": 1, "bbox": [491.0, 226.0, 21.0, 27.0], "area": 383, "segmentation": {"size": [512, 512], "counts": "ege71l?3M4M2M3M4L3N30O00010O0010O00010O00010O0iH"}, "image_id": 18, "id": 271}, {"iscrowd": 0, "category_id": 1, "bbox": [68.0, 239.0, 83.0, 74.0], "area": 3359, "segmentation": {"size": [512, 512], "counts": "lXR12l?2M3N3M2M4M2M4M2M3N3L3N3M2M3N3L301O01O010O01O01O001O0010O0010ON2N3L3N3L300010O010O01O01O010O01O01O010O01O01XNPB^1P>`NRBa1W>O0jNdAi0[>UOgAk0Z>QOiAo0W>oNlAQ1T>kNoAU1_>0O010O00010O010ON2N3L3N3M2M3N3L3N3L3N2M4MZWd5"}, "image_id": 18, "id": 272}, {"iscrowd": 0, "category_id": 1, "bbox": [161.0, 242.0, 98.0, 84.0], "area": 3685, "segmentation": {"size": [512, 512], "counts": "\\i`22m?3M2O1N2N2N3M2N2O1N3M2N2N2O2M2N2VOnNcBT1[=nNcBU1[=mNcBT1[=nNcBS1\\=oNbBQ1^=QO`Bo0`=TO]Bm0c=TO\\Bk0d=WOZBi0f=YOXBg0h=\\OUBd0k=i00O000000010O00000001O01O0000101N2N3M2N2N2O2M2N2N2N3N1N2N2N3M2O1N2N2N1O01O0001O0002N2N2OO000001O01O000001O0001O20001M2N2N2O2M2Ke@B]?<5N3M2O1N2NgWn3"}, "image_id": 18, "id": 273}, {"iscrowd": 0, "category_id": 1, 
"bbox": [199.0, 292.0, 81.0, 90.0], "area": 3490, "segmentation": {"size": [512, 512], "counts": "fjS32m?3M2N2N2N2O2h@Ch>?VACh>?VACh>?VACh>?VACi>?UABj>l0N0000O[AkNb>S1^AoNa>P1_ARO`>m0`AUO_>k0`AWO`>i0^AYOb>S1N01O00HiNhAX1W>81O00000001O000001O00FcNRB^1h=?001O0000010O0000001O100O1O1O1O2N100O1O1O1O2O2M2N2N2N3M2O1N2SOYA>i>AXA=k>@XA=j>AXA=j>AXA>i>@YA>V?N3M2N2N2O1N3Meec3"}, "image_id": 18, "id": 274}, {"iscrowd": 0, "category_id": 1, "bbox": [417.0, 297.0, 80.0, 79.0], "area": 2754, "segmentation": {"size": [512, 512], "counts": "ej`62m?2M3M2O2M2N3N2M2O2M3i@XOR?m000O0100O0100O0100N1010O01000N1O4L101000O0100O0100O0100O0N3\\OlNYBV1e=kNYBW1e=lNXBW1e=lNYBU1f=lNXBW1e=lNYBV1d=lNZBV1e=d0M010O01O3N2M2O2M3M2FoAdNS>Z1oAcNT>Z1nAdNT>Z1;N2M2N3N2M2O2M3M2O2M3M2O2M3N1N3M3N1NWV7"}, "image_id": 18, "id": 275}, {"iscrowd": 0, "category_id": 1, "bbox": [506.0, 299.0, 6.0, 17.0], "area": 60, "segmentation": {"size": [512, 512], "counts": "gYm73k?3L3N2N3L3eF"}, "image_id": 18, "id": 276}, {"iscrowd": 0, "category_id": 1, "bbox": [82.0, 321.0, 85.0, 78.0], "area": 3283, "segmentation": {"size": [512, 512], "counts": "lZY11l?3N3L3N2M4M2M4M21O01O010O01O01O01O010O01O01O01O0N3L3N2M4M2O2O01O01O010O01O01O010mAkNY=U1eBnNZ=R1cBQO^=o0_BTO`=l0]BWOd=i0YBYOg=g0VB]Oi=c0UB_Ol=Z1010O01O01OO2M2M40O0010O0010O0N3M2M3N3L3N3\\NiAZ1Z>cNjAZ1_>M4M2M3N3L3N3L3N2M4M2M4M2M3N\\U\\5"}, "image_id": 18, "id": 277}, {"iscrowd": 0, "category_id": 1, "bbox": [247.0, 337.0, 94.0, 87.0], "area": 3586, "segmentation": {"size": [512, 512], "counts": "Wlk32m?2N2O1N2N3M2N2f@AP?a0n@AP?a0n@AQ?`0m@BQ?h0N2N3M2N2N2O1N2N3M2N000001O@`NaB`1_=cN^B]1b=eN\\B\\1c=fN[BZ1e=hNYBX1g=jNXBU1h=mNVBS1k=`00001O00000001O0001O000001O3M2N2O1N2N2N3M2N2O1N2N2N3M2N2N2O1N2N1O00000001O2O1N2N000000NTAQOl>o03O002N2O1N3M2N2N2N2N2O2M2N2N2N2N3NdTe2"}, "image_id": 18, "id": 278}, {"iscrowd": 0, "category_id": 1, "bbox": [468.0, 354.0, 44.0, 78.0], "area": 1876, "segmentation": {"size": [512, 512], "counts": "X\\Z71l?4M2M3N3L3N2M4M2O2O01O01O010O01O01O010O0_AXOk=g0RB\\On=e0oA]OQ>c0lAAT>?hADU>?iACU>?hAET>U1N3L3N2M4M2M4O000010O010O00010O0OlD"}, "image_id": 18, "id": 279}, {"iscrowd": 0, "category_id": 1, "bbox": [173.0, 381.0, 77.0, 89.0], "area": 3247, "segmentation": {"size": [512, 512], "counts": "\\lf22S11d=1ZB1e=1YB0e=2YB0e=2YB0e=2YB1d=2YB0e=2kAA6?n=1kAA6?m=2kAA6?m=2kAA6`0l=9RBIl=:QBHn=9QBHm=W1N2N000003M2N2O1N2N3M200000010O000000010O0000O1N3N1N2N2N3O0001]NTBo0l=oNVBP1k=nNWBP1k=nNWBP1l=mNWBP1k=oNVBP1k=nNWBP1k=nNWBP1k=nNXBo0^>N2N2O0O1O00000001O01O2N2N2O2M2N2N2N2N3N1N2N2NUbR4"}, "image_id": 18, "id": 280}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 382.0, 50.0, 83.0], "area": 2741, "segmentation": {"size": [512, 512], "counts": "TL3N3L3N2O20OO2M2M4M2N2M4M2NRcV7"}, "image_id": 18, "id": 281}, {"iscrowd": 0, "category_id": 1, "bbox": [289.0, 387.0, 79.0, 83.0], "area": 2860, "segmentation": {"size": [512, 512], "counts": "Pn`41n?2N2N2N2N3N1N2N2N2N2N3N1N2N2N2N3M2O1N1O000@lNUBT1l=mNRBS1n=oNQBP1o=SOnAn0Q>TOmAl0S>VOkAj0U>XOiAh0W>`000G:0O1O1O1O1O100O1O1O1O1OH90O1O1O1O1O1O100O1O1O1O1O10d0]O001N1N2N1O000001O02N2N3M2N2O1N2N3M2N2N2N2OjbW2"}, "image_id": 18, "id": 282}, {"iscrowd": 0, "category_id": 1, "bbox": [57.0, 410.0, 67.0, 91.0], "area": 3481, "segmentation": {"size": [512, 512], "counts": "Rnl02l?3EMd@6Y?Me@5Y?Me@6W?;O2hAYOU=g0iB[OW=f0eB^OX=d0fB^OW=e0gB]OW=f0eB^OX=d0fB^OW=f0fB]OW=e0fB^OX=e0eB^OW=e0gB]OY=c0dB@]=b1O010O00010O010O010O00010O010O010O0001N1N3M2M4M2nMWBk1k=SNXBi1Q>N2M4M2N3L3N3M2M3N3M2M4O010M2M3N3M2M4M2N3L3N2N^bQ6"}, "image_id": 18, "id": 283}, {"iscrowd": 0, "category_id": 1, "bbox": 
[121.0, 443.0, 71.0, 69.0], "area": 2648, "segmentation": {"size": [512, 512], "counts": "iol11n?2N2N2N2N2N2N1O1O1O1O1O1O1TO^OYBc0f=_OXBb0g=@WBa0h=AVB`0i=BUB?j=CTB>k=DSB=l=ERBGnA:Q>HmA9R>HmA9R>k0O1O1O1O1O1O1O1O1001O1O1O1O1N2N2N2N2N2N2N2000000N2N2N2N2N2oNZAe0h>YOZAe0h>YOZAe0g>ZO[Ad0g>ZO[Ad0R?N2N2N2N2N2N2N2N2N2Nj`o4"}, "image_id": 18, "id": 284}, {"iscrowd": 0, "category_id": 1, "bbox": [333.0, 447.0, 71.0, 65.0], "area": 2414, "segmentation": {"size": [512, 512], "counts": "goV51n?2M2O2M3N1O2M3N2N101000OO1O1N2O1N2O1CXOaAi0^>XO`Aj0_>XO_Ai0_>ZO^Ah0a>YO^Ah0`>=O1N2O1001O001O1O001IaAkN_>T1cAkN\\>U1fAjNX>V1jAjNU>V1mAiNQ>Y1:1O2M3O01000O01000O0O2M3N1NWOkALS>4oALo=3SBMk=4WBIj=7XBGh=8ZBGh=7ZBFh=8[BFg=8[BEh=8ZBGg=8[BEh=8[BFg=8ZBFh=9ZBEh=8R1N2M2Ojae1"}, "image_id": 18, "id": 285}, {"iscrowd": 0, "category_id": 1, "bbox": [206.0, 467.0, 70.0, 45.0], "area": 1701, "segmentation": {"size": [512, 512], "counts": "o_W31n?100O1O1O1O100O1O1O1O1O100O1O1O1O1O1GAQA?o>Bo@?P?Cn@>Q?El@T14O01O0001O3M2OO00000002N2O1N2N2Ai@2Z?Kh@3Z?Kh@3Z?Ki@3X?Kj@3c?Nh`e3"}, "image_id": 18, "id": 286}, {"iscrowd": 0, "category_id": 1, "bbox": [423.0, 486.0, 42.0, 26.0], "area": 614, "segmentation": {"size": [512, 512], "counts": "aoc61n?4M2N2M4M2M3N0O10000O100O100O100O10000O100O100O12N2N2N3M2N2N3M2N2N2N1O001O001O0000QPg0"}, "image_id": 18, "id": 287}, {"iscrowd": 0, "category_id": 1, "bbox": [204.0, 0.0, 25.0, 18.0], "area": 261, "segmentation": {"size": [512, 512], "counts": "TPV31m?3N2N101O1O1O1O001O1O1O1O001O1O1O00O1Bb@8`?Fc@8d?N2N2M2Oi_]4"}, "image_id": 19, "id": 288}, {"iscrowd": 0, "category_id": 1, "bbox": [228.0, 0.0, 7.0, 3.0], "area": 15, "segmentation": {"size": [512, 512], "counts": "PPb31o?1O1O0000O1OQ`Z4"}, "image_id": 19, "id": 289}, {"iscrowd": 0, "category_id": 1, "bbox": [342.0, 44.0, 110.0, 71.0], "area": 3453, "segmentation": {"size": [512, 512], "counts": "kR[52l?3N2N2N2N1N3N2N2N2M3N1O2N2N2M3N1O2N2M3N2N2N1O2M3N2N2NO10O100000O0100000O10O12N2N1N3N2N2cN_AX1f>M3N2N2N1O2M3N2N2N1O2M3OO2f@^OT?b0k@@U?`0i@BW?d0O010000YOi@d0Z?00O10N2M3N2N2N1O200O100000O10O100000O1000O100000O10O100000O1N2M2O2N2N2M3N1O2N2N2M3N2N1O2MTmm0"}, "image_id": 19, "id": 290}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 266.0, 37.0, 63.0], "area": 1225, "segmentation": {"size": [512, 512], "counts": "l8[1d>2N2N0O100000O1000O100000O10000LaAhNa>U1bAiN`>U17N1O2N2N2M3N2N2N1O2N2M3N2N2N2N1O2M3NVW]7"}, "image_id": 19, "id": 291}, {"iscrowd": 0, "category_id": 1, "bbox": [30.0, 305.0, 63.0, 73.0], "area": 2261, "segmentation": {"size": [512, 512], "counts": "\\[?1n?2N2N1VOMVAN88`>LVAN88`>LVAN88`>3]A0`>2_AO`>3^AO^>5`AM^>l0M3N1O2N2M3N1O1O01O0O0100000O01000O2O2N2JiA`NX>^17N2N2N2M2O2N2N1N10O10O102N2N2M3N1010000N2N1N3N2N2M3N1O2N2M3NnUa6"}, "image_id": 19, "id": 292}, {"iscrowd": 0, "category_id": 1, "bbox": [62.0, 342.0, 75.0, 72.0], "area": 2671, "segmentation": {"size": [512, 512], "counts": "U\\o01m?3N1O2N2N2M3N2N1O2N2M3N2N2N1O2M3O1000N1O2M1000O2O2N2N2N2M2O2N2JeNdA]1Z>6N2N1O2M3N2N2N2N2N1N21M3N1O2N2N2M3N2N1O2N2M3N2N2H]APOd>n0_APOc>n09N2N2N2M2O2N2N2N2N2M2O2N2N2N2MbTk5"}, "image_id": 19, "id": 293}, {"iscrowd": 0, "category_id": 1, "bbox": [120.0, 399.0, 71.0, 58.0], "area": 1847, "segmentation": {"size": [512, 512], "counts": "[]l13l?2N2N1N3N2N2N2M3N1O2N2N2M3N1O2N10O2NSAROk>l0UAVOk>j0SAXOm>m001000000000O010000000nNSAo0P?0O1NO100000O0100000O10O100000O0100000O01000001O2M3N2N2N1N3N2N2N2M2O2N2N2N2MWSP5"}, "image_id": 19, "id": 294}, {"iscrowd": 0, "category_id": 1, "bbox": [181.0, 434.0, 59.0, 72.0], "area": 2176, "segmentation": {"size": [512, 512], "counts": 
"Zoj21m?3N2VOLeA6Y>LeA6Y>LWAL89`>MVAL7:`>LXAK7;_>2_A0_>2_A0_>2_A0_>i0N3N2N2N2N2M2O2N0000O10O100000O12O1000@PBmNR>Q1PBmNQ>R1QBeNM3T>U1VBiNl=U1VBiNl=U1`0O000O1000O12N2M3N2N2N1O2M3N2N2N2N1O2M3N2N2N2NPbW4"}, "image_id": 19, "id": 295}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 450.0, 37.0, 51.0], "area": 925, "segmentation": {"size": [512, 512], "counts": "^>W1j>M3N2N1O00O1000O10000000O10O10000101000O0O2N2N2N2M3N2N2N1O2N2N2M3N2N2N1O`Q]7"}, "image_id": 19, "id": 296}, {"iscrowd": 0, "category_id": 1, "bbox": [217.0, 478.0, 75.0, 34.0], "area": 1303, "segmentation": {"size": [512, 512], "counts": "lo\\31n?2N2N2M2O1O1O1O1O1N2O1O1O1O1O1N2O11O001O1O1O1O1O1O001O1O1O1O1O1O001O1O00N2O1O1O1O1O1N2O1O1O1O1O1N2O1O1O1O1O1N2O1O10O2N2N1O2N2M3N2N2N2N1O2M3N2N2N2N2Ne`]3"}, "image_id": 19, "id": 297}, {"iscrowd": 0, "category_id": 1, "bbox": [28.0, 486.0, 52.0, 26.0], "area": 704, "segmentation": {"size": [512, 512], "counts": "o_>1n?1N2O1O1O1N2O1O1O1O11O1O1O1O1O0000O1O1O1O1N2O1O1O1N2O1O1O1O1N2O11O1O1O1O001O1O1O1O001O1O1N2M3N1O2N2N2M\\`g6"}, "image_id": 19, "id": 298}, {"iscrowd": 0, "category_id": 1, "bbox": [35.0, 219.0, 27.0, 22.0], "area": 246, "segmentation": {"size": [512, 512], "counts": "Qga02l?3N2N2M20100000O10O100000O10O1000O1N2O01000OO02N2M3NfhP7"}, "image_id": 20, "id": 299}, {"iscrowd": 0, "category_id": 1, "bbox": [28.0, 229.0, 18.0, 16.0], "area": 148, "segmentation": {"size": [512, 512], "counts": "[W>1n?2N1N3N2N2O1O01000000O010N2N2N2N1NehX7"}, "image_id": 20, "id": 300}, {"iscrowd": 0, "category_id": 1, "bbox": [176.0, 245.0, 43.0, 50.0], "area": 792, "segmentation": {"size": [512, 512], "counts": "oXh21n?1DOh@3V?Og@4V?Oh@3V?Oh@3V?Oh@3V?=N1O20O1N2N2N2M2O2N2O1N2NYOHiA6W>LiA1X>1gANY>3hALW>6iAJT>9jAIU>8hAKX>5fAMZ>3dAN]>1bA1]>0aA0a>0\\A1f>OXA1j>OTA0o>0o@0S?=1O2Ak@0W?Nk@OX?Oj@OX?Oj@OX?Oj@OX?OZXb4"}, "image_id": 20, "id": 301}, {"iscrowd": 0, "category_id": 1, "bbox": [7.0, 309.0, 19.0, 20.0], "area": 157, "segmentation": {"size": [512, 512], "counts": "Sj32m?2N2N1O2N2M10000000000O12M3N2N1O2N2NWfb7"}, "image_id": 20, "id": 302}, {"iscrowd": 0, "category_id": 1, "bbox": [22.0, 343.0, 123.0, 111.0], "area": 4258, "segmentation": {"size": [512, 512], "counts": "[[;2m?2N1O2N2M3N2N2N1N3N2N2N2N1N3PASOj>o0TASOj>S1N1010000O100O0O2N2cAdNS>^1kAdNU>\\1hAfNY>a10000O1000O1N2N2N1N3N200O1000O100000O10O100000O1000O100000O1000O1000nN]Af0c>XO_Ag0b>WO`Ai0_>VObAk0^>SOdAm00RO]>n0aATO_>k0_AXOa>h0]AZOc>f0[A\\Od>P11000000N2M2O2N2N2O1000O0100000O1N2N1O2N2O100000O10OoNZAi0f>TO]Al0c>RO_An0i>10O10000000O0100O1N2N3L3N2N2N2N1N3N2N2N2N2M2O]Rg5"}, "image_id": 20, "id": 303}, {"iscrowd": 0, "category_id": 1, "bbox": [87.0, 357.0, 35.0, 25.0], "area": 342, "segmentation": {"size": [512, 512], "counts": "[k[11n?1N3N2N2N2000000O010000000000000O0100000N2N0000000O1000O3N2N2N2N2N2NbdR6"}, "image_id": 20, "id": 304}, {"iscrowd": 0, "category_id": 1, "bbox": [306.0, 372.0, 19.0, 24.0], "area": 198, "segmentation": {"size": [512, 512], "counts": "f[i42n?1N3V@Ke?:M2O20O01O00100O01N1O2M2N3N2M2NjSm2"}, "image_id": 20, "id": 305}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 398.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "^<2`co7"}, "image_id": 20, "id": 306}, {"iscrowd": 0, "category_id": 1, "bbox": [257.0, 411.0, 20.0, 19.0], "area": 157, "segmentation": {"size": [512, 512], "counts": "PmP41n?2N2N2N2N2000000000000000O100N1O2N2N2NiRe3"}, "image_id": 20, "id": 307}, {"iscrowd": 0, "category_id": 1, "bbox": [285.0, 430.0, 14.0, 14.0], "area": 108, "segmentation": {"size": [512, 512], "counts": 
"dm^42m?2N2O1N2N2N2N11N2N2N2N2N2N]RZ3"}, "image_id": 20, "id": 308}, {"iscrowd": 0, "category_id": 1, "bbox": [208.0, 453.0, 57.0, 59.0], "area": 1067, "segmentation": {"size": [512, 512], "counts": "o_X31m?2O1O1O1N2O1O1N2O1O1O1N2O1O1O1N2O1O0O3N1O1OKn@^OP?c0RA\\Om>d0UA[Oj>d0YA[Of>e0[A[Oc>f0;00O02O2N000OM400002M3NJo@@o>`0SA_Ok>b0WA[Oj>e0XAYOh>f090O12N2M2O2N2M3N1O2N2M3N2NcQk3"}, "image_id": 20, "id": 309}, {"iscrowd": 0, "category_id": 1, "bbox": [467.0, 148.0, 45.0, 93.0], "area": 1724, "segmentation": {"size": [512, 512], "counts": "jfY71n?3M2O2M3N1N3M3N1N3N2M2N3N2M1O10O0010O0010O0010O0010O_OQORBo0m=SOSBm0k=VOSBj0l=XOSBi0k=ZOTBe0_=RO_B;1c0_=UO]B;2a0_=VO]B;4>^=ZO[B;5<^=2aBN^=5_BL`=6^BL`=Z11O2O1N101NZK"}, "image_id": 21, "id": 310}, {"iscrowd": 0, "category_id": 1, "bbox": [445.0, 254.0, 24.0, 23.0], "area": 300, "segmentation": {"size": [512, 512], "counts": "Zhn62m?2M2N3M2N3M2O2O010O01000O010O010ON3M2N3M2N3M2NlWe0"}, "image_id": 21, "id": 311}, {"iscrowd": 0, "category_id": 1, "bbox": [480.0, 264.0, 32.0, 74.0], "area": 1416, "segmentation": {"size": [512, 512], "counts": "XZ`71m?3L3N3M2ZOEeA>X>DfA>W>FeA>X>DfA>X>EeA=Y>EeA>X>DfA>W>h0M201O010O010ON2N3M0002N3M2M3N3M2N3N110fG"}, "image_id": 21, "id": 312}, {"iscrowd": 0, "category_id": 1, "bbox": [401.0, 311.0, 17.0, 21.0], "area": 172, "segmentation": {"size": [512, 512], "counts": "WjX62m?1N3M3M2O2M2N2O0O01O3M3N1N3M2O2MWf^1"}, "image_id": 21, "id": 313}, {"iscrowd": 0, "category_id": 1, "bbox": [17.0, 0.0, 24.0, 12.0], "area": 152, "segmentation": {"size": [512, 512], "counts": "P`81o?1O1O1O1O1O1O1O1O1O2N00O1O1O1O1O100O1O1O1O1O1OQ`[7"}, "image_id": 22, "id": 314}, {"iscrowd": 0, "category_id": 1, "bbox": [271.0, 0.0, 72.0, 80.0], "area": 2402, "segmentation": {"size": [512, 512], "counts": "V`W42n?2M3M3N2M3N2M2O2M3N2M3N2N2N2N2N1O2N2N2N2N2N2N2N2O01000000000000000O010000^NgA[1Y>cNiA]1^>N2M3N2M3N2M3NO010O010O010O010O010O00010O010O010O01]Oo@7Q?IQA4R?IQA5Q?IPA5S?HPA6Q?HRA5_?N2MS^d2"}, "image_id": 22, "id": 315}, {"iscrowd": 0, "category_id": 1, "bbox": [326.0, 0.0, 53.0, 30.0], "area": 897, "segmentation": {"size": [512, 512], "counts": "PPS51o?1O1O1O1O2N1O1O1O1O1O2N1O1O1O1O1O1O2N1O1O1O1O1O2N1O0000O1O1O1O1O1O100O1O1O1O1O1O1O100O1O1O1O1O2N3N1N2N2Nk_R2"}, "image_id": 22, "id": 316}, {"iscrowd": 0, "category_id": 1, "bbox": [410.0, 0.0, 61.0, 71.0], "area": 2539, "segmentation": {"size": [512, 512], "counts": "gQ]62l?3L3N2N3M2N3L3N3M2AWOeAl0X>VOfAl0X>VOeAn0W>VOfAl0X>VOfAl0X>`0M2N3L3N3O001O001O001O00001O001O00O1M3N2N2N2N2M3N2N2N2N2N2M3N3M2N3M2M4M2N3M2N2N3L3N3M2N3M2Ml_d0"}, "image_id": 22, "id": 317}, {"iscrowd": 0, "category_id": 1, "bbox": [468.0, 0.0, 6.0, 2.0], "area": 9, "segmentation": {"size": [512, 512], "counts": "PPZ71o?001O0000OQPc0"}, "image_id": 22, "id": 318}, {"iscrowd": 0, "category_id": 1, "bbox": [485.0, 0.0, 27.0, 11.0], "area": 157, "segmentation": {"size": [512, 512], "counts": "P`b71o?00001O001O00001O001O00001O001O00001O001O00001O001O00"}, "image_id": 22, "id": 319}, {"iscrowd": 0, "category_id": 1, "bbox": [227.0, 11.0, 52.0, 94.0], "area": 2592, "segmentation": {"size": [512, 512], "counts": "c`a33l?5L3M4K5L4L4L4K4M4L4L1N01000aAfNX>Z1hAjNS>W1mAlNP>f1L4L4K5L4L4L3L5L00O10O14L4K4M4L4L4K5L4L3L5\\NhAX1`>0000O5L3M4L4K5L4L4L4K4M4LQ]d3"}, "image_id": 22, "id": 320}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 18.0, 4.0, 8.0], "area": 20, "segmentation": {"size": [512, 512], "counts": "b08i?N1N3N]om7"}, "image_id": 22, "id": 321}, {"iscrowd": 0, "category_id": 1, "bbox": [161.0, 34.0, 55.0, 84.0], "area": 2872, "segmentation": 
{"size": [512, 512], "counts": "[a`22n?4K5L4L4L3L5L4L4K5L4L4L3L5L4L4L4K5L4L3M0O100000O0100000O01000O10O1000O5L3M4L4K5L4L4L4K4M4L4L4K5L4L4L3L5L4L4Lelc4"}, "image_id": 22, "id": 322}, {"iscrowd": 0, "category_id": 1, "bbox": [367.0, 41.0, 28.0, 27.0], "area": 471, "segmentation": {"size": [512, 512], "counts": "lag51l?3N3L3N2M4M2M4N10010O0010O0010O0010O001O0N2M4M2N3L3N2Nb^j1"}, "image_id": 22, "id": 323}, {"iscrowd": 0, "category_id": 1, "bbox": [121.0, 48.0, 52.0, 85.0], "area": 2394, "segmentation": {"size": [512, 512], "counts": "Ubl14l?3M4L4K4A_OYAc0e>AWA?h>ETA;m><000O010002WBnN_Gn@Hn@;o>b?6J0000000000000000000?AW]Q1"}, "image_id": 22, "id": 326}, {"iscrowd": 0, "category_id": 1, "bbox": [448.0, 79.0, 23.0, 28.0], "area": 644, "segmentation": {"size": [512, 512], "counts": "_RP7l0T?0000000000000000000000000000000000000000000a]d0"}, "image_id": 22, "id": 327}, {"iscrowd": 0, "category_id": 1, "bbox": [312.0, 85.0, 16.0, 16.0], "area": 142, "segmentation": {"size": [512, 512], "counts": "jRl41n?3N1N3M3N2M2N10O102M2N3N2M3N1NRmk2"}, "image_id": 22, "id": 328}, {"iscrowd": 0, "category_id": 1, "bbox": [294.0, 88.0, 19.0, 21.0], "area": 224, "segmentation": {"size": [512, 512], "counts": "mRc43l?3N2M2O2M3N2M3N0O11N2O2M3N2M3M2O2M3Ni\\S3"}, "image_id": 22, "id": 329}, {"iscrowd": 0, "category_id": 1, "bbox": [399.0, 88.0, 43.0, 90.0], "area": 2584, "segmentation": {"size": [512, 512], "counts": "obW66j?<^Ob0D6J0O10000[BlN^g0_A\\O^>h0]A\\OP>HYBP1D[Oo=IZB^1b=eN[B^1b=fNYB^1c=c0M3M3M1N10O10O1000O01000O103M4K4M3M3L4M3M4K4M3M3hNaAh0c>UO`Ah0c>UO`Ah0o>M3L4M4L3L4M3MQjn2"}, "image_id": 22, "id": 332}, {"iscrowd": 0, "category_id": 1, "bbox": [457.0, 118.0, 44.0, 55.0], "area": 2046, "segmentation": {"size": [512, 512], "counts": "hdT7b0^?3M00000000000oNQ1000000000000000000000O1000000000000000000000000000O1000000000000006JT1lNP[5"}, "image_id": 22, "id": 333}, {"iscrowd": 0, "category_id": 1, "bbox": [244.0, 121.0, 26.0, 27.0], "area": 453, "segmentation": {"size": [512, 512], "counts": "oSj34l?3L4M4L3L5LO10O10O1000O01000O10O10O10O3N4L3L4M4L3Mdkh3"}, "image_id": 22, "id": 334}, {"iscrowd": 0, "category_id": 1, "bbox": [140.0, 122.0, 30.0, 30.0], "area": 465, "segmentation": {"size": [512, 512], "counts": "XTV22m?2N2N2N2N2N2N2N2N2N2N2N2N2N2OO2N2N2N2N2N2N2N2N2N2N2N2N2N2NikZ5"}, "image_id": 22, "id": 335}, {"iscrowd": 0, "category_id": 1, "bbox": [325.0, 128.0, 54.0, 65.0], "area": 2391, "segmentation": {"size": [512, 512], "counts": "_dR54l?5K6J5^O]O]Ah0^>^O[Ai0_>a0K1N10O100000000O01000000001N7J4L00000O01000005K5J6K2N0000O01000000000O010000000O2O5K5K6J5XOTA1R?JSA1R?JSA1fiR2"}, "image_id": 22, "id": 336}, {"iscrowd": 0, "category_id": 1, "bbox": [51.0, 130.0, 89.0, 36.0], "area": 2700, "segmentation": {"size": [512, 512], "counts": "Vdi0:f?`0@6J000000000000000000000O10000000O1000000000000000000000O10000000O1000000000000000000000O1000000000O1000000000000000000000O10000000O10000000000000003M6J00000000000O1000002N`0@Ski5"}, "image_id": 22, "id": 337}, {"iscrowd": 0, "category_id": 1, "bbox": [207.0, 150.0, 44.0, 85.0], "area": 2655, "segmentation": {"size": [512, 512], "counts": "ldW32m?7J5K6J6J5K6I6K6J5K6J6J5J7J4LO1000O100000O1000O1000O1000O100000O10006J6J5J7J5SOjAF\\>4iAG\\>4jAF\\>3kAGZ>4aZR4"}, "image_id": 22, "id": 338}, {"iscrowd": 0, "category_id": 1, "bbox": [60.0, 181.0, 60.0, 57.0], "area": 1756, "segmentation": {"size": [512, 512], "counts": "fVn02f?0`@1^?2_@0`?:M2N2N3N1N2N3M2O2M2N2O2M2N2O10N001O01O01O01O0001O01O0001O01O01O0001O01O00101N3M2N2O2M2N2N3N1N2N2N3N1N2N3M2O1N3M2ObiS6"}, "image_id": 22, "id": 
339}, {"iscrowd": 0, "category_id": 1, "bbox": [135.0, 182.0, 27.0, 26.0], "area": 421, "segmentation": {"size": [512, 512], "counts": "neS21n?4M2N3L3N2M4M2M10O10O010O010O10O10O0101N4M2N2M4M2M3Nki^5"}, "image_id": 22, "id": 340}, {"iscrowd": 0, "category_id": 1, "bbox": [166.0, 184.0, 51.0, 68.0], "area": 1841, "segmentation": {"size": [512, 512], "counts": "TVc22n?2M3N2M3N2M3M3N2o@]O`>e0_A]O_>e0^A^O_>e0_A]O_>e0^A^O_>T1N2M3N2M3N2M3N2M100O0102M3N2M3N2M3N2M3N2M3N2M3M3N2M3N2M3N2M3N1N3N2M3N2M3N2MfXc4"}, "image_id": 22, "id": 341}, {"iscrowd": 0, "category_id": 1, "bbox": [93.0, 230.0, 67.0, 64.0], "area": 2222, "segmentation": {"size": [512, 512], "counts": "Wh^11n?3N2M4M2M3N2N2M4M2M3N2M4M2M2O00O0100O010O010FmNfAS1[>oNcAQ1\\>ROaAn0`>9O10O10O010O010O010O10O10O010O010OIaAnN`>R1bAlN]>U1eAiN[>V180O010O010O04M2N2M3N3L3N2M3N3L3N2M3N3M2Mkg_5"}, "image_id": 22, "id": 342}, {"iscrowd": 0, "category_id": 1, "bbox": [374.0, 267.0, 54.0, 48.0], "area": 1603, "segmentation": {"size": [512, 512], "counts": "iXk52n?3L4M3M3L3N3M3L4M3M3L4MO10O10O10O10O10O10O10O10O10O10O01000O01000O01000O01000O01000O3N3L4M3M3L3N3M3L4M3M3LgfY1"}, "image_id": 22, "id": 343}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 277.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "e82Ygo7"}, "image_id": 22, "id": 344}, {"iscrowd": 0, "category_id": 1, "bbox": [313.0, 280.0, 58.0, 83.0], "area": 2404, "segmentation": {"size": [512, 512], "counts": "Uil41n?3N2M4M2M3N2N2M4M2M3N2M3N0O1YAQO]>Q1aARO\\>P1aASO\\>[1N2M3N3L3N2M3N2M4M2N2M3N2N02M4M2M3N2M3N3ZNRBS1Q>jNRBT1P>jNRBS1Q>kNoAU1T>gNmAY1]>O3N2M3N3L3N2M3N2M4M2N2M3N3L3N2MUUV2"}, "image_id": 22, "id": 345}, {"iscrowd": 0, "category_id": 1, "bbox": [453.0, 292.0, 44.0, 43.0], "area": 1736, "segmentation": {"size": [512, 512], "counts": "TiR7U1k>000000000000000000005K000000001O00000000000000000000K50000000000000000000000000000000lV7"}, "image_id": 22, "id": 346}, {"iscrowd": 0, "category_id": 1, "bbox": [250.0, 298.0, 76.0, 88.0], "area": 2911, "segmentation": {"size": [512, 512], "counts": "oYm31n?3N2M3N2M3N2M3M3N2M2O2M3N2M3NO010O010O01aAnNP>Q1PBROm=P1RBROl=P1QBSOg=HSBX14ROg=HSBX13ROh=V1VBlNh=h1M3N2M3O100O1N2NO3N2M3N2M3N2]NVBk0m=ROVBl0l=ROUBl0n=QOSBo0o=oNPBQ1S>lNoAS1S>kNoAR1T>kNoAS1`>N010O0101N11O2M3N2M3N2M3N2M3M3L4N1N11N3N1N10O3N2M3N2Madl2"}, "image_id": 22, "id": 347}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 321.0, 44.0, 63.0], "area": 1583, "segmentation": {"size": [512, 512], "counts": "[:54P1\\>SObAo0\\>KgA7V>LgA6W>MgA3Y>OdA1\\>1cAO\\>4aAL`>6^AJb>8[AHg>9WAFl>f02M3N2M3N2M2O2M3N2M3N2M3M2O2MlTk1"}, "image_id": 22, "id": 350}, {"iscrowd": 0, "category_id": 1, "bbox": [385.0, 331.0, 38.0, 28.0], "area": 557, "segmentation": {"size": [512, 512], "counts": "cjP62m?3N2M3N2M3N2M3N2M3N0O10O010O010O103L3N2M3N2M100O010O010O010O010O011N3N2M3NXU\\1"}, "image_id": 22, "id": 351}, {"iscrowd": 0, "category_id": 1, "bbox": [483.0, 335.0, 29.0, 33.0], "area": 775, "segmentation": {"size": [512, 512], "counts": "`ja7e0[?001O001O00001O001O00001O00001O001O00001N101O00001O00001`E"}, "image_id": 22, "id": 352}, {"iscrowd": 0, "category_id": 1, "bbox": [418.0, 345.0, 54.0, 83.0], "area": 2540, "segmentation": {"size": [512, 512], "counts": "n\\a64Z?NTA4i>OTA5i>MUA5h>OTA4j>NTA5h>c0BnNmAT1P>oNoAQ1g=jNVB84m0d=nNUB86k0a=_O`B`0^=BbB?Z=EcB=[=EcB=^=CbB=]=CbB>_=A_Bb0`=o0N210O010O00010ON3M2M3N3M2M4M2YOnA]OU>a0mA]OV>?nA]OU>d0jAZOY>e0hAWO[>j0dATO^>l0bAQOb>n0810O0O1N3M2M4M2M3N3L3N3L3Nkdc0"}, "image_id": 22, "id": 353}, {"iscrowd": 0, "category_id": 1, "bbox": [150.0, 353.0, 65.0, 81.0], "area": 
2622, "segmentation": {"size": [512, 512], "counts": "d[[22j?OX@4e?6N2M3M2O2M3N2M3N0O102M3N2M3N2M2O2M3N2M3N2M3lAXNk=k1RBXNl=P2M2O2M3N2O10O1M3N2M3N2M3N2M3N2M2O2M3N2M3N2M3N2O100N1N3N2M3N2M3N2M3N2M3N2M2O2M3NmRd4"}, "image_id": 22, "id": 354}, {"iscrowd": 0, "category_id": 1, "bbox": [479.0, 361.0, 33.0, 31.0], "area": 596, "segmentation": {"size": [512, 512], "counts": "kk_72l?3M2N2N3L3N3M2N3N110O010O010O010O010O0010O0010O01M2N3M2N3M2N3M2N^D"}, "image_id": 22, "id": 355}, {"iscrowd": 0, "category_id": 1, "bbox": [8.0, 379.0, 27.0, 27.0], "area": 424, "segmentation": {"size": [512, 512], "counts": "]\\41l?4M2N2M4M2N3L3O2O01O010O01O01O010O010M2N2M4M2N3L3N3MoS^7"}, "image_id": 22, "id": 356}, {"iscrowd": 0, "category_id": 1, "bbox": [401.0, 381.0, 17.0, 23.0], "area": 257, "segmentation": {"size": [512, 512], "counts": "PlX64l?3M4K5L4L1O0O01000O10O4M3M4K5L4Lac^1"}, "image_id": 22, "id": 357}, {"iscrowd": 0, "category_id": 1, "bbox": [338.0, 385.0, 54.0, 66.0], "area": 1753, "segmentation": {"size": [512, 512], "counts": "`]Y51m?3M3N1N3N1N3N2M2O2M3N1N3M3N1N3N2M2O2M3N1N3N1N3M3N1N3N2M2O0O12M3N1N3N1N3M3iNbAi0_>VObAh0a>UObAi0`>TOcAi0_>VObAi0l>M3N1N3N2M2N3N1N3N2M2O2MVck1"}, "image_id": 22, "id": 358}, {"iscrowd": 0, "category_id": 1, "bbox": [217.0, 396.0, 29.0, 28.0], "area": 463, "segmentation": {"size": [512, 512], "counts": "el\\31o?2M3N2M3N3L3N2M3N1N10O010O010O10O10O011N3N2M4M2M3N2M3N2MScT4"}, "image_id": 22, "id": 359}, {"iscrowd": 0, "category_id": 1, "bbox": [473.0, 402.0, 39.0, 47.0], "area": 1196, "segmentation": {"size": [512, 512], "counts": "Qm\\73m?2M3N2M3N2M2O2M3N2M3N2M3N2M3N2M10O0010O010O010O010O0012M2O0O010O010O010O010O_C"}, "image_id": 22, "id": 360}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 413.0, 16.0, 46.0], "area": 393, "segmentation": {"size": [512, 512], "counts": "m<^1b>N2M4M2M4L3N2M4M2M4M2M3N3L3N2MQcg7"}, "image_id": 22, "id": 361}, {"iscrowd": 0, "category_id": 1, "bbox": [35.0, 418.0, 55.0, 54.0], "area": 1650, "segmentation": {"size": [512, 512], "counts": "nma01o?3M2M4M2M4M2N3L3N3L3N0O10O10O010CTOeAm0[>UOcAj0^>YO_Ag0`>\\O]Ad0d>B0000O10000000O10000000000000000000000000O1000001O000000000000000000O100000O1000000000000000000000000000O1005Ka0^OdP\\2"}, "image_id": 22, "id": 371}, {"iscrowd": 0, "category_id": 1, "bbox": [11.0, 479.0, 27.0, 29.0], "area": 568, "segmentation": {"size": [512, 512], "counts": "To54k?5L5K4L5K2M1000000O0100000O010000000O010005K4K5L5K4L[`\\7"}, "image_id": 22, "id": 372}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 489.0, 10.0, 23.0], "area": 145, "segmentation": {"size": [512, 512], "counts": "Y?g0Z?O001O0_Og@;Y?Bj@<^?N3L3M3N``j7"}, "image_id": 22, "id": 373}, {"iscrowd": 0, "category_id": 1, "bbox": [42.0, 493.0, 61.0, 19.0], "area": 632, "segmentation": {"size": [512, 512], "counts": "j_e06j?000000000000000O1000000000000000000O1000000KL^@4b?500000000O100000000O1O1N2000000O1000000001O001O1O3M00001O9G2N0000000000O1RP\\6"}, "image_id": 22, "id": 374}, {"iscrowd": 0, "category_id": 1, "bbox": [248.0, 506.0, 18.0, 6.0], "area": 96, "segmentation": {"size": [512, 512], "counts": "k_l35k?0000000000000O100000000000000004LR`j3"}, "image_id": 22, "id": 375}, {"iscrowd": 0, "category_id": 1, "bbox": [491.0, 506.0, 21.0, 6.0], "area": 76, "segmentation": {"size": [512, 512], "counts": "noe72l?2N20000001O001O00001O001O001O0000O1N2O1"}, "image_id": 22, "id": 376}, {"iscrowd": 0, "category_id": 1, "bbox": [310.0, 0.0, 25.0, 9.0], "area": 123, "segmentation": {"size": [512, 512], "counts": "PPk41o?00001O001O00001O001O00001O001O00001O001O00O1N2MS`h2"}, 
"image_id": 23, "id": 377}, {"iscrowd": 0, "category_id": 1, "bbox": [353.0, 0.0, 49.0, 40.0], "area": 1089, "segmentation": {"size": [512, 512], "counts": "P``51o?001O00001O0V@Nf?7O001O0n@H\\>8bAJ^>7^AMa>3]AOc>2YA1g>OWA4h>LVA6j>KRA9m>=01O00001O001O00001O001O00001ON2N2M3N2N2M3N2N2M3N2M3N2N2M3N2NRPg1"}, "image_id": 23, "id": 378}, {"iscrowd": 0, "category_id": 1, "bbox": [396.0, 0.0, 78.0, 48.0], "area": 2387, "segmentation": {"size": [512, 512], "counts": "UQV62k?4M2N3L3N2M4M2M4M2M3N3M2M4N10010O0N3L301O000000M3N2M3N2M3N2M31O00001O001O0000O1O2O010O00010O010O00010O00N3L30001O001O00001O000000N2M3M3N2M3N2M3Ih@DZ?:7M3M3N2MSPc0"}, "image_id": 23, "id": 379}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 0.0, 50.0, 73.0], "area": 2286, "segmentation": {"size": [512, 512], "counts": "jQW72l?2M4L3N2M4M2M4L3N2M4M2M4L3N2M4N100010N1O20O0010O0010O001M2M3N3L3N2M4M2M4M21O01O01O0M4M2M3M4M2M3N3IcAfN[>"}, "image_id": 23, "id": 380}, {"iscrowd": 0, "category_id": 1, "bbox": [162.0, 29.0, 67.0, 82.0], "area": 2719, "segmentation": {"size": [512, 512], "counts": "PRa23k?2N3M2N3M2N3^A_Oa=c0]B_Oc=b0ZBAe=?YBCh==UBFj=:TBHm=8PBKo=5oAMR>3kA0T>0jA2W>NfA5Y>KeA7Z>i0L3N3M2N3M2N3M2N3M2M4M21O0N3M2N3M2N3M2N3M2N3M2N3M2N3M2fN`AR1c>kN`AR1h>N3M2N3N1010O010O010ON3M2N3M2N3M2N3M2N3M\\^]4"}, "image_id": 23, "id": 381}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 33.0, 29.0, 43.0], "area": 695, "segmentation": {"size": [512, 512], "counts": "Q1[1f>M2N3M2N3M2N3N110O010O010O010O010O010M2N3M2N3M2N3M2N3M2Nc^a7"}, "image_id": 23, "id": 382}, {"iscrowd": 0, "category_id": 1, "bbox": [212.0, 62.0, 69.0, 78.0], "area": 2666, "segmentation": {"size": [512, 512], "counts": "hSZ33k?3L3N3L3N2M40O01O01O010O01O01O010O01M2M3N3L3N3M2M4M2M3N3M2M4M2M3N3M2M4M2N2M4O010OM3N3L3N3L3N2M4M210O00010O0SOhA3X>IkA7U>GnA9Q>^OhAL9f0P>[OiAL:i0m=XO]Bh0c=UO_Bi0^>M2M3N3L3N3L3N2Nn\\c3"}, "image_id": 23, "id": 383}, {"iscrowd": 0, "category_id": 1, "bbox": [46.0, 68.0, 73.0, 66.0], "area": 2372, "segmentation": {"size": [512, 512], "counts": "lRg02k?3N3L3M4L3M3M4L3O110O0010O00010O00010O01O01O01O01O010O01O01O01O0`ASOn=l0oAWOQ>j0kAZOT>f0jA\\OW>c0fA@Z>a0bAC]>P110O00010O0010O0010O00010O0010O00O2MO3M4M2M3M4M2M3M4M2M3N3L3M3N3L3M4M[]T6"}, "image_id": 23, "id": 384}, {"iscrowd": 0, "category_id": 1, "bbox": [286.0, 99.0, 59.0, 85.0], "area": 3013, "segmentation": {"size": [512, 512], "counts": "UT_43k?2N3YAK\\=NcA:o0KZ=:cBI[=:bBH\\=:XB]OJH^B9ZO1U>J^Bg0_=[O^Bh0_=[O_Bg0_=\\O]Bh0`=ZO^Bh0`=[O]Bh0b=XO[Bk0e=UOYBm0h=f0M2N3L3N3L3N210O010O00010N1M4L3N2M1O@UCoMlM4M2M4M2N2M4MW[j1"}, "image_id": 23, "id": 386}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 116.0, 2.0, 4.0], "area": 6, "segmentation": {"size": [512, 512], "counts": "d34l?N^ln7"}, "image_id": 23, "id": 387}, {"iscrowd": 0, "category_id": 1, "bbox": [106.0, 131.0, 71.0, 72.0], "area": 2860, "segmentation": {"size": [512, 512], "counts": "QVe11l?4L3M4L3M3M4L3M3N3L3M4L3M3M4L3M3N3L3M4L3M310ON3L3N2M4O01O01mNkA9U>DoA@RB`0n=^OTBc0k=ZOYBe0h=WO[Bi0e=UO]Bl0b=QObBn0X>010O0N3M2M3M4M2M3M4M0O00010O0003N3L3M3N3L3M3N3L3M4M2M3M4M2M3M4M2Mj[W5"}, "image_id": 23, "id": 388}, {"iscrowd": 0, "category_id": 1, "bbox": [22.0, 139.0, 85.0, 57.0], "area": 2953, "segmentation": {"size": [512, 512], "counts": 
"iT;b0^?00000000000000000000=C00000000000000000000004L0000000000000000OTAPOj>S101O00001O001O00001O0N2M4M2M4L3O11O0N3L3M3N3L3O1010O01O01O01O010O01O000N3N1010O001N1N2M4L3N3L3Do@GS?7o@FU?6a=F[B>a=E\\B>b=R1L3N2M4M2M2OO2O3O0010O00010O010O0001O0N3cMdBQ2_=mMcBP2`=mMcBQ2h=L3N1CQBhNP>X1RBeNn=[1VBbNi=_1>L3M3N3L3N2M4M201O01O01O010N1N2M4M2M4L3N2M4MT[f0"}, "image_id": 23, "id": 390}, {"iscrowd": 0, "category_id": 1, "bbox": [168.0, 153.0, 74.0, 77.0], "area": 3154, "segmentation": {"size": [512, 512], "counts": "YVd22k?3M3M4L3M3e@@S?h001O01O01O0M3M4M2M3M4M2M3N3L3M3N3M02O2O00010OPBXNh=h1UB\\Nj=m0VBJk=2XBNh=N\\B3c=JaB5`=GcB9]=GdB9[=GeB9\\=GcB9]=GdB9[=GeB9\\=GcB9]=G`B^ODk0l=G\\BAHh0l=GYBDKe0l=GZBDJe0l=GYBc0g=]OYBd0f=\\O[Bc0f=]OWBe0i=[OTBi0k=f01O01O01L3M3M4L3M3M4M2M4L3M3M4L3M3N3L3M3M4L3M3MijV4"}, "image_id": 23, "id": 391}, {"iscrowd": 0, "category_id": 1, "bbox": [443.0, 169.0, 68.0, 84.0], "area": 2703, "segmentation": {"size": [512, 512], "counts": "egm61k?4M4K4L4M3L5K4N200010OL4M4K4L4N2010O000010O000010K4M3M3L5L3N03M2M3M3N3L3N2M3N2M03010O0N2M2N3N2N3OO2M2M3N3L3N3L3N2N3O0O2L3N2M4L3An@NT?0n@MV?On@NT?0n@Mei0"}, "image_id": 23, "id": 392}, {"iscrowd": 0, "category_id": 1, "bbox": [232.0, 193.0, 94.0, 65.0], "area": 3553, "segmentation": {"size": [512, 512], "counts": "WWd33j?3M4L3M3M4L3N2010OPAYOg>g0VA\\Oj>m010O00010O00010O2N10O00010O00010O00010O00010O00010M200010O000N3L3N2M4L003N2M3M4M2M310OO1M4L3M301O01O01O01O01O01O01O01O01O01O01O01O01QOeA7[>FhA;W>BmA=T>_OoAa0Q>\\ORBe0m=XOWBg0j=UOYBk0^>10O0M3M4L3M3M4L3M3Mmhl2"}, "image_id": 23, "id": 393}, {"iscrowd": 0, "category_id": 1, "bbox": [336.0, 232.0, 82.0, 70.0], "area": 2982, "segmentation": {"size": [512, 512], "counts": "[XX52k?4M2M3N3L3N3M2M3N3L3N3L3eAmNg=V1UBmNh=V1VBmNi=S1TBPOl=Q1QBROn=n0oAUOR>\\1010O01O01O010O01O01O010hNnA`0R>^OPBc0o=ZOTBf0m=WOVBi0i=TOZBl0g=QO\\Bn0d=oN_Bo0d=nN^Bo0\\>M2N30O01O01O010O01O01O010O01O01O010N1M4M2M3N3L3N3L3N21M2M4M2M3N3M2M4M2M3N3L3N3L3N2M4M2NQh^1"}, "image_id": 23, "id": 394}, {"iscrowd": 0, "category_id": 1, "bbox": [7.0, 235.0, 57.0, 90.0], "area": 3299, "segmentation": {"size": [512, 512], "counts": "bh35k?;E;E000000O1000O1000004L4L00000000:F7I0O01000mNaNeC_1[OQB1o=MTB3l=IWB7i=F[B9e=_OoAH?j0b=ZOfBf0Z=WOjBh0W>00M4L3M3M4M2M3M]Vf4"}, "image_id": 23, "id": 398}, {"iscrowd": 0, "category_id": 1, "bbox": [416.0, 288.0, 96.0, 61.0], "area": 3315, "segmentation": {"size": [512, 512], "counts": "iY`62l?3M2M4M2N3M2M3N3M2M4M210O0010O0010O010O0010O0010O010O0010O010O0010O010O0010O010O0010O0010O010O010O00QAROl>Q110O01O010O01O0O2M2N3L3N2N3L3N3M210O01O010O0M3N3M2M4M2N2M2011O01O010O010N1N2M4M2O2O001O0NgF"}, "image_id": 23, "id": 399}, {"iscrowd": 0, "category_id": 1, "bbox": [224.0, 298.0, 72.0, 73.0], "area": 2866, "segmentation": {"size": [512, 512], "counts": "hZ`34i?3N2M4L3M4O01O01O01O01O010O00010O00010O0N3L3N2M4L3M4L3M3M4L3N2M4L3O20O00010O00010O01L300010O010O00010O00010O01OUOTBBm=:VBFj=WOUBl04Nk=NYB1g=L\\B4d=I_B8a=EbB:^=CeB=[=@iB=Z=_OiB>Z=_OiB>_>M2M3M4M2MWe[3"}, "image_id": 23, "id": 400}, {"iscrowd": 0, "category_id": 1, "bbox": [295.0, 325.0, 93.0, 56.0], "area": 3007, "segmentation": {"size": [512, 512], "counts": "^kc42k?4L3N3L3M3N3L3N2M4N101O01O01O010O00010O00010OM3N3L3N3L3ON3N2M4L3M4L3M3N30O01O01O010O00010O01O01O01O010O00010O00010O0010O0010O0010O00010O00010O01M2M3N30O000M4L3M3M4L3M4M2M3M4L3M3M4M2M3Mbem1"}, "image_id": 23, "id": 401}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 339.0, 9.0, 23.0], "area": 110, "segmentation": {"size": [512, 512], "counts": "c:g0[?M3N2M4M2N2M4M2MlTk7"}, "image_id": 23, "id": 402}, {"iscrowd": 0, 
"category_id": 1, "bbox": [26.0, 340.0, 47.0, 85.0], "area": 2707, "segmentation": {"size": [512, 512], "counts": "P\\=:f?;D;F:F000000O10000000O1hNlNdCT1\\d09000nNF^B:b=F^B<`=D`B<`=D`B<`=R1000000000000000000000001O000000000000000000C=C=B>Cbeh5"}, "image_id": 23, "id": 404}, {"iscrowd": 0, "category_id": 1, "bbox": [384.0, 350.0, 99.0, 62.0], "area": 3729, "segmentation": {"size": [512, 512], "counts": "k[P61l?3M4L3M3N3L3M3M4L3N2O20O01O01O01O01O01O010O01OZAROZ>o0bATO^>l0_AWOb>T1O00010O0010O00010O0010O00010O00010O0010O00010O0010O00010O00010L3M2OO4L3M3N3L3M310O01O0O1M4L3M3O2O00010O00010O0010O0010OO1N3L3M3N30O00ZOTA6o>FTA7o>FUA6n>GUA7[?M4L^T>"}, "image_id": 23, "id": 405}, {"iscrowd": 0, "category_id": 1, "bbox": [144.0, 361.0, 66.0, 64.0], "area": 2599, "segmentation": {"size": [512, 512], "counts": "l\\X22j?5K4K5L4K5L5N11O0001O013L00001O01O0001O01O000001O01O0001O01OO1L3L105K4001O00O2G8L40O2M2000001O01O0000010O000001O01O00nNmA7S>EQB9R>ASB;Q>ATB:P>AUB:k>L4Klcf4"}, "image_id": 23, "id": 406}, {"iscrowd": 0, "category_id": 1, "bbox": [210.0, 383.0, 72.0, 58.0], "area": 2477, "segmentation": {"size": [512, 512], "counts": "U]Y32i?5K5J60001O0h@_OQ?i00O000000010O00000000010O000000010O0M3001O01Oh@\\OT?h0000000010O00000001L3J6K5K5J7M2000N3O00000000N3O01O000000010O00000001O01VOhAG[>4jAG[>4jAG[>3kAG[>4l0JRdb3"}, "image_id": 23, "id": 407}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 385.0, 18.0, 42.0], "area": 536, "segmentation": {"size": [512, 512], "counts": "S0O1000O10O1000O5L4L4L4K5L4L4L4K5Lkbf7"}, "image_id": 23, "id": 408}, {"iscrowd": 0, "category_id": 1, "bbox": [281.0, 399.0, 63.0, 58.0], "area": 2256, "segmentation": {"size": [512, 512], "counts": "fm\\46b?8J61O00e@AU?e0000001O0001O0000000001O0001O00000001O000001O03M000K5I7I7J7K4001O0000000M3M31O01O000000000001O01O000000000QOjA4V>DRB=m=[O[B>h>G8Hjbc2"}, "image_id": 23, "id": 409}, {"iscrowd": 0, "category_id": 1, "bbox": [343.0, 411.0, 86.0, 45.0], "area": 2532, "segmentation": {"size": [512, 512], "counts": "mm[55c?8G9M301O01OJ60000000010O0000000001O0001o@SOm>P10000001O0000O10000O10O10O1000M30O10O10O1000O01000O01000O01000O04M0000O01000O01000O0100001N4M3M0O0Mn@ZOR?e050O10O10O10O10O13L4M3M3L4M3M`RY1"}, "image_id": 23, "id": 410}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 431.0, 1.0, 4.0], "area": 4, "segmentation": {"size": [512, 512], "counts": "_=4]bo7"}, "image_id": 23, "id": 411}, {"iscrowd": 0, "category_id": 1, "bbox": [48.0, 493.0, 32.0, 19.0], "area": 503, "segmentation": {"size": [512, 512], "counts": "a_h05k?8H2N0000000O1000000000000O100000000000000O1000000000000O17I7IU`g6"}, "image_id": 23, "id": 412}, {"iscrowd": 0, "category_id": 1, "bbox": [87.0, 493.0, 34.0, 19.0], "area": 489, "segmentation": {"size": [512, 512], "counts": "do[13m?7I2N00000O10000000000O1N20000O10000000000O1000000000000O1005K7I6JQPS6"}, "image_id": 23, "id": 413}, {"iscrowd": 0, "category_id": 1, "bbox": [133.0, 494.0, 21.0, 18.0], "area": 313, "segmentation": {"size": [512, 512], "counts": "aoR24k?8I4L0000000O10000000000O1000000003M7I6JR`b5"}, "image_id": 23, "id": 414}, {"iscrowd": 0, "category_id": 1, "bbox": [168.0, 505.0, 18.0, 7.0], "area": 94, "segmentation": {"size": [512, 512], "counts": "l_d22n?2N000O10000000000O100000000O1001OV`R5"}, "image_id": 23, "id": 415}, {"iscrowd": 0, "category_id": 1, "bbox": [215.0, 510.0, 6.0, 2.0], "area": 7, "segmentation": {"size": [512, 512], "counts": "oo[31o?0000000O1RPa4"}, "image_id": 23, "id": 416}, {"iscrowd": 0, "category_id": 1, "bbox": [20.0, 511.0, 4.0, 1.0], "area": 4, "segmentation": {"size": [512, 
512], "counts": "o_:1o?00000Q`c7"}, "image_id": 23, "id": 417}, {"iscrowd": 0, "category_id": 1, "bbox": [31.0, 511.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "oo?1PP`7"}, "image_id": 23, "id": 418}, {"iscrowd": 0, "category_id": 1, "bbox": [474.0, 211.0, 38.0, 67.0], "area": 1338, "segmentation": {"size": [512, 512], "counts": "nW]72m?2N1O2N2N2M3N2N2N2N1O2M3N2N2N2N2N2M2O2N2N2N2N2N2M3N1OO100000O100000O1000O1^I"}, "image_id": 24, "id": 419}, {"iscrowd": 0, "category_id": 1, "bbox": [376.0, 241.0, 22.0, 19.0], "area": 183, "segmentation": {"size": [512, 512], "counts": "lWl51o?1N2N3N1N3M0010O000010O000010O002N2O2M2N2OYhh1"}, "image_id": 24, "id": 420}, {"iscrowd": 0, "category_id": 1, "bbox": [426.0, 0.0, 30.0, 11.0], "area": 171, "segmentation": {"size": [512, 512], "counts": "PPe61o?001O00001O00001O001O00001O001O00001O001O00001O001OO1M3N2MSPl0"}, "image_id": 25, "id": 421}, {"iscrowd": 0, "category_id": 1, "bbox": [460.0, 0.0, 52.0, 31.0], "area": 1026, "segmentation": {"size": [512, 512], "counts": "WPV72l?2M4M20001O00001O001O00001O000_@C_??01O00001O00001O001O00001O00001O00001O0i@ZOT?i00001O001O00001O00001O001O0000M3"}, "image_id": 25, "id": 422}, {"iscrowd": 0, "category_id": 1, "bbox": [72.0, 499.0, 38.0, 13.0], "area": 250, "segmentation": {"size": [512, 512], "counts": "o_T11o?000O10000O10000O100O10000O10000O100O10000O10000O10000O100O10000O13M2N3M3MR`X6"}, "image_id": 25, "id": 423}, {"iscrowd": 0, "category_id": 1, "bbox": [161.0, 264.0, 160.0, 133.0], "area": 12057, "segmentation": {"size": [512, 512], "counts": "Pk`23k?2^@MT?7h@LV?6h@LV?a0L301O01O01N1N3L3N2M4M2N3L3N2M4M2M4M2M4M2N2M4M2M4M2M3NO010O10O010O4M2M4M2M40O0010O0010O0010O0010O0010O010O00010O010O010O00010O010O01O01O010O01O02N010O01O010O01O010O01O010O01O01O010O01O01O010O010O01O01O2O0O010O00010O010O00010O010O001M2N2M4M2M4M2N3L3N2N3M2N3M2N2N3M2O2M2N3M2N2N3M2N3M2O2M2N2N3M2N3M2N2N3N1N3M2N3M2NVVo2"}, "image_id": 26, "id": 424}, {"iscrowd": 0, "category_id": 1, "bbox": [42.0, 298.0, 35.0, 23.0], "area": 355, "segmentation": {"size": [512, 512], "counts": "lYe02l?2M3N3M2N3M2M4M02N3M2N3M2N30O010O010O0010O010O010O010O010O010N1N3M2N\\Vi6"}, "image_id": 26, "id": 425}, {"iscrowd": 0, "category_id": 1, "bbox": [23.0, 314.0, 16.0, 22.0], "area": 185, "segmentation": {"size": [512, 512], "counts": "Zj;2l?3M2M4M2N3M2N3NO2N2N3M2N3L3N3MUV\\7"}, "image_id": 26, "id": 426}, {"iscrowd": 0, "category_id": 1, "bbox": [64.0, 328.0, 16.0, 22.0], "area": 175, "segmentation": {"size": [512, 512], "counts": "jZP11m?2M4M2M4M2M4M2OO3M2N2N3L3N3M2Ngeg6"}, "image_id": 26, "id": 427}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 386.0, 23.0, 65.0], "area": 784, "segmentation": {"size": [512, 512], "counts": "RfFA[9=fFCZ9;hFFW98lFGU97lFIT95nFLQ92RGMo80SG1l8NVG1k8LWG4i8JYG7f8G]G8d8F]G:c8D_G=`8AcG>^8_OdGb0[8]OfGc0Z8[OiGd0X8YOjGh0U8WOmGh0T8UOnGk0R8SOPHk0R8SOQHk0Q8SOPHk0R8SOPHk0R8SOQHk0Q8ROQHl0Q8SOQHj0Q8TOQHj0R8SOPHl0Q8SOQHj0R8SOPHk0R8SOPHk0e;N1N2N3N1N3M2N2O2M2N2N3N1Nhb<"}, "image_id": 27, "id": 431}, {"iscrowd": 0, "category_id": 1, "bbox": [471.0, 287.0, 41.0, 114.0], "area": 2722, "segmentation": {"size": [512, 512], "counts": "hj[72m?3N1N3]NJWC8hl@dA`0\\>BaA?^>C`A>_>D_A=`>F^A:b>G\\A:c>b0O100O1O100O1O1O100O1O100001O2N1O1O2N1O2N1O1O2N1O1O2N1O1O2N1O2N1O1O2N1O1O2N1O2N1O1O2N1O1O2N1O1O2N1NS`[6"}, "image_id": 27, "id": 436}, {"iscrowd": 0, "category_id": 1, "bbox": [22.0, 0.0, 40.0, 105.0], "area": 3521, "segmentation": {"size": [512, 512], "counts": 
"ZR;1o?g0XO8I0gMGkD9U;=UDCk;T1^ClNb]K_OmL0i7a0ZK_OmL0i7BSH0W3>mL0i7AZL?mK0j7@YL`0mK0n<0RC0n<0QC1o00000000000O1f0ZOfhU1"}, "image_id": 28, "id": 438}, {"iscrowd": 0, "category_id": 1, "bbox": [489.0, 117.0, 23.0, 257.0], "area": 4237, "segmentation": {"size": [512, 512], "counts": "ncd73m?e0[Of0ZOe0[Oe0[Oe0[Oe0[Oe0RO]KUEX5V:n0[Oe0[Oe0[O4L1O1O1O1O001O100O1O1YL"}, "image_id": 28, "id": 439}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 355.0, 58.0, 127.0], "area": 3705, "segmentation": {"size": [512, 512], "counts": "S;o3R0000000000000000000000000O1jR;"}, "image_id": 28, "id": 441}, {"iscrowd": 0, "category_id": 1, "bbox": [493.0, 432.0, 19.0, 15.0], "area": 285, "segmentation": {"size": [512, 512], "counts": "`mf7?a?00000000000000000000000000000000000`B"}, "image_id": 28, "id": 442}, {"iscrowd": 0, "category_id": 1, "bbox": [88.0, 483.0, 246.0, 29.0], "area": 6271, "segmentation": {"size": [512, 512], "counts": "T_\\1l0T?0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000O10000000000000000000000000000000000000000000000000000000000000000000000001O001O001O001O001O001O001O1O001O001O001O001O001O001O001O001O001O1O001O001O001O001O001O001O001O001O001OR`h2"}, "image_id": 28, "id": 443}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 22.0, 78.0, 78.0], "area": 3428, "segmentation": {"size": [512, 512], "counts": "Ub]12k?4L3M3M4L3M3O2O01O01O01O010O00010OC]O]Ac0a>_O_Ab0]>BcA=Z>FfA:W>IiA8T>KlA4Q>OmA3P>0mA4R>m01O01O01O01O01O01O01O01O01O01O01O01O01O01mAWNm=i1PB[NP>j100010O00010ON2O20O00010O000oNVBMj=OYB1g=NZB2f=OZB1f=NZB2f=_OSB17`0f=\\OVB54?f=XOZB53c0c=UO]B53c0Q>ZOSBb0Q>ZORBc0g>M4L3M3M4Lj][5"}, "image_id": 29, "id": 444}, {"iscrowd": 0, "category_id": 1, "bbox": [492.0, 79.0, 20.0, 58.0], "area": 774, "segmentation": {"size": [512, 512], "counts": "QTf72j?5L3EHk@;R?Hk@L3M3SOXA?l>]OWA`0V?L5L3M3M4LllQ7"}, "image_id": 29, "id": 446}, {"iscrowd": 0, "category_id": 1, "bbox": [329.0, 107.0, 69.0, 71.0], "area": 3176, "segmentation": {"size": [512, 512], "counts": "ndT53j?4L3M3L5L3M310_OZOfAf0W>]OiAc0T>@kAb0Q>AlAb0Q>BkAa0R>BkAb0R>g00010O00010O00010O0001O01O01O01O00010O00010O00010M2N2010O00010O00010O00010M2M3L5L3M3M3N3O0O1M4K4FRA_OQ?>RA_OR?=:M3M4K4M[lh1"}, "image_id": 29, "id": 447}, {"iscrowd": 0, "category_id": 1, "bbox": [51.0, 109.0, 68.0, 90.0], "area": 3131, "segmentation": {"size": [512, 512], "counts": "eei01l?3M4L3M3L5L300010O0000N3L3M3M4L30001O01O01XAjNd>Y11OO2K4M3M4L3M3M4L3M3010OM3M4L3L4M3O2O00010O00O2O0O1WOiBfN[=V1hBgN[=V1hBfN\\=Y1fBcN^=\\1dB_N_=a1`0N3L3M3M6J3M4K4M3M4L3M3M4L3L\\\\T6"}, "image_id": 29, "id": 448}, {"iscrowd": 0, "category_id": 1, "bbox": [121.0, 109.0, 56.0, 74.0], "area": 1886, "segmentation": {"size": [512, 512], "counts": "[el12k?3M3L5N10010O00010O0001O01O01O01O0O1M4nN_O`Bd0]=_O_Be0]=_O`Be0\\=_O`Bd0]=_O`Bd0]=_O`Be0]=^O^Bf0c=YOZBj0f=h0010O00010O000M4L3N20001EoAbNT>[1oAbNT>[1;K4M3M4L3M3L5L300N3L3L4M3M]\\W5"}, "image_id": 29, "id": 449}, {"iscrowd": 0, "category_id": 1, "bbox": [443.0, 138.0, 69.0, 67.0], "area": 3149, "segmentation": {"size": [512, 512], "counts": "kem61l?3M4L3DFo@=n>Fo@>m>Fo@=n>0O000oJ"}, "image_id": 29, "id": 450}, {"iscrowd": 0, "category_id": 1, "bbox": [306.0, 172.0, 74.0, 70.0], "area": 2955, "segmentation": {"size": [512, 512], "counts": 
"PVi43j?3M4L3M3M4`A]O]=g0`B]O^=d0^B@c=`0YBCg==VBFj=:SBJl=6TBJm=6RBJn=6SBJl=6TBJm=6RBJn=6SBJl=6TBJm=6RBJn=6RBKm=5TBJm=6RBJn=6RBKm=5TBJm=6RBJn=6RBKm=5TBJm=6RBJn=6RBJn=6PBMo=3nA0S>0iA3W>l0O01O01O01O01O0N2010O000SOmANS>1mAOS>1mAOS>1nANS>2lAOS>1mAOS>1nANR>3mANS>1mAOS>1nANR>2nAKV>5iAHZ>8gAD\\>?_A^Od>b0\\AZOh>f0YAVOk>j0500010O00M4L3M3M3M4K4M3MUjQ2"}, "image_id": 29, "id": 451}, {"iscrowd": 0, "category_id": 1, "bbox": [100.0, 181.0, 47.0, 40.0], "area": 1192, "segmentation": {"size": [512, 512], "counts": "`Vb11l?3M3M4L3M4L3M3N30O00010O00010ON2M4M20010O01O01O01O01O01O01O01O01O01O01O01N1M3M4M2M3M4L3M3M4LTZf5"}, "image_id": 29, "id": 452}, {"iscrowd": 0, "category_id": 1, "bbox": [426.0, 212.0, 82.0, 69.0], "area": 3275, "segmentation": {"size": [512, 512], "counts": "ZWe61l?3M3M4L3YACh=`0UBCg=b0UBBg=a0VBBj=>SBEm=7hA0X>l0010O00010O00010O0001O0N20010O00010O0001N1M3010O00010O0003NO01O01O01O01O01O01OiNdAj0]>ROfAn0Z>oNjAP1c>1O01O01O00010O00010O00010O00010O0003NON2M4L3M3M4L3M3M4L3MYh1"}, "image_id": 29, "id": 453}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 286.0, 90.0, 76.0], "area": 3822, "segmentation": {"size": [512, 512], "counts": "VZR62l?2N3L3N3M2N3M2N3M2N3M201O010O01O01O010O010O01M2O20O010O0O2M2N3M2N3M2010OO2M2N2N3M2N3M2O20O010O010O010O010O01lAXNo=l1010O01N1N3M2010O0010O010O0100O001M2M4M2N3M3M2N3M2N3_O^A@d>>_A_Od>>^A_Of>>\\A@f>>]A_Of>>?E`@4c?I_@5h?L_f`0"}, "image_id": 29, "id": 454}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 291.0, 69.0, 88.0], "area": 3161, "segmentation": {"size": [512, 512], "counts": "\\:j0T?3RATOd>n0ZAUOe>l0XAVOi>Q1O0100O010O0100O0100O010O01M3M2BgNTB[1j=fNTB\\1j=gNSB\\1j=fNTB\\1j=?N1N1O00L[BoMf=P26N2O2M2N2N11N2N3M2O2M2N3N10N3N1N3M2N3M2O2M3M2N3N1N3N101M2CZA^Oi>?YA_Oi>?ZA^Oi>?YA@h>>?M2N3N2M2N3MaVm6"}, "image_id": 29, "id": 455}, {"iscrowd": 0, "category_id": 1, "bbox": [507.0, 304.0, 5.0, 13.0], "area": 35, "segmentation": {"size": [512, 512], "counts": "iim72l?2N3M2M4_F"}, "image_id": 29, "id": 456}, {"iscrowd": 0, "category_id": 1, "bbox": [484.0, 328.0, 28.0, 57.0], "area": 954, "segmentation": {"size": [512, 512], "counts": "b[b72l?2N3M2N3M2N3M2N3M2N3O0HWOYAj0d>YOYAi0e>YOYAj0d>:N3M2N3M210O010O010O010O0dE"}, "image_id": 29, "id": 457}, {"iscrowd": 0, "category_id": 1, "bbox": [234.0, 375.0, 71.0, 72.0], "area": 3058, "segmentation": {"size": [512, 512], "counts": "a\\e32k?4d@Lh>6VAMf>7VALh>6VALg>7VAMh>5UAMk>e00O00010O010O00010O0010O0010O0010O0010OYAoN_>Q1^AROc>V1O00010O0M3N3L3M3N3L3N2N3O000N3O01O010O00010O001N10001O0M4M2M3CkAlNY>5iA:1^OY>5iA4hA`0`>^OcAa0]>\\OfAe0k>M2M3M4L3M3N`SW3"}, "image_id": 29, "id": 458}, {"iscrowd": 0, "category_id": 1, "bbox": [69.0, 415.0, 52.0, 97.0], "area": 3343, "segmentation": {"size": [512, 512], "counts": "ToR11l?4L3o@Ib=3WB73Jc=3VB64Jc=3VB73Jc=2WB73Jc=3UB75Ic=n0YBVOc=e1M3L4M31O00O1M3M3M3M30000001O00001O00001O000M4L3L4M4L3J6M4L3L4M3M4L3M3M4_O]ABd>:`AG_>6dABI0d>:gABH4a>L]A6d0KW>2m0MfRS6"}, "image_id": 29, "id": 459}, {"iscrowd": 0, "category_id": 1, "bbox": [466.0, 475.0, 46.0, 37.0], "area": 1138, "segmentation": {"size": [512, 512], "counts": "o_Y71m?2N2N2N2M3N2N2N2N2N2N2N2M3N2N2N2N2001O001O001O001O001O00001O001O001O0WOo@c0R?ZOPAf0V?O001O00001O001O00"}, "image_id": 29, "id": 460}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 154.0, 18.0, 35.0], "area": 319, "segmentation": {"size": [512, 512], "counts": "j4S1n>N2N2N3M2O1N2N2N3M2O1N2N3M2N2N2O1Ncjf7"}, "image_id": 30, "id": 461}, {"iscrowd": 0, "category_id": 1, "bbox": [274.0, 133.0, 69.0, 86.0], "area": 3095, "segmentation": {"size": [512, 512], "counts": 
"XVY42l?3M2M3N3M2N3M2N3M2N3L3N3M2N3M2N3M2N2M4M2N3M2N3M2N3L3N3M2N3M2N2O20O01O010ON3M2N3L3N3M2N3M2N2N3L3N3M2N3M2N3L3N2N3M2N3M2N3L3N3M2N3M2N2M4M2N][d2"}, "image_id": 31, "id": 462}, {"iscrowd": 0, "category_id": 1, "bbox": [272.0, 206.0, 15.0, 15.0], "area": 117, "segmentation": {"size": [512, 512], "counts": "dVX42m?1O2N2M3N2000O10O10O1M3N2N1O\\Y`3"}, "image_id": 31, "id": 463}, {"iscrowd": 0, "category_id": 1, "bbox": [268.0, 218.0, 67.0, 54.0], "area": 1654, "segmentation": {"size": [512, 512], "counts": "kWV43l?2N2M2O2N2M3N2N1N3N2N2M2O1OGPACP??UA_Ol>a0VA]Oj>d073N2N1N3N2N2M3000O010000000O0100000TOUAb0j>\\OYAc0h>[OZAe0f>YO\\Ag0n>1000O10O1N2N2M2O000O11O2M2O2N2M3N1O2M3N2N1N3N2N2M2O2N[Xh2"}, "image_id": 31, "id": 464}, {"iscrowd": 0, "category_id": 1, "bbox": [176.0, 278.0, 85.0, 74.0], "area": 2567, "segmentation": {"size": [512, 512], "counts": "^Yh21m?2O2N2M3N1O2M3N2N1N3N2N2M2O2N2M3N2N11000O10O10O1000O10O1000O10O1000O10O1000O10O1000O0100000O0100000O0100000O0100000O010000O0100000O01M3N2N1N3N2N2N1N3N2N2M2O2N2M3N1O2M3NjUm3"}, "image_id": 31, "id": 465}, {"iscrowd": 0, "category_id": 1, "bbox": [76.0, 54.0, 51.0, 35.0], "area": 1224, "segmentation": {"size": [512, 512], "counts": "ZRV14l?6J5[OD\\AKTA6l>?O100000O100000O1000O100000O100000O100000O1001N7J6J00O100000O10O100000000O010000000O11N7J7Ic]P6"}, "image_id": 32, "id": 466}, {"iscrowd": 0, "category_id": 1, "bbox": [501.0, 136.0, 11.0, 35.0], "area": 227, "segmentation": {"size": [512, 512], "counts": "Uej73i?4M3M4K4M3M4L3L4N201gK"}, "image_id": 32, "id": 467}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 174.0, 1.0, 3.0], "area": 3, "segmentation": {"size": [512, 512], "counts": "^eo73_:"}, "image_id": 32, "id": 468}, {"iscrowd": 0, "category_id": 1, "bbox": [176.0, 319.0, 15.0, 16.0], "area": 126, "segmentation": {"size": [512, 512], "counts": "XZh22m?2N1N3N2N2N2M21N1N3N2N2M2O2NmUP5"}, "image_id": 32, "id": 469}, {"iscrowd": 0, "category_id": 1, "bbox": [136.0, 323.0, 130.0, 124.0], "area": 7503, "segmentation": {"size": [512, 512], "counts": "nZT22m?2N1O2M3N2\\@E`??N2M2O2N2N2M3N1O2N2M3N2N1O2M3N2N2O0100000TBaNV=_1gBdNX=\\1aBbNI3g=[1^BdNI3g=[1^BmN`=U1^BmNb=S1[BPOe=e11000O1000O1000O100000O10O1N2N2M2ODcBXN[=h1gBXNW=h1jBYNS=g1PCYNnN2N2N1N3N2N2N2M2O2N2N2M3N[cj3"}, "image_id": 32, "id": 470}, {"iscrowd": 0, "category_id": 1, "bbox": [108.0, 403.0, 14.0, 12.0], "area": 111, "segmentation": {"size": [512, 512], "counts": "i\\f13j?3M4O001O01O010O000O2M2M3N\\cR6"}, "image_id": 32, "id": 471}, {"iscrowd": 0, "category_id": 1, "bbox": [215.0, 448.0, 66.0, 64.0], "area": 2398, "segmentation": {"size": [512, 512], "counts": "eo[32k?3N2M4M2M4M2M3N3L3N3L3M3N3L3N3L3N2O2O010O01O01O01O001OO1N2M3N2O100OO2N1N3N1N2O3M2O1O2O0O2O0O101N3N2M2O1L3N2N3L3010O01M2N2M4M2M4M2MdQc3"}, "image_id": 32, "id": 472}, {"iscrowd": 0, "category_id": 1, "bbox": [241.0, 0.0, 8.0, 3.0], "area": 15, "segmentation": {"size": [512, 512], "counts": "P`h31o?001O00001O00NR`S4"}, "image_id": 33, "id": 473}, {"iscrowd": 0, "category_id": 1, "bbox": [235.0, 21.0, 84.0, 54.0], "area": 2269, "segmentation": {"size": [512, 512], "counts": "Rae31m?3N2M3N1O2\\@F^?`0N101000O0100O1N1N3N2N10100O10O10O10O1000O10O10O1000O10O10O1000O10O1M21O1M2O2N2M2O2O10O1000O01000M3NO10O010O10O10O0100O2O1O2M3N1N3N2M3N1N3N2N1N3N2M2O2M3N2MQ_P3"}, "image_id": 33, "id": 474}, {"iscrowd": 0, "category_id": 1, "bbox": [305.0, 168.0, 33.0, 32.0], "area": 559, "segmentation": {"size": [512, 512], "counts": "feh42m?3N2M3M2O2M3M3N1N3M10O00010O00010O00010O00102M2N3N2M2N3N1N3M2O2MYjf2"}, "image_id": 33, "id": 475}, 
{"iscrowd": 0, "category_id": 1, "bbox": [96.0, 62.0, 82.0, 70.0], "area": 3074, "segmentation": {"size": [512, 512], "counts": "mR`11m?2N3M2N3M2N3L3N3M2N2N3M2N3M2N3O010O010O010O010O010O010O01O01O010O010O010ON3O010O010O0010O010M2M4^AhNX>Z1eAjNZ>^1010O010O010O01O010O010O010O010O01O010O010O0N3M2VObA0a>MbA0`>NbA0`>MdA0_>McA1_>MdA0_>LdA1_>McA1k[W5"}, "image_id": 34, "id": 476}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 81.0, 62.0, 66.0], "area": 2839, "segmentation": {"size": [512, 512], "counts": "a2i1W>01O00010L3N201O01O01O01O00010O0001O01O01O01O01O01O00010O0001O01O01O01O01O01O00010O0001O01O01O01ON3K4M3M3mNWAl0Q?K4M3M4L3L4M3M4LklP7"}, "image_id": 34, "id": 477}, {"iscrowd": 0, "category_id": 1, "bbox": [92.0, 95.0, 12.0, 10.0], "area": 67, "segmentation": {"size": [512, 512], "counts": "RS^12l?3N1010O010O010O010N1Nol[6"}, "image_id": 34, "id": 478}, {"iscrowd": 0, "category_id": 1, "bbox": [75.0, 123.0, 81.0, 67.0], "area": 3170, "segmentation": {"size": [512, 512], "counts": "bdU11m?3M2N3M2N3M2N3M2N3M2o@XOi>j0TAYOl>m0010O010O010O010O010OO2M2N3M2O20O010O010O010O010O010O010O010O010O^AeN_>_10O010O01O0N3M2N3M2N30O010O010O010O010O010N1N3M2N3M2N3WO`A@06b>8`A@06c>7`A@06b>8hAF[>7hAFZ>8i0M2N3Mika5"}, "image_id": 34, "id": 479}, {"iscrowd": 0, "category_id": 1, "bbox": [265.0, 131.0, 78.0, 74.0], "area": 3211, "segmentation": {"size": [512, 512], "counts": "^eT41m?2N3M2N3M2M4M2N3M2N3N1IUOYAl0d>WO\\Ah0b>ZO\\Ai0`>[O]Ag0a>=N3M2O2O010O010O010O010O010O010O010O00010O010O010O010O010O010O010O010O001M2N3M20001O01mNnA7Q>GQB9P>DSBk=_OXBa0g=\\O\\Bd0e=YO]Bf0d=XO_Be0d=XO^Bf0d=XO_Bd0e=YO]Be0a>M2N3M2N3M2N3M2MbZd2"}, "image_id": 34, "id": 480}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 162.0, 21.0, 48.0], "area": 518, "segmentation": {"size": [512, 512], "counts": "R5\\1d>00TO^A:c>BaA>^>_OeAa0[>\\OhAd0Y>XOkAh0f>00010O00010ON2M4L3M3M4L3M3MYZe7"}, "image_id": 34, "id": 481}, {"iscrowd": 0, "category_id": 1, "bbox": [45.0, 172.0, 95.0, 70.0], "area": 3590, "segmentation": {"size": [512, 512], "counts": "]ff01m?2N3M2N3M2N2N3L3N3M2N3M2N3M2O20O01N1N3N110O010O010O010OiN\\AR1e>kN^AU1f>01O010O010O01O0N3M2N3M2N3O010O010dA_NW>e110O010N110O010O010O01O010O01N1N30O010O010N1N3M2^NfA\\1`>N3M2N3M2N3M2N3M2N30O010O010ON3M2N3M2N3M2N3M2Nkii5"}, "image_id": 34, "id": 482}, {"iscrowd": 0, "category_id": 1, "bbox": [225.0, 178.0, 81.0, 69.0], "area": 3296, "segmentation": {"size": [512, 512], "counts": "lf`32l?2N3M2N3DGm@;Q?Gl@aNgA`1]>O010O010O01O01O0M4M2N3M2lNVAP1o>M2N3L3N3M2N3M2N3M2MciV3"}, "image_id": 34, "id": 483}, {"iscrowd": 0, "category_id": 1, "bbox": [355.0, 179.0, 87.0, 69.0], "area": 3160, "segmentation": {"size": [512, 512], "counts": "bfa51m?2N3M2M4M2N3M2M3N3M2N3M2M4M2010O010O0010O010O010O010O01O01O010O010O010\\AkN[>U1cAmN^>S1_APO`>X110O010O010O010O01O010O01O010O010M2N2M4M210O010O010ON3M2N2M4M2N3M201O0010OlNWAQ1m>O0M4M2N3M2N2M4M2N3G\\@2f?L\\@1ZYS1"}, "image_id": 34, "id": 484}, {"iscrowd": 0, "category_id": 1, "bbox": [15.0, 224.0, 96.0, 71.0], "area": 3731, "segmentation": {"size": [512, 512], "counts": "ng73k?3M2N3M2N3M2N3M2N3M2N3M2M4O010O0O2L310O010O01O010O010iNZAR1f>lN]AT1g>010O010O010O0ZAmN`>S1]APOb>W110O0010ON3M2N3M00111O010O010O0010O010O01O0010O010O010O010O010O010O0M4M2N3L3N3M2M4O0010O01O010O010M2N2M4M2N3L3N3M2E_@4h?NSXX6"}, "image_id": 34, "id": 485}, {"iscrowd": 0, "category_id": 1, "bbox": [340.0, 235.0, 82.0, 70.0], "area": 3041, "segmentation": {"size": [512, 512], "counts": 
"^XZ51m?3M2N3M2N3M2M4M2N3M2N3M2N3M2N3M2N30O010O010O010O01O010O010O0N2N30O010O010O010O01bA`N[>c1O0010O010aNgAU1Y>iNiAX1W>eNlAZ1]>10O010O010O01O010O010O010OO1N00O1003M2N3M2N2O2O0O2M2Fn@DU?9m@EU?9;M2N3M2Nng\\1"}, "image_id": 34, "id": 486}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 284.0, 89.0, 66.0], "area": 3402, "segmentation": {"size": [512, 512], "counts": "^9?_?3M2N3M2M3N3M2N3N110O010O010O01OO2M201O010O0010O0hN]AT1g>010O01O01O010N1N3M210O010O010N1N2`AdN\\>`1010O010O010OO2M210O00010O010O010O010O010O01O0N2N3M2N3L3N3M2N3M2N3N1010O00N3M2M4M2N3M2N3M2M`Vc6"}, "image_id": 34, "id": 487}, {"iscrowd": 0, "category_id": 1, "bbox": [178.0, 286.0, 77.0, 74.0], "area": 2974, "segmentation": {"size": [512, 512], "counts": "XZi22l?3M2N3M2N2M4M2N3M2N3O0HWOZAi0c>YO\\Ah0b>[O[Ah0b>WOaAj0a>QObAn0h>10O010O010O010O0N3M2N3M2N3L3N3M2N3M2N3MfUP4"}, "image_id": 34, "id": 488}, {"iscrowd": 0, "category_id": 1, "bbox": [316.0, 289.0, 81.0, 69.0], "area": 3126, "segmentation": {"size": [512, 512], "counts": "SZn42k?4M2N3M2N3L3N2j@_Ok>d0RA^Ol>l0M4M2N3M2N3L21M4M200010O010O010O010O00010M2M40O010O010O01O01OcAcNW>c11O010O010aNhAT1X>jNjAW1U>fNoAY1\\>1O010O01O010O010O01O010O01ON00000003M2M4M2N3O010M2N2Hm@AV?HVA;g>GWA;g>HVA;g>GWA;g>a0M2N3M2N3M2N3O010O010O010ON3M2N3M20_F"}, "image_id": 34, "id": 490}, {"iscrowd": 0, "category_id": 1, "bbox": [272.0, 331.0, 91.0, 80.0], "area": 3932, "segmentation": {"size": [512, 512], "counts": "f[X41m?2N3M2N2M4M2N3M2N3M2N3O010O010O010JTOVAl0h>VOYAi0e>ZOZAg0c>[O^Ad0`>_O_Ab0]>AcA?[>DeAFhA:V>IjA7T>JlA6T>KlA5T>JlA6T>KlA5R>LmA5T>l0O01O010O010O010O010O010O01O010O01O010O010O010O010O010O010O010O0010O010O01QOjA3U>KnA4S>LlA5S>KnA4S>LlA5S>IPB6Q>IoA8P>HQB7o=JPB7P>HQB7o=JPB6Q>IPB6P>KoA3T>JnA4T>JoA3T>@aA5=9T>@aA4?8Y>EiA9Y>EjA8P?N3M2NnSZ2"}, "image_id": 34, "id": 491}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 333.0, 49.0, 70.0], "area": 2194, "segmentation": {"size": [512, 512], "counts": "]:e1[>010O010O010O010O010O010O0O2M2O20O010O010O010O010O010O010lNkA;U>CmA>S>_OPB`0P>^ORBc0n=ZOUBe0k=YOWBh0i=UOZBj0f=TO\\Bj0g=SO\\Bj0f=TO\\Bj0_>M2N3M2N3M2N3M2N3M2N[TW7"}, "image_id": 34, "id": 492}, {"iscrowd": 0, "category_id": 1, "bbox": [454.0, 368.0, 58.0, 65.0], "area": 2322, "segmentation": {"size": [512, 512], "counts": "e\\S72l?2N3M2N3M2M4M2N3M2N3O0010O010ON3M2N3L3N3M2N2N3N1010O010O01O010O010O010O010O010O01N110O010O010O010O010O010O00010fNcAo0]>oNfAQ1Z2"}, "image_id": 34, "id": 493}, {"iscrowd": 0, "category_id": 1, "bbox": [125.0, 400.0, 72.0, 65.0], "area": 2867, "segmentation": {"size": [512, 512], "counts": "jmn12l?2N3M2N3M2N3M2N3L3N3M2N3M2N3M2N3M2N3M2N3N110O010O010O010O010O01N1N30O010M2N3M2010O010O010O01O0O20O010N1N3O01ON3M2YOfAF]>7eAG]>7fAG\\>6fAH\\>7fA@F1g>?bA^OJ0f>b0c00OO2M2N3M2N3M2NSSm4"}, "image_id": 34, "id": 494}, {"iscrowd": 0, "category_id": 1, "bbox": [271.0, 405.0, 80.0, 60.0], "area": 2653, "segmentation": {"size": [512, 512], "counts": "amW42l?3M2N3M2M4M2N3M2N3M2N3L3N3O00010O010O010O010O010O010O00010YAlN`>T1^AnNc>X1O010O010O010O01O010O01O0O2M2N3L310O010O01O0O2M2N3M2N3L3N3M2O20O010O010O01O0N3L3N3M2N3M2N3L3N3M2NhR`2"}, "image_id": 34, "id": 495}, {"iscrowd": 0, "category_id": 1, "bbox": [428.0, 419.0, 76.0, 73.0], "area": 3050, "segmentation": {"size": [512, 512], "counts": "`^f62l?2N3M2BHQAGQA;m>GQA;m>HPA;m>=M4M2N3M2N3M2M4N1010O0010O010OO2N11O010O010O010O01O010O010O01O010O010O010O010O01O010O010O01O01hNfAi0Y>UOjAj0W>SOkAn0T>POoAo0R>nNPBS1_>O010O0N3M2M3N3M2N3M2N3M2M4M2N3Mda3"}, "image_id": 34, "id": 496}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 432.0, 81.0, 63.0], "area": 2936, 
"segmentation": {"size": [512, 512], "counts": "dnm21l?4M2N3M2N3M2N3M2M4M2N2N3M2N3M2N3M2010O0N3M2O2O010O0010O0010O010O01ZAlN_>S1_AoNa>Y10O010O010O010O010O00010O01N1N3M2N30O010L3O20N1N3M20N3M2N3L3N3M2N2M4M2N3M2N3L3N3M2N3M2M4M2NTbi3"}, "image_id": 34, "id": 497}, {"iscrowd": 0, "category_id": 1, "bbox": [250.0, 463.0, 75.0, 49.0], "area": 2412, "segmentation": {"size": [512, 512], "counts": "`_m33k?2N3M2N2N3M2M4M2N3M2N3M2O2O010O0TAoNi>T110O010O010O01O001O001O001O001O00001O00M3N2N2N2N2N2N2N21O001O001O001O00001O0000N10O3M2N3M2N3M2N3L3N3M2N2N3M2N3M2M4MYQm2"}, "image_id": 34, "id": 498}, {"iscrowd": 0, "category_id": 1, "bbox": [404.0, 474.0, 56.0, 38.0], "area": 1372, "segmentation": {"size": [512, 512], "counts": "m_Z63k?2N2M3N2N2M3O11O00HBm@?P?Dn@>P?:N2M3N2O1001O001O001O001O001O001O001O00001O001O001O001O001O001O001O001O001O3Id@D_?7c@I`?2c@Mh?01O00Q`i0"}, "image_id": 34, "id": 499}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 494.0, 13.0, 18.0], "area": 170, "segmentation": {"size": [512, 512], "counts": "^?b0^?01O001O001O001M2N2N3M2N_Pi7"}, "image_id": 34, "id": 500}, {"iscrowd": 0, "category_id": 1, "bbox": [23.0, 503.0, 21.0, 9.0], "area": 101, "segmentation": {"size": [512, 512], "counts": "no;2l?2N2N2O11O001O001O001O001O001O001O001O00Q`Y7"}, "image_id": 34, "id": 501}, {"iscrowd": 0, "category_id": 1, "bbox": [206.0, 0.0, 6.0, 2.0], "area": 7, "segmentation": {"size": [512, 512], "counts": "PPW31o?000000001ooe4"}, "image_id": 35, "id": 502}, {"iscrowd": 0, "category_id": 1, "bbox": [347.0, 0.0, 125.0, 123.0], "area": 6997, "segmentation": {"size": [512, 512], "counts": "V`]51n?3M3N1N3N1LE`@>^?4N1O2N1O00O100O1O100O11O2N1O2N2N1O2N1O2N2N1O2N2N1O2N1O2N2N1O2N1O2N2N1O2N2N1O2N1O2N2N1O2N1O2N2N1O2N2N1O2N1O2ZCZMn;h2oCZMPOXC5e[?5N3M2O1N3M2O1N0000010O0001O02N2N3AkNoAV1o=mNnAU1Q>lNmAW1P>kNnAW1P>lNnAV1P>kNnAW1P>>N3M100O0001O01O002O2M2N2O2M2N3M20010O01O01O0O2M2N2O2M2N0010O000IRC^MnCeAI3d0X>@gAJ3f0V>^OjAI3i0S>[OWBe0i=YOZBf0g=WO[Bj0^>O01O010O010O010O01N1N3M2N3M2M4M2N_L"}, "image_id": 39, "id": 516}, {"iscrowd": 0, "category_id": 1, "bbox": [415.0, 131.0, 80.0, 69.0], "area": 2730, "segmentation": {"size": [512, 512], "counts": "]d_61n?3M2N2O2M2N2N3N1N2N2N3N1N2O2O00010O00001M2N2N2O2M20001ZAhNa>]10O0000010O000010O0001O01O01M2O1N2N2NO1O2O0O1O1O1O2N1O1O1O1O20O1O2N2O0010O01M2O2N1N3N1O2M2JPA[OR?b0QA[OQ?:m@L[?1h@LZ?2h@L[?1;N_[8"}, "image_id": 39, "id": 517}, {"iscrowd": 0, "category_id": 1, "bbox": [328.0, 149.0, 84.0, 62.0], "area": 2580, "segmentation": {"size": [512, 512], "counts": "\\UT51o?1N2N3M2]@I[?9c@IZ?9d@IZ?a0N1N3M2OO01O00102M2N2O2M2N20N3N1N3M201O01N1N2N3N1N3M2O2M10O2N2O2M2N01O01O00010O0001O01O01O01JYAROg>n0[AQOd>P1501O01O01O01O00010O0001O101N3M2O2M2N3N1N2N3M2O2M2N3N1N2Ngja1"}, "image_id": 39, "id": 518}, {"iscrowd": 0, "category_id": 1, "bbox": [366.0, 198.0, 94.0, 56.0], "area": 2768, "segmentation": {"size": [512, 512], "counts": "[Wg51o?2M3N2M2O2M3M3F_OQAb0m>APAb0n>_OQAc0l>810O00010O010O010KQOXAo0i>3010O010O00010O010O010O1M4N3N2N20O0O2N2M2O2M3M3N1N2OO010O01O010O01O010O01O010O01O010F\\AWOd>j0^ATOa>l0bAQO_>o0bAPO]>P1;O2M2N2OO010O010O01O01O0101N3M21000M3M2O2M3N2M2O2M3MUii0"}, "image_id": 39, "id": 519}, {"iscrowd": 0, "category_id": 1, "bbox": [408.0, 276.0, 94.0, 59.0], "area": 3006, "segmentation": {"size": [512, 512], "counts": "bY\\63m?2M3N2M3N3L3N2N2M2IXOTAg0l>\\OQAe0o>5010O010O010O010O0100O0100O0100O3N2M3N2M3N3L3N1N10O010O10O10O102M3N2M3N2M10O010O010O01000OLXAQOi>o0YAoNf>Q150O010O010O010O010O10O10O010O010O010O010O02O2M3N2N3L3N2M3N2M3N2M4M2Mbf4"}, "image_id": 39, "id": 520}, {"iscrowd": 0, 
"category_id": 1, "bbox": [430.0, 334.0, 82.0, 58.0], "area": 2752, "segmentation": {"size": [512, 512], "counts": "oZg63l?3N2N3L3N2M4M2M1000O0102M4M2M3J7L3N2N3L3N2NO0100O0100O010O10O10O4M3L3N3L4MO10O010O010KYAPOh>P1310O010O010O10O10O010O010O10O10O010O010O10O11N4M2M3N1N01000O010O010O01000O]E"}, "image_id": 39, "id": 521}, {"iscrowd": 0, "category_id": 1, "bbox": [451.0, 384.0, 61.0, 62.0], "area": 2387, "segmentation": {"size": [512, 512], "counts": "alQ73:NW?6f@MW?5f@OV?5g@NV?`0N3L4M3M2M4M3M1N100O10O10O1000O011O3O001M3M2M4M2N000O10O1000O10O10N200O0100000O0100ZOTOXBl0h=XOTBg0m=]OnAd0R>@jA`0U>EgA;Y>e01000O10O10001O2M10mC"}, "image_id": 39, "id": 522}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 123.0, 66.0], "area": 4506, "segmentation": {"size": [512, 512], "counts": "a1;c?2O1N2O1N2O2N1N2O1N2O1N2O2UOQOaBQ1[=SOdBn0\\=RO[BNGR1n=QOWB1Io0P>POUB[1k=fNRB\\1n=dNPB_1o=90000000001O0000000000001O00000000001O0000O1K5K5K5L4K5K5K5K5M3001O00001O001O00001O00001O001O00001O001O00001O00001O001O00001O010O01O01O01O010O01O01O010O01O01O010O01O01O010O00010O010O0M3M4M2M4M2M4M2M3M4M2Mc_R6"}, "image_id": 40, "id": 523}, {"iscrowd": 0, "category_id": 1, "bbox": [324.0, 16.0, 166.0, 138.0], "area": 14053, "segmentation": {"size": [512, 512], "counts": "ZRR51m?3L3N3M2N2M4M2N3L3N3M2M3N3M2M4PBhNT=Z1iBiNU=Z1hBiNU=Y1hBjNU=Y1iBiNU=Z1gBjNV=X1gBkNV=Y1gBjNV=Q201O01O01O010O01O01O010O0010O0010O0010O00010O010O01O01O010OO2L3M3N3L3M4N101O01O010O01O01O010O01O010O01O01O010O01O01O010O010O00010O010O01O01O010O010O00010O0bCeLX00M4M2M4M2M3N3Dn@EU?9m@EU?8o@DU?8;N3L3Nlm:"}, "image_id": 40, "id": 524}, {"iscrowd": 0, "category_id": 1, "bbox": [144.0, 17.0, 134.0, 139.0], "area": 10986, "segmentation": {"size": [512, 512], "counts": "fSX21l?3N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N2N3L3N3L3N2M4M2M4N110O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O01O010O01O01L3N3L3N2N3L3N3L3N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N2M]nd3"}, "image_id": 40, "id": 525}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 259.0, 198.0, 153.0], "area": 18003, "segmentation": {"size": [512, 512], "counts": "S8[3e<10O010O00010O010O0010O0010O010O0010O0010O0010O010O00010O010O01O01O010O01O010O01O010O0eCgLPPOaAP1^>oNcAR1\\>lNgAS1d>O0N3M3N1N3N1N3M3N1N3N2M@"}, "image_id": 42, "id": 533}, {"iscrowd": 0, "category_id": 1, "bbox": [420.0, 9.0, 31.0, 32.0], "area": 515, "segmentation": {"size": [512, 512], "counts": "jPb63l?1N3N2M2O2M3M2O2M3N1N3000O01000O01000M2O2M3N1N3N2M2O2M2O2M3N[_n0"}, "image_id": 42, "id": 534}, {"iscrowd": 0, "category_id": 1, "bbox": [106.0, 26.0, 104.0, 70.0], "area": 3305, "segmentation": {"size": [512, 512], "counts": "^Qe11m?3L3N2M4M2O2O00010O3NO01O01O01O0N3L3N2M4O0010O00010O01O01O010O00010O010O00010O010O00010O0N3M20010O0010O]Oo@7NBP?5TA=l>_OXA`0T?0010O01N1N2M4L3N3L3N2M4O0eAhNn=Y1oAiNQ>W1lAmNT>R1iAQOW>\\110O0010O00010O010O00010O010O00010O010O0_OgA[OY>b0kA\\OW>a0kA]OW>`0lA]OX>`0kA]OW>`0lA]OW>a0kA]OX>?f0N3L3M3N3Ldnf4"}, "image_id": 42, "id": 535}, {"iscrowd": 0, "category_id": 1, "bbox": [366.0, 41.0, 39.0, 35.0], "area": 646, "segmentation": {"size": [512, 512], "counts": "gQg52m?1N3N2M2O2M3N2M2O2N20O01000O01000O10O10O10O10O10O10O10O10M3N1N3N2M2O2M3N1O2MV^e1"}, "image_id": 42, "id": 536}, {"iscrowd": 0, "category_id": 1, "bbox": [430.0, 48.0, 64.0, 45.0], "area": 2157, "segmentation": {"size": [512, 512], "counts": "^Rg66c?7I7I8H7N200000001O0001O00000001O000001O0001O00000000010O000002N001O01O00000000010O00000000010O0000000001O01O000000N2J7H7J6J6I]n8"}, 
"image_id": 42, "id": 537}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 50.0, 13.0, 15.0], "area": 133, "segmentation": {"size": [512, 512], "counts": "b1;e?1O01O010O01O010OO2M2N2M]^i7"}, "image_id": 42, "id": 538}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 64.0, 7.0, 18.0], "area": 65, "segmentation": {"size": [512, 512], "counts": "P2b0_?M2M4M2N3L3No]l7"}, "image_id": 42, "id": 539}, {"iscrowd": 0, "category_id": 1, "bbox": [287.0, 79.0, 25.0, 36.0], "area": 502, "segmentation": {"size": [512, 512], "counts": "Tc_41l?3L4M4K4M3M4N100010O00m@[Ok>e0RA_On>i00001O01@QAMo>OVA0j>MYA4g>G]A9c>DaA;R?M3L4MmlS3"}, "image_id": 42, "id": 540}, {"iscrowd": 0, "category_id": 1, "bbox": [110.0, 81.0, 51.0, 47.0], "area": 1448, "segmentation": {"size": [512, 512], "counts": "[Sg13k?3L3N2M4L3N3L3M3N3L30010O010O00010O0010O0010O00010O010O00010O0010O0010O00010O010O00010O0010L3_OQAOR?MQA0R?NQAOQ?NRAOQl_5"}, "image_id": 42, "id": 541}, {"iscrowd": 0, "category_id": 1, "bbox": [399.0, 82.0, 22.0, 24.0], "area": 360, "segmentation": {"size": [512, 512], "counts": "QcW65h?3L4L5K40000010O0000010O00010O0O1M3L5K4L4M]]]1"}, "image_id": 42, "id": 542}, {"iscrowd": 0, "category_id": 1, "bbox": [316.0, 87.0, 66.0, 51.0], "area": 2184, "segmentation": {"size": [512, 512], "counts": "fSn45h?3L4M3L5K4M3L4L5M200010O0000010O000010O000010O0000010O000010O000010O0000010O000010O000010O0000010O00010O0000010O0M3L5K4M3L4M4K4L4MmlP2"}, "image_id": 42, "id": 543}, {"iscrowd": 0, "category_id": 1, "bbox": [206.0, 91.0, 55.0, 44.0], "area": 1499, "segmentation": {"size": [512, 512], "counts": "lSW32k?3M4M2M3M4M2M3M4M2M4N100010O00010O010O0N2M4O00010O010O00010OO1M4L3N3L31O01O01O01OO2M2M4L3N2M4L3N2M4L3N3L3M3NP]m3"}, "image_id": 42, "id": 544}, {"iscrowd": 0, "category_id": 1, "bbox": [180.0, 98.0, 24.0, 22.0], "area": 310, "segmentation": {"size": [512, 512], "counts": "^Sj22l?2N3M2M4M2O2O010O01O01O010O010O010OO2M2N3M2N2Mjli4"}, "image_id": 42, "id": 545}, {"iscrowd": 0, "category_id": 1, "bbox": [415.0, 115.0, 61.0, 44.0], "area": 2208, "segmentation": {"size": [512, 512], "counts": "cc_6[1e>00000001O000000000000000000000000008H00000000000000000000000001O000000000000000000000000000000000000000000000000C=00000Tla0"}, "image_id": 42, "id": 546}, {"iscrowd": 0, "category_id": 1, "bbox": [171.0, 117.0, 27.0, 26.0], "area": 411, "segmentation": {"size": [512, 512], "counts": "Ude21m?3M2M4M2N3M2N2O2O010O010O010O0010O0010ON3M2M4M2N3M2NUll4"}, "image_id": 42, "id": 547}, {"iscrowd": 0, "category_id": 1, "bbox": [244.0, 140.0, 107.0, 65.0], "area": 3430, "segmentation": {"size": [512, 512], "counts": "nTj32k?3N2N3M2]@E^?`0N3M210O0i@^OP?i010O010O01O010O01O001M2N3L31O010O010O01O01O010O010O01O010O01O010O010O01O010O01O010O010O01O01O010O010O01O010O01O010O010O01O010O01O010O010ON3L3N2O2O010ON2M4M2N3M2M4M2N3M20010O010O001N1N5J3N3M2N3LiZ`2"}, "image_id": 42, "id": 548}, {"iscrowd": 0, "category_id": 1, "bbox": [88.0, 141.0, 70.0, 52.0], "area": 2127, "segmentation": {"size": [512, 512], "counts": "]U\\12k?3L4M4K4M3L4M4K4M3O20O0000010O00010O0000010O00010O0000010O00010O0000010O00010O0000010O00010O0000010O00010O0000010XOXA6h>F\\A:d>C_A>a>]OdAb0m>000010O0O1M4K4M3Lej`5"}, "image_id": 42, "id": 549}, {"iscrowd": 0, "category_id": 1, "bbox": [160.0, 153.0, 37.0, 30.0], "area": 665, "segmentation": {"size": [512, 512], "counts": "ZU`22k?4L3N2M4L3M301O00010O0010O00010O0010O0010O00010O0010O00010ON2M4L3M3N3L3MP[m4"}, "image_id": 42, "id": 550}, {"iscrowd": 0, "category_id": 1, "bbox": [405.0, 177.0, 70.0, 45.0], "area": 2087, "segmentation": {"size": [512, 
512], "counts": "leZ62n?4L4L4K5L4L4L4K5L00O10O1000O10O1000O10O1000O10O1000O10O1000O10O1001N3N00000O0100000O0100000O0100000O0100000O0100000O0100000O4M4L4L4K5L4L4LdYb0"}, "image_id": 42, "id": 551}, {"iscrowd": 0, "category_id": 1, "bbox": [67.0, 200.0, 47.0, 59.0], "area": 1581, "segmentation": {"size": [512, 512], "counts": "_gQ13k?2XOM`A7]>L`A6]>MaA6\\>M`A6]>MaA5]>N_A6_>K_A7a>e00O0010O0010O010O00010O010O0010O0010O010O00010O010SO_A9a>A_AG3g0_>_OgA2F5b>FlA2E7_>EnA1F;P?BRA>Y?0O01N1N3L3N2M]hV6"}, "image_id": 42, "id": 552}, {"iscrowd": 0, "category_id": 1, "bbox": [333.0, 207.0, 16.0, 17.0], "area": 167, "segmentation": {"size": [512, 512], "counts": "ifV52k?4M2M4M21O01O010O01O01ON3M2M4L_Ya2"}, "image_id": 42, "id": 553}, {"iscrowd": 0, "category_id": 1, "bbox": [239.0, 212.0, 63.0, 44.0], "area": 1513, "segmentation": {"size": [512, 512], "counts": "mfg32n?2N3L3N2M4M2M3N3M2M3N3L3N2M4M00O0100O010O0102M3N2N3L2O0O010O0100O0100O0100O010O0102M4M2N2M4MO0100O0100O010O0100O012N2M4M2M3NfhX3"}, "image_id": 42, "id": 554}, {"iscrowd": 0, "category_id": 1, "bbox": [392.0, 215.0, 16.0, 13.0], "area": 137, "segmentation": {"size": [512, 512], "counts": "kVT64l?4K2O00O0100000O0100000O014L4LQic1"}, "image_id": 42, "id": 555}, {"iscrowd": 0, "category_id": 1, "bbox": [309.0, 217.0, 30.0, 41.0], "area": 714, "segmentation": {"size": [512, 512], "counts": "jgj41m?2M4L3M3N3L3M4M2M3M4M2M3O20O00010O01O01M2N2M4L3ZOm@=V?_Om@>\\?M4L3N2MnXf2"}, "image_id": 42, "id": 556}, {"iscrowd": 0, "category_id": 1, "bbox": [140.0, 220.0, 19.0, 28.0], "area": 311, "segmentation": {"size": [512, 512], "counts": "aWV23j?4L3N2M4L3M4L3O1010O0N2M4M2M3M4L3M4MRY`5"}, "image_id": 42, "id": 557}, {"iscrowd": 0, "category_id": 1, "bbox": [155.0, 237.0, 42.0, 51.0], "area": 1207, "segmentation": {"size": [512, 512], "counts": "bh]22l?3M2M4M2N2N3L3N3M2M4M2N2M4M2N3M2O20O010O0010O0010O010L3N3L3N2M4M2M4L3N3L3N3L3N2M4L[Xm4"}, "image_id": 42, "id": 558}, {"iscrowd": 0, "category_id": 1, "bbox": [407.0, 238.0, 105.0, 67.0], "area": 3061, "segmentation": {"size": [512, 512], "counts": "kg[61m?3M2M4e@Gk>;SAGk>l0fAVO[>i0bA[O]>f0_A]Ob>P101OfN_AW1d>1O01O010O01O01O0oNZAi0e>TO^Al0c>QO_Ao0j>0O0003NO010O00M4O000010O01O0@k@6T?Go@9R?CRA010O00010O010O00010O010O00010O010O00010O01O01O010O0M3N3L3N3M200010M2N3L3N2M4M2M4L3N2M4M2M4M2M3N3L3NnVZ3"}, "image_id": 42, "id": 562}, {"iscrowd": 0, "category_id": 1, "bbox": [4.0, 278.0, 22.0, 16.0], "area": 185, "segmentation": {"size": [512, 512], "counts": "mX22k?3N3M210O01O01O01O010O01O01O01O010O01M2N2MVgb7"}, "image_id": 42, "id": 563}, {"iscrowd": 0, "category_id": 1, "bbox": [271.0, 292.0, 88.0, 168.0], "area": 4719, "segmentation": {"size": [512, 512], "counts": "SmW41e01a>1\\A3`>1]A1a>1\\A2a>2WAFN;4EZ>=bA52AY>]1M3N3L301O00010O01ON3M2M4N100O0N10O0SO[BDc=<`BE_=9dBD^=9eBE^=7eBF^=8eBE^=:bBCa==`B@Q=DmBl02YOAKj<0hC3Li00VO`<2aC1Og0R=ZOlB3Of0Q=[OmB10f0Q=[OgBFN<7f0Q=[OhB74a0P=4mBOQ=b1L2OO2O3M20010O0M3_OhBXNX=g1kBVNU=k1nBRNT=k1oBRNS=m1PCPNog0^BXOCNQ>k0]BSOj=P1a03M2010O0010ON2M4L301O01OO2L3N2M4M2Hb@Kb?1a@Lb?2lV\\2"}, "image_id": 42, "id": 564}, {"iscrowd": 0, "category_id": 1, "bbox": [427.0, 307.0, 71.0, 132.0], "area": 3557, "segmentation": {"size": [512, 512], "counts": "Pme62k?4M2M4L3N2k@Af>c0VAAi>?TADm>;QAGo>f0O01O01O0O2M2ZOlN_BU1^=nNbBS1Z=QOeBo0Y=SOeBo0X=TOeBP1W=TOfBn0X=TOeBP1Z=POgBm0[=TObBk0a=UO]Bk0f=d03M4M2N21M2M4N100WOPNaCn1_4XAIk>4YAHk>5WAIk>4XAIk>4c0MXf6"}, "image_id": 42, "id": 565}, {"iscrowd": 0, "category_id": 1, "bbox": [157.0, 313.0, 70.0, 52.0], "area": 2063, "segmentation": {"size": [512, 512], "counts": 
"hj^21m?3L3N3L3N2N3L3N3L3N3L3N2N30O010O0001VAkNh>W1O010O0010O0010O010O0001M2N3M21O010O01O01O010O01O010O01O01O010O01O010O01O010O01O01N1N3L6K3M2M4M2M4M2N^U^4"}, "image_id": 42, "id": 566}, {"iscrowd": 0, "category_id": 1, "bbox": [510.0, 332.0, 2.0, 3.0], "area": 4, "segmentation": {"size": [512, 512], "counts": "^Zo71m?2dE"}, "image_id": 42, "id": 567}, {"iscrowd": 0, "category_id": 1, "bbox": [325.0, 333.0, 106.0, 174.0], "area": 7406, "segmentation": {"size": [512, 512], "counts": "PnR51m?3\\OMPA0M5Q?Nn@1M4S?Mn@=n>i2n:lMREU2k:nMUEBAj1Z;dNUE_OEl1V;fNTE\\OHEIV2[;hNUEYONo1m:hNUEWO1Q2g:kNXEPO4U2b:mNZElN4W2a:QOnEP1R:ROlEm0W:SOjEj0Y:VOeEKROGW;?eEIWOHR;b0dEF]OGl:d0fEEAEi:f0fEECCg:g0fEFG_Og:h0cEHH^Og:g0dEIHUOZON^;Q1bEIIUOZOO];P1dEKETO\\OO];P1dEMb;2mCQOJl0Y<3kCSOLk0Y<2gCWOOg0Z<2eCYO1f0Y<2bC\\O2d0]XCEhg0mA\\OR>e0jA^OW>V1O010O00010O010O010O00001L21M3N3M2N2_OeAZO_>c0cA[O_>c0cA[O`>a0dA[O_>c0cA[O`>1VA=^?010OO2M2M3N3M2NiTa3"}, "image_id": 42, "id": 571}, {"iscrowd": 0, "category_id": 1, "bbox": [90.0, 394.0, 76.0, 46.0], "area": 1481, "segmentation": {"size": [512, 512], "counts": "j\\]12k?4M2N2M4M2N3M20010O0010O010O00010O010O0010O0010O010O00010O010O010O000_Og@\\?10O00010O010OO2L3N2N3O010O00010O010O010O00010O010O00010O010ON3M2M3N3M2M4M2M3NPc\\5"}, "image_id": 42, "id": 572}, {"iscrowd": 0, "category_id": 1, "bbox": [178.0, 401.0, 66.0, 55.0], "area": 1897, "segmentation": {"size": [512, 512], "counts": "i]i21m?2N3L3N3M2N3L3N3M2N2M4M2N3N101O0N3M2N2NO103M201O0010O0010O010O010O0QOZAd0g>YO\\Ag0c>WO_Ai0a>UObAk0i>0010O010O00O2O0010O01O01O010O010O0N3L3N2N3M2M4M2N3M2M3NebU4"}, "image_id": 42, "id": 573}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 403.0, 34.0, 44.0], "area": 1053, "segmentation": {"size": [512, 512], "counts": "V=b0^?0N3M2M3N3L3N3L3N201O010O0010O0010O0001M2M4N110O01O01@RALn>2TAMn>OVANl>0VAMm>1UAMn>OVANl>0gc^7"}, "image_id": 42, "id": 574}, {"iscrowd": 0, "category_id": 1, "bbox": [476.0, 404.0, 36.0, 76.0], "area": 1916, "segmentation": {"size": [512, 512], "counts": "b^^73j?3M3N3UOFkA=R>FlA=Q>ElA>Q>FlAFkA>Q>FlAFkA>Q>ElA>Q>j0N3L3N30O0010O0010O00010O0010O0010L3N3L3M3N3L3NVC"}, "image_id": 42, "id": 575}, {"iscrowd": 0, "category_id": 1, "bbox": [29.0, 424.0, 80.0, 58.0], "area": 2452, "segmentation": {"size": [512, 512], "counts": "Rn>2k?3M3M4M2M4L3O1g@\\OV?h00O00010O010O00O2N110O01O01O010O01O01O010O01O01O010O01O01M2N3L3N2M4M2M40O0010O0010O0010O0010O0010O00010O010OkNaAi0_>UOdAj0]>ROfAo0Y>oNiAQ1d>0O01ROSAg0m>VOVAk0P?N1N2M4M2M4M2M3N3L3NiQY6"}, "image_id": 42, "id": 576}, {"iscrowd": 0, "category_id": 1, "bbox": [394.0, 435.0, 57.0, 77.0], "area": 2268, "segmentation": {"size": [512, 512], "counts": "^_U61m?2M4M2M3k@Ec>>SAC01k>>RAD01k>m000001OUOPOfBP1X=ROhBn0V=UOjBj0S=YOmBg0Q=[OPCd0m<@RC`0l_1kAaNS>^1611N4M2O20OO1M4M2N3O00010O0M4M2M3_Ol@4W?Ik@4X?Il@4a?M^Qb3"}, "image_id": 42, "id": 578}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 473.0, 15.0, 38.0], "area": 298, "segmentation": {"size": [512, 512], "counts": "i>V1k>O0M4M2M4M2N2M4M2N3L3N3M2M3NSQh7"}, "image_id": 42, "id": 579}, {"iscrowd": 0, "category_id": 1, "bbox": [8.0, 484.0, 29.0, 28.0], "area": 528, "segmentation": {"size": [512, 512], "counts": "i_42k?4M2N2M4M2N3L3N2N3O01O01O01O001O00001O000O2M2M4L3N2M4L3NgP]7"}, "image_id": 42, "id": 580}, {"iscrowd": 0, "category_id": 1, "bbox": [491.0, 484.0, 21.0, 28.0], "area": 401, "segmentation": {"size": [512, 512], "counts": "eoe73j?3N3L3N2M4M2N3O01O01O010O01O01O010O01O01"}, "image_id": 42, "id": 581}, {"iscrowd": 0, "category_id": 1, "bbox": [32.0, 489.0, 56.0, 23.0], "area": 856, 
"segmentation": {"size": [512, 512], "counts": "m_`03j?3M3M3M3M300001O00001O00001O00001O00001O00001O00001O00001O00M3M3M3M3O1001O00001O00001O00001O00001O000N3L3M3M4Lc`c6"}, "image_id": 42, "id": 582}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 493.0, 38.0, 19.0], "area": 532, "segmentation": {"size": [512, 512], "counts": "oom21l?3N2M3M3N2M3N20000001O001O00001O00001O001O00N2N2001O00001O001O000N3M2M4M2M`P_4"}, "image_id": 42, "id": 583}, {"iscrowd": 0, "category_id": 1, "bbox": [303.0, 494.0, 35.0, 18.0], "area": 418, "segmentation": {"size": [512, 512], "counts": "oog41m?2M3N2N2M3N21O00001ON2M3001O001O00001O001O00001O001O001O00001N1M4M2N[`f2"}, "image_id": 42, "id": 584}, {"iscrowd": 0, "category_id": 1, "bbox": [244.0, 378.0, 63.0, 68.0], "area": 1732, "segmentation": {"size": [512, 512], "counts": "^]j32m?2N2M3N1O2N2M3CBUA`0i>BUA`0i>BUA?j>CTA?j>=M3N2N2N1O2M2OO3N2N1O2N2M3N2N2N0O10O100000O10O100000O10O100000O0100000O102N2N2N2M2O2N2N2N2M3N1O2N2M3NkSV3"}, "image_id": 44, "id": 585}, {"iscrowd": 0, "category_id": 1, "bbox": [158.0, 446.0, 55.0, 62.0], "area": 1694, "segmentation": {"size": [512, 512], "counts": "Z__21n?2N1N3N2N2N2M2O2N2N2M3GYOVAh0i>ZOUAh0i>YOVAi0h>9M3N1O2N2M3N2N000O10O1000O10O101O1O2M3N2N2O01000O1N2N1N3FTA]On>a0SA^Oo>`0SA^Oo>`09N3N2N2N2M2O2N2NaQe4"}, "image_id": 44, "id": 586}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 39.0, 20.0], "area": 512, "segmentation": {"size": [512, 512], "counts": "07i?1O001O001O00001O001O001O00001O001O001O001O00001O001O001O0000N2N2N2M3N2N2M3NR`\\7"}, "image_id": 45, "id": 587}, {"iscrowd": 0, "category_id": 1, "bbox": [68.0, 0.0, 88.0, 58.0], "area": 2920, "segmentation": {"size": [512, 512], "counts": "PQR11l?3N3M2M4M2N2M4M210O01O01O010O01O010ON3BYO_Ai0^>ZOaAg0_>ZOaAf0^>ZObAf0^>ZOcAf0\\>YOeAg0[>WOhAh0X>YOgAh0X>XOgAi0Y>XOcAl0\\>TObAn0^>SO_Ao0a>9O001O001O00001O001O00001O001O00001O001O00001OeNiAm0W>QOkAP1T>mNPBR1R>jNPBV1^>0O010O00O2L3N3L30010O010O00010O010O000N3M2M4M2N2M4M2M4M2MXoa5"}, "image_id": 45, "id": 588}, {"iscrowd": 0, "category_id": 1, "bbox": [225.0, 0.0, 54.0, 16.0], "area": 424, "segmentation": {"size": [512, 512], "counts": "W``31l?3N3M201O00001O001O00001O001O00001O001O0000N2M3N2M3N2N2O1001O00001O001O001O0000O11O001O00001O001O001ON2N2NR`d3"}, "image_id": 45, "id": 589}, {"iscrowd": 0, "category_id": 1, "bbox": [448.0, 0.0, 35.0, 9.0], "area": 163, "segmentation": {"size": [512, 512], "counts": "PPP71o?001O00001O001O00001O0000M3001O001O00001O001O001O00001O001O0000N2N2MS`>"}, "image_id": 45, "id": 590}, {"iscrowd": 0, "category_id": 1, "bbox": [508.0, 0.0, 4.0, 2.0], "area": 6, "segmentation": {"size": [512, 512], "counts": "PPn71o?001O00"}, "image_id": 45, "id": 591}, {"iscrowd": 0, "category_id": 1, "bbox": [156.0, 28.0, 98.0, 68.0], "area": 3998, "segmentation": {"size": [512, 512], "counts": "oQ^23k?2M3N3M2M4M2M3N3L3N3L3N2M4M2O2O00010O010O00010O010O00010O010O00010O010O00010O010O00010O010O0N2N3L3N3N1010O00010O010O00010O010O00010O010O00010O010O00010O0N3M2M3N3L3N3L3N2M4N110M2N2M4M2M4M2M3N3M2McnP4"}, "image_id": 45, "id": 592}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 39.0, 7.0, 14.0], "area": 55, "segmentation": {"size": [512, 512], "counts": "W1>b?N3N2N2M2O2Nf^l7"}, "image_id": 45, "id": 593}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 56.0, 2.0, 3.0], "area": 4, "segmentation": {"size": [512, 512], "counts": "h13n?NXnn7"}, "image_id": 45, "id": 594}, {"iscrowd": 0, "category_id": 1, "bbox": [7.0, 60.0, 118.0, 68.0], "area": 3657, "segmentation": {"size": [512, 512], "counts": 
"^b31n?2N2M2O2N2N2N1N3N2N2N2M2O2N2N20O10O1000O10O100000O0100000O0100000O0100000O10O100000O0100000O0100000O0100000O01000000O010N0O0O2O2M4M2O2O01O010O01O01O0O2N10010O0010O00M4N10O2L3N3M21O010O01O01O010O01O01O010O0O1M4M2M4M2M3N3L3N3L3N2M4M2M3NX]Q6"}, "image_id": 45, "id": 595}, {"iscrowd": 0, "category_id": 1, "bbox": [352.0, 70.0, 86.0, 58.0], "area": 2595, "segmentation": {"size": [512, 512], "counts": "PS`51m?2M4M2M3N3M2M4M2M3N3M201O010O01O01O010O010O01O01O010O01O01O010O010O01O01O010O010O000O2L3N3M200010O01O010O01O01O010O01O010O01O010O01PO\\Ab0d>\\O_Ad0a>XObAh0^>VOeAj0Z>TOhAl0g>0O00010O01L3N3L3N2N3L3N3M2M3NklT1"}, "image_id": 45, "id": 596}, {"iscrowd": 0, "category_id": 1, "bbox": [130.0, 93.0, 93.0, 71.0], "area": 3546, "segmentation": {"size": [512, 512], "counts": "kSQ22k?4L3N2M4L3M4O01O01O0O2M2M3N3L3N3N10010O010O00010O010O0010O0010O0010O0010N1M4O01O010O01O01O010O010O01O01O010O01O01O010O01M2bAcNV>`1gAdNV>d1N2O2O010O0010O0010O0010QOPBLP>2SBMn=OUB2j=LXB4h=I\\B7d=F^B:b=DaB;`=AcB`0\\=^OfBb0[=ZOiBb0Z=\\OhBb0]>L3N2N3L3N3L3Ne[`4"}, "image_id": 45, "id": 597}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 95.0, 20.0, 58.0], "area": 589, "segmentation": {"size": [512, 512], "counts": "o2j1W>M2O2N2M3N1_OaA@a>=bA@a>>aA@a>>`AAa>=bA@a>>aA@a>>aA@`>>c0N2N2M2O2N2Mele7"}, "image_id": 45, "id": 598}, {"iscrowd": 0, "category_id": 1, "bbox": [447.0, 102.0, 53.0, 86.0], "area": 2757, "segmentation": {"size": [512, 512], "counts": "`eo64h>NWB5f=OPBL_O9]>NRBL^O8]>ORB:l=IPB;Z=\\OfBfNoAT1U>iNoAS1c>J6K200010O01N1N2M4M2M4M2M3N3Lmk5"}, "image_id": 45, "id": 599}, {"iscrowd": 0, "category_id": 1, "bbox": [326.0, 110.0, 80.0, 70.0], "area": 2474, "segmentation": {"size": [512, 512], "counts": "TTS52l?3L3N2N3L3N3L3N2N3N1_AUOm=l0oAWOR>h0lA[OS>f0iA]OX>b0fAAY>?eAC\\>=`AF`>n0O010O01O01O010O01O010O01O010OnNaAa0b>\\OaAc0_>ZOdAf0\\>XOgAh0Y>UOiAk0f>10O0010O0010O010O0010O0010O0010O0010O0010O0010O010O0010O0010O0010ON2N3L3N3M2M4M2M3N3LYkd1"}, "image_id": 45, "id": 600}, {"iscrowd": 0, "category_id": 1, "bbox": [6.0, 136.0, 59.0, 56.0], "area": 1786, "segmentation": {"size": [512, 512], "counts": "VU31n?2N2N2M3N1O2N2N2M2O2N2l@ZOk>h0SAZOj>i0TAXOk>P1N2N2M100002N2M2O2000O1N2O010O100O1N1O2N2N2M3N1O000O10O2O2N2N2_O_A@b>>`A@c>>_A@c>>_A@c>>_A@c>=`A@b>?_A@c>>a0N2N2M3N1O2N\\[o6"}, "image_id": 45, "id": 601}, {"iscrowd": 0, "category_id": 1, "bbox": [79.0, 150.0, 65.0, 81.0], "area": 2525, "segmentation": {"size": [512, 512], "counts": "^fW14j?2M4M2M3N3M2M4M2M4N11O01O010O01ORATOh>S1O010O01O01O010O01O01N1N3M2M3N3L3N3L3N3L3N2M3N0O010O0102N3L3N2M4M2M4M2M3N3L3N3L3N2M4M2Dk@JY?3i@KY?2k@JX?4T1M20010O01O01O010O000N3L3N2M40O01O01O01O01O010O01OM4M2M3M4M2M4L3N2M4L3N2M4L3N3L3M3NfZj4"}, "image_id": 45, "id": 604}, {"iscrowd": 0, "category_id": 1, "bbox": [298.0, 163.0, 91.0, 75.0], "area": 2964, "segmentation": {"size": [512, 512], "counts": "PVe43j?3N3L3N2N3L3N3L3N2M4M2N30O0010O010O0010O0010O0010O0010O00O2M2010O010O0001L301O01O010O010O00010O010O01O01^AVOl=j0RBYOn=g0oA[OQ>e0lA_OT>`0jABV>?fAEY>;eAG\\>n0010O01O01O010O010O00010O01N00O3L3N3L3N2M4M2N3L3N2M4M2N3L3N2M4MQZm1"}, "image_id": 45, "id": 605}, {"iscrowd": 0, "category_id": 1, "bbox": [405.0, 199.0, 101.0, 74.0], "area": 3468, "segmentation": {"size": [512, 512], "counts": "XgZ61l?3N2M4M2M4M2M301O0010O0010O0010O0010O0O2L3N2n@TOn>P1010O00010O010O0O110O0010O0N3L3N2M4M2M4N10010O010O00010O010O00010O`NeA[1a>0O00010O010O00010^AdN^>`10O00010O010O00010O010O0VOdA0\\>MgA4Y>IjA6V>GmA:S>EmA;S>FmA9S>GmA:S>EnA:R>DPB=P>_OSBa0m=]OVBb0j=\\OXBe0b>O01O01M2N3L3N2M4M2N3LVh2"}, "image_id": 45, "id": 606}, {"iscrowd": 
0, "category_id": 1, "bbox": [124.0, 217.0, 54.0, 46.0], "area": 1427, "segmentation": {"size": [512, 512], "counts": "^Wn13j?3N2M4L3N3L3M3N3O00010O0PAVOj>k0RAYOn>l000010O0010O0010O0010O00010O0010O0010O0010O00010O0010O0010N1M4M2M3M4M2M3N3L3M4MhhV5"}, "image_id": 45, "id": 607}, {"iscrowd": 0, "category_id": 1, "bbox": [284.0, 222.0, 73.0, 63.0], "area": 2563, "segmentation": {"size": [512, 512], "counts": "SX^44j?2M3^OIYA:c>IZA:d>IYA:d>HYA;d>HZA:d>c0M2010O010O0010O0010O001N1O1010O010O000O2M2N3O00010O010O01O01O010O01O01O010O01SOaA7_>GcA:\\>ChAAiA`0V>]OmAc0T>ZOoAe0Q>YOQBh0d>O01O01O010O01O010O01N1N2M4M2M4M2M3N3MlW]2"}, "image_id": 45, "id": 608}, {"iscrowd": 0, "category_id": 1, "bbox": [403.0, 262.0, 85.0, 67.0], "area": 3382, "segmentation": {"size": [512, 512], "counts": "[hY62n?3L4M3^@GW?L3N2M4L3N4K5L5J4M2N30O00010O01O01O010O01O0O1M4L3N3L3N2M4Mgf`7"}, "image_id": 45, "id": 610}, {"iscrowd": 0, "category_id": 1, "bbox": [244.0, 286.0, 106.0, 63.0], "area": 3415, "segmentation": {"size": [512, 512], "counts": "dYj33k?3L3N2M4M2M4M2M3O2O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O01O010O01O010M2M3N3L3N3M20010O010O00010O010O00010O0O2L3N201O010O00010O010O0001mNfA?Y>_OiAa0X>[OlAe0S>YOoAg0R>UORBj0n=TOTBm0k=QOXBn0]>1O01O010O01O010O01O01O01O0M4M2N2M4M2M4M2M3N3LQf`2"}, "image_id": 45, "id": 611}, {"iscrowd": 0, "category_id": 1, "bbox": [346.0, 288.0, 29.0, 17.0], "area": 237, "segmentation": {"size": [512, 512], "counts": "VY]51m?2M4N10010O0010O0010O0010O0010O0010O0010N1N3O0010O001F]@3i?MiVT2"}, "image_id": 45, "id": 612}, {"iscrowd": 0, "category_id": 1, "bbox": [22.0, 308.0, 59.0, 49.0], "area": 1700, "segmentation": {"size": [512, 512], "counts": "_Z;2k?4M2M4M2N3L3N201O0010O0010O0010O0010O0010OO2M2N2M4O010O01O010O01N1N2N3L3O20O01O01OM40O010O00010O0O2L3^OZAHh>5[AHi>5YAIi>5ZAGj>5YAIi>5ZAHi>4c0NWVg6"}, "image_id": 45, "id": 613}, {"iscrowd": 0, "category_id": 1, "bbox": [75.0, 326.0, 59.0, 50.0], "area": 1876, "segmentation": {"size": [512, 512], "counts": "VkU11l?3N3L3N3]@E]?`0M3N3L310O010M2M3N3L3010O000N02010OO1M40O010O00010O010O010O00010O010OO1N3M11M4M2M3N3L3N30O01O0M3N3L3Ee@O^?Me@1]?Mf@O]?Omel5"}, "image_id": 45, "id": 614}, {"iscrowd": 0, "category_id": 1, "bbox": [235.0, 344.0, 87.0, 51.0], "area": 2521, "segmentation": {"size": [512, 512], "counts": "dke32k?3N3M2N3L3N2N3M201O010O01O010O010OM4M2N30O0010O00M4N110O01N1N3M2M3N3N110O010O010O00010O010O010O0010O0010O010O010O0010M2M4M2N3O0010O0M3N3O010O010O01N1N3M2M3N3M2N3L3N3M2N3L3Nidn2"}, "image_id": 45, "id": 615}, {"iscrowd": 0, "category_id": 1, "bbox": [16.0, 347.0, 16.0, 19.0], "area": 174, "segmentation": {"size": [512, 512], "counts": "X[82l?3L3N3L3N2O2O010O00M4M2M4M2M3NTe_7"}, "image_id": 45, "id": 616}, {"iscrowd": 0, "category_id": 1, "bbox": [428.0, 357.0, 62.0, 84.0], "area": 2843, "segmentation": {"size": [512, 512], "counts": "W]f63k?2N3L3N3M20001O0N3M2M4\\OWOmAk0Q>XOlAj0Q>YOlAk0Q>XOlAj0R>XOlAk0k=lNRB=0i0l=j0L3N3M2M301O010O010O01O01M2O20@cB[N]=b1fB_NZ=^1iBaNW=]1kBaNX=_1gB_N[=a1fB\\N]=d1bBYNa=g1`BVNb=k1:O001WNlAm00GX>6jAJV>4lAMT>OPB0P>NRB3m=KVB4k=HXB9g=E\\B:e=C]B=c=A`B?`=]OcBb0^=\\OdBb0_>L3N3M2M4M2N[c:"}, "image_id": 45, "id": 617}, {"iscrowd": 0, "category_id": 1, "bbox": [15.0, 368.0, 96.0, 63.0], "area": 3321, "segmentation": {"size": [512, 512], "counts": "Zl73k?2M4M2M4M2N2M4M2M4M2O110O0010O0010O0010O0010O0010O0010O0010O010O00N3L3N3L30010O010O00010O010O00010O010O0_AeN]>[1aAhN_>\\10jNaAl0_>QOdAn0\\>oNgAQ1Y>mNjAS1a>0010O0010O010N1N2M4M2N3L3N2010O0N3M2N2M4M2M4M2O1010O01M2N2N3YOl@`0T?]OPAc0V?]Og@=`?L3N2M4MfSX6"}, "image_id": 
45, "id": 618}, {"iscrowd": 0, "category_id": 1, "bbox": [496.0, 385.0, 16.0, 39.0], "area": 405, "segmentation": {"size": [512, 512], "counts": "d\\h72l?2M4M2M3h@Ei>>TADj>>TADi>`0SADj>j0010O0010O0010OlC"}, "image_id": 45, "id": 619}, {"iscrowd": 0, "category_id": 1, "bbox": [3.0, 423.0, 80.0, 67.0], "area": 2542, "segmentation": {"size": [512, 512], "counts": "Wn13k?2M4M2N3L3N3L3N2N3O0010O0010O010O0010O0010O0010O0001M2M4M2N3L3N2M4O0010O00010O010O010O00010O010O00cA`NZ>d10O0WOeAN[>OhA0X>NjA2W>JmA6R>HPB8P>ESBj=@XB`0i=\\O[Bd0d=ZO^Bf0^>10O010O00010O010O001N1N2M4M2M4M2N2M^Qf6"}, "image_id": 45, "id": 620}, {"iscrowd": 0, "category_id": 1, "bbox": [504.0, 432.0, 8.0, 21.0], "area": 85, "segmentation": {"size": [512, 512], "counts": "Q^l71l?3N2N3L3N3M2M4_B"}, "image_id": 45, "id": 621}, {"iscrowd": 0, "category_id": 1, "bbox": [480.0, 439.0, 19.0, 19.0], "area": 280, "segmentation": {"size": [512, 512], "counts": "n]`74j?5K3N2N2O1N10001O0000O101O0O2O1N2N3LUR6"}, "image_id": 45, "id": 622}, {"iscrowd": 0, "category_id": 1, "bbox": [23.0, 490.0, 44.0, 22.0], "area": 539, "segmentation": {"size": [512, 512], "counts": "go;2l?2Y@N]?6`@L^?P1mAPOR>n0PBROP>l0SBQOo=m0SBQOP>l0SBQOo=m0d0M2N3M2N3M2N3M2N3M2N3M2Nk_n2"}, "image_id": 46, "id": 628}, {"iscrowd": 0, "category_id": 1, "bbox": [380.0, 0.0, 35.0, 28.0], "area": 688, "segmentation": {"size": [512, 512], "counts": "XPn51l?4M2M4O00001a@FV?:g@JX??0001O001O00001O00001O001O00001O001O0000M3N2M3N2M4L3H_@Obo`1"}, "image_id": 46, "id": 629}, {"iscrowd": 0, "category_id": 1, "bbox": [415.0, 0.0, 5.0, 2.0], "area": 8, "segmentation": {"size": [512, 512], "counts": "P`_61o?001O000PP^1"}, "image_id": 46, "id": 630}, {"iscrowd": 0, "category_id": 1, "bbox": [453.0, 0.0, 23.0, 22.0], "area": 339, "segmentation": {"size": [512, 512], "counts": "_`R72k?3N2M4L3M4N10001O00001O00001O01N1M3N3L3M3M4LPPb0"}, "image_id": 46, "id": 631}, {"iscrowd": 0, "category_id": 1, "bbox": [500.0, 0.0, 12.0, 5.0], "area": 35, "segmentation": {"size": [512, 512], "counts": "PPj71o?00001O001O001O00001O00"}, "image_id": 46, "id": 632}, {"iscrowd": 0, "category_id": 1, "bbox": [322.0, 11.0, 56.0, 78.0], "area": 2349, "segmentation": {"size": [512, 512], "counts": "[RQ51m?2N3L3N2N3M2M4M2N3M2M4M2N2DPOfAS1W>oNfAT1X>oNeAT1X>;M4M2N3L3N2N3M2O20O01O010O01N1N3M2M4M2N2O2O0ClAoNT>n0nAPOT>n0nAPOU>m0nAPOT>m0oAQOT>l0a0N2N3L3N3M2M4M2N3M2M3NZoR2"}, "image_id": 46, "id": 633}, {"iscrowd": 0, "category_id": 1, "bbox": [441.0, 18.0, 29.0, 34.0], "area": 592, "segmentation": {"size": [512, 512], "counts": "Yal63k?3L3N3L3N2M4M20001N1M4M200010O010O00010O0VORAa0P?\\ORAa0X?N3L3N3L3N2MTod0"}, "image_id": 46, "id": 634}, {"iscrowd": 0, "category_id": 1, "bbox": [381.0, 25.0, 59.0, 81.0], "area": 2406, "segmentation": {"size": [512, 512], "counts": "kbn53k?3L3M3^OF\\A=Z>EZA28<[>McA6Z>LcA7Z>MbA6\\>i0L30010O0010O0010O0010O0010O00010O0010O0010ON1N0010O00010O01O01O02O2M4L3N2M4L3N3L3N2M4L3N3L3M3N3L3MToS1"}, "image_id": 46, "id": 635}, {"iscrowd": 0, "category_id": 1, "bbox": [505.0, 27.0, 7.0, 17.0], "area": 63, "segmentation": {"size": [512, 512], "counts": "Yal71m?2M4M2N3L3N1VO"}, "image_id": 46, "id": 636}, {"iscrowd": 0, "category_id": 1, "bbox": [496.0, 47.0, 16.0, 26.0], "area": 288, "segmentation": {"size": [512, 512], "counts": "PRh73j?3N3L3N3L3M3O2O01O010O01O01O01]N"}, "image_id": 46, "id": 637}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 59.0, 30.0, 58.0], "area": 1011, "segmentation": {"size": [512, 512], "counts": "k1i1W>00010O01N11O01M2N3L3FbAPO`>n0cAoN`>n0:M4M201O01O01O0O2M2M3N3M2M4M2M4Mkm`7"}, 
"image_id": 46, "id": 638}, {"iscrowd": 0, "category_id": 1, "bbox": [438.0, 67.0, 49.0, 61.0], "area": 1775, "segmentation": {"size": [512, 512], "counts": "bSk63k?3M2M4M2M4M2B]O\\Af0b>\\O\\Ag0a>\\O\\Af0a>]O\\Ag0a>=M3N3L3N30O00010O010O010O00010O0O2M1N10O03N2M3N3M2M4M2M3N3L3N3M2M4M2M3N3L3Ni]<"}, "image_id": 46, "id": 639}, {"iscrowd": 0, "category_id": 1, "bbox": [27.0, 74.0, 57.0, 72.0], "area": 2243, "segmentation": {"size": [512, 512], "counts": "Sd=1l?4M2M3M4L3N3M20010O00010O00010N1M4M2M3M4M2M3M4L3N3L3M3N3L3M3M4O01O01O010O0001L3M4M2M3M4L3N2O2O010lNaAe0_>YOcAg0]>VOfAg0^>UOfAh0j>M3Ci@N[?Nh@O[?Ni@OX\\f6"}, "image_id": 46, "id": 640}, {"iscrowd": 0, "category_id": 1, "bbox": [490.0, 81.0, 22.0, 64.0], "area": 892, "segmentation": {"size": [512, 512], "counts": "VTe72l?3L3N3]OF[A=b>G[AF\\AF[A=b>G[Ab0M4M2N2M4M2M4N110O01O01O0]M"}, "image_id": 46, "id": 641}, {"iscrowd": 0, "category_id": 1, "bbox": [80.0, 109.0, 61.0, 59.0], "area": 2130, "segmentation": {"size": [512, 512], "counts": "eTX12l?3L3N3L3N2M4M2N3L3N2M4M2O20O0010O0010O0010O0010ON2M4M2M4M2M301O010O0010O0010O0010O001L3N2M4M2M4M2N3L3N2M4M2M4M2M3N3L3N3LZ\\i5"}, "image_id": 46, "id": 642}, {"iscrowd": 0, "category_id": 1, "bbox": [142.0, 123.0, 52.0, 65.0], "area": 1914, "segmentation": {"size": [512, 512], "counts": "`UW21l?3N3L3M4M2M3N3L3M3N3L3M4M2M3N3L3M4M2M3N3O00010O0010O0010L3O02L3N2M4L3N3L3N2M4L3N2M4L3N3L3N2M4L3N2M4L3Nokn4"}, "image_id": 46, "id": 643}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 133.0, 39.0, 42.0], "area": 1014, "segmentation": {"size": [512, 512], "counts": "U4k0V?Mm@[Oo>b0RA@n>>QAEP?7PAMo>1QA1P?KPA9o>EQA=P?@o@c0Q?ZOo@j0Q?2N3O01O010O00010O010O00010O010O00010O0XOPA`0P?]OTAc0l>ZOVAe0S?M4L3N2M4M2M4MW[\\7"}, "image_id": 46, "id": 644}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 148.0, 58.0, 69.0], "area": 1976, "segmentation": {"size": [512, 512], "counts": "Zfm21m?2N3L3N2M4M2N3O000010O010O01L3N2N3L3N3L3N2N3L3N3L3N3M2M3N3L3O20O00010L3N3L3N3M2M3N3L3N3M2M3N3L3N3M2M4M2M3N3M2M4M2MT[U4"}, "image_id": 46, "id": 645}, {"iscrowd": 0, "category_id": 1, "bbox": [57.0, 161.0, 31.0, 30.0], "area": 546, "segmentation": {"size": [512, 512], "counts": "cel02l?3L3N3M2N3M2N3M2O20O00010O010O010O010O010O0O2M2M3N3M2N3M2N3Mgjc6"}, "image_id": 46, "id": 646}, {"iscrowd": 0, "category_id": 1, "bbox": [241.0, 173.0, 57.0, 65.0], "area": 2088, "segmentation": {"size": [512, 512], "counts": "ofh31l?3N3L3N2N3L3N3M2M3N3L3N3N101O0O1M4M2M4M2N2M4M2N3O010O01O010O01O01O010O0N2N3L3N3L3N3M2M3N3M2M4M2M3N3M2M4M2M4M2N2MYjZ3"}, "image_id": 46, "id": 647}, {"iscrowd": 0, "category_id": 1, "bbox": [109.0, 179.0, 31.0, 34.0], "area": 634, "segmentation": {"size": [512, 512], "counts": "Yff13k?3L3N2M4M2M4M2M3N30O01O01O010O01O01O010O01OM4M2M4M2N2M4M2M4MVji5"}, "image_id": 46, "id": 648}, {"iscrowd": 0, "category_id": 1, "bbox": [291.0, 189.0, 61.0, 69.0], "area": 2197, "segmentation": {"size": [512, 512], "counts": "^ga43k?2N3M2M4M2N3N11O010O010OYO_OoAb0n=@RB`0l=CTB=i=EWB;f=IYB7e=KXB9e=JXB8f=JXB9g=HVB:k=ERB?m=AQBa0o=@nAc0R>e00010O010O010O0010O0010O010OM4M2N2N3L3N3M2N3L3N3M2N2M4M2N3M2M4M2N3M2M4Mci_2"}, "image_id": 46, "id": 649}, {"iscrowd": 0, "category_id": 1, "bbox": [160.0, 209.0, 28.0, 27.0], "area": 443, "segmentation": {"size": [512, 512], "counts": "SW`21m?3L3N2N3L3N3M2N2010O01O010O01O010O01O01M2N3L3N2N3L3N3MYiQ5"}, "image_id": 46, "id": 650}, {"iscrowd": 0, "category_id": 1, "bbox": [352.0, 218.0, 51.0, 64.0], "area": 1794, "segmentation": {"size": [512, 512], "counts": 
"\\X`51m?3L3N3L3N3M2M3N3L3N3M2M3N3L3N3M2M4M2M3N3M2010O00010O010O010M2M3N3L3N3M2M3N3L3N3M2M4M2M3N3M2M4M2M3N3MlXf1"}, "image_id": 46, "id": 651}, {"iscrowd": 0, "category_id": 1, "bbox": [212.0, 229.0, 30.0, 28.0], "area": 506, "segmentation": {"size": [512, 512], "counts": "gWZ31m?3M2N2M4M2N3L3N3O010O01O01O010O010O010O01O000N3M2N3L3N3M2NdhV4"}, "image_id": 46, "id": 652}, {"iscrowd": 0, "category_id": 1, "bbox": [399.0, 239.0, 62.0, 70.0], "area": 2114, "segmentation": {"size": [512, 512], "counts": "TiW62l?3L3N3L3N3M2M3010O010O0010O001L3N3L3N2N3L3N3L3N2M4M2N3L3N2M4M201O010ON3M2N3L3N2M4M2M4M2N2M4M2M4M2N2M4M2M4M2010O00Fc@2]?Ke@5c?O2M2MPXi0"}, "image_id": 46, "id": 653}, {"iscrowd": 0, "category_id": 1, "bbox": [269.0, 251.0, 37.0, 32.0], "area": 654, "segmentation": {"size": [512, 512], "counts": "]hV41m?3L3N3M2M3N3L3O2O010O00010O010O00010O010O010O00010O01@g@8X?Fj@:W?Bl@?[?O001N1N3M2M3NdgV3"}, "image_id": 46, "id": 654}, {"iscrowd": 0, "category_id": 1, "bbox": [453.0, 263.0, 59.0, 65.0], "area": 1988, "segmentation": {"size": [512, 512], "counts": "_iR72k?4M2M3N3L3N210O010O00010O0N3M2M3M4M2N30O0010O001N1\\AmNX>V1eAnNX>U1eAmNX>_1N3L3N3O01O010O0O1N3L301O0N2N3L3N3L3N2M4M2M3M4M2M4M2M3N3L3N3LaG"}, "image_id": 46, "id": 655}, {"iscrowd": 0, "category_id": 1, "bbox": [324.0, 270.0, 29.0, 28.0], "area": 486, "segmentation": {"size": [512, 512], "counts": "QYR51m?2M4M2M3N3L3N3M20010O010O00010O010O00010O01M2M3N3L3M4M2M]W_2"}, "image_id": 46, "id": 656}, {"iscrowd": 0, "category_id": 1, "bbox": [375.0, 293.0, 29.0, 30.0], "area": 514, "segmentation": {"size": [512, 512], "counts": "iik51m?3L3N3L3N2N3L3N3N110O01O01O010O01O01O010ON3M2M4M2M3N3M2Mffe1"}, "image_id": 46, "id": 657}, {"iscrowd": 0, "category_id": 1, "bbox": [427.0, 317.0, 33.0, 31.0], "area": 616, "segmentation": {"size": [512, 512], "counts": "aje61l?4M2M3N3L3N3L3N3O01O01O010O01O01O010O01O01O010O01M2N2M4M2N3L3N3Llei0"}, "image_id": 46, "id": 658}, {"iscrowd": 0, "category_id": 1, "bbox": [507.0, 326.0, 5.0, 12.0], "area": 35, "segmentation": {"size": [512, 512], "counts": "]jm72l?3M2O2M3iE"}, "image_id": 46, "id": 659}, {"iscrowd": 0, "category_id": 1, "bbox": [341.0, 327.0, 67.0, 50.0], "area": 1742, "segmentation": {"size": [512, 512], "counts": "RkZ52l?3L3N3M2N3L3N2N3M2M013M2010O00010O010O010O00010O010O010O010O00010O010O010O01O01O010O010O010O00010O010O010O010N1N2N3L3N3M2N3L3N2N3L3NTec1"}, "image_id": 46, "id": 660}, {"iscrowd": 0, "category_id": 1, "bbox": [481.0, 340.0, 29.0, 28.0], "area": 482, "segmentation": {"size": [512, 512], "counts": "Vk`72k?4M2M3N3L3N3L31O010O01O01O010O01O01O010O0O1M4M2N3L3N2M4MUe0"}, "image_id": 46, "id": 661}, {"iscrowd": 0, "category_id": 1, "bbox": [280.0, 362.0, 61.0, 90.0], "area": 2684, "segmentation": {"size": [512, 512], "counts": "]]\\41m?2M4M2M3f@Em>>PAEl>>QAEm>>PADn>i00O01O010O01ON3M2M4XOhNdBZ1Y=iNdBZ1Z=iNcBZ1Z=hNcB[1Z=iNcBZ1Z=hNdBZ1Y=iNdBZ1Z=h0L310OO2M2M3N3L3N1N013M2M4M2M3N3L3N3L30010O0FaAQOb>l0aAPOb>n0`APOc>l0:N2M4M2M4M2M3N3L3N3M]Te2"}, "image_id": 46, "id": 662}, {"iscrowd": 0, "category_id": 1, "bbox": [260.0, 458.0, 48.0, 54.0], "area": 1704, "segmentation": {"size": [512, 512], "counts": "[_R41m?2M4h@Jb>8[AKb>8\\AJb>9[AIb>:[AIc>:ZAHd>:YAId>k0N2M3N2O11O00001O001O00O1N2M3001O001O00001O0N3L3N3L3N2M4M2N3L3N2M4M2M4M2M3N3M^aU3"}, "image_id": 46, "id": 663}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 409.0, 139.0, 103.0], "area": 10610, "segmentation": {"size": [512, 512], "counts": 
"]>c1^>O00O1M3N2M3N2N2M3N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2O100001O001O001O00001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O00001O001O001O00001O001O00001O001O001O000000N2M3N2M3N2N21O001O001O00001O001O0O2L3N2M4M2N3L3N3L3N2N3L3N3L3N3M2M3N3@WAEk>9WADm>;TABn>?901M2N3M2M4M2N[Rj5"}, "image_id": 47, "id": 664}, {"iscrowd": 0, "category_id": 1, "bbox": [296.0, 218.0, 154.0, 98.0], "area": 7017, "segmentation": {"size": [512, 512], "counts": "VXd41m?2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3M3O20O01O01O010O01O01O010O01O01O010O00010O010O00010O010O00010O010O00010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O00010O010O00010O010O00010O010O00010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O00010O010O00010O010O00010O010O000N3M2M3N3L3N3L3N2M4M2M4M2M3M4M2M4Mbgn0"}, "image_id": 49, "id": 665}, {"iscrowd": 0, "category_id": 1, "bbox": [448.0, 323.0, 64.0, 53.0], "area": 1585, "segmentation": {"size": [512, 512], "counts": "gZP72l?3M2N3M2N3M2N3M2N3M210O010O010O010O010O010O010O010O010O010O010O010O010O010O010O010O010O010O010O01O010O010O010O010O010O010O010ORE"}, "image_id": 49, "id": 666}, {"iscrowd": 0, "category_id": 1, "bbox": [505.0, 408.0, 7.0, 18.0], "area": 70, "segmentation": {"size": [512, 512], "counts": "Uml72l?2M4M2N3M2N3WC"}, "image_id": 49, "id": 667}, {"iscrowd": 0, "category_id": 1, "bbox": [127.0, 0.0, 28.0, 28.0], "area": 414, "segmentation": {"size": [512, 512], "counts": "]`o12m?2N2N2N2N2N2N2O2M2N2N2N2N1O01O2N2N3M2N2N2O1N2N2N2N2N3Mc_b5"}, "image_id": 50, "id": 668}, {"iscrowd": 0, "category_id": 1, "bbox": [164.0, 0.0, 46.0, 27.0], "area": 680, "segmentation": {"size": [512, 512], "counts": "VPb22m?2N2N2N2N2N2O1O1O1O1O1O1O1O1O1O1O1O1O1OO1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1OQPg4"}, "image_id": 50, "id": 669}, {"iscrowd": 0, "category_id": 1, "bbox": [405.0, 0.0, 27.0, 12.0], "area": 169, "segmentation": {"size": [512, 512], "counts": "P`Z61o?001O001O1O001O001O001O1O001O001O001O1O00N2N2O1N2N2NRPX1"}, "image_id": 50, "id": 670}, {"iscrowd": 0, "category_id": 1, "bbox": [442.0, 0.0, 55.0, 43.0], "area": 1356, "segmentation": {"size": [512, 512], "counts": "YPm61m?2O2M2N3M201O001O1O001O001g@Bn>>PADP?=m@FR?:m@GS?d0O001O001O1O001QAQOk>S1O1O001O001O001O001O1O00N2N2N3N1N3M2N3M3N1N3M2N3M2O2M2N3M3Mi_7"}, "image_id": 50, "id": 671}, {"iscrowd": 0, "category_id": 1, "bbox": [21.0, 7.0, 60.0, 61.0], "area": 1951, "segmentation": {"size": [512, 512], "counts": "\\a:1n?2M3N2N1N3N2M3N1g@_OR?c0k@@R?h0O2M3N2N1N3N2N2M2O2M3N2N0O011N2O2N2O10O010N2M3N2N1N3N1O1N3N2N1N3N2N2M2O2N2M3N1O2N2M3N2N1N3N2N2M2O2NT_g6"}, "image_id": 50, "id": 672}, {"iscrowd": 0, "category_id": 1, "bbox": [203.0, 7.0, 53.0, 60.0], "area": 1506, "segmentation": {"size": [512, 512], "counts": "baU31n?2M2O2N2M3N1O2M3N2N1N3N2N2M2O2HRO[AP1c>QO[AR1b>7O2N2M3N000O01000O0101O2M2O2N2M3N1O2M3N2N1N3N2N2M2O2N2M3N1O2M3N2N1N3NYoo3"}, "image_id": 50, "id": 673}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 22.0, 12.0, 23.0], "area": 153, "segmentation": {"size": [512, 512], "counts": "f0g0Z?O0O2N2M3N1O2M3N3L3N1ORoi7"}, "image_id": 50, "id": 674}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 42.0, 27.0, 28.0], "area": 431, "segmentation": {"size": [512, 512], "counts": "f15j?2M3N1N3N2N1N3N2N2O01000O01000O01M3N2N1N3N2M2O2N2M3N[^b7"}, "image_id": 50, "id": 675}, {"iscrowd": 0, "category_id": 1, "bbox": [157.0, 48.0, 27.0, 27.0], "area": 378, "segmentation": {"size": [512, 512], "counts": "ma^21n?2N2N2N2N2N2N2N2N2N2N2N2N20O1N2N2N2N2N2N2N2N2N2N2N2NTnS5"}, "image_id": 50, "id": 676}, {"iscrowd": 0, "category_id": 1, "bbox": 
[268.0, 54.0, 51.0, 60.0], "area": 1705, "segmentation": {"size": [512, 512], "counts": "QSV43k?2N3M2O2M2BCWA`0f>CWA?h>BWA`0f>CWA`0f>BXA`0f>?N1N3M2N3M3N1O2O010O010O0O2N20M2N3N1N3M3M2N3N1N3M2N3N1N3M3M2N3N1N3M2N3N2M2N3M2Nn]P3"}, "image_id": 50, "id": 677}, {"iscrowd": 0, "category_id": 1, "bbox": [73.0, 61.0, 56.0, 56.0], "area": 1633, "segmentation": {"size": [512, 512], "counts": "nbT12m?2N2M3N1O2M3N2N1N3N2N2M2O2N2M3N1O2M3000O0100N2N1N3N2N2M2O00O011O2M2O2N2M3N1O2M3N2N1N3N2N2M2O2N2M3N1O2M3N2M2O2Nc]o5"}, "image_id": 50, "id": 678}, {"iscrowd": 0, "category_id": 1, "bbox": [325.0, 62.0, 30.0, 30.0], "area": 471, "segmentation": {"size": [512, 512], "counts": "`bR51m?2N3M2O2M2N3N2M2N3M210O10O010O10O010O10M3M2N3N1N3M2O2M3M2Ni]^2"}, "image_id": 50, "id": 679}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 69.0, 36.0, 42.0], "area": 783, "segmentation": {"size": [512, 512], "counts": "U2j0W?0O0100000O01000O0100000O01000O0100000O01000O01O1N2M2O2M3N1O2M3N2M2O2NSm]7"}, "image_id": 50, "id": 680}, {"iscrowd": 0, "category_id": 1, "bbox": [213.0, 69.0, 11.0, 12.0], "area": 77, "segmentation": {"size": [512, 512], "counts": "\\bZ32m?2M2O2M3N10O2N1N3N2Mjm_4"}, "image_id": 50, "id": 681}, {"iscrowd": 0, "category_id": 1, "bbox": [356.0, 84.0, 63.0, 60.0], "area": 1696, "segmentation": {"size": [512, 512], "counts": "cSb51m?2N3M2O2M2N3M3N1N3M2N3N1N3M3M2O2M2O20O10O10O010O010O10O10O010QOZAb0f>\\O]Ad0c>YO`Af0a>WOaAj0^>UOdAk0h>010O0100O0100O010O0100O0100O01N1O2M3M2N3N1N3M2N3N2M2NX\\^1"}, "image_id": 50, "id": 682}, {"iscrowd": 0, "category_id": 1, "bbox": [122.0, 87.0, 78.0, 74.0], "area": 2882, "segmentation": {"size": [512, 512], "counts": "VTm11n?2M2O2M2N3N2M2N3N2M2O2M2N3N2M201O0O2M3N1N3M3N1O2O01000O010O10O10O010O10O10O010G`NQB`1m=bNRB^1l=eNTB[1j=fNVBZ1h=iNXBT1h=nNWBR1i=POUBo0k=b0010O3M2O2M3N1N3M2O2M3M2O2M3N1N3M2O2M3N1N3M2O2M3M2O2M3N1N3M2Oilk4"}, "image_id": 50, "id": 683}, {"iscrowd": 0, "category_id": 1, "bbox": [228.0, 88.0, 37.0, 35.0], "area": 661, "segmentation": {"size": [512, 512], "counts": "WSb32m?2N2N1N3N2N2M2O2N2N2M30O10N2M3O010000000O010000O0VOn@e0X?N1N3N2N2M3N1O2N2M3N1Of\\k3"}, "image_id": 50, "id": 684}, {"iscrowd": 0, "category_id": 1, "bbox": [31.0, 101.0, 27.0, 27.0], "area": 386, "segmentation": {"size": [512, 512], "counts": "`c?3l?1N3N2N2M3N1N3N2O100O0100000O10O10ON3N2N2M2O2M3N1O2MalR7"}, "image_id": 50, "id": 685}, {"iscrowd": 0, "category_id": 1, "bbox": [279.0, 115.0, 16.0, 26.0], "area": 182, "segmentation": {"size": [512, 512], "counts": "Yd[42l?2N3GJd@8[?Ic@:Z?Id@8Z?7O0001O3K4N3M2N3M2N\\\\\\3"}, "image_id": 50, "id": 686}, {"iscrowd": 0, "category_id": 1, "bbox": [294.0, 121.0, 7.0, 7.0], "area": 28, "segmentation": {"size": [512, 512], "counts": "mSc42m?1N3N10O2M2NX\\Y3"}, "image_id": 50, "id": 687}, {"iscrowd": 0, "category_id": 1, "bbox": [23.0, 129.0, 37.0, 35.0], "area": 669, "segmentation": {"size": [512, 512], "counts": "`d;2m?2M2O2M3N1O2M3N2M2O2N2O1O0100000O1000O0100000O010N2N2N1N3N2M2O2N2M3N1N3N_kQ7"}, "image_id": 50, "id": 688}, {"iscrowd": 0, "category_id": 1, "bbox": [334.0, 139.0, 58.0, 47.0], "area": 1444, "segmentation": {"size": [512, 512], "counts": "nTW53k?2O2M2N3N2M2N3M2O2M2N3O1O010O010O10O10O010O010O10O10O010O01000O010O010O10OO2N101N1N3N20N1N3N1N3M2O2M3M2N3N1Db@4a?Ib@4g?M2OPkk1"}, "image_id": 50, "id": 689}, {"iscrowd": 0, "category_id": 1, "bbox": [280.0, 143.0, 27.0, 29.0], "area": 451, "segmentation": {"size": [512, 512], "counts": "PU\\43k?2N3M2N3M2N3M3M2O20O010O010O010O010M2N3M2O2M3M2N3M2NZ[V3"}, "image_id": 50, "id": 690}, {"iscrowd": 0, "category_id": 
1, "bbox": [208.0, 144.0, 62.0, 54.0], "area": 1845, "segmentation": {"size": [512, 512], "counts": "aUX33k?3M2O2M2N3M2O2O01000M2O2M2N3M2N3O001O1M2N3YAiNb>[1N3M30O001N1O01N3M2010O010O0O2M2N3M2N3M3N1N3M2N3M2O2M210O10O010O01_Of@:Z?Di@9`?O2M2N3M2Nljh3"}, "image_id": 50, "id": 691}, {"iscrowd": 0, "category_id": 1, "bbox": [311.0, 148.0, 19.0, 13.0], "area": 112, "segmentation": {"size": [512, 512], "counts": "hdk41n?1N3N101000O010O10M30O10O10O1O0O2M2NWkj2"}, "image_id": 50, "id": 692}, {"iscrowd": 0, "category_id": 1, "bbox": [62.0, 152.0, 42.0, 47.0], "area": 1065, "segmentation": {"size": [512, 512], "counts": "^Uo01n?2M3N2M2O2N2M2O2M3N1O2M3O1PAUOj>j0UAXOk>h0RA[Om>m00O10M3N1O2M3N2M21N1N3N2M2O2N2M3N1N3N2N1Di@MZ?0i@NY?0i@MZ?0i@NX?1i@NY?0Xk[6"}, "image_id": 50, "id": 693}, {"iscrowd": 0, "category_id": 1, "bbox": [164.0, 157.0, 39.0, 45.0], "area": 871, "segmentation": {"size": [512, 512], "counts": "nUb21m?2O2M2N3N1N3M3M2O2M2N3N2M2N3M2O2M2N3N2NO2N3M3N1N3M2O2M3M2O2M2N3M2O2M3M2O2M2NkZj4"}, "image_id": 50, "id": 694}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 160.0, 41.0, 74.0], "area": 1669, "segmentation": {"size": [512, 512], "counts": "W6l0S?2M3N1O2M3N2M2O2N2M2O1O0O010JaNlA_1T>510O10O01000O01000O2O2N2M3N1UObA2a>L`A3b>JaA4a>JaA3b>K`A3a>KbA3`>KaA3b>K`A3b>JaA4`>KbA2W?OPZ[7"}, "image_id": 50, "id": 695}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 162.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "R51mjo7"}, "image_id": 50, "id": 696}, {"iscrowd": 0, "category_id": 1, "bbox": [138.0, 166.0, 30.0, 30.0], "area": 498, "segmentation": {"size": [512, 512], "counts": "hUU22l?2O2M2FJh@9V?Ig@:V?Hi@9X?8O010N2O010O01000O010O01N2N1N3M2O2M3M2O2M2N3N_j[5"}, "image_id": 50, "id": 697}, {"iscrowd": 0, "category_id": 1, "bbox": [482.0, 170.0, 14.0, 19.0], "area": 129, "segmentation": {"size": [512, 512], "counts": "hUa73k?2N3N1N3M2N0010O3M3N1N3N1Nej7"}, "image_id": 50, "id": 698}, {"iscrowd": 0, "category_id": 1, "bbox": [41.0, 171.0, 9.0, 11.0], "area": 97, "segmentation": {"size": [512, 512], "counts": "[ed0;e?00000000000002NcjV7"}, "image_id": 50, "id": 699}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 171.0, 13.0, 21.0], "area": 154, "segmentation": {"size": [512, 512], "counts": "lUR61l?4K4M3M4L3O1010L3M3M4K4MfZg1"}, "image_id": 50, "id": 700}, {"iscrowd": 0, "category_id": 1, "bbox": [118.0, 172.0, 14.0, 14.0], "area": 106, "segmentation": {"size": [512, 512], "counts": "dUk11m?3N2M2N3N110O10O0N3N2M2N3N`jm5"}, "image_id": 50, "id": 701}, {"iscrowd": 0, "category_id": 1, "bbox": [497.0, 174.0, 15.0, 34.0], "area": 283, "segmentation": {"size": [512, 512], "counts": "Vfh72l?3M2N3N2M2N3M2N3M2O2M2N3M210`J"}, "image_id": 50, "id": 702}, {"iscrowd": 0, "category_id": 1, "bbox": [102.0, 182.0, 36.0, 36.0], "area": 673, "segmentation": {"size": [512, 512], "counts": "ZVc12l?3M3BJo@7P?Km@8P?Jn@8R?Il@9S?;0O010O2O0O0100O0100O0100O0100O010O001M2O2M3M2O2M2N3M3N1Njij5"}, "image_id": 50, "id": 703}, {"iscrowd": 0, "category_id": 1, "bbox": [309.0, 183.0, 61.0, 47.0], "area": 1506, "segmentation": {"size": [512, 512], "counts": "]fj41n?2M2N3M2N3N1N3M2N3M3N1N3N1010O01000O010O010O010O01000O010O010O010O01000O010O01M2O2M2O2O01N1N3M3M2N3N1N3M2N3N2M2N3M2N3N1NjiV2"}, "image_id": 50, "id": 704}, {"iscrowd": 0, "category_id": 1, "bbox": [252.0, 186.0, 32.0, 31.0], "area": 588, "segmentation": {"size": [512, 512], "counts": "XVn33k?2N3M5K2N3N1N3O10O010O01N1N30O010O010O01000M2N3M2N3M2O2M2N3M2Noia3"}, "image_id": 50, "id": 705}, {"iscrowd": 0, "category_id": 1, "bbox": [39.0, 197.0, 49.0, 
67.0], "area": 1668, "segmentation": {"size": [512, 512], "counts": "jgc02l?2O2M2N3M2N3UOARBa0k=BRB`0l=BRBa0k=BRB`0m=BPBa0m=ARB`0l=CQB`0l=BRB`0l=CQB`0m=AQBb0n=h0100O010O010M2N3M2N3N2M2N3M2N3N1N3M3M2N3N1N3M2N3M2O2M3M2N3M2O2M[ic6"}, "image_id": 50, "id": 706}, {"iscrowd": 0, "category_id": 1, "bbox": [151.0, 209.0, 6.0, 5.0], "area": 23, "segmentation": {"size": [512, 512], "counts": "cf[23k?200000O2M`Ya5"}, "image_id": 50, "id": 707}, {"iscrowd": 0, "category_id": 1, "bbox": [146.0, 214.0, 22.0, 26.0], "area": 294, "segmentation": {"size": [512, 512], "counts": "XWY21m?3M3M2O2M2N3M2N3N1N3O1OO2M2[Oh@a0]?M3M2N3N1N3M2NTi[5"}, "image_id": 50, "id": 708}, {"iscrowd": 0, "category_id": 1, "bbox": [471.0, 217.0, 41.0, 46.0], "area": 1105, "segmentation": {"size": [512, 512], "counts": "dg[72l?2N3M2N3N1N3M2N300O0100O010O010O01N2M2N3M1O002O1N3M3N110O010O010O010O10O10O010O0PI"}, "image_id": 50, "id": 709}, {"iscrowd": 0, "category_id": 1, "bbox": [285.0, 220.0, 57.0, 54.0], "area": 1541, "segmentation": {"size": [512, 512], "counts": "ig^41m?3M2N3N1N3M2N3M2O2M3M2N3M2N3N1N300O010O010O010O10O10O010OlNZAn0l>100O010O0100O0100O010O010O01N3M2M2N3M2N3M2O2M2N3M2N3NXhd2"}, "image_id": 50, "id": 710}, {"iscrowd": 0, "category_id": 1, "bbox": [200.0, 223.0, 31.0, 31.0], "area": 555, "segmentation": {"size": [512, 512], "counts": "^WT32l?2X@Ma?6\\@Mb?9N3M2N30O010O01M3M2N30O010O010O0O2M2O2M2N3M3M2N3M2O2M2NjX\\4"}, "image_id": 50, "id": 711}, {"iscrowd": 0, "category_id": 1, "bbox": [85.0, 225.0, 50.0, 68.0], "area": 1858, "segmentation": {"size": [512, 512], "counts": "ihZ11_?0n@3o>0n@2P?0n@3o>0n@2P?1m@2P?0o@1o>b0N1N3FlNdAV1Z>mNcAV1Z>lNdAW1Y>9N3M2N3N1N3N110O01O0N3M2N3M2O2M2N3M2N3M3M2O2M2N3M2N3M2N3N1N3M2N3M3M2N3N1NaXl5"}, "image_id": 50, "id": 712}, {"iscrowd": 0, "category_id": 1, "bbox": [231.0, 226.0, 8.0, 7.0], "area": 40, "segmentation": {"size": [512, 512], "counts": "Tgc33k?3O01O010O000OmXX4"}, "image_id": 50, "id": 713}, {"iscrowd": 0, "category_id": 1, "bbox": [183.0, 232.0, 9.0, 7.0], "area": 29, "segmentation": {"size": [512, 512], "counts": "[gk21n?1N3000O010O1N1Oeho4"}, "image_id": 50, "id": 714}, {"iscrowd": 0, "category_id": 1, "bbox": [238.0, 241.0, 33.0, 43.0], "area": 609, "segmentation": {"size": [512, 512], "counts": "dXg31m?3M3N1N3M2N3N1N3M2N3N2M2N1O010O00000001O2N3N1N3M2N3M2N3M2O2M3M2N[Xh3"}, "image_id": 50, "id": 715}, {"iscrowd": 0, "category_id": 1, "bbox": [135.0, 244.0, 47.0, 62.0], "area": 1526, "segmentation": {"size": [512, 512], "counts": "UiS21m?3M3N1SOIoA:n=IoA9o=IoA:o=HoA9o=IoA:n=IoA:n=IoA9P>HoA:n=IoA9P>HnA;R>EkA=V>BiA`0V>f00O010O010O1O0O2M2N3M2O2M3M2N3N1N3M2N3N2M2N3M2N3N1N3M3M2O2M2NkgT5"}, "image_id": 50, "id": 716}, {"iscrowd": 0, "category_id": 1, "bbox": [444.0, 264.0, 61.0, 48.0], "area": 1687, "segmentation": {"size": [512, 512], "counts": "QYn61m?2N3M2O2M2N3M2N3N2M2N3M2O2M201000O010O010O010O01000O010O01M2O2M3M2O20O010O010O010N1N3M2N101N3M2N3N2M2N3M2O2M2N3M2N3N1N3M^W3"}, "image_id": 50, "id": 717}, {"iscrowd": 0, "category_id": 1, "bbox": [288.0, 272.0, 33.0, 37.0], "area": 628, "segmentation": {"size": [512, 512], "counts": "[Y`41n?2M2N3M3M2O2M2N3M2N3M2O2M3M2N3O00O2M2O2M2N3M2N3M3N1N3M2N3M2N3N1NZWo2"}, "image_id": 50, "id": 718}, {"iscrowd": 0, "category_id": 1, "bbox": [178.0, 274.0, 47.0, 56.0], "area": 1430, "segmentation": {"size": [512, 512], "counts": "kYi22l?2N3M2FHj@;S?Gl@:R?Ik@:R?Hl@;R?:N3M2N3M2O2M2N3M3M2O2O010O0100O010M2N3M2N3N1N3M3M2N3N1N3M2N3M2O2M3M2N3M2O2M2NQW_4"}, "image_id": 50, "id": 719}, {"iscrowd": 0, "category_id": 1, "bbox": [282.0, 277.0, 10.0, 10.0], 
"area": 63, "segmentation": {"size": [512, 512], "counts": "kX]42l?2N3M20010O01N1N3MZg]3"}, "image_id": 50, "id": 720}, {"iscrowd": 0, "category_id": 1, "bbox": [307.0, 288.0, 35.0, 30.0], "area": 570, "segmentation": {"size": [512, 512], "counts": "dii41m?3O1O0O2M2010MILf@2Z?0f@OY?4d@OY?3e@OZ?=O010O010O001N20O010O010O1N1100O01Gh@IW?5k@KV?2m@MS?2n@MT?0o@MS?1Vgd2"}, "image_id": 50, "id": 721}, {"iscrowd": 0, "category_id": 1, "bbox": [225.0, 293.0, 46.0, 57.0], "area": 1455, "segmentation": {"size": [512, 512], "counts": "_j`32l?2N3N1EIk@:R?Ik@9S?Ik@:R?Il@8m>AVAk0g>WOWAl0f>8N3M2O2M2N3O01000O010O010O010ON3M3M2O2M2N3M2N3N1N3M3M2N3N1N3M2N3M2O2M3M\\Vh3"}, "image_id": 50, "id": 722}, {"iscrowd": 0, "category_id": 1, "bbox": [412.0, 302.0, 62.0, 54.0], "area": 1951, "segmentation": {"size": [512, 512], "counts": "UZ^62l?3M2O2M2N3N2M2N6J3O0O2O001N101N2O0O2O0O2O1O010O001M2O2O10O010O010O10O10O010O01000O010O010O1O0N3M2N3N1N3M3N1N3\\Oi@;Y?Di@9Z?Dh@;`?M2N3N2Mjeb0"}, "image_id": 50, "id": 723}, {"iscrowd": 0, "category_id": 1, "bbox": [271.0, 303.0, 8.0, 8.0], "area": 34, "segmentation": {"size": [512, 512], "counts": "diW41m?2N3N110OO2M2NaVd3"}, "image_id": 50, "id": 724}, {"iscrowd": 0, "category_id": 1, "bbox": [487.0, 307.0, 17.0, 16.0], "area": 158, "segmentation": {"size": [512, 512], "counts": "kic72l?2N3N2M2O2O010O10O010O0O2M3M2N3MYf3"}, "image_id": 50, "id": 725}, {"iscrowd": 0, "category_id": 1, "bbox": [270.0, 309.0, 46.0, 61.0], "area": 1474, "segmentation": {"size": [512, 512], "counts": "T[W42l?3M2O2SOImA9Q>ImA:P>ImA9Q>ImA:P>InA8Q>ImA:P>ImA:P>HnA:Q>HlA;T>EjADhA?X>d0010O01000O010N1O2M2N3M2N3N2M2N3M2N3N1N3M2N3N2M2N3M2N3N1N3M2NkeQ3"}, "image_id": 50, "id": 726}, {"iscrowd": 0, "category_id": 1, "bbox": [491.0, 328.0, 21.0, 31.0], "area": 413, "segmentation": {"size": [512, 512], "counts": "kje71m?3M3M2O2M2N3M2N3M2O2N101000O010O010O010OcE"}, "image_id": 50, "id": 727}, {"iscrowd": 0, "category_id": 1, "bbox": [390.0, 332.0, 31.0, 29.0], "area": 495, "segmentation": {"size": [512, 512], "counts": "lZS62l?2O2M3M2N3M2N3N1N30O010O0100O0100O010O010O0O2M2N3M2O2M3M2N3MZU]1"}, "image_id": 50, "id": 728}, {"iscrowd": 0, "category_id": 1, "bbox": [433.0, 351.0, 15.0, 15.0], "area": 125, "segmentation": {"size": [512, 512], "counts": "Vkh62l?3M2N3N110O010O10O010M2N3M2Nndo0"}, "image_id": 50, "id": 729}, {"iscrowd": 0, "category_id": 1, "bbox": [489.0, 351.0, 15.0, 13.0], "area": 103, "segmentation": {"size": [512, 512], "counts": "Ukd71m?3M2N3O010O010O10O10OO2N1N3Mmd3"}, "image_id": 50, "id": 730}, {"iscrowd": 0, "category_id": 1, "bbox": [461.0, 353.0, 15.0, 15.0], "area": 130, "segmentation": {"size": [512, 512], "counts": "YkV71m?3N1N3M3N1010O010O10ON3N1N3Mlda0"}, "image_id": 50, "id": 731}, {"iscrowd": 0, "category_id": 1, "bbox": [374.0, 365.0, 55.0, 67.0], "area": 1979, "segmentation": {"size": [512, 512], "counts": "o\\k52d?Od@2Z?1c@2[?Oc@3[?0c@2Z?VOYAi0e>YO[Ah0b>[O]Ae0a>]O^Ae0`>>N3O10O010O10O1N1O2M2N2N012M2N3N1010_OUBiNn=U1SBiNo=U1TBiNn=T1TBjNn=T1UBiNn=U1SBjNn=T1a0M3N1N3M2N3N2M2N3N1N3M2N3N2M2NXTY1"}, "image_id": 50, "id": 732}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 368.0, 31.0, 60.0], "area": 1096, "segmentation": {"size": [512, 512], "counts": "h;`1^>3M3M2N3O0010M2N3M2N3M3N1N3M2N3M2N3N1N3M2N3M2N3M3N1N3M2N3M2NUT`7"}, "image_id": 50, "id": 733}, {"iscrowd": 0, "category_id": 1, "bbox": [424.0, 380.0, 28.0, 34.0], "area": 468, "segmentation": {"size": [512, 512], "counts": "f\\d62m?1N3M2N3M2N3M2N3N1N3M3M2N2N03M2O2M2N3M2N3M2N3M2O2M2N3Mocm0"}, "image_id": 50, "id": 734}, {"iscrowd": 0, 
"category_id": 1, "bbox": [500.0, 386.0, 12.0, 25.0], "area": 178, "segmentation": {"size": [512, 512], "counts": "b\\j73l?1N3M2N3M3N1N3M2N3O010lC"}, "image_id": 50, "id": 735}, {"iscrowd": 0, "category_id": 1, "bbox": [436.0, 394.0, 54.0, 56.0], "area": 1522, "segmentation": {"size": [512, 512], "counts": "`]j62l?2N3N2M2N3M2O2M2O20ON3M3N1N3M2N3O010O10O01O0N3M2O2M2N3M3N0O0101N3M2N3N2M2N3M2N3N1N3M2N3N2M2N3M2N3N1N3M3M2O[c:"}, "image_id": 50, "id": 736}, {"iscrowd": 0, "category_id": 1, "bbox": [33.0, 397.0, 56.0, 54.0], "area": 1664, "segmentation": {"size": [512, 512], "counts": "^m`02l?3M2N3M2O2M2N30O01N2M2O2M2N3M201O010SAPOh>P1VAROk>R10O01N1N3M100O2N3M2N3O00100OO2M3M2N3N1N3M2N3M2O2M3M2N3O0010OO2N1N3Ec@O_?Od@N_?O`Sc6"}, "image_id": 50, "id": 737}, {"iscrowd": 0, "category_id": 1, "bbox": [83.0, 417.0, 52.0, 58.0], "area": 1528, "segmentation": {"size": [512, 512], "counts": "YnY11m?3N1N5K2N3N1N3M2N3N2M2O20O01O0N3M3N1N3M2N3N1N3M2N3N2M11N2N3N2M2N3M2O2M2N3M3M2O2M2N3M2O2M3M2N3N1N3M2N3NcRl5"}, "image_id": 50, "id": 738}, {"iscrowd": 0, "category_id": 1, "bbox": [9.0, 434.0, 29.0, 31.0], "area": 476, "segmentation": {"size": [512, 512], "counts": "Wn41m?2O2M3M2N3M2O2M2N3M2O2M30O010O010N1N3M3N1N3M2N3M2O2M2N3MWb\\7"}, "image_id": 50, "id": 739}, {"iscrowd": 0, "category_id": 1, "bbox": [121.0, 442.0, 59.0, 60.0], "area": 1839, "segmentation": {"size": [512, 512], "counts": "Sol12m?1N3M2N3M2N3M2N3M2O20O01O0N30O010OO2M3M2N3M2N3M2N3M2N3N1N3M201O010OO2M2N3M2N3M2N3M2N011N3M2N3M2N3M2N3M2N3M2N3M2N3N1NnaU5"}, "image_id": 50, "id": 740}, {"iscrowd": 0, "category_id": 1, "bbox": [470.0, 449.0, 42.0, 47.0], "area": 1115, "segmentation": {"size": [512, 512], "counts": "m^[72l?3M2N3N2M2N3M2O2M2N30O0100O010O010O01000O010O010O01M1O02N2O2M3M2N3M2O2M2N3O10O010OmA"}, "image_id": 50, "id": 741}, {"iscrowd": 0, "category_id": 1, "bbox": [179.0, 463.0, 46.0, 49.0], "area": 1270, "segmentation": {"size": [512, 512], "counts": "koi22l?3M2O2M2N3M2N2N2O1N2N2IYOSAi0k>7N2O1N2N2N2N2N2N200001O001O0O2M2N3M2N3N2M2N3M2N3M2O2M2N3M2N3M2N3NVQ_4"}, "image_id": 50, "id": 742}, {"iscrowd": 0, "category_id": 1, "bbox": [45.0, 466.0, 31.0, 45.0], "area": 749, "segmentation": {"size": [512, 512], "counts": "bof01m?2N3N1N3M3M2N3N1N3M2N3N2M2N3M2O2M2N300O0100XO[A3e>L]A1f>L]A1e>M]A2e>K^A2d>M]A1f>L]A1f>L\\A3e>K^A2koi6"}, "image_id": 50, "id": 743}, {"iscrowd": 0, "category_id": 1, "bbox": [227.0, 485.0, 46.0, 27.0], "area": 765, "segmentation": {"size": [512, 512], "counts": "noa32l?2N2O1N2N2N2N2O1O11O1O001O00O1O1N2N2N2O1N2N200001O1O001O001O1O001O001O001O1O0N3M2N3N2M2N3N_Pg3"}, "image_id": 50, "id": 744}, {"iscrowd": 0, "category_id": 1, "bbox": [29.0, 488.0, 15.0, 14.0], "area": 122, "segmentation": {"size": [512, 512], "counts": "`o>1m?2N3M2N3O010O010O010O01M2M3Nf`Y7"}, "image_id": 50, "id": 745}, {"iscrowd": 0, "category_id": 1, "bbox": [96.0, 499.0, 25.0, 13.0], "area": 202, "segmentation": {"size": [512, 512], "counts": "n_`12l?2O1N2N2N2N200001O1O001O001O001O001O001O1O001O0OUPS6"}, "image_id": 50, "id": 746}, {"iscrowd": 0, "category_id": 1, "bbox": [154.0, 504.0, 18.0, 8.0], "area": 77, "segmentation": {"size": [512, 512], "counts": "n_]22l?2O1N2O11O001O001O1O001O001O001O00Q`Y5"}, "image_id": 50, "id": 747}, {"iscrowd": 0, "category_id": 1, "bbox": [2.0, 507.0, 8.0, 5.0], "area": 28, "segmentation": {"size": [512, 512], "counts": "n_12l?2O1001O001O0NU`j7"}, "image_id": 50, "id": 748}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 511.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "o?1P`o7"}, "image_id": 50, 
"id": 749}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 2.0, 2.0], "area": 3, "segmentation": {"size": [512, 512], "counts": "02n?OQPo7"}, "image_id": 51, "id": 750}, {"iscrowd": 0, "category_id": 1, "bbox": [204.0, 0.0, 66.0, 42.0], "area": 1377, "segmentation": {"size": [512, 512], "counts": "PPV31o?2N1O1O2N1O1O2N1O1O2N1O00O100O1O1O100O1001O2N1O1O2N1O2N1O1O2N1O1O2N1O1O2N1O1O2N1O2N1O1OO100O1O102M2N3M2O1N3M2N2O2M2N2N3N1N2N3M2O2MYoh3"}, "image_id": 51, "id": 751}, {"iscrowd": 0, "category_id": 1, "bbox": [315.0, 0.0, 63.0, 57.0], "area": 1856, "segmentation": {"size": [512, 512], "counts": "V`m41o?1N3M2j@K_>6_AL_>7^AL_>6`AK^>8_AJ1Io=?nAK0HQ>>nAK0IP>>mAL0HR>=lAM0GT>=jAN1DV>^1100O1O100L4O100O1O1O100O1O1O100I\\AQOe>o0]AnNd>Q16O1O100O1O100O1O1O100O1O100O1O1O100O1O1O100O1O101N3M2N3N1N2N3NfoR2"}, "image_id": 51, "id": 752}, {"iscrowd": 0, "category_id": 1, "bbox": [406.0, 0.0, 32.0, 15.0], "area": 326, "segmentation": {"size": [512, 512], "counts": "VP[61j?5O101O0000001O0000001O0000002N0000001O000000001O0000001OL4L4KUPU1"}, "image_id": 51, "id": 753}, {"iscrowd": 0, "category_id": 1, "bbox": [496.0, 0.0, 16.0, 27.0], "area": 421, "segmentation": {"size": [512, 512], "counts": "YPh7a0V?901O00000000000000000000000000"}, "image_id": 51, "id": 754}, {"iscrowd": 0, "category_id": 1, "bbox": [451.0, 14.0, 9.0, 14.0], "area": 126, "segmentation": {"size": [512, 512], "counts": "^`Q7>b?000000000000000boi0"}, "image_id": 51, "id": 755}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 16.0, 25.0, 17.0], "area": 398, "segmentation": {"size": [512, 512], "counts": "oPW72_??00000000000000000000001O0000000000000000000000__<"}, "image_id": 51, "id": 756}, {"iscrowd": 0, "category_id": 1, "bbox": [431.0, 20.0, 19.0, 12.0], "area": 189, "segmentation": {"size": [512, 512], "counts": "m`g63m?000K50000L4000000000000000000000000\\on0"}, "image_id": 51, "id": 757}, {"iscrowd": 0, "category_id": 1, "bbox": [404.0, 29.0, 21.0, 22.0], "area": 232, "segmentation": {"size": [512, 512], "counts": "TQZ62m?2O2M2N2O2M2N2N3N1N21M2N2O2M2N3M2O1N3M2Oe^[1"}, "image_id": 51, "id": 758}, {"iscrowd": 0, "category_id": 1, "bbox": [461.0, 35.0, 25.0, 19.0], "area": 435, "segmentation": {"size": [512, 512], "counts": "XaV7=^?50000001O00000000000000000001O00000000000000000ln<"}, "image_id": 51, "id": 759}, {"iscrowd": 0, "category_id": 1, "bbox": [254.0, 38.0, 49.0, 51.0], "area": 1375, "segmentation": {"size": [512, 512], "counts": "nQo32m?2O1HM`@5_?L_@7^?7N2N3M2O1N1O0001O01O3M2O1N2N3M2O1N3M2N2O20M20010N1N1O01OK^AmNc>S1^AkNb>U140001XOXA7k>FWA9j>FXA7j>GXA7k>FXA7j>HWA7j>GXA7Z?N3M2O1Nl]X3"}, "image_id": 51, "id": 760}, {"iscrowd": 0, "category_id": 1, "bbox": [360.0, 49.0, 15.0, 14.0], "area": 106, "segmentation": {"size": [512, 512], "counts": "hQd51o?2M2N2N2N2O0O000002O2M2N2N2NZ^T2"}, "image_id": 51, "id": 761}, {"iscrowd": 0, "category_id": 1, "bbox": [497.0, 49.0, 15.0, 32.0], "area": 465, "segmentation": {"size": [512, 512], "counts": "iah7g0Q?8000000000001O0000000000000_N"}, "image_id": 51, "id": 762}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 61.0, 161.0, 158.0], "area": 13091, "segmentation": {"size": [512, 512], "counts": "Q37i?2M2N2N3N1N2N3M2O1N3M2N2O2M2N2N3N1N2N3GbNkA`1S>bNlA_1R>dNkA_1S>8N2N3M2O1N3M2N2O2M2N2N3N1N2N3M2O2M2VCQMa0YANi>0YANj>OYANi>0YAOh>0YANj>OYANi>0YANj>OYAOh>0X\\_5"}, "image_id": 51, "id": 763}, {"iscrowd": 0, "category_id": 1, "bbox": [365.0, 69.0, 30.0, 27.0], "area": 382, "segmentation": {"size": [512, 512], "counts": "ebf51n?3M2O1N3M2N2O2M1O1O01O0001O01O01O0001O01O03M2N2O2M2N2N3N1Na]j1"}, 
"image_id": 51, "id": 764}, {"iscrowd": 0, "category_id": 1, "bbox": [127.0, 72.0, 18.0, 21.0], "area": 195, "segmentation": {"size": [512, 512], "counts": "]bo12m?3M2O2M2N2O2M2O20O001M2N3M2O1N3M2O[]g5"}, "image_id": 51, "id": 765}, {"iscrowd": 0, "category_id": 1, "bbox": [242.0, 74.0, 30.0, 29.0], "area": 443, "segmentation": {"size": [512, 512], "counts": "fRi31n?2O1N3M2N2O2M2N2N3N1N3M2N10O0000102M2N2N3N1N2N3M2O1N3M2N2OVmg3"}, "image_id": 51, "id": 766}, {"iscrowd": 0, "category_id": 1, "bbox": [282.0, 77.0, 61.0, 64.0], "area": 1848, "segmentation": {"size": [512, 512], "counts": "[S]41n?2N3M2EJj@7T?Kj@8S?Jk@8S?Kj@7U?;N00010O0000010O002N3N1N2N3M2O2M2N2N3N11O01O01O01O010O00010ON3N1N2N3M2O1N3M2O1N3M2N2O2M2N3M2O1N3M2N2O2M2NW\\d2"}, "image_id": 51, "id": 767}, {"iscrowd": 0, "category_id": 1, "bbox": [333.0, 83.0, 14.0, 15.0], "area": 99, "segmentation": {"size": [512, 512], "counts": "fbV51o?1N3M2O2M20001O00N3N1N2N3MT]b2"}, "image_id": 51, "id": 768}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 97.0, 28.0, 34.0], "area": 551, "segmentation": {"size": [512, 512], "counts": "WSW71o?3L3N3M3L3N3M3L3N3L4MO10O10O10O0101O3L4M2N3L4M3M2M4M3LVl:"}, "image_id": 51, "id": 769}, {"iscrowd": 0, "category_id": 1, "bbox": [372.0, 103.0, 26.0, 25.0], "area": 338, "segmentation": {"size": [512, 512], "counts": "bSj53m?1N2N3N1N2N3M2O1N2N01O01O01O00011N2N3M2O1N3M2N2O1N_lh1"}, "image_id": 51, "id": 770}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 119.0, 37.0, 70.0], "area": 1348, "segmentation": {"size": [512, 512], "counts": "g3j1W>10O010O00010O00001M2O1N3M2N3N1N2N3M2O1N3M2N2O2M2N2O2M2N3M2O1N3M2N2O2M2NiZ]7"}, "image_id": 51, "id": 771}, {"iscrowd": 0, "category_id": 1, "bbox": [501.0, 120.0, 11.0, 21.0], "area": 122, "segmentation": {"size": [512, 512], "counts": "Pdj71n?2O2M2N2N2N2N3N1N2N2WL"}, "image_id": 51, "id": 772}, {"iscrowd": 0, "category_id": 1, "bbox": [336.0, 130.0, 53.0, 49.0], "area": 1333, "segmentation": {"size": [512, 512], "counts": "kTX51n?3N1N3M2N2O2M2N3N1N3M2O1N3M2N3N1N1O10O0000010O00010O00010O0000010O00010O2N2O2M2N3M2O1N3M2O2M2N2N3N1N3M2OX[m1"}, "image_id": 51, "id": 773}, {"iscrowd": 0, "category_id": 1, "bbox": [395.0, 132.0, 15.0, 13.0], "area": 101, "segmentation": {"size": [512, 512], "counts": "[dU61o?2M2N3M100O00000010O1O2N3N1Nikb1"}, "image_id": 51, "id": 774}, {"iscrowd": 0, "category_id": 1, "bbox": [277.0, 143.0, 13.0, 11.0], "area": 77, "segmentation": {"size": [512, 512], "counts": "ddZ41o?1N2N3M2N10O010O2N2N2N3N\\k^3"}, "image_id": 51, "id": 775}, {"iscrowd": 0, "category_id": 1, "bbox": [400.0, 153.0, 25.0, 24.0], "area": 307, "segmentation": {"size": [512, 512], "counts": "UUX62m?2O1N3M2N2O2M2N2N01O01O0001O01O01O3M2O1N3M2O2M2NnZ[1"}, "image_id": 51, "id": 776}, {"iscrowd": 0, "category_id": 1, "bbox": [508.0, 176.0, 4.0, 9.0], "area": 22, "segmentation": {"size": [512, 512], "counts": "cUn72m?2N3M2_J"}, "image_id": 51, "id": 777}, {"iscrowd": 0, "category_id": 1, "bbox": [365.0, 182.0, 50.0, 57.0], "area": 1453, "segmentation": {"size": [512, 512], "counts": "off52m?2N3M2O1YOHcA;Z>GdA;[>GbA;\\>GcA;Z>HcA:\\>GbA<[>FcA:]>IaA7_>J_A6a>d000010O00010O000010O0001O01O01O102M2N3M2O1N3M2O1N3M2N3N1N2N3M2O2M2N2O2M`Y`1"}, "image_id": 51, "id": 778}, {"iscrowd": 0, "category_id": 1, "bbox": [441.0, 189.0, 22.0, 20.0], "area": 223, "segmentation": {"size": [512, 512], "counts": "Xfl62n?2M2N2N3N1N1O1O0001O01O0001O011N2N2N2O2M2NmYh0"}, "image_id": 51, "id": 779}, {"iscrowd": 0, "category_id": 1, "bbox": [402.0, 214.0, 55.0, 55.0], "area": 1536, "segmentation": {"size": [512, 
512], "counts": "fWY62n?2M2N2O2M2HFg@=V?Fg@k0VASOm>j06N3N1N2N3N1N2N3M2O2M2N2NcXk0"}, "image_id": 51, "id": 780}, {"iscrowd": 0, "category_id": 1, "bbox": [467.0, 219.0, 19.0, 19.0], "area": 197, "segmentation": {"size": [512, 512], "counts": "RgY73l?2O1N3M2N3N1N2N10O002O2M2N2O2M2N3M2Ojh<"}, "image_id": 51, "id": 781}, {"iscrowd": 0, "category_id": 1, "bbox": [493.0, 236.0, 19.0, 36.0], "area": 393, "segmentation": {"size": [512, 512], "counts": "igf73l?2O1N3M2N2O2M2N3M2O1N3M2O1N3M2N01O00eH"}, "image_id": 51, "id": 782}, {"iscrowd": 0, "category_id": 1, "bbox": [80.0, 238.0, 18.0, 21.0], "area": 203, "segmentation": {"size": [512, 512], "counts": "lWX11n?2N2GMb@6[?Mc@4\\?Mb@5\\?8O0010O2N3M2O1N3M2N2O2MXh^6"}, "image_id": 51, "id": 783}, {"iscrowd": 0, "category_id": 1, "bbox": [464.0, 242.0, 24.0, 24.0], "area": 311, "segmentation": {"size": [512, 512], "counts": "jWX73m?1N2N3M2O1N3M2N2O2M2N10O2O1N3M2N2O2M2N2N3N1N2NQh;"}, "image_id": 51, "id": 784}, {"iscrowd": 0, "category_id": 1, "bbox": [380.0, 264.0, 35.0, 33.0], "area": 587, "segmentation": {"size": [512, 512], "counts": "hXn51n?3N1N2N2N3N1N2N3M2O1N3M2N01O0001O01O0001O01O1O2O1N3M2N2N3N1N2N3M2O1N[W`1"}, "image_id": 51, "id": 785}, {"iscrowd": 0, "category_id": 1, "bbox": [425.0, 269.0, 48.0, 44.0], "area": 1110, "segmentation": {"size": [512, 512], "counts": "Rid61n?2O1N3M2O2M2N2N3N1N2N3N1N3M2N2O2M1O00012M1O01O0001O01O01O01O1O3N1N2L5M2O2M2N2N3N1N2N3N1N3M2N2OmVc0"}, "image_id": 51, "id": 786}, {"iscrowd": 0, "category_id": 1, "bbox": [31.0, 280.0, 242.0, 232.0], "area": 24351, "segmentation": {"size": [512, 512], "counts": "Xk?1o?1N2N3M2O2M2N2O2M2N2N3N1N2N3M2O0O1O0001O01O0001O01O01OISO\\Am0d>UOZAl0e>71O01O0001O01O0001O01O01O0001O011N2N2N3N1N3M2N2O2M2N2M4M2O1N3M2O1N3M2N3N1N2N3N1N2N3M2O1bDPMi9S3TFoLj9S3TFoLk9R3TFPMi9S3TFoLj9S3TFoLk9S3RFPMk9R3TFoLj9S3TFoLj9T3SFoLk9R3TFoLj9S3TFPMi9S3TFoLk9R3VFlLi9V3WFhLi9[3TFeLl9]3RFcLo9_3nEbLQ:`3nE_LR:c3lE]LU:e3hE\\LW:f3hEYLX:i3fEWLZ:l3cETL^:m3`ETL_:n3`EQL`:h411ON2O2M2N2N3N1N3M2OSEUKO0`:i4aEYKM0c:d4`EfK_:Z4`EgK`:Y4^EiKc:k4O00010O00N3N1N3M2N2O2M2N2NbEWKi9i4TFZKk9f4SF\\Km9d4QF^KP:b4nE`KQ:`4mEbKS:^4kEdKV:\\4gEgKX:Y4gEhKY:Y4dEiK]:V4aEmK^:k41O000O2N1O1O2N1O1O2N1O1O2N1O2N1O1O2N1O1O2N1O1O2N1O1O2N1O2N1O1O2N1O1O00001O1O2N1O1O2N1O1O2VLkCe3Z2QBLQ>2RBLo=2SBLP>1RBMP>2RBLo=2SBLP>1RBMP>1QBNR>0mA1T>MmAM_OLf>5mAM_OLg>4jA1\\>NcA2_>LbA3U?0010O1O3NhPg3"}, "image_id": 51, "id": 787}, {"iscrowd": 0, "category_id": 1, "bbox": [399.0, 291.0, 23.0, 23.0], "area": 292, "segmentation": {"size": [512, 512], "counts": "\\iW62m?3N1N2N3M2O1N3M2N2O0O000101N3M2N2O2M2N2N3N1Nbf\\1"}, "image_id": 51, "id": 788}, {"iscrowd": 0, "category_id": 1, "bbox": [452.0, 306.0, 60.0, 66.0], "area": 2070, "segmentation": {"size": [512, 512], "counts": "TZR71n?2N2O2M2N2N3N1N2N3M2O1N3N11O01N0O1O2N2O2M2N2N3N1N3M2N2O2M2N2N30O01O01O01O01N1O1N5K3N1N2N3M2O1N3M1O10O000011N2N3M2O1N3M`E"}, "image_id": 51, "id": 789}, {"iscrowd": 0, "category_id": 1, "bbox": [508.0, 312.0, 4.0, 10.0], "area": 26, "segmentation": {"size": [512, 512], "counts": "kYn73l?2N3M2WF"}, "image_id": 51, "id": 790}, {"iscrowd": 0, "category_id": 1, "bbox": [50.0, 0.0, 27.0, 15.0], "area": 225, "segmentation": {"size": [512, 512], "counts": "PPi01o?1O1O1O1O2N1O1O1O1O1O1O1O1OO1O1O100O1O1O1O1O1O1O2N2No_i6"}, "image_id": 52, "id": 791}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 3.0, 54.0, 56.0], "area": 1569, "segmentation": {"size": [512, 512], "counts": "c0WO`Ak0^>WO_Al0^>WO`Ak0^>VOaAk0^>=M3N1N3N2N0O12M2O2N2M2O1N101N2O000O010O2O2N1N3N2M2O2N2M3N1N3N2M2O2N2M3N101M3N1N3N^\\S6"}, "image_id": 
52, "id": 796}, {"iscrowd": 0, "category_id": 1, "bbox": [24.0, 111.0, 33.0, 33.0], "area": 600, "segmentation": {"size": [512, 512], "counts": "mS<2m?3M2O1N2N2N2N2N2N2N3M2N2N2O1N2N11N2O1N2N2N2N3M2N2N20000O1N2N2N3IX@1hkS7"}, "image_id": 52, "id": 797}, {"iscrowd": 0, "category_id": 1, "bbox": [133.0, 114.0, 29.0, 30.0], "area": 462, "segmentation": {"size": [512, 512], "counts": "ocR22m?3M2N2N2N2N2O1N3M2N2N2N2N2N11N2N2N2N2N2N3N1N2N2N2N2N2N2NPl^5"}, "image_id": 52, "id": 798}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 126.0, 4.0, 8.0], "area": 20, "segmentation": {"size": [512, 512], "counts": "n38i?N2N2NPlm7"}, "image_id": 52, "id": 799}, {"iscrowd": 0, "category_id": 1, "bbox": [2.0, 131.0, 29.0, 29.0], "area": 434, "segmentation": {"size": [512, 512], "counts": "`T11o?1N3M2N2N2N2N2N2O1N3M2N2N2N1O02N2O2M2N2N2N2N2N2N2O2M2N2N_[`7"}, "image_id": 52, "id": 800}, {"iscrowd": 0, "category_id": 1, "bbox": [99.0, 157.0, 52.0, 54.0], "area": 1481, "segmentation": {"size": [512, 512], "counts": "nea11n?3M2N2O1N2N2N2GBm@`0Q?Bm@`0Q?Bm@`0Q?:M2N2O1N2N2N2N2N1O01O0001O0000000001O003M2O0O001O00001O3ROTAc0n>[OTAc0n>[OTAc0n>[OTAd0U?N2N2N3M2N2N2N2N2O\\Zd5"}, "image_id": 52, "id": 801}, {"iscrowd": 0, "category_id": 1, "bbox": [171.0, 158.0, 30.0, 30.0], "area": 463, "segmentation": {"size": [512, 512], "counts": "]ee21n?2N2N2N2N2N2N2N2N2N3M2N2O1N000001O2N2N2N2N2N2O2M2N2N2N2N2NfZk4"}, "image_id": 52, "id": 802}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 168.0, 42.0, 59.0], "area": 1361, "segmentation": {"size": [512, 512], "counts": "[5R1m>2N2N2000010O000000000000010O000000000000O1N2N1O00000000002O1N2N2N2N3Ch@MZ?1h@MZ?2g@L[?2g@L[?2g@MZ?1N2N2N2N2N2N2N20001O000000000N2N2N2N2N1O000001O000000000001O00000000000002N2N2N2N2N2N2N2N2N2N3M2N2O1N2N2N2N2N2NSiS5"}, "image_id": 52, "id": 804}, {"iscrowd": 0, "category_id": 1, "bbox": [193.0, 204.0, 34.0, 33.0], "area": 584, "segmentation": {"size": [512, 512], "counts": "mfP31n?2N2N2N2N3M2O1N2N2N2N2N2N2N2N00010O0001O2N2N2N2N2N2N2O1N3M2N2N2N2NWY^4"}, "image_id": 52, "id": 805}, {"iscrowd": 0, "category_id": 1, "bbox": [238.0, 222.0, 17.0, 18.0], "area": 157, "segmentation": {"size": [512, 512], "counts": "UWg31n?3M2N2N2N2N2N201ON2N2N2O1N2N2N2NjXP4"}, "image_id": 52, "id": 806}, {"iscrowd": 0, "category_id": 1, "bbox": [158.0, 234.0, 59.0, 65.0], "area": 1755, "segmentation": {"size": [512, 512], "counts": "gX_21n?2N2N3M2O1N2N2N2N2N3M2N2O1N2N2N2N2N1_OnNSBR1m=POQBP1o=ROPBm0P>UOnAl0Q>VOmAj0T>WOjAi0V>YOhAg0X>`000000001O01O002N2O10000N2N3M2IZAROg>l0[AROg>l0[AROg>l08O1N3M2N2N2N2N2N2O2M2N2N2N2NkWc4"}, "image_id": 52, "id": 807}, {"iscrowd": 0, "category_id": 1, "bbox": [1.0, 236.0, 10.0, 10.0], "area": 55, "segmentation": {"size": [512, 512], "counts": "_g02m?3M2N200OO2N2O1N2N`Xj7"}, "image_id": 52, "id": 808}, {"iscrowd": 0, "category_id": 1, "bbox": [240.0, 237.0, 31.0, 31.0], "area": 512, "segmentation": {"size": [512, 512], "counts": "kWh31n?3M2N2N2N2O2M2N2N2N2N3M2N2N1O01O1O2O1N3M2N2N2N2N2N2N2N2N3M2OTXh3"}, "image_id": 52, "id": 809}, {"iscrowd": 0, "category_id": 1, "bbox": [508.0, 265.0, 4.0, 10.0], "area": 23, "segmentation": {"size": [512, 512], "counts": "_Xn72l?2N3M3fG"}, "image_id": 52, "id": 810}, {"iscrowd": 0, "category_id": 1, "bbox": [267.0, 272.0, 37.0, 36.0], "area": 725, "segmentation": {"size": [512, 512], "counts": "SiU41n?2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2OO0000001O2N1O1O002N2N2O1N2N2N2N2N2H\\@1f?M\\@1f?N[@0kVX3"}, "image_id": 52, "id": 811}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 278.0, 12.0, 15.0], "area": 121, "segmentation": {"size": [512, 
512], "counts": "i8>RB@m=`0UB^Ok=b0WB\\Oi=d0YBZOg=f0[BXOe=h0]BVOd=i0^BUOb=k0`BSO`=m0bBQO^=o0dBoN^=o0dBoN^=o0k0O1N2N2N2N2N2N2N3M2N2N2N2N2N2N2OheW7"}, "image_id": 52, "id": 815}, {"iscrowd": 0, "category_id": 1, "bbox": [311.0, 312.0, 38.0, 41.0], "area": 782, "segmentation": {"size": [512, 512], "counts": "Ujk4120i?5N2N2N2N2N3M2N2O1N2N2N2N2N20001O01O000000000O1N2N2N2N3M2N2O1N2N2N2N2N2N2N`Ua2"}, "image_id": 52, "id": 816}, {"iscrowd": 0, "category_id": 1, "bbox": [227.0, 314.0, 67.0, 58.0], "area": 1927, "segmentation": {"size": [512, 512], "counts": "nja31n?2N2N3M2N2N2N2N2N2N2N2N2N2N2CTOcAn0[>TOdAm0Z>UOdAm0Z>UOdAm0Z>UOdAm0Z>>N1O1O01O00001O2N2N2N2N2N2N2N2N2N2N2N2N2N000000000001O0000000001O00002N2N2N2N2N3M2N2N2N2N2N2Nje\\3"}, "image_id": 52, "id": 817}, {"iscrowd": 0, "category_id": 1, "bbox": [347.0, 340.0, 12.0, 12.0], "area": 72, "segmentation": {"size": [512, 512], "counts": "kj]51n?2N2N2N2N001O02N2N3M2NXU\\2"}, "image_id": 52, "id": 818}, {"iscrowd": 0, "category_id": 1, "bbox": [459.0, 346.0, 30.0, 29.0], "area": 487, "segmentation": {"size": [512, 512], "counts": "\\kU71m?2N3M2O2M2N3M2N3N1N3O10O010O010O010O010O1M2N3M2N3M2O2M2N3MnT;"}, "image_id": 52, "id": 819}, {"iscrowd": 0, "category_id": 1, "bbox": [355.0, 347.0, 9.0, 10.0], "area": 55, "segmentation": {"size": [512, 512], "counts": "oja53l?2N2N2N10O3M2N2NReY2"}, "image_id": 52, "id": 820}, {"iscrowd": 0, "category_id": 1, "bbox": [263.0, 350.0, 54.0, 62.0], "area": 1578, "segmentation": {"size": [512, 512], "counts": "XlS42m?2N2N2O1N2N2N2N2N2N2N3M2N2N2]OTOPBm0n=UOPBm0n=VOoAl0P>UOnAl0Q>VOmAj0S>XOkAi0T>YOjAg0V>[OhAe0X>a0000000001O2N3M2N2O1N2N2N2N2N2N2N2N3M2N2N2O1N2N2N2N2N2N3M2N2NWTQ3"}, "image_id": 52, "id": 821}, {"iscrowd": 0, "category_id": 1, "bbox": [4.0, 354.0, 99.0, 61.0], "area": 2248, "segmentation": {"size": [512, 512], "counts": "f\\21n?4M3BJk@9R?Jk@6U?Li@4W?Ng@2Y?:0000000002N01O001O00000000000010O00Kg@DY?=h@AX??40002N3M2N2000O1N2N2N2N2N3M2N2N2O100001O00000000000001M2N1O0000000000010O00000000000000000000000000010O00000001O2N2N2N2N2N2N3M2N2O1N2N2N2N2N2N_T\\6"}, "image_id": 52, "id": 822}, {"iscrowd": 0, "category_id": 1, "bbox": [332.0, 360.0, 26.0, 26.0], "area": 352, "segmentation": {"size": [512, 512], "counts": "f[V52m?3M2N2N2N2N2N2N2N2N1O00000001O2N2N2N2N2N2O1N3M2N2N^d\\2"}, "image_id": 52, "id": 823}, {"iscrowd": 0, "category_id": 1, "bbox": [369.0, 383.0, 26.0, 27.0], "area": 370, "segmentation": {"size": [512, 512], "counts": "^lh52m?1O2M3N2N1O2M3N2N2M2O2N2O10N2N1N3N2N2N2M2O2N2M3N1OiSj1"}, "image_id": 52, "id": 824}, {"iscrowd": 0, "category_id": 1, "bbox": [490.0, 385.0, 22.0, 35.0], "area": 486, "segmentation": {"size": [512, 512], "counts": "f\\e72l?2N3M3N1N3M2N3N1N3M3M2O2O0010O0100O0100O01jC"}, "image_id": 52, "id": 825}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 389.0, 38.0, 29.0], "area": 502, "segmentation": {"size": [512, 512], "counts": "\\\\i61m?2N3M2O2O1O010\\@H^?=10O010O10O010O010O10O010O010O010O010O010O010O01N1N3M2O2M2N]cc0"}, "image_id": 52, "id": 826}, {"iscrowd": 0, "category_id": 1, "bbox": [74.0, 390.0, 61.0, 49.0], "area": 1476, "segmentation": {"size": [512, 512], "counts": "o\\U12n?2M3N2M3N2M3N2M3N2M3N2M010O010O010O010O0010O010O010O010O010O010O010O010O010O010O010O010O010O010O011N3N2M3N2M3N2M3N2M3N2MVSl5"}, "image_id": 52, "id": 827}, {"iscrowd": 0, "category_id": 1, "bbox": [301.0, 390.0, 51.0, 61.0], "area": 1618, "segmentation": {"size": [512, 512], "counts": 
"amf41n?2N2M3N2N1AFWAEXA=f>EWA>f>EXA=b>_OXA64J[A8c>I[A:c>c0M3N2N1O2M3N2N2N10O2N2N2N2M3N1O2N2M3N2N1O2M3N2N2N1N3N2N2N2N1N3N2N2N2M2O2N2NTc_2"}, "image_id": 52, "id": 828}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 419.0, 50.0, 57.0], "area": 1452, "segmentation": {"size": [512, 512], "counts": "T^W71m?2N3M3M2O2M2N3M2N3N1N3M3M2N3N1N3M2N3O0010O0100O0100O0QO]Ab0c>\\O_Ac0a>[ObAe0^>XOdAh0\\>VOgAj0Y>SOiAn0e>O010O010O10O010O10N1N3N1N3M3M2N3NoA"}, "image_id": 52, "id": 829}, {"iscrowd": 0, "category_id": 1, "bbox": [346.0, 421.0, 55.0, 57.0], "area": 1621, "segmentation": {"size": [512, 512], "counts": "S^]56i?8I7I7I00O0100000000O010000000N2O0O2O1N2O1N2O0O2O1N2O1N2O0O2O0O0102M3N2M2O2M3N1O2M3N1N3N2M3N1N3N2M2O2M3N1N3N]Rg1"}, "image_id": 52, "id": 830}, {"iscrowd": 0, "category_id": 1, "bbox": [32.0, 422.0, 33.0, 32.0], "area": 457, "segmentation": {"size": [512, 512], "counts": "k]`01n?2N2N2N2N2N2N2N2O2M00000000001O00000001O0000002N2N2O1N2N2N2N3M2NaRo6"}, "image_id": 52, "id": 831}, {"iscrowd": 0, "category_id": 1, "bbox": [104.0, 426.0, 71.0, 68.0], "area": 2082, "segmentation": {"size": [512, 512], "counts": "g^d11n?2N2N2N2N3M2N2O1N2N2N2N2N2N2N3M2N2N2O1N2N00000000001O000001O00000000000001O0001O0000000000000001O01O2N2N2N2N2N2N3M2N2O1N2N2N2N2N2N3M2N2N2O1NQRX5"}, "image_id": 52, "id": 832}, {"iscrowd": 0, "category_id": 1, "bbox": [402.0, 431.0, 35.0, 32.0], "area": 612, "segmentation": {"size": [512, 512], "counts": "Q^Y61m?2J0Z@3c?7N1010ON3M2N3M2010O10O10O010O010O010O010O10OO2M2N3M2N3M2O2M2N3MVRU1"}, "image_id": 52, "id": 833}, {"iscrowd": 0, "category_id": 1, "bbox": [44.0, 439.0, 47.0, 39.0], "area": 859, "segmentation": {"size": [512, 512], "counts": "W^f02m?2N2N2O1N2N2N2N3M2N2N2N2N2N2N2O1N2N2O2OO2M2N2N2N2N2N2N2N2N2N2N3N1N000000000000111N1N2N2N2O1NiQb6"}, "image_id": 52, "id": 834}, {"iscrowd": 0, "category_id": 1, "bbox": [7.0, 450.0, 37.0, 38.0], "area": 698, "segmentation": {"size": [512, 512], "counts": "_n31n?2O1N2N3M2N2N2N2N2N2N2N2N2N2PAUOi>l0UAVOk>P10O1N2O10000N2N3M2O1N2N2N2N2N2N2N2N2N2N2NYaY7"}, "image_id": 52, "id": 835}, {"iscrowd": 0, "category_id": 1, "bbox": [74.0, 459.0, 29.0, 27.0], "area": 362, "segmentation": {"size": [512, 512], "counts": "k^U11o?1N2N3M2N2N2N2O1N2N0000001O01O000000010O3M2N2N2N2N2O2M2N\\Q\\6"}, "image_id": 52, "id": 836}, {"iscrowd": 0, "category_id": 1, "bbox": [398.0, 463.0, 50.0, 49.0], "area": 1560, "segmentation": {"size": [512, 512], "counts": "W_W63k?3M2j@Ib>9\\AHb>:\\AIa>:\\AIa>9]AIb>9\\AHb>;[AGc>;[AGc>l0O1N2N2O11O001O1O001O001O001O1O001O001O001O1O001O0N3M2N3N2M2N3M2O2M2N3M3N1N3M2No`o0"}, "image_id": 52, "id": 837}, {"iscrowd": 0, "category_id": 1, "bbox": [456.0, 463.0, 29.0, 31.0], "area": 503, "segmentation": {"size": [512, 512], "counts": "Q_T73k?2N3M3M2N3M2N3M2N3000O010O010O01000O0N3N1N3M3N1N3M2N3N1NXQ="}, "image_id": 52, "id": 838}, {"iscrowd": 0, "category_id": 1, "bbox": [171.0, 479.0, 49.0, 33.0], "area": 929, "segmentation": {"size": [512, 512], "counts": "ooe21n?1O1O1O1O1O1O1O1O1EGn@:Q?Hm@9R?Il@8S?Jk@7T?Kj@6U?;O1O1O1O1O1O1O11O1O1O2NO1O1O11O1O2N1O1O1O1O1O1O1O1N2N2N2N2N2N2N2N2N\\`a4"}, "image_id": 52, "id": 839}, {"iscrowd": 0, "category_id": 1, "bbox": [448.0, 498.0, 32.0, 14.0], "area": 238, "segmentation": {"size": [512, 512], "counts": "n_P72m?1N2N2N2N2O1N2001O001O1O001O001O001O001O1O001O001O001O001O1O00Q`?"}, "image_id": 52, "id": 840}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 14.0, 34.0], "area": 270, "segmentation": {"size": [512, 512], "counts": "0R1n>N2N2N2N2N2N2N5K2N3M2N3M2Nmoh7"}, "image_id": 53, "id": 841}, {"iscrowd": 
0, "category_id": 1, "bbox": [88.0, 0.0, 31.0, 24.0], "area": 473, "segmentation": {"size": [512, 512], "counts": "]P\\12l?2N3M3N1N3M2N3O001O001O1O001O001O0000N2O1N2N2N2N2N2O1N2N2N2NR`T6"}, "image_id": 53, "id": 842}, {"iscrowd": 0, "category_id": 1, "bbox": [144.0, 0.0, 32.0, 24.0], "area": 511, "segmentation": {"size": [512, 512], "counts": "cPX22i?OY@3e?OZ@3d?6M2N3M2OO00011001O1O001O001O001O00O1O1N2N2N2N2O1N2N3M2O2MPPX5"}, "image_id": 53, "id": 843}, {"iscrowd": 0, "category_id": 1, "bbox": [185.0, 0.0, 21.0, 9.0], "area": 96, "segmentation": {"size": [512, 512], "counts": "P`l21o?001O001O001O001O001O001O1O001OO1N2N2O1NRPi4"}, "image_id": 53, "id": 844}, {"iscrowd": 0, "category_id": 1, "bbox": [222.0, 0.0, 43.0, 28.0], "area": 691, "segmentation": {"size": [512, 512], "counts": "WP_31m?3M2O2M201O1O001O001O1O001O0e@CS?>j@DV?8M2O2M2N3M3Ndo]4"}, "image_id": 53, "id": 847}, {"iscrowd": 0, "category_id": 1, "bbox": [347.0, 29.0, 116.0, 69.0], "area": 3577, "segmentation": {"size": [512, 512], "counts": "aa]53k?2O2M2N3N2M2N3M2O2M3M2O2O010O010O10O010OSATOg>l0WAUOi>R10O0100VAnNe>Q1ZAPOf>V10O01000O010O01000O01O0O2M3M2OAdAZO\\>f0fAYOY>h0iAUOZ>h0hAVO[>g0hAWOY>h0iAUOZ>h0`0N3000O0i@ZOU?i0O01000O010O01000O0O2O010O1PAUOj>j0UAWOl>o0O010O10O10O010O10O10O0N3N20O010O01000O010O0100O0100O010O01000O0O2N1N3M3M2O2M2N3N2M2N3N1N3M3N1Nm]h0"}, "image_id": 53, "id": 848}, {"iscrowd": 0, "category_id": 1, "bbox": [164.0, 33.0, 21.0, 21.0], "area": 238, "segmentation": {"size": [512, 512], "counts": "^Qb21m?2O2M2N3M3M2O2N110O010O010ON3M3M2O2M2N3Mj^S5"}, "image_id": 53, "id": 849}, {"iscrowd": 0, "category_id": 1, "bbox": [260.0, 37.0, 27.0, 47.0], "area": 637, "segmentation": {"size": [512, 512], "counts": "lQR43k?2N3N2M2N3M2N3N1N3M2N3M2XAUOZ>m0cAUO^>k0^AXOb>T10O0[O^ALc>2_ANa>OaA1_>MdA2]>KeA2^>LeA1^>McA0`>NcANa>OaANb>0\\_`3"}, "image_id": 53, "id": 850}, {"iscrowd": 0, "category_id": 1, "bbox": [115.0, 38.0, 30.0, 29.0], "area": 494, "segmentation": {"size": [512, 512], "counts": "hai11l?3N3M2N3M2N3M2N3N110O010O010O010O01O010O010M2N3M2N3M2N3M2Nb^g5"}, "image_id": 53, "id": 851}, {"iscrowd": 0, "category_id": 1, "bbox": [181.0, 49.0, 17.0, 15.0], "area": 161, "segmentation": {"size": [512, 512], "counts": "iaj22l?3L3N2O2O010O01O01O010O01O0N2M4M\\nl4"}, "image_id": 53, "id": 852}, {"iscrowd": 0, "category_id": 1, "bbox": [470.0, 58.0, 26.0, 23.0], "area": 305, "segmentation": {"size": [512, 512], "counts": "VR[71n?1N3M3M2N3M2O20O010O10O010O10O010O010N1N3M3M2N3N1Nnm7"}, "image_id": 53, "id": 853}, {"iscrowd": 0, "category_id": 1, "bbox": [311.0, 63.0, 36.0, 31.0], "area": 671, "segmentation": {"size": [512, 512], "counts": "dbk41m?3N1N3M3M2O2M2N3M2O2M3M210O010O01O001O1O001O001O001M3N1010OJe@G\\?6f@H\\?78M3M2Nf]b2"}, "image_id": 53, "id": 854}, {"iscrowd": 0, "category_id": 1, "bbox": [134.0, 75.0, 54.0, 43.0], "area": 1445, "segmentation": {"size": [512, 512], "counts": "RSS24j?2N3L3N2N3L3N3L3N2N3O010O0010O0010O0010O01RAROi>m0UAUOk>Q10O000N3M2010O01O01O010O0O2L3N2N30O0N3L3N3M2M3N3M2M4M2N3J]@Jf?44M`mQ5"}, "image_id": 53, "id": 855}, {"iscrowd": 0, "category_id": 1, "bbox": [401.0, 84.0, 70.0, 50.0], "area": 1279, "segmentation": {"size": [512, 512], "counts": "kbX61m?3M2O2M2]@G_?>00O0100OO2O1O010O01000O010O01N20O010O1b@AZ?>e@D[?a01N10O010O1N1N3N110O10O10O010O10O010O10O10O010O10O10O010O10O010O10O1N1O2M2N3N2M2N3N1N3M^\\d0"}, "image_id": 53, "id": 856}, {"iscrowd": 0, "category_id": 1, "bbox": [479.0, 86.0, 33.0, 46.0], "area": 849, "segmentation": {"size": [512, 512], "counts": 
"^c_73k?2O2N2O011N010O1O0N3M2O2M2N3M2N3M2O2M3M2N3O0010O010O01000O010O01TM"}, "image_id": 53, "id": 857}, {"iscrowd": 0, "category_id": 1, "bbox": [113.0, 87.0, 17.0, 17.0], "area": 176, "segmentation": {"size": [512, 512], "counts": "Pch13k?2N3M2N3N10100O010O01O0N3M2N3M2NVmn5"}, "image_id": 53, "id": 858}, {"iscrowd": 0, "category_id": 1, "bbox": [343.0, 102.0, 23.0, 27.0], "area": 527, "segmentation": {"size": [512, 512], "counts": "jc[55`?;G9000001O000000000001O01O0000000000N2H801LklX2"}, "image_id": 53, "id": 859}, {"iscrowd": 0, "category_id": 1, "bbox": [274.0, 104.0, 55.0, 41.0], "area": 1534, "segmentation": {"size": [512, 512], "counts": "VTY42i?6BIl@=00O100010O000000010O000000010O000000010O000000010O000000010O0000L4K6L30000000010O00000001M2J6K5K5K5K6Jj\\k2"}, "image_id": 53, "id": 860}, {"iscrowd": 0, "category_id": 1, "bbox": [212.0, 109.0, 46.0, 33.0], "area": 987, "segmentation": {"size": [512, 512], "counts": "STZ34f?7J5K5K5O101O0001O000001O01O000001O0001O0001O0001O0001O00[On@KjA3X>KkA2W>LkA2W>LkA2W>LkA2W>LkA2W>LkA2W>LkA2W>LkA2W>LkA2W>LkA2W>LkA2S?Nnhb0"}, "image_id": 53, "id": 865}, {"iscrowd": 0, "category_id": 1, "bbox": [51.0, 312.0, 121.0, 57.0], "area": 3196, "segmentation": {"size": [512, 512], "counts": "bji01k?4L4M3L5K4L4M310O0000010O00010O0000010O0000010O0000010O000010O000010O0000010O0000010O000010O000010O0000010O0000010O0000010O00010O0000010O0000010O0000010O000010O000010O0000010O0000010O000010O000010O0000010O0000010O0000010O000010N1L4L5L3L4L4L^eY5"}, "image_id": 53, "id": 866}, {"iscrowd": 0, "category_id": 1, "bbox": [30.0, 380.0, 215.0, 132.0], "area": 9298, "segmentation": {"size": [512, 512], "counts": "V_?1k?4M4K4L4M3L50O0001O01O0001O01O01O0001O01O01O01O0001O01O00010O0001O01O00010O0000010O000010O000010O0000010O000010O000010O000010O0000010O0001O01O00010O0001O01O03M0001O0000001O00001O0000001O0000001O00001O0000001O0000001O0000001O00001O0000001O0000001O00001O0000001O0000001O0000L4L4M3L4L4L4M3L4L4M3L4L4L4M3L4L4M3L4L4L4M3L4L4L4M3L4L4M3L4L4L4M31O0000001O0000001O0000001O000M4K4M3L4L5L3L4L4L5L3L4L5L3L4L4L5L3L4L4L5L3L4L4M4K4L4L5L3L4L4M4KkSU4"}, "image_id": 53, "id": 867}, {"iscrowd": 0, "category_id": 1, "bbox": [72.0, 442.0, 91.0, 52.0], "area": 2932, "segmentation": {"size": [512, 512], "counts": "f^T12k?3L5K4L4L4M4K4N201O01O0001O01O0001O01O0001O01O00010O0001O01O0001O01O0001O01O0001O01O00010O00M4L31O0001O01O00010O0000010O0000010O0000010O0001O01O00010O0000010O0000010M2L4L4L5K4Cd@2`?Jd@3f?LdQ^5"}, "image_id": 53, "id": 868}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 453.0, 34.0, 44.0], "area": 954, "segmentation": {"size": [512, 512], "counts": "U?1k?4M4K4L4L4M4K4L4N3O00000XAmNd>W100010O0000010O0000010N1L4M3L5K4L4M4K4L4Lia^7"}, "image_id": 53, "id": 869}, {"iscrowd": 0, "category_id": 1, "bbox": [276.0, 478.0, 43.0, 33.0], "area": 673, "segmentation": {"size": [512, 512], "counts": "__Z41o?4L2M010000O0100O002O2M2N3N1N2O2M2N2OO00010O00010O00010O010O00102M2N3N1N2N3N1N3M2O2MdPP3"}, "image_id": 53, "id": 870}, {"iscrowd": 0, "category_id": 1, "bbox": [324.0, 483.0, 36.0, 29.0], "area": 592, "segmentation": {"size": [512, 512], "counts": "c_R51m?3N1O2M3N2N1O2M3N2N2N1N3O100000O1O001O1O1O001O1O1O1O001N2N2N2N1N3N2N2N\\`[2"}, "image_id": 53, "id": 871}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 16.0, 10.0, 39.0], "area": 206, "segmentation": {"size": [512, 512], "counts": "`0W1i>L4L4L5K4L4M3L5K4Lboj7"}, "image_id": 56, "id": 872}, {"iscrowd": 0, "category_id": 1, "bbox": [86.0, 74.0, 36.0, 73.0], "area": 2420, "segmentation": {"size": [512, 512], "counts": 
"oS[1c0h>e0ZOf0F:000000000000000001O0000000000000000000001O00000000000000000H8ZOf0[OZnR6"}, "image_id": 56, "id": 873}, {"iscrowd": 0, "category_id": 1, "bbox": [43.0, 204.0, 67.0, 37.0], "area": 2289, "segmentation": {"size": [512, 512], "counts": "lfe0b0n>`001O000000000000000000000000000000000001O000000000000000000000001O00000000000000000000000000000000000001O000000000000000000000001OBPjX6"}, "image_id": 56, "id": 874}, {"iscrowd": 0, "category_id": 1, "bbox": [496.0, 17.0, 16.0, 64.0], "area": 546, "segmentation": {"size": [512, 512], "counts": "YRh74i?4K4L4L5L3L4L4L5L3L4L4M4K4L4L4_O"}, "image_id": 57, "id": 875}, {"iscrowd": 0, "category_id": 1, "bbox": [409.0, 100.0, 103.0, 119.0], "area": 7225, "segmentation": {"size": [512, 512], "counts": "Xf\\62k?4K4L4M3L5L3L4M4K4L4M3L5L3L4L5L3L4M3L5L3L4L5L3L4M3L5K4M301O0001O01O01O01O0001O01O01O01O0001O01O01O0001O01O01O01O0001O01M2M3L5L3L4M3L5L3L4M4K4M3L4M4K4M3L4M4K4L4M4N10000010O0001O01O00010O0001O01O00010O0001OWL"}, "image_id": 57, "id": 876}, {"iscrowd": 0, "category_id": 1, "bbox": [248.0, 268.0, 155.0, 163.0], "area": 16213, "segmentation": {"size": [512, 512], "counts": "a\\l32k?3L5L3L4L5L3L4L4M4K4L4M3L5K4M3L5K4M3L4L5nNaM^Db2_;aM]Dc2_;aM]Dc2_;bM]Db2_;aM]Dc2_;aM^Db2^;bM^Db2_;R1O000010O000010O0000010M2M310O0000010O0001O01O00010O0000010O0001O01O00010O0000010O0001O01O00010O0000010O0001O01O0001O01O01O01O0001O01O0001O01O01O01O00M4L3M3L4M4L3L4M4K4M3M3L5L3M3L4M4L3L4M4K4M3O1010O00010OSOWBCi=:ZBFf=8]BGd=8\\BIc=7]BIc=8]BGc=9]BGd=ZOUBi07Nc=VOYBl04Nh=3XBLh=4XBMh=2XBKk=6TBFP>:QBBR>>nA^OW>a0iA[O[>f0eAVO^>j0=L3L4L4M4K4L4McVf1"}, "image_id": 58, "id": 877}, {"iscrowd": 0, "category_id": 1, "bbox": [79.0, 74.0, 88.0, 88.0], "area": 3951, "segmentation": {"size": [512, 512], "counts": "hcW11n?2M3N1O2N2N2M3N2N1O2N2M3N2N2N1N3N2N2N2N1N3N2N2N2N2M2O2N2N2N2M3N1O2N2N2M3N2N1010000O1000O100000O1000N2N2M2O2N2N2N2M3N1O2N2N2M3N2N1O2N2M3N2N2N1N3N2N2N2N2M2O2N2N2N2M3N1O2N2N2M3N_\\\\5"}, "image_id": 60, "id": 878}, {"iscrowd": 0, "category_id": 1, "bbox": [148.0, 129.0, 80.0, 79.0], "area": 3219, "segmentation": {"size": [512, 512], "counts": "]UZ21n?2N2N2N2N2N1O2M3N2N2N2N2N2N1N3N2N2N2N2N2N2M3N1O2N2N2N2N2N2M3N1O2N2N2N2N2M3N2N1000O2N2N2N2N2N2M3N2N1O2N2N2N2M3N2N2N1O2N2N2N2M3N2N2N1O2N2N2M3N2N2N2N2N1O2M3N2N2Nnj]4"}, "image_id": 60, "id": 879}, {"iscrowd": 0, "category_id": 1, "bbox": [193.0, 170.0, 82.0, 82.0], "area": 3416, "segmentation": {"size": [512, 512], "counts": "hfP31n?2M2O2N2N2N2M3N1O2N2N2M3N2N1O2N2M3N2N2N1O2M3N2N2N1O2M3N2N2N2N1N3N2N2N2N2M2O2O10000O10O1N2M3N2N1O2N2M3N2N2N1O2M3N2N2N2N1N3N2N2N2N1N3N2N2N2N2M2O2N2N2M3N2N1O2N2M3N2NdYf3"}, "image_id": 60, "id": 880}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 271.0, 6.0, 12.0], "area": 41, "segmentation": {"size": [512, 512], "counts": "_82M3N1O2N2M3N2N2N1O2N20000OO2N2N2M3N1O2N2M3N2N2N1O2M3N2N2N1N3N2N2N2M3N1O2N2N2M3N1O2N2M3N2N2N1O2M3N2N2NUfU7"}, "image_id": 60, "id": 882}, {"iscrowd": 0, "category_id": 1, "bbox": [22.0, 322.0, 79.0, 80.0], "area": 3235, "segmentation": {"size": [512, 512], "counts": "a[;1n?2N2N2M2O2N2N2M3N1O2N2M3N1O2M3N2N2N1N3N2N2N2M2O2N2M3N1O2N2M3N2N1O2M3N2N2N1O200O10O1N2N2N1N3N2N2N2M2O2N2M3N1O2N2M3N2N1O2M3N2N2N1N3N2N2M3N1O2N2M3N1O2N2M3N2N1OPU]6"}, "image_id": 60, "id": 883}, {"iscrowd": 0, "category_id": 1, "bbox": [376.0, 0.0, 134.0, 69.0], "area": 3877, "segmentation": {"size": [512, 512], "counts": 
"`Pl52l?2N3L3N3M2M4M201O00001O001O001O001O00001O001OM3N2N2N2M3N2N2N2001O001O00001O001QACY>=eAE[>9aAI_>8]AKc>5[ANd>2ZA0f>1WA2h>c001O001O00001O001O00gN]AV1f>010O01O010O010O01O010O01O010O010O01O010O01O010O001M2N2M4M2N3M2M4M210OO2L3N2O2O010O0\\AYOP>g0mA[OS>e0jA^OW>b0fAAY>?eAC\\>:_AIa>l00O010O0010O010O0010O010gN^AR1b>kNaAU1f>0O01N1M4M2N2N3L3N3Ie@E]?88M2N3Mnn0"}, "image_id": 61, "id": 884}, {"iscrowd": 0, "category_id": 1, "bbox": [492.0, 0.0, 20.0, 10.0], "area": 111, "segmentation": {"size": [512, 512], "counts": "QPf71n?10001O001O001O00001O001O00001O001O001O"}, "image_id": 61, "id": 885}, {"iscrowd": 0, "category_id": 1, "bbox": [90.0, 222.0, 279.0, 290.0], "area": 20759, "segmentation": {"size": [512, 512], "counts": "^^]13m?9G9G:F9F;F000000000000000O10000000000kIdNfM\\1Z2nN\\MR1d2VOTMj0l2VOTMj0l2VOSMk0m2UOSMk0m2UOSMk0l2VO[Mc0e2]O^M`0b2@^M`0b2@^M`0b2@^M`0b2_O_Ma0a2_O_Ma0a2_O_Ma0a2_O_Ma0a2_O_Ma0`2@`M`0`2@_Ma0a2_O_Ma0a2_OnL\\OWKU1k7^OoLFmJl0T8^OoLS1Q3mNoLS1Q3mNoLS1Q3mNoLS1P3nNPMR1P3nNPMR1P3nNPMR1P3nNoLS1Q3mNoLS1Q3lNPMT1P3lNPMT1P3lNPMT1P3lNPMT1o2mNQMS1o2mNQMS1o2mNQMS1o2mNQMS1o2mNPMT1P3kNQMU1o2kNQMU1o2kNQMU1o2kNQMU1o2kNQMU1n2lNRMT1n2lNRMT1n2lNRMT1n2lNQMU1o2jNRMV1n2jNRMV1n2jNRMV1n2jNRMV1n2jNRMV1m2kNSMU1m2kNSMU1m2kNSMU1m2kNSMU1m2kNRMV1n2iNSMW1m2iNSMW1m2iNSMW1m2iNSMW1m2iNSMW1l2jNTMV1l2jNTMV1l2jNTMV1l2jNSMW1m2hNTMX1l2hNTMX1l2hNTMX1l2hNTMX1l2hNTMX1k2iNUMW1k2iNUMW1k2iNUMW1k2iNUMW1k2iNTMX1l2gNUMY1k2gNUMY1k2hNTMX1l2QOkLo0U3[OaLe0^3EYL;g3OoK1Q48fKHZ4b0[K_Oe4j0RKVOn4T1hJlNX5]1_JcNR5SNiHd3k1YN\\5_2[JaMe5h2RJXMn5R3hInLX6UObHl2m0oMa6UOlHl29oMk6TOVIm2EPNT7TOaIQ5_6oJjIh4V6XKSJ_4m5aK]JU4c5kKfJl3Z5TLPKb3P5^LYKY3g4gLcKo2]4QMlKf2T4YMWL]2b3jMgLm1Y3SNQMc1o2]NZMZ1f2fNdMP1\\2POmMg0S2YOSNa0m1_OSNa0m1_OSNa0l1_OUNa0k1_OTNb0l1^OTNb0l1^OTNb0l1^OTNb0l1^OTNb0l1^OTNb0l1^OTNb0Q2YOoMg0Q2YOoMg0Q2XOoMi0Q2WOoMi0Q2WOoMi0Q2WOoMi0Q2WOoMi0Q2WOoMi0Q2WOoMi0Q2WOoMi0Q2WOoMi0P2WOQNi0o1WOPNj0P2VOPNj0P2VOPNj0P2VOPNj0P2VOTNf0l1ZO]N=c1CgN3Y1MPOJo07[O_Oe0`0EWO;i0NnN2R1^700000000000O01000000000000000O10O1000000000000000O01000000000000000O10O1000000000000000O01000000000000000O10O1000000000000000OBdAYO]>g0>0000000000O10O1000000000000000O01000000000000000O10O10000000000000O10O1000000000000000O2O9G:F^XW2"}, "image_id": 61, "id": 886}, {"iscrowd": 0, "category_id": 1, "bbox": [34.0, 293.0, 43.0, 72.0], "area": 2195, "segmentation": {"size": [512, 512], "counts": "YYa04l?8H8G:QAYOS>o0eAQO[>Z105K9G6J000O01000000000000000O010000000000000O0107I3MOaNmAS1S>mNUBk0k=UO^Bb0b=^OfB:Z=EPC2P=NXCIi<7`1000008HZVi6"}, "image_id": 61, "id": 887}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 345.0, 3.0, 14.0], "area": 38, "segmentation": {"size": [512, 512], "counts": "i:>b?04LSUn7"}, "image_id": 61, "id": 888}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 369.0, 16.0, 34.0], "area": 502, "segmentation": {"size": [512, 512], "counts": "b;Q1o>00000O1000000000000000O10;E8jAHV>8jAHV>a0aA_O_>Q10O12N9G2M1000000000000000O10O10000002N9G7IO10O10002N9G:F9GlQY7"}, "image_id": 61, "id": 890}, {"iscrowd": 0, "category_id": 1, "bbox": [3.0, 0.0, 60.0, 19.0], "area": 586, "segmentation": {"size": [512, 512], "counts": "P`11o?00001O00001O00001O001O00001O00001O00001O00001O00001O00001O001O00001O00001O00001O00001O00001O001O00001O00O1M3M3N2M3M3MS`P7"}, "image_id": 62, "id": 891}, {"iscrowd": 0, "category_id": 1, "bbox": [96.0, 0.0, 115.0, 79.0], "area": 5983, "segmentation": {"size": [512, 512], "counts": 
"WQ`12l?2M4L3M3N3L3M3M3M4M2M3M4L3N2O2O00001O00001O00001O00001O00001O00001O00001O00001O00001O00000lA]Nk=d1QB_No=j1O00001O00001O00001O001O00001O00001O00001O001O00001O00001O00001O00001O001O00001OO1M3M3N2M3M3M3N2M3M3M3M3N2M3M3M3N2M3M3M3M3N2M3M3M3N2M3MS`f4"}, "image_id": 62, "id": 892}, {"iscrowd": 0, "category_id": 1, "bbox": [508.0, 0.0, 4.0, 4.0], "area": 10, "segmentation": {"size": [512, 512], "counts": "PPn71o?1O1O1O"}, "image_id": 62, "id": 893}, {"iscrowd": 0, "category_id": 1, "bbox": [295.0, 30.0, 45.0, 49.0], "area": 1741, "segmentation": {"size": [512, 512], "counts": "kac4c0j>c0F:00000000001O0001O000000000000000004L00001O0C=O1000K5000000000000001O0000000000000F:]Ocoe2"}, "image_id": 62, "id": 894}, {"iscrowd": 0, "category_id": 1, "bbox": [229.0, 40.0, 54.0, 57.0], "area": 2635, "segmentation": {"size": [512, 512], "counts": "bab3?W?:000001dAXO^=h0nALR>Q1001O000000000001O0000000000000000000002N000000000001O0001O00000000000000000000000000000I7B>B>BR_b3"}, "image_id": 62, "id": 895}, {"iscrowd": 0, "category_id": 1, "bbox": [44.0, 59.0, 24.0, 22.0], "area": 328, "segmentation": {"size": [512, 512], "counts": "XRf03j?3M3M4M2N2010O00010O00010O00010O01O01L3M3M4L3MRnm6"}, "image_id": 62, "id": 896}, {"iscrowd": 0, "category_id": 1, "bbox": [210.0, 82.0, 18.0, 12.0], "area": 193, "segmentation": {"size": [512, 512], "counts": "mRY31d?;000000001O0000000000000000000000]m]4"}, "image_id": 62, "id": 897}, {"iscrowd": 0, "category_id": 1, "bbox": [300.0, 84.0, 70.0, 115.0], "area": 3404, "segmentation": {"size": [512, 512], "counts": "UTf41m?2N3i@M]>5_AIA4n>6^AO_>3^A0`>3]A0`>2^A0Z>_OgAc0L1[>^OfAZ1X>9M2NO10120O01M2N3M2N3M2N3N1N3M2O20O010ZOaBiN_=U1dBkN\\=R1fBnNZ=P1iBPOW=n0jBROV=l0mBTO4^O\\<[1bCWO1_O]g09000000O1000O10000000O10O1002NO1000O10000000O1000O10005K7I7I6JclT1"}, "image_id": 62, "id": 899}, {"iscrowd": 0, "category_id": 1, "bbox": [134.0, 94.0, 46.0, 35.0], "area": 892, "segmentation": {"size": [512, 512], "counts": "aSS21l?3N2M4M2M4M2M3O2O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010ON2M4M2M4M2M3NflU5"}, "image_id": 62, "id": 900}, {"iscrowd": 0, "category_id": 1, "bbox": [456.0, 111.0, 43.0, 54.0], "area": 1321, "segmentation": {"size": [512, 512], "counts": "gTT73k?2M4M2M4M2M4M2M3N3L3N3L3N3L3N2M4O0010O0010O0010O010M2M3N3L3N3L3N2M4M2M4M2M4M2M3N3L3NY\\6"}, "image_id": 62, "id": 901}, {"iscrowd": 0, "category_id": 1, "bbox": [221.0, 114.0, 21.0, 32.0], "area": 441, "segmentation": {"size": [512, 512], "counts": "[d^33g?7J5K5K5L50O00001O01O01O00010L3L5J5L4L5J]lV4"}, "image_id": 62, "id": 902}, {"iscrowd": 0, "category_id": 1, "bbox": [247.0, 117.0, 39.0, 45.0], "area": 1131, "segmentation": {"size": [512, 512], "counts": "eck3?a?00:F000000=C00000000000000000078A00000000000000000000001O0J600000000000000\\O_l`3"}, "image_id": 62, "id": 903}, {"iscrowd": 0, "category_id": 1, "bbox": [375.0, 130.0, 23.0, 26.0], "area": 420, "segmentation": {"size": [512, 512], "counts": "Sdk51o?7I8H7I2N000000O0100002N00000Fn@FQ?;:00000001O7Hfkh1"}, "image_id": 62, "id": 904}, {"iscrowd": 0, "category_id": 1, "bbox": [504.0, 153.0, 8.0, 21.0], "area": 90, "segmentation": {"size": [512, 512], "counts": "ZUl71m?3L3N3L3N2M4M2WK"}, "image_id": 62, "id": 905}, {"iscrowd": 0, "category_id": 1, "bbox": [422.0, 163.0, 36.0, 40.0], "area": 660, "segmentation": {"size": [512, 512], "counts": "TVc64j?2N2M4FEk@=R?Gk@50O01O01O01O01O1O100O1O100O1O100O1O1O100O1O100O1O1O100O1O100O1O1O100O1O100O1O2N3N1N3M2O1N3M2O2Mdoe6"}, "image_id": 64, "id": 914}, {"iscrowd": 0, "category_id": 1, "bbox": [212.0, 0.0, 82.0, 
56.0], "area": 2444, "segmentation": {"size": [512, 512], "counts": "SQZ31n?2O1M4M2O2M2N3\\OBbA?\\>CbA`0\\>BaA`0]>BbA`0[>CdA=Z>EdA=[>DcA?Z>DdA<[>d01O01O00010N11O1O100O1O100O1O100O1O100O1O1O100O1O100O1O100O1O100O1O100O1O1O100O1O100O1O100O1O100O1O100O1O1O100O1O100O2N2O2M2N3N1N2Nho\\3"}, "image_id": 64, "id": 915}, {"iscrowd": 0, "category_id": 1, "bbox": [360.0, 0.0, 57.0, 49.0], "area": 1450, "segmentation": {"size": [512, 512], "counts": "PPd51o?2N1O1O2N1O000`@IW?6j@LT?3l@OS?1l@1S?Nm@5Q?JPA7o>HQA:n>GPAFPA;o>Fo@P?:O2N1O1O2N1O1O2N1O2N1O1O00O1O3N1N3N11O010ON2O2M2N3M2O1N3M2N2O2M2N2O2M2N3M2O1NP__1"}, "image_id": 64, "id": 916}, {"iscrowd": 0, "category_id": 1, "bbox": [422.0, 0.0, 30.0, 23.0], "area": 393, "segmentation": {"size": [512, 512], "counts": "PPc61o?1O2N1O1O2N1O1O2N1O2N1O1O2N1O1O2NO100O1O1O100O1Cc@6^?Ie@4\\?Kf@4Z?Lg@2Z?Mi@0X?O]Pn0"}, "image_id": 64, "id": 917}, {"iscrowd": 0, "category_id": 1, "bbox": [499.0, 0.0, 13.0, 39.0], "area": 299, "segmentation": {"size": [512, 512], "counts": "Y`i73k?3HKa@8^?Ia@9]?6O1k@@h>b0UA@j>a0TAAk>`0SACk>?SABl>k0N1O1O"}, "image_id": 64, "id": 918}, {"iscrowd": 0, "category_id": 1, "bbox": [262.0, 6.0, 112.0, 106.0], "area": 4795, "segmentation": {"size": [512, 512], "counts": "iQS41o?1N3M2O1[@I^?:_@I^?>N2O2M2N3M21O01O010O000N3O00010O0N3M2O1N3M2N3FcNmA^1Q>eNlA]1S>dNkA_1R>9N2N3N0O000010O0002OiNUB;h=E[B:c=F_B;^=FcB8^=GeB6[=JgB5X=LiB2X=MkB0U=0mBOR=1QCLo<5RCIo<6SCHm<8VCFi<:YCDg<=[C@fROmAl0V>QOmAl0U>SOlAl0U>ROmAl0V>QOmAl0U>SOlAl0f>M2N2N3N1N3M2O1N3M2N2O2M2NomT2"}, "image_id": 64, "id": 919}, {"iscrowd": 0, "category_id": 1, "bbox": [168.0, 16.0, 39.0, 44.0], "area": 834, "segmentation": {"size": [512, 512], "counts": "kPd22n?2M2N3N1N2N3N1N3M2O1N3M2O2M2N3N1N210O010O01O0O2M2N3N1N3M2O1N3M2O2M2N3N1N3M2Oa^h4"}, "image_id": 64, "id": 920}, {"iscrowd": 0, "category_id": 1, "bbox": [451.0, 21.0, 61.0, 73.0], "area": 2169, "segmentation": {"size": [512, 512], "counts": "_aQ72m?2N2N2N2N2N2N2IBi@`0U?Bi@`0U?7N2N2N2N2N3M2N2N2N2N2N2000000000000000001O000000N2N2N2N2fN[AV1i>N2N2O100001O00000000000000000000000000O1N20QN"}, "image_id": 64, "id": 921}, {"iscrowd": 0, "category_id": 1, "bbox": [107.0, 26.0, 40.0, 44.0], "area": 885, "segmentation": {"size": [512, 512], "counts": "bae12m?2N2O2M2N2O2M2N2N3N1N2EZOYAi0e>YOYAh0e>ZOYAh0e>ZOYAf0h>9O0000010O01O3N1N3M2O1N3M2N3N1N3M2O1N3M2O2M2N3M^^f5"}, "image_id": 64, "id": 922}, {"iscrowd": 0, "category_id": 1, "bbox": [148.0, 30.0, 20.0, 17.0], "area": 186, "segmentation": {"size": [512, 512], "counts": "VQZ22n?1N3M2O2M2N100O00010O000100O2N3N1N3M2Okn[5"}, "image_id": 64, "id": 923}, {"iscrowd": 0, "category_id": 1, "bbox": [418.0, 42.0, 66.0, 88.0], "area": 2813, "segmentation": {"size": [512, 512], "counts": "dQa62m?2O2M2e@Ji>8UAJi>9TAI1L^><_AK0K_><`AJ0L]>=`AJ0K6In=d0jAJ0K6Io=_1oAcNn=_1PBcNn=j1M2O1O2O00010O010O00010O00010O010OO1N3M2O1N3M2O2M2N2N3N1N2N3M2O2M21O01M2N3N0O001O2O1N3M2O1N3M2N3N1N2N3M2O1Nhl="}, "image_id": 64, "id": 924}, {"iscrowd": 0, "category_id": 1, "bbox": [43.0, 54.0, 50.0, 54.0], "area": 1422, "segmentation": {"size": [512, 512], "counts": "Wbe02m?2N2N3^@IW?8h@IV?9h@IV?:g@IV?a000O3M2N2O2M2N2O2M2N2N2OO01O1O201O01O000N3M2N2O2M2N2N3N1N3M2N2O2M2N2O2M2N2N3N1N2N3M2OV]a6"}, "image_id": 64, "id": 925}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 63.0, 27.0, 27.0], "area": 409, "segmentation": {"size": [512, 512], "counts": "Yb]13m?1N3M2O2M2N3N1N2N3N1N1O010O000101N3M2O2M2N3N1N3M2O1NdmT6"}, "image_id": 64, "id": 926}, {"iscrowd": 0, "category_id": 1, "bbox": [209.0, 72.0, 73.0, 72.0], "area": 2271, "segmentation": {"size": 
[512, 512], "counts": "ebX31n?2N3N1N2N3M2O1N3M2N2O2M2N2N10O01O2O110O00010O0010O00010ROQAh0o>WORAi0T?0O0N2N3N1N2LnNWAU1f>4N3N1N2N3M2O1N3M21O01OO0O100O3M2N2O2M2eN`AS1b>kN`AS1h>01N1N3TOQAb0Q?\\OQAc0P?[ORAc0X?M2O1N3M2N2O2M2N2NRlb3"}, "image_id": 64, "id": 927}, {"iscrowd": 0, "category_id": 1, "bbox": [375.0, 85.0, 64.0, 76.0], "area": 2408, "segmentation": {"size": [512, 512], "counts": "Uck51o?1N3M2N3N1N2N3M2O1N3M2O1N3VAUOZ>m0eAUOX>n0eATONMT>P1lAVOMLU>P1lAVONLS>Q1mA\\OR>Z1N3M2N3N100010O0N2O2M2N3O00010O0O1O2M2N3N1N210N1N2N3N1N3M2O1JUATOn>i0TAUOn>j05N3M2O1N3M2N3N1N2N3M2O1Nf[T1"}, "image_id": 64, "id": 928}, {"iscrowd": 0, "category_id": 1, "bbox": [164.0, 105.0, 76.0, 77.0], "area": 2362, "segmentation": {"size": [512, 512], "counts": "QUb23l?2N2O2M2N3M2O1N3M2N3N1N2N3N0O000010O0001O01O0001O01O01O01O00[OROYBo0f=SOXBm0h=VOVBi0j=YOnAJLn0V>YOlAKLl0X>\\OjAi0V>YOhAg0Y>`0O000010O000010O0002O2M2N2N3N1N3M10O0001O3N1N3M2O2M2N2O2M2N3M2O1N3M2O2M2N2NhkW4"}, "image_id": 64, "id": 929}, {"iscrowd": 0, "category_id": 1, "bbox": [334.0, 111.0, 64.0, 79.0], "area": 2416, "segmentation": {"size": [512, 512], "counts": "jSW51n?2d@Ng>4WAOg>3WANg>4WAOf>3XAOf>4WANh>3WAOf>3XAOf>4WAOg>f0]AiN7Of=Z1RBiN6Nf=\\1QBhN7Nf=d1WB_Ng=b1WB`Ng=b1XB`Nf=n100010O010M2N3N1PNTBi1n=VNTBg1T>O1N3M2O2M21O0OO00102M2N2N3N1N3M2O2N11O000N000101N3M2N3N1N3M2O1N3M2O2M2N2N3Nfjh1"}, "image_id": 64, "id": 930}, {"iscrowd": 0, "category_id": 1, "bbox": [79.0, 118.0, 67.0, 47.0], "area": 1648, "segmentation": {"size": [512, 512], "counts": "RdW11n?2N3N1N2N3N1N2N3N1N3M2N2O2M2N2O2N10010O00010O00010M10O0000010O0000010O0000010O00003N1N2N3N1N2N3M01O01O0001O01O00011N2N2N3N1N2N3M2O2Mlkf5"}, "image_id": 64, "id": 931}, {"iscrowd": 0, "category_id": 1, "bbox": [288.0, 142.0, 61.0, 67.0], "area": 2174, "segmentation": {"size": [512, 512], "counts": "XU`42n?1[@NY?5e@LY?6e@Mk>L]A9EM8JY>2hA9FM7JY>1hAd0M]OY>2gAd0M]OY>1iAc0M]OY>2gAn0W>`0M2O2M2N100O000011N2N101N3M2N3N1N3M2O1N3M2O2M2N3M21O010O01O010O010N0O1O010O3M2O2M2N3N1N2N3M2O2M2N3N1NoYa2"}, "image_id": 64, "id": 932}, {"iscrowd": 0, "category_id": 1, "bbox": [123.0, 167.0, 51.0, 42.0], "area": 1085, "segmentation": {"size": [512, 512], "counts": "gem12n?2M2N3N1N3M2N2O2M2N3N101O0O2M2N2O1N00010O0000010O00010O0001O2O1N3M2O2M2N3N1N1O1O01O000102M2N2N3N1N3MYjX5"}, "image_id": 64, "id": 933}, {"iscrowd": 0, "category_id": 1, "bbox": [500.0, 193.0, 12.0, 24.0], "area": 142, "segmentation": {"size": [512, 512], "counts": "YVj71o?1N2N3M2O1N3M2N2O2M2N3mI"}, "image_id": 64, "id": 934}, {"iscrowd": 0, "category_id": 1, "bbox": [99.0, 200.0, 28.0, 28.0], "area": 412, "segmentation": {"size": [512, 512], "counts": "cfa12n?1N3M2N2O2M2N2O2M2N2N3NO01O0001O2O2M2N2O2M2N2N3N1N2N3MZYP6"}, "image_id": 64, "id": 935}, {"iscrowd": 0, "category_id": 1, "bbox": [176.0, 205.0, 47.0, 68.0], "area": 1529, "segmentation": {"size": [512, 512], "counts": "QXh21g>0_B2_=0`B1^=2_B1_=0_B2_=0`B2]=1`B1_=0_B3^=O`B3^=0`B1^=1`B2^=O`B3^=0`BO`=3^BMc=4[BMd=6YBJg=8XBGh=mN^AR1d>kN_AR1i>O2O010O00010O01]H"}, "image_id": 64, "id": 937}, {"iscrowd": 0, "category_id": 1, "bbox": [510.0, 218.0, 2.0, 3.0], "area": 4, "segmentation": {"size": [512, 512], "counts": "kVo71n?2UI"}, "image_id": 64, "id": 938}, {"iscrowd": 0, "category_id": 1, "bbox": [403.0, 231.0, 80.0, 72.0], "area": 2730, "segmentation": {"size": [512, 512], "counts": "ShY61n?2N3N1N2N3N1N2N3M2O2M2N000KZORAf0n>[OPAh0m>7N2N3N1N2N3N1N20N2O1N00010O000002N3M2N2O2M2O2O01O01O01O01O01O01ON3M2O2^NdA[1^>dNcAZ1d>M2O1N3N100010O0010O0O1O2M2N3N1N2N3M2O1N3M2N2O2M2N2N3NmV>"}, "image_id": 64, "id": 939}, {"iscrowd": 0, 
"category_id": 1, "bbox": [148.0, 266.0, 28.0, 28.0], "area": 431, "segmentation": {"size": [512, 512], "counts": "eXZ22m?3N1N3M2N3N1N3M2O2M2N100O000010O2N3N1N3M2N3N1N2N3N1N3MWgW5"}, "image_id": 64, "id": 940}, {"iscrowd": 0, "category_id": 1, "bbox": [371.0, 270.0, 77.0, 82.0], "area": 2938, "segmentation": {"size": [512, 512], "counts": "[ii51n?3M2N2O1N3M2N2O2M2N2N3M2O1JUOTAn0i>6N2N3N1N2N3M2OO0001O102M2N2N3N1N2N3O00N3M21O01O01O01O0O1O110O0YNkAb1U>\\NmAe1W>01O000O1N3N1N2N3M2O1N3M2N2N3N1N2N3M2O1N3M2N2O2M2N2N2O2M2Neeo0"}, "image_id": 64, "id": 941}, {"iscrowd": 0, "category_id": 1, "bbox": [332.0, 286.0, 28.0, 27.0], "area": 420, "segmentation": {"size": [512, 512], "counts": "YYV51n?2O2M2N3N1N3M2O2M2N3N0O1O010O000101N2N3N1N3M2O2M2N3N1Nef[2"}, "image_id": 64, "id": 942}, {"iscrowd": 0, "category_id": 1, "bbox": [286.0, 292.0, 62.0, 62.0], "area": 1810, "segmentation": {"size": [512, 512], "counts": "ZZ_41n?3M2N2O2M2N2O2M2N2N3N1N3M2O1N3M2N2O0O1O01O01O000FlNjAS1V>oNhAR1W>QOfAo0[>ROdAm0\\>UObAk0^>N\\Ai0b>YO\\Ai0b>ZO\\Ah0b>YO\\Ai0b>nN`AR1_>70010O00010O0001O01O00010O00010O000010O00010O00010O0001O01O000M`AhN`>W140010O0001O01O00010O0001O01O01O01O01O01O102M2N3N1N3N101M2N3N0O00100O3M2O2M2N3M2M4N1NkdQ2"}, "image_id": 64, "id": 947}, {"iscrowd": 0, "category_id": 1, "bbox": [226.0, 337.0, 53.0, 59.0], "area": 1635, "segmentation": {"size": [512, 512], "counts": "d[a33l?2O1N3_OITA9j>IUA9i>HUA:i>ITA:i>HUA:j>HTA9j>a0M2O2M2N2N3NO01O01O0001O01O01O01O00010O002O2M2N3N1N2POXAe0k>XOWAf0k>YOVAf0T?M1O01O01O2N3N1N3M2O2M2N2O2M`Td3"}, "image_id": 64, "id": 948}, {"iscrowd": 0, "category_id": 1, "bbox": [270.0, 338.0, 31.0, 30.0], "area": 485, "segmentation": {"size": [512, 512], "counts": "nZW41o?2M2N2O2M2N3M2O2M2N2O2M2N010O00000101N2N2O2M2N3N1N3M2N2O2M2NoTY3"}, "image_id": 64, "id": 949}, {"iscrowd": 0, "category_id": 1, "bbox": [381.0, 351.0, 27.0, 26.0], "area": 364, "segmentation": {"size": [512, 512], "counts": "Zkn51n?2O1N2N3M2N2O1N3M2N2N2O1N000101N2N2N3M2N2O1N3M2N2N2Oddc1"}, "image_id": 64, "id": 950}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 357.0, 53.0, 47.0], "area": 1257, "segmentation": {"size": [512, 512], "counts": "l[i61n?2N2O2M2N3N1N2N3M2O1N3M2O2M2N2N3N1N1O01O2O1N001O01O01O01O0001O01O00010O0002N2O2M2N2N3N1N2N3N1N3M2N2O2M2NXT<"}, "image_id": 64, "id": 951}, {"iscrowd": 0, "category_id": 1, "bbox": [393.0, 369.0, 31.0, 30.0], "area": 493, "segmentation": {"size": [512, 512], "counts": "kkT61o?2M2N3N1N3M2O2M2N3N1O2N1N2N10O00011N2N2O2M2N3N1N3M2O2M2N3N1Nnc[1"}, "image_id": 64, "id": 952}, {"iscrowd": 0, "category_id": 1, "bbox": [66.0, 391.0, 56.0, 48.0], "area": 1344, "segmentation": {"size": [512, 512], "counts": "T]Q11o?2M2N2O2M2KFa@<]?3010O000101N2N3M2O1N00012M2N3M2O1N3M010O0000010O00010O00000101N2N3M2O2M2N2O2M2N3M2O1N3M2O2M2N2N3NPcR6"}, "image_id": 64, "id": 953}, {"iscrowd": 0, "category_id": 1, "bbox": [463.0, 398.0, 49.0, 80.0], "area": 2016, "segmentation": {"size": [512, 512], "counts": "VmW72m?2O2M2N3N1N2N3N1N3M2O2M2N3N1N3M2ImN^AV1_>lN`AU1^>9N1N2N3M2O2M2O10N3N0O00010O01O30O001cNkAm0T>ROnAm0S>POoAP1Q>nNQBS1o=jNTBU1l=jNUBW1k=fNXBY1X>0010O010O\\B"}, "image_id": 64, "id": 954}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 408.0, 102.0, 104.0], "area": 5369, "segmentation": {"size": [512, 512], "counts": "l=7i?1N3M2O1N3M2O2M2N2O2M2N2O2M2N3M2O1N3M2O1N3M2O1N3M2O2M2N2O2M2N2O2M2N3N1N2N3M2O1N3M2O2M2N2O1N1O100O1O100O1O1002N1O2N1O2SNQCe0P=ZORCd0oWOXAj0g>800O1O1O100O1O100O1O11O1O2N1N3M0010O0002O2M2VOQA`0R?]OPAa0R?^OPA`0Q?^OQA`0Y?N3M2O2M2N2O2MaP7"}, "image_id": 64, "id": 959}, {"iscrowd": 0, 
"category_id": 1, "bbox": [105.0, 472.0, 51.0, 40.0], "area": 1052, "segmentation": {"size": [512, 512], "counts": "bod13l?2O1N3M2N3BDTA=j>ETA=j>FSA=k>DTAFSA;l>=O100O1O10@VAJi>7XAHg>8[AGd>9_AEa>2ZAJ63a>2[AH56b>O[AI39c>LeA5]>HcA9^>FbA:_>DaA=P?1O100O1O100O1O1O100O1O1O100O11O2N1Jf@D\\?9f@F[?9g@E[?88N3N1N]`a5"}, "image_id": 64, "id": 960}, {"iscrowd": 0, "category_id": 1, "bbox": [365.0, 473.0, 22.0, 20.0], "area": 222, "segmentation": {"size": [512, 512], "counts": "Sof51o?2M2N2O2M2N3N0O1O01O00010O00012M2N2O2M2N3MoPn1"}, "image_id": 64, "id": 961}, {"iscrowd": 0, "category_id": 1, "bbox": [349.0, 496.0, 27.0, 16.0], "area": 248, "segmentation": {"size": [512, 512], "counts": "oo^51k?0W@1h?4O100O1O100O1O100O1O100O1O1O11O2N1O2N1O1O2N1O2N1OR`S2"}, "image_id": 64, "id": 962}, {"iscrowd": 0, "category_id": 1, "bbox": [424.0, 506.0, 13.0, 6.0], "area": 47, "segmentation": {"size": [512, 512], "counts": "o_d61o?0O1O1O1O100O1001O1O1O1ORPU1"}, "image_id": 64, "id": 963}, {"iscrowd": 0, "category_id": 1, "bbox": [449.0, 139.0, 63.0, 46.0], "area": 1560, "segmentation": {"size": [512, 512], "counts": "jdP72l?6J2_@HW?H`A:^>I_A:_>G`A;]>H`A:_>H`A9]>IdA7Y>LfA4Y>NgA2V>0kAOT>3kANR>4oALP>5oAKR>5nAHT>8mAEV>;iADY>=bAC`>=]ACe>j03HXAUOj>i0WAUOk>i0XAUOj>i080O010O10O10O1N1O2M2N3N2O010ON3000O0100N1N3M2O2M3Ai@2X?Lj@2Y?Li@1Z?Li@2X?Mi@1bhX3"}, "image_id": 66, "id": 965}, {"iscrowd": 0, "category_id": 1, "bbox": [374.0, 213.0, 42.0, 55.0], "area": 1429, "segmentation": {"size": [512, 512], "counts": "dWk53\\?a00000B>000000000001O000VA]OW>W1O0000000000aNmAQ1a>000000000000000001O0O1^Ob0000000000000000000_O]i_1"}, "image_id": 66, "id": 966}, {"iscrowd": 0, "category_id": 1, "bbox": [324.0, 271.0, 53.0, 72.0], "area": 2995, "segmentation": {"size": [512, 512], "counts": "UZR5`0g>i0000000000000E;K60O00000000000000000000000000000000000000B>000000000000000000000000000000c0^OO0000000b?IWPo7"}, "image_id": 69, "id": 976}, {"iscrowd": 0, "category_id": 1, "bbox": [8.0, 0.0, 89.0, 48.0], "area": 3053, "segmentation": {"size": [512, 512], "counts": "oP45e?6I7I8N11O00000001O01O0000000001O01OJ6M3000001O000001O0001O00000001O0001O0J42N200001O0000000000001O4L001O0000000000001O0000000000001O00000000001O01O0I7J6I7K501O0001O000000L5I6IT`_6"}, "image_id": 69, "id": 977}, {"iscrowd": 0, "category_id": 1, "bbox": [192.0, 0.0, 5.0, 1.0], "area": 5, "segmentation": {"size": [512, 512], "counts": "PPP31o?0000000P`m4"}, "image_id": 69, "id": 978}, {"iscrowd": 0, "category_id": 1, "bbox": [247.0, 0.0, 11.0, 2.0], "area": 12, "segmentation": {"size": [512, 512], "counts": "P`k31o?0000000000000000001oon3"}, "image_id": 69, "id": 979}, {"iscrowd": 0, "category_id": 1, "bbox": [284.0, 0.0, 58.0, 14.0], "area": 558, "segmentation": {"size": [512, 512], "counts": "PP^46j?0000001O00000000000000000000000000000000001O0000000000000000000000005K0000000000000000001O0000000000000000000000HXPe2"}, "image_id": 69, "id": 980}, {"iscrowd": 0, "category_id": 1, "bbox": [433.0, 0.0, 76.0, 30.0], "area": 1985, "segmentation": {"size": [512, 512], "counts": "P`h64l?00000i@N`>2WA7i>In@`0R?70001O000000000000001O00000000000000001O0000000000L4000000000000001O00000000000000001O00000000000000001O000000000000001O000000000000O1G9H8GY`1"}, "image_id": 69, "id": 981}, {"iscrowd": 0, "category_id": 1, "bbox": [170.0, 9.0, 88.0, 52.0], "area": 3507, "segmentation": {"size": [512, 512], "counts": 
"XQe26`?:K501O8H001J5E;0000001O00000001O0000000001O000N20000002N0004L10O0000000000000000N3N1000000001O00000001O0000000001O2N000001O00J7N100000000001O00000001O0000000001oNgA9Y>FhA1b>N_AGl>8>0000Igon3"}, "image_id": 69, "id": 982}, {"iscrowd": 0, "category_id": 1, "bbox": [255.0, 11.0, 88.0, 58.0], "area": 3223, "segmentation": {"size": [512, 512], "counts": "jao34a?;G9000001K4E;E;O100000000000001O01O00000000000J60004eNaAm0i>0000001O00000001O000000000001O00000001O000000000001O00000001O05K00001O000000000001O00000001O000000000001O00000001OE;E;E\\_d2"}, "image_id": 69, "id": 983}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 18.0, 1.0, 12.0], "area": 12, "segmentation": {"size": [512, 512], "counts": "b`o70000000000000000000iN"}, "image_id": 70, "id": 988}, {"iscrowd": 0, "category_id": 1, "bbox": [245.0, 113.0, 54.0, 44.0], "area": 1459, "segmentation": {"size": [512, 512], "counts": "ocj33m?3M3L4M3M3L4M3M3L4M00000O01000O01000O01000O01000O01000O01000O01000O01000O01000O01000O03N3M3L4M3M3L4M3M3L4Mb[Z3"}, "image_id": 70, "id": 989}, {"iscrowd": 0, "category_id": 1, "bbox": [418.0, 129.0, 68.0, 70.0], "area": 1921, "segmentation": {"size": [512, 512], "counts": "hUa61n?2N2N3FIg@8W?Jg@8NFV?3j@9NFV?g0N2N2N01O01O000000010O001O00010O0JSOZAm0f>UOXAk0i>5EmNjAT1U>nNiAR1W>QOfAo0Z>SOdAm0]>TObAk0^>;010O3M2N2N2N2O0O0MVAQOj>P120001O000001O01O00002O2M2N2N2N3M2O1N2N2N3M2O1N2N2N^k<"}, "image_id": 70, "id": 990}, {"iscrowd": 0, "category_id": 1, "bbox": [276.0, 147.0, 30.0, 31.0], "area": 573, "segmentation": {"size": [512, 512], "counts": "jTZ44l?3M2M4M3M3L4M3MO010O10O10O10O10O01000O01002M4M2N3L4M3L4M2NhjV3"}, "image_id": 70, "id": 991}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 84.0, 49.0, 75.0], "area": 2464, "segmentation": {"size": [512, 512], "counts": "g2X2h=O10000000O10000;00000000O100000000000O1000000000000000>B=C00O18G[\\W7"}, "image_id": 72, "id": 992}, {"iscrowd": 0, "category_id": 1, "bbox": [276.0, 131.0, 109.0, 91.0], "area": 6419, "segmentation": {"size": [512, 512], "counts": "hTZ44l?8H7I7H8I5K00O10L4000000000O0107I8H7I7I5K0O10000000O1000M30000000O10O10000L4M\\OiBeNW=Z1jBeNW=[1iBeNW=[1hBfNX=o0iBdNO=X=o0SCQOlK^ANb>2fAFZ>9f01000000001O7H][o1"}, "image_id": 72, "id": 993}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 132.0, 52.0, 48.0], "area": 1158, "segmentation": {"size": [512, 512], "counts": "[eo23k?2M4M2M4M2M3N3L3N3L3N2M4M2M4M2N02N3L3N3L3N2M4M2M4M2O110O0010O0010O0010O0010O0010O0010O001O0N2M4M2M4M2Ma[V4"}, "image_id": 72, "id": 994}, {"iscrowd": 0, "category_id": 1, "bbox": [472.0, 140.0, 40.0, 75.0], "area": 1922, "segmentation": {"size": [512, 512], "counts": "\\U\\72k?3N2M4L3N2M4L3bAZO`=h0^B[OY=OQBi0b0[O\\=l0aBXO^=i0_BYOa=g0\\B\\Od=d0YB@e=b0WBAi=?UBCk=Z1O0O2O00001O02N3N1N3M2UNnAe1V>0010O00010O0010ON2M4L3BaAYO`9"}, "image_id": 72, "id": 995}, {"iscrowd": 0, "category_id": 1, "bbox": [53.0, 163.0, 54.0, 58.0], "area": 2147, "segmentation": {"size": [512, 512], "counts": "^ej02n?5K0GJh@6X?902M=D;E;E4L0000000000000O100000O7J0000000000O10000000]OhA]OY>c0SBQOm=o0b000000L40O10000000O100000000006I3N009G?AmYZ6"}, "image_id": 72, "id": 996}, {"iscrowd": 0, "category_id": 1, "bbox": [443.0, 213.0, 28.0, 27.0], "area": 460, "segmentation": {"size": [512, 512], "counts": "Wgm61m?3L3N3M2N2M4M2N3O01O01O010O01O010O01O01O0N3M2M3N3L3N3LVYd0"}, "image_id": 72, "id": 997}, {"iscrowd": 0, "category_id": 1, "bbox": [71.0, 226.0, 61.0, 52.0], "area": 1980, "segmentation": {"size": [512, 512], "counts": 
"dgS14l?5K5K5J6K4L2N4K1000O10O100000O10O1000O10O10JWAUOi>k0OSO[Al0f>XOUAi0k>60000O010000000O4M00O1000O1000O10O100000O10O100000O10O[O[A1e>O`AL`>3fAG[>9jABW>=k0K4L5J`hm5"}, "image_id": 72, "id": 998}, {"iscrowd": 0, "category_id": 1, "bbox": [482.0, 263.0, 30.0, 54.0], "area": 1127, "segmentation": {"size": [512, 512], "counts": "WYa74j?2BMl@7Q?Kl@8Q?Km@7Q?>L3M4M2O110O001ZAkN`>[1O01O01O010O00010O01O01O01iN^Ao0a>PO`AP1c>mN]AS1c>nN]AR1Q6"}, "image_id": 72, "id": 999}, {"iscrowd": 0, "category_id": 1, "bbox": [194.0, 264.0, 37.0, 75.0], "area": 1858, "segmentation": {"size": [512, 512], "counts": "_XQ39h?:^Oa0F;E8H000O100000O1000000:F9JM3M0O100002N0O10O101O:F:F;E:F1O00000O5J?aAIW>P100O1000O10000000000000O1000O100000000000O10000207G:F8H00O100000O100000000000O100000O100000000000O1000O100000000000[O[OnAf0R>DdA<\\>d0O0100000000000000K50O100004K4M00000000O10000000O10000000O100005K000O10O10000004L:F:FSVc2"}, "image_id": 72, "id": 1001}, {"iscrowd": 0, "category_id": 1, "bbox": [130.0, 290.0, 53.0, 51.0], "area": 1805, "segmentation": {"size": [512, 512], "counts": "XYQ2>b?0000000k@Ia>7_A1Y>OgA1Y>OgA1U>o0000000O10000000O100000000000000000O1000000000004L0?A>B000O01000000000000000000000000006JlUT5"}, "image_id": 72, "id": 1002}, {"iscrowd": 0, "category_id": 1, "bbox": [76.0, 293.0, 50.0, 52.0], "area": 1760, "segmentation": {"size": [512, 512], "counts": "WZV1?a?000POGYB9g==cAC]>P10000000000O10000000000000O100000000000O1000004L0P?Em@>o>Eo@=f>^O_An0^>UO^An0_>UO_Am0a>:O0010O0010N1N30O02N010O01O01O010O01O01O010OWOeAM\\>MiA4V>LkA3V>KkA5U>InA7Q>GQB9P>CSB>l=@WB?j=[O[Be0e=YO^Bf0^>M3N3L3N3M2M4MSck0"}, "image_id": 72, "id": 1006}, {"iscrowd": 0, "category_id": 1, "bbox": [396.0, 434.0, 50.0, 63.0], "area": 1782, "segmentation": {"size": [512, 512], "counts": "l^V63j?3N3L3M3FDn@?o>Cn@`0o>;L3N2M4O0010OO1M4M2M40O0011N1O010O01O010O00010O010O0WOfALZ>1iA0W>MkA3U>JoA6Q>FRB:n=DUB;k=BXB?h=^OZBb0f=[O^Be0^>O2L3N2M4M2M3N3LWaP1"}, "image_id": 72, "id": 1007}, {"iscrowd": 0, "category_id": 1, "bbox": [455.0, 450.0, 57.0, 62.0], "area": 2304, "segmentation": {"size": [512, 512], "counts": "aoS71m?2N3L3TOHnA;o=HmA;P>HnA:P>HnA;o=HmA;S>EcAG2g0Z>HcA;^>c0010O01M2N3N11O010O01O01O010O010N1M3N3L3N3M2M3N3L3N3M21cAWOb=h0[B[Oe=f0XB\\Oh=d0VB^Ok=b0QBAo=?oACR>:hAHX>8eAK\\>l0001O00001O001O001O"}, "image_id": 72, "id": 1008}, {"iscrowd": 0, "category_id": 1, "bbox": [384.0, 498.0, 51.0, 14.0], "area": 446, "segmentation": {"size": [512, 512], "counts": "n_P62k?3N2N2N2M300001O001O001O001O00001O001O001O001O00N2N2N2O11O001O001O00001O001O001O001O00001O001O001O0OTPV1"}, "image_id": 72, "id": 1009}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 0.0, 88.0, 62.0], "area": 2944, "segmentation": {"size": [512, 512], "counts": "SPR62m?2O2M2N2O2N1O2N1O2N1O2N1O2N1O2N1O2N1O2N1O2N1XAmNa>U1\\AmNc>Z1N1O2N1O2N1O2N1O2N1O2N10N2N3N1N3M2O2M2N3N0O1O01O01O01O011N201O010O01O0N3M2O2M2N3N1N3M20N010O00010O00010O01O3N1N3M2O2M2L_@Fd?74N3N1Nlna0"}, "image_id": 73, "id": 1010}, {"iscrowd": 0, "category_id": 1, "bbox": [461.0, 0.0, 51.0, 34.0], "area": 1090, "segmentation": {"size": [512, 512], "counts": "P`V71o?1O2N1O2N1O1O2]@F\\?;b@H\\??O1O2N1O2N1O2N1O2N1O2NO100O1O100O1O1O100O1O100O1O100O1O100O1O1O1O1O1002N1O00O1O100O1"}, "image_id": 73, "id": 1011}, {"iscrowd": 0, "category_id": 1, "bbox": [344.0, 8.0, 66.0, 88.0], "area": 3259, "segmentation": {"size": [512, 512], "counts": 
"oP\\51o?2N3L4M3M3L4M3L10ObAZOd=h0YB\\Oc=h0ZB[Oc=g0[B[Oc=h0YB\\OY=DaBT13[OY=DaBT13[OX=DbBU12YO[=T1bBkN^=l1000O01000O0100O0100O01000O0100O03N2N3L4M2N1N10O010002M4M2N3HjA_N]>8aAi0f>TO]Ah0g>UO\\Ah0n>00O010O01001N4M3M2M4M3M3L3Njmb1"}, "image_id": 73, "id": 1012}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 16.0, 68.0, 79.0], "area": 2854, "segmentation": {"size": [512, 512], "counts": "k`m23m?1N3M2O2M2N3N1N3M2O2M2N3fATO\\=m0bBVO[=m0cBTO[=n0cBUO[=m0bBUO\\=m0cBUO\\=l0aBVO_=j0_BYOa=g0]BZOc=f0[B]Oe=c0XB_Og=b0XB@f=b0WB@g=^1N10O00010O002O2M2N3N1N3M2O2M210O010O010O0O2N1N3N1N3M2O2M2N3N1N3TOQAb0Q?\\ORAb0P?[ORAc0X?M2O2M2N3N1N3M2Oa]P4"}, "image_id": 73, "id": 1013}, {"iscrowd": 0, "category_id": 1, "bbox": [128.0, 48.0, 81.0, 72.0], "area": 2723, "segmentation": {"size": [512, 512], "counts": "jQP21n?3M2O2M2N3N2M2N3N1l@^Og>e0VA]Oh>e0WA]Og>e0VA]Oh>P1M2O2O1O0100M2O2M2N1000O3M2O20O010O010O010N1O2M2N3N0O00010O0002O1N3M2O2M2N3N1N3M2O20O0O2N1N3N1N3M2O2M2N3N1N3M2O2M2N3N1N3M2O2M3M2O2M2N3Ni\\g4"}, "image_id": 73, "id": 1014}, {"iscrowd": 0, "category_id": 1, "bbox": [70.0, 80.0, 83.0, 87.0], "area": 3424, "segmentation": {"size": [512, 512], "counts": "WSS11o?2M3N2M3M3N2M3N1N3F\\OTAf0i>]OTAe0k>7010O0100O3N2M3NO010O0100O3N2M3M3RB[N^=f1_B\\Nb=d1\\B^Nd=a1ZBbNc=a1[BaNc=a1ZBbNc=P2N2M3N2M2O0O0100O3N2M3N2M2O2M3N2TNTB_1o=_NRB`1P>]NSB`1P>]NSBa1X>M3N2M3M3N2M2O2L4M2O0O010O0010O010O0100O3N2M2O2M3N2M3N2M3Nc[c5"}, "image_id": 73, "id": 1015}, {"iscrowd": 0, "category_id": 1, "bbox": [115.0, 139.0, 92.0, 82.0], "area": 2920, "segmentation": {"size": [512, 512], "counts": "bei13m?2M4M3L3N3M2M2OO10O10Ol@\\Om>e0SA]Ok>b0UABh>>XAEd>>ZADd>>YAFd>n0L3N1O03L4M2N3L4M2M10000O01000O010O01000O010JPA\\OP?d0SAXOm>i05O010O2O1O0O10O10O10O010O1nNZOjBe0V=_OgBa0Y=AdB?]=D`B<_=H^B8b=J[B6f=MWB3h=0UB1k=2RBMn=7oAIQ>9lAGU>R1O10O10O10O010O3N2N3L4M2N3L4M2M4M2N3L4M2N3L4M2M4M3MVZh4"}, "image_id": 73, "id": 1016}, {"iscrowd": 0, "category_id": 1, "bbox": [395.0, 156.0, 78.0, 63.0], "area": 2978, "segmentation": {"size": [512, 512], "counts": "TeU646M_?6]@N_?=M4L3M3L4M4LO010001N5L3M3L4M4L3L22L5L3M0O01000O01000O0100000O01000O01000O01000KcAfN]>[14O10O10O10O10O10O1000O10O10O10O11N4M3M4K4M3M3M3L4M31OO2L3M3MO010000O4MhYc0"}, "image_id": 73, "id": 1017}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 168.0, 40.0, 77.0], "area": 1845, "segmentation": {"size": [512, 512], "counts": "X5Q2Q>0O0100O010O010OOO00010O01O01O01O01O01OXOSB^On=b0TB[Om=d0VBZOk=d0WBZOl=d0VBYOl=e0VBZOl=c0WBZOl=d0UBZOm=d0VBZOl=c0VB[Ol=d0j0M2N3N1N3M2O2M3N1Nji[7"}, "image_id": 73, "id": 1018}, {"iscrowd": 0, "category_id": 1, "bbox": [241.0, 184.0, 73.0, 61.0], "area": 2587, "segmentation": {"size": [512, 512], "counts": "ofh34l?4L3M4K4M4L0O1000O10O1000O0100D[O]Ad0c>AYA?g>=000O01000O0100FPOdAn0KoNa>6aAk0`>;0O1000O01000OM_AiNa>W14O10O1000O01000O10O4M3M2M10O1000O10O10O10O1000O10O10000O4M4PO]A;g>A\\AA]A:h>A\\AR15000O01000O01000O0100000O01000O01000O0100000O01000O0102N4K4M3M4L3L4M4L3L[X\\4"}, "image_id": 73, "id": 1020}, {"iscrowd": 0, "category_id": 1, "bbox": [412.0, 229.0, 73.0, 52.0], "area": 2671, "segmentation": {"size": [512, 512], "counts": "ZW^66j?7I5K00O10O10000h@Dl>o0I7H5L0000O1000O10000000O010000000000O01000001O001O1N2O001O1O001O1N2O4L000000000O0100000000000O0100000000000O010000003M7I7H8I7IbW="}, "image_id": 73, "id": 1021}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 238.0, 72.0, 74.0], "area": 3271, "segmentation": {"size": [512, 512], "counts": 
"h8=c?3L5L3M3M4K3N000O010000O01000O010000O0[OSOYBm0g=VOVBj0j=YOSBf0n=[OPBf0o=[OQBe0o=[OQBe0o=ZOQBf0o=[OQBe0o=]OoAc0Q>@kA`0V>e0K\\NmAe1S>31000O10O10O10O1000O01000O101O3L10O1000O01000O11N4M4L3L3NO1002M5L3M3YOWA0n>LUA1n>KVA2n>JVA1n>LUA1`?Mgfk6"}, "image_id": 73, "id": 1022}, {"iscrowd": 0, "category_id": 1, "bbox": [259.0, 249.0, 73.0, 65.0], "area": 3254, "segmentation": {"size": [512, 512], "counts": "lhQ42n?7I6J6I7J3MO10EVO`Aj0`>[OZAf0f>:O010000000O010000000O010000000O010000000O010DjNnAV1Q>POjAP1V>=0O1000O1000O1000O1000O100000O6K0000000O010000000O01000001O3L10001UOfAN_>EcAG3?_>EnA5X>FmA5T?Kbfi2"}, "image_id": 73, "id": 1023}, {"iscrowd": 0, "category_id": 1, "bbox": [156.0, 275.0, 93.0, 60.0], "area": 3471, "segmentation": {"size": [512, 512], "counts": "WY^25k?6I6K6J00O1000O100000O10O1000003EXOXAh0h>8O100000O12M6K6J5K000O01000000000O0100000005J6K0000000O01000000000L8L00O1000O100000O1000O10DTOdAl0\\><000O0100000000O010000000O0100000000O015K5K6J5K5J7J5K5KSVS4"}, "image_id": 73, "id": 1024}, {"iscrowd": 0, "category_id": 1, "bbox": [426.0, 280.0, 70.0, 61.0], "area": 2607, "segmentation": {"size": [512, 512], "counts": "jYe61o?5EO]@7]?;K002N2M10O10000000O10O100000O4M5K0000000O01000000000O010^OQOSBo0l=XOnAh0R>^OhAb0X>c00O1000O100000O1000O1000LiA_NW>a14000O1000O100000O11O6I7J6J5K00000O5L6J6J6Jje7"}, "image_id": 73, "id": 1025}, {"iscrowd": 0, "category_id": 1, "bbox": [284.0, 311.0, 10.0, 8.0], "area": 57, "segmentation": {"size": [512, 512], "counts": "iY^41n?6K00O10000000O12NWf\\3"}, "image_id": 73, "id": 1026}, {"iscrowd": 0, "category_id": 1, "bbox": [12.0, 315.0, 83.0, 62.0], "area": 2870, "segmentation": {"size": [512, 512], "counts": "jZ62m?5L4L5J5L5K4L3L1IoN_AQ1`>UO[Ak0e>80O10O1000O10O1000O1000O1N2O0100000O010GeNmA[1S>jNhAV1W>:0O100000O0100000O0100000O10O10003M4K6K4L4POSAe0X?K3N02N3L0100000O010000000O0100000O0100000O0104L4L4L5J5LdT`6"}, "image_id": 73, "id": 1027}, {"iscrowd": 0, "category_id": 1, "bbox": [266.0, 326.0, 79.0, 59.0], "area": 3267, "segmentation": {"size": [512, 512], "counts": "Q[U43m?;E;D5L0000000000000000O10O102N2NO10000000000000O1KPOZAP1f>5O010000000000000000000O10O10F:00000000000000000O0101O1J`NjA`1V>5000000000O1000000000O1000000000O1000007UOcAEd>4gAAd>LRA2cdc2"}, "image_id": 73, "id": 1028}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 340.0, 8.0, 38.0], "area": 160, "segmentation": {"size": [512, 512], "counts": "d:V1o>K5K5K5J6K5K5KYdk7"}, "image_id": 73, "id": 1029}, {"iscrowd": 0, "category_id": 1, "bbox": [168.0, 344.0, 87.0, 51.0], "area": 3137, "segmentation": {"size": [512, 512], "counts": "lZd28h?=C1O000000O100000g@Fk>;UA2^>NbA2^>NbA1_>OaA1_>OaA1_>h00000N2000000000000O010000000000000000M303M0000O10000000O1000000000007I3MO10000000O1000000000000000O10000000L402N2N00000000000000000O100000;E=CRTP4"}, "image_id": 73, "id": 1030}, {"iscrowd": 0, "category_id": 1, "bbox": [24.0, 384.0, 72.0, 47.0], "area": 2524, "segmentation": {"size": [512, 512], "counts": "l\\^>b0000000000000000000000000000000000000000000000000000000000]Oc00000000000000000000Pd_6"}, "image_id": 73, "id": 1031}, {"iscrowd": 0, "category_id": 1, "bbox": [272.0, 399.0, 77.0, 57.0], "area": 3036, "segmentation": {"size": [512, 512], "counts": "[]X4g0Y?00000000000000000000000000000E;0000000000000000000000000000000000000000000000E;0006J0000000000000000000000000J6000000000000000000000000000]Oc0000000000`0@QSa2"}, "image_id": 73, "id": 1032}, {"iscrowd": 0, "category_id": 1, "bbox": [174.0, 400.0, 87.0, 53.0], "area": 3147, "segmentation": {"size": [512, 512], "counts": 
"]]g2h0X?000000000000000000000000000000000SOm00000000000000000000000000L4000000000000002N0=C0000000000000000000000004L00000000000000000000000000000000000000007I0000000000000000000000hRm3"}, "image_id": 73, "id": 1033}, {"iscrowd": 0, "category_id": 1, "bbox": [441.0, 424.0, 71.0, 57.0], "area": 3072, "segmentation": {"size": [512, 512], "counts": "jml66c?7o@JQ>=gAKY>n0000000000010O0000000000000010O0000000000000010O0000000000000I80O00000000000000010O0000003M000001O01O00000000000nNiA;W>]OQBd0e>000000000001O01O000jA"}, "image_id": 73, "id": 1034}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 433.0, 12.0, 31.0], "area": 290, "segmentation": {"size": [512, 512], "counts": "a=n0R?0010O000000000I7I8H7Jcbi7"}, "image_id": 73, "id": 1035}, {"iscrowd": 0, "category_id": 1, "bbox": [20.0, 442.0, 80.0, 63.0], "area": 2995, "segmentation": {"size": [512, 512], "counts": "R_:7a?8H8K5000001O01O00000000010O00M3001O0001O0000000N2M301O00000001O0001N1I7I700000001O000001O000001O0001O00hA^NR>h10000000010O0000QOPBNP>KWB5i=JXB6h=JYB5h=JXB7g=F\\B:d=_OcBa0]=XOjBh0W>0001O01O000000O1J7H7IVa]6"}, "image_id": 73, "id": 1036}, {"iscrowd": 0, "category_id": 1, "bbox": [171.0, 468.0, 89.0, 44.0], "area": 2841, "segmentation": {"size": [512, 512], "counts": "foe28^?:K501O0000000001O000001O000000000001O00000000N2G9G4410001O00000000000001O0001O0000000005K1O0001O000000000001O000001O0000000001O00O1L5O0001O00000000000000001O0000000XO^A2b>EgA;o>0000L5Fh`m3"}, "image_id": 73, "id": 1037}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 471.0, 8.0, 41.0], "area": 217, "segmentation": {"size": [512, 512], "counts": "g>Y1g>01O0I7I7I7I7I_ak7"}, "image_id": 73, "id": 1038}, {"iscrowd": 0, "category_id": 1, "bbox": [269.0, 476.0, 78.0, 36.0], "area": 2394, "segmentation": {"size": [512, 512], "counts": "coV40001O000000000000000000000M3000000001O00000000000000000000L4M300001O00000000000000000000000000000000001O00000000000000000000000000000000001O000E;0000N2^OcQb2"}, "image_id": 73, "id": 1039}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 488.0, 75.0, 24.0], "area": 997, "segmentation": {"size": [512, 512], "counts": "g_i69^?9J60000000000001O00000000000000001O000000000000001O00000000000000001Aj@2b?000001O000000000000001O00000000000000001O000000000000001O00000000000000001O00SP1"}, "image_id": 73, "id": 1040}, {"iscrowd": 0, "category_id": 1, "bbox": [47.0, 507.0, 28.0, 5.0], "area": 73, "segmentation": {"size": [512, 512], "counts": "kog05l?O0000000000001O0000000000001O00000000001O000000000000QPj6"}, "image_id": 73, "id": 1041}, {"iscrowd": 0, "category_id": 1, "bbox": [65.0, 0.0, 28.0, 12.0], "area": 181, "segmentation": {"size": [512, 512], "counts": "P`P11o?001O1O001O001O001O1O001O001O001O1O001O00O1O1N2N2N2O1NR`a6"}, "image_id": 74, "id": 1042}, {"iscrowd": 0, "category_id": 1, "bbox": [129.0, 0.0, 7.0, 3.0], "area": 14, "segmentation": {"size": [512, 512], "counts": "P`P21o?001O001O00OQPl5"}, "image_id": 74, "id": 1043}, {"iscrowd": 0, "category_id": 1, "bbox": [198.0, 0.0, 42.0, 34.0], "area": 754, "segmentation": {"size": [512, 512], "counts": "aPS31m?2N3M2N3M2O2M3M2N3O001O0100O010O010O0100O010O010O0100O010O010O0O2N1N3M2N3M2N3M3N1NboW4"}, "image_id": 74, "id": 1044}, {"iscrowd": 0, "category_id": 1, "bbox": [263.0, 0.0, 50.0, 42.0], "area": 1180, "segmentation": {"size": [512, 512], "counts": "e`S42l?2N3M2N3M2N3M2N3M2N3N101O1O001O001O001O001O001O001O00QOWAh0h>VOZAj0f>TO]Al0k>010O010O010O010OO2N1N3M2N3M3M2N3M2N3M2N__S3"}, "image_id": 74, "id": 1045}, {"iscrowd": 0, "category_id": 1, "bbox": [440.0, 0.0, 57.0, 35.0], "area": 
1274, "segmentation": {"size": [512, 512], "counts": "\\Pl62l?2N3M3N1N3M2O2O001O1O001O001O001O1O001O001O1O001O0100O0100O010O010N1N3N2N1000N2N2O1O1001O001OCPAJP?5RAIo>5SAIo>5TAIm>5VAHl>7UAGm>7`0M2N3No_7"}, "image_id": 74, "id": 1046}, {"iscrowd": 0, "category_id": 1, "bbox": [11.0, 13.0, 65.0, 57.0], "area": 2111, "segmentation": {"size": [512, 512], "counts": "Va52m?1N3M2N3M2N3N1N3M3M2N3M2O2M201O010O010O10O10O010O010O0O2M2O2O10O010O010O10O010O010O10ON3M2N3O0010YObAJ^>5dAK\\>4dAL\\>5dAK\\>4eALZ>5eAK\\>4eALZ>2hANY>DcA079Y>DcA06:Y>DcA169Y>DcA06:V?M2N3M3M\\ni6"}, "image_id": 74, "id": 1047}, {"iscrowd": 0, "category_id": 1, "bbox": [413.0, 27.0, 59.0, 49.0], "area": 1577, "segmentation": {"size": [512, 512], "counts": "ca^61m?2N3N1N3M2N3M2N3N2M2PAYOe>j0XAXOf>j0YAXONLc>o0^AYOb>f0_AZO`>g0^AZOc>R1O010O010ON3N1N3M3M2010O010O010O0100O010O0100O010O0100O010O0O2N1N3M2N3M2N3N2M2N3M2N3N]nc0"}, "image_id": 74, "id": 1048}, {"iscrowd": 0, "category_id": 1, "bbox": [231.0, 36.0, 57.0, 41.0], "area": 1442, "segmentation": {"size": [512, 512], "counts": "mac32j?4M3L5L3L4M3L5O00010O0000010O00010O0000010O00010O0000010O00010O000010O000010O00010O0000010O00010O00M3L5L3L4M4L3Lbn_3"}, "image_id": 74, "id": 1049}, {"iscrowd": 0, "category_id": 1, "bbox": [103.0, 38.0, 41.0, 34.0], "area": 785, "segmentation": {"size": [512, 512], "counts": "oac12l?3M2IKa@8\\?Jc@7[?9N1N3M2N3M3N110O01O0O2M3M2N3N1O20O0100O0100O010O010O0100M2N3N1N3M2N3M3NZng5"}, "image_id": 74, "id": 1050}, {"iscrowd": 0, "category_id": 1, "bbox": [167.0, 40.0, 37.0, 32.0], "area": 648, "segmentation": {"size": [512, 512], "counts": "iac21m?2N3M3N1N3M2N3M2N3O010O010O010O10O010O010O010O10O010O01O0N3M2N3M2N3M2O2M\\ni4"}, "image_id": 74, "id": 1051}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 67.0, 60.0, 47.0], "area": 1768, "segmentation": {"size": [512, 512], "counts": "Y2g0W?201O01O01O010O00010O0010O00O2L3M4O000010O01M21O01O010O0001kNWAR1k>010O00010O010O00001L310O00010ON3M2M4M2N2M4M2N3L3N3M2N2M4MamQ7"}, "image_id": 74, "id": 1052}, {"iscrowd": 0, "category_id": 1, "bbox": [510.0, 71.0, 2.0, 3.0], "area": 4, "segmentation": {"size": [512, 512], "counts": "YRo71m?2iM"}, "image_id": 74, "id": 1053}, {"iscrowd": 0, "category_id": 1, "bbox": [206.0, 77.0, 61.0, 49.0], "area": 1466, "segmentation": {"size": [512, 512], "counts": "TSW31n?1N3M2N3M2N3M2O2M3M2N3M2O2O0100O010O010O0100O010O0100O010O010O0100O010O0100O010O010O0100O010O01O0N3M3M2N3M2O2M2N3M2N3M2Nl\\j3"}, "image_id": 74, "id": 1054}, {"iscrowd": 0, "category_id": 1, "bbox": [484.0, 78.0, 28.0, 32.0], "area": 507, "segmentation": {"size": [512, 512], "counts": "XSb71m?2N3M2O2M3M2N3M2O2M21M2N30K^Oj@b0U?_Oi@d0T?501N1N02N2N3M3N1N3M2N3N_M"}, "image_id": 74, "id": 1055}, {"iscrowd": 0, "category_id": 1, "bbox": [149.0, 87.0, 36.0, 27.0], "area": 488, "segmentation": {"size": [512, 512], "counts": "RcZ22l?3M2N3M2N3N1010O010O010O010O010O010O010O010O010O010O010O001M2N3M2N3M2Nm\\S5"}, "image_id": 74, "id": 1056}, {"iscrowd": 0, "category_id": 1, "bbox": [81.0, 89.0, 34.0, 33.0], "area": 608, "segmentation": {"size": [512, 512], "counts": "[cX12l?2O2M3M2N3N1N3M2N3M210O10O10O010O010O10O010O10OO2M2N3N1N3M2N3M3N1Nl\\V6"}, "image_id": 74, "id": 1057}, {"iscrowd": 0, "category_id": 1, "bbox": [509.0, 107.0, 3.0, 7.0], "area": 14, "segmentation": {"size": [512, 512], "counts": "_cn72l?3M2eL"}, "image_id": 74, "id": 1058}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 115.0, 30.0, 59.0], "area": 1115, "segmentation": {"size": [512, 512], "counts": 
"d3[1d>1010O10O10O010O010O01^AdN_>_1O10O10O010OUObA4_>JcA6\\>HfA8[>EhA;W>CoA7T>FnA8T>FoA7T>GmA8U>EnA8o>M2N3MUk`7"}, "image_id": 74, "id": 1059}, {"iscrowd": 0, "category_id": 1, "bbox": [189.0, 122.0, 50.0, 46.0], "area": 1349, "segmentation": {"size": [512, 512], "counts": "ldn21m?3L3M4M2M3N3L3M3N3L3N3L3M301O0010O000M4N110O01O01O010O00010QOVAf0j>WOYAj0f>TO\\Al0m>O010O01O01O01O01M2M4M2M3M4M2M3N3Ld[X4"}, "image_id": 74, "id": 1060}, {"iscrowd": 0, "category_id": 1, "bbox": [360.0, 123.0, 59.0, 43.0], "area": 1555, "segmentation": {"size": [512, 512], "counts": "dTd51l?3M3M4L3M3M4L3M3O2O00010O00010O00010TATOb>l0[AWOe>i0XAZOi>o0O00010O00O1M4L30001O01O01O01O01O01O01O01O01O01O01O01O01O01ON3L3M3M4L3L4M4Lh[^1"}, "image_id": 74, "id": 1061}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 135.0, 23.0, 23.0], "area": 293, "segmentation": {"size": [512, 512], "counts": "dd]12l?3M3M2O2M2N3M210O010O010O10O0O2M2N3M3M2N3N1NckV6"}, "image_id": 74, "id": 1062}, {"iscrowd": 0, "category_id": 1, "bbox": [53.0, 137.0, 41.0, 36.0], "area": 757, "segmentation": {"size": [512, 512], "counts": "kdj02l?3M2N3N1N3M3N1N3M2N3O01000O010O010O10O10OO2M2O2M3O00f@AT??j@CU?d010O10N2M2O2M2N3M3N1N3M2OWk`6"}, "image_id": 74, "id": 1063}, {"iscrowd": 0, "category_id": 1, "bbox": [429.0, 139.0, 37.0, 29.0], "area": 563, "segmentation": {"size": [512, 512], "counts": "kdf62m?2M2N3N2M2N3N1N3M3N110O010O01000O010O01O1N1N3M2O2M2N3000O010M2001M3M2O2MZkf0"}, "image_id": 74, "id": 1064}, {"iscrowd": 0, "category_id": 1, "bbox": [160.0, 153.0, 11.0, 10.0], "area": 70, "segmentation": {"size": [512, 512], "counts": "oT`21l?4L31O01O01O01O01O0LX[Z5"}, "image_id": 74, "id": 1065}, {"iscrowd": 0, "category_id": 1, "bbox": [47.0, 159.0, 109.0, 164.0], "area": 5223, "segmentation": {"size": [512, 512], "counts": "[ig03m?2M3N3L3N2M3N2N2M4M1N10O010O01ZOTOXBk0i=WOTBj0k=YOSBf0n=\\OoAe0P>^OnAa0S>AkA?T>EhA;Y>GeA9Z>g00O010O010O010O4MUOdNTCY1mU=EiB;W=GfBBG8d=8cB@K6a==bB\\O04_=c0]BZO61\\=h0\\BVOl0bARO^>n0dAoN\\>Q1:0iNnN_CR1a7QBFR>7QBGQ>7QBFR>7RBFP>8RBFP>7SBFQ>7QBGQ>6RBGQ>7QBGQ>6o0N2M4Mlia5"}, "image_id": 74, "id": 1066}, {"iscrowd": 0, "category_id": 1, "bbox": [469.0, 162.0, 31.0, 30.0], "area": 508, "segmentation": {"size": [512, 512], "counts": "deZ71m?2N3M2N3M2N3N1N3M2O20O0100O010O0100O010O010M2N3M2N3N1N3M2N3Mej5"}, "image_id": 74, "id": 1067}, {"iscrowd": 0, "category_id": 1, "bbox": [337.0, 165.0, 73.0, 44.0], "area": 1666, "segmentation": {"size": [512, 512], "counts": "ieX52m?1N3M3N1N3M2O2M3M2O2M2N3O010O10O010O10O10O010O10O10O010O10O10O010O10O10ON3N1N1O010O0002O1N3M2O2M3M2O20O0100O010O01000O010O01000O0N3M2O2M3M2O2M2N[jb1"}, "image_id": 74, "id": 1068}, {"iscrowd": 0, "category_id": 1, "bbox": [205.0, 172.0, 25.0, 21.0], "area": 307, "segmentation": {"size": [512, 512], "counts": "geV32k?4M2N2M4N1010O00010O010O01O01O010O010O0N2M4M2M4M^j\\4"}, "image_id": 74, "id": 1069}, {"iscrowd": 0, "category_id": 1, "bbox": [17.0, 181.0, 29.0, 29.0], "area": 468, "segmentation": {"size": [512, 512], "counts": "Wf81m?2N3N2M2N3M2N3M2N3N101O010O010O10O10ON3N1N3M2N3M2N3M2O2MTjX7"}, "image_id": 74, "id": 1070}, {"iscrowd": 0, "category_id": 1, "bbox": [157.0, 181.0, 15.0, 18.0], "area": 132, "segmentation": {"size": [512, 512], "counts": "me^22k?3M3N30O00\\@H`?<10OG_@3a?Jb@7d?O010O0M4MRjY5"}, "image_id": 74, "id": 1071}, {"iscrowd": 0, "category_id": 1, "bbox": [163.0, 192.0, 55.0, 43.0], "area": 1330, "segmentation": {"size": [512, 512], "counts": 
"dfa22n?3M2M3N2M3N3L3N2M3N1O0O010O10O010OJXOUAi0k>YOSAf0m>70O010O01000O010O010O011N3N2N0O010O010O10O010O010002M4M2M3N2M3N3L3N2N[ib4"}, "image_id": 74, "id": 1072}, {"iscrowd": 0, "category_id": 1, "bbox": [467.0, 199.0, 45.0, 61.0], "area": 1622, "segmentation": {"size": [512, 512], "counts": "jgY71m?2M3M4ZOH`A:^>I^A;^>H_A;^>I_A9_>I^A;^>H_A;^>e0M4M2O2O01O01O01O01O010O01O01O01N1M3N3L3M4M2M3M4L3N2M4L3N2M4L3N3L3M3N`I"}, "image_id": 74, "id": 1073}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 200.0, 7.0, 11.0], "area": 51, "segmentation": {"size": [512, 512], "counts": "X6:f?010OO2M2N3NfYl7"}, "image_id": 74, "id": 1074}, {"iscrowd": 0, "category_id": 1, "bbox": [386.0, 206.0, 37.0, 34.0], "area": 682, "segmentation": {"size": [512, 512], "counts": "PWQ61m?2N3N1N3M3M2N3N1N3M2010O0100O010O0100O010O0100O010O0100O0N3M2N5K2O2M2N3MTY\\1"}, "image_id": 74, "id": 1075}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 212.0, 6.0, 12.0], "area": 46, "segmentation": {"size": [512, 512], "counts": "d6;f?00N1N3N1N[il7"}, "image_id": 74, "id": 1076}, {"iscrowd": 0, "category_id": 1, "bbox": [312.0, 221.0, 50.0, 42.0], "area": 1675, "segmentation": {"size": [512, 512], "counts": "eWl4?R??G9000000000000010O000000000000000000000000000010OkN^Al0k>0000000001O0000000000000001O000000000001O0O1@ViZ2"}, "image_id": 74, "id": 1077}, {"iscrowd": 0, "category_id": 1, "bbox": [372.0, 225.0, 41.0, 34.0], "area": 666, "segmentation": {"size": [512, 512], "counts": "`Wj51m?3M2O2M2N3M2N3M2010O10O10O010O010O010O01000O010O010O010O010O10O10M2N3M2N3M2O2M2N`Xa1"}, "image_id": 74, "id": 1078}, {"iscrowd": 0, "category_id": 1, "bbox": [16.0, 233.0, 34.0, 37.0], "area": 627, "segmentation": {"size": [512, 512], "counts": "SX81m?3N1N3M3M2O2M2N3N2M2O20O10O10OO2N1N2N010O0002O2M3M2O2Hf@F\\?8g@E\\?88N1N3M2ObhV7"}, "image_id": 74, "id": 1079}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 251.0, 8.0, 17.0], "area": 71, "segmentation": {"size": [512, 512], "counts": "k7a0_?N3M2N3M2O2M2NThk7"}, "image_id": 74, "id": 1080}, {"iscrowd": 0, "category_id": 1, "bbox": [144.0, 262.0, 64.0, 48.0], "area": 1489, "segmentation": {"size": [512, 512], "counts": "WYX21o?2M4M2M3N2DDo@?o>Co@>o>En@d0000000@`0000009G0000000000009G0000000000000000G907I000005K00004L000S1mN0000000000000000000000000000000000]V:"}, "image_id": 74, "id": 1082}, {"iscrowd": 0, "category_id": 1, "bbox": [363.0, 270.0, 22.0, 21.0], "area": 393, "segmentation": {"size": [512, 512], "counts": "ohe53`?=L40000001O0000000001O000000000001O000N2CmWo1"}, "image_id": 74, "id": 1083}, {"iscrowd": 0, "category_id": 1, "bbox": [310.0, 277.0, 49.0, 52.0], "area": 1546, "segmentation": {"size": [512, 512], "counts": "^Yk44l?3M3L4M2N3L4B]OYAf0d>]OXAg0e>:O0100O01000O01MjN\\AV1d>201000O01000O010O10O10O01000O01000O4M3M3L3N3M3L4M3L3N3M3L4M3MVV\\2"}, "image_id": 74, "id": 1084}, {"iscrowd": 0, "category_id": 1, "bbox": [504.0, 290.0, 8.0, 39.0], "area": 312, "segmentation": {"size": [512, 512], "counts": "RYl7W1i>0000000000000nF"}, "image_id": 74, "id": 1085}, {"iscrowd": 0, "category_id": 1, "bbox": [383.0, 296.0, 15.0, 14.0], "area": 138, "segmentation": {"size": [512, 512], "counts": "[io53m?3M3L3N000O01000O0101O3L4M2N_fh1"}, "image_id": 74, "id": 1086}, {"iscrowd": 0, "category_id": 1, "bbox": [120.0, 297.0, 31.0, 27.0], "area": 467, "segmentation": {"size": [512, 512], "counts": "aYl13l?4M2M4M3L4M2M100O0100O0100O010O0101N3N2N2M4M2M3N2M12N000O10OYVd5"}, "image_id": 74, "id": 1087}, {"iscrowd": 0, "category_id": 1, "bbox": [397.0, 301.0, 30.0, 27.0], "area": 497, "segmentation": {"size": [512, 
512], "counts": "eiV62m?4M3M2M4M3M3L1000O010O01000O01000O01000O010O3N3M2M4M3M3L3NQVZ1"}, "image_id": 74, "id": 1088}, {"iscrowd": 0, "category_id": 1, "bbox": [186.0, 303.0, 13.0, 13.0], "area": 109, "segmentation": {"size": [512, 512], "counts": "bYm23m?2N2M4M0O100O0101N3N3L3NYVl4"}, "image_id": 74, "id": 1089}, {"iscrowd": 0, "category_id": 1, "bbox": [110.0, 305.0, 10.0, 10.0], "area": 62, "segmentation": {"size": [512, 512], "counts": "dYg12m?3N2N1N100O03N2M3NYfS6"}, "image_id": 74, "id": 1090}, {"iscrowd": 0, "category_id": 1, "bbox": [94.0, 317.0, 122.0, 71.0], "area": 3275, "segmentation": {"size": [512, 512], "counts": "Z[_12n?3L3N3L3N3M2M4M2M4M1OO0100O0100O0100O0100O0100O0100O0100O0100O0100O0100O0100O0100O0100O0100O102N3L100O10^O^OeAc0[>_OcA`0]>D_A=a>E]A7L_Oh>=YA4i>b00O103L3N2N0O10O010O10O10O10O010O12M10O10O010O10O10O010O10O10O010O10O10O010O10O10O010O10O10O010O10O10O010O12M4M2M4M2N3L3N3L3N3MZec4"}, "image_id": 74, "id": 1091}, {"iscrowd": 0, "category_id": 1, "bbox": [413.0, 323.0, 30.0, 29.0], "area": 445, "segmentation": {"size": [512, 512], "counts": "bj^61n?1O2N2N2N2M3N2N2N2N2N1O2N2N2O10N2N2N2N2N1O2N2N2N2M3N2N2N2NaUR1"}, "image_id": 74, "id": 1092}, {"iscrowd": 0, "category_id": 1, "bbox": [126.0, 325.0, 15.0, 15.0], "area": 141, "segmentation": {"size": [512, 512], "counts": "YZo11n?4M2M4M2NO010O01000O103L3N3McUi5"}, "image_id": 74, "id": 1093}, {"iscrowd": 0, "category_id": 1, "bbox": [332.0, 327.0, 51.0, 63.0], "area": 1618, "segmentation": {"size": [512, 512], "counts": "f[V53l?2M2N3M2N3M2N3N1N3M2N3M2N3M3M2O2M2N3M2N3M2N3N1N3M10O3M2N3M2O2M3M2N3M2N3M2O2M2N3M2N3M2N3M3N1N3M2N3M2N^UP2"}, "image_id": 74, "id": 1094}, {"iscrowd": 0, "category_id": 1, "bbox": [438.0, 343.0, 30.0, 37.0], "area": 680, "segmentation": {"size": [512, 512], "counts": "d[k62k?3M3M4L3M3M4L3M3N3L301O01O01O01O01O01O0M3M4M2M3M4L3M3M4L3MVee0"}, "image_id": 74, "id": 1095}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 344.0, 64.0, 111.0], "area": 2775, "segmentation": {"size": [512, 512], "counts": "j<]1a>10O10O1@iNXBV1i=`00O0100O0102N1N3N2M3N1O2M2O0OWOfNoBZ1Q=gNmBZ1R=iNlBV1U=lNhBU1X=lNgBS1Y=POeBP1[=RObBo0]=TOaBi0b=XO\\Bi0d=YOZBg0h=YOUBg0n=XOQBh0Q>b01N3N2M2O2N2O010M3N2M2O2M3N1O2M3N2M2O2N2M2O2M3N1N3N2N2M2O2M3Nado6"}, "image_id": 74, "id": 1096}, {"iscrowd": 0, "category_id": 1, "bbox": [473.0, 353.0, 19.0, 33.0], "area": 516, "segmentation": {"size": [512, 512], "counts": "Sk\\72n?>B>A2O0000000000000000O100000O1=C>BTd9"}, "image_id": 74, "id": 1097}, {"iscrowd": 0, "category_id": 1, "bbox": [381.0, 373.0, 37.0, 68.0], "area": 1722, "segmentation": {"size": [512, 512], "counts": "]mn54j?2M3N3L3N2gNDkB`0T=HbB:]=0WB3i=5mANR>P101O00001O0O101O00001O001O0O10N2M3N2M3M3M3N4K;E;E;F:E\\c^1"}, "image_id": 74, "id": 1098}, {"iscrowd": 0, "category_id": 1, "bbox": [128.0, 394.0, 114.0, 74.0], "area": 3335, "segmentation": {"size": [512, 512], "counts": "a]P23m?2M4M2M3N2M3N2M3N3L3N2M3N2N2M3NO010O010O010O102N3L3N2M3NO0100O010O010OMVOQAk0o>20100O010O0100O010O010O010O010O0102M3NO010O10O02O1N10O10O10O01JQA[On>f0UAVOl>i0510O010O010O01EWO_Ai0a>YO\\Ag0d>\\OZAd0f>^OXAa0h><0O010O10O010O010O010002M4M2M3NO010O010O0100O3N2M3N2M3N2N3L3N2MnbV4"}, "image_id": 74, "id": 1099}, {"iscrowd": 0, "category_id": 1, "bbox": [428.0, 401.0, 43.0, 59.0], "area": 1704, "segmentation": {"size": [512, 512], "counts": "j]f63U?0]A3`>0\\A4a>0[A4IEa>;cA9Z>JcA9\\>g0O00010O00010O00010O0001OO2L3M301O01O01O01O01OM4L3M3M4L3oNYAe0k>XOXAe0k>XOYAd0S?M3M4L3L4M3MTSd0"}, "image_id": 74, "id": 1100}, {"iscrowd": 0, "category_id": 1, "bbox": [487.0, 401.0, 25.0, 
57.0], "area": 1100, "segmentation": {"size": [512, 512], "counts": "Pnc72l?3L3N2jN0YB4f=6mAMS>P101N10001O00001O00001O00001O0O101O00O`C"}, "image_id": 74, "id": 1101}, {"iscrowd": 0, "category_id": 1, "bbox": [82.0, 445.0, 41.0, 67.0], "area": 1509, "segmentation": {"size": [512, 512], "counts": "V^Y11n?3N2h@M_>5_AM^>7_AK_>7^AM^>6`AL^>6_AM^>6`AL^>6_AM^>m0N2M3N3L3N2M3N0O100002N2N2N2aNmAm0U>QOmAl0V>QOmAm0U>QOmAl0V>QOmAm0U>QOmAl0f>N2M4M2M3N2M3N2N2M3N3LZPR6"}, "image_id": 74, "id": 1102}, {"iscrowd": 0, "category_id": 1, "bbox": [181.0, 451.0, 25.0, 21.0], "area": 293, "segmentation": {"size": [512, 512], "counts": "\\nj22m?3N2M3N2M3N0O010O010O010O010O010O0100O3N2M3N2M4Mbah4"}, "image_id": 74, "id": 1103}, {"iscrowd": 0, "category_id": 1, "bbox": [160.0, 455.0, 118.0, 55.0], "area": 3036, "segmentation": {"size": [512, 512], "counts": "^_`22n?5K5K3L10000000O010000000O010000IDj@d09000O10O100000O10O100000O1000O1000O1000K5000O10O5L000004L4K100000O10O100000O10O100CZO`Af0`>_O[A`0f>=00O1000O1000O100000O10O100000O3N2N0000O0101O5K5K5J7J5K5Kf`d3"}, "image_id": 74, "id": 1104}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 488.0, 61.0, 24.0], "area": 1060, "segmentation": {"size": [512, 512], "counts": "a??a?0000000000000O100002N00000000O10000000000000000000000000000000000O1I7000000000000000000000000O1007I0000000000O10000003M_PQ7"}, "image_id": 74, "id": 1105}, {"iscrowd": 0, "category_id": 1, "bbox": [236.0, 503.0, 13.0, 9.0], "area": 97, "segmentation": {"size": [512, 512], "counts": "i_f37i?0O100000000000000O1008HQPS4"}, "image_id": 74, "id": 1106}, {"iscrowd": 0, "category_id": 1, "bbox": [128.0, 509.0, 8.0, 3.0], "area": 16, "segmentation": {"size": [512, 512], "counts": "o_P21o?0O100O100002NQ`k5"}, "image_id": 74, "id": 1107}, {"iscrowd": 0, "category_id": 1, "bbox": [15.0, 0.0, 9.0, 5.0], "area": 29, "segmentation": {"size": [512, 512], "counts": "Q`71n?101O001O1O00O1NRPd7"}, "image_id": 75, "id": 1108}, {"iscrowd": 0, "category_id": 1, "bbox": [26.0, 0.0, 20.0, 8.0], "area": 88, "segmentation": {"size": [512, 512], "counts": "PP=1o?1O001O001O001O1O001O001O0000N2N2N2O10PPY7"}, "image_id": 75, "id": 1109}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 0.0, 57.0, 24.0], "area": 722, "segmentation": {"size": [512, 512], "counts": "P`]11o?001O001O1O001O001O001O1O001O001O001O001O1O001O001O001O1O001O001O001O1O001O001O001O0000O1N2N2N2O1N2N2N2N2O1N2N2NRPf5"}, "image_id": 75, "id": 1110}, {"iscrowd": 0, "category_id": 1, "bbox": [374.0, 0.0, 25.0, 27.0], "area": 529, "segmentation": {"size": [512, 512], "counts": "dPk54f?6K5J6M4O00000000001O01O00000001O01O00000L4K5J7IS`h1"}, "image_id": 75, "id": 1111}, {"iscrowd": 0, "category_id": 1, "bbox": [456.0, 0.0, 5.0, 2.0], "area": 7, "segmentation": {"size": [512, 512], "counts": "PPT71o?001O00OQ`i0"}, "image_id": 75, "id": 1112}, {"iscrowd": 0, "category_id": 1, "bbox": [425.0, 18.0, 87.0, 48.0], "area": 3176, "segmentation": {"size": [512, 512], "counts": "d`d6`0^?202N1O2N10O0O100000UA^OX>b0ZALf>g00lNZAn0l>0000001O00000000000000000001O0001O00000000000000000000000K50M3N200000000001O000001O00000000000000000001O0001OUAQOe>U100M3O10001O00000001O00000000000000\\O"}, "image_id": 75, "id": 1113}, {"iscrowd": 0, "category_id": 1, "bbox": [304.0, 20.0, 60.0, 39.0], "area": 1678, "segmentation": {"size": [512, 512], "counts": "mPh44l?6J5K6J5J5L0000000O0100000000O0100000000O0100000000O01000000000O010000000O01000000000O010000000O01000000001N7J5K6J6J5K_nY2"}, "image_id": 75, "id": 1114}, {"iscrowd": 0, "category_id": 1, "bbox": [58.0, 21.0, 64.0, 48.0], "area": 1635, 
"segmentation": {"size": [512, 512], "counts": "\\Qm01n?1N3M2N3M2O2M3M2N3M2O2M2N300O010O0100O010O010M201ON3M2O2O10O010O10O010O10O010O010O10O010O10O010O010M3N1N3M2N3M2O2M2N3M3M2O2M2NhnR6"}, "image_id": 75, "id": 1115}, {"iscrowd": 0, "category_id": 1, "bbox": [2.0, 23.0, 15.0, 18.0], "area": 142, "segmentation": {"size": [512, 512], "counts": "SQ11m?3M3N1N3M2N3N1100M2N3M2N3M2NW_g7"}, "image_id": 75, "id": 1116}, {"iscrowd": 0, "category_id": 1, "bbox": [14.0, 36.0, 35.0, 35.0], "area": 709, "segmentation": {"size": [512, 512], "counts": "hQ73l?2M2N3M2O2M3M2O2M2N3M3N1010O10O001M3O010ON3M2O2M2N3N2M2N3M2O2M3M2O2Me^W7"}, "image_id": 75, "id": 1117}, {"iscrowd": 0, "category_id": 1, "bbox": [36.0, 63.0, 46.0, 55.0], "area": 1450, "segmentation": {"size": [512, 512], "counts": "oRb02m?2M2N3M2N3N1N3M2N3N2M2N3M2N3N1N3M2N300O0100O010O010O0100O0100O010O0100O010O01oN\\Ab0g>[O\\A6MMi>K\\A6MNi>J\\A5NNh>K\\A5NNU?On@NT?1dmf6"}, "image_id": 75, "id": 1118}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 66.0, 9.0, 18.0], "area": 86, "segmentation": {"size": [512, 512], "counts": "R2?a?01G`@1_?Mc@3^?Kd@5c?010OO2Md]k7"}, "image_id": 75, "id": 1119}, {"iscrowd": 0, "category_id": 1, "bbox": [439.0, 72.0, 21.0, 24.0], "area": 426, "segmentation": {"size": [512, 512], "counts": "hbk66c?7H8O1000000000001O01O00000000000001OM3GPni0"}, "image_id": 75, "id": 1120}, {"iscrowd": 0, "category_id": 1, "bbox": [259.0, 74.0, 70.0, 55.0], "area": 2174, "segmentation": {"size": [512, 512], "counts": "XcQ41m?3M2N3M2N2N3M2N3M2N3M2N3L3N3N110O010O010O010O010O010O010O010O010O01O010O010O01O010N1N3L3N3N110O0010O0N3M2N3M2N3M2N3M2N3M2N3L3N3M2N3M2N3M2NY]k2"}, "image_id": 75, "id": 1121}, {"iscrowd": 0, "category_id": 1, "bbox": [463.0, 75.0, 12.0, 21.0], "area": 216, "segmentation": {"size": [512, 512], "counts": "nbW71b?=J601O00000000000000Hm]b0"}, "image_id": 75, "id": 1122}, {"iscrowd": 0, "category_id": 1, "bbox": [348.0, 83.0, 33.0, 37.0], "area": 912, "segmentation": {"size": [512, 512], "counts": "`S^54X?0SA5l>Mm@9T?9N2K510O0M3001O00010O1O000001O0001O000001O0001O000K5J6J6K6I]]Q2"}, "image_id": 75, "id": 1123}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 84.0, 25.0, 31.0], "area": 486, "segmentation": {"size": [512, 512], "counts": "i2b0\\?3N1N3O10O010O10O10O010O10O01M2N3N1N3M3N1N3M2N3NQ]c7"}, "image_id": 75, "id": 1124}, {"iscrowd": 0, "category_id": 1, "bbox": [19.0, 85.0, 9.0, 9.0], "area": 49, "segmentation": {"size": [512, 512], "counts": "ib92l?2N3O010O0100N1NZma7"}, "image_id": 75, "id": 1125}, {"iscrowd": 0, "category_id": 1, "bbox": [395.0, 94.0, 49.0, 53.0], "area": 1627, "segmentation": {"size": [512, 512], "counts": "WdU62j?4L4M310O000GEm@;o>IQA9i>LWA4d>0\\A0_>5aAK_>5^ANb>h0010O0000010O000001L3L41O01O000001O01O0001O01O0001L3BcAUOa>0\\A`07\\Oa>0\\Ac0V?010O00L4K6K4LilQ1"}, "image_id": 75, "id": 1126}, {"iscrowd": 0, "category_id": 1, "bbox": [485.0, 97.0, 27.0, 63.0], "area": 1398, "segmentation": {"size": [512, 512], "counts": "edb77b?7I7I7I7I8H7I7M3000000010O0000000000010O000000000001lL"}, "image_id": 75, "id": 1127}, {"iscrowd": 0, "category_id": 1, "bbox": [12.0, 114.0, 52.0, 58.0], "area": 1390, "segmentation": {"size": [512, 512], "counts": "bT62l?3M2N3M2O2M2N3M2N3M2N3M3M2O2M2N3M210O010O010O010O01000O010O01\\O]AJb>4`ALa>1bAO]>0dA0]>MfA3Y>KiA5X>HkA8T>FnA:S>CPB=o=ATB>g>100O010OO2M2N3M2N3M2NTko6"}, "image_id": 75, "id": 1128}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 163.0, 34.0, 52.0], "area": 1125, "segmentation": {"size": [512, 512], "counts": 
"a5l0R?2N3N1N3M2N3M3N110O0100O010O0100O0100N1O2M2N3AVADl>;UADm>9VADl>:VADm>9VADm>:TAEm>9?M2N3M2O`j^7"}, "image_id": 75, "id": 1129}, {"iscrowd": 0, "category_id": 1, "bbox": [207.0, 187.0, 69.0, 49.0], "area": 1895, "segmentation": {"size": [512, 512], "counts": "cfW31m?3N2M2N3M2O2M2N3M3N1N3M2N3N1010O0100O010O010O010O10O10O010OQOSAk0Q?100O0100OO2N1O2N1O2N2N1O00000000O10001O2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2M3N2Ncie3"}, "image_id": 75, "id": 1130}, {"iscrowd": 0, "category_id": 1, "bbox": [351.0, 239.0, 64.0, 34.0], "area": 1674, "segmentation": {"size": [512, 512], "counts": "Rh_58_?9H8N200000001O0001O0000000001O0001O000000000001O0001O0000000001O0001O000000000001O01O000000000001O0001O0000000001O0001O0J6H8GcX`1"}, "image_id": 75, "id": 1131}, {"iscrowd": 0, "category_id": 1, "bbox": [273.0, 241.0, 34.0, 30.0], "area": 556, "segmentation": {"size": [512, 512], "counts": "PhX43l?2M2N3N1N3M2N3N2M210O010O10O10O010ON30O0100O0100O01]Oe@?_?N1N3M2N3N2M2NPXV3"}, "image_id": 75, "id": 1132}, {"iscrowd": 0, "category_id": 1, "bbox": [175.0, 243.0, 66.0, 47.0], "area": 1863, "segmentation": {"size": [512, 512], "counts": "Vhg23k?2M4L3N2M4L3M301O0010O00010O00010O010O00010O00010ON2M4N101O01O01O01O01O01O01RAQOj>R101OQOWAg0j>UOYAk0n>1O0N2M4L3O2O00010O00010O01O01O01ZO[ANf>O]ANf>0]AMf>O]ANf>O]ANg>N]ANf>0jXW4"}, "image_id": 75, "id": 1133}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 245.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "e71Zho7"}, "image_id": 75, "id": 1134}, {"iscrowd": 0, "category_id": 1, "bbox": [3.0, 251.0, 23.0, 25.0], "area": 308, "segmentation": {"size": [512, 512], "counts": "\\h11n?2M2N3M2O2M3M2O2M2N3O0010N2M2N3N1N3M3N1N3M2NQhb7"}, "image_id": 75, "id": 1135}, {"iscrowd": 0, "category_id": 1, "bbox": [304.0, 252.0, 16.0, 22.0], "area": 185, "segmentation": {"size": [512, 512], "counts": "[Xh42m?1N3N2M2N3N2O0N3N0O10O2O2Eb@0`?Nc@O`?Oa@0bWP3"}, "image_id": 75, "id": 1136}, {"iscrowd": 0, "category_id": 1, "bbox": [254.0, 256.0, 32.0, 31.0], "area": 542, "segmentation": {"size": [512, 512], "counts": "aXo33k?2N3M2O2M2N3M2N3M30O010O10O010O010O10O010O0O2M3M2N3M2O2M2N3M2Nfg`3"}, "image_id": 75, "id": 1137}, {"iscrowd": 0, "category_id": 1, "bbox": [370.0, 277.0, 31.0, 21.0], "area": 455, "segmentation": {"size": [512, 512], "counts": "QYi55k?0000001O00010K4G900000001O000001O0000000001O000001O00000H8GbWg1"}, "image_id": 75, "id": 1138}, {"iscrowd": 0, "category_id": 1, "bbox": [151.0, 288.0, 61.0, 48.0], "area": 1612, "segmentation": {"size": [512, 512], "counts": "ii[21m?2N3M2N3N1N3M2N3M2N3M2N3M210O010O10O010O010O010O010O01N1N3O0010O010kNWAQ1m>0O0O2M210O010O0N3OO2M2N3M2N3M2N3M2N3M2N3M2N3M2N3N1Ncfe4"}, "image_id": 75, "id": 1139}, {"iscrowd": 0, "category_id": 1, "bbox": [318.0, 290.0, 79.0, 71.0], "area": 2585, "segmentation": {"size": [512, 512], "counts": "^Yo43h?5K5N200001O03M1O000001O00000001O01O000000]ABd=>RBBB3\\>;PBDB3_>8lA3S>NjA4V>LgA8X>HfA:Z>FdA<\\>f0O0O001O000010O2OO00001O0010O0001O001O010O^NmAW1^>0010O010O0010O01PO[Ad0d>YO`Af0`>XObAh0_>TOdAm0h>O010O01O01O0O2L3N2M4M2M4M2N3L3NcUi1"}, "image_id": 75, "id": 1140}, {"iscrowd": 0, "category_id": 1, "bbox": [507.0, 291.0, 5.0, 16.0], "area": 46, "segmentation": {"size": [512, 512], "counts": "`im72k?3L5L3M3mF"}, "image_id": 75, "id": 1141}, {"iscrowd": 0, "category_id": 1, "bbox": [220.0, 294.0, 37.0, 39.0], "area": 716, "segmentation": {"size": [512, 512], "counts": "fY^32l?3M2N3b@HQ?9m@JP?9m@IQ?:m@HQ?d0N3M3OROTAh0l>WOUAj0k>SOXAl0n>1000O010O001M3M2O2M2N3O1O010O01000N1O2M2N3N2M2NWVo3"}, "image_id": 75, "id": 
1142}, {"iscrowd": 0, "category_id": 1, "bbox": [496.0, 315.0, 16.0, 22.0], "area": 313, "segmentation": {"size": [512, 512], "counts": "XZh78^?:M3001O0000000001O00000001O00SF"}, "image_id": 75, "id": 1143}, {"iscrowd": 0, "category_id": 1, "bbox": [283.0, 316.0, 17.0, 17.0], "area": 162, "segmentation": {"size": [512, 512], "counts": "Wj]41m?2N3M2N3N1N300O010O01O1M2N3M2N3NoeY3"}, "image_id": 75, "id": 1144}, {"iscrowd": 0, "category_id": 1, "bbox": [303.0, 317.0, 20.0, 19.0], "area": 227, "segmentation": {"size": [512, 512], "counts": "Xjg42l?2M4M2M3O2O0010O0010O0010O010M2M3N3L3NPVn2"}, "image_id": 75, "id": 1145}, {"iscrowd": 0, "category_id": 1, "bbox": [266.0, 322.0, 8.0, 8.0], "area": 36, "segmentation": {"size": [512, 512], "counts": "VZU41m?2N3O10O010M2Nnef3"}, "image_id": 75, "id": 1146}, {"iscrowd": 0, "category_id": 1, "bbox": [288.0, 330.0, 45.0, 36.0], "area": 896, "segmentation": {"size": [512, 512], "counts": "lZ`42l?2N3M2N3L3N2N3M2O2O010O01Ok@[Oo>l0O01O010L3N30O0010O0010O0012M010O00010O010O0010ON2N3L3N3L3N2MYUi2"}, "image_id": 75, "id": 1147}, {"iscrowd": 0, "category_id": 1, "bbox": [272.0, 333.0, 17.0, 19.0], "area": 174, "segmentation": {"size": [512, 512], "counts": "iZX41m?3N1N3N2M2O2M3N110OO2M3M2O2M3N1N`U_3"}, "image_id": 75, "id": 1148}, {"iscrowd": 0, "category_id": 1, "bbox": [121.0, 334.0, 113.0, 80.0], "area": 3269, "segmentation": {"size": [512, 512], "counts": "Vkl11m?2O2M3M2N3M2O2M2N3M2O2M3M2O2O0100O010O0100O010O0100O0100O010O010O0100O0100O010O0UAPOg>P1VASOj>Q10100O100O10O010O1M2ROUAe0n>XOTAg0m>WOVAh0Q?2O0O010O0100O0100O010O0N3N1N30O0100O0100O010O0100O010O0100O010O0100O010O0100O010O0100O01O1M2O2M2N3M2N3N1N3M3M2O2M2NncZ4"}, "image_id": 75, "id": 1149}, {"iscrowd": 0, "category_id": 1, "bbox": [490.0, 352.0, 22.0, 32.0], "area": 535, "segmentation": {"size": [512, 512], "counts": "j[e71j?6K4L4K5L4L5O01O0001O01O0001O0001O0001O01OlD"}, "image_id": 75, "id": 1150}, {"iscrowd": 0, "category_id": 1, "bbox": [276.0, 367.0, 21.0, 16.0], "area": 165, "segmentation": {"size": [512, 512], "counts": "d[Z42l?3M2O2O010O010O010O010O010O010O010OO2M2N[T[3"}, "image_id": 75, "id": 1151}, {"iscrowd": 0, "category_id": 1, "bbox": [325.0, 367.0, 60.0, 44.0], "area": 1408, "segmentation": {"size": [512, 512], "counts": "VlR53j?3M4M2M4L3M3M4L3010O00010O010O00010O010N1N2M4O010O01O010O0O1O2O010O01O01O010O01O01O0m@XOo>m0O01O01O010O01N2M3L3N3L3N2M4M2M4MPTo1"}, "image_id": 75, "id": 1152}, {"iscrowd": 0, "category_id": 1, "bbox": [263.0, 375.0, 28.0, 27.0], "area": 431, "segmentation": {"size": [512, 512], "counts": "XlS41m?3M2O2M2N3M2N3M2N3O0010O010O010O010O0O2M3M2N3M2N3M2N3MRT^3"}, "image_id": 75, "id": 1153}, {"iscrowd": 0, "category_id": 1, "bbox": [92.0, 383.0, 66.0, 46.0], "area": 1882, "segmentation": {"size": [512, 512], "counts": "f\\^13k?2O2M3M2O2M2N3M3N1N3M3N1N3N110O10O0100O0100O010O001O0010O01O1O0014K0000000001O0000000M300010O00000000001O01O00000001O0001O0I7I7I7Hjc`5"}, "image_id": 75, "id": 1154}, {"iscrowd": 0, "category_id": 1, "bbox": [256.0, 396.0, 10.0, 12.0], "area": 71, "segmentation": {"size": [512, 512], "counts": "d\\P42l?3M2N3M2001M2N3M2Occj3"}, "image_id": 75, "id": 1155}, {"iscrowd": 0, "category_id": 1, "bbox": [242.0, 402.0, 10.0, 10.0], "area": 58, "segmentation": {"size": [512, 512], "counts": "f\\i33l?1N3N1010O010M3M2N]cQ4"}, "image_id": 75, "id": 1156}, {"iscrowd": 0, "category_id": 1, "bbox": [478.0, 404.0, 34.0, 38.0], "area": 813, "segmentation": {"size": [512, 512], "counts": "\\]_71l?3N3L3N2M4L3N3L3N2O20O010O0010O0010O0010O010O01O01O010O01O010O010NRC"}, 
"image_id": 75, "id": 1157}, {"iscrowd": 0, "category_id": 1, "bbox": [287.0, 414.0, 79.0, 49.0], "area": 1964, "segmentation": {"size": [512, 512], "counts": "Zm_43k?2N3M2N3M2N3b@_O[?d0O010O010O010O010O010N1N3O010O010O010O010O010O01ON3M2N30O010O010O010O010O01O010O0O2M2O20O010O010nNWAk0i>SOYAn0m>O010O010O010O010O001M2N3M2N3M2N3M2N3M2N3M2N\\bX2"}, "image_id": 75, "id": 1158}, {"iscrowd": 0, "category_id": 1, "bbox": [239.0, 418.0, 34.0, 31.0], "area": 581, "segmentation": {"size": [512, 512], "counts": "cmg31m?3M2N3M3M2N3M2N3N1010O010O010O010O010O010O010O010M2N3M2N3M2N3M2N3MdRg3"}, "image_id": 75, "id": 1159}, {"iscrowd": 0, "category_id": 1, "bbox": [143.0, 422.0, 56.0, 64.0], "area": 1687, "segmentation": {"size": [512, 512], "counts": "gnW22l?3FLe@5Y?Nd@5Y?Me@5Z?:B]O[Af0b>]O[Ae0c>]O\\Ae0b>]O[Ae0c>>M3N1N3M2O2M3M2O20O0100OJoA]NR>a1oA_NR>e0nAM3\\OP>e0oAM\\>1fAM[>1hAOX>NjA3V>KlA4T>JnA7R>FQB9o=FSB:m=CUB>k=_OXB`0h=_OYBb0c>O10O10O010O10O10O010O1O0O2M2N3N2M2N3N1NZQl4"}, "image_id": 75, "id": 1160}, {"iscrowd": 0, "category_id": 1, "bbox": [71.0, 429.0, 57.0, 53.0], "area": 1613, "segmentation": {"size": [512, 512], "counts": "VnS13k?3M2N3M2O2M2N3M2N3M2N3N1N3N2O010O010O010O0100O010O0100O0N3N110O010O010O010O010O10HYAUOe>k0]AVOa>i0bAVO_>g0dAYO[>e0gA[OZ>b0iA^OV>`0lA^OW>`0kA^OW>?f0N3M2N3M2N3N1NWbo5"}, "image_id": 75, "id": 1161}, {"iscrowd": 0, "category_id": 1, "bbox": [198.0, 433.0, 9.0, 8.0], "area": 47, "segmentation": {"size": [512, 512], "counts": "c]S33k?201O010O010O01M^Rh4"}, "image_id": 75, "id": 1162}, {"iscrowd": 0, "category_id": 1, "bbox": [173.0, 436.0, 27.0, 25.0], "area": 373, "segmentation": {"size": [512, 512], "counts": "Rnf21m?2O2M2N3N1N3M2O2N2O010O010O010O01000OO2M2O2M2N3M2O2MUbk4"}, "image_id": 75, "id": 1163}, {"iscrowd": 0, "category_id": 1, "bbox": [215.0, 442.0, 18.0, 16.0], "area": 167, "segmentation": {"size": [512, 512], "counts": "Rn[32l?2N3M2N3O010O010O010O0010O0N3M2N3MRR[4"}, "image_id": 75, "id": 1164}, {"iscrowd": 0, "category_id": 1, "bbox": [459.0, 448.0, 51.0, 45.0], "area": 1391, "segmentation": {"size": [512, 512], "counts": "gnU74j?2M3M4M2M4L3N2M4M21O010O01O01O01O01O01o@UOl>o000010O00010O010O0001L3N2O20O01O01O0M4M2M3M4M2M3M4M2M4L3N2M4Lja0"}, "image_id": 75, "id": 1165}, {"iscrowd": 0, "category_id": 1, "bbox": [274.0, 453.0, 61.0, 42.0], "area": 1701, "segmentation": {"size": [512, 512], "counts": "k^Y42l?3M2N3N2M3M2O2M3M2O2M3M2O2O1O001O1O1O001O1O001O010O01O1O001O001O0VOUA?l>^OVAb0k>\\OVAd0j>ZOXAf0Q?0000000000000000001O000000000000000001O00000000ZQh2"}, "image_id": 75, "id": 1166}, {"iscrowd": 0, "category_id": 1, "bbox": [216.0, 468.0, 33.0, 27.0], "area": 519, "segmentation": {"size": [512, 512], "counts": "o^\\32l?3M2N3M2N3N102OO10O010O10ON3M210O010O010O010O010ON3M3N1N3M2N3M2NUQS4"}, "image_id": 75, "id": 1167}, {"iscrowd": 0, "category_id": 1, "bbox": [250.0, 479.0, 15.0, 13.0], "area": 101, "segmentation": {"size": [512, 512], "counts": "V_m31m?2O2M2N30O010O010O010N1N3M2NnPk3"}, "image_id": 75, "id": 1168}, {"iscrowd": 0, "category_id": 1, "bbox": [50.0, 481.0, 53.0, 31.0], "area": 1059, "segmentation": {"size": [512, 512], "counts": "f_i02l?3N1N3M2N3N2M2N3N1O200O0100O001O001O001O1O0000N2N2N2O1N2N2O1001O1O001O001O001O1O001O0O2N3L2N3M2O2M2N3M3NbP\\6"}, "image_id": 75, "id": 1169}, {"iscrowd": 0, "category_id": 1, "bbox": [198.0, 488.0, 18.0, 20.0], "area": 165, "segmentation": {"size": [512, 512], "counts": "f_S32l?2O2M3O001N2M2O0O010O02O2M2O2M3M2Of`c4"}, "image_id": 75, "id": 1170}, {"iscrowd": 0, "category_id": 1, "bbox": [115.0, 492.0, 27.0, 20.0], 
"area": 320, "segmentation": {"size": [512, 512], "counts": "goi12l?3M2O2M2N3M2010O0100O010O01O001O1O001O001O0N3M2N3M2O[`h5"}, "image_id": 75, "id": 1171}, {"iscrowd": 0, "category_id": 1, "bbox": [275.0, 500.0, 20.0, 12.0], "area": 148, "segmentation": {"size": [512, 512], "counts": "noY42l?2N2N2N2N21O001O001O001O001O001O1O0O2MVP\\3"}, "image_id": 75, "id": 1172}, {"iscrowd": 0, "category_id": 1, "bbox": [447.0, 504.0, 17.0, 8.0], "area": 67, "segmentation": {"size": [512, 512], "counts": "ooo61n?1N2N2N21O1O001O001O001O1O001O00Q`g0"}, "image_id": 75, "id": 1173}, {"iscrowd": 0, "category_id": 1, "bbox": [477.0, 505.0, 15.0, 7.0], "area": 57, "segmentation": {"size": [512, 512], "counts": "oo^71m?2N2O1O11O001O1O001O001O1O00Q`9"}, "image_id": 75, "id": 1174}, {"iscrowd": 0, "category_id": 1, "bbox": [208.0, 511.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "o_X31P`g4"}, "image_id": 75, "id": 1175}, {"iscrowd": 0, "category_id": 1, "bbox": [33.0, 0.0, 289.0, 224.0], "area": 42181, "segmentation": {"size": [512, 512], "counts": "\\d`02l?3L3N3M2M4M2M3N3M2M4M2M3N3L3N3M2M4M2M3N3M2M4M2M4M2N2M4M2M4M2M3N3M2M4M2M4M2N2M4M2M4M2N2M4M2M4M2N3L3N2M4M2M4M2N3L3N2M4O001O00001O001O001O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O001O00001O001O00001O001O001O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O001O00001O001O00001O00M3N2M3N2N2M3N2N2M3N2M3N2N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2N2M3N2M3N2N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2N2M3N2M3N2N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2N2M3N2M3N2N2M3N2N2MSPo2"}, "image_id": 76, "id": 1176}, {"iscrowd": 0, "category_id": 1, "bbox": [269.0, 178.0, 151.0, 212.0], "area": 17718, "segmentation": {"size": [512, 512], "counts": "miV42l?2M4M2N2M4M2M4RB[O]`0YADf>=WAEj>:SAIm>7QALn>c0N3L3N2M4M2M4M2M3O20O01O01O010O0N2N3L3N3L3N2M4M2M4M2M3N3L3N3L3N2M4M2MWlc4"}, "image_id": 77, "id": 1182}, {"iscrowd": 0, "category_id": 1, "bbox": [131.0, 119.0, 20.0, 21.0], "area": 334, "segmentation": {"size": [512, 512], "counts": "PdQ23j?6K4L4N0O101N10001O000001O0O1O101N3N3KT\\d5"}, "image_id": 77, "id": 1183}, {"iscrowd": 0, "category_id": 1, "bbox": [89.0, 178.0, 34.0, 27.0], "area": 512, "segmentation": {"size": [512, 512], "counts": "oe\\13j?3N3L3N3L31O010O01O010O01O010O01O010O01O010O01O010O01O001M2M3N3M2MUZR6"}, "image_id": 77, "id": 1184}, {"iscrowd": 0, "category_id": 1, "bbox": [87.0, 198.0, 32.0, 28.0], "area": 521, "segmentation": {"size": [512, 512], "counts": "ff[13k?2M4M2N3L3N2N30O010O0010O0010O010O00010O010O0M3N3O001N1M4M2N2McYT6"}, "image_id": 77, "id": 1185}, {"iscrowd": 0, "category_id": 1, "bbox": [137.0, 206.0, 90.0, 72.0], "area": 2844, "segmentation": {"size": [512, 512], "counts": "`gT21l?4M2M4M2M3N3L3N3L3N2M4M2M4N10010O010O0001M2N3L3N2M4M2010O00010h@YOU?i010O0010O0010O0010O0010O00O2O0010O00010O0^A[Oi=f0TB\\Om=c0PBAo=?oACR>=jAFV>:hAIW>8eAK[>5cANZ>m000010O010O00010O010O00010O0N3M2M3N3L3N3L3JRAXOP?f07L3N3L3N2M4M2MkX^4"}, "image_id": 77, "id": 1186}, {"iscrowd": 0, "category_id": 1, "bbox": [106.0, 241.0, 30.0, 30.0], "area": 521, "segmentation": {"size": [512, 512], "counts": "TXe12l?3M2M3N3L3N3M2M40O0010O0010O0010O010O0010O0M4M2M3N3M2M4M2MYhk5"}, "image_id": 77, "id": 1187}, {"iscrowd": 0, "category_id": 1, "bbox": [223.0, 245.0, 52.0, 91.0], "area": 2663, "segmentation": {"size": [512, 512], "counts": 
"Ui_32Q1Oi=3TB0i=4TBOi=3TB0i=3QBF_O:]>3PB4m=OQB3m=0PB3e=XOXBh003e=XOYBg004d=Y1M3N3L3N3L3N2O2O01N1N2M4M2M4O000O2M2M4M2M3N1N102HoA^NU>_17M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3NTXf3"}, "image_id": 77, "id": 1188}, {"iscrowd": 0, "category_id": 1, "bbox": [53.0, 247.0, 44.0, 61.0], "area": 1518, "segmentation": {"size": [512, 512], "counts": "Zij01m?2M4M2N2M4^OD\\A>b>D[A`0a>D\\A>b>D[A?c>C[A`0a>a0N3L3N3L30010O010O00010O01O0N2M4M2M4M2M3N4L3L3N3L3N2M4M2M4M2N2M4MnW_6"}, "image_id": 77, "id": 1189}, {"iscrowd": 0, "category_id": 1, "bbox": [267.0, 270.0, 52.0, 94.0], "area": 2721, "segmentation": {"size": [512, 512], "counts": "QjU43k?2f@MJ0]>6gALJ0]>7eAMJ0^>5fAMJ0]>7eA:X>HfA:W>i0N3L3N3L3N2M4M2M4M2M3N3L3N3MO3N3L3N2M4M2M4M2M3N3L3JoA[NT>b16M2O1N2O2M3N1N3N2M3N2M4M2M4M2M4M4K5L\\WP3"}, "image_id": 77, "id": 1190}, {"iscrowd": 0, "category_id": 1, "bbox": [184.0, 272.0, 25.0, 21.0], "area": 306, "segmentation": {"size": [512, 512], "counts": "lXl21l?4M2N2M4M201O010O00010O010O0010O0010OO2M2M3N3L3N[Wg4"}, "image_id": 77, "id": 1191}, {"iscrowd": 0, "category_id": 1, "bbox": [90.0, 281.0, 51.0, 59.0], "area": 1818, "segmentation": {"size": [512, 512], "counts": "VZ]11m?3L3N3L3N2M40O01O01O01N1@_O_Ad0]>_O`Ad0^>_O_Ac0_>_O^Ae0^>`0N3N10010O010O010O00010N1M4M2000O2L3N2N30O01O01OM4M2M4lN[Ai0g>TO\\A=LKT?1PALR?2PALS?1o@LT?1?NjVi5"}, "image_id": 77, "id": 1192}, {"iscrowd": 0, "category_id": 1, "bbox": [139.0, 308.0, 52.0, 53.0], "area": 1498, "segmentation": {"size": [512, 512], "counts": "jjU22l?2N3L3N2N3L3010O010O00010O010OO1N3L3N3M2M4M2N2M4M2N3L3O2O00010O01O0N3O01O010O01@cAZO^>d0dAZO_>b0eAZO^>d0dAZO^>d0a0L3N3M2M4M2N2MVVP5"}, "image_id": 77, "id": 1193}, {"iscrowd": 0, "category_id": 1, "bbox": [296.0, 309.0, 32.0, 54.0], "area": 779, "segmentation": {"size": [512, 512], "counts": "P[d42l?3M2M3N3M2M4M2N3L3N2N3L3N3M201O01BYAAf>>\\ACa>=aAC]>=mA@T>`0oA]OQ>d0PBZOR>g0nAUOU>c0]ACT?:o@DR?9QADR?:6VAMg>6VALg>7VAMg>6VALh>6UAMh>h0M2M4N101O01O010O01O01O010O01O0N3L22M2M4M2M3N3M2M4M2M4M2N2M4M2M4M2N2M4M\\eW4"}, "image_id": 77, "id": 1195}, {"iscrowd": 0, "category_id": 1, "bbox": [350.0, 353.0, 52.0, 74.0], "area": 2160, "segmentation": {"size": [512, 512], "counts": "Z\\_51m?3L3N3AHRA:l>HQAGQA;l>HRA:l>?M2M4jAiN_=Y1^BjN_=Z1^BiN`=X1^BjNc=U1ZBnNf=S1WBPOh=P1VBROk=b1O010O01O01O010O010O0001O0N3L3N3M2M2O3GfAhN\\>U1:M2N2M4M2M4M2N2M4M2N3L3N3L3Nbdf1"}, "image_id": 77, "id": 1196}, {"iscrowd": 0, "category_id": 1, "bbox": [440.0, 360.0, 50.0, 50.0], "area": 931, "segmentation": {"size": [512, 512], "counts": "a[l61n?1N3N2N2M3N1O20000O0100000g@Cm>>QADo>;o@GR?9l@IT?7j@KV?`0O10O1000O10O100000O10O1000O10O100000O10N2N2M2O2N2M3N2N1O2M3N2Nhc:"}, "image_id": 77, "id": 1197}, {"iscrowd": 0, "category_id": 1, "bbox": [239.0, 362.0, 42.0, 58.0], "area": 1499, "segmentation": {"size": [512, 512], "counts": "jkg31m01P>2mA2P>1mA1P>2mA2P>1mA1P>2mA1Q>1mA2Q>0kA3U>MiA6W>JeA9[>f010O0010O0010O00010O010O00010OO2L3N2M4M2M3N3L3N3L3M3N3L3N3L3N2M\\Tc3"}, "image_id": 77, "id": 1198}, {"iscrowd": 0, "category_id": 1, "bbox": [382.0, 384.0, 38.0, 51.0], "area": 1011, "segmentation": {"size": [512, 512], "counts": "Y]o52k?3N3M2M4M2M3N3M2M4M2M3N3M2M4M2M4M20010OO2L3N3M2M3N3L3N3M2M3N3L3N3M2M4M2M3Njc]1"}, "image_id": 77, "id": 1199}, {"iscrowd": 0, "category_id": 1, "bbox": [279.0, 390.0, 48.0, 61.0], "area": 1601, "segmentation": {"size": [512, 512], "counts": "dm[41m?3M2BLo@6o>Ko@8n>KPA6n>Mo@6g>C[A8M7e>D[Ak0d>VO[Al0b>;N1N3M2O2M3N110O10O01000O0O2M3M2O2M3N1N3N2M2N3N1N3N2M2O2M3M2O2M3N1N3N1N3M3NWSl2"}, "image_id": 77, "id": 1200}, {"iscrowd": 0, "category_id": 1, "bbox": [25.0, 
416.0, 51.0, 70.0], "area": 1940, "segmentation": {"size": [512, 512], "counts": "^m<4j?4K6K5J5L5K4O2M2O2M0010O01aAmNP>R1nASOo=m0nAYOP>g0mA]OQ>Y1010O0010O0010O0010O00O2L3N3L3N3L3N2M4M2M4L3O1XOm@c0X?1O01O0M4M2N2M4M2M4M[bi6"}, "image_id": 77, "id": 1201}, {"iscrowd": 0, "category_id": 1, "bbox": [436.0, 425.0, 55.0, 66.0], "area": 2093, "segmentation": {"size": [512, 512], "counts": "Q^j62k?4M2N3L3N2M4i@]Om>e0QA]Om>l0M4N110O0010O00eAnNi=R1SBQOm=o0QBTOo=l0nAVOR>j0kAYOU>h0hA[OX>V1010O00010O010O01M2N2N3L3010O0M4M2N2M4M2HdAiN`>S1cAkN_>S18N3L3N3UOm@e0X?M4M2M4M2N2M4M]R:"}, "image_id": 77, "id": 1202}, {"iscrowd": 0, "category_id": 1, "bbox": [321.0, 430.0, 49.0, 76.0], "area": 1838, "segmentation": {"size": [512, 512], "counts": "inP51l?3N3L3M3N3L3XA@P>a0lABU>=iAEW>;fAIY>8dAJ]>5`AO_>2]A1d>e0N3L3M3N3L3N3L3M3N3L3M3ON4M2M3N3L3M4IlA^NV>`16M4L3N2M4M2M4L3N2M4L3N2M4M2M4L3N[bV2"}, "image_id": 77, "id": 1203}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 431.0, 30.0, 61.0], "area": 1062, "segmentation": {"size": [512, 512], "counts": "_=b1^>0010OfAaNS>_1kAdNU>c100012M2O1N3L3K5M4M2N2VOSA>o>]OTAb0U?10O0010O010O00M4M5K4K6KTa`7"}, "image_id": 77, "id": 1204}, {"iscrowd": 0, "category_id": 1, "bbox": [369.0, 455.0, 52.0, 57.0], "area": 1947, "segmentation": {"size": [512, 512], "counts": "Qoh52k?4M2M4l@E_>=_AE^>>_AF^>=^AF_>=_AF^>=^AF_>o0N3N1010O0010O0010O0010O0010O0010O0M3N2001O00001O0M4M2M3N3L3N3L3M3N3L3N3L3N2M4M2M4L3NaQ]1"}, "image_id": 77, "id": 1205}, {"iscrowd": 0, "category_id": 1, "bbox": [69.0, 460.0, 49.0, 52.0], "area": 1493, "segmentation": {"size": [512, 512], "counts": "moR13k?2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2N2N21O001O00001O001O00N2NO103M2M4M2M3N3M2M4M2M4M2N3L3N3L3N2N_aT6"}, "image_id": 77, "id": 1206}, {"iscrowd": 0, "category_id": 1, "bbox": [128.0, 462.0, 27.0, 28.0], "area": 433, "segmentation": {"size": [512, 512], "counts": "R_P21m?2M3N3M2N3M2M4M2N3O010O01O01O010O01M2N3M2N3M2M4M2N2N]Qb5"}, "image_id": 77, "id": 1207}, {"iscrowd": 0, "category_id": 1, "bbox": [423.0, 492.0, 38.0, 20.0], "area": 483, "segmentation": {"size": [512, 512], "counts": "hoc62k?3N3L3N2a@DX??e@C[?a01O00001O001O00001O001O00001O001O00001O001O00001O001O00001N1M4MYPi0"}, "image_id": 77, "id": 1208}, {"iscrowd": 0, "category_id": 1, "bbox": [131.0, 496.0, 38.0, 16.0], "area": 334, "segmentation": {"size": [512, 512], "counts": "noQ22l?2N2N2O1N2N2N2O1001O001O001O001O001O001O001O001O001O001O001O1O001O001O001OQP[5"}, "image_id": 77, "id": 1209}, {"iscrowd": 0, "category_id": 1, "bbox": [479.0, 498.0, 21.0, 14.0], "area": 198, "segmentation": {"size": [512, 512], "counts": "no_72l?2M3N2M3N21O00001O00001O001O00001O0O2L3NZ`5"}, "image_id": 77, "id": 1210}, {"iscrowd": 0, "category_id": 1, "bbox": [19.0, 501.0, 28.0, 11.0], "area": 190, "segmentation": {"size": [512, 512], "counts": "no92k?3N2M3O100001O001O00001O001O00001O00001O001O00001O0000SPX7"}, "image_id": 77, "id": 1211}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 509.0, 1.0, 3.0], "area": 3, "segmentation": {"size": [512, 512], "counts": "m?3P`o7"}, "image_id": 77, "id": 1212}, {"iscrowd": 0, "category_id": 1, "bbox": [315.0, 30.0, 73.0, 57.0], "area": 2444, "segmentation": {"size": [512, 512], "counts": "jam43k?2CLl@7Q?Lk@7S?Kk@8Q?=N210O0010O010O001VOn@e0Q?YORAf0T?10O010O0001M2N3M2M4M2N2M4O010O010O01O01O010O010O01OM4M201O010O01O01O010O010O01O0fNaAT1^>iNfAV1b>1O01O010O0M4^OXAHj>6XAHj>5ZAGj>6XAHj>5YAHk>5a010L3N`nm1"}, "image_id": 78, "id": 1213}, {"iscrowd": 0, "category_id": 1, "bbox": [268.0, 62.0, 27.0, 24.0], "area": 316, "segmentation": {"size": [512, 512], 
"counts": "RRV46h?3N1O2O1O1O001O1000O10OB`@=b?O0100000O01000O1N1O2M3N2N1Ne]\\3"}, "image_id": 78, "id": 1214}, {"iscrowd": 0, "category_id": 1, "bbox": [184.0, 84.0, 88.0, 78.0], "area": 2679, "segmentation": {"size": [512, 512], "counts": "PTl21m?2M3N3M2M4M2M3N3L3N3L3N2N3L3N3L3N2M4O010O0010O001ZOfAEY>9jAFV>8lAIT>3PBLP>2RBNo=NTB3k=KXB4i=HZB9e=E^B:b=CaB=`=@bBa0_>O010O00010O010O00f@_OU?b0h@@Y?c0010O01O010N1N2M4M2N3L3N2M4O010O01O01O010O01O01O010O01N1M4M2M3N3L3N3L3N2M4M2N3L3N2MWlg3"}, "image_id": 78, "id": 1215}, {"iscrowd": 0, "category_id": 1, "bbox": [482.0, 374.0, 19.0, 22.0], "area": 238, "segmentation": {"size": [512, 512], "counts": "V\\a72l?2N2N3M2N3L3N3N110O0001M2N3M2M4M2N3MWT5"}, "image_id": 78, "id": 1216}, {"iscrowd": 0, "category_id": 1, "bbox": [468.0, 414.0, 44.0, 70.0], "area": 1474, "segmentation": {"size": [512, 512], "counts": "a]Z72l?2M3N3M2M4M2M4bA\\O\\=f0aB]O_=c0_B@a=`0[BCe==YBFg=:UBIk=7SBKm=5QBNo=2mA1S>OkA4U>k000010O0POiA8W>DmA;S>CoA=Q>@SB`0m=]OUBc0k=[OXBe0h=WO[Bi0e=UO]Bk0]>0O010O0010O0010O0010O0010O001jA"}, "image_id": 78, "id": 1217}, {"iscrowd": 0, "category_id": 1, "bbox": [479.0, 502.0, 26.0, 10.0], "area": 129, "segmentation": {"size": [512, 512], "counts": "oo_71m?2N2M3N21O001O001O00001O001O001O00001O001O001O0000QP3"}, "image_id": 78, "id": 1218}, {"iscrowd": 0, "category_id": 1, "bbox": [452.0, 504.0, 20.0, 8.0], "area": 88, "segmentation": {"size": [512, 512], "counts": "m_R73k?2N2O11O001O00001O001O001O00001O001O00Q`c0"}, "image_id": 78, "id": 1219}, {"iscrowd": 0, "category_id": 1, "bbox": [33.0, 182.0, 14.0, 12.0], "area": 104, "segmentation": {"size": [512, 512], "counts": "le`02k?3M4O00010O01O01O01O010M2MYZX7"}, "image_id": 79, "id": 1220}, {"iscrowd": 0, "category_id": 1, "bbox": [92.0, 300.0, 256.0, 212.0], "area": 29753, "segmentation": {"size": [512, 512], "counts": "n_^12l?2M3N2M3N2N2M3N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2M3N2N2M3N2M3N2N2M300001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001OO1N2M3N2N2M3N2M3N2N2M3N2N2M3N2M3N2N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2N2M3N2001O001O001O00001O001O00001O001O001O00001O001O00001O001O001O0iLYF5g9I[F7f9E^F:b9D`F^9_OeFa0\\9bNUFTOa0Z2Z9`NXFROb0]2V9^N[FSOa0_2U9[N]FSOa0a2R9]N\\FoNe0d2P9\\N[FnNg0f2n8]N[FiNk0i2j8^N[FgNm0k2i8^NZFdNP1m2f8_NeGa1\\8^NeGa1[8`NdG`1]8]NeGc1[8[NhGd1X8YNkGg1V8VNmGi1S8UNoGk1R8WMnEf0T2R2n7VMQFe0S2U2Q8hMRHX2n7fMUHY2l7dMVH\\2j7aMZH^2g7_M[Ha2e7\\M^Hd2b7ZMaHe2]:01O001O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O0M4M2N2M4M2M4M2N2M4M2N3L3N3L3N2N3L3N3M2M4M2M3N3M2MPba2"}, "image_id": 79, "id": 1221}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 69.0, 44.0], "area": 1600, "segmentation": {"size": [512, 512], "counts": "0i0W?1O1O1O1O001O1O1O1O1O001O1O1O1O1O01000O1000O100O1M2O2N2N2N2M2O2N2N2N2M3N1O2N2N2M100001N3N1O2N2N2M3N2N1O2N2M1001O2N2M3N2N2N1N3N2N2N2N2M2Of_m6"}, "image_id": 81, "id": 1222}, {"iscrowd": 0, "category_id": 1, "bbox": [130.0, 0.0, 50.0, 38.0], "area": 977, "segmentation": {"size": [512, 512], "counts": "fPQ21n?1O2N2M3N2N1O2M3N2N2N1N3N2N2N2M2O2N2O1OO1O1O1O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1O1NRPV5"}, "image_id": 81, "id": 1223}, {"iscrowd": 0, "category_id": 1, "bbox": [63.0, 13.0, 61.0, 67.0], "area": 2139, "segmentation": {"size": [512, 512], "counts": "Qbo01m?3[ONYA4e>NYA3f>OXA3Y>IeA6O4Z>HdA614Y>HdA614Y>GeA703Y>IeA603Y>R1N2N2N2M2O2N200O01N2M3N2N2N1N3N2N2N2M2O2N2N0O10O100000O0100000O101O2N2M3N2N1O2N2M3N2N2N1N3N2N2N2M2O2NVoQ6"}, "image_id": 81, "id": 1224}, {"iscrowd": 0, "category_id": 1, 
"bbox": [411.0, 75.0, 63.0, 79.0], "area": 3499, "segmentation": {"size": [512, 512], "counts": "fc]6Q1o>O10000000000000000000000000000000000000POYOgBg0Y=B^Bb0^=IWB7i=P10000F:00000000000000000000000000000000000O100000O1000000000000n0ROf0ZO0000000000Qlb0"}, "image_id": 81, "id": 1225}, {"iscrowd": 0, "category_id": 1, "bbox": [467.0, 164.0, 28.0, 29.0], "area": 799, "segmentation": {"size": [512, 512], "counts": "TeY7`0`?=C000000000000000000000000000000000000000000000000000lZ8"}, "image_id": 81, "id": 1226}, {"iscrowd": 0, "category_id": 1, "bbox": [493.0, 385.0, 19.0, 21.0], "area": 185, "segmentation": {"size": [512, 512], "counts": "Wlf71m?3N2N1O2N20000^@G\\?9b@I^?=0N2N2N11000N2N2N2NdC"}, "image_id": 81, "id": 1227}, {"iscrowd": 0, "category_id": 1, "bbox": [282.0, 469.0, 67.0, 43.0], "area": 1818, "segmentation": {"size": [512, 512], "counts": "[_]41m?3N2N2N1O2M3N2N2N2M2O2N2N2N2n@SOm>R1N2O010000000O01O1OO1O1O1N2O1O1O1001O1O1O1O1O001O1O1M3N101000000O1O001O1O00N2OO3N2N2N2N1N3N2L`@Eb?94N1N3N2NePa2"}, "image_id": 81, "id": 1228}, {"iscrowd": 0, "category_id": 1, "bbox": [279.0, 508.0, 7.0, 4.0], "area": 15, "segmentation": {"size": [512, 512], "counts": "oo[41m?2O11O1O1O00Q``3"}, "image_id": 81, "id": 1229}, {"iscrowd": 0, "category_id": 1, "bbox": [264.0, 511.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "o_T41P`k3"}, "image_id": 81, "id": 1230}, {"iscrowd": 0, "category_id": 1, "bbox": [247.0, 0.0, 68.0, 49.0], "area": 1988, "segmentation": {"size": [512, 512], "counts": "d`k32m?2N2M3N2N1O2M3N2N2N1O2M3N2N2N2N1O2O10000O10O100000O1N1N3N2N2N2O1O0000O1O1O1O1N3N2N2N2N101000000N2N1N3N2O10N2O1O01N2N2N2N2N1He@I^?5d@I^?58N2N1N^_R3"}, "image_id": 82, "id": 1231}, {"iscrowd": 0, "category_id": 1, "bbox": [305.0, 0.0, 17.0, 9.0], "area": 82, "segmentation": {"size": [512, 512], "counts": "P`h41o?1O1O1O1O001O1O1O1OO1O1O1O1N2O1OQPo2"}, "image_id": 82, "id": 1232}, {"iscrowd": 0, "category_id": 1, "bbox": [330.0, 0.0, 5.0, 3.0], "area": 9, "segmentation": {"size": [512, 512], "counts": "PPU51o?1O1OO1OQ`h2"}, "image_id": 82, "id": 1233}, {"iscrowd": 0, "category_id": 1, "bbox": [213.0, 34.0, 68.0, 57.0], "area": 2062, "segmentation": {"size": [512, 512], "counts": "jaZ31n?2N2N2M2O2N2N2N2M3N2N1O2N2N2M3N2SAPOi>T1000000O010N2N2N2N2M3N1O2N2N2000O1N2N2M2O2N2N2N2M3N2N1O2N2000000000O10OO1OO2O2N2N1O2N2M3N2Ic@H_?6b@I_?67N2N2MV^c3"}, "image_id": 82, "id": 1234}, {"iscrowd": 0, "category_id": 1, "bbox": [183.0, 75.0, 66.0, 57.0], "area": 2035, "segmentation": {"size": [512, 512], "counts": "Sck22m?2M2O2N2N2N2N2M3N1O2N2l@ZOk>h0SAYOl>i0RAYOk>P1N2N2N2N2N1N3O100O100000O1000O10N2N2N2M3N2N1O2N2N2O1000M3N2N2N2N11N2N2M3N2N2N1O2N2M3N2N2N2N1O2M3N2N2N2Nj\\S4"}, "image_id": 82, "id": 1235}, {"iscrowd": 0, "category_id": 1, "bbox": [348.0, 78.0, 64.0, 73.0], "area": 2327, "segmentation": {"size": [512, 512], "counts": "kS^52m?2M2UOKjA7T>KjA7S>LkA6S>LjA7T>KjA6U>KjA7S>LkA6S>LjA7T>KjA7T>KjA6T>LkA6S>LjA7T>l0O01N2N2M2O200O1000O13M0O1000O1000O10000000O01000000000O0100O1N2N2N1N3N2N2GSA\\Oo>b0SA[OP?b0RA]Oo>b09N2N2M3N2N2N1N3NYla1"}, "image_id": 82, "id": 1236}, {"iscrowd": 0, "category_id": 1, "bbox": [143.0, 116.0, 65.0, 55.0], "area": 1834, "segmentation": {"size": [512, 512], "counts": "\\dW22m?2M2O2N2N2N2N2M3N1O2N2N2M3N2N2N1O2N2M3N200000O1000O10000000O1000O1000O1N2N2N2N0O0101O2N1O10O2N2M3N2N2N2N1O2M3N2N2N2N2N1N3N2N2N2Nckg4"}, "image_id": 82, "id": 1237}, {"iscrowd": 0, "category_id": 1, "bbox": [309.0, 138.0, 77.0, 57.0], "area": 2534, "segmentation": {"size": [512, 512], "counts": 
"Qej41n?2N2N2M2O2N2N2M3N2N1O2N2M3N2N1SAROg>P1WAROf>V1N2N110O100000O10O10000000O10O100000O00O00O10O1000O10O1002N2O0100000O1000O100000O10O100000N2N2ROVAc0k>\\OWAb0k>\\OWAa0l>]OUAb0m>\\OUAb0V?N2N2M2O2N2N2M3Nbjn1"}, "image_id": 82, "id": 1238}, {"iscrowd": 0, "category_id": 1, "bbox": [111.0, 159.0, 67.0, 53.0], "area": 1968, "segmentation": {"size": [512, 512], "counts": "feg12l?3N1O2N2N2N2M3N2N1O2N2N2M3N2N2N1O2O1O100000O100000N2N2N2N1N3N2N2N2OO2N2M3N2N2N1O2N2M3N2000O1000O1M3N2N1O02N2000O1N1N3N2N2Hd@I^?5d@I^?58M2O2N[jV5"}, "image_id": 82, "id": 1239}, {"iscrowd": 0, "category_id": 1, "bbox": [377.0, 170.0, 70.0, 68.0], "area": 2182, "segmentation": {"size": [512, 512], "counts": "ifl51m?2O2N2N2N2M3N1O2N2N2M3N2N1O2N2M3N2N0000O2O2N2N1O200000000O]OVOPBk0n=WORBi0l=YOTBg0j=ZOWBf0f=]OWBf0g=\\OWBe0h=]OVBe0h=]OVBe0h=]OVBe0g=]OWBf0i=ZOUBg0l=YORBi0n=c02N1O2M3N2N2N1O2M3N2F_AROc>l0_AROc>l0_AROb>m0:M3N2N2N2N1N3N2N2N2N2M2O2N2NnYP1"}, "image_id": 82, "id": 1240}, {"iscrowd": 0, "category_id": 1, "bbox": [76.0, 199.0, 69.0, 54.0], "area": 2007, "segmentation": {"size": [512, 512], "counts": "nVV12m?2N2N2N2N2M2O2N2N2N2N2M3N1O2N3M2M3O1000O100000O1O1N1N3N2N2N2N2N20M2O2N2N2N2N2M3N1O2N2N200000O10O100M3N2N10O2N2O00N3N2N2N2N2N1Kb@Fa?85N2M3N2NQYg5"}, "image_id": 82, "id": 1241}, {"iscrowd": 0, "category_id": 1, "bbox": [428.0, 211.0, 70.0, 67.0], "area": 2185, "segmentation": {"size": [512, 512], "counts": "TXf62m?2N2N2N2M3N1O2N2N2N2M3N1O2N2N2M3N1O00O10O11O2N2M3N2N20O1@VOiAj0U>XOkAg0S>\\OmAd0Q>^OoAb0o=_ORBa0l=ASB`0k=BSB`0j=CTB>k=DRB?l=CRB?l=BSB`0k=BSB`0k=BSB`0m=i00O2N2M3N2N2N1O2M3DcARO_>l0cARO_>k0dASO^>k0cATO^>k0dASO^>k0=N2N2M3N2N1O2N2M3N2N2N1O2Mhh6"}, "image_id": 82, "id": 1242}, {"iscrowd": 0, "category_id": 1, "bbox": [219.0, 222.0, 71.0, 78.0], "area": 2872, "segmentation": {"size": [512, 512], "counts": "Pi]32m?1O2M3N2N2N1N3N2N2[O]OjAe0h=\\OPB15e0i=]OPBO6f0h=]OPBO6f0h=\\OPB16d0i=]OoA15c0k=LSB3n=NQB2o=0oA0Q>o0O011O2N2M30O010000000M2O00O1000O10O1000O3N1O2N2M3N2N1O2M3N2N2O001mN[Aj0e>TO\\Am0d>QO^An0c>PO_An0k>N2N1N3N2N2N2M2O2N2N2M3N1O2N2MSh^3"}, "image_id": 82, "id": 1243}, {"iscrowd": 0, "category_id": 1, "bbox": [42.0, 238.0, 69.0, 56.0], "area": 2063, "segmentation": {"size": [512, 512], "counts": "VXe01m?3N1O2N2M3N2N2N1O2M3N2N2N2N1N3SAQOg>P1WAQOi>T1000000000O01N2N2N2N2M2O2N2N2N20O0O2N2M3N2N2N1O2M3N2N2N2N101000000000O01NO1001O2N2M3N2N1O2N2Ja@Gb?75N2N1O2MlWX6"}, "image_id": 82, "id": 1244}, {"iscrowd": 0, "category_id": 1, "bbox": [273.0, 269.0, 71.0, 75.0], "area": 2609, "segmentation": {"size": [512, 512], "counts": "UjX41n?2M2O2N2FIh@9V?Ih@9V?Hh@;m>BWAg0h>[OVAg0h>[OVAg0h>:N2M2JfNeA\\1X>gNfA[1X>8N2N2M2O2N2N2N1N02O2N2N1O0O10O1000O102N1O2N2M3N2N2N1N3N2N2N2N1N3O1PORAm0Q?000000O1N1O2N2M3N2N1O2N2M3N2N2N10100O1N2Ncfc2"}, "image_id": 82, "id": 1245}, {"iscrowd": 0, "category_id": 1, "bbox": [486.0, 285.0, 26.0, 41.0], "area": 567, "segmentation": {"size": [512, 512], "counts": "kYc72l?2O2N2N2N2N2M3N1O2N2N0O10000000002M2O2N2N2N2N2M2O2RG"}, "image_id": 82, "id": 1246}, {"iscrowd": 0, "category_id": 1, "bbox": [164.0, 298.0, 75.0, 72.0], "area": 2743, "segmentation": {"size": [512, 512], "counts": "iYb22m?2N1O2YAL^=7`BJ_=8_BJ_=8_BJ_=8^BK`=6hABb0:c=<[BFc=<[BEd=<[BFc=[OTAf0j>]OSAf0k>9N2GjNeAW1Y>lNdAW1Z>kNdAW1Z>9N2M101O1N3N1OO2O2N101O1N2N2N1N2O002M3N2N1O2M3N2N2M21000000O001O1000N2N2N1N3N2N2N2M3N1O2M3N2N2N2M2O^em1"}, "image_id": 82, "id": 1248}, {"iscrowd": 0, "category_id": 1, "bbox": [201.0, 338.0, 88.0, 67.0], "area": 3125, "segmentation": {"size": [512, 512], "counts": 
"UlT31n?2N2N1N3N2EFm@Q1aAnN^>S1dAjN]>V16O101O2N2N2M3N2N1O2N2M3N2N2N1O2M3N2N2N2NPU_3"}, "image_id": 82, "id": 1249}, {"iscrowd": 0, "category_id": 1, "bbox": [366.0, 354.0, 74.0, 76.0], "area": 2705, "segmentation": {"size": [512, 512], "counts": "c\\g51n?2N2M2O2N2N2N2N2M2O2N2N20000N2M2O2[OTOSBn0k=TOSBn0k=TOSBn0k=TOSBn0k=SOSBP1j=SOTBn0k=TOSBn0k=TOSBn0k=e0N2N2M210O10N2N2N2N2M2O2N2N2N2N2M3N1O2N2N2M3N2N1O1O00O0102N2O10O01N2N2N2N2M3N1O2N2N2M3N2N1O2N2NRdS1"}, "image_id": 82, "id": 1250}, {"iscrowd": 0, "category_id": 1, "bbox": [270.0, 373.0, 69.0, 61.0], "area": 2199, "segmentation": {"size": [512, 512], "counts": "j\\W41n?1O2N2N2M3N2N2HBk@?T?Cj@?S?Dk@>S?9N2N2N2N1N1000O11010O10O100000000O0O2N2N2N2N2M3O1O010N1O0000O1000O1002N2M3N2N1O2N2N2IXAROk>k07N2N1O2N2N2M3N2N2N1O2N2M3N2NhSf2"}, "image_id": 82, "id": 1251}, {"iscrowd": 0, "category_id": 1, "bbox": [88.0, 387.0, 75.0, 73.0], "area": 2766, "segmentation": {"size": [512, 512], "counts": "_]\\12l?3Y@L_?6_@L_?6_@L_?WOkAl0S>UOlAm0R>UOlAm0R>UOlAm0Q>VOmAk0R>WOkAl0S>b0N2N2M3N1O2N11N2N2M2O2N2N2N2M3N1O2N2N2M3N2N1O2N2M010000000O010000000O0100001N3N2N2N2N1N3N2N2N2N2M2O2N2N2N2M3N`S^5"}, "image_id": 82, "id": 1252}, {"iscrowd": 0, "category_id": 1, "bbox": [315.0, 400.0, 66.0, 90.0], "area": 2742, "segmentation": {"size": [512, 512], "counts": "jmm41n?2N2M3IIb@8\\?Jc@8[?Jc@8[?8N2N1N3N2N2N2N0O101O2iAmN^=U1_BmN_=U1`BmN^=U1`BmN^=U1`BmN^=U1`BmN^=U1_BnN^=U1`BlNa=S1^BoN`=S1^BoN`=j1M3N2N1O2N2O1000RNiBQ1X=lNjBR1X=mNjBQ1X=mNjBQ1X=mNjBQ1X=mNjBQ1X=lNkBQ1X=mNiBR1X=mNjBQ1X=mNjBQ1X=mNjBP1Y=mNiBR1Y=lNgBm0[OVOf>k0\\ASOd>m0]AROc>m09O2N2N2N2M2O2N2N2M3N1O2N2N2M3NTRQ2"}, "image_id": 82, "id": 1253}, {"iscrowd": 0, "category_id": 1, "bbox": [406.0, 411.0, 82.0, 63.0], "area": 2620, "segmentation": {"size": [512, 512], "counts": "d][62m?2N2N2M2O2N2N2N2M3N1O2O1O100O0O2M3N2N2N2N1N3N2N2N2N20O01000000000O01000000000O010000000N2N1O1O0O1000O11O2M2O2N2N2N0O10O1000O1000O1101O1N2N1O2M3N2N2N2M3N2N1O2N2M3Ndb;"}, "image_id": 82, "id": 1254}, {"iscrowd": 0, "category_id": 1, "bbox": [136.0, 429.0, 66.0, 83.0], "area": 2766, "segmentation": {"size": [512, 512], "counts": "]_T22m?1O2M3N2N2N2M2O2N2N2N2ROZOWBODi0T>ZOVBODi0T>ZOUB0Eh0S>ZOWBP1g=ROWBP1g=ROWBP1g=ROVBP1h=SOVBo0h=SOVBo0h=f0N1O1N2O00000O10O102N1O2N2M3N2N1O2N2M3N2N000O2O2N2N1O2M3N2N2lNYAl0h>SOZAj0i>TOYAj0P?N1O2N2M3N2N2N1N3N2N2N2Neaj4"}, "image_id": 82, "id": 1255}, {"iscrowd": 0, "category_id": 1, "bbox": [17.0, 441.0, 90.0, 71.0], "area": 3749, "segmentation": {"size": [512, 512], "counts": "Uo82m?2N1O2M3N2N2N2N1N3N2N2N2N1N3N2N2N2N2M2O2N2N2N2M3eA_NT>b1kA_NT>g1O1O1N2O1O1O1O1N2O1O1O1001O001O001O001O001O001O001O001O000O2N1O2M2O2N1O2N1N3N1O2N1N3N1O2N1O2M2O1O2N1O2M2O2N2N1O2M3N2N2N1N3N2N2N\\QZ6"}, "image_id": 82, "id": 1256}, {"iscrowd": 0, "category_id": 1, "bbox": [478.0, 454.0, 34.0, 58.0], "area": 1251, "segmentation": {"size": [512, 512], "counts": "S__71n?2N2N1N3b@HQ?:m@HQ?:l@IR?9l@IR?9l@HS?d0N2M3N2N2N2N2N1O2M3N2N2N1O1O1N2O02N2N2M3N2N2N1O2NdA"}, "image_id": 82, "id": 1257}, {"iscrowd": 0, "category_id": 1, "bbox": [381.0, 466.0, 59.0, 46.0], "area": 1642, "segmentation": {"size": [512, 512], "counts": "bon52m?1W@Ob?3\\@Oa?:N2N1O2M3N2N1O1N2O1O1O1O1N20000N2O1O1O1O1N2O1O1001O001O1O1O1O0O000O010000001N3N2N2N2M2O2N2N2M3N1O2N2M3N2N1O2N2MRaS1"}, "image_id": 82, "id": 1258}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 486.0, 38.0, 26.0], "area": 561, "segmentation": {"size": [512, 512], "counts": "ooo21i?0[@1d?0[@1d?1Z@0e?6O1N2O1O1O1O1O1N2O1O1O1O1O1O11O001O1O1O1O1O1O001O1Ec@1^?Le@3\\?Kf@4d?O1O001O1O1OQP]4"}, "image_id": 82, "id": 1259}, 
{"iscrowd": 0, "category_id": 1, "bbox": [91.0, 498.0, 27.0, 14.0], "area": 200, "segmentation": {"size": [512, 512], "counts": "no]12m?1O1O1N2O1O1O1N2O1O11O1O001O1O1O1O1O001O1O1O1O001O1OQ`T6"}, "image_id": 82, "id": 1260}, {"iscrowd": 0, "category_id": 1, "bbox": [459.0, 509.0, 4.0, 3.0], "area": 8, "segmentation": {"size": [512, 512], "counts": "noU72m?11O1OQPh0"}, "image_id": 82, "id": 1261}, {"iscrowd": 0, "category_id": 1, "bbox": [472.0, 509.0, 5.0, 3.0], "area": 11, "segmentation": {"size": [512, 512], "counts": "n_\\72m?1001O1OQPa0"}, "image_id": 82, "id": 1262}, {"iscrowd": 0, "category_id": 1, "bbox": [38.0, 0.0, 33.0, 18.0], "area": 329, "segmentation": {"size": [512, 512], "counts": "RPc01n?2N101O1O1O1O1O1O1O1O001O1O1O1O1O1O00O1O1O1O1N2O1O1O1N2O1O1O1O1NR`l6"}, "image_id": 83, "id": 1263}, {"iscrowd": 0, "category_id": 1, "bbox": [63.0, 0.0, 77.0, 53.0], "area": 2502, "segmentation": {"size": [512, 512], "counts": "Qao02l?3N1O2N2N2M3N1O2N2M3N2N1O2N2M3N2N2N1N3N1O000O10O1000001N2O1O1001O1O001O1O1O1O1O1O1O1O1O001O1O1O1OO1O1N2O1O1O1N3N1O2N2N2M3N2N1O2M3N2N2N1O2M3N2N2N2M2O2N2N`oi5"}, "image_id": 83, "id": 1264}, {"iscrowd": 0, "category_id": 1, "bbox": [158.0, 0.0, 3.0, 1.0], "area": 3, "segmentation": {"size": [512, 512], "counts": "PP_21o?000P`_5"}, "image_id": 83, "id": 1265}, {"iscrowd": 0, "category_id": 1, "bbox": [176.0, 0.0, 61.0, 43.0], "area": 1523, "segmentation": {"size": [512, 512], "counts": "aPh22m?1N3N2N2N2N2N2M3N1O2N2N2N2N2N2O1O001O1O1O1O1O1O001O1O1O1OO1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O11O1OO1N2O1O1O1N2O2N2N1O2M3N2N2Nk_Y4"}, "image_id": 83, "id": 1266}, {"iscrowd": 0, "category_id": 1, "bbox": [377.0, 0.0, 37.0, 28.0], "area": 587, "segmentation": {"size": [512, 512], "counts": "]`l51n?2N1O2M3N2N2N1O2M3N2N2O001O1O1O1O1O00O1N2O1O1O1N2O1O1O1N2O1O1K_@Jb?46O1O1O1NRPa1"}, "image_id": 83, "id": 1267}, {"iscrowd": 0, "category_id": 1, "bbox": [432.0, 0.0, 59.0, 67.0], "area": 1988, "segmentation": {"size": [512, 512], "counts": "dQh61n?2N2N2XOKbA7\\>KaA7]>KbA7\\>KYAJ2=c>KYAI3>b>1\\A1b>1[A2`>j0N1O2N2N2M3N1O01O1OO10O100000001O1GiAhNX>W1jAgNW>X1jAfNX>Y1jAeNW>Z19M3N2N1O000O101O1O1O1N2O1O1O2N2M3N2N2N1O2N2M3N2N2N1Og_:"}, "image_id": 83, "id": 1268}, {"iscrowd": 0, "category_id": 1, "bbox": [499.0, 0.0, 3.0, 2.0], "area": 4, "segmentation": {"size": [512, 512], "counts": "P`i71o?1OOQP5"}, "image_id": 83, "id": 1269}, {"iscrowd": 0, "category_id": 1, "bbox": [233.0, 8.0, 59.0, 80.0], "area": 2539, "segmentation": {"size": [512, 512], "counts": "Ubd31g>1eAO;3m=1fAN;3m=1eAO<2m=0fA0;2m=0fA0;1n=1eAO;3m=>QBDm=>QBDm=>QBDm=>QBDm=>PBEm==RBDm=Y1N2N2N2M3N11000000N2N1O2M3N2N2N2N1N3N2N2N2N2M2fNaAR1a>lNaAR1a>lN`AS1f>00O102N1O2N2M3N2N2N1O2M3N2N2N2N1N3N2N2Ngn]3"}, "image_id": 83, "id": 1270}, {"iscrowd": 0, "category_id": 1, "bbox": [113.0, 27.0, 76.0, 69.0], "area": 2657, "segmentation": {"size": [512, 512], "counts": "Wbh12m?2M3N1O2N2N2M3N2N1O2N2M3N2N2N1O0O100000000O1UAoNh>U1O2N2M3O100000O01000000N2M3N1O2N2N2M100000O10O100000O2O2N2N2N2JcAeN`>X15O2N1O0O2O2N2N2N2M2O2N2N2N2]Oe@=a?N2N2N2N1N3Na^Q5"}, "image_id": 83, "id": 1271}, {"iscrowd": 0, "category_id": 1, "bbox": [476.0, 31.0, 36.0, 80.0], "area": 1571, "segmentation": {"size": [512, 512], "counts": "PS^72l?2O2N1YOKbA7\\>KaA8]>JVAK6JVAL50[A1c>2[A0c>2ZA1d>g0N2M3N2N3M2N1N100000O010000000O0100000JmA_NS>b1oA\\NQ>d15000O0SO"}, "image_id": 83, "id": 1272}, {"iscrowd": 0, "category_id": 1, "bbox": [281.0, 49.0, 69.0, 81.0], "area": 2837, "segmentation": {"size": [512, 512], "counts": 
"ac\\41n?1O2AMm@6Q?Lm@6h>HWA4O6h>HWA4O5i>IUA504i>HVA6O4h>h0N2FhNiAY1V>iNhAY1V>iNhAY1U>;N2N1O2N2M3N2N1O01N3N2N1O0O1000000O10O1000O101O2N2N2M3N2N2N1N3N2N2N2M3lNWAP1m>00O1000O01M3N2N2N2M2O2N2N2M3N2N2M2O2Nam`2"}, "image_id": 83, "id": 1273}, {"iscrowd": 0, "category_id": 1, "bbox": [162.0, 73.0, 81.0, 63.0], "area": 2611, "segmentation": {"size": [512, 512], "counts": "aSa22m?1O2N2N2N2N2M2O2N2N2N2N1O2GXOWAj0g>WOWAl0g>VOWAl0g>600O1000O1000001N3OWAnNe>R1YAPOf>V10O1000000000000O010000O1N2N2M2O1O0000O10O100000O10O100001O2M3N2N2N1O2N2M3N2N2N1O2M3N2N2N2N2N1N3N2N2N2N2NT]V4"}, "image_id": 83, "id": 1274}, {"iscrowd": 0, "category_id": 1, "bbox": [335.0, 93.0, 68.0, 81.0], "area": 2863, "segmentation": {"size": [512, 512], "counts": "ldW52`?Ol@3R?Nl@5Q?Nm@4i>IUA504i>IUA504h>JVA4O4j>g0N2EjNiAX1U>iNjAY1T>iNiAZ1U>hNiAY1U>;N2N2N2M3N2N1O0002N2M3N1O1OO10O1000O1000O10002N2N1N3N2N2N2N1N3N2N2N2lNXAm0i>ROYAm0h>QOYAo0m>000000N1O2N2N2M3N2N2N1O2M3N2N2N2M2OU\\f1"}, "image_id": 83, "id": 1275}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 99.0, 28.0, 39.0], "area": 593, "segmentation": {"size": [512, 512], "counts": "S3S1m>O2M3N2N2O1000O0100000000000O0100N2ZOh@a0]?N2N2N2M2O2N2N2N2NTla7"}, "image_id": 83, "id": 1276}, {"iscrowd": 0, "category_id": 1, "bbox": [221.0, 129.0, 77.0, 60.0], "area": 2206, "segmentation": {"size": [512, 512], "counts": "Se^32m?2N2N2N1N3N2N2N1N3N2N2N1O2M3N1O000O101O2N2N1O2000000O10O10000000O010000000O10O1N2M010000000O0100000O010000000O0100000O2O2N2N2N1N3N2N2N2M2O2N2N2N2M3N1O2NckZ3"}, "image_id": 83, "id": 1277}, {"iscrowd": 0, "category_id": 1, "bbox": [385.0, 144.0, 69.0, 64.0], "area": 2334, "segmentation": {"size": [512, 512], "counts": "PfP62m?2]ONUA4i>NUA4i>NUA4i>NUA3j>NUA4i>NTA5i>NUA4i>NUA4i>c0N2N1O2M3N2N2N1O0000O0102N1O2N2O00O2N2N2N2M3N2N1O2M3N2N2N2N0O0100000O100000O01000001O2O010O1N1N3N2N1N3N2N2N2M3N2N2M3NTkl0"}, "image_id": 83, "id": 1278}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 148.0, 6.0, 11.0], "area": 38, "segmentation": {"size": [512, 512], "counts": "d4;e?O2N2M3N2NYkl7"}, "image_id": 83, "id": 1279}, {"iscrowd": 0, "category_id": 1, "bbox": [49.0, 155.0, 56.0, 58.0], "area": 1689, "segmentation": {"size": [512, 512], "counts": "Xeh02m?2N2N2N2N2M3j@Bg>?WADg>>WACh>?VACh>?VACh>?UADi>j001000000N1N3O100000O10O1000O100N2O010O10000000O010000000O01N2N2M3N1ZOQA:Q?DQA:Q?CQAT1[AnNd>X100000O100000O1000O10000000O1000OO000000000O01000000O10O100000O10002N2N2M3N1O2N2N2M3N2N2Ie@F]?8d@G]?87N2M3N2NXjb2"}, "image_id": 83, "id": 1281}, {"iscrowd": 0, "category_id": 1, "bbox": [425.0, 174.0, 72.0, 75.0], "area": 2569, "segmentation": {"size": [512, 512], "counts": "Sgd61n?2N1O2M3N2N2N2N1HAm@b0Q?@m@b0P?An@a0P?9N1O2N2M3N2N2N1O2M3N2N2N2N1N3N00000O10O100000O10O1000001O2M2O2N2N2N2M3N1O2N2N2mNWAk0l>SOUAm0k>ROWAn0n>000000N1O2M3N2N2N2M2O2N2N2N2M3N1O2NeY7"}, "image_id": 83, "id": 1282}, {"iscrowd": 0, "category_id": 1, "bbox": [17.0, 190.0, 58.0, 57.0], "area": 1709, "segmentation": {"size": [512, 512], "counts": "lf81n?2N2ALo@6o>Ko@8n>KPA6o>Lo@6o>Lo@6o>Lo@6o>?N2N2M2O2N2N20000000N1O2N200O100000O1N2N20O1000O10000000N2N2N1O2mNUAn0P?M3N20O10N2N2N2N2N2N1N3N2N2N2N2NPYj6"}, "image_id": 83, "id": 1283}, {"iscrowd": 0, "category_id": 1, "bbox": [319.0, 203.0, 76.0, 67.0], "area": 2423, "segmentation": {"size": [512, 512], "counts": "ago41n?2N1O2N2N1N3N2N1O2N2M3N2N2N2N2N2M3N2N1O000002000O10000000O1000O100N2N2M3N1O2N2N2N0O010000000O0100000000O3N2N2IaAjN`>T1cAjN_>T18N2N2N2N1N3N2N2N2N2M2O2N2N2N2N2M3N1OSYj1"}, "image_id": 83, "id": 1284}, {"iscrowd": 0, "category_id": 1, "bbox": [75.0, 213.0, 65.0, 69.0], 
"area": 2258, "segmentation": {"size": [512, 512], "counts": "egU12m?2N2N2N2N2N2N2M2O2N2N2N2N2N2N2TAQOe>P1YAROg>m0XAUOh>Q11000O1N1O2N1OO1002M3N2N1O2N2N2N20O100000O0O2N2N1OSOPBKP>5RBIm=8UBEl=;UBDk=;XBCh==ZBAg=>[B@g=>[B@g=>ZBAh=<[BAh==ZBAg=>[B@g=>[B@g=>ZBAh=<[BBg=P1WAROi>R110N2N2N2N1N3N2N2N2M2O2N2N2M3N1O2N1O02N1O2N2M3N2N1O2N2N2N2N1N3N2NeH"}, "image_id": 83, "id": 1286}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 227.0, 46.0, 56.0], "area": 1399, "segmentation": {"size": [512, 512], "counts": "S7X1i>000O1N1O2O10O1N2M3N2N2N1ROSAh0S?10O1000o@VOk>j0SAWOn>i0PAYOP?l0O1N2N2N20O100000O01N2N2N2N2M2O2N2N2N2N2Ic@G`?75O2N2N2NngX7"}, "image_id": 83, "id": 1287}, {"iscrowd": 0, "category_id": 1, "bbox": [373.0, 245.0, 74.0, 71.0], "area": 2521, "segmentation": {"size": [512, 512], "counts": "Sij52m?2N2N1N3N2N2N2N2M2O2N2N2N2N0O101O2N2N1N3N2N2N2N2N1N3N2N2000000O010N1O000O1000O100000O1000O100IjAeNV>[1lAcNV>[1lAcNU>\\19O1O1N2N2N2N1N3N2N2N2N2N1N3N2N2N2N2M3N1O2N2N2NgWP1"}, "image_id": 83, "id": 1288}, {"iscrowd": 0, "category_id": 1, "bbox": [44.0, 254.0, 62.0, 62.0], "area": 1865, "segmentation": {"size": [512, 512], "counts": "lXf01n?1O2N2N2M3N2N1O2N2M3N2N1O2N2M1000001O2M3N2N1O2N2M300O10O10000000O10O100gN]AT1c>iN`AW1e>000O01000N2N2O0100N2M2O2N2N2M3N2Cm@JT?5n@IT?5n@IT?4n@JU?4m@JU?4=N1NVgZ6"}, "image_id": 83, "id": 1289}, {"iscrowd": 0, "category_id": 1, "bbox": [418.0, 276.0, 55.0, 62.0], "area": 1938, "segmentation": {"size": [512, 512], "counts": "kYa63l?2N2N2N1N3N2N2N2N2N1N3N2N2N2N1N3N2N2N00O10O101O2N1O2M2O00001O2N2M3O1000O10000000O10M3N2N2N2N1SO_A:c>D_A9d>E^AOKNi>1]A0LMi>1]A0KNj>O^A0KOi>O^A0KNi>1^AOhec0"}, "image_id": 83, "id": 1290}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 286.0, 15.0, 28.0], "area": 235, "segmentation": {"size": [512, 512], "counts": "n8j0W?0000N2N1O2M3N2N2N2N2N1N3N2NgVh7"}, "image_id": 83, "id": 1291}, {"iscrowd": 0, "category_id": 1, "bbox": [5.0, 302.0, 61.0, 62.0], "area": 1939, "segmentation": {"size": [512, 512], "counts": "Vj21n?2N2N2N1N3N2N2N2N2N2M2O2N2N2N200000O10O1M3N2N2N1O2N2M3N2N2000ObA`N]>b1000O10O1000O1000000O010NO12N2M3N2]OZAIg>6ZAIh>5ZAIh>5ZAHi>6YAHi>5ZAIh>5ZAIh>5ZAHh>7b0N2M3Nken6"}, "image_id": 83, "id": 1292}, {"iscrowd": 0, "category_id": 1, "bbox": [175.0, 309.0, 79.0, 61.0], "area": 2303, "segmentation": {"size": [512, 512], "counts": "bjg21m?3N2M2N3N2N2N2N1O2N20000O10O100000O1000O10O1M3N2N1O2M3N2N2N2M2O2N2N2M3N2N1O2M1002M3N2N2N1O2N0O101O2N2N1O2M3NO1000000O010002N2N1O2M3N2N1O2N2M3N2N1O2N2N2M3N1OoeP4"}, "image_id": 83, "id": 1293}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 343.0, 40.0, 53.0], "area": 1205, "segmentation": {"size": [512, 512], "counts": "l:Z1d>3N2N1O20000000O001N2N2M3N2N2N2M2O2N2N2M300O1000O1000O100O1N2M3N2N1O2M3N2N2N2M\\d[7"}, "image_id": 83, "id": 1294}, {"iscrowd": 0, "category_id": 1, "bbox": [221.0, 347.0, 79.0, 62.0], "area": 2318, "segmentation": {"size": [512, 512], "counts": "ik^31n?1O2M3N2N2N2N1O2M3N2O1O10O10000000O0100000N2N2N1N3N2N2N2N2M2O2N2N2N2M3N1O2N2N1O01N3N2N2N1O2M10001O2N2M3N1O00000O10O100000O3N2N1O2N2M3N2N2N2N1N3N2N2N2N2M2O2NhdY3"}, "image_id": 83, "id": 1295}, {"iscrowd": 0, "category_id": 1, "bbox": [133.0, 363.0, 74.0, 71.0], "area": 2433, "segmentation": {"size": [512, 512], "counts": "olR21n?2N2M2O2N2M3N1O2H@m@b0Q?@l@c0Q?7O2N2M3FnNcAS1\\>oNbAS1\\>oNaAT1\\>:N1O2M3N2N1N3N11N1O2M3N2N1O2N2M3N1O2N2N2N1N3N2N000000O01000O10000O0100000O12N1N3N2N2N1O2M3N2N1O2N2N2M2O2N2N2N1OYTh4"}, "image_id": 83, "id": 1296}, {"iscrowd": 0, "category_id": 1, "bbox": [284.0, 371.0, 62.0, 70.0], "area": 2175, "segmentation": {"size": [512, 
512], "counts": "X]^41n?2N2N1ELh@7V?Kh@7V?Kh@7m>DWA8J6m>DWAg0h>[OVAg0h>[OVAg0g>;N2N2N1N3N2N2N2N2M2O00O10O100000000O11N3N2N1O2N2N2N2N2M2O2QOYAb0i>[OYAd0i>ZOXAe0j>YOVAg0k>XOTAj0Q?100O10O1N2M3N2N1O2N2M3N2N1O2N2M3Nacb2"}, "image_id": 83, "id": 1297}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 405.0, 7.0, 14.0], "area": 53, "segmentation": {"size": [512, 512], "counts": "e<>c?N2M3N2N1O2NWSl7"}, "image_id": 83, "id": 1298}, {"iscrowd": 0, "category_id": 1, "bbox": [177.0, 408.0, 67.0, 66.0], "area": 2212, "segmentation": {"size": [512, 512], "counts": "jmh21n?2M2O2N2N2N2M3N1O2N2N2N2n@XOh>k0VAVOi>k0VAWOh>k0VAWOh>R1N2N2M2O2N2N2N2N2M3N10100000O0N3N2N2N2N2N1N10000000OIaAoN`>Q1bAmN^>S1dAkN\\>U17O101O2N2N2N2M2O2N2N2N2N2M3N1O2N2N2M3N2N1OjbU4"}, "image_id": 83, "id": 1299}, {"iscrowd": 0, "category_id": 1, "bbox": [328.0, 411.0, 60.0, 71.0], "area": 2234, "segmentation": {"size": [512, 512], "counts": "_^T52m?1O2N2N2M3N2_OD[A>c>D[A=d>EYA>e>DYA>d>D[A>Z>ZOhA:L>Z>ZOhA:L>Z>YOiAW1U>lNiAV1U>UOVAi0R?N1O2N2M3N2N2N1N3N2N2N2N1N_bm1"}, "image_id": 83, "id": 1300}, {"iscrowd": 0, "category_id": 1, "bbox": [218.0, 444.0, 69.0, 67.0], "area": 2359, "segmentation": {"size": [512, 512], "counts": "X_]32m?2M3N1O2N2M3N2N1N3N2N2N2M2O2N2N2M2O2N000O10000O3O01000O1O1N1O2M3N2N2N1N3N2N2NO01001O1N3N2N2N2M2O2N2FaAoNb>o0`AoNa>P1aAnNa>P19M3N2N1N3N2N2N2M2O2N2N2M3N1O2NaQ`3"}, "image_id": 83, "id": 1301}, {"iscrowd": 0, "category_id": 1, "bbox": [383.0, 467.0, 71.0, 45.0], "area": 1842, "segmentation": {"size": [512, 512], "counts": "ooo51n?1O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2D_OXAb0g>@WAa0g>BWA?h>BWA?g>>O1O1O1O1O1O1O1001O1O1O1O0000O11O1O1O1O1O001O1O1O1O1O00O1O1O0002M30000N1N3N2N2N2N2N1N3N2N2N2N2M3N1Of`l0"}, "image_id": 83, "id": 1302}, {"iscrowd": 0, "category_id": 1, "bbox": [40.0, 478.0, 61.0, 34.0], "area": 1197, "segmentation": {"size": [512, 512], "counts": "n_d02m?1ENg@3X?Nf@4Y?Ne@3Y?0e@1Z?1d@0[?;O1N2O1O1O1O11O1O001O1O1O1O001O1O1O1O001O1O00O1N2O1O1O1N2O1O1O1N2O1O1O1N2O11M3N1O2N2N2N2M2O2N2N2N2M2O2N2N2NfP]6"}, "image_id": 83, "id": 1303}, {"iscrowd": 0, "category_id": 1, "bbox": [275.0, 482.0, 59.0, 30.0], "area": 938, "segmentation": {"size": [512, 512], "counts": "ooY41n?1O1O1O11O001OO1O1O1N2O1O1O1O1O1O1O1O1N2O1O1O1O1O1O1O1N2O1O1O100001O1O1O1O001O1O1O1O001O1O1O1O1O001O1O1N2M2O2N2N2N2NZ`h2"}, "image_id": 83, "id": 1304}, {"iscrowd": 0, "category_id": 1, "bbox": [461.0, 498.0, 28.0, 14.0], "area": 231, "segmentation": {"size": [512, 512], "counts": "ooV71n?1N2O1O1O1N2O1O1O1O1O1001O001O1O001O001O1O1O1O1O1O1O1NSP;"}, "image_id": 83, "id": 1305}, {"iscrowd": 0, "category_id": 1, "bbox": [100.0, 499.0, 21.0, 13.0], "area": 152, "segmentation": {"size": [512, 512], "counts": "o_b11n?1N2O1O1O1O1N2O1O1O11O1O1O1O001O1O1M3N1OVPS6"}, "image_id": 83, "id": 1306}, {"iscrowd": 0, "category_id": 1, "bbox": [25.0, 0.0, 73.0, 29.0], "area": 1199, "segmentation": {"size": [512, 512], "counts": "S`<1m?3N101O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00O1M3N2M3N2M3N2M3N2M3NRP_6"}, "image_id": 84, "id": 1307}, {"iscrowd": 0, "category_id": 1, "bbox": [249.0, 0.0, 108.0, 41.0], "area": 1963, "segmentation": {"size": [512, 512], "counts": "P`l31o?00001O001O001O00001O001O001O001OO1N21O001O00001O001O001O00001O001O001O001O00001O001O001O00001O001O001O00001O003M1O00001O001O001O00001O001O010O010O00010O010O010O01O01O010O010O010L31O01O010O01O0N3M2M4M2N2N3L3N3M2M4Md_]2"}, "image_id": 84, "id": 1308}, {"iscrowd": 0, "category_id": 1, "bbox": [457.0, 0.0, 55.0, 44.0], "area": 1618, 
"segmentation": {"size": [512, 512], "counts": "a`T74j?2N3L3N2N3L3N3N1010O0010O010O0N2O2O001O001O00001O001O001O00001O001O001O00001O001O00001O001O001OnN\\Ah0d>VO^Aj0c>SO`Am0i>010O001C"}, "image_id": 84, "id": 1309}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 5.0, 5.0, 18.0], "area": 57, "segmentation": {"size": [512, 512], "counts": "5b0_?M2M3J]@Mf?O]@O^om7"}, "image_id": 84, "id": 1310}, {"iscrowd": 0, "category_id": 1, "bbox": [90.0, 13.0, 21.0, 24.0], "area": 296, "segmentation": {"size": [512, 512], "counts": "nP]12l?2N3L3N3M2N3L3O110O010O0001M2M4M2N3M2M4M__X6"}, "image_id": 84, "id": 1311}, {"iscrowd": 0, "category_id": 1, "bbox": [439.0, 18.0, 17.0, 15.0], "area": 166, "segmentation": {"size": [512, 512], "counts": "i`k63k?3L3N3O00010O010O00010O01O0N2M4L\\ok0"}, "image_id": 84, "id": 1312}, {"iscrowd": 0, "category_id": 1, "bbox": [210.0, 23.0, 47.0, 44.0], "area": 1228, "segmentation": {"size": [512, 512], "counts": "aQY32l?3L3N3L3N2M4M2N3L3N3M21O01O010O01O01O010O01O010O01O010O01O01O010O01O01O010O01O0CQAGR?6PAGS?6QAGQ?7QAFS?73k?2M4M2M4M2M4M2M3N3PAUOh>l0VAVOk>P1O00010O010O00010O010O010O00010O010O00010O010O00010O010O00N3L3N3L3Aj@2X?Kk@2Y?Kj@2c?M]ni6"}, "image_id": 84, "id": 1314}, {"iscrowd": 0, "category_id": 1, "bbox": [404.0, 29.0, 98.0, 61.0], "area": 2717, "segmentation": {"size": [512, 512], "counts": "bQZ61m?3L3N3M2M3N3O010O010O2OO00010O010OM4M2N3L3N210O010O0010O0010O010O0010O010O0010O010O0010O0010O010O0010O010O0010O010O0010O010O0010O010O0010O0010O010O0010O010O0010O010O0010O001L3N3M2N2M4M2N3M2M4M2NTn4"}, "image_id": 84, "id": 1315}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 30.0, 49.0, 84.0], "area": 1870, "segmentation": {"size": [512, 512], "counts": "n0h01C67l=c0QB_OP>4mAC0=R>U11ROnALR>MmA[O4i0n=LWB3j=JXB7g=F\\B:e=C^B0010O0010O0010O0010O0010O010O0010O0010O0010O0001L3[OQA7R?FPA8R?FPA0L0X?LPA0K2elW7"}, "image_id": 84, "id": 1316}, {"iscrowd": 0, "category_id": 1, "bbox": [268.0, 30.0, 35.0, 33.0], "area": 679, "segmentation": {"size": [512, 512], "counts": "bQV43j?3N3M2M3N3M2M4M2O20O0010O0010O010O0010O0010O010O001N3K3N2N3L3N3M2M3Nh^X3"}, "image_id": 84, "id": 1317}, {"iscrowd": 0, "category_id": 1, "bbox": [387.0, 56.0, 30.0, 31.0], "area": 544, "segmentation": {"size": [512, 512], "counts": "laQ62l?2N3d@Lj>4TANl>3PA0Q?Om@4R?Ll@6U?;h@ZOT?i01O000N30O010O01O01O010O01O0O2L3N2N3L3N3M2MP^_1"}, "image_id": 84, "id": 1318}, {"iscrowd": 0, "category_id": 1, "bbox": [70.0, 63.0, 21.0, 25.0], "area": 304, "segmentation": {"size": [512, 512], "counts": "bRS11l?3N3M2M4M2M3N3M2010O0010O01L3N2M4M2M4M2Nn]b6"}, "image_id": 84, "id": 1319}, {"iscrowd": 0, "category_id": 1, "bbox": [186.0, 67.0, 66.0, 50.0], "area": 1773, "segmentation": {"size": [512, 512], "counts": "jRm22l?2N3L3N3M2M3O2O010O010O00010O010O0M4M2N3M2M310O0010O010O0010O0010O010O0010O010O0010O0010O010O0010O0010O010O0010O010O0N3[OSA3o>KSA3o>KTA1P?KSA3o>KSA3_?MmlQ4"}, "image_id": 84, "id": 1320}, {"iscrowd": 0, "category_id": 1, "bbox": [252.0, 78.0, 41.0, 34.0], "area": 826, "segmentation": {"size": [512, 512], "counts": "nRn33k?2M4L3N3L3N2O2O0010O0013L00010O00010O010O00010O010O00010O00010O01O0M3M4M2M4M2M3MY]]3"}, "image_id": 84, "id": 1321}, {"iscrowd": 0, "category_id": 1, "bbox": [402.0, 89.0, 68.0, 44.0], "area": 1567, "segmentation": {"size": [512, 512], "counts": "VSY61m?2N3M2N3L3O2O0d@AX??e@DZ?a01O010O010OM4M20010O0010O010O010O00010O010O010O00m@XOo>m0O00010O010OO2M2O110O0010M2O20O01O01O010O010O010M2N2M4M2N3L3N3M2M3Ngld0"}, "image_id": 84, "id": 1322}, {"iscrowd": 0, "category_id": 1, "bbox": [365.0, 101.0, 35.0, 
32.0], "area": 644, "segmentation": {"size": [512, 512], "counts": "fcf53k?3M2N3M2M4M2N3M21O010O010O010O01O010O01O010O010O010O0N2N3M2N3M2M4M2Nalg1"}, "image_id": 84, "id": 1323}, {"iscrowd": 0, "category_id": 1, "bbox": [157.0, 102.0, 24.0, 25.0], "area": 361, "segmentation": {"size": [512, 512], "counts": "dc^24j?2N3M2M3N3M2O20O010O0010O0010O010N1N3L3N2N3L3Ne\\U5"}, "image_id": 84, "id": 1324}, {"iscrowd": 0, "category_id": 1, "bbox": [293.0, 105.0, 41.0, 46.0], "area": 1179, "segmentation": {"size": [512, 512], "counts": "[db41a?0j@2T?0i@4S?0j@2T?0j@3R??N3L3N2M4O010O01O01O010O01O010O01O01O010O01O01L3N3L3N2M4M2M40O0010Cc@2`?Kd@2f?MVlh2"}, "image_id": 84, "id": 1325}, {"iscrowd": 0, "category_id": 1, "bbox": [176.0, 111.0, 43.0, 50.0], "area": 1305, "segmentation": {"size": [512, 512], "counts": "WTh24j?2N3M2M4M2N3O01O010OO2M2m@\\Oh>g0UA[Oi>o0M4M2O20O010O010O00010O010O010O0010O00O2M2M4M2N3^OQAOQ?OQANS?OPANR?0PANR?OQAOWkb4"}, "image_id": 84, "id": 1326}, {"iscrowd": 0, "category_id": 1, "bbox": [390.0, 133.0, 61.0, 47.0], "area": 1760, "segmentation": {"size": [512, 512], "counts": "oTS62l?2N3M2M4M2N2M4M2N3M2M4N11O01O010O010O01O010O01O010O01O010O01N1M4M2N201O010O010O00010O010O01M2M3N3M2M4M2N3M2M4M2N2M4M2N3Mb[n0"}, "image_id": 84, "id": 1327}, {"iscrowd": 0, "category_id": 1, "bbox": [325.0, 142.0, 33.0, 33.0], "area": 642, "segmentation": {"size": [512, 512], "counts": "SeR52l?3L3N3M2N3M2M4M2N3N11O01O010O010O010O010O01O0N2N3M2N3L3N3M2N3M2NZk\\2"}, "image_id": 84, "id": 1328}, {"iscrowd": 0, "category_id": 1, "bbox": [221.0, 152.0, 82.0, 52.0], "area": 2771, "segmentation": {"size": [512, 512], "counts": "Se^32k01S>2iA1T>2jA1S>2jA0T>2]AF6;^>4`AO_>2^A0b>0[A4e>c0010O0010O0010O01O0N201O010OO2M2M4M2N2M4M2N3M2O20O0010ON3L3N3L3N210O010O0010O0010O010O0O2O0010O00010O010O010O00010O010O01O010O01N1N3M2M3N3M2@j@3Z?Ji@3Y?Ki@3c?MbZX3"}, "image_id": 84, "id": 1329}, {"iscrowd": 0, "category_id": 1, "bbox": [151.0, 161.0, 67.0, 51.0], "area": 1790, "segmentation": {"size": [512, 512], "counts": "oe[22l?3L3N2M4M2N3N1010O0010O010O00010L3N3M2M3N3L3N3M201O01O010O01O01O010O01O01N1N3O00010O0010O01YOWA5i>H[A8d>F^A:c>B`A?_>_OdA`0]>]OeAc0m>0O0010O0010O01O0O1M4M2N3L3N2MPjb4"}, "image_id": 84, "id": 1330}, {"iscrowd": 0, "category_id": 1, "bbox": [367.0, 177.0, 53.0, 54.0], "area": 1692, "segmentation": {"size": [512, 512], "counts": "efg51m?2ZON[A6b>L\\A6a>N\\A4b>N\\A5a>N[A5c>M[A6c>K[A7f>b0O01O010O01O010N100010O010O010O00010O010O010O010O00010O010O010O0lN\\Ak0e>SO]Ak0n>L3N2N3M2M4M2N3L3N3Mgi]1"}, "image_id": 84, "id": 1331}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 195.0, 9.0, 34.0], "area": 212, "segmentation": {"size": [512, 512], "counts": "S6Q1P?O01ON3L3Am@OV?Mn@0T?Nn@OV?Nl@0ohk7"}, "image_id": 84, "id": 1332}, {"iscrowd": 0, "category_id": 1, "bbox": [302.0, 197.0, 39.0, 42.0], "area": 965, "segmentation": {"size": [512, 512], "counts": "RWg43k?2N3L3N3M2M3N3L3N3M2M3N3N1010O010O00010O010O010O000N3L3N3M2M3N3L3N3M2M4M2N2MdYe2"}, "image_id": 84, "id": 1333}, {"iscrowd": 0, "category_id": 1, "bbox": [130.0, 203.0, 59.0, 58.0], "area": 1723, "segmentation": {"size": [512, 512], "counts": "_WQ21l?3N2N3L3N3L3N2N3L3N3L3N3M2M3N3O010O00010O010O0010O0010O010O00010O010O01UO]A8b>F`A:`>DcA;^>AeA`0Z>^OiAa0X>[OkAf0g>010O01O010O010O1O010L3N2M4M2N3L3NaXQ5"}, "image_id": 84, "id": 1334}, {"iscrowd": 0, "category_id": 1, "bbox": [188.0, 220.0, 36.0, 31.0], "area": 660, "segmentation": {"size": [512, 512], "counts": "\\Wn24j?2M4M2N3L3N2N30O01O01O010O01O01O010O01O01O010O01O010O01O0N3L3N2M4M2N3Lkh_4"}, "image_id": 84, "id": 1335}, 
{"iscrowd": 0, "category_id": 1, "bbox": [497.0, 222.0, 15.0, 34.0], "area": 284, "segmentation": {"size": [512, 512], "counts": "igh71m?2N2M4M2N3L3N3M2M3N3L3O20O01PI"}, "image_id": 84, "id": 1336}, {"iscrowd": 0, "category_id": 1, "bbox": [335.0, 226.0, 56.0, 56.0], "area": 1599, "segmentation": {"size": [512, 512], "counts": "ThW52k?4M2N2M4M2N3M2M4M2N3M2M3N3M2N3O010O0010O0010O010O0010nN]Ae0c>YO`Ag0`>UOcAk0]>SOfAl0g>1O010O010O010O01O01O010O010OO2M2M4M2N2N3L3N3M2N3LSXl1"}, "image_id": 84, "id": 1337}, {"iscrowd": 0, "category_id": 1, "bbox": [101.0, 246.0, 49.0, 50.0], "area": 1352, "segmentation": {"size": [512, 512], "counts": "fhb12l?2M4M2M4M2N2M4M2N3L3N2M4M2O2O010O00010O010O010O00010O010O010O00SOZA`0g>\\O]Ad0b>ZO`Af0a>VObAk0j>O0010OO2M2N2M4M2N3L3N3M2Mbgd5"}, "image_id": 84, "id": 1338}, {"iscrowd": 0, "category_id": 1, "bbox": [295.0, 256.0, 29.0, 29.0], "area": 494, "segmentation": {"size": [512, 512], "counts": "dhc41l?3N3M2N2M4M2N3M2N3O010O00010O010O010O01N1N2M4M2N3M2M4M2Njgm2"}, "image_id": 84, "id": 1339}, {"iscrowd": 0, "category_id": 1, "bbox": [478.0, 265.0, 34.0, 46.0], "area": 1021, "segmentation": {"size": [512, 512], "counts": "WY_71m?3M2M4M2N3L3N2N3L3N3M2M4M200010O01O010O01O010O01O010O01O010O01O010^G"}, "image_id": 84, "id": 1340}, {"iscrowd": 0, "category_id": 1, "bbox": [145.0, 274.0, 50.0, 43.0], "area": 1224, "segmentation": {"size": [512, 512], "counts": "XiX22l?2N3M2N3IFc@=[?6M2O2O0O2O0l@VOQ?m001O0O20O010N1O20O010O010O0010O0010O010O010O010O01O01O010O010O[OQA8Q?EQA9Q?DSA9[?M4M2N2NeVn4"}, "image_id": 84, "id": 1341}, {"iscrowd": 0, "category_id": 1, "bbox": [324.0, 285.0, 54.0, 54.0], "area": 1483, "segmentation": {"size": [512, 512], "counts": "PZR51m?2M3N3M2M4M2N3L3N2N3L3N3M2M4M200010O010O01O01O010O010oN\\Ad0d>ZO_Af0`>XObAh0_>TOeAk0h>10O01O010O010O01O01O010O01L3N3M2M3N3M2M4M2N3LYfR2"}, "image_id": 84, "id": 1342}, {"iscrowd": 0, "category_id": 1, "bbox": [205.0, 286.0, 25.0, 19.0], "area": 244, "segmentation": {"size": [512, 512], "counts": "UiV32k?4M2N3O0010O010O010O010O00010O010O010O010M2N3M2Nkf\\4"}, "image_id": 84, "id": 1343}, {"iscrowd": 0, "category_id": 1, "bbox": [285.0, 312.0, 30.0, 25.0], "area": 411, "segmentation": {"size": [512, 512], "counts": "Tj^42l?3L3N2N3M2O20O010O00010O010O010O00010O010O010O010M2N2M4M2NPVR3"}, "image_id": 84, "id": 1344}, {"iscrowd": 0, "category_id": 1, "bbox": [82.0, 315.0, 48.0, 51.0], "area": 1286, "segmentation": {"size": [512, 512], "counts": "Q[Y11l?3N3L3N2N3L310O010O00010O001L3N2N3L3N3M2M4M2M3N3M2010O010O001O0M3N3L3N3M2M4M2N2M4M2M4M2N2M4M2Nnen5"}, "image_id": 84, "id": 1345}, {"iscrowd": 0, "category_id": 1, "bbox": [465.0, 316.0, 47.0, 53.0], "area": 1405, "segmentation": {"size": [512, 512], "counts": "\\jX72m?2O2M2N2N2O2M2N2N3N1N2N3N1N2N2N3N1N2N2N2O0O1010O2O0O1O2O0O100O2O0O1O101N010O0010O010O0010O00_E"}, "image_id": 84, "id": 1346}, {"iscrowd": 0, "category_id": 1, "bbox": [124.0, 334.0, 19.0, 24.0], "area": 270, "segmentation": {"size": [512, 512], "counts": "oZn13j?3N3L3N2M4M2N30O00010O0N3L3N2M4M2M4M_Uh5"}, "image_id": 84, "id": 1347}, {"iscrowd": 0, "category_id": 1, "bbox": [137.0, 341.0, 90.0, 50.0], "area": 2255, "segmentation": {"size": [512, 512], "counts": "_kT21n?2M3N2M2O2M3N2M2O2M3N2O0010000O01M3N20O10O10O1000O10O100O0N3O1O1O00000000000L40003M0O100000O100000000000O100002N1J50000000N2O1O0O010O10O10O010O11N3N1O2O100O0O2M3N1O2M3N2M2O2N2M2OQU^4"}, "image_id": 84, "id": 1348}, {"iscrowd": 0, "category_id": 1, "bbox": [316.0, 350.0, 66.0, 45.0], "area": 1597, "segmentation": {"size": [512, 512], "counts": 
"b[n42l?3L3N3M2N3L3N2N3M2010O010O010O0010O0010O010O010O0010O0010O010O010O0010O0010O010O010O00N3M2N3M2N30O010O0O1N3L3N3M2N3L3N3M2N2M4M2N3MgdP2"}, "image_id": 84, "id": 1349}, {"iscrowd": 0, "category_id": 1, "bbox": [69.0, 372.0, 50.0, 52.0], "area": 1508, "segmentation": {"size": [512, 512], "counts": "ilR12l?2M4M2M3N3L3010O00O2L3N3L3N2M4M2M4M2N201O010O01O01O010O01O01O010O01N1N2M4M2M4M2M3N3L3N3L3N2M4M2M4MRTT6"}, "image_id": 84, "id": 1350}, {"iscrowd": 0, "category_id": 1, "bbox": [457.0, 383.0, 48.0, 60.0], "area": 1573, "segmentation": {"size": [512, 512], "counts": "^mT72l?3M2M3N3M2N3L3N3M2M3N3M2N3L3N3M2M4M2N2M4N110O010O01O000M4M2N3L3N3M2M3N3M2N3L3N3M2M3N3M2M4M2N3MgS3"}, "image_id": 84, "id": 1351}, {"iscrowd": 0, "category_id": 1, "bbox": [219.0, 393.0, 14.0, 13.0], "area": 111, "segmentation": {"size": [512, 512], "counts": "_l]33j?3N3N10010O01O01O010ON3L3NeS[4"}, "image_id": 84, "id": 1352}, {"iscrowd": 0, "category_id": 1, "bbox": [273.0, 395.0, 104.0, 66.0], "area": 3229, "segmentation": {"size": [512, 512], "counts": "TmX43k?2M3N3L3N3L3N3M2M3N3N1010O0010O010O00010O010O0010O0010O010O00010O010O0010O0010O0010O0010O010O0010O00N3M2M4M2O1010O010O00010O010O010O00010O010O0QO^A`0b>]OaAd0_>YOcAg0]>VOgAj0Y>SOiAm0f>0O00010O010O01O01O010O010O00010O01N1M3N3M2M4M2M4M2N2M`RS2"}, "image_id": 84, "id": 1353}, {"iscrowd": 0, "category_id": 1, "bbox": [156.0, 402.0, 44.0, 56.0], "area": 1519, "segmentation": {"size": [512, 512], "counts": "m]^21l?3N3ZOJ]A9a>I]A9`>K]A8`>J]A9`>J^A9_>J]A9`>J^A9_>e01O01O010O01O010O01O010O01O01O010O01O010O0M3N3M2M4M2M3Eo@ET?8n@FT?7PAET?8;N2M4MRck4"}, "image_id": 84, "id": 1354}, {"iscrowd": 0, "category_id": 1, "bbox": [105.0, 407.0, 39.0, 32.0], "area": 713, "segmentation": {"size": [512, 512], "counts": "Ymd12l?2M3N3L3M4M2M3010O00010O010O00010O010O00010O01O01O010O00010O010M2M3N3L3N2M4LPcg5"}, "image_id": 84, "id": 1355}, {"iscrowd": 0, "category_id": 1, "bbox": [230.0, 410.0, 24.0, 20.0], "area": 290, "segmentation": {"size": [512, 512], "counts": "V]c32l?2M3N3L3N210O0010O00010O010O00010O001O0M3M4M2MRcP4"}, "image_id": 84, "id": 1356}, {"iscrowd": 0, "category_id": 1, "bbox": [199.0, 424.0, 49.0, 52.0], "area": 1512, "segmentation": {"size": [512, 512], "counts": "imS34j?2M3N3L3O2O0RAA\\>>aAF^>;_AGa>9\\AJe>6XAMg>3VA0k>c0O00010O0O2L3N2M4N1010O00010O010O1O10O0001N1M4M2M3N3L3M4M2M3N3L3N3L3N2M4M^bS4"}, "image_id": 84, "id": 1357}, {"iscrowd": 0, "category_id": 1, "bbox": [59.0, 450.0, 84.0, 50.0], "area": 2804, "segmentation": {"size": [512, 512], "counts": "Xom03d?9I7000001O0M3H80000000010O00000000000000010O000000000000010O00000000000H8J7O000000000001O0001O000000000001O01O000000000001O0001O00TO`A8`>_OiAa0W>WOQBi0c>01O0001O000000000001O000K5G9H]Qh5"}, "image_id": 84, "id": 1358}, {"iscrowd": 0, "category_id": 1, "bbox": [150.0, 453.0, 28.0, 20.0], "area": 326, "segmentation": {"size": [512, 512], "counts": "]^[24i?3N3L30001O010O01O01O01O01O010O01O01O010O01O0O1M4M2M3NeaV5"}, "image_id": 84, "id": 1359}, {"iscrowd": 0, "category_id": 1, "bbox": [281.0, 460.0, 54.0, 47.0], "area": 1629, "segmentation": {"size": [512, 512], "counts": "jn\\43=NP?5l@NQ?5m@NP?5l@NQ?5m@0n>b0O101O010O000N3O0010N1O110O0010O0010O0010O00010O010O00010O010O00010O010O00N3M2M4M2M3N3L3N3L3N2M4MVQh2"}, "image_id": 84, "id": 1360}, {"iscrowd": 0, "category_id": 1, "bbox": [442.0, 462.0, 69.0, 50.0], "area": 2122, "segmentation": {"size": [512, 512], "counts": "[_m62k?4M2M3N3L3N3L3N2M4M2M4N101O01O010O01O01O010O010O01O01O010O01O01O010O010O01O01O010O01O01O010ON2N2N2M3001O001M2M3N3M2M4M2M3N3M2M4M2M4M2N2MY1"}, 
"image_id": 84, "id": 1361}, {"iscrowd": 0, "category_id": 1, "bbox": [246.0, 464.0, 24.0, 28.0], "area": 442, "segmentation": {"size": [512, 512], "counts": "Q_k34j?2GKd@9X?Ke@7Y?9O20O010O01O01O01O0010O0010O01M2N2Ec@0a?Ma@1a?Lc@0iPi3"}, "image_id": 84, "id": 1362}, {"iscrowd": 0, "category_id": 1, "bbox": [2.0, 477.0, 22.0, 25.0], "area": 335, "segmentation": {"size": [512, 512], "counts": "__12l?2M4L3N2M4M2M4O01O01O010O01O0N2M4M2M3N3L3NPac7"}, "image_id": 84, "id": 1363}, {"iscrowd": 0, "category_id": 1, "bbox": [192.0, 481.0, 35.0, 31.0], "area": 713, "segmentation": {"size": [512, 512], "counts": "n_P32l?2M3N2M3N2M3N2M3N2N2M3N2001O00001O001O00001O001O00001M2M4M2M3N3L3N3LiP^4"}, "image_id": 84, "id": 1364}, {"iscrowd": 0, "category_id": 1, "bbox": [34.0, 499.0, 36.0, 13.0], "area": 319, "segmentation": {"size": [512, 512], "counts": "n_a02k?3M3N2O1001O001O00001O000000M3N2001O00001O001O00001O001O00001O00001M2MY`l6"}, "image_id": 84, "id": 1365}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 5.0, 14.0, 41.0], "area": 302, "segmentation": {"size": [512, 512], "counts": "5Y1h>M2M3M4M2M4M2M3M4M2M3N3L3Mioh7"}, "image_id": 86, "id": 1366}, {"iscrowd": 0, "category_id": 1, "bbox": [3.0, 16.0, 75.0, 79.0], "area": 3041, "segmentation": {"size": [512, 512], "counts": "db13j?3N2M4M2M4F_OQAc0l>@QAc0m>_OQAd0k>:N3L3N2N3O0010O0010O0010O001M2N2M4M2M4O01O000M4M2M4M2M3N3L3N2M4M2M4M2M3N3OO1M4M20010O0N3M2M3N3L3N3L3N2M4M2M4L3N2M4M2M4M2M3N3L3N3L3NWoh6"}, "image_id": 86, "id": 1367}, {"iscrowd": 0, "category_id": 1, "bbox": [65.0, 34.0, 71.0, 98.0], "area": 3567, "segmentation": {"size": [512, 512], "counts": "gbP11l?3N3L3N3M2M3N3L3N3O0`AWOi=j0TBXOm=g0PB\\OP>d0nA_OR>a0kAAU>?hAEX>;eAG[>9bAK]>l01O010O01O01O01cA`NY>c101XO]NRCb1kh101O01O010O010O010O01QBWNe=>YBd02POf=LVB:4h0OVOj=OZBj0JYOl=J]Bm0E[On=F`B]1Q>0010O010O0010O0010O01M2M4M2N2M2OO10001N4M2N2N3L3KaAgNb>U16N3M2M3N3M2M4M2N3L3N2N3M2MT]l4"}, "image_id": 86, "id": 1370}, {"iscrowd": 0, "category_id": 1, "bbox": [257.0, 111.0, 94.0, 76.0], "area": 3845, "segmentation": {"size": [512, 512], "counts": "ldP43k?2M4M2N3L3N3L3N2O2O0QAVOi>j0SAZOl>m01O010O01O01O010O01L3N3N10010O010O00010O01M2N3O0000O2M2N3L3N2M4M2M2O2N2M4M2N3O01O010O01O010OmNWBOj=NYB1g=M[B4e=I]B7c=I^B7a=J^B6c=I^B6b=J^B7b=I^B6b=J]B8b=IZB:g=EVB?i=j03M2M4M2M3N3M2M4M2N2M4M2M4M2N3L3N2M4M2N3L3N2M4MQ\\`2"}, "image_id": 86, "id": 1371}, {"iscrowd": 0, "category_id": 1, "bbox": [345.0, 147.0, 73.0, 86.0], "area": 3024, "segmentation": {"size": [512, 512], "counts": "^e\\52l?2N3N1a@JR?9l@IQ?9m@JP?9m@IQ?e0N1N3RAPOi>U10O010O10O010O1jAkN`=T1^BnNb=S1[BPOe=o0YBSOg=n0VBUOj=j0UBXOj=i0SBYOn=f0PB]Oo=d0nA^OS>X1O010O10O10O010O010O1NO0010O0000001O000000KQB\\No=c1TBZNl=f17O2N3M2N3N1N3M2N3M3M2N3M2N3N1N3M2N3M2N3M2N3N1NTk^1"}, "image_id": 86, "id": 1372}, {"iscrowd": 0, "category_id": 1, "bbox": [407.0, 166.0, 72.0, 93.0], "area": 3422, "segmentation": {"size": [512, 512], "counts": "kf[61l?4M2M3N3L3N3L3N2M4O010O0^AVOn=j0oAZOQ>f0lA\\OT>d0iA_OW>b0fAAZ>>cAE]>;aAH_>l000010O0010O0010OZOcNiB]1T=fNmBZ1P=hNoBY1n3M2DkNjAX1T>kNiAX1T>jNiAY1T>M2N3N1N3N2M2O2M2O2M3N1N3N1Hh@F[?7g@H[?68N3M2O[Yl6"}, "image_id": 86, "id": 1374}, {"iscrowd": 0, "category_id": 1, "bbox": [494.0, 188.0, 18.0, 33.0], "area": 366, "segmentation": {"size": [512, 512], "counts": "]Vg71l?4L3N2M4M2M3O2O010h@^OP?b0n@@R?h00O010O01O010oI"}, "image_id": 86, "id": 1375}, {"iscrowd": 0, "category_id": 1, "bbox": [468.0, 213.0, 28.0, 32.0], "area": 546, "segmentation": {"size": [512, 512], "counts": "[WZ73j?3N2M4L3N3L3N3L3O1010O01O010O01O010N1N2N3L3N3M2M3N3M2MWi7"}, "image_id": 
86, "id": 1376}, {"iscrowd": 0, "category_id": 1, "bbox": [194.0, 231.0, 82.0, 57.0], "area": 2498, "segmentation": {"size": [512, 512], "counts": "nWQ31n?1N3M2N3N2M2N3N1N3M2N3N2M2O20O01000O010O010O01000O010O01RAROi>R110O10O0O2O010O10N1N3M3M2O2M2N3O01000O010O010O0100O0O2M2N3M3N1N3M2O2M2O20O1L30O1O2M3M2N3N1N3M2O2M3M2N3N1NYhe3"}, "image_id": 86, "id": 1377}, {"iscrowd": 0, "category_id": 1, "bbox": [372.0, 266.0, 70.0, 70.0], "area": 2652, "segmentation": {"size": [512, 512], "counts": "RYj53j?3N2M4M2M4M2M3N3L3O20OQASOk>R10O00010O010O00010O0010bAQOi=o0UBTOj=m0RBVOo=i0oAYOQ>g0lA]OS>d0iA_OX>U1O01O01O010O01O01O01O010O01O01O010ON3L3M1O0012M3N2EjAiNZ>S1iAkNY>R1jAkNZ>R1;M3N3L3N3L3N2M4M2M3M4M2MbgR1"}, "image_id": 86, "id": 1378}, {"iscrowd": 0, "category_id": 1, "bbox": [165.0, 288.0, 94.0, 65.0], "area": 3013, "segmentation": {"size": [512, 512], "counts": "`ib22m?2M3M2O2M2O2M3N1N3M201O10O10O10O10O010O10O10N1N3M3O0100O01000O010O01000O01000O010O10O10O01O0N3N2M2N3N1N3_AcN\\>a1O20O01000O010O10O010O10O10O01000O0100O010O1N1N3M2N3_OYAFi>8XAFj>9XADk>;VACl>=SAAo>?RA^OQ?b0n@]OT?`07N3M2O2M3M2O2MWVn3"}, "image_id": 86, "id": 1379}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 299.0, 4.0, 22.0], "area": 50, "segmentation": {"size": [512, 512], "counts": "[9f0[?Ai@0Z?Nh@0[?Mh@0mUn7"}, "image_id": 86, "id": 1380}, {"iscrowd": 0, "category_id": 1, "bbox": [141.0, 331.0, 92.0, 69.0], "area": 2635, "segmentation": {"size": [512, 512], "counts": "ojV23k?3N1N3N2M2O2M3M2O2M3N1N3N20O10O10O01000O01000O01000O010000O010O10O10O10O10ON3N2O010O10O10O0100QAUOh>j0VAXOj>i0TAYOl>n00O010O10O10O10O10O01000ON3N2M2O2M2O1N12M2O2M3N1N3N1N3M3O010O10OUOn@h0V?N2M2O2Ic@G`?66N3N1N3N`T[4"}, "image_id": 86, "id": 1381}, {"iscrowd": 0, "category_id": 1, "bbox": [341.0, 335.0, 70.0, 62.0], "area": 2617, "segmentation": {"size": [512, 512], "counts": "SkZ51m?3L3N3L3N3L3N3L301l@WO=2o=k0oAWOQ>i0lA[OT>e0iA]OW>c0fAAZ>?cAC]>P1010O010O00010O010O00010O010O00010N1O2O00010O010O00010O010O00010O010ON1O0O0100O3N2M4M2M4M2M3N3L3N3L3N2M4M2M4MXUb1"}, "image_id": 86, "id": 1382}, {"iscrowd": 0, "category_id": 1, "bbox": [443.0, 342.0, 59.0, 43.0], "area": 1533, "segmentation": {"size": [512, 512], "counts": "]km61l?3N2N3L3N3L3N3M20010O010O0010O0010O010N1M3N3M2O20O010O00010O010O010O00010O010O01M2N2O2N110OO2L3N2M4M2N3L3N3M2M3N3M2JYe4"}, "image_id": 86, "id": 1383}, {"iscrowd": 0, "category_id": 1, "bbox": [404.0, 370.0, 34.0, 29.0], "area": 571, "segmentation": {"size": [512, 512], "counts": "Q\\Z63k?3M2M3N3M2M4N110O00010O010O01O01O010O010O01O01O010O01N1M4M2N2M4M2NUdT1"}, "image_id": 86, "id": 1384}, {"iscrowd": 0, "category_id": 1, "bbox": [110.0, 386.0, 99.0, 57.0], "area": 2867, "segmentation": {"size": [512, 512], "counts": "c\\g11n?2M3N1N3N1N3N2M2N3N2M210O10O10O010000O0100N1N3N2M210O01000O01000O01000O01000O0100O01000O01000O0100N1N3N1N3N2M2O2M3N1O2O10M3N1N3M3M2O2M3M20100O01000O0O2N2O001000O01000O01000OVOn@f0S?XOo@f0V?N3N2M2O2M3N1N3N2M2OoRg4"}, "image_id": 86, "id": 1385}, {"iscrowd": 0, "category_id": 1, "bbox": [310.0, 394.0, 74.0, 60.0], "area": 2609, "segmentation": {"size": [512, 512], "counts": "h\\k43j?4M2`@JS?:j@HT?:i@IT?d0N1010O0010O00010]AUOo=k0nAXOR>i0kAYOV>f0gA^OX>b0eAA\\>?aAD^>o00010O01O01O010O00010O010O00010O01L3N2M40O00010O010O00010O01O01O010O00010O001N100010M2M3N3L3N3L3M3N3L3N3L3M3NQco1"}, "image_id": 86, "id": 1386}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 394.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "Zlo72d3"}, "image_id": 86, "id": 1387}, {"iscrowd": 0, "category_id": 1, "bbox": [381.0, 402.0, 13.0, 
13.0], "area": 101, "segmentation": {"size": [512, 512], "counts": "jln51m?3M2M3O20O01O01O010M2N3M\\cj1"}, "image_id": 86, "id": 1388}, {"iscrowd": 0, "category_id": 1, "bbox": [506.0, 407.0, 6.0, 12.0], "area": 41, "segmentation": {"size": [512, 512], "counts": "n\\m71m?3N2N2M2O2XC"}, "image_id": 86, "id": 1389}, {"iscrowd": 0, "category_id": 1, "bbox": [408.0, 414.0, 54.0, 54.0], "area": 1488, "segmentation": {"size": [512, 512], "counts": "P^\\62k?4M2M3N3L3M4M2M3M4M2M3M4M201O01O010O01O01O010O01OSO[A?f>]O]Ac0c>[O_Af0a>VOcAi0]>UOeAl0h>O00010O010O00010O010O00010O010L3N2M4M2M4L3N2M4MWbh0"}, "image_id": 86, "id": 1390}, {"iscrowd": 0, "category_id": 1, "bbox": [86.0, 434.0, 85.0, 76.0], "area": 3050, "segmentation": {"size": [512, 512], "counts": "Y^[11m?3N2M2O2M3M2O2M3N1N3N2M2N3N2N110O10O01000O01000O01000O0100O01000O01000O010N2M2O2000O01000O01[AhNa>[110M2N3N2M2O2M3N101000O010O10O10O10OTOlAMT>0oAOQ>0PB1P>LSB4m=JUB5k=IWB8i=FYB:g=C[B=e=A^B?b=_O`B?b=^O`Ba0a=^OaB?b=^OaB`0`=_OaB?b>N2J`@Hb?66M2O2MiPZ5"}, "image_id": 86, "id": 1391}, {"iscrowd": 0, "category_id": 1, "bbox": [378.0, 446.0, 25.0, 26.0], "area": 395, "segmentation": {"size": [512, 512], "counts": "_^m52l?3L3M3N3L3M4N110O01O01O010O01O01O01M2N3M2M4M2M3NmQf1"}, "image_id": 86, "id": 1392}, {"iscrowd": 0, "category_id": 1, "bbox": [280.0, 454.0, 86.0, 58.0], "area": 2844, "segmentation": {"size": [512, 512], "counts": "k^\\42k?3N2M4L3N2M4L3N3O01O01k@WOR?k000010O0010O0010O`AUOk=k0SBXOm=h0oA[OQ>e0mA]OS>c0jAAV>?fADZ>o01O010O00001O001O00001O001O00001O001O0N2M4M2O1010O010O0010O0010O0010OSAPOj>S11OO1N2M3N2N2M13M2N2M4M2M4M2M3N3M2M4M2M3N3M2M4M\\aX2"}, "image_id": 86, "id": 1393}, {"iscrowd": 0, "category_id": 1, "bbox": [52.0, 483.0, 81.0, 29.0], "area": 1528, "segmentation": {"size": [512, 512], "counts": "i_j03l?2M2O2M3N1N3N1N2N2O1N2O11O1O001O1O001O1O001O001O1O0000N2O1N2O1N2001O001O1O00O1N2O1N2O1O11O001O1O001O1O001O1O001O1O001O001OO1O1001O1O001O1O001O1O001O1O0N3M3N1N3NZPm5"}, "image_id": 86, "id": 1394}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 489.0, 59.0, 23.0], "area": 947, "segmentation": {"size": [512, 512], "counts": "o_R61m?2M3N2M3N2M3N2M3N2001O00001O001O00001O001O00001O001O00001O001O00001O001O000000N2N2M3N2O100001O001O00001O0N3M2M3N3L3NcPP1"}, "image_id": 86, "id": 1395}, {"iscrowd": 0, "category_id": 1, "bbox": [360.0, 508.0, 12.0, 4.0], "area": 29, "segmentation": {"size": [512, 512], "counts": "n_d52l?200001O001O00001O0000Q`U2"}, "image_id": 86, "id": 1396}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 19.0, 16.0], "area": 219, "segmentation": {"size": [512, 512], "counts": "0;e?1O00001O001O00001O001O00M3N2M3N2M3NR`f7"}, "image_id": 87, "id": 1397}, {"iscrowd": 0, "category_id": 1, "bbox": [33.0, 0.0, 53.0, 21.0], "area": 589, "segmentation": {"size": [512, 512], "counts": "P``01o?001O00001O001O00001O001O00001O001O001O00001O3M001O001O00001O001O00001O001O00001O001O0000O1M3N2N2M3N2M4MQPe6"}, "image_id": 87, "id": 1398}, {"iscrowd": 0, "category_id": 1, "bbox": [98.0, 0.0, 52.0, 52.0], "area": 1795, "segmentation": {"size": [512, 512], "counts": "VQa11l?3M4^@IP?0n@:OIP?0o@9NKP?g0M3M4M21O01N01M3M4N100001M2M3M4L30001O001O00001O00001O00010O0010fNdAn0\\>oNgAQ1Y>mNjAS1a>01O000M4M2GQA^OR??:M2M3M4L3N`od5"}, "image_id": 87, "id": 1399}, {"iscrowd": 0, "category_id": 1, "bbox": [175.0, 16.0, 45.0, 51.0], "area": 1373, "segmentation": {"size": [512, 512], "counts": "fag23k?2ZON[A5c>NZA5c>MZA6c>NZA5c>MZA6c>M[A6b>e00010O0010O0010O0010O0010O0010O010L3N2N3L3N3L3O2O01O01O001M2M3N3M2M4M2M3NToa4"}, "image_id": 87, "id": 1400}, {"iscrowd": 
0, "category_id": 1, "bbox": [11.0, 17.0, 67.0, 49.0], "area": 1758, "segmentation": {"size": [512, 512], "counts": "]a52k?4M2M4M2M3N3L3N3L3N2M4O010O00010O0010O0010O0010O00010O010O00102M3N0O00010O010O00010O010O000O2M2010O0010O0010O0010O0O2M2M3N3L3N3L3N2Mgnh6"}, "image_id": 87, "id": 1401}, {"iscrowd": 0, "category_id": 1, "bbox": [490.0, 38.0, 22.0, 37.0], "area": 537, "segmentation": {"size": [512, 512], "counts": "QRe71m?3L3N3L3N2M4M2M4M2M3N3O0010O0010O0010O0010eN"}, "image_id": 87, "id": 1402}, {"iscrowd": 0, "category_id": 1, "bbox": [206.0, 45.0, 52.0, 54.0], "area": 1671, "segmentation": {"size": [512, 512], "counts": "fRW31l?3M4M2M3N3L3N3L3M3N3L301O01O01O01O01O010O01O01L3M4M2M3N3L301O00N3L3O110O010O00010^OaAA_>=cAB_>:dAC_>;dAB^>;eAB_>;cAB`>;d0M4M2M4M]nn3"}, "image_id": 87, "id": 1403}, {"iscrowd": 0, "category_id": 1, "bbox": [67.0, 47.0, 30.0, 35.0], "area": 617, "segmentation": {"size": [512, 512], "counts": "ZbQ11l?4M2M3N3L3N3M2M4M2M3N3O0010O00010O001M2M3N3M2M4M2M3N3L3N3M\\^_6"}, "image_id": 87, "id": 1404}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 53.0, 25.0, 46.0], "area": 696, "segmentation": {"size": [512, 512], "counts": "n1R1l>3L3N3M20010ON3M2M3N3L3N3L3O1010ON3M2M3N3L3N3L3NU^c7"}, "image_id": 87, "id": 1405}, {"iscrowd": 0, "category_id": 1, "bbox": [262.0, 63.0, 45.0, 55.0], "area": 1534, "segmentation": {"size": [512, 512], "counts": "\\SS44j?2M3GIg@:U?Ii@9U?;M2M4M2M3N3M2M4M2N3L3N12O0O2O0O2M2N3M2M4O01O010O010O01O010O010O0[OWA4h>J[A3h>JZA3i>KYA3j>JYA3i>JZA4i>IZA4Z?MU]V3"}, "image_id": 87, "id": 1406}, {"iscrowd": 0, "category_id": 1, "bbox": [158.0, 66.0, 38.0, 55.0], "area": 1271, "segmentation": {"size": [512, 512], "counts": "]S_23e?N`@4^?O_@3_?9M2M4D^OVAd0h>^OVAe0g>;N3L3N2N3M2010O010O0010O010OO1N3M2M4M2N3L3N2N3M2M4M2Df@0]?Me@0^?Ne@O]?Oomm4"}, "image_id": 87, "id": 1407}, {"iscrowd": 0, "category_id": 1, "bbox": [2.0, 76.0, 58.0, 79.0], "area": 2538, "segmentation": {"size": [512, 512], "counts": "_T11l?3M3N3L3M4M2M3N3L3M4M2M3M4M2M3M4M2M4M2M3M4M2M3N3O001L3O110O0001M2N3O01OO2M2M3M4L3XNmA`1[>M3N3L3M3M4L3N2M4L3M4L3M3N3L3M3MXmQ7"}, "image_id": 87, "id": 1408}, {"iscrowd": 0, "category_id": 1, "bbox": [59.0, 78.0, 27.0, 27.0], "area": 419, "segmentation": {"size": [512, 512], "counts": "obm02l?2N3L3N3M2N3M2N210O0010O010O010O010O0N3M2M3N3M2N3M2M]md6"}, "image_id": 87, "id": 1409}, {"iscrowd": 0, "category_id": 1, "bbox": [319.0, 88.0, 44.0, 56.0], "area": 1374, "segmentation": {"size": [512, 512], "counts": "jco42k?3N2M4M2M4M2M3N3M2M4M2M3N3]AkNW>W1gAkNY>V1cAmN]>\\10O010O01O01O010O01O01O0bNcAZ1]>cNeA[1b>M2M6K2N3L3N2M4M2M4M2M3N3L3Nh\\Z2"}, "image_id": 87, "id": 1410}, {"iscrowd": 0, "category_id": 1, "bbox": [197.0, 90.0, 33.0, 19.0], "area": 329, "segmentation": {"size": [512, 512], "counts": "obR33j?3O101O010O010O01O01O0O2M2O110O0010O0010O0010O010O0010O00M4M2M4MPm\\4"}, "image_id": 87, "id": 1411}, {"iscrowd": 0, "category_id": 1, "bbox": [483.0, 91.0, 29.0, 41.0], "area": 855, "segmentation": {"size": [512, 512], "counts": "\\ca71l?4K4M3N2j@Eg>9QALn>4o@OR??01O01O00M4K400010O000010O000010O00002OO0PM"}, "image_id": 87, "id": 1412}, {"iscrowd": 0, "category_id": 1, "bbox": [50.0, 107.0, 58.0, 65.0], "area": 1876, "segmentation": {"size": [512, 512], "counts": "`Ti02m?2N2N2_@IV?9h@IV?9h@IV?9h@IV?b0N2N2N2N2N2M3N2N2LgN^A[1_>5N1O000000000000000000000000000001O2N2N2N2N2N2N2N2N2N2M3N2N2N2N2N2N2N2N2N2N2N2N2NokY6"}, "image_id": 87, "id": 1413}, {"iscrowd": 0, "category_id": 1, "bbox": [119.0, 115.0, 53.0, 36.0], "area": 1208, "segmentation": {"size": [512, 512], "counts": 
"Vdk14j?2M3N3L3M4M2M3O2O010O00010O00010O0N30O0010O00010O0010O0010O00010O01NO111O01O010O01O01O0N2M4L3N3L3N2M4L3NSlY5"}, "image_id": 87, "id": 1414}, {"iscrowd": 0, "category_id": 1, "bbox": [174.0, 119.0, 60.0, 62.0], "area": 1923, "segmentation": {"size": [512, 512], "counts": "oTg21m?2N3L3N2N3L3N3M2N3O01O010O01O010O01O010O01L3N2N3L3N3M2M4M2N2M4N1010O010O00010O010O010O0TOiAOW>OkA2U>KnA4R>IQB8o=ETB:l=DVB=j=_OYBa0g=]O\\Bb0d=\\O^Bb0e=ZO_Bb0a>N3M2M3N3M2MTkZ4"}, "image_id": 87, "id": 1415}, {"iscrowd": 0, "category_id": 1, "bbox": [282.0, 119.0, 28.0, 29.0], "area": 480, "segmentation": {"size": [512, 512], "counts": "ZT]42l?2M4M2M4M2N3L3N210O0010O010O0010O0010O0N3L3N2N3L3N3L3NSlT3"}, "image_id": 87, "id": 1416}, {"iscrowd": 0, "category_id": 1, "bbox": [463.0, 143.0, 49.0, 36.0], "area": 1339, "segmentation": {"size": [512, 512], "counts": "UeW74h?4K6K4L4K501O00000010O00000010O0000010O2N0000010O00N201O01O0001O01O0N2M3010O00000010O0000N2L5K4Hh@Cj:"}, "image_id": 87, "id": 1417}, {"iscrowd": 0, "category_id": 1, "bbox": [299.0, 146.0, 27.0, 22.0], "area": 329, "segmentation": {"size": [512, 512], "counts": "lde42l?2N4L2N3M2010O01O010O010O01O010O010O010O0N2N3M2N3M2NWkl2"}, "image_id": 87, "id": 1418}, {"iscrowd": 0, "category_id": 1, "bbox": [57.0, 157.0, 103.0, 64.0], "area": 3286, "segmentation": {"size": [512, 512], "counts": "Zfl01n?2N2N2N2N2N2N2N2M3N2N2N1O2N2N2N2N2N0000000000001N3N2N2N1O2N1O02N2M3N2N2N2N1O2N2N2N2N2N2N2N1O01O2N2N000O10O10000001O2N2N2N2N2N2N2N2N2N2M3N2O010000000000000000000000000O010000N2N2N2N2N2N2JUATOm>j0UATOm>j06^Ol@4W?Jk@4W?Jk@4W?Jk@4W?Jk@4b?N2Nji_5"}, "image_id": 87, "id": 1419}, {"iscrowd": 0, "category_id": 1, "bbox": [286.0, 172.0, 48.0, 49.0], "area": 1386, "segmentation": {"size": [512, 512], "counts": "^V_42k?4M2M3N3M2010O00M4M2M4M2M3N3L3N3L300010O0010O0010O0010O0010O00010O0N3M2M3N3L3N3L3M3N3L3N3L3N2M\\jh2"}, "image_id": 87, "id": 1420}, {"iscrowd": 0, "category_id": 1, "bbox": [451.0, 187.0, 61.0, 53.0], "area": 1897, "segmentation": {"size": [512, 512], "counts": "gfQ71l?4M2N3L3N3M2N2M4M2N3L3N3O00010O010O0010O010O0010O0010O010O0010O010O0010O010O0010O0010O010O0010O010O0010O0010O01N1N2N3N10aI"}, "image_id": 87, "id": 1421}, {"iscrowd": 0, "category_id": 1, "bbox": [220.0, 190.0, 27.0, 26.0], "area": 443, "segmentation": {"size": [512, 512], "counts": "^V^32l?3L3M4M2M3N3N1010O00010O010O00010O01O01O0M4M2M3N3L3NmYT4"}, "image_id": 87, "id": 1422}, {"iscrowd": 0, "category_id": 1, "bbox": [228.0, 217.0, 72.0, 52.0], "area": 1853, "segmentation": {"size": [512, 512], "counts": "_Wb33k?2N2N3L3N3O001O01O010O010O01O01O010O010O01O010O01O010OM4M2N3L3N2N3M2M4N101O01O010O010O01O01O010O010O01O02N10O010O00oNZAi0f>SO]Am0l>O01O010O01ZOPA:P?CTA:o>CSA;o>CTA9[?N3M2MVhY3"}, "image_id": 87, "id": 1423}, {"iscrowd": 0, "category_id": 1, "bbox": [60.0, 233.0, 66.0, 66.0], "area": 2365, "segmentation": {"size": [512, 512], "counts": "hXn03k?2M4L3N2M4L3N3L3M3N3M20010O0010O00010O0010O0010O00010O0010O001L3M3M4M2M3M4M2M4L3O101O01O0O1M4M2M4L3N2M4L3N2M4L3N3L3M3N3L3M3N3L3M4M`hP6"}, "image_id": 87, "id": 1424}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 234.0, 30.0, 29.0], "area": 502, "segmentation": {"size": [512, 512], "counts": "mgo21l?3N2N3L3N3L3N2N30O01O01O010O010O00010O010O01OM4M2M4M2M3N3M_Xa4"}, "image_id": 87, "id": 1425}, {"iscrowd": 0, "category_id": 1, "bbox": [446.0, 245.0, 66.0, 49.0], "area": 1963, "segmentation": {"size": [512, 512], "counts": 
"aXo61l?3N3L3M3N3L3M3N3L301O01O01O01O010O01O01O01O01O010O01O01O01O01OO2M2M4L3N2O2O01O01O010O00010O01O01O010O00010OM4M2M3M4M2M3M4M2M4L3N2MTH"}, "image_id": 87, "id": 1426}, {"iscrowd": 0, "category_id": 1, "bbox": [226.0, 275.0, 57.0, 44.0], "area": 1414, "segmentation": {"size": [512, 512], "counts": "\\Ya31l?3M4M2M3O2O01O010O01O0N2N3L3M3N3L3010O00010O00010O0010O0010O00010O0010O0010O00010O0010O0010O00010\\OQA5o>HUA7l>FVA:j>CYA;i>B[A:W?M4L3NbVb3"}, "image_id": 87, "id": 1427}, {"iscrowd": 0, "category_id": 1, "bbox": [138.0, 286.0, 63.0, 55.0], "area": 1870, "segmentation": {"size": [512, 512], "counts": "mYU21m?2FOc@4Y?Oe@3Y?Od@5Y?:M3N3N1010O010O00010O010O010OO1N3L3N3M2O2O010O01O010O01O010O01O010O01O010O01O0XO\\A5d>G`A8`>FbA;]>CfA<[>AgA`0X>]OlAb0U>[OmAf0g>O00010O001L3N3M2M3N3M2MQVk4"}, "image_id": 87, "id": 1428}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 298.0, 84.0, 88.0], "area": 3356, "segmentation": {"size": [512, 512], "counts": "_:Q1o>17H000I70O2O000000O1N2NjNCnB`=9iB[OE>a=8gB\\OG=c=6dB@Fe=BYB?h=m0O010O01N110_OYBgNg=Q1WBgN4MN6h=T1WBgN:6_=P1iBmNY=Q1iBlN[=R1j0TOVAb0k>\\OWAd0h>ZOZAe0h>XO[Ae0Q?N3M3N1N3M2N3M2O2Mbee6"}, "image_id": 87, "id": 1429}, {"iscrowd": 0, "category_id": 1, "bbox": [208.0, 323.0, 50.0, 38.0], "area": 1023, "segmentation": {"size": [512, 512], "counts": "gZX31l?3N3L3N3N100010O01M2M4M2M310O010O00010O010O0010O0010O010O00010O010O0010O0010O010OO1N3L3N3L3N2N3L3N`en3"}, "image_id": 87, "id": 1430}, {"iscrowd": 0, "category_id": 1, "bbox": [367.0, 329.0, 63.0, 63.0], "area": 2421, "segmentation": {"size": [512, 512], "counts": "gkg52k?3N3M2N3L3N3M2M3O20O010O01M2M3N3M2M4M2N3L3N2N1OO2101N101O0O101O001O010O00010O010O010O00010O010O01N1N2@fAWO^>e0eAYO]>e0fAXO]>e0eAXO^>e0fAXO]>e0`0N2M4Hb@J`?39M2N]eX1"}, "image_id": 87, "id": 1431}, {"iscrowd": 0, "category_id": 1, "bbox": [99.0, 331.0, 69.0, 95.0], "area": 3358, "segmentation": {"size": [512, 512], "counts": "jla12k?3N3L3N2N3L3N3L3N2M4M2N3L3N3L3N2M4M2N3L3N2M4M2M4M2N2M4M2M4M2M4M2O1010O0M3N3L3N3L3N3M2M3N3L3N3L3N2N3L3N3L3N2M4M2N3L3N3L3N2M4M2N3L3N2M4M2NYe[5"}, "image_id": 87, "id": 1432}, {"iscrowd": 0, "category_id": 1, "bbox": [170.0, 347.0, 23.0, 17.0], "area": 212, "segmentation": {"size": [512, 512], "counts": "Q[e24j?2M3O20O01O01O010O01O01O010O010O00010O0N3L3NoTo4"}, "image_id": 87, "id": 1433}, {"iscrowd": 0, "category_id": 1, "bbox": [249.0, 350.0, 21.0, 25.0], "area": 321, "segmentation": {"size": [512, 512], "counts": "akl32k?3N3L3M4M2M3M4O01O01O01O000N3L3M4M2M3M4Modh3"}, "image_id": 87, "id": 1434}, {"iscrowd": 0, "category_id": 1, "bbox": [432.0, 359.0, 58.0, 64.0], "area": 2172, "segmentation": {"size": [512, 512], "counts": "]\\h63j?3N3L3N3L3e@_OU?c0i@_OW?f0O0O2M2M3N3L2O0O3M3N3M200010O01O01O010O01O0M301eA^NW>e100010O010O00010O01O01O0WOkAHU>[OkAe03OW>NlA2T>KoA6Q>GRB8n=EUBM2M4M2M3M4M^c:"}, "image_id": 87, "id": 1435}, {"iscrowd": 0, "category_id": 1, "bbox": [168.0, 365.0, 85.0, 60.0], "area": 2708, "segmentation": {"size": [512, 512], "counts": "Y\\d23k?2N3L3N3M2M3N3M2M4M2N3L31O010O010O01N1N201O0010O010O0010O0010O010kNYAo0g>nN\\AR1i>10O010O010O00010O010O010O00010O010O010O00010ON3M2O2O01O01O010O010O01O0N2N3M2M4M2N3L3N2N3L3N3M2M4M2NlSQ4"}, "image_id": 87, "id": 1436}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 373.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "e;1Zdo7"}, "image_id": 87, "id": 1437}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 384.0, 40.0, 53.0], "area": 1274, "segmentation": {"size": [512, 512], "counts": 
"aO010O010OJoN^AR1b>PO\\Ao0d>70O010O010O010O010O010O010O010O3N2M3N2M3N2M3N2M3N2M3N2M3N2M3N2MQc[7"}, "image_id": 87, "id": 1438}, {"iscrowd": 0, "category_id": 1, "bbox": [145.0, 396.0, 26.0, 26.0], "area": 402, "segmentation": {"size": [512, 512], "counts": "nlX21l?3N3L3N3L3N2N3N110O01O01O010O01O01O01M2M4M2M3N3L3N_SZ5"}, "image_id": 87, "id": 1439}, {"iscrowd": 0, "category_id": 1, "bbox": [353.0, 405.0, 63.0, 46.0], "area": 1700, "segmentation": {"size": [512, 512], "counts": "`m`51l?3M4L3N2M4N11O01L3N3L3M3010O00010O00010O00010O010O00010O00M4M2M310O0010O00010O00010O0010OO101O0ROWAe0i>XOZAi0o>O000[Oo@^OTAb0V?0O01M2M3N3L3M3Mab_1"}, "image_id": 87, "id": 1440}, {"iscrowd": 0, "category_id": 1, "bbox": [504.0, 412.0, 8.0, 22.0], "area": 94, "segmentation": {"size": [512, 512], "counts": "^]l71l?4M2M4M2M3N3L3TC"}, "image_id": 87, "id": 1441}, {"iscrowd": 0, "category_id": 1, "bbox": [41.0, 415.0, 28.0, 29.0], "area": 469, "segmentation": {"size": [512, 512], "counts": "Xmd02m?3N2M3N2M3N1N3N2M3N1N010O0010O010O102M3M3N2M2O2M3N2M3N`Rm6"}, "image_id": 87, "id": 1442}, {"iscrowd": 0, "category_id": 1, "bbox": [153.0, 425.0, 75.0, 55.0], "area": 2200, "segmentation": {"size": [512, 512], "counts": "Un\\22k?3N3L3N2M4M2M4M2M3N3L301O01O010O01O01O010O01O01O010O00010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010L3N2M4M2M4L3N2M4M2M4MPb]4"}, "image_id": 87, "id": 1443}, {"iscrowd": 0, "category_id": 1, "bbox": [409.0, 430.0, 61.0, 47.0], "area": 1648, "segmentation": {"size": [512, 512], "counts": "Yn\\63j?3N2M4M2M4M2M3N3N101O01O010O01O01O010O01O01O010O01O01O010O01O000M4M2M4M2O1O2O001N100O20O0N3M2M3N3L3N3L3N2M4M2M4M2M3N3L3N]bd0"}, "image_id": 87, "id": 1444}, {"iscrowd": 0, "category_id": 1, "bbox": [68.0, 436.0, 43.0, 44.0], "area": 1008, "segmentation": {"size": [512, 512], "counts": "X^R11m?3L3N2M4M2O2O00010O010O00010O010N1i@Aj>b0TA@j>c0RAAk>k0M4O000010O010O00010O010L3N2QOUAf0o>WOSAg0U?L3N3M2M3N3L3NQRX6"}, "image_id": 87, "id": 1445}, {"iscrowd": 0, "category_id": 1, "bbox": [469.0, 436.0, 31.0, 36.0], "area": 676, "segmentation": {"size": [512, 512], "counts": "^nZ72k?4M2M4L3N2M4L3N3L3N2010O0010O0010O00010O0N3L3M3N3L3M3N3L3N3LWb5"}, "image_id": 87, "id": 1446}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 454.0, 38.0, 45.0], "area": 1325, "segmentation": {"size": [512, 512], "counts": "`>o0Q?01O0000000000010O000000000000L5H7O11O00000001O0001O0000000001O0001J5F:E;FQb\\7"}, "image_id": 87, "id": 1447}, {"iscrowd": 0, "category_id": 1, "bbox": [328.0, 458.0, 56.0, 44.0], "area": 1481, "segmentation": {"size": [512, 512], "counts": "R_T51m?2N3M2N3M2N3M2N3M2N3M2N3O0010O010O010O010O010O010O01N1N3M2O20O010O010O010O010O0O2M2N3M1O1O3M2N3M2N3M2N3M2N3M2N^ao1"}, "image_id": 87, "id": 1448}, {"iscrowd": 0, "category_id": 1, "bbox": [133.0, 478.0, 63.0, 34.0], "area": 1201, "segmentation": {"size": [512, 512], "counts": "joR22l?2M4M2M4M2M3N3L3N3L3N2O100001O001O00001O001O00001O001O00001O001O001O00001O001O00001Bj@0V?Nl@2U?Jn@6R?HQA7\\?01O00001O001O00001O001O00001O001OQ`m4"}, "image_id": 87, "id": 1449}, {"iscrowd": 0, "category_id": 1, "bbox": [508.0, 481.0, 4.0, 10.0], "area": 22, "segmentation": {"size": [512, 512], "counts": "Y_n71l?3N3L3o@"}, "image_id": 87, "id": 1450}, {"iscrowd": 0, "category_id": 1, "bbox": [42.0, 489.0, 42.0, 23.0], "area": 686, "segmentation": {"size": [512, 512], "counts": "n_e02k?3M3N2M3M3M3N2N2001O00001O001O001O00001O001O001O00001O00O1N2N21O00001O001M2M3N3L3Mc`e6"}, "image_id": 87, "id": 1451}, {"iscrowd": 0, "category_id": 1, "bbox": [467.0, 494.0, 38.0, 18.0], 
"area": 445, "segmentation": {"size": [512, 512], "counts": "ooY71m?2M3M3N2M3N2N200001O001O00001O00001O001O00001O00001O001O00001O001O00001M2MZP3"}, "image_id": 87, "id": 1452}, {"iscrowd": 0, "category_id": 1, "bbox": [306.0, 496.0, 38.0, 16.0], "area": 368, "segmentation": {"size": [512, 512], "counts": "o_i41m?2N2M3N2M3N2O100001O001O00001O001O001O00001O001O001O00001O001O00001O001O0NV`c2"}, "image_id": 87, "id": 1453}, {"iscrowd": 0, "category_id": 1, "bbox": [399.0, 503.0, 23.0, 9.0], "area": 111, "segmentation": {"size": [512, 512], "counts": "noW62k?3N2N2001O001O001O001O00001O001O001O00001O00Q`\\1"}, "image_id": 87, "id": 1454}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 504.0, 13.0, 8.0], "area": 87, "segmentation": {"size": [512, 512], "counts": "j?6i?100O1000000001O001O1O3MRPi7"}, "image_id": 87, "id": 1455}, {"iscrowd": 0, "category_id": 1, "bbox": [81.0, 504.0, 22.0, 8.0], "area": 100, "segmentation": {"size": [512, 512], "counts": "noX12k?3M300001O00001O001O001O00001O001O00001O00QP\\6"}, "image_id": 87, "id": 1456}, {"iscrowd": 0, "category_id": 1, "bbox": [107.0, 506.0, 14.0, 6.0], "area": 51, "segmentation": {"size": [512, 512], "counts": "moe13k?2O11O00001O001O00001O001OQPS6"}, "image_id": 87, "id": 1457}, {"iscrowd": 0, "category_id": 1, "bbox": [386.0, 256.0, 45.0, 62.0], "area": 1719, "segmentation": {"size": [512, 512], "counts": "cXQ63j?4d@Kh>8UAKg>9VAKf>9VAJi>7TALm>d0_AoNQ>Q1lAROT>o0iASOX>l0eAXOZ>h0cA[O^>S101L3M3N30O00010O01O01O01OO2M2M2O1N3N3L3M3N3L3N2M4M2M3M4M2M4M2M3N3LkWX1"}, "image_id": 88, "id": 1458}, {"iscrowd": 0, "category_id": 1, "bbox": [379.0, 320.0, 27.0, 28.0], "area": 451, "segmentation": {"size": [512, 512], "counts": "cjm52k?3N3L3M3M4L3N30O00010O0010O0010O0010OO1M4M2M4M2M3N3Lked1"}, "image_id": 88, "id": 1459}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 16.0, 71.0], "area": 628, "segmentation": {"size": [512, 512], "counts": "0W2i=M3M3M3N2M3lNlA9W>ElA7W>FlA7W>FlA7W>FmA7U>FnA7U>FnA7U>GnA5P?M3MXog7"}, "image_id": 89, "id": 1460}, {"iscrowd": 0, "category_id": 1, "bbox": [141.0, 0.0, 146.0, 137.0], "area": 9734, "segmentation": {"size": [512, 512], "counts": "bbV23k?3L3N3M2M4M2M3N3M2M4M2M3@lNRBW1j=mNSBU1k=mNRBW1k=lNRBV1k=mNRBW1k=`0M3N3M2M4M2M3N3L3N3M2O2O00001O001O00001O001O001O00001O001O00001O001O00001O0010O0oM^Ce0blNcAQ1g>M4M2N2M4M2M4M2N2M4M2M4MX]`3"}, "image_id": 89, "id": 1461}, {"iscrowd": 0, "category_id": 1, "bbox": [268.0, 0.0, 13.0, 5.0], "area": 38, "segmentation": {"size": [512, 512], "counts": "PPV41o?001O001O001O00001O00N2NR`c3"}, "image_id": 89, "id": 1462}, {"iscrowd": 0, "category_id": 1, "bbox": [484.0, 160.0, 28.0, 84.0], "area": 1263, "segmentation": {"size": [512, 512], "counts": "WWb72l?2QONmA6o=MoA5o=NmA6S>IjA:V>FhAEdA?\\>@bAb0^>_O^Ae0b>ZO[Ai0e>901O0N2M4M2M4L3N2M4M2M4M2M3M4M2M4oJ"}, "image_id": 89, "id": 1463}, {"iscrowd": 0, "category_id": 1, "bbox": [50.0, 203.0, 137.0, 181.0], "area": 12062, "segmentation": {"size": [512, 512], "counts": "T[i02k?3N3M2M4M2N2M4M2M4M2N3L3N2N3L3N3L3N2N3L3N3M2M4M2M3N3M2M4M2N2mN\\MgDh2U;[MiDg2U;\\MgDh2U;[MiDg2U;\\MhDg2X;XMeDk2[;VMbDl2^;TM_DP3a;oL]DS3c;nLZDU3f;jLWDY3i;`00O00010O010O0001O0N3L3N3L3N2N3L3N3M2O2N100OO2N2M2O2M2O2N2M2O2M2O2N2M2O2N1N3N2M2O2N1N3N2N1N3N1N3N2N1N3N1O2M3N1N3N1O2M3N1O2M2O2M3N1O2M21000N1O3L3N3M2M3N3L3N3M2M4M2N3L3N3L3N2N3L3N3L3N3M2M4M2N\\YR5"}, "image_id": 89, "id": 1464}, {"iscrowd": 0, "category_id": 1, "bbox": [184.0, 305.0, 101.0, 113.0], "area": 6691, "segmentation": {"size": [512, 512], "counts": 
"V\\l22l?2M3N3L3N3M2M4M2M3N3L3N3M2M3N3L3N3L3N2N3L3N3L3N3L3N2N3L3N3L3N2M4M2N3L31O01O010O010O00010O010O0N2O2O0010O0010O0010O010O0010O0010N1M4M2M3N3M2M4M2M3N3L3N3M2M4M2M3N3L3N3M2M3N3L3N3L3N2N3L3N3L3N3L3N2N3L3N3LkUa3"}, "image_id": 89, "id": 1465}, {"iscrowd": 0, "category_id": 1, "bbox": [359.0, 359.0, 153.0, 126.0], "area": 10833, "segmentation": {"size": [512, 512], "counts": "fmc53k?2M4M2M3N3L3N3L3N2M4L3N3L3N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3010O0010O0010O0010O00hMnBc1R=YNRCf1ne0YA[Og>c0\\A\\Od>b0^A^Ob>a0_A_Oa>?bA@_>>bAB^>;gAEY>:hAFX>8kAGV>6lAJT>5nAJR>4PBLP>3QBMP>0SBOo>0QPg6"}, "image_id": 89, "id": 1469}, {"iscrowd": 0, "category_id": 1, "bbox": [368.0, 477.0, 41.0, 22.0], "area": 331, "segmentation": {"size": [512, 512], "counts": "T_h51l?3N2N30O01O01O01O010O01O01O01O010O01O01O010O01O01O010O01O01O01O010O01O01O01O0N3LgPc1"}, "image_id": 89, "id": 1470}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 223.0, 169.0], "area": 23646, "segmentation": {"size": [512, 512], "counts": "4S4m;00000000000000c0]O0000000000000000000000000000000000000000000000000000000000UOk00000000000000000000000?]O400000000000000000000000000000000000000000000000000000000000k0UO00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000WOeEmK[:c0[H^O^8b0bG^O^8b0bG^O^8b0bG^O^8b0bG^O^8b0bG^O^8b0bG^O^8b0bG^O^8b0bG^O^8b0bG^O^8b0bG^O^8b0bG^O^8b0bG^O^8b0bG^O^8b0bG^O^8b0bG^O^8b0bG^O^8b0bG^O^8b0bG^O^8b0bG^O^8b0bG^O^8b0bG^O^8b0`300000000000000000000000000SOm000000000000000000000000000000000000000000000000000000000000000000000000000000000000P``4"}, "image_id": 90, "id": 1471}, {"iscrowd": 0, "category_id": 1, "bbox": [431.0, 0.0, 81.0, 207.0], "area": 14718, "segmentation": {"size": [512, 512], "counts": "jdg6W1i>000VKMgI3Y6P1dH^On6f1dGdN\\8S2mFmMS9S2mFmMS9S2mFmMS9S2mFmMS9S2mFmMS9R2nFnMR9R2nFnMR9R2nFnMR9R2nFnMR9R2nFnMR9R2nFnMR9R2nFnMR9R2nFnMR9R2nFnMR9R2nFnMR9`4000000000000000000000000000000000000000000000SOm0000000000000000000000000000000000000000000000000000000000000000000000000"}, "image_id": 90, "id": 1472}, {"iscrowd": 0, "category_id": 1, "bbox": [214.0, 191.0, 75.0, 66.0], "area": 2715, "segmentation": {"size": [512, 512], "counts": "hV[31k?5L3M3M4L3L4M4M20010dAVO`=j0]BYOd=f0YB^Of=b0VBBj=?RBDo=;nAIQ>7lALT>P11O01O01O01O01O00010O000010O00010O0001O01O01O01O01M2L4M4K4M3L5L3N201O01O01O00010O000010O00010O0001O01OM4L3M3M4L3M3M^Y_3"}, "image_id": 90, "id": 1473}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 203.0, 95.0, 309.0], "area": 29192, "segmentation": {"size": [512, 512], "counts": "\\6d9\\6000000000000000000000000000000000000000000000000000000000000000000000000000O1000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000l3TLiU`6"}, "image_id": 90, "id": 1474}, {"iscrowd": 0, "category_id": 1, "bbox": [294.0, 294.0, 59.0, 140.0], "area": 4411, "segmentation": {"size": [512, 512], "counts": "gZc49f?1O100O1O1Oc2]M100O1O1O1O1O1O1O1N2O1O1O1000000000000O1000000000000000000000000O1SOm00fLRFb0o9]OZ301O001O001O001O001O001O001O001OaV_2"}, "image_id": 90, "id": 1475}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 222.0, 54.0], "area": 11430, "segmentation": {"size": [512, 512], "counts": 
"0f1Z>000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000o0QO0000000000000000000000000000000000Qo`4"}, "image_id": 91, "id": 1476}, {"iscrowd": 0, "category_id": 1, "bbox": [296.0, 0.0, 153.0, 34.0], "area": 2812, "segmentation": {"size": [512, 512], "counts": "PPd4d0\\?00000000000000000000000000O100000000000000000000000000000000000000000000000000000000?A000000000000000000000000000000000000000000000000000000000000000000@aA_O_>a0`0000000000000000000000000000000000000000000000000000000000000000000000000000000000000_Oa00000000000000000000000000000000000000000000000000000000000000P`o0"}, "image_id": 91, "id": 1477}, {"iscrowd": 0, "category_id": 1, "bbox": [207.0, 43.0, 225.0, 209.0], "area": 35034, "segmentation": {"size": [512, 512], "counts": "kdW3Q3o<000000000000000000000000000000000@`000000000000000000000000000000000000000000000000000000000`M`2000000000000000000000000000000000000A?00000000000000000000000000000000000000000000000000000000000000000000>B000000000000000000000000000000000000000000000000000000000000000000000000A?0000000000000000000000000000000000000000000000000000000000000oLWGoMe9Q2[FoMe9Q2[FoMU:a1kE_NU:a1kE_NU:a1kE_NU:a1kE_NU:a1kE_NU:a1kE_NU:a1kE_NU:a1kE_NU:a1kE_NU:a1kE_NU:a1kE_NU:a1kE_NU:a1kE_NU:a1kE_NU:a1kE_NU:a1kE_NU:a1kE_NU:a1kE_NU:a1kE_NU:a1kE_NU:a1kE_NY:0TG0l80TG0l80TG0l80TG0l80TG0l80TG0l80TG0l80TG0l80TG0l80b30000000000000000000000000000000000000000000QnW1"}, "image_id": 91, "id": 1478}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 366.0, 220.0, 146.0], "area": 30405, "segmentation": {"size": [512, 512], "counts": "_;a4_;00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000O100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000Y2gMk0UO0000000000000000000000000000^aa4"}, "image_id": 91, "id": 1479}, {"iscrowd": 0, "category_id": 1, "bbox": [400.0, 380.0, 109.0, 132.0], "area": 12324, "segmentation": {"size": [512, 512], "counts": "g]X6h0X?0000000000000000000000000WOV2cN4L000000000oNQ1000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000O1000000000000000000000000000000000000000000000000000000000000000000Q1oN00000000000000SS1"}, "image_id": 91, "id": 1480}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 177.0, 128.0], "area": 20607, "segmentation": {"size": [512, 512], "counts": "0Y3g<000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000hCoLa;h300000000000000000000000000000000000000000O100000000000000000000000000000000000000000000000000000000000000O1000000000000000000000000000000000000000000000000000000000000O100000000000000000000000000[OWDSMi;m2e00000000000000000000000000000000000O10000000000000^2bMb]W5"}, "image_id": 92, "id": 1481}, {"iscrowd": 0, "category_id": 1, "bbox": [321.0, 0.0, 182.0, 85.0], "area": 9353, "segmentation": {"size": [512, 512], "counts": 
"g`P51n?3N2M3N2M2O2M3M3N2M3N1N3N2^OROmAQ1Q>POnAR1P>POmAS1Q>oNmAR1R>POkAS1S>oNjAT1T>>N2N1O2N2N2N2N2N1O2N2N2N2N2N00O100O100O100O100O1O100O100O100O100O1O100O100O100O100O1O100O100O100O100O1O100O100O100O100O1O100O100O100O100O1O100O100O100O1O100O100O100O100O1O100O100O100O100O1O100O100O100O100O11O1O2N2N2N1O2N2N2N2N1O2N2N1OO100O1O100O100O100O100O1O100O100O100O100O1O100eN`AU1a>kNaAS1_>lNaAT1`>kNaAU1_>kN`AU1e>10O3M3N2M2O2M3N2M3N1N3M3N2M3N1N3NQ_4"}, "image_id": 92, "id": 1482}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 0.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "P`o71o?"}, "image_id": 92, "id": 1483}, {"iscrowd": 0, "category_id": 1, "bbox": [302.0, 95.0, 86.0, 97.0], "area": 4375, "segmentation": {"size": [512, 512], "counts": "nSg42m?3N2M3N2M2O2M3M3N2M3N2M2O2M3N2M3M3N2M2O2M3N2M3N2M2N3N2M3N2M3N2M2O2M3N1N0010O0101N2O0O010O00010O^OkBZNV=f1lBXNS=h1PCmMM1T=R2PClMN0Q=T2c00O0010O1O3N2M3N1N3N2M3M3N2M3N2M3N2M3N2M2N3N2M3N2M3N2M3M3N2M3N2M3N[km1"}, "image_id": 92, "id": 1484}, {"iscrowd": 0, "category_id": 1, "bbox": [133.0, 102.0, 189.0, 204.0], "area": 19678, "segmentation": {"size": [512, 512], "counts": "oeR22n?2M2O2M3M3N2M3N1N3N2M3M3N1N3N2M3M3N1N3N2M3N2M3M2O2M3N2M3M2O2M3N2M3N1N3M3N2M3N2M2N3N2M3N2M2O2M3M3J]LkCe3S<6M2N3N2M3N2M2O2M3M3N2M1J\\KPEe4n:^KoDc4P;701N100O2O0O1O2O0O010O001_OlJWFR5j9PKUFo4j9SKVFk4k9WKTFg4l9\\KRFd4n9^KQF`4o9cKPF\\4P:eKPFY4P:jKnEV4Q:mKmER4T:PLiEQ4V:l01O010O010O01O010O010O01O010O010O01O010O01O011N3lLeE?]:^OeE`0^:^OdE`0^:]OeE`0]:_OeE?]:^OfE?]:_OeE?]:^OeE`0^:]OeE`0]:_OeE?]:^OcEb0`:\\O`Ed0b:YO^Eg0e:WO[Ei0g:TOYEl0i:SOVEn0l:oNUEP1n:mNRES1Q;kNoDU1S;hNmDX1U;gNkDY1W;dNiD\\1Z;bNfD^1\\;_NdDa1_;]N`Dd1b;YN_Df1c;YN\\Dg1g;VNZDj1h;SNXDm1k;QNUDo1m;nMSDR2o;mMQDS2QS>DkA=T>FjA:V>GhA:W>HgA9X>JeA7Z>K]AE3`0`>1^A0a>2]AOb>h0O100O1O1O100O1O1O100O10000O1O100OKjAaNV>_16O2O1N3M2N2O2M0000010O0001O01O0001O01O0001O01O00102M2_ORAMP?1RAMQ?1PANQ?0RAMP?1RAMQ?0QANQ?1QAMP?1RR?"}, "image_id": 92, "id": 1491}, {"iscrowd": 0, "category_id": 1, "bbox": [498.0, 510.0, 4.0, 2.0], "area": 6, "segmentation": {"size": [512, 512], "counts": "o_i71n?1001OQ`4"}, "image_id": 92, "id": 1492}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 8.0, 14.0], "area": 63, "segmentation": {"size": [512, 512], "counts": "0>b?O1N2N2N2N2N2NRPl7"}, "image_id": 93, "id": 1493}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 62.0, 93.0], "area": 3415, "segmentation": {"size": [512, 512], "counts": "Z26f>LZB6d=LZB7c=LZB6d=LYB8d=KYB7e=KYB8c=LZB6d=LZB7c=LZB6d=LZB7c=LZB6d=LYB8d=JZB8d=T1L3O2O01OO2M2N3L301O001O001OO1N2N2N2N2N2N2N2M3N2N2N2N2N2N3M2eNmAg0V>VOmAf0V>XOlAf0W>WOlAf0V>XOlAi0T>TOoAk0Q>SOQBk0d>M2N3M2N2N3M2N3L3N3M2NWoP7"}, "image_id": 93, "id": 1494}, {"iscrowd": 0, "category_id": 1, "bbox": [274.0, 0.0, 33.0, 13.0], "area": 223, "segmentation": {"size": [512, 512], "counts": "PPY41o?001O00001O001O001O001O001O001O001O001O00001O001O001OO1N2N2N2N2NR`V3"}, "image_id": 93, "id": 1495}, {"iscrowd": 0, "category_id": 1, "bbox": [401.0, 0.0, 77.0, 36.0], "area": 1582, "segmentation": {"size": [512, 512], "counts": "T`X63k?2N3O001O00001O001O001O001O001O001O00001O001O001O001O001O001O00001O001O001O001O001O001O001O00001O001O001O001O001O001O00010O010O01L3N2N3M2N3M2M4M2N3M2N3Mjo`0"}, "image_id": 93, "id": 1496}, {"iscrowd": 0, "category_id": 1, "bbox": [54.0, 16.0, 63.0, 102.0], "area": 3400, "segmentation": {"size": [512, 512], "counts": 
"XSk02l?2N3EKg@7V?Lh@6b>GlAe0Q>^OlAd0R>^OlAe0Q>]OmAe0Q>^OlAe0Q>]OmAe0P>_OlAe0Q>f0@VNhBm1T=VNjBl1T=WNiBl1T=VNjBl1T=WNiBl1T=a0N3L3N210O010O010O01M2N3M2N3M2N2N3M2M4M2N3M2N3VNTB\\1n=bNTB\\1o=aNSB\\1P>bNSB[1Z>N3M2lNYAm0n>N2010O01M2N3M2N3L3N3M2N3M2N3M_^U6"}, "image_id": 93, "id": 1497}, {"iscrowd": 0, "category_id": 1, "bbox": [379.0, 17.0, 75.0, 70.0], "area": 2654, "segmentation": {"size": [512, 512], "counts": "dam53k?2N3M2O2M2N3IAi@b0T?6N3M2N3N110N1N3M2N3N1N30O010O010O010O010O0100O010O010O010O010O010O010O010O010O010O01cNbAW1]>gNeAY1b>0O10O0nN[Aj0e>SO]Am0c>RO_An0a>oNaAQ1h>0O010ON3M2N3M2N3M3M2N3M2N3M2N3MVnl0"}, "image_id": 93, "id": 1498}, {"iscrowd": 0, "category_id": 1, "bbox": [157.0, 85.0, 71.0, 78.0], "area": 2849, "segmentation": {"size": [512, 512], "counts": "Vd^21m?2N3M2N3M2M4M2N3M001O2N3M2M4M2aAnNn=U1oAmNo=U1oAnNn=U1oAmNo=U1nAoNo=b1N3M2M4O010O010O01O010O010O010O010O0N3M2M4M2N001O3M2N3M2M4M2N3M3M2N3M2N3M2M4M2N3M201O010OO01N3M2N3M2Ojl]4"}, "image_id": 93, "id": 1499}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 116.0, 33.0, 91.0], "area": 1607, "segmentation": {"size": [512, 512], "counts": "d3k2U=O1N3M2N3M2N3L3N3M2N3M2FQBaNR>\\1QB`NR>^1PB`NR>^1:M2M4M2N3O010O0N3M2N2M4M2N3Ee@M]?1e@L_?19NP\\_7"}, "image_id": 93, "id": 1500}, {"iscrowd": 0, "category_id": 1, "bbox": [13.0, 134.0, 94.0, 150.0], "area": 5755, "segmentation": {"size": [512, 512], "counts": "og6172\\?0b@2MM[?4d@6Z?:M2N3M2M3N3M2N3M2N3M2M40O01O0N3M2N3M2N3FQN_BQ2_=RN]BQ2a=9L3N3M2N3M2N3MO10O1001O3M3MO0K[MSCe2l<^MRCb2n<7000000000O0100000000000O2O3M2N00O1002M3N3M2N3M2N3M2N3L3N3M2N3M2N3M2M4M2N3M2N3M2N3L3N3M2N2N3M2N3M2M4M2N][Z6"}, "image_id": 93, "id": 1501}, {"iscrowd": 0, "category_id": 1, "bbox": [300.0, 136.0, 63.0, 81.0], "area": 3001, "segmentation": {"size": [512, 512], "counts": "aUf41i01Y>1eA1Y>2dA1Y>1eA1Y>2dA1Y>1eA1Y>2dA1X>2eA1Y>2dA1Y>1eA1Y>l0N3M2N3L3N3M2N3M2N3N1010O010O010O001N1N3M2M4M2N3N11O01O0N2N000O4M2N3M2N3M2N2M4M2N3M2N3M2M4M2N3M2N3M2N3L3N\\[Z2"}, "image_id": 93, "id": 1502}, {"iscrowd": 0, "category_id": 1, "bbox": [343.0, 157.0, 74.0, 98.0], "area": 3588, "segmentation": {"size": [512, 512], "counts": "_g[51l?3N3M2N3M2N3M2M4M2N3O010O0N2N3M2M4M2N3XOeNgB]1V=fNhB]1U=fNhB\\1V=fNhB]1U=fNhB\\1V=fNhB]1U=fNhB\\1V=fNhB\\1V=i0M2N3M2N3O0001M2M4M2N3M2M4M2N3M2M4M2N3M2M4M2N30O01O010O010\\NfA`1^>N1]OcAA`>=aAAa>?`A^Oc>b0\\A[Og>e0YAYOj>f0WAWOk>g08N3M2N3L3N3M2N3L_Z_1"}, "image_id": 93, "id": 1503}, {"iscrowd": 0, "category_id": 1, "bbox": [405.0, 187.0, 68.0, 78.0], "area": 2919, "segmentation": {"size": [512, 512], "counts": "cgZ62l?6J5K2O2O001O0N3YO\\OnAf0P>]OmAf0P>\\OnAf0P>]OmAf0o=]OnAf0P>]OmAf0P>\\OnAf0P>f0N3M2M4M2O2O010O010O010O010O01M2N2M4M2N3O0010ON3M2N3M2N0000003L3N3M2N3M2N2N3N1010O01N1N3M2N3M2N3M2M4M2NiYc0"}, "image_id": 93, "id": 1504}, {"iscrowd": 0, "category_id": 1, "bbox": [261.0, 237.0, 49.0, 52.0], "area": 1412, "segmentation": {"size": [512, 512], "counts": "`hR42l?3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3N110O010O010O010O0010O010N1N3M2N3M2N3M2N3M2N3M2N3M2N2N3M2N3M2NWhT3"}, "image_id": 93, "id": 1505}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 298.0, 47.0, 118.0], "area": 3276, "segmentation": {"size": [512, 512], "counts": "d997a2\\N2M4M2M4M2M3N3L3DTABo>:TACo>;TABn>;>M2M4M2MPVX7"}, "image_id": 93, "id": 1506}, {"iscrowd": 0, "category_id": 1, "bbox": [51.0, 329.0, 135.0, 100.0], "area": 6267, "segmentation": {"size": [512, 512], "counts": 
"iki03k?2XON`A5]>MaA5]>N`A4]>OWAF7>_>2^A0`>2^A1_>2^A0`>j0M2N3M2O20O001O01O01O010O010O010O010O010O010O010OiA\\NR>j10O010O010M2WNlAd1Y>01O010O010O010O010O010O010O010O010O0010gA]NS>c1jA`NW>d1010O01O010O01O0N3O0010O010O010O010O010O010O010O01O010O010O01O010O010O010O010O010O010ON2N002N2M4M2N3M2M4O010O010O0N3O010O010N1N2N3L3N3M2N3L3N3M2NPdR5"}, "image_id": 93, "id": 1507}, {"iscrowd": 0, "category_id": 1, "bbox": [16.0, 393.0, 85.0, 66.0], "area": 2802, "segmentation": {"size": [512, 512], "counts": "X]82l?3M2N3M2N3M2N3L3N3M2N3M2N3M2N201O010O010O010O010O010O0010O010O0010O010O03NO010O010O010O010O010O00010O010O010O010O010O010O010O0O2O01O010O01ON3L3N3M2N3M2N3M2N3L3N3M2N3M2NiR]6"}, "image_id": 93, "id": 1508}, {"iscrowd": 0, "category_id": 1, "bbox": [97.0, 431.0, 85.0, 66.0], "area": 2778, "segmentation": {"size": [512, 512], "counts": "^n`12l?3M2N3M2M4M2N3M2N3M2N3L3N2N3N1010O010O010O010O010O0010O010O010O010O010OiNYAU1i>10O010O0010O010O0010O010O010O010O010O0010O010O001N1010O01O010O010OM4M2N3M2N2N3M2M4M2N3M2N2N3M2MeaT5"}, "image_id": 93, "id": 1509}, {"iscrowd": 0, "category_id": 1, "bbox": [282.0, 460.0, 136.0, 52.0], "area": 3573, "segmentation": {"size": [512, 512], "counts": "m_]42l?2N3M2N2M3N2N2N2N2N2N2N2N2N2N2M3N2N2N2N2N2N2N2O11O001O001O001O00001O001O001O001O001O001O00001O001O001O001O001O001O00001O001O001O001O001O001O00001O001O001O001O001O001O00001O001O001O001O001O00001O001O001O001O001O001O00001O001O001O001O001O001O00001O001O001O001O001O001O0000Q`^1"}, "image_id": 93, "id": 1510}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 461.0, 62.0, 51.0], "area": 1982, "segmentation": {"size": [512, 512], "counts": "]>^1c>0ON3M201O010O010O0010O01iNXAU1i>010O010O0WAkNh>V100001O001O001O001O001O001O001O001O001OO1O1001O001O001O001O001O001M2N2N3M2M4M2N3M2N3Mh`P7"}, "image_id": 93, "id": 1511}, {"iscrowd": 0, "category_id": 1, "bbox": [192.0, 470.0, 20.0, 40.0], "area": 426, "segmentation": {"size": [512, 512], "counts": "i_P33j?3L4L5K4L4M4K4L4L4O10L4L5K4M3L4L5K4M3L\\ae4"}, "image_id": 93, "id": 1512}, {"iscrowd": 0, "category_id": 1, "bbox": [213.0, 490.0, 14.0, 22.0], "area": 200, "segmentation": {"size": [512, 512], "counts": "ioZ34h?4L4M4K6N0001N100O1L5K4M3LhP^4"}, "image_id": 93, "id": 1513}, {"iscrowd": 0, "category_id": 1, "bbox": [62.0, 495.0, 40.0, 17.0], "area": 374, "segmentation": {"size": [512, 512], "counts": "o_o01m?2N2N2M3N2N2N2O100001O001O001O001O001O001O001O001O00001O003M001O001O001O001O00Q`\\6"}, "image_id": 93, "id": 1514}, {"iscrowd": 0, "category_id": 1, "bbox": [17.0, 0.0, 34.0, 12.0], "area": 228, "segmentation": {"size": [512, 512], "counts": "Q`83l?101O001O00N2N21O001O001O001O001O001O001O001O00001O001O00O1N2N2M3NR`V7"}, "image_id": 94, "id": 1515}, {"iscrowd": 0, "category_id": 1, "bbox": [55.0, 0.0, 85.0, 48.0], "area": 2502, "segmentation": {"size": [512, 512], "counts": "^`k02l?3L3N3M2N3M2O2O001O00001O001O001O001O001l@XOn>h0PAZOP?k001O001ON2N21O001O001O001O001O001O001WAmNc>S1[APOd>V101O000000N2N201O010O010O010O010O010O010ON3O010O01O010O0O2M2N3L3N3M2N2N3M2N3M2M4M2N3Mcoi5"}, "image_id": 94, "id": 1516}, {"iscrowd": 0, "category_id": 1, "bbox": [215.0, 0.0, 264.0, 213.0], "area": 30103, "segmentation": {"size": [512, 512], "counts": 
"lc[32l?3M2N3M2N3L3N2N3M2N3M2N3M2N3O010O0010O010O010O010O^OkNYBV1d=mN\\BR1a=QO]BR1`=QO]BQ1a=QO^BQ1_=ROaBm0]=UOcBl0Z=WOfBh0X=ZOhBf0V=]OjBc0S=_OmBa0Q=BoB>n000002N000010O2O1001O`AhNW>X1gAkNX>V1fAkNZ>U1dAmN\\>\\1000dNcAT1^>iNdAW1\\>gNfAZ1`>00N2N01O01O0000001O1O01O003N10000bAfNV>Y1hAiNX>W1fAkNZ>U1dAmN\\>]1O00cNdAU1\\>iNfAW1[>fNgAZ1`>00O2M000001O000001O1O000003O00000aAgNV>Y1iAhNW>X1gAjNY>V1eAmN[>\\1000cNcAV1]>hNeAX1[>fNgAZ1Y>eNiAZ1_>00N00000000001O10O0000011O10000aAgNV>Z1hAhNW>X1gAjNZ>U1dAmN\\>\\1000cNdAU1\\>jNeAV1[>hNgAY1X>eNkAZ1^>O0O1O00000000010O001O00001O3M2O1N2N2N2N2N3^Og@8[?Fg@8[?Gf@8[?Fh@7b?N2N2NTmQ6"}, "image_id": 94, "id": 1519}, {"iscrowd": 0, "category_id": 1, "bbox": [199.0, 32.0, 15.0, 22.0], "area": 173, "segmentation": {"size": [512, 512], "counts": "`aS33k?3L3N3L3M3N3N01N3L3N3L3N2M4Mond4"}, "image_id": 94, "id": 1520}, {"iscrowd": 0, "category_id": 1, "bbox": [208.0, 225.0, 24.0, 33.0], "area": 343, "segmentation": {"size": [512, 512], "counts": "VWX32m?3N2M2O2M3N2M3M300O100000O100N1O2M3N2M3N1N3N2MUh[4"}, "image_id": 94, "id": 1521}, {"iscrowd": 0, "category_id": 1, "bbox": [207.0, 253.0, 25.0, 34.0], "area": 374, "segmentation": {"size": [512, 512], "counts": "QhW32n?2M3N2M3N2M2O2M3N2O1000O10O1000M3N2M2N3N2M3N1N3NYg[4"}, "image_id": 94, "id": 1522}, {"iscrowd": 0, "category_id": 1, "bbox": [363.0, 260.0, 15.0, 16.0], "area": 134, "segmentation": {"size": [512, 512], "counts": "]he51m?3M2N3M2O20O010O01000N3M2M2NggR2"}, "image_id": 94, "id": 1523}, {"iscrowd": 0, "category_id": 1, "bbox": [74.0, 332.0, 57.0, 66.0], "area": 1753, "segmentation": {"size": [512, 512], "counts": "dZU11o?2M4M2N3L3N3L3N3L3N3M2M4M2M3N3M2M4M2M4M2N3O0010M2N3L3N3L3N2M4M2N3L3N3L10000O010O01000O010O01003L3N3L3N3M2M3N3L3NPTn5"}, "image_id": 94, "id": 1524}, {"iscrowd": 0, "category_id": 1, "bbox": [46.0, 349.0, 50.0, 66.0], "area": 1818, "segmentation": {"size": [512, 512], "counts": "d[g02m?3N2N3L3N3L3N2M4@YOaAi0]>ZO`Ah0^>ZO_Aj0]>ZO`Ah0^>`0L3N2N3L2O0O010O01000O0101N4O010N1M3N3L3N3L3N2N3L3N3L3N2N3L3N2M4M2M4M2N2M\\c_6"}, "image_id": 94, "id": 1525}, {"iscrowd": 0, "category_id": 1, "bbox": [18.0, 359.0, 42.0, 72.0], "area": 1686, "segmentation": {"size": [512, 512], "counts": "Y\\92n?3L3TOKiA7U>KhA9T>JjA8T>KhA8V>JhA9T>JjA8T>KhA9T>JjA8T>l0M4M2N2M4MO010O01002M4M2M4M2N2M4M2M3N3L3N2EVA\\Om>`0VA]Om>a0UA]On>?2M01000O010O01000O0100O4M2N3L3N2M4AXA@j>>YA^Ok>?WA_Ok>>?M2M4M2N2MlRb7"}, "image_id": 94, "id": 1527}, {"iscrowd": 0, "category_id": 1, "bbox": [102.0, 451.0, 82.0, 61.0], "area": 3245, "segmentation": {"size": [512, 512], "counts": "Q_c12m?3N3L3N3M2M4M2M3N3M2M4M2M10000O100O10000O100O10000O100O10000O100O10000O100O1000@aA^O_>c0cA[O]>d0gAXOY>j0hATOX>o0hAnNX>T184M2M4M2N1N100O10000O100O10000O100O10000O100O100OXOnADR>TBQOK9P>g0]BWOe=g0^BVOe=3jA7d0Dd=2kA8d0Cd=2kA7S?FPA8_?M2M3Nn`S5"}, "image_id": 94, "id": 1528}, {"iscrowd": 0, "category_id": 1, "bbox": [57.0, 488.0, 39.0, 24.0], "area": 619, "segmentation": {"size": [512, 512], "counts": "col02n?2N2M4M2M3N000O100O100O10000O100O10000O100O10000O100O1001O2N3M2N3M2N3M2N3M2NQ`_6"}, "image_id": 94, "id": 1529}, {"iscrowd": 0, "category_id": 1, "bbox": [33.0, 505.0, 17.0, 7.0], "area": 67, "segmentation": {"size": [512, 512], "counts": "oo`01o?0O100O100O100O10000O100O1003M2NR`V7"}, "image_id": 94, "id": 1530}, {"iscrowd": 0, "category_id": 1, "bbox": [445.0, 0.0, 18.0, 7.0], "area": 70, "segmentation": {"size": [512, 512], "counts": "P`n61o?001O001O00001O001O001O001O00O1N2MS`h0"}, "image_id": 95, "id": 1531}, {"iscrowd": 0, "category_id": 1, "bbox": [493.0, 18.0, 19.0, 43.0], 
"area": 479, "segmentation": {"size": [512, 512], "counts": "Raf72m?2N3M2N2N2d@DQ?>m@DR?=m@DQ?>m@DQ?g0N3M2N2N2N2O0O00001^O"}, "image_id": 95, "id": 1532}, {"iscrowd": 0, "category_id": 1, "bbox": [490.0, 381.0, 22.0, 60.0], "area": 667, "segmentation": {"size": [512, 512], "counts": "T\\e72m?3N3L3N3M2M4M2M4M2M3N3M2M4M2M4N1010O0001N1fC"}, "image_id": 95, "id": 1533}, {"iscrowd": 0, "category_id": 1, "bbox": [80.0, 0.0, 19.0, 6.0], "area": 61, "segmentation": {"size": [512, 512], "counts": "PPX11o?0000001O00001O00001O00001O00001O00MS`^6"}, "image_id": 96, "id": 1534}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 0.0, 50.0, 56.0], "area": 1817, "segmentation": {"size": [512, 512], "counts": "YQW72j>0VB3g=OWB3g=0VB3h=NUB5k=LRB7m=IQB9o=HnA;Q>EmA=S>DjA?U>AiAa0W>@fAc0Y>]OdAf0\\>`0O001O00001O001O001O001O001OO1N2N2N2N2N2N2M3N2N2N2N2N2N2M3N2N2N2N2N2N2001O"}, "image_id": 96, "id": 1535}, {"iscrowd": 0, "category_id": 1, "bbox": [182.0, 2.0, 74.0, 87.0], "area": 3576, "segmentation": {"size": [512, 512], "counts": "URk23j?3N2M4M2M4L3N210M2M3M4M2M4M2M3M4M2bAbNT>OlAf1R>5M4L3N3L3N2M4L310O00010O01O01O010O01M2M3M2_O_BcNd=\\1\\BaNg=`1YB]Ni=c1WBZNm=f1600010O01O01O010O01O0O1M4L3N2M4M2M4L3N2M4M2M4M2M3M4M2M3N3L`oo3"}, "image_id": 96, "id": 1536}, {"iscrowd": 0, "category_id": 1, "bbox": [12.0, 4.0, 73.0, 82.0], "area": 3213, "segmentation": {"size": [512, 512], "counts": "eQ61m?2M4M2N3L3N3M2M3N3N1010O01O01O010O010O01O01O01CUOdAj0Y>YOgAh0V>[OjAd0T>^OjAd0S>@jAc0S>_OjAd0T>_OiAd0S>f0N3N11OUBRNd=o1XBUNg=R210O01O010O01O010O01O010O01O010O01O010O01OlMYBo1l=010O00M4oNPBNR>OQBOR>NQBBJ0X>;QBBJ1W>;QBBKOW>=Q1L3N3L3NY_e6"}, "image_id": 96, "id": 1537}, {"iscrowd": 0, "category_id": 1, "bbox": [351.0, 53.0, 73.0, 80.0], "area": 3285, "segmentation": {"size": [512, 512], "counts": "bc_52l?3L3N3M2N2N3M2N3M2N30O01M2M4M2N3M2N3HeNgA]1W>eNgA]1W>8M2N3M0O103M2N2N3O0010O010O010O010O010O010O010O0010O01M2N2N3M2UNPBd1X>M2M4M2N3M2N2N3M2N3L3N3M2N3M2N3M2N3M2M4Mdm[1"}, "image_id": 96, "id": 1538}, {"iscrowd": 0, "category_id": 1, "bbox": [510.0, 76.0, 2.0, 4.0], "area": 5, "segmentation": {"size": [512, 512], "counts": "^Ro71m?3cM"}, "image_id": 96, "id": 1539}, {"iscrowd": 0, "category_id": 1, "bbox": [409.0, 82.0, 85.0, 138.0], "area": 4075, "segmentation": {"size": [512, 512], "counts": "`f\\61m?2N3M2GJf@9W?Ig@9W?:L3N3M2N3M2N3M201O001M2N3M2N3M2F[NWBh1f=[NWBg1g=[NWBg1g=:N00000O100002N3M1O00000O1JWBWNi=i160HVN\\Bj1d=XNZBh1f=8000000000000O21010NO0000O1000O2O2N3M2N3M2FkAgNX>V1jAhNX>U1lAhNW>U1;N3N110O0N2M4M2N3M2N3M2N3M2N3M2N3MTm8"}, "image_id": 96, "id": 1540}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 93.0, 63.0, 95.0], "area": 3659, "segmentation": {"size": [512, 512], "counts": "Y4X1h>1O01O0001O01O01O01O01O0L4M4L3M0O103M3oAVNm=n100010O00010O00M3M4L3L4M4L3M3L5N11O01O0001O01OO2L3XOiBdNZ=Y1jBbN[=Y1iBdNZ=Y1iBdNZ=]1fB_N^=`1bB\\Nb=d1=O1M4K4M3M4L5J4M4Ak@NX?Ol@LX?1V]P7"}, "image_id": 96, "id": 1541}, {"iscrowd": 0, "category_id": 1, "bbox": [445.0, 113.0, 67.0, 131.0], "area": 4291, "segmentation": {"size": [512, 512], "counts": "Qgn62l?3M2N3M2M4M2N3M2N3M21O010O01O001M2N3L3N3M2N3M2M3N2NO4O01M2N3L3N3M2N2N3L3JiMaBZ2[=iMcBY2[=8M2N3M2N3M210O0N2N1O002N3L3N2N0000FbCSM_eNgA]1V>9M2N2N001N3N2N3M210O010O010O010O010O01O010O01O010OO2M2N3L3UNPBd1X>M2N3M2M4M2N3M2N3M2M4M2N2N3M2N3L3N3M2N3MW[n1"}, "image_id": 96, "id": 1543}, {"iscrowd": 0, "category_id": 1, "bbox": [215.0, 141.0, 79.0, 83.0], "area": 3398, "segmentation": {"size": [512, 512], "counts": 
"Yf[31m?2M4M2M3N3L3N3L3M310O00010O010O00010O010O00010OM4N11N1M4M2M3N3L3N3L3N3L3M3N3L3N3L3N2M4M210O00010O010O0010O0010O010O00010O01mM[Bj1d=TN^Bl1b=QNbBo1g=01jNQB8P>FRB8P>ESB8Q>ERB8P>FRB8Q>DSB8P>FRB8Q>DSB8P>FRB8P>FRB8m>L3N3LRj\\3"}, "image_id": 96, "id": 1544}, {"iscrowd": 0, "category_id": 1, "bbox": [279.0, 210.0, 77.0, 86.0], "area": 3538, "segmentation": {"size": [512, 512], "counts": "\\h[41m?3M2N2N3L3N3M2N3N1010O01M2N3M2N3M2N2CmNjAW1S>kNkAW1S>lNjAW1R>=N3M2N3M2ZBmM^=U2_BmMa=Z20O01O010O010O01O010O01O010O010OO2M2N3M2N3M2M3N3M2N3M2N2N00O13L3N3M2N3M2N3M2N3L3N3M2N3M2N3M2N2N3LQi]2"}, "image_id": 96, "id": 1545}, {"iscrowd": 0, "category_id": 1, "bbox": [357.0, 245.0, 65.0, 85.0], "area": 2712, "segmentation": {"size": [512, 512], "counts": "jib51m?3L3N3M2N3M2N3M2N3N110M2N2N3M2N3M2N3M2M4M2G`NPBc1m=_NQBc1m=:M2N3M2M4M2N3M2O11L3N3M2N3M2N3M2N3M2N3L3N3M2N3M2N2N3M2N3M2M4M2N3M2N3M2N3M2N3M2M4Mmg\\1"}, "image_id": 96, "id": 1546}, {"iscrowd": 0, "category_id": 1, "bbox": [399.0, 271.0, 83.0, 112.0], "area": 4657, "segmentation": {"size": [512, 512], "counts": "]kW63k?2N3M2N3FEk@=S?Fi@=T?:M2N3M2N3M2M4M2N3M2N3MO04M2N3M2N3M2M3O2O010O010M2N3M2N3M2G[MUCh2h<[MTCh2j<8N3M1O0O0102N2N3M2N3L3N2N3M2N3M2M4M2N3M2N3M2M4M2N2N3M2N3L3N3M2N3M2N3L3N3M2N2N3M2M4M2N3M2NRg>"}, "image_id": 96, "id": 1547}, {"iscrowd": 0, "category_id": 1, "bbox": [243.0, 288.0, 71.0, 80.0], "area": 3150, "segmentation": {"size": [512, 512], "counts": "lji33k?2N2M4M2N3M2N3L3N3O0010ON2N3M2N3M2GjNeAZ1X>hNfAZ1X>9M2N3L2O002N3M2M4O010O010O010O01O010O01O010O010O010OO2M2N3M2M3WNoAa1Y>N3M2N2M4M2N3M2N3L3N3M2N3M2M3N3M2N3M2M[fR3"}, "image_id": 96, "id": 1548}, {"iscrowd": 0, "category_id": 1, "bbox": [509.0, 310.0, 3.0, 6.0], "area": 11, "segmentation": {"size": [512, 512], "counts": "jin71m?3M2ZF"}, "image_id": 96, "id": 1549}, {"iscrowd": 0, "category_id": 1, "bbox": [477.0, 326.0, 35.0, 88.0], "area": 1571, "segmentation": {"size": [512, 512], "counts": "_l^72l?3M2N3L3N3M2N3M2N3L3N2N3M2N3L3N3M2N3M2M4M2N02010O0M3N3GSN[Bo1c=SN[BP2b=8N2N3L3N3iE"}, "image_id": 96, "id": 1550}, {"iscrowd": 0, "category_id": 1, "bbox": [319.0, 327.0, 64.0, 84.0], "area": 2625, "segmentation": {"size": [512, 512], "counts": "[lo42l?2N3M2N2M4M2N3M2N3N101O0N3L3N2N3M2N3L3N3M2N3H]NoAe1o=8M2N2N3L3N3M2N3M11N3M2N3L3N2N3M2N3L3N3M2N3M2M4M2N2N3M2M4M2N3M2N3L3N3M2N2N3L3N3M]UP2"}, "image_id": 96, "id": 1551}, {"iscrowd": 0, "category_id": 1, "bbox": [359.0, 378.0, 75.0, 86.0], "area": 3440, "segmentation": {"size": [512, 512], "counts": "bmc53j?3N3M2N3M2M4M201O001O0M3N3M2N3M2N3EjNgAY1W>jNfAY1W>9M4M2N3M0[BQN[=Q2cBQN[=Q2cBRN]=n1_BUNa=V2O010O010O01O010O010O010O01O01M2N3M2N3L3N3M2N3M4L2M4M00O1003L3N2N3M2N3M2M4M2N3M2N3M2M4M2N2N3MhcV1"}, "image_id": 96, "id": 1552}, {"iscrowd": 0, "category_id": 1, "bbox": [427.0, 433.0, 65.0, 79.0], "area": 2801, "segmentation": {"size": [512, 512], "counts": "eoe64j?2N2N3L3N3M2N3M2M4N1010OM3N3M2N3L3N2N2HbNlA`1R>bNlA`1R>8M3N2N2N2N2M3N2N2N2001O001O0O2M2N3M2M40O0010ERBbNn=\\1UBbNm=[1UBcNm=[1VBbNm=Z1>N3M2N3L3N2N3M2N3L3N3M2N3M2M4M2NRb9"}, "image_id": 96, "id": 1553}, {"iscrowd": 0, "category_id": 1, "bbox": [488.0, 457.0, 24.0, 50.0], "area": 677, "segmentation": {"size": [512, 512], "counts": "Z_d71l?3N3M2N3M2N3M2N3M2N3M2N2N3M2M4N110O010[AiN_>W1_AlNa>Y1010OcA"}, "image_id": 96, "id": 1554}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 73.0, 39.0], "area": 1575, "segmentation": {"size": [512, 512], "counts": 
"03m?1O1O2N1O1O2N1O1O2N1O1O2N1O1O2N1O1O001O1O2N1O1O2N1O1O2N1O1OO1O1O100O1O1O100O1O1O100O1O1O100O1O1O100O1O1O100O1O1O100O1O1O100O1O1O100O1O1O2O1N3M2Nl_k6"}, "image_id": 97, "id": 1555}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 125.0, 362.0, 262.0], "area": 36111, "segmentation": {"size": [512, 512], "counts": "ij]12m?3N1N2N3M2O1N3M2O1N3M2N2O2M2N2N3N1N2N2O2M2N2N3N1N2N3M2O1N3M2O0O001O01O01O0001O01O01O0001O01O0001O01O00010O0000010O0000010O00010O0000010O000010O0000010O0000010O0001O01O000XMQNaGn1_8TN_Gm1`8UN`Gi1`8YNaGd1`8]N`Ga1`8aNaG]1^8eNbGY1^8iNbGU1^8nNbGP1^8QObGm0^8UOcGh0]8ZOcGd0]8^OcGa0]8@dG=\\8EdG9l7mNiEl0[25l7ROfEk0_21j7VOeEk0a2Mk7ZObEj0d2Ij7_O`Ej0f2Ej7C^Ek0g2Aj7G\\Ej0k2\\Oj7U1VHiNj7Y1VHeNj7]1THdNk7^1SHbNn7_1QH`No7b1oG^NQ8d1mG]NS8d1kG\\NU8f1jGYNV8j1gGWNX8k1fGWNY8j1eGXNY8j1fGXNW8j1gGXNY8h1eG[N[8d1dG]N\\8c1bG`N]8`1aGbN]8`1aGbNY8lNmEd2i1bNX8lNmEc2i1gNT8`1iGjNm7X1QHSOd7o0[HZO\\7g0bHAV7a0hH@W7b0hH^OX7c0fH^OY7d0eH]OZ7e0dH\\O[7f0dHZO\\7g0bHZO]7h0aHYO_7i0_HVOd7i0ZHXOf7i0XHXOg7j0WHWOh7k0WHUOi7j0WHWOh7i0XHXOg7h0YHYOf7g0[HYOe7f0[HZOe7f0[H[OY7RMUHc3c0[OX7TMSH`3L_Lc0n2^7TMQH_3NaLa0m2a7TMmG^31cL?l2e7RMjG^32gL=i2h7d0kGeL;h2k7b0iGhL:g2o7`0eGkL:e2S8P1lGPOV8P1gGQOZ8n0eGSO\\8k0eGUO]8i0bGXO_8f0aG[Oa8b0_G_Ob8?_GAb8=^GCe8WO\\9WOXF`1=YO\\9VOYF_1;[O^9SOYFa19\\O_9QO[Fa15_Oa9oN[Fa14@c9lN\\Fb11Ae9kN\\Fc1OBg9hN\\Fe1MCo:;QEEP;9QEGQ;7oDIR;6nDJS;5lDLV;2jDNW;1iDNZ;0fD0[;OeD1];McD3^;KcD5^;JbD6`;H`D8a;G^D:c;E]D;c;E]D:d;F\\D:d;F\\D:d;F\\D:d;E]D;c;E]D;c;E]D;c;E\\D`;B`D>`;B`D>`;AaD?_;AaD?_;AaD?_;A`D?k;WOUDi0T=000000O1000O1000000000000000O1000O10000000000000O1000O1000000000005K;E:F_hQ1"}, "image_id": 97, "id": 1557}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 304.0, 113.0, 208.0], "area": 12026, "segmentation": {"size": [512, 512], "counts": "P:\\1\\2Nd81ZG1f8OXG4g8MVG5j8KTG8l8HRG9n8GPG;n8GPGO1N3M2N2N2O2M2N2N2N2O2M2N2N2N3N1N2N2N3M2N2O1N3MbSW6"}, "image_id": 97, "id": 1558}, {"iscrowd": 0, "category_id": 1, "bbox": [112.0, 395.0, 367.0, 117.0], "area": 33702, "segmentation": {"size": [512, 512], "counts": "o]h12m?;F:F9G:F:F9G5K00000O1000000000000000000O1000000000000000000O10000000000000000O1000000000000000000O10000000000]OQNUCo1k00000000000000000000000000:F0000000000000000000000c0]O00000000000000000000000000000000000000000000000000000000000000000000000000000000000\\gY6"}, "image_id": 98, "id": 1561}, {"iscrowd": 0, "category_id": 1, "bbox": [240.0, 473.0, 65.0, 39.0], "area": 1593, "segmentation": {"size": [512, 512], "counts": "f_h3:f?00000000000000000F:0000000000O100O10000O10000O100O10000O10000O100O10000O10000O100O10000O10000O100O10000O10000O100O10000O[Of0000O1WQW3"}, "image_id": 98, "id": 1562}, {"iscrowd": 0, "category_id": 1, "bbox": [348.0, 499.0, 41.0, 13.0], "area": 415, "segmentation": {"size": [512, 512], "counts": "c_^5=c?0000000001O00000000001O00000000001O00000000001O0000000000001O00000000001O000000WPm1"}, "image_id": 98, "id": 1563}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 275.0, 181.0], "area": 47179, "segmentation": {"size": [512, 512], "counts": 
"0b5^:000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000J60000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009G00000000000000000000000000000000000000000000F:000000000000000000000000000000UOk0000000000000000000000000000000000000000000000000000000000P`f3"}, "image_id": 99, "id": 1564}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 306.0, 204.0, 206.0], "area": 26860, "segmentation": {"size": [512, 512], "counts": "_n]1a1_>000000000000000000000000000000000000000000000000000000000000000000000000000000000000000kMU20000000000000000000000000000XMYMWHg2i7YMWHg2i7YMWHg2i7YMWHg2i7YMWHg2i7YMWHg2i7YMWHg2i7YMWHg2i7YMWHg2i7YMWHg2i7YMWHg2i7YMWHg2i7YMWHg2i7h200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000cJlGn2T8RMlG:h:FXE:h:FXE:h:FXE:h:FXE:h:FXE:h:FXE:h:FXE:h:FXE:h:FXE:h:FXE:h:FXE:h:FXE:h:FXE:h:FXE:h:FXE:W=00000000000000000000000000000000000000000000000oS\\3"}, "image_id": 99, "id": 1565}, {"iscrowd": 0, "category_id": 1, "bbox": [34.0, 336.0, 20.0, 15.0], "area": 300, "segmentation": {"size": [512, 512], "counts": "`Za0?a?0000000000000000000000000000000000000`eT7"}, "image_id": 99, "id": 1566}, {"iscrowd": 0, "category_id": 1, "bbox": [101.0, 417.0, 14.0, 43.0], "area": 449, "segmentation": {"size": [512, 512], "counts": "gmb1e0[?000000000ZOf00000000000000[OdSV6"}, "image_id": 99, "id": 1567}, {"iscrowd": 0, "category_id": 1, "bbox": [8.0, 106.0, 27.0, 25.0], "area": 397, "segmentation": {"size": [512, 512], "counts": "jS41m?2M4M2M3M4M2O2O01O010O01O01O01O01O010O01OM4M2M4L3N2Mb\\^7"}, "image_id": 100, "id": 1568}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 152.0, 85.0, 123.0], "area": 7638, "segmentation": {"size": [512, 512], "counts": "o4e2[=010N1M3N3QCWMfN3L3M4M2M3N3LfYe6"}, "image_id": 100, "id": 1569}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 355.0, 40.0, 68.0], "area": 1717, "segmentation": {"size": [512, 512], "counts": "Y;m1Q>3MO103L3N3O00010O010O00010O0010O010O00O2L3N3M2M3N3M2M4M2N2M4M2M4M2N2M4M2N3L3N`d[7"}, "image_id": 100, "id": 1570}, {"iscrowd": 0, "category_id": 1, "bbox": [12.0, 0.0, 55.0, 34.0], "area": 1205, "segmentation": {"size": [512, 512], "counts": "aP61m?2M3N3L3M4M2O1010O01O01O010O01O01O01O0N2M4O001O00001O001O00001O001O00001O00001O001O00M3N2M3M3N2M3N2M3N2M3M3NR`n6"}, "image_id": 101, "id": 1571}, {"iscrowd": 0, "category_id": 1, "bbox": [185.0, 0.0, 33.0, 26.0], "area": 585, "segmentation": {"size": [512, 512], "counts": "``l22k?3N2M4M2M4M2O101O001O00001O001O00001O001O0000O1M3N2M3N2M3N2N2M3NRPc4"}, "image_id": 101, "id": 1572}, {"iscrowd": 0, "category_id": 1, "bbox": [489.0, 0.0, 6.0, 3.0], "area": 11, "segmentation": {"size": [512, 512], "counts": "P`d71o?001O00001o_8"}, "image_id": 101, "id": 1573}, {"iscrowd": 0, "category_id": 1, "bbox": [217.0, 4.0, 104.0, 55.0], "area": 3093, "segmentation": {"size": [512, 512], "counts": "m`\\32l?2M4M2N3L3N2M4M2M4M2O1010O010O01O01O010O010O00010O010O0001L3010O01O01O010OO2M2N210O010O00010OM4M2M301O010O00010O010O01N11O01O010O01O01O010O01O010O01O010O01O01O010O01O01O010O01O010O01O01M2M4M2M3N3L3N3M2M3N3LT_o2"}, "image_id": 101, "id": 1574}, {"iscrowd": 0, "category_id": 1, "bbox": [80.0, 12.0, 75.0, 
61.0], "area": 2106, "segmentation": {"size": [512, 512], "counts": "aPX13k?3L30001O0`@IX?7e@LZ?=11N0010O010O00010N1010O0010Oe@@V?e010O000\\AYOP>g0mA[OS>e0jA^OW>b0fAAY>?dAD\\>9^AJb>j010O0010O0010O00010O010O00010OO2L3N2M4L3N3N01M4M2M3N3L3N02M3N3L3N2M4M2M4M2M3N3L3N3L3N]_b5"}, "image_id": 101, "id": 1575}, {"iscrowd": 0, "category_id": 1, "bbox": [412.0, 13.0, 63.0, 55.0], "area": 1944, "segmentation": {"size": [512, 512], "counts": "[Q^61m?2M4M2M4M2N2M4M2N3L3O2O00010O010O010O01O01O010O01O010OO2M2M4M2N2N30O01O010O01O010O01O010O01O01O010O\\O_AIb>4aAK_>3cAN]>NfA2Z>LiA4V>ImA7T>FoA:P>DRB;P>ATBM3N3M2M_^b0"}, "image_id": 101, "id": 1576}, {"iscrowd": 0, "category_id": 1, "bbox": [479.0, 16.0, 33.0, 32.0], "area": 649, "segmentation": {"size": [512, 512], "counts": "Ua_72k?3M4M2M4L3N2M4M20010O010O00010O00010O010O00010O00N3L3N3L3M3N3L3MZO"}, "image_id": 101, "id": 1577}, {"iscrowd": 0, "category_id": 1, "bbox": [142.0, 33.0, 72.0, 68.0], "area": 2360, "segmentation": {"size": [512, 512], "counts": "[RW21l?3N2M4M2M4M2M3N3L310O01O010O01O010O01O01O010O01O01O0N3M2M3N3L3N3L3N2M4M2N3O0010O00010O010O00010ROlANT>0nA1R>KRB4n=JTB6m=GUB:j=CZB10O010O0N2M4M2N3L3N2M4M2Mfmd4"}, "image_id": 101, "id": 1578}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 41.0, 46.0, 49.0], "area": 1419, "segmentation": {"size": [512, 512], "counts": "Z1Q1o>10O00N3O010O01O01O010O01O01O010M2N2010O01O01O010O01OTOZA>g>^O]Aa0c>]O_Ad0a>XOcAg0]>WOeAj0h>01O010WOl@d0X?1O01O0N3M2M3N3L3N3LlmX7"}, "image_id": 101, "id": 1579}, {"iscrowd": 0, "category_id": 1, "bbox": [56.0, 52.0, 30.0, 28.0], "area": 495, "segmentation": {"size": [512, 512], "counts": "URl02l?3L3N2M4M2M4M200010O010O00010O010O00010O010M2N2M4M2M4M2M3NVnd6"}, "image_id": 101, "id": 1580}, {"iscrowd": 0, "category_id": 1, "bbox": [222.0, 59.0, 51.0, 68.0], "area": 2022, "segmentation": {"size": [512, 512], "counts": "^S_32l?3L3N3M2M4M2M3N3M2M4M2M3N3M2M4M2M4M2N2M4M2O20O00010O010O010O00010O010M2N2N3L3N3L3^ObA@a>=aAAa>=bA@a>=aA@b>=c0M2M3N3Mk]g3"}, "image_id": 101, "id": 1581}, {"iscrowd": 0, "category_id": 1, "bbox": [404.0, 78.0, 67.0, 43.0], "area": 1824, "segmentation": {"size": [512, 512], "counts": "TSZ63k?2M4M2N3M2N2N3M2M4M20010O010O0010O0010O010O00010O010O0010OO101O001N10001O0O2O001O0O101O0O10N2M3N1O2M3O10O1000O01000O10004L3L5L4L4L3Lm\\d0"}, "image_id": 101, "id": 1582}, {"iscrowd": 0, "category_id": 1, "bbox": [276.0, 79.0, 37.0, 31.0], "area": 675, "segmentation": {"size": [512, 512], "counts": "RSZ41l?3M4M2M4M2M3N3N110O00010O010O00010O010O00010O010O00010O01O0N2M4M2M4L3N2MY]S3"}, "image_id": 101, "id": 1583}, {"iscrowd": 0, "category_id": 1, "bbox": [485.0, 80.0, 27.0, 35.0], "area": 555, "segmentation": {"size": [512, 512], "counts": "Scb71m?2N2M4M2M4M2N3L30010O0010O0010O010O0j@ZOR?k00O0010O0010O01XM"}, "image_id": 101, "id": 1584}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 97.0, 33.0, 42.0], "area": 1028, "segmentation": {"size": [512, 512], "counts": "Q3S1m>001O01O01O01O0001O01O01O01O0001O01O01O01O0001OO2L3L4M4K4L4M3L5Li\\_7"}, "image_id": 101, "id": 1585}, {"iscrowd": 0, "category_id": 1, "bbox": [98.0, 105.0, 23.0, 20.0], "area": 268, "segmentation": {"size": [512, 512], "counts": "cSa13k?2M4M2M310O0010O010O0010O0010O0010O00M4M2N3Lb\\S6"}, "image_id": 101, "id": 1586}, {"iscrowd": 0, "category_id": 1, "bbox": [52.0, 107.0, 51.0, 39.0], "area": 1044, "segmentation": {"size": [512, 512], "counts": "mSj03j?3N2N3M2M4M2N3M2010O00010O010O010O0010O0010O010O010O00010O010O010O0010O0010O010O0010OM3N3M2M4M2N3L3NU\\\\6"}, "image_id": 101, "id": 1587}, {"iscrowd": 0, 
"category_id": 1, "bbox": [351.0, 123.0, 94.0, 74.0], "area": 2736, "segmentation": {"size": [512, 512], "counts": "[d_54j?2M4M2N2M4M210O0UA]OY>d0cA_O^>`0`AC_>=^AFc>:ZAJd>6ZALg>f001O010O01O01O010O010N1N2M4M2N30O0010O0010O010O00010O010O0010O[OeAE[>8hAIX>4kAKU>2nAOR>NQB1o=MSB3m=JVB7j=FYB9g=D\\B=d=@_B?a=_OaBa0_>1N110O0010O0010O0010O0010O010O0010O0010O0010O0010O010O0010O001M2N2M4M2N3L3N^ZQ1"}, "image_id": 101, "id": 1588}, {"iscrowd": 0, "category_id": 1, "bbox": [140.0, 143.0, 35.0, 31.0], "area": 580, "segmentation": {"size": [512, 512], "counts": "mTV21m?2M3N3M2M4M21Od@BV??g@CZ?b0O01O01O010O01O01O010O010O00010O010OO2L3N2M4M2N3L3NW[X5"}, "image_id": 101, "id": 1589}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 147.0, 3.0, 10.0], "area": 21, "segmentation": {"size": [512, 512], "counts": "c4:f?N3K`[n7"}, "image_id": 101, "id": 1590}, {"iscrowd": 0, "category_id": 1, "bbox": [116.0, 154.0, 13.0, 15.0], "area": 120, "segmentation": {"size": [512, 512], "counts": "UUj11l?4L3M3N3O000010O0M3M4L3MW[o5"}, "image_id": 101, "id": 1591}, {"iscrowd": 0, "category_id": 1, "bbox": [31.0, 159.0, 25.0, 20.0], "area": 298, "segmentation": {"size": [512, 512], "counts": "Xe?3k?2M4M2N3O00010O0010O0010O0010O010O0010O00N3L3N3MkjS7"}, "image_id": 101, "id": 1592}, {"iscrowd": 0, "category_id": 1, "bbox": [162.0, 185.0, 71.0, 45.0], "area": 1748, "segmentation": {"size": [512, 512], "counts": "aVa21m?3L3N3M2M4M2M3N3M2M40O01O01O010O01O010O000N3M210OM4M20001O010O010O01O01O012M01O010O01O01O010O010O01O01O010O010O00010O010O00O2L3N3M2M3N3L3N3McY[4"}, "image_id": 101, "id": 1593}, {"iscrowd": 0, "category_id": 1, "bbox": [118.0, 195.0, 29.0, 23.0], "area": 371, "segmentation": {"size": [512, 512], "counts": "^Vk12l?3L3N3L3O1010O01O01O010O01O01O010O01O01O010O01OM4M2M4M2MgYf5"}, "image_id": 101, "id": 1594}, {"iscrowd": 0, "category_id": 1, "bbox": [343.0, 197.0, 69.0, 51.0], "area": 1846, "segmentation": {"size": [512, 512], "counts": "Pg[52k?3N2M4M2N3L3N2M4M2N3L31O010O01O01O010O01O010O01O01O010O01O010O01O01O010TOTAb0l>\\OVAe0S?O0010O0010O01N1M3N3M2M4N100010O010O0O2L3N2M4M2M4M2M3N3M2M4M2M\\ia1"}, "image_id": 101, "id": 1595}, {"iscrowd": 0, "category_id": 1, "bbox": [423.0, 203.0, 27.0, 36.0], "area": 615, "segmentation": {"size": [512, 512], "counts": "gfc64j?2M3N3f@Ek>=SAEj>?SADl>9o@IQ?d00O010O010O00010O010O00010@QAMo>1TANm>OUANn>OVANl>0VAMn>0oin0"}, "image_id": 101, "id": 1596}, {"iscrowd": 0, "category_id": 1, "bbox": [21.0, 205.0, 54.0, 39.0], "area": 1081, "segmentation": {"size": [512, 512], "counts": "Pg:3k?2N2M4M2N3L3N3M2O110O0010O010O010O00010O010O010O00010O010O010O000O2O010O0010O01[Oi@`0[?010O01O01O01O0N3L3N3M2M3NmXj6"}, "image_id": 101, "id": 1597}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 208.0, 8.0, 27.0], "area": 148, "segmentation": {"size": [512, 512], "counts": "`6k0U?01O0N3Ai@1Y?Mi@0Z?Mj@0Y?Mi@1fXl7"}, "image_id": 101, "id": 1598}, {"iscrowd": 0, "category_id": 1, "bbox": [236.0, 217.0, 14.0, 19.0], "area": 139, "segmentation": {"size": [512, 512], "counts": "XWf31l?3N3M2M3N3L301OM4M2M3N3M2MXiR4"}, "image_id": 101, "id": 1599}, {"iscrowd": 0, "category_id": 1, "bbox": [135.0, 225.0, 75.0, 55.0], "area": 2246, "segmentation": {"size": [512, 512], "counts": "lgS24j?2M4M2M3N3L3N3L3N2M4M2010O00010O010O00010O010O0010O0010O010O00010OTAnNj>T11O01O010OO2O000010O010O00010O010O00010O010O00010O010O0001N1N3L3N3L3N2M4M2M4M2M3NXhf4"}, "image_id": 101, "id": 1600}, {"iscrowd": 0, "category_id": 1, "bbox": [67.0, 236.0, 31.0, 37.0], "area": 691, "segmentation": {"size": [512, 512], "counts": 
"WhQ12k?4M2M4M2M3N3L3N3L3N2N3O010O00010O010O00O2L3N3L3N2M4M2M4M2M3N_h^6"}, "image_id": 101, "id": 1601}, {"iscrowd": 0, "category_id": 1, "bbox": [320.0, 247.0, 71.0, 56.0], "area": 2256, "segmentation": {"size": [512, 512], "counts": "gXP53k?2M4M2M4M2N3L3N2M4M2N3O00010O010O00010O010O00010O010O0010O00010O010O0001N1O2M2O1N3M2O2M2O2M200010O10OO2M3M2M4M2M4M3L3N3L3N3M3M2N3N1N3N2M2O2MmWl1"}, "image_id": 101, "id": 1602}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 256.0, 2.0, 3.0], "area": 4, "segmentation": {"size": [512, 512], "counts": "P83m?NRhn7"}, "image_id": 101, "id": 1603}, {"iscrowd": 0, "category_id": 1, "bbox": [4.0, 257.0, 26.0, 26.0], "area": 424, "segmentation": {"size": [512, 512], "counts": "aX23k?2M4M2M4M2M3O20O01O01O010O01O01O010O000M4M2M4M2M3Njg`7"}, "image_id": 101, "id": 1604}, {"iscrowd": 0, "category_id": 1, "bbox": [380.0, 274.0, 13.0, 17.0], "area": 122, "segmentation": {"size": [512, 512], "counts": "nXn52l?2M4L3M4N11O01ON3L3M4L3N]Wk1"}, "image_id": 101, "id": 1605}, {"iscrowd": 0, "category_id": 1, "bbox": [186.0, 283.0, 13.0, 12.0], "area": 106, "segmentation": {"size": [512, 512], "counts": "RYm22k?4L3O101O01O01O01O0N3M2MUWl4"}, "image_id": 101, "id": 1606}, {"iscrowd": 0, "category_id": 1, "bbox": [200.0, 288.0, 17.0, 13.0], "area": 118, "segmentation": {"size": [512, 512], "counts": "UYT32l?3L300010O010O010O0010O0010M2N3MlVc4"}, "image_id": 101, "id": 1607}, {"iscrowd": 0, "category_id": 1, "bbox": [132.0, 289.0, 15.0, 13.0], "area": 123, "segmentation": {"size": [512, 512], "counts": "ZYR21m?2M3N3M200010O010O00001L3N2MoVf5"}, "image_id": 101, "id": 1608}, {"iscrowd": 0, "category_id": 1, "bbox": [150.0, 289.0, 60.0, 88.0], "area": 2719, "segmentation": {"size": [512, 512], "counts": "][[21m?2M3N3L3N3M2M3N3L3N3L3N2N3L3]OkNYBX1d=kNXBX1e=kNYBX1d=jNYBY1d=kNYBW1e=d0L3N3M2M3O2O010O01O0N2N3M2M4M2M3N3L3N3L3N2N3L3N3L3N2M4M2M4M2N3L3N2M4M2M4M2N2M4Mcff4"}, "image_id": 101, "id": 1609}, {"iscrowd": 0, "category_id": 1, "bbox": [385.0, 291.0, 34.0, 30.0], "area": 589, "segmentation": {"size": [512, 512], "counts": "eiP61m?2M4M2N3L3N2M4N1010O0010O010O0010O0010O0010O0010O010ON2N3L3N3M2M3NeV^1"}, "image_id": 101, "id": 1610}, {"iscrowd": 0, "category_id": 1, "bbox": [81.0, 294.0, 51.0, 56.0], "area": 1680, "segmentation": {"size": [512, 512], "counts": "`jX11m?3L3M3M4M2M301O0O2L3M3M4M2M3M4L3N2M4O0010O00010O00010O0010O0010O00010M2N2M4L3M4M2M3M4L3N2M4L3M3N3L3Mbfm5"}, "image_id": 101, "id": 1611}, {"iscrowd": 0, "category_id": 1, "bbox": [448.0, 298.0, 55.0, 56.0], "area": 1959, "segmentation": {"size": [512, 512], "counts": "mYP73m?3M3L5L3M3J6M3L5L3M3M2OO0O010000O0KiNcAW1]>lN`AT1_>610000O01000O010000O01000O01000O010000O013M3L10004L3L4M3M4K4M3M3L4M4L_U4"}, "image_id": 101, "id": 1612}, {"iscrowd": 0, "category_id": 1, "bbox": [296.0, 303.0, 67.0, 49.0], "area": 1913, "segmentation": {"size": [512, 512], "counts": "^Zd42k?3N2M4M2M4M2N2O2O010O00010O010O010O00010O0O2M2M3N3M2M4M2M301O0010O010O0010O0010O001L3N2M4N1010O00010O010O01O01ON3L3N3M2M3N3L3N3L3N3MTVZ2"}, "image_id": 101, "id": 1613}, {"iscrowd": 0, "category_id": 1, "bbox": [382.0, 316.0, 39.0, 30.0], "area": 626, "segmentation": {"size": [512, 512], "counts": "`Zo53k?2M3M4M2M4M2M3N3O010O00010O00010O0010O000O2M2M4M2M3N3L30010O00010O010O000M4MkU]1"}, "image_id": 101, "id": 1614}, {"iscrowd": 0, "category_id": 1, "bbox": [420.0, 343.0, 29.0, 31.0], "area": 528, "segmentation": {"size": [512, 512], "counts": "][b61m?2N3L3N3M2M3N3M2M4N110O01O010O01O010O01N1N3M2M3N3M2M4M2MTUo0"}, "image_id": 101, "id": 1615}, {"iscrowd": 0, 
"category_id": 1, "bbox": [358.0, 350.0, 16.0, 15.0], "area": 150, "segmentation": {"size": [512, 512], "counts": "W[c52l?2M4M2N210O0010O0010O01O0N2M4ModT2"}, "image_id": 101, "id": 1616}, {"iscrowd": 0, "category_id": 1, "bbox": [262.0, 351.0, 73.0, 46.0], "area": 1992, "segmentation": {"size": [512, 512], "counts": "d[S42k?3N3L3M4M2M3N3L310O01O01O010O01O01O010O01O01O010O01OO2M2M3N30O01O01O01O010O01O01O010O01O01O010O01O01O010O0N2M4M2010M2M3N3L3N3L3N2M4M2M4M2M3N3L3NkTh2"}, "image_id": 101, "id": 1617}, {"iscrowd": 0, "category_id": 1, "bbox": [325.0, 366.0, 26.0, 39.0], "area": 535, "segmentation": {"size": [512, 512], "counts": "]lR52k?3N3M2M4M2N3M2M3N3M2N3L3N3NN4M2N3M2M3N3M2N3L3N3M3M^T`2"}, "image_id": 101, "id": 1618}, {"iscrowd": 0, "category_id": 1, "bbox": [404.0, 372.0, 34.0, 53.0], "area": 834, "segmentation": {"size": [512, 512], "counts": "j\\Z6173[?0b@2[?1b@3[?:M3N3L3N3L3N1N0100O04MO011O3L3N2M4M2O20O0010N1N3M2M4M2M3N3L3NWdT1"}, "image_id": 101, "id": 1619}, {"iscrowd": 0, "category_id": 1, "bbox": [245.0, 373.0, 35.0, 31.0], "area": 625, "segmentation": {"size": [512, 512], "counts": "Ylj31l?3N3L3N3L3N2M4M210O00010O0100O0010O010O00010O010O0001N1N3L3N2M4M2M4MQdc3"}, "image_id": 101, "id": 1620}, {"iscrowd": 0, "category_id": 1, "bbox": [270.0, 391.0, 43.0, 61.0], "area": 1569, "segmentation": {"size": [512, 512], "counts": "h]W42l?3ZONYA5e>MYA6c>NYA5e>MYA5`>B[A<26_>2^A0`>j0L3N2N3O010O0010O0010O010O00010OM4M2N2M4M2M4M2M3N3L3N3M2M3N3L3N3L3N3L_SS3"}, "image_id": 101, "id": 1621}, {"iscrowd": 0, "category_id": 1, "bbox": [492.0, 401.0, 20.0, 28.0], "area": 380, "segmentation": {"size": [512, 512], "counts": "S]f72k?3N3L3N3L3N3M2O1010O010O00010O010O0001ZC"}, "image_id": 101, "id": 1622}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 406.0, 46.0, 91.0], "area": 2270, "segmentation": {"size": [512, 512], "counts": "jQ110O010O0010O0010O010O010O010O01M2N3M2M4M2N2N3M2N3M2N3M2M`bn1"}, "image_id": 101, "id": 1624}, {"iscrowd": 0, "category_id": 1, "bbox": [404.0, 426.0, 47.0, 69.0], "area": 1860, "segmentation": {"size": [512, 512], "counts": "m]Z62l?2WANb=5[BMc=5ZBOb=5[BMc=5[BNb=5[BMc=5ZBOb=4\\BNc=4ZBOe=1YB1h=OTB4l=LRB7n=IoA9Q>l0O010O010O01O010O01O010ON3M2N3M2]NfA]1a>M2N3M2N3L3N2N3M2M4M2N3M2M4M2N2NURn0"}, "image_id": 101, "id": 1625}, {"iscrowd": 0, "category_id": 1, "bbox": [71.0, 440.0, 47.0, 68.0], "area": 1623, "segmentation": {"size": [512, 512], "counts": "RoS12l?3`@MP?5n@Mo>5o@Mo>6n@MP?4o@Nn>5o@Mi>D[AP1b>SO\\An0c>SO[AP1b>9M2O2M2N3M3N0O1O01O01O02N3N2M2N3N1N3M3M2O2M2N3N1N3M3N1N3M2O2M3M2O2M2N3M3NjaT6"}, "image_id": 101, "id": 1626}, {"iscrowd": 0, "category_id": 1, "bbox": [470.0, 462.0, 40.0, 50.0], "area": 1482, "segmentation": {"size": [512, 512], "counts": "k_[72e?9G90001O000001O00H8G9G9N3O000001O00000001O00000001O000000000000001O0L4H8G9G9Hga0"}, "image_id": 101, "id": 1627}, {"iscrowd": 0, "category_id": 1, "bbox": [28.0, 463.0, 28.0, 38.0], "area": 670, "segmentation": {"size": [512, 512], "counts": "P_>2:OW?3g@OV?5f@OMMU?6l@4R??L3N3M2O11L30010O010O001N1M4M2N2N3L3N3M2N3L3N2N[aS7"}, "image_id": 101, "id": 1628}, {"iscrowd": 0, "category_id": 1, "bbox": [132.0, 490.0, 43.0, 22.0], "area": 578, "segmentation": {"size": [512, 512], "counts": "n_R22l?2N2M30000001O001O00M3N2M3N2N2M3001O001O00001O001O00001O001O001O00001O001O000N3L3N3L^PX5"}, "image_id": 101, "id": 1629}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 499.0, 17.0, 13.0], "area": 148, "segmentation": {"size": [512, 512], "counts": "oom21l?3N2N2M3N2001O001O00001O001N1M4MZ`i4"}, "image_id": 101, "id": 1630}, {"iscrowd": 
0, "category_id": 1, "bbox": [21.0, 510.0, 4.0, 2.0], "area": 5, "segmentation": {"size": [512, 512], "counts": "oo:1n?11O00QPc7"}, "image_id": 101, "id": 1631}, {"iscrowd": 0, "category_id": 1, "bbox": [328.0, 0.0, 45.0, 20.0], "area": 590, "segmentation": {"size": [512, 512], "counts": "SPT53j?30001O00001O00001O00001O001O0_@HZ?8d@J\\?=01O00001O0000M3N21O00001O00001O00001O0000O1M3M3M3M3N2MS`U2"}, "image_id": 103, "id": 1632}, {"iscrowd": 0, "category_id": 1, "bbox": [506.0, 50.0, 6.0, 9.0], "area": 30, "segmentation": {"size": [512, 512], "counts": "jQm71m?2O1N2O1N2^N"}, "image_id": 103, "id": 1633}, {"iscrowd": 0, "category_id": 1, "bbox": [59.0, 58.0, 234.0, 179.0], "area": 15939, "segmentation": {"size": [512, 512], "counts": "acm01m?3N2M2O2M3N1N3N1N3N2M2N3N2M2O2M3N1N3N1N3N2M2N3N2M2O2M3N1N3N1N3M3N1N3N2N11000O0100O0100O01000O01000O0100O0100O01000O01000O01000O010O01000O01000O010O10O10O01000O01000O010O10O10O01000O010O10O10O10O10O01000O010O10O10O10O10O01000O010O10O10O10O10O010O10O10O10O10O10O10O010O10O10O10O10O10O10O010O10O10O10O10O10O010O10O10O10O10O10O10O01000O010O10O10O01000O01000O010O10O10O01000O01000O010O10O10O001N2M2O2M3N1N3N11000O01000O010O10O10O0O2M3M2O2M3N1N3N1N3M3UO[A8f>F\\A8g>F[A8g>E\\A8f>G[A8g>E\\A8g>E[A9X?N2M2O2MjY]3"}, "image_id": 103, "id": 1634}, {"iscrowd": 0, "category_id": 1, "bbox": [263.0, 128.0, 49.0, 37.0], "area": 887, "segmentation": {"size": [512, 512], "counts": "[dS42l?2O2M3M2O2M3O010O10O10O10O10N2M2O2M3O01000O01000O01000O01000O01000O01000O01N2N1N3N2M2O2M3N1N3N2M^kS3"}, "image_id": 103, "id": 1635}, {"iscrowd": 0, "category_id": 1, "bbox": [428.0, 225.0, 84.0, 141.0], "area": 7068, "segmentation": {"size": [512, 512], "counts": "dXf61l?4M2M4M2M3N3M2M4M2M3N3L3N3L3N2M4M2N3L3N2N30O010OaCVNh:j1VEXNj:i1RE[Nn:d1PE^NP;c1mD`NS;_1jDdNV;]1gDeNY;[1dDiN\\;V1bDlN^;U1^DoNb;P1\\DROd;o0XDTOh;l0VDWOj;h0SD[Om;f0PD]OPCTA`0j>BTA`0j>=M2N3O0M4M20001O001O001O001O000011N0M4L3N2M4L3N3L3N2M4L3Bg@3[?Kg@2f?Lc_n4"}, "image_id": 104, "id": 1638}, {"iscrowd": 0, "category_id": 1, "bbox": [231.0, 0.0, 20.0, 8.0], "area": 89, "segmentation": {"size": [512, 512], "counts": "P`c31o?001O001O00001O001O001O001O00001OO1N2NR`R4"}, "image_id": 104, "id": 1639}, {"iscrowd": 0, "category_id": 1, "bbox": [284.0, 0.0, 62.0, 33.0], "area": 1740, "segmentation": {"size": [512, 512], "counts": "`P^48b?6I7M300000001O000000004L001O00000000001O00000000001OM30000001O0000000000001O00000000001O000000000010O00000000001O0I7J6J6JTPc2"}, "image_id": 104, "id": 1640}, {"iscrowd": 0, "category_id": 1, "bbox": [371.0, 0.0, 7.0, 3.0], "area": 13, "segmentation": {"size": [512, 512], "counts": "P`i51o?001O00001OOQPS2"}, "image_id": 104, "id": 1641}, {"iscrowd": 0, "category_id": 1, "bbox": [230.0, 11.0, 45.0, 30.0], "area": 739, "segmentation": {"size": [512, 512], "counts": "PQc33k?2N3L3N3M2N2M4M2N3N110O01O01ON3M2N3L3O2O01O010O01O010O01O010O01O010O01O010O01O0N3L3N2N3LY_f3"}, "image_id": 104, "id": 1642}, {"iscrowd": 0, "category_id": 1, "bbox": [209.0, 43.0, 103.0, 62.0], "area": 2824, "segmentation": {"size": [512, 512], "counts": "kaX32l?2N3L3N3]@E_?>0010O010M2010O00010O010O01O010O01O010O01O010O01O010O01O010O01O01O010O010O01O01O010O010O010O00010O010O0010O00N3M2M4M2M4M2N2M4M2N3L3O201N10O01O010O01O010O01O010O01O01O010O01O0mNgA<]>AfA<\\>AgA=[>AgA<]>@gA=[>AgA=\\>@gAM2M4M2N3M2M3N3M2M4M2N3L3N3M2N[Zd6"}, "image_id": 104, "id": 1646}, {"iscrowd": 0, "category_id": 1, "bbox": [84.0, 191.0, 53.0, 59.0], "area": 1757, "segmentation": {"size": [512, 512], "counts": 
"[WZ11m?3M2M3N3M2O2O001L3N2N3FXOWAk0g>XOVAk0g>8M4M2N2M4M210O010O01O01O010O010O010O0001O0M4M2N3L3N2N3L3N3M2M4M2N2M4M2N3L3N3MdYk5"}, "image_id": 104, "id": 1647}, {"iscrowd": 0, "category_id": 1, "bbox": [214.0, 214.0, 29.0, 29.0], "area": 515, "segmentation": {"size": [512, 512], "counts": "ZW[32l?2N3L3N3L3N2M4M2O2O01O01O01O010O01O01O001L3N2M4M2M4M2M3NUYV4"}, "image_id": 104, "id": 1648}, {"iscrowd": 0, "category_id": 1, "bbox": [131.0, 219.0, 53.0, 57.0], "area": 1443, "segmentation": {"size": [512, 512], "counts": "ShQ22m?1N3N2M2O2M3N1N3N2M2O2M3O010O1N1N3N2N1N3N2M2O2N1N10O010O010O100O3N1N3N2M2O2M3N1N3N2M2O2M3N1N3N2N1N3N2M3NhhS5"}, "image_id": 104, "id": 1649}, {"iscrowd": 0, "category_id": 1, "bbox": [3.0, 223.0, 30.0, 40.0], "area": 629, "segmentation": {"size": [512, 512], "counts": "Ph11m?2M4M2M3N3L3N3L3N3M2M3N3L3O20O0O2M2N2M4M2M4M2N2M4M2M4M2N3LmX_7"}, "image_id": 104, "id": 1650}, {"iscrowd": 0, "category_id": 1, "bbox": [186.0, 236.0, 50.0, 59.0], "area": 1551, "segmentation": {"size": [512, 512], "counts": "^Xm23l?3N3L3N3L3N3L3N3L3N00O010O01N2M2O2M2O2M3N1O2M2O2M30O2O0O2O000O2O0O2O0N2M4M2M4M2M3N3L3N3L3N2M4M2M4M\\hY4"}, "image_id": 104, "id": 1651}, {"iscrowd": 0, "category_id": 1, "bbox": [58.0, 244.0, 29.0, 31.0], "area": 509, "segmentation": {"size": [512, 512], "counts": "ZXm01l?4M2N3M2M3N3M2N3L3O20O010O0010O010O00N3M2N3L3N3M2N2M4M2NVXd6"}, "image_id": 104, "id": 1652}, {"iscrowd": 0, "category_id": 1, "bbox": [234.0, 261.0, 52.0, 58.0], "area": 1623, "segmentation": {"size": [512, 512], "counts": "`Ye32l?2N2M4M2M4M2M4M2N2O20O010O00010ON3L3N2M4M2N3L3N3L3N201O010O00010ON3L3N2M4M2M4M2N3L3N2M4M2M4M2N2M4M2M4Mcg`3"}, "image_id": 104, "id": 1653}, {"iscrowd": 0, "category_id": 1, "bbox": [108.0, 276.0, 16.0, 18.0], "area": 180, "segmentation": {"size": [512, 512], "counts": "oXf13j?3N3M2M4N100010O010O0M4M2N2M4MZgQ6"}, "image_id": 104, "id": 1654}, {"iscrowd": 0, "category_id": 1, "bbox": [152.0, 302.0, 56.0, 66.0], "area": 1992, "segmentation": {"size": [512, 512], "counts": "P[\\22l?3L3N2M4M2N3L3N3L3N2N3L3N3L3N3O01O01O010O010O01O01L3N3M2M4M2N2M4NO2N3L3N3M2M3N3M2M4M2N3L3N2N3L3N3M2M4M2N2M4M2M\\fg4"}, "image_id": 104, "id": 1655}, {"iscrowd": 0, "category_id": 1, "bbox": [416.0, 304.0, 25.0, 21.0], "area": 311, "segmentation": {"size": [512, 512], "counts": "lY`61m?3L3N2N3L301O010O0010O0010O010O0010O001M2M4M2N2M\\VS1"}, "image_id": 104, "id": 1656}, {"iscrowd": 0, "category_id": 1, "bbox": [391.0, 325.0, 53.0, 57.0], "area": 1564, "segmentation": {"size": [512, 512], "counts": "ZkS63m?1N3N2M3BESA=k>ERA=l>FQA=m>EQA=l>>M2OO010O010O01O0IlNdAT1[>oNbAQ1_>QO_An0a>90O010O010O01O01O010O010O2O2M2N3N2M3N1N3N2M3N2M2N3N2M3N2M2O2M3NjdQ1"}, "image_id": 104, "id": 1657}, {"iscrowd": 0, "category_id": 1, "bbox": [2.0, 326.0, 22.0, 28.0], "area": 346, "segmentation": {"size": [512, 512], "counts": "kZ11l?4M2N3L3N3M2M3N3M210O010OO2M2N2M4M2N3L3N3Mfec7"}, "image_id": 104, "id": 1658}, {"iscrowd": 0, "category_id": 1, "bbox": [193.0, 332.0, 54.0, 62.0], "area": 1789, "segmentation": {"size": [512, 512], "counts": "akP32l?2N3L3N0O3N2M4M2O110O0N3M2M301O0O2O000O2N1\\AmNY>V1dAlNZ>V1cAnNY>^1O2O01O01O010O01OO2N1010O0QOkA3U>KnA5R>GQB9o=ETB;l=AWB?i=_OYBb0f=\\O]Bc0d=YO_Bg0a=WObBf0]>M3N3L3N3L3N2M4MaTT4"}, "image_id": 104, "id": 1659}, {"iscrowd": 0, "category_id": 1, "bbox": [443.0, 345.0, 56.0, 75.0], "area": 2255, "segmentation": {"size": [512, 512], "counts": 
"^lm62l>1RB0m=1QB2l=1RB1l=1lAIA8b>0kAJB8`>1lA7R>KkA7S>LkA6S>KkA8R>KlA7R>KjA8U>k0O1O010000O01000O010000O0O2M3N1N3N\\OWBTOg=l0[BROd=n0_BPOa=P1`BnNb=l0YBgN8;a=k0eBSO^=l0cBQO_=P1aBnNa=R1^BlNe=Q1^BmNc=R1g0N2M2O2M3N1N3N2N2M2O2M3N1N3N2M2OiT6"}, "image_id": 104, "id": 1660}, {"iscrowd": 0, "category_id": 1, "bbox": [61.0, 350.0, 27.0, 26.0], "area": 424, "segmentation": {"size": [512, 512], "counts": "_kn02k?4M2N2M4M2N3L300010O010O00010O010O000N3M2N3L3N3L3N2Nmdc6"}, "image_id": 104, "id": 1661}, {"iscrowd": 0, "category_id": 1, "bbox": [112.0, 353.0, 49.0, 70.0], "area": 1940, "segmentation": {"size": [512, 512], "counts": "a\\h12k?3N3M2TOHmA
GmA;Q>GlA=P>FnA<
GmA;Q>GlA=Q>FlADiA`0V>@hAb0Y>b0010O01O010O01OgA]NU>g10010O010O01O01O010O01YNjAc1U>[NnAk0MLZ>1iAOX>NkA2T>LnA4R>IRB7n=FTB:l=CWB=j=@YB`0f=^O\\Ba0e=\\O_Ba0a>M3N3L3N3M2MaS_5"}, "image_id": 104, "id": 1662}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 368.0, 44.0, 62.0], "area": 1568, "segmentation": {"size": [512, 512], "counts": "Y<49:k>HRA;a>AbA6K:b>BaA5L;`>CaAl0]>VOaAl0]>UObAl0\\>>N2N2M2O2O100O01N2N2M2O2N2M3N1O2M3N1O2M3N2N1N3N2N2M2O2N2M3N1O2M3N2N1N3N2NjcY7"}, "image_id": 104, "id": 1663}, {"iscrowd": 0, "category_id": 1, "bbox": [45.0, 386.0, 52.0, 71.0], "area": 1796, "segmentation": {"size": [512, 512], "counts": "lmf02k?4M2N3L3N3L3N2N3L3N3M2M3N3L3N3M2M4M2M3N30O010O00010M2M4M2N0O100O10O10O2O3L3N3M2M4M2N2M4M2M4M2N3L3N2M4MiS_6"}, "image_id": 104, "id": 1664}, {"iscrowd": 0, "category_id": 1, "bbox": [499.0, 390.0, 13.0, 21.0], "area": 191, "segmentation": {"size": [512, 512], "counts": "bli74j?2M4M2N2M4O0010O0010O010fC"}, "image_id": 104, "id": 1665}, {"iscrowd": 0, "category_id": 1, "bbox": [167.0, 396.0, 62.0, 51.0], "area": 1644, "segmentation": {"size": [512, 512], "counts": "Xmc22l?2N3L3N2M4M2N3L3N3M2M3N3O010O01O01O010O01O010O01O01O010O010O00010O010O000SOSAh0m>TOVAl0P?0O01O010O01O01O010O01O01VOo@c0W?10O000N3L3N3M2M4M2NfR]4"}, "image_id": 104, "id": 1666}, {"iscrowd": 0, "category_id": 1, "bbox": [354.0, 400.0, 63.0, 57.0], "area": 1946, "segmentation": {"size": [512, 512], "counts": "f]a51l?3N3L3N3L3N2M4M201O01O010O01O01O010O010O00010ON3M2M4M2M3N3L3N3L3N210O010O00010O010O00010O01M2M4M2M3N3L3N3M2M3N3L3N3L3N2M4M2MWS_1"}, "image_id": 104, "id": 1667}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 404.0, 28.0, 28.0], "area": 471, "segmentation": {"size": [512, 512], "counts": "Wm]11l?4M2M3N3M2M4M2N210O0010O010O00010O010O001L3N2M4M2M4M2NVST6"}, "image_id": 104, "id": 1668}, {"iscrowd": 0, "category_id": 1, "bbox": [440.0, 413.0, 54.0, 45.0], "area": 1337, "segmentation": {"size": [512, 512], "counts": "n]l62l?3L3N3L3N2N3L3N3L3N3L3N2M4N110O010O01O01O010O01M2N3L3N2N3L3O20O010O0010O1O10O010O0010O0010OO2L3N2N3L3N3M2Ncb8"}, "image_id": 104, "id": 1669}, {"iscrowd": 0, "category_id": 1, "bbox": [151.0, 431.0, 28.0, 25.0], "area": 412, "segmentation": {"size": [512, 512], "counts": "nm[24j?2N3L3N2N3L3O2O01O01O010O01M201O01O01@f@9Y?Ei@;_?O01O01M2N3L3NTRV5"}, "image_id": 104, "id": 1670}, {"iscrowd": 0, "category_id": 1, "bbox": [89.0, 436.0, 55.0, 58.0], "area": 1617, "segmentation": {"size": [512, 512], "counts": "kn\\12l?2N3N2M2O2M3N1N300O0100O01M3M2FXO[Aj0c>WO[Al0b>WO\\Aj0c>XOZAk0c>;M2O2M2O2O10O10O0N10O012M3M2O2M2O2M3N1N3M3N1N3N2M2O2M3M2O2M2O2M3N1N3Moag5"}, "image_id": 104, "id": 1671}, {"iscrowd": 0, "category_id": 1, "bbox": [136.0, 461.0, 22.0, 27.0], "area": 349, "segmentation": {"size": [512, 512], "counts": "Q_T22l?2M4M2N3M2N3L3N2N3O010O0001M2M4M2N2M4M2M4M`a`5"}, "image_id": 104, "id": 1672}, {"iscrowd": 0, "category_id": 1, "bbox": [149.0, 465.0, 51.0, 47.0], "area": 1417, "segmentation": {"size": [512, 512], "counts": "hoZ23j?3M3N3L300a@C[?a0001O001O0000O1M3M3N2M3N2M3N2M3N2M3M30000001O001O00001O001O000M4M2M3N3L3N3L3N2M4M2M3N3L3NXak4"}, "image_id": 104, "id": 1673}, {"iscrowd": 0, "category_id": 1, "bbox": [459.0, 469.0, 13.0, 14.0], "area": 119, "segmentation": {"size": [512, 512], "counts": "mnU72k?4M2N3N10010O010O00N3L3NZac0"}, "image_id": 104, "id": 1674}, {"iscrowd": 0, "category_id": 1, "bbox": [335.0, 475.0, 64.0, 37.0], "area": 1752, "segmentation": {"size": [512, 512], "counts": 
"boW53j?3N3M2N2M4M2N3L3N3M20010O010O010O01O01O010O010O000N3M2N2001O001O00001O001O001OO1N2N21O001M21O01O010O001O000O2M2UOn@e0Y?M2N3M2M4M2N2MlPh1"}, "image_id": 104, "id": 1675}, {"iscrowd": 0, "category_id": 1, "bbox": [419.0, 476.0, 31.0, 30.0], "area": 569, "segmentation": {"size": [512, 512], "counts": "^oa64j?2M4M2N2M4M2M4N10010O010O00010O010O0010O001N1N3M2M3N3L3N3L3Nm`n0"}, "image_id": 104, "id": 1676}, {"iscrowd": 0, "category_id": 1, "bbox": [460.0, 501.0, 27.0, 11.0], "area": 180, "segmentation": {"size": [512, 512], "counts": "m_V72m?2N1O1O100O1O1O1O1001O1O1O2N1O00O1O1O1O12N1O1O1O1O1ORP<"}, "image_id": 104, "id": 1677}, {"iscrowd": 0, "category_id": 1, "bbox": [492.0, 509.0, 5.0, 3.0], "area": 9, "segmentation": {"size": [512, 512], "counts": "o_f71n?1O11O1OQP7"}, "image_id": 104, "id": 1678}, {"iscrowd": 0, "category_id": 1, "bbox": [56.0, 0.0, 45.0, 20.0], "area": 514, "segmentation": {"size": [512, 512], "counts": "PPl02n?2N2N3M2N2N3M2N2NO100O100O10000O100O100O10000O100O100O10000O100O100O10000O100O100O100LV@2j?NT`]6"}, "image_id": 105, "id": 1679}, {"iscrowd": 0, "category_id": 1, "bbox": [103.0, 0.0, 4.0, 2.0], "area": 6, "segmentation": {"size": [512, 512], "counts": "P`c12n?00O10P`Z6"}, "image_id": 105, "id": 1680}, {"iscrowd": 0, "category_id": 1, "bbox": [312.0, 0.0, 45.0, 27.0], "area": 814, "segmentation": {"size": [512, 512], "counts": "ZPl43k?2N3L3N3N101O001O00001O001O001O00001O001O001O00001O001O001O001O0000O1M3N2N2M3N2N2M3N2N2NR`]2"}, "image_id": 105, "id": 1681}, {"iscrowd": 0, "category_id": 1, "bbox": [360.0, 0.0, 82.0, 42.0], "area": 1517, "segmentation": {"size": [512, 512], "counts": "PQd53j?3N2M4M2M4M2M310O010O00010O0O2M2M4M2M2O0O04M2N2M4M2N30O00010O010O0010O0010M2M4M2O101O001O00001O00O1M3N2M3N2M3N2M3N2N2M3N2M3N200001O001O00001O001O00001O001O00O1N2MSPS1"}, "image_id": 105, "id": 1682}, {"iscrowd": 0, "category_id": 1, "bbox": [445.0, 0.0, 67.0, 54.0], "area": 1931, "segmentation": {"size": [512, 512], "counts": "a`n64j?2M4M2N2M4M2O2O0010O0010O0010O0010O010O0010O0010O0010O0010O010O0010O0010O0010O0010M2N3M2M4M2N2M4O001O001O0_AfN\\>Z1bAiN]>]110O0010O0010O0010O0WObA0_>McA4W>"}, "image_id": 105, "id": 1683}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 2.0, 19.0, 35.0], "area": 634, "segmentation": {"size": [512, 512], "counts": "2S1m>0000000001O000000000000003M0000000Mm_f7"}, "image_id": 105, "id": 1684}, {"iscrowd": 0, "category_id": 1, "bbox": [95.0, 24.0, 65.0, 83.0], "area": 2473, "segmentation": {"size": [512, 512], "counts": "eb_11n?2N2M3N2N2e@Gl>;RAGY>MPB>EGY>MhAM0a0MGX>NhAM0a0NEY>NhANOk0Y>YOfAo0Z>;000O0102N2N2N00O01000000000O010000000O01000000001N3N2N2N2N2N2M2O2N2N2N2M3N2N1O2N2N2M3N2N2N1O2N2M3N2N2N2Nan_5"}, "image_id": 105, "id": 1685}, {"iscrowd": 0, "category_id": 1, "bbox": [420.0, 30.0, 12.0, 14.0], "area": 104, "segmentation": {"size": [512, 512], "counts": "WQb62k?3N3L3O20O00010N1N3L3NQoW1"}, "image_id": 105, "id": 1686}, {"iscrowd": 0, "category_id": 1, "bbox": [428.0, 42.0, 74.0, 45.0], "area": 1973, "segmentation": {"size": [512, 512], "counts": "RRf61m?2M4M2M3N3L3N3L3N2N3O010O01O01O010O01O010O01O01O010O01O01O01O0M4N1010O00010O010L3O101O010O01O0M301O0010O010O0010O0010O0010O00M4M2M4M2M3N3L3N3L3N2MZn4"}, "image_id": 105, "id": 1687}, {"iscrowd": 0, "category_id": 1, "bbox": [139.0, 47.0, 73.0, 83.0], "area": 2622, "segmentation": {"size": [512, 512], "counts": 
"`cU21m?3M2N3N2M2N3M2N3O0010O0100O010O0100O01O0N3M3M210ETO`Al0^>VOcAj0Z>YOeAg0Z>ZOgAf0V>]OiAb0V>@hAa0W>BfA`0X>BfAa0S>nNPBf1n=]NoAf1n=7N3M02N3M0HRBaNm=`1TB^Nm=a1VB\\Nj=d1XBZNh=h163M2O2O1O01M2O2M2N3M2N3N1N3E_ASOd>j0_ASOc>l0^AROe>k09N3O010M2N3If@E[?9g@E\\?88M2N3MVne4"}, "image_id": 105, "id": 1688}, {"iscrowd": 0, "category_id": 1, "bbox": [505.0, 54.0, 7.0, 12.0], "area": 59, "segmentation": {"size": [512, 512], "counts": "nal72l?2M4L300010OYN"}, "image_id": 105, "id": 1689}, {"iscrowd": 0, "category_id": 1, "bbox": [502.0, 71.0, 10.0, 19.0], "area": 134, "segmentation": {"size": [512, 512], "counts": "cRk74j?2M3N3M2M4O010O01OgM"}, "image_id": 105, "id": 1690}, {"iscrowd": 0, "category_id": 1, "bbox": [221.0, 78.0, 59.0, 89.0], "area": 2498, "segmentation": {"size": [512, 512], "counts": "jd^32k?3N3L3N2N3L3N3L3N2M4M2O2O00010M2_OlNSBX1j=kNSBW1j=lNSBX1j=jNTBX1j=kNSBW1j=b0M2M4M2M3N3M2M11O2M4M2M4M2M4M2N2M4FoAaNS>\\1QB`NS>]19M3N3L301O0M3N3M2M4M2M4M2M3N3M2M4MXmc3"}, "image_id": 105, "id": 1691}, {"iscrowd": 0, "category_id": 1, "bbox": [285.0, 85.0, 25.0, 22.0], "area": 322, "segmentation": {"size": [512, 512], "counts": "Qc^42k?4M2M3N3M210O00010O010O00010O010O00010ON3M2M3N3LVmT3"}, "image_id": 105, "id": 1692}, {"iscrowd": 0, "category_id": 1, "bbox": [494.0, 88.0, 18.0, 19.0], "area": 200, "segmentation": {"size": [512, 512], "counts": "VSg71m?3L3N3M2M3N3O010O0001O0N3L3N3L3N2NVM"}, "image_id": 105, "id": 1693}, {"iscrowd": 0, "category_id": 1, "bbox": [106.0, 100.0, 27.0, 26.0], "area": 344, "segmentation": {"size": [512, 512], "counts": "gSe12l?2O1O2N1O2M2O1O2N1O2M2O1O2NO011O2N2M2O2N2M3N2N2N1N3Ne\\m5"}, "image_id": 105, "id": 1694}, {"iscrowd": 0, "category_id": 1, "bbox": [280.0, 107.0, 54.0, 67.0], "area": 2310, "segmentation": {"size": [512, 512], "counts": "PU\\43k?2M3TOLiA8S>KjA8T>JiA:S>JjA8S>KjA8V>IfA;Y>EdA>\\>c010O00010O00010O0010O00O2L3M3M4M2O110O01O01O01N1N2M4L3M3M4M2M3N3O010O01OAZADg>8\\AHd>5`AIa>4bAIb>4aAIa>4bAIa>4h0L^lh2"}, "image_id": 105, "id": 1695}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 108.0, 39.0, 42.0], "area": 1035, "segmentation": {"size": [512, 512], "counts": "\\3o0Q?001O01O01O01O0001O01O00PAROn>Q1O01O01O0001O0WOSA`0l>\\OYAc0R?000010O0000010O0000010N1L4M3L5K4LT\\\\7"}, "image_id": 105, "id": 1696}, {"iscrowd": 0, "category_id": 1, "bbox": [329.0, 138.0, 51.0, 64.0], "area": 1752, "segmentation": {"size": [512, 512], "counts": "beT53k?2N3M2N3M2N3M2N3M2N3TAUO_>m0_AVO^>m0_AUO_>m0_AVO^>W1N3M2N3M2N3M210O01M2N3M2O2O010O0N3M2N3N101O0N3M2IXATOk>i07N3M2N3M2N3M2N3M2N3M2NYkQ2"}, "image_id": 105, "id": 1697}, {"iscrowd": 0, "category_id": 1, "bbox": [383.0, 144.0, 60.0, 53.0], "area": 1708, "segmentation": {"size": [512, 512], "counts": "[eo53k?3M2M3N3L3N3L3N2M4M210O01O01O010O01O01N1N3L3O11M21O010O010O00010O010O01OUAPOf>U1010O010O00010WO[A5e>H^A9b>DaA;_>BdA>\\>@fAa0Z>[OjAd0i>10O0O1N3L3N3M2M4M2MaZR1"}, "image_id": 105, "id": 1698}, {"iscrowd": 0, "category_id": 1, "bbox": [69.0, 148.0, 72.0, 41.0], "area": 2038, "segmentation": {"size": [512, 512], "counts": "[eR11l?4M2N3M2M4M2N3M2M310O010O010O0010O001ON3N100000000010O00000001O0001O000001O0001O000001O0001O0001O00000K5M3010O00000001O01O00000001O01O000ZOXANn>KYANn>LXANWji5"}, "image_id": 105, "id": 1699}, {"iscrowd": 0, "category_id": 1, "bbox": [447.0, 151.0, 27.0, 26.0], "area": 428, "segmentation": {"size": [512, 512], "counts": "Xeo62k?4M2M3N3L3N3M20010O01O01O010O00010O01O0M3N3L3N3L3M3NTkb0"}, "image_id": 105, "id": 1700}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 157.0, 5.0, 12.0], "area": 35, "segmentation": {"size": 
[512, 512], "counts": "m4QO^Ao0i>010lI"}, "image_id": 105, "id": 1703}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 184.0, 56.0, 62.0], "area": 2029, "segmentation": {"size": [512, 512], "counts": "_6T1i>4M2N3L3N3L3N0000O4M2N3L3N2M4O0010O01O01O010O010O00010O010O010O0N2M2OO10O13M2010O01O0N2M4M2N3L3N3L3N2N3L3N3M2MSjS7"}, "image_id": 105, "id": 1704}, {"iscrowd": 0, "category_id": 1, "bbox": [136.0, 185.0, 54.0, 37.0], "area": 1084, "segmentation": {"size": [512, 512], "counts": "\\VT22k?3M3N3L3N3L3M310O0010O00010O0010O0010O0O20O00010O010O0O101O010O00010O01O01O010O01O01O010O01N1N2M4M2M4M2M3NiiP5"}, "image_id": 105, "id": 1705}, {"iscrowd": 0, "category_id": 1, "bbox": [300.0, 193.0, 28.0, 33.0], "area": 467, "segmentation": {"size": [512, 512], "counts": "nVf41m?2M4O0N2N3L3N3M1N011O2M3N3N110O010ON2N3L3N3M2M4M2N2M4Mkik2"}, "image_id": 105, "id": 1706}, {"iscrowd": 0, "category_id": 1, "bbox": [362.0, 195.0, 63.0, 56.0], "area": 2022, "segmentation": {"size": [512, 512], "counts": "PWe54i?3N2M4M2M4M2M3M4M2M4M2O1010O01O01O010O00010O010O00010O010O00010O010O00VAmNg>V110O0N3M200010O010O00010VO^A4b>J`A6a>FcA:\\>DfAAjA?V>]OmAc0S>[OoAe0g>O0N2M4M2M4M2M3MmX[1"}, "image_id": 105, "id": 1707}, {"iscrowd": 0, "category_id": 1, "bbox": [193.0, 199.0, 72.0, 44.0], "area": 1965, "segmentation": {"size": [512, 512], "counts": "nfP32j?5L3M3L4M4K4N2010O000010O00010O0000010O00010O000010O000010O00010O0000010O000O2K400010O0000010O00010O000010O000010O000010O000010O00001K4Bn@KU?2n@KV?0n@LV?1jYk3"}, "image_id": 105, "id": 1708}, {"iscrowd": 0, "category_id": 1, "bbox": [31.0, 210.0, 73.0, 66.0], "area": 2229, "segmentation": {"size": [512, 512], "counts": "\\h?1l?4M2O2O010O00010O010OM3N3M03L3N3M2M3N3L3N3M2M4M2M3N3M2M4M2M3N3M2M4M2M10O2O2N3L3N2M4O010O01O01O010O010O00010M1N01002M3N3L3N3M2M3N3L3N3M2M4M2M3N3MYi[6"}, "image_id": 105, "id": 1709}, {"iscrowd": 0, "category_id": 1, "bbox": [448.0, 233.0, 56.0, 44.0], "area": 1387, "segmentation": {"size": [512, 512], "counts": "RXP71l?4M2N2M4M2M4N11O01O001L3N3L3N2010O010O01O01O010O010O00010O010O010O00010O010O00010O010O001L3N2N3L3N3M2M3N3L3N3MXh3"}, "image_id": 105, "id": 1710}, {"iscrowd": 0, "category_id": 1, "bbox": [422.0, 234.0, 18.0, 22.0], "area": 236, "segmentation": {"size": [512, 512], "counts": "jWc62k?3M4L3M3M4N11O01O01O01ON3L3M3M4L3MehS1"}, "image_id": 105, "id": 1711}, {"iscrowd": 0, "category_id": 1, "bbox": [166.0, 239.0, 49.0, 56.0], "area": 1461, "segmentation": {"size": [512, 512], "counts": "TXc22k?3N3L3N2M4M2M4N100010O010O01O01O010OZAYOS>h0iA\\OV>d0hA^OY>a0dAB\\>?aAD_>;_AGa>m0O01O010O01O000N3L3N3L21M3N3L3N2M4AYA]O2Jg>g0YA]On>?UA^On>`0:Jd@E`?76N3L_Xd4"}, "image_id": 105, "id": 1712}, {"iscrowd": 0, "category_id": 1, "bbox": [63.0, 241.0, 88.0, 138.0], "area": 4383, "segmentation": {"size": [512, 512], "counts": "_ko02k?4M2N3L3N3L3N2N3L3N3M2M4M2M3N3M2M4N10010O001M2001L3N2M10O10O11N4M2N2M4M2^NTNmDm1T;UNiDj1X;XNfDh1Z;[NbDf1^;]NRDHEj1]<]NlCKEh1a<^NgCMDe1i<]N`C1Ed1k<\\N]CT2cF^A;a>CbA<^>BdA>]>^OfAc0Y>[OjAd0W>YOkAh0g>O010O00010O0M4M2N3L3N2N3L3NTgb1"}, "image_id": 105, "id": 1714}, {"iscrowd": 0, "category_id": 1, "bbox": [273.0, 264.0, 39.0, 25.0], "area": 573, "segmentation": {"size": [512, 512], "counts": "ehX43j?4M2M3M4M201O00010O01O01O010O00010O010O0001N1M3N3O0010O0]@G_?=1O01O01O010L3N2M4L_gS3"}, "image_id": 105, "id": 1715}, {"iscrowd": 0, "category_id": 1, "bbox": [219.0, 268.0, 46.0, 70.0], "area": 2000, "segmentation": {"size": [512, 512], "counts": 
"lh]33e00MNn=5QB1MMo=5RB0LNP>4QB2LMo=5RB0LNP>4QB1MMo=6QBb0m=i010O01O01O010O01O01O010O01O01O01N1N2M4L3N3L3N2M4M0O2O2M4M2M3M4M2M4M2M3N3L3N3L[Wk3"}, "image_id": 105, "id": 1716}, {"iscrowd": 0, "category_id": 1, "bbox": [410.0, 281.0, 73.0, 53.0], "area": 2242, "segmentation": {"size": [512, 512], "counts": "_Y]63k?2M4M2M4M2g@Ao>a0n@BR?f010O00010O010O00010OO2L3M310O010O000101N10O0000H9F900000001ON2000001O000001O00000[AoN[>[1010O0000000001OeNcAS1e>0000000010O0000000UOZA=e>ZOeAe0k>00K5J6J6KaV>"}, "image_id": 105, "id": 1717}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 283.0, 11.0, 30.0], "area": 173, "segmentation": {"size": [512, 512], "counts": "k8n0R?N3L3N3L3N2M4M2M4M2MTWj7"}, "image_id": 105, "id": 1718}, {"iscrowd": 0, "category_id": 1, "bbox": [17.0, 287.0, 52.0, 63.0], "area": 1728, "segmentation": {"size": [512, 512], "counts": "]j83k?2M3N3L3N3L3N2M4M2M4RAQOg>Q1WAQOf>W1M2M4N11O01O010O01O01O010OM4M2M2OO01003O010L3N2N11M4M2M3N3L3N0O2O3L3Im@_OV?=8M3M4M2MQWm6"}, "image_id": 105, "id": 1719}, {"iscrowd": 0, "category_id": 1, "bbox": [146.0, 290.0, 45.0, 58.0], "area": 1374, "segmentation": {"size": [512, 512], "counts": "jYY21m?3M2M3N3M2M4M2N3L3N2O20O01O010O01O]AVOP>k0mAWOT>h0jA[OU>f0gA]OY>c0eA@[>`0bAB^>P110O010O0010O0010O0010bNaAY1c>00N3[O\\AJf>3]AJg>3\\AJf>4\\AJg>2\\AKg>3\\AJf>3f0M^VP5"}, "image_id": 105, "id": 1720}, {"iscrowd": 0, "category_id": 1, "bbox": [330.0, 291.0, 49.0, 57.0], "area": 1583, "segmentation": {"size": [512, 512], "counts": "XZU52k?4M2M3N3L3N3L3N2M4M2M4M2M3N3M210O01O01O010O2OO01O01O010O01O01O010O0ZO_AN`>OcA1^>LeA3[>JhA7X>FjA:V>CnA=Q>AQB?P>]OTBb0l=\\OVBd0c>M4M2M4M2M3NiUR2"}, "image_id": 105, "id": 1721}, {"iscrowd": 0, "category_id": 1, "bbox": [244.0, 334.0, 22.0, 24.0], "area": 323, "segmentation": {"size": [512, 512], "counts": "nZj33k?2M4M2M3N3M2N3O01O01O010O01O0N3L3N2N3L3N3L_ej3"}, "image_id": 105, "id": 1722}, {"iscrowd": 0, "category_id": 1, "bbox": [193.0, 336.0, 13.0, 15.0], "area": 106, "segmentation": {"size": [512, 512], "counts": "kjP31m?3M2N3L3N30O001M2N3M2N3M^eh4"}, "image_id": 105, "id": 1723}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 337.0, 24.0, 27.0], "area": 422, "segmentation": {"size": [512, 512], "counts": "m:8e?4M2M3M4N110O00010O010O00010O010OO1M4M2M3N3L3NZec7"}, "image_id": 105, "id": 1724}, {"iscrowd": 0, "category_id": 1, "bbox": [398.0, 339.0, 63.0, 42.0], "area": 1422, "segmentation": {"size": [512, 512], "counts": "U[W63k?2M3N3L3N3N11O01O010O01O01O010O0O1N3M2M4N110O01O01O010O01O010O01O01O010O01O01O010O01O01O010O01O010O01O01O01M2M4M2M3N3L3N3L3MoTi0"}, "image_id": 105, "id": 1725}, {"iscrowd": 0, "category_id": 1, "bbox": [304.0, 344.0, 57.0, 54.0], "area": 1617, "segmentation": {"size": [512, 512], "counts": "c[h42l?3L3N3L3N2M4M2M4M2M3N3O001O01O010O01O01O010O01O01O010O01O0WAnNe>Q1YAQOg>U10O01O01O010O01O01O010YOYA3g>J\\A7c>G`A8a>DbA<^>BdA?\\>]OhAb0k>10O000N3L3N3L3N2MWT[2"}, "image_id": 105, "id": 1726}, {"iscrowd": 0, "category_id": 1, "bbox": [178.0, 345.0, 57.0, 69.0], "area": 2227, "segmentation": {"size": [512, 512], "counts": "n[i21l?4M2M4M2m@E^>>_AE_>>^AE^>>_AE_>>^AD_>?^AE_>o0M40O01O01O010O01O01O010O01O000N3M2010O0010O00O2L3N3L10O010O0100O3N3L3N2N3L3N3L3N3L3Ke@C]?;6L3N3LTUZ4"}, "image_id": 105, "id": 1727}, {"iscrowd": 0, "category_id": 1, "bbox": [131.0, 350.0, 40.0, 65.0], "area": 1634, "segmentation": {"size": [512, 512], "counts": "SlQ22a0OW>2fA200W>1fA20OX>1`AJ1850V>e0fA^OX>d0fA^OX>W1L3O2O010O00010O0O2M2O2O01O0O1N3L3N3M2FeAlN^>Q1eAmN^>P1eAmN]>Q1;L3N3M2M3N3M2M4M2M4M2NkTZ5"}, "image_id": 105, "id": 1728}, {"iscrowd": 0, 
"category_id": 1, "bbox": [458.0, 365.0, 19.0, 23.0], "area": 241, "segmentation": {"size": [512, 512], "counts": "n[U72k?3N3M2N2M4M2N2O20O01ON3M2N3L3N2N3M2NaTa0"}, "image_id": 105, "id": 1729}, {"iscrowd": 0, "category_id": 1, "bbox": [231.0, 377.0, 48.0, 77.0], "area": 2189, "segmentation": {"size": [512, 512], "counts": "hmc33o>NlA4Q>0lA2R>0kA4Q>OfAHI;_>0dAIL8\\>2fAHN6Z>5eAH04NBW>S1lA\\OR>g0mAYOP>j0QBVOl=l0TBTOi=P1VBQOg=Q1XBPOf=f1O20O010O00010O010O000N3L3N3L3N2N3L3N3L3N2M4iNZAP1m>L3N2M4M2M4M2M3N3L3N3LhSd3"}, "image_id": 105, "id": 1730}, {"iscrowd": 0, "category_id": 1, "bbox": [386.0, 379.0, 57.0, 48.0], "area": 1530, "segmentation": {"size": [512, 512], "counts": "f\\Q64j?2M3N3M210O0M3N3L3N3L3N2M4N101O01O010O01O01O010O01O01O010O01OnNWAm0n>010O010O00010O0010O00010O001M2M3N3L3N2N3L3N2N3L3NcSR1"}, "image_id": 105, "id": 1731}, {"iscrowd": 0, "category_id": 1, "bbox": [274.0, 383.0, 83.0, 66.0], "area": 2611, "segmentation": {"size": [512, 512], "counts": "T]Y42l?3L3N3L3N2M4M2N30O00010ON3L3M3N3L3M4L3O1010O010O01O01O01O010O01O01O010O00cNcAW1d>0O010O00010O0010O010O000gN]AV1g>O01O01O0WOXA;h>A[A?e>^O^Ac0a>[ObAd0m>0010O010N1N210O01O01O010O00010O000O2L3N2M4L3N3LdR]2"}, "image_id": 105, "id": 1732}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 414.0, 25.0, 23.0], "area": 350, "segmentation": {"size": [512, 512], "counts": "]]i61m?2M4M2M4M2M301O010O01O01O010O01O01O01N1M4M2M3N3LnRj0"}, "image_id": 105, "id": 1733}, {"iscrowd": 0, "category_id": 1, "bbox": [180.0, 415.0, 27.0, 26.0], "area": 409, "segmentation": {"size": [512, 512], "counts": "`]j21l?3N2N3L3N3M2M40O0010O0010O010O0010O0010O0M4M2N3L3N2NkRh4"}, "image_id": 105, "id": 1734}, {"iscrowd": 0, "category_id": 1, "bbox": [383.0, 428.0, 36.0, 43.0], "area": 976, "segmentation": {"size": [512, 512], "counts": "Zno53j?3N2M4M2M4M2M3N3L3N3L3N2010O010O00010O010O00010O010O000N3M2BRAGR?6PAHR?5RAGQ?7QAGR?5>N]R^1"}, "image_id": 105, "id": 1735}, {"iscrowd": 0, "category_id": 1, "bbox": [271.0, 431.0, 55.0, 46.0], "area": 1518, "segmentation": {"size": [512, 512], "counts": "omW42k?4M2N2g@Hg>;UAGk>h00O010O01O01O010O010O01O01O010O010O01O01O010O01OO2M2N3O001O01O010O010O01O0O1N3M2M4M2N3L3N2M4Moal2"}, "image_id": 105, "id": 1736}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 457.0, 7.0, 19.0], "area": 71, "segmentation": {"size": [512, 512], "counts": "Y>c0^?L3N2N3L3N3LgQl7"}, "image_id": 105, "id": 1737}, {"iscrowd": 0, "category_id": 1, "bbox": [341.0, 459.0, 51.0, 47.0], "area": 1523, "segmentation": {"size": [512, 512], "counts": "^oZ53k?3L3N2@GVA=f>FXAGVA=f>FXA`0N3O010O01O01O010O01O01OO2L3O20O00010O010O0010O0010O0010O001O0N2M4M2M4M2M3N3L3N3L3N2MXak1"}, "image_id": 105, "id": 1738}, {"iscrowd": 0, "category_id": 1, "bbox": [30.0, 475.0, 53.0, 37.0], "area": 1286, "segmentation": {"size": [512, 512], "counts": "n_?2l?2M3N2IJa@9\\?7N2M3N2M30000001O001O00001O001ON2N2M3N2M300001O001O00001O001O00001O001O0JPA\\OP?b0RA^Oo>>UAAk>=WACj>9ZAFf>8\\AHd>5_AKb>2aAM_>0dA0W?O0000QPf6"}, "image_id": 105, "id": 1739}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 479.0, 28.0, 33.0], "area": 680, "segmentation": {"size": [512, 512], "counts": "[?e0\\?ON2N2M3N2M3O1001O00001O001O00001O000N3L3N3L3N2M4M2M4Mk`a7"}, "image_id": 105, "id": 1740}, {"iscrowd": 0, "category_id": 1, "bbox": [405.0, 487.0, 24.0, 22.0], "area": 296, "segmentation": {"size": [512, 512], "counts": "boZ62l?3M2M4M2N3O010O01O010O010O010O010O01OM4M2N3M2NcPY1"}, "image_id": 105, "id": 1741}, {"iscrowd": 0, "category_id": 1, "bbox": [120.0, 509.0, 10.0, 3.0], "area": 20, "segmentation": {"size": [512, 
512], "counts": "n_l12m?100001O00001O0000Q`n5"}, "image_id": 105, "id": 1742}, {"iscrowd": 0, "category_id": 1, "bbox": [473.0, 82.0, 39.0, 55.0], "area": 1140, "segmentation": {"size": [512, 512], "counts": "Sc\\72m?2N2N2N2M2O2N2N2N2M3N1O2N2N2N2O1000OUAQOf>o0XASOh>S10000O0100000000000O01000O1N2O10O1N2NoL"}, "image_id": 106, "id": 1743}, {"iscrowd": 0, "category_id": 1, "bbox": [444.0, 108.0, 68.0, 64.0], "area": 2064, "segmentation": {"size": [512, 512], "counts": "lSn62m?2M2O2N2N2N2M3N2f@^OS?c0l@_OR?c0l@_OR?i0M3N101000000O10O10000000O10O1000SAoNj>S101000000O1N1O2O10000O10O10000000O0100O1M3N2N1O2N2M3N2N2N1010000O1000O01N2N2M3N^K"}, "image_id": 106, "id": 1744}, {"iscrowd": 0, "category_id": 1, "bbox": [422.0, 140.0, 73.0, 62.0], "area": 2038, "segmentation": {"size": [512, 512], "counts": "kTc63l?2N2N2N2M3N1O2N2N2N2j@ZOn>i0PAYOn>n0N101O1000000000O10O10002N0000000O0100000000000O001N2N20000000O0O2N2N2N2N2N2M210000000O1000OO2N2N2M300N2N1O2N2M3N2N2N2N1OYZ8"}, "image_id": 106, "id": 1745}, {"iscrowd": 0, "category_id": 1, "bbox": [387.0, 165.0, 66.0, 65.0], "area": 1715, "segmentation": {"size": [512, 512], "counts": "feQ61n?2N1N3N2N2N2N2N1O2N2N2N2N2N1O2N200000000O010000000O10O10000000O10O10O1QASOh>T1N10100000O10O100000O1000OSOZAa0f>]O\\Ac0d>[O^Ad0c>ZO_Af0a>XOaAf0`>XObAg0m>M3N2N2N2N1O2N2M3N2N1O\\Ym0"}, "image_id": 106, "id": 1746}, {"iscrowd": 0, "category_id": 1, "bbox": [369.0, 200.0, 69.0, 62.0], "area": 1957, "segmentation": {"size": [512, 512], "counts": "kfh51n?2N2N2M3N2N2N1O2N2N2M3N2N2N1O2N2N2O100000O100000O10000000OTAoNj>T1O1M201O10000000O10O10000000O1000O01N2N2M3N2N1O2N2M3N2O10OO2N2N2M3N2N1O2N2M3NchT1"}, "image_id": 106, "id": 1747}, {"iscrowd": 0, "category_id": 1, "bbox": [336.0, 220.0, 77.0, 69.0], "area": 2019, "segmentation": {"size": [512, 512], "counts": "^WX52m?2M3N2N1O2N2N2M3N2N2N2N1N3N2N2O100O10N2N2N2N2N20O01000000000O10OQAXOg>h0WAZOi>f0UA\\Ok>d0RA_On>j0O10O1000000000O10O10000000O10O1000000000N1N3N10O2N2N2N2M3N1O2N2N2M3Jc@F_?8c@F^?96N2N2M3NiWa1"}, "image_id": 106, "id": 1748}, {"iscrowd": 0, "category_id": 1, "bbox": [500.0, 228.0, 12.0, 38.0], "area": 289, "segmentation": {"size": [512, 512], "counts": "aWj72<0R?3l@OR?3l@OR?3l@NS?4j@OT?2k@0R?a0N2N2N2N1O2kH"}, "image_id": 106, "id": 1749}, {"iscrowd": 0, "category_id": 1, "bbox": [318.0, 258.0, 65.0, 59.0], "area": 1816, "segmentation": {"size": [512, 512], "counts": "fXo42m?2N1O2N2M3N2N2N2N2N1O2M3N2N2N2N2N1O200000000000OVAmNg>V11000M3O1O1000O10000000O100000O100O1N2N1O2M3N2N2N2N2N1O200N2N2N2N2N2N1N3N2N2N2NkVP2"}, "image_id": 106, "id": 1750}, {"iscrowd": 0, "category_id": 1, "bbox": [468.0, 263.0, 44.0, 55.0], "area": 1445, "segmentation": {"size": [512, 512], "counts": "TYZ71n?1O2N2N2M3DHm@:Q?Gn@;P?Gn@:Q?Hm@:Q?d000000000000000000O1000O100000000000000000O1000O1000000001O=C5K00000O10O10000000000000000000O1000O100008H=Cfee2"}, "image_id": 106, "id": 1752}, {"iscrowd": 0, "category_id": 1, "bbox": [440.0, 296.0, 60.0, 57.0], "area": 1821, "segmentation": {"size": [512, 512], "counts": "[Zl61n?2BOi@3V?Oh@3V?Ni@4T?Oj@3T?Oj@3T?=O2N2M3N2N2N1N3N2N2N2N1N3O100000O0O2N2N2N2N2M2010000O1kNWAR1l>000000O010000000N1O2N2N2M3N2N1O2N2M3N2N1O2M3N2N2Nee5"}, "image_id": 106, "id": 1753}, {"iscrowd": 0, "category_id": 1, "bbox": [408.0, 336.0, 62.0, 57.0], "area": 1808, "segmentation": {"size": [512, 512], "counts": "[[\\61n?2M3N2N1O2JGb@;[?Gd@;Z?7N2N2M21000N2N2N2M3N2N1O1O002N20000O0100000000000O10O1000000N2N1O2M3N2N2N2N00000O2O2N2N2N2N2M3N1O2N2N2N2N2Mgdd0"}, "image_id": 106, "id": 1754}, {"iscrowd": 0, "category_id": 1, "bbox": [274.0, 345.0, 
75.0, 36.0], "area": 1978, "segmentation": {"size": [512, 512], "counts": "T[Y45k?:E8I0000000000O10000000O100000000000004L00000O1000O10000000L40000O100000O10000000000000O100000O10000000000000O1000O1000000000000GWA[Oi>e0810000000I701O;EkTa2"}, "image_id": 106, "id": 1755}, {"iscrowd": 0, "category_id": 1, "bbox": [486.0, 349.0, 26.0, 40.0], "area": 577, "segmentation": {"size": [512, 512], "counts": "`[c71n?2N2M3N1O2N2M3N2N1O2M3N2N20O100000O1O0O2N2N2M3N1O2RE"}, "image_id": 106, "id": 1756}, {"iscrowd": 0, "category_id": 1, "bbox": [391.0, 369.0, 51.0, 61.0], "area": 1670, "segmentation": {"size": [512, 512], "counts": "QlS61m?3N2^@KV?6i@KU?8h@K6Mc>:UAK5Me>:TAK5Me>d0YA^Od>d0[A^Oc>P1O2M3N2O010aA`N\\>c100000O1000O10O1000000O0O2N2M3N2N2N1O2N2N2M3N2N2N1O2N2N2M3N2N1O2N2N2N2MbcR1"}, "image_id": 106, "id": 1757}, {"iscrowd": 0, "category_id": 1, "bbox": [452.0, 376.0, 60.0, 58.0], "area": 1860, "segmentation": {"size": [512, 512], "counts": "d\\R71n?1O2M3N2N2M3N2M2O2N2M3N2O100O1N1O2PAROk>S1M3N2N1O2N2M3N2N1100000O10O100N2N2M3N1N3N2M3N2M3N2N1N3O1000000000O01000000N2N2N1O2N]C"}, "image_id": 106, "id": 1758}, {"iscrowd": 0, "category_id": 1, "bbox": [277.0, 384.0, 74.0, 38.0], "area": 1966, "segmentation": {"size": [512, 512], "counts": "blZ46j?;E3M00000O1000O10M3000000000000000000O10O10002N0000E]OYAc0g>;000000O1000O1000000000000000O10000000O100000000000O100000000000O100000O10DRAFn>:<00000000J606IkS`2"}, "image_id": 106, "id": 1759}, {"iscrowd": 0, "category_id": 1, "bbox": [419.0, 424.0, 67.0, 53.0], "area": 1786, "segmentation": {"size": [512, 512], "counts": "Pna62m?2N2N2M3N1O2N2N2M3N2N1O2N20000O10O10O1N2M3N2N1O2N2M3N2N2N1O200000O1M3N1O2M3N2N2M3N1O2N2M3O1000O0100000000O010N2N2N2N2M2O2N2N2N2M3N1Oka<"}, "image_id": 106, "id": 1760}, {"iscrowd": 0, "category_id": 1, "bbox": [282.0, 427.0, 72.0, 32.0], "area": 1641, "segmentation": {"size": [512, 512], "counts": "f]]45k?;E4L00O100000000000000000000O1000O1000000000000000OK60003L2O00000000000000O1000O10000000000000O10000000O10000000O100000000000OAVAJj>6`0000000000:F[b^2"}, "image_id": 106, "id": 1761}, {"iscrowd": 0, "category_id": 1, "bbox": [291.0, 474.0, 61.0, 38.0], "area": 1556, "segmentation": {"size": [512, 512], "counts": "Zoa42c0Of>3XANg>4WAMg>5XALg>6WAKh>7UAKj>7TAJk>8SAIk>:SAGl>:SAGl>g0O1O1O11O1O1O1O1O001O1O1O1OO1O1O1N21O001O1O1O1O1OO1O1O1N2O1O1O000O11O2N2M2O2N2N2N2N2M2O2N2N2N2M3N1O2Nj`_2"}, "image_id": 106, "id": 1762}, {"iscrowd": 0, "category_id": 1, "bbox": [362.0, 480.0, 61.0, 32.0], "area": 1213, "segmentation": {"size": [512, 512], "counts": "f_e53l?2N2N1O2M3N2N2N2N1N2O1O1O1O1001O1O1O001OO1N2O1O1O1O1N2O11O001O1O1O1O1O001O1O1O1O1O001O1O1OO100001O1O1O1O1O1O0O2N2N2N2N2MZP\\1"}, "image_id": 106, "id": 1763}, {"iscrowd": 0, "category_id": 1, "bbox": [394.0, 335.0, 69.0, 70.0], "area": 2260, "segmentation": {"size": [512, 512], "counts": "o[U62m?2N2M3N2N2N2N1O2N2M3N2N2N2N1O0O01002N2N2N2N2M3N2N1O2N2N2N2M3N00O3NO10000000O1000O100002N2M3N2KaAfNa>X14N3N2N2N2N2N2N2M2O2N2N2N2N2N2M3N1O2N2N2NlTh0"}, "image_id": 107, "id": 1764}, {"iscrowd": 0, "category_id": 1, "bbox": [431.0, 388.0, 61.0, 58.0], "area": 2078, "segmentation": {"size": [512, 512], "counts": "Ymg61b?1h@1V?1g@2V?1h@1V?1h@1V?1h@0W?>N2M3N2N2N1O2M3N2N2N1O2M2O00002O010O10000000O10O10N2N2N2NO0100000O10O3N2N2N2O1N1O2N2M3N2N2N1O2M3Jd@E^?96N2M2O2N2NZc9"}, "image_id": 107, "id": 1765}, {"iscrowd": 0, "category_id": 1, "bbox": [483.0, 0.0, 29.0, 24.0], "area": 372, "segmentation": {"size": [512, 512], "counts": "P`a71o?1O1O1O1O001O1O1O1O1O001O1O1O1O001O1O1O1O1O001O1O1O1O1O00"}, "image_id": 108, 
"id": 1766}, {"iscrowd": 0, "category_id": 1, "bbox": [411.0, 0.0, 101.0, 160.0], "area": 11054, "segmentation": {"size": [512, 512], "counts": "bc]63k?2N2M4M2N3M2M4M2N3L3N2N3L3N3M2N3L3N2N3L3N3M2N3L3N2N3L3N3M2N3L3N2N3L3N3M2M4M2N2N3L3N3M2M4M2N2N3L3N3M201O001O00001O001O001O00001O001O001O00001O001O001O00001O001O001O00001O001O001O00001O001O001O001O00001O"}, "image_id": 109, "id": 1767}, {"iscrowd": 0, "category_id": 1, "bbox": [142.0, 193.0, 370.0, 319.0], "area": 64224, "segmentation": {"size": [512, 512], "counts": "n_W22k?3N2N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2N2M3001O001O00001O001O001O00001O001O00001O001O001O00001O001O001O00001O001O00001O001O001O00001O001O001O00001O001O00001O001O001O00001O001O001O00001O001O001O00001O001O00001VLmHeNS7X1QIgNP7V1RIjNn6T1TIlNm6P1WIoNi6o0YIQOg6l0]ISOd6j0^IVOb6h0aIWO`6e0cI[O]6c0eI]O\\6`0gI_OY6>jIBV6Z5_OjJ`0W5]OkJc0U5[OnJb0U5ZOnJd0T5ZOnJc0V5ZOmJf0R5WOQKi0o4UOTKj0m4ROVKo0i4oNZKP1g4mN[KT1d4iN_KW1a4gNbKY1^4dNdK\\1\\4aNhK^1Y4_NiKb1V4[NmKe1T4XNoKh1P4VNRLj1n3SNVLm1j3PNXLP2h3nM[LQ2f3kM]LU2c3iM_LW2b3fMaLY2_3dMdL\\2\\3bMgL]2Z3_MiLa2W3]MlLb2U3[MmLe2S3XMPMh2P3VMSMi2n2TMTMl2l2QMXMn2m701O00001O001O00001O001O001O00001O001O001O00001O001O00001O001O001O00001O001O001O00001O001O00001O001O001O00001O001O001O00001O001O001O00001O001O00001O001O001O00001O001O001N1N2M4M2N3L3N2M4M2N3L3NiA"}, "image_id": 109, "id": 1768}, {"iscrowd": 0, "category_id": 1, "bbox": [202.0, 0.0, 7.0, 2.0], "area": 9, "segmentation": {"size": [512, 512], "counts": "PPU31o?001OO100001o_g4"}, "image_id": 110, "id": 1769}, {"iscrowd": 0, "category_id": 1, "bbox": [300.0, 0.0, 64.0, 64.0], "area": 2340, "segmentation": {"size": [512, 512], "counts": "VQf42k?4M2N2M4M2M4M2M4M2M3N3M2M4M2M3O2O001O00001O010O010O01O01O010O01O01O010O0eA`NW>_1gAcNY>c1O010O010O00010O`NhAX1Y>eNjAX1`>N2TO\\A=d>_O_Aa0a>]ObAb0_>ZOdAg0[>WOgAi0i>0O01L3N2N3L3N3L3N2MknY2"}, "image_id": 110, "id": 1770}, {"iscrowd": 0, "category_id": 1, "bbox": [348.0, 0.0, 33.0, 17.0], "area": 349, "segmentation": {"size": [512, 512], "counts": "TP^52l?2N3O00001O010O0O2O001O00001O001O001O00001O001O001OO1N2N2M3N2N2NR`Q2"}, "image_id": 110, "id": 1771}, {"iscrowd": 0, "category_id": 1, "bbox": [392.0, 0.0, 59.0, 37.0], "area": 1430, "segmentation": {"size": [512, 512], "counts": "aPT62k?3N3L3M3N3L3O101O001O00001O001O00001O001O00001O001O00001O00001O001O000011N10O00010O0010O0010O0001O0M4M2M3M4M2M4M2M3Nh_n0"}, "image_id": 110, "id": 1772}, {"iscrowd": 0, "category_id": 1, "bbox": [122.0, 1.0, 15.0, 20.0], "area": 160, "segmentation": {"size": [512, 512], "counts": "_Pm13k?3M2M4M2N3M2O11M2M4M2N3M2M3Nn_k5"}, "image_id": 110, "id": 1773}, {"iscrowd": 0, "category_id": 1, "bbox": [235.0, 5.0, 15.0, 12.0], "area": 107, "segmentation": {"size": [512, 512], "counts": "[`e32k?3N2O2O010O01O01O010O01N1N3LioR4"}, "image_id": 110, "id": 1774}, {"iscrowd": 0, "category_id": 1, "bbox": [207.0, 11.0, 29.0, 22.0], "area": 368, "segmentation": {"size": [512, 512], "counts": "g`W32l?2M3N3L3N30O00010O0010O0010O0010O0010O00010O001O0M3N3L3N^oY4"}, "image_id": 110, "id": 1775}, {"iscrowd": 0, "category_id": 1, "bbox": [144.0, 13.0, 54.0, 40.0], "area": 1228, "segmentation": {"size": [512, 512], "counts": "RQX22k?3N2N3L3N3N10010O010O010O000N3L3N3L3O2O00010O010O00010O010O010O00010O010O01O01O010O01O0N2M4M2N3L3N3L3N2N3LXol4"}, "image_id": 110, "id": 1776}, {"iscrowd": 0, 
"category_id": 1, "bbox": [220.0, 30.0, 51.0, 52.0], "area": 1615, "segmentation": {"size": [512, 512], "counts": "RR^32l?2M4M2M4L3N2M4M2M4L3N2M4M2M301O010O01O01O010O01O01O01O01O010O01O01O010O01M2N2M4M2M4L3N2M4M2M3M4M2M4Mf^h3"}, "image_id": 110, "id": 1777}, {"iscrowd": 0, "category_id": 1, "bbox": [380.0, 40.0, 48.0, 42.0], "area": 1396, "segmentation": {"size": [512, 512], "counts": "TRn56e?5J6J7I6K5000001O01O00000001O01O00000001O01O00000001O01O00000oNZAi0n>00000010O00000001O01O0M3J6J6J6K^nY1"}, "image_id": 110, "id": 1778}, {"iscrowd": 0, "category_id": 1, "bbox": [294.0, 45.0, 34.0, 32.0], "area": 568, "segmentation": {"size": [512, 512], "counts": "fQc42k?4M2M4N100010Of@Eo>LaA3_>JdA7\\>FfA:Z>CjA=U>AmA?T>]OPBb0P>\\ORBe0d>010O010O00N3M2M4M2M3N3LmlQ5"}, "image_id": 110, "id": 1780}, {"iscrowd": 0, "category_id": 1, "bbox": [185.0, 71.0, 36.0, 26.0], "area": 504, "segmentation": {"size": [512, 512], "counts": "cbl22k?3N3L3N3M201O01O010O01O01O010O01O01O010O01O01O010O01O01O010N1N3L3N2M4M_]a4"}, "image_id": 110, "id": 1781}, {"iscrowd": 0, "category_id": 1, "bbox": [329.0, 76.0, 63.0, 53.0], "area": 1909, "segmentation": {"size": [512, 512], "counts": "obT55g?4L5K4L4M310O00000010O00UAZO\\>f0`A^O`>c0[ABe>l00000010O00000010O0000010O0000010O00000010kN]Ak0l>000010O0000010O000000010O0000010O000\\ORA6n>FVA;j>_O[Aa0R?01N1L4K5Ldlk1"}, "image_id": 110, "id": 1782}, {"iscrowd": 0, "category_id": 1, "bbox": [300.0, 89.0, 23.0, 20.0], "area": 274, "segmentation": {"size": [512, 512], "counts": "USf41m?3L3N3M2M310O0010O010O0010O0010O001O0M3N3L3NR]n2"}, "image_id": 110, "id": 1783}, {"iscrowd": 0, "category_id": 1, "bbox": [250.0, 115.0, 66.0, 58.0], "area": 1989, "segmentation": {"size": [512, 512], "counts": "bTm34j?2N2M4M2N3L3010O0010O010M2N3L3N2M4M2N3L3O1010O010O010O00010O010O010O00010O010O010O00010O010TO\\A:d>C`A=_>AcA?^>^OeAa0[>\\OhAe0X>XOjAh0h>0O010O0001M2M4M2N3L3N2M4M2N[kQ3"}, "image_id": 110, "id": 1784}, {"iscrowd": 0, "category_id": 1, "bbox": [483.0, 117.0, 26.0, 24.0], "area": 364, "segmentation": {"size": [512, 512], "counts": "Rda72k?4M2N3M2N2N3O010O010O0010O010O010O010N1N2N3M2N3M2NU\\1"}, "image_id": 110, "id": 1785}, {"iscrowd": 0, "category_id": 1, "bbox": [98.0, 118.0, 57.0, 45.0], "area": 1546, "segmentation": {"size": [512, 512], "counts": "bTa11m?2M3N3M2M4M2M3N3L3N3L30010O010O00010O010O010O00010O0N3N11O010O01O01O010O01O01O01L3NO011N4M2N2M4M2M4M2M4M2M3N3L3NW\\b5"}, "image_id": 110, "id": 1786}, {"iscrowd": 0, "category_id": 1, "bbox": [168.0, 124.0, 35.0, 30.0], "area": 611, "segmentation": {"size": [512, 512], "counts": "]Td22l?3L3N3L3N2M4M201O00010O010O01O01O010OO2O01O01O010O0^Of@?Y?_Oi@a0\\?0ON3L3N2N3L3Nh[j4"}, "image_id": 110, "id": 1787}, {"iscrowd": 0, "category_id": 1, "bbox": [321.0, 146.0, 53.0, 49.0], "area": 1505, "segmentation": {"size": [512, 512], "counts": "`eP51l?4M2M4M2M3N3L3N3L300O2M2M4N10010O0010O0010O0010O0010O0010O0010O0010O001lNVAQ1l>010O00010O010OO1M4XOo@=S?@QA<[?N2M4M2M4MejT2"}, "image_id": 110, "id": 1788}, {"iscrowd": 0, "category_id": 1, "bbox": [206.0, 168.0, 10.0, 10.0], "area": 58, "segmentation": {"size": [512, 512], "counts": "]UW32l?2N3N11O010O01OM4Mgjc4"}, "image_id": 110, "id": 1789}, {"iscrowd": 0, "category_id": 1, "bbox": [74.0, 170.0, 54.0, 42.0], "area": 1305, "segmentation": {"size": [512, 512], "counts": "PVU13j?3N3M2M3N3N1010O010O0010M2N3M2M4M2O1010O01O01O010O010O01O01O010O01O010O01O010O01O01O0N3M2M4M2N2M4M2M4M2N2M[jo5"}, "image_id": 110, "id": 1790}, {"iscrowd": 0, "category_id": 1, "bbox": [188.0, 170.0, 14.0, 18.0], "area": 144, 
"segmentation": {"size": [512, 512], "counts": "hUn22l?2N2M4M2N3L3001O0M4M2M3N3Mejj4"}, "image_id": 110, "id": 1791}, {"iscrowd": 0, "category_id": 1, "bbox": [138.0, 173.0, 31.0, 30.0], "area": 589, "segmentation": {"size": [512, 512], "counts": "nUU23k?3L3N2N3L3N3L301O01O01O010O01O010O01O010O01O01O01O0M4M2Gc@La?1a@Ma?1bZ[5"}, "image_id": 110, "id": 1792}, {"iscrowd": 0, "category_id": 1, "bbox": [231.0, 173.0, 68.0, 52.0], "area": 1804, "segmentation": {"size": [512, 512], "counts": "Wfc32l?2M3N3L3N3M2M3N3L3N3M201O01O010O01O01O010O01O01O010OO2O0010O0010O0010O010O3M010O01O010O010O00010O010O01TOQAf0n>XOUAg0R?0010O010O0N2M4M2N3L3N3L3N2NgYZ3"}, "image_id": 110, "id": 1793}, {"iscrowd": 0, "category_id": 1, "bbox": [371.0, 183.0, 24.0, 25.0], "area": 365, "segmentation": {"size": [512, 512], "counts": "Zfi51l?3N2M4L3N3L3M301O00010O010O0001N1N2M4M2M4L3N2MWZj1"}, "image_id": 110, "id": 1794}, {"iscrowd": 0, "category_id": 1, "bbox": [211.0, 184.0, 11.0, 12.0], "area": 88, "segmentation": {"size": [512, 512], "counts": "neY33j?4M2O101O010O01M2M3MYj`4"}, "image_id": 110, "id": 1795}, {"iscrowd": 0, "category_id": 1, "bbox": [170.0, 185.0, 10.0, 11.0], "area": 65, "segmentation": {"size": [512, 512], "counts": "oUe22l?3M2N210O0010M2N3MVjU5"}, "image_id": 110, "id": 1796}, {"iscrowd": 0, "category_id": 1, "bbox": [302.0, 194.0, 49.0, 53.0], "area": 1380, "segmentation": {"size": [512, 512], "counts": "TWg43j?3N3L3M3N3L3N3L3N2M4L3N3L31O01O01O010O01O01O010O0oN[Ah0d>UO_Ak0b>ROaAm0i>1O010O01O01O01O01O01VORAa0m>]OVAb0T?010ON2N3L3N2M4M2MPY`2"}, "image_id": 110, "id": 1797}, {"iscrowd": 0, "category_id": 1, "bbox": [28.0, 216.0, 24.0, 25.0], "area": 354, "segmentation": {"size": [512, 512], "counts": "ZW>1m?2M4M2M3N3L3N2O2O010O00010O010O00M4M2M3N3L3M4MTiU7"}, "image_id": 110, "id": 1798}, {"iscrowd": 0, "category_id": 1, "bbox": [52.0, 216.0, 91.0, 61.0], "area": 2706, "segmentation": {"size": [512, 512], "counts": "aWj02l?2M4M2M4M2N2M4M2M4M2O1010O01O01O010O010O00010O010O00010O010O01O01O010O01O01O010O01O01O010O010O00010O010O00RAROj>S10O0010O0010O010O0001M2N3M21O01O010O01O010O0O1N3M2M4M2M3N3L3N3L3N2N3L3N3LbXh5"}, "image_id": 110, "id": 1799}, {"iscrowd": 0, "category_id": 1, "bbox": [254.0, 218.0, 34.0, 27.0], "area": 517, "segmentation": {"size": [512, 512], "counts": "YWo31l?4M2N3L3N2M4O010O00010O010O0010O0010O010O00010O010O00O2M2N3L3N3L3Nmh_3"}, "image_id": 110, "id": 1800}, {"iscrowd": 0, "category_id": 1, "bbox": [356.0, 232.0, 20.0, 24.0], "area": 288, "segmentation": {"size": [512, 512], "counts": "iWb53j?3N2M4M2M4M2N210O010O0001M2N3L3N2M4M2MfhS2"}, "image_id": 110, "id": 1801}, {"iscrowd": 0, "category_id": 1, "bbox": [151.0, 236.0, 33.0, 32.0], "area": 640, "segmentation": {"size": [512, 512], "counts": "Ph[23k?2M4M2N3L3N2N3L3O2O010O00010O010O010O00010O01O0M4M2N2N3L3N3M2M4M\\hS5"}, "image_id": 110, "id": 1802}, {"iscrowd": 0, "category_id": 1, "bbox": [444.0, 237.0, 52.0, 71.0], "area": 1584, "segmentation": {"size": [512, 512], "counts": "]Xn63j?4M2M3M4M2M3010O01O01O010O^A\\Oh=d0UB_Ok=b0RB@o=?nAEQ>9iAIX>6eANZ>3bA0_>O_A4`>f00010O000O2L10O00010O0001O04L3N3L3M3N3L3M3N3L3M3N3L3M4M2M3M`h7"}, "image_id": 110, "id": 1803}, {"iscrowd": 0, "category_id": 1, "bbox": [276.0, 240.0, 53.0, 56.0], "area": 1655, "segmentation": {"size": [512, 512], "counts": "dXZ43k?2M4M2M4M2N2M4M2M4M2M3N3M2M4M21O010O01O010O01O010O01O01O0fN^AW1f>O010O00010O010O01OmN\\Aj0e>SO]Am0l>0OO2L3N2M4M2M4M2N3L3N2MgWk2"}, "image_id": 110, "id": 1804}, {"iscrowd": 0, "category_id": 1, "bbox": [17.0, 244.0, 39.0, 34.0], "area": 799, 
"segmentation": {"size": [512, 512], "counts": "Yh81m?3L3N2M4M2N3L3N3M20001O010O01O010O01O010O01O010O01O010O01O0O1N3M2M4M2M4M2N2MThS7"}, "image_id": 110, "id": 1805}, {"iscrowd": 0, "category_id": 1, "bbox": [236.0, 271.0, 15.0, 15.0], "area": 136, "segmentation": {"size": [512, 512], "counts": "hXf32l?2N3L3N3O01O010O01O01M2N3L3N_WR4"}, "image_id": 110, "id": 1806}, {"iscrowd": 0, "category_id": 1, "bbox": [499.0, 273.0, 13.0, 33.0], "area": 281, "segmentation": {"size": [512, 512], "counts": "Qii73j?3N3M2M3f@Dn>?o@Cn>`0PACn>h0010O00010O]G"}, "image_id": 110, "id": 1807}, {"iscrowd": 0, "category_id": 1, "bbox": [248.0, 278.0, 26.0, 26.0], "area": 453, "segmentation": {"size": [512, 512], "counts": "ZYl31l?3L4M4L3L4M4O01O0001O01O01O01O0001O01O000M4L3L4M4KXgf3"}, "image_id": 110, "id": 1808}, {"iscrowd": 0, "category_id": 1, "bbox": [435.0, 287.0, 23.0, 18.0], "area": 266, "segmentation": {"size": [512, 512], "counts": "Xii63j?4L3M301O01O01O01O01O01O01O01O01O01O0O1M4L3Mnfj0"}, "image_id": 110, "id": 1809}, {"iscrowd": 0, "category_id": 1, "bbox": [28.0, 290.0, 63.0, 53.0], "area": 2084, "segmentation": {"size": [512, 512], "counts": "RZ>1l?3N2M4M2M4M2M3N3M2M4M2M310O010O0010O0010O0010O0010O001UAmNh>V1O010O01O01O010GkNeAT1Z>oNfAm0[>UOeAl0[>TOdAl0\\>TOcAm0^>RO_AR1`>710N1O110O010O00010OO2M2]O^AEe>8^AFe>7^AFd>7_AFe>7^AFd>7_AFd>8d0M2MiVb6"}, "image_id": 110, "id": 1810}, {"iscrowd": 0, "category_id": 1, "bbox": [169.0, 300.0, 58.0, 66.0], "area": 1913, "segmentation": {"size": [512, 512], "counts": "jjd22l?3N2N1N3N2N2N1N3N2N2M3IWOTAj0k>XOSAj0k>7M3N1O2M3N2N2M2O2N2N0O10000O010000O101O2M3N2N2N1N3N2N2M2O2N2N2M2O2N1N011O2N2M3Gc@L_?2b@M_?2c@K`?29NVV^4"}, "image_id": 110, "id": 1811}, {"iscrowd": 0, "category_id": 1, "bbox": [101.0, 304.0, 60.0, 87.0], "area": 2959, "segmentation": {"size": [512, 512], "counts": "kjb12k?3GMc@6Y?Md@6Z?9M4M2M4o@UOh>T1M2O1010O010OkAhNb=X1[BkNe=V1XBmNh=R1UBQOk=o0SBSOm=n0oAVOQ>\\10010O001L3N2M4M2M4L3O110O00010O010O000bB`M]=a2010N1DcBSN_=k1dBQN`=c1\\B`N7K_=b1]B`NP>]1TB`No=8lAi0_>UOcAh0a>TOcAi0_>UOcAh0l>M4M2M4M2M3M4M2MlU_5"}, "image_id": 110, "id": 1812}, {"iscrowd": 0, "category_id": 1, "bbox": [486.0, 304.0, 26.0, 38.0], "area": 675, "segmentation": {"size": [512, 512], "counts": "PZc73j?3N2M4M2M4f@Bn>`0PABo>h0010O01O010O01O01O010O01O01O010O01O01XF"}, "image_id": 110, "id": 1813}, {"iscrowd": 0, "category_id": 1, "bbox": [256.0, 310.0, 60.0, 47.0], "area": 1746, "segmentation": {"size": [512, 512], "counts": "bZP43j?4L3M3M4K4M3M4L3M3010O000010O000010O00010O00010O00010O00010O00010O000010O000010O00010O00010O00010O000M4L3M3M4L3L4M3M4LmeQ3"}, "image_id": 110, "id": 1814}, {"iscrowd": 0, "category_id": 1, "bbox": [407.0, 310.0, 56.0, 41.0], "area": 1268, "segmentation": {"size": [512, 512], "counts": "Zj[63k?2M3N3L3N3L3N2M4O001O01O010O010O01O01O010O01O01O010O01O01O010O01O010O01O010O01O01O010O01O01O001M2M3N3L3N3M2M4MiUh0"}, "image_id": 110, "id": 1815}, {"iscrowd": 0, "category_id": 1, "bbox": [458.0, 334.0, 54.0, 39.0], "area": 1230, "segmentation": {"size": [512, 512], "counts": "T[U72l?2M4M2N2M4M2M4M21O010O000N3N11M2N3M21O010O01O01O010O01O010O01O01O010O01O01O010O01O010O01O01O010O0M3N3L3N3LVE"}, "image_id": 110, "id": 1816}, {"iscrowd": 0, "category_id": 1, "bbox": [396.0, 336.0, 33.0, 27.0], "area": 518, "segmentation": {"size": [512, 512], "counts": "Q[V63k?2M4M2M4M2M3N3O010O00010O010O00010OO2L3N2M4O01O01O010O01OO2L3N3LYUY1"}, "image_id": 110, "id": 1817}, {"iscrowd": 0, "category_id": 1, "bbox": [406.0, 357.0, 59.0, 48.0], "area": 1699, "segmentation": {"size": [512, 512], 
"counts": "Q\\[62l?2N3L3N2M4M2N3L3N2M4M2N3O010O01O01O010O01O010O01O010O01O01N1N3O01O01O010O01O01O010O010O0O01N3L3N3M2M4M2M3N3L3N3M2M3N`Tg0"}, "image_id": 110, "id": 1818}, {"iscrowd": 0, "category_id": 1, "bbox": [146.0, 367.0, 31.0, 28.0], "area": 576, "segmentation": {"size": [512, 512], "counts": "S\\Y22k?3L4M4K4M3M310O000010O000010O0000010O00010O000001N1M3L5L3L4M]TW5"}, "image_id": 110, "id": 1819}, {"iscrowd": 0, "category_id": 1, "bbox": [232.0, 370.0, 70.0, 47.0], "area": 2247, "segmentation": {"size": [512, 512], "counts": "]\\d33i?4L4L4L5K4L4M310O0000010O000001O01O0001O01o@SOm>P1001O01O0001O01O0001O01O0000010O000001O01O000L4L5O0000010O0000010O0000010O000000K6K4K5L4K6K4K5L\\dX3"}, "image_id": 110, "id": 1820}, {"iscrowd": 0, "category_id": 1, "bbox": [473.0, 371.0, 14.0, 16.0], "area": 129, "segmentation": {"size": [512, 512], "counts": "lk\\73k?3M2M4M210O01O01ON3M2M4M2N[T<"}, "image_id": 110, "id": 1821}, {"iscrowd": 0, "category_id": 1, "bbox": [88.0, 375.0, 38.0, 35.0], "area": 787, "segmentation": {"size": [512, 512], "counts": "]\\\\12k?3N3L3N2M4M2M4M2N2010O01O01O010O01O010O01O01O010O01O01OO2M2M4M2M3N3L3N3L3NPdP6"}, "image_id": 110, "id": 1822}, {"iscrowd": 0, "category_id": 1, "bbox": [3.0, 379.0, 73.0, 57.0], "area": 2140, "segmentation": {"size": [512, 512], "counts": "hl12k?4M2N2M4M2N3L3N3M2M3O2O0O2M2O20O0010O0010O0010O010O0010O0010O010O0010O0010O010O0010O0010O00oNWAl0i>QOYAo0m>O010O01O010O01O010O01O010O01L3N3M2M3N3M2M4M2N3L3NYci6"}, "image_id": 110, "id": 1823}, {"iscrowd": 0, "category_id": 1, "bbox": [97.0, 411.0, 15.0, 15.0], "area": 142, "segmentation": {"size": [512, 512], "counts": "Tm`12l?3L3M3O20O0010O0010O000M4M2MTcW6"}, "image_id": 110, "id": 1824}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 419.0, 26.0, 21.0], "area": 380, "segmentation": {"size": [512, 512], "counts": "_mo24h?4L4L401O01O0001O0001O01O0001O0001O01O0001O01K4L4LlRc4"}, "image_id": 110, "id": 1825}, {"iscrowd": 0, "category_id": 1, "bbox": [116.0, 423.0, 56.0, 68.0], "area": 1862, "segmentation": {"size": [512, 512], "counts": "^^j13l?1N3N2M2O2M2O2M3N1N3N2M2O2M3N1N3M3N1N3cAeNR>]1kAfNR>\\1mAeNR>]1kAfNR>f1N1N3N2OO2M3N1N3K5N1N3N2M2O2M3N1N3N2M2O2M3N1N3N2M2O2M3N1N3N2M2O2MXbY5"}, "image_id": 110, "id": 1826}, {"iscrowd": 0, "category_id": 1, "bbox": [213.0, 432.0, 65.0, 57.0], "area": 1913, "segmentation": {"size": [512, 512], "counts": "XnZ33l?2M2O2N2M2O2M3N2N1N3N2M2O2N2M2O2M300O010000O0100000O01000O1000O10O100O00100O1O001O100O001O100O000010O0001O00010O0000010J5K5K6J5Kiad3"}, "image_id": 110, "id": 1827}, {"iscrowd": 0, "category_id": 1, "bbox": [386.0, 433.0, 61.0, 39.0], "area": 1662, "segmentation": {"size": [512, 512], "counts": "[^Q63h?5K5L5J5K5N20001O01O000000010O00000001O01O000001O01O000001O0001O000001O01O000001O0001O0001O0001O0001O0001O000K5K5L4K6J5LYRP1"}, "image_id": 110, "id": 1828}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 436.0, 28.0, 52.0], "area": 893, "segmentation": {"size": [512, 512], "counts": "V>n0Q?1M4M2M4M2M4M2N201OO2M2M4M2M3N3L3N3L3N2N3L3N3L3N2M4M2NWba7"}, "image_id": 110, "id": 1829}, {"iscrowd": 0, "category_id": 1, "bbox": [454.0, 438.0, 48.0, 27.0], "area": 623, "segmentation": {"size": [512, 512], "counts": "X^S71m?3L3N3L3N2M4L301O01O010O01O01O010O00010O000M4M2M4M2N201O010O01O01O01O010O01O01O01O010O01O01L3Mna4"}, "image_id": 110, "id": 1830}, {"iscrowd": 0, "category_id": 1, "bbox": [54.0, 453.0, 49.0, 59.0], "area": 1969, "segmentation": {"size": [512, 512], "counts": 
"]_k01m?2M3g@Ke>8XAJf>8WAKX>KjA>KIY>KjA>KIX>MiA>KHZ>LiAm0U>VOhAl0U>a0N200001O001O00001O001O00001O001O00001O001O00001O001CcAUO]>h0gAWOY>g0iAYOX>c0lA\\OT>b0nA^OS>>QBAo==SBAP>;TBBn=f0600O1O1O1O1O100O1O11O1O2N1O1O1O2N1O1O1O00O1O100O1O1O1O1O100O1O1O2N2N2O1N3M2N2N2N2Of_l3"}, "image_id": 113, "id": 1844}, {"iscrowd": 0, "category_id": 1, "bbox": [344.0, 0.0, 29.0, 18.0], "area": 312, "segmentation": {"size": [512, 512], "counts": "UP\\52m?1LNX@5g?3O001O1O001O1O001O1O001O1O1O00N2O1N2O1N2O1O1N2O1N2OQ`U2"}, "image_id": 113, "id": 1845}, {"iscrowd": 0, "category_id": 1, "bbox": [395.0, 0.0, 81.0, 51.0], "area": 2160, "segmentation": {"size": [512, 512], "counts": "m`U63k?2M3N3L3N3O01O01O010O010O00010O010O00010O010O000O2M2M4M2M4M2M3N3L3N3N10001O001O00001O001O001O0000N2M3N2N2M3N2M3N2M3N2N2M3N2001O001O00001O001O010OM3N3L3N3L3N2M4Mnoa0"}, "image_id": 113, "id": 1846}, {"iscrowd": 0, "category_id": 1, "bbox": [259.0, 6.0, 83.0, 71.0], "area": 3395, "segmentation": {"size": [512, 512], "counts": "VaQ47e?6M2N3M3M100O2N1QA\\Ob>d0YAAg>m0O000001OL5J5M30000010O00000000dA_NY>e1O01OO100O1O2O0O100000001O000000000010O1O2N1OO101N1O100O1ON3J5K5K5L5J5M310O00010O00010O2N2O2M2N3N1N3M2N2O2M2N3N1NYod2"}, "image_id": 113, "id": 1847}, {"iscrowd": 0, "category_id": 1, "bbox": [477.0, 18.0, 35.0, 84.0], "area": 1594, "segmentation": {"size": [512, 512], "counts": "hb^71l?4M2M3M4M2M4O01O01O01ON3h@[OQ?l0L3YOROYBQ1e=QOYBR1c=QOZBR1d=QOYBR1c=QOZBR1d=QOXBR1e=QOYBR1d=f0001N1M4M01O2M3N3M2010O00]O"}, "image_id": 113, "id": 1848}, {"iscrowd": 0, "category_id": 1, "bbox": [349.0, 55.0, 50.0, 52.0], "area": 1559, "segmentation": {"size": [512, 512], "counts": "Pc^52l?2M4M2M4M2M4M2M3N3L3N3L3O1010OO2L3N2M4M201OO2L3N3O00010O010O00010O2O0O0SO^AB`A?`>]OcAc0]>[OfAe0Z>WOiAi0i>N1N1M3N3L3N3L3N2M]]h1"}, "image_id": 113, "id": 1849}, {"iscrowd": 0, "category_id": 1, "bbox": [402.0, 77.0, 50.0, 52.0], "area": 1544, "segmentation": {"size": [512, 512], "counts": "eSY63j?3N2M4M2M4M2M3M4M2M3N3L310O01OO2M2M4M2M301ON2M4O0010O00010O0101N0010OSO_A;a>BbA?^>^OeAa0[>\\OhAd0Y>YOiAh0h>O001L3N2M4M2M4M2M3Nglm0"}, "image_id": 113, "id": 1850}, {"iscrowd": 0, "category_id": 1, "bbox": [458.0, 129.0, 54.0, 52.0], "area": 1719, "segmentation": {"size": [512, 512], "counts": "QUU71m?2M4M2M3N3M2M4M2O20O00010O010O0010O0010O010O0001N1M4M2M3N3M2M4M210O01O010O01O01O010O0N210O0010O010O00010O0fK"}, "image_id": 113, "id": 1851}, {"iscrowd": 0, "category_id": 1, "bbox": [202.0, 134.0, 21.0, 21.0], "area": 264, "segmentation": {"size": [512, 512], "counts": "bTU32l?3M2N3M3M2N3O0010O010O010O0O2M2N3M2N3M2Nf[`4"}, "image_id": 113, "id": 1852}, {"iscrowd": 0, "category_id": 1, "bbox": [164.0, 147.0, 51.0, 53.0], "area": 1398, "segmentation": {"size": [512, 512], "counts": "cUb22l?3N1O2M3N2M2O2N2M2O2N2M3N1O2M3N2M2O2N2M3N1O2M3O010O10N2M3N1O2M3N2N1N3N2M2O2N2M3N1O2M3N2M2O2N2M2O2N2MlZd4"}, "image_id": 113, "id": 1853}, {"iscrowd": 0, "category_id": 1, "bbox": [336.0, 184.0, 73.0, 83.0], "area": 3519, "segmentation": {"size": [512, 512], "counts": "eWX53k?3M2M4M2M3N3M2M4M2M3N3L3N3M2M3N3L3N3L3N3M2M3N3L3N3N11O010O010O00010O010O01O01O010O01O01O010O0N3M2N2M4M2M4M2M3N3M2M4M2M3N3M2M4M2M4M2M3N3M2M4M2M3NgYc1"}, "image_id": 113, "id": 1854}, {"iscrowd": 0, "category_id": 1, "bbox": [26.0, 469.0, 55.0, 43.0], "area": 1504, "segmentation": {"size": [512, 512], "counts": "l_=2l?2M4M2N3L3N2M3N2M3N2M3N2M3N2001O0000N2M3O11O00001O001O00001O001O0nNYAk0g>SO\\Al0l>01O00001O001O00001O001M2N3L3N2M4M2M4M2NhPg6"}, "image_id": 113, "id": 1855}, {"iscrowd": 0, "category_id": 1, "bbox": [161.0, 0.0, 27.0, 18.0], 
"area": 303, "segmentation": {"size": [512, 512], "counts": "W``23k?3M2O2M201O001O001O1O001O001O00O1N2N2O1N2N2N2N200O1NRPR5"}, "image_id": 114, "id": 1856}, {"iscrowd": 0, "category_id": 1, "bbox": [192.0, 0.0, 61.0, 23.0], "area": 990, "segmentation": {"size": [512, 512], "counts": "PPP31o?1O001O1O002N1O001O001O001O1O001O001O001O001O1O001O001O001O1O001O001O0000000000001O0000000000000000000000000000000000N2^Ob`Q4"}, "image_id": 114, "id": 1857}, {"iscrowd": 0, "category_id": 1, "bbox": [376.0, 0.0, 60.0, 38.0], "area": 1406, "segmentation": {"size": [512, 512], "counts": "]Pl51m?3M2O2M2N3M3N1O2O001O001O1O001O001O00100N101O001O001O1O001O001O001O001O10O010O010ON3NO2N2O1N2N2N2N2O1N3M3M2O2M2N3M2O2MmoU1"}, "image_id": 114, "id": 1858}, {"iscrowd": 0, "category_id": 1, "bbox": [443.0, 7.0, 41.0, 40.0], "area": 876, "segmentation": {"size": [512, 512], "counts": "o`m61m?2N3N1N3M2N3M3N1N3M2N3N1N3N11000O010O010O010O01000O010ON3M2N3N1N3M3M2O2M2N3M2N3N[o="}, "image_id": 114, "id": 1859}, {"iscrowd": 0, "category_id": 1, "bbox": [133.0, 8.0, 10.0, 10.0], "area": 59, "segmentation": {"size": [512, 512], "counts": "^`R21n?1N3M2O20O010O1N1Ng_h5"}, "image_id": 114, "id": 1860}, {"iscrowd": 0, "category_id": 1, "bbox": [128.0, 16.0, 13.0, 16.0], "area": 115, "segmentation": {"size": [512, 512], "counts": "jPP23k?2O2M2N3N2M21M2O2M2N3N1N__i5"}, "image_id": 114, "id": 1861}, {"iscrowd": 0, "category_id": 1, "bbox": [170.0, 28.0, 60.0, 48.0], "area": 1466, "segmentation": {"size": [512, 512], "counts": "aQe21m?3N1N3M2O2M3M2N3N1N3M2O2N20O010O01000O010O010000O100O0100N1010O010O10O10O010N2M210M2O200OO2N2M2N3N1N3M3N1N3M2O2M2N3HZ@3j?N`n\\4"}, "image_id": 114, "id": 1862}, {"iscrowd": 0, "category_id": 1, "bbox": [121.0, 33.0, 36.0, 32.0], "area": 602, "segmentation": {"size": [512, 512], "counts": "`al12m?1N3M2N3M2O2M2N3N2O010O010O0100O0100O010O010O0100O0100ON3M2N3M2O2M3M2Nc^a5"}, "image_id": 114, "id": 1863}, {"iscrowd": 0, "category_id": 1, "bbox": [349.0, 38.0, 58.0, 45.0], "area": 1408, "segmentation": {"size": [512, 512], "counts": "ja^51n?1N3M3M2N3N1N3M201O10O010O01O1N1N3M2N3N11000O010O010O01000O010O001M2O2O10O010O10O010ON3N1N3M2N3N2M2N3M2O2M2N3M3M2O[^d1"}, "image_id": 114, "id": 1864}, {"iscrowd": 0, "category_id": 1, "bbox": [142.0, 56.0, 62.0, 63.0], "area": 1983, "segmentation": {"size": [512, 512], "counts": "jRW22l?3N1N3M3M2N3N1N3M2N3M2O2O10OO2M2N3O010InN]AS1a>8N2N110O0100O010O0100O010O0100O010O0100OnNbAb0_>[OdAe0[>YOgAg0Z>VOiAj0W>TOkAk0U>SOmAn0c>O010M2N3M2N3N2M2N3M2N3N1N3M2NUmi4"}, "image_id": 114, "id": 1865}, {"iscrowd": 0, "category_id": 1, "bbox": [421.0, 56.0, 37.0, 41.0], "area": 830, "segmentation": {"size": [512, 512], "counts": "cbb62m?2M2N3M3M2O2M2N3M2O2M3M2N3M2O2N110O010O10O0N3M3M2O2M2N3M2O2M2N3M3M2O2M2Nomj0"}, "image_id": 114, "id": 1866}, {"iscrowd": 0, "category_id": 1, "bbox": [498.0, 64.0, 14.0, 16.0], "area": 132, "segmentation": {"size": [512, 512], "counts": "WRi72m?1N3M3O010O010O0O2N1N30O01nM"}, "image_id": 114, "id": 1867}, {"iscrowd": 0, "category_id": 1, "bbox": [322.0, 78.0, 61.0, 49.0], "area": 1661, "segmentation": {"size": [512, 512], "counts": "WSQ52l?2O2M2N3M2O2M3M2N3M2O2M2N3M20100O0100O010O0100O010O0100O01M2N300O010O010O010O10O1N1N3M2OO01O002O2M2N3M2N3N1N3M3M2O2M2N3MW]P2"}, "image_id": 114, "id": 1868}, {"iscrowd": 0, "category_id": 1, "bbox": [101.0, 82.0, 32.0, 26.0], "area": 455, "segmentation": {"size": [512, 512], "counts": "mbb13k?2N3M2N3M2O2O0010O010O010O010O0100O0100O010O010O001M2N3M2O2M2NT]m5"}, "image_id": 114, "id": 1869}, {"iscrowd": 0, "category_id": 1, 
"bbox": [505.0, 91.0, 7.0, 16.0], "area": 63, "segmentation": {"size": [512, 512], "counts": "Ucl72l?3N1N3M3N1N3TM"}, "image_id": 114, "id": 1870}, {"iscrowd": 0, "category_id": 1, "bbox": [399.0, 99.0, 37.0, 28.0], "area": 482, "segmentation": {"size": [512, 512], "counts": "^cW61m?2O2M3M2N3N1O2O010O10O010O10O010O10O010O10O010O10O010O10O01N1O2M2N3M3N1N_lU1"}, "image_id": 114, "id": 1871}, {"iscrowd": 0, "category_id": 1, "bbox": [125.0, 118.0, 61.0, 49.0], "area": 1558, "segmentation": {"size": [512, 512], "counts": "]dn12l?2O2M2N3M2O2M3M2N3N1N3M2N300O010O0100O0100O010O0100O0100O010O0100O0100O010O010M2N3M3NO003M2O2M2N3M3N1N3M2N3N1N3M3M2O2M2NmkR5"}, "image_id": 114, "id": 1872}, {"iscrowd": 0, "category_id": 1, "bbox": [74.0, 123.0, 35.0, 30.0], "area": 540, "segmentation": {"size": [512, 512], "counts": "XTU13k?2O2M2N3M2N3N1O200O010O0100O0100O010O010O0100O0100O01N1N3M3M2O2M2N3Mh[Y6"}, "image_id": 114, "id": 1873}, {"iscrowd": 0, "category_id": 1, "bbox": [455.0, 123.0, 33.0, 32.0], "area": 574, "segmentation": {"size": [512, 512], "counts": "^dS71m?3M2N3M2O2M2N3M2N3M210O10O010O10O010O010O010OO2M2N3M3M2O2M2N3M2Nkk;"}, "image_id": 114, "id": 1874}, {"iscrowd": 0, "category_id": 1, "bbox": [298.0, 125.0, 57.0, 49.0], "area": 1568, "segmentation": {"size": [512, 512], "counts": "gTe41m?2N3M2N3N1N3M3M2O2M2N3M2N3N2O010O010O010M2O2N2O010O0100O0100O010O0100O010O0100O01M2N3M3N1N3M2N3M2O2M3M2N3N1N3M2Nc[^2"}, "image_id": 114, "id": 1875}, {"iscrowd": 0, "category_id": 1, "bbox": [374.0, 146.0, 33.0, 34.0], "area": 636, "segmentation": {"size": [512, 512], "counts": "WUk52m?2M2N3M3M2N3N1N3M2N3M2O2O010O10O10O010O01N1N3N1N3M3M2N3N1N3M2N3MU[d1"}, "image_id": 114, "id": 1876}, {"iscrowd": 0, "category_id": 1, "bbox": [406.0, 154.0, 16.0, 18.0], "area": 162, "segmentation": {"size": [512, 512], "counts": "WU[61m?3M3M2O2M2N3M2000O2M2N3M2N3M2NUk\\1"}, "image_id": 114, "id": 1877}, {"iscrowd": 0, "category_id": 1, "bbox": [104.0, 158.0, 51.0, 52.0], "area": 1441, "segmentation": {"size": [512, 512], "counts": "kUd12l?3N1N3M3M2N3N1N3M2N3M2O2M3M2N3M2010O010O01000O010O010O010O01000O010O010O010nNZAh0f>VO\\Ah0g>UO\\Ah0P?M2O2M2N3M2N3N1N3M3MXZb5"}, "image_id": 114, "id": 1878}, {"iscrowd": 0, "category_id": 1, "bbox": [272.0, 165.0, 60.0, 55.0], "area": 1817, "segmentation": {"size": [512, 512], "counts": "SVX43l?1N3M3M2O2M2N3M2N3N2M2N3M201O001M2N3N2M201O010O010O10O10O010O010O10O1eN_AV1`>iNbAV1d>10O010O0N1O001O012M2N3M3N1N3M2N3N1N3M3M2O2M2N[ji2"}, "image_id": 114, "id": 1879}, {"iscrowd": 0, "category_id": 1, "bbox": [403.0, 169.0, 47.0, 51.0], "area": 1350, "segmentation": {"size": [512, 512], "counts": "heY62m?2e@Me>5YANd>5YAMe>5ZAMd>5YAMg>4VAOj>0TA3k>NRA4o>?O0100O010OO2M3M2O2M2N3M210O10O010O1M2N3M2O2M2N3M2N3N2M2N3M2O2M2N3M3M2O2M2N[jn0"}, "image_id": 114, "id": 1880}, {"iscrowd": 0, "category_id": 1, "bbox": [497.0, 177.0, 15.0, 27.0], "area": 245, "segmentation": {"size": [512, 512], "counts": "Rfh72m?2M2N3M2O2M2N3M3N1N3O0100O01\\J"}, "image_id": 114, "id": 1881}, {"iscrowd": 0, "category_id": 1, "bbox": [25.0, 182.0, 36.0, 38.0], "area": 792, "segmentation": {"size": [512, 512], "counts": "]f<3k?2N3M2N3N1N3M3M2N3M2N3O010O010O0N3M201O10O010O0N3M2IPA\\OS?a0PA\\OR?b08M2N3N2M2N3M2NTZQ7"}, "image_id": 114, "id": 1882}, {"iscrowd": 0, "category_id": 1, "bbox": [348.0, 188.0, 35.0, 44.0], "area": 819, "segmentation": {"size": [512, 512], "counts": "WV^53k?2O2a@Ko>8n@JQ?7n@Ko>8n@J4Id>>VAL4Gd>m0YAVOg>i0WAZOh>o0100O0100O0100O0O2N1N3M3N1N3M2O2M3N1O2N1N3M3N1N3M2OeYP2"}, "image_id": 114, "id": 1883}, {"iscrowd": 0, 
"category_id": 1, "bbox": [75.0, 196.0, 55.0, 57.0], "area": 1624, "segmentation": {"size": [512, 512], "counts": "UgU12l?2N3N1N3M3M2N3N1N3M2N3M2N3N2M2N3M2N3O0010O01000O010O010O010O010fN^AT1c>iN_AX1e>0O010O01POYAf0f>XO]Ag0d>WO^Ai0b>TO`Al0k>O0N3N1N3M2N3M3N1N3M2N3M2Nlhn5"}, "image_id": 114, "id": 1884}, {"iscrowd": 0, "category_id": 1, "bbox": [432.0, 205.0, 36.0, 34.0], "area": 604, "segmentation": {"size": [512, 512], "counts": "kVh61m?3M2O2M2N3M3N1N3O001000O010Og@]OU?h00O10O10O010O10O10O010M3M2N3N1N3M2O2M3M2OUie0"}, "image_id": 114, "id": 1885}, {"iscrowd": 0, "category_id": 1, "bbox": [467.0, 211.0, 45.0, 50.0], "area": 1256, "segmentation": {"size": [512, 512], "counts": "`gY72l?3N1N3M2N3M2N3M2N3N1N3M3M2N3M2O2O010O010O010O010O10O010O010O10O01YOXA5g>I\\A6e>G]A:b>DaA;`>CaA=_>AdA<_>AcA=_>AdAROYAk0o>N3M2O2M3M2N3M2N3N1N3M2NbgZ6"}, "image_id": 114, "id": 1892}, {"iscrowd": 0, "category_id": 1, "bbox": [315.0, 250.0, 33.0, 34.0], "area": 620, "segmentation": {"size": [512, 512], "counts": "^hm43k?3M2N3N1N3M2N3M3N1N3M2100O010O0100O0100O0O2M2N3N1N3M2N3M3N1N3M2Nlga2"}, "image_id": 114, "id": 1893}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 254.0, 44.0, 39.0], "area": 906, "segmentation": {"size": [512, 512], "counts": "X8;c?3M2N3M2N30O0100O010O010O0100O010O0100O010O0100O010O0100O010O010O01O1M2N3NO01O3M2H]@1f?L\\@2k?N]gY7"}, "image_id": 114, "id": 1894}, {"iscrowd": 0, "category_id": 1, "bbox": [437.0, 258.0, 56.0, 55.0], "area": 1568, "segmentation": {"size": [512, 512], "counts": "mhj63l?1N3M2[@Ha?<010O010O01M2N3M3N1N3M2N3M2O2M3M2N3N101O010O010O10O010O10O01gN^AS1a>kNaAV1e>O010O010O10OPOXAh0i>VOYAh0h>VOZAi0P?M3M2N3M2O2M2N3M2O2M3MPW9"}, "image_id": 114, "id": 1895}, {"iscrowd": 0, "category_id": 1, "bbox": [216.0, 277.0, 58.0, 45.0], "area": 1509, "segmentation": {"size": [512, 512], "counts": "\\Y\\32l?2N3N1N3M2N3M2N3M2N3M2N3O00100O010O010O010O010O010O0O2M201O010O10O010O010O10OO1N0000002N2N3N1N3M2N3M2N3M2N3M2O2M2NUgf3"}, "image_id": 114, "id": 1896}, {"iscrowd": 0, "category_id": 1, "bbox": [277.0, 285.0, 26.0, 24.0], "area": 330, "segmentation": {"size": [512, 512], "counts": "[iZ41m?2N3M2N3N1N3M2O2000O010O010O010O010N2M2O2M2N3M2N3NjVX3"}, "image_id": 114, "id": 1897}, {"iscrowd": 0, "category_id": 1, "bbox": [383.0, 291.0, 42.0, 35.0], "area": 836, "segmentation": {"size": [512, 512], "counts": "dio52l?2N3N1N3M2N3M2N3N20O10O010O010O0O2N1N3N2O010O010O010O01000ON3N1N3M2N3M2N3N2M2N3M2NdV[1"}, "image_id": 114, "id": 1898}, {"iscrowd": 0, "category_id": 1, "bbox": [30.0, 298.0, 57.0, 46.0], "area": 1402, "segmentation": {"size": [512, 512], "counts": "PZ?2m?2M2N3M2O2M3M2N3N1N3M2N3N20O010O10O010O10O010O10O010O10O010O10O010O010O1N1O2M001O02N2N3N1N3M3M2O2M2N3M2O2M3M2N3NZVd6"}, "image_id": 114, "id": 1899}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 311.0, 15.0, 26.0], "area": 242, "segmentation": {"size": [512, 512], "counts": "g9g0Y?10O010O10ON3M2N3N1N3M2N3M3MSVh7"}, "image_id": 114, "id": 1900}, {"iscrowd": 0, "category_id": 1, "bbox": [189.0, 320.0, 57.0, 48.0], "area": 1474, "segmentation": {"size": [512, 512], "counts": "ijn21n?1N3M2N3M2O2M3M2N3M2O2M2O2O010M3M2010O010O010O10O10O010O010O010OO200O010O010O0100O0N3N1N3M2N3M2N3N2M2N3M2N3N1N3M^eT4"}, "image_id": 114, "id": 1901}, {"iscrowd": 0, "category_id": 1, "bbox": [384.0, 323.0, 76.0, 51.0], "area": 1768, "segmentation": {"size": [512, 512], "counts": "cZP61n?2M2N3M2N3N1010O010O0100O010O0100O010O010O0100O010O0100O01N1N3M2N3M3N1N3M2N3M2010O010O0100O010O0100O010O010O0100O010O0100O010nNXAi0i>UOYAi0Q?M2N3M2N3N2M2N3M2N3M2OTei0"}, "image_id": 
114, "id": 1902}, {"iscrowd": 0, "category_id": 1, "bbox": [344.0, 327.0, 24.0, 23.0], "area": 292, "segmentation": {"size": [512, 512], "counts": "cZ\\51m?3M2N3M2N3M30O010O01000O010O0100OO2N2M2N3N1N3MaeW2"}, "image_id": 114, "id": 1903}, {"iscrowd": 0, "category_id": 1, "bbox": [306.0, 334.0, 54.0, 46.0], "area": 1289, "segmentation": {"size": [512, 512], "counts": "V[i41m?2N3M2N3N1N3M2N3M3N1N3M2N3O010O01000O010O010O010O01000O010O010O010O01000O010O01O0O2M2N3M3M2O2M2N3M2N3N1N3Mnd[2"}, "image_id": 114, "id": 1904}, {"iscrowd": 0, "category_id": 1, "bbox": [4.0, 336.0, 57.0, 51.0], "area": 1795, "segmentation": {"size": [512, 512], "counts": "Z[23k?3M2N3M2O2M2N3M2N3N2M2N3O0010O010O010OO2M5K3N1O20O1N1O20O10O010O010O10O010O10O0O2N101O010O10O10N1N3M2N3N1^On@4U?Im@5V?Hm@2IM\\?0l@1WdQ7"}, "image_id": 114, "id": 1905}, {"iscrowd": 0, "category_id": 1, "bbox": [264.0, 338.0, 30.0, 25.0], "area": 398, "segmentation": {"size": [512, 512], "counts": "nZT41m?2N3M2N3M3N1O2O010O010O010O010O01000O010O010O01M2N3M2O2M2NUe\\3"}, "image_id": 114, "id": 1906}, {"iscrowd": 0, "category_id": 1, "bbox": [302.0, 348.0, 6.0, 6.0], "area": 24, "segmentation": {"size": [512, 512], "counts": "oZg42m?1N3O01O0MVeU3"}, "image_id": 114, "id": 1907}, {"iscrowd": 0, "category_id": 1, "bbox": [276.0, 366.0, 14.0, 16.0], "area": 117, "segmentation": {"size": [512, 512], "counts": "i[Z42l?2N3M2N3N1N3OO2M2N3M2O2M2Nad^3"}, "image_id": 114, "id": 1908}, {"iscrowd": 0, "category_id": 1, "bbox": [163.0, 368.0, 59.0, 45.0], "area": 1384, "segmentation": {"size": [512, 512], "counts": "Ula21m?2O2M2N3M2O2M3M2N3N1N3M201O10O010O01000O010O010O10O10O010O010O10O10O010O0100O0N3M2OO001O2O2M2N3N2M2N3M2O2M2N3M3N1N3MTd`4"}, "image_id": 114, "id": 1909}, {"iscrowd": 0, "category_id": 1, "bbox": [319.0, 372.0, 10.0, 8.0], "area": 46, "segmentation": {"size": [512, 512], "counts": "hko41m?3M210O0010O001M2N[Tk2"}, "image_id": 114, "id": 1910}, {"iscrowd": 0, "category_id": 1, "bbox": [369.0, 377.0, 68.0, 45.0], "area": 1593, "segmentation": {"size": [512, 512], "counts": "Wlh53k?2N3M2O2M2N30O0100O0100O010ON3M2N3M30O010O010O010O01000O010O010O010O0100N1N3M2N3O0010O01000O010O010O010O0N3M3M2O2M2N3M2N3N1N3M3M2N3N1NhSU1"}, "image_id": 114, "id": 1911}, {"iscrowd": 0, "category_id": 1, "bbox": [234.0, 381.0, 27.0, 23.0], "area": 330, "segmentation": {"size": [512, 512], "counts": "X\\e31n?1N3M3N1N3M2O2O1O01000O010O01000O010O01N2N1N3M2O2M3MjSm3"}, "image_id": 114, "id": 1912}, {"iscrowd": 0, "category_id": 1, "bbox": [352.0, 381.0, 10.0, 12.0], "area": 66, "segmentation": {"size": [512, 512], "counts": "S\\`52l?3M2N30O10O10M2N3MQdZ2"}, "image_id": 114, "id": 1913}, {"iscrowd": 0, "category_id": 1, "bbox": [266.0, 382.0, 16.0, 15.0], "area": 122, "segmentation": {"size": [512, 512], "counts": "V\\U41n?2M2O2M3M210O10O10O01M3M2O2M2Omcb3"}, "image_id": 114, "id": 1914}, {"iscrowd": 0, "category_id": 1, "bbox": [277.0, 383.0, 47.0, 41.0], "area": 1085, "segmentation": {"size": [512, 512], "counts": "dlZ43l?1N3M2N3N1N3M3M2N3N1N3M2010O01000O010O010O0100O010N1N30O0100O01O0N3N2M2N3M2O2M2N3M3N1N3M2N3Mdcm2"}, "image_id": 114, "id": 1915}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 388.0, 32.0, 41.0], "area": 868, "segmentation": {"size": [512, 512], "counts": "VS100O0100O010M2O20O01000O010N1N3M1O101O20O010O10O01N1[OYAOj>OWAOk>OXAOj>NXA0k>MXA0j>OXANk>OWA0j>N^So4"}, "image_id": 114, "id": 1918}, {"iscrowd": 0, "category_id": 1, "bbox": [361.0, 419.0, 39.0, 53.0], "area": 1219, "segmentation": {"size": [512, 512], "counts": 
"nmd52l?3M2N3N1N3M2N3M2O2n@[Oe>h0XAZOf>h0XA[Oe>Q1O2M2N30O0010O010O010O010O01O012M10O01WO\\A2g>K[A3g>L[A1h>L[A1g>M[A2g>K\\A2f>L]A1f>M[A1UQh1"}, "image_id": 114, "id": 1919}, {"iscrowd": 0, "category_id": 1, "bbox": [200.0, 427.0, 37.0, 34.0], "area": 701, "segmentation": {"size": [512, 512], "counts": "m]T33k?2N3M2N3M2O2M2N3M2O2O10O10O010O010O010O010O010O10O001M3M2N3M2O2M2N3M2N3MYRY4"}, "image_id": 114, "id": 1920}, {"iscrowd": 0, "category_id": 1, "bbox": [253.0, 430.0, 67.0, 58.0], "area": 1829, "segmentation": {"size": [512, 512], "counts": "Wnn32l?3N1N3M2N3M3N1N3M2N3M2O2M2N300O010O0100O010O0100O010O0100O010O0100O010O0100O010O0100O010O0100O010O0100O01ROUAf0j>XOXAh0Q?0ON3M3M2O2M2N3M2N3N1N3M_ao2"}, "image_id": 114, "id": 1921}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 450.0, 5.0, 11.0], "area": 30, "segmentation": {"size": [512, 512], "counts": "R>;f?M2N3M2OmQm7"}, "image_id": 114, "id": 1922}, {"iscrowd": 0, "category_id": 1, "bbox": [112.0, 452.0, 60.0, 54.0], "area": 1817, "segmentation": {"size": [512, 512], "counts": "Q_h12l?3M2N3N1N3M2N3M3N1N3j@YOo>m0N3M2N3N1N11O1010O10O010O010O010O10O10O010O010O010O010O10O10O0O1O0O3M2N3M2N3N1N3M2N3M2N3N2M2N3M2NZaY5"}, "image_id": 114, "id": 1923}, {"iscrowd": 0, "category_id": 1, "bbox": [391.0, 455.0, 32.0, 32.0], "area": 562, "segmentation": {"size": [512, 512], "counts": "jnS62l?3N1N3M2N3N1N3M3M2N3N1100O010O0100O010O01N2M2N3M2O2M2N3M2N3N1N`Q\\1"}, "image_id": 114, "id": 1924}, {"iscrowd": 0, "category_id": 1, "bbox": [326.0, 463.0, 46.0, 49.0], "area": 1236, "segmentation": {"size": [512, 512], "counts": "b_S51n?2M2N3M2N3N1N3M3M2N3N1N3M2N3N1N3M3M2N3N1100O010O001O001O1O001jN]Am0c>QO_Ao0j>ZOSA7m>HTA8m>EVA:k>CWA=i>AZA>g>@[A?S?O2M2N3M2N3N1N_`U2"}, "image_id": 114, "id": 1925}, {"iscrowd": 0, "category_id": 1, "bbox": [173.0, 469.0, 51.0, 43.0], "area": 1214, "segmentation": {"size": [512, 512], "counts": "]of23k?3M2N3N1N3M2N3M3N1N3M2N3N1O20O0100WAnNb>R1\\APOe>P1YAQOh>S1001O001O001O1O001O001O001O0N3N2M2N3M2O2M2N3M2N3O10O010ON3M2Ni`_4"}, "image_id": 114, "id": 1926}, {"iscrowd": 0, "category_id": 1, "bbox": [227.0, 470.0, 57.0, 42.0], "area": 1371, "segmentation": {"size": [512, 512], "counts": "foa32l?3N1N3M2N3M2N30O010O10O10O010O001OO1N2N2N2O1N2N2N2N2N2N2N2O1N21O001O001O1O001O001O001O0MTAROm>k0UAUOk>i0XAVOi>g0YAYOg>e0f0oA\\OQ>d0lA^OT>b0jADS>:hAIU>R1N3M3N00O2O2M2N3M2N3M2O2O00100O0100O010O010OUO_A7b>FaA:^>DdA<]>AfA?Z>^OhAb0X>\\OkAd0U>YOmAf0T>YOnAd0h>O2M2N3M2N3M3N1N3MYnj3"}, "image_id": 115, "id": 1938}, {"iscrowd": 0, "category_id": 1, "bbox": [190.0, 33.0, 9.0, 9.0], "area": 45, "segmentation": {"size": [512, 512], "counts": "WQo21m?3M2O2N10O2M3N1No^l4"}, "image_id": 115, "id": 1939}, {"iscrowd": 0, "category_id": 1, "bbox": [62.0, 36.0, 67.0, 58.0], "area": 1883, "segmentation": {"size": [512, 512], "counts": "nQo02k?3N3M2N2M4M2N3M2M4M2N3N11O01O010O010O010O00010O010O010O010O00010O010O010O010O00010O010SAQOg>T11O010O01O010O010O01O0XOXA9h>D[A;e>C]A>b>@aA?`>]OcAb0o>M2M3N3M2N3L3Nf]o5"}, "image_id": 115, "id": 1940}, {"iscrowd": 0, "category_id": 1, "bbox": [345.0, 43.0, 23.0, 31.0], "area": 369, "segmentation": {"size": [512, 512], "counts": "la\\51m?3M2N3N2M2N3M2N3N1O20Oi@ZOT?j00O010O0Fm@HS?6n@JS?3PAMo>1TALo>1SAMo>1b0N_nW2"}, "image_id": 115, "id": 1941}, {"iscrowd": 0, "category_id": 1, "bbox": [120.0, 46.0, 46.0, 41.0], "area": 945, "segmentation": {"size": [512, 512], "counts": "PRl13k?2N3M2N3M2N3N1N3M2O20O0100O011N010O010O010O0100O010VOk@g0W?1000O010O010O010O010M2N3M2N3M2N3M2O2Mom\\5"}, "image_id": 115, "id": 1942}, {"iscrowd": 0, 
"category_id": 1, "bbox": [277.0, 60.0, 62.0, 56.0], "area": 1678, "segmentation": {"size": [512, 512], "counts": "`bZ42l?3N1N3M2N3M2O2M3M2N3M201O0100O010O0100O010O0100O010O0100O010O0100O010O01o@UOk>l0SAVOj>R1N1N3M2010O10O010O10WO]A3c>K_A6a>GbA8^>FdA;\\>BgA=Z>AgA=[>AhA<[>AgA>Z>@iA=n>N3M3M2O2MP]f2"}, "image_id": 115, "id": 1943}, {"iscrowd": 0, "category_id": 1, "bbox": [506.0, 76.0, 6.0, 19.0], "area": 65, "segmentation": {"size": [512, 512], "counts": "kRm72k?4L3M4L3M3dM"}, "image_id": 115, "id": 1944}, {"iscrowd": 0, "category_id": 1, "bbox": [35.0, 81.0, 57.0, 48.0], "area": 1512, "segmentation": {"size": [512, 512], "counts": "[ca01m?2N3M2N3N2M2N3M2N3M2O2M201O10O010ON3M2N30O010O10O10O010O010OO2O010O010O10O010O1O0N3M2N3N1N3M2N3M2N3N2M2N3M2N3M2Onla6"}, "image_id": 115, "id": 1945}, {"iscrowd": 0, "category_id": 1, "bbox": [248.0, 97.0, 58.0, 65.0], "area": 1677, "segmentation": {"size": [512, 512], "counts": "UTl31m?3N1N3M2N3N1N3M3M2O2M2N3M2O2M3M2N3N1N3M2O2O10O10O010O010O10RObA8_>EcA<\\>BgA=Z>@iA`0V>_OkAb0U>[OnAd0R>ZOPBg0P>VOSBi0n=UOSBl0a>O10O10O010O010VOn@d0X?0O10M2N3M2O2M3M2N3N1NckV3"}, "image_id": 115, "id": 1946}, {"iscrowd": 0, "category_id": 1, "bbox": [172.0, 98.0, 65.0, 47.0], "area": 1364, "segmentation": {"size": [512, 512], "counts": "aSf21n?2M2N5M1O010N1N3M2N3O00100O0100O010O010M2N3M2N3N20O010O10O010Oe@Hm>:PAIm>9QAIm>:PAIP?6n@LR?b000O010O0O2N1N3M2N3M2N3N20O001M2N3M3CVAAk>=WAAl>>VA@m>=TO]Ak0d>RO^Ao0j>O010O010O0100O010O01M3M2N3M2N3M2O2M2N3M2N3M3NaZh3"}, "image_id": 115, "id": 1955}, {"iscrowd": 0, "category_id": 1, "bbox": [443.0, 152.0, 56.0, 52.0], "area": 1649, "segmentation": {"size": [512, 512], "counts": "eem63k?2N3M2N3M2N3M2N3N1N3M2N3M2010O01N2M2N3O0010O010O010O10O010O010O010O010O10O010N1N3M2N3M2N3M3M2N3M2O2M2N3M2N3M2NhZ6"}, "image_id": 115, "id": 1956}, {"iscrowd": 0, "category_id": 1, "bbox": [121.0, 157.0, 54.0, 58.0], "area": 1543, "segmentation": {"size": [512, 512], "counts": "lel11m?2N3M2O2M2N3M2N3N2M2N3M2N3N1N3M2N3O10O10O010O010O010ZAhNb>\\10O10O010O010O010OWO]A6c>G`A8a>EaA<^>BeA=\\>@gA`0X>_OiAa0X>\\OkAd0T>ZOnAf0g>O0N3M2O2M3M2N3N1N3MmYX5"}, "image_id": 115, "id": 1957}, {"iscrowd": 0, "category_id": 1, "bbox": [286.0, 167.0, 18.0, 14.0], "area": 128, "segmentation": {"size": [512, 512], "counts": "[U_44j?2N2010O010O010O010O010O010N1N3N1NdjW3"}, "image_id": 115, "id": 1958}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 175.0, 43.0, 50.0], "area": 1415, "segmentation": {"size": [512, 512], "counts": "_5R1o>O010O10O010O010O010O010O10O010O10O010O010M2N3M3N101O010O010O01N1N3M2N3N1^ORA0Q?MQA1R?LQA1Q?MRA0Q?MQA1Q?MRA0TiZ7"}, "image_id": 115, "id": 1959}, {"iscrowd": 0, "category_id": 1, "bbox": [55.0, 175.0, 9.0, 9.0], "area": 50, "segmentation": {"size": [512, 512], "counts": "dek01m?3M2O20O010O0O2M`jo6"}, "image_id": 115, "id": 1960}, {"iscrowd": 0, "category_id": 1, "bbox": [59.0, 198.0, 30.0, 34.0], "area": 547, "segmentation": {"size": [512, 512], "counts": "nfm01m?3M2N3N1N3M3N1N3M2N3N1N3M3N101OO2M2O2M3M2N3N1N3M3N1N3M2O2MdYc6"}, "image_id": 115, "id": 1961}, {"iscrowd": 0, "category_id": 1, "bbox": [505.0, 198.0, 7.0, 11.0], "area": 53, "segmentation": {"size": [512, 512], "counts": "\\fl72l?2N3M201O10OiI"}, "image_id": 115, "id": 1962}, {"iscrowd": 0, "category_id": 1, "bbox": [192.0, 199.0, 50.0, 51.0], "area": 1356, "segmentation": {"size": [512, 512], "counts": "TWP31[?1UA2h>0VA2i>0UA2h>1UA1i>1UA2i>0TA3j>NUA3l>a0O010O01000M210O01000O010O0100O0100O010O0100O0100O010O01oNUAl0o>N3M2O2M3M2N3N1N3M2N3N2M2NPiV4"}, "image_id": 115, "id": 1963}, {"iscrowd": 0, 
"category_id": 1, "bbox": [422.0, 199.0, 46.0, 41.0], "area": 1043, "segmentation": {"size": [512, 512], "counts": "QWc63k?2N3M2O2M2N3M2N3M2N3N1N3M3M2010O010O010O010O010M2O2M3M2N3M2N30O010O10O010O001M3N1N3M2N3M2N[ie0"}, "image_id": 115, "id": 1964}, {"iscrowd": 0, "category_id": 1, "bbox": [498.0, 206.0, 14.0, 16.0], "area": 127, "segmentation": {"size": [512, 512], "counts": "eVi72l?2N3N1N3000O010O01000O010O\\I"}, "image_id": 115, "id": 1965}, {"iscrowd": 0, "category_id": 1, "bbox": [31.0, 208.0, 31.0, 35.0], "area": 592, "segmentation": {"size": [512, 512], "counts": "Tg?1n?2M2N3N1N3M2N3N2M2N3M2O2O010O10O010O10O10O010O0Ao@0Q?MRA2o>KSA3o>LSA2o>KSA3o>KTA2^?NihP7"}, "image_id": 115, "id": 1966}, {"iscrowd": 0, "category_id": 1, "bbox": [141.0, 213.0, 34.0, 30.0], "area": 550, "segmentation": {"size": [512, 512], "counts": "TgV22l?2N3N2M2N3M2N3M2010O0100O0100O010O010O010O01000O001M2N3M2N3N1N3M2NPYX5"}, "image_id": 115, "id": 1967}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 225.0, 10.0, 23.0], "area": 129, "segmentation": {"size": [512, 512], "counts": "Q7g0Y?O2M2N3M3M2N3N1N3Mlhj7"}, "image_id": 115, "id": 1968}, {"iscrowd": 0, "category_id": 1, "bbox": [19.0, 231.0, 45.0, 45.0], "area": 1110, "segmentation": {"size": [512, 512], "counts": "fg92m?2M2N3N2a@HR?:m@GQ?M[A3e>L]A4c>I_A8`>FcA9^>DeA9]>EeA:]>CfA:\\>EeA9S?M2N3M2OUgf4"}, "image_id": 115, "id": 1972}, {"iscrowd": 0, "category_id": 1, "bbox": [391.0, 257.0, 66.0, 50.0], "area": 1752, "segmentation": {"size": [512, 512], "counts": "fhS62l?3M2N3M2N3N1N3M2N3M2N3O10O010O010O010O10O010O10Oo@UOm>o010O01M210O10O010O010O010O1O0N3M2O2M2N30O010O010M2N3M3N1N3M2N3M2N3N1N3M2N3M3M2O2M`Wk0"}, "image_id": 115, "id": 1973}, {"iscrowd": 0, "category_id": 1, "bbox": [104.0, 258.0, 36.0, 30.0], "area": 581, "segmentation": {"size": [512, 512], "counts": "`Xd12m?2M2N3M2N3M2O2M201O010O10O10O010O010O010O010O10O10O01O0N3N1N3M2N3M2O2Mbgi5"}, "image_id": 115, "id": 1974}, {"iscrowd": 0, "category_id": 1, "bbox": [461.0, 267.0, 31.0, 30.0], "area": 519, "segmentation": {"size": [512, 512], "counts": "khV73k?2N3M2O2M3M2N3N1N3O010O10O010O010O10O010ON3N1N3M2N3N2M2N3M2N\\g9"}, "image_id": 115, "id": 1975}, {"iscrowd": 0, "category_id": 1, "bbox": [485.0, 282.0, 19.0, 21.0], "area": 213, "segmentation": {"size": [512, 512], "counts": "Xib71m?3M2N3N2M2N3M2O20O010N2M2N3M2O2M2N3MRg3"}, "image_id": 115, "id": 1976}, {"iscrowd": 0, "category_id": 1, "bbox": [4.0, 286.0, 37.0, 39.0], "area": 750, "segmentation": {"size": [512, 512], "counts": "iY21m?2O2M2N3N1N3M2N3N2M2N3M2O2M2N3N110O10O10O01M2N3M2N3N1N3M2N3M2O2M3M2N3M2OhV[7"}, "image_id": 115, "id": 1977}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 295.0, 26.0, 22.0], "area": 256, "segmentation": {"size": [512, 512], "counts": "]i]13k?2O2M2O2O10O03NO010O3NO10O010O10O10O010O001M2N3M2O]VU6"}, "image_id": 115, "id": 1978}, {"iscrowd": 0, "category_id": 1, "bbox": [121.0, 295.0, 60.0, 52.0], "area": 1557, "segmentation": {"size": [512, 512], "counts": "mil11m?3M2O2M2N3M3N1010O10O010O10O010O010O10N1O2M3M2N3N1N3M2N3M3O0010O010O10O010O10O010O10O010O010O10O0\\OZAOf>N\\A2e>K^A5a>JaA4a>IaA5a>IbA5`>HbA6`>HcA5`>IaA5W?M2OeUU5"}, "image_id": 115, "id": 1979}, {"iscrowd": 0, "category_id": 1, "bbox": [367.0, 305.0, 58.0, 51.0], "area": 1664, "segmentation": {"size": [512, 512], "counts": "gig5141d?3Y@Oe?8M3N2M2O2O1O1O0N3N1N2O0O2O1O0O2O0O2O0O2O10O2OM2O20O10O10O010O010O01000O010O010O010mN[Aj0e>SO^Al0b>RO`Am0k>M2N3M2N3N1N3M3M2O2M2N3MfU[1"}, "image_id": 115, "id": 1980}, {"iscrowd": 0, "category_id": 1, "bbox": [487.0, 305.0, 12.0, 
12.0], "area": 78, "segmentation": {"size": [512, 512], "counts": "fic71m?2N3N2O010O10O0100OM4M]V6"}, "image_id": 115, "id": 1981}, {"iscrowd": 0, "category_id": 1, "bbox": [84.0, 309.0, 28.0, 34.0], "area": 529, "segmentation": {"size": [512, 512], "counts": "UZZ12l?2N3_@JT?8j@KT?7i@KU?8h@KU?a0N3M2O2O1O01O0N3M3N1N3O0O2M2N3M2O2M3M2N3M2OSfW6"}, "image_id": 115, "id": 1982}, {"iscrowd": 0, "category_id": 1, "bbox": [435.0, 316.0, 36.0, 30.0], "area": 601, "segmentation": {"size": [512, 512], "counts": "_ji61m?2O2M2N3M2O2M3M2O2M2N300O0100O010O010O1M2O2O0010O10O1O0O2M2N3M3N1N3M2OgUd0"}, "image_id": 115, "id": 1983}, {"iscrowd": 0, "category_id": 1, "bbox": [26.0, 322.0, 11.0, 12.0], "area": 83, "segmentation": {"size": [512, 512], "counts": "YZ=2l?3N1N3M201O01N1N3M2NnU]7"}, "image_id": 115, "id": 1984}, {"iscrowd": 0, "category_id": 1, "bbox": [39.0, 328.0, 15.0, 15.0], "area": 128, "segmentation": {"size": [512, 512], "counts": "ajc01m?3M2N3M2O20O01000O001M2N3M3NceT7"}, "image_id": 115, "id": 1985}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 333.0, 17.0, 28.0], "area": 288, "segmentation": {"size": [512, 512], "counts": "]:h0Y?0O10O10O010O01M2N4L3M2N3N1N3M2N[Ug7"}, "image_id": 115, "id": 1986}, {"iscrowd": 0, "category_id": 1, "bbox": [3.0, 336.0, 43.0, 51.0], "area": 1101, "segmentation": {"size": [512, 512], "counts": "gk12m?1N3M2N3M2N3M2O2M2N3M2N3M3N1N3M2N3M2N3M2O1OO2N3M2N3M3M2O2M2N3M2N3M2O2M2N3M3M2N3N1N3MWeX7"}, "image_id": 115, "id": 1987}, {"iscrowd": 0, "category_id": 1, "bbox": [69.0, 344.0, 23.0, 20.0], "area": 252, "segmentation": {"size": [512, 512], "counts": "RkR11m?2N3N2M2N3N1010O010O010O01000O010O0N3M2O2M2NRea6"}, "image_id": 115, "id": 1988}, {"iscrowd": 0, "category_id": 1, "bbox": [111.0, 345.0, 55.0, 51.0], "area": 1579, "segmentation": {"size": [512, 512], "counts": "gkg12l?2N3M3N1N3N1010OO2M2N3M3N1N3M2N3M2O2M2N3O10O010O010OO2M210O10O010O010O0N3M3M2N3N1N3M2N3M2O2M3M2N3M2O2M2N3M2Nkd\\5"}, "image_id": 115, "id": 1989}, {"iscrowd": 0, "category_id": 1, "bbox": [343.0, 353.0, 59.0, 50.0], "area": 1773, "segmentation": {"size": [512, 512], "counts": "gk[55e?6J6J6L5O0000000010O0000000001O01O000001O0001O000001O000001O01SAYO_>T101O001O010O001O00I7010O00000000ROZAb0f>WOaAj0j>000001O000O1J6I7Icdf1"}, "image_id": 115, "id": 1990}, {"iscrowd": 0, "category_id": 1, "bbox": [58.0, 354.0, 40.0, 32.0], "area": 558, "segmentation": {"size": [512, 512], "counts": "W[m035N`?4^@O_?:O200O010O010O0100O010O010O010O0100O010O010O0100O010O010O010O010M2N3M2N3M2O\\d^6"}, "image_id": 115, "id": 1991}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 360.0, 20.0, 19.0], "area": 203, "segmentation": {"size": [512, 512], "counts": "b[i62m?1N3M2N3M3N110O010O0100O010M2N3N1N3M2Ncdl0"}, "image_id": 115, "id": 1992}, {"iscrowd": 0, "category_id": 1, "bbox": [420.0, 373.0, 28.0, 29.0], "area": 443, "segmentation": {"size": [512, 512], "counts": "W\\b62l?2N3M2N3N1N3M3M2N3N110O0100O0100O01M2N3M2O2M3M2N3N1N3MSdo0"}, "image_id": 115, "id": 1993}, {"iscrowd": 0, "category_id": 1, "bbox": [448.0, 385.0, 9.0, 10.0], "area": 50, "segmentation": {"size": [512, 512], "counts": "V\\P72l?2O2M3O010ON3L3NoSk0"}, "image_id": 115, "id": 1994}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 387.0, 16.0, 27.0], "area": 265, "segmentation": {"size": [512, 512], "counts": "S5TAHn>6TAHo>5TAHn>6TAIn>4UAIm>6`0M2NSSW2"}, "image_id": 115, "id": 1997}, {"iscrowd": 0, "category_id": 1, "bbox": [11.0, 400.0, 29.0, 28.0], "area": 452, "segmentation": {"size": [512, 512], "counts": 
"Qm51n?1N3M3M2O2M2N3M2N3N2O010O010O010O0100O0N3M2N3N1N3M3M2O2MXc[7"}, "image_id": 115, "id": 1998}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 403.0, 19.0, 22.0], "area": 204, "segmentation": {"size": [512, 512], "counts": "S]i62l?2N3N1N3M2N3N2O01N1N00001O2O1N3M2N3M]Sm0"}, "image_id": 115, "id": 1999}, {"iscrowd": 0, "category_id": 1, "bbox": [374.0, 409.0, 28.0, 24.0], "area": 380, "segmentation": {"size": [512, 512], "counts": "X]k52l?2N3N1N3M2N3M2N3O0010O0100O01N1N3M2N3O0010O01E\\@9f?O0O2M2Nnbf1"}, "image_id": 115, "id": 2000}, {"iscrowd": 0, "category_id": 1, "bbox": [489.0, 411.0, 23.0, 47.0], "area": 565, "segmentation": {"size": [512, 512], "counts": "jmd71m?3N2N1N3N2M3N1O2M3N1N3N2N2M2O2N2M2O2M3N2N1N1VC"}, "image_id": 115, "id": 2001}, {"iscrowd": 0, "category_id": 1, "bbox": [35.0, 413.0, 34.0, 32.0], "area": 603, "segmentation": {"size": [512, 512], "counts": "]ma03k?2N3M2N3N1N3M2N3N10100O010O010O010O0100O010O0100ON3M2N3M2O2M2N3M2NiRm6"}, "image_id": 115, "id": 2002}, {"iscrowd": 0, "category_id": 1, "bbox": [276.0, 440.0, 66.0, 55.0], "area": 1746, "segmentation": {"size": [512, 512], "counts": "_^Z43k?2O2M3M2N3M2O2M2N3M2N3N2M210O010O0100O0100O010O010O0100O0100O010O010O0100O0100O010O010O0100O0100O010O010ON3M3M2N3N1N3M2N3M2O2M3M2N]ad2"}, "image_id": 115, "id": 2003}, {"iscrowd": 0, "category_id": 1, "bbox": [56.0, 448.0, 50.0, 45.0], "area": 1197, "segmentation": {"size": [512, 512], "counts": "h^l01n?1N3M2O2M3M2N3N1N3M3N1N3M2010O0100O010M2N3N20O10O010O01000O010O010O10mNWAm0o>N1N3M3N1N3M2O2M2N3M3N1N3M2O\\aZ6"}, "image_id": 115, "id": 2004}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 451.0, 24.0, 48.0], "area": 661, "segmentation": {"size": [512, 512], "counts": "S>^1c>O010O010N2M2N3N1N3M2N3M3N1N3M2N3N1N3M2N3M3N1Nbac7"}, "image_id": 115, "id": 2005}, {"iscrowd": 0, "category_id": 1, "bbox": [336.0, 481.0, 18.0, 21.0], "area": 186, "segmentation": {"size": [512, 512], "counts": "`_X51m?3M2O2M2N3M2O2M3N02M3M2O2M2N3M2O2Ml`^2"}, "image_id": 115, "id": 2006}, {"iscrowd": 0, "category_id": 1, "bbox": [4.0, 483.0, 86.0, 29.0], "area": 1316, "segmentation": {"size": [512, 512], "counts": "n_22m?1N2N2N2N2N2O1N2N21O001O1O001O001O001O001O1O00O1OO0030O010O010M2010OFFn@:Q?Hm@9Q?Jn@6P?LPA4n>ORA0n>0PA2P??1O1O001O001O00O11O001O001O1O001O001O001O001O1O001O001O001O001O1O001O001O001O001O001M3N1NX`b6"}, "image_id": 115, "id": 2007}, {"iscrowd": 0, "category_id": 1, "bbox": [351.0, 483.0, 42.0, 29.0], "area": 776, "segmentation": {"size": [512, 512], "counts": "jo_51m?2O2M3M2\\@G_?>N2O1N2N2N2O1N2N2001O001O001O1O001O001O001O1O001O001O001O001N2M2O2M2N3M2O_Pk1"}, "image_id": 115, "id": 2008}, {"iscrowd": 0, "category_id": 1, "bbox": [399.0, 488.0, 27.0, 24.0], "area": 387, "segmentation": {"size": [512, 512], "counts": "ioW61n?2M3M2N3N1N3M2N3M3N100001O001O001O1N1N3M2N3N1N3M3M2Oa`Z1"}, "image_id": 115, "id": 2009}, {"iscrowd": 0, "category_id": 1, "bbox": [260.0, 493.0, 57.0, 19.0], "area": 682, "segmentation": {"size": [512, 512], "counts": "n_R42m?1N2N2N2O1N2N2N2O1N2001O1O001O001OO1N2O1001O1O001O001O001O1O001O001O001O1O001O0000N2N2001O001O1O001O001O001O1N1NWPQ3"}, "image_id": 115, "id": 2010}, {"iscrowd": 0, "category_id": 1, "bbox": [421.0, 495.0, 22.0, 17.0], "area": 248, "segmentation": {"size": [512, 512], "counts": "nob61m?2N3M2N2N2O1N2N2001O001O001O001O001O0K]@Md?0_@Nc?ObPR1"}, "image_id": 115, "id": 2011}, {"iscrowd": 0, "category_id": 1, "bbox": [452.0, 496.0, 60.0, 16.0], "area": 741, "segmentation": {"size": [512, 512], "counts": 
"h_R78h?000000000000000IMa@3_?7000000000000000000O1000000006J000000O10000000000000000000000000000000000O100N2O1000000000000000000"}, "image_id": 115, "id": 2012}, {"iscrowd": 0, "category_id": 1, "bbox": [338.0, 510.0, 4.0, 2.0], "area": 5, "segmentation": {"size": [512, 512], "counts": "o_Y51n?11O00Q`d2"}, "image_id": 115, "id": 2013}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 12.0, 16.0], "area": 133, "segmentation": {"size": [512, 512], "counts": "0`0`?00O1O100O1O2O1N3M2O2Mjoi7"}, "image_id": 117, "id": 2014}, {"iscrowd": 0, "category_id": 1, "bbox": [144.0, 0.0, 86.0, 51.0], "area": 2577, "segmentation": {"size": [512, 512], "counts": "oPX22m?3N1N3M2O2M2N3N1N3M2O2M2]OVOmAk0S>VOlAj0S>YOjAg0V>[OhAe0Y>]OeAc0Z>_OdAa0\\>BaA>`>C_A=`>a01O100O1O100O1O11O1O2N1O1O00O1O100O1O100O1O100O1O100O1O100O1O100O1O100O1O100O1O100O1O100O1O100O1O100O1O100O1O100O1O2O2M2N3N1N3M2Ofo\\4"}, "image_id": 117, "id": 2015}, {"iscrowd": 0, "category_id": 1, "bbox": [312.0, 0.0, 42.0, 20.0], "area": 427, "segmentation": {"size": [512, 512], "counts": "PPl41o?1O2N1O1O2N1O2N1O2N1O2N1O2NO100O1O100O1O100O1O100O1O100O1O1O100O1O100O1O100O1O100OQP_2"}, "image_id": 117, "id": 2016}, {"iscrowd": 0, "category_id": 1, "bbox": [334.0, 0.0, 89.0, 63.0], "area": 2886, "segmentation": {"size": [512, 512], "counts": "lPW51o?1N3M3N1N3M3N1N3M2O2M3N1N3M3NO01O01O01O010O00010O3N2M2N3N1N3O10O0100OO1N10O00010O0010O00010O00010O010O00010I`AmN`>S1cAkN\\>U180O00010O000100O1O100O100O1O100O1O100O3M2O2M3M2O2M2O2M2N3N1N3M3N[_\\1"}, "image_id": 117, "id": 2017}, {"iscrowd": 0, "category_id": 1, "bbox": [453.0, 0.0, 9.0, 4.0], "area": 22, "segmentation": {"size": [512, 512], "counts": "P`R71o?2N1O00O100O1O10PPi0"}, "image_id": 117, "id": 2018}, {"iscrowd": 0, "category_id": 1, "bbox": [466.0, 0.0, 46.0, 52.0], "area": 1445, "segmentation": {"size": [512, 512], "counts": "gPY71o?1N3M2O2M2N3N1N3M2O1N3M2O2M2N3N1N3N101M1O10O00010O00010O00010O00MiN^AX1b>iN]AV1d>21O100O1O100O1O100O1O1"}, "image_id": 117, "id": 2019}, {"iscrowd": 0, "category_id": 1, "bbox": [176.0, 26.0, 79.0, 76.0], "area": 2948, "segmentation": {"size": [512, 512], "counts": "nQh23m?1N3M2O2M2N3N1N3M2O2M2N3N1N3M2O2M210O01O010O01N1O2M1O1AgNWBX1i=jNUBV1l=lNQBU1n=mNQBR1o=QOnAo0S>ROkAo0T>>0010O0001O01O01O01O01O01O01O0MkA]NT>c1nA[NS>d141O01O3O0010O01N1jNcAf0`>WOcAf0_>YObAf0`>WOcAf0_>XObAg0a>WO^Aj0k>01O3N1N3M2O2M2N2O2M2N3N1Nm]P4"}, "image_id": 117, "id": 2020}, {"iscrowd": 0, "category_id": 1, "bbox": [371.0, 38.0, 86.0, 80.0], "area": 3342, "segmentation": {"size": [512, 512], "counts": "ebi51n?2E0c@3[?Nd@3Z?0c@3Z?;O2M2N3N1N3M2O2M00100O3M2O2M2N3N1N2N3N1N3M100O000100O2N3N1N3M2OO00010O00010O00JeAhN\\>X1eAgNZ>Y1OhNgAX1Z>iNeAW1Z>70010O00010O000010O00010O002O1N3M2O2M2N3N1N3M2O2M2N3N1N3M2O2M2N3M2O2M2N3Nh]k0"}, "image_id": 117, "id": 2021}, {"iscrowd": 0, "category_id": 1, "bbox": [74.0, 39.0, 80.0, 70.0], "area": 2741, "segmentation": {"size": [512, 512], "counts": "PRU11o?2M2j@N[>5cAL\\>5bAN[>5bAM\\>5cAL\\>6aAM\\>5bAM\\>6bAL[>6cAL\\>5bAN[>5cAL[>o0NO011N3M2O1N3M10DfNRBY1o=iNnAW1R>kNlAV1S>mNkAR1V>oNhAQ1X>R1cAlN]>T1fAiN[>V1gAiNX>X1700010O00010O0001O01O01O010O2N2O2M2N3N1N3M2N3N1N2N3N1N3M2O2M2N3NZl8"}, "image_id": 117, "id": 2026}, {"iscrowd": 0, "category_id": 1, "bbox": [6.0, 95.0, 73.0, 81.0], "area": 2867, "segmentation": {"size": [512, 512], "counts": "fS31o?1T@Of?8M2O2M2N3N1N3M2O2M2N3N1N3M2OO00010O1O2O0O02O1N3M2O2M2N3hAaNk=`1SBbNk=a1RBbNk=`1TBaNk=a1RBbNk=k1N3N1N3M2O2M02N2O2M2N3N1N3M2O2M2N3]NkAW1V>gNlAW1W>fNlAW1V>gNlAX1`>M2N3N1N3M2O2M2N2O2M2N3N1N3M2O2M2N3NX[h6"}, "image_id": 117, "id": 
2027}, {"iscrowd": 0, "category_id": 1, "bbox": [246.0, 121.0, 81.0, 77.0], "area": 2950, "segmentation": {"size": [512, 512], "counts": "\\Uk32n?1N3M2O2M3M2O2M2N3N1N3M3N1N2N10O1O10O010O3M2O2M2O0O00010O000BiNUBW1j=lNSBT1m=nNQBR1P>POnAP1Q>ROmAn0S>UOjAk0W>VOhAj0W>?0010O00010O00010O00010O00010O00010O00010002O001M2N3N1N3jN^Ai0d>UO^Aj0d>TO^Ai0d>UO^Ai0o>N2M2N3N1N3M2O2M2N3N1NnZl2"}, "image_id": 117, "id": 2028}, {"iscrowd": 0, "category_id": 1, "bbox": [500.0, 140.0, 12.0, 23.0], "area": 143, "segmentation": {"size": [512, 512], "counts": "dTj71n?2O1N3M2N2O2M2N2O2M2N2cK"}, "image_id": 117, "id": 2029}, {"iscrowd": 0, "category_id": 1, "bbox": [142.0, 144.0, 79.0, 67.0], "area": 2692, "segmentation": {"size": [512, 512], "counts": "bUW23m?1N3M2O2M2BETA>i>EUAETA>i>ETA=j>EUA=i>=O1O01O01O01O01O2O2M2010O010O010ON3N1N3M2O0O000101N3M2OO00010O00010O0001F`ATO_>l0cARO]>n0fAoN[>Q1fAnNY>R1:010O00010O00010O0002O2M2N3N1N3M2O2M2N3N1N3M2O2M2N3N1NhZa4"}, "image_id": 117, "id": 2030}, {"iscrowd": 0, "category_id": 1, "bbox": [441.0, 160.0, 66.0, 68.0], "area": 2733, "segmentation": {"size": [512, 512], "counts": "ael63m?4K5L4L3E^ORAf0i>_ORAf0j>:N000O10O10O10001O4K5L4L4L3L5L0000O0100000O010000O03N4L1O0O10O10O1000OVOlAJT>5QBFP>:TBBl=>WB_Oh=b0\\BZOd=e0aBVO`=j0j01000O10O10O1000O10O4M4L4L4K4M4L4LUZ2"}, "image_id": 117, "id": 2031}, {"iscrowd": 0, "category_id": 1, "bbox": [223.0, 170.0, 8.0, 8.0], "area": 34, "segmentation": {"size": [512, 512], "counts": "^e_32m?2O1N001O02N3NcZ\\4"}, "image_id": 117, "id": 2032}, {"iscrowd": 0, "category_id": 1, "bbox": [282.0, 180.0, 77.0, 69.0], "area": 2858, "segmentation": {"size": [512, 512], "counts": "WW]43m?2AOj@5S?Mk@5R?Nl@5Q??N2M2OO010O01MQOUAo0k>2010O01000O010O01AmNQBR1P>QOlAP1S>SOkAm0U>UOiAj0X>YOdAh0[>>10O10O010O10O010O10O10O02O2N0O010O010O10O10O010O010O10O10O010O01001N3N2M4M2M4M2N4K3N3L3N3M2M3N3L3NVY\\2"}, "image_id": 117, "id": 2033}, {"iscrowd": 0, "category_id": 1, "bbox": [57.0, 182.0, 57.0, 61.0], "area": 2369, "segmentation": {"size": [512, 512], "counts": "Wgl0:\\?:E;F:E;I70010O000000000000000000010O00000000L4E;K50001O01O000000000000000001O01O0000J60000QOVAi0P?00000000000001L3E;FXjV6"}, "image_id": 117, "id": 2034}, {"iscrowd": 0, "category_id": 1, "bbox": [199.0, 208.0, 71.0, 57.0], "area": 2662, "segmentation": {"size": [512, 512], "counts": "SgS31f?9H8N210O0000000000000i@Bl>i01O00WA[OW>e0aAD^>o0000000001O000001O0000000001O0001O000000000001O0001O0000000000dNhAP1d>01O01O00000000000001O01O00000000QOXAf0Q?000010H7G9Glhh3"}, "image_id": 117, "id": 2035}, {"iscrowd": 0, "category_id": 1, "bbox": [320.0, 235.0, 68.0, 50.0], "area": 2420, "segmentation": {"size": [512, 512], "counts": "^XP53d?9I8O0001O00K5G9O11O01O0000006J001O01O0000000M3M30I70001O01O00000000000006J01O00000000000eN`AV1e>1O000001O0000000001O000001O000H8G9J60000L4Gehm1"}, "image_id": 117, "id": 2036}, {"iscrowd": 0, "category_id": 1, "bbox": [467.0, 243.0, 45.0, 47.0], "area": 1759, "segmentation": {"size": [512, 512], "counts": "lgY7>b?00002I50000000c0]O00000000000000003I4000000000000000000000000000M30000000000000004L000000YH"}, "image_id": 117, "id": 2037}, {"iscrowd": 0, "category_id": 1, "bbox": [37.0, 256.0, 54.0, 84.0], "area": 2737, "segmentation": {"size": [512, 512], "counts": "[jb07^?;J6000000I7F:ElNjAT1k=XOTBh0a=R1G80000001O000000000001O00K5E;L5O0001O0000]NTBR1l=cN_B]1R>0000002N01O000000000jNjAb0j>0001O01O00000000I7ERWb6"}, "image_id": 117, "id": 2038}, {"iscrowd": 0, "category_id": 1, "bbox": [96.0, 265.0, 56.0, 87.0], "area": 2959, "segmentation": {"size": [512, 512], "counts": 
"oY`13e?8G9G:G8J61O0000000001O0001O00000M3O100000001O6KO000hATO\\=l0\\B\\Oe=c0RBFn=:RBFn=:RBFm=;SBEe=c0YB@]=f1J600000000000001O01O00000000000TOjBoNU=h0TCVOoi0010O00J6E;F:K50000000L400002N000001O00I7F:O11O0001O0000000000000001O000001O0000I7E;UOUBUOW>a0j0E:Efg`4"}, "image_id": 117, "id": 2040}, {"iscrowd": 0, "category_id": 1, "bbox": [395.0, 290.0, 10.0, 8.0], "area": 72, "segmentation": {"size": [512, 512], "counts": "RiU67i?000000000000001O0nVe1"}, "image_id": 117, "id": 2041}, {"iscrowd": 0, "category_id": 1, "bbox": [338.0, 293.0, 53.0, 76.0], "area": 2768, "segmentation": {"size": [512, 512], "counts": "R[Y59]?:F;O000000000001O000003M1O000M3F:G9F:F:000001O0001L30000001O000001O000000000001O01O00000N2F:F:G9F;E:F:GPWl1"}, "image_id": 117, "id": 2042}, {"iscrowd": 0, "category_id": 1, "bbox": [284.0, 296.0, 50.0, 75.0], "area": 2963, "segmentation": {"size": [512, 512], "counts": "hZ^49`0SAJc>n0J6000000000000010O0000J6G9J600000N20010O000000000000K501O01O00000000000001O01O000TO\\B\\Od=;eBE[=:fBFZ=:fB]Oc=GVB`06@Z>8k0Gbfh2"}, "image_id": 117, "id": 2043}, {"iscrowd": 0, "category_id": 1, "bbox": [226.0, 298.0, 49.0, 66.0], "area": 2479, "segmentation": {"size": [512, 512], "counts": "oZa35b?9I70000[O^OlAc0j=GUB9d=N[B3e=MRBa0j=j000000000000000010O0000000000000010O000000N2N2000001O00000001O0000ROoAMQ>JXB6i=@`B`0`=XOhBh0X>M3G9GjUf3"}, "image_id": 117, "id": 2044}, {"iscrowd": 0, "category_id": 1, "bbox": [472.0, 303.0, 40.0, 48.0], "area": 1499, "segmentation": {"size": [512, 512], "counts": "dZ\\71l?3L5K4FGi@=S?:M4K4L4O1010O0000010O0000N3O00010O0000010O0000010O00000O2O01O0001O01O]F"}, "image_id": 117, "id": 2045}, {"iscrowd": 0, "category_id": 1, "bbox": [453.0, 356.0, 59.0, 48.0], "area": 1897, "segmentation": {"size": [512, 512], "counts": "WlR71l?3L5K4M3L4N3O00010O0000010O0000010O00010O0N2L4M4K4L4N2010O00010O0000010O0001O01O00010O0M3N201O00010O0000010O0001O01OcD"}, "image_id": 117, "id": 2046}, {"iscrowd": 0, "category_id": 1, "bbox": [23.0, 425.0, 67.0, 55.0], "area": 2412, "segmentation": {"size": [512, 512], "counts": "bn;7a?8H8O2O0001OJYOSAg0e>A[A?]>IcA7Z>LfA4Z>LfA4Z>LfA5Y>KhA4X>LhA4Y>i000000000000001O01O00000000003M01O00000000000001O01O0000000000TOeA3[>DnA001O0001O00000000000001O01O00000N2F:Goab6"}, "image_id": 117, "id": 2047}, {"iscrowd": 0, "category_id": 1, "bbox": [98.0, 437.0, 75.0, 49.0], "area": 2397, "segmentation": {"size": [512, 512], "counts": "U^a19]?:J6010O0000000000000000010O0000000000000000YA[OS>f0bAD_>n00000N3O000000000000000010O00000000000000000M4F900\\OSA5m>A]Ad0m>000O1O1000000010O0000000000000000010O0000000J6FSRY5"}, "image_id": 117, "id": 2048}, {"iscrowd": 0, "category_id": 1, "bbox": [189.0, 445.0, 71.0, 54.0], "area": 2365, "segmentation": {"size": [512, 512], "counts": "Yon21g?8H8O2O0000000O1H8H8G903M10O0000000000N2010O00000000000000010O0000000lNfAb0Z>VOnAj0e>O01O000000000001O01O0000L4O10003M00000010O00000000000001O01O00O1G9H`am3"}, "image_id": 117, "id": 2049}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 454.0, 7.0, 27.0], "area": 137, "segmentation": {"size": [512, 512], "counts": "V>k0U?000000H9F9HQRl7"}, "image_id": 117, "id": 2050}, {"iscrowd": 0, "category_id": 1, "bbox": [279.0, 454.0, 48.0, 58.0], "area": 2203, "segmentation": {"size": [512, 512], "counts": "dn[45d0MW>=_AMOGR>b1001O000001O000000004L01O000000000000000000001O0000000[NkA_1[>00000001O0000000I7E;K5000000010OK5EjQl2"}, "image_id": 117, "id": 2051}, {"iscrowd": 0, "category_id": 1, "bbox": [336.0, 461.0, 57.0, 51.0], "area": 2028, "segmentation": {"size": [512, 512], "counts": 
"n^X52h?7J5J6001O0001O0001Oo@_Oc>a0XADh>j0000010^APOR>P1iAUOV>l0dAZO]>T100000000001O000000001O0000O100001O0000000eN_AW1e>0001O000000001O0000000K5J7I6K5J6K\\Qk1"}, "image_id": 117, "id": 2052}, {"iscrowd": 0, "category_id": 1, "bbox": [407.0, 479.0, 56.0, 33.0], "area": 1348, "segmentation": {"size": [512, 512], "counts": "fo[61k?4M3L5K4L40010O0001O01O00010L3L40001O0n@VOn>n01O0000001O0000001O00002N1OM31O0000001O0000001O0000001O00001M2L4M3L5K4LlPh0"}, "image_id": 117, "id": 2053}, {"iscrowd": 0, "category_id": 1, "bbox": [4.0, 489.0, 104.0, 23.0], "area": 1876, "segmentation": {"size": [512, 512], "counts": "l_24d?8H8M300000000001O00000000000000001O000000000000001O00000000000000001O00000000000000001O00000000000000001O000000000000001O00000000000000001O00000000000000001O0000000000L4O1001O0000000000000000001O0000000N2Fk`Y6"}, "image_id": 117, "id": 2054}, {"iscrowd": 0, "category_id": 1, "bbox": [468.0, 499.0, 44.0, 13.0], "area": 378, "segmentation": {"size": [512, 512], "counts": "n_Z72k?3N21O00001O0000001O0000001ON2L4L400001O0000001O0000001O00N2001O0000001O00001O0000001O"}, "image_id": 117, "id": 2055}, {"iscrowd": 0, "category_id": 1, "bbox": [140.0, 503.0, 69.0, 9.0], "area": 323, "segmentation": {"size": [512, 512], "counts": "j_V26g?3001O00000000000000001O000000000000001O000000000000001O000000000000001O000000000000001O00000000000000001O000000000000001O00000000000000QPg4"}, "image_id": 117, "id": 2056}, {"iscrowd": 0, "category_id": 1, "bbox": [397.0, 0.0, 41.0, 16.0], "area": 336, "segmentation": {"size": [512, 512], "counts": "P`V61o?001O00001O001O001O00001O001O00001O001O00001O001O004L001O0000N200001OO1M3N2M3N2NRPU1"}, "image_id": 118, "id": 2057}, {"iscrowd": 0, "category_id": 1, "bbox": [439.0, 5.0, 24.0, 24.0], "area": 354, "segmentation": {"size": [512, 512], "counts": "e`k62l?2M3N3M2M4M2N201O00010O010O00010OO2L3N2M4M2M3Mi_h0"}, "image_id": 118, "id": 2058}, {"iscrowd": 0, "category_id": 1, "bbox": [323.0, 67.0, 189.0, 133.0], "area": 11416, "segmentation": {"size": [512, 512], "counts": "hdQ51m?2M4M2M4M2M3N3L3N3M2M4M2M3N3L3N3L3N2M4M2O2O01O010O01O01O010O01O01O010O010O00010O010O01O01O010O010O00010O010O00010O010O0010O0010O0010O0010O010O0010O0010O0010O0010O0010O010O00010O010O00010O010O01O01O010O010O00010O010O01O01O010O01O01OO2O001N101O0O101N101O0O101L3N3M2M4M2M3N3M2M4M1N1000O010O011O2M3N3L3N3hAQO[=Q1bBRO[=R1bBPO\\=R1aBRO[=Q1cBQO[=R1aBRO\\=P1bBRO[=R1bBQO[=Q1bBRO[=k1N3M2M4M2M3N3L3N3M2M3N3L3N3M2N2010O010O01O01O010O0UOoChMQ`1O00010O010O00010O0001WObAO]>NgA1Y>LjA4W>IkA8T>EPB:P>CSB=n=_OUBb0j=\\OYBc0b>1OO2L3M4M2M3M4L3NaYl5"}, "image_id": 118, "id": 2062}, {"iscrowd": 0, "category_id": 1, "bbox": [7.0, 175.0, 13.0, 19.0], "area": 144, "segmentation": {"size": [512, 512], "counts": "ne31l?4L3M3M4M2O101ON3L3M3M4Lbje7"}, "image_id": 118, "id": 2063}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 196.0, 2.0, 6.0], "area": 8, "segmentation": {"size": [512, 512], "counts": "T66k?Lnin7"}, "image_id": 118, "id": 2064}, {"iscrowd": 0, "category_id": 1, "bbox": [5.0, 201.0, 10.0, 9.0], "area": 59, "segmentation": {"size": [512, 512], "counts": "_f21l?3M4O0010O00001L3MhYh7"}, "image_id": 118, "id": 2065}, {"iscrowd": 0, "category_id": 1, "bbox": [64.0, 220.0, 26.0, 22.0], "area": 348, "segmentation": {"size": [512, 512], "counts": "WWP14i?3N2M4M2O1010O010O00010O010O00010O010O000N3L3N3L3Nnhb6"}, "image_id": 118, "id": 2066}, {"iscrowd": 0, "category_id": 1, "bbox": [13.0, 224.0, 72.0, 56.0], "area": 2361, "segmentation": {"size": [512, 512], "counts": 
"Qh63k?3M2N3L3N3N11O010OECSA>j>DWA;g>GYA:c>J\\A6b>L_A4^>OaA1^>0cAO]>2bAO]>1dAN]>2bAO]>1aA1`>N^A5a>e010O00010O010O010O01O01N1O2O0010O010O0010O010O0001M2O2O010O01O04K1M4M2N30O01O01ON3M2N3L3N3M4K4M2N2NYXe6"}, "image_id": 118, "id": 2067}, {"iscrowd": 0, "category_id": 1, "bbox": [263.0, 228.0, 249.0, 219.0], "area": 11966, "segmentation": {"size": [512, 512], "counts": "YhS41m?2M3N3L3N3M2M3N3L3N3L3N3L3N2M40O010O00010O010O01O01O010O01O01O010O010O01O01O010O01O01O010O01O010O01O01O01gN^AS1a>kNbAT1e>0010O010O010O00010O010O00010O010O00010O010O01O01O010O010O00010O010O01O01O010O01O01O010O010O01O01O010O01O01O010O01O010O01O010O01O01O010O01O010O01O01O010O01O010O01O010O01O01O010O01O01O010O010O01O01O010O01O01O010O010O00010O010O01O01O010O010O00010O010O00010O010O0010O0010O010O00010O010O0010O0010aClNP:T1nEoNQ:R1kEQOV:n0hEUOW:k0fEXO[:h0bE[O]:e0aE]O`:c0\\E@d:`0ZECe:>WEEj::TEIk:7RELo:4nDNR;2kD2T;OiD3X;LfD7Y;IdD:];F`D<`;D]D`0b;A[Da0f;^OWDf0h;[OUDg0l;XORDk0m;UOPDn0Q1M21O010O01O01O010O00010O01O01O010O01O01M2M4M2M3N3L3M4N11O01O01OM4M2M4M2M3NoV\\7"}, "image_id": 118, "id": 2070}, {"iscrowd": 0, "category_id": 1, "bbox": [38.0, 302.0, 61.0, 50.0], "area": 1824, "segmentation": {"size": [512, 512], "counts": "ZZc01l?4L3N3L3M3O2O000M4L3O110O010M2M3M4O01OVAPOd>V1010O01O01O010O00010O00010O00010O010O00010M2M3NO01O003001O01O0N2M4M2M3M4L3M4M2M3M`V^6"}, "image_id": 118, "id": 2071}, {"iscrowd": 0, "category_id": 1, "bbox": [266.0, 311.0, 106.0, 68.0], "area": 3529, "segmentation": {"size": [512, 512], "counts": "hZU42l?2M3N3M2M4M2N3L3N2N3L3N3M2M3010O010O00010O010O0010O0010O0010O0010O0010O0010O0010O0010O010O00010O010O00010O010O00010O010O0010O0010O0010O0010O0010O0010O0010O0010O0010ON2N3M2M4M2M3O20O01O01O0101N0010OM4M2M3N3L3N3MTeU2"}, "image_id": 118, "id": 2072}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 351.0, 65.0, 74.0], "area": 2730, "segmentation": {"size": [512, 512], "counts": "o:f1Z>1O00010O0010O0010O0010O0010O00010O01SOiA0V>NlA2T>KPB4Q>IQB8n=EVB:k=BXB?g=_O[Ba0e=\\O_Bc0b=ZO`Bg0]>O01O100O0O2GTOZAn0c>UOZAn0c>:N100N2N2O2O01O010O01O01O010O0N3L3N2M4M2M4L3Go@AT?;PAAS?=9M4M2M4MUTo6"}, "image_id": 118, "id": 2073}, {"iscrowd": 0, "category_id": 1, "bbox": [358.0, 413.0, 154.0, 99.0], "area": 10673, "segmentation": {"size": [512, 512], "counts": "\\_c52l?2M4M2M3N3L3N3M2M3N3L3N3L3N2M4M2iAcNi=`1TBbNi=a1TBbNj=a1SBaNj=l1N2M3N2M3N2M3N2N2M3N2M3N2O11O00001O001O00001O001O00001O001O001O00001O001O00001O001O00001O001O00001O001O001O00001O001O00001O001O00001O001O00001O001O001O00001O001O00001O001O00001O001O00001O001O001O00001O001O00001O001O00001O001O00001O001O001O00001O001O00001O001O00001O00"}, "image_id": 118, "id": 2074}, {"iscrowd": 0, "category_id": 1, "bbox": [250.0, 424.0, 91.0, 88.0], "area": 4739, "segmentation": {"size": [512, 512], "counts": "V^m31l?4M2M3N3L3N3L3N2N3L3N3L300gBmNf;S1VDPOj;P1TDROm;n0oCUOQ]1c>01O001O00001O001O001O000O2L3N3L3N2N3N1010OO2L3N2N3L3N3L3NSa`7"}, "image_id": 118, "id": 2076}, {"iscrowd": 0, "category_id": 1, "bbox": [3.0, 0.0, 103.0, 33.0], "area": 2323, "segmentation": {"size": [512, 512], "counts": "S`16g?301O00000[@L^?;0000000001O000000000000001O000000000000L4M30_@IY??0001O00000000000000001O0000000000000000001O00000000000000001O0000000000000000001O00000000006J00000000000000001O0000000000000000001O00000000UORA>T?_Oo@7[?Ie@Nd?2UP[6"}, "image_id": 119, "id": 2077}, {"iscrowd": 0, "category_id": 1, "bbox": [138.0, 0.0, 114.0, 47.0], "area": 3499, "segmentation": {"size": [512, 512], "counts": 
"ZPU24d?8N2000001O00000000000a@B\\?a00001O000000000000001O00000000000000M30000001QADX><_AMa>i001O00000000000000001O000000000000001O00000000L4000000001O00002N001O00000000000000kN_Al0k>O000000000000010O000000000000010O0000000000000010O00000000000000K6G8H8HioQ4"}, "image_id": 119, "id": 2078}, {"iscrowd": 0, "category_id": 1, "bbox": [300.0, 0.0, 17.0, 2.0], "area": 24, "segmentation": {"size": [512, 512], "counts": "PPf41o?0000000000000000001O00000000000P`Q3"}, "image_id": 119, "id": 2079}, {"iscrowd": 0, "category_id": 1, "bbox": [350.0, 0.0, 36.0, 15.0], "area": 412, "segmentation": {"size": [512, 512], "counts": "UP_54g?5000001O00000000001O00000000001O000000001O00000000001O000000001O00L4KUPo1"}, "image_id": 119, "id": 2080}, {"iscrowd": 0, "category_id": 1, "bbox": [417.0, 0.0, 40.0, 25.0], "area": 766, "segmentation": {"size": [512, 512], "counts": "_``64h?5L3L4L4010O00O2O0000001O0000001O0000001O000000L4N200001O0000001O0000L4L4L4M3LT`k0"}, "image_id": 119, "id": 2081}, {"iscrowd": 0, "category_id": 1, "bbox": [465.0, 0.0, 47.0, 36.0], "area": 1325, "segmentation": {"size": [512, 512], "counts": "Y`X71k?5`@KP?8m@LP?6l@NT?2i@1W?<01O0000001O00001O0000001O0000001O00001O0000001O00001O0000001O0000001O00001O0000001O"}, "image_id": 119, "id": 2082}, {"iscrowd": 0, "category_id": 1, "bbox": [350.0, 11.0, 9.0, 8.0], "area": 63, "segmentation": {"size": [512, 512], "counts": "[P_57i?0000000010O000Oe_\\2"}, "image_id": 119, "id": 2083}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 17.0, 11.0, 14.0], "area": 131, "segmentation": {"size": [512, 512], "counts": "a0=c?0000000010O00006J1OW_j7"}, "image_id": 119, "id": 2084}, {"iscrowd": 0, "category_id": 1, "bbox": [260.0, 21.0, 78.0, 58.0], "area": 2933, "segmentation": {"size": [512, 512], "counts": "fQR49_?8G9M3010O0000000000000001PASOl>Q1O0N2000000001O0001O000000000001O01O0000004D8I7000001O01O000000000000010O00000000000001O01O0I7G9M3POYAg0P?000001O01OJ60000000010O0G9HUof2"}, "image_id": 119, "id": 2085}, {"iscrowd": 0, "category_id": 1, "bbox": [3.0, 30.0, 50.0, 70.0], "area": 2575, "segmentation": {"size": [512, 512], "counts": "mb15b?9G9G9O2O0001O00N2G9G9L401O0H8L400000M3000000010O00000000000000010O000000000000XOTB\\Ol=;]BEd=1eB0Z=OgB1Y=OgBL^=4cBBf=>n0L4G9GX_U7"}, "image_id": 119, "id": 2086}, {"iscrowd": 0, "category_id": 1, "bbox": [357.0, 34.0, 81.0, 53.0], "area": 3200, "segmentation": {"size": [512, 512], "counts": "`ab58^?:L4_A\\Of=d0PBFP>:gAOY>1eA2Z>j000000000001O000001O0000000001O000001O0000L4000001O0001O000000000001O000001O000000000001iNbAj0j>001O000001O000000000001O0001O0000000TOSAe0T?000010O00000000000F:G\\nT1"}, "image_id": 119, "id": 2087}, {"iscrowd": 0, "category_id": 1, "bbox": [60.0, 41.0, 51.0, 66.0], "area": 2400, "segmentation": {"size": [512, 512], "counts": "SSn03b?;L403M00000I7I7000001O00000000000E;E;000001O00M300000000001N1J600001O0000000001O0000000001O000POVBEo=0]BEm=0l_X6"}, "image_id": 119, "id": 2088}, {"iscrowd": 0, "category_id": 1, "bbox": [472.0, 47.0, 40.0, 54.0], "area": 1392, "segmentation": {"size": [512, 512], "counts": "iR\\71k?5L3L4L5N11O00010O000001N1L4L4M4K4L4L5L3O11O01O0001O01O01O0001O01O01O0001O01O0\\N"}, "image_id": 119, "id": 2089}, {"iscrowd": 0, "category_id": 1, "bbox": [135.0, 49.0, 50.0, 67.0], "area": 2483, "segmentation": {"size": [512, 512], "counts": "obS25
  • =o@Kl>g0001O000001O000000000H8H8G:O00000000000000N3O0001O0N2I70000000010O00000000000000010O0000nNYBFl=2\\BEm=3[BEm=2b_S5"}, "image_id": 119, "id": 2090}, {"iscrowd": 0, "category_id": 1, "bbox": [190.0, 53.0, 56.0, 71.0], "area": 2668, "segmentation": {"size": [512, 512], "counts": "iSo22f?8G9M31O0000000001G8H8G9000000000O11O0001O000000000M03G9H8K510O000000000000010O00000000000000010O00K5[OQBPOX>h0PBPOX>g0e0H8GanT4"}, "image_id": 119, "id": 2091}, {"iscrowd": 0, "category_id": 1, "bbox": [252.0, 84.0, 77.0, 51.0], "area": 2794, "segmentation": {"size": [512, 512], "counts": "mSn31f?9G9G90001O00000000000001O03M000M3M4O000000000001O0001O000000M31O01O0000008H0003M00010O000000000K5F:K510O00000000N200000001O00000001O000000000001O0I7VOaAJh>MS^k2"}, "image_id": 119, "id": 2092}, {"iscrowd": 0, "category_id": 1, "bbox": [426.0, 97.0, 67.0, 70.0], "area": 2596, "segmentation": {"size": [512, 512], "counts": "mTe61l?3L4L4M4K4M3L4L5N10010O0000010O000010L3MO0011030O00001TAVOa>i0\\A[Ob>f0ZA^Oc>Q1L5L3N20001N1M3L5K4000010O0001O01O0001O0M3L5L3L4L4M4K4M3L4L5L3L4M4K4L4M3Ll\\9"}, "image_id": 119, "id": 2093}, {"iscrowd": 0, "category_id": 1, "bbox": [345.0, 100.0, 85.0, 53.0], "area": 3057, "segmentation": {"size": [512, 512], "counts": "Vd\\52j?5L3L4L4L5L3L4M301O00010O0010O0001O01O01O0001O01O0001O01O01O00M4M20001O01O000M40O000VAlNh>V11O0001M2L41O01O00010O0001N1000010O000001L3M3O1010O0000010O0O1M4K4M3M3M4BSACQ?9SADP?9=M4K4MjlX1"}, "image_id": 119, "id": 2094}, {"iscrowd": 0, "category_id": 1, "bbox": [491.0, 114.0, 21.0, 49.0], "area": 666, "segmentation": {"size": [512, 512], "counts": "lde71k?5L3L4L4M4K4L4O110L3M3L4L5M200010O000001\\L"}, "image_id": 119, "id": 2095}, {"iscrowd": 0, "category_id": 1, "bbox": [509.0, 168.0, 3.0, 13.0], "area": 26, "segmentation": {"size": [512, 512], "counts": "`en74h?5K4hJ"}, "image_id": 119, "id": 2096}, {"iscrowd": 0, "category_id": 1, "bbox": [228.0, 0.0, 86.0, 70.0], "area": 3400, "segmentation": {"size": [512, 512], "counts": "cQb32m?3\\NLXC7efNQBV1P>iNSBU1m=kNUBR1l=nNVBP1j=oNZBn0f=RO\\Bk0e=TO^Bj0b=VOaBf0`=ZObBd0^=\\OdBa0]=_OeB?[=AhB;Y=EiB9W=GkB7U=InB3S=MoB1Q=OQCNP=2RCLn<4_100000000000000000000000PPS3"}, "image_id": 120, "id": 2097}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 6.0, 12.0, 25.0], "area": 238, "segmentation": {"size": [512, 512], "counts": "6i0W?00000000000000000000000HQok7"}, "image_id": 120, "id": 2100}, {"iscrowd": 0, "category_id": 1, "bbox": [372.0, 156.0, 48.0, 42.0], "area": 1676, "segmentation": {"size": [512, 512], "counts": "oTj5g0Y?00000000000000000M:I9G00000000000000000000000000000000000000000000000000000000O1000k0UO0000O100Zj]1"}, "image_id": 120, "id": 2101}, {"iscrowd": 0, "category_id": 1, "bbox": [491.0, 178.0, 21.0, 39.0], "area": 819, "segmentation": {"size": [512, 512], "counts": "bee7W1i>000000000000000000000000000000000000000^J"}, "image_id": 120, "id": 2102}, {"iscrowd": 0, "category_id": 1, "bbox": [24.0, 152.0, 24.0, 27.0], "area": 357, "segmentation": {"size": [512, 512], "counts": "TU<2g?O]@4`?O]@3a?8N110O01O01b@BY?=e@E[?a00O01O010O010ON3M2N3M2N3L3NQkW7"}, "image_id": 122, "id": 2103}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 159.0, 34.0, 38.0], "area": 747, "segmentation": {"size": [512, 512], "counts": "o4j0W?0O010O010O00010O010O010O00010O010O01XOl@c0X?01O010O01O010O01O0_Of@:a?N2N3L3N[j^7"}, "image_id": 122, "id": 2104}, {"iscrowd": 0, "category_id": 1, "bbox": [47.0, 191.0, 11.0, 15.0], "area": 124, "segmentation": {"size": [512, 512], "counts": "Xfg05g?4L4O101O01O000O1K5LTjR7"}, "image_id": 122, "id": 2105}, 
{"iscrowd": 0, "category_id": 1, "bbox": [32.0, 217.0, 28.0, 28.0], "area": 769, "segmentation": {"size": [512, 512], "counts": "XW`0=T??0000000000000000000000000000000000000000000000000000WiQ7"}, "image_id": 122, "id": 2106}, {"iscrowd": 0, "category_id": 1, "bbox": [5.0, 435.0, 14.0, 14.0], "area": 101, "segmentation": {"size": [512, 512], "counts": "im23m?1N2N3N1N00010O0003N1N3M2OWRf7"}, "image_id": 122, "id": 2107}, {"iscrowd": 0, "category_id": 1, "bbox": [428.0, 485.0, 39.0, 27.0], "area": 792, "segmentation": {"size": [512, 512], "counts": "i_f64h?4L4L5K4L41O01O0001O01O0001O0000001O000000001O0000001O0000001O0000001M2L4L4LgPf0"}, "image_id": 122, "id": 2108}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 79.0, 95.0], "area": 4630, "segmentation": {"size": [512, 512], "counts": "0c2]=00001O001O001O00001O001O00001O001O00001O001O00001O001O]MQCU2o]OoAa0S>@mADjA:X>FhA7[>JeA3]>McA0`>1`ALb>4^AIe>8ZAFh>:YACi>=WA@l>a09001O00001O001O000000N2M3N2M3N2M3N2NR`h6"}, "image_id": 123, "id": 2109}, {"iscrowd": 0, "category_id": 1, "bbox": [182.0, 0.0, 163.0, 37.0], "area": 3126, "segmentation": {"size": [512, 512], "counts": "PPk24l?6J6J3M00000000O10000000000O10000000000O10000000000O10000000000O100005K6J6J6J0000000000O1000000000000O10000000000O10000000000O10000000000O10000000000O1000000000000O10000000000O10000000000O10000000000O10000000000_Oo@2R?MUAMk>3a000000000O1000000000000O10000000000O10000000000O10000000000O10000000000O1000000000000O10000000000O1000000000P`c2"}, "image_id": 123, "id": 2110}, {"iscrowd": 0, "category_id": 1, "bbox": [368.0, 0.0, 22.0, 15.0], "area": 270, "segmentation": {"size": [512, 512], "counts": "PPh56j?7I2N000000O10000000000O10000000000N2N2M3MSPm1"}, "image_id": 123, "id": 2111}, {"iscrowd": 0, "category_id": 1, "bbox": [170.0, 3.0, 12.0, 20.0], "area": 194, "segmentation": {"size": [512, 512], "counts": "TPe27i?6I7J0000000000O2O6J7I_oT5"}, "image_id": 123, "id": 2112}, {"iscrowd": 0, "category_id": 1, "bbox": [147.0, 4.0, 23.0, 23.0], "area": 421, "segmentation": {"size": [512, 512], "counts": "W`Y23m?7H8I3M00O1000O10000000O10O1000000000O107I7I^oZ5"}, "image_id": 123, "id": 2113}, {"iscrowd": 0, "category_id": 1, "bbox": [206.0, 36.0, 41.0, 24.0], "area": 714, "segmentation": {"size": [512, 512], "counts": "ZQW37i?6J5K00000O10O10000000O1000O100000O100000O100000O1000O10000000O1000O100000O17I7I^^T4"}, "image_id": 123, "id": 2114}, {"iscrowd": 0, "category_id": 1, "bbox": [104.0, 127.0, 91.0, 180.0], "area": 11594, "segmentation": {"size": [512, 512], "counts": "YTd14k?8I7I7I7I7I7I8G8I7I7I7I7I7I7H9H7I7I7I7I7I3L10006J6J6J3M0O10000000O1000O100000O100000O10O1000000000O010000000000O01000000000O10O10000000[OcEfK^:Z4gEaK`:W4gEcK`:V4l0I7I7I7I7I7H8I7I7I7I7I7I7H8I7I7I7I7I7I^Wn4"}, "image_id": 123, "id": 2115}, {"iscrowd": 0, "category_id": 1, "bbox": [494.0, 210.0, 18.0, 116.0], "area": 1096, "segmentation": {"size": [512, 512], "counts": "eVg76j?6J7I6I7J7I6J6J6J7H7J6J7I6J6J6I8I6XI"}, "image_id": 123, "id": 2116}, {"iscrowd": 0, "category_id": 1, "bbox": [178.0, 269.0, 306.0, 229.0], "area": 49876, "segmentation": {"size": [512, 512], "counts": "Y^i22V=0`E6Z:0`E6Z:1_E6Y:1aE5Y:1`E6Z:1_E6Z:LdE5[:KeE5[:KdDlN>Y1n:KdDRO8R1T;LdDVO4o0X;KdDUO5P1W;KdDUO5P1W;KdDUO5P1W;KdDWO3n0Y;KdD^OLg0`;KcDEG?f;LdDK_O:m;KdDV1\\;jNdDV1\\;jNdDV1\\;jNdDV1\\;jNdDU1];jNcDW1\\;jNdDV1W:iMXFQ1AV1W:PNQFj0HV1j9jMlE=;c0OV1j9kNPFO6U1k9SOhEG=W1j9h8BXG>g8CYG=n8\\OQGe0V9SOkFl0\\9nNdFR1c9gN]FY1j9`NVF`1Q:YNoEg1X:RNhEn1_:kM`EV2UM30O0102N7I6JUb="}, "image_id": 123, "id": 2117}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 
308.0, 103.0, 204.0], "area": 14379, "segmentation": {"size": [512, 512], "counts": "P:d4\\;8H8H7I8H7H7J00000000000O1000000000000O10000000000O1000000000000O1000000000000O10000000000O1000000000000O10SJ_F[5a9eJfFT5Z9lJmFm4S9SKTGf4l8ZK[G_4e8aKaGY4^8gKjGR4V8nKQHk3o7ULXHd3h7\\L_H]3a7cLfHV3Z7jLmHo2R7RMUIg2k6YM\\I`2d6`McIY2]6gMiIS2W6lMQJm1o5SNXJf1h5ZN_J_1`5bNeJY1[5gNeJY1[5gNeJY1[5gNeJY1[5gNeJY1[5gNdJZ1\\5fNdJZ1[5fNfJZ1\\5dNdJ\\1b5^N^Jb1P:00000O10000000000002N6J5K6J5K6J5K6J5K6JSP\\6"}, "image_id": 123, "id": 2118}, {"iscrowd": 0, "category_id": 1, "bbox": [448.0, 504.0, 42.0, 8.0], "area": 202, "segmentation": {"size": [512, 512], "counts": "n_P72n?000000000O10000000000O10000000000O1000000000000O10000000000O10000000000O10000005KS`:"}, "image_id": 123, "id": 2119}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 58.0, 60.0], "area": 2231, "segmentation": {"size": [512, 512], "counts": "0Y1g>1O1O2N1O1O2N1OO1O1002N1O1O2N1O1O2NO1O1O101N2N2N3N10010O00010O0O1N3M2O1N3N1O1O2M1O00101N2N2N3N1N2N3M2O1N3M2N2O2M2NgnR7"}, "image_id": 124, "id": 2120}, {"iscrowd": 0, "category_id": 1, "bbox": [43.0, 0.0, 57.0, 37.0], "area": 1360, "segmentation": {"size": [512, 512], "counts": "\\`e03a?Nj@3U?Ni@5U?Li@6V?Lg@6X?Kg@7W?:O1O2N1O2N1O1O2N1O1O2N00O1O1O100O1O100O1O1O100O1O1O100O1O11O2N1O0000O1O1O1O100O1O1O3N1N3M2N2N3N1N2N3Mdo]6"}, "image_id": 124, "id": 2121}, {"iscrowd": 0, "category_id": 1, "bbox": [453.0, 0.0, 37.0, 35.0], "area": 664, "segmentation": {"size": [512, 512], "counts": "b`R72m?2O1N2N3M2N2O1N3M2N2N2O2M1O000010O000000010O001O2N2O1N2N3M2N2N2O2M2N2N2Nco:"}, "image_id": 124, "id": 2122}, {"iscrowd": 0, "category_id": 1, "bbox": [218.0, 33.0, 80.0, 56.0], "area": 2567, "segmentation": {"size": [512, 512], "counts": "TR]33m?1N2N3M2O1N3M2N2O1N3M2N2GXOXAj0_>^O_Ad0Z>c001O01O010O001O011N1O100O1O101N1O100O1O2O0O1OO20O1O100O2O0000000oNTAn0o>0010O00000000010O00002N0001O000001O01O0005K000001O0001O0K5]Of@:c?K5JbnZ3"}, "image_id": 124, "id": 2123}, {"iscrowd": 0, "category_id": 1, "bbox": [159.0, 51.0, 57.0, 75.0], "area": 2171, "segmentation": {"size": [512, 512], "counts": "gb_23m?1N2N3M2XOLbA5Z>OdA4Y>OdA3Z>OdA3Z>OdA4Y>OeA2Y>0eA2Z>OdA4Y>j0000O02O2N1O101N1O1O2O0O101O000010XNTBY1o=`NUB_1W>O2O0O1O2M2N2N3N1N210O0N2N3M2XOm@a0T?]On@a0[?M2N2O2M2N2N3N1N2N`lc4"}, "image_id": 124, "id": 2124}, {"iscrowd": 0, "category_id": 1, "bbox": [322.0, 54.0, 79.0, 81.0], "area": 2844, "segmentation": {"size": [512, 512], "counts": "^RQ53m?1N2N2N2N2N2N2N3M2O1N2N1O00001O2N2N2O2M2N2N2N2N2N2N2N3N100000000001O01O00000gA^NS>b1kA`NU>f11O00N3M200001O00000O1N2O1N2N3M2N2N2N2N2N2O2M2N2N2QOQAj0T?N3M2N2O1N2N2N2N3M2N2N2N2Ob\\g1"}, "image_id": 124, "id": 2125}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 63.0, 24.0, 37.0], "area": 490, "segmentation": {"size": [512, 512], "counts": "o1P1Q?0000000000O1N2N2O1N2N2N2O0O2N2N2O1N2N2N2O1N2N\\mc7"}, "image_id": 124, "id": 2126}, {"iscrowd": 0, "category_id": 1, "bbox": [489.0, 67.0, 23.0, 47.0], "area": 574, "segmentation": {"size": [512, 512], "counts": "abd73l?2O1N3M2O2M2N2O2M2N2O2M2N3N1N2N3N1N2N3M210O0iM"}, "image_id": 124, "id": 2127}, {"iscrowd": 0, "category_id": 1, "bbox": [292.0, 78.0, 27.0, 27.0], "area": 392, "segmentation": {"size": [512, 512], "counts": "iRb41n?2N3N1N3M2O2M2N3N1N3M01O01O01O010O2N3N1N3M2O2M2N3N1NV]P3"}, "image_id": 124, "id": 2128}, {"iscrowd": 0, "category_id": 1, "bbox": [111.0, 83.0, 78.0, 76.0], "area": 2799, "segmentation": {"size": [512, 512], "counts": 
"nbg12m?2N3N1N2N3`@EV?=h@EW?V1\\AkNc>[10O00010O0O1N3N1N2N3O00O0O0010O0002N2N3N1N2N2N3N1N2gA^NQ>j1O10010O00010O0001O0N2O2M2^NgAZ1[>dNgAZ1\\>dNfAZ1a>N2N2N3N1N2N3M2O1N3M2N2O2M2N2N3M2O1N3M2Ng[Q5"}, "image_id": 124, "id": 2129}, {"iscrowd": 0, "category_id": 1, "bbox": [78.0, 105.0, 73.0, 81.0], "area": 2788, "segmentation": {"size": [512, 512], "counts": "cTW11n?2N3N1TOKjA7T>KjA8T>JiA8U>JjA8S>JkA8S>KjA7U>JiA9T>IkA8S>KjA7T>KjA7U>KiA4W>NgA2Y>j010O0000010O3N110O010O0010O00010O01O010O01O01O01O0N2O2M2N3N1N2N3N1N2N3N1O20O00010O00N3N1N3M2N2O2M2N2O2M2N3M2O1N3MdZd5"}, "image_id": 124, "id": 2130}, {"iscrowd": 0, "category_id": 1, "bbox": [280.0, 115.0, 106.0, 127.0], "area": 4055, "segmentation": {"size": [512, 512], "counts": "[T\\42i?0Y@2e?6N2N2N2N0000012M2N2N2N2N2N3M2N2O1N2N2N3M2O1001O0001O000001OiAeNh=[1WBfNi=[1TBgNl=Y1RBiNn=W1PBkNP>U1nAmNS>R1kAPOU>_1O00000001O0001O00POlA3T>KnA5S>HPB7P>HQB8o=FSB:m=DUB=i=BYB>f=A\\B?b=AaB>^=AdB?\\=@eB`0[=^OgBb0Y=WOQBOg0l0W=TOTBMe0Q1X=oNmBR1o=2N2N2N2N3M2O11O00000001O01O00000000010O0O1N2N2N2N3M2N2O1N2N2N3M2N2N2N1O00011N2N3M2N2IZ@1l?NUin1"}, "image_id": 124, "id": 2131}, {"iscrowd": 0, "category_id": 1, "bbox": [239.0, 117.0, 17.0, 16.0], "area": 141, "segmentation": {"size": [512, 512], "counts": "lcg31n?3N1N2N3M2O1N001O01O2N3N1N2N3M2OSlo3"}, "image_id": 124, "id": 2132}, {"iscrowd": 0, "category_id": 1, "bbox": [370.0, 122.0, 79.0, 82.0], "area": 2855, "segmentation": {"size": [512, 512], "counts": "cTi51o?1N2N2N2N3M2N2N2O1N2N3M2N000000101N2N2N3M2N2N2N2O1N2N3M2N200001O0001O00000001OgA_NR>a1lAaNS>g1N2N20000010O0000000N2N2N3M2N2N2O1N2N2N3M2N2N2N2RORAh0o>VOSAh0T?N2N2N3M2N2O1N2N2N2N3M2N_Zo0"}, "image_id": 124, "id": 2133}, {"iscrowd": 0, "category_id": 1, "bbox": [246.0, 128.0, 16.0, 14.0], "area": 123, "segmentation": {"size": [512, 512], "counts": "WTk31n?2N2N2O2M2N10O10O0001O2N2O2M2Nlkl3"}, "image_id": 124, "id": 2134}, {"iscrowd": 0, "category_id": 1, "bbox": [208.0, 136.0, 26.0, 27.0], "area": 376, "segmentation": {"size": [512, 512], "counts": "cTX31n?3M2N2O2M2N2O2M2N3M2O1N0010O003M2O2M2N2O2M2N2N3N1N[kZ4"}, "image_id": 124, "id": 2135}, {"iscrowd": 0, "category_id": 1, "bbox": [24.0, 142.0, 68.0, 61.0], "area": 2032, "segmentation": {"size": [512, 512], "counts": "SU<1n?2O2M2N3N1N2N3M2O1N3M2O1N3M20010N1N3N1N1O0010O0000010O00000100O3M2N2O2M2O110O01O01O010O01O001M2O2M2N3M2O1N3M2N3N1N3M2N3N1N2N3M2O2M2N3NVja6"}, "image_id": 124, "id": 2136}, {"iscrowd": 0, "category_id": 1, "bbox": [265.0, 150.0, 18.0, 20.0], "area": 192, "segmentation": {"size": [512, 512], "counts": "jdT43l?2O1N3M2N2O2N1O00101O04K1N3M2N2O2MmZb3"}, "image_id": 124, "id": 2137}, {"iscrowd": 0, "category_id": 1, "bbox": [175.0, 163.0, 48.0, 52.0], "area": 1230, "segmentation": {"size": [512, 512], "counts": "beg22m?2O2M2N3N1N2N3N1N2N3M2O2M2N2O2M2N2O2M2N2N3O001O01O000N3M2O2M2N2O2M2N2N3N1N3M2N2O2M2N2N3N1N2N3NjY`4"}, "image_id": 124, "id": 2138}, {"iscrowd": 0, "category_id": 1, "bbox": [232.0, 182.0, 32.0, 37.0], "area": 513, "segmentation": {"size": [512, 512], "counts": "mUd32m?2O1N3M2N2O2M2N3N1N2O2O01O010O01O01O01O01O0O2M2N2O2M2N2N3N1N3M_ik3"}, "image_id": 124, "id": 2139}, {"iscrowd": 0, "category_id": 1, "bbox": [102.0, 185.0, 63.0, 70.0], "area": 1728, "segmentation": {"size": [512, 512], "counts": "YWc11n?3N1N3M2O2M2N3M2O2M2N3N1N3M2O0O1O00010O0001O01O01O0001O01O0001BROjAm0V>UOhAk0Y>WOdAj0[>XOdAg0\\>\\OaAd0`>=01O0001O100ZO^AMb>3`AK`>6bAH^>7dAG\\>9fAEZ>>iAAV>?lA_OT>b0nA[OS>d0oAZOS>d0g0M2O1N3M2O2M2N3N1NjY]5"}, "image_id": 124, "id": 2140}, {"iscrowd": 0, "category_id": 1, "bbox": [276.0, 186.0, 55.0, 
67.0], "area": 1819, "segmentation": {"size": [512, 512], "counts": "gVZ4151c?1Z@1Q?M_A:`>G^A<_>G^A;`>G_A9`>J]A6d>KZA5f>MXA5f>NXA4e>NYA4f>d0N3M2O2M2O1010O00010O0010O00010O00O2N1N2N3M2O1N3M2N2O2N11OO2M2N2O2M2N2N3N1N3M2N2O2M2N2NdXj2"}, "image_id": 124, "id": 2141}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 187.0, 59.0, 79.0], "area": 2720, "segmentation": {"size": [512, 512], "counts": "P6o0Q?1N3M2O101]AkNX>U1fAmN[>R1cAQO\\>P1aARO`>W101NO1O00010O1O1O01O0jAeNh=[1XBgNg=Z1XBgNf=\\1WBgNf=[1XBgNf=\\1WBfNh=[1WBgNf=k1N2N3N01N2N2N3N1N2N3M2O2M2N2O2`NiAR1Y>lNiAR1Y>lNiAR1Z>kNiAS1X>lNiAR1d>N3M2O1N3M2Ii@BZ?;h@DY?;7N3M2N2O2MdXR7"}, "image_id": 124, "id": 2142}, {"iscrowd": 0, "category_id": 1, "bbox": [217.0, 209.0, 33.0, 31.0], "area": 540, "segmentation": {"size": [512, 512], "counts": "Qg\\31n?2N2N3N1N2N3M2N2O2M2N2N01O01O0001O0001O01O1O2O1N1O1O2O1N2N2G]@2j?O1NSiR4"}, "image_id": 124, "id": 2143}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 243.0, 15.0, 31.0], "area": 243, "segmentation": {"size": [512, 512], "counts": "c7o0R?N2N3N1N3M2N3N1N2N3N1N3M2N2OjWh7"}, "image_id": 124, "id": 2144}, {"iscrowd": 0, "category_id": 1, "bbox": [129.0, 246.0, 22.0, 19.0], "area": 204, "segmentation": {"size": [512, 512], "counts": "PhP21n?2O2M2N3N1N2N010O00010O000010O2N2O2M2N3M2ORXd5"}, "image_id": 124, "id": 2145}, {"iscrowd": 0, "category_id": 1, "bbox": [65.0, 247.0, 46.0, 46.0], "area": 1275, "segmentation": {"size": [512, 512], "counts": "ogP13m?3M3L5L3M3L5L00O10O101O4K4M3M4L1N10O10O10O10O10O1000O10O103M3L4M4L3L5L1O00O010001N4M4L3L4MWWX6"}, "image_id": 124, "id": 2146}, {"iscrowd": 0, "category_id": 1, "bbox": [210.0, 249.0, 51.0, 56.0], "area": 1503, "segmentation": {"size": [512, 512], "counts": "YXY32m?3N1N3M2O2M2N2O2M2N3N1N3M2O2M2N2N3N1N3M2O2M2N3O010O01N1N2N3N1N3M2N3N1N3M10O01O01O00M5O1N3M2O2Ce@0]?Od@O_?Nc@1^?Me@0ifm3"}, "image_id": 124, "id": 2147}, {"iscrowd": 0, "category_id": 1, "bbox": [33.0, 256.0, 29.0, 29.0], "area": 445, "segmentation": {"size": [512, 512], "counts": "\\h`02m?3N1N3M2O1N3M2O2M2N3N0O000010O00011N2N3M2O2M2N3N1N3M2O1NbgP7"}, "image_id": 124, "id": 2148}, {"iscrowd": 0, "category_id": 1, "bbox": [502.0, 260.0, 10.0, 21.0], "area": 116, "segmentation": {"size": [512, 512], "counts": "[Xk72m?3N1N2N3M2O1N3M2N2kG"}, "image_id": 124, "id": 2149}, {"iscrowd": 0, "category_id": 1, "bbox": [129.0, 266.0, 27.0, 32.0], "area": 484, "segmentation": {"size": [512, 512], "counts": "fhP23l?2M4N1N3M2O2M2N2O2M2N3M2O0O0010O2N2O2M2_Oi@6Y?Hi@7Y?Fi@8Y?Gi@6b?N3N1NQga5"}, "image_id": 124, "id": 2150}, {"iscrowd": 0, "category_id": 1, "bbox": [413.0, 282.0, 99.0, 166.0], "area": 5802, "segmentation": {"size": [512, 512], "counts": "Uj^62m?2O1N3M2N2O2M2N3M2O1N3M2N2O2M2N2O2M2N3M2O1N3M2N2O20O010O00010O0001nN[NgCd1W<^NiCc1T<_NlCa1RROcAn0]>TOaAm0_>UO_Aj0a>:0010O0000010O000010O0000100O3M2N2O0O1O102M2N2N3N1N3M2N2O2M2N2O2M2N3M2O1NRfR7"}, "image_id": 124, "id": 2153}, {"iscrowd": 0, "category_id": 1, "bbox": [110.0, 297.0, 54.0, 63.0], "area": 1622, "segmentation": {"size": [512, 512], "counts": "QZg11n?2O1N3M2N3N1N2N3N1E_OTAd0i>^OUAd0i>_OTAc0k>^OTAd0i>;N2N3N1N3M2O1N30O00010O010O000O2M2N2O2M2N3M2O1N3M2O2M2N2N3N1N2N3M2O2M2N2O2M2N2N\\e]5"}, "image_id": 124, "id": 2154}, {"iscrowd": 0, "category_id": 1, "bbox": [53.0, 313.0, 21.0, 26.0], "area": 250, "segmentation": {"size": [512, 512], "counts": "Zjj01n?2O2M2N2N2O2DEn@O11M2\\ASOR>P1kASOS>n0kATOS>o0kASOR>o0lASOS>o0jASOT>o0jATOU>\\101N1N3M2O2M20010OO2SNTBb1m=\\NUBb1m=]NUB`1n=]NTBb1W>M2N3N1N2N3M2O2M2N3N1N3M2O1N000010O000011N2N01O01O2O2M2N2N3N1K6NRcR6"}, "image_id": 
124, "id": 2157}, {"iscrowd": 0, "category_id": 1, "bbox": [378.0, 345.0, 63.0, 70.0], "area": 2239, "segmentation": {"size": [512, 512], "counts": "f[m52m?2N2O1N3M2O1N3M2N2O2M2N2O1N3FSO\\An0`>XO^Ag0`>]O^Ac0b>=1O01O2O2M2O1O2N100O2N1O1O110O00O2M20gA]NT>h10000OO3N100O2N1N2N2N3hN_Am0b>RO_Al0c>RO_Al0d>QO_Al0l>N2O1N3M2@e@8]?Ff@8b?N2N3M2O1NaSS1"}, "image_id": 124, "id": 2158}, {"iscrowd": 0, "category_id": 1, "bbox": [315.0, 383.0, 63.0, 99.0], "area": 2794, "segmentation": {"size": [512, 512], "counts": "Tmm41n?3N1N2N3M2O2M2N2O2A^O[Ad0c>_OZAd0c>^O\\Ac0c>^O[Ae0b>^O[Ad0c>>O2M2N3M2O1N3M2N3N1N3M2O1N3M2N2OO01O3MRObB]O^=b0dB^O[=`0gB@Y=>iBCW=;kBDU=:mBFT=7oBIP=5RCKn<4TCLl<1VCOjM2O2M2N2N\\aR2"}, "image_id": 124, "id": 2159}, {"iscrowd": 0, "category_id": 1, "bbox": [274.0, 418.0, 47.0, 64.0], "area": 1428, "segmentation": {"size": [512, 512], "counts": "b]Y43m?1T@Mh?7N2N3N1N2N3N1N3M2N2O2M2N3M2O1N3M2O2M2N2N3O010O0O1O20O01O01O001WOdAI^>6dAG^>7dA^ON0`>?dA@MOa>?eA_OL0b>?cA_Oh>?>N3N1N2N3M2O2MPRo2"}, "image_id": 124, "id": 2160}, {"iscrowd": 0, "category_id": 1, "bbox": [383.0, 423.0, 50.0, 76.0], "area": 1918, "segmentation": {"size": [512, 512], "counts": "cmo51o?1N3M2N2]ALX=6aB0]=2]B4a=NXB:e=ITB=k=DQB`0m=BQBa0l=BQB`0m=k0O2M2N2N3N01N3N1N2N3M2N2O2M2]NkAX1W>fNkAX1X>eNjAZ1W>dNlAY1_>N2N3N1N2N3M2O1N3M2N2O2M2N2N3N1N2N3M2O1NWQW1"}, "image_id": 124, "id": 2161}, {"iscrowd": 0, "category_id": 1, "bbox": [427.0, 424.0, 65.0, 74.0], "area": 2662, "segmentation": {"size": [512, 512], "counts": "Zne61n?2O2M2N2N3N1N3M2N2O2M2N2^O\\OeAg0U>_OiAa0W>@gA`0Y>CdA=\\>EbA<]>c010O3N1O2N1O1O2O0O1O2N1010O00010O0010M1O000102M1O01O0001iNoA>Q>BQB=n=DTB9m=FUB8k=HWB6i=KYB3f=M\\B1f=M\\B1g=M[B1f=OZBOh=O[B@\\O<\\>2[BFYO0^>8[BGYOO^>7\\BGXO0^>8U1N3M2N2OVb9"}, "image_id": 124, "id": 2162}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 436.0, 47.0, 61.0], "area": 1511, "segmentation": {"size": [512, 512], "counts": "d=[1f>10O010O00010O0010M2N3N1N2N01O012M2N2N3N1N3M2N3N1N3M2N3NO01O00010O00010O2N2N3N1N3M2N3N1N3M2NZQX7"}, "image_id": 124, "id": 2163}, {"iscrowd": 0, "category_id": 1, "bbox": [196.0, 464.0, 72.0, 48.0], "area": 1645, "segmentation": {"size": [512, 512], "counts": "R_R31n?2N2O2M2N3M2O2M2N2O2M2N3M2O2M2N3N1N2N3N1N2N1O100O11O1O2N1O1O2N1O2N1O1O2N1O2N1O1O2N1O2N1O2N1O1O2N1O2N1O1O2N1O2N00O1O1O100O1O100O1O12N1O1O2N1O5KQ`i3"}, "image_id": 124, "id": 2164}, {"iscrowd": 0, "category_id": 1, "bbox": [307.0, 468.0, 67.0, 44.0], "area": 1379, "segmentation": {"size": [512, 512], "counts": "goi41o?1N3M2O1N3M2O0O1O100O1IAl@`0T?Bj@>e>AfA3C=f>BeAe0Z>^OdAb0\\>_ObAb0]>A`A`0_>AaA?_>@aAa0_>_O`Ab0b>]O]Ac0d>ZOZAj0h>SO[Ak0f>SOZAn0l>0002N1O2N1O1OO100O00101N1O100O11O1O2N1O2N1O2N1O2N1O2N3M1O2N1O2N1O0000O1O1001O1N3NS`T2"}, "image_id": 124, "id": 2165}, {"iscrowd": 0, "category_id": 1, "bbox": [404.0, 483.0, 47.0, 29.0], "area": 799, "segmentation": {"size": [512, 512], "counts": "h_Z63m?1N3M2O1N1O1O100O1O1O100O1O100O1O1O100O1O1O100O1O100O1O1O12N1O1O2N1O2N1O1O2N1O1O2N1O2M2N2N3NUPn0"}, "image_id": 124, "id": 2166}, {"iscrowd": 0, "category_id": 1, "bbox": [491.0, 501.0, 17.0, 11.0], "area": 117, "segmentation": {"size": [512, 512], "counts": "koe73m?1N2N100O1O100O1O11O1O1O2N1O2N1OR`1"}, "image_id": 124, "id": 2167}, {"iscrowd": 0, "category_id": 1, "bbox": [185.0, 509.0, 9.0, 3.0], "area": 16, "segmentation": {"size": [512, 512], "counts": "ool21o?0O1O1001O1OO11OQ`n4"}, "image_id": 124, "id": 2168}, {"iscrowd": 0, "category_id": 1, "bbox": [24.0, 124.0, 28.0, 33.0], "area": 467, "segmentation": {"size": [512, 512], "counts": 
"eT<1f?2_@0_?2_@O`?2_@0_?:N2N2N2M3N1O0000O1000O12N1O2N2M3N2N2N2N2N1O2M3NkkU7"}, "image_id": 126, "id": 2169}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 413.0, 418.0], "area": 94665, "segmentation": {"size": [512, 512], "counts": "m3a4\\;3N3L3N3L3N2N3L3N3L3N3L3N2M4M2N3L3N2M4M2M4M2M3N3M2M4M2M3N3L3N3M2M4M2M3N3L3N3L3N2N3L3N3L3N2M4M2N3L3N2M4O001O001O00001O001O00001O001O00001O001O001O00001O001O00001O001O00001O001iI_Fm5a9QJbFn5^9oIeFQ6f9O00001O001O00001O0000N2N2M3O11O00001O001O00001O001O00001O001O001O00001O001O00001O001O00001O001O00001O001O001O00001O001O00001O001O00001O001O00001O001O001O00001O001O00001O001O00001O001O00001O001O001O00001O001O0000mJnJYOQ5e0QK[Oo4b0UK^Oj4`0XK@h4>[KAe4<^KEa49bKF^48dKI[44hKLX42kKNT4OoK1Q4MRL2n3LTL5k3KVL4j3MUL4j3LVL4j3MVL2j3NVL3i3MXL2h3OWL2h3NXL2h3OXL0h30XL1g3M\\L2d3K_L6`3HcL7]3iNYKRM\\1U4[3fN]KQM[1Z4W3cN`KQM\\1[4T3bNbKPM]1_4P3^NgKPM\\1a4m2]NiKPM\\1d4j2YNnKoL\\1g4f2XNPLoL\\1i4d2UNTLoLZ1m4a2RNWLnL\\1o4]2QNYLnL\\1R5Z2mMhNR2X1lMjNT2V1iMmNX2R1fMQOY2o0eMSO\\2l0aMXO^2h0`MZO`2f0]M^Oc2a0[MAe2?YMCh23]A;d>E[A;e>E[A;e>FZA:e>a0O11O1O00\\AkN^>[11O001O1OO1I70000000K500000000000000000001O00000000000000000001OgNeAm0g>0000D=ZOb_Z3"}, "image_id": 131, "id": 2177}, {"iscrowd": 0, "category_id": 1, "bbox": [202.0, 39.0, 36.0, 27.0], "area": 726, "segmentation": {"size": [512, 512], "counts": "`QU3;]?80000000000000000000001O0O100001O000001O00000000000000000f@^OV?f00001O00K6BToX4"}, "image_id": 131, "id": 2178}, {"iscrowd": 0, "category_id": 1, "bbox": [205.0, 92.0, 28.0, 13.0], "area": 241, "segmentation": {"size": [512, 512], "counts": "QcV36e?50000001O0000000001O03M00001O0000000001O00000001O000Oo\\[4"}, "image_id": 131, "id": 2179}, {"iscrowd": 0, "category_id": 1, "bbox": [244.0, 99.0, 16.0, 19.0], "area": 250, "segmentation": {"size": [512, 512], "counts": "SSj3>b?5K0000000000O1L4000000001O00EXmm3"}, "image_id": 131, "id": 2180}, {"iscrowd": 0, "category_id": 1, "bbox": [376.0, 109.0, 8.0, 16.0], "area": 128, "segmentation": {"size": [512, 512], "counts": "]Sl5`0`?0000000000000clo1"}, "image_id": 131, "id": 2181}, {"iscrowd": 0, "category_id": 1, "bbox": [272.0, 115.0, 8.0, 21.0], "area": 168, "segmentation": {"size": [512, 512], "counts": "cSX4e0[?0000000000000]lc3"}, "image_id": 131, "id": 2182}, {"iscrowd": 0, "category_id": 1, "bbox": [306.0, 134.0, 9.0, 14.0], "area": 116, "segmentation": {"size": [512, 512], "counts": "VTi4=c?000000000001O01h[R3"}, "image_id": 131, "id": 2183}, {"iscrowd": 0, "category_id": 1, "bbox": [376.0, 136.0, 9.0, 20.0], "area": 162, "segmentation": {"size": [512, 512], "counts": "_Tl5<]?8O000000000000FR\\o1"}, "image_id": 131, "id": 2184}, {"iscrowd": 0, "category_id": 1, "bbox": [211.0, 144.0, 40.0, 31.0], "area": 1023, "segmentation": {"size": [512, 512], "counts": "gdY3h0X?0000000000000000000000000I700000000000000007I00000000000000000000000000000000Y[R4"}, "image_id": 131, "id": 2185}, {"iscrowd": 0, "category_id": 1, "bbox": [11.0, 158.0, 52.0, 32.0], "area": 1344, "segmentation": {"size": [512, 512], "counts": "de54X?d0N2000000000000000001O0001O0005K00000000M3N20000000000000000000001O000000000001O00000000000000000000HX[P7"}, "image_id": 131, "id": 2186}, {"iscrowd": 0, "category_id": 1, "bbox": [304.0, 165.0, 18.0, 9.0], "area": 147, "segmentation": {"size": [512, 512], "counts": "UUh48h?00000000000000000000000000001O000kjn2"}, "image_id": 131, "id": 2187}, {"iscrowd": 0, "category_id": 1, "bbox": [439.0, 243.0, 73.0, 79.0], "area": 3158, "segmentation": {"size": [512, 512], "counts": 
"Pik61n?2M3N1O2N2N2N2M3N1O2N2N2M3N2N1O2N2M3N2N2N1O2N2M3N2N2N2N1N3N2N2N2N2M2O2N2N2N2O10O0O2N2N2N2N2M2O2N2N2N2N2M2O2N2N2N2M3N2N1O2N2M3N2N2N1O2N2M3N2N2N1OcG"}, "image_id": 132, "id": 2188}, {"iscrowd": 0, "category_id": 1, "bbox": [335.0, 297.0, 40.0, 26.0], "area": 981, "segmentation": {"size": [512, 512], "counts": "]iW5f0V?400000000000000000000000000003M1O000000M300000000000000000000000000000000000_OWWT2"}, "image_id": 132, "id": 2189}, {"iscrowd": 0, "category_id": 1, "bbox": [485.0, 297.0, 27.0, 55.0], "area": 772, "segmentation": {"size": [512, 512], "counts": "Yjb72m?2M3N1O2N2N2M3N1O2N2M3N2N2N1O2M3N2N2N1N3N2N2N2M3N1O2fF"}, "image_id": 132, "id": 2190}, {"iscrowd": 0, "category_id": 1, "bbox": [74.0, 301.0, 79.0, 78.0], "area": 3136, "segmentation": {"size": [512, 512], "counts": "cZU12m?2N2N2N2M3N1O2N2N2M3N2N1O2N2N2M3N2N1O2N2M3N2N2N2N1N3N2N2N2M3N1O200O100000O100000O100000O100O0O2N2M3N2N2N1O2M3N2N2N2N1O2M3N2N2N2N2M2O2N2N2N2M3N1O2N2N2M3N2N1O^Uc5"}, "image_id": 132, "id": 2191}, {"iscrowd": 0, "category_id": 1, "bbox": [14.0, 369.0, 81.0, 79.0], "area": 3288, "segmentation": {"size": [512, 512], "counts": "h\\72m?1O2N2M3N2N2N1O2b@AW?f0N2N2N2N1N3N2N2N2N0O101O2N2N2M2O2N2N2N2M3N1O2N2N2O1000O10O10000000O10O100000O1N2N1N3N2N2N2M3N1O2N2N2M3N2N1O2N2M3N2N2N1O2M3N2N2N2N1N3N2N2N2N1N3NZS`6"}, "image_id": 132, "id": 2192}, {"iscrowd": 0, "category_id": 1, "bbox": [254.0, 376.0, 59.0, 32.0], "area": 1507, "segmentation": {"size": [512, 512], "counts": "Z\\o39]?:H9O000000000001O000001O00000000000001O000001O000000000001O000001O00000000000001O000001O00000000000001O0K7D:000000HYTS3"}, "image_id": 132, "id": 2193}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 451.0, 23.0, 47.0], "area": 567, "segmentation": {"size": [512, 512], "counts": "S>_1b>M2O2N2N2N2N2M2O2N2N2N2M3N2N1O2N2N2M3N2N1O2N\\Qd7"}, "image_id": 132, "id": 2194}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 134.0, 215.0], "area": 17537, "segmentation": {"size": [512, 512], "counts": "i2h2R=[MoBh2n<6M4M2M3N3M2M4M2M3N3L3N3L3N2N3L3N3L3N2M4M2M4M2N3L3N2M4M2M4M2kE]Jj9f5SF]Jk9f5RF]Jj9n5N3O000000N2M3O100001O001O001O00001O001O00001O001OTMZF^Of9?]FAc9=_FD`9:cFE]98fFIY95jFJV93mFMS91PGOo8NTG2l8LVG5i8H[G7e8G]G:b8DaG;_8BdG>\\8@fGa0Y8\\OkGc0U8[OmGf0R8WORHh0n7VOTHj0l7SOWHn0h7PO[Ho0e7oN]HR1b7kNbHT1^7jNdHV1\\7gNgHZ1X7dNkH[1U7bNnH_1Q7_NRI`1n6]NUIc1k6[NXIe1g6YN[Ig1e6VN^Ik1a6SNbIl1^6QNeIP2Z6nMiIQ2W6lMlIT2T6jMnIV2R6gMRJU2Q6iMnIX2T6fMlIY2W6dMjI\\2X6bMgI^2\\6_MeIa2]6]MbId2`6YMaIf2m9O3L3N2M4M2N3L3N2M4M2M4M2N2M4M2M4M2M3N3M2M4M2K5N3L3N3L3N3L3N[ll5"}, "image_id": 133, "id": 2195}, {"iscrowd": 0, "category_id": 1, "bbox": [329.0, 0.0, 38.0, 14.0], "area": 277, "segmentation": {"size": [512, 512], "counts": "P`T51o?001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001OO1M3N2M3NR`X2"}, "image_id": 133, "id": 2196}, {"iscrowd": 0, "category_id": 1, "bbox": [483.0, 16.0, 29.0, 84.0], "area": 1639, "segmentation": {"size": [512, 512], "counts": "c`a7:f?9G8H9G9G9G7I000O1000O100000000000O1000O100000000004K:G9WO"}, "image_id": 133, "id": 2197}, {"iscrowd": 0, "category_id": 1, "bbox": [372.0, 30.0, 104.0, 235.0], "area": 16483, "segmentation": {"size": [512, 512], "counts": "iQj53m?8H8H2N000O100I_Oo@a0Q?7000009G8H8H9G8G9H9G8H8H8`EgLi7b3nGfLj7b3nGaLo7g3iGYLV8Q4aGoK_8Y4YGgKg8a4QG_Ko8j4hFVKX9R5_FoJa9g51000O9H7I0000000000000O010000000000000O0100000G[IWGe6i8900000O1000005K2NO100HYIWGg6g8\\IXGc6i8:O100000000001N2O1QIUGg6R900000007I8G9H8H9G8H8H9G8H8H8G:G8H8H8H9cNmB[O[==mB[O[==mB[O\\=[1H8GUia0"}, "image_id": 133, "id": 2198}, {"iscrowd": 0, "category_id": 1, "bbox": 
[214.0, 31.0, 65.0, 98.0], "area": 2957, "segmentation": {"size": [512, 512], "counts": "eS[31m?2M4M2M4M2M3N3L3N3L3N3L3N2M4M2M4M2N2M4M2M4M2M3N3L3N3L3N2M3NO010O010O102M3N3L3N2M4M2N3L3N2M4M2M3N3L3N2M4M2M4M2M3N3L3N2M4M2M4M2M3Ni^d3"}, "image_id": 133, "id": 2199}, {"iscrowd": 0, "category_id": 1, "bbox": [280.0, 116.0, 32.0, 39.0], "area": 690, "segmentation": {"size": [512, 512], "counts": "aT\\42l?3M2M4M2N3M2N3L3N2N3M2N3M2O2O010O01N1M3N3M2N3M2N3L3N3M2N3M2M3NWlS3"}, "image_id": 133, "id": 2200}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 188.0, 50.0, 140.0], "area": 4488, "segmentation": {"size": [512, 512], "counts": "l5m3S<10M2MTDYLe;e3[D]Lf;_3ZDeLe;Y3[DiLf;T3YDPMf;m2[DUMe;i2[DYMf;g2VD]Mi;c2UD_Ml;[3O00010O010O0010O0`NTDlNm;P1VDQOi;m0ZDROf;l0\\DUOd;g0_DYOa;e0bDZO_;b0dD\\O^;b0eDZO^;c0eD[O^;b0dD[O_;b0eD[O^;b0dD[O_;b0eD[O];c0eDZO_;c0cD[O_;b0eD[O^;b0dD[O_;b0eD[O];c0eDZO_;b0eD[O];c0eDZO_;b0dD\\O^;b0Q2N3L3N3L3N2MdiV7"}, "image_id": 133, "id": 2201}, {"iscrowd": 0, "category_id": 1, "bbox": [74.0, 239.0, 181.0, 203.0], "area": 18188, "segmentation": {"size": [512, 512], "counts": "\\\\U12l?2M4M2M4M2M3N3L3N3L3N2M4J5N3L3N2M4M2M4M2M3N2M10O02O2M4MH_BUN^=l1eBSNY=o1gBRNU=R2kBmMS=U2mBiMR=Z2nBdMR=_243N3L3N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3M200O2J[KmDg4Q;_KPE]4l:dKVE\\4h:eKYE[4h:dKYE\\4f:eKYE[4h:dKXE]4g:cKZE\\4f:eKYE[4h:dKUE`4j:;1YKTEY4k:dKYE[4g:eKYE[4g:eKYE6Md3i:WLZE10h3g:VLYEO4k3b:VL[EL5n3a:VLYEI9Q4^:VLkEj3T:WLkEi3V:VLkEj3T:ULmEk3S:RLPFn3DoKk90dFQ4^OQLo:o3nDTLS;l3jDWLU;i3hDZLX;f3fD]LZ;T40010O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O001M2N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L2OO010O010O010O010O010O02O2M4M2M4M2M3N3F]AROf>k0]ASOf>j09M3N3L3N3L3N2M4M2MXWP4"}, "image_id": 133, "id": 2202}, {"iscrowd": 0, "category_id": 1, "bbox": [256.0, 243.0, 96.0, 128.0], "area": 9327, "segmentation": {"size": [512, 512], "counts": "TYP41l?;F:E:G:E;F:E;F:F:E:G2M2O2M3N2M3N2N2000O10000000000000O10O100000000000000O01000000000000000O10O100000000000O100000O10000000O1000000000O100000O100000003M8H8H8G8I8H8H8H8H7I8H8H8G9H5K000000007Ihd_2"}, "image_id": 133, "id": 2203}, {"iscrowd": 0, "category_id": 1, "bbox": [408.0, 291.0, 93.0, 172.0], "area": 10878, "segmentation": {"size": [512, 512], "counts": "^Z\\67i?4L0O10000nNI[B6e=4RBLn==iACW>f0`AZO`>S10001O9G9F:G9G9G8H0000003M9F:G9G9G9G2N0000000O100000O1000000000O100000O1000000000O107I9G1O00O100000O1000000000O1000O100000000000O1M8K9YOlDPL];g3lDPL];g3g0G8G:G9G9G9G9G9G8H9F:G9G9GZR5"}, "image_id": 133, "id": 2204}, {"iscrowd": 0, "category_id": 1, "bbox": [269.0, 369.0, 77.0, 94.0], "area": 5699, "segmentation": {"size": [512, 512], "counts": "jkV47i?8H8G9H8H8H7I8H8H8G8IO100000000000O10O100000000000O010000000000000O010000000000000O010000000000000O0100000000000O10O100000000000O16I9H8H7I8H8H8H8G9H7I8Hkab2"}, "image_id": 133, "id": 2205}, {"iscrowd": 0, "category_id": 1, "bbox": [236.0, 487.0, 41.0, 25.0], "area": 712, "segmentation": {"size": [512, 512], "counts": "o_f31o?00000000000[O0ZA0f>8QAIo>>j@BV?d0O1000000000000O1000000000000O10000003M2N000000O10000001O8H7IUPe3"}, "image_id": 133, "id": 2206}, {"iscrowd": 0, "category_id": 1, "bbox": [4.0, 496.0, 45.0, 16.0], "area": 389, "segmentation": {"size": [512, 512], "counts": "n_22l?2M3N2N2M3N200001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O00QPW7"}, "image_id": 133, "id": 2207}, {"iscrowd": 0, "category_id": 1, "bbox": [353.0, 510.0, 15.0, 2.0], "area": 22, "segmentation": {"size": [512, 512], "counts": "oo`51o?0000000000000O1000000000000R`W2"}, "image_id": 133, "id": 2208}, 
{"iscrowd": 0, "category_id": 1, "bbox": [21.0, 0.0, 27.0, 11.0], "area": 210, "segmentation": {"size": [512, 512], "counts": "P`:2n?2N1O1O1O1O1O001O000000001OO100000000O100O1O1O1O1O1MSPX7"}, "image_id": 134, "id": 2209}, {"iscrowd": 0, "category_id": 1, "bbox": [49.0, 0.0, 21.0, 20.0], "area": 258, "segmentation": {"size": [512, 512], "counts": "\\`h03k?2N3L3N3M2O101O001O001OO1N2N2M3N2N2M3N2NRPm6"}, "image_id": 134, "id": 2210}, {"iscrowd": 0, "category_id": 1, "bbox": [261.0, 0.0, 141.0, 66.0], "area": 4917, "segmentation": {"size": [512, 512], "counts": "P`R41o?001O00001O001O00001O001O001O00001O001O000`AIW=8fBKY=5dBN\\=3aBO_=1_B2`=N]B5c=LZB7e=IXB:h=GUB;k=EPBa0o=_OoAc0Q>^OkAf0T>ZOjAh0V>a001OO1M3N2N2M3N2M3N2M3N2M31O0\\ATOT>m0iAVOV>j0gAYOY>g0eA\\OZ>e0bA^O^>S1O00001O001O00001O001O00001O001O00001O001O00001O001O001O00001OO11O00001O001O001O00001OO1M3N2M3N2M3N2M3N2N2M3N2M3N2M3N2M3N2N2M3N2M3N2N2001O00001O001O00001O001O00001OO1N2M3N2M4MQPg1"}, "image_id": 134, "id": 2211}, {"iscrowd": 0, "category_id": 1, "bbox": [95.0, 47.0, 152.0, 134.0], "area": 10148, "segmentation": {"size": [512, 512], "counts": "[c_11m?3M2M3N3M2M4M2N3L3N2N3L3N3M2M3N3N110O010O00010O010O0O2L3N2N3L3N3L3N3O0M3N3L310O0010O0010O010O0010O0010O010O00010O010O010O00010O010O010O00010UCfMn;Z2PDiMPhNiAU1Y>hNjAU1c>M2N3L3N2N3N110O010O00002L3L3N2N3L3N3Mh\\T4"}, "image_id": 134, "id": 2212}, {"iscrowd": 0, "category_id": 1, "bbox": [272.0, 117.0, 198.0, 189.0], "area": 22385, "segmentation": {"size": [512, 512], "counts": "gWX4440^?5_@M\\??L1N3N4K6L2N1aAmNQ>S1mAoNS>R1jAQOT>P1iASOW>[101O010O00010O010O00010O010O0O1M4M2M4M2M4M2M3N3L0010O010O03N3L3N2M4M2M4M2M3N3L3N3L3N3L3N2M4M2M4L3N2M4M2M4M2M3N3L3N3L3N2M4M201O00010O010O0010O0010O0010O001N1O101O010O01O01O010O01O010O01O01O010O01O010O01O01N1M4M2N2M4M2M40O00010O010O00010O010O0010O0010O0010O0010O0010M2M4M2N2M4M2M4M2M3N3L3N3L3N3M2M3N3L3N3L3N2M4M2M4M2N2M4M2M4M2M3N3L3N3L3N2N3L3N3L3N2M4M2M4M2N2M4M2M4M2M3N3L3Nljd0"}, "image_id": 134, "id": 2213}, {"iscrowd": 0, "category_id": 1, "bbox": [78.0, 119.0, 72.0, 53.0], "area": 2045, "segmentation": {"size": [512, 512], "counts": "cTW11l?3N3M2M4M2N3L3N2N3M2M4M20010O0010O0010O0010O0010O0010O010O0010O0010O0010O0010O0010O0010O010O0010O0010O0010O0010O0010O00O2M2M4M2N3L3N2M4M2M4M2Nckd5"}, "image_id": 134, "id": 2214}, {"iscrowd": 0, "category_id": 1, "bbox": [111.0, 180.0, 85.0, 51.0], "area": 1742, "segmentation": {"size": [512, 512], "counts": "Vfg12k?3N2M4L3N3L3N3O01O010O010O00010O010O01O01O010O010O00010O010O00010O010O01O01O010O010O00010O010O00010O010O01O01O010O010O00010O010O00010O010O01O01O010O010O0O1N3M2M4M2M3N3L_im4"}, "image_id": 134, "id": 2215}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 217.0, 148.0, 192.0], "area": 16977, "segmentation": {"size": [512, 512], "counts": "T;b0\\?3L3N3L3N2M4M2M4M2M3N3M2PO^N_Cf1^<\\N`Cf1]<^N_Ce1_<]N_Cf1]<]N`Cf1]<^N`Cd1^<^N_Cf1]<^N`Cd1^<^N_Cf1]<]NaCe1]N3L3N3L3NhT]4"}, "image_id": 134, "id": 2217}, {"iscrowd": 0, "category_id": 1, "bbox": [273.0, 316.0, 156.0, 196.0], "area": 19404, "segmentation": {"size": [512, 512], "counts": "PnX43k?2M4]AIY=9dBKX=8fBJX=9dBJY=9eBJX=9dBJY=9eBJX=8eBKX=9eBIY=9dBKX=9eBIY=9dBKX=8fBJX=`1M4M2M4M2M3N3L3N3L3N2M4ZO^LjDd3T;^LiDf3S;]LkDe3S;^LiDe3T;^LjDe3S;e0M4M2M3N3L3N3O000010O010OO1M4M2M4M2M3N3L3N30O001O00001O001O00001O001O00001O001O001O00001O001O00001O001O00001O001O001O00001O001M2M3N3L3N3L3N2M4M2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M4M2M3NlTY1"}, "image_id": 134, "id": 2218}, {"iscrowd": 0, "category_id": 1, "bbox": [36.0, 416.0, 97.0, 
96.0], "area": 5774, "segmentation": {"size": [512, 512], "counts": "n_b02k?3N2M3N2M3N2M3N2N2M3N2M3N2M3N2M3N2N2M3N2M3N2M3N2M3N2N2M3N2M3N2M3N2M3N2N2M3N2O1001O001O00001O001O00001O001O001O00001O001O00001O001O00001N1M4M2N2M4M2M4M2M4M2M3N3M2M4M2M3N3L3N3L3N2M4M2N3L3N2M4M2M\\Rm5"}, "image_id": 134, "id": 2219}, {"iscrowd": 0, "category_id": 1, "bbox": [104.0, 0.0, 79.0, 54.0], "area": 2263, "segmentation": {"size": [512, 512], "counts": "PPd12n?1O1O2N1O1O2N1O1O2N1O1O2N1OO1O1O1002N1O1O2N1O1O2N1O1O2N1O1O2N1O2N1O1O2N1O1O2N1O1O2N1O1O2N1O0000O1O1O1001O`NfA[1Y>dNjA[1U>eNlAZ1T>eNnAY1S>fNPBW1^>O2M2N2N3N1N2N1O010O2N2O1N3M2N2O2M2N3M2O1No^T5"}, "image_id": 136, "id": 2220}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 0.0, 39.0, 21.0], "area": 456, "segmentation": {"size": [512, 512], "counts": "P`o22n?1O1O2N1O1O2N1O1O2N1O1O2N1O1O1OO1O100O1O1O100O1O1O100O1O1O100O1O1O100O1O2N2Ono\\4"}, "image_id": 136, "id": 2221}, {"iscrowd": 0, "category_id": 1, "bbox": [268.0, 0.0, 79.0, 82.0], "area": 3196, "segmentation": {"size": [512, 512], "counts": "YPV42m?2O2M2N3M2O1N3M2N2O2M2N2O2N1O1O2N1O2N1O1O2N1O1O2N1O1O2N1O2N10010O01O01O01O01O0N2N3N101O01O01O0O1N3M2O1N3M0010O000010O03ROVBAl==VBAl=>UBAm=UBAm=mNfAS1X>POfAR1W>QOfAQ1Y>=M2N3N1N3M2O2M2N3N1N3M2O20M2N3N1N3M2O2M2SNQBf1R>WNQBg1V>M2N3N1N3M2O2M2O20O010ON3M2O2M2N3NO000101N3M2O2M2N3N1N3M2O2M2NRnl5"}, "image_id": 136, "id": 2224}, {"iscrowd": 0, "category_id": 1, "bbox": [208.0, 38.0, 17.0, 15.0], "area": 136, "segmentation": {"size": [512, 512], "counts": "]QX32n?1N2N3M2O1N1O0010O002O1N2N2N3N1Nd^_4"}, "image_id": 136, "id": 2225}, {"iscrowd": 0, "category_id": 1, "bbox": [219.0, 40.0, 89.0, 75.0], "area": 3336, "segmentation": {"size": [512, 512], "counts": "ea]33l?2N3N1N2N3M2O1N3M2N2O2M2N3N1N210O00010O010O00010O00010O01O01N1N3M2O1N3M2N2IaNkAa1R>aNlAa1R>aNlAb1R>7N2N3N0O000010O0000010O0000100O3M2N2O2AmAlNU>S1lAkNW>R1kAmNV>Q1mAlNU>T1kAjNX>U1:0010O00010N1O2M2N2O2M2N2N3N1N3M2N2O2M2N2O2M2N]mU3"}, "image_id": 136, "id": 2226}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 42.0, 74.0, 83.0], "area": 3296, "segmentation": {"size": [512, 512], "counts": "[2g0X?3M2O1N3M2N3N1O20O0N10O2O2M2N2O2M2N3N1N3M2N2O0O0010O00HUN\\Bl1c=VN\\Bi1d=ZNYBf1h=[NVBf1i=80010O0000010O00010OJWBVNi=j1YBTNh=k1[BSNe=l1:N1N3M2O2M2N2O2M2N3N1N3M2N2O2M2N3N1N3M2O2M2N2N3N1N3M2O2M^mj6"}, "image_id": 136, "id": 2227}, {"iscrowd": 0, "category_id": 1, "bbox": [171.0, 69.0, 63.0, 86.0], "area": 2960, "segmentation": {"size": [512, 512], "counts": "dbe21n?3\\@MV?5h@MW?5g@MV?5h@MV?5h@NV??N3M2O2M2N3N1N3M2O2M2N3N1N3M2O2M210O010lAWNn=n110O001M2O2M2N3N1N3M21N1O2M2N3N1N3M2O0O0YOSB\\Om=e0TBZOl=e0WBXOk=f0WBXOk=g0WBWOk=f0WBXOk=f0XBWOk=g0VBXOk=f0XBWOk=e0XBYOj=e0k0N1Af@5]?Hf@5\\?Je@5e?M2NalZ4"}, "image_id": 136, "id": 2228}, {"iscrowd": 0, "category_id": 1, "bbox": [71.0, 86.0, 10.0, 10.0], "area": 58, "segmentation": {"size": [512, 512], "counts": "ibS11n?3M2O1N3N01N2N3N1NV]g6"}, "image_id": 136, "id": 2229}, {"iscrowd": 0, "category_id": 1, "bbox": [130.0, 94.0, 74.0, 78.0], "area": 2982, "segmentation": {"size": [512, 512], "counts": "_SQ22m?2TAMi=5TBMj=5UBMh=6UBLj=6SBMj=5gAB5:R>7gA@55gAB4;S>6fAA6;Q>?lACS>>kAER>V1N3N1N3M2O0O00010O00010O0002O2O0N3M2O2M2N3N1N3M2O2M3M2O2K4010O010O010O010O010O10O0O2N1N3M2O0O00010O2N2O2M2N3N1N3M2O2M2N3N1NTki4"}, "image_id": 136, "id": 2230}, {"iscrowd": 0, "category_id": 1, "bbox": [30.0, 114.0, 82.0, 60.0], "area": 2515, "segmentation": {"size": [512, 512], "counts": 
"YT?1n?3N1N3M2O2M2N3N1N3M1O01O01O01O01O0o@\\Oh>f0UA\\Oi>f0UA]Oh>e0WA\\Oh>P1N1O2N1O2O0O1O2N1O01O0O1O100O2N100O1O100O2N100O1O01O01O0001O01O01O01O01O01O01O01O01O01O2O1N3M2N3N1N3M2O2M2N2O2M2N3N1N3M2OfkW6"}, "image_id": 136, "id": 2231}, {"iscrowd": 0, "category_id": 1, "bbox": [78.0, 156.0, 70.0, 70.0], "area": 2268, "segmentation": {"size": [512, 512], "counts": "ZVW12m?2O2M2O2M2N3N2M2N3N1N1O10O000102M2N3N1N3M2OO00]OnNYBS1f=oNYBP1h=ROUBn0k=TOTBl0k=WORBi0o=XOoAi0P>ZOnAe0R>]OlAc0U>d0O01O01O01O01O01O011N3M3N1N3HaAjNa>T1aAkNa>S17N2O0O00010O00102M2N3N1N3M2O2M2N3N2M2N3N1NWje5"}, "image_id": 136, "id": 2232}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 253.0, 16.0, 48.0], "area": 441, "segmentation": {"size": [512, 512], "counts": "m7^1d>N101N2PO[Aa0f>]O]Aa0e>]O\\Ab0e>\\O^Ab0d>\\O\\Ad0Q?0O000000J6I8I^gg7"}, "image_id": 136, "id": 2233}, {"iscrowd": 0, "category_id": 1, "bbox": [347.0, 0.0, 6.0, 3.0], "area": 12, "segmentation": {"size": [512, 512], "counts": "P`]51o?1O1O00O1OQ`_2"}, "image_id": 137, "id": 2234}, {"iscrowd": 0, "category_id": 1, "bbox": [357.0, 0.0, 34.0, 18.0], "area": 338, "segmentation": {"size": [512, 512], "counts": "P`b51o?1O1O1O1O1O2N1O1O1O1O1O2N1O1O1O00O1O1O100O1O1O1O1O1O1O100O1O1O2N2No_l1"}, "image_id": 137, "id": 2235}, {"iscrowd": 0, "category_id": 1, "bbox": [381.0, 0.0, 71.0, 65.0], "area": 2205, "segmentation": {"size": [512, 512], "counts": "Qan51o?1N2N2N2N2N2_@EZ?=d@FY?b0N2N2O1N2N2N000021O0001N1N2N2N2N2N2N2N3NO0001O00000000000001O0001O0000IdAkN\\>U1fAiNZ>W1hAgNY>Y17O1O1O1O1O1O1O2N2O2M2N2N2N2N2N2N2N3M2O1N2N2N2N2N^om0"}, "image_id": 137, "id": 2236}, {"iscrowd": 0, "category_id": 1, "bbox": [457.0, 0.0, 55.0, 43.0], "area": 1453, "segmentation": {"size": [512, 512], "counts": "P`T71o?1O1O1O1O2N1O1O1O1O1O2N1O1O1O1O1O1O2N1O1O1O1O1O1O2N1O1O1O1O1O2N1O1O1O1O1O1OO1O100O1O1O1O1O1O100O1O1O1002NROSAi0m>VOUAh0l>"}, "image_id": 137, "id": 2237}, {"iscrowd": 0, "category_id": 1, "bbox": [176.0, 13.0, 57.0, 66.0], "area": 1763, "segmentation": {"size": [512, 512], "counts": "iPh21n?2N2N2O1N2N2N2N2N2N2N3M2N2[AVOR>k0lAWOT>i0kAXOU>h0iAZOW>f0gA\\OY>d0eA^O[>b0cAA[>`0cAB[>`0cAB\\>R1N2O1000000000001O0000N2N3M2N2O10000eN^AW1f>O2M2N2N2UORAa0P?]ORAa0P?]ORAa0X?N2N3M2O1N2N2N2N2NR^[4"}, "image_id": 137, "id": 2238}, {"iscrowd": 0, "category_id": 1, "bbox": [452.0, 17.0, 8.0, 8.0], "area": 38, "segmentation": {"size": [512, 512], "counts": "dPR71n?2N3M2O01N2N2N]oi0"}, "image_id": 137, "id": 2239}, {"iscrowd": 0, "category_id": 1, "bbox": [419.0, 36.0, 68.0, 61.0], "area": 1773, "segmentation": {"size": [512, 512], "counts": "Zba61n?3M2N2N2O1N1O002N2N2N2O2M2O10000O1N2O2M2N2N2N1O0001O01O0000000001O01O000000KWATOi>l0YAROh>m051O2N2N1O0001O000000010LWAPOk>n05N2N3M2O1N2N2N2N3M2N2O1N2N2N\\^<"}, "image_id": 137, "id": 2240}, {"iscrowd": 0, "category_id": 1, "bbox": [233.0, 48.0, 70.0, 81.0], "area": 2778, "segmentation": {"size": [512, 512], "counts": "mcd33a?_OPBa0P>\\OSBb0P>[OSBb0Q>XOSBf0e>N2O1N2N2N3M2N2N2N2O1N2N3M2NQ]X3"}, "image_id": 137, "id": 2241}, {"iscrowd": 0, "category_id": 1, "bbox": [136.0, 56.0, 72.0, 60.0], "area": 1985, "segmentation": {"size": [512, 512], "counts": "[RT21n?2N2N2N2O1N3M2N2N2N2N2N2O1N3M2N2N2N2N2N2N2^AfN]>[1`AgN`>]1000010O000000000000O2M2N2N2N2O1N2N2N3M2N1O001O01O000000002N2N10O00011N2N2N3M2N2N2N2N2O1N3M2N2N2N[mg4"}, "image_id": 137, "id": 2242}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 66.0, 27.0, 48.0], "area": 736, "segmentation": {"size": [512, 512], "counts": "R2`1a>N2N2N2N2N200000001O0O1N2N2N2O1VOn@c0T?[On@c0Z?M2N2N2N2N2N2N2N2Om\\b7"}, "image_id": 137, "id": 
2243}, {"iscrowd": 0, "category_id": 1, "bbox": [404.0, 69.0, 12.0, 12.0], "area": 78, "segmentation": {"size": [512, 512], "counts": "[RZ62m?2N2N2N2N0001O1O2N2N2Mjm_1"}, "image_id": 137, "id": 2244}, {"iscrowd": 0, "category_id": 1, "bbox": [412.0, 81.0, 11.0, 11.0], "area": 66, "segmentation": {"size": [512, 512], "counts": "fR^61n?2N2N2N2N2OO2N2N2N2N\\]\\1"}, "image_id": 137, "id": 2245}, {"iscrowd": 0, "category_id": 1, "bbox": [73.0, 98.0, 72.0, 74.0], "area": 2595, "segmentation": {"size": [512, 512], "counts": "ecT13l?2N2O1N2N2N2N2N2YA@k=b0SB@k=b0SB@k=b0SB@k=b0SB@k=b0TB_Oj=c0TB_Oj=c0TB_Oj=c0TB_Ok=b0SB@m=`0QBBo=>oADQ>T100000000O0O000001O2N2N2N2N2N2N000GcAPO]>P1eAnN[>R1gAmNX>S1jAkNV>U1:000000000000000001O3M2N2N2O1N2N2N2N2N2N2N2N2N2N2N2N2N2N2N[\\g5"}, "image_id": 137, "id": 2246}, {"iscrowd": 0, "category_id": 1, "bbox": [299.0, 112.0, 75.0, 74.0], "area": 2512, "segmentation": {"size": [512, 512], "counts": "Pde41n?2N2O1N2N2N2N2N2N2N2N3M2O100001OO1N2N2N2N2N3M2N2N2O1N2O1gAbNo=^1oAdNR>[1lAgNT>Y1jAiNV>W1hAkNX>_101O00000000000O1N2N2000N2N2N2N2N2O1N2N3M2iN\\Ao0f>oN\\Ao0f>oN\\Ao0l>N2N2O11O01M2N2N2N2N2N2N2N2N2O1N3M2NnjT2"}, "image_id": 137, "id": 2247}, {"iscrowd": 0, "category_id": 1, "bbox": [211.0, 135.0, 14.0, 14.0], "area": 108, "segmentation": {"size": [512, 512], "counts": "[dY32n?1N3M2N2N2O1000O2M2O1N2N2Nb[_4"}, "image_id": 137, "id": 2248}, {"iscrowd": 0, "category_id": 1, "bbox": [13.0, 137.0, 82.0, 77.0], "area": 2793, "segmentation": {"size": [512, 512], "counts": "fd61n?2N2N2N2N3M2d@CR??l@CR??l@CR??l@CS?f0N2N2N3M2O11O0000000000000001O0O1N2dAgNn=[1PBgNn=[1PBgNn=[1PBgNo=Z1oAhNQ>X1mAjNS>b11O0001O000000N2N2N2N2N3M2N2N2N2N2O1N2N1O000000000000000010O0001O2N2N2N2N2N2N2N2N2O1N3M2N2N2N2N2NcZ`6"}, "image_id": 137, "id": 2249}, {"iscrowd": 0, "category_id": 1, "bbox": [252.0, 139.0, 61.0, 78.0], "area": 2224, "segmentation": {"size": [512, 512], "counts": "XUn31n?2N2N2N2N3M2N2O1N2N2N2N2N2N2N2N2N2N3M2N2N2O1N2N2N2N2N2N2N2N3M2000000000000000_NPBQ1P>mNRBS1n=kNTBU1l=iNVBW1j=hNWBY1h=eNZB[1g=bN[B]1f=aN\\B]1U>N2N3TOZA9h>EZA9h>EZA9h>EZA9h>EZA9h>FYA8i>FYA8i>FYA8Y?O1N2N2NhYS3"}, "image_id": 137, "id": 2250}, {"iscrowd": 0, "category_id": 1, "bbox": [234.0, 141.0, 13.0, 13.0], "area": 94, "segmentation": {"size": [512, 512], "counts": "cTe33l?2N2N2O1N1O0001O2N3M2N2N_[T4"}, "image_id": 137, "id": 2251}, {"iscrowd": 0, "category_id": 1, "bbox": [130.0, 145.0, 79.0, 68.0], "area": 2603, "segmentation": {"size": [512, 512], "counts": "\\UQ22m?2N2N2N2N2N3M2O1N2N2N2N20001O2N000N2N2N3M2N2N2N2N0001O2O1000000000N2N2N3N1N2O1000N1O0000000000000010O00DdAUO\\>k0fASOZ>m0hAQOX>P1iAnNW>R1kAlNU>T1;0001O2N2N2N2O1N2N2N2N2N3M2N2N2N2O1N2N2N3MlZg4"}, "image_id": 137, "id": 2252}, {"iscrowd": 0, "category_id": 1, "bbox": [230.0, 160.0, 9.0, 9.0], "area": 38, "segmentation": {"size": [512, 512], "counts": "TUc31n?2O1N3M0002N2N2NnZX4"}, "image_id": 137, "id": 2253}, {"iscrowd": 0, "category_id": 1, "bbox": [208.0, 168.0, 76.0, 70.0], "area": 2682, "segmentation": {"size": [512, 512], "counts": "oVX39e?4AETA=j>ETA=j>ETAFSAFSA=O1O1O1O1O1O1AhNUBY1j=iNTBW1l=kNRBU1n=mNPBS1P>oNnAQ1R>QOlAo0T>SOjAm0V>?O00000000000001O2N2N3N1N2N2N2N2N2N2N2O110O000000000000000000010O00000000000O1N2N2N3M2N2N2N2O1N2N2N2N2N2N3M2NVia3"}, "image_id": 137, "id": 2254}, {"iscrowd": 0, "category_id": 1, "bbox": [463.0, 179.0, 49.0, 74.0], "area": 1804, "segmentation": {"size": [512, 512], "counts": "_fW72m?2N2N2O1N2N2N2N2N2N2N2N2N2N2N2N2N3M1O001O0001O02N2N2N2N2N2N2O1000000000001O000eA]NX>f10^NhA[1X>cNjA]1V>aNlA_1[>0000001O0dI"}, "image_id": 137, "id": 2255}, 
{"iscrowd": 0, "category_id": 1, "bbox": [0.0, 194.0, 11.0, 23.0], "area": 135, "segmentation": {"size": [512, 512], "counts": "R6g0Z?N3M2N2N2N2N2N2O2M2NcYj7"}, "image_id": 137, "id": 2256}, {"iscrowd": 0, "category_id": 1, "bbox": [421.0, 212.0, 66.0, 75.0], "area": 2339, "segmentation": {"size": [512, 512], "counts": "dgb61n?2N3M2N2N2N2N2N2O1I^Om@d0Q?^Om@d0Q?7N2O2M2N1O00002N2N2N2N2N2N3N1N2N2N2N200000000000001O0001[NmAZ1S>dNoA\\1Q>bNQB^1o=`NSB`1m=^NUB`1W>N2N2N2N2N3M2N2N2O1N2Im@^OU?`0m@^OU?`07N2N2N3M2N2N2N2NmW<"}, "image_id": 137, "id": 2257}, {"iscrowd": 0, "category_id": 1, "bbox": [72.0, 228.0, 15.0, 15.0], "area": 114, "segmentation": {"size": [512, 512], "counts": "]WT11n?2N2N2N2N2N1O000002N2N2N2N2NhXd6"}, "image_id": 137, "id": 2258}, {"iscrowd": 0, "category_id": 1, "bbox": [93.0, 229.0, 65.0, 60.0], "area": 2090, "segmentation": {"size": [512, 512], "counts": "mg^11n?2N2N2N2N3M2N2N2N2N2N2l@ZOl>g0RA[Ol>n0N2N2N2N2N2N3M2N2N2N2N2O1N20O1N2N3M2N2N2O1N2N2N2N2N2N2O10N2N2N10O2N2N2N2N2N2N2N2OO000001O3M2N2N2N2O1N2MSh`5"}, "image_id": 137, "id": 2259}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 247.0, 25.0, 40.0], "area": 592, "segmentation": {"size": [512, 512], "counts": "n7Q1n>0000001O001O10O2N2N2N2N2N2N3M2O1N2N2N2N2N2N2N3MhWc7"}, "image_id": 137, "id": 2260}, {"iscrowd": 0, "category_id": 1, "bbox": [39.0, 261.0, 66.0, 76.0], "area": 2360, "segmentation": {"size": [512, 512], "counts": "Wic02m?2N2N2O1N2N3M2N2N2N2N2N2O1N3M2N2N2N2N2O11OO2M2N2N2N2N2N2O1N3M2N2N2N2N2N2N01O000WOWB\\Oi=d0YB[Of=e0\\BYOd=g0^BWOc=h0`BUOb=i0`BUOc=h0_BVOc=i0^BUOd=i0^BUOd=i0^BUOd=i0^BVOc=h0_BVOc=h0l0N3M2N2O1N2N2N2N2N3M2N2N2OSW[6"}, "image_id": 137, "id": 2261}, {"iscrowd": 0, "category_id": 1, "bbox": [390.0, 267.0, 69.0, 61.0], "area": 2052, "segmentation": {"size": [512, 512], "counts": "dXS64i?6K3N2N2N2O1N2O1O2N1O1O1O1O1O100O20O00000000O1N2N:G1O1O1O2O0O1OO100O2N2N3M2N2N2O1N2N30O0000000000010O000000N2N3N1N2N2N2N2N2N3N1N2N2N2N2NYVj0"}, "image_id": 137, "id": 2262}, {"iscrowd": 0, "category_id": 1, "bbox": [474.0, 283.0, 14.0, 13.0], "area": 96, "segmentation": {"size": [512, 512], "counts": "RY]71n?2N2N2N2N2N1O01O2N2N2N2N2NQg;"}, "image_id": 137, "id": 2263}, {"iscrowd": 0, "category_id": 1, "bbox": [15.0, 284.0, 9.0, 10.0], "area": 46, "segmentation": {"size": [512, 512], "counts": "Pi72n?1N2N2N2NO3N2N2NRgc7"}, "image_id": 137, "id": 2264}, {"iscrowd": 0, "category_id": 1, "bbox": [464.0, 299.0, 48.0, 75.0], "area": 1984, "segmentation": {"size": [512, 512], "counts": "V[X72g>O^B3`=O^B3`=0]B2a=0]B2a=0]B2a=0]B2a=0^B1`=1^B2`=O^B3`=O^B3`=0]B2a=0]B2a=0]B2a=0]B1b=1\\BOd=3ZBMh=3WBLk=4SBLo=Q12N2O1N2N2N2N000000001O0FcARO]>n0eAPO[>P1gAoNX>Q1jAmNV>S1lAkNT>U1;000000000101N2N2N2N2NaF"}, "image_id": 137, "id": 2265}, {"iscrowd": 0, "category_id": 1, "bbox": [306.0, 306.0, 80.0, 81.0], "area": 2690, "segmentation": {"size": [512, 512], "counts": "PZi42m?3M2N2N2N2O1N2N3M2N20000001ON3k@XOm>j0QAXOm>o0N2O2M2N2N2N2N2N2O20O0000000000010O0000N2N00000002N2N2010O0000fNbAQ1^>mNdAS1\\>kNgAT1Y>kNhAV1a>00000001O0001O00lNYAn0g>QOZAo0f>oN\\AQ1j>01M2N2N2O1N2N2N3M2N2N2N2O1N2N3M2N]dn1"}, "image_id": 137, "id": 2266}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 317.0, 67.0, 57.0], "area": 2209, "segmentation": {"size": [512, 512], "counts": "X:22d0S?6N2N0001O00000001O1O2O11O000001M2N2N2YAiNb>\\1N2O1N3N1001O0000000O1N2N2N3N1N2N2N2N000002O0O001O000000000000N3O3HQAZOQ?d0QAZOQ?d07O1N2N2N2N3M2N2N2N2N2O_Un6"}, "image_id": 137, "id": 2267}, {"iscrowd": 0, "category_id": 1, "bbox": [245.0, 324.0, 54.0, 71.0], "area": 2083, "segmentation": {"size": 
[512, 512], "counts": "ojj31n?2i@0[>2dAOZ>3dAOZ>3dAOZ>4cANY>GZA<;OY>6eALY>6eALY>6eALY>6eAMY>4fAMX>5fAMX>o0N2N2N2N0002N1O0010O000000000000000101N2N2N2N2N2N2N2oN\\Ac0g>ZO[Ae0f>YO\\Ae0f>YO\\Ae0f>YO\\Ae0Q?N3M2N2N2N2N2O1N2N3MiTZ3"}, "image_id": 137, "id": 2268}, {"iscrowd": 0, "category_id": 1, "bbox": [167.0, 367.0, 65.0, 71.0], "area": 2163, "segmentation": {"size": [512, 512], "counts": "^lc21n?2N2O2M2N2N2N2N2N2H^Oo@d0o>^OPAd0m>^OQAd0n>8N1O00002N2N2N2O1N2N2N3M2N2N2N2O1000001O000001O00000\\NlA\\1S>bNoA\\1S>bNoA\\1S>bNoA\\1\\>N2N2O1N3M2N2N2N2N2N2O2M2N2N2N2N2N2N3N1N2N2NRc[4"}, "image_id": 137, "id": 2269}, {"iscrowd": 0, "category_id": 1, "bbox": [488.0, 378.0, 24.0, 73.0], "area": 857, "segmentation": {"size": [512, 512], "counts": "]\\d73l?2N2N2N2N2N2O1N2ZA@i=b0UB@i=b0UB@i=b0UB@i=b0VB_Oh=d0UB^Oi=d0UB^Oj=c0TB_Oj=c0TB_Oj=c0TB_Oj=c0TB_Oj=c0TB_Oj=c0TB_Oj=^1O000SD"}, "image_id": 137, "id": 2270}, {"iscrowd": 0, "category_id": 1, "bbox": [291.0, 380.0, 58.0, 72.0], "area": 1942, "segmentation": {"size": [512, 512], "counts": "]la42m?2N2O1N2N2N2N3M2N2O1]A]Of=e0XB]Of=e0XB]Of=f0WB\\Og=f0WB\\Og=f0XB\\Oe=f0YB\\Oe=f0YB\\Of=e0XB]Of=e0XB]Of=f0WB\\Oi=d0UB^Ok=]1O00N2N2O2M2N2N2N2N2N2N3N1N2N2N2N2N3M2ISAYOn>e0TAYOm>f061O01O2N2N2N3M2O1N2N2N2N2N3MQSa2"}, "image_id": 137, "id": 2271}, {"iscrowd": 0, "category_id": 1, "bbox": [357.0, 392.0, 15.0, 15.0], "area": 115, "segmentation": {"size": [512, 512], "counts": "]lb52n?1N2N2N2N2O2M20N2N2N2O2M2N2NacU2"}, "image_id": 137, "id": 2272}, {"iscrowd": 0, "category_id": 1, "bbox": [265.0, 394.0, 8.0, 6.0], "area": 42, "segmentation": {"size": [512, 512], "counts": "ZlT46j?000000O101O00eSg3"}, "image_id": 137, "id": 2273}, {"iscrowd": 0, "category_id": 1, "bbox": [358.0, 394.0, 69.0, 74.0], "area": 2547, "segmentation": {"size": [512, 512], "counts": "f]c52n?2M2N2N2N2N2N2N2N2N2N2N3O00000N2N2N2N2N2N2N2N3M2O1N000000000000001O000001O000000000002N2N2N2N2N2O0O0000000001YOlABU>1]A1`0LU>1]A2?KW>0\\A3`0JV>1\\A3e?N2NPSZ1"}, "image_id": 137, "id": 2274}, {"iscrowd": 0, "category_id": 1, "bbox": [99.0, 419.0, 79.0, 70.0], "area": 2663, "segmentation": {"size": [512, 512], "counts": "Qna11n?3M2N2N2O1N2N2N2N2e@^OU?h0N2N2000001O000N2N2N2N2N2N2N1O0001O1O2001O0000000N2N2N2N2N3M2N01O000000000001O000001ChAQOX>o0jAoNV>Q1lAmNT>S1nAkNR>V1oAhNQ>X1QBfNP>Y1=N2N2N2N2N2N3M2O1N2N2N2N2N2N2N3M2N2O1N2NXbV5"}, "image_id": 137, "id": 2275}, {"iscrowd": 0, "category_id": 1, "bbox": [219.0, 425.0, 16.0, 16.0], "area": 139, "segmentation": {"size": [512, 512], "counts": "bm]31n?2N2N2N2N1O2N2N1O02N2N2N2N2N2NbRZ4"}, "image_id": 137, "id": 2276}, {"iscrowd": 0, "category_id": 1, "bbox": [234.0, 427.0, 78.0, 60.0], "area": 2223, "segmentation": {"size": [512, 512], "counts": "U^e31n?2N2N2O2IHa@:]?Ha@:]?6N2N2N3N1N2N2N000000010O00003M2N2N2O1N2\\AgN`>^1O01O00000001O00000000N2N2N2O1N2N2N2N2N2N2O1N1O00000000000100O2N1O00001O02N2N2N2N2N2O2M2N2N2N2N2N3N1NjaS3"}, "image_id": 137, "id": 2277}, {"iscrowd": 0, "category_id": 1, "bbox": [415.0, 436.0, 70.0, 76.0], "area": 2398, "segmentation": {"size": [512, 512], "counts": "[o_62m?2N2O1N2N2N2N2N2N3M2N2N2O1N2N2N2N2N2N1O1O1O1O100O1O1O1[OdNeB]1Z=eNdB\\1[=gNbBZ1]=hNaBY1^=iN`BX1_=jN_BV1a=lN^BS1b=oN\\BQ1d=QOZBf0NPOi=;WBf01mNh=g1ZBWNf=i1\\BUNg=h1:N2N2N2N2N2N2O1N3M2N2N1O00002N2N2O1N2N2N2N2N2@d@9_?Eb@9d?N3M2N2O1N\\Q="}, "image_id": 137, "id": 2278}, {"iscrowd": 0, "category_id": 1, "bbox": [293.0, 458.0, 75.0, 54.0], "area": 2130, "segmentation": {"size": [512, 512], "counts": 
"[ob42m?2N2N2N2N2N2N3M2N2N2O1N2N2N2O101O00O1O1O1O1O00O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O1O100O1O1OL`AjN_>W1bAhN]>X1eAgNZ>Y1hAfNW>Z170001O001O2N2O1N3M2N2N2N2N2N2N2N2N2O2M2N2N2N2N2N2NQaW2"}, "image_id": 137, "id": 2279}, {"iscrowd": 0, "category_id": 1, "bbox": [142.0, 465.0, 66.0, 47.0], "area": 1837, "segmentation": {"size": [512, 512], "counts": "i_W22h?7N2N3M2N1O1O1O100O1O1O1O1O1O1O1O1O100001O1O1O00O100O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O100O1O1001O1I_AmNb>Q1`AnNa>P1aAoNa>o0`APOa>n0:N2Dm@HU?7l@GV?7l@GV?7m@FU?8;N3M2N2Nn`g4"}, "image_id": 137, "id": 2280}, {"iscrowd": 0, "category_id": 1, "bbox": [371.0, 468.0, 16.0, 17.0], "area": 127, "segmentation": {"size": [512, 512], "counts": "ini51n?2N2N2N2N2O1010O00000N2N2N2N2NSQn1"}, "image_id": 137, "id": 2281}, {"iscrowd": 0, "category_id": 1, "bbox": [503.0, 470.0, 9.0, 18.0], "area": 91, "segmentation": {"size": [512, 512], "counts": "mnk72m?2N2N2N3N1N2N2N2YA"}, "image_id": 137, "id": 2282}, {"iscrowd": 0, "category_id": 1, "bbox": [195.0, 500.0, 13.0, 12.0], "area": 87, "segmentation": {"size": [512, 512], "counts": "koQ32m?2N2N2O0O1O1O0002N2N2N2NY`g4"}, "image_id": 137, "id": 2283}, {"iscrowd": 0, "category_id": 1, "bbox": [246.0, 506.0, 12.0, 6.0], "area": 40, "segmentation": {"size": [512, 512], "counts": "o_k31n?1O100O1O1O1002N1O1O1OQ`n3"}, "image_id": 137, "id": 2284}, {"iscrowd": 0, "category_id": 1, "bbox": [236.0, 508.0, 9.0, 4.0], "area": 21, "segmentation": {"size": [512, 512], "counts": "o_f31o?0O1O1O1001O1O1OQPU4"}, "image_id": 137, "id": 2285}, {"iscrowd": 0, "category_id": 1, "bbox": [228.0, 511.0, 2.0, 1.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "o_b31o?0Q`\\4"}, "image_id": 137, "id": 2286}, {"iscrowd": 0, "category_id": 1, "bbox": [176.0, 7.0, 30.0, 44.0], "area": 682, "segmentation": {"size": [512, 512], "counts": "VQh21m?3M2O2M2N3N2M2N3M2O2M2N3N1N3M2O2M3M201O01oN\\Ae0c>YO_Ae0d>XO_Ae0c>ZO^A:IKl>H^A:IKW?4k@JW?3k@KX?2g000000000000001O0001O0000000000000000000_O]dc3"}, "image_id": 141, "id": 2293}, {"iscrowd": 0, "category_id": 1, "bbox": [508.0, 10.0, 4.0, 9.0], "area": 20, "segmentation": {"size": [512, 512], "counts": "aPn71m?2M4M2F"}, "image_id": 143, "id": 2294}, {"iscrowd": 0, "category_id": 1, "bbox": [503.0, 92.0, 9.0, 28.0], "area": 119, "segmentation": {"size": [512, 512], "counts": "dck71l?3N3L3N2M4M2I@l@c0Q?7TM"}, "image_id": 143, "id": 2295}, {"iscrowd": 0, "category_id": 1, "bbox": [467.0, 100.0, 28.0, 27.0], "area": 464, "segmentation": {"size": [512, 512], "counts": "fcY72k?3N3L3N2M4M2M4O000010O010O00010O010O000N3M2M4M2M3N3L3Ng\\8"}, "image_id": 143, "id": 2296}, {"iscrowd": 0, "category_id": 1, "bbox": [479.0, 128.0, 33.0, 25.0], "area": 502, "segmentation": {"size": [512, 512], "counts": "cd_71l?3N3L3N2M4M2M4N11O010O01O01M2M4N100010O02OO01O010O01O01O010O01O0eK"}, "image_id": 143, "id": 2297}, {"iscrowd": 0, "category_id": 1, "bbox": [449.0, 190.0, 63.0, 57.0], "area": 1950, "segmentation": {"size": [512, 512], "counts": "kfP74j?2N3M2M4M2N2M4M2N3M2M4M2N3O000010O010O010O0010O0010O010O0oNXAk0h>QO[Ao0l>O01O010O010O0010O0010O010O0001M210O01O010O01O01O01UOo@f0P?XORAh0T?0O010O000SI"}, "image_id": 143, "id": 2298}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 193.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "Qfo71n9"}, "image_id": 143, "id": 2299}, {"iscrowd": 0, "category_id": 1, "bbox": [397.0, 246.0, 64.0, 56.0], "area": 1973, "segmentation": {"size": [512, 512], "counts": 
"`hV62k?4M2M4M2M3N3L3N3L3N2010O010O00010O010O00010O01O0N2M4N1010O00010O010O00010O\\AjN^>W1^AlNc>Y1O0010O0010O0010O001XO]A2b>L`A4`>IdA7\\>FfA:Z>CjAAkA`0T>]OoAc0Q>[ORBc0f>N3L3N2M4M2M4MWWi0"}, "image_id": 143, "id": 2300}, {"iscrowd": 0, "category_id": 1, "bbox": [469.0, 279.0, 43.0, 44.0], "area": 1192, "segmentation": {"size": [512, 512], "counts": "aiZ72l?2M3N3L3N3L3M3N3L3N3N11O01O010O01O01O010O01O01O010O01O01O01O010O01O01O010O01O01O01O0mF"}, "image_id": 143, "id": 2301}, {"iscrowd": 0, "category_id": 1, "bbox": [421.0, 324.0, 31.0, 29.0], "area": 526, "segmentation": {"size": [512, 512], "counts": "fjb62l?2M3N3L3N3L3N2O20O010O00010O010O00010O010O00010L3N3L3N2M4M2Mfem0"}, "image_id": 143, "id": 2302}, {"iscrowd": 0, "category_id": 1, "bbox": [450.0, 329.0, 62.0, 46.0], "area": 1645, "segmentation": {"size": [512, 512], "counts": "Q[Q72l?2M3N3L3M4M2M3N30O00010O01O01O010O01O01O010O00010O01O01O010O01O01O010O00010O01O01O01N1N3L3M3N3L3M3010O01O01O0M3M4M2M4M2M3MfE"}, "image_id": 143, "id": 2303}, {"iscrowd": 0, "category_id": 1, "bbox": [18.0, 331.0, 15.0, 18.0], "area": 172, "segmentation": {"size": [512, 512], "counts": "]Z92n?3M3M3L4M2N0O10O101O3L4M3M3LYU_7"}, "image_id": 143, "id": 2304}, {"iscrowd": 0, "category_id": 1, "bbox": [395.0, 340.0, 42.0, 34.0], "area": 814, "segmentation": {"size": [512, 512], "counts": "SkU63k?2M4M2M3N3L301O00010O0010Of@]OW?g00O01O01O010O01O01O010O01O01O010O00010O0N2N3L3M4M2M3N3LRUU1"}, "image_id": 143, "id": 2305}, {"iscrowd": 0, "category_id": 1, "bbox": [415.0, 376.0, 73.0, 43.0], "area": 1804, "segmentation": {"size": [512, 512], "counts": "Yl_62k?3M4M2O101O001O000000001L30001O010O00010O0O20O01O0N2N3L301O01O01O010O00010O010O00010O0010O0010O0010O01O000N3M2O2O01O010O01O01O000N3M2M4^Oo@1S?Mo@1S?LQA1R?LPA2R?LQA1PS<"}, "image_id": 143, "id": 2306}, {"iscrowd": 0, "category_id": 1, "bbox": [350.0, 433.0, 39.0, 27.0], "area": 541, "segmentation": {"size": [512, 512], "counts": "n]_51m?2N2M4M2M4N1010O00010O010O00010O010O01O01O010O010O00010O010O00010O0M4M2M3N3MTRm1"}, "image_id": 143, "id": 2307}, {"iscrowd": 0, "category_id": 1, "bbox": [314.0, 446.0, 12.0, 15.0], "area": 104, "segmentation": {"size": [512, 512], "counts": "X^m43k?2N3M2N3M2001M2N2M4M2NRbl2"}, "image_id": 143, "id": 2308}, {"iscrowd": 0, "category_id": 1, "bbox": [370.0, 464.0, 74.0, 48.0], "area": 1945, "segmentation": {"size": [512, 512], "counts": "^_i52l?2M4M2M3N3L301O010O00010O010O010O00010O0N3M2M4M2N2M4M2N3M201O010O00010O010O01O00001O0PO]Ac0d>ZO^Af0b>XOaAg0`>UOcAk0i>01O001O001O001O00001O001O001O00001O001O0O2M2N2M4M2N3Mc`Q1"}, "image_id": 143, "id": 2309}, {"iscrowd": 0, "category_id": 1, "bbox": [333.0, 487.0, 32.0, 25.0], "area": 490, "segmentation": {"size": [512, 512], "counts": "goV51l?4M2M3M4M2M40O00010O010O00010O010O00010O001O00001O001L3N2M4M2MbPY2"}, "image_id": 143, "id": 2310}, {"iscrowd": 0, "category_id": 1, "bbox": [441.0, 505.0, 21.0, 7.0], "area": 82, "segmentation": {"size": [512, 512], "counts": "ool61l?3M300001O00001O001O00001O00001O001O0000Q`h0"}, "image_id": 143, "id": 2311}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 510.0, 3.0, 2.0], "area": 4, "segmentation": {"size": [512, 512], "counts": "n?2o?O00QPn7"}, "image_id": 143, "id": 2312}, {"iscrowd": 0, "category_id": 1, "bbox": [10.0, 510.0, 5.0, 2.0], "area": 8, "segmentation": {"size": [512, 512], "counts": "n_52n?0001O00QPh7"}, "image_id": 143, "id": 2313}, {"iscrowd": 0, "category_id": 1, "bbox": [31.0, 511.0, 2.0, 1.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "oo?1o?0QP_7"}, "image_id": 143, "id": 
2314}, {"iscrowd": 0, "category_id": 1, "bbox": [70.0, 75.0, 67.0, 69.0], "area": 2197, "segmentation": {"size": [512, 512], "counts": "QSS12n?1N3M2N2N2N2N3N1N2N2N2N2N3N1N2N2N2N3M2N2O1N2N2N3M2N2O10000010O000000010O00000N2N2O2M2N2N2N2N3M2O1N2N2N2N3M2O1N2N2N3M0N202N3N1N2N2N2N\\\\k5"}, "image_id": 144, "id": 2315}, {"iscrowd": 0, "category_id": 1, "bbox": [127.0, 135.0, 32.0, 33.0], "area": 482, "segmentation": {"size": [512, 512], "counts": "`do11n?2N2N2N2O1N3M2N2N2N2O1O2O000001O0001O0000N3M2O1N2N2N3M2N2N2N2NT[`5"}, "image_id": 144, "id": 2316}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 428.0, 22.0, 21.0], "area": 370, "segmentation": {"size": [512, 512], "counts": "em]13j?6K4M2N2N2O0O2O00000000000000O2O0O2O1N3L4L_RW6"}, "image_id": 145, "id": 2317}, {"iscrowd": 0, "category_id": 1, "bbox": [127.0, 470.0, 58.0, 42.0], "area": 1447, "segmentation": {"size": [512, 512], "counts": "boo11n?3M2O1N2N3M2N2O1N3M2N1O100O1O1O1O100O1O1O1O100O1O1O1O100O1O1O1O100O1O11O1O2N1O1O1O2N1N2N2N3M2O1N2N3M2N2O1N3M2N2N2O_PS5"}, "image_id": 145, "id": 2318}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 493.0, 36.0, 19.0], "area": 547, "segmentation": {"size": [512, 512], "counts": "]?c0]?01O00000000001O000000001O00000000001O000000001O00000000001O0000000K6Ib`]7"}, "image_id": 145, "id": 2319}, {"iscrowd": 0, "category_id": 1, "bbox": [232.0, 0.0, 46.0, 10.0], "area": 278, "segmentation": {"size": [512, 512], "counts": "UPd35f?50000000000000000000000000000000000000000000O1I700000000000000000000000000000000000000000PPe3"}, "image_id": 147, "id": 2320}, {"iscrowd": 0, "category_id": 1, "bbox": [298.0, 0.0, 21.0, 6.0], "area": 126, "segmentation": {"size": [512, 512], "counts": "PPe46j?000000000000000000000000000000000000000P`P3"}, "image_id": 147, "id": 2321}, {"iscrowd": 0, "category_id": 1, "bbox": [406.0, 0.0, 14.0, 4.0], "area": 35, "segmentation": {"size": [512, 512], "counts": "PP[63m?1O0000O10000O1000000O1000PP^1"}, "image_id": 147, "id": 2322}, {"iscrowd": 0, "category_id": 1, "bbox": [111.0, 2.0, 63.0, 52.0], "area": 1996, "segmentation": {"size": [512, 512], "counts": "m`g12k?3N2N3L3N3L3N2M4M2M4M20010O0010O0010O0010O010O00010O02OO00010O01O0N2N30O010O00010O010OO1O2O010O01O01O010O01O01OYO\\A2e>K^A1e>L^A2e>K^A1e>L^A2d>L^A1f>L]A1V^Y5"}, "image_id": 147, "id": 2323}, {"iscrowd": 0, "category_id": 1, "bbox": [384.0, 32.0, 39.0, 29.0], "area": 988, "segmentation": {"size": [512, 512], "counts": "jQP61^?a0G9001O000001O00000000000000000000000001O000001O000000000000000000000000M4^O^_\\1"}, "image_id": 147, "id": 2324}, {"iscrowd": 0, "category_id": 1, "bbox": [184.0, 33.0, 100.0, 145.0], "area": 8044, "segmentation": {"size": [512, 512], "counts": "nTl2?Z?7M3N3M2N2N2N20000000000000001O01O0000000000000000000001O000001O000000000000000001O00000E;C=B>C=C=C=C=C=L5O00000000000000000000000000O100O100O100O100O100O100O100O1O1N2N2M3N2N2N2L4E;DZOWAb0U?N3M2N2M4O001G]@4g?O2MS^Q6"}, "image_id": 147, "id": 2326}, {"iscrowd": 0, "category_id": 1, "bbox": [145.0, 55.0, 51.0, 41.0], "area": 1130, "segmentation": {"size": [512, 512], "counts": "ZbX21m?3L3N3L3N3M2M3N3O001O01O010O01O01O01j@YOR?j000010O010O00010O010O00010O010O01O01O010O01OO2L3N3L3N3L3N2M4M2Mlmm4"}, "image_id": 147, "id": 2327}, {"iscrowd": 0, "category_id": 1, "bbox": [387.0, 68.0, 53.0, 91.0], "area": 2538, "segmentation": {"size": [512, 512], "counts": "fdQ63j?3M4L3M3M4L3M3L5L3M3M4L3M3M4L3L4M4L2N00002N3O1010O00010O00O2L3L41M2M3M4L3M2N0O4O2HlA_NW>^1311O4K4M3\\OZAKj>1YALj>1YALj>0[ALi>0ZAMi>0^nS1"}, "image_id": 147, "id": 2328}, {"iscrowd": 0, "category_id": 1, 
"bbox": [466.0, 68.0, 28.0, 42.0], "area": 697, "segmentation": {"size": [512, 512], "counts": "WSY72j?5L3M3L5L3M3L4M4L3L4O2O00010O0001M2M3L5L3M3L4M4L3L4M4Ljm8"}, "image_id": 147, "id": 2329}, {"iscrowd": 0, "category_id": 1, "bbox": [23.0, 71.0, 129.0, 79.0], "area": 4236, "segmentation": {"size": [512, 512], "counts": "Sc;2l?3L3N3L3N3M2M3N3M2M4PAROj>T1M2010O0010OO1N3O010O01O01O010O010O01O01O010O01O01O010O01O010O01O010O01O01O010O01O010O01O01O010O011N1O010O01O010N100010O010O00010O010O0010O0010O010O00010O010O00010O010O0010O0010O0010O0010O010O00010O010O0010O0010ON2M4M2M4M2N2M4M2M4M2M4M[lc5"}, "image_id": 147, "id": 2330}, {"iscrowd": 0, "category_id": 1, "bbox": [457.0, 121.0, 50.0, 57.0], "area": 1807, "segmentation": {"size": [512, 512], "counts": "ReT73i?4M3M4L3M3M4L3M3PASOj>S11O01L3L4M4L3O110O000010O000010O00010O00010O00010O0001L3M3M4K4M3M3M4L3M3M4K4M3M4Lo[2"}, "image_id": 147, "id": 2331}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 195.0, 20.0, 56.0], "area": 570, "segmentation": {"size": [512, 512], "counts": "S6h1X>N3L3N3L3N2M4L3N2M4M2M4M2M3N3L3N3L3N2Miie7"}, "image_id": 147, "id": 2332}, {"iscrowd": 0, "category_id": 1, "bbox": [31.0, 211.0, 46.0, 41.0], "area": 1093, "segmentation": {"size": [512, 512], "counts": "_g?1m?2M3N3L3N3M2M3N3L3N3L3O20O00010O010O01O01O010O010O00010O010O0M4M2N2N3O010O0O1N3M2M4M2M4M2NQYi6"}, "image_id": 147, "id": 2333}, {"iscrowd": 0, "category_id": 1, "bbox": [81.0, 231.0, 41.0, 42.0], "area": 1058, "segmentation": {"size": [512, 512], "counts": "ThX11b?3d@0Z?2d@1X?=N2N3L3N3M20010O0010O0010O0010O0010O0010O0010O0010O0010ON3L7J2M4M2N2M4M2M4MZhR6"}, "image_id": 147, "id": 2334}, {"iscrowd": 0, "category_id": 1, "bbox": [366.0, 247.0, 21.0, 22.0], "area": 462, "segmentation": {"size": [512, 512], "counts": "gWg5f0Z?000000000000000000000000000000000000000YXn1"}, "image_id": 147, "id": 2335}, {"iscrowd": 0, "category_id": 1, "bbox": [19.0, 258.0, 83.0, 52.0], "area": 2100, "segmentation": {"size": [512, 512], "counts": "kh91l?4L3M3M4L3M3N3L3N201O01O01O010O00010O01O01O01O01O01O01O01O01O01O01O010OWOPAb0Q?ZORAf0T?10O00010O00010O00010O0010O0N3L3O110O00010O00010O00010O00010O010O00010O00N3L3M3M4M2M3M4L3MXg\\6"}, "image_id": 147, "id": 2336}, {"iscrowd": 0, "category_id": 1, "bbox": [121.0, 263.0, 60.0, 64.0], "area": 2396, "segmentation": {"size": [512, 512], "counts": "Xil13j?4M2M3M4L3M301N1WA\\OT>f0iA]OV>e0gA^OU>e0hA^OU>e0hA^OV>X1L3O1010O00010O0001O01O01O01O0N2M4L3M3M4N11O01O01O01O01O01O01O01O010O000O2L3M4L4Fo@@T?>n@_OU?>9L3M4L3M\\WU5"}, "image_id": 147, "id": 2337}, {"iscrowd": 0, "category_id": 1, "bbox": [300.0, 267.0, 48.0, 39.0], "area": 1691, "segmentation": {"size": [512, 512], "counts": "bXf4n0R?2N0000000000000000000000000000000M3000000000000000000000000L40000000000000000000000000000000Ofga2"}, "image_id": 147, "id": 2338}, {"iscrowd": 0, "category_id": 1, "bbox": [384.0, 273.0, 46.0, 52.0], "area": 1489, "segmentation": {"size": [512, 512], "counts": "iYP61l?4L3M3M4L3M3M4L3M3M4N11O01O01OM4L6J4M200010N1N210O00010O00010O0N2M4L3M3M4L3M3M4L3M3M4L3M3MZgX1"}, "image_id": 147, "id": 2339}, {"iscrowd": 0, "category_id": 1, "bbox": [210.0, 274.0, 73.0, 99.0], "area": 4064, "segmentation": {"size": [512, 512], "counts": "`ZY3:V?`0H9O0000000000000001O00000006J001O0000000000000001O000000E;@`0C=01O0000000001O0000000000hBfMjXDBh;NhD2X;^OXEb0S=0000000000HYUb3"}, "image_id": 147, "id": 2340}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 293.0, 61.0, 99.0], "area": 3409, "segmentation": {"size": [512, 512], "counts": 
"o:n0S?0O01O01O001L3N2M4M2M4M2N2M4\\O\\NhBf1V=\\NgBh1U=\\NhBf1V=\\NgBg1V=]NgBf1V=c0M4M2M3O20O010O0O1N3L3N3M2N210O010O0O1M4M2^O_BbNe=Z1^BdNd=Z1_BbNd=[1_BcNd=Z1^BcNe=Z1c0M2M3N3L3N3M2M4M2M3N3L3N3L3N^VQ7"}, "image_id": 147, "id": 2341}, {"iscrowd": 0, "category_id": 1, "bbox": [421.0, 301.0, 55.0, 57.0], "area": 1609, "segmentation": {"size": [512, 512], "counts": "bjb62m?1N3N2M2O2M2O2M3N1N3N2M2O2M3N1N3N20O10N2N1N3N2M2O2M3N1N3N01O2M3N1N3N2M2O2M3M2O2M3N1N3N2M2O2M3N1N3N2M2O2M3N1NUfa0"}, "image_id": 147, "id": 2342}, {"iscrowd": 0, "category_id": 1, "bbox": [274.0, 318.0, 8.0, 15.0], "area": 115, "segmentation": {"size": [512, 512], "counts": "nYY4?a?000000000000KWfb3"}, "image_id": 147, "id": 2343}, {"iscrowd": 0, "category_id": 1, "bbox": [483.0, 319.0, 29.0, 61.0], "area": 1326, "segmentation": {"size": [512, 512], "counts": "`ka74i?3M3M4]OC^A`0_>C^A`0_>D]A`0_>C^A`0_>C^A`0_>c0L3M4N100010O00O2O01O01O01O01O01O000M4LPF"}, "image_id": 147, "id": 2344}, {"iscrowd": 0, "category_id": 1, "bbox": [182.0, 321.0, 23.0, 25.0], "area": 522, "segmentation": {"size": [512, 512], "counts": "eZk24Y?c0O100000010O000000000000000000000000000000GWVi4"}, "image_id": 147, "id": 2345}, {"iscrowd": 0, "category_id": 1, "bbox": [287.0, 321.0, 22.0, 27.0], "area": 552, "segmentation": {"size": [512, 512], "counts": "Yj_4b0V?80000000000000000000000001O00000001O0000HVVU3"}, "image_id": 147, "id": 2346}, {"iscrowd": 0, "category_id": 1, "bbox": [358.0, 326.0, 58.0, 68.0], "area": 2144, "segmentation": {"size": [512, 512], "counts": "nZc54i?3M3g@Hg>;VAHh>:UAIk>g00O0001_AnNS>Q1jAROV>n0gAVOX>k0dAXO]>U101O01O01O01O01O01O01O0O1010O00010OO1M4L2N000002N4L3AfAUO]>h0gATO]>i0eATO^>m0:01O01ON3L31O01O0M3M4L3M3M4L3Mfe_1"}, "image_id": 147, "id": 2347}, {"iscrowd": 0, "category_id": 1, "bbox": [73.0, 340.0, 61.0, 87.0], "area": 2815, "segmentation": {"size": [512, 512], "counts": "QlT13j?3M4L3M3L5L3M3O2iAUOW=k0bBUO\\O3R>h0_B@a=`0[BCe==XBFh=T1O000M4L3M3M4L3M3M2N03N3N11O01O0N2M4M2M4L3M3M4M21O01L3M3N3L3M3M4N11O01L3M4L3N2M4L3M3M4M2M3MSel5"}, "image_id": 147, "id": 2348}, {"iscrowd": 0, "category_id": 1, "bbox": [151.0, 363.0, 74.0, 55.0], "area": 2092, "segmentation": {"size": [512, 512], "counts": "Xl[22k?4^OLSA7k>LRA7j>LSA7k>KRA8l>`0O00010O01O0YORA=m>@WA?j>^OXAb0h>[O[Af0P?O010O0001j@ZOo>g0n@\\OP?l0M21O01O01O01O010O00010O00010O00010O00010O0O2O0O101N100O2ON1N3M2N3M2010O0010O010O3N2N2M3N2M3N2M3N2M3N2NgS_4"}, "image_id": 147, "id": 2349}, {"iscrowd": 0, "category_id": 1, "bbox": [439.0, 363.0, 23.0, 26.0], "area": 378, "segmentation": {"size": [512, 512], "counts": "mkk63j?3M4K4M3M4M21O01O01O01O01O01O00O2L3M3M4L3L4Mcdh0"}, "image_id": 147, "id": 2350}, {"iscrowd": 0, "category_id": 1, "bbox": [399.0, 364.0, 29.0, 32.0], "area": 593, "segmentation": {"size": [512, 512], "counts": "TlW62k?3M3M4L3M3M4L3M301O01O01O01O01O01O01O01O0N2M4L3M3M4L3M4L`dY1"}, "image_id": 147, "id": 2351}, {"iscrowd": 0, "category_id": 1, "bbox": [449.0, 377.0, 28.0, 45.0], "area": 684, "segmentation": {"size": [512, 512], "counts": "nlP73j?4L3M3L5L3M3M4L3M3M4L3M301O01M2M3M4K4M3M4L3M3M3M4L3M3MVTa0"}, "image_id": 147, "id": 2352}, {"iscrowd": 0, "category_id": 1, "bbox": [239.0, 392.0, 118.0, 44.0], "area": 3056, "segmentation": {"size": [512, 512], "counts": "olg32k?3N210O01O01O01O01O01O0N20M3M3M4L3M3N3L3N2010O00010O00010O00010O00M40O0010O00010O0010O00010O0001N1N2010O00010O00010O00010O00010O00010O0M02O30O00010O00010O000N3M2M3M40O00010O00N3O00O2L3O101O01O01O01O01O01O01O01O01O01O000M4M2M3M4L3M3M4L]S]2"}, "image_id": 147, "id": 2353}, {"iscrowd": 0, "category_id": 1, 
"bbox": [349.0, 409.0, 39.0, 39.0], "area": 979, "segmentation": {"size": [512, 512], "counts": "dm^53j?3M4L3M3N3L3M3M4L3O101O01O01O01O01O01O0010O00010O00010O00O2L3M3M4L3M3N3L3M3MQcm1"}, "image_id": 147, "id": 2354}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 418.0, 22.0, 26.0], "area": 402, "segmentation": {"size": [512, 512], "counts": "R=e0[?0010O01O01O01O01O01O01O01O01O0M3M4L3M4L3Mjbd7"}, "image_id": 147, "id": 2355}, {"iscrowd": 0, "category_id": 1, "bbox": [139.0, 423.0, 63.0, 47.0], "area": 1915, "segmentation": {"size": [512, 512], "counts": "[nU23j?3M3M4L3M3M4L3M3N3L30001O01O01O01O01O0N3L3M3O2O00M4L3N210O01O01O01O01O01OO2L3O1010O00010O00010O00010O01O0O1M4L3M3M4L3M3M4L3M_bj4"}, "image_id": 147, "id": 2356}, {"iscrowd": 0, "category_id": 1, "bbox": [238.0, 424.0, 88.0, 65.0], "area": 3249, "segmentation": {"size": [512, 512], "counts": "\\^g34i?3M3M4L3M4L3N2M4M21O01M2M3N3L3N2010O00010O00010O00010O00010O00010O00010O00010O0001^AdN_>_1O01O01O010O00010O00010O00010O00010O00010OO1M4L3M4L3M3010O0001M2M310O00010O0N2M4M2M3M4L3M3MSbl2"}, "image_id": 147, "id": 2357}, {"iscrowd": 0, "category_id": 1, "bbox": [413.0, 445.0, 49.0, 67.0], "area": 1911, "segmentation": {"size": [512, 512], "counts": "ao^63j?4L5K5K5K3M3M4RAQOe>W1M3M3O100001O00001OM3M3M3M3M3M3M3O11O00001M2M3M4M2M3M4L3M4L3M3M4L3M3M4L3M3M4L3M3Nmah0"}, "image_id": 147, "id": 2358}, {"iscrowd": 0, "category_id": 1, "bbox": [77.0, 448.0, 49.0, 42.0], "area": 1257, "segmentation": {"size": [512, 512], "counts": "knV12k?3M4L3M3N3L3M3M4L3O110O00010O00010O00010O00010O0010O00010O0010O00010O00010O00O2L3M3M4L3N2M4L3M3MfaP6"}, "image_id": 147, "id": 2359}, {"iscrowd": 0, "category_id": 1, "bbox": [351.0, 456.0, 66.0, 56.0], "area": 1898, "segmentation": {"size": [512, 512], "counts": "ao_53j?3M3M4L3M3N3O00010O00010O00010O00010O0O1M4L3M3M4L3M3M4L3M310O00010O0O1010O0M3M4K4M3N3O0N2M4L3M3N210O00010O00010O00010M2M3M4L3M3M4L`Q_1"}, "image_id": 147, "id": 2360}, {"iscrowd": 0, "category_id": 1, "bbox": [225.0, 459.0, 12.0, 9.0], "area": 68, "segmentation": {"size": [512, 512], "counts": "]n`34j?20010O00010O00010O00NdQY4"}, "image_id": 147, "id": 2361}, {"iscrowd": 0, "category_id": 1, "bbox": [50.0, 483.0, 44.0, 29.0], "area": 842, "segmentation": {"size": [512, 512], "counts": "n_i02k?3M3N21O00M3M3M3M3M3M3N200001O00001O0000001O00001O00001O00001O00001O0Eh@NX?Ok@1V?Km@5S?HQA7\\?0001O0M3MZ``6"}, "image_id": 147, "id": 2362}, {"iscrowd": 0, "category_id": 1, "bbox": [140.0, 483.0, 55.0, 29.0], "area": 1159, "segmentation": {"size": [512, 512], "counts": "h_V21m?3L3M3M4L3M3M4N110O00010O000j@XOU?i000001O0000M3001O00001O00001O001O00001O00001O00001O00001O00001O00001O00001Bb@6`?Hb@5f?M_Pn4"}, "image_id": 147, "id": 2363}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 486.0, 1.0, 3.0], "area": 3, "segmentation": {"size": [512, 512], "counts": "V?3g`o7"}, "image_id": 147, "id": 2364}, {"iscrowd": 0, "category_id": 1, "bbox": [286.0, 489.0, 28.0, 19.0], "area": 301, "segmentation": {"size": [512, 512], "counts": "a__43j?4L3N210O00010O00010O00010O00010O00010O00010O0001M2M3Lc`R3"}, "image_id": 147, "id": 2365}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 492.0, 20.0, 20.0], "area": 317, "segmentation": {"size": [512, 512], "counts": "\\?d0\\?0001O00001O00001O001O00001O000N3M2M3Ma`e7"}, "image_id": 147, "id": 2366}, {"iscrowd": 0, "category_id": 1, "bbox": [500.0, 494.0, 12.0, 18.0], "area": 194, "segmentation": {"size": [512, 512], "counts": "b_j73j?>E00000000000O1000000"}, "image_id": 147, "id": 2367}, {"iscrowd": 0, "category_id": 1, "bbox": [201.0, 
496.0, 25.0, 16.0], "area": 299, "segmentation": {"size": [512, 512], "counts": "noT32j?4M3M3M3O1001O0000001O00001O00001O0000001O000M4L]`^4"}, "image_id": 147, "id": 2368}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 16.0, 15.0], "area": 185, "segmentation": {"size": [512, 512], "counts": "0LUA0n>MUA0o>LTA2n>LUA0a^c2"}, "image_id": 150, "id": 2376}, {"iscrowd": 0, "category_id": 1, "bbox": [180.0, 13.0, 13.0, 15.0], "area": 115, "segmentation": {"size": [512, 512], "counts": "bPj21m?2Y@O\\?4a@O_?:O0001O0N3L3O2O0N2Nb_o4"}, "image_id": 150, "id": 2377}, {"iscrowd": 0, "category_id": 1, "bbox": [501.0, 13.0, 11.0, 17.0], "area": 156, "segmentation": {"size": [512, 512], "counts": "^`j71o?k0jAXOV>h0hA[OW>W11O01O01O01O01O0aNhAW1W>fNlAZ1U>bNnA^1[>0O00N3L3M3M4L3M3N3L3M3M4L3M4L3NhnU5"}, "image_id": 150, "id": 2379}, {"iscrowd": 0, "category_id": 1, "bbox": [363.0, 23.0, 17.0, 27.0], "area": 259, "segmentation": {"size": [512, 512], "counts": "Zae51l?4M6I3M3M4M2O110M10N4L3N2M4L3M3MZoQ2"}, "image_id": 150, "id": 2380}, {"iscrowd": 0, "category_id": 1, "bbox": [190.0, 32.0, 42.0, 66.0], "area": 1553, "segmentation": {"size": [512, 512], "counts": "hQo23k?2e@Lg>8UALg>7WAKf>8WAKg>8UALg>h0M4L3N201O01O01hAcNl=\\1QBhNn=X1PBjNP>c110O00010ObNmAo0S>nNPBS1P>iNSBW1m=fNWBY1X>1O01O0O1M3M4L3M3M4L3M3N3L3M3M4L3MZn[4"}, "image_id": 150, "id": 2381}, {"iscrowd": 0, "category_id": 1, "bbox": [104.0, 51.0, 27.0, 28.0], "area": 429, "segmentation": {"size": [512, 512], "counts": "TRd13k?2N3M2N3M2N3M2M40O010O010O010O010O0O2M2M3N3M2N3M2N3MV^n5"}, "image_id": 150, "id": 2382}, {"iscrowd": 0, "category_id": 1, "bbox": [463.0, 51.0, 49.0, 80.0], "area": 2184, "segmentation": {"size": [512, 512], "counts": "icW73j?3N3L3M3M4L3N2M4L3M3N3L3M3N30O01O01O010O000N2M000010O001O4M2M3M4L3M4M2N201O01O010O00010O01ON3L3M\\N"}, "image_id": 150, "id": 2383}, {"iscrowd": 0, "category_id": 1, "bbox": [394.0, 52.0, 13.0, 13.0], "area": 126, "segmentation": {"size": [512, 512], "counts": "hQU65i?4M101N101O0001N101N2N3KZ^d1"}, "image_id": 150, "id": 2384}, {"iscrowd": 0, "category_id": 1, "bbox": [242.0, 54.0, 42.0, 61.0], "area": 1650, "segmentation": {"size": [512, 512], "counts": "bRi34i?3a@Lo>7n@Lo>7n@Lo>e0L3M3L5O000010O`AhNX>X1eAkN[>U1bAoN^>Y100010ON2M4L3O1010O00010O000M4K4M3M4L3M3M4L3M3[Oo@8U?Dn@8_?M4L3Mmma3"}, "image_id": 150, "id": 2385}, {"iscrowd": 0, "category_id": 1, "bbox": [361.0, 58.0, 24.0, 34.0], "area": 444, "segmentation": {"size": [512, 512], "counts": "fbd51l?3M4L3M3N3L3M3M4L3N3O00010OM3M4L3M3N3L3M3M4L3NT^o1"}, "image_id": 150, "id": 2386}, {"iscrowd": 0, "category_id": 1, "bbox": [408.0, 67.0, 13.0, 23.0], "area": 157, "segmentation": {"size": [512, 512], "counts": "eR\\63j?3M4L3M3M4L31L3M3M4L3M3Mn]]1"}, "image_id": 150, "id": 2387}, {"iscrowd": 0, "category_id": 1, "bbox": [308.0, 76.0, 48.0, 66.0], "area": 2025, "segmentation": {"size": [512, 512], "counts": "PSj42k?4L3d@Il>:QAIl>;PAI>Ij=a0eAI>Jk=k0RBXOo=h0mA[OS>e0jA_OU>a0hABY>R100O2L3N2010O00010O00010O0001O03M010O00010M2M3M4L3M3M4L3M3M3M4L3L4M4L3M3M4LZm]2"}, "image_id": 150, "id": 2388}, {"iscrowd": 0, "category_id": 1, "bbox": [414.0, 76.0, 22.0, 30.0], "area": 408, "segmentation": {"size": [512, 512], "counts": "SS_63j?3M3M4L3M3M4K4O101O01O01O01N1L4M3M4L3M3M4LcmU1"}, "image_id": 150, "id": 2389}, {"iscrowd": 0, "category_id": 1, "bbox": [378.0, 85.0, 14.0, 13.0], "area": 122, "segmentation": {"size": [512, 512], "counts": "kRm54i?3M4O00010O00010O0001L3M3M[mk1"}, "image_id": 150, "id": 2390}, {"iscrowd": 0, "category_id": 1, "bbox": [353.0, 99.0, 54.0, 59.0], 
"area": 1640, "segmentation": {"size": [512, 512], "counts": "`d`53k?2M3M4L3M3O2O000M40O000010O00010N1M3M4M2M3M4L3M3M4N11O01O01O01O01O010N0OO0001O4L3M3M4L3M3M4L3M3M4M2M3M4L3Mk\\d1"}, "image_id": 150, "id": 2391}, {"iscrowd": 0, "category_id": 1, "bbox": [407.0, 103.0, 52.0, 67.0], "area": 1988, "segmentation": {"size": [512, 512], "counts": "md[61l?4L3M3M4L3N201O01O01A^O^Ab0_>A_Aa0^>B_Ab0]>A`Ab0]>b0M4N10010O00010O00010O0002ON100O2O0M3N3L1O03M4L3M3M4L3M3M4L3M3M4L3Dk@JX?3l@IX?2=Mg\\j0"}, "image_id": 150, "id": 2392}, {"iscrowd": 0, "category_id": 1, "bbox": [456.0, 130.0, 48.0, 65.0], "area": 1739, "segmentation": {"size": [512, 512], "counts": "hUT71l?4K4M3M4L31ON3L3M3M4L3M3M4L3M3O2O01O01O0N2O2O01O01O01O01N1M3M11N3M4L3M1O003M3M4L3M3UORAa0R?[OQAb0X?M4Hb@Ia?47Lik3"}, "image_id": 150, "id": 2393}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 150.0, 4.0, 11.0], "area": 23, "segmentation": {"size": [512, 512], "counts": "f4;f?L3M3M\\km7"}, "image_id": 150, "id": 2394}, {"iscrowd": 0, "category_id": 1, "bbox": [9.0, 150.0, 50.0, 46.0], "area": 1289, "segmentation": {"size": [512, 512], "counts": "de43j?4L3M3N3O000010O0001O0M3M4L3L4O20O00010O00010O00010O01O01O01O000M4L3M3N3ON2M3M4K4M3M4L3M3M4L3M3L5LY[R7"}, "image_id": 150, "id": 2395}, {"iscrowd": 0, "category_id": 1, "bbox": [51.0, 178.0, 54.0, 53.0], "area": 1636, "segmentation": {"size": [512, 512], "counts": "gfi03j?4L3M3M4O00010O00010L3M3M4L30001O01O01O01O01L3M3M4L3M3N3O00010O00010O00010O00O2L3M3M4L3M3M4L3M3N3L3M3M4L3MYZ[6"}, "image_id": 150, "id": 2396}, {"iscrowd": 0, "category_id": 1, "bbox": [101.0, 197.0, 55.0, 106.0], "area": 2429, "segmentation": {"size": [512, 512], "counts": "Yib12k?3L4M4L3M3M4L3M3O2N10001kNROUCo0hW1gAfN\\>W19L3M3M4L3L4M4L3M3M4L3Miia5"}, "image_id": 150, "id": 2397}, {"iscrowd": 0, "category_id": 1, "bbox": [17.0, 200.0, 23.0, 19.0], "area": 270, "segmentation": {"size": [512, 512], "counts": "af84i?3N2M4N10010O00010O00010O00010O00010ON3L3M3Mei[7"}, "image_id": 150, "id": 2398}, {"iscrowd": 0, "category_id": 1, "bbox": [159.0, 200.0, 51.0, 72.0], "area": 2064, "segmentation": {"size": [512, 512], "counts": "kg_23j?4L3M3M4L3M3M4L3M3M4L3M3bAfNS>^1iAeNV>d1O00010O00010O000O2L3M3M4L31O01ON3L3M3M4L3M3M4L3M3M4L3M3M4L3M3M4L3M3M4L3Mcif4"}, "image_id": 150, "id": 2399}, {"iscrowd": 0, "category_id": 1, "bbox": [209.0, 225.0, 49.0, 68.0], "area": 1832, "segmentation": {"size": [512, 512], "counts": "WhX34i?3N2M4L3M3M4L3M3O2O01O01N1M3M4fAjNe=Y1XBjNf=X1WBkNf=Y1VBkNj=T1SBoNm=c1O01O01O01O010O000O2O000M4L3GhAgN[>V1iAgNZ>U1:M3M4L3N2M4L3M3M4L3M3M4Mfhn3"}, "image_id": 150, "id": 2400}, {"iscrowd": 0, "category_id": 1, "bbox": [293.0, 227.0, 26.0, 22.0], "area": 358, "segmentation": {"size": [512, 512], "counts": "agb41l?3M4L3M3N3O00010O0001O01O00010O00010O00010OL4M4L3MjXP3"}, "image_id": 150, "id": 2401}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 231.0, 27.0, 61.0], "area": 977, "segmentation": {"size": [512, 512], "counts": "g7[1c>3L3M3M4L3N21N1M3M4L3M3M4M2M3M4L3M4L30001L3M3M4L3N2MeXb7"}, "image_id": 150, "id": 2402}, {"iscrowd": 0, "category_id": 1, "bbox": [81.0, 245.0, 28.0, 30.0], "area": 545, "segmentation": {"size": [512, 512], "counts": "YhX14i?3M3L4M4L3L401O01O01O01O0001O01O01O01O01OM4L3M3M3L5L3MXXY6"}, "image_id": 150, "id": 2403}, {"iscrowd": 0, "category_id": 1, "bbox": [25.0, 246.0, 31.0, 35.0], "area": 668, "segmentation": {"size": [512, 512], "counts": "bh<2k?3M3M4L3M3M4L3M3M40O00010O0001N1N21O0M3TOPAe0W?N3OM31N1M3M4L3M3M4LWhS7"}, "image_id": 150, "id": 2404}, {"iscrowd": 0, "category_id": 1, "bbox": [257.0, 
249.0, 51.0, 84.0], "area": 2304, "segmentation": {"size": [512, 512], "counts": "ihP41l?3M4Y@L]?>L5M3dAWO]=j0_BYO`=j0[BZOb=j0YBYOEFn=R1ZB^Of=c0VB@k=`0QBDn=W1N2M4L3N201O01O01O00010O00M4L3M3M4L3M3M4L2N0000001O4L3M3M3M4L3M3M4L3M3N3L3M3MVhU3"}, "image_id": 150, "id": 2405}, {"iscrowd": 0, "category_id": 1, "bbox": [316.0, 258.0, 46.0, 52.0], "area": 1478, "segmentation": {"size": [512, 512], "counts": "oXn42k?3M3b@Jo>:n@Hj>0PA;3Hj>`0SADj>j0N210O00010O001O00001O01O01O01O01O01O01M2M3M4M21M2M3M4L3M3M4L3N3L3M3M4L3M3M4L3NjgZ2"}, "image_id": 150, "id": 2406}, {"iscrowd": 0, "category_id": 1, "bbox": [360.0, 270.0, 52.0, 55.0], "area": 1545, "segmentation": {"size": [512, 512], "counts": "fYd53j?3M4L3M3M4N100010O00010O00010O0001L3M3M4L3M3M4L3M3M4O01O01O01O01O01O01OO2L3M3M4L3M3M4L3M3M4L3M3M4L3M3M]ga1"}, "image_id": 150, "id": 2407}, {"iscrowd": 0, "category_id": 1, "bbox": [185.0, 277.0, 30.0, 31.0], "area": 592, "segmentation": {"size": [512, 512], "counts": "]il23j?4L3M3M4L3M3M4L30001O01O000M4L30001O01O01O01OO2L3M3M4L3M4LWWd4"}, "image_id": 150, "id": 2408}, {"iscrowd": 0, "category_id": 1, "bbox": [407.0, 287.0, 55.0, 62.0], "area": 1929, "segmentation": {"size": [512, 512], "counts": "_j[62k?3M4K4M3O2O00010O0N2M4K4M30001O0M3O2N1O101O0O101L3M3M3N3L3N201O01O010O00010O0N2M4L3M3M4K4M3M4L3M3L5L3M3M4L3Lmfh0"}, "image_id": 150, "id": 2409}, {"iscrowd": 0, "category_id": 1, "bbox": [118.0, 299.0, 59.0, 52.0], "area": 1904, "segmentation": {"size": [512, 512], "counts": "]Zk12k?4L3L4M4L3M3M4L3M3M4L31O01O01O01O01O01O01O01O0001O01O01O01O01O01O01O01O01O01O01O01O01O01O01O01OM4L3M3M4L3M3M4L3M3L4MYVW5"}, "image_id": 150, "id": 2410}, {"iscrowd": 0, "category_id": 1, "bbox": [322.0, 305.0, 24.0, 20.0], "area": 303, "segmentation": {"size": [512, 512], "counts": "mYQ53j?3M3M4L31O01O01O01O0001O01O01O01O01O0O1M4L3M3M\\fb2"}, "image_id": 150, "id": 2411}, {"iscrowd": 0, "category_id": 1, "bbox": [466.0, 316.0, 46.0, 60.0], "area": 1380, "segmentation": {"size": [512, 512], "counts": "oZY74i?3N2M4L3M3M4L3M3O2O01O01O01O01O010O01OZASOY>n0cAUO]>k0`AXOa>g0\\A]Oc>P100M4L3M000002O200N3L3M3M4L3M3M4L3M3M4L3M3M4LRF"}, "image_id": 150, "id": 2412}, {"iscrowd": 0, "category_id": 1, "bbox": [338.0, 323.0, 29.0, 30.0], "area": 543, "segmentation": {"size": [512, 512], "counts": "gZY53k?2M3M4L3M4L3M3010O00010O00010O00010O0010OO1M4M2M3M4L3M3MiUX2"}, "image_id": 150, "id": 2413}, {"iscrowd": 0, "category_id": 1, "bbox": [243.0, 327.0, 93.0, 96.0], "area": 3131, "segmentation": {"size": [512, 512], "counts": "iki32k?3O110O00010O0000010N10010O00010O00010O0M3N2010O00010O00O2L3M3M40O00010O0000010O00010O00010O0O]A\\Ol=c0RB@m=a0PBCl=`0QBCl=a0PBBm=a0QBBk=a0RBBk=b0UB^Oi=c0TB@l=`0SBAj=]1LYOZBZOc=i0]BWO`=l0`BUO\\=o0dBPOY=S1gBmNV=V1jBkNS=X1mBgNS=Y1mBgNT=X1lBiNS=X1mBPN1b0S=^1QC^NS=a1mB\\NV=e1iBXNZ=h1gBTN]=k1cBSN_=n1`BoM_=S27N3M2N3M3L5L4L4eN_AR1j>L3M3M4L3M3M4L3M3M4L`eg2"}, "image_id": 150, "id": 2414}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 351.0, 42.0, 51.0], "area": 1385, "segmentation": {"size": [512, 512], "counts": "o:c1]>0001L3K5K5M310O000000010O000000010O000000010O000000010O000000010O000N2K6J5K5K5K5KndZ7"}, "image_id": 150, "id": 2415}, {"iscrowd": 0, "category_id": 1, "bbox": [369.0, 371.0, 13.0, 15.0], "area": 123, "segmentation": {"size": [512, 512], "counts": "lkh54j?2M4L3O110O01O000M4M2M3M]dP2"}, "image_id": 150, "id": 2416}, {"iscrowd": 0, "category_id": 1, "bbox": [439.0, 376.0, 17.0, 21.0], "area": 222, "segmentation": {"size": [512, 512], "counts": "Ylk61l?3L5L3M3M4N11O01O01O000M4L3M3M4LWdk0"}, "image_id": 150, "id": 
2417}, {"iscrowd": 0, "category_id": 1, "bbox": [164.0, 379.0, 65.0, 47.0], "area": 2201, "segmentation": {"size": [512, 512], "counts": "W\\b2m0S?00000000000000000000001O0000000000000000005K000000000000000D<0000000K50000000000000000000000000000000000000=C000F:0000000000000hS]4"}, "image_id": 150, "id": 2418}, {"iscrowd": 0, "category_id": 1, "bbox": [236.0, 380.0, 25.0, 27.0], "area": 453, "segmentation": {"size": [512, 512], "counts": "^\\f34i?3M3M4L3M3M4O00010O00010O00010O0001O0M3M4L3M3M4LQTm3"}, "image_id": 150, "id": 2419}, {"iscrowd": 0, "category_id": 1, "bbox": [345.0, 382.0, 31.0, 45.0], "area": 713, "segmentation": {"size": [512, 512], "counts": "Vl\\53j?3M3N3O0010Oe@FP?:m@IS?8j@KV??0001PA\\Oc>c0ZA@f>`0XABOIe>V100010O000M4M2O110L3N2Hn@_OV?=8M3M4L3MlcS2"}, "image_id": 150, "id": 2420}, {"iscrowd": 0, "category_id": 1, "bbox": [452.0, 382.0, 20.0, 29.0], "area": 326, "segmentation": {"size": [512, 512], "counts": "d\\R72l?3L3M3M4L3N2M4L310O00010OM3M7J2M3M4L3Mncc0"}, "image_id": 150, "id": 2421}, {"iscrowd": 0, "category_id": 1, "bbox": [372.0, 390.0, 24.0, 30.0], "area": 446, "segmentation": {"size": [512, 512], "counts": "m\\j53j?3M3M4L3M3M4L3N2010O00010O00001L3M3M4M2M4L3M3Mhci1"}, "image_id": 150, "id": 2422}, {"iscrowd": 0, "category_id": 1, "bbox": [483.0, 395.0, 29.0, 35.0], "area": 629, "segmentation": {"size": [512, 512], "counts": "Sma73j?4M200O2L3N3L3M3M4M2N210O00010O0010O0010ON2N3L3M3M4M2M3MbC"}, "image_id": 150, "id": 2423}, {"iscrowd": 0, "category_id": 1, "bbox": [60.0, 396.0, 14.0, 15.0], "area": 137, "segmentation": {"size": [512, 512], "counts": "e\\n03j?3M3N3N11O01O01O010O0M3N3Lccj6"}, "image_id": 150, "id": 2424}, {"iscrowd": 0, "category_id": 1, "bbox": [56.0, 397.0, 54.0, 79.0], "area": 2246, "segmentation": {"size": [512, 512], "counts": "c^l02k?3M4M2M3M4L3M3M4L3M3M4L3M3M4L3M3M4L3M3M4L3M3M10O3M3M4L3N2M40O0010O00010N1M3M4L3M3M4L3M3M4L3N2M4L3M3M4L3M3M]cX6"}, "image_id": 150, "id": 2425}, {"iscrowd": 0, "category_id": 1, "bbox": [298.0, 417.0, 57.0, 64.0], "area": 1989, "segmentation": {"size": [512, 512], "counts": "h^e43f?7L5L3M3M4L30010O00010O0001O01ON3L30E\\OZAd0b>@_A`0]>DbA<[>GdA:\\>FaA>^>b00010O01OLeNcA[1Z>8K4N210N1M3O20O00001L3L4M4L3M4L3M3L5L3M3M4L3M3L5L3M3MkR^2"}, "image_id": 150, "id": 2426}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 423.0, 23.0, 45.0], "area": 560, "segmentation": {"size": [512, 512], "counts": "Y=[1c>11O2N2M2O2N2N2M2O2N2M3N1O2N2M3N1O2M3N2N1O2M\\Rd7"}, "image_id": 150, "id": 2427}, {"iscrowd": 0, "category_id": 1, "bbox": [415.0, 437.0, 86.0, 75.0], "area": 3030, "segmentation": {"size": [512, 512], "counts": "_o_62m?1O2N2N2N2M3N1O2N2N2N2N2M2O2N2N2N2M3L3L4M3M3M3M2N2001O2N1O1N3NN2N2O1N1O2O12N1O1O2M2O2O0KcAhN\\>U1hAjNY>R1jAnNV>o0mAROR>k0QBUOP>g0SBYOm=d0RBAm=:]B;d=A_B?a=^ObBc0]=ZOgBe0Z>001O0O1M4L3M3M4L3O1010O00010O0O1M4L3M3M4L3M3N3L3M4L3M3MaQ5"}, "image_id": 150, "id": 2428}, {"iscrowd": 0, "category_id": 1, "bbox": [356.0, 447.0, 17.0, 25.0], "area": 199, "segmentation": {"size": [512, 512], "counts": "d^b51l?3L5M2000010O0L4M4K4NO4L3L4M4K4LSRU2"}, "image_id": 150, "id": 2429}, {"iscrowd": 0, "category_id": 1, "bbox": [206.0, 448.0, 19.0, 24.0], "area": 283, "segmentation": {"size": [512, 512], "counts": "c^W32k?3M3M4L3M3M4N10010O0001O0M3M4L3M3M4LoQ_4"}, "image_id": 150, "id": 2430}, {"iscrowd": 0, "category_id": 1, "bbox": [503.0, 456.0, 9.0, 13.0], "area": 82, "segmentation": {"size": [512, 512], "counts": "_nk73k?2M4M21O01O010O0fA"}, "image_id": 150, "id": 2431}, {"iscrowd": 0, "category_id": 1, "bbox": [274.0, 471.0, 29.0, 31.0], 
"area": 553, "segmentation": {"size": [512, 512], "counts": "^_Y41l?4L3N2M4L3M4L3M310O00010O00010O00010O0001M2M3M4L3M3M4L3NTQX3"}, "image_id": 150, "id": 2432}, {"iscrowd": 0, "category_id": 1, "bbox": [32.0, 474.0, 14.0, 14.0], "area": 119, "segmentation": {"size": [512, 512], "counts": "T_`01l?3M4M2N2010O00010O0N3M2M3MVaX7"}, "image_id": 150, "id": 2433}, {"iscrowd": 0, "category_id": 1, "bbox": [92.0, 479.0, 58.0, 33.0], "area": 1108, "segmentation": {"size": [512, 512], "counts": "o_^11l?3N2M3M3M300001O001O00001O00001O00001O0000N2M3N2M3M3M3N2M3N21O00001O00001O001O00001O00001O001O00001N1M3N3L3M4L3N2Mj`d5"}, "image_id": 150, "id": 2434}, {"iscrowd": 0, "category_id": 1, "bbox": [160.0, 486.0, 44.0, 26.0], "area": 800, "segmentation": {"size": [512, 512], "counts": "n_`22l?2N2N2N2N2N2N2N2N2N2N200001O001O001O1O001O0000O10000O1000000O1000000O1O1O1O_Ol@8T?HPA4Q?LQA1o>OUAMl>0YAMg>1]ai4"}, "image_id": 150, "id": 2435}, {"iscrowd": 0, "category_id": 1, "bbox": [46.0, 500.0, 34.0, 12.0], "area": 237, "segmentation": {"size": [512, 512], "counts": "m_g03j?3M3M3001O00001O00001O00001O00001O00001O00001O00001O0000001O00001OR`g6"}, "image_id": 150, "id": 2436}, {"iscrowd": 0, "category_id": 1, "bbox": [247.0, 506.0, 17.0, 6.0], "area": 59, "segmentation": {"size": [512, 512], "counts": "nok32l?2N200001O001O00001O001O001O0000Q`k3"}, "image_id": 150, "id": 2437}, {"iscrowd": 0, "category_id": 1, "bbox": [237.0, 510.0, 4.0, 2.0], "area": 6, "segmentation": {"size": [512, 512], "counts": "nof32n?01O00QPW4"}, "image_id": 150, "id": 2438}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 95.0, 84.0], "area": 7134, "segmentation": {"size": [512, 512], "counts": "0[2e=00O1000000000000000000O1000000000000000000O100000000000000000000O100005K8H0000000000000000O100000000000000000000O1000000000000000000O1000000000000000000O1000000000000000001NAWB4P>F[BOUbb7"}, "image_id": 151, "id": 2442}, {"iscrowd": 0, "category_id": 1, "bbox": [126.0, 452.0, 29.0, 9.0], "area": 235, "segmentation": {"size": [512, 512], "counts": "U^o18h?000000000000000O1000000000000000O100000000000000000005KgQb5"}, "image_id": 151, "id": 2443}, {"iscrowd": 0, "category_id": 1, "bbox": [504.0, 494.0, 8.0, 18.0], "area": 134, "segmentation": {"size": [512, 512], "counts": "^_l78h?:F00000000000"}, "image_id": 151, "id": 2444}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 499.0, 42.0, 13.0], "area": 441, "segmentation": {"size": [512, 512], "counts": "e_i65k?6J0000000O10000000000000000O10000004L000000000000O1000000000000000000O1000000008HS`a0"}, "image_id": 151, "id": 2445}, {"iscrowd": 0, "category_id": 1, "bbox": [349.0, 502.0, 39.0, 10.0], "area": 265, "segmentation": {"size": [512, 512], "counts": "mo^53h?5O1000000000000000000O1000000002N4L0000O1000000000000000000O10000000000004LR`m1"}, "image_id": 151, "id": 2446}, {"iscrowd": 0, "category_id": 1, "bbox": [2.0, 0.0, 131.0, 15.0], "area": 1691, "segmentation": {"size": [512, 512], "counts": "PP1;e?000000000000000000000000000000001O000000000000000000000000000000000000000000000000000000000000001O00000000000000000000000000000000000000000000000000000000000000001O000000000000000000000000000000000000000000000000000000000000001O000000000000000000000000000000D\\`m5"}, "image_id": 152, "id": 2447}, {"iscrowd": 0, "category_id": 1, "bbox": [136.0, 0.0, 51.0, 15.0], "area": 718, "segmentation": {"size": [512, 512], "counts": "QPT2=b?1000000000000000000000000000000000000000000000000000000000000000000000000000000000001O000000000000MS`R5"}, "image_id": 152, "id": 2448}, {"iscrowd": 0, "category_id": 
1, "bbox": [445.0, 0.0, 67.0, 108.0], "area": 7028, "segmentation": {"size": [512, 512], "counts": "^bn6l0U>o0QOo0@`0000000000000000000000000000001O0000000000000000000000000000000000000000000000000000000000001O0000000000000000000000000000000000"}, "image_id": 152, "id": 2449}, {"iscrowd": 0, "category_id": 1, "bbox": [467.0, 186.0, 9.0, 21.0], "area": 189, "segmentation": {"size": [512, 512], "counts": "jeY7e0[?000000000000000Vja0"}, "image_id": 152, "id": 2450}, {"iscrowd": 0, "category_id": 1, "bbox": [428.0, 239.0, 84.0, 128.0], "area": 10263, "segmentation": {"size": [512, 512], "counts": "hZf6d0a>k0VOj0UOl0VOi0000000000000000000000000000000000000000000001O000001O00000000000000000000000000000000000000000001O00000001O00000000000000000000000000000000000000000001O000F:UOYI"}, "image_id": 152, "id": 2451}, {"iscrowd": 0, "category_id": 1, "bbox": [274.0, 283.0, 53.0, 53.0], "area": 2263, "segmentation": {"size": [512, 512], "counts": "QZY4h0H8000000000000001O00000000000000000001O00000000000000000000000001O00000000000000000001O00000000000000000000000001O00000000000000000001O000000000000000000000001O0000000]Ogc0"}, "image_id": 152, "id": 2454}, {"iscrowd": 0, "category_id": 1, "bbox": [63.0, 419.0, 57.0, 93.0], "area": 4418, "segmentation": {"size": [512, 512], "counts": "Tmo0V1j>f1ZN00000000000000000000000000000000000000000O1000000000000000000000F:0000000000000000:F0000000R1nNa1_N000M3000000000Z`S6"}, "image_id": 152, "id": 2455}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 468.0, 19.0, 44.0], "area": 605, "segmentation": {"size": [512, 512], "counts": "d>\\1d>000000000000000000000Q1oN000000000000[Pf7"}, "image_id": 152, "id": 2456}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 31.0, 45.0], "area": 879, "segmentation": {"size": [512, 512], "counts": "0]1c>000000000000_Oa00000000000000000000004G5000000000000000000000l_`7"}, "image_id": 153, "id": 2457}, {"iscrowd": 0, "category_id": 1, "bbox": [64.0, 0.0, 70.0, 91.0], "area": 4615, "segmentation": {"size": [512, 512], "counts": "PPP1`0`?R2nM000000000000O100000000000000000000000000000000000000000000000?Ab0HF00000000000000000000O10000000000000000000O10H800000000000000O100000R1nNnml5"}, "image_id": 153, "id": 2458}, {"iscrowd": 0, "category_id": 1, "bbox": [106.0, 0.0, 9.0, 13.0], "area": 101, "segmentation": {"size": [512, 512], "counts": "PPe16j?7I000000000009Gg_V6"}, "image_id": 153, "id": 2459}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 75.0, 11.0, 45.0], "area": 484, "segmentation": {"size": [512, 512], "counts": "[2]1c>00000000000000000;EZ]j7"}, "image_id": 153, "id": 2460}, {"iscrowd": 0, "category_id": 1, "bbox": [177.0, 373.0, 269.0, 139.0], "area": 27372, "segmentation": {"size": [512, 512], "counts": 
"gmh25j?:G9G9G9G:F9G9G4L0O10000000000000000O10000000000000000O1000000000000000000O10000000000000000O10000000000000000O10000000000000000O10000000000000000O1000000000000000000O10000000000000000O10000000000000000O10000000000000000O10000000000000000O1000000000000000000O10000000000000000O10000000000000000O100jNYMSEg2m:cMiD]2W;mM_DS2`;XNVDh1j;W10000000000000000O1000000GRL`Dn3`;90000000000000000000000000000000000000000000000000000000000000000000000001O00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009fLUDm1Ug=CXB?f=CWB?g=DWB>g=DWB>g=CWB`0f=CXB>g=DWB>g=DVB?h=BWB`0f=CXB>g=DVB?h=n0M3N2N1N3N2N2N1N3N2N2M2O2N2M3N1O2M3N2N2N1N3N2O10O1000O10O1000O1000O10O1000O10M3N2N1N3N2N2M2O2N2M3N2N1O2M3N2N1N3N2N2M2O2N2N2M2OO10O10O10O10O10O1000O10O10O10O11N3N2N2M3N1O2N2M3N1O2M3N2N1N3N2N2M2O2N2N2M2O2N2M3N101O1000O10O1000O1000O10O1000O10O1000O10O100000O10O1000O10O1000O10O1000O10O1000N2M2O2N2N2M2O2N2M3N1O2M3N2N1O2M3N2N1N3N2N2M2O2N2M3N1O2N2M3N2N1N3N2N2M2O2N2M3N1O2N2M3N1O2M3N2N1N3N2N2N1N3N2N2MdZc2"}, "image_id": 154, "id": 2464}, {"iscrowd": 0, "category_id": 1, "bbox": [443.0, 124.0, 69.0, 74.0], "area": 2883, "segmentation": {"size": [512, 512], "counts": "Wem61n?2O2M2^@LU?6i@LU?6i@LU?6j@KT?c0N1N2N2N2N3N1N2N2N2N10OEeNRB[1o=fNoAZ1Q>iNlAW1T>kNjAU1V>mNhAT1W>:1O000001O01O0000003NO01O000001O0001O00010O3M2N2N2N201O01O0001O0001O0001O01O0001O01OO1N2NYK"}, "image_id": 156, "id": 2465}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 353.0, 408.0], "area": 86204, "segmentation": {"size": [512, 512], "counts": "Z1[9d6O1000O0100000O010000O0100000O0100000O01000O0100000O0100000O01000O101O1O1N2O1O1N2O1O1O1N2O1O1O1N2O1O1N2O1O1O1N2O1O1O1N2O1O1N2O1O1O1N2O1O1O1N2O1O1N2O1O1O1N2O1O1N2O1OWJeHo2Z7QMhHn2V7SMkHm2T7RMoHm2P7SMRIl2m6TMUIk2i6VMYIi2f6VM\\Ij2c6iLQImM>Y5_6kLUIjM>Z5\\6lLXIhM>[5Y6lL[IgM>]5V6lL^IeM>^5R6nL]JQ3b5oL`JP3_5oLdJP3[5PMfJP3X5QMjJn2U5QMnJn2Q5RMQKm2m4TMTKl2k4TMWKk2h4TM[Kk2d4UM^Kj2`4WMbKh2]4XMdKh2[4WMhKh2V4YMlKf2S4ZMoKe2P4ZMSLe2l3[MULe2i3\\MYLc2f3]M\\Lb2c3]M`Lb2_3^MbLb2\\3_MfL`2Y3`MiL_2V3`MmL_2Q3bMQM]2n2cMSM]2l2bMWM]2h2cMZM\\2d2eM^MZ2a2fMaMY2^2fMdMZ2[2fMgMY2W2hMkMW2T2hMoMW2P2iMQNW2m1jMUNU2j1kMXNT2g1kM\\NT2c1lM_NS2_1nMbNR2]1nMeNQ2Z1nMiNQ2U1PNmNo1R1QNoNo1P1PNSOo1l0QNVOn1h0SNZOl1e0TN]Ok1b0TN@l1?TNCk1;VNGi18WNJh15WNNh10YN1g1NYN4f1KYN8f1GZN;e1C\\N>d1A\\Na0c1^O\\Ne0c1ZO]Nh0b1VO_Nl0`1SO`Nn0`1QO_NR1`1lNaNV1^1iNbNY1]1fNbN]1]1bNcN_1]1_NdNc1[1\\NeNf1Z1ZNdNi1[1XNcNi1]1XNaNj1^1WN_Nl1`1TN_Nn1`1SN^No1a1RN]No1c1RNZNQ2e1PNYNR2f1nMYNT2f1mMXNT2h1mMUNV2j1kMTNW2k1jMSNX2l1hMRN[2m1fMQN[2o1cLfLeMY1j5P2`LiLfMU1l5Q2]LkLgMR1o5R2YLnLhMn0Q6S2ULRMjMi0Q6V2TLSMkMe0Q6Y2SLSMmMa0Q6]2PLUMnM=Q6`2PLUMoM9Q6c2oKVMPN4R6g2mKWMQN0R6j2kKXMSNMQ6m2kKXMTNIQ6P3jKYMUNDR6T3gK[MWN_OQ6X3gKZMYN\\OP6[3fK[MYNXOR6^3dK\\MZNTOR6a3bK^M\\NoNQ6e3bK^M]NkNQ6h3aK^M_NgNQ6l3_K_Mb5b2\\JaMa5a2^JaM`5`2_JbM_5_2_JcM_5_2`JcM^5^2aJdM]5]2bJeM\\5\\2bJfM\\5\\2cJfM[5[2dJgMZ5Z2dJiMY5Y2fJiMX5X2gJiMX5X2gJjMW5W2gJlMV5V2iJlMU5U2jJmMT5T2jJnMU5S2jJoMS5S2lJoMR5R2mJPNQ5Q2mJQNQ5Q2nJQNP5P2oJRNo4o1PKRNo4o1oJRNP5P2oJPNQ5Q2nJnMS5S2kJnMU5S2jJmMU5U2jJkMV5V2iJiMX5X2fJiMY5Y2fJgMZ5Z2eJfM[5[2cJeM^5\\2aJdM^5^2aJbM_5_2`J`Ma5a2]J`Mb5b2]J^Mc5c2\\J]Md5d2[J[Mf5f2XJ[Mg5g2XJYMh5h2WJWMj5j2TJWMm5i2RJWMP6h2oIXMS6g2lIYMV6f2hI]MX6b2gI`MX6`2gIbMY6]2eIeM\\6Z2cIhM]6W2bIkM^6T2aInM]6S2aIoM]6S2bIoM\\6R2cInM]6S2aImM_6U2`IkM`6V2_IjMa6l5N2M3N1O2N2M3N1O2UJRF`5Q:^JQF`5Q:^JQF`5X:O2M3N2N1N010000O0100000O010001N3N2N2N1N3N2N2N2M2O2N2M3N1O2N2M3N2N1O2M3N2N2M2O2N2N2M2O2N2M3PM\\Ca2f<\\M]Cb2d<]M]Cb2e<\\M]Cm1J`Nl=ZAEc>>ZAEd>=ZADe>=ZAEc>>[ADc>>ZAEd>k0O10O1000O10O12M3N1O2M3N2N1N3N2N2N1N3N2N2M2O2N2M3lBbM[<0
^C`25bM[:[AHc>:[AHc>9[AJd>7ZAJf>7XAKh>5VAMj>2TA1l>b00O0100000O10O100N2M2O2N2N2TOeNoB^1ob0^A[Oe>a0_A\\Od>a0?M3M3M3M3M3MS`g5"}, "image_id": 158, "id": 2472}, {"iscrowd": 0, "category_id": 1, "bbox": [332.0, 0.0, 180.0, 135.0], "area": 15656, "segmentation": {"size": [512, 512], "counts": "SPV5f0Z?0000000000000M3000000000000000000000000000000000000000000000000000000000000000000\\Bd0d:\\O\\Ed0d:\\O\\Ed0d:\\O\\Ed0d:\\O\\Ed0d:\\O\\Ed0d:\\O\\Ed0d:\\O\\Ed0d:\\O\\Ed0d:\\O\\Ed0d:\\O\\Ed0d:\\O\\Ed0d:\\O\\Ed0d:\\O\\Ed0d:\\O\\Ed0d:\\O\\Ed0d:\\O\\Ed0d:\\O\\Ec0e:]O[Ec0e:\\O\\Ed0d:\\O\\Ed0d:\\O\\Ed0d:\\O\\Ed0d:\\O\\Ed0d:\\O\\Ed0d:\\O\\Ed0d:\\O\\Ed0d:\\O\\Ed0d:\\O\\Ed0d:\\O\\Ed0d:\\O\\Ed0d:\\O\\Ed0d:\\O\\Ed0d:\\O\\Ed0d:\\O\\Ed0d:\\O\\Ed0d:\\O\\Ed0d:\\O\\Ed0d:\\O\\Ed0d:\\O\\Ed0d:\\O\\Ed0d:\\O\\Ed0d:\\O\\Ed0d:\\O\\Ed0d:2fDNZ;\\200000000000000000000000000000000000?A0000000000O10000000000000000000000000000000000C=00000000000000000000000000000000000000000000O1000000000000000000000000000000000000000000000000eMZDV180004L:F9G:Ecmk6"}, "image_id": 159, "id": 2480}, {"iscrowd": 0, "category_id": 1, "bbox": [291.0, 0.0, 132.0, 185.0], "area": 13819, "segmentation": {"size": [512, 512], "counts": "gda41l?3[@MZ?6c@NY?>M3M4L3M3M4L3M3M4L3M3M4O00010O0N2M4L30001O01OFQNcBP2Y=TNfBl1W=WNfBl1W=WNfBm1V=`0M3M4L3N2M4L3M3M4L3M3M4M21O01O0CTLdDn3Y;VLcDn3Y;ULdDn3Y;>M4L3M3M4L3M3M4L3N201O00001O00001O00001O00001O00001O0000O1M3M3M3M3M3M3M3M4L3M3M4M2M3M4L3M3lKXDm3o;M3M4L3M3M4L3M3M4L3M3M4L3M3M1O3M4L3M3M4L3M3M4L3M3M4L3M3M4lNVAm0Q?L3M3M4L3M3M4L3M__\\1"}, "image_id": 159, "id": 2481}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 198.0, 36.0, 87.0], "area": 2798, "segmentation": {"size": [512, 512], "counts": "X6e2[=000000000000000O10O100000000000000000000000O10O10000000009G>B>B>B>B>B[g]7"}, "image_id": 159, "id": 2482}, {"iscrowd": 0, "category_id": 1, "bbox": [157.0, 328.0, 100.0, 113.0], "area": 7053, "segmentation": {"size": [512, 512], "counts": "il^23j?4L3M3M4L3M3L5L3M3M4L3M3M4L3M3M4L3M3M4L3M3M4L3M3M4L3N2010O00010O00010O00010O00010O00010O00010O00010O0010O00010O00010O00010O00010O00010O00010O00010OlNoBYOQ=d0RC\\Oo<`0UC@j<=YCCg<:\\CFe<6^CKa<2cCM]<0fC0[01O010O01OO2M2N3M2Bk@NY?Oj@NX?0j@NY?Oo^i4"}, "image_id": 162, "id": 2496}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 28.0, 55.0, 63.0], "area": 1772, "segmentation": {"size": [512, 512], "counts": "T28f?3N1O2M3N1O2M3N2N1N3N2M3N1O2M3N2N1N3N2N2M2O1O0O10O10O010000O3N1O2M3N2N1N3N2N2M2O2N2M3N1O2M3N2N1N3N2N2M2O2N2Md^T7"}, "image_id": 162, "id": 2497}, {"iscrowd": 0, "category_id": 1, "bbox": [121.0, 32.0, 20.0, 20.0], "area": 247, "segmentation": {"size": [512, 512], "counts": "[al13k?3M2N3L3N201O010O010O010O00O2M2M4M2N3Ml^i5"}, "image_id": 162, "id": 2498}, {"iscrowd": 0, "category_id": 1, "bbox": [112.0, 47.0, 22.0, 19.0], "area": 245, "segmentation": {"size": [512, 512], "counts": "kQh11m?2M3N3L3N3O01O010O01O01O010O01O01N1N3L3N2M^nl5"}, "image_id": 162, "id": 2499}, {"iscrowd": 0, "category_id": 1, "bbox": [328.0, 57.0, 69.0, 59.0], "area": 2297, "segmentation": {"size": [512, 512], "counts": "iRT52k?3M3M4M2M4L3M3M4L3N210L3N201O01O01O01O01O010O00010O00010O01O01O01OXAlNd>Y1O01O01O01OM4O000010O00010O00010O01OVOaA3`>IcA7]>FgA:X>CkA=V>@lA`0T>]OPBc0P>YOSBg0c>010O00N3L3N2M4L3M3MU]i1"}, "image_id": 162, "id": 2500}, {"iscrowd": 0, "category_id": 1, "bbox": [227.0, 67.0, 14.0, 21.0], "area": 153, "segmentation": {"size": [512, 512], "counts": "bba33j?4M2M4M2M3N3OM3N3L3N2M4L3Nm]W4"}, "image_id": 162, "id": 2501}, {"iscrowd": 0, "category_id": 1, "bbox": [120.0, 71.0, 67.0, 49.0], "area": 1802, 
"segmentation": {"size": [512, 512], "counts": "RSl11m?3M2M4M2N2M4M2N3L3O20O0010O0010O010O0010O0010O0010O010O0010O0010O010O0010ON2N3M2M4M2N3L3O1010O010O0O2M2N2M4M2N3L3N3M2M3N3L3N3M2M4M2Nb]R5"}, "image_id": 162, "id": 2502}, {"iscrowd": 0, "category_id": 1, "bbox": [405.0, 82.0, 69.0, 60.0], "area": 2259, "segmentation": {"size": [512, 512], "counts": "dcZ61m?3L3M3M4L3M3M4M2M3O2M2M4M2O110O00010O00010O00010O00010O00010O010O00010O000O2O00010ZAhNb>[1010O00010O00010O000YO^A1b>KaA5_>HdA8]>DgAAkA?U>^OnAc0R>ZOQBe0e>010O00M4L3M3N3L3M3MZlb0"}, "image_id": 162, "id": 2503}, {"iscrowd": 0, "category_id": 1, "bbox": [86.0, 92.0, 23.0, 21.0], "area": 288, "segmentation": {"size": [512, 512], "counts": "XS[12k?4M2M4M2N210O0010O0010O0010O010O001L3N2M4M2No\\Y6"}, "image_id": 162, "id": 2504}, {"iscrowd": 0, "category_id": 1, "bbox": [111.0, 119.0, 49.0, 48.0], "area": 1286, "segmentation": {"size": [512, 512], "counts": "edg11m?3M2M4M2M3N3L3N3M2M3N3L3O2O00010O010O0010O0010O010O00010O010O0001YOUA8j>FXA:i>B[A>d>@^A`0b>]ObAb0n>1O010O01L3N2N3L3N3L3N^k_5"}, "image_id": 162, "id": 2505}, {"iscrowd": 0, "category_id": 1, "bbox": [316.0, 120.0, 73.0, 44.0], "area": 2185, "segmentation": {"size": [512, 512], "counts": "eTn42k?3M4M2M3M4L3M3N3L3M3O20O010O0010O00010O01O00001O00000001O000001O00000000000010O2N1O00000000N3O0000000000010O000000000000010O0000000000001O0[OTA7k>B\\A>S?Gf[m1"}, "image_id": 162, "id": 2506}, {"iscrowd": 0, "category_id": 1, "bbox": [471.0, 143.0, 41.0, 54.0], "area": 1518, "segmentation": {"size": [512, 512], "counts": "]e[71l?3`@MP?7l@LQ?7l@MP?6m@MP?c0N30O00O2L3M3M4L3O20O00010O00010ON2010O00010O00010L3M3M4L3M3M4L3M3M4L3M]K"}, "image_id": 162, "id": 2507}, {"iscrowd": 0, "category_id": 1, "bbox": [76.0, 150.0, 30.0, 31.0], "area": 534, "segmentation": {"size": [512, 512], "counts": "ZUV12l?2N3M2M4M2N2N3L3N30O010O01O01O010O010O010M2N2N3M2M4M2N3M2MTkZ6"}, "image_id": 162, "id": 2508}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 152.0, 23.0, 66.0], "area": 1012, "segmentation": {"size": [512, 512], "counts": "h4n1S>O0010O010O00010O01UOnAGQ>7QBHQ>4RBIQ>5RBHP>8PBFS>9nACU>;mACV>:mACU>;mABV>8YAFd00V>6m0N3LQ[d7"}, "image_id": 162, "id": 2509}, {"iscrowd": 0, "category_id": 1, "bbox": [379.0, 160.0, 43.0, 27.0], "area": 610, "segmentation": {"size": [512, 512], "counts": "]em51l?3N3O000010O0001O0M3M4L301O01O01O01O01O01O010O01O01O01O01O01O01O010O01O01M2N2M4L3M3Nhj\\1"}, "image_id": 162, "id": 2510}, {"iscrowd": 0, "category_id": 1, "bbox": [350.0, 168.0, 27.0, 27.0], "area": 436, "segmentation": {"size": [512, 512], "counts": "iU_52l?3L3N3L3N2M4M201O01O01O010O01O01O010ON3M2M3N3L3N3L3NcZS2"}, "image_id": 162, "id": 2511}, {"iscrowd": 0, "category_id": 1, "bbox": [13.0, 170.0, 71.0, 80.0], "area": 3472, "segmentation": {"size": [512, 512], "counts": "Pg61m?2M4M2M3N3L3N3L3N2M4M4K3N3L3N3L3N2010O01O01O010O01O01O01O0M4M2M3N3L3N3N100010O010O00010O010O00010O010O00010O010SOYB_Og=?[BAe=<_BDa=9aBG_=6dBJ]=3fBJ\\=3gBJ\\=4gBI\\=3gBJ\\=4fBJ]=2gBJ\\=4fBJ\\=3hBI\\=4Y1M\\je6"}, "image_id": 162, "id": 2512}, {"iscrowd": 0, "category_id": 1, "bbox": [295.0, 171.0, 54.0, 52.0], "area": 1729, "segmentation": {"size": [512, 512], "counts": "_fc43j?3M4L3M3M4L3M3O2M2M3N30O00010O00010O00010O0001O0M3N3L3M310O00010O01O01O01O01O01L3M3M4L3M3M4M2M3M4L3M3M4L3M_Za2"}, "image_id": 162, "id": 2513}, {"iscrowd": 0, "category_id": 1, "bbox": [437.0, 186.0, 28.0, 37.0], "area": 551, "segmentation": {"size": [512, 512], "counts": "hfj61m?2M4M2M3N3L3N3L3N2N3L3N3N10010ON3L3N2M4M2M4M2M3N3L3N3LSZg0"}, "image_id": 162, "id": 2514}, {"iscrowd": 
0, "category_id": 1, "bbox": [380.0, 191.0, 58.0, 71.0], "area": 2217, "segmentation": {"size": [512, 512], "counts": "PWn53k?2N2M4M2N3M200010O010O01O01O010OSAAY>`0dACX>`0fABX>a0dACX>`0fABX>`0fACW>T1M3N3M2M4M20010O0010O010O0010ON2M4M201O0010O]OlAZOT>d0oA\\OQ>`0RBPOL;R>c0YB^Og=>\\BBd=<_BAc==_BAd=;`BAc==_BAd=2l@55Km>=PAGl>h0M4M2M4M2N3L3O110O0010O0010O0010O0ON02O3M2M4M2O20ON2M4M2M4M2N2M4M2M4B_@9e?M4M2NlgT2"}, "image_id": 162, "id": 2522}, {"iscrowd": 0, "category_id": 1, "bbox": [447.0, 269.0, 53.0, 29.0], "area": 427, "segmentation": {"size": [512, 512], "counts": "hho62k?4L3O1010O00N3L30001O01O01O01O000O2H_@Oa?Nb@3]?Jf@6c?O01O01O0N201O00010O00010O00010O00010O0001U@Ng?500010O00010O00010N1MPg5"}, "image_id": 162, "id": 2523}, {"iscrowd": 0, "category_id": 1, "bbox": [232.0, 289.0, 59.0, 46.0], "area": 1624, "segmentation": {"size": [512, 512], "counts": "aYd33k?2M4L3N3L3M301o@[Oe>e0YA]Og>c0VA@k>`0QADn>g0010O0010O00010O0010O00010O0010O0010O0N2010O010O00010O0O1O2O0010O0010O00010O00O2L3N3Dk@HX?6j@GY?6k@GX?5;N2McV^3"}, "image_id": 162, "id": 2524}, {"iscrowd": 0, "category_id": 1, "bbox": [376.0, 290.0, 68.0, 46.0], "area": 2475, "segmentation": {"size": [512, 512], "counts": "XZl54d?8I7I7I7L410O000000000010O0000000000010O000000000010O00000000MO4K6O000001O000001O000001O000001O000001M2M30000000010O000000000L5H7J6I7IPgQ1"}, "image_id": 162, "id": 2525}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 299.0, 23.0, 53.0], "area": 711, "segmentation": {"size": [512, 512], "counts": "[9e1\\>O1N2N2O10000N1N3N2mNZAi0h>UOZAi0h>UOZAi0P?N2N1N3N2@c@:_?Dc@:c?N2N2N2MgUd7"}, "image_id": 162, "id": 2526}, {"iscrowd": 0, "category_id": 1, "bbox": [284.0, 318.0, 69.0, 82.0], "area": 2958, "segmentation": {"size": [512, 512], "counts": "k[^41m?3M2M4M2N3L3N2N3L3O20O010O0001M2\\OUOQBn0l=UOPBn0n=TOPBo0l=UOQBm0m=UOPBn0m=UOQBn0l=d0N6I30001O010O010O01O01O010O010O01L3jN]BMe=2\\BKh=4YBIi=8VBFm=9TBCo=>PB@S>?mA_OU>b0kAZOX>f0hAXOZ>h0gAUOY>n0fAoN\\>R1eAkN]>V17O001L3N2N3L3N3M2M4M2N2M4M2N3LhU_2"}, "image_id": 162, "id": 2527}, {"iscrowd": 0, "category_id": 1, "bbox": [213.0, 332.0, 64.0, 46.0], "area": 1585, "segmentation": {"size": [512, 512], "counts": "kjZ33k?3L3N2M4N101N1N2O2O0n@\\Oh>e0TA^Om>a0QABn>h010O010O00010O010O00010O010O00010O010OO1M40O01O01O010O01O01O010O01O010O01O010O01VOm@e0W?10O001L3N2M4M2M4M2MmTe3"}, "image_id": 162, "id": 2528}, {"iscrowd": 0, "category_id": 1, "bbox": [24.0, 335.0, 79.0, 73.0], "area": 3087, "segmentation": {"size": [512, 512], "counts": "j[<2l?2JM^@5`?L_@6_?L_@6^?8N1O2M3N2N1O2M3N2N2M2O2O100O01NO1000O101O2M3O10O0100000VAoNc>R1[APOe>o0ZASOf>T10O10O100O0O2M3N2N2M2H^NQBd1m=]NRBe1k=^NSBd1k=80O0100000O2O1O2M3N2N2O0]NjA]1V>aNlA^1[>100000O010M3N2N2ROZAJ2I[AI2;f>JZAI2;f>JcA4_>IcA5_>JcA4_>JcA4V?NST\\6"}, "image_id": 162, "id": 2529}, {"iscrowd": 0, "category_id": 1, "bbox": [425.0, 347.0, 15.0, 13.0], "area": 105, "segmentation": {"size": [512, 512], "counts": "Rkd61n?2N2O2M2N1O1O01O00101N2N3M2NQeS1"}, "image_id": 162, "id": 2530}, {"iscrowd": 0, "category_id": 1, "bbox": [468.0, 348.0, 44.0, 54.0], "area": 1440, "segmentation": {"size": [512, 512], "counts": "k[Z73j?3N2M4M2M4L3010O00010O0101N0010O010O01O01O01K4J7J5J7O001O001O010O001O0010O0001O0001O01PE"}, "image_id": 162, "id": 2531}, {"iscrowd": 0, "category_id": 1, "bbox": [375.0, 377.0, 72.0, 66.0], "area": 2330, "segmentation": {"size": [512, 512], "counts": "klk53l?2N2O1N2N2N2N3M2N2N2O1N2N2N3M2N2N2N2O1N2N3M2N2N2N2N2OO000000002N2O2M2N2N1O0000000N^AhNa>X130001O0001O00000001O2N2N2O1N3M2N2N2N2N2N2O2M2N2N2N2N2N2N3NaSP1"}, "image_id": 162, "id": 2532}, 
{"iscrowd": 0, "category_id": 1, "bbox": [194.0, 379.0, 38.0, 44.0], "area": 1064, "segmentation": {"size": [512, 512], "counts": "_\\Q33j?3`@KS?7k@LQ?8k@KS?7k@KS?c0L3N3M200010O010O010O00010O01N1N2N30O01OO2L3N3FTA\\On>b0UAZOo>c08M3N3L3N3M2M3NQd[4"}, "image_id": 162, "id": 2533}, {"iscrowd": 0, "category_id": 1, "bbox": [230.0, 391.0, 47.0, 43.0], "area": 1178, "segmentation": {"size": [512, 512], "counts": "X]c31l?3N3M2M3N3L3N3L3N2M4M2O2O0M3N3O001O01O010O01O01N1N3L3N2M4M201O01O010O01O01O01M2M4M2M4M2M3N3L`Se3"}, "image_id": 162, "id": 2534}, {"iscrowd": 0, "category_id": 1, "bbox": [7.0, 413.0, 54.0, 74.0], "area": 2479, "segmentation": {"size": [512, 512], "counts": "bn33k?3L3N2M4M2N3L3N2M4M2M4M2M3N3L3N3M2M3N3L3N3M201O01O010O01O01O01O010O01O01O010O01O01O01TORBEm=8VBHk=[ORBc050k=[ORBb07Oj=\\ORBc06OR>MRBOQ>OQBOR>MQB0R>NQBOQ>NRBOR>NQBOa`Q7"}, "image_id": 162, "id": 2535}, {"iscrowd": 0, "category_id": 1, "bbox": [270.0, 413.0, 47.0, 39.0], "area": 1105, "segmentation": {"size": [512, 512], "counts": "g]W43k?2N2M4M2N3M2M4M2N2N3L3O2O010O010O00010O010N1N3OM3N3M2N3L3N3O01O0O1N3O010O00O2M2M4M2N3L3N3M2NoRQ3"}, "image_id": 162, "id": 2536}, {"iscrowd": 0, "category_id": 1, "bbox": [468.0, 426.0, 44.0, 50.0], "area": 1382, "segmentation": {"size": [512, 512], "counts": "c^Z74c?9H8J601O000001O0001O0000000000001O01O0000000001O000O1O1N2O1O2N1O1N2O1O1O1O1O0O10O1000gB"}, "image_id": 162, "id": 2537}, {"iscrowd": 0, "category_id": 1, "bbox": [428.0, 427.0, 36.0, 45.0], "area": 897, "segmentation": {"size": [512, 512], "counts": "_^f61m?3L3N2M4M2N3L3N3L3N2M4M2M4M2N201O0010O001L3N2M4M2N3L3N2N3L3N3L3N2N3L3N`bg0"}, "image_id": 162, "id": 2538}, {"iscrowd": 0, "category_id": 1, "bbox": [310.0, 430.0, 41.0, 46.0], "area": 1213, "segmentation": {"size": [512, 512], "counts": "h]k43k?2M3b@Ko>8n@Kn>8PAJ3Kc>;VAN4If>f0WA^Oi>l001O01OM4O0010O00010O010O00010OO2L31M2N3L3N3L3N2M4M2N3L3N2M4M2N3L3N]R`2"}, "image_id": 162, "id": 2539}, {"iscrowd": 0, "category_id": 1, "bbox": [348.0, 435.0, 20.0, 24.0], "area": 278, "segmentation": {"size": [512, 512], "counts": "U^^52l?2N3L3N2M4M2N3M20010O00O2M2M4M2N2M4M2M\\bW2"}, "image_id": 162, "id": 2540}, {"iscrowd": 0, "category_id": 1, "bbox": [384.0, 438.0, 18.0, 13.0], "area": 127, "segmentation": {"size": [512, 512], "counts": "l]P61m?3M2N2010O010O010O010O010O00001M2MWbf1"}, "image_id": 162, "id": 2541}, {"iscrowd": 0, "category_id": 1, "bbox": [182.0, 444.0, 52.0, 42.0], "area": 1351, "segmentation": {"size": [512, 512], "counts": "c^k22k?4M2M4M2M3N3L3N3L30001O010O010O01O01O010O010O01ON3M201O00010O010O0010O0010O010OO1N3L3N3M2M3N3L3N3M2M4MjaZ4"}, "image_id": 162, "id": 2542}, {"iscrowd": 0, "category_id": 1, "bbox": [248.0, 471.0, 67.0, 41.0], "area": 1688, "segmentation": {"size": [512, 512], "counts": "^_l32k?3N3L3N2M4M2N3M200010O010O00010O010O01O01O010O010O00010O010ON2M4M2M4M2O20O0010O0001O001O00001O001O001O00001_ORANn>OVA0k>MWA3i>KYA5g>H]A7d>F^A:U?O00001N1M4M2M]PR3"}, "image_id": 162, "id": 2543}, {"iscrowd": 0, "category_id": 1, "bbox": [233.0, 472.0, 21.0, 24.0], "area": 291, "segmentation": {"size": [512, 512], "counts": "Zod31l?3N3L3N3L3N2M4O01O01O010O01L3N2M4M2M4M2MVaP4"}, "image_id": 162, "id": 2544}, {"iscrowd": 0, "category_id": 1, "bbox": [177.0, 482.0, 19.0, 15.0], "area": 191, "segmentation": {"size": [512, 512], "counts": "Yoh23j?3L5O000010O00010O0001O01O00010O0M3Ln`m4"}, "image_id": 162, "id": 2545}, {"iscrowd": 0, "category_id": 1, "bbox": [456.0, 482.0, 35.0, 28.0], "area": 553, "segmentation": {"size": [512, 512], "counts": 
"c_T72l?2N3L3N3M2M4M2O1010O010O010O00010O010O010O0N3L3N2N3O010O010O000N3M2MfP:"}, "image_id": 162, "id": 2546}, {"iscrowd": 0, "category_id": 1, "bbox": [343.0, 492.0, 37.0, 20.0], "area": 522, "segmentation": {"size": [512, 512], "counts": "ho[52l?2M40O0001Z@Ib?:001O00M3N2N21O00001O0000O1N2N200001O00001O001O00001O00001Fa@0`?Mc@1noQ2"}, "image_id": 162, "id": 2547}, {"iscrowd": 0, "category_id": 1, "bbox": [394.0, 500.0, 35.0, 12.0], "area": 224, "segmentation": {"size": [512, 512], "counts": "n_U62l?2N2M3N2O1001O00001O001O00001O001O00001O001O00001O001O00001O001O0000QPY1"}, "image_id": 162, "id": 2548}, {"iscrowd": 0, "category_id": 1, "bbox": [235.0, 505.0, 13.0, 7.0], "area": 55, "segmentation": {"size": [512, 512], "counts": "ooe31m?2N2N21O001O00001O001O0NU`S4"}, "image_id": 162, "id": 2549}, {"iscrowd": 0, "category_id": 1, "bbox": [208.0, 509.0, 8.0, 3.0], "area": 17, "segmentation": {"size": [512, 512], "counts": "m_X33m?0001O00001O00Q`c4"}, "image_id": 162, "id": 2550}, {"iscrowd": 0, "category_id": 1, "bbox": [224.0, 509.0, 8.0, 3.0], "area": 15, "segmentation": {"size": [512, 512], "counts": "n_`32m?1001O001O0000Q`[4"}, "image_id": 162, "id": 2551}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 51.0, 43.0], "area": 1575, "segmentation": {"size": [512, 512], "counts": "0m0S?1O00001O010O01ON3O001O00001O001O01O010O01O01O010oNUAk0Q?0M2M3N3L301O00001O01O0M4M2M3N3L3M3N3L3N3L3N2M4M2Mm_V7"}, "image_id": 163, "id": 2552}, {"iscrowd": 0, "category_id": 1, "bbox": [354.0, 0.0, 5.0, 2.0], "area": 7, "segmentation": {"size": [512, 512], "counts": "PPa51o?00001O0P`\\2"}, "image_id": 163, "id": 2553}, {"iscrowd": 0, "category_id": 1, "bbox": [409.0, 0.0, 28.0, 11.0], "area": 155, "segmentation": {"size": [512, 512], "counts": "P`\\61o?00001O001O001O00001O001O001O001O00001O001O001ON2M3N2NR`U1"}, "image_id": 163, "id": 2554}, {"iscrowd": 0, "category_id": 1, "bbox": [436.0, 0.0, 35.0, 23.0], "area": 538, "segmentation": {"size": [512, 512], "counts": "[Pj61m?3L3N2M4N101O00001O00001O001O00001O001O00001O001O000010M2M3N3L3N3L3No_d0"}, "image_id": 163, "id": 2555}, {"iscrowd": 0, "category_id": 1, "bbox": [333.0, 5.0, 13.0, 15.0], "area": 116, "segmentation": {"size": [512, 512], "counts": "^`V53j?3N3M2N201O010ON2M4M2N3Mjob2"}, "image_id": 163, "id": 2556}, {"iscrowd": 0, "category_id": 1, "bbox": [358.0, 6.0, 72.0, 56.0], "area": 1882, "segmentation": {"size": [512, 512], "counts": "SQc51l?3N2N3L3N3L300010O010O01O01O010OO2L3N2M4M201O010M2N2M4N110O00010O010O0010O0010O0010O0010O010OZOZA2f>K]A6c>G`A8`>FbA:_>BeA>Z>@hA`0X>]OkAd0i>O0010O010O0010O0010OO1N3M2M4M2M4M2NfnX1"}, "image_id": 163, "id": 2557}, {"iscrowd": 0, "category_id": 1, "bbox": [301.0, 8.0, 35.0, 22.0], "area": 326, "segmentation": {"size": [512, 512], "counts": "_`f42l?2N2M4O0010O010O010O00010O010G[@5h?1O01O010OO2M201O010O01O01O010OO2M2N3L_og2"}, "image_id": 163, "id": 2558}, {"iscrowd": 0, "category_id": 1, "bbox": [49.0, 14.0, 28.0, 30.0], "area": 532, "segmentation": {"size": [512, 512], "counts": "Sah02k?4L3N3L3M3N3L3N201O010O00010O01O01O01O0N2M4L3N3L3M3N3L^_i6"}, "image_id": 163, "id": 2559}, {"iscrowd": 0, "category_id": 1, "bbox": [79.0, 27.0, 72.0, 51.0], "area": 1967, "segmentation": {"size": [512, 512], "counts": "faW12k?3N3L3M3N3L3M4M2M3N3N110O01O01O01O01O010O01O01O01O01O010O01O01O010O01O01O01O01O010O01O01O010O01O01O01O01O010O01O01OO2L3N2M4M201O0Hd@M[?1h@NY?Nj@3U?Km@5`?O0M4MS^d5"}, "image_id": 163, "id": 2560}, {"iscrowd": 0, "category_id": 1, "bbox": [19.0, 56.0, 29.0, 28.0], "area": 496, "segmentation": {"size": [512, 
512], "counts": "Yb93j?4L3M3M4L3N3O01O01O010O00010O0010O00010N1N2M4M2M3N3M2M3NSnW7"}, "image_id": 163, "id": 2561}, {"iscrowd": 0, "category_id": 1, "bbox": [178.0, 65.0, 69.0, 53.0], "area": 1942, "segmentation": {"size": [512, 512], "counts": "mRi23j?3M4M2M3M4N100010O010O00010O00N3h@ZOQ?l0N2M4L3N2O2O01O01O01N1010O00010O00010O0010mNZAj0f>SO^Al0k>1O01O01O01O010O01O01O01O01O01O01O010O0N2M4M2M3M4M2M3M4L3NX]T4"}, "image_id": 163, "id": 2562}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 67.0, 13.0, 37.0], "area": 338, "segmentation": {"size": [512, 512], "counts": "S2U1l>N10010O0O1M4M2M4_Oj@3Y?Jk@3W?Jl@3b?N`]i7"}, "image_id": 163, "id": 2563}, {"iscrowd": 0, "category_id": 1, "bbox": [140.0, 72.0, 32.0, 32.0], "area": 615, "segmentation": {"size": [512, 512], "counts": "nRV22l?3L3N2M4M2M4M2N2M40O010O01O01O010O01O01O0O2M201N1M3N3M2M4M2M3NamY5"}, "image_id": 163, "id": 2564}, {"iscrowd": 0, "category_id": 1, "bbox": [28.0, 84.0, 36.0, 60.0], "area": 1725, "segmentation": {"size": [512, 512], "counts": "mR>1n?4M4L3M4K4PANl=S100O10000O10000O1000000O10000O10000O1000000O1004L3M4L3M3M4Lglo6"}, "image_id": 163, "id": 2565}, {"iscrowd": 0, "category_id": 1, "bbox": [70.0, 87.0, 63.0, 65.0], "area": 2217, "segmentation": {"size": [512, 512], "counts": "\\TS11l?3N3L3N2M4L3N2M4L3N3L3M3N3L3M3N3L3N3L3M3010O01O01O010O00010O0M4L3N2M4L3N2M4L3N3L300010O00010O010O00010O00O2M2M4L3N2M4L3N3L3Nk\\m5"}, "image_id": 163, "id": 2566}, {"iscrowd": 0, "category_id": 1, "bbox": [251.0, 93.0, 48.0, 46.0], "area": 1329, "segmentation": {"size": [512, 512], "counts": "lcm32l?3L3N2M4M2M3N3L3N3L3M3N3N110O01O01O010O01O01O010O01O01O01O010O01O01O000M4M2M4M2M3M4M2M4M2M3N3Lh\\Z3"}, "image_id": 163, "id": 2567}, {"iscrowd": 0, "category_id": 1, "bbox": [297.0, 108.0, 102.0, 73.0], "area": 3147, "segmentation": {"size": [512, 512], "counts": "^dd42k?4L3N2M4M2M4L3N2M4M2M4L3N2010O00010O010O00010O01O01O010oNZAf0f>WO]Ai0c>TOaAl0i>3M010O01O01OO2O010O00010O00010OO2L3M3O2O010O00010O00010O010O0001mN\\Ai0c>UO_Ak0a>RObAo0h>O0010O00010O0010O0WOQAa0P?[OTAe0k>YOWAg0Q?10O00010O0010O0010O0010O0010O000N3M2M4L3N2M4M2MS[h1"}, "image_id": 163, "id": 2568}, {"iscrowd": 0, "category_id": 1, "bbox": [146.0, 110.0, 32.0, 31.0], "area": 546, "segmentation": {"size": [512, 512], "counts": "kSY23m?1N3M2O2M2N3N1N3M2O2M1O0NYOm@g0R?300101N2N01O02N2N3N1N3M2O1N3M2O2M2NTlV5"}, "image_id": 163, "id": 2569}, {"iscrowd": 0, "category_id": 1, "bbox": [238.0, 128.0, 13.0, 12.0], "area": 102, "segmentation": {"size": [512, 512], "counts": "VTg33k?2M3O2O010O00010O01N1M3No[R4"}, "image_id": 163, "id": 2570}, {"iscrowd": 0, "category_id": 1, "bbox": [249.0, 134.0, 44.0, 35.0], "area": 858, "segmentation": {"size": [512, 512], "counts": "jdl31m?3L3M3N3L3N3L3N210O00010O0010O0010O0010O0010O00010O010O00010O0010O0010ON2M4M2M4L3N2M4M^[]3"}, "image_id": 163, "id": 2571}, {"iscrowd": 0, "category_id": 1, "bbox": [134.0, 149.0, 78.0, 63.0], "area": 2943, "segmentation": {"size": [512, 512], "counts": "RVS2`0U?;0000000000000001O0000000000000000000001O0000000000000000000000000001O0B>[Oe0000000000000000000000000000000001O000QOTBJl=6TBJl=6TB^OX>b0c000000000000000000000000000000000Fdke4"}, "image_id": 163, "id": 2572}, {"iscrowd": 0, "category_id": 1, "bbox": [275.0, 173.0, 71.0, 59.0], "area": 2267, "segmentation": {"size": [512, 512], "counts": "WfY41m?2M4M2010O_@HZ?9c@J]?hNcAX1b>0010O0010O010O00010OoN[Ag0f>VO\\Ak0c>SO`Al0`>QOcAP1g>O00010O010O0N3L3N2N3L3N3L3N2N3L3Nfib2"}, "image_id": 163, "id": 2573}, {"iscrowd": 0, "category_id": 1, "bbox": [465.0, 191.0, 47.0, 85.0], "area": 2139, 
"segmentation": {"size": [512, 512], "counts": "ThX71m?3L3N2M4L3N3L3O110O010O00010O001M2M3N3L3N2M4M2M4M2M3N3GZNSBi1j=ZNSBj1i=8N2M4M2M4M20001ON3M2M4L3N2M4M2M4MoI"}, "image_id": 163, "id": 2574}, {"iscrowd": 0, "category_id": 1, "bbox": [243.0, 205.0, 38.0, 26.0], "area": 522, "segmentation": {"size": [512, 512], "counts": "ffi34j?2N2M4M201O01O01O012M01O010O01O01O010O01O010O01O010O01O01N1O2O01ON3M2M4M2NXYc3"}, "image_id": 163, "id": 2575}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 225.0, 15.0, 34.0], "area": 310, "segmentation": {"size": [512, 512], "counts": "Q7P1P?1O010OO2QOPAj0T?N3L3N2M4M2N3L3N2MkXh7"}, "image_id": 163, "id": 2576}, {"iscrowd": 0, "category_id": 1, "bbox": [107.0, 231.0, 61.0, 59.0], "area": 1704, "segmentation": {"size": [512, 512], "counts": "Zhe11n?2N2O1N2N3M2N2N2N2N2N2N2N2O1N3M2N2N2N2N2N2N00000001O0001O0000000000000001O00010O2N2N2N2N2N3M2N2N2N2O1N2N2N2N2N3M2N2N2N2NTh[5"}, "image_id": 163, "id": 2577}, {"iscrowd": 0, "category_id": 1, "bbox": [268.0, 236.0, 62.0, 50.0], "area": 1595, "segmentation": {"size": [512, 512], "counts": "UXV44j?2M3N3L3N3O000010O010O00010O010ON2N3L3N3L3N2N3N110O010O00010O010O00010O010O00010O010O00YOXA6i>GYA:f>C]A=d>@_A`0`>]OcAc0]>[OfAd0k>1O010O01O001L3N2M4M2M4Mfgj2"}, "image_id": 163, "id": 2578}, {"iscrowd": 0, "category_id": 1, "bbox": [226.0, 253.0, 40.0, 31.0], "area": 648, "segmentation": {"size": [512, 512], "counts": "ZXa31l?4M2N2M4M2O2b@@[?b0010O00010O010O010O00010O010O0010O0010O010O00010O010O0N3L3N2M4M2Nggj3"}, "image_id": 163, "id": 2579}, {"iscrowd": 0, "category_id": 1, "bbox": [430.0, 272.0, 82.0, 103.0], "area": 3919, "segmentation": {"size": [512, 512], "counts": "U[g61l?4M2M4M2M3N3L3N3M20010O0010O0010O0010ON2M4M2M3N3oNhNUCZ1iM3N3L3N3L3N2O20O010O00010OM4M2M3N3L3N2M4MkF"}, "image_id": 163, "id": 2580}, {"iscrowd": 0, "category_id": 1, "bbox": [145.0, 278.0, 27.0, 27.0], "area": 440, "segmentation": {"size": [512, 512], "counts": "WiX22k?4M2M3N3L3N3M20010O0010O0010O0010O0010OM3N3L3N3L3N2MVgY5"}, "image_id": 163, "id": 2581}, {"iscrowd": 0, "category_id": 1, "bbox": [244.0, 281.0, 75.0, 57.0], "area": 1971, "segmentation": {"size": [512, 512], "counts": "bYj32l?2M4M2M4M2O20Ob@@\\?b0010O010O0010O0010O01L3N2N3L3N3M2M4O01O01O010O01O010O01O010O01O010O01O010O01O01OXOYA7h>F[A:d>D^A@aA`0^>^OdAb0]>[OfAd0k>1O010O010O00010O010O01O01ON3M2M4M2N3L3N2NTVP3"}, "image_id": 163, "id": 2582}, {"iscrowd": 0, "category_id": 1, "bbox": [78.0, 292.0, 48.0, 64.0], "area": 1612, "segmentation": {"size": [512, 512], "counts": "cYW13l?2N3N1N2N3N1i@Bi>a0UAAh>a0VAAi>a0TABi>`0UABi>`0VAAh>n0N1N3M2O1N3M2N3N1N3N10010ON2O2M2N3M2O1N3M2O2M2ROXAa0k>]OVAb0k>\\OXAa0j>]OXAa0k>]OWA`0V?O2M2N2O2M2N3M2O^eP6"}, "image_id": 163, "id": 2583}, {"iscrowd": 0, "category_id": 1, "bbox": [167.0, 311.0, 79.0, 64.0], "area": 2742, "segmentation": {"size": [512, 512], "counts": "djc22k?4M2N2M4M2N3L3N2N3L3N3M201O01O01O010O01O010O01O01O010O01O010O01OO2L3O2O01O010O01O01O010O01ZAiNa>[1N3M2M4N110O00010O010O0001TOiANV>OmA1T>LoA4P>ISB7n=BjAE:i0l=@[B`0d=^O^Bb0c=ZOaBf0^=XOdBh0Z>10O0O2M2M4M2M3N3M2M4MQeT4"}, "image_id": 163, "id": 2584}, {"iscrowd": 0, "category_id": 1, "bbox": [122.0, 340.0, 24.0, 26.0], "area": 416, "segmentation": {"size": [512, 512], "counts": "V[m13j?3M4L3L4M3N3O01O01O01O01O01O01O0001N1M3L5L3M3MZef5"}, "image_id": 163, "id": 2585}, {"iscrowd": 0, "category_id": 1, "bbox": [262.0, 341.0, 48.0, 55.0], "area": 1580, "segmentation": {"size": [512, 512], "counts": 
"n[S42l?3L3N2M4M2M4M2M3GXOXAk0d>YOXAj0f>9M4M2N3O00010O010O00010O010O00010O010O0001L3N3L3N2M4M2M3N3L3N3L3N2M4M2M4MPeT3"}, "image_id": 163, "id": 2586}, {"iscrowd": 0, "category_id": 1, "bbox": [501.0, 351.0, 11.0, 30.0], "area": 181, "segmentation": {"size": [512, 512], "counts": "fkj73k?2M3N3M2M4M2M3N3L3O2PE"}, "image_id": 163, "id": 2587}, {"iscrowd": 0, "category_id": 1, "bbox": [50.0, 363.0, 59.0, 66.0], "area": 1920, "segmentation": {"size": [512, 512], "counts": "P]i02l?3L3M4M2M3N3L3M3N3L3M4O00010O0010O00KmN^AS1_>oNaAQ1\\>ROcAo0[>=L3N3L3N2M4N10N2M4M2M4L3N2M4M2M4L3N2JTAUOP?h050010O0010O0010O000N3L3N3L3M3N3L3N\\TY6"}, "image_id": 163, "id": 2588}, {"iscrowd": 0, "category_id": 1, "bbox": [109.0, 365.0, 27.0, 27.0], "area": 442, "segmentation": {"size": [512, 512], "counts": "Plf11m?2M3N3M2M4M2N2M4O00010O0010O0010O0010O0N3L3N2M4M2M4M^dk5"}, "image_id": 163, "id": 2589}, {"iscrowd": 0, "category_id": 1, "bbox": [134.0, 366.0, 33.0, 30.0], "area": 611, "segmentation": {"size": [512, 512], "counts": "R\\S21l?3N2M4M2N3L3N3L300010O01O01O010O01O01O010O010O00001L3N3L3N2N3L3N[T\\5"}, "image_id": 163, "id": 2590}, {"iscrowd": 0, "category_id": 1, "bbox": [299.0, 379.0, 20.0, 24.0], "area": 282, "segmentation": {"size": [512, 512], "counts": "]le41l?4M2M3N3L3M4M200010O00010M2M4L3N2M4M2MTTP3"}, "image_id": 163, "id": 2591}, {"iscrowd": 0, "category_id": 1, "bbox": [227.0, 383.0, 59.0, 57.0], "area": 1857, "segmentation": {"size": [512, 512], "counts": "Qma32l?2M4M2N3L3N2N3L3N3L3N3M2M3N3N110O0010O0010O0010O0010O010O0010O0010O0010gN[AU1i>O010O01O010O01mNWAm0i>QOYAP1l>O010O0\\ORA7m>GVA8k>EWA;i>BZA>g>_O\\A=U?N3L3N2N3Lkb`3"}, "image_id": 163, "id": 2592}, {"iscrowd": 0, "category_id": 1, "bbox": [201.0, 399.0, 24.0, 24.0], "area": 333, "segmentation": {"size": [512, 512], "counts": "olT31l?4M2N3L3N2N3M210O0010O0010O010O00N3L3N3M2M4M2N\\S_4"}, "image_id": 163, "id": 2593}, {"iscrowd": 0, "category_id": 1, "bbox": [424.0, 406.0, 54.0, 52.0], "area": 1795, "segmentation": {"size": [512, 512], "counts": "l]d62j?4M3L5M200010O0000M4K4M3L5L3L4M3N3O00010O0001O03M00010O00010O00000M4OM4O00000010O00]OaAE`>6dAK[>2iAMW>OmA1S>LPB4Q>GTB9k=DXBM4K4M3LZb`0"}, "image_id": 163, "id": 2594}, {"iscrowd": 0, "category_id": 1, "bbox": [390.0, 412.0, 28.0, 30.0], "area": 536, "segmentation": {"size": [512, 512], "counts": "`]S64i?3M3N3L3M3M4L30001O01O01O010O01O01O01O0M3M4M2M3M4L3M3NPc^1"}, "image_id": 163, "id": 2595}, {"iscrowd": 0, "category_id": 1, "bbox": [184.0, 413.0, 26.0, 24.0], "area": 392, "segmentation": {"size": [512, 512], "counts": "\\]l21l?4M2N3L3N2N3N110O00010O010O010O00010O01N1N210M2M4H[@1bRg4"}, "image_id": 163, "id": 2596}, {"iscrowd": 0, "category_id": 1, "bbox": [499.0, 415.0, 13.0, 24.0], "area": 216, "segmentation": {"size": [512, 512], "counts": "`mi73j?3M3M4L3M3N30O00010O0001nB"}, "image_id": 163, "id": 2597}, {"iscrowd": 0, "category_id": 1, "bbox": [79.0, 436.0, 28.0, 28.0], "area": 504, "segmentation": {"size": [512, 512], "counts": "VnW13k?3L3M4M2M3N3L30010O0010O0010O0010O0N201O001N1N2M4D^@6g?M4MSRZ6"}, "image_id": 163, "id": 2598}, {"iscrowd": 0, "category_id": 1, "bbox": [189.0, 437.0, 61.0, 46.0], "area": 1579, "segmentation": {"size": [512, 512], "counts": "\\nn22k?4M2N3L3N3L3N2O2O010O00010O010O010N1M3N3M2O20O0010O0010O010O00010O010O0010O0010O010O00010nNUAm0o>1O010O01O0M3N3M2M4M2M4M2N2M4MiaR4"}, "image_id": 163, "id": 2599}, {"iscrowd": 0, "category_id": 1, "bbox": [19.0, 442.0, 63.0, 64.0], "area": 2030, "segmentation": {"size": [512, 512], "counts": 
"Vn93l?2O2M2N3N1N3M2O1N3M2N3N1N3M2O2M2N3eAjNh=W1WBlNg=U1WBnNg=R1WBPOj=P1TBoNn=Q1QBoNQ>Q1mAoNT>Q1jAoNY>Q1eAoN\\>Z12M2OO1O010O010O0001O01O01O010O3M2O2M2N3N1N3M2O2M2N3N1N3M2O2M2N3M2O2M2N3NPaf6"}, "image_id": 163, "id": 2600}, {"iscrowd": 0, "category_id": 1, "bbox": [95.0, 454.0, 60.0, 56.0], "area": 2034, "segmentation": {"size": [512, 512], "counts": "Vo_11m?3M2M4M2N2N3L3N3M2N3L3N2N3M2010O010O010O00010O010O010O02L3M2O101M2010O01O010O01O010O012M10O0010M2N3L3N3AVACm>:VADl>:WACl>9WADl>:?L3N3L_Qb5"}, "image_id": 163, "id": 2601}, {"iscrowd": 0, "category_id": 1, "bbox": [365.0, 462.0, 63.0, 38.0], "area": 1197, "segmentation": {"size": [512, 512], "counts": "Qof52k?3M3M4L3M3M4N11O01O01O01O01O0O101O01O01O01O01O01O01O01O01O01O010O00010O01O01O01O01O01O01O01O01O01O01O01O01O01O01ON3L3M3M4M2MRaY1"}, "image_id": 163, "id": 2602}, {"iscrowd": 0, "category_id": 1, "bbox": [461.0, 474.0, 51.0, 38.0], "area": 1394, "segmentation": {"size": [512, 512], "counts": "joV71l?3M4M2M3M4L3M3N3L3M2N1102O01O01O010O00010O00010O01O01O010O00001O00N21O00001O00001O00001O001O00001O00"}, "image_id": 163, "id": 2603}, {"iscrowd": 0, "category_id": 1, "bbox": [181.0, 479.0, 57.0, 33.0], "area": 1302, "segmentation": {"size": [512, 512], "counts": "noj22l?2N2M3N2M3N2M3N2N2M3N2N2001O001O00001O001O00001O001O001O0000O1N2M3N2N2001O001O001O00001O001O0O1N3L3N3L3N2N3L3N3Lk`X4"}, "image_id": 163, "id": 2604}, {"iscrowd": 0, "category_id": 1, "bbox": [9.0, 499.0, 28.0, 13.0], "area": 224, "segmentation": {"size": [512, 512], "counts": "lo42n?2M10000O100O100O10000O100O100O10000O100O1002N2N3M2N2NRP]7"}, "image_id": 163, "id": 2605}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 196.0, 54.0, 68.0], "area": 1934, "segmentation": {"size": [512, 512], "counts": "b6f1Y>0000O1000O100000O1000O12N2N2N1N3N2N2N2N2N20O100000O10000000O100000O10000M3N1O2N2N2N2M3N2N2N1O2N2N2M3N2N2NihT7"}, "image_id": 164, "id": 2606}, {"iscrowd": 0, "category_id": 1, "bbox": [113.0, 196.0, 47.0, 58.0], "area": 1171, "segmentation": {"size": [512, 512], "counts": "^gh12m?2N2M3N2N2N2N2N2N2N2N1O2N2N2N2M100EPOgAo0Z>SOdAm0\\>UObAk0^>WO`Ai0`>YO^Ag0b>;00O10000002M3N2N1O2N2N2HPA]OR?a0o@^OS??PA_OR??9N2N2N2N2N2N1O2N]i_5"}, "image_id": 164, "id": 2607}, {"iscrowd": 0, "category_id": 1, "bbox": [43.0, 316.0, 17.0, 21.0], "area": 179, "segmentation": {"size": [512, 512], "counts": "oie01o?2M3N2M2O2M3N101M3N@e@FWA9i>EYA@]A?c>_O_Ab0o>010N1N2M4M2N3L3N2N3O010O010O00010O010O00001M2M4M201O01O01O01ZOcAH\\>5gAKZ>2iALX>1kALX>2kAKX>1kALX>2jALY>0kALX>2jALX>2a`o5"}, "image_id": 168, "id": 2613}, {"iscrowd": 0, "category_id": 1, "bbox": [137.0, 0.0, 25.0, 9.0], "area": 115, "segmentation": {"size": [512, 512], "counts": "P`T21o?00001O001O00001O001O00001O00001O001O00001OM3N2MSP_5"}, "image_id": 168, "id": 2614}, {"iscrowd": 0, "category_id": 1, "bbox": [494.0, 0.0, 18.0, 86.0], "area": 1147, "segmentation": {"size": [512, 512], "counts": "RRg7d0e>g0M30000000000M3ZOf0A?000000000000000"}, "image_id": 168, "id": 2615}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 86.0, 111.0, 79.0], "area": 3597, "segmentation": {"size": [512, 512], "counts": "i2o0o>3N1010O0010O0010O010O00010O010O00010O010O0010O0010O010O00010O010O00010O010O001M20010O0010O0010O0XOQA?P?^OSAb03ZOa>1_Ae0N\\Om>d0o@@P?h0010O0010O0010ON201O010O0010O0010O010O00010O010O00010O010O0010O0010O010O00010O010M2O101O01O0M4M2M3N3L3N3M2N2O5J2M4Mj[X6"}, "image_id": 168, "id": 2616}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 140.0, 6.0, 14.0], "area": 55, "segmentation": {"size": [512, 512], "counts": "\\4=c?10M2N3L3Ndkl7"}, "image_id": 168, 
"id": 2617}, {"iscrowd": 0, "category_id": 1, "bbox": [283.0, 142.0, 17.0, 22.0], "area": 248, "segmentation": {"size": [512, 512], "counts": "md]44i?4K4M3M3N3O00010O000001K4M3L5L3LckY3"}, "image_id": 168, "id": 2618}, {"iscrowd": 0, "category_id": 1, "bbox": [492.0, 153.0, 19.0, 25.0], "area": 379, "segmentation": {"size": [512, 512], "counts": "ZUf76c?7I7M300001O01O0000000000010O00N2I7I\\;"}, "image_id": 168, "id": 2619}, {"iscrowd": 0, "category_id": 1, "bbox": [296.0, 154.0, 87.0, 67.0], "area": 3319, "segmentation": {"size": [512, 512], "counts": "ZVd43g?7J5J6M30010O0000000M3J7I6K5J6M30010OM3001O01O000001O0001O000001O0001O000UOiAMW>NnA3Q>GUB9l=@ZB`0f=[O_Be0_>O01O00000001O01O00000001O01O0O1J6K5J7I6O1001O04L000001O01O00000001O01O000000010ON2UOfAJ`>1eAJ`>0fAJ`>0a[P2"}, "image_id": 168, "id": 2620}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 155.0, 63.0, 60.0], "area": 2256, "segmentation": {"size": [512, 512], "counts": "n55i?2M4M2M3N3L3N3M200010O010O0001L3N3L3N2M4M2M4O00010O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010OmNbAd0^>ZOeAc0^>YOeAe0]>YOeAd0_>XOeAe0l>M4M2M3N3L3N3LYZP7"}, "image_id": 168, "id": 2621}, {"iscrowd": 0, "category_id": 1, "bbox": [200.0, 160.0, 78.0, 42.0], "area": 2476, "segmentation": {"size": [512, 512], "counts": "RVT31h?7J6J6I8I6N200001O01O00000001O01O00000001O0001O00000001O01O0000O1J7O0000004L10O0j@YOS?j001O000L4L4001O01O00000001O01O00000001O01O0000000001O01O00000001I6I7J6J6IPkd3"}, "image_id": 168, "id": 2622}, {"iscrowd": 0, "category_id": 1, "bbox": [405.0, 169.0, 83.0, 47.0], "area": 3193, "segmentation": {"size": [512, 512], "counts": "\\fZ6:R?d0I8O0000000L4L40000000000001O00000000000001O0007I00001O0000000000000000000O100mN_Ag0m>00000B>I71O0I700000000000000000000001O00000000000001O000000000000000000000001O00H8[OZk;"}, "image_id": 168, "id": 2623}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 222.0, 67.0, 55.0], "area": 1990, "segmentation": {"size": [512, 512], "counts": "n6j0V?010O010O00010O010O010O010O00010O010O010M2N3N10010O0010O01QAROk>R1O010O00010O010O0010O010O0010O0010O010O00010O010O010lNWAl0P?N2N3L3N3M2M4M2M3N3MZXn6"}, "image_id": 168, "id": 2624}, {"iscrowd": 0, "category_id": 1, "bbox": [273.0, 267.0, 38.0, 61.0], "area": 2126, "segmentation": {"size": [512, 512], "counts": "fhX4c0]?2N1Ol0TO0000000G900000000000000N2000000000000000000000000000000000000000000eWT3"}, "image_id": 168, "id": 2625}, {"iscrowd": 0, "category_id": 1, "bbox": [332.0, 279.0, 69.0, 35.0], "area": 2177, "segmentation": {"size": [512, 512], "counts": "gXV5>b?e0[O0000000000000000000000000000000000000000000000000000000000000000000000000000O1000000000000000000000000000000000004^O>0000000000000008HmVg1"}, "image_id": 168, "id": 2626}, {"iscrowd": 0, "category_id": 1, "bbox": [429.0, 281.0, 40.0, 63.0], "area": 2047, "segmentation": {"size": [512, 512], "counts": "Wif6d0\\?00000000000000000k@2j=Y10000001O0000000000000000000000000000000000000000000000000iN]Xe0"}, "image_id": 168, "id": 2627}, {"iscrowd": 0, "category_id": 1, "bbox": [122.0, 290.0, 101.0, 67.0], "area": 3546, "segmentation": {"size": [512, 512], "counts": "WZm13j?3M3N3L3N2M4L3010O00010O00010O01O01O01O01O01O01O00M4O01O0001O01O01O01O0001O01O01O01O00N3K4O1O2N1O100N3K4M3L4M4O00010O0000010O00010O0000010O000010YNkAa1Z>00010O0001O01O00010O00fNfAn0[>mNkAQ1U>kNQB3Ej0o>00010O000001K4M3L5L3L4M4LiU`4"}, "image_id": 168, "id": 2628}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 292.0, 17.0, 34.0], "area": 370, "segmentation": {"size": [512, 512], "counts": "T9P1P?01O01O010OO2M2M3N3L3M4M2M3N3L3NhVg7"}, "image_id": 168, 
"id": 2629}, {"iscrowd": 0, "category_id": 1, "bbox": [504.0, 294.0, 8.0, 30.0], "area": 197, "segmentation": {"size": [512, 512], "counts": "oYl74_?=D<00000001O0jF"}, "image_id": 168, "id": 2630}, {"iscrowd": 0, "category_id": 1, "bbox": [16.0, 312.0, 29.0, 27.0], "area": 495, "segmentation": {"size": [512, 512], "counts": "XZ83j?3N2M4L3M4M20001O01O01O01O010O01O01O01O01O01OM4L3N3L3M3MTVY7"}, "image_id": 168, "id": 2631}, {"iscrowd": 0, "category_id": 1, "bbox": [475.0, 327.0, 23.0, 23.0], "area": 361, "segmentation": {"size": [512, 512], "counts": "\\j]74l?3M3L4M3M2M1000O01000O01000O10O10O2O3M3L5L3M[e6"}, "image_id": 168, "id": 2632}, {"iscrowd": 0, "category_id": 1, "bbox": [234.0, 328.0, 26.0, 26.0], "area": 429, "segmentation": {"size": [512, 512], "counts": "iZe32k?4L3M3N3L3M301O01O01O010O00010O00010OO2M2M3M4L3N2Meem3"}, "image_id": 168, "id": 2633}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 334.0, 7.0, 20.0], "area": 76, "segmentation": {"size": [512, 512], "counts": "^:d0\\?M4L3N2M4M2McUl7"}, "image_id": 168, "id": 2634}, {"iscrowd": 0, "category_id": 1, "bbox": [361.0, 347.0, 62.0, 47.0], "area": 1506, "segmentation": {"size": [512, 512], "counts": "ekd54k?5L4L4L4K2O00O10O1000O10O1D[O]Ae0c>_OYA`0h>DTA=000O10O1000GTA]Om>c0WAYOh>g0900000O0100000O0100000O0100000O0100000O0100000O0100000O010002N4K5L4L4L4K`T\\1"}, "image_id": 168, "id": 2635}, {"iscrowd": 0, "category_id": 1, "bbox": [232.0, 350.0, 14.0, 16.0], "area": 147, "segmentation": {"size": [512, 512], "counts": "X[d33j?4M2M3N30O00010O000N3M2M3MReT4"}, "image_id": 168, "id": 2636}, {"iscrowd": 0, "category_id": 1, "bbox": [113.0, 358.0, 64.0, 58.0], "area": 2162, "segmentation": {"size": [512, 512], "counts": "flh11m?2M4VOMcA5[>MbA6[>McA6Z>MbA6[>MbA6[>MeA4X>OhA0U>3hA1W>0fA2Z>j010O010O00010O010O00010O00N3L3N3L3N2M4N110O00010O010O00010O00010O010O00010M2M4M2M3010O0001M2M4M2M3N3L3MYTW5"}, "image_id": 168, "id": 2637}, {"iscrowd": 0, "category_id": 1, "bbox": [321.0, 377.0, 36.0, 33.0], "area": 731, "segmentation": {"size": [512, 512], "counts": "QlP52n?4L3L5L3M4K5L1O00O0100000O010000O0100Kn@\\OR?d0410O10O10O1000O10O2O4L4L3L5L3MdS]2"}, "image_id": 168, "id": 2638}, {"iscrowd": 0, "category_id": 1, "bbox": [223.0, 393.0, 78.0, 61.0], "area": 2042, "segmentation": {"size": [512, 512], "counts": "dm_38h?000O1O012N7I2N000O0100000000000O0100000@]OcAc0]>A_A?`>F\\A:d>JWA6j>MZALf>8]AAb>d0;00O10O1000O010000GXOZAg0g>]OTAd0k>90O1000O10O1000O10O10O10O1000MoNVAR1j>110O1000O10O1000O10O10O10003M4K5L3M4L4K5L3M4LfRY3"}, "image_id": 168, "id": 2639}, {"iscrowd": 0, "category_id": 1, "bbox": [301.0, 395.0, 25.0, 25.0], "area": 419, "segmentation": {"size": [512, 512], "counts": "`lf43m?4L4K4M4L2M1000O10O10O1000O10O10O1001N4M4L4K4M4LRcl2"}, "image_id": 168, "id": 2640}, {"iscrowd": 0, "category_id": 1, "bbox": [159.0, 399.0, 74.0, 60.0], "area": 2224, "segmentation": {"size": [512, 512], "counts": "Xm_21n?2N3N2M2O2M2N3N2M2O2M3M2O2M3N1O2000O010O01M100O00010O01O01O0IlNdAT1[>nNcAR1^>PO`Ao0`>90O01O01O010O01O01O01O010O011O01N3N2M2O2KUAPOn>m04N3NO00010O01O102M2O2M3M2O2M3M2O2M3MgR[4"}, "image_id": 168, "id": 2641}, {"iscrowd": 0, "category_id": 1, "bbox": [87.0, 411.0, 58.0, 57.0], "area": 2018, "segmentation": {"size": [512, 512], "counts": "Xn[13j?3N2N3L3DFPA=m>EQA=m>=N1N2N3M2M4M2M3N3M201O010O00010OO2M2M4M2M3O2O010O010O00010N1N3M2O20O0O2O01O01OO2M2O20OO2M2N2M4M2Fe@M^?0d@M_?0PSg5"}, "image_id": 168, "id": 2642}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 413.0, 50.0, 81.0], "area": 2539, "segmentation": {"size": [512, 512], "counts": 
"P^W72m?5L3YOH`A<\\>H_A=\\>GaA<\\>H`A8`>K]A5c>c001000O0103M4FdNhA_1U>:L3L5L4L3M0O1000O0100000O0100000O01001N5L3M4L4K4WOlA@X>=lA_OY>=kA_OX>=mA_OW>=i0M4K5L3M`A"}, "image_id": 168, "id": 2643}, {"iscrowd": 0, "category_id": 1, "bbox": [428.0, 432.0, 31.0, 27.0], "area": 556, "segmentation": {"size": [512, 512], "counts": "g]f61o?4K5L3M4L4K1000O10O1000O10O1000O10O1000O01000O10O12N4K5L3M4LoQj0"}, "image_id": 168, "id": 2644}, {"iscrowd": 0, "category_id": 1, "bbox": [346.0, 458.0, 37.0, 51.0], "area": 1315, "segmentation": {"size": [512, 512], "counts": "W_]5e0[?00000000000SOm000000000000000000000000001O^Ob0O100000000000000000000000o080O100000O1000O1EoNgAQ1Y>VO`Aj0`>:010000000000O010000005K3L1000000000O10O1001DgAoN_>k0hAnN]>m0;0000O0100000000000O01000000000O0100000001O7H2O0000O100000O100000O1000O100004K8I6J6J7A_@1kkT2"}, "image_id": 169, "id": 2649}, {"iscrowd": 0, "category_id": 1, "bbox": [355.0, 137.0, 29.0, 45.0], "area": 979, "segmentation": {"size": [512, 512], "counts": "\\da53m?8H7I7I8G8I2N00000000O0100000000000O10O100004L7I8G8I7I8H^jo1"}, "image_id": 169, "id": 2650}, {"iscrowd": 0, "category_id": 1, "bbox": [471.0, 142.0, 41.0, 92.0], "area": 2102, "segmentation": {"size": [512, 512], "counts": "ff[71m?2N3M2N3L3N3M2N3M2N3M2N3M2M3N3M2N3M2N3M2N3M2M4M2N3M2N3M2N3M2N3L3N2N3O010O010O010^K"}, "image_id": 169, "id": 2651}, {"iscrowd": 0, "category_id": 1, "bbox": [276.0, 150.0, 28.0, 44.0], "area": 929, "segmentation": {"size": [512, 512], "counts": "iTZ45k?7I6J7I7H7J3M00000000O0100000000000O0100003M7I7I7H7J7IUjW3"}, "image_id": 169, "id": 2652}, {"iscrowd": 0, "category_id": 1, "bbox": [446.0, 258.0, 31.0, 29.0], "area": 510, "segmentation": {"size": [512, 512], "counts": "dXo61m?2O2M2N3M2N3M2N3M2O2O0100O010O010O010O010O001M2N3M2N3M2N3M2NfWa0"}, "image_id": 169, "id": 2653}, {"iscrowd": 0, "category_id": 1, "bbox": [271.0, 493.0, 38.0, 19.0], "area": 388, "segmentation": {"size": [512, 512], "counts": "noW41m?2O2N1O1N2O1O1O1N2O1O1O1N2001O001O1O1O1O001O1O1O001O1O1O1O001O1O1O1O001O1OQPU3"}, "image_id": 169, "id": 2654}, {"iscrowd": 0, "category_id": 1, "bbox": [144.0, 10.0, 78.0, 63.0], "area": 2525, "segmentation": {"size": [512, 512], "counts": "[QX22l?3L3N2N3L3N3M2M3N3M2M4M2M3N30O010O00010O010O0010O0010O010O0010O0010O0010O010O00010O010O01O01O010O01O01O010O01O010O01O01O0UOYA>g>_O\\A`0d>^O^Ac0b>YOaAg0_>WOdAh0j>10O010O0M3N3M2M4M2N3L3N_n`4"}, "image_id": 170, "id": 2655}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 3.0, 3.0], "area": 6, "segmentation": {"size": [512, 512], "counts": "03m?O1OQ`n7"}, "image_id": 174, "id": 2656}, {"iscrowd": 0, "category_id": 1, "bbox": [167.0, 0.0, 80.0, 25.0], "area": 1061, "segmentation": {"size": [512, 512], "counts": "P`c22n?00001O00001O0000001O00001O00001O00001O00001O00001O00001O00001O00001O00001O0000001O00001O00001O00001O00001O00001O00001O00001O00001O00001O00001O0000L4M3L4L5L3LS`T4"}, "image_id": 174, "id": 2657}, {"iscrowd": 0, "category_id": 1, "bbox": [278.0, 0.0, 32.0, 17.0], "area": 529, "segmentation": {"size": [512, 512], "counts": "PP[43UAMk>3UAMk>f0L200000000000000000000000000000000010O0000000000000000000000000000D<0003M000000000000000000000000000000000000000000000000_Oa00008_OQ`n1"}, "image_id": 174, "id": 2659}, {"iscrowd": 0, "category_id": 1, "bbox": [509.0, 4.0, 3.0, 8.0], "area": 17, "segmentation": {"size": [512, 512], "counts": "[`n71h?700L"}, "image_id": 174, "id": 2660}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 7.0, 13.0, 25.0], "area": 174, "segmentation": {"size": [512, 512], "counts": 
"7i0X?N2O1N2N3M2N2N2N2N2O1N3M\\_i7"}, "image_id": 174, "id": 2661}, {"iscrowd": 0, "category_id": 1, "bbox": [249.0, 7.0, 7.0, 9.0], "area": 63, "segmentation": {"size": [512, 512], "counts": "W`l39g?00000000000ioo3"}, "image_id": 174, "id": 2662}, {"iscrowd": 0, "category_id": 1, "bbox": [125.0, 22.0, 83.0, 74.0], "area": 3262, "segmentation": {"size": [512, 512], "counts": "]an13j?3N3M2N3M2N2M4M2N3jAWOQ=k0mBWOR=k0RBSOe05Y=k0eBWO[=j0aBZO_=e0_B]Oa=c0]B@c=`0ZBBf=>XBEg=YOeAg0\\>VOgAj0X>TOjAl0W>QOkAP1c>O010O010O010O010O0010O010O0010O010O010M2N3L3N3M2N3M2N2M4M2N3MPng4"}, "image_id": 174, "id": 2663}, {"iscrowd": 0, "category_id": 1, "bbox": [468.0, 25.0, 44.0, 50.0], "area": 1606, "segmentation": {"size": [512, 512], "counts": "mPZ7c0]?000L4000009G00000000000000000000000006J00000b0^O000000000000000000000000000K50000000000VO"}, "image_id": 174, "id": 2664}, {"iscrowd": 0, "category_id": 1, "bbox": [227.0, 51.0, 60.0, 86.0], "area": 3125, "segmentation": {"size": [512, 512], "counts": "]ba3d0Z?2000000XOh000000000000000_AC_=_10000000000000000000000000005K00000?A00009G01O000000000000kNU10000000000000000000000000000000O1f]`3"}, "image_id": 174, "id": 2665}, {"iscrowd": 0, "category_id": 1, "bbox": [300.0, 58.0, 84.0, 49.0], "area": 2789, "segmentation": {"size": [512, 512], "counts": "eRf4a0_?000000001O000C=000000000000000000000000000000000000000000000000000001O000D?M00000000000000000000000000000N200000000000000000_Oa0000000000000K;J000000000000000000000000Pno1"}, "image_id": 174, "id": 2666}, {"iscrowd": 0, "category_id": 1, "bbox": [67.0, 84.0, 81.0, 75.0], "area": 3243, "segmentation": {"size": [512, 512], "counts": "ScQ12m?2_@MR?5l@MR?5l@MR?5l@MR?5l@MS?4k@OR?a0N2o@ROm>R1N2N2N2N2N2N2N11O1000001O00000000000N2N11N2N2N2O10000000000N2N2N2N2N2N201O0000N00000000000000000^OPBUOP>k0SBROm=n0UBPOl=o0VBoNm=n0UBPOm=n0UBPOm=n0UBPOm=n0UBPOm=n0d0N2N2N2N2N2N2N2N2N2N2O1N2N2N2Nfle5"}, "image_id": 174, "id": 2667}, {"iscrowd": 0, "category_id": 1, "bbox": [467.0, 88.0, 45.0, 47.0], "area": 1830, "segmentation": {"size": [512, 512], "counts": "ZcY7:f?0_Oa000XA[OU>X10000000000001O00000000000000000000N200000000000M30001O0000000gNaAQ1g>00000001O00000000nL"}, "image_id": 174, "id": 2668}, {"iscrowd": 0, "category_id": 1, "bbox": [151.0, 97.0, 16.0, 13.0], "area": 113, "segmentation": {"size": [512, 512], "counts": "Wc[21l?4M2O2O010O00010O010O010OM4M2Nl\\\\5"}, "image_id": 174, "id": 2669}, {"iscrowd": 0, "category_id": 1, "bbox": [291.0, 116.0, 97.0, 55.0], "area": 3488, "segmentation": {"size": [512, 512], "counts": "[da4d0\\?0000007I000000000000005K0M300000000000M300000000000K5C=000000000000N200000000000000000000000000000000000000004L0000000002N000i0WO0000000000000000000000000000000000000000000000000000000000000000_km1"}, "image_id": 174, "id": 2670}, {"iscrowd": 0, "category_id": 1, "bbox": [279.0, 122.0, 9.0, 8.0], "area": 68, "segmentation": {"size": [512, 512], "counts": "mc[45h?30000000000001OUl_3"}, "image_id": 174, "id": 2671}, {"iscrowd": 0, "category_id": 1, "bbox": [136.0, 130.0, 72.0, 77.0], "area": 2626, "segmentation": {"size": [512, 512], "counts": "kTT21m?3N2N2N2N1N3N2N2N2N2M2bAZOa=h0]BZOa=h0]BZOa=h0]BYOd=g0ZB[Oe=f0YB\\Og=c0WB@i=`0UBBk=>SBDm=;nAGQ>:lAIT>7jAKV>P1O1N2N1N3O01N2N1O2N2M2O00O1000OM\\NmAd1S>]NlAc1T>LlAeNS>\\1oAbNS>\\1oAaNT>\\19O00O1002M3N1O2N2N2N2M3N1O2N2N2M3N2N1O2N2M3N2N2N1O2M^kg4"}, "image_id": 174, "id": 2672}, {"iscrowd": 0, "category_id": 1, "bbox": [2.0, 138.0, 70.0, 74.0], "area": 2208, "segmentation": {"size": [512, 512], "counts": 
"ZU11n?2O1N3M2N2N2N2N2N2N2N2O1N2N2N2B\\O]Af0b>;0000000001O2O100O1O1O1O1O1O1O11O01O00eNgAn0Y>lNmAT1`>000000000001O0000000000000N2N2N2N2N2N2N2N2N2N3M2N2N2N2N2N2O1N2NQjk6"}, "image_id": 174, "id": 2673}, {"iscrowd": 0, "category_id": 1, "bbox": [217.0, 144.0, 59.0, 71.0], "area": 2213, "segmentation": {"size": [512, 512], "counts": "he\\32m?2N3N1N2N2@EXA=f>FWAFWAFWAFWAFWA?N2N2N3M1O00000000001O2N2N2N2O1N2N3N100000000000000000N2N2N2jNjA?X>_OjA?X>SOeA85c0X>SOeA85c0\\>[OfAc0\\>[OfAc0\\>[OfAc0\\>[OfAc0n>N1N2N2N2N2N2N2N2Noie3"}, "image_id": 174, "id": 2674}, {"iscrowd": 0, "category_id": 1, "bbox": [464.0, 170.0, 48.0, 73.0], "area": 2203, "segmentation": {"size": [512, 512], "counts": "`VX7>Z?800000000000001O00000000000J6000000000000A?G90000000000001O000d0\\O00000000000000000000000000000eJ"}, "image_id": 174, "id": 2675}, {"iscrowd": 0, "category_id": 1, "bbox": [294.0, 184.0, 87.0, 48.0], "area": 2741, "segmentation": {"size": [512, 512], "counts": "QVc4b0^?00000000006J1O1O2N1O1O00000000000000000000000000000000000000000000009G000000000G900000000000000000000000000000000000\\Od000003M0000000000000006E500000000000000000000000000000RZQ2"}, "image_id": 174, "id": 2676}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 204.0, 27.0, 57.0], "area": 848, "segmentation": {"size": [512, 512], "counts": "\\6b1_>000N2N2O1000hNgAh0Y>VOiAi0X>UOjAi0X>UOjAi0X>UOjAi0X>UOjAi0X>UOjAi0X>UOjAi0h>N3N1N2N2N2N2N2N2N2N2N3M^Xb7"}, "image_id": 174, "id": 2677}, {"iscrowd": 0, "category_id": 1, "bbox": [176.0, 211.0, 67.0, 64.0], "area": 2086, "segmentation": {"size": [512, 512], "counts": "TWh21n?2N2N3M2QAGT>;jAGT>;jAGT>;jAGT>;jAGT>;jAGT>;jAHS>:kAHS>:kAHT>9jAIU>8jAIV>7hAKX>5fAMZ>m01O000N2N2N2N1O000000000001O000000K`AkN`>U15000002N2O2M2N2N2N000000001O000000001O3M2O1N2N2N2N2N2N2N3M2N2NPYV4"}, "image_id": 174, "id": 2678}, {"iscrowd": 0, "category_id": 1, "bbox": [242.0, 218.0, 9.0, 10.0], "area": 48, "segmentation": {"size": [512, 512], "counts": "mVi32m?2N2N200000N2N2NRYR4"}, "image_id": 174, "id": 2679}, {"iscrowd": 0, "category_id": 1, "bbox": [47.0, 219.0, 10.0, 10.0], "area": 49, "segmentation": {"size": [512, 512], "counts": "nfg01n?2N2N20000000O1N2NPYS7"}, "image_id": 174, "id": 2680}, {"iscrowd": 0, "category_id": 1, "bbox": [222.0, 229.0, 78.0, 70.0], "area": 2695, "segmentation": {"size": [512, 512], "counts": "oX_31n?1N3CMi@6U?Li@5U?Mi@6U?Li@5U?=M3N1N3N2N110O0N3N2M2O2M0010O1O3N1N3N2O0100O01N2M2N20N2N3N2M2O2M2N3N2M2O2M3M2O2M2OO1O3N2M2O2M3M2O2M2H^AQOd>m0]AQOf>n0410O00102M2O2M3M2O2M3N1N3M2O2M3N1N3MchY3"}, "image_id": 174, "id": 2681}, {"iscrowd": 0, "category_id": 1, "bbox": [301.0, 231.0, 8.0, 10.0], "area": 49, "segmentation": {"size": [512, 512], "counts": "\\gf43k?3N1N30O1N1N3MhXU3"}, "image_id": 174, "id": 2682}, {"iscrowd": 0, "category_id": 1, "bbox": [43.0, 234.0, 72.0, 84.0], "area": 2514, "segmentation": {"size": [512, 512], "counts": "die01c?1g@1V?2g@0W?2g@0W?2g@0W?2g@0e>GjAg0T>[OjAg0T>[OjAg0T>[OjAf0U>\\OiAd0W>^OgAc0Y>^OeAb0[>@cA`0]>a00000000000010O00000000000000010O000KcAhN]>X1eAfN[>Z151O000001O000000001O2N2N1O0001O01O0000001O00002N2N3M2Gm@BU?NQB2o=LRB5m=IVB6k=CnAC9j0i=@^B>d=A^Be=_O^B>d=A^B=d=@^B9VOG^>O^B8i>M2O2Mffo1"}, "image_id": 174, "id": 2684}, {"iscrowd": 0, "category_id": 1, "bbox": [461.0, 255.0, 51.0, 68.0], "area": 1957, "segmentation": {"size": [512, 512], "counts": "_hV71m?3M2M4_@IU?9i@IU?:h@IU?b0N3N1010O010Oo@SOo>o0010O0010O010O010_ATOl=l0RBVOo=j0nAXOR>h0lA[OS>f0jA\\OW>c0fAAY>?eAC\\>Q1O010O010O01O01O010O010O010O010ON3L3N2N3M2N3O^G"}, "image_id": 174, "id": 2685}, {"iscrowd": 0, "category_id": 1, 
"bbox": [170.0, 256.0, 18.0, 19.0], "area": 146, "segmentation": {"size": [512, 512], "counts": "TXe22m?2N3N1N2N2001O00000000000N2N2N2N2NdgQ5"}, "image_id": 174, "id": 2686}, {"iscrowd": 0, "category_id": 1, "bbox": [7.0, 263.0, 13.0, 13.0], "area": 90, "segmentation": {"size": [512, 512], "counts": "]h32m?2O1N2N2N2N0001O2N2N2N2Nfge7"}, "image_id": 174, "id": 2687}, {"iscrowd": 0, "category_id": 1, "bbox": [290.0, 263.0, 9.0, 8.0], "area": 43, "segmentation": {"size": [512, 512], "counts": "[Xa42l?2O2N20O001N1N3NgWZ3"}, "image_id": 174, "id": 2688}, {"iscrowd": 0, "category_id": 1, "bbox": [100.0, 269.0, 81.0, 68.0], "area": 2720, "segmentation": {"size": [512, 512], "counts": "WYb11c02e>0YA2e>0ZA1d>1ZA1d>1ZA1d>1ZA1d>1ZA1e>0YA2e>0YA2e>1XA1f>g0M2N2N2N2000000N2N00001O0000000001O00000000011N2N2N2N2N2N2N2N2N2N100O1O1O1O1O1O1O1O1O0001O000000001O2N2N1O1O02N2N2N2N2N2N2N2N3M2O1N2N2N2N2N2NSWU5"}, "image_id": 174, "id": 2689}, {"iscrowd": 0, "category_id": 1, "bbox": [323.0, 297.0, 11.0, 10.0], "area": 71, "segmentation": {"size": [512, 512], "counts": "^iQ52l?3M2O2O0010O001M2N3Mffh2"}, "image_id": 174, "id": 2690}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 301.0, 22.0, 31.0], "area": 402, "segmentation": {"size": [512, 512], "counts": "a9k0W?M000010O2N000001O2N2O1N2N2N2N2N3M2N2N2N2NWfd7"}, "image_id": 174, "id": 2691}, {"iscrowd": 0, "category_id": 1, "bbox": [149.0, 301.0, 66.0, 78.0], "area": 2438, "segmentation": {"size": [512, 512], "counts": "ajZ22m?3N1N2N2N2N2O1N2N3M2N2N20000001ON3M2N2N2N2O1N2N2N3M2N2N2O1N2N2N3M2N2N2O0O001O00000001WOSB^Om=b0UB\\Ok=d0WBZOi=f0YBYOf=g0\\BWOd=j0]BTOc=l0_BROb=m0`BQO`=o0cBnN]=R1eBlN]=R1eBlN^=Q1j0N2N2O2M2N2N2Ah@3Z?Lg@2\\?Kg@2[?Lg@2f?MiUd4"}, "image_id": 174, "id": 2692}, {"iscrowd": 0, "category_id": 1, "bbox": [265.0, 306.0, 75.0, 54.0], "area": 2408, "segmentation": {"size": [512, 512], "counts": "XjT42l?2N3M2N3M2N3M2N3N1010Ok@ZOP?g0m@[OT?i0O010O010O010O010O001M2O4NO010O010O010O010O01M2O2M1O002N3O010O0100O010O01N1N3M2O20O010O0N3M210O001M2N3M2N3AVADl>:ZA@i>=YAAi>=?M2N3M2N3MUfe2"}, "image_id": 174, "id": 2693}, {"iscrowd": 0, "category_id": 1, "bbox": [21.0, 313.0, 9.0, 9.0], "area": 44, "segmentation": {"size": [512, 512], "counts": "mi:2m?2N2N2N01O2N2N2NUf`7"}, "image_id": 174, "id": 2694}, {"iscrowd": 0, "category_id": 1, "bbox": [17.0, 326.0, 58.0, 71.0], "area": 2031, "segmentation": {"size": [512, 512], "counts": "_k81n?2N2N2N2N2@GVA;h>GVA;h>GWA:g>HWA:g>HWA:g>IVA:h>`0N2N2N2N1O00001O00002N2N2O1N2N2N2N2O101O000000dNQBf0o=XOTBg0l=XOUBh0k=VOWBh0k=VOWBh0k=VOWBh0l=UOVBi0l=UOVBi0l=UOVBi0l=UOVBi0b>N2N2N3N1N2N2N2N2N2N2N2N2NXTj6"}, "image_id": 174, "id": 2695}, {"iscrowd": 0, "category_id": 1, "bbox": [334.0, 335.0, 40.0, 90.0], "area": 2752, "segmentation": {"size": [512, 512], "counts": "S\\W5h0X?00000000000E;B>E;1=B000D8nAHR>8nAHoRU2"}, "image_id": 174, "id": 2696}, {"iscrowd": 0, "category_id": 1, "bbox": [77.0, 342.0, 14.0, 14.0], "area": 93, "segmentation": {"size": [512, 512], "counts": "jjV11n?2O1N2N2N2000000000O2M2N2NRUb6"}, "image_id": 174, "id": 2697}, {"iscrowd": 0, "category_id": 1, "bbox": [464.0, 356.0, 48.0, 52.0], "area": 1604, "segmentation": {"size": [512, 512], "counts": "g[X7m>@UA>m>@UA>m>@UA>m>ATA=Z?N1N2N2N2N2NVSP5"}, "image_id": 174, "id": 2699}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 381.0, 32.0, 69.0], "area": 1123, "segmentation": {"size": [512, 512], "counts": "m;U2l=N2N3M2N2N2N2N2N2N2N3N1N2N2N2KYAnNi>P15N3M2N2N2N2N2N2N2N3N1N2N2N2N2NQc_7"}, "image_id": 174, "id": 2700}, {"iscrowd": 0, "category_id": 1, "bbox": [220.0, 381.0, 72.0, 
86.0], "area": 2743, "segmentation": {"size": [512, 512], "counts": "i\\^32m?2N2N2N2N2L5M2N2N2N2N2N2N2O2M2N2N2O100N2N2N2N3M2N2O1N2N2N2QBUNf=m1XBUNf=n1WBTNh=T2O00000000VNYBZ1g=dN\\B[1e=bN]B^1c=`N_Ba1`=]NbBc1^=[NdBe1\\=YNfBg1j=00N3M2N2N2N2N2N2N2O1N3M2N2N2N2N2N2N2N3N1N2N2N2N2N2N2N2NYb]3"}, "image_id": 174, "id": 2701}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 388.0, 10.0, 9.0], "area": 48, "segmentation": {"size": [512, 512], "counts": "Xlo22m?1O2N2N2OO2N2N1O2NjSk4"}, "image_id": 174, "id": 2702}, {"iscrowd": 0, "category_id": 1, "bbox": [32.0, 393.0, 16.0, 16.0], "area": 134, "segmentation": {"size": [512, 512], "counts": "^\\`02m?2N2N2N2N2O2N1000N3M2O1N2N2N2N_cW7"}, "image_id": 174, "id": 2703}, {"iscrowd": 0, "category_id": 1, "bbox": [38.0, 421.0, 88.0, 72.0], "area": 2948, "segmentation": {"size": [512, 512], "counts": "a]c01n?2N2N2N2N2O1N2N2N2N3M2N2N2000000000000000001O01O000000000000000000m@VOo>o0O01O000000O1N2O1N2N2N2N3M2N2N2N2N2N000001O000000000000000001O000001O002O11OO1N2N2N2TO_A7b>G`A7c>F_A8c>F_A8c>F_A8c>F_A8c>G^A7d>G^A7d>G^A7X?M2N2NcaP6"}, "image_id": 174, "id": 2704}, {"iscrowd": 0, "category_id": 1, "bbox": [449.0, 428.0, 63.0, 65.0], "area": 2305, "segmentation": {"size": [512, 512], "counts": "lmP71n?2M3M2O2M3M2O2M2N3g@\\OS?j0O0100O0100O0`AUOl=k0QBWOP>h0nA[OQ>f0mA\\OS>c0kA_OU>b0hAAX>?eADZ>=cAF[>Q1O010O10O10O10O01000O010N1N3N2M2O2O010H\\AROe>l0]AROd>l0^ASOc>m072N2N1O1O00000000000000000001ObB"}, "image_id": 174, "id": 2705}, {"iscrowd": 0, "category_id": 1, "bbox": [179.0, 429.0, 71.0, 72.0], "area": 2606, "segmentation": {"size": [512, 512], "counts": "jmi2290Y?2d@1Y?1f@1X?1f@0Y?2e@0c0GP>g0nA[OP>g0mA[OR>f0mA\\OQ>f0mA\\OP>g0mA\\OQ>f0mA[OS>e0lA]OT>c0iA@W>`0gABX>T10O1000O10O100000O01N2N2M2O2N2O10O100000O10O1000O10O1000O10O100000O1M2O2N2N2M2O00O02O2N2M3N2N1O2M3N2N1N3N2N2M3N1O\\aR4"}, "image_id": 174, "id": 2706}, {"iscrowd": 0, "category_id": 1, "bbox": [271.0, 452.0, 82.0, 60.0], "area": 2626, "segmentation": {"size": [512, 512], "counts": "fnW42m?2N2N2N3N1N2N2N2N2N2N3N1N2N2N2N2N3M2N2O1]AgN\\>[1cAfN\\>[1bAgN^>^1000000010O0000000O1O2N1O1O1O1O1O1O2N1O1OO100O00000000002N2O0O01O2N3M0001O0001O000000011N3M2N2N2N2N2Jg@B[?d0[?2N2O1N3M2O11O000000000001O000001O000000000O1O1O1O2N1O1O1O1O1O1O1O1O1O2NO1O1O100O1B@[Aa0d>AZA`0e>BYA?g>BWA?h>CWA=h>FUA;j>>O1O100O1O1O1O11O2N1O1O1O1O2N1O1O1O1O1O1OO1O100001O1O2N1O1O1O1O2N1O1O1O1O1O2N1O1O1O1O2N1O1O1O1O1OQP_6"}, "image_id": 174, "id": 2708}, {"iscrowd": 0, "category_id": 1, "bbox": [140.0, 494.0, 38.0, 18.0], "area": 355, "segmentation": {"size": [512, 512], "counts": "o_V21n?1O1N2O1O1N2O1O1N2O1O1N2O1001O1O001O1O1O001O1O1O001O1O1O001O1O001O1O1O001OQ`V5"}, "image_id": 174, "id": 2709}, {"iscrowd": 0, "category_id": 1, "bbox": [184.0, 506.0, 12.0, 6.0], "area": 41, "segmentation": {"size": [512, 512], "counts": "o_l21n?1O1N2O1001O1O1O001O1OQ`m4"}, "image_id": 174, "id": 2710}, {"iscrowd": 0, "category_id": 1, "bbox": [236.0, 506.0, 11.0, 6.0], "area": 39, "segmentation": {"size": [512, 512], "counts": "o_f31n?1O1O1O1O1001O1O2N1OQPT4"}, "image_id": 174, "id": 2711}, {"iscrowd": 0, "category_id": 1, "bbox": [320.0, 0.0, 57.0, 32.0], "area": 1545, "segmentation": {"size": [512, 512], "counts": "PPP5?a?7I0000009G00000000000000000000000000000000000000001O0000J60000000000000000000000001O000000M300001O000000000000YOg`S2"}, "image_id": 175, "id": 2712}, {"iscrowd": 0, "category_id": 1, "bbox": [392.0, 16.0, 41.0, 71.0], "area": 2203, "segmentation": {"size": [512, 512], "counts": "bPT61o?B=C\\]W1"}, "image_id": 175, "id": 2713}, {"iscrowd": 0, 
"category_id": 1, "bbox": [439.0, 27.0, 44.0, 58.0], "area": 2017, "segmentation": {"size": [512, 512], "counts": "l`k6;e?"}, "image_id": 175, "id": 2714}, {"iscrowd": 0, "category_id": 1, "bbox": [326.0, 37.0, 53.0, 45.0], "area": 1668, "segmentation": {"size": [512, 512], "counts": "]QS54l?6J6J002M4MO1002N5K6I5LO10000000O10O10000000O10O100000O10O10000000O10L40000000O10O10000N2O01003M5K6I7J6JQ^R2"}, "image_id": 175, "id": 2715}, {"iscrowd": 0, "category_id": 1, "bbox": [486.0, 43.0, 26.0, 38.0], "area": 874, "segmentation": {"size": [512, 512], "counts": "]Qc73m?;E;E;E00000O1000000000O1000000000O1000000002M1000cN"}, "image_id": 175, "id": 2716}, {"iscrowd": 0, "category_id": 1, "bbox": [342.0, 73.0, 60.0, 57.0], "area": 1941, "segmentation": {"size": [512, 512], "counts": "nR[54l?3M2M4M3M3L3N3L10O11N4M000O3N2M4M3MO01000O01000O010O0100LgNaAX1_>500O01000O010O0100L`AhN_>X150O10O10O0102N2J7M2M4M2N3L3N3M2M4M2M4Mglf1"}, "image_id": 175, "id": 2717}, {"iscrowd": 0, "category_id": 1, "bbox": [483.0, 97.0, 29.0, 43.0], "area": 792, "segmentation": {"size": [512, 512], "counts": "[ca72n?2M3N2M2N3N2M3N2M2O2M3N2M3N110OO01N100O010O011N3N2M0010OPM"}, "image_id": 175, "id": 2718}, {"iscrowd": 0, "category_id": 1, "bbox": [432.0, 104.0, 58.0, 80.0], "area": 2420, "segmentation": {"size": [512, 512], "counts": "iSh61o?1N3M2O2`@IR?9l@IR?9l@JR?8l@IR?9l@JQ?d0O2M2N3N1N3M2O2M2N3N1N3M2O2M2N3M210O010O0010M2N3O0010O010O01N1O2M2N3N1N3M2O2M2N3XOXA4i>JZA3i>KXA3j>KYA3i>JYA4i>KYA2j>KXA4i>JZA3[?Ngj:"}, "image_id": 175, "id": 2719}, {"iscrowd": 0, "category_id": 1, "bbox": [367.0, 111.0, 65.0, 65.0], "area": 2197, "segmentation": {"size": [512, 512], "counts": "]dg52m?3N1N3M3N1N3N1N3M3N1N3N2M1O011N2O2M3M2O2M3N1N3M1L`NhA`1W>410O00010O0010O0010O0010O0010O0012M2N3N2I`AiNb>U17N2M2O2M3M2O2M3N1N3M2O2M3N1N3M3N1N^kW1"}, "image_id": 175, "id": 2720}, {"iscrowd": 0, "category_id": 1, "bbox": [396.0, 159.0, 60.0, 60.0], "area": 1811, "segmentation": {"size": [512, 512], "counts": "lUV61n?2O2M3M2O2M2N3N1N1O012M2N3N1N3M3N1N3M2O2M2N3N0O000O20O00010O01O01O01O01O01O010O00JbAkN`>S1cAkN_>R19O2M2N3N1N3M3N1N3N1N3M2O2M2N3N1NWjk0"}, "image_id": 175, "id": 2721}, {"iscrowd": 0, "category_id": 1, "bbox": [503.0, 176.0, 9.0, 20.0], "area": 103, "segmentation": {"size": [512, 512], "counts": "eek73m?1N3M2N3N1N3M3N1_J"}, "image_id": 175, "id": 2722}, {"iscrowd": 0, "category_id": 1, "bbox": [420.0, 190.0, 73.0, 53.0], "area": 1865, "segmentation": {"size": [512, 512], "counts": "PWb61o?1N3M2O2M2N3N1N3M2O2M1O10O00010O02O2M1O01O01O01O02N3N1N3M2O0O00010O00010O0003N2M00010O00010O00010O0MSATOm>l04O01O01O01O01O010O3M2O2M2N3N1N3M2O2M2N3N1NaY9"}, "image_id": 175, "id": 2723}, {"iscrowd": 0, "category_id": 1, "bbox": [441.0, 225.0, 70.0, 61.0], "area": 2083, "segmentation": {"size": [512, 512], "counts": "mgl62m?3N2M2N3N1N3M2O2M00012M2N3N1N3M2O2M2N3N2M2N3N1N000010O00010O00012M2N2OO00010O00G_ATOb>k0`ATO_>m0cAPO]>P1:O01O011N1O10O1O3N1N3M2O2M2N3N0O00101N3M2O2M2N3N1N3MZ8"}, "image_id": 175, "id": 2724}, {"iscrowd": 0, "category_id": 1, "bbox": [507.0, 266.0, 5.0, 10.0], "area": 27, "segmentation": {"size": [512, 512], "counts": "\\hm71o?2M2N3N2dG"}, "image_id": 175, "id": 2725}, {"iscrowd": 0, "category_id": 1, "bbox": [468.0, 276.0, 44.0, 66.0], "area": 1643, "segmentation": {"size": [512, 512], "counts": "PYZ72n?2M2N3N1N3M2O2M2N3N1j@[On>h0o@[On>m0O2M2N3N1N3M20100O010O010M2O2O0010O00O00O003N1N3O010O0N3N1N3M2OaF"}, "image_id": 175, "id": 2726}, {"iscrowd": 0, "category_id": 1, "bbox": [476.0, 502.0, 23.0, 10.0], "area": 167, "segmentation": {"size": [512, 
512], "counts": "o_^71o?0O1L4L41O0000000000001O0000000000001O000000WP6"}, "image_id": 175, "id": 2727}, {"iscrowd": 0, "category_id": 1, "bbox": [463.0, 509.0, 6.0, 3.0], "area": 10, "segmentation": {"size": [512, 512], "counts": "ooW71o?0O1O11O1OQPe0"}, "image_id": 175, "id": 2728}, {"iscrowd": 0, "category_id": 1, "bbox": [157.0, 0.0, 288.0, 104.0], "area": 14902, "segmentation": {"size": [512, 512], "counts": "P`^21o?001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O00001O001OO1001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O001O000010O01O01O010O01O010O01O010O01O01O001M2N2M4M2M4M2N3L3N2M4M2N3L3N2M4M2N3L3N3L3N2N3L3N3L3N2N3L3N3L3N3M2M3N]_Q1"}, "image_id": 176, "id": 2729}, {"iscrowd": 0, "category_id": 1, "bbox": [416.0, 0.0, 14.0, 5.0], "area": 44, "segmentation": {"size": [512, 512], "counts": "PP`61o?001O001O00001O001O0000O1NRPY1"}, "image_id": 176, "id": 2730}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 73.0, 30.0, 84.0], "area": 1293, "segmentation": {"size": [512, 512], "counts": "Y2d2]=M2M3N3M2M4M2M4M2N2M4M2M4M2N2M4M2M4M2N3L3N2M4M2M4M2N2M4M2M^m`7"}, "image_id": 176, "id": 2731}, {"iscrowd": 0, "category_id": 1, "bbox": [161.0, 91.0, 194.0, 236.0], "area": 26250, "segmentation": {"size": [512, 512], "counts": "af`21l?4M2M3N3L3N3\\B@d;b0ZD@c;c0ZDAc;b0ZD@c;c0ZDAc;b0ZD@c;c0ZD@d;b0YDBc;b0ZD@d;b0YDBc;b0ZD@d;b0YDBc;a0[DAc;b0YDAd;b0ZDAc;b0YDAd;b0YDBd;`0ZDBd;a0XDBe;a0YDB`;VN`DX4^;7L3N2M4L3N3J5N3L3N2M4L3N3L3N2M4L3N3M20010O0010O010O0010O0010O0010O0010O010O0010O0010O0010O010O00010O010O0010O0010O010O00010O010O010O00010O010O00ZKYFS3h9jLZFW3e9gL^FX3c9dL`F]3_9aLdF^3]9^LfFb3Z9\\LiFd3V9ZLlFf3U9VLnFk3Q9SLRGl3o8PLTGQ4k8mKXGR4h8lKZGT4g8hK]GX4b8fK`GZ4a8bKbG_4]8_KfG`4[8]KgGc4l90O010O00010O010O010O00010M2N3M2N2M4O0001L3N2N3L3N3M2M3N3L3N3M2M3N3M2M4M2M3N3M2M4M2N2M4M2M4M2N2M4M2N3L3N3L3N2N3L3N3M2M3N3L3N3M2M3N3L3N3M2M3N3M2M4M2M3N3M2MVZ^2"}, "image_id": 176, "id": 2732}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 173.0, 309.0, 339.0], "area": 37299, "segmentation": {"size": [512, 512], "counts": 
"]5j0R1b1n1oN]4_OfIa1m1PO^4^OeIb1n1PO\\4_OfIa1m1PO^4^OeIb1n1oN]4@dIb1n1oN]4_OfIa1m1PO^4_OdIa1o1PO\\4_OfIa1m1PO^4^OeIb1n1oN]4@dIb1n1oN]4_OfIa1m1PO^4_OdIb1n1oN]4_OfIa1m1PO^4^OeIb1n1oN]4@dIb1n1oN]4_OfIa1n1oN]4@dIb1n1oN]4_OfIa1m1PO^4^OeIb1n1PO\\4_OfIa1m1PO^4^OeIb1n1oN]4@dIb1n1oN]4_OfIa1m1PO^4_OdIa1o1PO\\4_OfIa1m1PO^4^OeIb1n1oN]4@dIb1n1oN]4_OfIa1m1PO^4_OdIb1n1oN]4_OfIa1m1PO^4^OeIb1n1oN]4@dIb1n1oN]4_OfIa1m1PO^4_OdIb1n1oN]4_OfIa1m1PO^4^OeIb1n1PO\\4_OfIa1m1PO^4^OeIb1n1oN]4@dIb1n1oN]4_OfIa1m1PO^4_OdIa1o1PO\\4_OfIa1m1PO^4^OeIb1n1oN]4@dIb1n1oN]4_OfIa1m1PO^4_OdIb1n1oN]4_OfIa1m1PO^4^OeIb1n1oN]4@dIb1n1oN^4^OeIb1m1PO^4_OdIb1n1oN]4_OfIa1m1PO^4^OdIc1o1oN\\4_ObIe1Q2lN^4^O^Ii1U2hN]4@[Ik1W2fN]4_OYIn1Z2cN^4_OUIP2^2aN\\4_OTIS2_2^N^4^OPIW2c2ZN]4@mHY2e2XN]4_OkH\\2h2UN^4_OgH_2k2RN]4_OeHb2n2oM^4l2cKTM\\4Z1dHmNo2I^4W1fHoNl2J^4T1iHSOi2I]4R1lHUOg2I^4Q1lHUOg2J\\4Q1mHVOf2I^4Q1lHUOg2I]4R1lHVOf2I]4R1lHUOg2I^4Q1lHUOg2J\\4Q1mHVOf2I^4Q1lHUOg2I]4R1lHVOf2I]4R1mHTOf2J^4Q1lHVOf2I]4Q1mHVOf2I^4Q1lHUOg2J\\4Q1mHVOf2I^4Q1lHUOf2J^4Q1lHVOf2I]4Q1mHVOf2I^4Q1lHUOg2J\\4Q1mHVOf2I^4Q1lHUOg2I]4R1lHVOf2I]4Q1nHUOe2J^4Q1lHUOg2J\\4Q1mHVOf2I^4Q1lHUOg2I]4R1lHVOf2I]4Q1nHUOe2J^4Q1lHUOg2J\\4Q1mHVOf2I^4Q1lHUOg2J\\4Q1mHVOf2I^4P1mHVOe2J^4Q1lHVOf2I]4Q1nHUOe2J^4Q1lHUOg2J\\4Q1mHVOf2I^4P1mHVOf2I]4R1lHVOf2I]4Q1nHUOe2J^4Q1lHUOg2J\\4Q1mHVOf2I^4P1mHVOf2I]4R1lHVOf2I]4Q1nHUOe2J^4Q1lHUOg2J\\4Q1nHUOe2J^4P1mHVOf2J\\4Q1mHVOf2I^4P1mHVOe2J^4Q1lHVOf2I]4Q1nHUOe2J^4P1mHVOf2J\\4Q1mHVOf2I^4P1mHVOe2J^4Q1lHVOf2I]4Q1nHUOe2J^4P1mHVOf2J\\4Q1mHVOf2I^4P1mHVOf2J\\4Q1mHVOf2I]4Q1nHUOe2J^4P1mHVOf2J\\4Q1nHUOe2J^4P1mHVOf2J\\4Q1mHVOf2I^4P1mHVOe2J^4P1mHWOe2I]4Q1nHUOe2J^4o0nHWOe2J\\4m0QIZOb2I^4i0TI]O^2J^4g0VI@\\2I]4e0ZIAY2J^4a0\\IEW2J\\4?`IGS2J^4]1J^4FWJa0[1I]4D[Jb0X1JT76mHJS76lHJT76lHJT77lHHU77kHIU77lHHU78jHHV78kHGU7:jHFW79iHGW79jHFW7:hHFX7:iHEX7;gHEY7;gHEY7eHA\\7>dHB\\7>eHA\\7?cHA]7?dH@]7`0bH@^7`0bH@^7`0cH_O^7a0aH_O_7a0bH^O_7b0`H^O`7b0`H^O`7c0`H\\Oa7c0_H]Oa7c0`H\\Oa7d0^H\\Ob7d0^H\\Ob7e0^HZOc7e0]H[Oc7e0^HZOc7f0\\HZOd7f0]HYOd7g0[HYOe7g0[HYOe7g0\\HXOe7h0ZHXOf7h0[HWOf7i0YHWOg7i0YHWOg7i0ZHVOg7j0XHVOh7j0YHUOh7k0WHUOi7k0WHUOj7k0VHTOj7l0VHTOj7l0WHSOj7m0UHSOk7m0VHROk7n0THROl7n0THROl7n0UHQOl7o0SHQOm7o0THPOm7P1RHPOn7P1RHPOo7o0RHPOn7Q1oGQOQ8o0lGTOU8l0hGVOX8j0eGYO\\8g0aG[O_8e0_G]Oa8c0\\G@e8`0XGBh8>UGEl8;QGGo89nFJS95kFMU94hFNX92eF1\\9OaF3_9M^F6c9I[F9e9HWF;i9EUF=l9CQF?o9AnEb0S:^OjEd0V:\\OgEg0Z:XOdEj0\\:WO`El0`:TO^En0c:ROZEP1f:POWES1j:lNTEV1l:kNPEX1P;hNnDZ1S;fNiD]1W;cNgD_1Z;`NdDb1\\;a11O0O1N3L3N3L3N2M4M2N3L3N2M4M2M4M2M4M2N2M4M2M4M2M3N3M2M4M2M4M2M3N3L3N3M2M3N3L3N3L3N3L3N2N3L3N3L3N2M4M2M4MoSU3"}, "image_id": 176, "id": 2733}, {"iscrowd": 0, "category_id": 1, "bbox": [373.0, 199.0, 116.0, 122.0], "area": 8448, "segmentation": {"size": [512, 512], "counts": "Pij51m?2M4M2M3N3L3N3L3N2M4M2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N3L3N2M4M2N3O01O01O010O01O01O010O010O01O01O010O01O01O010O01O01O010O010N1N201O010O00010O010O00010O01N1N2M4M2N3L3N3L3]MiB[2Y=cMiBZ2a=M2N2M4M2M4M2M3N3M2M4M2M4M2M3N3M2M4M2M3N3L3N3M2M3N3LjX;"}, "image_id": 176, "id": 2734}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 350.0, 18.0, 50.0], "area": 467, "segmentation": {"size": [512, 512], "counts": "n:b1_>M2N3L3N2M4M2M4M2N2M4M2M4M2M4M2N2Mndf7"}, "image_id": 176, "id": 2735}, {"iscrowd": 0, "category_id": 1, "bbox": [506.0, 359.0, 6.0, 15.0], "area": 48, "segmentation": {"size": [512, 512], "counts": "c[m71m?2M4M2M4M2iD"}, "image_id": 176, "id": 2736}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 463.0, 33.0, 49.0], "area": 1187, "segmentation": {"size": [512, 512], "counts": 
"_>a1_>0001O001O001O00001O001O00001O001O0M3N3M2M4M200010O01M2N3Cj@KY?3i@KY?2=MVQ_7"}, "image_id": 176, "id": 2737}, {"iscrowd": 0, "category_id": 1, "bbox": [290.0, 497.0, 50.0, 15.0], "area": 506, "segmentation": {"size": [512, 512], "counts": "o_a41l?3N2N200001O00N2M3N200001O001O00001O001O00001O001O0000O1M3N2O1001O001O00001O001O00001O001O001M2N2M[`e2"}, "image_id": 176, "id": 2738}, {"iscrowd": 0, "category_id": 1, "bbox": [451.0, 432.0, 61.0, 80.0], "area": 3076, "segmentation": {"size": [512, 512], "counts": "mnQ72m?2N2M3N2N1O2N2M3N2N2N2N1O2M3N2N2N2N1N3N2N2N2N2N2M2O2N2N2N2M3N1O2N2N2N2M3N2N1010O1O1O001O1O1O1O1O1O0O2M3N2N2N2N1O2M3N2N2NoA"}, "image_id": 178, "id": 2739}, {"iscrowd": 0, "category_id": 1, "bbox": [422.0, 491.0, 43.0, 21.0], "area": 480, "segmentation": {"size": [512, 512], "counts": "o_c61n?1N2O1O1O1O1O1N2O1O1O1O1O1N2O1O1O100001O1O1O1O1O1O001O1O1O1O1O1O001O1O1O1O1O1O001O1OQPg0"}, "image_id": 178, "id": 2740}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 17.0, 41.0], "area": 372, "segmentation": {"size": [512, 512], "counts": "0Y1g>N2N2M3N2M3N2M3N2M3N2N2M3N2M3N2MS`g7"}, "image_id": 180, "id": 2741}, {"iscrowd": 0, "category_id": 1, "bbox": [158.0, 0.0, 100.0, 36.0], "area": 1828, "segmentation": {"size": [512, 512], "counts": "PP_21o?001O001O00001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O00001O001O001O00001O001O00001O001O00001O001O001O00001O001O00001O001O00001O001O001O00M3N2M3N2M3N2N2M3N2M3N2M3N2NRPo3"}, "image_id": 180, "id": 2742}, {"iscrowd": 0, "category_id": 1, "bbox": [181.0, 0.0, 273.0, 342.0], "area": 53884, "segmentation": {"size": [512, 512], "counts": "Zhj22l?3L3N2N3L3N3L3N2M4M2M4M2VBmNdnN`AR1`>lNbAU1e>ON2M22L3N3L3N2M4M2N3L3N2M4M2M4M`k`5"}, "image_id": 180, "id": 2745}, {"iscrowd": 0, "category_id": 1, "bbox": [482.0, 92.0, 30.0, 85.0], "area": 1320, "segmentation": {"size": [512, 512], "counts": "RUa73k?2N3L3N3L3N3L3N2M4M2N3L3N2M4M2M4M2M3N3M2M4M2M3N3L3N3L3N3M2TM"}, "image_id": 180, "id": 2746}, {"iscrowd": 0, "category_id": 1, "bbox": [433.0, 172.0, 79.0, 141.0], "area": 7559, "segmentation": {"size": [512, 512], "counts": "fhh63k?2M4M2DGn@=n>FPAFo@=n>=M2M4M2M3N3M2M4M2M3N3L3N3L3N2N3L3N3L3N3L3N2M4M2M4M2N2M4M2M4M2M3N3N110O00010O010O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O010O0001TJ"}, "image_id": 180, "id": 2747}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 325.0, 57.0, 187.0], "area": 6214, "segmentation": {"size": [512, 512], "counts": "U:k5U:01O001O00001O001O00001O001O00001O001M2XLcEj1a:SNbEj1`:SNcEk1_:SNcEj1a:hLWEV1;o1`:iLWEU18dAJ\\>6aAM`>3]A0b>0[A3f>MWA6h>JUA9l>=01O01M2N3L3N3L3N3L3N2M4M2Ma[k4"}, "image_id": 181, "id": 2754}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 139.0, 54.0, 38.0], "area": 1085, "segmentation": {"size": [512, 512], "counts": "Ue]13k?2JM\\@7a?5N3L3N2N3L1000O3O2O0010O010O0010O0010O0010O010O0010O0010O0010O0010O010O0010O0010O010M2N2N3M2M4M2N3M2NU[g5"}, "image_id": 181, "id": 2755}, {"iscrowd": 0, "category_id": 1, "bbox": [132.0, 165.0, 51.0, 45.0], "area": 1352, "segmentation": {"size": [512, 512], "counts": "RVR21l?3N3L3N2M4M2M4M2N2M4M2N3O01O01O010O010O00010O010O00010O010O00010O010O00010O01N1M3N3L3N3L3N2M4M2M4M2M_ZT5"}, "image_id": 181, "id": 2756}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 242.0, 42.0, 81.0], "area": 2130, "segmentation": {"size": [512, 512], "counts": "o7j1S>4K4M3M4O0001O01O00010O0001O01O00010O00iN[B3f=I]B8b=DbB<^=AfB>Z=^OjBEBb0e=ElBECg0`=@\\C`0d<\\OaCc0`RO_Am0a>PObAP1h>O00001O001O001O0000O100O100O100O100ON3N1O21N101O00N2M4M2M4M2N3L3N2M4M2NSA"}, "image_id": 
185, "id": 2769}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 174.0, 365.0], "area": 50245, "segmentation": {"size": [512, 512], "counts": "0f91[GW5g9000001O0000001O0000001O000000001O0000001O0000001O000000001O0000001O0000001O000000001O0000001O0000001O0000001O000000001O0000001O0000001O000000001O0000001O0000001O000000001O0000001O0000N2L4L4K5L4L4K5L400001O0000001O0000iEcJk9]5QFgJo9f50O0000010O00000010O0^IWJm2i5oL[JQ3e5kL`JT3a5fLdJ[3[5aLiJ_3W5]LmJc3S5YLRKf3o4TLVKk3k4QLYKk3k4QLYKj3l4QLYKk3k4QLZKj3k4QLYKj3l4QLYKk3k4QLYKk3k4QLZKj3k4PLZKk3k4QLYKk3k4QLYKk3k4PL[Kj3k4QLYKk3k4QLYKk3k4PLZKk3k4QLYKk3k4QLZKj3`8K5L4L5J5L4L4K6K4L4L4K5L5K4K5L4L5J5L4L4K6K4L4K5L4L5J5LdkX5"}, "image_id": 187, "id": 2770}, {"iscrowd": 0, "category_id": 1, "bbox": [115.0, 0.0, 31.0, 14.0], "area": 251, "segmentation": {"size": [512, 512], "counts": "Q`i11n?2O001O001O001O001O1O001O001O001O001O001O001O00O1N2N2O1N2N2NRPg5"}, "image_id": 187, "id": 2771}, {"iscrowd": 0, "category_id": 1, "bbox": [130.0, 18.0, 158.0, 147.0], "area": 14813, "segmentation": {"size": [512, 512], "counts": "kSQ21l?4K4L4L4L5L3L4L4L5K4M3L5K4L4L4M4K4L4L4L5L3L4L4O20O0000010O0001O01O0001O01O0001O01O00010O0000010O0001O01O0001O01O0001O01O00SCVMdb9_OaFa0_9_ObFa0]9_OcFa0]9@cF`0\\9@dF`0\\9@dF`0\\9AdF?[9AeF?[9BeF>Z9BfF>Z9BgF>X9CgF=Y9CgF=Y9DgFjFCU9=lFBT9>lFBT9?lFAS9?mFAS9`0jFCU9=iFEW9TBCk=;XBBj=;YBCi=;ZBAi=^EBb:e0WE[Oi:l0PETOP;S1iDmNV;[1cDeN\\;P3M3M3M3M3N2M3M3M3M3N2M300001O00001O00001O001O00001O00001O00001O00001O0cLYEa1g:\\NcE]1^:_NlEZ1T:cNWFU1j9hN_FQ1a9lNiFm0W9POTGh0m8TO]Ge0c8XObGf0^8XOdGh0]8TOeGm0[8POhGP1X8mNnGP1S8lNXHl0h7ROaHg0_7VOkHc0V7YOUI?k6^O_I;b6AhI8X;HQ`4"}, "image_id": 193, "id": 2784}, {"iscrowd": 0, "category_id": 1, "bbox": [394.0, 0.0, 118.0, 56.0], "area": 4900, "segmentation": {"size": [512, 512], "counts": "hPU64h?4L5J5L4L4M4O0000001O0000001O0000001O0000001O000000001O0000001O0000001O0000001O0000001O000000001O0000001O0000001O0000001O0000001O000000001O0000001O0000001O0000001O0000001O000000001O0000001O0000001O0000001O0000001O0000001O000000001O0000"}, "image_id": 194, "id": 2785}, {"iscrowd": 0, "category_id": 1, "bbox": [78.0, 283.0, 434.0, 229.0], "area": 77721, "segmentation": {"size": [512, 512], "counts": "[_W11j?5L4L4K6K4ZAYOn=l0mAYOo=k0mAYOn=m0lAXOP>^1K5L4L4K5L4`LaMZId2b6aMZIb2a6cM`I\\2\\6hMdIX2\\6hMdIX2\\6hMdIX2]6hMbIX2^6hMcIW2]6iMcIW2]6iMcIW2^6iM^IZ2b6fMYI_2g6aMUIc2k6]MPIh2P7YMkHk2V7TMfHP3Z7PMaHU3_7kL]HY3c7gLXH^3i7bLRHb3n7^LmGg3S8YLiGk3W8ULeGo3\\8QL^GT4b8lKZGX4f8hKUG]4k8cKQGa4o8_KlFf4U9ZKfFj4Z9VKaFo4_9QK]FS5c9e01OM3K5L4L4L4001O0000001O0000O1L4K5000000001O000000001O0000001O000000001O0000001O0000001O000000001O0000001O000000001O0000001O000000001O0000001O0000001O000000001O0000001O000000001O0000001O000000001O0000001O000000001O0000001O0000001O00O1N2000000001O0000001O000000001O0000001O0000001O000000001O0000001O000000001O0000001O000000001O0000001O000000001O0000001O0000001O000000001O0000001O000000001O0000001O000000001O0000001O000000001O000eJRFd4n9XKVFh4k9SKYFm4g9nJ^FR5X:01O0000001O0000001O000000001O0000001O000000001O0000001O000000001O0000001O0000001O0000O1L4K5L4L4N2001O000000001O0000001O0000001O000000001O0000001O000000001O0000001O000000001O0000001O0000001O000000001O1O001O0000001O000000001O0000001O0000001O00002N1O0000001O000000001O0000001O000000001O0[KiDa4[;01O0000001O0000001O000000001O0000001O000000001N1L4K5LaD"}, "image_id": 194, "id": 2786}, {"iscrowd": 0, "category_id": 1, "bbox": [32.0, 342.0, 26.0, 24.0], "area": 403, "segmentation": {"size": [512, 512], "counts": "V[`02k?3L4M4K4N20001O01O01O01O00010O0001O01O01OM4K4M3L4MXeR7"}, "image_id": 
194, "id": 2787}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 402.0, 47.0, 102.0], "area": 3075, "segmentation": {"size": [512, 512], "counts": "g>k0Q?4K5L5J5L4K5L5J5L4K5L4L5J5L4K5N3O000001O01O0000010ON2L4K6K4L4K5L4K6K4K5L4K6K4K5L4K5L5J5L4L4K]SX7"}, "image_id": 194, "id": 2788}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 47.0, 48.0], "area": 1323, "segmentation": {"size": [512, 512], "counts": ";U1j>00000000000000000000011O1O1OO1O100O1O1O1O1O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O3M2N2N2N2N2N2Ni_X7"}, "image_id": 195, "id": 2789}, {"iscrowd": 0, "category_id": 1, "bbox": [179.0, 0.0, 40.0, 20.0], "area": 427, "segmentation": {"size": [512, 512], "counts": "P`i21o?1O1O1O2N1O1O1O1O1O1O1O2N1O1O1O1O1O00O1O1O100O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1OQ`b4"}, "image_id": 195, "id": 2790}, {"iscrowd": 0, "category_id": 1, "bbox": [288.0, 0.0, 24.0, 12.0], "area": 154, "segmentation": {"size": [512, 512], "counts": "PP`41o?1O1O1O1O1O2N1O1O1O1O00O1O1O1O1O1O1O100O1O1O1OQPT3"}, "image_id": 195, "id": 2791}, {"iscrowd": 0, "category_id": 1, "bbox": [324.0, 0.0, 14.0, 7.0], "area": 56, "segmentation": {"size": [512, 512], "counts": "PPR51o?1O1O1O1O1O1O00O1O1O1O1O1OQPg2"}, "image_id": 195, "id": 2792}, {"iscrowd": 0, "category_id": 1, "bbox": [379.0, 0.0, 45.0, 27.0], "area": 667, "segmentation": {"size": [512, 512], "counts": "Y`m52m?2N2N2N2O2M2N2N2N2N2O1O1O1O1O1O00O1O1O1O100O1O1O1O1O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1OQP\\1"}, "image_id": 195, "id": 2793}, {"iscrowd": 0, "category_id": 1, "bbox": [439.0, 0.0, 56.0, 69.0], "area": 2143, "segmentation": {"size": [512, 512], "counts": "k`k61o?1N2N2N3M2N2N2N2O1N2N2N3DYOZAi0d>YOZAi0d>YOZAi0e>XOYAj0e>:N2N3M2N2O1O1O1O1O2N1O1O1O1O10001O000001M0000011N2UOmAFU>8mAFV>8kAFW>8kAFW>8kAFW>8kAFW>8lAEV>9lAFU>K\\A8a0KV>J[A9a0K[>3fAK\\>3fAK\\>3j0NW_8"}, "image_id": 195, "id": 2794}, {"iscrowd": 0, "category_id": 1, "bbox": [41.0, 4.0, 36.0, 42.0], "area": 790, "segmentation": {"size": [512, 512], "counts": "f`d01n?3M2N2O1N2N2N2N2N3M2N2N2O1N1O1O2N3M2N2N2O1O10O1N2N3^OTAKn>3TAKn>3TAKn>3TAKn>3TAIHMV?8TAIP?5?N3N1N^_i6"}, "image_id": 195, "id": 2795}, {"iscrowd": 0, "category_id": 1, "bbox": [217.0, 5.0, 69.0, 61.0], "area": 2087, "segmentation": {"size": [512, 512], "counts": "Ya\\31n?2N2N2N2N2N2O2M2N2N2N2N2N2N2N3N1N1O001O00000002O10O1N2O1N3M2N2N2N2N2N10O0000001O3M0001O00000000FcARO^>m0dAQO\\>o0fAoN\\>o0gAnN[>P1QBDo=9nAIR>7lAKT>6iALW>o0000O1N2OO0001O0000000001O000001O0JeAhN[>X1gAfNY>Z16001O000001O2N2N2N2O1N3M2N2N2N1O1O00101N3M2N2N2N2N2N2O1N3M2Nemd2"}, "image_id": 195, "id": 2801}, {"iscrowd": 0, "category_id": 1, "bbox": [466.0, 67.0, 46.0, 75.0], "area": 1686, "segmentation": {"size": [512, 512], "counts": "iSY71n?3M2N2N2N2N2N2O1N2N3M2AZOaAh0]>ZOaAh0]>ZOaAh0]>ZOaAh0]>ZOaAh0]>ZOaAh0]>?N01O001O2N2N1O00001O01O0000000JeNhA[1X>hNeAX1[>60N3O00000000000000nM"}, "image_id": 195, "id": 2802}, {"iscrowd": 0, "category_id": 1, "bbox": [68.0, 85.0, 64.0, 51.0], "area": 1831, "segmentation": {"size": [512, 512], "counts": "aSR12l?2N3M2N3M2N3O010O010O0N3M2N3M2N3M201O010O010O010O010O0O2M2O2N1M3N30O010O010O010O010O010O0O2O0010O01fN[Aj01_Og>?[A_Oh>>[A_Og>?[A_Oh>>>O2O010O0N3M2N3M2Nllm5"}, "image_id": 195, "id": 2803}, {"iscrowd": 0, "category_id": 1, "bbox": [383.0, 85.0, 15.0, 14.0], "area": 108, "segmentation": {"size": [512, 512], "counts": "lbo51n?2N2N2N2N2N2N01O2N2N2O1N2N2NVmh1"}, "image_id": 195, "id": 2804}, {"iscrowd": 0, "category_id": 1, "bbox": [312.0, 98.0, 73.0, 71.0], "area": 2500, "segmentation": {"size": [512, 512], "counts": 
"WTl42m?2N2N2N2N2N2N2O2M2e@^OU?d0j@]OT?i0N2N2N3M2N1O010O2N2N2N2N3M2N2N2001O000N2N2N001O01O000000000000FbASO_>l0cARO]>n0eAQOZ>o0hAoNX>Q1jAmNV>S1;2N2N2N3M2O0O0002N2C^AYOd>e0^AYOe>d0]AZOe>d0]AZOe>d0^AYOd>e0=N2N2O1N3M2N2N2N2N2N\\\\o1"}, "image_id": 195, "id": 2805}, {"iscrowd": 0, "category_id": 1, "bbox": [142.0, 110.0, 33.0, 33.0], "area": 548, "segmentation": {"size": [512, 512], "counts": "kSW22m?2N2N2O1N2N2N2N2N2N2N2N2N2N3N100000N2N2N2N3N1N2N2N2N2N2N2N2N2N2NQ\\X5"}, "image_id": 195, "id": 2806}, {"iscrowd": 0, "category_id": 1, "bbox": [362.0, 145.0, 74.0, 70.0], "area": 2437, "segmentation": {"size": [512, 512], "counts": "iUe58c?6O1N2O1C_OXAc0f>@WAb0g>@XA`0g>BXA>g>DXAFXA:g>HXA8g>JXA6g>b00YOYA5g>J[A5f>I[A7f>G[A9f>EZABYAa0f>^OZAc0f>[OZAg0f>WOZAk0l>2N2N2N2N2N2O1N2N2N3M00000001O000000000000001EcARO]>n0eAQOZ>o0hAoNX>Q1jAmNV>S1;00001O01O002N2N3M2N2N2N2N2N2N2N2O1N2N2N3M2N2N2NljU1"}, "image_id": 195, "id": 2807}, {"iscrowd": 0, "category_id": 1, "bbox": [448.0, 148.0, 15.0, 15.0], "area": 129, "segmentation": {"size": [512, 512], "counts": "kTP72m?2N3M2N2O1N1O0001O2N2O1N2N3MW[h0"}, "image_id": 195, "id": 2808}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 160.0, 1.0, 12.0], "area": 12, "segmentation": {"size": [512, 512], "counts": "P5VO_Al0_>VO_Al0_>VO_Al0_>HmA:U>g01O1O2N1O1O2O0OO12N1O2N1O1O2N1O2N101lNUAP1o>N2N2N2N3M2N2N2N2O1N2N2N2N2N3MYiV4"}, "image_id": 195, "id": 2813}, {"iscrowd": 0, "category_id": 1, "bbox": [99.0, 188.0, 55.0, 57.0], "area": 1541, "segmentation": {"size": [512, 512], "counts": "Pga11n?3M2O1N2N2N2N3M2N2N2O1N2N2N3M2N2N2N10O0000000001O0O1M3N2N2O11O2O0O1O1O2N1O1O2O0O1O2N1O1O1O2N100M4L3M3M4K4M3MPjb5"}, "image_id": 195, "id": 2814}, {"iscrowd": 0, "category_id": 1, "bbox": [73.0, 208.0, 20.0, 25.0], "area": 352, "segmentation": {"size": [512, 512], "counts": "RgT14h?4L4K6K4O1000010O0000010O0000N3K4K5L4LaYa6"}, "image_id": 195, "id": 2815}, {"iscrowd": 0, "category_id": 1, "bbox": [397.0, 213.0, 68.0, 58.0], "area": 1990, "segmentation": {"size": [512, 512], "counts": "\\gV61n?2O2M2N2N2N2N2N2N3N1N2O100000001O000N0000002N2O1N2N2N3M2N2N2N2O1N2N2N3M00002N100O01O3M2N2N2N2N2O1N2N3M2N2N2KQAVOQ?h05N2N3N1N2N2N2N2N2N3M2O1N_Xg0"}, "image_id": 195, "id": 2816}, {"iscrowd": 0, "category_id": 1, "bbox": [233.0, 220.0, 26.0, 27.0], "area": 332, "segmentation": {"size": [512, 512], "counts": "\\gd31n?2N1O2N2N2N2N2N2N2N2N2N000O0102N2N2N2N2N2N2N2N2N2NkXn3"}, "image_id": 195, "id": 2817}, {"iscrowd": 0, "category_id": 1, "bbox": [5.0, 224.0, 37.0, 34.0], "area": 677, "segmentation": {"size": [512, 512], "counts": "ag23l?1N3M3M2O2M2N3M2O2M30O010O010O10O10O010O010O10O10O001M2N3M3N1N3M2N3N1N3MchZ7"}, "image_id": 195, "id": 2818}, {"iscrowd": 0, "category_id": 1, "bbox": [216.0, 242.0, 73.0, 71.0], "area": 2360, "segmentation": {"size": [512, 512], "counts": "oX\\31n?2N2N2N3N1N2N2N2N2N2N2N2N2N2N2N2N3N1N2N2N2N2N2N0000000000001O01O0000000000000000000001O0001O0000001O2N2N2N2N2N2N2O1N3M2N2N2N2N2N2N2N2N2N2N2N2O2MgW_3"}, "image_id": 195, "id": 2819}, {"iscrowd": 0, "category_id": 1, "bbox": [154.0, 243.0, 22.0, 24.0], "area": 261, "segmentation": {"size": [512, 512], "counts": "jW]22n?1N2N3M2O1N2N3M2N20001O01M2N2O1N3M2N2N2O2MogW5"}, "image_id": 195, "id": 2820}, {"iscrowd": 0, "category_id": 1, "bbox": [460.0, 255.0, 52.0, 62.0], "area": 1814, "segmentation": {"size": [512, 512], "counts": "oXV72m?2N2N3M2N2N2O1N2N2N2N3M2N200001NO00001O3M2N2N2HlNaAV1]>lNaAV1]>lNbAU1\\>9O1N3M2N1O000002N2N2O1N2N2N2N1D^AYOb>g0`AWO`>i0bAUO^>k0eARO[>n0gAPOY>Q1:0001O00RH"}, "image_id": 195, "id": 2821}, {"iscrowd": 0, 
"category_id": 1, "bbox": [173.0, 263.0, 19.0, 18.0], "area": 173, "segmentation": {"size": [512, 512], "counts": "`hf22m?2O2M2N2N2N2OO0000010O002N2N2O1N3M2Ncgo4"}, "image_id": 195, "id": 2822}, {"iscrowd": 0, "category_id": 1, "bbox": [67.0, 266.0, 43.0, 38.0], "area": 963, "segmentation": {"size": [512, 512], "counts": "UiQ13i?4L4L5K4N200N3N10000O2O000O2O000O101O0O10001N10000O2O000`0A0OM\\Oj@d0T?AXA0f>3c01N101O00O2O001N3L[gX6"}, "image_id": 195, "id": 2823}, {"iscrowd": 0, "category_id": 1, "bbox": [132.0, 269.0, 30.0, 41.0], "area": 686, "segmentation": {"size": [512, 512], "counts": "bXR22n?3L4M3M4K4M1O001N4M3M2M4M2N3L100002M4M3L4M3M4K3N3M3L3N3L4Maf^5"}, "image_id": 195, "id": 2824}, {"iscrowd": 0, "category_id": 1, "bbox": [188.0, 270.0, 21.0, 23.0], "area": 233, "segmentation": {"size": [512, 512], "counts": "dXn22m?3M2N2O1N3M2N2O110O000001M2N2N2O2M2N2N2NTWg4"}, "image_id": 195, "id": 2825}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 285.0, 21.0, 27.0], "area": 353, "segmentation": {"size": [512, 512], "counts": "P9c0[?2O2000O010O010O010O001M2N3N2M2N3M2N3M2OjVe7"}, "image_id": 195, "id": 2826}, {"iscrowd": 0, "category_id": 1, "bbox": [161.0, 292.0, 40.0, 57.0], "area": 1313, "segmentation": {"size": [512, 512], "counts": "ci`21o?1N3M2N2O2M2N2N2O2M2N7I6K6I6J7I100O1O0011O2N1O1O2N1N2O1O2N1O1O2N1N2O6J7I6J7I7HZUk4"}, "image_id": 195, "id": 2827}, {"iscrowd": 0, "category_id": 1, "bbox": [115.0, 305.0, 47.0, 71.0], "area": 1816, "segmentation": {"size": [512, 512], "counts": "Zji11o?2CNg@6o>Jm@325o>Jm@317o>2n@0o>c0N3L3N2M4M2N3L3N2M4M2M4M2N2M10O3N2M3N3L3N2N3L3N3L3N2N3L3N3L3N2M4M2N2M4M2M4M2M3N3Mdd^5"}, "image_id": 195, "id": 2828}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 320.0, 29.0, 52.0], "area": 948, "segmentation": {"size": [512, 512], "counts": "^:V1f>3M3L4001O2N1O1O1O2N1O1O1O2N1O1O101N1O1O1O2N1M3L4L5K4L4LnUa7"}, "image_id": 195, "id": 2829}, {"iscrowd": 0, "category_id": 1, "bbox": [59.0, 331.0, 42.0, 56.0], "area": 1756, "segmentation": {"size": [512, 512], "counts": "ijm0;e?b0^O:F0000000O10003M000000000K50O1000000000000000O1000000IeNiA[1W>7000000000YOlAAU>?f00000\\OYAOg>1d00eU]6"}, "image_id": 195, "id": 2830}, {"iscrowd": 0, "category_id": 1, "bbox": [502.0, 333.0, 10.0, 20.0], "area": 110, "segmentation": {"size": [512, 512], "counts": "eZk72m?2N2N2N2N2N2N2N2O2aE"}, "image_id": 195, "id": 2831}, {"iscrowd": 0, "category_id": 1, "bbox": [360.0, 340.0, 32.0, 33.0], "area": 539, "segmentation": {"size": [512, 512], "counts": "Q[d51n?2N2N3N1N2N2N2N2N3M2O1N2N2N2N3N10N2N2N3M2N2N2O1N3M2N2N2N2N2O2Mkdk1"}, "image_id": 195, "id": 2832}, {"iscrowd": 0, "category_id": 1, "bbox": [104.0, 358.0, 28.0, 25.0], "area": 524, "segmentation": {"size": [512, 512], "counts": "Z[d1a0_?4L0O1000O1000O1000O1000O1000O10O1000O10002N4L3L4M4L3MWdm5"}, "image_id": 195, "id": 2833}, {"iscrowd": 0, "category_id": 1, "bbox": [310.0, 371.0, 32.0, 56.0], "area": 1344, "segmentation": {"size": [512, 512], "counts": "j[k42m?4WAN^=5YB1g=1PB6P>MfA:Y>h0O000O1000000O2O000O10000O101O000O100001O3N2G:E:G:E:Focd2"}, "image_id": 195, "id": 2834}, {"iscrowd": 0, "category_id": 1, "bbox": [375.0, 375.0, 50.0, 56.0], "area": 1430, "segmentation": {"size": [512, 512], "counts": "Ulk51o?2M2N3N2M2O2M2N3j@^Oj>c0UA_Oh>d0UA_Oh>c0WA^Oh>d0UA_Oh>n0N3N1N3N1N3M2O1N1O02N2O2M2N3N2M2O2M2N3N1N3M3NO01O2O1N3M3N1N3M2O2M2N3N1N3NTS[1"}, "image_id": 195, "id": 2835}, {"iscrowd": 0, "category_id": 1, "bbox": [276.0, 412.0, 36.0, 43.0], "area": 561, "segmentation": {"size": [512, 512], "counts": 
"S]Z41n?3M2O2M2N2O2M2N2N3O001O01O010O00010O010O00010O01O01N1N3N1N2N3M2O1N3M2NRbS3"}, "image_id": 195, "id": 2836}, {"iscrowd": 0, "category_id": 1, "bbox": [220.0, 419.0, 57.0, 47.0], "area": 1403, "segmentation": {"size": [512, 512], "counts": "i]^33m?2M3N2M3N2M3N2M3N2M3N2M10O010O010O010O010O010O010O010O010O010O010O010O010O010O010O010O010O3N2M3N2M3N2M3N2M3N2M3NWRe3"}, "image_id": 195, "id": 2837}, {"iscrowd": 0, "category_id": 1, "bbox": [398.0, 433.0, 64.0, 52.0], "area": 1507, "segmentation": {"size": [512, 512], "counts": "Y^W62m?2O2M2N2N2N3N1N2N2N3N1N2N2N3N11O01O01O00000N01O0000010O000000010O000000010O000001O01O000001O01O0002N2O2M2N2N3M2O1N2N3M2N2O1N3Mnah0"}, "image_id": 195, "id": 2838}, {"iscrowd": 0, "category_id": 1, "bbox": [331.0, 453.0, 35.0, 34.0], "area": 591, "segmentation": {"size": [512, 512], "counts": "fnU51n?2N2O2M2N2O2M2N3M2O1N3M2N01O00010O0001O01O00012M2N3M2O1N3M2O1N3M2N3N[aX2"}, "image_id": 195, "id": 2839}, {"iscrowd": 0, "category_id": 1, "bbox": [280.0, 458.0, 30.0, 28.0], "area": 456, "segmentation": {"size": [512, 512], "counts": "e^\\41n?2O2M3M3N2M2O2M3N2M010O0010O0010O010O0101N3N1N3M3N2M3N1N3NWaT3"}, "image_id": 195, "id": 2840}, {"iscrowd": 0, "category_id": 1, "bbox": [273.0, 459.0, 12.0, 10.0], "area": 74, "segmentation": {"size": [512, 512], "counts": "^nX42n?2M3N1N10O010O101N3N2MaQa3"}, "image_id": 195, "id": 2841}, {"iscrowd": 0, "category_id": 1, "bbox": [377.0, 469.0, 17.0, 18.0], "area": 188, "segmentation": {"size": [512, 512], "counts": "jnl54l?2M3N2M4M000O010O010O12M4M2M3N2MQaj1"}, "image_id": 195, "id": 2842}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 477.0, 5.0, 22.0], "area": 63, "segmentation": {"size": [512, 512], "counts": "m>f0[?L3Hd@I_?49LTQm7"}, "image_id": 195, "id": 2843}, {"iscrowd": 0, "category_id": 1, "bbox": [443.0, 478.0, 48.0, 34.0], "area": 951, "segmentation": {"size": [512, 512], "counts": "oom61n?1O1N2O1O1O1O1O1O1GGj@:U?Hi@9V?Ih@8W?9O1O1O1O1O1O1O1N21OO1O1O1001O1O1O1O001O1O1O1O1M3N002N2N2N2N2N2N2N2N2NaP:"}, "image_id": 195, "id": 2844}, {"iscrowd": 0, "category_id": 1, "bbox": [99.0, 484.0, 43.0, 28.0], "area": 913, "segmentation": {"size": [512, 512], "counts": "foa1:W??M300000000000000000000000000000000000000ZOXAg0\\>XOfA3Lh0[>XOfA3Lg0]>WOfAR1Y>POeAP1[>:10O0000101O2N1O1O2N1O1O2N1O2N1O1O2O01O01O000O2M2N2N3N1O2ON3O001SN]B[1b=dN_B\\1a=bNbB]1_=`NcBa1\\=]NgBb1Y=]NhBc1Y=ZNiBe1X=YNkBd1W=ZNkBd1j=N3N1N2N0010O00G_ATOa>l0aASO_>m0bAQO_>n0dAoN_>n0;N2O2Hm@^OU?a0l@]OV?a07M2N3N1N2N3N1N2NWnd4"}, "image_id": 196, "id": 2850}, {"iscrowd": 0, "category_id": 1, "bbox": [199.0, 0.0, 86.0, 53.0], "area": 2114, "segmentation": {"size": [512, 512], "counts": "P`S31o?2N1O2N1O1O2NO1O11O2N1O1O2N1O2N1O1O2N1OO1O100O1O11O1O2N1O2N1O1O2UASO_>n0_AUO_>m0^AUOa>l0]AVOb>k0]AWOa>U1O2N1O1O2N0000O1O100O1O1O100O1O2O1N3M2N2O2M2N2O2M2N3M2O1N3M2O2M2N2N2OO00010O2N2O0O000012M2N3N1N2NX_a3"}, "image_id": 196, "id": 2851}, {"iscrowd": 0, "category_id": 1, "bbox": [277.0, 0.0, 41.0, 13.0], "area": 248, "segmentation": {"size": [512, 512], "counts": "P`Z42n?1O1O2N1O1OO100O1O1O100O1O100002N1O2N1O1O2N1O1OO1O100O1O1O100O1O100O1O1O100O1O10PPQ3"}, "image_id": 196, "id": 2852}, {"iscrowd": 0, "category_id": 1, "bbox": [445.0, 0.0, 24.0, 10.0], "area": 115, "segmentation": {"size": [512, 512], "counts": "P`n61o?1O2N1O2N1O2NO1O1O100O1O100O1O11O1OO100O1O1O10P`e0"}, "image_id": 196, "id": 2853}, {"iscrowd": 0, "category_id": 1, "bbox": [270.0, 3.0, 9.0, 9.0], "area": 42, "segmentation": {"size": [512, 512], "counts": "VPW41o?1N3N1N2N12M3M2Oh_d3"}, "image_id": 196, "id": 2854}, 
{"iscrowd": 0, "category_id": 1, "bbox": [293.0, 18.0, 89.0, 84.0], "area": 3565, "segmentation": {"size": [512, 512], "counts": "Wab42m?2O1N3M2N3N1N2N3M2O1N3M2N2O2M2N2N3N1N3M2O1N3M2N2O2M2N2kAYNn=n1O20O00010O00010O00010O00010O0O2M2N2N3N1N2N3N1N1O0010O0001O01O2N10O0001O01O002O1N3M2K\\AkNf>S16M2O1N3M2N2O2M2N3M2O1N3M2O1N3M2N2OmmP2"}, "image_id": 196, "id": 2855}, {"iscrowd": 0, "category_id": 1, "bbox": [281.0, 25.0, 14.0, 14.0], "area": 101, "segmentation": {"size": [512, 512], "counts": "o`\\41n?2N3M2N2O2M0011N2N2N3N1N2NQ_\\3"}, "image_id": 196, "id": 2856}, {"iscrowd": 0, "category_id": 1, "bbox": [77.0, 29.0, 70.0, 81.0], "area": 2965, "segmentation": {"size": [512, 512], "counts": "PbV12n?2M2N2O2M2N3M2O2M2N2N3IWOSAj0k>XOSAk0j>60OIoN`AQ1a>PO]AP1c>RO\\AP1a>9O2M2N3M2O1N3M2N3N1N3M2O1N3M210OO1N3N1N2N3N1N3M2OO0001O21O010O0010O0jNlA`0T>]OoA`0S>^OoA`0S>_OoA>T>_OnA?T>_OnA`0T>^OnA?T>_OnA?T>_OoA>T>_OnA?k>O2M2N3M2N2O2MQ]f5"}, "image_id": 196, "id": 2857}, {"iscrowd": 0, "category_id": 1, "bbox": [244.0, 45.0, 67.0, 95.0], "area": 3119, "segmentation": {"size": [512, 512], "counts": "SRj31o?1N2N3N1N3M2b@DU?>i@EU?=h@EV?d0N3M2O2M2N2O2M2N2N3N1N3M2N2O2M2N0SBWNe=i1[BXNc=h1]BZNa=g1^B\\N_=g1_BZN`=g1^B[N`=g1^B\\Na=R210O00010O01O01O01eM^BV2a=hMaBX2_=gMcBX2c=OO10O2N3M2O1N3M2WOmACU>:nACT>;nACT>AfAf07GX>7jAGX>7jAGY>7iAFY>8iAGX>7kAFX>8iAFY>8j0M2N2On\\T3"}, "image_id": 196, "id": 2858}, {"iscrowd": 0, "category_id": 1, "bbox": [503.0, 75.0, 9.0, 22.0], "area": 95, "segmentation": {"size": [512, 512], "counts": "abk71n?2N3N1N3M2O1^@E\\?a0N3cM"}, "image_id": 196, "id": 2859}, {"iscrowd": 0, "category_id": 1, "bbox": [30.0, 78.0, 88.0, 96.0], "area": 3924, "segmentation": {"size": [512, 512], "counts": "PS?2m?2N2O2M2N3M2O1N3M2N2O2M2N2O2M2N3M2O1N3M2N2O2M2N2010O01O01O01M2O101ON3M2NQB_Nc=_1]BcNb=[1^BgNb=0RBQ1;QOd=LSBR18TOd=HVBU13UOn=k0PBXOP>g0nA[OQ>g0mAZOQ>h0mA[OQ>[1N2N3M2O1N001O01O01O0001O01O01O003N1N2N3M2O1N3M2O2M2N2N3N1RO_A:c>E^A9e>D^A9d>E^A:c>E^A9e>D^A9d>E^A9e>D^A:U?N2N3N1N2N_kT6"}, "image_id": 196, "id": 2860}, {"iscrowd": 0, "category_id": 1, "bbox": [197.0, 97.0, 83.0, 78.0], "area": 3019, "segmentation": {"size": [512, 512], "counts": "`cR31n?3M2O1N3\\@IZ?9d@IZ?`0O2M2N3M2O1N1O1O102M2N2O2^AiNV>Y1iAhNU>Z1iAhNV>c1O0010O0010O00010O00010O0010O00001N1N2N00010O000010O000010O2N2N3N1N2N3M2O2M2N2N3N11OO2N0O1O0001O01O00010O3Jh@_OZ??5N3M2O2M2N2N3Nbkc3"}, "image_id": 196, "id": 2861}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 115.0, 36.0, 93.0], "area": 1919, "segmentation": {"size": [512, 512], "counts": "e3Q2n=3M2O2N11O01XBhMf=Z2O01O01O010O010O010OO2M2O1lNVBOm=OTBOn=DQBG4c0m=CQBH4c0m=DQBG4b0n=DPBH4c0m=CRBG3d0n=C_B:c=D_B:c=D`B:b=C`B;e>M2O2M2N2Oni]7"}, "image_id": 196, "id": 2862}, {"iscrowd": 0, "category_id": 1, "bbox": [134.0, 132.0, 95.0, 81.0], "area": 3516, "segmentation": {"size": [512, 512], "counts": "gTS21o?1N2N2N2N2N2N2O1N3M2N2N2N2N2N2O1N2N2N3M2N2N2O10000001O01O0000000N2cAcNT>_1jAcNU>^1iAeNV>c1000000000000000010O00000000O0O00000001O0002N2N2N2N2N2N2O1000001N1N2N2N2N2N2N2N2N10O00002N1O0003M2N2N2N2N2N2O1J`@Ib?56N3M`Z]4"}, "image_id": 196, "id": 2863}, {"iscrowd": 0, "category_id": 1, "bbox": [117.0, 133.0, 13.0, 13.0], "area": 89, "segmentation": {"size": [512, 512], "counts": "Ydj11n?2N2O2M2N2O2OO2N1N2N3M2Ndkn5"}, "image_id": 196, "id": 2864}, {"iscrowd": 0, "category_id": 1, "bbox": [371.0, 141.0, 90.0, 80.0], "area": 3548, "segmentation": {"size": [512, 512], "counts": 
"cei52m?2N3N1N3M2O2M2N3N1N3M2O2M2N3N1O2O010O010O010O0O2M2N3M2O0O1O01O01O01O01_OdN]B\\1d=eN[BZ1e=iNXBX1g=jNWBV1j=lNTBS1l=oNRBR1m=QOPBo0Q>?01O01O01O01O01O01O01O03M2O1N0010O00010O0001O102N110M2N3N1N3M2O2M2oNWAh0l>UOVAj0k>UOWAh0R?N3N1N3M2O2M2N3N1N3M2OZZi0"}, "image_id": 196, "id": 2865}, {"iscrowd": 0, "category_id": 1, "bbox": [101.0, 160.0, 80.0, 80.0], "area": 2952, "segmentation": {"size": [512, 512], "counts": "Sfb12m?2N2N2O1N2N3M2N2N2A^O]Ad0a>^O]Ad0a>^O]Ad0a>_O\\Ac0b>_O]Ab0a>@]Ac0`>`0O1N2N2N2N2N1O0001O2O1000001O0000000001O0001O00000N2N2N2N2O2M20000000000000010O0000000O1N2N2N3M2N2O1N2N2N2N2N3M2N2N2O1N2N2N2N3MTYU5"}, "image_id": 196, "id": 2866}, {"iscrowd": 0, "category_id": 1, "bbox": [319.0, 191.0, 65.0, 85.0], "area": 2709, "segmentation": {"size": [512, 512], "counts": "nfo41n?3M2O2M2N3N1N3M2O2M2N3N1N3ETO]Am0c>UO[Aj0e>XOYAh0g>901N2N3N1N3M2O2M2N3N1N3M2O2M2N2O2M2010O010O010O010M2N3N1N3M2O2]NmAT1U>kNmAR1V>kNlAT1U>jNnAS1U>jNmAT1U>kNmAS1a>N3CRACP?;RADP?9SADo>;RACQ?:jNjAU1V>mNhAT1W>oNfAQ1[>901O01O00010O00010O00010O00010O00010O02N2N3N1N2N01O2O2M2N3M2O2M2N2O2M2N3N1N3M2O2SOn@g0X?M2O2M2N3N1N3M2O2M2N3NdX5"}, "image_id": 196, "id": 2868}, {"iscrowd": 0, "category_id": 1, "bbox": [390.0, 221.0, 9.0, 9.0], "area": 47, "segmentation": {"size": [512, 512], "counts": "oVS63m?1N3M2O02N1N3M2OmXh1"}, "image_id": 196, "id": 2869}, {"iscrowd": 0, "category_id": 1, "bbox": [28.0, 222.0, 74.0, 73.0], "area": 2678, "segmentation": {"size": [512, 512], "counts": "\\X>2n?1N3M2O1N3M2O2M2N3N1N3M2O2M2O1N3M2O1N00010DlNkAT1U>nNiAR1X>POfAP1Y>ROeAn0\\>:00010O00010O00010O00010O00010O000010O00010O01O01O012M2N1001N2N3M2OO000101N3WOZA3h>K[A3g>J[A4g>DUA069g>EVAO5;g>DaA:U?M2N3N1Nmg\\6"}, "image_id": 196, "id": 2870}, {"iscrowd": 0, "category_id": 1, "bbox": [271.0, 223.0, 68.0, 85.0], "area": 2825, "segmentation": {"size": [512, 512], "counts": "RhW41n?3M3M2O2M2N3N1N3M2O2M2KWOPAl0d>TOcA1Hk0e>VO`An0_>TO_Al0a>WO]Ah0d>M2N3N1N3M2O2M2N3N1N2N3N1N3MnVf2"}, "image_id": 196, "id": 2871}, {"iscrowd": 0, "category_id": 1, "bbox": [446.0, 251.0, 66.0, 69.0], "area": 2323, "segmentation": {"size": [512, 512], "counts": "\\Yo61n?3M2O2M2N2O2M2N3M2O2M2FXOYAj0e>XOZAj0c>YOZAh0e>ZOYAf0h>9O0001O01O01O01O01O01O01O0002O1N20N3M10O00010O000010O000010O00010O002O1N3M10O01O01O01O0001O02N100O1UH"}, "image_id": 196, "id": 2872}, {"iscrowd": 0, "category_id": 1, "bbox": [240.0, 268.0, 75.0, 79.0], "area": 3305, "segmentation": {"size": [512, 512], "counts": "[Yh32n?2Z@OX?3g@NW?4g@O7JT>0nA9DO9JR>0oA9EO7JS>1nAd0M^OS>OoAe0L]OS>1nAd0M^OR>0oAR1P>POnAR1o=a0N3N1N3M010O00010O000010O0012ON3M2O2M2N3N1N00100O2N3N1N3M2O1N3M2010O010O010O010O01jN[Ao0d>oN^AQ1c>mN_AR1h>N3M01O01O010O3M2O2M2N3N1N3M2O2M2N2O2M2NfUR3"}, "image_id": 196, "id": 2873}, {"iscrowd": 0, "category_id": 1, "bbox": [435.0, 275.0, 8.0, 6.0], "area": 29, "segmentation": {"size": [512, 512], "counts": "ehi61o?2M2O0O0100O4MZWR1"}, "image_id": 196, "id": 2874}, {"iscrowd": 0, "category_id": 1, "bbox": [64.0, 277.0, 67.0, 68.0], "area": 2450, "segmentation": {"size": [512, 512], "counts": "oYP11n?3M2O2M3M2O2M2O2M2N3N1N3M3N1N2N10O01HoN`AQ1`>RO^Am0c>TO[Am0d>9N3N2M00010O00010O010O00010O00010O00010O00010O1O3N0O10O00010O00101POfA7]>GdA7^>GeA7]>FgA6[>IgA4\\>IfA6[>HhA5[>IfA5\\>IgA5[>HgA6T?M2OQVn5"}, "image_id": 196, "id": 2875}, {"iscrowd": 0, "category_id": 1, "bbox": [359.0, 280.0, 19.0, 19.0], "area": 191, "segmentation": {"size": [512, 512], "counts": "mhc51n?3N1N3M2O2M2N3N2N01O2M2N3N1N3N1N2N3NkfR2"}, "image_id": 196, "id": 2876}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 283.0, 37.0, 48.0], "area": 
1008, "segmentation": {"size": [512, 512], "counts": "o8\\1d>O01O01O010O00101N3M2O2M2N3N1N3M10O01O010O00010O011N3M2O2M2N3N1N3M2O2M2N_V]7"}, "image_id": 196, "id": 2877}, {"iscrowd": 0, "category_id": 1, "bbox": [433.0, 286.0, 7.0, 8.0], "area": 32, "segmentation": {"size": [512, 512], "counts": "Pih62m?2N3N11N1N3MnfS1"}, "image_id": 196, "id": 2878}, {"iscrowd": 0, "category_id": 1, "bbox": [377.0, 287.0, 68.0, 83.0], "area": 2659, "segmentation": {"size": [512, 512], "counts": "]il52m?2O2M2N2O2M2N3N1N3M2O2M2N3M2O2M2N3N1N3M2O1010O010O010O01cAbNU>^1iAdNX>\\1eAgNZ>`1N3M2O2M2N3O0010O010O0N3N0O1O01O01O01O01O2ZOmA]OT>a0oA\\OT>b0mA\\OU>b0nA\\OT>=eAYO98T>>dAYO:6U>>QB@Q>>RB@P>>QB@Q>>k0M2O2M2N3N1NkUQ1"}, "image_id": 196, "id": 2879}, {"iscrowd": 0, "category_id": 1, "bbox": [40.0, 300.0, 6.0, 7.0], "area": 25, "segmentation": {"size": [512, 512], "counts": "^Yd02n?1N3M11N3MbfX7"}, "image_id": 196, "id": 2880}, {"iscrowd": 0, "category_id": 1, "bbox": [306.0, 310.0, 14.0, 17.0], "area": 124, "segmentation": {"size": [512, 512], "counts": "jYi42m?3N1N2N3N1N30O000N3M2O2M2NQfo2"}, "image_id": 196, "id": 2881}, {"iscrowd": 0, "category_id": 1, "bbox": [103.0, 320.0, 70.0, 75.0], "area": 2541, "segmentation": {"size": [512, 512], "counts": "hkc12n?1N3M2O2M2N3@BZA?e>BYAa0d>BYA`0e>BZA`0d>BYA`0e>BYA`0Y>VOPBX1o=kNoAT1R>mNlAS1T>POiAQ1V>QOiAn0W>=10O00010O00010O00010O00O2O00010O000LeAeN[>[1fAcNZ>]14010O00010O0003O0010O010O01N1N3M2O2mNVAk0l>SOVAk0R?N1N3M2O2M2N3N1N3M2O2M2NhTY5"}, "image_id": 196, "id": 2882}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 324.0, 74.0, 78.0], "area": 2808, "segmentation": {"size": [512, 512], "counts": "b;j0l>WO]Ak0`>WO^Ak0a>WO\\Al0a>VO^Ak0`>;00O00010O00010O00010IeNhA[1Y>gNdAY1\\>6O01O00010O00010O00J`NmAa1S>aNkA^1U>6010O00010O00011N2N3N1N3M2O1N3M2N3N1N3M2O2M2N01O01O01O01O01O01O102M2N2O2M2N3N1N3M2N3N1Nndj6"}, "image_id": 196, "id": 2883}, {"iscrowd": 0, "category_id": 1, "bbox": [324.0, 324.0, 68.0, 79.0], "area": 2724, "segmentation": {"size": [512, 512], "counts": "`ZR52m?2N3N1N3M2O1N3M2O2M2N3N1N3M2N2O2M2O2O010O0010O0010N1N3M2O2M2N3N1N2N3N1N3O0010O010O0010O001N0O1O00010O02N3M2UOmAGT>7nAGU>7mAFU>8mAGU>6mAHU>7mAFV>2`AD<9U>2`AC>8U>2QBLQ>2RBLo=3RBKQ>2P1N3Njdk1"}, "image_id": 196, "id": 2884}, {"iscrowd": 0, "category_id": 1, "bbox": [133.0, 369.0, 74.0, 71.0], "area": 2633, "segmentation": {"size": [512, 512], "counts": "olR21n?3M3N1N3M2N3N1N3M2O2M2N3N1N3M2O1N00010O0010O00CnNmAS1R>POkAQ1U>POjAQ1T>ROiAo0V>SOhAm0Y><01O01O01O01O0100O3M01O01O01O01O01O01O01O3N1N3N2O0O2M2O2M2O2M2N3N1N3UOn@c0T?\\Om@c0Z?M2N3N1N3M2O2M3M2OUSh4"}, "image_id": 196, "id": 2885}, {"iscrowd": 0, "category_id": 1, "bbox": [13.0, 390.0, 86.0, 61.0], "area": 2773, "segmentation": {"size": [512, 512], "counts": "ol61n?3M3N1N3M3M2O2M2N3LZOj@f0U?201O3N2M2N3NO0001O3N2M2N3N20O0100O000O0O00010O001O01O01O010O000010O002O1N3NO01O0M_AiN`>W1410O00010O1O3M2O2M2N2O2M2N1O01O01O01O0001O010O3M2O2M2N2O2M2N3M2O1NYS^6"}, "image_id": 196, "id": 2886}, {"iscrowd": 0, "category_id": 1, "bbox": [174.0, 435.0, 70.0, 73.0], "area": 2648, "segmentation": {"size": [512, 512], "counts": "U^g21o?2M2N3N1N3M2O2M2N3N1N3M2O2M2N3N1N3M2O2M2N3N1N3M2O2M2N3N110O010O010O010O01M100O00010O00010O00012M2N3N1N3M2O2M2N3nNWAi0j>UOYAh0j>VOWAh0S?N1N3M2O2M2N3N1N3M2O2Mm`U4"}, "image_id": 196, "id": 2887}, {"iscrowd": 0, "category_id": 1, "bbox": [43.0, 436.0, 92.0, 65.0], "area": 2826, "segmentation": {"size": [512, 512], "counts": 
"`ne01n?2O2M3M2O2M2N3N1N3N1N1O010O00010O0l@[On>e0RA^Ol>b0TA@i>b0UA@i>n0N1N3M2O2M2O2M2N3NO01O01O01O01O01O010O01O2O2DeAnN\\>Q1fAmNZ>T1hAjNX>U1jAiNV>X1801O01O03M2O2M2N3N2M2N3N1N1O010O01O011N2O0O00010O00010O00010O0010O1O3N1N3M3N1N3M2O2M2N3NjQl5"}, "image_id": 196, "id": 2888}, {"iscrowd": 0, "category_id": 1, "bbox": [117.0, 466.0, 76.0, 46.0], "area": 2093, "segmentation": {"size": [512, 512], "counts": "onj12m?2O2M2N3N1N3M2O2M2N3N1N3M2O2M2N3N1N3M2O2N10O2N1O2N1O2N1O00O100O1O100O1O100O1002N1O2N1O2N1O2N1O2N1O2N1O2N00O100O1O100O1O01O01O01O2O2M2N3N1N3M2O2M2N3N1N^Po4"}, "image_id": 196, "id": 2889}, {"iscrowd": 0, "category_id": 1, "bbox": [97.0, 495.0, 31.0, 17.0], "area": 316, "segmentation": {"size": [512, 512], "counts": "ko`12n?2M2N100O100O1O100O1O100O1O100O1O100O1001O2N1O2N1O2N2N1O2N1OR`o5"}, "image_id": 196, "id": 2890}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 85.0, 199.0], "area": 9681, "segmentation": {"size": [512, 512], "counts": "8l5Q:3N2N3N101O001O00O1M3N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2N3L3N3L3N2N3L3N3L3N3M2M3N3L3N3M2M3N3L3N3M2M4M2M3N3M2M4M2M3N3M2M4M2Me_e6"}, "image_id": 197, "id": 2891}, {"iscrowd": 0, "category_id": 1, "bbox": [214.0, 0.0, 61.0, 23.0], "area": 1104, "segmentation": {"size": [512, 512], "counts": "SP[33l?9H7I2N0000000O0100000000000O10O1000000000000O1000000000000O1000000000000O1000000000000O100000000000000O100000000K500F\\@8m?Ig_f3"}, "image_id": 197, "id": 2892}, {"iscrowd": 0, "category_id": 1, "bbox": [292.0, 0.0, 88.0, 154.0], "area": 10437, "segmentation": {"size": [512, 512], "counts": "WPb43m?8H8H8H8H7H9H8H8H8H8H7I8H4K1000000000O1000O100000007I8H8H8G8I8H1OO100000O100000O10000000O100000O10000000O100000O100000000000000O100000000000000O1000005K8H8H7I8G9H8XNPC0X=HPC0X=HPC0W=IPC0X=HPC0X=HPCOVZR2"}, "image_id": 197, "id": 2893}, {"iscrowd": 0, "category_id": 1, "bbox": [445.0, 15.0, 67.0, 117.0], "area": 6532, "segmentation": {"size": [512, 512], "counts": "c`n61o?8H8mAAU0000000000000000000000000000000f0lN>00000j`c0"}, "image_id": 197, "id": 2903}, {"iscrowd": 0, "category_id": 1, "bbox": [12.0, 0.0, 80.0, 63.0], "area": 2489, "segmentation": {"size": [512, 512], "counts": "PP61o?1O2N1O2N1O2N1O2N1O2N1O00O11O2N1O2f@\\OT?e0k@]OS?i0O2N1O2N1O2N1O1OO1001O2N1]AgN]>[1`AgN_>_1N1O2N1O2N1O2N1O20O0O2M2N3N2M2N3N1N3M2O2M2N01O01O01O01O0MUASOj>m0XAQOi>o0301O2O2M2N3N1N3M2O2M3N1N3M2O2M2Nona6"}, "image_id": 198, "id": 2904}, {"iscrowd": 0, "category_id": 1, "bbox": [79.0, 0.0, 56.0, 34.0], "area": 1197, "segmentation": {"size": [512, 512], "counts": "P`W11o?1O2N1Z@N\\?4b@M]?4a@O]?;O2N1O2N1O2N1O2N1O2N1O1OO100O1O100O1O100O1O100O1O100O1002N00O1O100O1O100O1O100O1O100O2N3N1N3M2O2M2Ng_l5"}, "image_id": 198, "id": 2905}, {"iscrowd": 0, "category_id": 1, "bbox": [144.0, 0.0, 6.0, 3.0], "area": 11, "segmentation": {"size": [512, 512], "counts": "PPX21o?2N00O1O10PPe5"}, "image_id": 198, "id": 2906}, {"iscrowd": 0, "category_id": 1, "bbox": [194.0, 55.0, 77.0, 86.0], "area": 2981, "segmentation": {"size": [512, 512], "counts": "]RQ33l?2O2M2N3N1N3M2O2M2N3N1N3M2OO00021O01M2O2M2N3N1N3M2O2M2N3N2M2N3N1N3M2O1N00010O020010O01O010O01ZNTBV1k=hNWBX1j=eNYB[1f=dN[B\\1f=aN]B_1b=_N`B_1c=^N`B`1Q>N3M2O1N3M2O2M2N3N1N3M2O2M2N3N1N3M2O2M2O2M2N2OT\\h3"}, "image_id": 198, "id": 2907}, {"iscrowd": 0, "category_id": 1, "bbox": [140.0, 87.0, 71.0, 72.0], "area": 2583, "segmentation": {"size": [512, 512], "counts": 
"WSV22n?1N3M2O2M2N3N1N3M2O2M2N3N2M2N3N1N3M100O00010O003O010O010O010O010O010O010O0O2M2O2M2N3N1N2N010O00010O00010O0002O2M2N3RObA6_>HcA6`>GcA6_>IbA6`>GcA6_>HcA6`>HbA6_>HcA6`>GcA6_>IbA6V?M2N3Nc[f4"}, "image_id": 198, "id": 2908}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 117.0, 78.0, 81.0], "area": 2919, "segmentation": {"size": [512, 512], "counts": "_d]13l?2O2M2N3N1N3M2O2M2N3N1N1O010O01O2O2M2OI\\OTAc0k>_OSAc0j>_OTAc0j>@SAc0k>:N3M2O2M2N3N1N3M2O2M2N3N1N3M2O2M2010O010O010O0O2M2N3N1N3UNmAd1U>ZNnAd1X>N3M2O2M2N3N1N3M2O2M2N3N1N3M2O2M2N3N1N3M2O2M2N3N_Z[5"}, "image_id": 198, "id": 2909}, {"iscrowd": 0, "category_id": 1, "bbox": [39.0, 152.0, 78.0, 71.0], "area": 2630, "segmentation": {"size": [512, 512], "counts": "^ec03l?2N3N1N3M2O2M2N3N1N3M2O1N11N2O2M2N010O0001O3LXOl@i0R?5M2O1N3M2O2M2N3M2O2M2N3N1N3M2O1N01O3N1N3M200010O010O010O01O0N3N1N3M2N2O2M2N3N1N3M2O2M2N3N1N3M2O1N3M2O2M2N3MdYU6"}, "image_id": 198, "id": 2910}, {"iscrowd": 0, "category_id": 1, "bbox": [473.0, 160.0, 39.0, 72.0], "area": 1395, "segmentation": {"size": [512, 512], "counts": "[e\\72m?3N1N3M2O2M2N3N1N3M2O2M3N1N3M2O2M2100O010O0100O001M2O2M2N3N1N3M2O2M2N3N2N110gJ"}, "image_id": 198, "id": 2911}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 184.0, 59.0, 91.0], "area": 2898, "segmentation": {"size": [512, 512], "counts": "a6k0T?3N1JTOUAm0k>3InNaAT1]>oN`AS1^>oNaAS1]>9N3M2O2M2N3N1N3M2O2M2N3N1N3O010O010O010O010ON3M2O2SNXB]1j=bNWB\\1l=aNWB]1j=aNXB]1k=aNWB\\1k=bNWB^1W>O010N1N3N1mNZAi0i>TOYAj0i>TOZAj0o>N3M2O2M2N3N1N3M2O2M2N3NlWR7"}, "image_id": 198, "id": 2912}, {"iscrowd": 0, "category_id": 1, "bbox": [426.0, 184.0, 67.0, 85.0], "area": 2800, "segmentation": {"size": [512, 512], "counts": "lVe63l?2O2M2N3N1N3M3N1N3M2]OXOlAj0Q>XOmAh0T>ZOiAg0V>[OiAe0W>]OfAf0W>]OfAe0X>]OfAf0X>\\OfAe0X>d0M3N1N3M2O2M2O2M3M20N3M2O2N11000O010O010N1N3M3N1N3M2O2M2O2M3N1iN_An0b>oNaAn0a>QO`Am0l>N2M2O2M2N3N1N3M3N1N3M2O2M2OTX9"}, "image_id": 198, "id": 2913}, {"iscrowd": 0, "category_id": 1, "bbox": [373.0, 219.0, 78.0, 79.0], "area": 3005, "segmentation": {"size": [512, 512], "counts": "_gj52m?2N3N1N3M2O2M2N3N1N3M2O2M2N3N1N3N1010O01O0N3M10O00010O0001O01O02N3N1N3M2O2M2N3N1N3N1010O010O01OO2M2N3N1N3M2O2`NdAX1]>fNeAZ1b>0O010O010O01N1N3N1N3M2O2M2N3]Oj@8W?Gk@6X?Gj@7X?Gk@7`?N3M2ORWn0"}, "image_id": 198, "id": 2914}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 244.0, 28.0, 52.0], "area": 560, "segmentation": {"size": [512, 512], "counts": "d7d1]>O2M2N3hN^Am0d>QO_Am0c>QO^Am0m>M2O2M2N3N1N3M2O2M2N3N1N2O02M2N01O011N3M2OWga7"}, "image_id": 198, "id": 2915}, {"iscrowd": 0, "category_id": 1, "bbox": [325.0, 266.0, 87.0, 84.0], "area": 3109, "segmentation": {"size": [512, 512], "counts": "dhR52m?3N1N3M2O2M2N3N1N3QA\\O]>d0cA^O\\>c0bA@[>c0cA_OZ>c0dA_OZ>d0cA_O[>b0dA_O]>S1O010O01M2N2O02M2N3N1N3N110O010O010O010O010O000N3N1O20N1O2M2N3]NfA\\1[>bNgA\\1a>M2O2M2N3N1N2N01O01O01O01O01O01O01O01O01O01O01O01O3N1N3M2O2M2N3N1N3M2O2M2N3NPfa1"}, "image_id": 198, "id": 2916}, {"iscrowd": 0, "category_id": 1, "bbox": [509.0, 280.0, 3.0, 9.0], "area": 16, "segmentation": {"size": [512, 512], "counts": "ihn72n?3L4UG"}, "image_id": 198, "id": 2917}, {"iscrowd": 0, "category_id": 1, "bbox": [259.0, 296.0, 72.0, 76.0], "area": 2736, "segmentation": {"size": [512, 512], "counts": "PjQ41n?3N1N3M2O2M2N3N1N3M2O2M2JVOSAm0k>UOSAl0k>7M2O2M2N3N1N3M2O2M2N3N1N3M2O2M3N110O01O0N3M2O0O1O01O02N3N1N3M2O2M2iNdAg0_>WObAh0_>VOdAg0_>VOcAh0_>WOcAg0_>VOcAh0k>O010O00010O00012M2N3N1N3M2O2M3M2O2MXUj2"}, "image_id": 198, "id": 2918}, {"iscrowd": 0, "category_id": 1, "bbox": [455.0, 299.0, 57.0, 83.0], "area": 2928, 
"segmentation": {"size": [512, 512], "counts": "VjS72n?5K4L5K5J3N0000OB]OaAc0_>B[A?e>>00O010000000ImN`AW1]>600O100000O11N6K5K3M000O10O101O4L1O05K5K5K5J010000000O010001O5K5J3N001O5YNjAZ1[>`NkAZ1o3"}, "image_id": 198, "id": 2919}, {"iscrowd": 0, "category_id": 1, "bbox": [219.0, 333.0, 75.0, 91.0], "area": 3321, "segmentation": {"size": [512, 512], "counts": "gj]31n?2b@Nl>4QANm>4QAOl>4RAM0Jc>:[AN1Ja>:\\AO0Ib>;\\AM0Jb>;\\A8b>d0N3M2O2M2N2O2N01O2M2N3M2O2M2N3N1N210O00jAiNc=W1ZBkNf=U1XBmNi=S1UBoNj=Q1TBQOl=P1QBSOl=o0SBROk=Q1RBROl=o0RBSOl=b1N3N0O1O00010O0001O011N2N2O2M2N3N1N3M2N2gNfAj0\\>SOfAk0\\>TOfAi0]>TOeAk0\\>SOgAj0i>N2O2M2N3M2O2M2N2O2M2N3N1Ncc\\3"}, "image_id": 198, "id": 2920}, {"iscrowd": 0, "category_id": 1, "bbox": [381.0, 338.0, 90.0, 56.0], "area": 2829, "segmentation": {"size": [512, 512], "counts": "Tkn51n?3M2O2M2N3N1N3M2O2M2N3N1N3M2O2M2N3N1N3M2O2M2O20O001N1N2N3N1N3M2N3N1N1O01O01O01O01O01O01O01O010O1O00010O00010O00010O3M2O2M2N3N101O010O01O001M2O2M2O2O010N1N3N1N3M2O2M2N2N3Hb@J_?4c@J`?38O2MYTd0"}, "image_id": 198, "id": 2921}, {"iscrowd": 0, "category_id": 1, "bbox": [149.0, 369.0, 72.0, 78.0], "area": 2807, "segmentation": {"size": [512, 512], "counts": "[lZ21o?2M2N3N1N3N1N3M2O2M2N3N1N3IUOTAm0j>UOUAm0i>7M2N3N1N3M2O2M2N3N1N3M2O2M2N3N1N3N1N2O02M2O2M2N3N1N3M2O2M2N3N1N3M2O2gN`Am0b>QO`Am0c>PO`An0j>N3M2O0O1O01O01O102M2N3N1N3N1N3M3N1N3MkRa4"}, "image_id": 198, "id": 2922}, {"iscrowd": 0, "category_id": 1, "bbox": [300.0, 394.0, 82.0, 49.0], "area": 2769, "segmentation": {"size": [512, 512], "counts": "]]f41g?8H8H9O01O000000000001O01O00000000L4L5O01O000000000001O01O000000K50001O000000000001O01O00004L001O0M300001O000001O00000001O000001O00000001O0O1H8H8H8O10001O000N2H8HjcP2"}, "image_id": 198, "id": 2923}, {"iscrowd": 0, "category_id": 1, "bbox": [460.0, 408.0, 52.0, 55.0], "area": 2146, "segmentation": {"size": [512, 512], "counts": "S^V75e?6I7J6O1D]OVAh0c>>L5O000001O0001O00000001O0001O000001O000001O000001O0000010O1O2N1O2N100O2N1kN\\Al0l>00000010O000000_B"}, "image_id": 198, "id": 2924}, {"iscrowd": 0, "category_id": 1, "bbox": [115.0, 417.0, 70.0, 84.0], "area": 2805, "segmentation": {"size": [512, 512], "counts": "]mi11`01l>1RA1m>1PA2m>0QA2m>0QA2m>1PA1n>1QA1m>0QA2m>b0N1O101N2XAjNc>[1N2O2M200010O0N2O2O03M0001O01O01O01O0N2OO02N2O1010O000010O000010O0N2O2M2N2N00001O00011N3M2_OZAEh>9ZAEi>8YAFi>9XAEj>9XAEj>9XAFj>7XAGj>7a0N3M2N2O_QS5"}, "image_id": 198, "id": 2925}, {"iscrowd": 0, "category_id": 1, "bbox": [266.0, 441.0, 51.0, 71.0], "area": 2418, "segmentation": {"size": [512, 512], "counts": "m_U43d?9O11O000000H8G9I700001O000000000000I7G9G9F:O1001O00000000000000001O00000000000000001O0]OWBoNi=h0`BXO`=?iB]O[=c0eBTOd=l0h0]OPAOW?HRAN\\aQ3"}, "image_id": 198, "id": 2926}, {"iscrowd": 0, "category_id": 1, "bbox": [411.0, 459.0, 48.0, 53.0], "area": 1845, "segmentation": {"size": [512, 512], "counts": "mo]63f?7J6J6N2000000001O0000M3J6J6J6I7N200000000001O00000000001O00000000001O00000000001O0nNbAa0_>YOgAa0_>YOgAa0o>J6J6JWQj0"}, "image_id": 198, "id": 2927}, {"iscrowd": 0, "category_id": 1, "bbox": [195.0, 462.0, 65.0, 50.0], "area": 2332, "segmentation": {"size": [512, 512], "counts": "coQ38`?8I71O00000001O000001O00000001O000001O000001K4H8H8O100000001O0001O0000000001O0001N1000000001O0001OL4M3000000001O00000000000VOcAN`>JhANa>JgANPPn3"}, "image_id": 198, "id": 2928}, {"iscrowd": 0, "category_id": 1, "bbox": [473.0, 469.0, 39.0, 43.0], "area": 1188, "segmentation": {"size": [512, 512], "counts": "lo\\74e?7I7000000001O0000000000N2I7N2000000N2I7J600000000001O0000000000001O00000000"}, "image_id": 198, "id": 
2929}, {"iscrowd": 0, "category_id": 1, "bbox": [323.0, 470.0, 51.0, 42.0], "area": 1518, "segmentation": {"size": [512, 512], "counts": "loQ54e?7L40000001O00000000L4H8I7I7N20000001O000000000000001O0000000000001O0000000000001O00000@^ABb>7eAI[>0mAOR?000000MZ`T2"}, "image_id": 198, "id": 2930}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 153.0, 17.0, 15.0], "area": 141, "segmentation": {"size": [512, 512], "counts": "i49h?0O010O01O010O01O010O010O01O0N3MQ[g7"}, "image_id": 200, "id": 2931}, {"iscrowd": 0, "category_id": 1, "bbox": [83.0, 225.0, 17.0, 22.0], "area": 225, "segmentation": {"size": [512, 512], "counts": "cgY11l?3M4M2M3M4M2N3O0001O0N2M4L3M3N3Loh]6"}, "image_id": 200, "id": 2932}, {"iscrowd": 0, "category_id": 1, "bbox": [1.0, 0.0, 46.0, 54.0], "area": 1845, "segmentation": {"size": [512, 512], "counts": "S`07f?3k@Gc>9TA0l>b00000000001O0000000000000]AYOm=g0jABV>T100000001O00000000000000001O00000000000000001O00eNhAn0X>iNRBn0d>G9G9Gk_X7"}, "image_id": 201, "id": 2933}, {"iscrowd": 0, "category_id": 1, "bbox": [67.0, 0.0, 45.0, 82.0], "area": 2350, "segmentation": {"size": [512, 512], "counts": "P`Q15k?000000000Z@M_?:000000000000cAFT=:]B5c=KnAd0R>e0000000WB^NT=c1\\BlNd=h1000000000001]BhM\\=^20000000000000000000001O000000nNfBZO^=8PCYOf=1UQX6"}, "image_id": 201, "id": 2934}, {"iscrowd": 0, "category_id": 1, "bbox": [133.0, 0.0, 81.0, 45.0], "area": 2700, "segmentation": {"size": [512, 512], "counts": "l`R29c?400000000O1F;E:0000001O0000000001O00000001O00000001L3000000000000001O01O0000000000000M30001O00000000000000001O0000000005K000000010O0000000VOZA:f>\\OdAd0l>01O0001O000000F:Gbod4"}, "image_id": 201, "id": 2935}, {"iscrowd": 0, "category_id": 1, "bbox": [305.0, 0.0, 98.0, 39.0], "area": 2604, "segmentation": {"size": [512, 512], "counts": "S`h48e?3\\@NZ?<000000000001O0000000000000000001O000000M3N2001O00000000000000001O0000000000000000001O0000000n@Ac>n001O0000000000000000001O0000000000000000001OH80000000000001O000003M001O00000001O0000000001O000H8FU`f1"}, "image_id": 201, "id": 2936}, {"iscrowd": 0, "category_id": 1, "bbox": [428.0, 0.0, 35.0, 19.0], "area": 355, "segmentation": {"size": [512, 512], "counts": "PPf61o?00000000000000001O00000000000`@0P?0f@:Z?6000000000001O000000000000000000K5FZ`h0"}, "image_id": 201, "id": 2937}, {"iscrowd": 0, "category_id": 1, "bbox": [483.0, 0.0, 29.0, 27.0], "area": 681, "segmentation": {"size": [512, 512], "counts": "``a78^?:K501O01O0000N2000000000001O0000000000000000001O0000F:F:"}, "image_id": 201, "id": 2938}, {"iscrowd": 0, "category_id": 1, "bbox": [300.0, 45.0, 90.0, 50.0], "area": 2936, "segmentation": {"size": [512, 512], "counts": "aRf43c?:G9000001O000I7F:0000000000010O0000000003M00000001O000000000iNbAm0i>O00000000000000L410O000000000000000001O01O00000004L001O000001O000000000001O000001O00000000000001O000001O000000L4F:FZnl1"}, "image_id": 201, "id": 2939}, {"iscrowd": 0, "category_id": 1, "bbox": [124.0, 57.0, 86.0, 61.0], "area": 3527, "segmentation": {"size": [512, 512], "counts": "oRn19\\?;F:M31O0001O0000000000000001O01O0000000000000001O0001O0000000M3K500000001O0000000001O00000000F91M30000000001O0001O0000000fAdNP>g1O000001O000000000001O000001O000QOkA3U>ZOmA7O?`>@aA?a>^O`Ab0a>ZObAf0l>1OG9Famf4"}, "image_id": 201, "id": 2940}, {"iscrowd": 0, "category_id": 1, "bbox": [425.0, 59.0, 87.0, 58.0], "area": 3515, "segmentation": {"size": [512, 512], "counts": "[cd65a?:F:0000001O01K4E;M300000000E;000001O000000000001O0M300000000001O000001O00000000000001O00^NlAX1_>O0000000000000001O01O00009G0000000001O00000001O000000000001O000001O000000000001O0\\M"}, 
"image_id": 201, "id": 2941}, {"iscrowd": 0, "category_id": 1, "bbox": [12.0, 81.0, 87.0, 64.0], "area": 3764, "segmentation": {"size": [512, 512], "counts": "cS66a?9F:I7[AXOR>h0dAB\\>Q10000001O000001O0H8O100001O00000001O00000000M4O0001O0000000000XNRB^1X>00001O00000001O0000000001OgNfAl0h>O00000000000001O0001O00000000000001O01O00000000000001O0001O0000000WOWA;X?K4F\\\\^6"}, "image_id": 201, "id": 2942}, {"iscrowd": 0, "category_id": 1, "bbox": [292.0, 118.0, 88.0, 55.0], "area": 3323, "segmentation": {"size": [512, 512], "counts": "QTb45i?4M2N2N4J3VAEo=k000001O0001O0000000000000L40001O000000000000M301O0001O000000000001O000001O0000000000iNiAe0i>00000010O0000000000000000010O000000000000000010O000000000000000001UOWA>U?0000O1F_kQ2"}, "image_id": 201, "id": 2943}, {"iscrowd": 0, "category_id": 1, "bbox": [130.0, 120.0, 83.0, 56.0], "area": 2916, "segmentation": {"size": [512, 512], "counts": "PUQ26_?;E;00000000000000010O000000000000000000010O00000000000000000001I6E;M300001O00000001O000000000001O00000001O0000003M0001O000001O0000QOlA2T>DVB00000000000010O00000000000L4Fa[e4"}, "image_id": 201, "id": 2944}, {"iscrowd": 0, "category_id": 1, "bbox": [419.0, 127.0, 92.0, 58.0], "area": 3559, "segmentation": {"size": [512, 512], "counts": "]ea67_?:H80000000000M3F:N3O000K5H80000000000010O000000000L41O0001O00000000000001O0001O0000000dNfAR1d>00001O000001O000000000001O00iN`An0j>O00000000000001O0001O00000000000001O0001O00000000000001O00H9F9F`;"}, "image_id": 201, "id": 2945}, {"iscrowd": 0, "category_id": 1, "bbox": [3.0, 150.0, 90.0, 52.0], "area": 3486, "segmentation": {"size": [512, 512], "counts": "Qe1;Z?;000000VAEo=;dA2\\>i0000000000000001O01O0000000000000000000001O01O00000000000000000000010OM300jN`Al0j>01O00000001O00000000000001O0000000001O00000000000001O0000000001O000000oN]Ae0o>00000000000001O0H8CjZa6"}, "image_id": 201, "id": 2946}, {"iscrowd": 0, "category_id": 1, "bbox": [287.0, 175.0, 50.0, 53.0], "area": 2005, "segmentation": {"size": [512, 512], "counts": "hf_49^?9L40001O0L4F:F:L40000000001O01O000000000000004MO00000000J6O2O000001O0000000001O000001O0000I7F:G9FdZg2"}, "image_id": 201, "id": 2947}, {"iscrowd": 0, "category_id": 1, "bbox": [122.0, 183.0, 75.0, 50.0], "area": 2527, "segmentation": {"size": [512, 512], "counts": "VVm19\\?;L401O0000000001O0000000001O00000l@_Oi>l01O0000000001O0000000001O0000000001O0000000001O0009G001O0M300000000000000010O00000000000000000I7F;F9Ff@Nf?000000001OLnYm4"}, "image_id": 201, "id": 2948}, {"iscrowd": 0, "category_id": 1, "bbox": [347.0, 199.0, 45.0, 87.0], "area": 2571, "segmentation": {"size": [512, 512], "counts": "Qg]55]?>M3000000001O000001O0\\AFb=:ZBJf=6ZBJf=6ZBJf=6ZBJ\\=`0ZBJf=X10000YB[NT=X2000000000001O000000000001O00000000000M3C=C=B>oN_A6\\?Cfik1"}, "image_id": 201, "id": 2949}, {"iscrowd": 0, "category_id": 1, "bbox": [403.0, 199.0, 53.0, 84.0], "area": 3054, "segmentation": {"size": [512, 512], "counts": "fhY63^??@`001O00000001O00000000000000D<@`0A`0M200000H8000000000010O000000000000K500001O00000001O00000000I7A?@`0A?AWjk0"}, "image_id": 201, "id": 2950}, {"iscrowd": 0, "category_id": 1, "bbox": [468.0, 205.0, 44.0, 94.0], "area": 3022, "segmentation": {"size": [512, 512], "counts": "TXZ7;X?>A>O10000000000000000000000I8G80000]B\\Nk_O`A`0a>\\ObAe0]>XOfAh0j>O01O01O01O01O01O010O01O01O01O01O01O01O010O01O01O01O01O01O01ON3M2M3M4L3N3L3MZhh6"}, "image_id": 201, "id": 2952}, {"iscrowd": 0, "category_id": 1, "bbox": [277.0, 234.0, 58.0, 50.0], "area": 2050, "segmentation": {"size": [512, 512], "counts": 
"VhZ4:\\?:O100G9K5001O01O0000000000000001J500O1O1O1OM310000000001O00000000\\AWOQ>j0dA@\\>R100N201O0000000000000001O01O0000000000TOdA4\\>BnAZOVBFWXh2"}, "image_id": 201, "id": 2953}, {"iscrowd": 0, "category_id": 1, "bbox": [104.0, 239.0, 78.0, 52.0], "area": 2908, "segmentation": {"size": [512, 512], "counts": "lXd11b?=J600001O00000001O000H8H800000000001O01O000000000000000K5L5O000001O000000000001O0L4000000001O000000014K0000000000000000010O00000000000000000VOaA3`>KaA1c>O^AEm>;<00O1FchT5"}, "image_id": 201, "id": 2954}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 269.0, 70.0, 66.0], "area": 2674, "segmentation": {"size": [512, 512], "counts": "Y9d0Z?2M3N3L3N3M2M4M2M3N3M2N3O01O01O01L3N3O010O01O01O010O010O00010O010O01O01O010O010O01O01O010O01OlNcAe0^>XOdAh0\\>UOhAk0X>ROjAn0d>10O010O00010O010M2M4M2N2M4M2M4M2N2M4M_fl6"}, "image_id": 201, "id": 2955}, {"iscrowd": 0, "category_id": 1, "bbox": [71.0, 287.0, 18.0, 16.0], "area": 208, "segmentation": {"size": [512, 512], "counts": "WiS12k?4M3M3N2O2N0000O100000001N2O2M2M3LnVc6"}, "image_id": 201, "id": 2956}, {"iscrowd": 0, "category_id": 1, "bbox": [95.0, 305.0, 92.0, 55.0], "area": 3548, "segmentation": {"size": [512, 512], "counts": "dj_1:\\?:01O000000000Db100000000001O00000001O0000000000UOgAOY>DTB00000001O01O000000000000K5CmUR5"}, "image_id": 201, "id": 2957}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 345.0, 28.0, 84.0], "area": 1142, "segmentation": {"size": [512, 512], "counts": "U;X2e=010O0100O3HYBUNi=h1ZBUNj=h18M4M2M3N3L3N3L3N2M4L3N3L3N2M4M2M4M2M3NQea7"}, "image_id": 201, "id": 2958}, {"iscrowd": 0, "category_id": 1, "bbox": [267.0, 351.0, 85.0, 60.0], "area": 3546, "segmentation": {"size": [512, 512], "counts": "ZlU49]?;0O0000000000K5F:G91O01O0J9H50000001O0000000N2000000000001O000001O000000000001O00000001O0000000000kNiAa0X>SOSBm0`>00001O01O0000000000M30000010O000000000000000001O01O0000000O1F:E;Eed_2"}, "image_id": 201, "id": 2959}, {"iscrowd": 0, "category_id": 1, "bbox": [360.0, 365.0, 47.0, 97.0], "area": 3036, "segmentation": {"size": [512, 512], "counts": "X^d53a?<00000000E\\1000010O00000000000000010O000M3000000000001O01O00000000000001O01eNcAQ1f>00000000G9G91OM4F9GZdT5"}, "image_id": 201, "id": 2961}, {"iscrowd": 0, "category_id": 1, "bbox": [419.0, 370.0, 56.0, 59.0], "area": 2357, "segmentation": {"size": [512, 512], "counts": "Qla63c?:K51O00n@Ac>`0RAJ4H`>V10000000001O0fAfNo=Y1gAQOY>[101O00000000000001O00M4O0000000000000000010O03M00000O1G9F;H70000000000001O0001I6F:FbTb0"}, "image_id": 201, "id": 2962}, {"iscrowd": 0, "category_id": 1, "bbox": [489.0, 374.0, 23.0, 79.0], "area": 1388, "segmentation": {"size": [512, 512], "counts": "lld73b?;E;D6PAKl>8UAGi>;WAEg>>XACg>=ZABf>?WADi>;TAHl>g00O00010O01MO11lAPOY=Q1eBRO[=n0aBUO_=k0_BXOa=g0RBPO5;i=k0TBYOk=g0SB[On=[1010O010O01O01O010O010O00010O0O2M2YOlA@V>=nA_OV>>lA@V>>mA_OV>LaA9M`A94eAH^>6h0M2M_Sk6"}, "image_id": 201, "id": 2964}, {"iscrowd": 0, "category_id": 1, "bbox": [71.0, 421.0, 56.0, 56.0], "area": 1905, "segmentation": {"size": [512, 512], "counts": "UnS12a?=1O00000000L4I:M0000000000000N2000001O000000N20001O00000000I7O2O01OVA\\OX>V100000000000000000000000000005L0`NgAW1a>kNhAb0k>00N2_OZRP6"}, "image_id": 201, "id": 2965}, {"iscrowd": 0, "category_id": 1, "bbox": [260.0, 421.0, 77.0, 44.0], "area": 2524, "segmentation": {"size": [512, 512], "counts": "\\^R42d?;G800000000000000K5F;I600000000000000010O0000000M3M3001O0001O00000000000001O0001O00000000000001O0001O000000000nN[Ai0n>001O000001O000000000001O000001O000L4G9FfRg2"}, "image_id": 201, "id": 2966}, {"iscrowd": 0, "category_id": 1, 
"bbox": [128.0, 427.0, 52.0, 60.0], "area": 1845, "segmentation": {"size": [512, 512], "counts": "n^P27_?:E;F:F:N201O000001O000000000001O0M3000K6E:M3001O000000I7N210O000000I7O100000010O0000J6L4000000F:01L3FmbU5"}, "image_id": 201, "id": 2967}, {"iscrowd": 0, "category_id": 1, "bbox": [472.0, 473.0, 40.0, 39.0], "area": 1225, "segmentation": {"size": [512, 512], "counts": "a_\\71b?=O100001O00K6I600000000VAWO]>V100000000001O000000000000000000000lNXAP1l>01O00000000000000"}, "image_id": 201, "id": 2968}, {"iscrowd": 0, "category_id": 1, "bbox": [253.0, 475.0, 44.0, 37.0], "area": 1306, "segmentation": {"size": [512, 512], "counts": "kon35g?41OF:M300000000L4F:1O000000000000000000001OL40000001O000000000000000000001O00000G9E;E^Q[3"}, "image_id": 201, "id": 2969}, {"iscrowd": 0, "category_id": 1, "bbox": [364.0, 480.0, 31.0, 32.0], "area": 904, "segmentation": {"size": [512, 512], "counts": "i_f57_?:F:K500000000000000000000001O000000000000000000000000001H7EYQj1"}, "image_id": 201, "id": 2970}, {"iscrowd": 0, "category_id": 1, "bbox": [410.0, 483.0, 40.0, 29.0], "area": 917, "segmentation": {"size": [512, 512], "counts": "e_]62b?j0000000001O00000000000000000000001O00003M00000000000000000_Ol@6_?00000000000001L_`n0"}, "image_id": 201, "id": 2971}, {"iscrowd": 0, "category_id": 1, "bbox": [307.0, 486.0, 31.0, 26.0], "area": 611, "segmentation": {"size": [512, 512], "counts": "foi4:e?10000000000000000G9J600000000001O000000000000000000001O0K5ESaf2"}, "image_id": 201, "id": 2972}, {"iscrowd": 0, "category_id": 1, "bbox": [190.0, 0.0, 52.0, 55.0], "area": 1743, "segmentation": {"size": [512, 512], "counts": "VQo22l?2N3L3N3M2ACXAa0e>AYAa0e>BXA`0e>CZA?c>D\\AF^A;NXO_>?cA<\\>DbA>^>c0O00001O001O001O00001O001O0000N2M3N2N2M3N2N2M3N2N2M3N2N2M3N2M3N2N2M3N2N2M3NRPW4"}, "image_id": 202, "id": 2973}, {"iscrowd": 0, "category_id": 1, "bbox": [404.0, 54.0, 70.0, 100.0], "area": 3694, "segmentation": {"size": [512, 512], "counts": "hRZ61m?2ANm@6P?Ln@6o>Nm@6P?Ln@6P?>M4M2M4O010lAkN[=U1bBoN]=Q1aBQO`=o0\\BTOd=l0ZBWOf=i0WBYOi=g0TB]Ok=c0SB_On=Y1010O01cBRNg^NmA_1[>M3N3L3N3M2M4M2M3N3L3N3M2M3N3L3N3MUmb0"}, "image_id": 202, "id": 2974}, {"iscrowd": 0, "category_id": 1, "bbox": [340.0, 64.0, 23.0, 27.0], "area": 336, "segmentation": {"size": [512, 512], "counts": "dRZ52l?2M4X@Jb?S1000000000000000001O0000000000000000000000POlA1W>DTBNZ^h2"}, "image_id": 204, "id": 2991}, {"iscrowd": 0, "category_id": 1, "bbox": [347.0, 0.0, 46.0, 66.0], "area": 2055, "segmentation": {"size": [512, 512], "counts": "l`]59]?:J61O0000000001O000K5J600000000N2gASO_=n0VB\\Oj=]100000001O000000000000000000001O00000000\\NWBQ1i=lN\\Bg0o=YORBDiA1a>O`AFj>:>JV`k1"}, "image_id": 204, "id": 2992}, {"iscrowd": 0, "category_id": 1, "bbox": [409.0, 0.0, 41.0, 68.0], "area": 2214, "segmentation": {"size": [512, 512], "counts": "e`\\6=W?=F:O1O1O2N1O1O1O1O1OO100OmAkN]=T1WBYOi=_101O0000000000000000000000001O00000000000000oN]BEc=NjB2V=BVC4`>CQon0"}, "image_id": 204, "id": 2993}, {"iscrowd": 0, "category_id": 1, "bbox": [483.0, 0.0, 29.0, 15.0], "area": 364, "segmentation": {"size": [512, 512], "counts": "[`a72c?;0000000000000000000001O00000000000000000000001OJ6000000"}, "image_id": 204, "id": 2994}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 7.0, 40.0, 91.0], "area": 1504, "segmentation": {"size": [512, 512], "counts": "d0^2b=01M2N2N3L3N3L3TNoAg1V>M3N3L2OO010O10FdAQO]>o0eAoNZ>h0cA[Og>d0\\AZOd>f0_AVOe>g0;1O01O010O010O00010O0N3M2M3N3L3N3L3Nco[7"}, "image_id": 204, "id": 2995}, {"iscrowd": 0, "category_id": 1, "bbox": [33.0, 30.0, 55.0, 56.0], "area": 1751, 
"segmentation": {"size": [512, 512], "counts": "la`02l?2N3L3N3M2M3O2O010M2N2M4M2N3L3N201O010]AjNZ>V1dAlN]>S1`APO`>Y10O01O01O0O10N2O1010O010O00010O0010OQObA<_>AeA=[>@jA?U>_OoA?R>]OQBd0n=ZOUBe0c>010ON2N3L3M3M4M2MSnc6"}, "image_id": 204, "id": 2996}, {"iscrowd": 0, "category_id": 1, "bbox": [483.0, 35.0, 29.0, 59.0], "area": 1289, "segmentation": {"size": [512, 512], "counts": "gba71m?3L3ROL_A115^>L_A107^>J`A2O6^>L_A107^>m0M3O2O010O010O0001N1N3N01N3L3N210O0010O010OjN"}, "image_id": 204, "id": 2997}, {"iscrowd": 0, "category_id": 1, "bbox": [100.0, 47.0, 63.0, 59.0], "area": 1980, "segmentation": {"size": [512, 512], "counts": "aRb13k?3L3O110O0N3L3N2M3M010O0TA\\O`>f0^A]O^>g0^A\\O`>f0^A]O^>T1O110O0010O0010O0010O0010O0010O0010O00fNbAS1^>jNeAU1c>010O0010O00N30O010N1M3N3L3N3L3N2O20O01M2M3N3L3N3L3Nk]^5"}, "image_id": 204, "id": 2998}, {"iscrowd": 0, "category_id": 1, "bbox": [154.0, 77.0, 69.0, 90.0], "area": 2829, "segmentation": {"size": [512, 512], "counts": "iT]21m?3L3N2N3L3N3M2M4M2N2M4M2N3L3N3M2M3N3M2M4M2N3L3N2N3L3O20O010O0O2L3N1OO02O3M2M4M2N0O1000O10O101O2M4M2N2M4M2N3L3N3M2M3010OO2M2N3L3N2N3L3N3M\\]`4"}, "image_id": 204, "id": 2999}, {"iscrowd": 0, "category_id": 1, "bbox": [15.0, 83.0, 52.0, 55.0], "area": 1627, "segmentation": {"size": [512, 512], "counts": "dc71m?2N3L3N3L3JCg@`0U?7N2N3L3N3M2O2O01O010O010N100010O010XAkNa>\\1M2010O00010O010O0010O00QOaA>_>_OfA>Z>@kA=V>_OmAb0R>\\OQBc0P>ZORBg0c>01N1N3L3N2M4M2M4M]\\n6"}, "image_id": 204, "id": 3000}, {"iscrowd": 0, "category_id": 1, "bbox": [216.0, 100.0, 68.0, 107.0], "area": 3416, "segmentation": {"size": [512, 512], "counts": "hU\\32l>ORB5j=NTB4j=NSB6i=NTB4j=NSB5j=OSB4j=NSB5j=NTB5l=KPB8P>HnA:5ROb=e0UB=5QOg=e1WB^Nh=m11O01O001M2M3N3L3N3L30010O01N1M3N3L3N3L3M3N3L10O010O010O010O010O03N2M4M2M4M2M3N3L3N3L3N2M4O001M2N2M4M2M4M2M3Nfla3"}, "image_id": 204, "id": 3001}, {"iscrowd": 0, "category_id": 1, "bbox": [90.0, 108.0, 59.0, 59.0], "area": 1896, "segmentation": {"size": [512, 512], "counts": "WT]11m?2M4M2M3N3M2M4QA\\O^>f0`A\\O]>h0`A[O]>g0`A\\O^>g0_A\\O_>S11O01O010O01O01O010O010O01O01O010OfNaAS1`>iNdAV1c>10O01O01O01M2010O010M2M3N3M2M4M2M3O20O01O0N2M4M2M4M2N3Lo[e5"}, "image_id": 204, "id": 3002}, {"iscrowd": 0, "category_id": 1, "bbox": [275.0, 125.0, 52.0, 75.0], "area": 2260, "segmentation": {"size": [512, 512], "counts": "[eY41l?3N3L3i@Ge>m0M2M4M2M3M4M2M4M2M3N3O000010O010O00010O010O00010nMXBg1k=VNXBh1R>L3oNhA9Y>EjA;V>AmA?S>_OoAa0R>[ORBe0m=YOUBg0k=VOYBi0_>N2M4L3N3L3N2M4M2M3NkZl2"}, "image_id": 204, "id": 3003}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 139.0, 44.0, 57.0], "area": 1383, "segmentation": {"size": [512, 512], "counts": "[4[1e>00jN^An0c>oN_AR1h>O01O010O01OXAPO`>Q1]AQOc>W10O0010O010OM3N3N101O01O010OSOUAe0l>WOXAh0h>VOZAk0m>010O010O0001O0N3M2M4M2M3N3L3N3MfjY7"}, "image_id": 204, "id": 3004}, {"iscrowd": 0, "category_id": 1, "bbox": [340.0, 159.0, 70.0, 92.0], "area": 3275, "segmentation": {"size": [512, 512], "counts": "^VZ51l?3N3M2QAIR>:lAHR>;kAGS>;jAIR>;kAGS>;kAHQ>;kAHR>:kAIR>T1N3M2M4M2ZBmM]=U2aBmM`=Z2O01O01O010O010O01O01O01N1N3L3N3M2M3N3M2M4M2N3L10O01000O010O11N4M2N3L3N3L3N210O010N1N2N3L3N3L3N3M2M3N3Mjjb1"}, "image_id": 204, "id": 3005}, {"iscrowd": 0, "category_id": 1, "bbox": [78.0, 186.0, 87.0, 51.0], "area": 2486, "segmentation": {"size": [512, 512], "counts": "iVW13j?3N3L3M3N3L3N3L301O01O010O010O01O01O010O010O01O010O01N1N3M2N20E@WA`0g>CWA=i>FTA:l>IQA7n>>0O10O012M3N2N2M10O010O3N2M13L3N2M3N000O010O0100O01000O010O3N2M4M3FTAYOo>d0TAZOo>c0:M0O2O0O2O0O2O0O2O001N101N1N3M_Y]5"}, "image_id": 204, "id": 3006}, {"iscrowd": 0, 
"category_id": 1, "bbox": [391.0, 207.0, 49.0, 78.0], "area": 2214, "segmentation": {"size": [512, 512], "counts": "SgS61m?2M3N3j@H`>:^AH_><^AFc>9ZAKe>6XALi>3TA1?Df=;iA3?Dh=l0UBWOl=i0QBZOn=f0oA]OR>c0kA@T>`0jABU>U1N3L3N3L3O2O01O01O010O01O01O010O01O0YOXBWOg=g0[BYOf=c0]B^Ob=`0aB_O`=>bBB^=2VBA`0=Z=OYBA?`0X=L\\BA?d0T=IXC6iN3LhgS1"}, "image_id": 204, "id": 3007}, {"iscrowd": 0, "category_id": 1, "bbox": [299.0, 211.0, 13.0, 14.0], "area": 109, "segmentation": {"size": [512, 512], "counts": "lfe42l?3M2N3M2O2O01O0O2M3M2N3M[iS3"}, "image_id": 204, "id": 3008}, {"iscrowd": 0, "category_id": 1, "bbox": [171.0, 216.0, 60.0, 57.0], "area": 1709, "segmentation": {"size": [512, 512], "counts": "ege22l?3M2M4M2O2O00010O010O010M2M3N3M2M4M2O2O01O010O01O010N1N2M4M2N3O001^AdN_>_1O010O01O01O010O01UObA3]>JgA5Z>HhA8X>FkA:T>CoA=R>@PBa0o=\\OUBc0l=ZOVBg0b>O0N3M2M3N3M2M4M2NVX\\4"}, "image_id": 204, "id": 3009}, {"iscrowd": 0, "category_id": 1, "bbox": [231.0, 242.0, 60.0, 56.0], "area": 1749, "segmentation": {"size": [512, 512], "counts": "bhc32k?4M2M3N3M2M4N100010O010O01O0M3N3L3N3N11O01O010N1N3L3N2O20O010O02N010O010O01O01O04MO01UO^A7a>FbA:^>DeA;\\>AgA`0X>^OjAb0W>[OlAe0g>01O001M2M3N3L3N3M2M3N\\W^3"}, "image_id": 204, "id": 3010}, {"iscrowd": 0, "category_id": 1, "bbox": [427.0, 256.0, 78.0, 97.0], "area": 3050, "segmentation": {"size": [512, 512], "counts": "bje61l?3N2M4M2N3M20010O010O001d@@W??g@CW?e0L3N2N3L3N3_OjNSBY1k=jNRBX1k=kNRBU1o=mNoAT1o=POnAR1P>a0L3O2O01O01O0N3L3N3L31M2N3L3N3M2M3N3L3N3L3N2M4M2N1N10O010O010O10O100O3N2M4M2M4M2Om@[On>e0o@]OQ?i0010M2In@^OU??7N2M4M2M4M2MlW3"}, "image_id": 204, "id": 3011}, {"iscrowd": 0, "category_id": 1, "bbox": [301.0, 274.0, 95.0, 63.0], "area": 3287, "segmentation": {"size": [512, 512], "counts": "Sif43k?2N3L3d@Go>f0M3010O010O00010O010O010O00010O010O00010O010O00N3M201O00010O010O010O00010N1N30O0010O010O00010O010O001O0N210O010O00010O010O01O01OdN_AY1d>10O0010O0010O010O000iN[AS1i>M3N3L3N3M2M4M2M3N3L3N3M2Mcfi1"}, "image_id": 204, "id": 3012}, {"iscrowd": 0, "category_id": 1, "bbox": [497.0, 297.0, 15.0, 88.0], "area": 752, "segmentation": {"size": [512, 512], "counts": "dkh72l?2XNMbB254V=McB155U=LcB353U=NcB155U=LdB244V=MbB255U=LdB155U=McB154LXOW=[1jBALWOY=Y1hBHX=`110O0eF"}, "image_id": 204, "id": 3013}, {"iscrowd": 0, "category_id": 1, "bbox": [262.0, 5.0, 59.0, 68.0], "area": 1951, "segmentation": {"size": [512, 512], "counts": "eQS42m?2M3N2N1O2M3`@CZ?b0O2M3N2KWOPAj0o>5M3N2N2N1N3N2NO10O10O1000O10002N2M3N00000O01000001N3N1O2M3N2N2N1N3N2N2M2O2N2YOl@?V?^Om@?\\?N1O2M3N2N2M2OV_o2"}, "image_id": 205, "id": 3014}, {"iscrowd": 0, "category_id": 1, "bbox": [293.0, 34.0, 65.0, 76.0], "area": 2196, "segmentation": {"size": [512, 512], "counts": "nbb41n?2N1O2M3N2N2N1O2M3N2N2N2M2IWOVAk0h>WOVAk0h>8N2M3CfNQB[1n=gNPBZ1n=iNPBW1P>kNmAV1S>;000O0100000O010000000O011O2N2N1N3N2N2N2M2O2N2N2N2oNSAm0Q?O100N1O2N2M3N2N1O2M3N2N2N2N1N3NUn\\2"}, "image_id": 205, "id": 3015}, {"iscrowd": 0, "category_id": 1, "bbox": [195.0, 82.0, 68.0, 59.0], "area": 1902, "segmentation": {"size": [512, 512], "counts": "kcQ31n?2M3N2N2N2N2N1O2M3N2N2N2N1O0O100000002N2N2O1N2N1O2N2N2N2M3N2N1O000000O2O2N2N2N2N1O2M3N2N1ISAZOm>f0UAXOk>g0XAWOh>i070O10000000O2O2N2N2N2N2N2M3N1O2N2N2N2NT]l3"}, "image_id": 205, "id": 3016}, {"iscrowd": 0, "category_id": 1, "bbox": [17.0, 108.0, 74.0, 81.0], "area": 2843, "segmentation": {"size": [512, 512], "counts": 
"Ve82m?2M3N2N1O2N2M3N2N1O2N2M3N2KUORAm0l>4N3N2N2EgNlA[1R>gNlA[1Q>hNlAZ1S>hNkAZ1S>;N2N2M2O00000O010000000O01000001O2M2O2N2N2N2M2O2N2N2M3N2N1O2O1O1nNWAl0i>ROYAn0m>0O01N2M3N2N2N1N3N2N2N2N2M2O2N2N2Nc[b6"}, "image_id": 205, "id": 3017}, {"iscrowd": 0, "category_id": 1, "bbox": [409.0, 123.0, 72.0, 64.0], "area": 2369, "segmentation": {"size": [512, 512], "counts": "hd\\62m?2M3N1O2N2N2N2M3N2N1O2N2N2N2M3N2N2N1O2XAjNc>[1N1N3N2N2N200000O10O100000000000O1N000000001O2N2M3N1O2N2N2N2N2M3N2N11000UOn@g0U?01N20000O1M3N2N1O2N2N2N2M3N2NR[?"}, "image_id": 205, "id": 3018}, {"iscrowd": 0, "category_id": 1, "bbox": [270.0, 143.0, 60.0, 59.0], "area": 1676, "segmentation": {"size": [512, 512], "counts": "gUW41n?1O2N2N2M3N2N2N1O2M3N2N2N2N2N1N2O00002N2N2M2O2N2N2N2N2M10O10000000O03N2N2N2N2N1N3HWAVOk>h0WAVOk>h0VAWOk>h0600O10002N2N2N1N3N2N2N2N2N2M2OUkj2"}, "image_id": 205, "id": 3019}, {"iscrowd": 0, "category_id": 1, "bbox": [67.0, 157.0, 71.0, 76.0], "area": 2770, "segmentation": {"size": [512, 512], "counts": "gfQ12m?1N3N2GJe@8Y?Je@8Y?Je@8X?9H[OTAg0Z>YOnA3Ff0Z>YOnA3Ff0Z>YOnAQ1P>QOnAQ1P>QOnAQ1o=QOoAQ1P>QOnAQ1P>a0N2N2N2M2001N2M3N2N2N000O01000000000O3N2N2N1O2M3N2N2N2N2N1N3N2N2N000O101100000O0O2N2N2N2N2N1N3N2N2N2N2M3N1O2N\\jj5"}, "image_id": 205, "id": 3020}, {"iscrowd": 0, "category_id": 1, "bbox": [375.0, 167.0, 54.0, 64.0], "area": 1757, "segmentation": {"size": [512, 512], "counts": "Xfk52m?2N2N2N2M3N1O2N2N2N2M3N2N1O2N2N2N2M3N2N1O2N2O100O1M3N1O2N2N2N2M3O10O1M2O2N2fNjAg0X>WOjAg0X>VOjAi0X>UOjAh0Y>VOiAb0H\\O`>1jAa0H\\O`>1jAa0^>]OdAa0o>M2O2N2N2N2N2M3NhYY1"}, "image_id": 205, "id": 3021}, {"iscrowd": 0, "category_id": 1, "bbox": [284.0, 0.0, 24.0, 9.0], "area": 184, "segmentation": {"size": [512, 512], "counts": "QP^45j?101O0000000000001O0000000000001O000000000000LTPV3"}, "image_id": 206, "id": 3022}, {"iscrowd": 0, "category_id": 1, "bbox": [408.0, 0.0, 39.0, 16.0], "area": 399, "segmentation": {"size": [512, 512], "counts": "ZP\\63g?6L400000001O0000000000001O000000000000001O000000K5K5M30000000000000000001ONR`P1"}, "image_id": 206, "id": 3023}, {"iscrowd": 0, "category_id": 1, "bbox": [484.0, 0.0, 28.0, 12.0], "area": 279, "segmentation": {"size": [512, 512], "counts": "RPb77g?20000000001O0000000000001O00000000001O0000N2O100000000"}, "image_id": 206, "id": 3024}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 124.0, 22.0, 49.0], "area": 522, "segmentation": {"size": [512, 512], "counts": "l3a1`>N2I_AlNc>R1_AlNc>R17N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N`kd7"}, "image_id": 206, "id": 3025}, {"iscrowd": 0, "category_id": 1, "bbox": [9.0, 143.0, 84.0, 76.0], "area": 2944, "segmentation": {"size": [512, 512], "counts": "ie41n?2N3M2N2N2N2N2N2c@@W?f0N2N2N2N2N2N2N2N2N2N2N2N2N3M2N2N2N2N2N1O000002N2N2N2N2N2N1O0000000000000KcAhN]>X1eAfN[>Z150000000000000001O2N2N2N2N2N2N2N1O00000000002N2N2N2N2N2N2N2N2N2N2N2N2N2OQ[a6"}, "image_id": 206, "id": 3026}, {"iscrowd": 0, "category_id": 1, "bbox": [58.0, 173.0, 64.0, 91.0], "area": 2696, "segmentation": {"size": [512, 512], "counts": "\\Wm02m?2N2N2N2N2N3M2l@@e>b0YA@e>b0YA@e>b0YA@e>b0YA@R>I[Bi0A@R>I[BU1d=mNZBS1f=oNXBQ1h=QOVBo0j=SOTBm0l=UORBk0`=hNhB?Fi0b=jNfB?Fg0d=lNdBg1\\=[NbBe1^=]N`Bc1`=_N^Ba1b=aN\\B_1d=>000000001O2N2N2N2N2N2N1O00001O2N2N2N2N2N2N2mN_Ad0c>ZO_Ad0c>ZO_Ad0c>ZO_Ad0c>ZO_Ad0c>ZO_Ad0P?N2N2N2N2N2N2N2N2N2N]iR6"}, "image_id": 206, "id": 3027}, {"iscrowd": 0, "category_id": 1, "bbox": [115.0, 215.0, 65.0, 96.0], "area": 3176, "segmentation": {"size": [512, 512], "counts": 
"ogi13k?3M2O2M2N3M2N3M2N3M2N3M2N3M5K2N3M2N3N1N3M2\\BZNnl0cARO^>l0dARO_>k0O_AfN\\>`1O01O01O0dAbNW>c100010O00010O0001N1M3M4N10010O00010O00010O00010OO1M3M4L3M3bNcAf01De>8_ADd>9_ADd>9_ADe>8c0M3M4L^fY7"}, "image_id": 206, "id": 3029}, {"iscrowd": 0, "category_id": 1, "bbox": [47.0, 303.0, 93.0, 73.0], "area": 3835, "segmentation": {"size": [512, 512], "counts": "njg01m?2M4L300010O01O01O000N3L3NZO0aAO]>3aAO]>4`AO]>3aAO]>4`AO]>3aAO]>4`AO]>3aAO]>4`AO]>3aAO_>j00O010O010O010O010OfAaNS>`1jAcNV>\\1hAfNX>b10O010O010O010O010O010O010O010O010O01M2N3M2O2O01M2N3M2N3M201O010O010O010O01O0N3M210O01M2N3M2ZOcAI^>4eALZ>4fAJ]>5dAH^>9aAEb>:_ACc>=]AAf><]AAe>=]AAf>1SA3]?Kf@1[Uj5"}, "image_id": 206, "id": 3030}, {"iscrowd": 0, "category_id": 1, "bbox": [29.0, 361.0, 85.0, 90.0], "area": 3347, "segmentation": {"size": [512, 512], "counts": "jk>1n?2M3N1O2M3N2N101O1c@BW?>g@DW?d0N1O2N2N2N1O2N2N2O0100000OO2N2M3N2N11000O10O1000OUAUOb>k0\\AWOc>j0[AXOe>h0XA[Oh>n010N2N2M2O2kAcNd=_1ZBcNf=]1XBeNg=[1WBgNj=Y1TBiNl=W1RBkNn=d1O10O1000O10O1000O10O1000N2M210O10O1N2M2O2POnAOT>OnANU>0mANT>0oANS>_OfA;93T>_OfA<74Z>JiA4Y>JiA3Z>KhA3Z>JhA5Z>IhA5Y>JiA3T?N1OfbV6"}, "image_id": 206, "id": 3031}, {"iscrowd": 0, "category_id": 1, "bbox": [2.0, 436.0, 68.0, 76.0], "area": 2582, "segmentation": {"size": [512, 512], "counts": "Q^11n?2N2N2]@KX?6g@LW?6g@LV?7g@LW?`0N1O2000000O10O100000N2M3N2N1O2N2N2M3000O10O10000eAdNQ>\\1lAgNS>Z1kAhNU>W1iAlNW>^110N2M3N2M201O100000O10O1000O10O10O1`NQBE0Q1P>WOSBE4n0j=\\OTBD8j0f=@cB>_=_OcB?`=_ObB?_=@cB>_=_OdB?`>M3N1O2N2M3N2Nd`l6"}, "image_id": 206, "id": 3032}, {"iscrowd": 0, "category_id": 1, "bbox": [207.0, 438.0, 76.0, 74.0], "area": 2814, "segmentation": {"size": [512, 512], "counts": "]nW31n?2M3N2N1O2N2M3N2N2N1N3N2N2N2N1N3N2N2N2O1O01N2M3O1000O10O1000O1M3N2N1oAgN[=Z1cBhN]=X1aBjN_=V1_BlN`=U1^BmNb=R1\\BPOe=P1YBROg=n0WBTOi=l0UBUOk=l0RBVOo=]11O1O1O1O001O1O1O1O001O1O1O1O1O001O1O1O1B`AZO`>d0cA[O^>c0cA]O^>a0dA^O]>M^A:7F^>N]A98G]>N]A98G\\>O]A98Gf>7c0N2M3NmPb3"}, "image_id": 206, "id": 3033}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 476.0, 40.0, 36.0], "area": 810, "segmentation": {"size": [512, 512], "counts": "l>T1m>O1O001O1O1O1O001O1O1O1O001O1O1O1O001O1O1O1O001O1O1O1O1O001O1O1O1O001O1O1O1N1OV`[7"}, "image_id": 206, "id": 3034}, {"iscrowd": 0, "category_id": 1, "bbox": [166.0, 492.0, 66.0, 20.0], "area": 662, "segmentation": {"size": [512, 512], "counts": "o_c21n?1O1N2O1O1O1O1O1N2O1O1O1O11O1O1O1O001O1O1O1O001O1O1O1O001OLOZ@0e?2[@Md?4\\@Lb?7\\@Jc?:O1O1N2O1O1O1O11O001O1O1O1O001O1O1O1O001O1O1O1O001O1O1O001O1O1OQ`[4"}, "image_id": 206, "id": 3035}, {"iscrowd": 0, "category_id": 1, "bbox": [12.0, 0.0, 53.0, 33.0], "area": 971, "segmentation": {"size": [512, 512], "counts": "ZP62m?2N1O2M3N2N2N2N2N101O1O1O1O1O10OO2O1O1O1O1O1O001O1ON2O1O1O1O1O1N2O1O1O1O1O1N2O1O1O1O1O1N2O1O1O1O1O1N2O1OQ`o6"}, "image_id": 207, "id": 3036}, {"iscrowd": 0, "category_id": 1, "bbox": [63.0, 0.0, 65.0, 67.0], "area": 2110, "segmentation": {"size": [512, 512], "counts": "\\ao01m?2N3N1N3M3N1N3M2O2M3N1N3M3N1010O0100O0BPOmAP1Q>ROoAm0o=VOPBk0n=VOSBj0j=YOVBf0FROl=:^Be0DSOl=:_Bi0a=XO]Bj0b=VO\\Bl0d=UOYBl0h=UOVBj0l=VORBk0o=a02N2O1N2N2O1N2N2O1N2N2O1N2O1N2N2O1N2N2O1N2N2O2M2N3N2M2N3N1N3N2M2Nloo5"}, "image_id": 207, "id": 3037}, {"iscrowd": 0, "category_id": 1, "bbox": [241.0, 0.0, 67.0, 22.0], "area": 779, "segmentation": {"size": [512, 512], "counts": "P`h31o?00001O00001O00001O00003M00001O00001O0000001O00001O00001O00001O00001O00001O0000001O00001O00001O00001O00001O01O00010O0001OO2L3M3M4L3MooU3"}, "image_id": 207, "id": 3038}, {"iscrowd": 0, "category_id": 1, 
"bbox": [344.0, 0.0, 10.0, 4.0], "area": 25, "segmentation": {"size": [512, 512], "counts": "PP\\51o?001O001O00001O00NRP_2"}, "image_id": 207, "id": 3039}, {"iscrowd": 0, "category_id": 1, "bbox": [375.0, 0.0, 73.0, 56.0], "area": 2087, "segmentation": {"size": [512, 512], "counts": "o`k51n?3N1N3M2O2M2N3N1N3M3N1N3M2N3N1N2N01O01O01O01O01O01O01O01O00010O00010O00010O000MmN[AS1e>nNYAR1h>2100O1001O2N1O0000O1O100O1O102M2N3N1N3M2O2M2N3N1N3M2O2M2N3N2MXoo0"}, "image_id": 207, "id": 3040}, {"iscrowd": 0, "category_id": 1, "bbox": [302.0, 8.0, 27.0, 29.0], "area": 502, "segmentation": {"size": [512, 512], "counts": "nPg41l?3L4M4L3M3M4L300010O00010O00010O00010O0O1M4K4M3M4L3Me_k2"}, "image_id": 207, "id": 3041}, {"iscrowd": 0, "category_id": 1, "bbox": [204.0, 15.0, 102.0, 65.0], "area": 2831, "segmentation": {"size": [512, 512], "counts": "YQV32k?3N3L3N2M4M2N3L3N2M4N110O00010O010O0010O0010O0010O0010O010O00010O010O00010O01N11O010O01O01O01XOo@`0P?^ORAb0o>ZOUAf0R?01O01O2O0O010O01O01O0O2M2N2O2O010O01O01O010O010O01O01O010O01O01O010O01O01O010O01O0O1N3L3N3L3N3L3N2N3L3N^nV3"}, "image_id": 207, "id": 3042}, {"iscrowd": 0, "category_id": 1, "bbox": [453.0, 25.0, 59.0, 49.0], "area": 2035, "segmentation": {"size": [512, 512], "counts": "YaR73m?8H7I7H8I1O0000O10O100000000O01000000000O10O10000000O100O01000O100O10000O100O01000O1000O010O01000O010O010O10O10O2O1NVO"}, "image_id": 207, "id": 3043}, {"iscrowd": 0, "category_id": 1, "bbox": [408.0, 46.0, 34.0, 29.0], "area": 606, "segmentation": {"size": [512, 512], "counts": "TR\\63i?4L4L5K4L4N201O01O00010O000010O0001O0O1O1N3N1N2O2N1N2O2N1N2O2M2O1O[nR1"}, "image_id": 207, "id": 3044}, {"iscrowd": 0, "category_id": 1, "bbox": [189.0, 62.0, 96.0, 63.0], "area": 2807, "segmentation": {"size": [512, 512], "counts": "ibn21m?3L3M3N3L3M4M2M3M4M2O20O010O00010O010OO2L31O01O010O010O01O01O010OO2O00010O010O010O000VOVA?j>]OYAc0g>[O\\Ae0o>01O01O010O2OO010O01O01O010O01M2M3N3L310O0010O0010O010O0010O0010O010O0010O001N1N3L3VOn@d0T?ZOn@d0Y?L3N3L3N3M2M3NQ]a3"}, "image_id": 207, "id": 3045}, {"iscrowd": 0, "category_id": 1, "bbox": [336.0, 64.0, 77.0, 56.0], "area": 2404, "segmentation": {"size": [512, 512], "counts": "\\SX51l?3M3L5L3M3M4L3M3M4L3M3M4L3M3O2O01O01O01O01O01O01O01M2M3M40O00010O00010O00ROZAb0g>ZO\\Ag0c>VO`Aj0`>SOdAl0h>1O01O01O01O01O01L30010O00010O00010O00010O00010O00010O00010L3M3M4L3M3M4LS]a1"}, "image_id": 207, "id": 3046}, {"iscrowd": 0, "category_id": 1, "bbox": [418.0, 101.0, 60.0, 78.0], "area": 2283, "segmentation": {"size": [512, 512], "counts": "\\Ta63j?4M2M3M4M200010O0001O0O1M4^AYOh=h0UB\\Ok=c0RB@n=a0nABR>>dAZO1<[>R100010O00010OO1J_NjAe1R>6M3M4L3N210L2N03M3M4L3M4L3M3M4M2M3M4L300010ON2M4L3M3L5L3M3M4L3M3Mfl`0"}, "image_id": 207, "id": 3047}, {"iscrowd": 0, "category_id": 1, "bbox": [171.0, 112.0, 90.0, 54.0], "area": 2741, "segmentation": {"size": [512, 512], "counts": "Wde23j?3M3M4L3M3M4L3O110O0010O00010O00010O00010O010O00010O00M4L3N210O01O01O010O00010VAnNd>R1ZAQOe>U110O00010O01O01O01L3N2010O00010O00010O010O00010O00010O00010O01O01O010O00010OM3M4M2M3M4L3M4M2M3M4Lm[m3"}, "image_id": 207, "id": 3048}, {"iscrowd": 0, "category_id": 1, "bbox": [480.0, 114.0, 32.0, 81.0], "area": 1845, "segmentation": {"size": [512, 512], "counts": "PU`71m?3L3_@J31f>8TAJ21g>8UAI22f>a0VABh>m0L3N2M4M2M4M2N3L3N2M4M2M4M20010O010O01ON3N101ON3L3N_L"}, "image_id": 207, "id": 3049}, {"iscrowd": 0, "category_id": 1, "bbox": [363.0, 115.0, 21.0, 16.0], "area": 206, "segmentation": {"size": [512, 512], "counts": "jce53k?2M3N3O0010O01O01O01O010O0010O00001L3L4M[lo1"}, "image_id": 207, "id": 3050}, 
{"iscrowd": 0, "category_id": 1, "bbox": [0.0, 129.0, 25.0, 51.0], "area": 811, "segmentation": {"size": [512, 512], "counts": "Q4a1_>01O01O010O01O000M4M2M4M2M3N3L3N3L3N2N3L3N3L3N2Mi[c7"}, "image_id": 207, "id": 3051}, {"iscrowd": 0, "category_id": 1, "bbox": [317.0, 138.0, 76.0, 60.0], "area": 2186, "segmentation": {"size": [512, 512], "counts": "Wen41m?2M3N4O0O0O2M2M3N3M2M4N11O010O010O01O01O010O010O00010O01N1M4M2N2M4M2N3N1010O0010O010O00010O010O0010O0TO_A9b>CaA>^>@eA?\\>^OfAc0Y>ZOkAe0U>YOmAh0S>UOPBj0c>0010O010O00010O010O010YOm@=U?Am@VOhAj0W>YOfAg0[>\\ObAd0]>_O`Aa0a>`0M4M2M3N30O00010O010OM301O010O000M0100O3N3L3N2M4M2N3O01O0OO2N3L3N3L3N2M4L3N3L3N2M4M2M4MVki6"}, "image_id": 207, "id": 3053}, {"iscrowd": 0, "category_id": 1, "bbox": [286.0, 155.0, 22.0, 16.0], "area": 181, "segmentation": {"size": [512, 512], "counts": "PU_43k?3M2O20O010O0010O0010O010O0010O0010O0O2M2MPkU3"}, "image_id": 207, "id": 3054}, {"iscrowd": 0, "category_id": 1, "bbox": [159.0, 162.0, 63.0, 44.0], "area": 1551, "segmentation": {"size": [512, 512], "counts": "le_21k?5J5L4M30001O01O0001O0001O01O0001O01O0001ON2L5J5M30001O01O0001O01O00UAQOe>U10010O00000010O00000010O000001POYAd0h>WO]Ae0R?K4K5000010O00000N3J5L_j`4"}, "image_id": 207, "id": 3055}, {"iscrowd": 0, "category_id": 1, "bbox": [288.0, 188.0, 76.0, 55.0], "area": 2600, "segmentation": {"size": [512, 512], "counts": "QW`43i?4L4L5K4L4M3L5M2001O01O0001O01O0001O01O01O0001O01O01O0001O01O00000L5K4L4N2010O0000010O0000010O0000010O00010O0000010O0N2L4gN_Ao0k>L4L4M4M2000010O00000L5K4M3LiiY2"}, "image_id": 207, "id": 3056}, {"iscrowd": 0, "category_id": 1, "bbox": [116.0, 226.0, 87.0, 67.0], "area": 2969, "segmentation": {"size": [512, 512], "counts": "UXj12k?4M2M4M2N2M4M2M4M2N2M4M2M4M2O20O00010O010O010O00010O010O00010O010O010O000kN\\Ao0d>nN^AR1i>0O010O01O01O010O01O01O010O010O01O01O010O010O01OO2M2N30O0010O0010O001M2N3L3N2N3L3N3L3N3M2M3N3L3NQXj4"}, "image_id": 207, "id": 3057}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 230.0, 32.0, 47.0], "area": 1015, "segmentation": {"size": [512, 512], "counts": "V7U1l>O010O010O00010O010O0010O0010O010O0010O0010O010O0SOXA?l>^OVA`0l>^OWA?V?M4M2H\\@1k?NPh_7"}, "image_id": 207, "id": 3058}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 284.0, 19.0, 25.0], "area": 293, "segmentation": {"size": [512, 512], "counts": "Y98f?2N3M2M4M2N3O000010N1N3L3N3M2N3L3N2NRWf7"}, "image_id": 207, "id": 3059}, {"iscrowd": 0, "category_id": 1, "bbox": [450.0, 285.0, 62.0, 43.0], "area": 1973, "segmentation": {"size": [512, 512], "counts": "XYQ71n?6K5K5K6J5J6K0000O10O100000O1000O100000O10O100000O10O10000000O010000000O0100000000O0100000000O0100000000O0100000001N1000O1SG"}, "image_id": 207, "id": 3060}, {"iscrowd": 0, "category_id": 1, "bbox": [103.0, 290.0, 65.0, 52.0], "area": 1840, "segmentation": {"size": [512, 512], "counts": "lic14j?2N3L3N2N3L3N3M2M4M2N2O20O010O01O01O010O010O01O01O010O010O01O01O010O010O01O01O010O010O01O01O010O010O01O01M2M4M2N3L3N2N3L3N3M2M4MYf[5"}, "image_id": 207, "id": 3061}, {"iscrowd": 0, "category_id": 1, "bbox": [179.0, 298.0, 28.0, 27.0], "area": 458, "segmentation": {"size": [512, 512], "counts": "mii21l?3M3N3L3N3L3M3010O00010O010O00010O01O01O0N3L3N2M4L3N3LaVh4"}, "image_id": 207, "id": 3062}, {"iscrowd": 0, "category_id": 1, "bbox": [209.0, 306.0, 90.0, 64.0], "area": 2937, "segmentation": {"size": [512, 512], "counts": 
"_jX31m?2N3M2N3M3M2c@BV?e0O2M2N3M2N30O010O010O01O001M2N3M2N3N2O010O010O010O010O010O010O010O010O010O010O0100jNaAi0_>UOdAk0\\>ROfAn0Z>POiAP1c>010O010O01N1N3M0111O010O010O010O010O010O010O01O0N3O010O0O2M2O2M2N3M3M2N5K2NTUZ3"}, "image_id": 207, "id": 3063}, {"iscrowd": 0, "category_id": 1, "bbox": [401.0, 315.0, 21.0, 16.0], "area": 166, "segmentation": {"size": [512, 512], "counts": "RjX62m?1N3N2M2O2O10M20100O01000O10O10O1N1N3N2Nle\\1"}, "image_id": 207, "id": 3064}, {"iscrowd": 0, "category_id": 1, "bbox": [431.0, 320.0, 25.0, 24.0], "area": 432, "segmentation": {"size": [512, 512], "counts": "Tjg62n?5K5K6I3N0000000O010000000O01000000000O05L5K6J5K\\ek0"}, "image_id": 207, "id": 3065}, {"iscrowd": 0, "category_id": 1, "bbox": [83.0, 342.0, 67.0, 53.0], "area": 1914, "segmentation": {"size": [512, 512], "counts": "bkY12k?4M2N3L3N3L3N2N3L3O20O00O2L3N3O010O00010O010O01O01O010O01lNVAQ1l>01O01O010O01O01O010O010O01O01O010O010O01O01O010O01O0M3N3L3N3M2M4M2M3N3M2Medd5"}, "image_id": 207, "id": 3066}, {"iscrowd": 0, "category_id": 1, "bbox": [393.0, 350.0, 71.0, 67.0], "area": 2338, "segmentation": {"size": [512, 512], "counts": "WlT62l?2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3O110O0010O0010O010O00010O010O00010O010O00oNdA>]>_OeAb0Z>[OjAd0W>XOlAi0S>TOQBk0b>0010O010O00010O010O00010O010O00010O010N1M3N3L3N3L3N2M4M2Mmcg0"}, "image_id": 207, "id": 3067}, {"iscrowd": 0, "category_id": 1, "bbox": [160.0, 352.0, 24.0, 22.0], "area": 307, "segmentation": {"size": [512, 512], "counts": "][`21m?2M4M2N3L3O2O01O010O01O010O01O01O010OM4M2N3L3NkdS5"}, "image_id": 207, "id": 3068}, {"iscrowd": 0, "category_id": 1, "bbox": [504.0, 361.0, 8.0, 22.0], "area": 95, "segmentation": {"size": [512, 512], "counts": "j[l72l?3M2M4M2M3N3L3gD"}, "image_id": 207, "id": 3069}, {"iscrowd": 0, "category_id": 1, "bbox": [492.0, 363.0, 15.0, 21.0], "area": 147, "segmentation": {"size": [512, 512], "counts": "k[f73k?3M2M4M2NO10O22O010M2N3L3N2NdT2"}, "image_id": 207, "id": 3070}, {"iscrowd": 0, "category_id": 1, "bbox": [197.0, 367.0, 67.0, 65.0], "area": 2386, "segmentation": {"size": [512, 512], "counts": "ilR34j?2M3N3L3M4O01ON3M2M4M2M3N3L3N3L3_AeNZ>^1dAdNZ>b1M4O010O00N3M210O01ON3N101O01O010N1O110O0010O000QOfA9Z>DhAAkA`0U>\\OoAc0Q>[OQBg0m=VOWBi0j=TOXBm0]>01O010O00010O010O0O1M4M2M3Jg@C\\?97N3L3N]ck3"}, "image_id": 207, "id": 3071}, {"iscrowd": 0, "category_id": 1, "bbox": [51.0, 390.0, 87.0, 61.0], "area": 2799, "segmentation": {"size": [512, 512], "counts": "Tmi01n?2M2N3M2N3M20010O010O0010O0010O010O010N1N3M2N2M4M2N3L3N3O0010O0001M2N3M2O2O01O01O010O010O010O00010O010O010O000010O01O010O010O01O010O01O01N1N3M2N3L3N3M2M3N3M2N3L3N3M2N2M4M2NXcj5"}, "image_id": 207, "id": 3072}, {"iscrowd": 0, "category_id": 1, "bbox": [41.0, 429.0, 43.0, 31.0], "area": 772, "segmentation": {"size": [512, 512], "counts": "Pnd02k?3N3M2M4M2N3M2M301O010O010O0010O0010O010O0010N1M4M201O010O01O01O010O010O0M3N3M2M4M2NWbe6"}, "image_id": 207, "id": 3073}, {"iscrowd": 0, "category_id": 1, "bbox": [375.0, 430.0, 64.0, 62.0], "area": 2263, "segmentation": {"size": [512, 512], "counts": "`nk52l?2N3M2N3M2N3M2f@_OS?i0M2N3M2N3M2N3M3M2N30O010O010O010O010O010O010O010O10O010O010O010O010O0N20N2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3N1NPRT1"}, "image_id": 207, "id": 3074}, {"iscrowd": 0, "category_id": 1, "bbox": [487.0, 435.0, 25.0, 40.0], "area": 673, "segmentation": {"size": [512, 512], "counts": "`nc72k?3N2M4M2N3L3N2M4M2M4M2N2010O01O010O01O010O01O01OXB"}, "image_id": 207, "id": 3075}, {"iscrowd": 0, "category_id": 1, "bbox": [162.0, 436.0, 101.0, 75.0], "area": 3828, "segmentation": {"size": 
[512, 512], "counts": "g^a21m?2M3N3M2M4M2N3M2010O00010O010O010O0010O0010O010OC[O_Ae0_>^O^Ae0_>]O_Ae0^>_O_Ad0^>^O`Ad0a>>O0010O0010O010N1N3M2O110O0010O010O0010O010O010O0010O0010M2010O010O00010O010O010O0ZNiAc1Z>10O010O010OO2O0OTOiA1W>MkA4S>JQB5Q>GQB:Q>BPB>S>_OnAa0U>ZOlAf0W>WOjAh0g>10O01O01M2M4M2N3M2M4M2N2MlPl3"}, "image_id": 207, "id": 3076}, {"iscrowd": 0, "category_id": 1, "bbox": [131.0, 451.0, 13.0, 14.0], "area": 102, "segmentation": {"size": [512, 512], "counts": "[nQ22l?2N3M2N30O010O010M2N3M2Nkag5"}, "image_id": 207, "id": 3077}, {"iscrowd": 0, "category_id": 1, "bbox": [12.0, 475.0, 54.0, 37.0], "area": 1067, "segmentation": {"size": [512, 512], "counts": "m_63k?2N2O1001O00001O001OO1N2M3N2M3N2N2M3N2M3N200001O001O00001O001O00001ON2N2M3N2N2O10O1N3L3N3L3N2M4M2M4M2M3N3MRan6"}, "image_id": 207, "id": 3078}, {"iscrowd": 0, "category_id": 1, "bbox": [445.0, 479.0, 67.0, 33.0], "area": 1234, "segmentation": {"size": [512, 512], "counts": "non62l?2N2M3N2N2M3N2O1001O001O00001O001O00001O001O001O00001O001O00001O001OO1N2M3N2N2M3N2N2M3N2M31O00001O0NPATOQ?i0QAWOo>h060O00010O010O010O00010O01O00"}, "image_id": 207, "id": 3079}, {"iscrowd": 0, "category_id": 1, "bbox": [283.0, 484.0, 94.0, 28.0], "area": 1651, "segmentation": {"size": [512, 512], "counts": "oo]41m?2M3N2N2M3N2M3N2N2M30000001O001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O00001O0000M3N2M3N2N2M3N2N21O00001O001O001O00001O001O00001O001O001O00001O001O00001N1N3L3N3L3N`PS2"}, "image_id": 207, "id": 3080}, {"iscrowd": 0, "category_id": 1, "bbox": [173.0, 485.0, 31.0, 27.0], "area": 545, "segmentation": {"size": [512, 512], "counts": "lof22k?3N2N3L3N3M2M3N2M3O11O00001O001O00001O001O001N1N2N3L3N3L3N3Md`i4"}, "image_id": 207, "id": 3081}, {"iscrowd": 0, "category_id": 1, "bbox": [75.0, 488.0, 44.0, 24.0], "area": 701, "segmentation": {"size": [512, 512], "counts": "noU12k?3M3N2M3M3N2M3M3001O00001O00001O001O00001O001O00001O001O00001O00001O001O00001O0O2L3N2M^PT6"}, "image_id": 207, "id": 3082}, {"iscrowd": 0, "category_id": 1, "bbox": [131.0, 500.0, 35.0, 12.0], "area": 233, "segmentation": {"size": [512, 512], "counts": "ooQ21l?3M3N2M3001O001O001O001O001O0000O100001O001O00001O001O00001O00001O00R`\\5"}, "image_id": 207, "id": 3083}, {"iscrowd": 0, "category_id": 1, "bbox": [230.0, 509.0, 9.0, 3.0], "area": 18, "segmentation": {"size": [512, 512], "counts": "o_c31m?200001O00001O00QPX4"}, "image_id": 207, "id": 3084}, {"iscrowd": 0, "category_id": 1, "bbox": [264.0, 511.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "o_T41P`k3"}, "image_id": 207, "id": 3085}, {"iscrowd": 0, "category_id": 1, "bbox": [124.0, 0.0, 30.0, 12.0], "area": 192, "segmentation": {"size": [512, 512], "counts": "PPn11o?001O001O001O001O001O001O001O001O001O001O001O00O1N2N2N2N2NRPc5"}, "image_id": 208, "id": 3086}, {"iscrowd": 0, "category_id": 1, "bbox": [175.0, 0.0, 88.0, 41.0], "area": 2373, "segmentation": {"size": [512, 512], "counts": "_`g24h?4M3L5L3O10001O00001O0000001O00001O00001O0000001O00001O0000001O00001O0000001O00001O00001O0000001O00001O0000001O01O01O0001O01O01O01O00010OoNTAn0o>01O01O01O01O0001O000M4K4M3L4M4K4M3Lh_l3"}, "image_id": 208, "id": 3087}, {"iscrowd": 0, "category_id": 1, "bbox": [506.0, 0.0, 3.0, 1.0], "area": 3, "segmentation": {"size": [512, 512], "counts": "PPm71o?000P`1"}, "image_id": 208, "id": 3088}, {"iscrowd": 0, "category_id": 1, "bbox": [33.0, 4.0, 25.0, 29.0], "area": 432, "segmentation": {"size": [512, 512], "counts": "j``02l?2M3N3L3M4M2M3N3M210O00010O01O000M4M2M4L3N2M4M2MioR7"}, "image_id": 208, "id": 
3089}, {"iscrowd": 0, "category_id": 1, "bbox": [287.0, 20.0, 57.0, 66.0], "area": 2192, "segmentation": {"size": [512, 512], "counts": "Ub_43k?3M2M4M2N2M4M2N3L3N3L3N2N3L3N3M2M4M2N2M4M210O01O01O010O010O01O01O01N101O0M4M2N2M4M2N3L3N3M2M3N3M2M4M2N3L3N2N3L3Nonc2"}, "image_id": 208, "id": 3090}, {"iscrowd": 0, "category_id": 1, "bbox": [69.0, 34.0, 114.0, 58.0], "area": 3278, "segmentation": {"size": [512, 512], "counts": "ZbR11m?2M4L3M3N3L3M4M2M3M4L3N201O010O00010O010O0O1N0O10012O01O01O010O01O01O010O010O00010O010O0010oN[Ae0e>XO^Ah0c>UO_Al0`>QOdAn0g>10O01O0M3N3L3N3L3N3L3N1N12N2M4M2N3L3N2N3L3N3M2M3N30O010O0001M2M4M2M3N3L3O02L3N2M4O001O01O010O010O00010O010O0010O00010ON3M2M4M2Mh^T5"}, "image_id": 208, "id": 3091}, {"iscrowd": 0, "category_id": 1, "bbox": [176.0, 58.0, 64.0, 54.0], "area": 2004, "segmentation": {"size": [512, 512], "counts": "cRh21m?2M3M4L3N2M4L3N3k@XOm>P10O00010O01O01O010O00010O01VAQO`>P1]ATOb>l0\\AVOd>S110M20001O010O01O01O010O0N2O2ON3M2M4L3N2M4M200010L3N3L03M4M2M3M4M2M4L3N2M4L3NTnW4"}, "image_id": 208, "id": 3092}, {"iscrowd": 0, "category_id": 1, "bbox": [412.0, 71.0, 63.0, 84.0], "area": 2868, "segmentation": {"size": [512, 512], "counts": "\\T^61m?3M2M3N3M2M4M2N3L3N2N3L3N3M2M4_OgNWB[1f=iNWBY1g=iNVB[1g=gNWB[1f=iNWBZ1f=a0N3L3O1010O01O001N11O010O0N3M2M3N3M2M4M2N3L3N2M4M2N3L3N3M2M3N3M2M4M2N3L3N2N3L3N3M2M^]b0"}, "image_id": 208, "id": 3093}, {"iscrowd": 0, "category_id": 1, "bbox": [243.0, 79.0, 60.0, 53.0], "area": 1914, "segmentation": {"size": [512, 512], "counts": "Uci33j?3N3L3N2M4L3N2j@[On>n0O00010O00010O010O00010O00TOQAh04WOg>i0WAZOi>n00010M2M3N3L3N30O00010O010O00010O0N3L3N2M4L3N2O20O010ON2M4N10010N1Ei@KZ?1j@KY?3;MY]X3"}, "image_id": 208, "id": 3094}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 88.0, 14.0, 29.0], "area": 265, "segmentation": {"size": [512, 512], "counts": "h2k0U?010O0010M2N3M2M3N3L3N3M2MVmh7"}, "image_id": 208, "id": 3095}, {"iscrowd": 0, "category_id": 1, "bbox": [159.0, 96.0, 17.0, 17.0], "area": 157, "segmentation": {"size": [512, 512], "counts": "[c_21m?2N3M2N3M2O20O010O010N1N3M2N3M2NmlW5"}, "image_id": 208, "id": 3096}, {"iscrowd": 0, "category_id": 1, "bbox": [484.0, 110.0, 28.0, 60.0], "area": 875, "segmentation": {"size": [512, 512], "counts": "dTb72l?3L3N3L3N2M4M2M4O00010O001QAWOf>h0WA[Oi>e0UA]Ol>l0O001L3N2M4M2N3L3N2N3M2bL"}, "image_id": 208, "id": 3097}, {"iscrowd": 0, "category_id": 1, "bbox": [206.0, 129.0, 70.0, 74.0], "area": 2688, "segmentation": {"size": [512, 512], "counts": "TUW31m?2M3N3L3N3L3N2M4M2M4M2UASO`>P1]ASOa>W1M40O00010O010O00010O01O01O010O01O01OfAaNS>`1jAcNV>c10001L3010O00010O010O00010O0TOmALR>1QBOP>NRB2n=KVB5j=GYB9g=E\\B;c=B`B>a=_OaBa0_=WOmA1h0h0^=VOdBj0Z>O01O010OM3N3L3M4M2M3N3L3N_je3"}, "image_id": 208, "id": 3098}, {"iscrowd": 0, "category_id": 1, "bbox": [455.0, 154.0, 29.0, 30.0], "area": 522, "segmentation": {"size": [512, 512], "counts": "^eS72l?2N2M4M2M4M2M3N3N110O010O00010O010O01O01OO2M2M4M2M3N3L3NPk="}, "image_id": 208, "id": 3099}, {"iscrowd": 0, "category_id": 1, "bbox": [366.0, 160.0, 68.0, 55.0], "area": 2229, "segmentation": {"size": [512, 512], "counts": "mUg53j?3N3L3N2M4M2M4M2M3N3L3O2O01O010O]AnNX>S1dAPO\\>P1bARO_>X1O010O00010O010O00010OO2L3N2N3O010O00010O010O00010O010O00010O010O00010O010L3N2M4M2M4M2M3N3L3N3L3N\\jV1"}, "image_id": 208, "id": 3100}, {"iscrowd": 0, "category_id": 1, "bbox": [26.0, 183.0, 66.0, 61.0], "area": 1990, "segmentation": {"size": [512, 512], "counts": 
"QW=2k?3N2M4M2N3L3N2M4M2M40O0010O0010O0010O010O0010O0010O0010O0010O0010O00O2M2M4M2M3N3L3N3M2M3N00O3N3M2M4M2M3N3L3N3L3N2M4M2N3L3N2M4M2M4MSja6"}, "image_id": 208, "id": 3101}, {"iscrowd": 0, "category_id": 1, "bbox": [180.0, 192.0, 64.0, 66.0], "area": 2124, "segmentation": {"size": [512, 512], "counts": "QWj21l?4L3M3M4N101O0I@o@`0m>DPA>n>Do@?n>Z1iAiNX>V1fAlNZ>^10O01O010O01O0TOgA2X>LkA3V>ImA8R>FQB9P>YOgA5XOSBg0c>10O00010O0M4M2N3L3N2M4M2MhhU4"}, "image_id": 208, "id": 3102}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 197.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "Ufo72i9"}, "image_id": 208, "id": 3103}, {"iscrowd": 0, "category_id": 1, "bbox": [429.0, 208.0, 31.0, 29.0], "area": 542, "segmentation": {"size": [512, 512], "counts": "Qgf63j?3N3L3N2M4M2N30O00010O01O01O01O010O01O01O010OO1N3L3N3L3N2M4MYii0"}, "image_id": 208, "id": 3104}, {"iscrowd": 0, "category_id": 1, "bbox": [342.0, 213.0, 68.0, 57.0], "area": 2248, "segmentation": {"size": [512, 512], "counts": "fW[53j?3N3L3N2M4M2M4M2M4L3N2M4M201O01O01O010O01O010O01O010O01O01O010O01O010O01O010O01O01O010O01O010O01O010O01O0O1N3M2M4M2M4M2N2M4M2M4M2N2M4Mghb1"}, "image_id": 208, "id": 3105}, {"iscrowd": 0, "category_id": 1, "bbox": [457.0, 214.0, 55.0, 53.0], "area": 1535, "segmentation": {"size": [512, 512], "counts": "]gT71l?4M2N3L3N3c@BT?`0j@BS?h0N101O01O010O01O010O01O010O01O01O010N1N3M200010O01O010O01O0j@\\OQ?i0010O010O0001N1M4N101O0N2N3L3N3M2M4YI"}, "image_id": 208, "id": 3106}, {"iscrowd": 0, "category_id": 1, "bbox": [74.0, 230.0, 51.0, 45.0], "area": 1293, "segmentation": {"size": [512, 512], "counts": "RXU11m?2N3M2M4M2N2N3L3N3M2N3L3O20O00010O010O010O01O01O010O00010O0010O010O0010oNTAl0P?1O01O0O2L3N3M2N3L3N2N3L3N3MXXQ6"}, "image_id": 208, "id": 3107}, {"iscrowd": 0, "category_id": 1, "bbox": [420.0, 230.0, 32.0, 30.0], "area": 581, "segmentation": {"size": [512, 512], "counts": "jWb61l?4M2M4M2M3N3L3N3O01O01O010O01O01O010O01O01O01O0N3L3N2M4M2M4M2Mdhm0"}, "image_id": 208, "id": 3108}, {"iscrowd": 0, "category_id": 1, "bbox": [159.0, 259.0, 63.0, 53.0], "area": 1949, "segmentation": {"size": [512, 512], "counts": "mh_23k?2N3L3N3M200N3M2M4j@XOP?m0N3L31O010O010O01O01O01O0M4M200010O010O01O01O010O010O00010O010O000lN[An0e>oN]AQ1j>0O010O01O010M2N3L3N2N3L3N3L3N3L3N3MXg`4"}, "image_id": 208, "id": 3109}, {"iscrowd": 0, "category_id": 1, "bbox": [312.0, 274.0, 75.0, 49.0], "area": 2223, "segmentation": {"size": [512, 512], "counts": "YYl42k?4M2N2M4M2M4M2N3N10010O010O010O00010O010O010O000N3O0010O0001M2N3L3O20O0010O010O0010O0010O010O0010O0010M2N3L31O01O0000M3N3M2M4M2N2M4M2N3L3N2M4M2N3L3NXWn1"}, "image_id": 208, "id": 3110}, {"iscrowd": 0, "category_id": 1, "bbox": [396.0, 279.0, 36.0, 32.0], "area": 670, "segmentation": {"size": [512, 512], "counts": "\\YV61m?3L3N2M4L3N2M4L310O00010O01O010O01O01O010O01O01O010O001M2M3N3L3N3L3M3NPgW1"}, "image_id": 208, "id": 3111}, {"iscrowd": 0, "category_id": 1, "bbox": [449.0, 293.0, 63.0, 48.0], "area": 1846, "segmentation": {"size": [512, 512], "counts": "iiP72m?4M3M3L4M3M3L4M4L1N1000O01000O10O10O10O10O10O1000O01000O01000O01000O10O10O10O1000O01000O01000O01000O01000O10O10O10O1000O0100kF"}, "image_id": 208, "id": 3112}, {"iscrowd": 0, "category_id": 1, "bbox": [428.0, 302.0, 19.0, 18.0], "area": 182, "segmentation": {"size": [512, 512], "counts": "fYf62m?2N1O2N2N2N2N2N2O10O1N2N2N2N2N2N2N2NZVP1"}, "image_id": 208, "id": 3113}, {"iscrowd": 0, "category_id": 1, "bbox": [134.0, 309.0, 68.0, 65.0], "area": 2161, "segmentation": {"size": [512, 512], "counts": 
"eZS22k?3N3M2M3N3M2M4M2N3L3N3M2M3O2O0010O010O0010O010O0010O010O0010O0^AiN[>X1bAjN_>U1_AmNa>Z10O010O010O01O01O010TO_A7a>GbA9^>DdA<\\>BgA>X>_OkAa0V>\\OlAd0T>ZOoAf0e>010O010O010O0001O0N3L3N3M2N2M4M2NRej4"}, "image_id": 208, "id": 3114}, {"iscrowd": 0, "category_id": 1, "bbox": [301.0, 322.0, 62.0, 54.0], "area": 1715, "segmentation": {"size": [512, 512], "counts": "kjf41n?2M3N2N1N3N2N2N1N3N2N2M3N1O2M3N2N20O10M3N1O2N11O100O010O1O10O0100O10O01O100nNWAk0i>TOYAl0m>01O010O01O010O01O01O01N1N3L3N3M2M3N3M2M4M2NTUZ2"}, "image_id": 208, "id": 3115}, {"iscrowd": 0, "category_id": 1, "bbox": [409.0, 322.0, 28.0, 29.0], "area": 476, "segmentation": {"size": [512, 512], "counts": "dj\\63k?3M2M3N3M2M4M2N30O00010O010O010O00010N1N3L3N3L3N2N3L3NhUU1"}, "image_id": 208, "id": 3116}, {"iscrowd": 0, "category_id": 1, "bbox": [6.0, 327.0, 58.0, 90.0], "area": 2816, "segmentation": {"size": [512, 512], "counts": "b\\31l?4M2M4M2M3N3L3N3L3N2N3TOROaBQ1]=QOaBR1[=ROaBQ1]=QOaBQ1\\=ROaBR1\\=QOaBQ1\\=ROaBR1\\=QOaBQ1\\=k0O2O010O00010O010O0003N0O01O01M2M4M2M3PNXBf1k=VNYBf1j=XNXBf1S>L3GfAjN\\>T1fAiN^>S19N3L3N3L3N2M4M2N3L3N3L3NReo6"}, "image_id": 208, "id": 3117}, {"iscrowd": 0, "category_id": 1, "bbox": [366.0, 331.0, 38.0, 33.0], "area": 724, "segmentation": {"size": [512, 512], "counts": "lZg52l?3L3M4M2M3M4N110O010O2N01O010O01O01O010O01O010O01O01O010O00O2M2N3L3N2M4M2N[ee1"}, "image_id": 208, "id": 3118}, {"iscrowd": 0, "category_id": 1, "bbox": [457.0, 338.0, 41.0, 33.0], "area": 681, "segmentation": {"size": [512, 512], "counts": "PkT71o?2M3N3M2M4M2M3N2M10O10O010O01000O010O010O10O10O010OJh@EY?;i@CW?=5010O010O010003L3N3L3N2MSe6"}, "image_id": 208, "id": 3119}, {"iscrowd": 0, "category_id": 1, "bbox": [497.0, 354.0, 15.0, 38.0], "area": 281, "segmentation": {"size": [512, 512], "counts": "Xkh71o?2M3N3L3N2M3N2N2M4MO02O2m@VOk>R1N2lD"}, "image_id": 208, "id": 3120}, {"iscrowd": 0, "category_id": 1, "bbox": [275.0, 379.0, 76.0, 44.0], "area": 1998, "segmentation": {"size": [512, 512], "counts": "flY43k?2M3N3L3N3L3N2M4M2M4M21O01O010O01O01O010O01O01O01O010O01OO2L3N3L3N201O01O2O00O10O01O01M2N3L3M310O00010O010O00010O010O00010O010O00N3M2M4M2M3N3L3N3L3N2MiS`2"}, "image_id": 208, "id": 3121}, {"iscrowd": 0, "category_id": 1, "bbox": [110.0, 380.0, 56.0, 71.0], "area": 2302, "segmentation": {"size": [512, 512], "counts": "X\\g11o?2M4eAKe<7PC2P=1dB8[=KZB>f=DoAe0P>f00000O101N100O10000O100O10001N100O1003M2N2NOO2H7I8H8H701O01000O010O0100O01GPABP?=TA_Ol>b09O01002M4M2M4M3M2Mfc\\5"}, "image_id": 208, "id": 3122}, {"iscrowd": 0, "category_id": 1, "bbox": [391.0, 384.0, 66.0, 63.0], "area": 2470, "segmentation": {"size": [512, 512], "counts": "VmS61k?4L5K4L4L4M4K4000010O0000010O0000010O0000010O0WATO^>l0^AYOa>g0[A]Of>n00001O01O0001M2L4L4L5O0000010O0000010O000010O00O1M2ON4L5K4L4M3L5K4L4L5K4L4L5L3LPTk0"}, "image_id": 208, "id": 3123}, {"iscrowd": 0, "category_id": 1, "bbox": [356.0, 387.0, 30.0, 29.0], "area": 509, "segmentation": {"size": [512, 512], "counts": "f\\b52l?2N3L3N2M4M2N3L31O01O010O00010O010O00010O01L3N2M4M2M4M2M3Ngcn1"}, "image_id": 208, "id": 3124}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 406.0, 13.0, 25.0], "area": 208, "segmentation": {"size": [512, 512], "counts": "fQO\\Ao0j>01O01O010O01O010O01O010O01O01OM4M2M4M2M3N3L3N3L3N2MkQm2"}, "image_id": 208, "id": 3126}, {"iscrowd": 0, "category_id": 1, "bbox": [336.0, 432.0, 31.0, 29.0], "area": 565, "segmentation": {"size": [512, 512], "counts": "R^X53j?4M2M3M4M2M3N3O0010O00010O0010O0010O00010O001M2N2M4M2M3M4M2M[RX2"}, "image_id": 208, "id": 3127}, {"iscrowd": 0, "category_id": 1, 
"bbox": [66.0, 438.0, 35.0, 43.0], "area": 828, "segmentation": {"size": [512, 512], "counts": "g^Q11l?3N3M2N3L3N3M2M3N3M2M4M2N3L31O01O010O010ON3M2N2M4M2N3L3N3M2M3N3M2M4MSR]6"}, "image_id": 208, "id": 3128}, {"iscrowd": 0, "category_id": 1, "bbox": [109.0, 455.0, 38.0, 48.0], "area": 986, "segmentation": {"size": [512, 512], "counts": "]of11l?3N3M2M4M2M4M2N2M4M2N3L3N3M2M3N3O010O010O00M4M2N3L3N3M2M3N3M2M4M2N3L3N2M4MbQf5"}, "image_id": 208, "id": 3129}, {"iscrowd": 0, "category_id": 1, "bbox": [348.0, 458.0, 96.0, 52.0], "area": 3014, "segmentation": {"size": [512, 512], "counts": "g^^52m?3N3L3N3M2M4M2M4M2N3L3N3L3N1O0O010O01000O010O01000O010O102N3L3N20N2M4M2N3L10O01000O010O10O10O010O12M100O01000O010O01000O010O01000O010O01000O013L3N3M2M4M1N0100O0100O01000O010O01001N4M2L5L3N3Mn`Q1"}, "image_id": 208, "id": 3130}, {"iscrowd": 0, "category_id": 1, "bbox": [235.0, 477.0, 61.0, 35.0], "area": 1538, "segmentation": {"size": [512, 512], "counts": "loe33i?4J7L3M3L4M3M3M3N20000001O00001O0000001O00001O00001O000000N2001O00001O00001O0000001O00001O000]Oo@7R?EQA;o>BUA=W?0001O00001O00000N3L3M^`[3"}, "image_id": 208, "id": 3131}, {"iscrowd": 0, "category_id": 1, "bbox": [30.0, 486.0, 41.0, 26.0], "area": 618, "segmentation": {"size": [512, 512], "counts": "o_?1n?1N2O1O1O1HJe@7Z?Kd@6[?Kc@7\\?7O1O1N2O1O1O1O100001O1O1O1O1O1O001O1O1O1O1O1O001O1O1O1N2N2M2O2NXPl6"}, "image_id": 208, "id": 3132}, {"iscrowd": 0, "category_id": 1, "bbox": [340.0, 499.0, 18.0, 13.0], "area": 162, "segmentation": {"size": [512, 512], "counts": "n_Z51m?2N3L3N2N20000001O001O001O001M2N2M\\`\\2"}, "image_id": 208, "id": 3133}, {"iscrowd": 0, "category_id": 1, "bbox": [90.0, 500.0, 26.0, 12.0], "area": 172, "segmentation": {"size": [512, 512], "counts": "n_]12l?2O1N2N2O1N2001O001O001O1O001O1O001O001O1O001O001OQ`U6"}, "image_id": 208, "id": 3134}, {"iscrowd": 0, "category_id": 1, "bbox": [413.0, 502.0, 18.0, 10.0], "area": 110, "segmentation": {"size": [512, 512], "counts": "mo^61n?2N2O0O1O100O1O100O1001O2N1O2N1O2NQPX1"}, "image_id": 208, "id": 3135}, {"iscrowd": 0, "category_id": 1, "bbox": [22.0, 509.0, 5.0, 3.0], "area": 11, "segmentation": {"size": [512, 512], "counts": "n_;2m?1001O1OQPb7"}, "image_id": 208, "id": 3136}, {"iscrowd": 0, "category_id": 1, "bbox": [316.0, 226.0, 87.0, 60.0], "area": 2560, "segmentation": {"size": [512, 512], "counts": "nWn42l?2M3N3M2M4M2M3N3L3N3M2N210O010O010O00010O010O00010O010O00010O010O010O00010O010O00010O010O00010O010O00010O010O010O00010O010O00010O010O00010O010O010O0001L3N3M2M3N3L3N3L3N2N3LRXf1"}, "image_id": 210, "id": 3137}, {"iscrowd": 0, "category_id": 1, "bbox": [505.0, 228.0, 7.0, 11.0], "area": 59, "segmentation": {"size": [512, 512], "counts": "Zgl73k?2M4N101O01OkH"}, "image_id": 210, "id": 3138}, {"iscrowd": 0, "category_id": 1, "bbox": [406.0, 265.0, 35.0, 38.0], "area": 795, "segmentation": {"size": [512, 512], "counts": "TY[61m?3L3N2M4M2M4M2N2M4M2M4O01O01O010O010O00010O010OO1N3M2M4M2M4M2M3N3L3N`WS1"}, "image_id": 210, "id": 3139}, {"iscrowd": 0, "category_id": 1, "bbox": [447.0, 280.0, 65.0, 54.0], "area": 2006, "segmentation": {"size": [512, 512], "counts": "cio63k?3M2M3N3L3N3M2M4M2M3N3N101O01O010O010O01O01O010O010O00010O010O01O01O010O010O00010O010O01O01O010O010O00010O010O01ON3L3N3M2M4M2M3NeF"}, "image_id": 210, "id": 3140}, {"iscrowd": 0, "category_id": 1, "bbox": [33.0, 0.0, 3.0, 1.0], "area": 3, "segmentation": {"size": [512, 512], "counts": "P``01o?000PP^7"}, "image_id": 211, "id": 3141}, {"iscrowd": 0, "category_id": 1, "bbox": [146.0, 0.0, 72.0, 24.0], "area": 857, "segmentation": 
{"size": [512, 512], "counts": "PPY21o?00001O00001O0000001O00001O0000001O00001O0000001O00001O0000001O00001O0\\@H`?=O0000001O00001O00001O0000001O00001O0000010O000010O000010O00O1M4K4M3L5Lnob4"}, "image_id": 211, "id": 3142}, {"iscrowd": 0, "category_id": 1, "bbox": [225.0, 0.0, 8.0, 3.0], "area": 16, "segmentation": {"size": [512, 512], "counts": "P``31o?001O00001O00OQ`[4"}, "image_id": 211, "id": 3143}, {"iscrowd": 0, "category_id": 1, "bbox": [275.0, 0.0, 13.0, 5.0], "area": 36, "segmentation": {"size": [512, 512], "counts": "P`Y41o?00001O001O00001O001O00MSP`3"}, "image_id": 211, "id": 3144}, {"iscrowd": 0, "category_id": 1, "bbox": [323.0, 0.0, 53.0, 41.0], "area": 1448, "segmentation": {"size": [512, 512], "counts": "i`Q51m?3L3N2N3L3N3M2M4M2N2N3O001O001O00001O001O001O00001O001O001O00001O001O0000N2N2M3N2001OFTA^On>`0TA^On>`0:M3N2N2M3N2N2MSPT2"}, "image_id": 211, "id": 3145}, {"iscrowd": 0, "category_id": 1, "bbox": [408.0, 0.0, 4.0, 1.0], "area": 4, "segmentation": {"size": [512, 512], "counts": "PP\\61o?00000PPb1"}, "image_id": 211, "id": 3146}, {"iscrowd": 0, "category_id": 1, "bbox": [504.0, 0.0, 8.0, 3.0], "area": 16, "segmentation": {"size": [512, 512], "counts": "PPl71o?00001O001O0000"}, "image_id": 211, "id": 3147}, {"iscrowd": 0, "category_id": 1, "bbox": [375.0, 9.0, 60.0, 92.0], "area": 2868, "segmentation": {"size": [512, 512], "counts": "hbk52k?4M2M3N3M2M4M2M3N3L3N3L3N3L3QOhNTCZ1jkNkAV1U>gNnAX1]>1O01O010O01O01O010O_N"}, "image_id": 211, "id": 3151}, {"iscrowd": 0, "category_id": 1, "bbox": [185.0, 28.0, 27.0, 26.0], "area": 398, "segmentation": {"size": [512, 512], "counts": "^al21m?2N2N3L3N3M2N3L310O0010O0010O010O01O0N3M2N2N3L3N3M2Nnne4"}, "image_id": 211, "id": 3152}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 60.0, 85.0, 77.0], "area": 3274, "segmentation": {"size": [512, 512], "counts": "Xco22l?3M2M3N3M2M4M2N3L3N2N3L3N30O01O0O1N3N1M4G8100O0010O010O010OO1N3M2O101O010N1M3010O010hAiNe=W1YBkNh=U1UBmNk=S1RBQOm=o0QBSOP>m0mAVOR>\\11XNnAa1Q>\\NRBd1U>010O010O010O01O01O01N1N3M2M4M2N3M210O01O01M2N3M2M4M2N3M2M4M2N2N3L3NSme3"}, "image_id": 211, "id": 3153}, {"iscrowd": 0, "category_id": 1, "bbox": [278.0, 60.0, 33.0, 41.0], "area": 794, "segmentation": {"size": [512, 512], "counts": "lR[41m?3L3M3N3L3M4M2M3M4M2M3M4O001O01O01O010O01M2M3M4M2M3M4M2M4M2M3M4Mo]T3"}, "image_id": 211, "id": 3154}, {"iscrowd": 0, "category_id": 1, "bbox": [418.0, 62.0, 27.0, 29.0], "area": 467, "segmentation": {"size": [512, 512], "counts": "cRa61l?3N2M4M2M4M2M3N3N110O01O01O010O010O00O2L3N3L3N2M4M2Mn]Q1"}, "image_id": 211, "id": 3155}, {"iscrowd": 0, "category_id": 1, "bbox": [82.0, 65.0, 71.0, 63.0], "area": 2426, "segmentation": {"size": [512, 512], "counts": "ZSY12g?O^@4_?N_@4_?N_@4_?8O1N2N3M2N2N1O01O0001O1O2N2N2O1N2N3M2N000001O01O00002N2O1N2N3M2N2N2N2OO00001O2N3M2O1N2N2N2N3M2N2O1N2N2N3M2N2N2O1N2N3M2N2N2N2O2M2N2N2No\\c5"}, "image_id": 211, "id": 3156}, {"iscrowd": 0, "category_id": 1, "bbox": [343.0, 83.0, 63.0, 40.0], "area": 906, "segmentation": {"size": [512, 512], "counts": "ob[51m?2N3M2N3M2N30O010O010O010O010O010O010O010O010O010O010O010O010O010O010O010O010O010O010O010O010OO2N1010O010O010O001M2M4M2N3M2Nfld1"}, "image_id": 211, "id": 3157}, {"iscrowd": 0, "category_id": 1, "bbox": [440.0, 99.0, 57.0, 45.0], "area": 2039, "segmentation": {"size": [512, 512], "counts": "VTl63f?8G8I7I7J6000001O000001O00000001O0001O00000001O0001O00000001O000001O00000001O0001O00000001O0001O0000000K5H8I7I8Gn\\7"}, "image_id": 211, "id": 3158}, {"iscrowd": 0, "category_id": 1, "bbox": [303.0, 117.0, 75.0, 59.0], "area": 
2214, "segmentation": {"size": [512, 512], "counts": "adg42k?4M2N3M2N2M4M2N3M2N3O010O010O0010O0010O0N3L3N3M2O20O010O010O0010O0010O010O010O010O0010OoN[Ag0f>VO\\Ak0c>RO`An0j>0O010O010O010O01O010O01O010O010OO2M2N3M2N3L3N3M2N2N3M2M^kR2"}, "image_id": 211, "id": 3159}, {"iscrowd": 0, "category_id": 1, "bbox": [61.0, 131.0, 54.0, 52.0], "area": 1660, "segmentation": {"size": [512, 512], "counts": "Ten01m?2N3L3N3L3N2M4M2N3O01O01O010O01O01O010O0O1HXOVAk0g>WOWAk0f>:M2010O0010O010O00010O010O00010O010O01O01OO2M2M4YOWA3k>KXA2j>LXA1l>KXA2j>LXA2]?LR[V6"}, "image_id": 211, "id": 3160}, {"iscrowd": 0, "category_id": 1, "bbox": [174.0, 132.0, 74.0, 91.0], "area": 3179, "segmentation": {"size": [512, 512], "counts": "iUg22l?2N3M2N3M2N3M2N2M4M2N3M2N3M2N3M2N3M2M4M2N3M2NBnASOo=m0SBSOk=m0XBSOf=l0\\BTOe=i0^BVOb=h0`BYO_=e0dBZO]=c0eB^OZ=`0iB_OX=>jBCU==lBBU=>jBCU==lBBU=>jBCU==lBBU=>jBCU==lBBU=>jBCU==lBBU=>jBCS=?mBAS=`0jBCU==iBEW=g=[OhB5D=g=[OhB5C>`>@cA=`>@bA>R?L3N3M2N3MPjS4"}, "image_id": 211, "id": 3161}, {"iscrowd": 0, "category_id": 1, "bbox": [412.0, 133.0, 26.0, 29.0], "area": 523, "segmentation": {"size": [512, 512], "counts": "jT^63i?5K4M3L4L5M2000010O00010O0000010O000001M2L4L5L3L4LjkT1"}, "image_id": 211, "id": 3162}, {"iscrowd": 0, "category_id": 1, "bbox": [134.0, 142.0, 29.0, 29.0], "area": 504, "segmentation": {"size": [512, 512], "counts": "QUS22k?3N3M2M4M2N2M4M201O010O01O01O010O010O01N1N2M4M2N3L3N3M2M][^5"}, "image_id": 211, "id": 3163}, {"iscrowd": 0, "category_id": 1, "bbox": [376.0, 146.0, 28.0, 28.0], "area": 488, "segmentation": {"size": [512, 512], "counts": "RUl54j?2N2M4M2N3L3N3O000010O010O01O01O010O010N1N2M4M2N3L3N3MXke1"}, "image_id": 211, "id": 3164}, {"iscrowd": 0, "category_id": 1, "bbox": [414.0, 164.0, 65.0, 60.0], "area": 2147, "segmentation": {"size": [512, 512], "counts": "PV_62l?2M4M2N3M2M4M2N2N3M2M4M2O2O010O0010O0010O010O010O0ZAmN_>T1^AnNc>Q1[AROd>U110O010O0]AhN_>\\1010O01O0N3O00010OO2M2001M21N1N3M2M4M2N3M2M3N3M2N3M2M4M2N3M2M3N^Z`0"}, "image_id": 211, "id": 3165}, {"iscrowd": 0, "category_id": 1, "bbox": [298.0, 187.0, 49.0, 68.0], "area": 1893, "segmentation": {"size": [512, 512], "counts": "bWe43k?2M4M2M4M2M3N3H[OPAh0f>WO^AQ1^>RO`AP1^>:M4M2M4M2M3N3N110O00010O010O0001N1M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N2M4M2MlYb2"}, "image_id": 211, "id": 3166}, {"iscrowd": 0, "category_id": 1, "bbox": [22.0, 189.0, 67.0, 45.0], "area": 1696, "segmentation": {"size": [512, 512], "counts": "bV;2k?4M2M4M2M3N3L3N3N10010O0010O010O0010O0010O010O0010O0010O010O0010O0010O010O0010O0010OO1N3M2M4M2O20O0010O00N3M2M4M2N3L3N2M4M2N3L3N3M2MkYc6"}, "image_id": 211, "id": 3167}, {"iscrowd": 0, "category_id": 1, "bbox": [99.0, 196.0, 30.0, 30.0], "area": 547, "segmentation": {"size": [512, 512], "counts": "hfa12l?2N3L3N2N3L3N3M2N3O01O010O01O010O01O010O000N3M2M4M2M4M2N2MgYo5"}, "image_id": 211, "id": 3168}, {"iscrowd": 0, "category_id": 1, "bbox": [373.0, 213.0, 28.0, 29.0], "area": 500, "segmentation": {"size": [512, 512], "counts": "Wgj53k?3L3N3L3N2M4L3010O00010O010O00010O0010OO1M4M2M4M2M3N3LVYg1"}, "image_id": 211, "id": 3169}, {"iscrowd": 0, "category_id": 1, "bbox": [344.0, 219.0, 28.0, 27.0], "area": 472, "segmentation": {"size": [512, 512], "counts": "]W\\51l?3N3L3N3L3N2M4O0010O00010O010O00010O010O0M3O2O0M4M2M6KmhU2"}, "image_id": 211, "id": 3170}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 223.0, 49.0, 48.0], "area": 1432, "segmentation": {"size": [512, 512], "counts": "o6Q1P?O010O010O01O01O010O010O01O000O20O01O010O01O010O01OnNTAP1n>010O010O0010O0010O01N1N2N3L3N3M2M4M2N2M4M2M`XW7"}, "image_id": 211, "id": 
3171}, {"iscrowd": 0, "category_id": 1, "bbox": [70.0, 241.0, 41.0, 32.0], "area": 758, "segmentation": {"size": [512, 512], "counts": "TXS11m?2N3M2N3L3N3M2N2N3O0010O010O010O010O00010O010O0N30O010O010O0010O0O2M2N3M2N3L3N3MSXX6"}, "image_id": 211, "id": 3172}, {"iscrowd": 0, "category_id": 1, "bbox": [386.0, 242.0, 56.0, 62.0], "area": 2093, "segmentation": {"size": [512, 512], "counts": "mXQ61l?3N2M4M2M4M2M3N3L3N3L3N2M4M2O20O01M2N2O2O001L31O01O010O01O010O01O010O01O01O010O01O01O010OYOfAF]>6gAG[>7gAF]>6gAG[>7gAG[>7gAF]>6gAG[>7i0L3N2MThR1"}, "image_id": 211, "id": 3173}, {"iscrowd": 0, "category_id": 1, "bbox": [43.0, 261.0, 20.0, 19.0], "area": 245, "segmentation": {"size": [512, 512], "counts": "bhe01m?2M4L3N3L300010O00010O01O01O001L3M3N3LiWP7"}, "image_id": 211, "id": 3174}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 273.0, 21.0, 44.0], "area": 661, "segmentation": {"size": [512, 512], "counts": "a8V1k>O01O010O010O01O01O010O010O]OWAMl>0VANl>OXANj>0XAMl>0WAMk>0XANk>OjWe7"}, "image_id": 211, "id": 3175}, {"iscrowd": 0, "category_id": 1, "bbox": [351.0, 279.0, 38.0, 34.0], "area": 758, "segmentation": {"size": [512, 512], "counts": "[i_52l?3L3N3L3N3L3N2M4O010O00010O010O00010O010O00010O010O00010ON3M2M3N3L3N3M2M4MoVm1"}, "image_id": 211, "id": 3176}, {"iscrowd": 0, "category_id": 1, "bbox": [43.0, 282.0, 35.0, 28.0], "area": 545, "segmentation": {"size": [512, 512], "counts": "Xie02l?3L3N2N3L3N3O010O01O010O01O01O010O01O01O010O01O010O01O010N1M3N3M2M4Mlfh6"}, "image_id": 211, "id": 3177}, {"iscrowd": 0, "category_id": 1, "bbox": [32.0, 319.0, 42.0, 51.0], "area": 1087, "segmentation": {"size": [512, 512], "counts": "X[`01l?4IL^@6`?6M4M2M4L3N2N3O010O01O010OLTOTAm0i>UOWAk0f>XO[Ag0c>\\O[Af0b>^O^A`0_>CaA>\\>d01POcA9_>a04VO]A3e>b02M4^OUAKm>a04Do@ES?:n@CU?=l@@W?`0401O0N3L3N2M4M2Nkej6"}, "image_id": 211, "id": 3178}, {"iscrowd": 0, "category_id": 1, "bbox": [370.0, 320.0, 62.0, 47.0], "area": 1607, "segmentation": {"size": [512, 512], "counts": "hZi53j?3N2N3L3N3L3N2N3L3N3O001O01O010O01O01O010O010O00010O010O01O01O010O010O00010O010O01O01O010O01O01O010O0N3L3N2N3L3N3L3N2N3L3N^eW1"}, "image_id": 211, "id": 3179}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 338.0, 24.0, 37.0], "area": 594, "segmentation": {"size": [512, 512], "counts": "b:P1P?010O00010O010O010O00010ON3M2N3L3N2N3L3N3M2M4MVec7"}, "image_id": 211, "id": 3180}, {"iscrowd": 0, "category_id": 1, "bbox": [30.0, 373.0, 63.0, 52.0], "area": 1794, "segmentation": {"size": [512, 512], "counts": "m\\?1m?2N3M2N3M2N3M3M2N3N1N3M2N30O010O01M2N3M2N3M2O1N001O02N3M2N3M2O2O0100O010O010O01mNUAP1m>010O010O010O010O01SOQAh0n>VOUAi0Q?01M3M2O2M2N3M2N3M2N3MbSa6"}, "image_id": 211, "id": 3181}, {"iscrowd": 0, "category_id": 1, "bbox": [2.0, 379.0, 31.0, 26.0], "area": 470, "segmentation": {"size": [512, 512], "counts": "Z\\11l?4M2M3N3L3N3O01O01O010O010O00010O010O00010O010O00N3M2M4M2M3NnS_7"}, "image_id": 211, "id": 3182}, {"iscrowd": 0, "category_id": 1, "bbox": [14.0, 424.0, 55.0, 58.0], "area": 1505, "segmentation": {"size": [512, 512], "counts": "T^72l?3L3N3M2N3M2N3O010O010O0O2M2N3M2N3M2N3M210O010O010O0^AiN\\>W1`AlN`>[10O010O010PO`A>`>@cA`0]>]OfAb0Z>\\OhAe0X>XOkAg0U>WOmAj0e>O010O010O001M2N3M2N3M2N3M2N3M2NgQm6"}, "image_id": 211, "id": 3183}, {"iscrowd": 0, "category_id": 1, "bbox": [483.0, 434.0, 29.0, 78.0], "area": 1252, "segmentation": {"size": [512, 512], "counts": "doa71m?3M2N3M2DGo@FPAGo@FPA=M2N3M2N3M2N3M2N3M2N3M2N3M2N2N2N2N1O000`B"}, "image_id": 211, "id": 3184}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 461.0, 71.0, 51.0], "area": 2099, 
"segmentation": {"size": [512, 512], "counts": "_o]11m?2N3M2N3M2N3N1N3M2N3M2N3M2N3M2N3M2O2O10O010O010OO2O010O010O010O01O1O001O001fN[AW1h>O001O001O0POWAi0j>TOXAl0o>O001O001O001O001O001O001O001O001O0O2M2N3M2N3M2N3Mc`^5"}, "image_id": 211, "id": 3185}, {"iscrowd": 0, "category_id": 1, "bbox": [11.0, 474.0, 28.0, 28.0], "area": 479, "segmentation": {"size": [512, 512], "counts": "^o51l?3M4M2M3M4L3N2O2O00010O010O00010O00010OO2L3N2M4L3M3N3LRQ\\7"}, "image_id": 211, "id": 3186}, {"iscrowd": 0, "category_id": 1, "bbox": [65.0, 482.0, 14.0, 13.0], "area": 113, "segmentation": {"size": [512, 512], "counts": "XoP13k?2N3M2010O010O010O001M2N3MkPh6"}, "image_id": 211, "id": 3187}, {"iscrowd": 0, "category_id": 1, "bbox": [40.0, 483.0, 21.0, 17.0], "area": 212, "segmentation": {"size": [512, 512], "counts": "Z_d04j?2M4M200010O010O00010O010O00010O0M4M2M3NiPQ7"}, "image_id": 211, "id": 3188}, {"iscrowd": 0, "category_id": 1, "bbox": [180.0, 502.0, 49.0, 10.0], "area": 230, "segmentation": {"size": [512, 512], "counts": "m_j23j?3M3O1001O00001O0000001O00001O00001O0000001O00001O0000001O00001OO1M31O0000001O00001O0000001O0000QP]4"}, "image_id": 211, "id": 3189}, {"iscrowd": 0, "category_id": 1, "bbox": [486.0, 153.0, 23.0, 15.0], "area": 169, "segmentation": {"size": [512, 512], "counts": "oTc71m?3M2N2010O01O010O01O01O010O01O01O010O01L3N2NR[1"}, "image_id": 212, "id": 3190}, {"iscrowd": 0, "category_id": 1, "bbox": [333.0, 469.0, 117.0, 43.0], "area": 2524, "segmentation": {"size": [512, 512], "counts": "ooV51m?2M3N2M3N2M3N2M3N2M3N2N2M3N2M3N2M31O001O00001O001O001O00001O001O00001O001O001O00001O001O001O00001O001O001O00001O001O001O00001O001O00001O001O001O00001O001O001O00001O001O001O00001O001O00001O001O001O00001O001O001O00001O001O001O00001O00Q`n0"}, "image_id": 212, "id": 3191}, {"iscrowd": 0, "category_id": 1, "bbox": [117.0, 0.0, 171.0, 83.0], "area": 10829, "segmentation": {"size": [512, 512], "counts": "[`j1d0\\?00000000000000S1mN0000000000;E000000000HM;00000000000008H0000K50000000000000000000000004L000000F:00000000000000000000000000000000000000000O1000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000009G000000009G000000000000000000000000000g0YOc1]Nfm_3"}, "image_id": 213, "id": 3192}, {"iscrowd": 0, "category_id": 1, "bbox": [2.0, 405.0, 376.0, 107.0], "area": 28523, "segmentation": {"size": [512, 512], "counts": "^_1:f?8G10000EGo@9Q?;0000000000000000O1000000000000000000O17I3M00000000O1000000000000000000UOCRB>n=LhA4X>j00000000000O1000000000000000000O1B`N\\B`1d=>000000O10000000000000000O1000000000000000000O1000000000000F:000000000000O1000000000000000000O10000004L4L000000O1000000000000000000O1000000000000000000O1000000000000000000O1000000000000000000O10000000000000000O1000000000000000000O1000000000000000000O1000000000000000000O1000000000000000000O1000000000000000000O10000000000000000O1000000000000000000O1000000000000000000O1000000000000000000O1000000000000000000O10000000000000000O1CZM`Cf2`<=00O10000000000000000O1000000000000000000O1000000000000000000O10000000000000000002N:F1O00000000000000O1000000000000000000O1000000000000000000O1000000000000000000O1000000000000009G:F:F9G:F:F:F:F9G:FQ`R2"}, "image_id": 213, "id": 3193}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 125.0, 112.0], "area": 12231, "segmentation": {"size": [512, 512], "counts": 
"0\\3d<0000000000000000000000001O0000000000000000000000000000000000000000000000000000000000001O0000000000000000000000000000000000000000000000000000000000001O0000000000000000000000000000000000000000000000000000000000001O0000M3QOo0QOo0POP1B>000000000000000000000P`Q6"}, "image_id": 214, "id": 3194}, {"iscrowd": 0, "category_id": 1, "bbox": [442.0, 0.0, 70.0, 92.0], "area": 6142, "segmentation": {"size": [512, 512], "counts": "bRm67T?e0ZOf0[Oe0^Ob000000000000001O0000000000000000000000000000000000000000001O00000000000000000000000000000000000000001O000000000000000000000000000000"}, "image_id": 214, "id": 3195}, {"iscrowd": 0, "category_id": 1, "bbox": [473.0, 97.0, 39.0, 35.0], "area": 1203, "segmentation": {"size": [512, 512], "counts": "Qc\\7m0S?00000000000000000000006J0000000000000000000000J6000000000000000000000000000oL"}, "image_id": 214, "id": 3196}, {"iscrowd": 0, "category_id": 1, "bbox": [238.0, 126.0, 15.0, 17.0], "area": 237, "segmentation": {"size": [512, 512], "counts": "]Tg32_??0000000000000000000000000MU\\Q4"}, "image_id": 214, "id": 3197}, {"iscrowd": 0, "category_id": 1, "bbox": [39.0, 142.0, 157.0, 105.0], "area": 6357, "segmentation": {"size": [512, 512], "counts": "Wec02_??00000000F:0001SAAW>?iABV>>jABV>>jABV>>jABV>>jAB`>4`AL`>4`AL`>4`ALa>3_AMa>3_AMa>3_AMa>3_AMa>i000000001O00000000000001O0000000000000001O00000000000001OH80000000000000001O0000K5_Ob0A>0000000000000006J0001O00000001O0000000000000000000001O0000000001O0000000000^@OU?>000000000001O000gB@R;`0^D0c;OmCa0S<_O\\CR1`j00000000000000000001O000000000000000001OUOgjR7"}, "image_id": 214, "id": 3200}, {"iscrowd": 0, "category_id": 1, "bbox": [343.0, 231.0, 40.0, 43.0], "area": 1595, "segmentation": {"size": [512, 512], "counts": "Yh[57R?g0E;000000000001O0000000000000001O0000000000000000000000000001O00000000000000^OZYP2"}, "image_id": 214, "id": 3201}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 242.0, 3.0, 62.0], "area": 105, "segmentation": {"size": [512, 512], "counts": "b7n1R>UOk0UOYYn7"}, "image_id": 214, "id": 3202}, {"iscrowd": 0, "category_id": 1, "bbox": [129.0, 244.0, 35.0, 34.0], "area": 863, "segmentation": {"size": [512, 512], "counts": "ggP2l0Q?300000000000001O000000000000000007I005K00000000016I0000000000000001hg]5"}, "image_id": 214, "id": 3203}, {"iscrowd": 0, "category_id": 1, "bbox": [49.0, 245.0, 52.0, 84.0], "area": 3400, "segmentation": {"size": [512, 512], "counts": "hih09]?Ih@:Q?Mo@3Q?Nn@2R?Nm@3l>e0\\Oc0^Ob0L4000005K000000000000000000000000010O000000000000000M31O0000000000000001OM3]Oc0^Ob0^Ob0I70000AiX]6"}, "image_id": 214, "id": 3204}, {"iscrowd": 0, "category_id": 1, "bbox": [155.0, 291.0, 15.0, 16.0], "area": 172, "segmentation": {"size": [512, 512], "counts": "Wi]24l?00004L000LI_@7a?8L000000000000OnfZ5"}, "image_id": 214, "id": 3205}, {"iscrowd": 0, "category_id": 1, "bbox": [165.0, 501.0, 121.0, 11.0], "area": 1287, "segmentation": {"size": [512, 512], "counts": "fob2:f?0000000000000000000000000000000000000000000000000000000000000000000000000000000000000O100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000[``3"}, "image_id": 214, "id": 3206}, {"iscrowd": 0, "category_id": 1, "bbox": [49.0, 0.0, 27.0, 31.0], "area": 383, "segmentation": {"size": [512, 512], "counts": "g`h02l?3N2M2O2M3N2M2O2M2OO010O010O011N2O1N2O1N2O2M2O2M3N1Nooi6"}, "image_id": 215, "id": 3207}, {"iscrowd": 0, "category_id": 1, "bbox": [108.0, 0.0, 4.0, 2.0], "area": 5, "segmentation": {"size": [512, 512], "counts": 
"PPf11o?001OOQPX6"}, "image_id": 215, "id": 3208}, {"iscrowd": 0, "category_id": 1, "bbox": [236.0, 96.0, 40.0, 13.0], "area": 322, "segmentation": {"size": [512, 512], "counts": "USf38g?10O100000000000O10O10000000000000O01000000000000000O010000000000000O010000000Pme3"}, "image_id": 219, "id": 3209}, {"iscrowd": 0, "category_id": 1, "bbox": [149.0, 207.0, 187.0, 161.0], "area": 15076, "segmentation": {"size": [512, 512], "counts": "ViZ21m?3M3N1N3M2O2M3M2N3N1N3M3N1N3M2O2M3M2O2M2N3N2M2N3M2O2M2N3N2M2N3N1N3M3N1N3M2N3N2M2N3N1N3M3N1N3M2O2M2N3M30O10O010O10O10O010O10O010O10O10O010O10O010O10O10O010O10O10O010O10O010O10O10O010O10O10O010O10O010O10O10O010O10O10O010O10O010O10O10O010O10O010O10M3M2O2M2N3N2M2N3M2O2M2N3N2M2N3N1N3M3N1N3M2O2M3M2N3N1N3M3N1N3M2O2M00010O0000102M210O10O10O010O1O0N3M2O2M3M2N3N1N3M3N1N3M2O2M2N3N_gg2"}, "image_id": 219, "id": 3210}, {"iscrowd": 0, "category_id": 1, "bbox": [88.0, 338.0, 97.0, 71.0], "area": 3867, "segmentation": {"size": [512, 512], "counts": "Q\\\\12l?3N2M2N3M2O2M2N3N2M2N3N1N3M2O2M3M2N3N1JdNfA_1W>6N3N2N101O0N3N110O10O001M2O2M3M2N3N1N300O0100O010O0100O0100O010O0100O0100O010O0100O0100O0100O001N101O0O2O1O0O2O010O001N2M2N3M2O2M2N3M3N1N3M2N3N2M2N3N1NYTS5"}, "image_id": 219, "id": 3211}, {"iscrowd": 0, "category_id": 1, "bbox": [10.0, 356.0, 35.0, 40.0], "area": 1012, "segmentation": {"size": [512, 512], "counts": "Z[5`0`?000000000000000001O0009f@YOi>o0I700000000000001O0001O000000000000000I7D00000bLWOeGi0[8^300000000000000000000000000000000000000000000000^Ob0000000000000000000000000000000000000000000?A00000000000000000000000000000000000000000000000000000000000000B>00000000000000000000000000000000000000000000000000000000000000000000000000000000=C000000000000000000000000000000000000000000000000000000000000B>000000000000000000000000000O1000>B0000000000000000000000000000000000000000000000000000000000000000A?00000000000000000000000L"}, "image_id": 221, "id": 3218}, {"iscrowd": 0, "category_id": 1, "bbox": [79.0, 63.0, 9.0, 27.0], "area": 234, "segmentation": {"size": [512, 512], "counts": "oaW1k0U?00000000000000GZnc6"}, "image_id": 221, "id": 3219}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 95.0, 61.0, 40.0], "area": 1798, "segmentation": {"size": [512, 512], "counts": "Q3m0S?00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000:XO>00000h@OU>T100000000000000000000Q]Q7"}, "image_id": 221, "id": 3220}, {"iscrowd": 0, "category_id": 1, "bbox": [69.0, 95.0, 57.0, 29.0], "area": 1563, "segmentation": {"size": [512, 512], "counts": "[cR1?U?<00000000000000000000000000000000000001O000000000000000000000000000000000000001O000000000000000000000000000000^OcmP6"}, "image_id": 221, "id": 3221}, {"iscrowd": 0, "category_id": 1, "bbox": [415.0, 204.0, 97.0, 308.0], "area": 29876, "segmentation": {"size": [512, 512], "counts": "\\f_6d9\\600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"}, "image_id": 221, "id": 3222}, {"iscrowd": 0, "category_id": 1, "bbox": [158.0, 223.0, 178.0, 289.0], "area": 42921, "segmentation": {"size": [512, 512], "counts": "i]_23m?T2lM0000000000000000000000000`KMSI3m6MSI3m6MSI3m6MSI3m6MSI3m6MSI3m6MSI3m6MSI3m6MSI3m6MSI3^600000000000000000000000000000000000000000000000000000000000000000O10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000R1nN0000000000000000000000]`e5"}, "image_id": 222, "id": 3227}, {"iscrowd": 0, "category_id": 1, "bbox": 
[0.0, 229.0, 25.0, 20.0], "area": 249, "segmentation": {"size": [512, 512], "counts": "U7;f?O010O00010O010O01O01O010O010O00010O010O01ON3M2MdXc7"}, "image_id": 224, "id": 3228}, {"iscrowd": 0, "category_id": 1, "bbox": [40.0, 236.0, 94.0, 61.0], "area": 2827, "segmentation": {"size": [512, 512], "counts": "PXd02k?3N3L301O010O01O01`@EY?;e@G\\??O01O01O0M4M2N3L3N2M4N1010O00010O010O01O01O010O010O00010O010O01O01O010O010O00010O010O01O01O010O010O00010O010O01O01O010O010O00010O010O01O01O0O2L3N3M2M3N3L3N3M2M3N3L3Nigl5"}, "image_id": 224, "id": 3229}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 302.0, 4.0, 11.0], "area": 26, "segmentation": {"size": [512, 512], "counts": "^9;e?M4M2Mdfm7"}, "image_id": 224, "id": 3230}, {"iscrowd": 0, "category_id": 1, "bbox": [179.0, 342.0, 24.0, 26.0], "area": 377, "segmentation": {"size": [512, 512], "counts": "Xki22l?2M4L3N2M4L3N3O00010O00010O010OO1N3L3M3N3L3M3NWUj4"}, "image_id": 224, "id": 3231}, {"iscrowd": 0, "category_id": 1, "bbox": [407.0, 363.0, 47.0, 69.0], "area": 1665, "segmentation": {"size": [512, 512], "counts": "Sm[63k?3L3N2M4M2N3L3N3L3N2M4M2M4M2M3N3L3N3M2M3N3L3N3ON3L3N2M4M2M4M2M3N3L3N3L3N2N3L3N3L3N2M4M2M4M2M_dl0"}, "image_id": 224, "id": 3232}, {"iscrowd": 0, "category_id": 1, "bbox": [120.0, 387.0, 39.0, 30.0], "area": 622, "segmentation": {"size": [512, 512], "counts": "c\\l11m?2N3L3N3M2M4M21O010O010O00010O010O01O01O010O010O01O01O010O01O01N1N3M2M4M2M3NbS`5"}, "image_id": 224, "id": 3233}, {"iscrowd": 0, "category_id": 1, "bbox": [160.0, 394.0, 114.0, 69.0], "area": 3206, "segmentation": {"size": [512, 512], "counts": "Q]`24j?2M4M2N2M4M2M4M2M3O2O010O01O01O010O010O01O01O010O01O01O010O01O01O010O010O01O01O010O01O01O010O01O01O010O010O01O01O010O01O01O010O010O01O01O010O01O01O010O01O01O010O010O01O01O010O01O01O010O01O01O010O010O01O01OO2M2N3L3N2M4M2N3L3N2M_bf3"}, "image_id": 224, "id": 3234}, {"iscrowd": 0, "category_id": 1, "bbox": [395.0, 442.0, 26.0, 44.0], "area": 647, "segmentation": {"size": [512, 512], "counts": "mnU64i?3M3N3L3M3M4M2M4L3N2M4L3N2010O0M4M2M3M4M2Bn@KU?2n@KV?1n@KU?3=MUR]1"}, "image_id": 224, "id": 3235}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 26.0, 12.0], "area": 202, "segmentation": {"size": [512, 512], "counts": "04l?001O00001O001O00001O00001O001O00001O001O00O1M3N2MSPc7"}, "image_id": 225, "id": 3236}, {"iscrowd": 0, "category_id": 1, "bbox": [130.0, 0.0, 65.0, 49.0], "area": 1992, "segmentation": {"size": [512, 512], "counts": "mPQ21l?4M2M4M2N2M4M2M4M2M3N3M201O001O00001O001O00001O010O00010O010O0010O0010ON3O00010O010O00010O010O00010O010N1N3L3N2N3L3N3L3N2N3L3N3Lb_n4"}, "image_id": 225, "id": 3237}, {"iscrowd": 0, "category_id": 1, "bbox": [193.0, 0.0, 26.0, 19.0], "area": 337, "segmentation": {"size": [512, 512], "counts": "W`P34j?2M3N3O001O00001O00001O0a@B\\?a001O000000N21ON2M3M3N3L3NQ`b4"}, "image_id": 225, "id": 3238}, {"iscrowd": 0, "category_id": 1, "bbox": [298.0, 0.0, 4.0, 2.0], "area": 6, "segmentation": {"size": [512, 512], "counts": "PPe41o?1O00OQPY3"}, "image_id": 225, "id": 3239}, {"iscrowd": 0, "category_id": 1, "bbox": [324.0, 0.0, 58.0, 27.0], "area": 964, "segmentation": {"size": [512, 512], "counts": "UPR53k?3L30001O001O00001O001O00001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001ON2M3N2M3N2M3N2M3M3NRPQ2"}, "image_id": 225, "id": 3240}, {"iscrowd": 0, "category_id": 1, "bbox": [456.0, 0.0, 22.0, 22.0], "area": 308, "segmentation": {"size": [512, 512], "counts": "^PT73k?2M4M2M3N3M201O00001O001O00M3N2M3N2M3N2M3NRPa0"}, "image_id": 225, "id": 3241}, {"iscrowd": 0, "category_id": 1, 
"bbox": [495.0, 0.0, 17.0, 29.0], "area": 326, "segmentation": {"size": [512, 512], "counts": "f`g71m?2M3N3L3N2M4M2M4M20001O001O00001O"}, "image_id": 225, "id": 3242}, {"iscrowd": 0, "category_id": 1, "bbox": [259.0, 7.0, 59.0, 64.0], "area": 1922, "segmentation": {"size": [512, 512], "counts": "``Q43m?3L4M3L4M2N3L4M3M3L3N3L3N1O1N101O1N2O0O2O1O1N101O1002M3N2N2N2M3N1O2NN1O2N2O01N2N1N3N2M2O2N2M3N1N3N2N1N3N2M3N1O2M3N1NhnP3"}, "image_id": 225, "id": 3243}, {"iscrowd": 0, "category_id": 1, "bbox": [466.0, 27.0, 36.0, 30.0], "area": 640, "segmentation": {"size": [512, 512], "counts": "]QY72k?3N2M4M2M3M4M210O01O01O010O00010O010O00010O01O01O010O0001O0M4M2M3M4M2Mmn4"}, "image_id": 225, "id": 3244}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 43.0, 22.0, 49.0], "area": 701, "segmentation": {"size": [512, 512], "counts": "a1[1e>01O0M4M0O11N3O2O01O01OCZA@g>=[ADd>9_AGb>6aAI_>4dAK]>3fAJ]>2fAK]>3eAK^>1j0Nbnd7"}, "image_id": 225, "id": 3245}, {"iscrowd": 0, "category_id": 1, "bbox": [104.0, 48.0, 98.0, 62.0], "area": 2727, "segmentation": {"size": [512, 512], "counts": "jRd12l?3L3N2M4M2M4M201O00010O010O01O01O010O01O01O010O01OO2L3N3M2M4M2M3N3L3N3L3N210O0010O0010O000NO10O13L3N3L3N2M4M2M40O0010DSACm>:VADm>9UADn>;TABn>>;0O0N3M21O010O01O01O010O010O01O01O010O01O01O010O01O01O0O2M2M3N3M2M4MRnj4"}, "image_id": 225, "id": 3246}, {"iscrowd": 0, "category_id": 1, "bbox": [463.0, 52.0, 49.0, 53.0], "area": 1609, "segmentation": {"size": [512, 512], "counts": "^bW74i?3N2M4L3N2M4L3N3L3N210O0010O0010O00010QASOi>R11O01O010O00010O010O00010O00010O010O00010O010O00010O00010lM"}, "image_id": 225, "id": 3247}, {"iscrowd": 0, "category_id": 1, "bbox": [213.0, 62.0, 70.0, 60.0], "area": 2648, "segmentation": {"size": [512, 512], "counts": "obZ32k?4L3N3L3M3N3L3M3O20O0010O001XATO[>k0cAWO]>i0`A[O_>f0]A]Od>b0ZA@f>n0O010O01O01O01O000M1O0012M3M3M4N1010O00010O00010O0010O0010O00010O0010O00010O0010O000VOeA0[>MgA3Y>JjA3Z>IjA3Y>KiA2Z>KjA2Y>JjA3Y>JjA3T?Ln\\b3"}, "image_id": 225, "id": 3248}, {"iscrowd": 0, "category_id": 1, "bbox": [72.0, 106.0, 76.0, 67.0], "area": 2391, "segmentation": {"size": [512, 512], "counts": "bTT12l?3L3N2M4M2M4M2N2M4M2M4O0010O00010O010OA[OcAe0]>`0O001O00001O001O00lNgAa0Y>_OhAa0W>_OiAa0W>_OiAb0V>^OkAa0U>_OkAb0S>_OnA`0\\>VOdAj0i>10O010O00010O010O00010O010O0010O0010O010YOTA8l>FVA:j>CZA=f>@\\A`0d>]O`Ab0n>N010O10O10O210M3N3L3N3L3Ndke5"}, "image_id": 225, "id": 3249}, {"iscrowd": 0, "category_id": 1, "bbox": [150.0, 117.0, 27.0, 28.0], "area": 465, "segmentation": {"size": [512, 512], "counts": "XT[22l?2M4L3M3N3L3M40O00010O0010O00010O0010O0O1M4L3N3L3M3MW\\W5"}, "image_id": 225, "id": 3250}, {"iscrowd": 0, "category_id": 1, "bbox": [192.0, 120.0, 69.0, 48.0], "area": 1844, "segmentation": {"size": [512, 512], "counts": "`TP33k?2N2N3L3N3M2N2N30On@\\Oh>e0UA^Ok>a0RABn>h0010O01O01O010O00010O00010O010M2N2M4L31C]O\\Ab0a>B^A?`>B`A>a>B_A=a>C^A?a>A]Aa0d>_OXAd0h>;0O0mNYAm0g>QO\\Ao0j>010O00010O00010O010O00010O00010ON3M2M3M4M2M4M2M3M4Mf[m3"}, "image_id": 225, "id": 3251}, {"iscrowd": 0, "category_id": 1, "bbox": [355.0, 133.0, 85.0, 114.0], "area": 3792, "segmentation": {"size": [512, 512], "counts": "Pea53j?3M3M4L3M4M2M3M4L3O110O0010O0010O00010UAROb>n0[AUOe>T10O010O00010O0001dAiNn=U1PBoNo=n0RBTOo=h0TBXOl=i0SBXOl=h0UBWOl=i0SBXOl=h0UBWOk=i0UBWOl=i0SBXOl=h0UBWOl=f0VBZOj=g0UBZOj=f0SB]On=b0oABP>?mACS>=jAFW>R1]BZNj<5aBY1b0eNn6nAKR>1RBNn=0TB1k=LXB4i=H[B7e=G]B7e=F_B6e=F^B7e=G]B7f=E^B7g>M3NSiS1"}, "image_id": 225, "id": 3252}, {"iscrowd": 0, "category_id": 1, "bbox": [168.0, 170.0, 53.0, 72.0], "area": 2203, "segmentation": {"size": [512, 
512], "counts": "VWd21m?2M4M2lNI\\B;`=H^B:`=I\\B:a=I]B:`=I\\B:a=I\\B;a=G]B;a=H[B;f=DXB?g=BUBa0k=_OSBd0m=g000010O0010O0010O0010O0010O001N1M3N3L3N3L3N2M4L3N2N3N1N3L3N2M4M2M4M2M3M4M2MYZa4"}, "image_id": 225, "id": 3253}, {"iscrowd": 0, "category_id": 1, "bbox": [57.0, 173.0, 70.0, 57.0], "area": 1957, "segmentation": {"size": [512, 512], "counts": "_fl03k?3M2O2M2N3M2N3N1N3O00100O010O0100O010O0100O010GXOXAh0f>ZO[Af0b>]O]Ac0b>_O^Aa0_>AbA>\\>EcAFgA:V>IiA7U>KiA8U>JhA8W>j0O010O01O0N3M3N1N3M2N3M2O2M2N3M3M2N3O010O010O01000O010M2N3N1N3M2N3M2N3NQZP6"}, "image_id": 225, "id": 3254}, {"iscrowd": 0, "category_id": 1, "bbox": [128.0, 179.0, 25.0, 26.0], "area": 401, "segmentation": {"size": [512, 512], "counts": "VVP21l?4M2M3N3M2M3N3M210O00010O00010O01O0N2N3L3N2M4M2MZZc5"}, "image_id": 225, "id": 3255}, {"iscrowd": 0, "category_id": 1, "bbox": [441.0, 209.0, 71.0, 67.0], "area": 2505, "segmentation": {"size": [512, 512], "counts": "Ygl61m?2M4M2N3L3N2M4M2M4M20010O0n@VOo>m00010OO1N21L301O01O010O010O00010O010O01O01O010O0N2N3O010O010O01_AUOk=k0SBXOl=h0QB[Om=h0PB[On=f0oA]OQ>d0lA^OT>b0jAAV>T1010O00010O010O010O00001L3N3L3N2N3L3NRI"}, "image_id": 225, "id": 3256}, {"iscrowd": 0, "category_id": 1, "bbox": [330.0, 211.0, 66.0, 49.0], "area": 2072, "segmentation": {"size": [512, 512], "counts": "`WU51m?2N3L3N2N3L3N3L3N3M2M3N3N110O010O00010O0UAnNi>T1010O01O01O0M21N3L3010O00010O010O00010O010O0001M201O01O01O01O0M4M2M3M4M2M3N3L3M4M2M3N3L3MVii1"}, "image_id": 225, "id": 3257}, {"iscrowd": 0, "category_id": 1, "bbox": [138.0, 220.0, 37.0, 25.0], "area": 363, "segmentation": {"size": [512, 512], "counts": "UWU21l?4M2M3O20O010O00010O010O00010O010O00010N]@Ga?6_@Mb?0^@3a?J`@8d?1O01O010O01O01O010O000M4MeXX5"}, "image_id": 225, "id": 3258}, {"iscrowd": 0, "category_id": 1, "bbox": [34.0, 233.0, 64.0, 58.0], "area": 2041, "segmentation": {"size": [512, 512], "counts": "]Xa03k?2M4M2M3N3M2M4M2M4M2N2M4M2M4M20010O010O010O00010O010OkN`Aj0`>TObAl0_>POeAP1e>0010O010O0001O0M4M2N2010O010O00010O001L3N3M2M3N3L3N3M2M3N3L3N3L3N2NTh^6"}, "image_id": 225, "id": 3259}, {"iscrowd": 0, "category_id": 1, "bbox": [101.0, 233.0, 33.0, 36.0], "area": 621, "segmentation": {"size": [512, 512], "counts": "Uhb11l?4M2M3M4L3N2M4M201O01O010O01O01O010ONXOl@h0R?ZOn@e0Q?^Oo@?Q?Co@:S?Hm@4T?Nl@0V?:4Ff@I]?;3K4O2M2N3Mehl5"}, "image_id": 225, "id": 3260}, {"iscrowd": 0, "category_id": 1, "bbox": [326.0, 258.0, 57.0, 64.0], "area": 1850, "segmentation": {"size": [512, 512], "counts": "]YS522Oh?5N3M2O2M3N1N3N2M2O2M3M2O2M3N110O0N3N2M2N3N2M2O2M3N1N2O0O0012M3N1N3M3N1N3N2M2O2M3N1N3M2O2M3N1N3N2M2O2M3M2O2M3N1N_WP2"}, "image_id": 225, "id": 3261}, {"iscrowd": 0, "category_id": 1, "bbox": [152.0, 260.0, 65.0, 55.0], "area": 1781, "segmentation": {"size": [512, 512], "counts": "RY\\23m?2M2O2M3N2M2N3N2M2O2M3M2O2M10O01O010O010O0HQOaAn0`>TO]Am0b>UO\\Ak0d>90O1O3N2M100O010O00010O010O0010O0010O0010O0010O010O2N3N1N3N2M2N3N2M2O2M3N2M2N3NUWc4"}, "image_id": 225, "id": 3262}, {"iscrowd": 0, "category_id": 1, "bbox": [430.0, 265.0, 67.0, 70.0], "area": 2753, "segmentation": {"size": [512, 512], "counts": "[Yg62k?4L3N2M4M2M3N3L3N3L3N210O0010O0010O0010O0010O0010O0010M2M4bAiNo=Y1nAjNo=Y1nAjNP>Y1mAjNQ>c1010O00010O010O00010O010O00010O010O00010O0010ON2SOmAKV>1mALV>2lAKX>1lALV>1mALV>2mAKV>1mALV>2lALV>1nAKV>2WX7"}, "image_id": 225, "id": 3263}, {"iscrowd": 0, "category_id": 1, "bbox": [193.0, 301.0, 69.0, 64.0], "area": 2119, "segmentation": {"size": [512, 512], "counts": 
"YjP31n?2N1N3N2N2M2O2N2M3N1O2M3N2N1N3N2N2M3N1O2M3000O0100000O0100000O0100000O0100000O0100000O0100000O0O2N2M3N1O2M3N2N1N3N2N2M2O2N2N2M2O2N2M3N1Oael3"}, "image_id": 225, "id": 3264}, {"iscrowd": 0, "category_id": 1, "bbox": [274.0, 305.0, 49.0, 66.0], "area": 1907, "segmentation": {"size": [512, 512], "counts": "kYY43l?4M3[@G]?<`@F]?b0M002M4M3M3L4M3M3L4M3M3L4M3M2M01000O01000O010O10O101O3L4M3M3L4M3M3L4M3M3L4M3M3L4M3M3L4M3MhTn2"}, "image_id": 225, "id": 3265}, {"iscrowd": 0, "category_id": 1, "bbox": [12.0, 306.0, 82.0, 56.0], "area": 2608, "segmentation": {"size": [512, 512], "counts": "^Z62l?3L3N2M4L3N3L3M3N3L3N30O00010O010O00010O010O00010O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01OO2M2N3L3N2O2O010O01O01O010O01N1N3M2M4M2M3N3M2M4M2M4M2N3L3N2M4MPf`6"}, "image_id": 225, "id": 3266}, {"iscrowd": 0, "category_id": 1, "bbox": [107.0, 317.0, 43.0, 51.0], "area": 1281, "segmentation": {"size": [512, 512], "counts": "Tke11l?4M2M3N3L3N3L3N2M4M2M4M2M3M4M2O20O0010O0010O0010O0010O0N2N3L3N3L3M3N3L3N3L3N2M4M2M4Mjed5"}, "image_id": 225, "id": 3267}, {"iscrowd": 0, "category_id": 1, "bbox": [342.0, 324.0, 36.0, 35.0], "area": 659, "segmentation": {"size": [512, 512], "counts": "cZ[51n?2N2N2O2M2N2N2N2O2M2N2N2N3N1N2N2N1O01O1O2O2M2N2N2N3M2O1N2N3M2N2O1N3M2NZeR2"}, "image_id": 225, "id": 3268}, {"iscrowd": 0, "category_id": 1, "bbox": [444.0, 325.0, 29.0, 29.0], "area": 540, "segmentation": {"size": [512, 512], "counts": "iZn62k?3M4M2M3N3L3N3M200010O00010O010O00010O00001L3N3L3M3M4M2MgUc0"}, "image_id": 225, "id": 3269}, {"iscrowd": 0, "category_id": 1, "bbox": [392.0, 347.0, 62.0, 44.0], "area": 1896, "segmentation": {"size": [512, 512], "counts": "g[T65g?4K6J5K5K5L50O000000010O000000010O000000010O000000010O000000010O00000010O00000010O000000010O000000010O0000000N3J5L4K5K5K6Jndl0"}, "image_id": 225, "id": 3270}, {"iscrowd": 0, "category_id": 1, "bbox": [260.0, 373.0, 27.0, 29.0], "area": 584, "segmentation": {"size": [512, 512], "counts": "j[R4h0X?0000O01000O0100000O0100000O0100000O01000O4M3M4L4K5LiS`3"}, "image_id": 225, "id": 3271}, {"iscrowd": 0, "category_id": 1, "bbox": [86.0, 374.0, 38.0, 34.0], "area": 770, "segmentation": {"size": [512, 512], "counts": "W\\[14i?3M3L5L3M3O20O000010O01O01f@]OU?g010O0001O010O00010O001O01O01O01O01O0L5L3L4M4L3LRdQ6"}, "image_id": 225, "id": 3272}, {"iscrowd": 0, "category_id": 1, "bbox": [11.0, 376.0, 65.0, 47.0], "area": 1911, "segmentation": {"size": [512, 512], "counts": "el51l?3L4M4L3L4M4K4M3O20O0000010O00010O00010O000010O000010O000010O00010O00010O000M3M4O00001L3O110O00fN_AU1f>0010O00010O000YOWA8i>DZA@_A?a>^ObA`0`>]OcA`0Q?K4M3M3Lbci6"}, "image_id": 225, "id": 3273}, {"iscrowd": 0, "category_id": 1, "bbox": [141.0, 382.0, 57.0, 83.0], "area": 2651, "segmentation": {"size": [512, 512], "counts": "TnV22l?2M4M2M3N3L3N3L3M3WOVOZBm0c=VOYBm0d=VOZBm0c=UOZBn0c=VOZBl0d=VOYBn0c=UO[Bm0b=j0010O010O00010O01M2M3N3O00N2M4M2M4M200HVB[Nj=b1YB]Ng=`1\\BaNc=]1_BcNb=Y1bBfN^=X1dBiN\\=S1hBiN[=T1hBjNZ=T1k0L3N3L3N2M4M2M4L3N2M4M2Mkcl4"}, "image_id": 225, "id": 3274}, {"iscrowd": 0, "category_id": 1, "bbox": [313.0, 398.0, 92.0, 50.0], "area": 2891, "segmentation": {"size": [512, 512], "counts": "nll41o?4L5K4K5H_Oi@f0P?:K5L3M000001N2O3MO10O10O10O1000O10O10O10O1000O10O1000O10O10O10O1000O10O10O10O1000O10O1000O10O10O10O1000O10O102N4K4M00O10O1000O10M3002M2O000O10O1000O01000O10O10003L5L3M4F^@Nf?NSSe1"}, "image_id": 225, "id": 3275}, {"iscrowd": 0, "category_id": 1, "bbox": [99.0, 412.0, 17.0, 17.0], "area": 176, "segmentation": {"size": [512, 512], "counts": 
"Uma13j?4M2M3O2O0010O0010O0010M2M4M2M3NRcU6"}, "image_id": 225, "id": 3276}, {"iscrowd": 0, "category_id": 1, "bbox": [256.0, 416.0, 26.0, 24.0], "area": 339, "segmentation": {"size": [512, 512], "counts": "\\]P41m?3M3N1N3M2N3N1010O0100O0100O010O0100OO2M2N3N2M2N3Mhbb3"}, "image_id": 225, "id": 3277}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 433.0, 29.0, 72.0], "area": 1076, "segmentation": {"size": [512, 512], "counts": "a=X2h=N3M2N3M2N3M2N3M2N3M2N2N3M2N3M2M4M2N3M2N3M2N3M2N3M2N3M2NTRa7"}, "image_id": 225, "id": 3278}, {"iscrowd": 0, "category_id": 1, "bbox": [403.0, 433.0, 56.0, 60.0], "area": 1533, "segmentation": {"size": [512, 512], "counts": "lnY61m?3N2N2N2M2O2N2M3N1O2N2M3N2N1N3N2N2N1N3N2N1N1000O10O1000O01000O10O11O2M2O2N2M3N2N1O2M3N2N1N3N2N2N2M2O2N2M3N1O2NPRj0"}, "image_id": 225, "id": 3279}, {"iscrowd": 0, "category_id": 1, "bbox": [86.0, 440.0, 15.0, 13.0], "area": 108, "segmentation": {"size": [512, 512], "counts": "o][11m?3M2N3N110O010O010O01N1N3M2NUR]6"}, "image_id": 225, "id": 3280}, {"iscrowd": 0, "category_id": 1, "bbox": [201.0, 446.0, 68.0, 48.0], "area": 2194, "segmentation": {"size": [512, 512], "counts": "PoT31k?4K6K4K5K5L5J5N21O0001O01O0000WAmNe>W1010O0000010O000000010O000000010O0M3O1010O00nNWAm0n>0010O000000010O000001N1L4000010O0000010O00O1K5L5J5K5L4K6KiQi3"}, "image_id": 225, "id": 3281}, {"iscrowd": 0, "category_id": 1, "bbox": [81.0, 452.0, 14.0, 11.0], "area": 105, "segmentation": {"size": [512, 512], "counts": "ZnX12k?4L300010O0001O01O000O2L3MlQ`6"}, "image_id": 225, "id": 3282}, {"iscrowd": 0, "category_id": 1, "bbox": [358.0, 454.0, 28.0, 26.0], "area": 454, "segmentation": {"size": [512, 512], "counts": "f^c52l?3L3M3M4L3N2010O00010O00010O01O01O01O01O010N1M3M4L3M3Nean1"}, "image_id": 225, "id": 3283}, {"iscrowd": 0, "category_id": 1, "bbox": [275.0, 463.0, 30.0, 29.0], "area": 499, "segmentation": {"size": [512, 512], "counts": "RoY41m?2N2N3L3N3M2M4M2O110O010O0010O0010O010O010OO1N3L3N3M2M4M2NZQW3"}, "image_id": 225, "id": 3284}, {"iscrowd": 0, "category_id": 1, "bbox": [39.0, 464.0, 51.0, 48.0], "area": 1623, "segmentation": {"size": [512, 512], "counts": "ooc01l?3N2^OMUA5h>NUA5i>MUA5i>NSA5j>NTA4j>b0M3N2N2O100001O001O00001O001O001ON2N2N2M300001O00001N1N3L3N2M4M2N3L3N3L3N2N3L3N3L3N2NZab6"}, "image_id": 225, "id": 3285}, {"iscrowd": 0, "category_id": 1, "bbox": [141.0, 464.0, 48.0, 35.0], "area": 946, "segmentation": {"size": [512, 512], "counts": "SoV21l?4L3N2M4L3N3L30010O01O01O010O00010O010O00010O00010O010O00010O00010O010O00010O01O000M4M2M3M4M2MTQQ5"}, "image_id": 225, "id": 3286}, {"iscrowd": 0, "category_id": 1, "bbox": [446.0, 472.0, 64.0, 40.0], "area": 1383, "segmentation": {"size": [512, 512], "counts": "m_o62m?2N100O1O1O1O1O1O1O100O1O1O1O1O1O1O1O100O1O1O1O1O1O1O100O1O1O1O1O1O1O100O1O1O1O1O1O1001O1O1O1N2N3M2N2N2O1N2N2N3M2N2N2N2O1N2N3Mb`0"}, "image_id": 225, "id": 3287}, {"iscrowd": 0, "category_id": 1, "bbox": [100.0, 477.0, 44.0, 35.0], "area": 1005, "segmentation": {"size": [512, 512], "counts": "n_b12l?2N2M3N2M3N2N2M3N2M3N2N2M3N200001O00001O001O001O00001O001O00001O001N1M4M2M3N3L3N3M2M3Nj`g5"}, "image_id": 225, "id": 3288}, {"iscrowd": 0, "category_id": 1, "bbox": [314.0, 492.0, 69.0, 20.0], "area": 858, "segmentation": {"size": [512, 512], "counts": "l_m44h?4L4L4O1001O0000001O0000001O000000L4N2001O0000001O0000001O0000001O0000001O0000001O4L0000001O0000001O0000001O0000001O0000001O0000001O0000TPP2"}, "image_id": 225, "id": 3289}, {"iscrowd": 0, "category_id": 1, "bbox": [14.0, 497.0, 26.0, 15.0], "area": 263, "segmentation": {"size": [512, 512], 
"counts": "o_71m?2M3N2N2M3N200001O00001O001O001O00001O001O000O2L3NZ`[7"}, "image_id": 225, "id": 3290}, {"iscrowd": 0, "category_id": 1, "bbox": [13.0, 0.0, 24.0, 12.0], "area": 186, "segmentation": {"size": [512, 512], "counts": "R`63k?20001O001O001O00001O001O00001O001O00O1M3N2M3NR`]7"}, "image_id": 226, "id": 3291}, {"iscrowd": 0, "category_id": 1, "bbox": [36.0, 0.0, 37.0, 20.0], "area": 473, "segmentation": {"size": [512, 512], "counts": "VPb02l?3L3O101O001O00001O001O001O00001O001O00001O001O00001O001ON2N2N2M3N2M3N2NR`k6"}, "image_id": 226, "id": 3292}, {"iscrowd": 0, "category_id": 1, "bbox": [99.0, 0.0, 35.0, 14.0], "area": 279, "segmentation": {"size": [512, 512], "counts": "Q`a11n?2O001O00001O001O001O00001O001O00001O001O001O00001O001O00O1N2M3N2M3NRPm5"}, "image_id": 226, "id": 3293}, {"iscrowd": 0, "category_id": 1, "bbox": [311.0, 0.0, 71.0, 26.0], "area": 1229, "segmentation": {"size": [512, 512], "counts": "X`k42j?4L401O0000001O0000001O0000001O0000001O0000001O000000001O0000001O0000001O0000001O0000001O0000001O0000001O0000001O0000001O0000001OO1L4L4L4L4KUPQ2"}, "image_id": 226, "id": 3294}, {"iscrowd": 0, "category_id": 1, "bbox": [448.0, 0.0, 44.0, 22.0], "area": 498, "segmentation": {"size": [512, 512], "counts": "PPP71o?1O1O2N1O1O1O1O1O1O2N1O1O1O1O1O1O2N1O00O1O1O1O1O100O1O1O1O1O1O1O1O100O1O1O1O1O1O1O100OQP:"}, "image_id": 226, "id": 3295}, {"iscrowd": 0, "category_id": 1, "bbox": [146.0, 8.0, 59.0, 65.0], "area": 2011, "segmentation": {"size": [512, 512], "counts": "iQY22k?4M2N3M2M3N3M2N3L3N3M201O01O010OLROVAo0f>TOZAl0d>WO\\Ai0a>YO_Ag0^>]ObAb0\\>@dAa0Y>BgA=W>EhA=T>i0N3N110O00010O01M2N3M2M3N3M2M4A\\A^Of>a0[A]Oh>b0YA[Oi>f0VAWOn>h050010O010N1N3L3N3M2M3N3M2N3L]_i4"}, "image_id": 226, "id": 3296}, {"iscrowd": 0, "category_id": 1, "bbox": [89.0, 10.0, 43.0, 42.0], "area": 1057, "segmentation": {"size": [512, 512], "counts": "Wa\\11l?3N3L3N2N3L3N3M2M3N3L3N30O010O00010O010O010O00010O010O00010O0O2L3N3M2M3N3M2M4M2M4M2N[om5"}, "image_id": 226, "id": 3297}, {"iscrowd": 0, "category_id": 1, "bbox": [436.0, 19.0, 27.0, 27.0], "area": 442, "segmentation": {"size": [512, 512], "counts": "lPj62m?4M2M3N2M4M2M3N0O10O10O10O010O010O011N3N2N3L3N2M3N3Lm^h0"}, "image_id": 226, "id": 3298}, {"iscrowd": 0, "category_id": 1, "bbox": [194.0, 20.0, 55.0, 68.0], "area": 2144, "segmentation": {"size": [512, 512], "counts": "YRQ32k?4M2M4M2N2M4M2M3N3M2M4M2M3N3M2M4M2M3N3M2M4M20001O01O01O010O01O01O01M2M4M2M3N3L3N3L3M3N3L3L4M4L3N3L3N2M4M2M4MR_S4"}, "image_id": 226, "id": 3299}, {"iscrowd": 0, "category_id": 1, "bbox": [377.0, 26.0, 34.0, 29.0], "area": 543, "segmentation": {"size": [512, 512], "counts": "_al51l?3N2M4M2M3M4M2M40O0010O0010O0010O0010N1M4M20010O0Dd@4]?Ie@8b?O01O01O0N2M4Mf^b1"}, "image_id": 226, "id": 3300}, {"iscrowd": 0, "category_id": 1, "bbox": [272.0, 42.0, 60.0, 83.0], "area": 2837, "segmentation": {"size": [512, 512], "counts": "_SX41m?2N3L3N2N3oNCZB`0d=BZBa0b=CZB`0d=BZBa0c=A[Ba0b=CZB`0d=BZBa0c=BZB`0c=CZBa0c=BZB`0d=BZB`0c=Q1O001O010O01O010O01O010O01O010O0N2N3L3N3M2M4M2N2M4M2M4M2N2M4M2N3L3N3M2M3N3L3N3M2M4M2NWni2"}, "image_id": 226, "id": 3301}, {"iscrowd": 0, "category_id": 1, "bbox": [336.0, 61.0, 44.0, 88.0], "area": 2158, "segmentation": {"size": [512, 512], "counts": "gRX51l?3N3iAK^<8PCG^O4_=7QCG]O5_=9oBE@4_=:nBE_O5_=9oBE@4_=:nB9n6TAGP?6RAHP?5`0M2MbmQ2"}, "image_id": 226, "id": 3302}, {"iscrowd": 0, "category_id": 1, "bbox": [509.0, 69.0, 3.0, 6.0], "area": 12, "segmentation": {"size": [512, 512], "counts": "Xbn72l?2O2jM"}, "image_id": 226, "id": 3303}, {"iscrowd": 0, "category_id": 1, "bbox": 
[386.0, 84.0, 56.0, 91.0], "area": 2920, "segmentation": {"size": [512, 512], "counts": "_TQ61c01e>2XA1d>2YA1e>1XA2e>2XA1e>1XA2e>2XA1e>f0\\OhN]B[1a=gN\\B]1`=gN]B[1a=gN\\B\\1a=hN\\B[1`=d0N3L31O01O010O01ON3L3N30O0M4L3N2M4M2M2O1N3N3L3N2M4L3N6I3N2M4M2M4M2M3N3L3M4M2M3N3L3N3LRmR1"}, "image_id": 226, "id": 3304}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 108.0, 27.0, 64.0], "area": 996, "segmentation": {"size": [512, 512], "counts": "\\3o1Q>01O01O01N1N3L3N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N\\\\b7"}, "image_id": 226, "id": 3305}, {"iscrowd": 0, "category_id": 1, "bbox": [437.0, 109.0, 57.0, 82.0], "area": 2615, "segmentation": {"size": [512, 512], "counts": "Vej62k?3N3L3N2g@Ej>?SACk>?RADk>`0RACk>j0M4M2M3N3M2M4M2M3N3L3N3L3N2M4M201O01O010O01ON3M2M4M2M3N3L3N3L3N2M4M2M4M2N2M4M2M4M2M3N3L3N3L3N2M4MXl8"}, "image_id": 226, "id": 3306}, {"iscrowd": 0, "category_id": 1, "bbox": [32.0, 139.0, 40.0, 63.0], "area": 1696, "segmentation": {"size": [512, 512], "counts": "mU`02l?2N3L3N3M2M3N3PODXB>h=MjA7U>j001O000O2O001O001O00001O0O2O001O00O1N2M3N1N3N2N2M3N6I;F;D;Fjjk6"}, "image_id": 226, "id": 3307}, {"iscrowd": 0, "category_id": 1, "bbox": [79.0, 144.0, 54.0, 65.0], "area": 2145, "segmentation": {"size": [512, 512], "counts": "TfW14j?2N3iNJ_B9^=JdAMg0;c=O[B4a=0[B3e=MYB6f=KWB7i=ITB;j=FTBg001O010O01O010O01O01O0O2M20010O010N1N3M2M3N3L3010O01O0M3N3L3N3M2M4M2N2M4M2M4M2N2MR[m5"}, "image_id": 226, "id": 3308}, {"iscrowd": 0, "category_id": 1, "bbox": [485.0, 147.0, 27.0, 66.0], "area": 1009, "segmentation": {"size": [512, 512], "counts": "oeb71m?2M4M2M3N3M2M4M2N2DZO^Ag0_>\\O`Ad0]>_OdAa0Y>AgA?W>DfA?W>CgA?Z>c0kA`Nj=a1SBaNn=^1oAfNP>e110O00010O010O0XK"}, "image_id": 226, "id": 3309}, {"iscrowd": 0, "category_id": 1, "bbox": [412.0, 166.0, 26.0, 27.0], "area": 437, "segmentation": {"size": [512, 512], "counts": "hU^63k?2M4M2M3M4M2M4O01O01O01O010O01O01O001L3N2M4L3N3L3NejT1"}, "image_id": 226, "id": 3310}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 180.0, 22.0, 25.0], "area": 344, "segmentation": {"size": [512, 512], "counts": "S65h?3N2M4M2M4M200010O010O0010O00M4M2M4M2M3N3LYjd7"}, "image_id": 226, "id": 3311}, {"iscrowd": 0, "category_id": 1, "bbox": [142.0, 183.0, 52.0, 63.0], "area": 1851, "segmentation": {"size": [512, 512], "counts": "WWW23j?3M4M2M3M4M2M4M2M4M210O0EROdAn0Z>TOgAl0V>WOgAk0W>WOgAl0X>>10O010O0N2M4M2010O010O0O2M2M4M0O101O2M4M2M3N3M2M4M2M4M2N3L3N2M4M2N3LUjn4"}, "image_id": 226, "id": 3312}, {"iscrowd": 0, "category_id": 1, "bbox": [54.0, 200.0, 31.0, 31.0], "area": 581, "segmentation": {"size": [512, 512], "counts": "lVk03k?3M2M3N3M2M4M2M4N11O010O01O01O010O01O01N1N3M2M4M21O010ODa@5f?M4M\\Ye6"}, "image_id": 226, "id": 3313}, {"iscrowd": 0, "category_id": 1, "bbox": [200.0, 200.0, 51.0, 68.0], "area": 1993, "segmentation": {"size": [512, 512], "counts": "oWT33k?2M3M4M2]OC`Aa0]>AaAa0\\>C`A`0^>B`Aa0\\>BaAa0\\>CaA`0\\>c0M3N3M2010O0010O0010O0010O0010O00NO10O0103L3N3L3N2M4M2M4M2M4M2M3N3L3M4M2M4M2M3NbYR4"}, "image_id": 226, "id": 3314}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 207.0, 86.0, 68.0], "area": 2296, "segmentation": {"size": [512, 512], "counts": "d6f1X>0O04M2N2O20O010O01O01O01UOaA4^>IfA6[>GgA:X>DkA;U>BnA>S>_OoAb0P>\\OSBc0n=YOUBh0b>O010O00010O010O010O00010O010O0010O0010O010O00010O010O010O000O2M2M4M2O2O01O0M3N3L3010O01O01O010O01O010O01O010O01O01N1M4M2N3L3N2N3LShd6"}, "image_id": 226, "id": 3315}, {"iscrowd": 0, "category_id": 1, "bbox": [195.0, 219.0, 110.0, 89.0], "area": 3705, "segmentation": {"size": [512, 512], "counts": 
"ihQ33k?2M4M2N2M4M2M4O00010O0010O0010O0010O0010O01O01O010O01O01O010O01O01O0O2O01O01O010O01O01O010O01O010O01O0N2M4M2N3L3N201O010O0O1N2NO0100O0CdNZB\\1e=gNXBY1i=iNUBW1j=mNRBS1o=oNoAS1o=a0L3N2O20O01O01O010O0nMXBm1h=PNZBn1n=M2M3N2M011O3L3N2N2M102N3L3N2M4M2N3L3N2N3L3N3M2M3NmXW3"}, "image_id": 226, "id": 3316}, {"iscrowd": 0, "category_id": 1, "bbox": [109.0, 226.0, 30.0, 30.0], "area": 551, "segmentation": {"size": [512, 512], "counts": "egf11m?2N3L3N2N3M2M4M2O2O0010O0010O0010O010O0010O0010O0N3Bf@1]?Lf@2\\?Lf@2f?M_Xj5"}, "image_id": 226, "id": 3317}, {"iscrowd": 0, "category_id": 1, "bbox": [369.0, 259.0, 54.0, 66.0], "area": 2135, "segmentation": {"size": [512, 512], "counts": "eih52k?4M2BJo@9n>Ko@8n>Jo@9o>>L3N2M4M2M4M2M4M2M3O20O01O01O010O01O01O010O01O01O010O01OM4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N`W\\1"}, "image_id": 226, "id": 3318}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 292.0, 13.0, 32.0], "area": 267, "segmentation": {"size": [512, 512], "counts": "U9o0P?1000O2O3M4K4M3M3L4M4L3LRVi7"}, "image_id": 226, "id": 3319}, {"iscrowd": 0, "category_id": 1, "bbox": [154.0, 292.0, 94.0, 55.0], "area": 2595, "segmentation": {"size": [512, 512], "counts": "RZ]21o?2M4M2M3N3L3N2N2M4M2M2OO0100O0100O010O0100O0100O010O0100O0ITO\\Al0c>WOZAi0g>8O010O010O10O010O10O010O10O010O010O3N3M2M010O10O010O10O01001O00010M2O20O00010O010O01O01O010O010O01O01O01O0N3L3N2N3L3N3L3N3M]fS4"}, "image_id": 226, "id": 3320}, {"iscrowd": 0, "category_id": 1, "bbox": [426.0, 295.0, 57.0, 64.0], "area": 2200, "segmentation": {"size": [512, 512], "counts": "cZe62k?4M2N2M4_@C[?b0M3N3M2GYOVAk0g>8N3L3N2M4M2N3O000010O010O010O00010O010O00010O010O010O0O1N3L3N3M2M3N3L3N3M2M4M2M3N3M2M4M2M3N3M[V>"}, "image_id": 226, "id": 3321}, {"iscrowd": 0, "category_id": 1, "bbox": [486.0, 297.0, 26.0, 62.0], "area": 925, "segmentation": {"size": [512, 512], "counts": "hZc72m?2N2N2_OIVA8h>KVA7h>KUA8i>IVA9h>IVA9h>IVA9h>IVA9h>a0M3N2N2N1O1IbNkA^1T>eNjA[1V>70O1000002N20dF"}, "image_id": 226, "id": 3322}, {"iscrowd": 0, "category_id": 1, "bbox": [46.0, 301.0, 34.0, 39.0], "area": 708, "segmentation": {"size": [512, 512], "counts": "eYg01n?3N3L3N2N2M3N3L10O03N2M4M2N2M3N2M1000O3N3L3N2M3N2M4M2M3N2N3L3N2M3Ndeg6"}, "image_id": 226, "id": 3323}, {"iscrowd": 0, "category_id": 1, "bbox": [106.0, 305.0, 50.0, 65.0], "area": 1852, "segmentation": {"size": [512, 512], "counts": "iYe13l?5L4L3L5L4L4L3L5L4L3M4K5L3M0O103M3MO01000O0100000O013M4K4M4L3M4K5L3M4L3L01000O10OKk@AU??n@^OR?b050103M4L3L5LQea5"}, "image_id": 226, "id": 3324}, {"iscrowd": 0, "category_id": 1, "bbox": [27.0, 343.0, 56.0, 59.0], "area": 1997, "segmentation": {"size": [512, 512], "counts": "Tk=1o?2M3N3L3N2M4M2N3L3N2M4M2M3N3L300O2M2M4M0O100O010O010O01000O010O010O10O012M4M2M3N3M1N10O010O03N2N3L3]Ol@6V?Hm@4W?Ik@5W?Hl@5c?M2NnSf6"}, "image_id": 226, "id": 3325}, {"iscrowd": 0, "category_id": 1, "bbox": [393.0, 344.0, 37.0, 38.0], "area": 814, "segmentation": {"size": [512, 512], "counts": "_kT62k?3N3L3N3L3N2M4M2M301O010O01O01O010O01Om@UOQ?m0010O00010O010O0XOo@a0Q?\\OSAd0T?M4L3N2M4M2M4LidX1"}, "image_id": 226, "id": 3326}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 352.0, 43.0, 65.0], "area": 1666, "segmentation": {"size": [512, 512], "counts": "Q;Y1g>20001N1M3N2M3N0O010O10O10O010O0101N4M2M3O1000001O01O00N2M3N3L3N2N2YOm@lNgAT1Y>jNiAV1W>hNkAX1U>fNmAZ1]>00O1O1N2N2N2N3M2N2N2N2N2N2N2N2O1N2N2N3M2N2NVSk1"}, "image_id": 226, "id": 3328}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 358.0, 48.0, 56.0], "area": 1542, "segmentation": {"size": [512, 512], "counts": 
"U\\W71n?2N2N2N2HJc@8[?7O2N2N2N2M3N2N2N1O2N2N2M3OO2N1O02N2N1O2M3O1000000000O1000OO2N2N2N2M3[OYANi>0XAOj>OXAOi>HSA165i>HRA274i>HRA274o>JSA3_?Ndc0"}, "image_id": 226, "id": 3329}, {"iscrowd": 0, "category_id": 1, "bbox": [399.0, 387.0, 31.0, 31.0], "area": 523, "segmentation": {"size": [512, 512], "counts": "elW61m?3N2M2O2M3N1N3N2M2O2M3N101O10O10O10O1M2O2M3N1N3N2M2O2M3N1N3NccX1"}, "image_id": 226, "id": 3330}, {"iscrowd": 0, "category_id": 1, "bbox": [433.0, 415.0, 53.0, 66.0], "area": 1607, "segmentation": {"size": [512, 512], "counts": "bnh61n?2N2N1N3N2N2BETA=j>ESA=k>FSAETA=j>ETA=j>>M2O2N2N2M3N1OO0100000O010000O010000O012N2N2M2O2N2N2M3N1JSAXOo>f07N2N2M2O2N2N2M3N1O2N2Mbb<"}, "image_id": 226, "id": 3331}, {"iscrowd": 0, "category_id": 1, "bbox": [215.0, 417.0, 62.0, 49.0], "area": 1613, "segmentation": {"size": [512, 512], "counts": "gm[32n?2N2M3N3L3N2M3N2M4M2M3N1O0O010O010O10O010O010O10O010O010O010O10O010O10O010O010O010O10O010O10O010O02O2M3N2N3L3N2M3N2M4M2M3NVRe3"}, "image_id": 226, "id": 3332}, {"iscrowd": 0, "category_id": 1, "bbox": [295.0, 423.0, 90.0, 81.0], "area": 2733, "segmentation": {"size": [512, 512], "counts": "cmc41o?2M3N1N3M3N2M3N1N3M3N2M2N3N2M3N2M2N3N2M3O00100ON3M3N1N3M3N1O2O1N101O0O2O1OO0N3O10O01000O01000O01O1M2O2N20O01000O01000O01000O010nNWAl0i>ROXAn0h>PO[AP1k>00O01000O01000O010O10ON3N2M2O2M3Je@D]?9e@E]?97N2M2N3NQQo1"}, "image_id": 226, "id": 3333}, {"iscrowd": 0, "category_id": 1, "bbox": [150.0, 436.0, 75.0, 67.0], "area": 2852, "segmentation": {"size": [512, 512], "counts": "^^[24l?2M3N3L3N2M3N3L3N2M3N3L3N2M3N3M20O1N0O10O01000O010O010O010O010IgNgAY1X>jNeAV1\\>70O010O010O0100O3N2K210O0100O0LjA`NV>_1mA^NU>a15O01000O012M3N2M3N3L3N20001M2N2M3N3L3Hc@I_?4e@H^?69L3NTQ_4"}, "image_id": 226, "id": 3334}, {"iscrowd": 0, "category_id": 1, "bbox": [237.0, 458.0, 37.0, 27.0], "area": 577, "segmentation": {"size": [512, 512], "counts": "fnf33m?2M3N1N10001N100O2O0O101N10000O2O0O101N100O2O000O2O0O04M2N2M3N3L3N2M3N2MSaf3"}, "image_id": 226, "id": 3335}, {"iscrowd": 0, "category_id": 1, "bbox": [102.0, 477.0, 44.0, 35.0], "area": 1157, "segmentation": {"size": [512, 512], "counts": "n_c12m?1N2O1N2O1N200O100L4I7I7K5000000000000001O0000000000000000001O00000000000000001O0O1E;E[af5"}, "image_id": 226, "id": 3336}, {"iscrowd": 0, "category_id": 1, "bbox": [412.0, 478.0, 47.0, 34.0], "area": 992, "segmentation": {"size": [512, 512], "counts": "m_^61n?2N2M2O1KJ_@7`?5N2O1O1O1O1O1O1O1O1O1N2O1O1001OO1O1O1N2O11O001O1O1O1O1O1O1O1O1O1O1O0Mh@^OY?`05M3N2F]@4i?N2N]Pj0"}, "image_id": 226, "id": 3337}, {"iscrowd": 0, "category_id": 1, "bbox": [36.0, 500.0, 26.0, 12.0], "area": 159, "segmentation": {"size": [512, 512], "counts": "o_b01n?1O1O1O1O100O1O1O1O1O1O1001O1O1O1O1O1O2N1O1O1O0000Q`P7"}, "image_id": 226, "id": 3338}, {"iscrowd": 0, "category_id": 1, "bbox": [222.0, 504.0, 26.0, 8.0], "area": 106, "segmentation": {"size": [512, 512], "counts": "o__31l?3M3O11O00001O00001O00001O00001O00001O0000001O0000Q`S4"}, "image_id": 226, "id": 3339}, {"iscrowd": 0, "category_id": 1, "bbox": [165.0, 506.0, 15.0, 6.0], "area": 47, "segmentation": {"size": [512, 512], "counts": "oob21o?0O10000O100O10000O100O12N3MQ`U5"}, "image_id": 226, "id": 3340}, {"iscrowd": 0, "category_id": 1, "bbox": [488.0, 508.0, 12.0, 4.0], "area": 27, "segmentation": {"size": [512, 512], "counts": "o_d71l?3001O00001O00001O0000Q`5"}, "image_id": 226, "id": 3341}, {"iscrowd": 0, "category_id": 1, "bbox": [16.0, 0.0, 68.0, 64.0], "area": 2439, "segmentation": {"size": [512, 512], "counts": 
"TQ81n?3M2N2N2N2N2O1N2N2[O^OiAd0U>^OiAd0U>^OiAe0T>]OjAe0T>]OjAe0T>]OjAe0T>]OjAe0T>^OiAd0V>]OhAe0V>d0O1O1O1O2N1OO1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O1O1O101N2N2N2N3M2N2N2N2N2N2N2O1N2N2N3M2N2N2N2N2NZoe6"}, "image_id": 228, "id": 3342}, {"iscrowd": 0, "category_id": 1, "bbox": [96.0, 0.0, 47.0, 29.0], "area": 806, "segmentation": {"size": [512, 512], "counts": "XP`13k?2O2M3N1N3O001O1O001O001O1O001O1O001O001O1O001O001O1O001O1ON2O1N2N2O1N2N2O1N2O1N2N2O1N2N2O1NR`h5"}, "image_id": 228, "id": 3343}, {"iscrowd": 0, "category_id": 1, "bbox": [163.0, 0.0, 22.0, 19.0], "area": 280, "segmentation": {"size": [512, 512], "counts": "P`a23m?3M2N2N3M2N2N2NO100O10000O100O101N3N3M2M3Nf_S5"}, "image_id": 228, "id": 3344}, {"iscrowd": 0, "category_id": 1, "bbox": [214.0, 0.0, 84.0, 49.0], "area": 2616, "segmentation": {"size": [512, 512], "counts": "iP[31k?5L3M3M3M4L3M3M4O00001O00001O00001O00001O0000001O00001O00001O00001O000010O00010O00010O000010O000010O00010O00010O00010O00010O000010O00010O000010O0O1M4L3M3L5L3M3M4L3M3LeoZ3"}, "image_id": 228, "id": 3345}, {"iscrowd": 0, "category_id": 1, "bbox": [415.0, 0.0, 46.0, 24.0], "area": 594, "segmentation": {"size": [512, 512], "counts": "P`_61o?1O1O1O1O001O1O1O1O1O1O1O1O1O00O1O11O001O1O1O1O1O1O1O1O1O1O1O00N2O1O1O1O1O1O2N2N2N2N2M3N2Nj_i0"}, "image_id": 228, "id": 3346}, {"iscrowd": 0, "category_id": 1, "bbox": [470.0, 0.0, 42.0, 44.0], "area": 1193, "segmentation": {"size": [512, 512], "counts": "TQ[71l?3M3M4L3N2N30O00010OO2L3M3M4L3L4M4M20001OO1M3M3M3M3001O00001O00010O00010O00010O000M"}, "image_id": 228, "id": 3347}, {"iscrowd": 0, "category_id": 1, "bbox": [308.0, 22.0, 84.0, 58.0], "area": 2842, "segmentation": {"size": [512, 512], "counts": "eQj44i?3M3M4L3M3M4L3L4M4L30001O01O01O01O01O01O01O01O01O01O00010O00010O00010O00010O00010O00010O00010O00010O0001O01O01O01O01O01O01O01O01O01O01O01O00010N1M3L5L31O01Bk@OU?Nn@3Q?JRA5P?GTA5]?M3MXnk1"}, "image_id": 228, "id": 3348}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 26.0, 14.0, 36.0], "area": 286, "segmentation": {"size": [512, 512], "counts": "j0T1l>02M3N3L3N2N3L3N3L3N3M2M4MYnh7"}, "image_id": 228, "id": 3349}, {"iscrowd": 0, "category_id": 1, "bbox": [436.0, 56.0, 56.0, 47.0], "area": 1622, "segmentation": {"size": [512, 512], "counts": "aRj64i?3M7I3O1010OO11Od@AW?d00001M2M3M4K4M3O20O00010O00001N1N210O0001O01O01O01O01O01O000oNWAl0n>01O01O01O01O00N3L3L4M4L3M3M3L5Lim9"}, "image_id": 228, "id": 3350}, {"iscrowd": 0, "category_id": 1, "bbox": [173.0, 75.0, 68.0, 61.0], "area": 2178, "segmentation": {"size": [512, 512], "counts": "_cf23j?3N3M2M3N3M2M4M2N3N11O010O01O010O010m@UOo>n010O00010O010O010O00010O010O010M2N3L3N2N3L3N3M2M4M2N20N3N100N3L3N3M2M4M2M3N3M2M4M2M4M2N2M4M2M4M2N^]W4"}, "image_id": 228, "id": 3351}, {"iscrowd": 0, "category_id": 1, "bbox": [106.0, 82.0, 56.0, 55.0], "area": 1725, "segmentation": {"size": [512, 512], "counts": "YSe14i?3M3O2_@F[?9b@J^?<010O00010O0001g@AP?;PAHP?6o@MQ?3l@1T?=00010OM3M4K4M3M4L3M301O01O00010O00010O0001O01O01ON3L3M3M4L3M3M4L3M3L5L3M3M4L3MXm^5"}, "image_id": 228, "id": 3352}, {"iscrowd": 0, "category_id": 1, "bbox": [357.0, 91.0, 66.0, 56.0], "area": 1985, "segmentation": {"size": [512, 512], "counts": "ncb52k?3M4L3M3L5L3M3M4O00010O000010O00N3L3L4M301O000010O00010O00010O00010O00010O0jN]Ao0c>nN`AS1f>01O01O01VOWA;i>BZA?e>^O_Aa0b>ZObAf0l>10O00010O00010O00010O0O1M4L3M3M4L3MU\\\\1"}, "image_id": 228, "id": 3353}, {"iscrowd": 0, "category_id": 1, "bbox": [258.0, 98.0, 70.0, 64.0], "area": 2651, "segmentation": {"size": [512, 512], "counts": 
"cSQ43m?2M3N2M3M2O2M3N2M3N2M3N2M3M3N2M3N1N3N2M3M3N2O01M10O010O010O01O010O010O0102M3N2M3OO1N3N2M3N2M3N2M2OO0102M3N2M2O2M3N2M3N2M3N2M3N2M3M2O2M3N2M_kk2"}, "image_id": 228, "id": 3354}, {"iscrowd": 0, "category_id": 1, "bbox": [426.0, 115.0, 55.0, 53.0], "area": 1723, "segmentation": {"size": [512, 512], "counts": "gTe62k?3L5L3M3M4L3L4M3M4L3L401O00010O00010O0001O01O01O01O01iN[AQ1k>0O00010O00010O0000010O00010O00010OTOUAc0k>ZOYAe0h>WO[Aj0m>M4L3M3L5L3M3Md[?"}, "image_id": 228, "id": 3355}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 116.0, 18.0, 70.0], "area": 738, "segmentation": {"size": [512, 512], "counts": "d3V2j=M3N3L3N3L3N2N3L3N3\\O]AFf>8\\AFg>7\\AEg>8\\AFf>KRA::Ij>3ZAIi>5c0LUlf7"}, "image_id": 228, "id": 3356}, {"iscrowd": 0, "category_id": 1, "bbox": [87.0, 133.0, 26.0, 25.0], "area": 387, "segmentation": {"size": [512, 512], "counts": "dd[13k?2N2N3L3N3M2N201O010O010O01O010O01O01L3N3M2N3M2M4Me[W6"}, "image_id": 228, "id": 3357}, {"iscrowd": 0, "category_id": 1, "bbox": [222.0, 134.0, 38.0, 41.0], "area": 915, "segmentation": {"size": [512, 512], "counts": "SU_31l?4M2M4M2N2M4M2N3L3N3L3N2010O010O00010O010O010O00010M2N3L3N3L3N2N3L3N3M2M3Nbkm3"}, "image_id": 228, "id": 3358}, {"iscrowd": 0, "category_id": 1, "bbox": [33.0, 138.0, 48.0, 70.0], "area": 1957, "segmentation": {"size": [512, 512], "counts": "Uf`02k?3N3L3^OF[A>\\>CYA29=\\>CYA37=]>KaA8\\>JaA9\\>h0M2M3M4M2M4M21O010O01O01O010O0M2O0O2O2M3N3L3N3L3N2M4M2M3N3L3N3L3M3N3L3N3L3N2Mb[g6"}, "image_id": 228, "id": 3359}, {"iscrowd": 0, "category_id": 1, "bbox": [330.0, 139.0, 143.0, 85.0], "area": 4743, "segmentation": {"size": [512, 512], "counts": "UUU51m?3N1N2N3L3M3N300O00100O00102M010O00010O0M3M4L3N2M4M200010O0001O01O01O01O01O01O01OjN[AQ1f>lN\\AT1h>10O000\\OVA3j>H[A7e>F^A:O@X>3lA>HC[>LQB`0AGQ?d00010O0004M0O00010O0001O01O0001L3M3N3L3N210O00010O00010O0001O01O01O01O01O01O01O01O01O00010O00010O00010O00010O000010O00010O000010O00010O000010O00010O00010O00010OO1M4L3M3M4L3M3M4K4M3M4LWZc0"}, "image_id": 228, "id": 3360}, {"iscrowd": 0, "category_id": 1, "bbox": [478.0, 151.0, 23.0, 25.0], "area": 346, "segmentation": {"size": [512, 512], "counts": "ZU_71l?3M4M2M4L3N2M4O01O010O01O01O001M2M3M4M2M3M4MU[5"}, "image_id": 228, "id": 3361}, {"iscrowd": 0, "category_id": 1, "bbox": [85.0, 162.0, 47.0, 63.0], "area": 1679, "segmentation": {"size": [512, 512], "counts": "bfZ13k?2M4M2YOFiA;S>HlA9Q>IPB6m=NQB4l=NRB4l=OPB4m=OQB4l=NQB5m=NPB5o=KoA7Q>JkA9V>i0O010O0010O0010O010O0001M2N3L3N2N3L3N3L3N3M2M3N3M2M4M2M4M2N2M4Mbjm5"}, "image_id": 228, "id": 3362}, {"iscrowd": 0, "category_id": 1, "bbox": [236.0, 175.0, 37.0, 32.0], "area": 713, "segmentation": {"size": [512, 512], "counts": "RVf32k?3M4M2M3N3L3N3N100010O010O00010O01O01O010O01O01O010O00010M2M4M2N2M4M2M3NYZg3"}, "image_id": 228, "id": 3363}, {"iscrowd": 0, "category_id": 1, "bbox": [133.0, 194.0, 64.0, 73.0], "area": 2696, "segmentation": {"size": [512, 512], "counts": "hgR22k?4M2N3L3N3M2M3N3M2M4M2N3L3N2N3L3N3L3N3M2M3N3M201O00010O010O010O00010O010O010O00N3M2N3L3N3M2M3N3M2M4M2N3L3N2N3L3N3L3N2N3L3N3M2MaYm4"}, "image_id": 228, "id": 3364}, {"iscrowd": 0, "category_id": 1, "bbox": [252.0, 210.0, 66.0, 116.0], "area": 3473, "segmentation": {"size": [512, 512], "counts": "kYn31l?3N3L3^OHZA:d>HYAHZA:c>IZA;c>G[A;b>c0N3N110O00010BbNXB^1f=dNXB_1d=eNYB]1e=eNXB^1e=eNYB^1d=`0M3N3L3N3L3N1N13L3N3L3N2M3NO010O010O01O2O3L3N2M10O0100O3N3L3N3L3N2M4L3N3L3N2M4M2M4Ah@2Z?Li@0Z?Mi@1Z?Lh@1bXQ3"}, "image_id": 228, "id": 3365}, {"iscrowd": 0, "category_id": 1, "bbox": [343.0, 223.0, 121.0, 70.0], "area": 3508, "segmentation": {"size": 
[512, 512], "counts": "hg[53k?3M2N2M4M2N3L3N3M2M3N3O001O010O01O010O01O01O010O01O010O01O010O01O01M210O010O01O01O010WOQA?o>^OUAb0k>[OWAe0R?10O0010O0010O010O0010O0010O02OO010O01O010O0O1M4M2N30O01O01O010O010O01O01O010O01O01O010O01L3N3M21O01ON3O010O010O00010O010O001M2N2M4M2N3L3N3M2M3N3M2M4Mngg0"}, "image_id": 228, "id": 3366}, {"iscrowd": 0, "category_id": 1, "bbox": [186.0, 235.0, 53.0, 58.0], "area": 1815, "segmentation": {"size": [512, 512], "counts": "fXm21l?3N3L3N2M4M2M4M2M3N3L3N3M2M3N3L3N3N11O010O01O01O010O010O01O01O010O01O01N1N3L3N2M4M2M4M2M4M2M3N3L3N3L3N2MZXX4"}, "image_id": 228, "id": 3367}, {"iscrowd": 0, "category_id": 1, "bbox": [310.0, 272.0, 48.0, 75.0], "area": 2159, "segmentation": {"size": [512, 512], "counts": "[Zk42k?3N3mNJXB8f=JWB9g=IWB:f=IVB:h=HVB;f=IWB9g=IVB:h=HVB;g=HUB;h=HVB;j=ESB=m=l00O0010O0010O010O00010O010O0010O00WOUB^Ok=?WBAi==XBEh=7YBKg=3XB0i=MYB1i=OWBOk=1VBLm=1UBLn=2UBKn=2TBDAO^>:UBD_O0^>:UBDU>8k0N3M2MTg\\2"}, "image_id": 228, "id": 3368}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 285.0, 5.0, 24.0], "area": 105, "segmentation": {"size": [512, 512], "counts": "m8h0X?O100O8GnVm7"}, "image_id": 228, "id": 3369}, {"iscrowd": 0, "category_id": 1, "bbox": [362.0, 296.0, 65.0, 79.0], "area": 2556, "segmentation": {"size": [512, 512], "counts": "T[e52k?3N3L3N2M4M2M4N10010O001N1N2M4RAQOg>U1M4DgNmA[1P>iNlA[1Q>gNmA[1P>3000O010O010O10O2O2M3O2O0010O0010O010O0010M2N3M2M3N3L3N3L2O1O3L3N2M4M2N3L3N2M7J2M4M2NRfQ7"}, "image_id": 228, "id": 3371}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 310.0, 2.0, 8.0], "area": 10, "segmentation": {"size": [512, 512], "counts": "f98n?JTfn7"}, "image_id": 228, "id": 3372}, {"iscrowd": 0, "category_id": 1, "bbox": [45.0, 340.0, 64.0, 77.0], "area": 2473, "segmentation": {"size": [512, 512], "counts": "hlf01m?3L3N2M4L3N3L3N210O0010ON2M4M2M3N3L3M4M2M3N0O00010O010O3M3N3L300010O0010O0010O0010OO1M4L3N2M3N0O002O2M4M2M3M4M2M4M2M3M4M2M4L7JUUY6"}, "image_id": 228, "id": 3373}, {"iscrowd": 0, "category_id": 1, "bbox": [96.0, 358.0, 68.0, 84.0], "area": 2749, "segmentation": {"size": [512, 512], "counts": "`]`11l?3N3L3N2M4M2M4M2M3N3L310O01O01O010L3N2M4M2M4[OcNcB_1[=cNbB`1[=dN_B_1a=cN]B]1b=?100O4M2M4M2M21N2O20O00010O010O00010O001M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N2M4M_d]5"}, "image_id": 228, "id": 3374}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 360.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "Xko72f4"}, "image_id": 228, "id": 3375}, {"iscrowd": 0, "category_id": 1, "bbox": [506.0, 371.0, 6.0, 16.0], "area": 54, "segmentation": {"size": [512, 512], "counts": "o[m72l?2M4M2M4M2]D"}, "image_id": 228, "id": 3376}, {"iscrowd": 0, "category_id": 1, "bbox": [165.0, 385.0, 51.0, 86.0], "area": 2565, "segmentation": {"size": [512, 512], "counts": "_mb21m?3M2RALm=7^AH;3U>8]AH:3V>8]AH;3U>?hADV>?gADU>?hADV>T1M4M2N2M4M2M4M2N2M4M2O20O00010O0N2N3L3N3L3N2O20ON3L3J6N3L3N3L3M3N3CWA^Ol>?XA^Ok>?WA^Ol>?j08L3N3L3N2M4M2M4M2MPcf3"}, "image_id": 228, "id": 3378}, {"iscrowd": 0, "category_id": 1, "bbox": [489.0, 423.0, 23.0, 36.0], "area": 549, "segmentation": {"size": [512, 512], "counts": "Qnd71l?3N3L3N2N3L3N3L3N3M2N201O010O01O01O010O01O01dB"}, "image_id": 228, "id": 3379}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 430.0, 53.0, 59.0], "area": 2011, "segmentation": {"size": [512, 512], "counts": "h=T1m>O01O012M010O01O000O2O01L3N2M4M2N3L3010O0010O0010O0010O0010O0010OM4M2N2M4M2M4M2M3N3N101N1M3N3L3N3L3N3M2MWRU7"}, "image_id": 228, "id": 3380}, {"iscrowd": 0, "category_id": 1, "bbox": [286.0, 433.0, 77.0, 64.0], "area": 2441, 
"segmentation": {"size": [512, 512], "counts": "U^_42k?3M4L3M4L3N2M4O01O01O010SAZO^>f0_A]Ob>b0\\AAc>`0YACg>l00O00010O010O00010O0010O0010O0010O0010O00010O\\AmN[>1bAl00UO_>KdAX1c>0O00010O0010O0010O0001L3N3M20010O0010O00010O0010O001N1M3N3L3M4M2M3N3L3N`QZ2"}, "image_id": 228, "id": 3381}, {"iscrowd": 0, "category_id": 1, "bbox": [136.0, 445.0, 28.0, 27.0], "area": 454, "segmentation": {"size": [512, 512], "counts": "_^T21l?4M2M4M2M3N3L3010O00010O0010O0010O0010O00M4M2M4M2M3N3Lna]5"}, "image_id": 228, "id": 3382}, {"iscrowd": 0, "category_id": 1, "bbox": [458.0, 456.0, 40.0, 47.0], "area": 1161, "segmentation": {"size": [512, 512], "counts": "R_U71l?4M2a@JQ?9l@JR?9k@IR?:k@JQ?d0N3L3N2M4O01O010O01O01O010O01O01O01OON12M3N2M4L3N3L3N2M4M2M4M2M3N3Lda6"}, "image_id": 228, "id": 3383}, {"iscrowd": 0, "category_id": 1, "bbox": [59.0, 460.0, 81.0, 52.0], "area": 2243, "segmentation": {"size": [512, 512], "counts": "nom02k?3N2M3N2M3O100O1N2M31O00001O001O00001O001O000000O11O001O00001O00O1N2N2M3N2M3N2N2M3N2N2M3N2M3N2N2O11O00001O001O00001O001O001O00001O001K_AiNa>U17L3N2M4M2M4M2M4M2M3N3L3NYai5"}, "image_id": 228, "id": 3384}, {"iscrowd": 0, "category_id": 1, "bbox": [179.0, 465.0, 36.0, 28.0], "area": 555, "segmentation": {"size": [512, 512], "counts": "lni22l?2M4N110O00010O010O0[@Ia?<0O01O0M3M4L301O01O01O010O01O01O01N1M3N3L3M4M2M3N[Qd4"}, "image_id": 228, "id": 3385}, {"iscrowd": 0, "category_id": 1, "bbox": [493.0, 472.0, 19.0, 40.0], "area": 492, "segmentation": {"size": [512, 512], "counts": "gof73j?3N2N3L3N3L3N3L3N2M4M2M4N10010O01O00"}, "image_id": 228, "id": 3386}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 488.0, 21.0, 24.0], "area": 361, "segmentation": {"size": [512, 512], "counts": "X?h0Y?O001O001O00001O001O001O00001M2N3M2M3N3MaPe7"}, "image_id": 228, "id": 3387}, {"iscrowd": 0, "category_id": 1, "bbox": [254.0, 488.0, 82.0, 24.0], "area": 1005, "segmentation": {"size": [512, 512], "counts": "l_o31m?2M3N3L5L2001O00001OM3M3M3O11O00001O00001O001O00001O001O00001O001O00001O00001O001O00001O001O00001O001O00001O00001O001O00001O001O00N2M3001O001O00001O001O00001O0O1NV`g2"}, "image_id": 228, "id": 3388}, {"iscrowd": 0, "category_id": 1, "bbox": [160.0, 501.0, 52.0, 11.0], "area": 359, "segmentation": {"size": [512, 512], "counts": "o_`21m?2O11O001O001O0000O1N2N2M3N21O001O00001O001O00N2N21O00001O001O00001O001O0000M3N21O001O001O00001O001O0MY`e4"}, "image_id": 228, "id": 3389}, {"iscrowd": 0, "category_id": 1, "bbox": [440.0, 508.0, 11.0, 4.0], "area": 26, "segmentation": {"size": [512, 512], "counts": "n_l62l?2001O00001O001O0000QPn0"}, "image_id": 228, "id": 3390}, {"iscrowd": 0, "category_id": 1, "bbox": [155.0, 511.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "oo]21PPb5"}, "image_id": 228, "id": 3391}, {"iscrowd": 0, "category_id": 1, "bbox": [176.0, 0.0, 69.0, 50.0], "area": 2009, "segmentation": {"size": [512, 512], "counts": "bPh21o?3M3L5L3M3L4M3M3L4M3MO01000O01003L4M3M2M10O10O10O1000O01000O01A[OcAe0\\>_OaA`0`>C\\A>d>EYA;f>?1000M210000O10000O1001O3M3M00O10000O10000O1H\\ATOd>l0_AQOd>l0;L4M4L3L4M3M3L4MT_U4"}, "image_id": 229, "id": 3392}, {"iscrowd": 0, "category_id": 1, "bbox": [263.0, 27.0, 37.0, 31.0], "area": 715, "segmentation": {"size": [512, 512], "counts": "VaS43m?3M3L4M3M3L3N0O10OO2000O01000O01000O010O01000O01000O01000O2O3M3L4M2N3L4McnY3"}, "image_id": 229, "id": 3393}, {"iscrowd": 0, "category_id": 1, "bbox": [25.0, 45.0, 49.0, 30.0], "area": 1126, "segmentation": {"size": [512, 512], "counts": 
"ba<1o?8H8H8H00O1000O1000000000O100000O10000000O10000000O100000O1000000000O100000O1000000000O1000O9H8HSnj6"}, "image_id": 229, "id": 3394}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 50.0, 8.0, 30.0], "area": 179, "segmentation": {"size": [512, 512], "counts": "b1n0R?00O101O9G8H8Hdmk7"}, "image_id": 229, "id": 3395}, {"iscrowd": 0, "category_id": 1, "bbox": [168.0, 54.0, 17.0, 15.0], "area": 167, "segmentation": {"size": [512, 512], "counts": "jQd23m?3L4M2NO01000O01000O01000O2O3M3LT^S5"}, "image_id": 229, "id": 3396}, {"iscrowd": 0, "category_id": 1, "bbox": [401.0, 54.0, 52.0, 69.0], "area": 1943, "segmentation": {"size": [512, 512], "counts": "_bX61i00[>2cA0[>2cA0\\>1bA1\\>1bA1\\>1bA1\\>2aA0]>2aA0^>1_A2_>0_A2Y>]OgAc0N3Y>[OgAd0N3Y>[OgA^1W>7N2N2O1N0000000001O00002N2N3M2N2N2N2N0KaAjN_>V1cAhN]>X1501O3M2N2N2N3M2N2N2]Oh@;[?Bg@<`?N2N2N2N2N2N`]m0"}, "image_id": 229, "id": 3397}, {"iscrowd": 0, "category_id": 1, "bbox": [156.0, 77.0, 27.0, 34.0], "area": 655, "segmentation": {"size": [512, 512], "counts": "`R^2:e?:G3M00O1000000000O1000005K5K00000O1000ESABn>>ZAZOf>f0:0006J9GT]T5"}, "image_id": 229, "id": 3398}, {"iscrowd": 0, "category_id": 1, "bbox": [420.0, 89.0, 54.0, 74.0], "area": 1989, "segmentation": {"size": [512, 512], "counts": "oSb61n?2N2N2O1N2N2N2a@DW??f@CX?d0N2N2N2N2N2N2N3M2N2N2O1N2N2N2N2N2N2N2N2N00000000000000ZOQB\\OP>c0RB[On=e0TBYOl=g0VBWOj=i0XBUOh=k0ZBSOh=k0ZBSOh=k0ZBSOh=k0ZBSOh=k0ZBSOh=k0h0_Om@2U?Ln@2S?Lo@2S?Lo@2S?Lo@2S?Lo@2a?N^lb0"}, "image_id": 229, "id": 3399}, {"iscrowd": 0, "category_id": 1, "bbox": [459.0, 122.0, 53.0, 93.0], "area": 2371, "segmentation": {"size": [512, 512], "counts": "eeU71n?2N2`@MP?5n@MP?5n@MP?5n@MP?5n@MP?5n@MP?c0N2N2N2N2N2N2N2N2N2N2N2N000000000000000000000000000H]NTB[1LiNP>NRBY10gNn=2PBW14eNk=d1800000000000000000000000001O2NVL"}, "image_id": 229, "id": 3400}, {"iscrowd": 0, "category_id": 1, "bbox": [426.0, 181.0, 27.0, 26.0], "area": 378, "segmentation": {"size": [512, 512], "counts": "PVe61n?3M2N3N1N2N3N1N3M2N2O0O1O01O011N2N3M2O2M2N2O2M2N3M2OlYm0"}, "image_id": 229, "id": 3401}, {"iscrowd": 0, "category_id": 1, "bbox": [275.0, 211.0, 74.0, 95.0], "area": 3867, "segmentation": {"size": [512, 512], "counts": "WgY47i?9G9G00002N3L10000000000AVOhAj0X>?0O100000O10000000000000O10O100000000000005J100000001O000000I61001N:JO=@`0A`0@00000000000000000000O10000000O100[OcAe0]>`0[O_NhBf1R=bNhB^1W=iNcBW1\\=g0N101N2O1O1N2O0O2O1O1N2O0O2O1001O1O0O2O1O1O001kMoBX1S=fNSCU1n>000000000000000000O10000?A003Mgfl4"}, "image_id": 229, "id": 3405}, {"iscrowd": 0, "category_id": 1, "bbox": [471.0, 299.0, 41.0, 44.0], "area": 1302, "segmentation": {"size": [512, 512], "counts": "Tj[72k?3L4M4L3M3M4L3M301O01O01O0o@UOn>n001O01O01O01O01O0N2N3O00010O00010O0001O01O01O01O0M3O2dF"}, "image_id": 229, "id": 3406}, {"iscrowd": 0, "category_id": 1, "bbox": [136.0, 341.0, 24.0, 32.0], "area": 621, "segmentation": {"size": [512, 512], "counts": "gZT22n?=C=C2M1000000000O10000000ON3000000000003M:F:Fdd_5"}, "image_id": 229, "id": 3407}, {"iscrowd": 0, "category_id": 1, "bbox": [249.0, 351.0, 27.0, 17.0], "area": 420, "segmentation": {"size": [512, 512], "counts": "ojl3:f?000000006J0000001O000000000000000000000000000000000Qee3"}, "image_id": 229, "id": 3408}, {"iscrowd": 0, "category_id": 1, "bbox": [424.0, 355.0, 66.0, 69.0], "area": 2742, "segmentation": {"size": [512, 512], "counts": 
"]\\d62m?1N3N2DIl@9R?Il@9Q?Im@9R?Il@9R?Il@91_On>P1M3N2N1O2M3N2N2N2M3N2N1O2M2O01O2N2M3N2N1O00O10002N2N2O1000O1000O100000O10N2N2NPOPB3n=MTB3j=MXB2i=LXB5h=IZB5h=IZB4i=IZB5g=J[B4g=J[B4g=JZB5h=IZB4i=IZB5h=IZB5h=IZB5h=IZB5h=IZB5i>N3N\\c:"}, "image_id": 229, "id": 3409}, {"iscrowd": 0, "category_id": 1, "bbox": [241.0, 395.0, 99.0, 43.0], "area": 1708, "segmentation": {"size": [512, 512], "counts": "blh31m?2N3M3N1010O10O010O10O10O010O10O010O10O10O010O10O10O010O10O01O0O2M2N\\@1\\?M`@8`?4K6O0001O0001O01O000001O01O0001O01O000001O01O0001O01O000001O01O0001O01OO1K5L5M2000001O01O0000010O000001O01O00000O2K4K5K5L4K6K[ce2"}, "image_id": 229, "id": 3410}, {"iscrowd": 0, "category_id": 1, "bbox": [373.0, 401.0, 78.0, 85.0], "area": 2874, "segmentation": {"size": [512, 512], "counts": "Umj51m?3N2N2M2O2N2M3N1O2M3N2N1N3N2N2O00100000OYAQO]>P1`ASO`>m0^ATOc>l0[AVOe>S1O10O10N2N2M2O2N2M30O10O1000O10O1000OmAZNm=f1PB]No=d1oA^NQ>h11^NnAU1R>hNQBX1n=gNTBY1l=eNVBZ1k=cNWB^1i=`NYB`1T>00O0100000O0100000nNbA`0^>_OdAa0\\>\\OfAe0Z>YOhAd0[>ZOgAd0Z>ZOiAd0Y>ZOhAd0[>ZOgAd0l>N2M2O2N2M3N1O2M3N`Qn0"}, "image_id": 229, "id": 3411}, {"iscrowd": 0, "category_id": 1, "bbox": [97.0, 406.0, 41.0, 29.0], "area": 439, "segmentation": {"size": [512, 512], "counts": "ml`13l?1N3M2N3O010O010O10O10O010O010O10O10O010O010O010O10O10O010O010O10O10O01N1N3M2O2Mhbj5"}, "image_id": 229, "id": 3412}, {"iscrowd": 0, "category_id": 1, "bbox": [247.0, 434.0, 47.0, 46.0], "area": 1282, "segmentation": {"size": [512, 512], "counts": "Snk32l?3L3N3e@Fk>=SAFj>=RAFl>i00010O0010O010O00010O010O0010O0010O010O00010O010O0010O00M4M2M4M2N3L3N2N3L3N3L3NPb\\3"}, "image_id": 229, "id": 3413}, {"iscrowd": 0, "category_id": 1, "bbox": [338.0, 460.0, 94.0, 52.0], "area": 2718, "segmentation": {"size": [512, 512], "counts": "S_Y52m?2N1O2N2N2N2N2N2M3N1O2N2N2N2N2N2N2N1O2M3N2000000]AfN_>Z1_AgNa>]1000O1O1O1O1O001O1O1O1O1O1O1O001O1O1O1O1O00N2O1O1O11O1O0O200O1O1O1O1O0000O0O2O001O1O1O1O11O1000O01O1O1O1O1O1O001N2N2N2N2N2M2O2N2N2N2N2Mb`W1"}, "image_id": 229, "id": 3414}, {"iscrowd": 0, "category_id": 1, "bbox": [465.0, 464.0, 47.0, 48.0], "area": 1482, "segmentation": {"size": [512, 512], "counts": "XoX72l?2O2N2M3N1O2M3N2N2N1N3N2N2O01000000O010000M3N1O2N2N2N1N3N2N1O1O11O001O1O1O1O1O001O1O1O1O001O"}, "image_id": 229, "id": 3415}, {"iscrowd": 0, "category_id": 1, "bbox": [281.0, 483.0, 24.0, 23.0], "area": 314, "segmentation": {"size": [512, 512], "counts": "_o\\42l?3M2N3L3N3N1010O010O010O010O010O01OM4M2N3M2N3MgPW3"}, "image_id": 229, "id": 3416}, {"iscrowd": 0, "category_id": 1, "bbox": [290.0, 507.0, 18.0, 5.0], "area": 54, "segmentation": {"size": [512, 512], "counts": "o_a41n?1O1O1O10000001O001O00001O001O0000Q`U3"}, "image_id": 229, "id": 3417}, {"iscrowd": 0, "category_id": 1, "bbox": [255.0, 0.0, 220.0, 161.0], "area": 25045, "segmentation": {"size": [512, 512], "counts": "gco33j?3N2M4ZOEcA=[>EbA?Z>EcA=[>EbA>[>FbA=[>EbA>[>EcA>Z>e0M3N3L3N3L3N2M4L3N2M4M2M4M2M3N3L3N3L3N2M4L3N3L3N2M4M201O01O01O010O01O01O010O01O01O01O010O01O01O01O010O01O01O010O01O01O0M4M2M3N3L3N3L3O101O001O00001O001O00001O00001O001O00001O001O0000N2N2M3N2M3M3N2M3N2M3001O001O00001O001O00001O001O00001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00N2N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2N3L3N2M4M2M4M2N2M4M2M4M2N3L3N2M4M2N3L3N3L3N2N3Li_b0"}, "image_id": 231, "id": 3418}, {"iscrowd": 0, "category_id": 1, "bbox": [491.0, 195.0, 21.0, 32.0], "area": 440, "segmentation": {"size": [512, 512], "counts": "ife72l?2N3M2M4M2N2N3M2N3L310O00010O010O01O010OhI"}, "image_id": 231, "id": 3419}, 
{"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 53.0, 14.0], "area": 612, "segmentation": {"size": [512, 512], "counts": "0:f?0000000000000000000000001O00000000000000000000001O00000000000000000000001O0000000000000000000000001O00KU`U7"}, "image_id": 232, "id": 3420}, {"iscrowd": 0, "category_id": 1, "bbox": [76.0, 0.0, 58.0, 34.0], "area": 1173, "segmentation": {"size": [512, 512], "counts": "WPV11m?2M4[@J\\?8b@J^?=O001O00O1N2O1001O001O001O00001O0e@AU?`0h@BX?c001O001O001O00001O001O001O00001O0012M010O00010O010O0O2M2N2M4M2N3M2M4M2Nhol5"}, "image_id": 232, "id": 3421}, {"iscrowd": 0, "category_id": 1, "bbox": [154.0, 0.0, 93.0, 30.0], "area": 2340, "segmentation": {"size": [512, 512], "counts": "`P]26_?;K50000000000000001O000000000000000000001O000000000000000000001O000000000000000000001O0000000000000000001O000000000000000000001O000000000000000000001O000001O000000000001O00000001O0G9EY`T4"}, "image_id": 232, "id": 3422}, {"iscrowd": 0, "category_id": 1, "bbox": [251.0, 0.0, 60.0, 56.0], "area": 1896, "segmentation": {"size": [512, 512], "counts": "n`m32l?3M2M3N3M2M4M2N3L3N2M4M2N3O001O00001O001O00001O001O001O00010O012M1O01O010O010O01O01O0eN_A<2:b>DaA;`>AcA`0\\>^OgAa0Y>]OiAd0W>XOmAg0f>0010N1N3L3N3M2M3N3M2MP_T3"}, "image_id": 232, "id": 3423}, {"iscrowd": 0, "category_id": 1, "bbox": [476.0, 0.0, 36.0, 45.0], "area": 1212, "segmentation": {"size": [512, 512], "counts": "fP^72k?3M3N3L3N2M4M2o@\\Oc>h0ZA[Od>f0YA]Og>c0WA_Oi>m0O001O00001O001O00001O00001O001O0000O1M3M3N2M3N2M3"}, "image_id": 232, "id": 3424}, {"iscrowd": 0, "category_id": 1, "bbox": [307.0, 24.0, 61.0, 51.0], "area": 1743, "segmentation": {"size": [512, 512], "counts": "eai43j?3N2N3M2M4M2010O010O00010O01N1N3M2M3N3M2UAoNh>U1O00010O010O010O000O2L3N3M1O20010O010O00N3M2M4M2N3L3N2N3O001O0N3M2N3L3N2N3L3N3MonW2"}, "image_id": 232, "id": 3425}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 38.0, 38.0, 63.0], "area": 1402, "segmentation": {"size": [512, 512], "counts": "V1h1X>10O010O00010O0ZNjAc1Z>O00010O010O01O01N1WOdAL_>0dAN^>0eAL^>3cAK`>5`AGc>9]AEf>;ZAAi>?WA_Ol>`0:1O010O01O0N2N3L3N3L3NYn\\7"}, "image_id": 232, "id": 3426}, {"iscrowd": 0, "category_id": 1, "bbox": [216.0, 45.0, 60.0, 63.0], "area": 2017, "segmentation": {"size": [512, 512], "counts": "aR\\33k?2N2M4M2M4M2N3O01O010O010O01O01OE\\OXAg0d>]OXAf0f>\\OXAg0g>9010OO1N3M2N3M2010O0010bA_N[>d1O010O010O01OO2N1010O010SOjANV>0lA0T>NoA2Q>JRB6n=HTB9l=DWB;i=BZB?f=^O]B?e=_O]B?e=^O_B>c>N2N3L3N3M2M]me3"}, "image_id": 232, "id": 3427}, {"iscrowd": 0, "category_id": 1, "bbox": [153.0, 50.0, 62.0, 72.0], "area": 2570, "segmentation": {"size": [512, 512], "counts": "Uc\\23k?3L3N2N3L3N3E]OTAf0j>\\OTAg0h>]OUAe0i>:N3L3N3N11O01L3eA_NU>h1M2M4M2O2O00010O010O01O01O010ON3M2M3N3M2O20OO0OO12FgAjN\\>S1gAkN\\>R1:M3N3M210O001O0N2N3L3N3L3N2N3L3NV^d4"}, "image_id": 232, "id": 3428}, {"iscrowd": 0, "category_id": 1, "bbox": [453.0, 51.0, 45.0, 61.0], "area": 1533, "segmentation": {"size": [512, 512], "counts": "YbR71m?2M4M2M4M2N2M4M2i@ZOR?l0O01O01O010`ATOj=l0TBWOl=i0QBYOo=g0nA\\OS>a0mABR>?kACU>=hAGV>;gAGW>S1M200010O010O01O01O010O001\\NiAe00KY>^OjAd00J`>3cAK_>3dAJ_>3cAJ`>3dAJ_>3cAJ`>3cAK_>3i0MPn6"}, "image_id": 232, "id": 3429}, {"iscrowd": 0, "category_id": 1, "bbox": [50.0, 53.0, 79.0, 73.0], "area": 2880, "segmentation": {"size": [512, 512], "counts": "lRi02l?3L3N3L3N3M2M3N3L3N3M2M310O010O0010OWAmNc>U1[AmNc>Z1L3N3M2N210O0010O010O0010O0010O0010O0010O010O0010O0010lNiA=W>@lAa0T>\\OoAc0Q>ZORBf0n=XOUBh0k=UOWBk0i=ROZBo0\\>O004MO010O010O00010O010O0001O0M4M2N3L3N2M4M2N3L3Nm\\o5"}, "image_id": 232, "id": 3430}, 
{"iscrowd": 0, "category_id": 1, "bbox": [501.0, 71.0, 11.0, 30.0], "area": 198, "segmentation": {"size": [512, 512], "counts": "kbj73j?3N3L3N2M4M2M7J201O0iM"}, "image_id": 232, "id": 3431}, {"iscrowd": 0, "category_id": 1, "bbox": [278.0, 80.0, 65.0, 50.0], "area": 2005, "segmentation": {"size": [512, 512], "counts": "]S[41l?3N2M4M2N3L3N2M4M2M4M2O110O010O00010O010O0010ORAQOk>R11O010ON3M2N201O010O00010N1N3N110O00010O01O0N2M4M2M4M2M3010O01M2N3L3N2N3L3N3L3N2MY]d2"}, "image_id": 232, "id": 3432}, {"iscrowd": 0, "category_id": 1, "bbox": [158.0, 102.0, 140.0, 179.0], "area": 12467, "segmentation": {"size": [512, 512], "counts": "cW_21m?5K5K4L201O0O2O001N1O1N3L3N3M2M3N3L3N3M2M4M2M3N3M2_OTNkBP2R=VNhBl1V=`0M4M2N3M2M4N1O101O0O2O0O1N3MZO_CQN_WOkAi0U>UOmAk0S>ROQBn0o=oNSBQ1m=mNVBS1[>01O01O010O010O00010O010O01O0lNXAQ1k>0010O001M2N2N3ZOk@e0_AYOd>JZAg05[Oh>a0[A]Og>a0=M4M2M4M2M3NQZQ1"}, "image_id": 232, "id": 3435}, {"iscrowd": 0, "category_id": 1, "bbox": [92.0, 184.0, 58.0, 107.0], "area": 3426, "segmentation": {"size": [512, 512], "counts": "bW^11m?2M4M2M3`AFX==eBEX=>eBEY=>dBEY==dBFY==eBFX==eBEX=>eBEY=>dBEP=[OiBR14FQ=ZOhBT14EP=f0nB\\OP=f0mB^OP=j1M3N3M2010O00010O010L3N2N3L3N3L2O0O011O3L3N3L3N2N3L3N3L3hNnA`0T>^OnA?V>^OmA?U>^OnA`0U>]OnA?U>^OnA`0U>]OmA`0l>M2N3L3N3L3N[id5"}, "image_id": 232, "id": 3436}, {"iscrowd": 0, "category_id": 1, "bbox": [20.0, 187.0, 48.0, 61.0], "area": 1818, "segmentation": {"size": [512, 512], "counts": "VV:4j?5K5b@D10e>a0UAA41c>g0ZA\\Od>Q1010O0010O0010O00010O010O^AkNY>U1dAnN\\>[110O010O00010O01O01O010O000N30O0O1M4M2_O^AAf><\\ABf>;]ABf><]A@g><\\ABf>;a0N3L3N3Ljim6"}, "image_id": 232, "id": 3437}, {"iscrowd": 0, "category_id": 1, "bbox": [438.0, 201.0, 57.0, 69.0], "area": 2231, "segmentation": {"size": [512, 512], "counts": "fWk62l?2M4M2N2M4b@AW?a0g@AV?f0O2O010O000N0O3N2N3L3N2M4M2M4M2N3M21O01O0jAYNT>i10010O0010O0010O010O0010SOmAKS>3PBLQ>1QB0n=NTB2l=NUB2k=LVB4j=LWB3j=KWB6h=JXB5j=IXB3k=JXB4j=JYB3j=IYB4j=JYB3j=JXB3k=JXB4l>MZX8"}, "image_id": 232, "id": 3438}, {"iscrowd": 0, "category_id": 1, "bbox": [369.0, 234.0, 62.0, 60.0], "area": 1605, "segmentation": {"size": [512, 512], "counts": "ngh52k?3N2N3L3N3M2M4M2O110O0XAXOY>h0dAZO\\>f0bA]O^>c0^A@b>`0\\ACc>=[AEf>j0^AiN[>X1aAlN_>Z1010O01O01O0nNbAc0^>ZOdAf0\\>WOhAi0X>TOjAl0V>ROlAo0c>O0010O010O0010L3N3M2O20O0010O0[Oh@b0[?10O001N1010O0010@c@;]?Ce@=`?0ON3M2N2M4M[WX1"}, "image_id": 232, "id": 3439}, {"iscrowd": 0, "category_id": 1, "bbox": [118.0, 277.0, 71.0, 52.0], "area": 2129, "segmentation": {"size": [512, 512], "counts": "cYk14j?2N2010O010O000MHNg@3W?0f@2X?0e@4W?T11O01O010O01O01O010L3N2M4M2O2O010O00010O01O0N2M4M2M4M2M3N30O01N1N2N3L3N3L3N3L3N2MTWQ5"}, "image_id": 232, "id": 3440}, {"iscrowd": 0, "category_id": 1, "bbox": [431.0, 281.0, 41.0, 75.0], "area": 1694, "segmentation": {"size": [512, 512], "counts": "hjg64j?2N2M4M2N3\\O@cAc0Z>AcAb0Z>@dAb0Y>AdAb0Z>AcAb0Z>@cAc0Z>d0M210O0O1M4M2N3N1001O0N2M10O10O103M2GmAbNW>[1kAcNW>[19L3N2@XAFk>7XAEk>9WAEl>1PAL8Ok>3o@La?0Zgc0"}, "image_id": 232, "id": 3441}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 288.0, 42.0, 102.0], "area": 2471, "segmentation": {"size": [512, 512], "counts": "\\9f2Z=000M4M2N3L3N3O00010ON3L3N2N3L3N3mMeB]1]=`NfB]1]=aNeB]1^=`NeB\\1^=aNeB]1^=`NeB]1]=`NfB]1P>N3L3N3M2M4M20001O0N3M2M4M2M3N3M2M4MQfZ7"}, "image_id": 232, "id": 3442}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 294.0, 69.0, 53.0], "area": 2152, "segmentation": {"size": [512, 512], "counts": 
"bim22l?3M2N3O0b@GV?9g@J1MP?9l@M1MT?f0O0010O01M2M3O20O010O01O010O01O010O01O012M01O010O01O010O0N2N3M2M4M2O20O0010O0010O001L3N3M2N2M4M2N3O0010OM4M2N2M4M2N3L3N3M2N`fo3"}, "image_id": 232, "id": 3443}, {"iscrowd": 0, "category_id": 1, "bbox": [472.0, 306.0, 40.0, 50.0], "area": 1259, "segmentation": {"size": [512, 512], "counts": "`Z\\73k?3L3N3M2M3N3L3N3L31N1NO4M2N3M20010O010O00010O010O0WAmNf>V101O010O01O01O010O01O01O010SF"}, "image_id": 232, "id": 3444}, {"iscrowd": 0, "category_id": 1, "bbox": [54.0, 309.0, 58.0, 77.0], "area": 2307, "segmentation": {"size": [512, 512], "counts": "e[k01l?3N3L3N2M4M2M4M2M4M2N2M4M2M4M2M3N3L3N3L31O01O010O01O01M2N3L3N1N010O03N2M3N3M2M4M2M3N3L3N3L3N2M4M2N3L3N2M4M2M4M2M3NUfW6"}, "image_id": 232, "id": 3445}, {"iscrowd": 0, "category_id": 1, "bbox": [335.0, 316.0, 72.0, 34.0], "area": 1537, "segmentation": {"size": [512, 512], "counts": "djW52k?4M2M3N3L3N3M2M3O20O01O010O01O010O010N1N3N100010O001ON2N1011O0O101ON1101O0M3N3M2010O0010ON2M4O0010O0010O010O00N3N11O0N2010O010OAj@4W?Il@7S?Go@9Q?DSA;Y?O1M4M2NbUd1"}, "image_id": 232, "id": 3446}, {"iscrowd": 0, "category_id": 1, "bbox": [105.0, 321.0, 42.0, 51.0], "area": 1143, "segmentation": {"size": [512, 512], "counts": "\\kd11l?3N3L3N2DEQA>l>EPA>n>DPA?m>CQA?o>9010ON3M2M4M2N2M4MO4M2N2M4M2N3M210O00010O010O000M4VOm@c0Z?L3N2N3L3N3M2MdUf5"}, "image_id": 232, "id": 3447}, {"iscrowd": 0, "category_id": 1, "bbox": [134.0, 332.0, 44.0, 52.0], "area": 1226, "segmentation": {"size": [512, 512], "counts": "f[S23k?2M4M2M4M2N2M4M2M4M2N2M4M2M4M2N3L21N3M1N2O3L3N201O0010O0010O010M2M4M2M3N3M2M4M2M3N3M2M^eV5"}, "image_id": 232, "id": 3448}, {"iscrowd": 0, "category_id": 1, "bbox": [162.0, 343.0, 45.0, 53.0], "area": 1229, "segmentation": {"size": [512, 512], "counts": "S\\a22k?3N3M2M4M2M3N3M2M4M2M3N3M2M4M2N3L20O3L2O003L3N3N10010O010O010O0O1N3L3N3M2M4M2M3N3M2M4M2NQUh4"}, "image_id": 232, "id": 3449}, {"iscrowd": 0, "category_id": 1, "bbox": [190.0, 357.0, 41.0, 51.0], "area": 1105, "segmentation": {"size": [512, 512], "counts": "Y\\o26g?5L3N10010O0M4M2N2M4M2N3L3N2M4M2N3L3N20N3L3N2M4M2010O00O2M2M4M2M3M4M2M4M2M3N3L3NeT\\4"}, "image_id": 232, "id": 3450}, {"iscrowd": 0, "category_id": 1, "bbox": [87.0, 395.0, 113.0, 77.0], "area": 3858, "segmentation": {"size": [512, 512], "counts": "]m[12l?2M4M2M4M2N2M4M2N3L3N3L3N2N3N110O0010O0010O010O00010O010O0010O0010O010O00kN[Ao0f>nN]AR1h>0010O010O0010O0010O010O00010O010O0010O0010O010O0010O0010O0010O0010O010O0010O0010O010O00010O010O0010O001N1N3L31O010O010N1N2M4M2N3L3N2M4M2N3L3N3M2M3N`bk4"}, "image_id": 232, "id": 3451}, {"iscrowd": 0, "category_id": 1, "bbox": [487.0, 435.0, 25.0, 77.0], "area": 700, "segmentation": {"size": [512, 512], "counts": "cmc7>b?00000000000000000000000001O000000TBKm;5]Ca0c<_OmBQ1S=SOiBm0W=n0000000000000001O0001O000N2G9F:FQPk5"}, "image_id": 233, "id": 3458}, {"iscrowd": 0, "category_id": 1, "bbox": [154.0, 0.0, 56.0, 10.0], "area": 331, "segmentation": {"size": [512, 512], "counts": "PP]21o?4L000000O10000002N4L00000000O10000O1000000O100001O1O00O1000000O10000O1000000O1000000O1000000O10000O1000000O10PPg4"}, "image_id": 233, "id": 3459}, {"iscrowd": 0, "category_id": 1, "bbox": [286.0, 0.0, 80.0, 51.0], "area": 2602, "segmentation": {"size": [512, 512], "counts": "ZP_42m?2N2N2N2O1N3M2N2N2l@]Oh>e0VA]Oh>e0VA^Oh>b0WA@h>?XACg>=XAEg>:ZAGe>9ZAIe>k0N1O1O1O1O1O2N1O1O00O1O100O1O1O1O1O1O100O1O1O1O1O1O2O100000001O01O000O1N2N2N2N001O01O001O3M2N2N2O1JSAVOo>h06N3M2O1N2N2N2N3M2N2O1N2NYoX2"}, "image_id": 233, "id": 3460}, {"iscrowd": 0, "category_id": 1, "bbox": [363.0, 0.0, 20.0, 8.0], "area": 84, 
"segmentation": {"size": [512, 512], "counts": "P`e51o?001O001O00001O001O001O001O001O00M3N2NR`P2"}, "image_id": 233, "id": 3461}, {"iscrowd": 0, "category_id": 1, "bbox": [421.0, 0.0, 49.0, 44.0], "area": 1430, "segmentation": {"size": [512, 512], "counts": "X`b62k?3N3L3RAFV>:gAIY>8cAK]>5aAN^>2_A1a>0\\A3c>MZA6f>KWA7i>a0O00001O001O00001O001O00001O001ON2M3N2M3N2O1001O001O0000N2N2M3N2\\Oi@IQA8m>JQA9l>IRA9l>IRA9m>>N2N2N3M2N2N2O1N2N2N3M2N2N2O1N00000001O2N2O1N2N2HeAhN^>U1dAiN^>U1dAiN^>U18O1N201ON2O0O1O000002N3M2N200000001O01O0000000N2N3M2O1001O0N2N2N2O1N3M2N2N2N2N2O1N3M2N2N2N2N2No]P3"}, "image_id": 233, "id": 3464}, {"iscrowd": 0, "category_id": 1, "bbox": [469.0, 23.0, 14.0, 16.0], "area": 131, "segmentation": {"size": [512, 512], "counts": "QaZ72l?3M2N3L3O2O010O01M2N3L3N2NX_>"}, "image_id": 233, "id": 3465}, {"iscrowd": 0, "category_id": 1, "bbox": [477.0, 44.0, 19.0, 16.0], "area": 184, "segmentation": {"size": [512, 512], "counts": "ea^71l?4M2M3O2O010O01O01O010O010O0001M2M4M`n7"}, "image_id": 233, "id": 3466}, {"iscrowd": 0, "category_id": 1, "bbox": [508.0, 60.0, 4.0, 9.0], "area": 20, "segmentation": {"size": [512, 512], "counts": "SRn71m?2M4M2TN"}, "image_id": 233, "id": 3467}, {"iscrowd": 0, "category_id": 1, "bbox": [185.0, 65.0, 61.0, 77.0], "area": 2923, "segmentation": {"size": [512, 512], "counts": "]bl25k?3M3n@GV>=gAFV>>eADZ>?cAA]>b0`A@]>d0cA]OY>d0gA^OV>a0jACR>>nAEo=;QBHl=8SBMh=4XBOe=4XBMg=V1O1000O01000O10O10O10O10O10O1000O01000IXBUNi=k1510O10O1004K4M3M3L4M4L0O1ROZAb0f>^O^A>e>_O^A=f>_O\\A`0g>]OYAc0R?O10O12N3L4M3M3L[lT4"}, "image_id": 233, "id": 3468}, {"iscrowd": 0, "category_id": 1, "bbox": [365.0, 68.0, 64.0, 58.0], "area": 2208, "segmentation": {"size": [512, 512], "counts": "_cf51m?2N2N3L3N3M2N3L3N3N10010O0N3M2N3M2O2O000N3M2M4MO1000101010O00010O010O010O010O00010O010O010O010O00010O010O010OlNbAd0a>YOaAe0a>YOaAe0a>YObAc0o>Hg@G\\?5g@I[?5:M2NQ]Y1"}, "image_id": 233, "id": 3469}, {"iscrowd": 0, "category_id": 1, "bbox": [486.0, 82.0, 26.0, 39.0], "area": 665, "segmentation": {"size": [512, 512], "counts": "]Sc71l?4M2M4M2N3L3N2M4M2M4M21O010O01O01O010O010O01O01O01WM"}, "image_id": 233, "id": 3470}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 84.0, 5.0, 33.0], "area": 119, "segmentation": {"size": [512, 512], "counts": "d2Q1o>00L5D;Fe]m7"}, "image_id": 233, "id": 3471}, {"iscrowd": 0, "category_id": 1, "bbox": [18.0, 87.0, 67.0, 104.0], "area": 3346, "segmentation": {"size": [512, 512], "counts": "\\U92n?1N2N3M2N2N2N3M2O1N3M2N2N3M0FYO\\Ag0Z>DeA<\\>DdA<\\>DcA<^>b01OL5E:G9F:010O1O2N100O1O1O100O1OM3G:0O2N1O1O1F:000000000000F:B>1O100OO1O1O1D`AZO^>e0dA[O[>e0fAZOZ>f0hAXOX>h0iAVOX>j0hAUOY>3hA6OFl>:UADl>3[]e6"}, "image_id": 233, "id": 3472}, {"iscrowd": 0, "category_id": 1, "bbox": [452.0, 90.0, 29.0, 28.0], "area": 465, "segmentation": {"size": [512, 512], "counts": "[SR72l?2N3M2M4M2N3M2N30O00010O010O010O010O010O0N2N3L3N3M2N3M2MP]?"}, "image_id": 233, "id": 3473}, {"iscrowd": 0, "category_id": 1, "bbox": [110.0, 100.0, 66.0, 76.0], "area": 2345, "segmentation": {"size": [512, 512], "counts": "iSg1140e?2Y@Of?2Y@0d?8N2N2N1N3N2N2N2O0010000000O01000000O001N2O1O1O001O1\\AoNW>R1fAPOZ>Q1dAQO[>o0dASOZ>o0dASOZ>[1N1O2M3N2N2N1N3N2N2N2N110O10[N[Bn0e=PO]Bo0d=nN_BP1c=nN^BP1d=oN^Bo0d=oN^Bo0d=nN_B5C
    ]O_BL@53`0Q>]OaB1@`0P>^OaB1A?j>_OXA?V?M2O2N2N2M3N1O2NWkW5"}, "image_id": 233, "id": 3474}, {"iscrowd": 0, "category_id": 1, "bbox": [286.0, 125.0, 70.0, 54.0], "area": 2229, "segmentation": {"size": [512, 512], "counts": "SU_41n?3N1N2N2N2N2N3M2O1N2N2N2\\O\\OiAf0V>_OdAa0]>`0100O001O1O100O001O10000000O10O100000O10O100000O10O10000000O10O100000O10O100000O10O10000000O10O4M5K6J5K5J7J5KPk]2"}, "image_id": 233, "id": 3475}, {"iscrowd": 0, "category_id": 1, "bbox": [460.0, 132.0, 52.0, 51.0], "area": 1663, "segmentation": {"size": [512, 512], "counts": "oTV72l?2M4M2N3M2M4M2N2M4M2N3M201UAnNf>R1XAQOh>S1010O00010O010O010O0010O0010O010O010O00010O010O010O001O0N2N3L3N3M2O20O001[K"}, "image_id": 233, "id": 3476}, {"iscrowd": 0, "category_id": 1, "bbox": [410.0, 159.0, 28.0, 29.0], "area": 476, "segmentation": {"size": [512, 512], "counts": "cU]62l?2N2M4M2N3L3N3M2N210O0010O010O0010O00O2M2M4M2N3L3N2N3LljT1"}, "image_id": 233, "id": 3477}, {"iscrowd": 0, "category_id": 1, "bbox": [77.0, 161.0, 78.0, 66.0], "area": 3174, "segmentation": {"size": [512, 512], "counts": "VfV13k?2M3N3M2M4M2M3N3L3N3L3N2N3L3N3N11O01O010O010O00010O010O000N3N1010O00010O010O00010O010O00010O010O00010ON30O00010O010O000^NeA`1^>O01O01O010N1N2M4M2M4YOUA5m>HVA5m>IVA4m>HVA5m>IVA4\\?M3NmYb5"}, "image_id": 233, "id": 3478}, {"iscrowd": 0, "category_id": 1, "bbox": [301.0, 185.0, 79.0, 47.0], "area": 2528, "segmentation": {"size": [512, 512], "counts": "gff42j?5K4L4L4L5N100000010O0000010O0000010O0000010O000N2L5K4M30010O0000010O0000010O000001L3001O01O0001O01O0001O01O0001O01O00O2K4M30010O0000010O0000010O]O]AIc>3aAIc>3bAHc>3aAIc>3aAIc>3jjQ2"}, "image_id": 233, "id": 3479}, {"iscrowd": 0, "category_id": 1, "bbox": [429.0, 189.0, 71.0, 57.0], "area": 2105, "segmentation": {"size": [512, 512], "counts": "bff61m?2N2M4M2d@FP?=l@GP?[10O01O01O010O0M40O00jNZAR1k>0OoNWAk0o>1O010O01O01O010O010O01O01O010O01O010M2M3N3M2M4M2N2M4M2MXi5"}, "image_id": 233, "id": 3480}, {"iscrowd": 0, "category_id": 1, "bbox": [398.0, 198.0, 17.0, 16.0], "area": 160, "segmentation": {"size": [512, 512], "counts": "]VW64j?2N3M2O110O010O010O0010O0N3L3N3MfY`1"}, "image_id": 233, "id": 3481}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 206.0, 7.0, 87.0], "area": 328, "segmentation": {"size": [512, 512], "counts": "^6g2Y=C=B>C=B>C=CoYl7"}, "image_id": 233, "id": 3482}, {"iscrowd": 0, "category_id": 1, "bbox": [16.0, 222.0, 53.0, 77.0], "area": 2872, "segmentation": {"size": [512, 512], "counts": "UY82e?9I71O0001DCSA=d>L\\A4]>k000000002N000001O00I7F;F8M31001O0000000000L5O01O00000000000001O01O00000000POaB_O_=;gBEY=D^B785[=ZOfBa0O5k=KUB6j=GYB9g=^ObB=d>FgWm6"}, "image_id": 233, "id": 3483}, {"iscrowd": 0, "category_id": 1, "bbox": [379.0, 230.0, 35.0, 40.0], "area": 798, "segmentation": {"size": [512, 512], "counts": "Thm51l?3N3M2N3L3N3M2N3L3N2N3M2M4O010O010O01O010O0O1M4M2N3M2M4M2N2M4M2N3M2Mdh`1"}, "image_id": 233, "id": 3484}, {"iscrowd": 0, "category_id": 1, "bbox": [274.0, 234.0, 77.0, 56.0], "area": 2504, "segmentation": {"size": [512, 512], "counts": "ZXY41l?3N2N3M2N3M2N3L3N3M2N3O0010O010O0010O010ON2N2N2N20O2O001O000O2O001OOO2N3M2N3M2ZAlN`>[10O010O00010O010O010O00010O010O01M2O1010O010O01O01O001M2N3L5L2^OWAKl>1YAJj>4ZAHh>5d0M2N[X`2"}, "image_id": 233, "id": 3485}, {"iscrowd": 0, "category_id": 1, "bbox": [419.0, 237.0, 53.0, 49.0], "area": 1417, "segmentation": {"size": [512, 512], "counts": "[ha61l?3N3L3N3L3N2M4M2M4M2M3N3O0010O00010O010O00010O010O00010O010O00010O010O000ZOUA8k>DYA;g>C[A>e>_O]Aa0c>\\OaAc0n>10O01O01L3N3L3N2M4M2Mhgc0"}, "image_id": 233, "id": 3486}, {"iscrowd": 0, 
"category_id": 1, "bbox": [74.0, 246.0, 83.0, 59.0], "area": 3489, "segmentation": {"size": [512, 512], "counts": "TYU15a?:F:K6O000O1F:K50000000L4N20001O0001O006J0J601O01O0000000000000001O01O000nNmA7S>_OWBa0e>O00000001O00000G9F:I700000001O0000O2J500000000000000007JO6J00001O0001O00000000L4F:F;E:FVXa5"}, "image_id": 233, "id": 3487}, {"iscrowd": 0, "category_id": 1, "bbox": [382.0, 290.0, 83.0, 54.0], "area": 2422, "segmentation": {"size": [512, 512], "counts": "mYo52l?3L3N3M2M3N3L3N3M2N30O0010O0010O010O0010O0010O010O0010O0010O010O0010O0010O01N1N2M4M2N3L3N3N10010O010O00010O010O010O0001M2M4jN[Am0g>QO[Am0n>L3N2N3L3N3O0010O0010O010O0N3L3N2N3L3N[Vg0"}, "image_id": 233, "id": 3488}, {"iscrowd": 0, "category_id": 1, "bbox": [243.0, 299.0, 75.0, 42.0], "area": 1939, "segmentation": {"size": [512, 512], "counts": "[ji31g?2[@1c?8M2M3N3L3N3L301O01O01O010O01O0N2N3N101L3N2M40O010O0N2N3L3N3L30001O010O010O0N2N3O0010O0010O0010O0010O0010O010O0010O0010O001N1N2N3L301O000Fh@MX?Ol@0T?Nn@1T?Ko@3S?KPA1[UQ3"}, "image_id": 233, "id": 3489}, {"iscrowd": 0, "category_id": 1, "bbox": [337.0, 325.0, 57.0, 58.0], "area": 1826, "segmentation": {"size": [512, 512], "counts": "_kX51m?3M2N2M4M2N3M2M4M2N3M2M3010O010O010OO2M2N2N3L3N3M2N3M210O01O01O010O01N1N3L3N3M2N2M4M2N3M2N30O01M2N2N3M2M4M2N3M2Maej1"}, "image_id": 233, "id": 3490}, {"iscrowd": 0, "category_id": 1, "bbox": [231.0, 334.0, 27.0, 23.0], "area": 367, "segmentation": {"size": [512, 512], "counts": "Rkc31m?2N2N2N2N2N2N2N3M2N2001O001O001O010O001M2N2N3M2N3L3N]en3"}, "image_id": 233, "id": 3491}, {"iscrowd": 0, "category_id": 1, "bbox": [391.0, 341.0, 56.0, 59.0], "area": 1985, "segmentation": {"size": [512, 512], "counts": "PlS62k?4M2M4M2M3N3@_O\\Ad0b>_O[Ad0a>_O\\Ad0b>>M4M2O20O0010O0010O0010O0010ON2010O01O01O010O01O01M2M4M2M3N3L3N3L3N201O010M2N2M4M2M4M2M3N3LoTP1"}, "image_id": 233, "id": 3492}, {"iscrowd": 0, "category_id": 1, "bbox": [219.0, 362.0, 53.0, 47.0], "area": 1181, "segmentation": {"size": [512, 512], "counts": "gk]32l?3M2Y@Jc?;0O01O0M4M20010O001O00001J501O00001O001O06KO00001O010m@ZOj>f0TA\\Ol>d0QA_OP?i0O010O0010O0010O010O00GQAAP?:VAFk>6YAJf>4\\ALd>4\\ALe>2]ANb>ISA2>5c>I_A5X?Lacg3"}, "image_id": 233, "id": 3493}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 375.0, 29.0, 79.0], "area": 1417, "segmentation": {"size": [512, 512], "counts": "g;^2b=00gMhBj1g=000000000000001O01O000000000[OQBYOo=<]B]Oi=<^BYOm=g0f000010O0000O1E;ESTa7"}, "image_id": 233, "id": 3494}, {"iscrowd": 0, "category_id": 1, "bbox": [38.0, 382.0, 61.0, 69.0], "area": 2856, "segmentation": {"size": [512, 512], "counts": "]]c09]?:E;F:F:00000000M301O00000001O0000000001O01O0000000000001O00J:L00001O001O010O00001O001OnNVBNj=L]B4b=L^B4b=L_B3a=M_B4`=IdB6\\=ZOTCf0S>N3L3N3L3N2N3L3N3L3NhR^6"}, "image_id": 233, "id": 3495}, {"iscrowd": 0, "category_id": 1, "bbox": [328.0, 401.0, 86.0, 62.0], "area": 2859, "segmentation": {"size": [512, 512], "counts": "Z]T51l?3N2N3M2M4M201O010O01O01O0N3M2N3L3N2N3O010O010O0010O0010O010O010O0010O0010O010O010O00010O010O010O001UAPOe>o0YASOd>W1M2N3M2M4N10010O0010O010O0010O0010O002K4M2N3L3C\\A\\Og>a0[A]Og>`0]A]Oe>a0]A\\Og>a0=N3L3N3M2N3LSc`1"}, "image_id": 233, "id": 3496}, {"iscrowd": 0, "category_id": 1, "bbox": [199.0, 404.0, 94.0, 67.0], "area": 3436, "segmentation": {"size": [512, 512], "counts": "XmS33k?3L3N3L3N2N3L3N3O010O000[AYOR>g0kA[OU>e0hA^OY>b0dAA[>?cAC]>=`AG`>l0010O0010O001M2M3N30O01O010O01O01O010O01L3N3L3O110O010O00O2N1010O00010O010O00010kN_Ai0a>UObAk0^>QOeAo0[>oNgAR1d>O0010O010O00010O010O0010O0010O0010O0010O01POTAk0k>SOWAl0P?M2M4M2M3N3L3N3M2MVR]3"}, 
"image_id": 233, "id": 3497}, {"iscrowd": 0, "category_id": 1, "bbox": [70.0, 449.0, 17.0, 24.0], "area": 255, "segmentation": {"size": [512, 512], "counts": "`^S14e?8H7O2O010O010O010ON3L3N3M2N2N3LlQd6"}, "image_id": 233, "id": 3498}, {"iscrowd": 0, "category_id": 1, "bbox": [503.0, 452.0, 9.0, 39.0], "area": 193, "segmentation": {"size": [512, 512], "counts": "Uok72l?3^ONQA5m>MQA6k>NQA5m>MQA6l>`0M3N3kA"}, "image_id": 233, "id": 3499}, {"iscrowd": 0, "category_id": 1, "bbox": [336.0, 453.0, 53.0, 53.0], "area": 1483, "segmentation": {"size": [512, 512], "counts": "m^X52l?3L3N2N3L3N3M2M4M2M310O010O0[ASOX>l0eAWO[>j0bAYO]>g0aA[O`>e0\\A^Od>P10O01O01O010O010O01O01O010O0O1N3M2M4M2M4M2N2M4O010O010N1M3N3L3N3M2M\\Qm1"}, "image_id": 233, "id": 3500}, {"iscrowd": 0, "category_id": 1, "bbox": [80.0, 461.0, 49.0, 51.0], "area": 1551, "segmentation": {"size": [512, 512], "counts": "n_X12k?3N2N2M3N2N2M3N2N2M3N2N2M3N2N200001O001O00001OO1N2M3N2N2M3N2N2001O001O000N3M2M4O0O2L3N2GRA^OQ?5PAM2JQ?7PALY?1i@LZ?1hQo5"}, "image_id": 233, "id": 3501}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 479.0, 56.0, 33.0], "area": 1640, "segmentation": {"size": [512, 512], "counts": "o>Q1o>000001O00003M00000000000000001O00000000000000000000001O00000000000000J60000000000001O0000000000000000000M3C>C[aS7"}, "image_id": 233, "id": 3502}, {"iscrowd": 0, "category_id": 1, "bbox": [156.0, 480.0, 78.0, 32.0], "area": 1198, "segmentation": {"size": [512, 512], "counts": "l_^24l?00000001O0000000000000000001O000000000000000000001O00O1F:E;G900000000001O000000000000000000001O0000000000000000001ZOUA5[?0000000000000001O0000000000000000001M]`Z4"}, "image_id": 233, "id": 3503}, {"iscrowd": 0, "category_id": 1, "bbox": [320.0, 481.0, 23.0, 23.0], "area": 283, "segmentation": {"size": [512, 512], "counts": "]_P52l?3M2N3M2N3M2010O010O010O010Bb@8^?Fe@:`?010O01N1N3M2NdPd2"}, "image_id": 233, "id": 3504}, {"iscrowd": 0, "category_id": 1, "bbox": [265.0, 498.0, 17.0, 14.0], "area": 166, "segmentation": {"size": [512, 512], "counts": "moT43k?2N2M3N2N2001O00001O001O001M2M3N\\`b3"}, "image_id": 233, "id": 3505}, {"iscrowd": 0, "category_id": 1, "bbox": [488.0, 498.0, 24.0, 14.0], "area": 244, "segmentation": {"size": [512, 512], "counts": "o_d71m?2M3N2M3O100001O001O0000O1M31O00001O001O00001F^@4"}, "image_id": 233, "id": 3506}, {"iscrowd": 0, "category_id": 1, "bbox": [485.0, 511.0, 2.0, 1.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "oob71o?0QP<"}, "image_id": 233, "id": 3507}, {"iscrowd": 0, "category_id": 1, "bbox": [416.0, 0.0, 26.0, 9.0], "area": 130, "segmentation": {"size": [512, 512], "counts": "PP`61o?001O00001O001O00001O001O001O00001O001O0000O1N2M3NRPS1"}, "image_id": 234, "id": 3508}, {"iscrowd": 0, "category_id": 1, "bbox": [284.0, 21.0, 80.0, 54.0], "area": 2493, "segmentation": {"size": [512, 512], "counts": "ZQ^41o?3M3\\@JW?:f@IW?:f@JV?c0L4M3M1N10O10O1000O01000O01000O10O10O10O10O10O10O10O10O10O10O10O10O10O1000O01000O01000O01000O10O10O10O10O10O10O10O10O10O10O10004K4M3M3M1N102N3L4M3M3L`nY2"}, "image_id": 234, "id": 3509}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 90.0, 33.0, 94.0], "area": 1634, "segmentation": {"size": [512, 512], "counts": "j2n2S=L3N2M4M2M4M2M3N3M2M4M2M3N3L3N3L3N3L3N2M4M2O2O01OFQACP?9TAFl>8VAIj>3ZALf>2\\AOc>N`A0c>M`A0b>MaA0[k_7"}, "image_id": 234, "id": 3510}, {"iscrowd": 0, "category_id": 1, "bbox": [95.0, 158.0, 59.0, 86.0], "area": 2829, "segmentation": {"size": [512, 512], "counts": 
"ie_12_1Ol<4QC0l<2QC1l<3QC0l<2QC1l<3QCOl<4QC0l<2QC1l<3QC0l<2QC1m<1QC1o<0mB4R=LkB7V=IgB9Y=GdB=\\=BbB`0^=R1O01O010O01O01N1N2M010O2O3L3M3N3L3N3L3M3N3L3N3M02M3O20O0N2N3L3N3L3M3N3L3N2M4M2M4L3N2Mojb5"}, "image_id": 234, "id": 3511}, {"iscrowd": 0, "category_id": 1, "bbox": [492.0, 162.0, 16.0, 16.0], "area": 146, "segmentation": {"size": [512, 512], "counts": "ZUf72l?3M2N3M210O010O010O010ON2N3M2Nkj1"}, "image_id": 234, "id": 3512}, {"iscrowd": 0, "category_id": 1, "bbox": [153.0, 178.0, 68.0, 70.0], "area": 2416, "segmentation": {"size": [512, 512], "counts": "_f\\23j?3N2M4M2N3M2010O00O2M2M4M2fATO]=o0aBSO^=o0_BSOa=m0\\BWOd=i0YBYOg=g0VB]Oi=c0UB_Ol=a0QBAo=?nAER>T101O010N1N2O2O010O0001L3N3M2M2O0O011O2M4M2M3N3L3N3M2M4M2M3N3Oj@^On>c0o@_OR?`0l@CR?f0N3O00M4M2N2M4M2M4M2M4M2NYZa4"}, "image_id": 234, "id": 3513}, {"iscrowd": 0, "category_id": 1, "bbox": [505.0, 179.0, 7.0, 6.0], "area": 29, "segmentation": {"size": [512, 512], "counts": "eel72l?3O010O01O0O\\J"}, "image_id": 234, "id": 3514}, {"iscrowd": 0, "category_id": 1, "bbox": [56.0, 215.0, 38.0, 35.0], "area": 725, "segmentation": {"size": [512, 512], "counts": "\\Wl02l?3M2N3L3N3M2N2N3L3O2l@WOo>n00O0010O0010O001L3O20ON3M2N3M2O110O010Bc@7]?Ge@9b?0O01O0N3L3Nih`6"}, "image_id": 234, "id": 3515}, {"iscrowd": 0, "category_id": 1, "bbox": [21.0, 229.0, 34.0, 37.0], "area": 748, "segmentation": {"size": [512, 512], "counts": "Ph:1l?3N3L3N3L3N2N3L3N3L3N201O010O00010O010O00010ON3M2M3N3L3N3L3N2M4M2MfXT7"}, "image_id": 234, "id": 3516}, {"iscrowd": 0, "category_id": 1, "bbox": [273.0, 243.0, 64.0, 103.0], "area": 2930, "segmentation": {"size": [512, 512], "counts": "_jX42k?3N3L3N2N3cNCRC`0lo0WASOi>R110O0010O0010O00O2M2010O0010O0010O0O2L3N2M4N1010O0M3N3L3N3M2M3N3L3N3L3N3M2M3N3L3NQge0"}, "image_id": 234, "id": 3519}, {"iscrowd": 0, "category_id": 1, "bbox": [312.0, 306.0, 30.0, 30.0], "area": 551, "segmentation": {"size": [512, 512], "counts": "VZl41l?4M2M4M2M3N3L3N3O000010O010O00010O010O00010M2M4M2M3N3L3N3LYfd2"}, "image_id": 234, "id": 3520}, {"iscrowd": 0, "category_id": 1, "bbox": [140.0, 307.0, 27.0, 19.0], "area": 243, "segmentation": {"size": [512, 512], "counts": "gYV22l?2N2010OZ@Ka?6\\@Le?8O00010O010O00010O010O010O00010O01N1N3L3NUV\\5"}, "image_id": 234, "id": 3521}, {"iscrowd": 0, "category_id": 1, "bbox": [117.0, 322.0, 36.0, 36.0], "area": 848, "segmentation": {"size": [512, 512], "counts": "njj13i?4K6J5K5K5L5O01O000001O01O000001O01O000001O01O000N2L4K6J5K503M10O00O1KjUc5"}, "image_id": 234, "id": 3522}, {"iscrowd": 0, "category_id": 1, "bbox": [375.0, 333.0, 28.0, 31.0], "area": 522, "segmentation": {"size": [512, 512], "counts": "Skk51m?3L3N3L3N2M4M2M4M20001O010O01O01O010ON3M2M3N3L3N3L3N2M`Uf1"}, "image_id": 234, "id": 3523}, {"iscrowd": 0, "category_id": 1, "bbox": [473.0, 339.0, 39.0, 39.0], "area": 968, "segmentation": {"size": [512, 512], "counts": "Vk\\74j?2N5J3N3M2M3N3L310O01O01O010O010O00010O010O01O01O010O010O00010O010O01N1M310OPE"}, "image_id": 234, "id": 3524}, {"iscrowd": 0, "category_id": 1, "bbox": [438.0, 378.0, 36.0, 47.0], "area": 885, "segmentation": {"size": [512, 512], "counts": "P]k61l?3N2N3M2M4M2N3M2N3L3N2N3M2M4M2N3M210O00M4M2N3M2N3L3N3M2N3L3N2N3M2N3L3NPdb0"}, "image_id": 234, "id": 3525}, {"iscrowd": 0, "category_id": 1, "bbox": [365.0, 390.0, 15.0, 13.0], "area": 108, "segmentation": {"size": [512, 512], "counts": "\\lf52l?3M2N30O010O010O010O0O1N3M2NgcQ2"}, "image_id": 234, "id": 3526}, {"iscrowd": 0, "category_id": 1, "bbox": [498.0, 418.0, 14.0, 36.0], "area": 284, "segmentation": {"size": [512, 512], "counts": 
"o]i72k?3N3M2M3N3L3N3L3N3L3N2N3O0nB"}, "image_id": 234, "id": 3527}, {"iscrowd": 0, "category_id": 1, "bbox": [189.0, 474.0, 86.0, 38.0], "area": 2418, "segmentation": {"size": [512, 512], "counts": "]on28h?;E000000000O100000000000000000000O100000000000000000000O1000000G_OSAa0m>900O100000000000000000000O10000000000000000000000O100000000M300000000000000000000O10000000000002N;E;E600O010O01000O010O01000O0100O10000O10000O100O10000O100O1002N2N1OO10000O10000O100O10000O100O102VOo@a0S?]Oo@`0\\?M3L3N3M2M4MU_a5"}, "image_id": 237, "id": 3534}, {"iscrowd": 0, "category_id": 1, "bbox": [242.0, 0.0, 38.0, 20.0], "area": 408, "segmentation": {"size": [512, 512], "counts": "QPi31o?2M2O2N1O2N2N1O2N1O2N1O1OO1O100O100O1O100O1O100O1O100O1O100LZ@Mg?2400O1O100O10PPd3"}, "image_id": 237, "id": 3535}, {"iscrowd": 0, "category_id": 1, "bbox": [359.0, 0.0, 27.0, 12.0], "area": 174, "segmentation": {"size": [512, 512], "counts": "P`c51o?1O2N1O2N1O2N1O1O00O100O1O100O1O100O1O100O1O100O1O10PPo1"}, "image_id": 237, "id": 3536}, {"iscrowd": 0, "category_id": 1, "bbox": [378.0, 0.0, 76.0, 78.0], "area": 2895, "segmentation": {"size": [512, 512], "counts": "XQm51o?2k@NW>4hAMV>6gAMW>4hAMV>6gAMV>5hAMV>6hAKW>7fAIZ>9dAH[>;cAD^>=`AC`>`0]AAb>a0]A^Od>n000010O000N]AiNb>W140JkN`AU1a>4BeNXB[1h=hNUBY1k=hNTBW1l=lNQBT1o=nNoAS1Q>nNnAQ1R>>010O1O100O1O1O100O1O100O1O100O1O100O1O1O100O1002N1OO100O1O2O1N2N3oNXAe0j>YOXAf0j>XOXAe0S?N2O2M2N3N1N3M2O2M2NRol0"}, "image_id": 237, "id": 3537}, {"iscrowd": 0, "category_id": 1, "bbox": [257.0, 5.0, 82.0, 69.0], "area": 2823, "segmentation": {"size": [512, 512], "counts": "VaP43l?3N1N3M3N1N3M2O2M3M2O2M3M100O010O2N2O2M3M2O2M3M2O2M0010O00010O00010O00010O01O01O2O2M2N3I]AmNb>S1aAjN`>V1401O01O01O010O00010O00010O0010O00010O02N3M2O2M2N3N1N3M3N1N3M2O2M2N3N1NT_f2"}, "image_id": 237, "id": 3538}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 37.0, 63.0, 66.0], "area": 2492, "segmentation": {"size": [512, 512], "counts": "i1Q1n>3N1N3N2M2HeNhA^1U>eNiA]1T>eNjA]1U>7O10O01O01O010O01O01O010O011N3M2O2M2O2GaAlN`>S1cAjN^>V15010O01O01O010O01O010O01O01O102M2O2M2N3N1N3N1N3M2O2M2O2M2N3N1N3MQ^P7"}, "image_id": 237, "id": 3539}, {"iscrowd": 0, "category_id": 1, "bbox": [104.0, 44.0, 75.0, 76.0], "area": 2865, "segmentation": {"size": [512, 512], "counts": "PSd11n?2N3M2N3N1N3M2I@k@b0T?_Oj@d0S?6N3CSOcAn0\\>TOaAo0\\>SObAo0\\>SOcAo0Z>=O0O0001O01O01O01O01O01O00010O0001O01O01O01O01O01O00010O0001O01O01O01O01O2010OO2M2N3N1N2N3N1N3M2VOQA`0R?^OPA`0Q?^OQA`0Y?N3N1N3M2O2M2N3N\\]V5"}, "image_id": 237, "id": 3540}, {"iscrowd": 0, "category_id": 1, "bbox": [70.0, 52.0, 10.0, 11.0], "area": 54, "segmentation": {"size": [512, 512], "counts": "fQS11o?1N3M2O2O01O0O2M2NVng6"}, "image_id": 237, "id": 3541}, {"iscrowd": 0, "category_id": 1, "bbox": [419.0, 55.0, 80.0, 81.0], "area": 2959, "segmentation": {"size": [512, 512], "counts": "Sba61n?2O2M2N3M2O2M2N3N1N2N3N1N3M2O2M2N3O01O010O010O010O010O00010O010O01N1N3N1N3M2O1N3M2N3N1N3M2O2M2N01O00010O00010O0YOUBZOk=g0WBWOh=i0ZBUOg=j0\\BSOe=m0\\BROe=l0^BQOe=l0]BROe=m0\\BROf=k0]BROe=l0]BROf=l0h0N3M2O1N3M2N3N1N3M2O2M2N3NW]6"}, "image_id": 237, "id": 3542}, {"iscrowd": 0, "category_id": 1, "bbox": [290.0, 56.0, 77.0, 66.0], "area": 2612, "segmentation": {"size": [512, 512], "counts": "eRa43l?2O2M3M2O2M2a@AZ?e0M2O2M2N3N2M1O10O003N2M11O1N3N1N3M3N1N3M2O2M10O01O010O000102M2N01O010O01O01O01O0K`AlN_>T1cAjN^>V1501O01O010O00010O01O01O0101N2N3N1N3M3N1N3Fi@GX?7j@GY?6j@GY?79N3M2Oc]X2"}, "image_id": 237, "id": 3543}, {"iscrowd": 0, "category_id": 1, "bbox": [11.0, 88.0, 84.0, 78.0], "area": 3369, "segmentation": 
{"size": [512, 512], "counts": "bc51o?2M2N3N2M2N3N2M2N3N1N2N10O0003N2M201O10O010N00RAQOj>o0VATO^>MhAo0JWO[>MiAl0LXOY>NiAk0MZOX>MhAW1V>o0jAPOU>P1nAmNR>T1PBjNP>U1RBiNn=W1=10O01O01O010O01O01O012M2N3N1N3N1N3M2O2M2O2M2N3N1G`@Oc?O_@Nc?0_@Onk`6"}, "image_id": 237, "id": 3544}, {"iscrowd": 0, "category_id": 1, "bbox": [370.0, 88.0, 69.0, 83.0], "area": 2835, "segmentation": {"size": [512, 512], "counts": "SSi51n?3N1N3M2O2M2N3N1N3M2O2M2N3N1N3M2O2N110O010O010O001N1N3M2O2cAaNT>a1kAaNR>b1kA`NT>h1M2N3N1N3M201O01O010O010M2O1N00010O000UOVBBj=>XB_Oj=?XB_Ok=>XB@i=?XB_Ok=>XB_Oj=?XB@j=>XB_Oj=?XB_Ok=>XB@i=?XB_Ok=>XB_Oj=QOfAR1W>QOfAQ1X>QOgAn0Z>TOcAm0\\>;001O01O01O01O00010O0002O1N00010O00010O000010O00LaAhN`>X13000010O0002O1N3M2O2M1O02O2M2N3N1oN]Ab0f>\\OZAc0h>[OYAe0i>XOWAh0k>VOUAj0n>TOSAj0T?N1N3M2O1N3M2O2M2N3N1N3Ml[a4"}, "image_id": 237, "id": 3547}, {"iscrowd": 0, "category_id": 1, "bbox": [339.0, 129.0, 68.0, 72.0], "area": 2467, "segmentation": {"size": [512, 512], "counts": "_dY52n?1a@0k>3SANk>4SAOk>3RAOl>3SAOj>4SANl>3RA0k>d0N3N1N3M2O2M2N3N1N3M2O2M2N3O0010O010O0O2M2N3N1N3M2O2M2N3N1N3M2O2M2N010O000MSAUOm>j0UATOk>l0410O00010O00101N3M2O2M2N3N1N3M2O2M2N3N1NhZd1"}, "image_id": 237, "id": 3548}, {"iscrowd": 0, "category_id": 1, "bbox": [175.0, 144.0, 76.0, 80.0], "area": 2523, "segmentation": {"size": [512, 512], "counts": "_fg21o?2M2N3N1N2N3N1N3M2O2M2N00010O0VOYO\\Bg0d=\\OZBd0e=^OYBb0h=@UB`0k=BTB=l=FQB;o=FoA:Q>ImA5T>MjA3V>0gA0Z>0fA0Y>0gA0Y>2eAN\\>k0O00010O0001O01O01O01O00010O00010O00010O0001O01O01O2O1N30O01M2PO\\Aa0g>\\OZAd0g>[OXAe0j>YOWAf0l>WOUAi0R?N3M2O2M2N3N1N3M2N3N1N]ZR4"}, "image_id": 237, "id": 3549}, {"iscrowd": 0, "category_id": 1, "bbox": [63.0, 156.0, 74.0, 74.0], "area": 2917, "segmentation": {"size": [512, 512], "counts": "aeo01n?2N3N1QAJQ>9lAJQ>8nAIQ>9lAJQ>8mAJQ>9mAIm=EfAc0;Km=CfAc0\\1jAaNW>^17N2O1N3M100O00010O00010O00010O00010O1O2O2M2N3N1N3M2O2M2N3N1N3M2O2M2N3N1NXZk5"}, "image_id": 237, "id": 3550}, {"iscrowd": 0, "category_id": 1, "bbox": [226.0, 191.0, 74.0, 92.0], "area": 3241, "segmentation": {"size": [512, 512], "counts": "mVa31n?3N1N3M2O2M2N2O2M2I]On@f0o>\\OPAe0o>7N0000100O3M3N1N3M2oAfNZ=\\1cBgNZ=[1dBgNZ=\\1dBfNZ=[1dBgNZ=\\1cBgNZ=\\1dBeN[=\\1cBgNZ=\\1dBeNZ=S2N2M0010O00010O1O3N1N3M2O2M2N3UNYBY1h=eN[BX1h=eNZBY1h=eNYB[1i=cNVB]1l=aNVB]1m=`NUB_1X>M2N3iNZAP1g>oNZAo0n>M2O0O00011N2N3N1N3M2O2M2N3N1N3M2O2MThY3"}, "image_id": 237, "id": 3551}, {"iscrowd": 0, "category_id": 1, "bbox": [89.0, 207.0, 69.0, 74.0], "area": 2488, "segmentation": {"size": [512, 512], "counts": "Th\\11n?2O2M2N3UOGkA:S>HkA;S>GjA;T>GkA;R>HkA:T>GjAGkA:S>HkA;S>GjA;T>GkA:S>IjA7W>h000011N2N2O2M2NIdNkA]1T>eNkAZ1U>iNhAW1Y>8O00010O00010O00MhA_NY>`131O01O01O01O01O01O102M2N3N1N3M2O2M2N3N1N3M2O2M2N3N1N3M2O2M2N3N1Nah`5"}, "image_id": 237, "id": 3552}, {"iscrowd": 0, "category_id": 1, "bbox": [500.0, 210.0, 12.0, 25.0], "area": 158, "segmentation": {"size": [512, 512], "counts": "iVj71n?3N1N3M2O2M2N3N1N3M2O2\\I"}, "image_id": 237, "id": 3553}, {"iscrowd": 0, "category_id": 1, "bbox": [455.0, 223.0, 57.0, 87.0], "area": 2691, "segmentation": {"size": [512, 512], "counts": "ZgS72m?2O2M2N3N1N3M2O2M2l@]Oh>f0UA]Oi>d0VA]Oh>f0UA]Oh>o0O2M2N3N101OO2N1N3M2O2N10iAkNc=U1ZBmNf=S1XBPOg=Q1WBPOg=R1WBQOg=Q1VBQOk=n0TBTOk=m0RBUOo=j0oAYOP>g0oAZOQ>\\1M2N2OO00010O00010O0001O01O01O03M2O2M2N3N1N_H"}, "image_id": 237, "id": 3554}, {"iscrowd": 0, "category_id": 1, "bbox": [177.0, 251.0, 67.0, 59.0], "area": 2210, "segmentation": {"size": [512, 512], "counts": 
"Yhh21n?2N2O2M2N3M2O2M2N3N1N2N3PAVOf>k0XAWOf>l0WAWOf>k0YAVOh>R1O01OO2M2O2M2N3N1N3M10O01O01O011N2N3N1N3M2N2OO00101N1O010O01O2N2O2M2N3N1N3M2O2M2N3N1N2N3M2O2M2N3N1N3MUgU4"}, "image_id": 237, "id": 3555}, {"iscrowd": 0, "category_id": 1, "bbox": [404.0, 254.0, 79.0, 87.0], "area": 2985, "segmentation": {"size": [512, 512], "counts": "dXZ63l?2N3N1N3M2O2M2N2O2M2N3N1N3M0010O2N2O1N001O01O3N1N3M2O1N3M2bAiNQ>Y1lAiNR>Y1mAhNQ>[1lAhNQ>Z1mAhNR>e1M2N3N1N3M210O01O01ON3M2O20OVNTB_1m=^NVBb1i=]NXBc1i=ZNZBe1f=YN\\Bh1o=N1N3N1N2N3M2O2M2N3N1N3M2O1N3M2O2M2N3M2O2M2N2O2M2N3NnU>"}, "image_id": 237, "id": 3556}, {"iscrowd": 0, "category_id": 1, "bbox": [122.0, 266.0, 63.0, 68.0], "area": 2372, "segmentation": {"size": [512, 512], "counts": "\\Ym13m?1N3@Lo@6P?Ko@7n>Lo@6o>Lo@7o>Ko@6o>`0M2O2M1O011N2N3N1N3M2O2M2O2M2N3N1N1O01O01O01O01O02N3N1N3N1N3M2O2M2N3NO0002O2M3L3N3M2O2M2N3N1N3N1N3M2O2M2N3N1N_VS5"}, "image_id": 237, "id": 3557}, {"iscrowd": 0, "category_id": 1, "bbox": [351.0, 283.0, 60.0, 83.0], "area": 2671, "segmentation": {"size": [512, 512], "counts": "hi_52n?2M2N3N2M2N3N1N3M2O2M2N3N1N3M2O2M2N3N1O2O0O2FbNmA`1Q>cNmA_1Q>bNmA`1Q>:M2O2M2N3N2M2N3N1N1O01O011N2N3N1N3M2O2M2N3oNQBMP>2QBLR>1QBLR>1PBNQ>1QBLR>1PBNQ>LVB1m=GZB7j>N1N3M2O2MUUb1"}, "image_id": 237, "id": 3558}, {"iscrowd": 0, "category_id": 1, "bbox": [302.0, 316.0, 67.0, 87.0], "area": 2911, "segmentation": {"size": [512, 512], "counts": "hZg42n?1N3M2O2M2N2O2M2N3N1N3M2N3NO0002O2M2JPOYAS1e>5N3M2O2M2N2O2M2N3N1N3M2O2M2N3N1N3N110O010O010ON3M2O2M2N3N1N3[NoAV1S>iNnAV1T>gNoAV1S>hNoAV1T>hNmAW1T>gNoAV1_>N3N1QOUAf0n>WOTAg0U?N1_Og@8\\?Eg@8[?Gf@8c?M2N3NoSW2"}, "image_id": 237, "id": 3559}, {"iscrowd": 0, "category_id": 1, "bbox": [250.0, 350.0, 70.0, 87.0], "area": 3027, "segmentation": {"size": [512, 512], "counts": "j[m32n?2M2N3N1N3M2O2M2N3N1N3M3N1N00010O101N3M2KmN[AU1b>6O2M3M2O2M2N3N1N3M2O2M2N3N1N3M2O2O0010O010O0N3M2O2M2N3N1N3M2O2\\NlAW1V>hNlAU1W>hNkAW1V>gNmAV1V>hNkAV1b>N1ROUAd0n>YOUAd0m>ZOUAe0U?M2N3@b@:c?N3M2O2M2Nobo2"}, "image_id": 237, "id": 3560}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 372.0, 11.0, 11.0], "area": 69, "segmentation": {"size": [512, 512], "counts": "h[R61n?3N1N3M2N10O2N3N1N3NVTh1"}, "image_id": 237, "id": 3561}, {"iscrowd": 0, "category_id": 1, "bbox": [204.0, 394.0, 82.0, 81.0], "area": 3270, "segmentation": {"size": [512, 512], "counts": "g\\V32m?2O2Z@K\\?7b@L[?7c@J\\??M2N3N2M2N3N1N3M3N0O1O010O2N3N1O2O10O010O001M2O2M2OO2N3O00100O001M2N3N2M2N100O00010O00011N3M2O20O010O0100O01N1^NoAR1T>kNoAR1S>lNoAS1S>kNnAS1T>kNoAS1S>jNoAT1a>M2O2M2N3N1N3M2O2M2O2M2N3N1N3M2Ofa`3"}, "image_id": 237, "id": 3562}, {"iscrowd": 0, "category_id": 1, "bbox": [274.0, 428.0, 10.0, 13.0], "area": 64, "segmentation": {"size": [512, 512], "counts": "_]Y42m?3M2N3O0010M2N2N3M]ba3"}, "image_id": 237, "id": 3563}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 18.0, 44.0], "area": 426, "segmentation": {"size": [512, 512], "counts": "0\\1d>N2N2M3N2M3N2M3N2M3N2N2M3N2M3N2M3NRPg7"}, "image_id": 238, "id": 3564}, {"iscrowd": 0, "category_id": 1, "bbox": [8.0, 0.0, 240.0, 233.0], "area": 34937, "segmentation": {"size": [512, 512], "counts": "^U41l?4M2N2M4M2M4M2M4M2kNWOPCl0mCeA:^>DeA9]>DfA9]>EfA8\\>EgA8\\>FgA7R?N2M4MZoS4"}, "image_id": 238, "id": 3565}, {"iscrowd": 0, "category_id": 1, "bbox": [241.0, 0.0, 15.0, 20.0], "area": 163, "segmentation": {"size": [512, 512], "counts": "_`h32k?4M2M3N3L3N3OO1N2M3N2M3N2N2MSPP4"}, "image_id": 238, "id": 3566}, {"iscrowd": 0, "category_id": 1, "bbox": [290.0, 0.0, 129.0, 155.0], "area": 11226, "segmentation": {"size": [512, 512], "counts": 
"PTa44j?2N2M4M2M4M2N3L3N2M4M2N3L3N2M4M2M4M2N2M4M2M4M2N3L3N2M4M2M4M2N2M4M2M4M2N3L3N2M4M2N3L3N2M4M2M4M2N2M4M2M4M2N3N10001ON2M3N2M3N2N2M3N200001O001O001O00001O001O00001O001O00\\NcDeN];Y1eDhNZ;U1iDkNW;S1lDmNS;P1PEPOP;n0SEROl:l0VETOj:i0YEWOg:g0\\EXNYO;];Z1]EYNYO9];\\1]EXNXO:];\\1]EWNZO:[;]1^EVNYO:\\;`1[ESN]O:Z;d1lEYNW:g1iEWNY:j1gESN[:m1eEPN^:P2i11O001O0000N2N2M3N2N2oNkA4X>IkA5W>IkA4X>IlA4V>JlA4V>JmA2V>KmA3U>KnA1U>LnA2T>LnA1U>On0001O001O00NR`^1"}, "image_id": 238, "id": 3567}, {"iscrowd": 0, "category_id": 1, "bbox": [428.0, 10.0, 84.0, 187.0], "area": 7306, "segmentation": {"size": [512, 512], "counts": "bUf61m?2M4M2N3L3N2M4M2N3L3N3L3N2N3XAjNa>]1N110O010O00010O010O01OO1M102M4M2N3L3N2M4M2N3L3N3L3N2N3L3N0O10O10O010O103M2M4M2M4M2N2M4M2M4M2N2M4M2M4M2N3L3N2M4M2N3L3N2M4M2N3L3N3L3N2N3L3F"}, "image_id": 238, "id": 3568}, {"iscrowd": 0, "category_id": 1, "bbox": [446.0, 358.0, 66.0, 154.0], "area": 9176, "segmentation": {"size": [512, 512], "counts": "m_o63W?f0[Oe0[Oe0ZOf0I700000000N2[Oe0[Oe0B>0000000000000000000000000000001O00000000000000000000000000000000000000001O000000000000000000000000000000"}, "image_id": 238, "id": 3569}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 368.0, 127.0, 144.0], "area": 14586, "segmentation": {"size": [512, 512], "counts": "`;g1b0`0n:@RE`0n:@RE`0n:@RE`0n:@RE`0o:_OQEa0o:_OQEa0o:_OQEa0o:_OQEa0o:_OQEa0o:_OcDo0];j10000000000000000000000000000000000000000000000001O0000000000000000000000000000000000000000000000000000000000001O00000000000iMaDM_;BRE>n:BRE>n:CQE=o:CQE=o:CQE=o:CQE=o:CQE=o:CQE=o:CQE=o:CQE=o:CQE=o:CQE=o:CQE=o:CQE=o:XO\\Eh0d:YN\\Ff1o;000000000000000000000000000000000000000000000000000000000001O000000000VOj0QOPSP6"}, "image_id": 238, "id": 3570}, {"iscrowd": 0, "category_id": 1, "bbox": [342.0, 468.0, 28.0, 43.0], "area": 1054, "segmentation": {"size": [512, 512], "counts": "i_[55Z?a0_Oa0M3000000000000001O0000000001O0000000000000000N2_Ob0_OjaV2"}, "image_id": 238, "id": 3571}, {"iscrowd": 0, "category_id": 1, "bbox": [373.0, 494.0, 18.0, 13.0], "area": 209, "segmentation": {"size": [512, 512], "counts": "hoj52d?:000001O0001O00000000000000000000aPl1"}, "image_id": 238, "id": 3572}, {"iscrowd": 0, "category_id": 1, "bbox": [139.0, 0.0, 57.0, 42.0], "area": 1647, "segmentation": {"size": [512, 512], "counts": "f`U24j?2AMo@5n>No@6n>Mo@5Q?Km@8R?;0001O001O001O00001O001O001O00001O001O001O00001O001O00001O001O001O00O1M3N2M3N2N2M3N2N2M3N2N2M3N2M3N2NRPn4"}, "image_id": 239, "id": 3573}, {"iscrowd": 0, "category_id": 1, "bbox": [268.0, 0.0, 64.0, 57.0], "area": 2023, "segmentation": {"size": [512, 512], "counts": "PPV42n?1O1O2N1O1O2N1O1O2N1O1O2N1O1O2N1O2N1O1O2N1O1O2N1O1O2N1O1O2N1O1O2N1O1O2N1O1O2N1O1O2NO100O1O1O100O1O1VOdAO]>0eAO[>0hAMY>3hAKY>4iAJX>5kAIU>L`AO=3T>MbAN;3T>NcAM<2R>1cAK=3P>1]BLe=2]BLf=1V1Oloi2"}, "image_id": 239, "id": 3574}, {"iscrowd": 0, "category_id": 1, "bbox": [345.0, 0.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "P`\\51o_c2"}, "image_id": 239, "id": 3575}, {"iscrowd": 0, "category_id": 1, "bbox": [376.0, 0.0, 5.0, 2.0], "area": 7, "segmentation": {"size": [512, 512], "counts": "PPl51o?1O00O10P`Q2"}, "image_id": 239, "id": 3576}, {"iscrowd": 0, "category_id": 1, "bbox": [379.0, 0.0, 58.0, 79.0], "area": 2510, "segmentation": {"size": [512, 512], "counts": "d`m52m?2N3N1N2N3N1N2N3M2O1N3M2N3N1N2N3M2O1N3M2N2O2M2N2N3jAZNn=g1PB[Nn=g1QBZNn=n1N1O1O2N1O1O2N1O2N1O1OO2O2M2nN[BFh=7ZBHg=7ZBGh=9YBDj=9XBEj=9XBFi=9YBDj=9XBEj=9YBDi=:YBEi=9XBEj=9YBDi=2cALf00j=2aALg01i=1bAMg0Oj=1^`U1"}, "image_id": 239, "id": 3577}, {"iscrowd": 0, "category_id": 1, 
"bbox": [440.0, 0.0, 56.0, 38.0], "area": 1280, "segmentation": {"size": [512, 512], "counts": "PPl61o?1O2N1O1O2a@Ko>6PAKo>6o@LP?6m@MQ?4m@NR?3m@NR?b0N1O2N1O1O2N1O00O100O1O1O100O1O1O100O1O100O1O1O1Kk@_OU?b0l@[OU?f022N1OO1O1O100O1O2O1N2N3M2O1N3M2N2O2Mdo7"}, "image_id": 239, "id": 3578}, {"iscrowd": 0, "category_id": 1, "bbox": [228.0, 21.0, 70.0, 72.0], "area": 2509, "segmentation": {"size": [512, 512], "counts": "lQb32m?2N2O2M2N2N2O2M2N2[O_OhAd0U>^OiAd0U>^OjAc0U>_OhAc0V>_OhAb0W>@gA`0Y>CeA=[>DcA>[>f0M2N2N2O2M2N2N3N1O101OO1N3M2N2N3N1N2N2N3N1N2N3M2N2O2M2N2N2O2M2N0001O01O0000011N3M2N2O2M2N2N2O2M2N2N3M2OTnZ3"}, "image_id": 239, "id": 3579}, {"iscrowd": 0, "category_id": 1, "bbox": [118.0, 32.0, 59.0, 61.0], "area": 2194, "segmentation": {"size": [512, 512], "counts": "[Rk12l?2_ONQA6k>MSA5k>NQA5l>NRA5k>MSA5n>?O01O010OO2L3N3L3N2M4M2O2O01O010O01O01O010O01O010O01O01O010O01O010O0N2M4M2N3L3N2M4M2M4M2N2M4M2M4M2M4M2Mc^W5"}, "image_id": 239, "id": 3580}, {"iscrowd": 0, "category_id": 1, "bbox": [298.0, 51.0, 93.0, 64.0], "area": 2555, "segmentation": {"size": [512, 512], "counts": "jQe41n?2N3M2O10O1N3M2O1O01N2N3M01O011N2N2O11M2O1N3M2N3N1N3M2O2M2N2O2M2N3N110O00010O0TAPOe>W1O2M2N3M2O1N3M2O2M201O01O0N2O2M2N3M2O1N3M2O1N1O01O00010O0001O010O2O101O0N3N1N3M2N2O2M2N3N1N2N3N1N3M2O1N3MW]l1"}, "image_id": 239, "id": 3581}, {"iscrowd": 0, "category_id": 1, "bbox": [98.0, 83.0, 58.0, 61.0], "area": 2186, "segmentation": {"size": [512, 512], "counts": "\\Sa11?2l>1QA2k>1SA1k>2QA2k>1SA1m>OPA4Q?>O0010OO2L3N2M4M2N3L3O1010O010O01O01O010O01O01O01O010O01O01O010O010ON2N3L3N3M2M3N3L3N3M2M3N3L3N3L3N2N3LPma5"}, "image_id": 239, "id": 3582}, {"iscrowd": 0, "category_id": 1, "bbox": [265.0, 83.0, 71.0, 76.0], "area": 2603, "segmentation": {"size": [512, 512], "counts": "VcT42m?2N3N1N2N2N2N3N1N2N2N3M2N2O1N3M2N2N2O2M2N2N2N3N1000000cA`NY>d11O0001O01O0001O0O1N20010O000OO001O0001O00WOlAGU>9lAET>;nACR>=PBBo=>SB@m=`0VB]Ok=c0VB[Ol=c0VB[Ol=c0VB\\Ok=b0XB[Ok=b0WB\\Ok=c0VB[Ol=c0j0N3M2N2O1N3M2N2N2Nclg2"}, "image_id": 239, "id": 3583}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 102.0, 75.0, 74.0], "area": 2636, "segmentation": {"size": [512, 512], "counts": "ldo22m?2N2N3M2N2N2O1N3XO_OlAc0R>_O]AMBiA`0U>BiA`0U>ChA`0V>AhAa0V>AhAa0V>AhA?X>d001O00000001O2N001O0001O000000000001O0001O000003M2N2N2N2N2N2O1N1O001O0000000001O0002N2N3M2N2N2N2N2O1N2N3M2N2N2N2N2NXlj3"}, "image_id": 239, "id": 3584}, {"iscrowd": 0, "category_id": 1, "bbox": [264.0, 116.0, 9.0, 8.0], "area": 40, "segmentation": {"size": [512, 512], "counts": "fST41n?2O1N3N10O1N2O2MX\\g3"}, "image_id": 239, "id": 3585}, {"iscrowd": 0, "category_id": 1, "bbox": [482.0, 117.0, 30.0, 64.0], "area": 927, "segmentation": {"size": [512, 512], "counts": "_Ta73l?2O1N3M2O2M2N2N3N1N3M2N2O0O001JTOXAk0i>VOUAk0j>7N3N1N3M2O2M2N3M2O1N3iAXNQ>n1YL"}, "image_id": 239, "id": 3586}, {"iscrowd": 0, "category_id": 1, "bbox": [76.0, 134.0, 59.0, 61.0], "area": 2220, "segmentation": {"size": [512, 512], "counts": "aUV11m?3L3[OL\\A7a>K\\A8b>K[A8e>GYA;1_O\\>6`A>2^O_>4\\Aa01_Ob>j0\\AXOe>R1O0N2N000O100O4M20010O0010O010O0010O0010O0010O0010O010O0010O00M4M2N3L3N3L3N2N3L3N3M2M3N3L3N3M2M4M[[l5"}, "image_id": 239, "id": 3587}, {"iscrowd": 0, "category_id": 1, "bbox": [216.0, 147.0, 82.0, 67.0], "area": 2687, "segmentation": {"size": [512, 512], "counts": "fU\\32m?2N2O1N2N2N2`@EX?=g@DW?>g@DW?d0N2N3M2N2O1JoNZAS1e>5N2N2N2N2N3M2N00000001O0000000001O000002N2N2N2N3M2O100O1N1O01O000001O0000002N3M2N2O1N2N2N2N2N0000000001O01O00001O2N3M2N2N2N2O1N2N2NojZ3"}, "image_id": 239, "id": 3588}, {"iscrowd": 0, "category_id": 1, "bbox": 
[427.0, 152.0, 79.0, 99.0], "area": 3290, "segmentation": {"size": [512, 512], "counts": "eee61n?2N3N1N2N3N1N2N3J_Oh@c0V?_Oi@c0T?6O1N3M2N2O0O0002000N3M2O1N3M2N3N1N2N3M2O1N3M2N3N1N2N3N1N2O2O00TN_BY10WN]=>eB^1[=`NgBHKb1]=dNkBGJe1\\=aNlBHKg1X=`NUC`1l<]NUCd1kUOZAi0i>TOYAj0Q?M2O1N3M2O2M2N2N3N1N2N3Mjh2"}, "image_id": 239, "id": 3589}, {"iscrowd": 0, "category_id": 1, "bbox": [129.0, 169.0, 7.0, 6.0], "area": 28, "segmentation": {"size": [512, 512], "counts": "\\eP22l?2O100010N1Nhjk5"}, "image_id": 239, "id": 3590}, {"iscrowd": 0, "category_id": 1, "bbox": [169.0, 176.0, 63.0, 76.0], "area": 2407, "segmentation": {"size": [512, 512], "counts": "Tfd22m?2O1N2N2N2^AF[==bBF[=;ZB^OE8o=X13N3M2N000000001NSAQOm>n05M2N2N2N2N2O1N3M2N2N2N2N2O2M^i[4"}, "image_id": 239, "id": 3591}, {"iscrowd": 0, "category_id": 1, "bbox": [382.0, 186.0, 83.0, 115.0], "area": 3751, "segmentation": {"size": [512, 512], "counts": "oVo51o?1N3M2N2O2M2DFo@EPA=k>_ORA70=k>?N2O2M2N2N2OO000102M2N2N3N1N2N3M2O2M2N2N3N1N201O00UN^BW1c=gN^BZ1a=dNaB\\1NWN^=:gBHK`1N`N`=6iBIJe1]=aNPC`1P=]NSCb1m<\\NSCf1maNmA_1S>_NnAa1Y>01OOO00010O1O2N2PO`A>b>_O`A?b>_O`A?b>@`A=c>@_A?b>_O`A?b>_OaA>R?N2O2M2N2N3N1NTWg0"}, "image_id": 239, "id": 3592}, {"iscrowd": 0, "category_id": 1, "bbox": [72.0, 198.0, 7.0, 6.0], "area": 32, "segmentation": {"size": [512, 512], "counts": "WVT15k?0O10O10004LfYh6"}, "image_id": 239, "id": 3593}, {"iscrowd": 0, "category_id": 1, "bbox": [73.0, 204.0, 49.0, 52.0], "area": 2036, "segmentation": {"size": [512, 512], "counts": "bfT17i?8G9H7I8H8H0000000O1000O1000000000O1000O10000000O100000O10000000O1000O1000000000O1007H9H8H7I8H8HVhR6"}, "image_id": 239, "id": 3594}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 259.0, 64.0, 65.0], "area": 2143, "segmentation": {"size": [512, 512], "counts": "mh]12n?1N2N3M2N2O1N3M2N2N2O2M2N2N2O2M2N2N3M2O1N2N3M2N2O1N3M2N2N2O1N00000101N2N3M2N2O1N3M2N2N2O2M2N2N2N3N1N2N2N3M2O1N3M2N2N2O2M2N2N2OiVb5"}, "image_id": 239, "id": 3595}, {"iscrowd": 0, "category_id": 1, "bbox": [491.0, 261.0, 12.0, 13.0], "area": 87, "segmentation": {"size": [512, 512], "counts": "Zhe73l?2O1N3M2N01O012M2N2N2OfW4"}, "image_id": 239, "id": 3596}, {"iscrowd": 0, "category_id": 1, "bbox": [81.0, 262.0, 6.0, 7.0], "area": 36, "segmentation": {"size": [512, 512], "counts": "WhX16i?10O10001OiWd6"}, "image_id": 239, "id": 3597}, {"iscrowd": 0, "category_id": 1, "bbox": [259.0, 266.0, 81.0, 83.0], "area": 3580, "segmentation": {"size": [512, 512], "counts": "miQ42m?2N2N2N2N2N2N2N3M2N2N2N2O10000N2N2N2N2N2N2N2N2N2N2IaNjAa1T>aNjAa1T>7N2N00001O00000000000000000000JPB]NP>c1RB[Nn=e1TBYNl=i142N2N2N2N200001O00000000000000000N2N2N3M2N2O1jNRB7P>GRB7P>GRB7P>GRB7P>GRB7P>GRB7P>GRB7P>GRB7P>GRB7m>N2N2Nkee2"}, "image_id": 239, "id": 3598}, {"iscrowd": 0, "category_id": 1, "bbox": [85.0, 274.0, 8.0, 7.0], "area": 32, "segmentation": {"size": [512, 512], "counts": "ehZ11n?2N2N2O01N2N2N\\Wa6"}, "image_id": 239, "id": 3599}, {"iscrowd": 0, "category_id": 1, "bbox": [466.0, 284.0, 46.0, 67.0], "area": 1756, "segmentation": {"size": [512, 512], "counts": "XZY72m?2N3N1N3M2O2M2N3N2M2CZO]Ai0a>XO^Ai0`>ZO]Ai0`>YO_Ah0`>ZO]Ag0b>;010O00010O01O0JdNjA[1W>fNgA[1X>hNfAW1Z>710O00010O00010O00010O0002O1N3M2O0O00QG"}, "image_id": 239, "id": 3600}, {"iscrowd": 0, "category_id": 1, "bbox": [124.0, 322.0, 7.0, 8.0], "area": 30, "segmentation": {"size": [512, 512], "counts": "UZn11n?2N2N3NO3M2NlUn5"}, "image_id": 239, "id": 3601}, {"iscrowd": 0, "category_id": 1, "bbox": [213.0, 333.0, 70.0, 71.0], "area": 2333, "segmentation": {"size": [512, 512], "counts": 
"^kZ31n?2N2N2N2N2N2N2N2N2N2N2N2N2HVOWAl0g>VOXAk0f>WOXAk0f>8O000000001O2N2N2N2N2O10000001O0000000000000000000000000000000O1N2N2N2N2N2lNYAl0i>ROYAl0o>N2N2N3M2N2N2N2N2N2N2N2N2N2NSTb3"}, "image_id": 239, "id": 3602}, {"iscrowd": 0, "category_id": 1, "bbox": [309.0, 345.0, 78.0, 78.0], "area": 3149, "segmentation": {"size": [512, 512], "counts": "^lj41n?2N2N2N2N2YOFeAFeAFeAFeAFeAFeAFeAEfA=X>DgA>W>DgA>W>DgA>W>g0N2N2N2N2N2N2N2N0000000000000002N2N2N2N2N2N2N2N1O00001O2N2N2N1O000000001O2N2N2N2N0000002N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2NaTn1"}, "image_id": 239, "id": 3603}, {"iscrowd": 0, "category_id": 1, "bbox": [501.0, 371.0, 11.0, 24.0], "area": 134, "segmentation": {"size": [512, 512], "counts": "jkj71n?3N1N3M2O2M2N3N1N3M3[D"}, "image_id": 239, "id": 3604}, {"iscrowd": 0, "category_id": 1, "bbox": [172.0, 388.0, 83.0, 83.0], "area": 3432, "segmentation": {"size": [512, 512], "counts": "e]f21[?0XA1f>1XA1f>1XA1f>2WA02IV>9fA11IV>8gA11IV>8gA11IV>8gA11IV>8hA00JV>8hA00JV>\\1N2N3N1N2O10N2N2N2N3M0001O002N001O0000000001O2N2N2N000000001O2N1O00000001O000001O000000001O2N2N2N2N2N2N2N3M2N2N2N2O1N2O100O1N2N2O1N2N2N2N2N2N2NRSP4"}, "image_id": 239, "id": 3605}, {"iscrowd": 0, "category_id": 1, "bbox": [358.0, 389.0, 89.0, 90.0], "area": 3767, "segmentation": {"size": [512, 512], "counts": "R]c51o?1N2N3DJi@8V?Ih@9V?Jh@7V?Kh@8U?;O1N3M2N2N2O2M2N2N3N1N2N2N3M2O1N3M2N2O1010O0001PBQNl=S20O000010O000010O000010O00O1N3N1N2N3M2N2O1N3M2N2O2M1O000002N2O2M000010O1O3M2N2O1N3J5O1N3M2N2O1N3M2N2N3N1N2N3M2O1NRRP1"}, "image_id": 239, "id": 3606}, {"iscrowd": 0, "category_id": 1, "bbox": [228.0, 426.0, 78.0, 85.0], "area": 3211, "segmentation": {"size": [512, 512], "counts": "]_b31n?2N1O2jNL]B6]=IdA3m06]=IdA3m06]=0aB2]=0aB2]=0aB2]=ObB3\\=ObB3\\=ObB3[=0cB2[=0cB2[=0cB2[=0cB2[=0cB2[=0cB2[=0cB0^=1`BOb=1\\BOf=T12N2N1O000000000O1000000000001O1O2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N00000000000000002N2N2N2N2N2N2M3N2N2N2N2N2N2NPbV3"}, "image_id": 239, "id": 3607}, {"iscrowd": 0, "category_id": 1, "bbox": [17.0, 429.0, 86.0, 65.0], "area": 3050, "segmentation": {"size": [512, 512], "counts": "Sn82l?3M2O2M3M2N3N1N3M2N30O0100O010O0100O0100O010N1O2M201O10O10O010O010N2N1N3M2N30O0100O010O0100O010O0100O0100OO2M2N3N2M2N3M2100O0100O010O0100O0100O01M2O2dNhAl0[>ROfAl0\\>ROgAk0\\>ROfA>HLd>DgA=GNP?0SAMP?1QAMQ?1RALQ?1RAMP?0a0OiQ\\6"}, "image_id": 239, "id": 3608}, {"iscrowd": 0, "category_id": 1, "bbox": [310.0, 448.0, 84.0, 64.0], "area": 2748, "segmentation": {"size": [512, 512], "counts": "h^k42m?2N2N1L5N2N2N2N1O2M3N2N2N1O2N2M3N2N1O2000000O0100000`AhNW>X1gAiNY>W1fAkNZ>U1cAnN]>[1000O10O100000O1O001ON2O11O001O1O1O1O001O1O1M3N2N000O0100002M3O100O010000000O001N2N2M3N1O2N2M3N2N1O2M3N2N2Ml`j1"}, "image_id": 239, "id": 3609}, {"iscrowd": 0, "category_id": 1, "bbox": [5.0, 487.0, 81.0, 25.0], "area": 1291, "segmentation": {"size": [512, 512], "counts": "no22l?2N2KL]@5a?6N2N2N2O1N2N2O11O001O001O001O1O001O001O001O001O1O001O001O001O001OO1N2N2N2O1N2N2001O001O001O001O1O001O001O00O1O1001O1O001O001O001O001O1O001O001O001O001KZ@Ng?0Z@0lod6"}, "image_id": 239, "id": 3610}, {"iscrowd": 0, "category_id": 1, "bbox": [170.0, 496.0, 42.0, 16.0], "area": 358, "segmentation": {"size": [512, 512], "counts": "n_e22l?2N2N2N2N2N2O1O11O001O001O1O001O001O001O001O00N2001O1O001O001O001O001O001O001O001OQ`e4"}, "image_id": 239, "id": 3611}, {"iscrowd": 0, "category_id": 1, "bbox": [267.0, 499.0, 30.0, 13.0], "area": 212, "segmentation": {"size": [512, 512], "counts": "ooU41m?2N2O1N2N2N2O1001O001O001O001O1O001O001O001O1O001O001O001OQP[3"}, "image_id": 239, "id": 3612}, {"iscrowd": 0, 
"category_id": 1, "bbox": [315.0, 505.0, 17.0, 7.0], "area": 71, "segmentation": {"size": [512, 512], "counts": "nom42l?2N2O100001O001O001O1O001O001O00Q`i2"}, "image_id": 239, "id": 3613}, {"iscrowd": 0, "category_id": 1, "bbox": [257.0, 511.0, 2.0, 1.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "ooP41o?0QPn3"}, "image_id": 239, "id": 3614}, {"iscrowd": 0, "category_id": 1, "bbox": [213.0, 0.0, 19.0, 10.0], "area": 107, "segmentation": {"size": [512, 512], "counts": "P`Z31o?1O1O1O1O1O1O1O001O1OO1O1O1O1O1O1O1OQP\\4"}, "image_id": 240, "id": 3615}, {"iscrowd": 0, "category_id": 1, "bbox": [256.0, 0.0, 4.0, 2.0], "area": 6, "segmentation": {"size": [512, 512], "counts": "PPP41o?1O00OQPn3"}, "image_id": 240, "id": 3616}, {"iscrowd": 0, "category_id": 1, "bbox": [260.0, 0.0, 59.0, 46.0], "area": 1538, "segmentation": {"size": [512, 512], "counts": "ZPR42m?2O2M2N2N2N2N2N2N2N3N1N2O1O1O1O1O2N1O1O1O1O1O1O1O2N1O1O00O1O1001O2N1O1OO1O1O1VO\\A7e>H]A6d>I^A5c>K_A2b>M`A2`>MbA1_>NcA0^>OdAO]>0eAN\\>1fAM[>2hAKY>4iAJX>7h01O00O1O100JX@3l?Nn_P3"}, "image_id": 240, "id": 3617}, {"iscrowd": 0, "category_id": 1, "bbox": [340.0, 0.0, 39.0, 40.0], "area": 705, "segmentation": {"size": [512, 512], "counts": "[PZ52n?2M2N2N2N2N2N3N1N2N2N2N3M2OO1O2O1000001O01O000000010OWOj@g0X?N2N2N3M2N2O1N2N3M2N2NT_R2"}, "image_id": 240, "id": 3618}, {"iscrowd": 0, "category_id": 1, "bbox": [364.0, 0.0, 41.0, 22.0], "area": 502, "segmentation": {"size": [512, 512], "counts": "TPf51n?2N2N2N2O2N1O1O1O1O1O1O1O1O1O1O1O00O1O1O100O1O1O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1OQ`e1"}, "image_id": 240, "id": 3619}, {"iscrowd": 0, "category_id": 1, "bbox": [409.0, 0.0, 33.0, 30.0], "area": 431, "segmentation": {"size": [512, 512], "counts": "X`\\61n?2N2N2N2N2N2N2N2O1O100000000000000000N2N2O10000000000O1N2N3M2N2NZoR1"}, "image_id": 240, "id": 3620}, {"iscrowd": 0, "category_id": 1, "bbox": [441.0, 2.0, 40.0, 41.0], "area": 766, "segmentation": {"size": [512, 512], "counts": "k`l62m?2N2N2N2N2N2N2N2N3M2N2N2N2N0000000001O000000000000002N2N2N2N2N2N2N2N2N2N2N2N2Nb_?"}, "image_id": 240, "id": 3621}, {"iscrowd": 0, "category_id": 1, "bbox": [48.0, 11.0, 75.0, 36.0], "area": 1997, "segmentation": {"size": [512, 512], "counts": "oPh05d?8H7K5001O000001O000001O000001O0001O00000001O0001O00000001O0M3N200000001O01O0000000001O01O0000000001O01O0000000001O01O0000000001O01O00000001OL4I8H7If_R6"}, "image_id": 240, "id": 3622}, {"iscrowd": 0, "category_id": 1, "bbox": [456.0, 11.0, 56.0, 75.0], "area": 1756, "segmentation": {"size": [512, 512], "counts": "nQT71n?3M2N2N2N2N2N2N2N2O1N2N2N2N2N3M2N2N2N2N2N2N00000001O0000000000002N2O1N2N2N2N0000000000000000000000FQOeAo0Z>SOdAg0JZOb>1bAe0NXO`>5`Ac0f>_OXAb0g>;F"}, "image_id": 240, "id": 3623}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 20.0, 5.0, 47.0], "area": 140, "segmentation": {"size": [512, 512], "counts": "d0_1a>cNlAn0c>00D\\1hAfNY>`101O010O01O01OM4O0bNdAX1]>dNgA[1_>10O01O01O001M2000N3L31O01O010O01N1N2M4L3N2M4ROo@k0T?O01O01O0N3L3M3N3L3N3L3N2MTnZ4"}, "image_id": 240, "id": 3626}, {"iscrowd": 0, "category_id": 1, "bbox": [254.0, 46.0, 15.0, 13.0], "area": 130, "segmentation": {"size": [512, 512], "counts": "eQo33j?3M3O2O0010O0010O0001O0M3N3La^i3"}, "image_id": 240, "id": 3627}, {"iscrowd": 0, "category_id": 1, "bbox": [95.0, 54.0, 35.0, 27.0], "area": 743, "segmentation": {"size": [512, 512], "counts": "Xb_15b?:F900000000001O0001O000000000001O0001O000000000001O005LO01O005K000HTnn5"}, "image_id": 240, "id": 3628}, {"iscrowd": 0, "category_id": 1, "bbox": [235.0, 61.0, 26.0, 30.0], "area": 442, 
"segmentation": {"size": [512, 512], "counts": "bbe32k?4M2M3N3L3N3M20010O001L3N210O0010WOn@b0R?[OQAd0W?M2M3N3L3N3L3Nj]m3"}, "image_id": 240, "id": 3629}, {"iscrowd": 0, "category_id": 1, "bbox": [36.0, 83.0, 21.0, 22.0], "area": 391, "segmentation": {"size": [512, 512], "counts": "QSb06c?7I70001O00000001O0001O0000000001O000J6Ib]S7"}, "image_id": 240, "id": 3630}, {"iscrowd": 0, "category_id": 1, "bbox": [102.0, 83.0, 25.0, 24.0], "area": 413, "segmentation": {"size": [512, 512], "counts": "VSc11j?5K5L4K6O01O0001O0001O0001O01O000001O01O0K5K5K5L]]P6"}, "image_id": 240, "id": 3631}, {"iscrowd": 0, "category_id": 1, "bbox": [475.0, 89.0, 19.0, 19.0], "area": 193, "segmentation": {"size": [512, 512], "counts": "Sc]71n?2M3N2N2N1O2N2N2N2OO2N2N2N2N2N2N2N2NPm8"}, "image_id": 240, "id": 3632}, {"iscrowd": 0, "category_id": 1, "bbox": [284.0, 90.0, 50.0, 28.0], "area": 1400, "segmentation": {"size": [512, 512], "counts": "jR^4l0T?0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000Vmh2"}, "image_id": 240, "id": 3633}, {"iscrowd": 0, "category_id": 1, "bbox": [167.0, 98.0, 25.0, 25.0], "area": 514, "segmentation": {"size": [512, 512], "counts": "dcc25a?:H80000000010O0000000000000000010O00000000N2F:GUmo4"}, "image_id": 240, "id": 3634}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 100.0, 10.0, 60.0], "area": 353, "segmentation": {"size": [512, 512], "counts": "T3l1T>00J6I7I7I7I8H7I7IRmj7"}, "image_id": 240, "id": 3635}, {"iscrowd": 0, "category_id": 1, "bbox": [364.0, 109.0, 69.0, 64.0], "area": 2147, "segmentation": {"size": [512, 512], "counts": "XTf52m?2M3N2N1O2M3N2N1O2M3N2N2M2O2N2N2M3ZAkN^>V1aAlN_>T1_AnN_>[1N1O2N2NO2O2N2N2N2O010O1O]AhN_>]10O00100iN_Am0b>QO`Ao0_>oNdANKo0b>QOdAOLo0n>0O0010O00010O010O00010O010O00010O0O2M2M3N3L3N3L3M3N3L3Na[W1"}, "image_id": 240, "id": 3636}, {"iscrowd": 0, "category_id": 1, "bbox": [114.0, 114.0, 48.0, 63.0], "area": 1838, "segmentation": {"size": [512, 512], "counts": "UUi12k?4M2M3YOGeAGdAGdA=X>FfAGdAGeAEdA>]>c0M2M3N3O00010O0010O0010O0010O001M2N2M4M2M4L3N2M4M2M3O2O010N1N2M4L3N3Ba@6f?N3LPl^5"}, "image_id": 240, "id": 3637}, {"iscrowd": 0, "category_id": 1, "bbox": [38.0, 120.0, 47.0, 47.0], "area": 1535, "segmentation": {"size": [512, 512], "counts": "TTc07a?8L5O0e@AU?e01O01O000000M3M301O0000RAEW>k000001O0001O0000000001O01O0000000001O0001O00000000K6G8I7H8IY\\e6"}, "image_id": 240, "id": 3638}, {"iscrowd": 0, "category_id": 1, "bbox": [288.0, 123.0, 35.0, 29.0], "area": 556, "segmentation": {"size": [512, 512], "counts": "[T`41l?4M2M4M2M3N3N110O00010O010O01O010O010O00010O010O0010O010OM3N3L3N3L3Nk[n2"}, "image_id": 240, "id": 3639}, {"iscrowd": 0, "category_id": 1, "bbox": [242.0, 127.0, 30.0, 30.0], "area": 510, "segmentation": {"size": [512, 512], "counts": "cTi31m?2N3M2N3M2N3M2N3M2N3O010O010O010O010O01N1N3M2N3M2N3M2N3M2Njkg3"}, "image_id": 240, "id": 3640}, {"iscrowd": 0, "category_id": 1, "bbox": [162.0, 132.0, 42.0, 59.0], "area": 1489, "segmentation": {"size": [512, 512], "counts": "eUa23j?3M3L4M4K4M3M4K4M3M4K4M3M3L5M21O01O00010O0001O01O01O0M3M3L5L3M3L5L3M3L5L3M3L4M4L3Lhki4"}, "image_id": 240, "id": 3641}, {"iscrowd": 0, "category_id": 1, "bbox": [347.0, 147.0, 31.0, 29.0], "area": 521, "segmentation": {"size": [512, 512], "counts": "Ue]52l?2N2N3L3N3M2M4M210O00010O010O010O01O01O010O01N1M4M2N2M4M2N3MUkR2"}, "image_id": 240, "id": 3642}, {"iscrowd": 0, "category_id": 1, "bbox": [461.0, 147.0, 51.0, 42.0], "area": 1759, "segmentation": {"size": [512, 512], "counts": 
"\\eV7=T?`0L300000000001O000000000000000001O0000000H800000000000000001O01O00000005K00000000000000001O01O0000NXK"}, "image_id": 240, "id": 3643}, {"iscrowd": 0, "category_id": 1, "bbox": [302.0, 153.0, 32.0, 33.0], "area": 634, "segmentation": {"size": [512, 512], "counts": "^Ug41m?3M2N3M2M4M2N3M201N1N3N1010O010O0010O010O010O01M2N2N3M2Dc@3`?Jc@3_?Kc@3g?Mijh2"}, "image_id": 240, "id": 3644}, {"iscrowd": 0, "category_id": 1, "bbox": [242.0, 164.0, 50.0, 66.0], "area": 1718, "segmentation": {"size": [512, 512], "counts": "gVi33k?2N3M2N3M2N3M2N3M2EYO[Aj0b>XO\\Aj0b>YO[Aj0b>;N3M2N3M2N3M2N3M210O010ON3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N`j]3"}, "image_id": 240, "id": 3645}, {"iscrowd": 0, "category_id": 1, "bbox": [293.0, 191.0, 53.0, 66.0], "area": 1651, "segmentation": {"size": [512, 512], "counts": "cgb42l?2N3M2O2M2N3M3M2O2M2N3M2O2M3M2N3M2O2M2N3M2N3N1N0000010O0002N3N1N3M2N3M3N1N3M2N3M2O2M3M2N3N1N3M2N3M2O2M3Mfib2"}, "image_id": 240, "id": 3646}, {"iscrowd": 0, "category_id": 1, "bbox": [475.0, 201.0, 31.0, 30.0], "area": 516, "segmentation": {"size": [512, 512], "counts": "if]73k?2O2M2N3N1N3M3M2O2N110O10O10O010O01000O01N1O2M2N3M3N1N3M2O2M]i2"}, "image_id": 240, "id": 3647}, {"iscrowd": 0, "category_id": 1, "bbox": [322.0, 214.0, 83.0, 67.0], "area": 2731, "segmentation": {"size": [512, 512], "counts": "VXQ52m?1N3M2O2M2N3M3N1N3M2O2M201O10O010O10O10O010N1O2M3M2N3N1010YOAlA>R>DnA=o=FQB9n=HPB;m=HPB:n=IPB9m=IQB:m=HPB:n=HQB:m=HPB:Q>EmA>R>i00O010O010O10O10O010O010M3M2O2M2N2N10O000010O2N3M2O2M2N3N2M2N3M2O2M2N3M3N1N3M2O2M2NPYe1"}, "image_id": 240, "id": 3648}, {"iscrowd": 0, "category_id": 1, "bbox": [425.0, 216.0, 34.0, 38.0], "area": 679, "segmentation": {"size": [512, 512], "counts": "agd62m?2M2N3M3N1N3M2O2M3M2O2M2N3N2M2010O10ON3M2N3N2M2N3N1N3M3N1N3M2O2M3MoXj0"}, "image_id": 240, "id": 3649}, {"iscrowd": 0, "category_id": 1, "bbox": [467.0, 240.0, 30.0, 40.0], "area": 611, "segmentation": {"size": [512, 512], "counts": "\\hY72m?2M2N3M2N3N2M2N3M2N3N1N3M2N3M3N1010O0UOYA=h>@ZA>h>@[A=h>@ZA>h>@[A>T?N3M3M2N3N1NnW7"}, "image_id": 240, "id": 3650}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 258.0, 36.0, 56.0], "area": 1388, "segmentation": {"size": [512, 512], "counts": "P9d0X?4M3M4L3M3L5L3M3M3O2O01O01O01O01O01O01OO2M21O00IcAkN^>Q1eAnN\\>o0gAnN\\>o0=K4M3M4L300Gd@N]?Ng@N\\?ORh]7"}, "image_id": 240, "id": 3651}, {"iscrowd": 0, "category_id": 1, "bbox": [68.0, 266.0, 37.0, 54.0], "area": 1548, "segmentation": {"size": [512, 512], "counts": "hYR14e?7I7J6I8H7I7K5001O0001O00000001O0001O00000001O000001O000001O0L4I7jNYAl0Q?I7H8IfW[6"}, "image_id": 240, "id": 3652}, {"iscrowd": 0, "category_id": 1, "bbox": [405.0, 266.0, 47.0, 56.0], "area": 1372, "segmentation": {"size": [512, 512], "counts": "ciZ62m?2M3M2N3M2N3M2O2M2N3M2N3M3N1N3M2N3M2N3M2O2M2O20O0N3M2N3M2O2M2N3M3M2N3N1N3M2N3M2N3M2O2M3M2N3M[gm0"}, "image_id": 240, "id": 3653}, {"iscrowd": 0, "category_id": 1, "bbox": [122.0, 281.0, 45.0, 62.0], "area": 1674, "segmentation": {"size": [512, 512], "counts": "]Zm11m?2M4QOJPB8n=JoA9n=KoA8m=KPB8n=JoA:m=JPB8n=JoA9o=JnA9Q>GlACiA`0V>e00010O01O01O0N30O0010O0001M2M3M4M2M4M2M3N3L3M4M2M3N3L3N3L3M3N3LoV\\5"}, "image_id": 240, "id": 3654}, {"iscrowd": 0, "category_id": 1, "bbox": [505.0, 290.0, 7.0, 16.0], "area": 66, "segmentation": {"size": [512, 512], "counts": "]il72l?3M2N3M2O2M2nF"}, "image_id": 240, "id": 3655}, {"iscrowd": 0, "category_id": 1, "bbox": [445.0, 291.0, 52.0, 59.0], "area": 1494, "segmentation": {"size": [512, 512], "counts": 
"\\jn63l?2M2O2M3N2N1N3N2M2O2M3N1O2M3N2M2O2M3N1O2M3N1N10O01000O010O3N2M2O2N2M2O2M3N1N3N2N2M2O2M3N1N3N2N1N3N2M3N^V7"}, "image_id": 240, "id": 3656}, {"iscrowd": 0, "category_id": 1, "bbox": [169.0, 307.0, 56.0, 60.0], "area": 1617, "segmentation": {"size": [512, 512], "counts": "kjd23l?1N3N2M2N3N1N3N2M2O200O0100O0100O01000]OYOiAi0U>YOjAi0T>YOiAi0U>ZOhAi0U>YOjAi0T>YOiAi0U>YOiAj0U>b0M20O1N3M2O2M3N1N3M011N3N2M2N3N2M2O2M3M3N1N3M3N1N3N2M3M2OPV_4"}, "image_id": 240, "id": 3657}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 317.0, 7.0, 21.0], "area": 116, "segmentation": {"size": [512, 512], "counts": "m9d0\\?00001O00G9F]Vl7"}, "image_id": 240, "id": 3658}, {"iscrowd": 0, "category_id": 1, "bbox": [35.0, 320.0, 41.0, 21.0], "area": 778, "segmentation": {"size": [512, 512], "counts": "Yja08h?000N6E7000000000000000000000000000000000000000000000000001O00000000000000005C80jei6"}, "image_id": 240, "id": 3659}, {"iscrowd": 0, "category_id": 1, "bbox": [153.0, 326.0, 15.0, 20.0], "area": 157, "segmentation": {"size": [512, 512], "counts": "fj\\21m?2M4M2M4M2N2N30N1N2N3L3N3L3Nie[5"}, "image_id": 240, "id": 3660}, {"iscrowd": 0, "category_id": 1, "bbox": [492.0, 337.0, 20.0, 37.0], "area": 401, "segmentation": {"size": [512, 512], "counts": "S[f71n?2N2N2N2N2N2N2N2N2N3M2N2N2N2O1N2N2N1O0`E"}, "image_id": 240, "id": 3661}, {"iscrowd": 0, "category_id": 1, "bbox": [230.0, 342.0, 55.0, 67.0], "area": 1699, "segmentation": {"size": [512, 512], "counts": "Z\\c31n?2M3M2O2M2N3N2M2N3N1N3M3N1N3M2O2M3M2O2M2N3N2M2N10O00010O00010O1O3N2M2N3N1N3M3N1N3M2O2M3M2O2M2N3N2M2N3N1N3M3NmTa3"}, "image_id": 240, "id": 3662}, {"iscrowd": 0, "category_id": 1, "bbox": [95.0, 344.0, 25.0, 28.0], "area": 518, "segmentation": {"size": [512, 512], "counts": "_k_12h?6K5J6K5O1010O00000000010O000000010O00000L4J6K6IZeS6"}, "image_id": 240, "id": 3663}, {"iscrowd": 0, "category_id": 1, "bbox": [45.0, 345.0, 44.0, 37.0], "area": 1330, "segmentation": {"size": [512, 512], "counts": "bkf07`?9H8H80001O000000000001O0001O0000000001O0001O000000000001O0001O0000000001O0001ON2G9H8G\\Uc6"}, "image_id": 240, "id": 3664}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 356.0, 37.0, 34.0], "area": 887, "segmentation": {"size": [512, 512], "counts": "T;l0T?1O01O000001O01O0001O01O0001O01O000001O01O0001O01O00000L4K6Gd@K]?1g@OY?Mk@3b?O01O00L\\T]7"}, "image_id": 240, "id": 3665}, {"iscrowd": 0, "category_id": 1, "bbox": [118.0, 364.0, 47.0, 28.0], "area": 580, "segmentation": {"size": [512, 512], "counts": "j[k11l?3N2O20O010O00010O010O00010O010O2N01ON3OO2L3N3L3M3N3L310O00010O01O01O010O01O01OM4L3N3L3N2M4M^T]5"}, "image_id": 240, "id": 3666}, {"iscrowd": 0, "category_id": 1, "bbox": [265.0, 365.0, 68.0, 85.0], "area": 2601, "segmentation": {"size": [512, 512], "counts": "`mT42l?3M2O2M2N3N2M2N3N1N3M3N1N3M2O2M3M2O2M2N3N2M2N3M2O2M2N3N0O00010O0001O01O01O01O01O3N1N3M2O2M3M2O2M2N3N2M2N3M2O2M3M2O2M2N3N2M2N3N1N3M2O2MUTi2"}, "image_id": 240, "id": 3667}, {"iscrowd": 0, "category_id": 1, "bbox": [186.0, 369.0, 12.0, 13.0], "area": 85, "segmentation": {"size": [512, 512], "counts": "i[m22m?1N3N1N3N1O20N2M2O2M2O]dl4"}, "image_id": 240, "id": 3668}, {"iscrowd": 0, "category_id": 1, "bbox": [329.0, 386.0, 29.0, 30.0], "area": 470, "segmentation": {"size": [512, 512], "counts": "blT52m?2M3M2N3M3N1N3M3M210O10O10O10O010O010N2M2N3N1N3M3M2N3M2Nec\\2"}, "image_id": 240, "id": 3669}, {"iscrowd": 0, "category_id": 1, "bbox": [190.0, 387.0, 33.0, 41.0], "area": 699, "segmentation": {"size": [512, 512], "counts": 
"k\\o23l?1N3N2M2N3N2M2O2M2O2M3N1N3N2M2O200O01000OFWA]Oj>`0XAAh>=ZABf><]ABe><\\ABg>;\\ABf>=`0M2O2M3N1NeS`4"}, "image_id": 240, "id": 3670}, {"iscrowd": 0, "category_id": 1, "bbox": [88.0, 390.0, 78.0, 85.0], "area": 3170, "segmentation": {"size": [512, 512], "counts": "Y^\\11m?3M2M4M2N3M2M4M2N3j@WOQ?n0M2N3O010ON201O010ON3M2N3M2N3L3N2N3M2N3M2N3M2N3L3N3M2N2N000002N3M2N3L3N3M2N3M2N2N0O3N3M2N3M2N5J3010O010O010OO2M2N3M2N3M2N3M2N3M2N3M2N3M[c\\5"}, "image_id": 240, "id": 3671}, {"iscrowd": 0, "category_id": 1, "bbox": [28.0, 399.0, 51.0, 22.0], "area": 555, "segmentation": {"size": [512, 512], "counts": "h\\>1k?5K4O10001O01O0001O01O0001O01O0001O01O0001O01O0001O01O0001O01O0001O01O0001O01O0001O01O0001O01O000L4LZSh6"}, "image_id": 240, "id": 3672}, {"iscrowd": 0, "category_id": 1, "bbox": [350.0, 406.0, 62.0, 41.0], "area": 1380, "segmentation": {"size": [512, 512], "counts": "W]_52l?3N1N3N2M2O2M3N1N3N2M201O01000O010O10O10O10O10OO2M3N1O200O01000O01O1N1N3O10OO2M2O2M3N101000O001N1N3N2M2O2M3N1N3N2M2N3L[@Jg?34Olba1"}, "image_id": 240, "id": 3673}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 414.0, 21.0, 27.0], "area": 492, "segmentation": {"size": [512, 512], "counts": "o3bAN]>3aA0\\>3bAO\\>3aAO]>3aA0]>2aA0\\>3aAO]>3bAO\\>3aA0]>1bA0_>i00O01000O01000O0O2N1N3O1O010O10O10O10O10O10O10O10O10O10O10O10O01nNYAi0g>VOZAj0g>SO\\Al0m>O001O1N1O2M3M2O2M2O2M3N1N3N2Me`S2"}, "image_id": 240, "id": 3677}, {"iscrowd": 0, "category_id": 1, "bbox": [251.0, 460.0, 30.0, 29.0], "area": 466, "segmentation": {"size": [512, 512], "counts": "lnm31m?3N1N3M3N1N3N1N3M3N110O10O10O01000O010O01M3M2O2M3N1N3M2O2MZQc3"}, "image_id": 240, "id": 3678}, {"iscrowd": 0, "category_id": 1, "bbox": [179.0, 463.0, 65.0, 43.0], "area": 1710, "segmentation": {"size": [512, 512], "counts": "Woi21n?2M3M2O2M2N3N1N3M2O2M3M201O010O010O01000O010O010O0O2N2M2N3N1N2O11O001O001TOYA=g>A[A?f>^O\\Ab0e>[O]Ae0c>YO_Ag0m>0000000000000000000000000000000000001O000000@`aU4"}, "image_id": 240, "id": 3679}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 466.0, 50.0, 46.0], "area": 1672, "segmentation": {"size": [512, 512], "counts": "m_W73k?2M3M3N2M3M3N2O100N2M3N2M3N2M3N2M3N2N21O001O00001O00001O00001O00001O00001O00001O00001O001O0N2M4L3NUA"}, "image_id": 240, "id": 3680}, {"iscrowd": 0, "category_id": 1, "bbox": [33.0, 467.0, 27.0, 27.0], "area": 441, "segmentation": {"size": [512, 512], "counts": "Uo`01m?3L3N2M4L3N3L30001O010O01O01O01O010O01OM4M2M3N3L3M4MXaQ7"}, "image_id": 240, "id": 3681}, {"iscrowd": 0, "category_id": 1, "bbox": [282.0, 467.0, 36.0, 33.0], "area": 636, "segmentation": {"size": [512, 512], "counts": "T_]41m?2O2M3M2O2M2N3N2M2N3O0010O10O10O010O10O10O010O01000N1O2M2N3N2M2N3N1N3MQaP3"}, "image_id": 240, "id": 3682}, {"iscrowd": 0, "category_id": 1, "bbox": [75.0, 476.0, 54.0, 24.0], "area": 453, "segmentation": {"size": [512, 512], "counts": "RoU12k?4L300010O00010O00010O00010O00010O00010O00010O00001O00010O00010O00010O00010O00010O00010O00010O00010O000M4LfPo5"}, "image_id": 240, "id": 3683}, {"iscrowd": 0, "category_id": 1, "bbox": [142.0, 494.0, 25.0, 18.0], "area": 298, "segmentation": {"size": [512, 512], "counts": "j_W22l?3M2M4M2M40O00010O001O001O00001O001O00001O0N3L3N\\P\\5"}, "image_id": 240, "id": 3684}, {"iscrowd": 0, "category_id": 1, "bbox": [461.0, 15.0, 47.0, 27.0], "area": 922, "segmentation": {"size": [512, 512], "counts": "e`V75k?8H8H00O01000000000000O01000000000000O01000000000000O01000000000000O01000000000000O0105K8H7Imn1"}, "image_id": 241, "id": 3685}, {"iscrowd": 0, "category_id": 1, "bbox": [505.0, 60.0, 7.0, 10.0], "area": 58, 
"segmentation": {"size": [512, 512], "counts": "mal72n?7I0O1000O10TN"}, "image_id": 241, "id": 3686}, {"iscrowd": 0, "category_id": 1, "bbox": [134.0, 64.0, 208.0, 171.0], "area": 23597, "segmentation": {"size": [512, 512], "counts": "\\US26j?7H8SA@l=f0nA^On=i0kAWOU>o0eAQO[>Z104L7H3N00000000O10O10000000O1000O1000000SNPNjEo1W:WNcEi1]:^N\\Eb1d:eNTE\\1k:lNnDT1R;SOgDm0Y;ZO`De0a;BXD>h;IQD7o;m100O1000O10000000O10O1000000000O10O1000000000O0100000000000O01000000000O10O1000000000O0100000000000O010000000000O0100000000000O01000000000O10O1000000000O10O10000000O1000O10000000O10O10000000O1000O10000000O1000O10000]OWLoDi3P;_LiD`3X;gLaDY3_;d00000000O010000000000O010000000000O0100000000000O01000000000O10O1000000000O10O1004L7I7H8I6J7I7I7I7I7H8I7I6J7I7I7I7H8I7I7Iiid2"}, "image_id": 241, "id": 3687}, {"iscrowd": 0, "category_id": 1, "bbox": [401.0, 288.0, 85.0, 164.0], "area": 10094, "segmentation": {"size": [512, 512], "counts": "YiX63m?7I7H8I7I7I7I7I7I7H8I7I7I8H7I7I7H3N0000002N7I7H8I7I7I0000000O1000O10000000O1000O10000000O1000O1000000000O10O1000000000O1000O1000007I7I7I7H8I7I7I7I7I7I7H8lM`BZ1g=^NaB[1Y>I7I7I7I8G8IWb<"}, "image_id": 241, "id": 3688}, {"iscrowd": 0, "category_id": 1, "bbox": [496.0, 293.0, 16.0, 112.0], "area": 939, "segmentation": {"size": [512, 512], "counts": "XYh75k?7H8I7I7I7I7I7I7H8I7I7I7I6J7H8dF"}, "image_id": 241, "id": 3689}, {"iscrowd": 0, "category_id": 1, "bbox": [334.0, 413.0, 70.0, 99.0], "area": 5073, "segmentation": {"size": [512, 512], "counts": "V]W52n?7I7H8I7I7I7I8H7I7H8I7I7I3M0000000O10O1000000000O10O1000000000O10O1000000000O10O1000000000O10O100000000mNoBWOP=i0XCPOh1O0000001O0000001O0000001O000000001O0000001O0000001O0000001O0000001O000000001O0000001O0000001O0000001O0000001O0000001O000000001O0000001O0000001O0000001O0000001O000000001O0000001O0000001O0000001O0000001O000000001O0000001O0000001O0000001O0000001O000000001O0000001O0000001O0000001O0000001O00000000L4L4L4L4K5L4L4L4L4L4K5L4L4L4L4K5L4L4L4L4KU`[5"}, "image_id": 242, "id": 3691}, {"iscrowd": 0, "category_id": 1, "bbox": [52.0, 217.0, 59.0, 132.0], "area": 3906, "segmentation": {"size": [512, 512], "counts": "dZj01k?5J5L4L4K6K4L4L4VAmNb>\\1K4L4K5L5K4L4K5L5K4K5L4L4L2M5L4L4K6K4L4M30L4L4K5L5K4K5L4L5K4K5L4L5J5L4L4L5J5L4L4K5L5K4L4K5L5K4K5L4LUYX6"}, "image_id": 242, "id": 3692}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 372.0, 54.0, 121.0], "area": 2196, "segmentation": {"size": [512, 512], "counts": "d;i3WCiA=W>_OmAb0S>XORB2A>Z?000010O000000010O0000010O000000010O0000010O000001O01O0N2L4K6KWcT7"}, "image_id": 242, "id": 3693}, {"iscrowd": 0, "category_id": 1, "bbox": [228.0, 0.0, 284.0, 239.0], "area": 49982, "segmentation": {"size": [512, 512], "counts": "URb31m?2N2O2i@M]>5aAM]>6_AN^>4`AN_>4]AOa>3]A0`>2^A0`>j0M2\\BbNe<`1WCdNg<]1WCeNjBZA>f>EWA:i>?0000O01000WO"}, "image_id": 249, "id": 3707}, {"iscrowd": 0, "category_id": 1, "bbox": [461.0, 64.0, 15.0, 14.0], "area": 118, "segmentation": {"size": [512, 512], "counts": "UbV72m?2N2N2N2N2O100O01N2N2N2N2N2Oima0"}, "image_id": 249, "id": 3708}, {"iscrowd": 0, "category_id": 1, "bbox": [468.0, 109.0, 44.0, 49.0], "area": 1191, "segmentation": {"size": [512, 512], "counts": "mSZ72l?2O2N2N2M2O2N2M3N1O2O1O10O100000N2N1O2N1N2O200O10O100000O10O1000O1N1O2N2000O010000000NYL"}, "image_id": 249, "id": 3709}, {"iscrowd": 0, "category_id": 1, "bbox": [222.0, 238.0, 60.0, 67.0], "area": 1970, "segmentation": {"size": [512, 512], "counts": 
"kX_33l?1N3O1O0N3N2M2O2M3K]Oj@d0U?5M3N2M2O2M3N1N3N2JeNcA]1\\>dNcA^1Z>6N1O2M3N1N3N2N11O1N1DQBeNR>Y1oAfNS>W1PBfNR>Y1PBeNR>X1PBfNS>Y1:01000OgN]AU1h>O0N3N2M2O2N2M2O2M3N1N3N2M2O2M3N1N3Nkgb3"}, "image_id": 249, "id": 3710}, {"iscrowd": 0, "category_id": 1, "bbox": [333.0, 282.0, 55.0, 58.0], "area": 1642, "segmentation": {"size": [512, 512], "counts": "oiV51m?3N1N3N2M3N1O2M3N1N3N2N1N3N2M3N1N3N2N1N3N2M3N1N3N2N10100OO2M3N2M2O2N2M2O2M3N2M2O2N2M2O2M3N1N3N2N2M2O2M3N1N3Ndfm1"}, "image_id": 249, "id": 3711}, {"iscrowd": 0, "category_id": 1, "bbox": [349.0, 337.0, 80.0, 63.0], "area": 2166, "segmentation": {"size": [512, 512], "counts": "nk^52l?3N1N3N2M2O2WOClA?R>CkA?S>DkA>S>CkA`0R>ClA>S>DjA?S>ClA?R>CkA?T>CjA?T>BjAa0U>e01000O10O10O10N2M2O2M3N1N3N2M3N1O2M3N1N3N2M2O2M3N1O2O10O10O10O1000Fc@O]?Of@1Z?Of@1Z?Nf@2[?Ne@0Z?3f@KZ?6g@HX?;52M2O2N2M2O2000O01000O1M2O2M3N1N3N2N1N3N2M2OhTY1"}, "image_id": 249, "id": 3712}, {"iscrowd": 0, "category_id": 1, "bbox": [461.0, 364.0, 45.0, 40.0], "area": 868, "segmentation": {"size": [512, 512], "counts": "mkV71n?2M2O2M3N1N3N2M3N1N3N2N11000O01000O01000O01000O01000O01000O01000O01N2M2O2M3N1N3N2M2O2M3Noc2"}, "image_id": 249, "id": 3713}, {"iscrowd": 0, "category_id": 1, "bbox": [385.0, 395.0, 18.0, 18.0], "area": 171, "segmentation": {"size": [512, 512], "counts": "elP61n?2N2M3N1O2M3N2N1010M3N1O2N2M2O2N2N`Sf1"}, "image_id": 249, "id": 3714}, {"iscrowd": 0, "category_id": 1, "bbox": [331.0, 238.0, 87.0, 47.0], "area": 2430, "segmentation": {"size": [512, 512], "counts": "ogU58Y??N200010O00000000000000000000001O0001O0000000000000001O000000000001O0000000001O000000000000000001WAYOW>g0[AGe>j01O0001O000000000000000001O0000000M3C=001O00000001O00000000000000000I7Ajh^1"}, "image_id": 250, "id": 3715}, {"iscrowd": 0, "category_id": 1, "bbox": [326.0, 264.0, 13.0, 12.0], "area": 122, "segmentation": {"size": [512, 512], "counts": "]XS52l?5K3O0O2O00000001O0O2N3LfWf2"}, "image_id": 250, "id": 3716}, {"iscrowd": 0, "category_id": 1, "bbox": [275.0, 295.0, 35.0, 63.0], "area": 1828, "segmentation": {"size": [512, 512], "counts": "ijY4;Y?<]AFb>;\\AGc>:[AGe>:XAIg>7XAKg>6WALh>5VAKk>c02O1O1O1O1N2O1O1001OO1N2O1O11O001O1O1O1O001O1O1O1O001O1O1O1O001O1O1O1O1O001O1O1O"}, "image_id": 251, "id": 3721}, {"iscrowd": 0, "category_id": 1, "bbox": [390.0, 22.0, 45.0, 47.0], "area": 1038, "segmentation": {"size": [512, 512], "counts": "cQS62m?2N2N2N2M2O2N2N2N2M3N1O2N2N2M3N2N1O2N2M10O100000001N3N2N2N2N2M2O2N2N2N2M3N1O2N2N2M3N2N1Ol^V1"}, "image_id": 251, "id": 3722}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 7.0, 8.0], "area": 38, "segmentation": {"size": [512, 512], "counts": "07i?00001OM3N2NR`l7"}, "image_id": 252, "id": 3723}, {"iscrowd": 0, "category_id": 1, "bbox": [200.0, 0.0, 15.0, 14.0], "area": 146, "segmentation": {"size": [512, 512], "counts": "WPT34i?3M3O2O01O01O01O01O01O0O1M4Lo_d4"}, "image_id": 252, "id": 3724}, {"iscrowd": 0, "category_id": 1, "bbox": [254.0, 0.0, 59.0, 35.0], "area": 1331, "segmentation": {"size": [512, 512], "counts": "_Po32l?3L3M3N3L3N3O00001O001O00001O001O00001O001O00001O001O00001O00001O001O01O010O01O01O010O01OO2L3N3L31O010OO1M4L3N3L3N2Mj_S3"}, "image_id": 252, "id": 3725}, {"iscrowd": 0, "category_id": 1, "bbox": [424.0, 0.0, 84.0, 25.0], "area": 1385, "segmentation": {"size": [512, 512], "counts": "VPd63h?5O101O000000001O000000001O000000001O000000001O000000001O000000001O000000001O000000001O0000001O000000001O000000001O000000001O000000001O000000001O000000001O00L4K5L4K5KUP2"}, "image_id": 252, "id": 3726}, {"iscrowd": 0, "category_id": 1, "bbox": [214.0, 5.0, 25.0, 
17.0], "area": 245, "segmentation": {"size": [512, 512], "counts": "ZP[31m?3L303M010N101O01O010O01O01O010O01O01O010O0N2M4Mf_X4"}, "image_id": 252, "id": 3727}, {"iscrowd": 0, "category_id": 1, "bbox": [123.0, 8.0, 34.0, 34.0], "area": 639, "segmentation": {"size": [512, 512], "counts": "i`m13j?3N3M2N3L3N2N3N110O010O0010O00k@ZOQ?f0l@\\OT?i00O0010O0010O0O2M2M4M2N3M2M3N3M2N__a5"}, "image_id": 252, "id": 3728}, {"iscrowd": 0, "category_id": 1, "bbox": [183.0, 18.0, 19.0, 15.0], "area": 166, "segmentation": {"size": [512, 512], "counts": "k`k21m?2M3N3M2010O00010O010O00010O0O2M2M3M\\oj4"}, "image_id": 252, "id": 3729}, {"iscrowd": 0, "category_id": 1, "bbox": [12.0, 22.0, 25.0, 20.0], "area": 289, "segmentation": {"size": [512, 512], "counts": "QQ62l?2M4L3M310O00010O00010O010O00010O00010O00M4M2M3MV_]7"}, "image_id": 252, "id": 3730}, {"iscrowd": 0, "category_id": 1, "bbox": [401.0, 31.0, 54.0, 38.0], "area": 1449, "segmentation": {"size": [512, 512], "counts": "faX66f?4K5K5K5L5O0001O0001O0001O0001O0001OM310PASOk>Q1000010O0000M4O0001O0001O0001O01O000L4K6O0000001[Of@c0[?0001O00000K5L5Jk^l0"}, "image_id": 252, "id": 3731}, {"iscrowd": 0, "category_id": 1, "bbox": [40.0, 34.0, 52.0, 46.0], "area": 1468, "segmentation": {"size": [512, 512], "counts": "nQd02k?3N3L3N2M4M2N3L3N3L3N2O20O010O0010O0010O0010O001M2O101O010O01O01O010O010O00010ON3L3N3L3N2M4]Of@<\\?Bf@M4L3N2M4M2M4L3N2M4L3N2M4M210O01O01O010O01L3M3N3L3M3N3L3N3L3M3N3L3N2M4L3N3L3M3N3Lk]d5"}, "image_id": 252, "id": 3735}, {"iscrowd": 0, "category_id": 1, "bbox": [197.0, 63.0, 22.0, 17.0], "area": 221, "segmentation": {"size": [512, 512], "counts": "WbR32l?2M4M2O2O00010O010O00010O010O00010O00M4L3Nm]b4"}, "image_id": 252, "id": 3736}, {"iscrowd": 0, "category_id": 1, "bbox": [173.0, 70.0, 34.0, 31.0], "area": 604, "segmentation": {"size": [512, 512], "counts": "`bf22k?4M2Z@I`?=N3O00N3Od@FR?:l@HT?b010O00010O01O01O010O0002O0O0010OO1M4M2N201O0M3M4M`]h4"}, "image_id": 252, "id": 3737}, {"iscrowd": 0, "category_id": 1, "bbox": [24.0, 79.0, 20.0, 17.0], "area": 190, "segmentation": {"size": [512, 512], "counts": "hR<1l?3N3M2N30O01O01O010O01O01O010O01OM4M2M^mY7"}, "image_id": 252, "id": 3738}, {"iscrowd": 0, "category_id": 1, "bbox": [479.0, 87.0, 15.0, 14.0], "area": 133, "segmentation": {"size": [512, 512], "counts": "ob_72l?3L3N3N10010O010O0001N1M4M2NWm8"}, "image_id": 252, "id": 3739}, {"iscrowd": 0, "category_id": 1, "bbox": [375.0, 92.0, 61.0, 44.0], "area": 1524, "segmentation": {"size": [512, 512], "counts": "bck53k?2M4M2M3N3L301O01O01O01O010OO2M2M3N3M2010O00010O010O00010O00010O010O00010O010O00010O010O00010O00010M2N3L3N2M4M2M4L3N2M4MflU1"}, "image_id": 252, "id": 3740}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 93.0, 28.0, 45.0], "area": 751, "segmentation": {"size": [512, 512], "counts": "d3a0\\?3N3M2M4M2M3N3M2M4N11O010M2M3N3M2M4M2M4M2N2M4M2M4M2N2Mola7"}, "image_id": 252, "id": 3741}, {"iscrowd": 0, "category_id": 1, "bbox": [463.0, 97.0, 28.0, 28.0], "area": 461, "segmentation": {"size": [512, 512], "counts": "ecW71m?2M3N3M2N3L3N3M2N3O01O010O01O010O01O001L3N3M2M3N3M2M4Mi\\:"}, "image_id": 252, "id": 3742}, {"iscrowd": 0, "category_id": 1, "bbox": [209.0, 98.0, 54.0, 46.0], "area": 1463, "segmentation": {"size": [512, 512], "counts": "mcX32l?2M4M2M3N3L3N3L3N2N3L3010O0010O0010ORAPOl>R110O00010O01M12O0010O0010O010O00010O010O00010O0101L3L3N3L3N2M4M2N3L3N^\\l3"}, "image_id": 252, "id": 3743}, {"iscrowd": 0, "category_id": 1, "bbox": [441.0, 107.0, 14.0, 14.0], "area": 104, "segmentation": {"size": [512, 512], "counts": 
"bcl62m?2N2N3N1N00001O01O2N2N2O2Ma\\l0"}, "image_id": 252, "id": 3744}, {"iscrowd": 0, "category_id": 1, "bbox": [173.0, 125.0, 24.0, 18.0], "area": 236, "segmentation": {"size": [512, 512], "counts": "Wdf21l?3N2M4M2010O0010O0010O0010O00010O0010O0N3L3M3Nn[m4"}, "image_id": 252, "id": 3745}, {"iscrowd": 0, "category_id": 1, "bbox": [67.0, 128.0, 22.0, 24.0], "area": 321, "segmentation": {"size": [512, 512], "counts": "bdQ11l?3N3L3N3L3N2M4O001O01O010O000N3L3N3L3M3N3Ln[c6"}, "image_id": 252, "id": 3746}, {"iscrowd": 0, "category_id": 1, "bbox": [34.0, 132.0, 15.0, 14.0], "area": 128, "segmentation": {"size": [512, 512], "counts": "\\Ta02k?3N2N3N110O01O01O010O01M2N2Mk[W7"}, "image_id": 252, "id": 3747}, {"iscrowd": 0, "category_id": 1, "bbox": [419.0, 142.0, 26.0, 22.0], "area": 336, "segmentation": {"size": [512, 512], "counts": "jda63k?2M3N3M2M40O00010O010O00010O010O00010O001L3N2M4M2M][Q1"}, "image_id": 252, "id": 3748}, {"iscrowd": 0, "category_id": 1, "bbox": [460.0, 143.0, 18.0, 15.0], "area": 159, "segmentation": {"size": [512, 512], "counts": "gTV72l?2M3N3N1010O0010O0010O0010OO1N3L3N^k`0"}, "image_id": 252, "id": 3749}, {"iscrowd": 0, "category_id": 1, "bbox": [351.0, 145.0, 59.0, 44.0], "area": 1485, "segmentation": {"size": [512, 512], "counts": "Ye_52l?2M4M2N3L3N3L3N2N3N1010O0010O010O0010O0010O0010O0010O010O0010O0010O0010OO1M4M2N3L3N30O0N2M4M2N3L3N3L3N2N3L3N3L3N2N3L[kb1"}, "image_id": 252, "id": 3750}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 146.0, 30.0, 57.0], "area": 1164, "segmentation": {"size": [512, 512], "counts": "h4^1`>3L3O1010O01O01O010O01ON3M2M012M4M2N3L3N2M4M2M4M2N7H3N3L3NUk`7"}, "image_id": 252, "id": 3751}, {"iscrowd": 0, "category_id": 1, "bbox": [183.0, 155.0, 59.0, 39.0], "area": 1543, "segmentation": {"size": [512, 512], "counts": "dek21l?3N2N3L3N3L3N2M4M2N3N100010O010O00010O010O00010O00010O010O02N01O00001O0N200000000000000000000000010O0000000000000J6^O]kV4"}, "image_id": 252, "id": 3752}, {"iscrowd": 0, "category_id": 1, "bbox": [65.0, 160.0, 44.0, 54.0], "area": 1453, "segmentation": {"size": [512, 512], "counts": "XfP14d?N`@4]?Oa@4\\?9M3N3M2M4M2M4M2N201O0O2M2M3N3O010O0010O0010O01O0N2N3L3N3L3N2N3L3N3L3N3M2M3N3L3N3MgZY6"}, "image_id": 252, "id": 3753}, {"iscrowd": 0, "category_id": 1, "bbox": [113.0, 163.0, 27.0, 28.0], "area": 471, "segmentation": {"size": [512, 512], "counts": "eeh13j?3M4M2M3N3L3N210O010O00010O00010O010O00O2L3N3L3M3N3Lhji5"}, "image_id": 252, "id": 3754}, {"iscrowd": 0, "category_id": 1, "bbox": [405.0, 172.0, 37.0, 34.0], "area": 745, "segmentation": {"size": [512, 512], "counts": "RfZ61m?2M4M2M4M2M3N3L3O2O0O101O010O010O00010O010O00010OUOn@h0V?0O0001L3N3L3N3L3N2N3LZjR1"}, "image_id": 252, "id": 3755}, {"iscrowd": 0, "category_id": 1, "bbox": [172.0, 183.0, 30.0, 30.0], "area": 542, "segmentation": {"size": [512, 512], "counts": "TVf21m?3L3^@JX?9e@IX?:e@JX?`0O1010O010O01O01O010O01O01O010O0O2M2M3N3L3N3M2M3NRjj4"}, "image_id": 252, "id": 3756}, {"iscrowd": 0, "category_id": 1, "bbox": [15.0, 192.0, 12.0, 13.0], "area": 101, "segmentation": {"size": [512, 512], "counts": "Wf73k?3M2M301O010O00O2L3N3MoYb7"}, "image_id": 252, "id": 3757}, {"iscrowd": 0, "category_id": 1, "bbox": [107.0, 193.0, 48.0, 61.0], "area": 1636, "segmentation": {"size": [512, 512], "counts": "age11m?3L3N3M2M3N3L3N3M2M4M2M3N3M2M4M2N3L3N201O010O00010O010ON2M4MO10O103M2M4M2M3N3M2M4M2M4M2N2Ha@Lc?0a@Ma?1QZb5"}, "image_id": 252, "id": 3758}, {"iscrowd": 0, "category_id": 1, "bbox": [329.0, 193.0, 61.0, 47.0], "area": 1702, "segmentation": {"size": [512, 512], "counts": 
"lfT51l?3N3L3N2M4M2M4M2M3N3M2010O0010O0010O0010O0010O010O00010O010O00010ON3M2O110O0010O0010O0010O0010OO1N3L3N3L3N2M4M2M4M2M3N3Ldil1"}, "image_id": 252, "id": 3759}, {"iscrowd": 0, "category_id": 1, "bbox": [181.0, 216.0, 45.0, 56.0], "area": 1419, "segmentation": {"size": [512, 512], "counts": "Uhj21l?3M3N3L3N3L3N2M4M2M4L3N2M4M2M4M2M3O2O00010O010O00010ON3M2M3N3L3N3L3N2M4M2M4L3N2M4M2M4M2MQi^4"}, "image_id": 252, "id": 3760}, {"iscrowd": 0, "category_id": 1, "bbox": [395.0, 236.0, 39.0, 35.0], "area": 745, "segmentation": {"size": [512, 512], "counts": "ngU62k?3N2M4L3N2M4M201O01O010O01O01O01O01O010O01O01O010O01O01Og@\\OV?g0010O0010O^Ol@5W?Ik@5X?Gl@5a?M3NQhV1"}, "image_id": 252, "id": 3761}, {"iscrowd": 0, "category_id": 1, "bbox": [308.0, 238.0, 39.0, 49.0], "area": 1067, "segmentation": {"size": [512, 512], "counts": "cXj43k?2M3N3M2M4M2N3M2M4M2N2N3L3N3M2M40O0010O0010O0N3M2M4M2N2M4M2O2O0Hk@FU?7m@GU?7n@EV?7;N2N3MZXb2"}, "image_id": 252, "id": 3762}, {"iscrowd": 0, "category_id": 1, "bbox": [344.0, 258.0, 29.0, 28.0], "area": 510, "segmentation": {"size": [512, 512], "counts": "dX\\52k?4M2M3M4M2M3N3O0010O00010O010O00010O00010O0N3L3N2M4L3N3LiWU2"}, "image_id": 252, "id": 3763}, {"iscrowd": 0, "category_id": 1, "bbox": [381.0, 267.0, 28.0, 44.0], "area": 1103, "segmentation": {"size": [512, 512], "counts": "ain55[?`0@`0J600000000000000010O000000000000000000000000000F:@TXc1"}, "image_id": 252, "id": 3764}, {"iscrowd": 0, "category_id": 1, "bbox": [291.0, 303.0, 57.0, 56.0], "area": 1482, "segmentation": {"size": [512, 512], "counts": "Sja43l?2M2O2M3N1N3N2M3N1O2M3N1N3N2O010O1TAoNh>P1WAROi>R101000O01000O01000O01000O01000O1mNWAn0h>QOZAo0l>O01000O01000M2O2M3N1N3N2M2O2M3N1N3N2M2Oaea2"}, "image_id": 252, "id": 3765}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 306.0, 21.0, 55.0], "area": 635, "segmentation": {"size": [512, 512], "counts": "b9g1Y>00O2L3N3L3N3L3N2N3L3N3L3N2N3L3N3L3N3M2MYVe7"}, "image_id": 252, "id": 3766}, {"iscrowd": 0, "category_id": 1, "bbox": [357.0, 311.0, 38.0, 34.0], "area": 756, "segmentation": {"size": [512, 512], "counts": "]jb51m?2M3N3L3N2M4M2M4M200010O01O01O010O01O01O010O01O01O010O01OVOm@g0V?N2M4M2M4M2M3N3LoUj1"}, "image_id": 252, "id": 3767}, {"iscrowd": 0, "category_id": 1, "bbox": [31.0, 319.0, 51.0, 71.0], "area": 2027, "segmentation": {"size": [512, 512], "counts": "ik?2l?3M2M4M3M2M4M2M4M3BUOcAm0[>UObAo0[>TObAn0[>UOcAn0Z>=M3N3M2M3N2N3N11O0010O0001N1N3M2M4M2N2M4M201O0N2N3F\\ASOg>k0[ASOh>i09N3M2M3N3M2M4M2M3Ngef6"}, "image_id": 252, "id": 3768}, {"iscrowd": 0, "category_id": 1, "bbox": [97.0, 325.0, 33.0, 49.0], "area": 734, "segmentation": {"size": [512, 512], "counts": "_k`12l?3L3N2M4M2M4M2I]Oo@e0o>8M2N3M1O0O0100O2O3M2M3N3L3N3O010O0O1N3L3N3L3N2Mhen5"}, "image_id": 252, "id": 3769}, {"iscrowd": 0, "category_id": 1, "bbox": [262.0, 360.0, 65.0, 50.0], "area": 1829, "segmentation": {"size": [512, 512], "counts": "R\\S41m?3L3N3M2N2N3M2M4M2N30OM3N3O001O010O01O010O01O010O01O010O01TAoNg>Q1WAROi>R10010O010O0010O0O2M200010O01O010O01O010O01O010L3N2N3L3N3M2M4M2N2M4MUTl2"}, "image_id": 252, "id": 3770}, {"iscrowd": 0, "category_id": 1, "bbox": [101.0, 366.0, 49.0, 66.0], "area": 1716, "segmentation": {"size": [512, 512], "counts": "Tmb12k?4M2M4M2M3N3L3N3M2M3N3L3N3L3N3L3N2N3L3N3L3N201O01O0N3M2M4M2M3N3M2M4M2N2M4M2M4M2N3L3N2M4M2N3L3N2M[dd5"}, "image_id": 252, "id": 3771}, {"iscrowd": 0, "category_id": 1, "bbox": [362.0, 387.0, 16.0, 18.0], "area": 169, "segmentation": {"size": [512, 512], "counts": "`\\e51l?3N3L3N2M4O010O01O01ON3M2M4M2MlcR2"}, "image_id": 252, "id": 3772}, 
{"iscrowd": 0, "category_id": 1, "bbox": [143.0, 390.0, 53.0, 56.0], "area": 1403, "segmentation": {"size": [512, 512], "counts": "[mW22l?3N2N2N2N1O2N2N2M3N2N2N2N1O2N2M3N2N2N2N1O00000O01000000000000O012N2N2N1O2N2N2M3N2N2N2N1O2N2M3N2N2N2N2N1OZcm4"}, "image_id": 252, "id": 3773}, {"iscrowd": 0, "category_id": 1, "bbox": [14.0, 394.0, 35.0, 38.0], "area": 828, "segmentation": {"size": [512, 512], "counts": "T]73k?2M4M2M3M4M2M4M2M301N1N30O00010O0010O0010O0010O00O2L3M3N3L3N3L3M3N6I]SW7"}, "image_id": 252, "id": 3774}, {"iscrowd": 0, "category_id": 1, "bbox": [45.0, 400.0, 21.0, 33.0], "area": 359, "segmentation": {"size": [512, 512], "counts": "[mf03e?N`@4^?N`@5\\?9N3L3N2N3L3N12M2M4M2N3L3N3M2M3N3L3N]cn6"}, "image_id": 252, "id": 3775}, {"iscrowd": 0, "category_id": 1, "bbox": [330.0, 404.0, 27.0, 27.0], "area": 439, "segmentation": {"size": [512, 512], "counts": "V]U52k?3N3M2M4M2N2M4N110O00010O010O00010O01N1N2M4M2M4M2M4MVS]2"}, "image_id": 252, "id": 3776}, {"iscrowd": 0, "category_id": 1, "bbox": [76.0, 410.0, 19.0, 17.0], "area": 198, "segmentation": {"size": [512, 512], "counts": "U]V11l?3N3L3N2O2O010O00010O010O0001L3N3L3NSS`6"}, "image_id": 252, "id": 3777}, {"iscrowd": 0, "category_id": 1, "bbox": [240.0, 410.0, 66.0, 52.0], "area": 1674, "segmentation": {"size": [512, 512], "counts": "e]h34j?2M3N3L3N3L3N2M4M2M4M20010O010O00010O010O00010O01O0ROVAg0i>WOYAi0P?0O00010O010O00010O010O00010O010O00010O010O00010O0010O001O0N2M4M2M4M2M3N3L3N[bV3"}, "image_id": 252, "id": 3778}, {"iscrowd": 0, "category_id": 1, "bbox": [23.0, 440.0, 25.0, 28.0], "area": 353, "segmentation": {"size": [512, 512], "counts": "Rn;2l?2M4M2N3N101O01O01f@CP?XOmAj0Q>XOmAj0Q>XOmAj0Q>XOmAj0Q>XOmAj0Q>XOmAj0Q>XOmAj0Q>e0CB2O1N2N2N2N2N2N2N2N2N2N2N2N2N3N1N2N2N100000000000000000000O2N2N2N3M2O1N2N2N2N2N2N2NXRd5"}, "image_id": 253, "id": 3788}, {"iscrowd": 0, "category_id": 1, "bbox": [272.0, 424.0, 180.0, 88.0], "area": 8149, "segmentation": {"size": [512, 512], "counts": "g_X42m?1O2M3N2N1O2M3N2N1O2M2O1O1N2O1O1O1N2O1O1N2O1000000N2O1O1N2O1O1N2O1O1N2O1O1N2O1O1N2O1O1N2O1O1N2O1O1N2O1O1N2O1O1N2O1O1N2O1O1N2O1001O1O001O1O1O001O1O1O001O1O1O001O1O1O001O1O1O1O001O1O1O001O1O1O001O1O1O001O1O1O001O1O1O001O1O1O001O1O1O001O1O1O001O1O1O1O001O1O1O001O1O1O001O1O1O001O1O1O001O1O1O001O1O1O001O1O1O001O1O1O1O001O1O1O001O1O1O001O1O1O001O1O1O001O1O1O001OQ`m0"}, "image_id": 253, "id": 3789}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 336.0, 21.0, 18.0], "area": 299, "segmentation": {"size": [512, 512], "counts": "h::f?00000000000O100I7000000000000000000009GWUe7"}, "image_id": 254, "id": 3790}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 22.0, 22.0], "area": 348, "segmentation": {"size": [512, 512], "counts": "0a0_?001O00001O001O00001O001OO1M3N2M3N2M3N2N2MSPe7"}, "image_id": 256, "id": 3791}, {"iscrowd": 0, "category_id": 1, "bbox": [66.0, 0.0, 52.0, 28.0], "area": 924, "segmentation": {"size": [512, 512], "counts": "YPQ12k?3N3M2N3O00001O001O001O00001O001O00001O001O00001O001O001O00001O001O00001O001O010O00O2M2M4M2M4M2N2M4M2NmoT6"}, "image_id": 256, "id": 3792}, {"iscrowd": 0, "category_id": 1, "bbox": [35.0, 13.0, 26.0, 25.0], "area": 360, "segmentation": {"size": [512, 512], "counts": "l`a01m?2N3M2N3M3M2N3N110O010O010O10O010O0O2M2N3M2N3M2N3M\\_Q7"}, "image_id": 256, "id": 3793}, {"iscrowd": 0, "category_id": 1, "bbox": [56.0, 26.0, 46.0, 49.0], "area": 1304, "segmentation": {"size": [512, 512], "counts": "lQl01m?3BNj@4T?Nj@5S?Nj@4T?Nj@5S?=N3N110O010O0N3M2N3M2N30O010O010O010O010O010O0N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2Njn\\6"}, "image_id": 256, "id": 
3794}, {"iscrowd": 0, "category_id": 1, "bbox": [301.0, 65.0, 53.0, 41.0], "area": 1331, "segmentation": {"size": [512, 512], "counts": "gbf43k?2M4M2M3M4M2M4M2N2010O01O01O010O00010O010O00010O010O00010O010O00010O010O00O2M2M3O2O0001M2M4M2M4_Oi@4Z?Jh@4Z?Ii@4d?M2Mam^2"}, "image_id": 256, "id": 3795}, {"iscrowd": 0, "category_id": 1, "bbox": [19.0, 70.0, 54.0, 53.0], "area": 1807, "segmentation": {"size": [512, 512], "counts": "`c91l?3AMn@7n>Lo@7n>Lo@7o>Lm@8o>>M3M4L3M3O2O00010O00010O00010O00O2L31O01iN\\AP1d>mN_AT1g>O00010O00010O0001O01O01O01O01O01L3M3M4L3M3M4L3M3M4LY]k6"}, "image_id": 256, "id": 3796}, {"iscrowd": 0, "category_id": 1, "bbox": [360.0, 86.0, 42.0, 50.0], "area": 1169, "segmentation": {"size": [512, 512], "counts": "kSd53j?3N2M4M2M4M2N2M4M210O01ON3L3N3L3N2M4M2010O01O01O010L3N2M4M2M4M2N2M4M2M4M2M4M2N2M4MSmf1"}, "image_id": 256, "id": 3797}, {"iscrowd": 0, "category_id": 1, "bbox": [286.0, 90.0, 25.0, 26.0], "area": 394, "segmentation": {"size": [512, 512], "counts": "ZS_43k?2M4M2N3L3N2N30O01O010O01O01O010O01M2N3L3N2M4M2NQ]T3"}, "image_id": 256, "id": 3798}, {"iscrowd": 0, "category_id": 1, "bbox": [402.0, 95.0, 24.0, 23.0], "area": 402, "segmentation": {"size": [512, 512], "counts": "bSY61i?6K5J6N3O01O000001O01O000000010O0000000O2I6K5JSmZ1"}, "image_id": 256, "id": 3799}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 125.0, 58.0, 54.0], "area": 2001, "segmentation": {"size": [512, 512], "counts": "Z4c0^?O00010O00N3L3M3M4L3O104L010O00010O00010O00N3O00010OYAnN`>R1]AQOc>V11O01O01O01O01O01O01O01O000L5O00010O00O2L3M3M4@VAEm>8VAEm>8WADm>8?M3M4LlkR7"}, "image_id": 256, "id": 3800}, {"iscrowd": 0, "category_id": 1, "bbox": [429.0, 125.0, 52.0, 54.0], "area": 1868, "segmentation": {"size": [512, 512], "counts": "hdf61i?7J5J6J6O10001O01O000001O000M30000010O00ZAXOT>h0gA]OY>c0aAC_>P10O00000001O01O000000O2L30000001O01O0000O1J7J5J6K5J6J6K6IV\\?"}, "image_id": 256, "id": 3801}, {"iscrowd": 0, "category_id": 1, "bbox": [258.0, 130.0, 55.0, 80.0], "area": 2436, "segmentation": {"size": [512, 512], "counts": "\\UQ44h?5J5K5N201O0001O0001O01O0001O0001O01M2i@[On>n0L4K5M4O0oAdN^=\\1]BiNc=W1YBmNe=U1VBQOd=h1M31O0001O01O000001O01O0001O01O000SO`BZO`=b0dB_O[=5kAOKLc=8_B0IMc=7`B0JLc=8^B0KLc=8_B0JLc=7_Be0]=Q1M301O01O00010O0001O01O00010OoJ"}, "image_id": 256, "id": 3804}, {"iscrowd": 0, "category_id": 1, "bbox": [323.0, 163.0, 65.0, 51.0], "area": 2197, "segmentation": {"size": [512, 512], "counts": "\\fQ52h?6J6J6M4O0001O000001O0001O000001K4M3001O0003M1O0001O00O1J7I6L40N2000001O01O000001O0N200001O01O000000010O00000XO_A1a>IeA7[>CkA>U>[OQBe0e>0001L3I7J6JWjm1"}, "image_id": 256, "id": 3805}, {"iscrowd": 0, "category_id": 1, "bbox": [414.0, 181.0, 55.0, 49.0], "area": 1844, "segmentation": {"size": [512, 512], "counts": "oV_62i?5K5M3000001O01O000001J5J6J6J6J7N11O00000001O01O000001O0001O0O1L40010O00000001O01O000001O0001O000O1K5^OXAGn>3WAHn>3b0KYZe0"}, "image_id": 256, "id": 3806}, {"iscrowd": 0, "category_id": 1, "bbox": [159.0, 276.0, 59.0, 88.0], "area": 2606, "segmentation": {"size": [512, 512], "counts": "]j_22b00f>2XA0e>3XA1d>3YAOf>2XA1f>1VA2j>NTA4l>a0N101N110O00010O0N3L3M4M2M3N3L1@ZNgBf1Z=^NbBb1^=bN]B_1d=<3N1O2N20O2O0O101N100ON3N2N1N4M2N2M4M2N3L4M4M4M3L101N1BSAFP?8UACn>9>N3L3Nofb4"}, "image_id": 256, "id": 3807}, {"iscrowd": 0, "category_id": 1, "bbox": [252.0, 291.0, 71.0, 70.0], "area": 2573, "segmentation": {"size": [512, 512], "counts": 
"]Zn34j?2M4M2M3N3N1010O0010O0010ON2N3L3N3L3M3N3L3N3L3N2M4M210O00010O010O00010O010O00001N101O01O010O01O01kNmA;S>CPB=P>_OSBa0m=]OVBc0i=ZOZBf0g=WO[BN@d0U>[O_BN_Og0R>XOhBh0X>10O0010O01N1N3L3N3L3N2M4M2M`Un2"}, "image_id": 256, "id": 3808}, {"iscrowd": 0, "category_id": 1, "bbox": [342.0, 309.0, 64.0, 67.0], "area": 2399, "segmentation": {"size": [512, 512], "counts": "^Z[53f?O]@4`?7M3N3O0010ON2N3^AYOf=j0WBZOf=h0XBZOe=j0WBYOi=g0TB]Ok=d0RB^Oo=a0nACQ>=lAFT>S110O00010O010O00010O00010O010O00010O00010OM3N3L3M4M2M3M4M2M3M4M201O01O01O01O01L3N3L3M3N3L3M3Nked1"}, "image_id": 256, "id": 3809}, {"iscrowd": 0, "category_id": 1, "bbox": [397.0, 320.0, 70.0, 86.0], "area": 2768, "segmentation": {"size": [512, 512], "counts": "XlV62k?3M4L3M4L30001O01O010O00O2L3TO[OZBh0c=[OZBi0b=[OZBh0d=ZOYBi0d=ZOYBj0c=ZOYBi0g=WOVBl0j=f00O00010O00010O00010O00010O0M3M4L1O0000001O00003M3N2M4L3M3M4L3M3M4M20010O00010O0N2M4L3N2M4L3M3MlUf0"}, "image_id": 256, "id": 3810}, {"iscrowd": 0, "category_id": 1, "bbox": [470.0, 327.0, 42.0, 45.0], "area": 1231, "segmentation": {"size": [512, 512], "counts": "S[[72l?3L3M4L3M3O20O0001O0M3M4L3M3N30O00010O01O01O01O01O01O01O01O01O010O00010O00010O01O0`E"}, "image_id": 256, "id": 3811}, {"iscrowd": 0, "category_id": 1, "bbox": [133.0, 342.0, 26.0, 21.0], "area": 311, "segmentation": {"size": [512, 512], "counts": "RkR21m?2M4M2M3N3O010O010O00010O010O010O00010O01N1N3L3N2NTU`5"}, "image_id": 256, "id": 3812}, {"iscrowd": 0, "category_id": 1, "bbox": [228.0, 367.0, 68.0, 63.0], "area": 2146, "segmentation": {"size": [512, 512], "counts": "j[b31n?2N2N2N3TAHl=9RBJk=8SBJk=8SBJk=9RBIl=9SBHl=9RBJk=8SBJk=9RBIl=9RBIm=8RBJn=5PBMP>3nAOR>2kA0U>m01O2M1O000001O01O00001O2O2M2N2N2N3N1N2N2N3M2O1N2N1O001O01O000000010O00000002N2O2M2N2N2N3N1N2N2N3M2Ojc[3"}, "image_id": 256, "id": 3813}, {"iscrowd": 0, "category_id": 1, "bbox": [447.0, 372.0, 65.0, 52.0], "area": 2019, "segmentation": {"size": [512, 512], "counts": "blo61m?2M4L3M3M4M2M3M4L3M4M200010O00010O00010O00010O0010O001TAoNh>T101O01O01O01O01O01O01O01O010O01O01OM4M21O0O1M4L3N3L3M3M4L30010O001N1N2M4LQD"}, "image_id": 256, "id": 3814}, {"iscrowd": 0, "category_id": 1, "bbox": [130.0, 386.0, 66.0, 62.0], "area": 2190, "segmentation": {"size": [512, 512], "counts": "R]Q21m?2O2M2N3M2N3M2N3N2M2N3M2N3M210O010O0100O010O0100O010O001M2N3O0010O010OO2M3M2N3M2O2N110O010O010O010OUOjAMV>1kAOU>OnA1R>LPB4Q>IRB7m=GUB9l=DWB:j=DYB9j=EWB9k=EXB8k=EWB9k=EXB8i>O2M2N3Mebm4"}, "image_id": 256, "id": 3815}, {"iscrowd": 0, "category_id": 1, "bbox": [431.0, 427.0, 69.0, 66.0], "area": 2536, "segmentation": {"size": [512, 512], "counts": "omg63k?3L3Z@Hc?;010OM3N3c@@U?h0L3M3O20O0010O00010O01O01O01O01O010O01O01O01O02O0O2N101N1O2O0O2O0O2K4L5K4M4M2O1O01O01O01O0O2O0O1O2UOlAHT>5PBIQ>4TBIn=4UBKk=2ZBKh=1\\BMe=0`BN`=0cBN_=NfBO[=NiB1W=LnB1b>NPa5"}, "image_id": 256, "id": 3816}, {"iscrowd": 0, "category_id": 1, "bbox": [279.0, 436.0, 86.0, 67.0], "area": 2620, "segmentation": {"size": [512, 512], "counts": "Xo[41m?3M2N3M2N3M2N3M2N30O010O010OJZORAf0l>]OTAc0i>_OWAa0f>CZA=c>E]A;a>H_A8^>JbA6\\>M`A7]>g0N3M2O20O010O0N3M2N3M2N3D\\AXOf>e0^AXOe>g0[AWOg>i090O010O010O0010O0010O010O0O1M1000000000002N3M2O20O010O010O0010O010OO2M2N2N3L3N3M2N3M2N3M2N3M2NSRY2"}, "image_id": 256, "id": 3817}, {"iscrowd": 0, "category_id": 1, "bbox": [358.0, 472.0, 65.0, 40.0], "area": 1516, "segmentation": {"size": [512, 512], "counts": "f_c51m?3N2N2N2M2O2N2N2M300O01O1O1O1O00O1O1O1O1N2O1O1O1O1N2O1O1O1N2O1O1O1N2O1001O001O1O1O1O1O1O1O1O001O1O1O1O1O1O1N1N3N2N2N2M3N2N2N1N3NaP\\1"}, "image_id": 256, "id": 3818}, {"iscrowd": 0, "category_id": 1, 
"bbox": [275.0, 509.0, 7.0, 3.0], "area": 13, "segmentation": {"size": [512, 512], "counts": "ooY41m?2001O001O00Q`b3"}, "image_id": 256, "id": 3819}, {"iscrowd": 0, "category_id": 1, "bbox": [380.0, 0.0, 8.0, 4.0], "area": 20, "segmentation": {"size": [512, 512], "counts": "PPn52n?1O1O00O1O1O10PPn1"}, "image_id": 257, "id": 3820}, {"iscrowd": 0, "category_id": 1, "bbox": [424.0, 0.0, 88.0, 50.0], "area": 1988, "segmentation": {"size": [512, 512], "counts": "`Pd61m?3M2M4M2N2N3L301O001O00001O001O001O00001O00N2M3N2N2001O00001O001O001O001O001O001O001OO1O11O001O0o@YOi>g0TA]Ok>l0010O2N01O010O01XOQA>n>_OVA`0j>^OXAb0S?10O010O010O01O01O010O010O01O01O010O010O01O01O010O010OTO"}, "image_id": 257, "id": 3821}, {"iscrowd": 0, "category_id": 1, "bbox": [379.0, 21.0, 75.0, 61.0], "area": 2063, "segmentation": {"size": [512, 512], "counts": "gam51l?4M2N2N3L3N3M2N2M4M2N2N3L3N3M21O010O0010OM4M2M3N3L3N3N101O01O010O01O010O01O01OUA_OW>b0fA@[>?bAE]><`AFa>9\\AKc>6ZALf>h00O010O010O010O01O10O010O010O0N2M4M2N3M2M4M2N2M4M2N3L3N3M2Ncnl0"}, "image_id": 257, "id": 3822}, {"iscrowd": 0, "category_id": 1, "bbox": [103.0, 40.0, 149.0, 118.0], "area": 7905, "segmentation": {"size": [512, 512], "counts": "ebc11h?2[@0b?8N3O1BCXAFXA;e>GZA:d>IZA9d>HZA;c>HZA:d>c0M201O01000O010O001M3M2O2M2N3N2M2010O010O01000O010O0100O01N2MnAXNP>0oAd10_NS>_1nAbNS>[1mAhNR>W1mAkNT>R1lAQOT>l0mAVOR>h0nAZOS>d0lA_OS>?nABQ>=oAFP>9oAJQ>3PBNP>0PB3P>KoA7Q>GoAAPBa0o=]OQBe0P>YOoAj0Q>SOPBn0P>POPBS1P>kNoAW1Q>gNPB[1P>bNPBa1P>]NPBd1P>ZNRBg1T>O0100O010O0N3M3O010O100O0100O0100O010O0100O0100O0100O0100O010O01000O01VNQBb1n=]NTBb1m=[NUBf1S>O10O1O0N3M2O2O10O010O10O10O010O010O1M2O2WNmAd1W>10O010O1O0N3N0O002O1N3M2O2O0N3[OXANk>0VANl>0WANk>OXANj>1WAMl>0WANk>OWAOk>OVmQ4"}, "image_id": 257, "id": 3823}, {"iscrowd": 0, "category_id": 1, "bbox": [453.0, 55.0, 59.0, 60.0], "area": 1784, "segmentation": {"size": [512, 512], "counts": "gbR72k?4M2N2N3M2M3N3M2N2M4M2N2N3M2O100010O00001M2N2M4M2N2N3M21O010O0010O01O010O00010UA\\OX>d0fA_OZ>a0cAA]>?aAD_><^AFb>:\\AId>j0O010O0010O0100O010O010O10OhM"}, "image_id": 257, "id": 3824}, {"iscrowd": 0, "category_id": 1, "bbox": [24.0, 119.0, 115.0, 125.0], "area": 6271, "segmentation": {"size": [512, 512], "counts": "ZV<2m?2N2N2M2O2N2N2M3N2N1O2M3N2ETO_An0_>TO_An0^>TO`An0_>TO_An0_>;M3N2N2M3N1O2M3N2N2M3N1O2M3N2N2M3N1O2M3N2M3N2N10iNSC\\Om[CBc<=`CC^<=dCBZ<>iCBTmCBR`0jABV>?hACX>S1O010O0100O0100O01M3N1010O10N2M2N3M2O2M3M2O2M2N3N2M2N3N1N3M3N1N3M2O2M3MZ[j5"}, "image_id": 257, "id": 3825}, {"iscrowd": 0, "category_id": 1, "bbox": [306.0, 146.0, 69.0, 53.0], "area": 2126, "segmentation": {"size": [512, 512], "counts": "^Ui43j?3N3M2M3N3L3N3L3N2N3L3O2O01O01O010O010O00010O010O01O01O010O01O01O010O010O00010O010O01OO2M2M4N11O010O01O010ON3M2M3N3L3N3M2M3N3L3N3L3N3M2MR[T2"}, "image_id": 257, "id": 3826}, {"iscrowd": 0, "category_id": 1, "bbox": [366.0, 167.0, 63.0, 53.0], "area": 2050, "segmentation": {"size": [512, 512], "counts": "XVg52k?4M2M4M2M3N3L3N3L3N2M4N110O00010O010O00010O010O00010O0N3L3N2O20O010O00010ON3L30010O010O00O2M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3NbZY1"}, "image_id": 257, "id": 3827}, {"iscrowd": 0, "category_id": 1, "bbox": [110.0, 176.0, 99.0, 102.0], "area": 4493, "segmentation": {"size": [512, 512], "counts": "aWg12l?3N2N1N3N2M2O2N2M3N1N3m@YOj>i0TAXOk>i0TAYOi>Q1O10O10O10O10O1000O10O10O10SOlNlBU1R=lNoBT1nZO^Ac0f>YO]Ae0e>YO^Ad0P?M4M2N3L3N2N3L\\Yb2"}, "image_id": 257, "id": 3829}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 209.0, 66.0, 75.0], "area": 2806, "segmentation": {"size": [512, 512], "counts": 
"T7[1d>0000001O0O1000000000000O1000000001O2NiAgNF1n=V1\\BkND1P>R1\\BUOd=k0YBXOg=h0WBYOi=h0UBZOk=e0TB]Ol=c0RB_On=Z1000O1000O01N2N20000O1N2N1O2N2N2M10001O1O2N2M3mNWAl0k>QOXAm0o>N2N2N2M3N1O2N2N2N2M3N1O2N2NWhn6"}, "image_id": 257, "id": 3830}, {"iscrowd": 0, "category_id": 1, "bbox": [342.0, 215.0, 59.0, 54.0], "area": 1978, "segmentation": {"size": [512, 512], "counts": "gW[51m?3M2M4M2N3L3N2N3L3N3M2M4O00010O010O010O00010O010M2M4M2N201O010O010O010O00010O010O010O00010O010O01N1M3N3ZOYAOi>OYANk>OXANj>OYAOj>NXAOk>OXANj>ObYg1"}, "image_id": 257, "id": 3831}, {"iscrowd": 0, "category_id": 1, "bbox": [255.0, 239.0, 72.0, 51.0], "area": 2018, "segmentation": {"size": [512, 512], "counts": "Rho33k?2N3N1N3M2N3M2N3N2M2O20O10O010O010O010O10O0O2N2O010O01N1N3M3N1N30O010O01O0O2M2010O0100O010O010O010O010O01nNXAk0h>RO[An0d>PO^AP1j>0O010O010O01N2M2N3N1N3M2N3Db@2`?Lb@2a?La@1Zgl2"}, "image_id": 257, "id": 3832}, {"iscrowd": 0, "category_id": 1, "bbox": [36.0, 263.0, 105.0, 97.0], "area": 4500, "segmentation": {"size": [512, 512], "counts": "lYb01o?1N2N2N2N2N2N2N2N2g@@o>b0o@@o>b0o@@o>j0N2N3M2N2N2N2O10000000000000000000000O1N2N2N2WO]NRCc1n<_NPCa1P=aNnB_1R=cNlB]1T=eNjB[1V=gNhBY1X=iNfBW1Z=kNdBU1\\=mNbBS1^=oN`BQ1`=QO^Bo0b=g001O000000K_BlMa=T2aBjM`=U27N2N2N2N2N2N2N2N2N2O1N1O00000000000000000000000000002N2N2N2N2N2N200O1N2N2N2N2N2N2N2N2N2N2N2N3M2N2N2N2NPWi5"}, "image_id": 257, "id": 3833}, {"iscrowd": 0, "category_id": 1, "bbox": [240.0, 265.0, 64.0, 61.0], "area": 1987, "segmentation": {"size": [512, 512], "counts": "XYh33l?2N1N3BHQA:l>IQA9n>HQA:l>IQA:m>HQA:m>GRA:l>?N2M2O2N2O0100000O0100N1O2O1O01000O10O10O10O1000O01000O10O10O10O1000O010O1M3N1O2M3N1N3N2M2O2N2M3N1N3N2M2O2NffW3"}, "image_id": 257, "id": 3834}, {"iscrowd": 0, "category_id": 1, "bbox": [325.0, 265.0, 63.0, 54.0], "area": 2009, "segmentation": {"size": [512, 512], "counts": "ViR53k?2N3M3M2N3M2N3M2N3M2N3N2M2O2O010O010O010O10O010O010O010ON3M3N1N3N1100O010OO2M3M2001M3M2O2M2N3M2N3N1N3HQA[OR?b0PA\\OR?d06O0N3M2O2M2N3M3M2O\\gm1"}, "image_id": 257, "id": 3835}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 283.0, 18.0, 34.0], "area": 335, "segmentation": {"size": [512, 512], "counts": "m8P1o>0002O2M2N2N3N1N2N2N3M2O1N2N3M2O1Ncff7"}, "image_id": 257, "id": 3836}, {"iscrowd": 0, "category_id": 1, "bbox": [216.0, 300.0, 61.0, 65.0], "area": 1890, "segmentation": {"size": [512, 512], "counts": "YZ\\31n?2N1O2N2CJm@8Q?Jm@8Q?Jm@8Q?Jm@8Q?Jm@8Q?=N2N2M3N2N1O2N200000000000000000000O0100000000000000000000O1N1O2N2N2N2N2N2M3N2N2N2N2N2N1O2N2N2N2N2M3N]Ue3"}, "image_id": 257, "id": 3837}, {"iscrowd": 0, "category_id": 1, "bbox": [297.0, 311.0, 67.0, 60.0], "area": 1974, "segmentation": {"size": [512, 512], "counts": "]jd41n?2N2M3N1N3N2M2O2N2M3N1O200M2O2N2M3N10100O010000O010000O010000O01N2N2M2O2N2O01000O10O1000O0100mNaAd0^>[OcAf0]>XOeAg0\\>VOgAj0X>UOjAk0V>ROlAm0e>N2N1N3N2M2O2N2M3N1N3N2N1N3NTeY2"}, "image_id": 257, "id": 3838}, {"iscrowd": 0, "category_id": 1, "bbox": [186.0, 331.0, 65.0, 70.0], "area": 2133, "segmentation": {"size": [512, 512], "counts": "X[m21o?1N3M2N2N2N2N2O2M2D_OVAc0h>_OVAc0h>@UAb0j>_OTAc0j>[OfAe0Z>YOhAe0Z>YOhAe0Z>YOhAd0[>ZOfAe0\\>YOfAe0\\>YOfAe0l>N2N2N2N2N2M3N1O2N2N2NhSh2"}, "image_id": 257, "id": 3841}, {"iscrowd": 0, "category_id": 1, "bbox": [151.0, 368.0, 56.0, 64.0], "area": 1777, "segmentation": {"size": [512, 512], "counts": "mk[22n?1N3M2O2M2N3M2O1N3M2O2M2N3N1N3M2O1N3M210O0010O0010O010O001M2N3N1N2N3N0O0011N3M2N3N1N3M2POYAe0i>XOYAf0i>XOYAf0i>YOYAe0R?N3M2N3J`@Ha?6a@Ha?67M2OUSh4"}, "image_id": 257, "id": 3842}, {"iscrowd": 0, "category_id": 1, "bbox": [508.0, 
383.0, 4.0, 9.0], "area": 20, "segmentation": {"size": [512, 512], "counts": "U\\n71m?3M2N3PD"}, "image_id": 257, "id": 3843}, {"iscrowd": 0, "category_id": 1, "bbox": [238.0, 386.0, 67.0, 57.0], "area": 2028, "segmentation": {"size": [512, 512], "counts": "g\\g31n?2N2N2O1N2N3M2N2N2N2N2O1N3M2N2N2N2N2N2N2O2M2N2N2O1000010O000000O1O1N1O1O0000000000011N2N2N2N2N001KUATOk>m0VAROk>l06O100N2N2N3N1N2N2N2N2N2N3M2N2OUSW3"}, "image_id": 257, "id": 3844}, {"iscrowd": 0, "category_id": 1, "bbox": [114.0, 395.0, 60.0, 65.0], "area": 2072, "segmentation": {"size": [512, 512], "counts": "Y]i11n?2N3N1N2N2N3N1N2N3M2O1N3M2N2O2FRO]An0c>TO[Al0e>80O02N2N3M2O1N3M2N2O2M2N200010N1O1N3M2N100O001O2O2M2N2N3M2O1N2N3M2UORA=OAP?0SA=OAX?>7M2N2N3M2O1N_bX5"}, "image_id": 257, "id": 3845}, {"iscrowd": 0, "category_id": 1, "bbox": [503.0, 396.0, 9.0, 22.0], "area": 105, "segmentation": {"size": [512, 512], "counts": "mlk71m?3M2N3M2M4M2N3M2dC"}, "image_id": 257, "id": 3846}, {"iscrowd": 0, "category_id": 1, "bbox": [224.0, 399.0, 9.0, 9.0], "area": 47, "segmentation": {"size": [512, 512], "counts": "c\\`31o?2M2N2N1O01O2N2N`S[4"}, "image_id": 257, "id": 3847}, {"iscrowd": 0, "category_id": 1, "bbox": [195.0, 420.0, 69.0, 57.0], "area": 2079, "segmentation": {"size": [512, 512], "counts": "emQ31o?2M2N3M2O1N3M2N2O2M2N3M2O1N3M2O1N3M2N2O2M201O01O01O000N3N0O1O0001O01O01O01O0002N2O2M2N2N3N1N3GUAXOm>f0UAYOl>e080O0000101N2OO2N2N3N1N3M2O1N3M2N2O2M2NTbk3"}, "image_id": 257, "id": 3848}, {"iscrowd": 0, "category_id": 1, "bbox": [476.0, 422.0, 36.0, 59.0], "area": 1438, "segmentation": {"size": [512, 512], "counts": "a^^72l?3L3N3M2N2BBXAa0e>BXA`0f>BXAa0e>BXA`0f>BXAa0e>>N3M2M4N110O010O010O010O01M201O010O01O010O010O010_B"}, "image_id": 257, "id": 3849}, {"iscrowd": 0, "category_id": 1, "bbox": [74.0, 430.0, 59.0, 66.0], "area": 2046, "segmentation": {"size": [512, 512], "counts": "\\^U11n?2N3N1N2N3N1N3M2N2O2M2N3N1EUO^Am0`>VO]Aj0c>XO[Ah0f>YOYAf0g>;N3M2O1N3M2O2M2N2N3N1N3O0000N3M2O2M2N2N01O01O2O2M2N2N3N1N3M2QOUAg0l>WOVAb0M_OP?MUAb0M^OW?`07M2O1N3M2N3N1N[Qm5"}, "image_id": 257, "id": 3850}, {"iscrowd": 0, "category_id": 1, "bbox": [152.0, 453.0, 66.0, 58.0], "area": 2044, "segmentation": {"size": [512, 512], "counts": "h^\\22m?3M2O1N3M2O1N3M2N3N1N2N3M2O1N3M2O2M2N2N3N1N2N3M2O2M2001N1N2N3N1N3M2N10O0000010O000101N3M2N2O0MSAROm>n03O01002M2O2M2N2O2M2N2N3N1N3M2N2O2Mo`b4"}, "image_id": 257, "id": 3851}, {"iscrowd": 0, "category_id": 1, "bbox": [39.0, 461.0, 61.0, 51.0], "area": 1828, "segmentation": {"size": [512, 512], "counts": "doc02m?2N2XOKcA7Z>KdA7Z>KdA8Z>JdA7Z>KdA8Y>KdA6\\>KcA5\\>MbA4]>O`A2_>0_A1a>1]AOb>h0O100O1O1O100O1O1001O2N1O1O2N1O2N1O1O2N1O2N1O2N1O1O2N1O1O00O1O100O10O2O2M2N3N1N3M2O1N3M2N3N1N\\`]6"}, "image_id": 257, "id": 3852}, {"iscrowd": 0, "category_id": 1, "bbox": [145.0, 461.0, 8.0, 8.0], "area": 33, "segmentation": {"size": [512, 512], "counts": "`nX21n?3N1N2N03N1N2N`Qc5"}, "image_id": 257, "id": 3853}, {"iscrowd": 0, "category_id": 1, "bbox": [441.0, 472.0, 68.0, 40.0], "area": 1372, "segmentation": {"size": [512, 512], "counts": "nol62m?1O1O1O1O1O1O1O1AGRA0J:S?HQA=n>EPAFo@;P?Gn@:Q?;O1N2O1O1O1O1O1O1O11O1O1O1O1O1O1O001O1O1OO1O1O11O1O1O1O1O1O1O1O1Bi@1W?Nj@2W?Lk@3V?Kl@4U?Jm@5T?Ho@7^?O1O1O1O1O1O001OO1O1O1O11O1O1O1OQP1"}, "image_id": 257, "id": 3854}, {"iscrowd": 0, "category_id": 1, "bbox": [106.0, 487.0, 64.0, 25.0], "area": 944, "segmentation": {"size": [512, 512], "counts": "i_e11o?2M2N2N3N0O1O100O1O100O1O1O100O1O100O1O1O100O1O100O1002N1O2N1O1O2N1O1O00O1O100O1O1O1001O2N1O1O00O100O1001O2N1O1O2N1O2N1O2N1O1OQ`Z5"}, "image_id": 257, "id": 
3855}, {"iscrowd": 0, "category_id": 1, "bbox": [47.0, 0.0, 39.0, 16.0], "area": 298, "segmentation": {"size": [512, 512], "counts": "P`g01o?1O2NO100O1002N1O2N1O2N1O1O2N1O1OO100O1O100O1O100O1O1O100O1O100O1O100O1O100OQPe6"}, "image_id": 258, "id": 3856}, {"iscrowd": 0, "category_id": 1, "bbox": [65.0, 0.0, 65.0, 64.0], "area": 2119, "segmentation": {"size": [512, 512], "counts": "g`P13l?2O1N3M2N5L1N2N3M2O2M2N2O20O0N210O0001M2N3M2O1N3FiNfAY1X>jNeAX1Z>iNeAY1X>9N10O01O00010O0000010OEmAjNU>T1mAjNU>U1lAiNV>W1kAgNW>X190010M2O1N3M2N2O2M2N3M2O1N3M2N2O2M2N2O2M2NPon5"}, "image_id": 258, "id": 3857}, {"iscrowd": 0, "category_id": 1, "bbox": [111.0, 0.0, 61.0, 32.0], "area": 1068, "segmentation": {"size": [512, 512], "counts": "P`g11o?2N1O2N1O1O2N1O2N1O1O2N1O2N1O1O2N1O2N1O2N1O1OO100O1O100O1O1O100O1O100O1O1O100O1O100Md@A]??2O1O10000O100O1O100O1O1O100O1O2O1NooY5"}, "image_id": 258, "id": 3858}, {"iscrowd": 0, "category_id": 1, "bbox": [441.0, 0.0, 71.0, 33.0], "area": 1448, "segmentation": {"size": [512, 512], "counts": "P`l61o?1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1OO1N200001O1O1O1O1O1O1O1O1O1O001O1O1O1O1O1O1O1O1OO1O1O1N2O1O1O1O1O1O1O1001O001O1O1O1O1O1O1O00O1O1O1O1O1O2M2O0"}, "image_id": 258, "id": 3859}, {"iscrowd": 0, "category_id": 1, "bbox": [394.0, 5.0, 69.0, 84.0], "area": 2818, "segmentation": {"size": [512, 512], "counts": "SQU61n?2O2M2N2O2M2N3M2O2M2N2N3N1N2N010O2IROYAP1e>ROYAP1e>8N1N3M2O1N3M2N2O2M2N3M2O1N3M21O01O010O01O01O01N1N2O2M2N3N1N2_NkAT1X>jNiAT1Y>jNjAT1W>jNkAT1X>jNjAS1c>N2O2M2N2O2M2N3]Og@:[?Eg@9Z?Eh@9a?N3M2O2Mk]h0"}, "image_id": 258, "id": 3860}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 42.0, 28.0, 72.0], "area": 1200, "segmentation": {"size": [512, 512], "counts": "Z1j1W>010O0010O0010O00010OO2M2N2O2M2N2N3WO\\A2e>M]A0f>M\\A1f>M]A1d>N]A0f>M\\A1f>M]A0f>M\\A2e>M]A0e>N]A0U\\b7"}, "image_id": 258, "id": 3861}, {"iscrowd": 0, "category_id": 1, "bbox": [345.0, 45.0, 68.0, 83.0], "area": 2723, "segmentation": {"size": [512, 512], "counts": "Zb\\52m?2N2O2M2N3M2O1N3M2O2M2N3M2O1N001JQO[AP1c>RO[AP1c>9N1N3M2O1N3M2N3N1N2N3N1N3M20010O0010O00010O001M2O1N3M2N3N1N2bNhAQ1[>lNhAR1Y>lNiAR1Z>kNhAS1Z>lNhAQ1e>O2M2N2O2M2N3M2_Oc@=b?M2N3N1N2N3Me\\a1"}, "image_id": 258, "id": 3862}, {"iscrowd": 0, "category_id": 1, "bbox": [314.0, 84.0, 63.0, 79.0], "area": 2563, "segmentation": {"size": [512, 512], "counts": "`Sm41o?1N3M2^@KV?7h@KW?7g@K=LV>;[AK>KU>g0hA[OV>g0hA\\OV>e0hA]OV>f0hA[OP>GmAP11\\OP>EnAP10]OP>EnAQ10[OP>`1N1N2N1O01O0001O01O0001O01O0001O03M2N2O2M2N3M2gNgAi0Z>UOhAi0[>TOgAj0[>UOgAj0Y>TOiAl0X>QOjAo0V>oNmAP1b>N0001O03M2O1N3M2N2O2M2N2N3N1N2N3M2O1Nj[S2"}, "image_id": 258, "id": 3863}, {"iscrowd": 0, "category_id": 1, "bbox": [449.0, 86.0, 63.0, 69.0], "area": 2674, "segmentation": {"size": [512, 512], "counts": "WcP71n?3N1N3M2O1N3M2N3N1N2N3N1N3M2N2O2M2N3ZAiN`>W1^AkNc>Z1O010O00010O0N3M2N2O2M2N3NO0001O01O00010O0001O01O00010O2N2O2M2N3M2O01N1O01O01O01O0001XM"}, "image_id": 258, "id": 3864}, {"iscrowd": 0, "category_id": 1, "bbox": [397.0, 116.0, 77.0, 72.0], "area": 2782, "segmentation": {"size": [512, 512], "counts": "RdV62n?1N2N3N1N3M2O2M2N3M2O1N3M2O2M2N3^AnNR>S1lAPOQ>R1nAoNP>T1mAoNQ>R1mAPOQ>S1lAPOR>`10010O010O001M2O0O0011N3M2O2M2N2O2M2N3M2O2M2N10O01O01O02N2O2O00010O0O1OO0001O01O02N3N1N3M2O2M2N2N3N1K_@Hd?55O2M2NPkb0"}, "image_id": 258, "id": 3865}, {"iscrowd": 0, "category_id": 1, "bbox": [250.0, 118.0, 74.0, 70.0], "area": 2607, "segmentation": {"size": [512, 512], "counts": 
"[Tm31n?3M2N3N1N2N3N1N3M2N2O2M2N2N3N1N3M2O1O20O00O2M2O2M2N2N10O0001O01O02N3N1N3M2O11O0O1N3M2O1N3M210O01O01O0aNcA[1a>0001M2N3M2O1N3M2N3N1N2N3N1N2N3M2O2M2N2O2M2Nfjm2"}, "image_id": 258, "id": 3866}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 120.0, 2.0, 3.0], "area": 4, "segmentation": {"size": [512, 512], "counts": "h33o?NVln7"}, "image_id": 258, "id": 3867}, {"iscrowd": 0, "category_id": 1, "bbox": [387.0, 130.0, 14.0, 16.0], "area": 118, "segmentation": {"size": [512, 512], "counts": "VdQ62m?3N1N3M2N2O2O01M2N3N1N2N3Nd[g1"}, "image_id": 258, "id": 3868}, {"iscrowd": 0, "category_id": 1, "bbox": [199.0, 155.0, 73.0, 75.0], "area": 2814, "segmentation": {"size": [512, 512], "counts": "ZfS32m?2O2M2N3HFf@;X?Gf@UOmAj0S>XOmAf0T>\\OlAb0S>@kAc0R>@lAa0R>AlAb0R>@kAb0S>@lA`0S>BkA?U>e000010O00010O0001O012N110O010O01O010O01O01O0XNlAb1V>\\NlAa1[>N2O1N001O3N1N3M2O2M2N3N1N2N3N1N3M2@d@9c?O2M2N3N1N^ig3"}, "image_id": 258, "id": 3869}, {"iscrowd": 0, "category_id": 1, "bbox": [362.0, 155.0, 62.0, 76.0], "area": 2395, "segmentation": {"size": [512, 512], "counts": "[Ve51n?2N3N1N2N3M2\\OCcA?[>BcA`0[>CbA?\\>CcA?Z>DcA>\\>CbA?S>WOnAUOnA>M>S>WOnAVOoA^1o=;O2M2N2N010O00010O000002O2M2N2O2M2N2N3N1N3M2EaAROa>n0`APOb>o0^AoNd>Q1]AlNf>R15N2N10O01O3N1N3M2N2O2M2N3M2O1N3M2O1Nni[1"}, "image_id": 258, "id": 3870}, {"iscrowd": 0, "category_id": 1, "bbox": [292.0, 192.0, 78.0, 73.0], "area": 2936, "segmentation": {"size": [512, 512], "counts": "_Vb42n?1N3M2O1N3M2N3N1N2N3N1N3M2N2O2M2N3_AkNS>V1lAlNQ>V1mAlNQ>W1lAlNQ>V1mAlNS>T1lAmNT>a10O01O01O01N1N2N3NO3N1N3M2O1N3M2N3N1N2N3N0O1O0001O01O0001O3N1N2N3NO002O1N3M2N3N1N2N3M2O2M2N2O2M2N3K4O1N3M2O1KX@O`XW2"}, "image_id": 258, "id": 3871}, {"iscrowd": 0, "category_id": 1, "bbox": [273.0, 206.0, 7.0, 8.0], "area": 35, "segmentation": {"size": [512, 512], "counts": "`fX42m?3M2O11N1O2M^ic3"}, "image_id": 258, "id": 3872}, {"iscrowd": 0, "category_id": 1, "bbox": [254.0, 218.0, 70.0, 86.0], "area": 2926, "segmentation": {"size": [512, 512], "counts": "lXo31n?2N2N3N1^NHYC;em0XAQOi>n040101N3M2N3N1N2N3N1N3M2N2O2M2N3N`gm2"}, "image_id": 258, "id": 3873}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 235.0, 65.0, 80.0], "area": 2558, "segmentation": {"size": [512, 512], "counts": "Xh]11n?2O2M2N2N3N1N2N3M2O1N3M2N2O2M1O1JQOZAo0f>SOYAm0f>UOXAn0e>8O1N3M2N2O2M2N2N3N1N2N3O010O00010O00010N1N2N20N2N3M2O1N3M2N2nNiA9Y>DjA9X>FiA8Y>ZOcA66>Z>ZOaA68>]>@eA>^>@cA>_>@dA>P?N2N3M2O1N3M2Nhfa5"}, "image_id": 258, "id": 3874}, {"iscrowd": 0, "category_id": 1, "bbox": [478.0, 244.0, 34.0, 68.0], "area": 1118, "segmentation": {"size": [512, 512], "counts": "`X_71o?2M2N2O2M2N3M2O1N3M2N2O2M2N010O00001KUOUAl0j>7M2N2O2M2_AdN[>^1cAdN[>c1N1N2N3M2O2M2N2[H"}, "image_id": 258, "id": 3875}, {"iscrowd": 0, "category_id": 1, "bbox": [50.0, 273.0, 57.0, 65.0], "area": 2009, "segmentation": {"size": [512, 512], "counts": "TYi02m?3N1N3M2N2O2M2N2N3N1N2N3M2O2M2N2O2M20010O0N3M2N2O2M2N2N3N1N2N3N1N0011N2N3N1N2N3M2O1N3M2N3N1N2YOXA3k>JWA5j>IXA5j>IYA4j>JWA4k>JXA4i>JYA4[?N3M2OPVZ6"}, "image_id": 258, "id": 3876}, {"iscrowd": 0, "category_id": 1, "bbox": [432.0, 277.0, 75.0, 96.0], "area": 3349, "segmentation": {"size": [512, 512], "counts": "lYh63l?2N3N1N3M2N3N1N2N3N1N3M2N2OO01O00010O0CSOhAP1U>ROjAo0T>SOjAo0T>TOiAo0U>ROiAP1U>ROjAo0T>a0N1N2N3M2O1N3M2O1N3O00010O00010O00010O01O01O01O0N3M2O1N3\\NVBm0l=QOVBm0l=ROVBl0l=QOVBm0l=QOVBm0m=POVBm0l=ROUBm0l=QOVBm0a>M2N3N1N2N3M2_Oc@=a?N3M2N2O2M2NPU2"}, "image_id": 258, "id": 3877}, {"iscrowd": 0, "category_id": 1, "bbox": [6.0, 297.0, 69.0, 65.0], "area": 2310, "segmentation": {"size": [512, 512], 
"counts": "PZ32m?2N2O2M2N3N1N2N3M2O2M2N2N3N1N2N3N1N20N2OO1O3M2O2M2N2O2M2N3M2O1N1O01O00010O2N2O2M2N3M20010O010O0iN_An0a>PObAP1^>mNdAS1\\>lNeAS1f>M2N3N1N2N3N1N3M2N2O2M2N3N1N2N3M2OSUj6"}, "image_id": 258, "id": 3878}, {"iscrowd": 0, "category_id": 1, "bbox": [137.0, 304.0, 74.0, 79.0], "area": 2970, "segmentation": {"size": [512, 512], "counts": "SjT22n?2M2N2N3N1N3M2O1N3M2N2O2M2N2N3N1N3M2O110O01O01O0O1N3M2O2M2N2N3N1N2N3M2O1N3M2O2M2N2N3NO2N2O1N3M2O1N3M2TOPBER>9QBEQ>9PBER>9PBEQ>:RBCo==RBBm=>VB_Oj=a0XB]Oh=c0ZB[Og=e0[BYOd=g0^BWOd=g0^BWOe=f0l0N2N3M2N3N1N2N3M2O1NgUf4"}, "image_id": 258, "id": 3879}, {"iscrowd": 0, "category_id": 1, "bbox": [386.0, 333.0, 88.0, 87.0], "area": 3396, "segmentation": {"size": [512, 512], "counts": "iZQ61n?3N1N2N3M2O1N3M2O1N3M2N2O2M2N2O2M2O1010O00010O0010O00001N1N2N3N0O001O01O2N2O1N3M20hAmNc=R1\\BPOc=OkAn0?UOj=k0TBXOj=i0UBXOi=k0TBWOj=k0TBXOj=j0SBXOk=j0TBXOi=j0UBXOi=a1O1N00010O000010O0000101N3M2O2M2N2N3N1gNhAg0[>WOfAg0\\>WOgAg0Z>WOhAg0[>WOfAg0\\>WOgAf0[>XOgAg0k>M2N2O2M2N3M2O1N3M2N3Nfcb0"}, "image_id": 258, "id": 3880}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 334.0, 26.0, 70.0], "area": 1043, "segmentation": {"size": [512, 512], "counts": "^:V2j=O1O2O2M2N2N3N1N3M2O1N3M2SO_A8d>E^A:c>D_A:c>E_A8d>E^A:c>D`A9c>E^A9d>E^A9d>E_A9U?N3M2O1NQdb7"}, "image_id": 258, "id": 3881}, {"iscrowd": 0, "category_id": 1, "bbox": [104.0, 344.0, 70.0, 66.0], "area": 2330, "segmentation": {"size": [512, 512], "counts": "U[d13m?1N3M2O1N3M2N3N1N2N3N1010OO1N3TATO`>m0^AUO`>n0^ASO`>o0^ATO_>n0_ATO`>X1M2N2O2M2N3N1N2O2O01N1O2M2N3M2O1N3M01O01O012M2N3M201O0N2N3M2O2M2N2O2M2N11N2N02N2O2M2N3M2O1N3M2O2M2NncX5"}, "image_id": 258, "id": 3882}, {"iscrowd": 0, "category_id": 1, "bbox": [346.0, 358.0, 78.0, 89.0], "area": 3165, "segmentation": {"size": [512, 512], "counts": "o[]52n?2M2N2O2M2N2N3N1N2I^Om@e0P?]On@e0P?9L3N1N2N3M2O1N2N1O2O1N3M2N2O2M210O00010O00010O00O2N1N2N3N1N3M21O01O01O01O01O01aNbAY1^>eNdA\\1a>O00010ON100O000010O2N2N3N1N2N3M2O2M2N2Bj@OY?Ni@1X?Nj@OX?Oj@OY?Ni@0Y?Nj@0SR\\1"}, "image_id": 258, "id": 3883}, {"iscrowd": 0, "category_id": 1, "bbox": [61.0, 376.0, 67.0, 66.0], "area": 2302, "segmentation": {"size": [512, 512], "counts": "Vln01o?1N3M2O1N3M2N2O2M2N3M2O1O2O01O01M2O2SATOa>n0]ATOa>n0^ASOa>X1M2N2N3N1N3M2N2O2M2000O2N1N2N3N1N3M2N01O01O1O3N1N3M200010OO1N3N1N3M2O1N3M2Jj@_OX??j@@X?=7O2M2N2N3N1Nobo5"}, "image_id": 258, "id": 3884}, {"iscrowd": 0, "category_id": 1, "bbox": [501.0, 385.0, 11.0, 22.0], "area": 126, "segmentation": {"size": [512, 512], "counts": "Xlj71n?2O2M2N2N3N1N3M2N2O2mC"}, "image_id": 258, "id": 3885}, {"iscrowd": 0, "category_id": 1, "bbox": [15.0, 406.0, 59.0, 70.0], "area": 2172, "segmentation": {"size": [512, 512], "counts": "Vm71o?2M2N2O2M2N3M2O2M2N2N3N1N3M2N2O2M2N3N1N2N3M2010O00010O010OO2M2O1N3M2N3N1N2N2OO012M2N2O2M2N2O2TOeAN]>JdAC0a0_>IcAD0a0_>JjA4W>JkA4X>IkA4W>JkA5V>JkA4X>IkA4W>JkA4S?N1Ndaj6"}, "image_id": 258, "id": 3886}, {"iscrowd": 0, "category_id": 1, "bbox": [457.0, 413.0, 55.0, 75.0], "area": 2230, "segmentation": {"size": [512, 512], "counts": "_mT71n?2N3N1N2N3M2O2M2N2N3N1N2N3N1N3M2N2O2M2N3N1N2N3M2gA]NS>e1jA^NU>g100010O00010O010O00010O0O2M2O1N3M2N3NO00010O0001O012M2O10YB"}, "image_id": 258, "id": 3887}, {"iscrowd": 0, "category_id": 1, "bbox": [6.0, 414.0, 8.0, 8.0], "area": 39, "segmentation": {"size": [512, 512], "counts": "P]32m?2O2M2O01N3M2Onbh7"}, "image_id": 258, "id": 3888}, {"iscrowd": 0, "category_id": 1, "bbox": [244.0, 430.0, 81.0, 82.0], "area": 3486, "segmentation": {"size": [512, 512], "counts": 
"l^j31o?1N3lNMVB5h=MmAND7\\>NmAME7]>MlAOD6^>MmANC8]>MmA>R>ClA`0Q>CmA>Q>DmA>Q>DmA?Q>i0N2N10O0000201O01O010O0O1N3N1N3M2N2O20O0O1O2N1O2N1O1PNTBj1n=SNUBk1P>0O1001O2N1O2N1O1^NfA\\1\\>aNfA^1`>N1O1jN]Am0e>QO\\An0e>PO^An0k>O2N1O1O2N1O00O1O1001O1O2N1O1O2N1O2N1O1N3M2O2M2N2OVPm2"}, "image_id": 258, "id": 3889}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 434.0, 47.0, 69.0], "area": 1872, "segmentation": {"size": [512, 512], "counts": "j=Z1f>1N2N3M2O2M2N2O2M2N3M20010O01N1N2O2M2N3N1N2N3M2O2M2N2O2M2N2N01OMSATOn>k06N1N3M2O1N3M2N3N1N2N3M2O2MTQX7"}, "image_id": 258, "id": 3890}, {"iscrowd": 0, "category_id": 1, "bbox": [412.0, 443.0, 81.0, 69.0], "area": 3013, "segmentation": {"size": [512, 512], "counts": "c^^62m?3N1N3M2O1N3M2N3N1N2N3N1N1O0010O2000O1O2M2N3M2O1N3M2O2M2N3M2hA]NQ>d1nA]NP>l1M100O1O100O1O1O11O2N1O1O2N1O2N1O1O2N1O2N1O2N1O1O2N1N3N1N3M2O1N00001N2O3M2O2M2N2O2M2N3M2O1N3M2O2MeP9"}, "image_id": 258, "id": 3891}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 475.0, 71.0, 37.0], "area": 1558, "segmentation": {"size": [512, 512], "counts": "[om21n?3M2O1N3M2_@EZ?>c@D\\?a0N3M2O2M2N1O100O1O100O1O1O100O1001O2N1O1O2N1O2N1O1O2NO1O1001O1O1O2N1O2N1OO100O1O100O1O1O100O1O100O11O2N1O1O2N1O2N1O1N3M2O2M2N2NW`n3"}, "image_id": 258, "id": 3892}, {"iscrowd": 0, "category_id": 1, "bbox": [154.0, 506.0, 11.0, 6.0], "area": 37, "segmentation": {"size": [512, 512], "counts": "o_]21n?1O100O1O1O11O1O1O2NQP]5"}, "image_id": 258, "id": 3893}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 14.0, 22.0], "area": 244, "segmentation": {"size": [512, 512], "counts": "0f0Z?O1000000O1000000O4M4L4L4Kboh7"}, "image_id": 259, "id": 3894}, {"iscrowd": 0, "category_id": 1, "bbox": [35.0, 0.0, 89.0, 74.0], "area": 1226, "segmentation": {"size": [512, 512], "counts": "P`a01o?1O2N1O1O1O2N1O1O2N1O1O101O01O01O0001O01O00010O0000010O000010O0000010O0001O01O0001O01O01O0001O01O00010O0000010O0000O2N1O1O1O2N00000000000000000000000000000000000000000000000000PnQ6"}, "image_id": 259, "id": 3895}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 147.0, 5.0, 27.0], "area": 135, "segmentation": {"size": [512, 512], "counts": "c4k0U?0000000][m7"}, "image_id": 259, "id": 3896}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 399.0, 156.0, 113.0], "area": 8465, "segmentation": {"size": [512, 512], "counts": "_lNeAS1\\>kNfAT1Z>jNiAU1a>01O1O1O1O1O001O00O1O1N2O1O1O1O1N2O1O1O1O1O11O1O1O1O1O001O1O1O1O001O1O1O1O1O001O1O1O1O1O001O1O1O1O1O001O1O1O1O1O001O1O1O1O1O001O1O1O1O1O001O1O1O1O001O1O1OQ`a5"}, "image_id": 259, "id": 3897}, {"iscrowd": 0, "category_id": 1, "bbox": [501.0, 455.0, 11.0, 31.0], "area": 177, "segmentation": {"size": [512, 512], "counts": "Poj72l?2M4M2M3N3L3N3L3N2M4hA"}, "image_id": 260, "id": 3898}, {"iscrowd": 0, "category_id": 1, "bbox": [172.0, 0.0, 110.0, 102.0], "area": 4794, "segmentation": {"size": [512, 512], "counts": "kPf21n?2N2O1N3M2N2N2O2M2N2N2N3IXORAi0l>YORAi0m>5O00000IoN`AR1`>oN_AP1b>QO\\Ao0e>51002N1O1O1O2N1O1O1PB`N^=b1_B`N`=a1^BaNa=`1]BbNb=_1\\BdNb=^1\\BcNc=^1[BdNd=]1ZBeNe=m1N1O00O100O1O1O1O100O11O2NgN]B6b=I`B7_=IbB7]=HeB9Y=FjB9U=FmB:R=EPC;obe0XAYOj>e0XAYOj>e0;M2N2O1N3M2N2N2O2M2N2Nfmb3"}, "image_id": 264, "id": 3899}, {"iscrowd": 0, "category_id": 1, "bbox": [154.0, 167.0, 64.0, 65.0], "area": 2127, "segmentation": {"size": [512, 512], "counts": "RV]22m?2N2N3N1N2N3M2O1N2N3M2N2O2M2N2N2O2M2N2N3M2O1N2N3M2O1N3M2N00010O000002N2O1N2N3M2O1N3M2N2N2O2M2N2N3N1N2N2N3M2O1N3M2N2O1N3M2N2N3Ndib4"}, "image_id": 264, "id": 3900}, {"iscrowd": 0, "category_id": 1, "bbox": [331.0, 0.0, 7.0, 3.0], "area": 12, "segmentation": {"size": [512, 512], 
"counts": "P`U51o?001O00001ONRPg2"}, "image_id": 265, "id": 3901}, {"iscrowd": 0, "category_id": 1, "bbox": [387.0, 0.0, 24.0, 9.0], "area": 116, "segmentation": {"size": [512, 512], "counts": "P`Q61o?001O001O00001O001O00001O001O001O00001OO1N2N2MS`b1"}, "image_id": 265, "id": 3902}, {"iscrowd": 0, "category_id": 1, "bbox": [442.0, 0.0, 38.0, 32.0], "area": 783, "segmentation": {"size": [512, 512], "counts": "dPm62l?3M2M4M2M3N3L3N3N101O00001O001O001O00001O001O00O1N2N2M3N2N2M3N2N2M3N2N2M3NRP`0"}, "image_id": 265, "id": 3903}, {"iscrowd": 0, "category_id": 1, "bbox": [502.0, 9.0, 10.0, 52.0], "area": 520, "segmentation": {"size": [512, 512], "counts": "YPk7d1\\>00000000000000000G"}, "image_id": 265, "id": 3904}, {"iscrowd": 0, "category_id": 1, "bbox": [324.0, 260.0, 23.0, 31.0], "area": 372, "segmentation": {"size": [512, 512], "counts": "lXR53k?2N3M2M3N3M2M4M2N3M2N21M2N3M2N3L3N3M2N2N3L3NiWb2"}, "image_id": 265, "id": 3905}, {"iscrowd": 0, "category_id": 1, "bbox": [169.0, 265.0, 60.0, 128.0], "area": 2599, "segmentation": {"size": [512, 512], "counts": "`kd24j?2M4M2M3N3M2M4M2M3N3L3N3L3N2M4M2M4M2N2M4M]ORBXOk=h0XBYOe=g0]BYO`=g0dBXOZ=h0hBYOT=h0oBWOoAfAa0X>e0N2N2N00000001O00M]NjAd1U>3000000000HjAgNV>Y1lAeNP>MPB]12dNn=1nA[16cNk=b19000010O00001O1O1O1O2N2N2O1N3M2N2N2N2N2N2N2O2M2N2N2N2N2N2N2N2N3N1NWoX2"}, "image_id": 271, "id": 3916}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 14.0, 56.0, 56.0], "area": 1562, "segmentation": {"size": [512, 512], "counts": "Rao21n?2N2N2N2N2O1N3M2N2N2N2N2N2N2O2M2N2N2N2N2N2N201O000001N1N2000000001ON3M2N2O1N200000001O0O1N2N2N2YOi@b0\\?N2O1N2Ca@6f?N2N2NW^T4"}, "image_id": 271, "id": 3917}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 16.0, 6.0, 12.0], "area": 42, "segmentation": {"size": [512, 512], "counts": "`0Fo@Fo@=N1N2N2N2N2N2N3M2O1N1O00000000000010O00002N2N2N3M2N2O1N2N2N2N3M2N2N2N2O1N2N2N3M2N2N2N2O1N2N[]a4"}, "image_id": 271, "id": 3921}, {"iscrowd": 0, "category_id": 1, "bbox": [287.0, 65.0, 27.0, 27.0], "area": 359, "segmentation": {"size": [512, 512], "counts": "Zb_41n?2N2O2M2N2N2N2N3M2O1N2O10010OO1N2N2N3M2O1N2N2N3M2N2O^mR3"}, "image_id": 271, "id": 3922}, {"iscrowd": 0, "category_id": 1, "bbox": [246.0, 70.0, 52.0, 51.0], "area": 1374, "segmentation": {"size": [512, 512], "counts": "TSk31n?2N2N2N2N2N2N2N3E@QAb0n>_OPAc0n>_OQAb0m>:N2N2N2N2N2N2000N2N2O0O000000001O00000000002N2N2N2O1N2N2N2N2N2N2N2N2N3M2N2N2N2NXmZ3"}, "image_id": 271, "id": 3923}, {"iscrowd": 0, "category_id": 1, "bbox": [90.0, 86.0, 60.0, 56.0], "area": 1720, "segmentation": {"size": [512, 512], "counts": "]S]12m?2N2Y@Ka?6]@La?;N3M2N2N2N20000000000O2M2N2N2N2N2N2O1N2N2N0000001O00000001O0001O1O10O2N2N3M2N2N2N2N2N2N2M3N2N2N3M2N2N2N2N2N2O1Ndld5"}, "image_id": 271, "id": 3924}, {"iscrowd": 0, "category_id": 1, "bbox": [225.0, 90.0, 15.0, 16.0], "area": 120, "segmentation": {"size": [512, 512], "counts": "nb`32m?2N2N2N2O10001O0000O1N3M2N2NmlW4"}, "image_id": 271, "id": 3925}, {"iscrowd": 0, "category_id": 1, "bbox": [270.0, 113.0, 21.0, 20.0], "area": 181, "segmentation": {"size": [512, 512], "counts": "nSW42n?1N2N2N2N2N001O01O0000000001O02N2N2N2N2N[\\^3"}, "image_id": 271, "id": 3926}, {"iscrowd": 0, "category_id": 1, "bbox": [108.0, 114.0, 96.0, 84.0], "area": 2797, "segmentation": {"size": [512, 512], "counts": 
"RUf12m?2N2O1N2N2N2N1O0000000001O01O0000000000000001O01O0000000000000000010O00000000000`AC^==bBE\\=;dBHY=8gBJX=5hBMV=4iBNU=4iBNU=4iBNU=4iBNU=4jBMT=5jBMT=5jBMT=5jBNS=4kBNU=2iB0X=0\\BUO3m0a=NZBWO3m0c=2[B0e=V1000001ON2N3M2N2N2N2N2N2N2O1N3]NdA]1a>N2N3M2N2N2N2N2O1N2N2N3M2N2N2N2N2N2O1N2N3M2Nlji4"}, "image_id": 271, "id": 3927}, {"iscrowd": 0, "category_id": 1, "bbox": [183.0, 118.0, 37.0, 37.0], "area": 672, "segmentation": {"size": [512, 512], "counts": "Sdk21n?3M2N2N2N2N2N2N2N2O1N3M2N2N2O100000000010N1N2N2N2O1N2N2N2N3M2N2N2N2N2O1Ndka4"}, "image_id": 271, "id": 3928}, {"iscrowd": 0, "category_id": 1, "bbox": [383.0, 134.0, 104.0, 97.0], "area": 4080, "segmentation": {"size": [512, 512], "counts": "bfo51`?2k@0S?2k@0S?2k@0S?2k@0S?2k@0S?2l@OR?b0N1N2N2N2N2N2N2N3M2O1N2N10O2N2N2O1NGeNQBX1o=jNQBT1o=nNQBP1o=ROPBm0P>UOnAk0R>WOlAi0T>YOjAh0U>[OhAe0Y>`0000000000000010O000000002N2N3N1N2N2N0000001O0001O0000000001O0001O000000000001O01O3M2N1O001O01O0000002N2N2O1N2N2N2N2N3M2N2O1N2N2N2N3M2F_@2c?L_@2c?L_@2i?OnZ<"}, "image_id": 271, "id": 3929}, {"iscrowd": 0, "category_id": 1, "bbox": [259.0, 137.0, 21.0, 20.0], "area": 186, "segmentation": {"size": [512, 512], "counts": "fdQ42n?1N2N2N2N2N1O00001O01O000000003M2N2N2O1Nbkc3"}, "image_id": 271, "id": 3930}, {"iscrowd": 0, "category_id": 1, "bbox": [31.0, 142.0, 58.0, 56.0], "area": 1578, "segmentation": {"size": [512, 512], "counts": "]e?1o?1N2N2N2N2N2N2N2N3M2N2O1N2N2N2N2N2N2N3M2N2N2OO000000000001O00000001O001O2N2N2N2O1N3M2N2N2N2N2N2N2N2N2O2M2N2N2N2N2NlZc6"}, "image_id": 271, "id": 3931}, {"iscrowd": 0, "category_id": 1, "bbox": [265.0, 154.0, 80.0, 85.0], "area": 2484, "segmentation": {"size": [512, 512], "counts": "[eT43l?2O1N2N2N2N2N2N3M2N2O1N2N2N2N3M2N2N2N200000001O000001O00000001O000001O000001O00000001O000001O000001O00000001O000001O0O1O1N2N2N2N2N3M2N2N2O1N2N2N2N3M2N2N2O1N2NTYc2"}, "image_id": 271, "id": 3932}, {"iscrowd": 0, "category_id": 1, "bbox": [386.0, 155.0, 18.0, 18.0], "area": 163, "segmentation": {"size": [512, 512], "counts": "SUQ61o?1N2N2N2N2N2N2N3M11N2N2N2N2N2N2N3Mmje1"}, "image_id": 271, "id": 3933}, {"iscrowd": 0, "category_id": 1, "bbox": [49.0, 169.0, 59.0, 55.0], "area": 745, "segmentation": {"size": [512, 512], "counts": "hfh02m?2N2N2N2O1N2N1O00000000000010O0000000000000000010O000000000000000001O01O0000000000000001O01O0000000000003M2O1N2N2N2NajY6"}, "image_id": 271, "id": 3934}, {"iscrowd": 0, "category_id": 1, "bbox": [237.0, 185.0, 16.0, 17.0], "area": 136, "segmentation": {"size": [512, 512], "counts": "nef31o?2M2N2N2N2N3N1001O0N2N2N2N3N1NnYQ4"}, "image_id": 271, "id": 3935}, {"iscrowd": 0, "category_id": 1, "bbox": [168.0, 194.0, 37.0, 36.0], "area": 659, "segmentation": {"size": [512, 512], "counts": "gVd21n?2N2N2N3N1N2N2N2N2N2N2N2N2N1O01O0001O0000000002N2N2N2N2N2O1N3M2N2N2N2N2NaYi4"}, "image_id": 271, "id": 3936}, {"iscrowd": 0, "category_id": 1, "bbox": [143.0, 201.0, 21.0, 21.0], "area": 219, "segmentation": {"size": [512, 512], "counts": "afW21n?2O1N2N2N2N2N2N3M2O1000N2N2N2N3M2N2N2N2N]i]5"}, "image_id": 271, "id": 3937}, {"iscrowd": 0, "category_id": 1, "bbox": [90.0, 208.0, 60.0, 50.0], "area": 1547, "segmentation": {"size": [512, 512], "counts": "UW]11o?1N2Y@M^?5`@M^?5`@M^?b0QA\\Oo>f0QAXOQ?f06N2N2N2N2N0000002O1N2N2N3M2NPid5"}, "image_id": 271, "id": 3938}, {"iscrowd": 0, "category_id": 1, "bbox": [418.0, 218.0, 20.0, 20.0], "area": 187, "segmentation": {"size": [512, 512], "counts": "UWa62m?2N2N2N2N2O2M000000000011N2N3M2N2N2O1NohT1"}, "image_id": 271, "id": 3939}, {"iscrowd": 0, "category_id": 1, "bbox": [228.0, 223.0, 20.0, 20.0], 
"area": 178, "segmentation": {"size": [512, 512], "counts": "\\Wb32m?2N3M2N2N1O0001O000001O000002N2N3M2N2OkhS4"}, "image_id": 271, "id": 3940}, {"iscrowd": 0, "category_id": 1, "bbox": [231.0, 223.0, 79.0, 76.0], "area": 2981, "segmentation": {"size": [512, 512], "counts": "kgc32m?2N2N2N2N2N2O2M2N2N2N2N2N2N2N2N3N1N2N2N2N2N2N2N2N3M2O1N2N2N2N2000001O0001O00000000000O1N2N3M2O1N2N2O100001ON2N3M2N2O1N2N2N2N2N2N3M2N2N2O1N2N2N2N2N3M2N2N2N2O]gT3"}, "image_id": 271, "id": 3941}, {"iscrowd": 0, "category_id": 1, "bbox": [338.0, 225.0, 167.0, 177.0], "area": 7449, "segmentation": {"size": [512, 512], "counts": "iWY51n?2N2N2N2O1N2N3M2N2N2N2N2O1N3M2N2N2N2N2N2N2O2M2N2N2N2N20001O01O000000000001O0001O00000001O00000001O000001O00000001O0O1N2N2N2N2N20010O000000cAfNS>Z1kAhNU>X1iAjNW>V1gAlNY>^1010O000000000001O0001O0000000001O000001O000001O00000001O000001O0000000001O01O000000000000010O0000000000001O01O000000000001O01O0N2O1N2N2N2N2N2N3M2N2O1N2N2N2N2N2N3M2O1N2N2O10000O1N3M2N2N2NUT3"}, "image_id": 271, "id": 3942}, {"iscrowd": 0, "category_id": 1, "bbox": [124.0, 247.0, 56.0, 60.0], "area": 1583, "segmentation": {"size": [512, 512], "counts": "oXn12n?1N2N2N2N2N2N2N2N3M2N2N2N2O1DUO`Am0^>UO`Am0^>VO_Al0_>VO_Aj0a>:1O01O0000000000000000000001O011N2N2N2N2N2N2N2N3M2N2N2N2O1N2N2N2N2N2N2N3McgU5"}, "image_id": 271, "id": 3943}, {"iscrowd": 0, "category_id": 1, "bbox": [81.0, 248.0, 28.0, 29.0], "area": 438, "segmentation": {"size": [512, 512], "counts": "ThX12m?3M2N2N2N2O2M2N2N2N2N2N3NO00101N2N2N3M2N2N2O1N2N3M2N2NkWY6"}, "image_id": 271, "id": 3944}, {"iscrowd": 0, "category_id": 1, "bbox": [202.0, 274.0, 59.0, 55.0], "area": 1649, "segmentation": {"size": [512, 512], "counts": "aYU31n?2N2N2N2N3M2N2N2N2O1N2N2N2N2N3M2N2N2N2O1N2N1O0000001O00000001O0000002N2O1N2N2N2N0000002N2N2O1N2N2N2N2N3M2N2L]@He?64N2N2NlVm3"}, "image_id": 271, "id": 3945}, {"iscrowd": 0, "category_id": 1, "bbox": [248.0, 296.0, 147.0, 213.0], "area": 17133, "segmentation": {"size": [512, 512], "counts": "R]l32m?2`@NP?3n@OP?4n@MP?5n@No>4o@No>4o@No>4o@No>c0N2gMnNaET1]:nNaET1]:nNaET1]:oNaER1]:POaES1\\:oNbES1\\:oNbES1\\:oNbES1\\:oNbES1]:nNaET1]:nNaET1]:nNaET1]:nNaET1]:oN`ES1^:oNaES1\\:oNbES1\\:oNbES1\\:oNbES1\\:oNlDlN8W2j:oNlDlN8W2k:nNkDmN8W2k:nNkDmN8W2k:nNkDmN8W2k:oNjDlN9W2k:]OREf0k:\\OTEe0k:\\OSEf0m:ZOQEh0o:U20000N2N2O1N3M2N2N2N2N2N2N2N2N2O2M2N2N2N2N2N2N2N2N2N3N1N2000N2O1N2N2N3M2N2N2000000000000O2M2N2N2N2N2N2N2lKdEb2]:\\MeEb2]:\\MeEb2^:[MdEc2^:[MdEc2^:[MdEc2^:[MdEc2^:[MdEc2^:[MdEc2^:[MeEb2]:\\MeEb2]:]MdEb2]:\\MeEb2^:[MdEc2^:[MdEc2^:[MdEc2^:[MdEc2^:[MdEc2^:[MdEc2^:[MeEb2]:\\MeEb2]:\\MeEb2]:]MdEb2^:[MdEc2n;N2N2N2N2N3M2HWBVNk=2VB]11_Nk=2VB]11_Nk=2VB]1R>bNoA\\1\\>N2N3N1N2N2N2N2N2N2N2N2N3M2O1N2N2N2N2N2N2N2NXRj1"}, "image_id": 271, "id": 3946}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 342.0, 7.0, 14.0], "area": 53, "segmentation": {"size": [512, 512], "counts": "f:>c?N2N2N3M2N2NTUl7"}, "image_id": 271, "id": 3947}, {"iscrowd": 0, "category_id": 1, "bbox": [399.0, 349.0, 25.0, 24.0], "area": 186, "segmentation": {"size": [512, 512], "counts": "QkW61n?2N2N2N2O101ON2000000000000000010O00000O1N2N2O1N`d[1"}, "image_id": 271, "id": 3948}, {"iscrowd": 0, "category_id": 1, "bbox": [411.0, 349.0, 19.0, 20.0], "area": 157, "segmentation": {"size": [512, 512], "counts": "Rk]61n?2N2N2N2N201O00000001O00000N2N2O1N2NfdX1"}, "image_id": 271, "id": 3949}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 376.0, 52.0, 45.0], "area": 1264, "segmentation": {"size": [512, 512], "counts": 
"T<5j?2N3M2N2O1N2N2N2N21Oj@\\Oo>d0o@^OP?j0N2N2N3N1N2N001O02O100000000N2N2N2N3M2N2N2N0001O00000001O1O3M2N2N2N2O1N2N2N2N2NfcU7"}, "image_id": 271, "id": 3950}, {"iscrowd": 0, "category_id": 1, "bbox": [39.0, 410.0, 62.0, 68.0], "area": 1950, "segmentation": {"size": [512, 512], "counts": "\\nc01n?3N1N2N2N2N2N2N2WO@oAb0o=@oAb0o=@oAb0o=@oAb0o=@oAb0o=@oAb0o=@oAb0o=@oAb0o=@oA`0R>BkA>U>DiAf0001O2N2N2N2N0000000000000001O00000002N2N2N2N2N2N2N3M2N2N2N2N2N2O1N2N2N2N2N2N2N2N2N_R]6"}, "image_id": 271, "id": 3951}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 416.0, 8.0, 15.0], "area": 64, "segmentation": {"size": [512, 512], "counts": "P=?b?N2N2N2N2N2N2Njbk7"}, "image_id": 271, "id": 3952}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 435.0, 7.0, 14.0], "area": 61, "segmentation": {"size": [512, 512], "counts": "c=>c?N2O1N2N2N2NXRl7"}, "image_id": 271, "id": 3953}, {"iscrowd": 0, "category_id": 1, "bbox": [86.0, 451.0, 53.0, 56.0], "area": 1507, "segmentation": {"size": [512, 512], "counts": "i^[12m?2N3M2N2N2N2O1N2N2N2N2N2N3M2N2UAPOc>R1[APOc>R1[APOc>Y1N1N2N2N2N2N10O2N2N2N2N2N2O1N2N3M2N2N2N2N2N2N2N2N2O1N3M2N2N2N2N2N2N2NQQj5"}, "image_id": 271, "id": 3954}, {"iscrowd": 0, "category_id": 1, "bbox": [7.0, 467.0, 41.0, 40.0], "area": 821, "segmentation": {"size": [512, 512], "counts": "Zo32m?2N2N2N2N3M2N2N2N2O1N2N2N2N2N1O00000000000001O0000101N2N2N2N2N2N2N2N2N2N3M2N2O1No`W7"}, "image_id": 271, "id": 3955}, {"iscrowd": 0, "category_id": 1, "bbox": [464.0, 479.0, 20.0, 20.0], "area": 192, "segmentation": {"size": [512, 512], "counts": "U_X71n?2N2N2N2O2M2N200000000001N1N2N2O1N2N2Ne`="}, "image_id": 271, "id": 3956}, {"iscrowd": 0, "category_id": 1, "bbox": [146.0, 494.0, 29.0, 18.0], "area": 374, "segmentation": {"size": [512, 512], "counts": "h_Y28h?0O1000000000000000000IJc@7]?60000O1000000000000000000O14L9GUPX5"}, "image_id": 271, "id": 3957}, {"iscrowd": 0, "category_id": 1, "bbox": [481.0, 497.0, 20.0, 15.0], "area": 161, "segmentation": {"size": [512, 512], "counts": "fo`72m?2N2N2O1N2N3N10O1O1O2N1O1O1O1O1O1O1O1NSP5"}, "image_id": 271, "id": 3958}, {"iscrowd": 0, "category_id": 1, "bbox": [6.0, 499.0, 15.0, 13.0], "area": 104, "segmentation": {"size": [512, 512], "counts": "g_32m?2N2N2N2O100001N1O1O1O1O1O1NTPe7"}, "image_id": 271, "id": 3959}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 32.0, 6.0, 18.0], "area": 59, "segmentation": {"size": [512, 512], "counts": "P1b0_?L3M3N3L3MQol7"}, "image_id": 272, "id": 3960}, {"iscrowd": 0, "category_id": 1, "bbox": [76.0, 43.0, 39.0, 60.0], "area": 1204, "segmentation": {"size": [512, 512], "counts": "lRV14i?3N3L3N3L3M3N3L3N3L3M4M2M3N3L3M4M2M3N11M3M4M2M3N3L3N3L3M4M2M3N3L3M4M2M4M2M3Ma^V6"}, "image_id": 272, "id": 3961}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 106.0, 101.0, 134.0], "area": 5956, "segmentation": {"size": [512, 512], "counts": 
"Z3V1j>10O00010O00010O00010O010O00010O00010O00010O0010O0010O00010O00mAnNU=V1hBlNX=T1eBPO[=o0bBTO^=m0^BVOc=i0ZB[Oe=e0YB]Og=d0UB_OZ1YOU;R2gDRNX;n1eDUN[;k1cDWN^;i1^D[Na;e1^D\\Nb;e1^DZNc;e1]D\\Nb;d1^D\\Nc;d1]D\\Nb;d1^D\\Nb;d1^D\\Nc;b1_D^N`;_1aDcN_;[1`DhNa;T1_DPO`;m0`DVO`;g0aD[O`;b0_DB`;POcCW1m0Ld;2[D2d;NZD4g;KVD8j;IRD;m;EPD>n;DoC?QPOYAo0l>1O01O01O0ZAQO\\>n0aAUO_>l0]AWOc>T10O010O00010O00010O000iN\\AS1i>O00010OM4O000010O00010O0010O0010O00010O00010O0010O0001hNZAU1h>0010O0001M21O01O01O010O01O01O01O01O01O01O01O01O010O01O01gNZAV1i>0O00010O01O01O010O00010L30010O01O01O01O01O010O00010O00010O01O01O010O0004MO01O01O0O1O2O00010O010O00010O00010O00010O010O00O2L3M3M4SOPAf0W?L3N2010O00010O01O01O01O01O01O01O010O01O01O01O01O01O01O010O0001M2M4M2M3MlUa3"}, "image_id": 272, "id": 3967}, {"iscrowd": 0, "category_id": 1, "bbox": [469.0, 273.0, 43.0, 67.0], "area": 1757, "segmentation": {"size": [512, 512], "counts": "WiZ72l?3N1N3N2N2M2O2UAAQ>b0lA@S>a0lAAR>a0lAAQ>b0lAAR>a0lA@R>b0mA@R>a0kABU>>iADW>P1O1000O10O1000O01000O10O01M2O00O0100O01000O01000KdAfN\\>[1dG"}, "image_id": 272, "id": 3968}, {"iscrowd": 0, "category_id": 1, "bbox": [2.0, 292.0, 19.0, 20.0], "area": 210, "segmentation": {"size": [512, 512], "counts": "]Y12m?2N3M2N2N2N2N2N2N10O2N2N3M2N2N2N2N2NdVe7"}, "image_id": 272, "id": 3969}, {"iscrowd": 0, "category_id": 1, "bbox": [51.0, 306.0, 10.0, 10.0], "area": 48, "segmentation": {"size": [512, 512], "counts": "eii01n?2O1N2N3O00N2N2N2NZVQ7"}, "image_id": 272, "id": 3970}, {"iscrowd": 0, "category_id": 1, "bbox": [47.0, 317.0, 31.0, 32.0], "area": 512, "segmentation": {"size": [512, 512], "counts": "\\jg01n?3M2N2N2N2N2N2N2N2O1N2N3M2N2N1O02N2N2N3M2N2N2N2N2N2N2N2N2N2Neeh6"}, "image_id": 272, "id": 3971}, {"iscrowd": 0, "category_id": 1, "bbox": [401.0, 335.0, 80.0, 65.0], "area": 2569, "segmentation": {"size": [512, 512], "counts": "ZkX61m?3N1O2M3N2N1N3N2N2M2O2N2M3N101O10000O0100N1O2N2M3O01000O010000O010O1N2M2O2N2O1N101O1O1O0O2N2M3N1N3N2N2M2O2N2O1O0O2O1O1O1OO1O1jNPB;R>CPB:S>CPB;R>CoACPB:S>CPB;R>CPB;V>_OkA?m>N2N2M2O2N2M3N1O2M]T?"}, "image_id": 272, "id": 3972}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 341.0, 7.0, 14.0], "area": 54, "segmentation": {"size": [512, 512], "counts": "e:>c?N2N2N3M2N2OTUl7"}, "image_id": 272, "id": 3973}, {"iscrowd": 0, "category_id": 1, "bbox": [166.0, 354.0, 52.0, 52.0], "area": 1273, "segmentation": {"size": [512, 512], "counts": "d[c21n?2N2N2N2N2N2N2N2N2N2O1N2N2N2N3M2N2N2N2O100000000001O0000000000000N2N2N2N2N2N2N2N2N2N2N2O1N2N3M2N2N2N2Nncb4"}, "image_id": 272, "id": 3974}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 358.0, 32.0, 50.0], "area": 854, "segmentation": {"size": [512, 512], "counts": "V;^1c>00000000O1N2N2N2N2N2N2N1O2N2N2N2N2N000000002N2N2N2N2N2N2N2N2NUd_7"}, "image_id": 272, "id": 3975}, {"iscrowd": 0, "category_id": 1, "bbox": [503.0, 369.0, 9.0, 19.0], "area": 97, "segmentation": {"size": [512, 512], "counts": "lkk73l?1N3N2N1N3N2N2M2_D"}, "image_id": 272, "id": 3976}, {"iscrowd": 0, "category_id": 1, "bbox": [60.0, 379.0, 24.0, 23.0], "area": 282, "segmentation": {"size": [512, 512], "counts": "W\\n01n?2O2M2N2N2N2N2N2N2N001O01O001O2N2N3M2N2N2O2M2Nlce6"}, "image_id": 272, "id": 3977}, {"iscrowd": 0, "category_id": 1, "bbox": [125.0, 390.0, 33.0, 34.0], "area": 503, "segmentation": {"size": [512, 512], "counts": "`ln12m?2N2N2N2N2N2N2N2N2N200000001O00000000000000N2N2N2N2N2N2N2N2N2N2NUc`5"}, "image_id": 272, "id": 3978}, {"iscrowd": 0, "category_id": 1, "bbox": [360.0, 397.0, 98.0, 62.0], "area": 2795, "segmentation": {"size": [512, 512], "counts": 
"R]d51n?1N3N2N1N3N2M2O2M3N11000O001N2M2O2M3N1N3O1000O01000O10O10O10O10O10O10O10O10O10O10O10O10O10O1000O01000N2N1N3N2N101000O01000O01000N2M2O2M3N1N3N2M210O0O0O010O0102M3N1N3N2N2M3N1N3N2M3N2M3N1N3N2M3N2Mjbj0"}, "image_id": 272, "id": 3979}, {"iscrowd": 0, "category_id": 1, "bbox": [35.0, 410.0, 54.0, 49.0], "area": 1362, "segmentation": {"size": [512, 512], "counts": "dma02m?2N2N2O1N2N2KDc@?[?4N2N2N2N2N2N2N2N2N2N2N2N2N10O2N0000001O00001O2N2N3M2N2N2O1N2N1O0000002N2N2N2N2N2N2N2N3M2N2OdRc6"}, "image_id": 272, "id": 3980}, {"iscrowd": 0, "category_id": 1, "bbox": [476.0, 410.0, 36.0, 86.0], "area": 1934, "segmentation": {"size": [512, 512], "counts": "o^^71n?2N1N3N2FIh@8V?Jh@9V?Ih@9V?:M2@VOiAl0b=\\OYBK3k0b=[OZBL1l0b=[O[BK1k0c=\\OYBK3k0b=\\OYBK2l0d=GZB;e=P101O1N101O1N0O10O010O2O2N1N3N2N1N3N2NRC"}, "image_id": 272, "id": 3981}, {"iscrowd": 0, "category_id": 1, "bbox": [132.0, 436.0, 54.0, 57.0], "area": 1544, "segmentation": {"size": [512, 512], "counts": "c^R21n?2N2O1N2HId@9Z?Id@9Z?Id@9Z?8N2N2N2N3M2N2N20000O1N2N2N2N2N2N2N1O01O00000000002N2GbAnN_>P1cAnN_>P1cAnN_>P1:N2N2N2N2N2N2N2N3M2O1N2N2N2N2N2NgaR5"}, "image_id": 272, "id": 3982}, {"iscrowd": 0, "category_id": 1, "bbox": [63.0, 453.0, 72.0, 49.0], "area": 1576, "segmentation": {"size": [512, 512], "counts": "Qoo02m?2O1N2N3M2N2N2O1N2N3M2N2N2N2O1N3M000000010O001O2N1O1O01O00000LYAPOg>P140101N3M2N200010O000001O01OO1O2M2N2N1O00101N3M2N2N21O01O00N2N2N3ON2O2M2N2N2N2NjPl5"}, "image_id": 272, "id": 3983}, {"iscrowd": 0, "category_id": 1, "bbox": [136.0, 503.0, 18.0, 9.0], "area": 88, "segmentation": {"size": [512, 512], "counts": "o_T21n?1O1O100O1O1O1O100O11O1O1O2N1O1O1OQ`b5"}, "image_id": 272, "id": 3984}, {"iscrowd": 0, "category_id": 1, "bbox": [112.0, 506.0, 11.0, 6.0], "area": 36, "segmentation": {"size": [512, 512], "counts": "o_h11n?1O1O1O1O11O1O1O1O1OQPR6"}, "image_id": 272, "id": 3985}, {"iscrowd": 0, "category_id": 1, "bbox": [237.0, 201.0, 130.0, 104.0], "area": 6337, "segmentation": {"size": [512, 512], "counts": "ghf31m?3N1N3M2O2M3M2O2M3N1N5K8I4K3N1N3M2O2O10O01000O10O0H7O2N2O1O0GRNaBo1]=TNbBl1]=VNaBj1^=XNbBg1]=\\NaBc1`=^N`Ba1_=bN`B\\1a=a0O2N3N1N0010O0010O1O3N02N1N3M3N1N3J[BnMg=P25N30O0100NO02O1N2O1GlAdNU>[1mAcNU>\\1mAbNT>\\19O1N3N11000O01]AgN^>Y1_AjNa>Z10100OnN`Ad0e>VO]Aj0m>0O01WOQA`0n>_OSAa0n>ZOQAO4g0R?2M2O2M3M2O2M2N3N2M2O2M2N3N2O010O10O010O10O10O10O10O0N3N2M2N3N1UO]A7f>F\\A9f>D]A9e>F\\A8g>E\\A9f>E\\A8f>F\\A8X?N1N3N2MWXX2"}, "image_id": 273, "id": 3986}, {"iscrowd": 0, "category_id": 1, "bbox": [346.0, 0.0, 59.0, 39.0], "area": 1562, "segmentation": {"size": [512, 512], "counts": "PP]52n?4L4L4L4L4L4L4L5K4L00O10000O1000000O10000KVATOj>l050000O10000O1000000O1000000O1000000O1000000O1000000O10000O10004L4K5L3M4L]_e1"}, "image_id": 274, "id": 3987}, {"iscrowd": 0, "category_id": 1, "bbox": [406.0, 0.0, 49.0, 33.0], "area": 1180, "segmentation": {"size": [512, 512], "counts": "UP[64l?5J5L4L5K4K3NO1000O10O10O1000O10O10O1000000O1000000O1000000O10000O1000000O1000000O10004L4K5L3M4L]_l0"}, "image_id": 274, "id": 3988}, {"iscrowd": 0, "category_id": 1, "bbox": [385.0, 102.0, 12.0, 12.0], "area": 115, "segmentation": {"size": [512, 512], "counts": "XcP67h?2N2O001O0000O101N2N3Lh\\i1"}, "image_id": 274, "id": 3989}, {"iscrowd": 0, "category_id": 1, "bbox": [366.0, 107.0, 24.0, 23.0], "area": 286, "segmentation": {"size": [512, 512], "counts": "fSg51n?3M2N2N2O2M2N2N3NO01O000001O101N3M2N2O2M2N2N2N\\ll1"}, "image_id": 274, "id": 3990}, {"iscrowd": 0, "category_id": 1, "bbox": [163.0, 169.0, 88.0, 83.0], "area": 3504, "segmentation": {"size": [512, 512], 
"counts": "hfa21n?3M2O1N2N3M2N2O1N3M2N2O1N3M2N2O2M2N2N2N3N1N2N2N3N1N2N3M0010O00000010O00000010O00000010O000001O01O000001O01O0001O01O000001O101N3M2N2N3N1N2N3M2O1N3M2N2N3N1N2N3M2N2O2M2N2N3N1N2NgYR4"}, "image_id": 274, "id": 3991}, {"iscrowd": 0, "category_id": 1, "bbox": [242.0, 169.0, 55.0, 61.0], "area": 1457, "segmentation": {"size": [512, 512], "counts": "hUi32m?2N2N2N3N1N2N2N2N3N1N2N2N2N3N1N2N2N3O0001O0001O0001O01O000001O01O0000010OO1N2N2N3M2N200O2M2O1Kf@A\\?=5N3M2O1N2N2N]Y[3"}, "image_id": 274, "id": 3992}, {"iscrowd": 0, "category_id": 1, "bbox": [214.0, 386.0, 26.0, 24.0], "area": 251, "segmentation": {"size": [512, 512], "counts": "a\\[33m?1N2N2N2N3NO000000010O000000010O000000102M2N2N2N3NgcW4"}, "image_id": 274, "id": 3993}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 390.0, 20.0, 42.0], "area": 441, "segmentation": {"size": [512, 512], "counts": "VN2N3N1N3M2O2M2N2O2M2N3M2O2M2N2O2M2N3Nobe7"}, "image_id": 274, "id": 3994}, {"iscrowd": 0, "category_id": 1, "bbox": [201.0, 398.0, 58.0, 53.0], "area": 1183, "segmentation": {"size": [512, 512], "counts": "dmT31n?3N1N2N2N2N2N2N3N1N2N2N2N1O0001O0001O00000001O0001O00000001O0001O0000000001O01O0000000001O101N2N2N3M2N2N2N2O1N3M2NVSn3"}, "image_id": 274, "id": 3995}, {"iscrowd": 0, "category_id": 1, "bbox": [189.0, 409.0, 23.0, 21.0], "area": 221, "segmentation": {"size": [512, 512], "counts": "Vmn21n?3M2O1N2N2N2N01O0001O000001O01O01O2N2N2N2O2MRce4"}, "image_id": 274, "id": 3996}, {"iscrowd": 0, "category_id": 1, "bbox": [101.0, 0.0, 27.0, 14.0], "area": 199, "segmentation": {"size": [512, 512], "counts": "P`b11o?1O1O2N1O1O1O1O1O1O1O1O1OO1O1O1O1O1O1O100O1O1O1O1O1OQPP6"}, "image_id": 275, "id": 3997}, {"iscrowd": 0, "category_id": 1, "bbox": [146.0, 0.0, 12.0, 6.0], "area": 44, "segmentation": {"size": [512, 512], "counts": "QPY21n?2O1O1O1O00O1O100O1O1OQPa5"}, "image_id": 275, "id": 3998}, {"iscrowd": 0, "category_id": 1, "bbox": [171.0, 0.0, 5.0, 2.0], "area": 8, "segmentation": {"size": [512, 512], "counts": "P`e21o?1O0000OQPX5"}, "image_id": 275, "id": 3999}, {"iscrowd": 0, "category_id": 1, "bbox": [281.0, 0.0, 52.0, 34.0], "area": 988, "segmentation": {"size": [512, 512], "counts": "\\`\\43l?2N2N2N2O1N2N2N2N2K^Oi@d0V?5N1O1O1O1O1O1O1OO1O1O1O100O1O1O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O1O1O100O3M2Nn_i2"}, "image_id": 275, "id": 4000}, {"iscrowd": 0, "category_id": 1, "bbox": [409.0, 0.0, 46.0, 34.0], "area": 792, "segmentation": {"size": [512, 512], "counts": "a`\\61n?2O1N2N2N2N2N2N3M200000000N2N2N0000001O01O00000002N2N10O1O1O1O1O1O1O1O1O2N2N2O2M2N2N2N2N2Ng_l0"}, "image_id": 275, "id": 4001}, {"iscrowd": 0, "category_id": 1, "bbox": [334.0, 1.0, 30.0, 30.0], "area": 443, "segmentation": {"size": [512, 512], "counts": "_PW51n?2N2N2N2N2N2N2N2N2N3N1N2N2N2N02N2N2N2N3M2N2N2N2N2N2N2N2O1NaoY2"}, "image_id": 275, "id": 4002}, {"iscrowd": 0, "category_id": 1, "bbox": [491.0, 6.0, 21.0, 40.0], "area": 441, "segmentation": {"size": [512, 512], "counts": "i`e71n?2N2N2N2N2N2N2N2N2N3M2O1N2N2N2N2N2N2N2N0K"}, "image_id": 275, "id": 4003}, {"iscrowd": 0, "category_id": 1, "bbox": [144.0, 12.0, 53.0, 55.0], "area": 1591, "segmentation": {"size": [512, 512], "counts": "nPX22m?2N2d@Kj>8SAJk>8SAJk>8SAJl>7RAKl>7SAJk>8SAJk>8SAKj>g0N2N2N2N2O1N2N3M000010O3M2N2N2N2N2N2N2O2M1O0000000000011N2N2N2N2N2N2N3M2O1N2N2N2N2N2NP_m4"}, "image_id": 275, "id": 4004}, {"iscrowd": 0, "category_id": 1, "bbox": [245.0, 12.0, 38.0, 38.0], "area": 757, "segmentation": {"size": [512, 512], "counts": 
"k`j32m?2N3M2N2N2N2N2N2N2N2N2N2N2N2N2O1000000000O1N2N2N2N2O1N2N2N2N2N2N2N2N2N2N2NP_b3"}, "image_id": 275, "id": 4005}, {"iscrowd": 0, "category_id": 1, "bbox": [404.0, 21.0, 14.0, 17.0], "area": 131, "segmentation": {"size": [512, 512], "counts": "jPZ61n?2U@Od?8N2N2N2000N2N2N2N2N2O2MSo^1"}, "image_id": 275, "id": 4006}, {"iscrowd": 0, "category_id": 1, "bbox": [444.0, 21.0, 33.0, 31.0], "area": 512, "segmentation": {"size": [512, 512], "counts": "VQn61n?2N2N2N3N1N2N2N2N2N2N2N2N1O0001O000000101N2N2N2N2N3M2N2N2N2N2O1No^a0"}, "image_id": 275, "id": 4007}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 37.0, 30.0, 50.0], "area": 870, "segmentation": {"size": [512, 512], "counts": "U1V1k>00000000000001O000001OO0O000000002N2N2DQADQ?:QADQ?;PACR?;PACR?;;N3M2N2N2NVn`7"}, "image_id": 275, "id": 4008}, {"iscrowd": 0, "category_id": 1, "bbox": [333.0, 42.0, 57.0, 58.0], "area": 1537, "segmentation": {"size": [512, 512], "counts": "`bV52m?2N2N2N2N2N2N2JCf@`0W?6N2N2N2N2O1N2N2N2N000000000000010O0000000000000000000002N2N2N2O1N2N2N2N2N2N2N3M2N2N2N2N2N2N2O1NRnl1"}, "image_id": 275, "id": 4009}, {"iscrowd": 0, "category_id": 1, "bbox": [27.0, 56.0, 34.0, 37.0], "area": 643, "segmentation": {"size": [512, 512], "counts": "_b=2m?2N2N2N2IHc@:[?Hc@;Z?7N2N2N2N2N01O0000000000010O2N2N3M2N2N2N2N2N2N2O1N2N3Mj]Q7"}, "image_id": 275, "id": 4010}, {"iscrowd": 0, "category_id": 1, "bbox": [284.0, 64.0, 32.0, 32.0], "area": 530, "segmentation": {"size": [512, 512], "counts": "`R^42m?2N2N2N2N2N2N2N2N2O1N2N2N2N2N000002N2N2O1N2N2N2N2N2N2N2N2N2N2NcmQ3"}, "image_id": 275, "id": 4011}, {"iscrowd": 0, "category_id": 1, "bbox": [509.0, 70.0, 3.0, 6.0], "area": 12, "segmentation": {"size": [512, 512], "counts": "Xbn72m?2N2iM"}, "image_id": 275, "id": 4012}, {"iscrowd": 0, "category_id": 1, "bbox": [483.0, 73.0, 27.0, 27.0], "area": 329, "segmentation": {"size": [512, 512], "counts": "bba71n?2N2N2N2N2N2N2N2N2000000001O0000000O1N2N2N2N2N2N2N2NVm0"}, "image_id": 275, "id": 4013}, {"iscrowd": 0, "category_id": 1, "bbox": [205.0, 75.0, 51.0, 54.0], "area": 1315, "segmentation": {"size": [512, 512], "counts": "^cV33l?2N2N2N2N2N2N2O1N2N2N3M2N2N2N2FPOaAQ1^>RO_An0a>TO]Al0c>801O01O000000000000001O2O1N2N2N2N2N3M2N2N2N2N2O1N2N2N2N2N3M2N2NPmo3"}, "image_id": 275, "id": 4014}, {"iscrowd": 0, "category_id": 1, "bbox": [34.0, 84.0, 52.0, 56.0], "area": 1489, "segmentation": {"size": [512, 512], "counts": "^Sa01n?2N2N2N2N2N2N2N2N2N2N2N2N2O1N2N2N2N3M2N2N2N2N2O10000N2O1N1O00000003M2N2mN_Ad0c>ZO_Ad0c>ZO_Ad0c>ZO_Ad0c>ZO_Ad0c>ZO_Ad0P?N2N2O1N3M2N2N2N2N2N\\ld6"}, "image_id": 275, "id": 4015}, {"iscrowd": 0, "category_id": 1, "bbox": [263.0, 85.0, 24.0, 23.0], "area": 208, "segmentation": {"size": [512, 512], "counts": "VcS41o?1N2N2N2N2N1O00000000000000001O000002N2N2N2N2NW]`3"}, "image_id": 275, "id": 4016}, {"iscrowd": 0, "category_id": 1, "bbox": [403.0, 91.0, 24.0, 24.0], "area": 284, "segmentation": {"size": [512, 512], "counts": "ScY61n?2N2O2M2N2N2N2N2N3N100001O0O1N2N2N2N3N1N2N2N2Ng\\Z1"}, "image_id": 275, "id": 4017}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 92.0, 31.0, 34.0], "area": 469, "segmentation": {"size": [512, 512], "counts": "T34k?2N2N2N2N2N2O2M2N200001O0000000001O000000N3M2N2N2N2N2N2N2N2O]\\`7"}, "image_id": 275, "id": 4018}, {"iscrowd": 0, "category_id": 1, "bbox": [361.0, 93.0, 45.0, 46.0], "area": 1013, "segmentation": {"size": [512, 512], "counts": "\\cd51n?2O2M2N2N2N2N2N2N3N1N2N2N2N2N2N3N1N2O1000000010O00000N2N2O1N3M2N2N2N2N2N2O2M2N2N2N2N2N2NXld1"}, "image_id": 275, "id": 4019}, {"iscrowd": 0, "category_id": 1, "bbox": [453.0, 
98.0, 46.0, 36.0], "area": 1170, "segmentation": {"size": [512, 512], "counts": "^cR74l?6I7J6J2N00000O01000000000O01000000000O0LYOQAg0o>500O1000O100000O1000O100000O1000O1000006J6J6I7JV\\6"}, "image_id": 275, "id": 4020}, {"iscrowd": 0, "category_id": 1, "bbox": [413.0, 115.0, 50.0, 47.0], "area": 1168, "segmentation": {"size": [512, 512], "counts": "Td^62m?2N2N2N2N2]@E^?`0N2N2N2O1N2N2N2N3M2N2N2N20000000O1N2N3M2N2N20000000000000000N2N2N2N2N3M2N2N2N2N2N2O1N2N][h0"}, "image_id": 275, "id": 4021}, {"iscrowd": 0, "category_id": 1, "bbox": [245.0, 120.0, 53.0, 60.0], "area": 1501, "segmentation": {"size": [512, 512], "counts": "Pej31n?2N2N2N3M2N2N2N2O1N2N2N2N3]OUOlAm0S>TOkAn0S>TOkAn0S>TOkAn0S>TOkAl0U>VOjAj0U>YOhAg0X>[OfAe0Z>`0001O0000001O2O1N2N3M2N2N2N2N2N2N2O1N3M2N2N2N2N2N2N2O2M2N2N2N^kZ3"}, "image_id": 275, "id": 4022}, {"iscrowd": 0, "category_id": 1, "bbox": [329.0, 120.0, 46.0, 47.0], "area": 1038, "segmentation": {"size": [512, 512], "counts": "XdT51n?3M2O1N2N2N2N2N2N2N3M2N2O1N2N2N2N2N201O00000001O0000000O1N2N2N2N3M2N2N2N2O1N2N2N2N3M2N2N2N[[T2"}, "image_id": 275, "id": 4023}, {"iscrowd": 0, "category_id": 1, "bbox": [387.0, 131.0, 44.0, 55.0], "area": 1206, "segmentation": {"size": [512, 512], "counts": "jdQ62m?2N2N2N3M2O1N2N2N2N2N2N3EVO[Al0c>UO\\Am0b>UO\\Am0b>UO]Al0c>TO[An0e>7OhN]AT1c>jN_AV1f>1O00000N2N2N20O1N2N2N3M2N2N2N2N2Al@0U?Nm@0U?Nm@0U?Nn@OU?Ol@OV?Ol@OV?O`[X1"}, "image_id": 275, "id": 4024}, {"iscrowd": 0, "category_id": 1, "bbox": [54.0, 141.0, 63.0, 65.0], "area": 2020, "segmentation": {"size": [512, 512], "counts": "oUk01o?1N2N3M2N2N2N2N2N2N2N2N1O0001O0001O2N2@VOgAm0T>WOjAk0R>XOiAJLP1W>YOjAn0T>?2O0O1O2NO12N1O1O2N1O1O2NN2N3M2001O2N1O1O2N1O1O2N1O1O2N1O1M4L3M3N3L3M4L3M3M4L^[U6"}, "image_id": 275, "id": 4025}, {"iscrowd": 0, "category_id": 1, "bbox": [448.0, 152.0, 42.0, 37.0], "area": 723, "segmentation": {"size": [512, 512], "counts": "bUP73l?2N2N2N2N2N2O2M1O001O00000001O0002N2N2N2N0001O000001O000002N2N2N3M2O1N2N2N2N2N3M2Nkj:"}, "image_id": 275, "id": 4026}, {"iscrowd": 0, "category_id": 1, "bbox": [321.0, 158.0, 31.0, 31.0], "area": 496, "segmentation": {"size": [512, 512], "counts": "]eP51n?2N2N2N2N2N3M2N2N2N2N2O1N2N2N0001O2N2N2N2N3M2N2N2O1N2N2N2N2Nej_2"}, "image_id": 275, "id": 4027}, {"iscrowd": 0, "category_id": 1, "bbox": [359.0, 163.0, 44.0, 49.0], "area": 1164, "segmentation": {"size": [512, 512], "counts": "cec52n?1N2N2N2N2N3M2N2N2N2N2O1N2N2N2N3M2N2O11O00000000N2N3M2000N2N2N2N2N3M2N2N2N2Ak@1V?Ml@1V?Ml@1V?Ml@1V?Nk@0W?Nk@0_if1"}, "image_id": 275, "id": 4028}, {"iscrowd": 0, "category_id": 1, "bbox": [25.0, 169.0, 29.0, 29.0], "area": 437, "segmentation": {"size": [512, 512], "counts": "fe<2m?2N2N2O2M2N2N2N2N2N3M2O1N1O0002N2N2N2N2O1N3M2N2N2N2N2N2OYjT7"}, "image_id": 275, "id": 4029}, {"iscrowd": 0, "category_id": 1, "bbox": [99.0, 173.0, 60.0, 60.0], "area": 1696, "segmentation": {"size": [512, 512], "counts": "dfa11n?2N2O1N2N3M2N2N2N2N2_O\\OcAf0[>\\OcAf0[>\\OcAf0[>]ObAe0\\>]OcAd0[>^OcAe0[>\\OcAf0[>a0N0000000000000002O2M2N2N2N2N2N1O001O2N2N2O1N2N2N2N2N2O10001O00000O1N2O1N2N2N2N2N2NeY`5"}, "image_id": 275, "id": 4030}, {"iscrowd": 0, "category_id": 1, "bbox": [279.0, 176.0, 45.0, 45.0], "area": 981, "segmentation": {"size": [512, 512], "counts": "Yf[42n?1N2N2N2N2N3M2N2N2O1N2N2N2N2N3M2N000001O0001O000000000002N2N2N2O1N2N2N3M2N2N2N2N2N2O1N2NPjm2"}, "image_id": 275, "id": 4031}, {"iscrowd": 0, "category_id": 1, "bbox": [331.0, 188.0, 45.0, 49.0], "area": 1202, "segmentation": {"size": [512, 512], "counts": 
"]fU51n?2N2N2N3M2N2N2N2O1N2N2N2N3M2N2N2N2O1N2O11O01O000O1N2N2N20N3N1N2N2N2N2N2N2@o@NT?On@OT?On@OT?On@0S?No@0S?On@OT?On@OT?OoiS2"}, "image_id": 275, "id": 4032}, {"iscrowd": 0, "category_id": 1, "bbox": [443.0, 192.0, 69.0, 89.0], "area": 2771, "segmentation": {"size": [512, 512], "counts": "hfm61o?1N2N2N2N2N2N3M2N2N2O1N2N2N2N2N3M2N2N2N2O1N2N2N3M2N2N2O1001O000001O0000000001O000001O00_NiAX1W>fNkAZ1U>dNmA\\1S>bNPB]1Z>0000001O000001O0000000N2N2N000000011N2N2NdH"}, "image_id": 275, "id": 4033}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 196.0, 5.0, 9.0], "area": 23, "segmentation": {"size": [512, 512], "counts": "T69h?N3M2N2OgYm7"}, "image_id": 275, "id": 4034}, {"iscrowd": 0, "category_id": 1, "bbox": [409.0, 205.0, 26.0, 26.0], "area": 338, "segmentation": {"size": [512, 512], "counts": "hf\\61n?2N2N2N3M2N2N2N2N2N2N2O1001M2N2N2N2N2N2N2N2O1N2N2NVYV1"}, "image_id": 275, "id": 4035}, {"iscrowd": 0, "category_id": 1, "bbox": [304.0, 213.0, 44.0, 48.0], "area": 1155, "segmentation": {"size": [512, 512], "counts": "VWh42m?2N2N2N2N2N2N2N2N2N3M2N2O1N2N2N2N2N20000000000O2N1N2N20O1N2N2N2N2N2N2N3M2Bk@NW?0k@OV?Ol@OV?Ol@OV?Ol@OV?Ol@OV?OUia2"}, "image_id": 275, "id": 4036}, {"iscrowd": 0, "category_id": 1, "bbox": [117.0, 217.0, 96.0, 72.0], "area": 3307, "segmentation": {"size": [512, 512], "counts": "Qhj13l?2O1N2Y@Ib?40KeAfN\\>Y1fAeNZ>[1500001O00B`A]O`>c0bA[O^>e0dAYO]>f0eAXO[>h0gAVOY>j0iATOW>o0;2N2N2N2N2O1N2O2O00000001O000001O00000001O00000O1N2N2O1N3M2N2N2N2N2N2N2O2M2N2N2N2N2N2N2N3N1NmWe4"}, "image_id": 275, "id": 4037}, {"iscrowd": 0, "category_id": 1, "bbox": [150.0, 221.0, 6.0, 7.0], "area": 26, "segmentation": {"size": [512, 512], "counts": "oV[23l?2N20O1N3MPia5"}, "image_id": 275, "id": 4038}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 226.0, 3.0, 5.0], "area": 9, "segmentation": {"size": [512, 512], "counts": "R75l?N2NmXn7"}, "image_id": 275, "id": 4039}, {"iscrowd": 0, "category_id": 1, "bbox": [11.0, 227.0, 61.0, 62.0], "area": 1774, "segmentation": {"size": [512, 512], "counts": "gg52m?2O1N2N2N2N2N2N2N2N2N2N2N2N2N2N3M2N2N2O1N2N2O1000000001O0000000001O0000000000N2N2N2N2N2O1N3M2N2N2N2N2N2N2N2N2N2N2N2N2N2Nggk6"}, "image_id": 275, "id": 4040}, {"iscrowd": 0, "category_id": 1, "bbox": [82.0, 237.0, 37.0, 35.0], "area": 502, "segmentation": {"size": [512, 512], "counts": "UXY11n?2O1N2N2N3M2N2N2O1N001O0000000001O000O10000001O01O0000002N2N2O1N3M2N2N2N[XT6"}, "image_id": 275, "id": 4041}, {"iscrowd": 0, "category_id": 1, "bbox": [361.0, 239.0, 76.0, 72.0], "area": 2456, "segmentation": {"size": [512, 512], "counts": "nhd52m?2N2N2N3M2N2N2N2N2O1N2N2N2N3M2N2N2N2N2O1N2N1O1O000000000001O000001O000000000001O000001O00000000000001O001O2O1N2N2N2N2N2N3M2N2N2O1N2N2N2N2N3M2N2N2N2O1NjWU1"}, "image_id": 275, "id": 4042}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 245.0, 2.0, 4.0], "area": 6, "segmentation": {"size": [512, 512], "counts": "e74m?N[hn7"}, "image_id": 275, "id": 4043}, {"iscrowd": 0, "category_id": 1, "bbox": [459.0, 268.0, 37.0, 36.0], "area": 657, "segmentation": {"size": [512, 512], "counts": "jhU71n?2N2N2N2N2N2N2N2N2N2N2N2N2N2O1O1000001O00O1N2N2N3M2N2N2N2N2N2N2N2N2N2N2Oof7"}, "image_id": 275, "id": 4044}, {"iscrowd": 0, "category_id": 1, "bbox": [506.0, 282.0, 6.0, 12.0], "area": 38, "segmentation": {"size": [512, 512], "counts": "oXm71n?2N2N2N3M2UG"}, "image_id": 275, "id": 4045}, {"iscrowd": 0, "category_id": 1, "bbox": [44.0, 286.0, 64.0, 67.0], "area": 1981, "segmentation": {"size": [512, 512], "counts": 
"cYf02m?2O1N2N2N2N2N2N3M2N2N2O1N2N2N2N2N2N3M2N2N2N2O1N2001O01O00000000000001O0001O000000N2N2N2N3M2N2N2N2N2O1N2N2N3M2N2N2N2N2N2O1N2N2NheY6"}, "image_id": 275, "id": 4046}, {"iscrowd": 0, "category_id": 1, "bbox": [123.0, 289.0, 129.0, 104.0], "area": 4364, "segmentation": {"size": [512, 512], "counts": "akm11f?0a@2]?0a@2]?1`@1^?:N2N2O1000001O0N2N2N2N2N2O1N2N2N2N2N2N0000000000000010O00000000000000000000001O2O1N2N2N2N2N3M2N2N2N2N2O01N2N1O0EYO^Ag0b>[O\\Ae0d>]OZAc0g>^OWAb0i>@UA`0k>;O3M2N1O0000000000000001O000001O2N1O0000000000000001O01O00000000000000000001O2N2N2O1N2N1O0000001O002N2N2N2N3M2O1N2N2N2L^@Gd?74N2N2N`fQ4"}, "image_id": 275, "id": 4047}, {"iscrowd": 0, "category_id": 1, "bbox": [398.0, 289.0, 56.0, 51.0], "area": 1486, "segmentation": {"size": [512, 512], "counts": "hYW62m?2N2N3M2N2N2N2N2O1N2N2N2N3M2N2N2N2N2N10O0000000002O100000001O01O00000O1N2N2N2N2N2N3M2O1N2N2N2N2N2N2N2N3M2N2O1NPfl0"}, "image_id": 275, "id": 4048}, {"iscrowd": 0, "category_id": 1, "bbox": [499.0, 289.0, 7.0, 7.0], "area": 29, "segmentation": {"size": [512, 512], "counts": "Tii71n?2N3M10O2N2Nnf2"}, "image_id": 275, "id": 4049}, {"iscrowd": 0, "category_id": 1, "bbox": [117.0, 293.0, 37.0, 32.0], "area": 566, "segmentation": {"size": [512, 512], "counts": "\\ij11n?2N2N2N3M2N2O1N200000001O01O0000O1N2N2N3M2N0002N2O1N2N2N2N2N3M2N2N2O1N2N[fb5"}, "image_id": 275, "id": 4050}, {"iscrowd": 0, "category_id": 1, "bbox": [443.0, 313.0, 69.0, 89.0], "area": 2665, "segmentation": {"size": [512, 512], "counts": "okm61n?3M2\\OLYA6e>LYA6e>LYA6f>KXA7f>KXA7f>KXA7f>KYA7d>LYA6e>d0N2N2N2O1N2N00000101N2N2N2N2N2N3M2N10O000@oNRBQ1n=QOPBo0P>SOnAm0R>VOkAj0U>XOiAh0W>ZOhAe0X>]OfAd0Z>]OdAc0\\>`0000000000000010XOeAJ[>6gAHY>8iAFW>:kADU>>mA@V>?jA_OX>a0hA]OY>d0gA[OX>g0hAWOX>k0hASOX>P1:2O1N2N2N2N00001O0XF"}, "image_id": 275, "id": 4051}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 333.0, 33.0, 45.0], "area": 723, "segmentation": {"size": [512, 512], "counts": "P;j0U?1O0000000000010O0000000000000000010O000001Kl@]OV?a0l@]OV?a07M2N2N2N2N2N2O1NYU_7"}, "image_id": 275, "id": 4052}, {"iscrowd": 0, "category_id": 1, "bbox": [49.0, 342.0, 23.0, 24.0], "area": 248, "segmentation": {"size": [512, 512], "counts": "Vkh02m?2N2N3N1N2N1O000MDc@=\\?3000000002N2N2N2O1N3M2N2NRek6"}, "image_id": 275, "id": 4053}, {"iscrowd": 0, "category_id": 1, "bbox": [218.0, 346.0, 35.0, 41.0], "area": 768, "segmentation": {"size": [512, 512], "counts": "Z[]32m?2N3M2N2N2N2N2N2N2N2N2N2O1N2N2N2N3N100001O0N2N2N2N2N2N2Ai@2Y?Mh@2Y?Li@2Y?Li@2Y?Li@2Y?Li@2d?N[TQ4"}, "image_id": 275, "id": 4054}, {"iscrowd": 0, "category_id": 1, "bbox": [93.0, 350.0, 12.0, 12.0], "area": 71, "segmentation": {"size": [512, 512], "counts": "Sk^12n?2M2N2N2N0002N2N2N2N2OmT[6"}, "image_id": 275, "id": 4055}, {"iscrowd": 0, "category_id": 1, "bbox": [94.0, 350.0, 25.0, 28.0], "area": 340, "segmentation": {"size": [512, 512], "counts": "_[_12m?2N2N2N2N2N2N0001O0002N2N2N2N3M2NDl@MT?3n@LT?1n@MT?1n@MT?1n@MT?2m@LU?2>NlTT6"}, "image_id": 275, "id": 4056}, {"iscrowd": 0, "category_id": 1, "bbox": [256.0, 354.0, 58.0, 44.0], "area": 1321, "segmentation": {"size": [512, 512], "counts": "b[P41n?2N3M2O1N2N2N2N2N3M2N2O10000n@XOk>h0SAZOm>f0QA\\On>l0N2O0O00001O2N000010O00002N2N2N2N2O2M2N2N00000000010O000000002N2N3M2O1N2N2N2N2N3McdR3"}, "image_id": 275, "id": 4057}, {"iscrowd": 0, "category_id": 1, "bbox": [413.0, 365.0, 10.0, 10.0], "area": 55, "segmentation": {"size": [512, 512], "counts": "ak^61n?2N2N2N2O10N2N2N2N`T\\1"}, "image_id": 275, "id": 4058}, {"iscrowd": 0, "category_id": 1, "bbox": [24.0, 370.0, 21.0, 18.0], 
"area": 200, "segmentation": {"size": [512, 512], "counts": "i[<2n?1N2N2N2N2N3M21O0N000010O0001O2N2N3M2N2OXTY7"}, "image_id": 275, "id": 4059}, {"iscrowd": 0, "category_id": 1, "bbox": [193.0, 380.0, 72.0, 54.0], "area": 1794, "segmentation": {"size": [512, 512], "counts": "^lP31o?1N2N2N2N3M2N2O1N2N2N3M2N2N2O1N2N3M2N2N2N2O1O2O000001O0001O00O1N3M2O1N2N2N2N3M2N2O1N2N2N3M200000000000N3M2N2O1N2N000002N2N2N2O1N2N3M2N2N2N2O1NoRk3"}, "image_id": 275, "id": 4060}, {"iscrowd": 0, "category_id": 1, "bbox": [19.0, 384.0, 34.0, 33.0], "area": 543, "segmentation": {"size": [512, 512], "counts": "dl92m?2N2N2N2N2N2N2N2O1N2N2N1O000000000000000001O2N2N2N2N2N2O1N2N2N2N2NfSU7"}, "image_id": 275, "id": 4061}, {"iscrowd": 0, "category_id": 1, "bbox": [275.0, 386.0, 69.0, 59.0], "area": 1829, "segmentation": {"size": [512, 512], "counts": "[mY41n?2N2N2N2O1N2N2N2N2N2N2N2N2FWOZAl0c>VO[Al0c>VO[Al0d>UOZAl0e>7000000000000000001O00000001O00001O2N2N2N1O001O102M2N2N1O0000000001O2O1N0002N2O2M2N2N2N2N2N2N2N2N2N2Nacc2"}, "image_id": 275, "id": 4062}, {"iscrowd": 0, "category_id": 1, "bbox": [487.0, 388.0, 25.0, 62.0], "area": 780, "segmentation": {"size": [512, 512], "counts": "`mc71n?2N2N2N3M2N2N2N2XO_OoAb0o=@oAb0o=@oAb0P>_OnAc0P>_OnAc0P>_OnAd0o=^OoAd0o=^OoAc0P>@mA`0S>BlA>S>DmA:S>HmA6S>LkA4V>j00000mC"}, "image_id": 275, "id": 4063}, {"iscrowd": 0, "category_id": 1, "bbox": [162.0, 394.0, 31.0, 27.0], "area": 410, "segmentation": {"size": [512, 512], "counts": "h\\a22m?2N2N1O0000002O1N2N2N2N2N2N2N2O100000O1N2N3M2N2N2N2N2N2O1N2NXSo4"}, "image_id": 275, "id": 4064}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 399.0, 6.0, 11.0], "area": 36, "segmentation": {"size": [512, 512], "counts": "_<;f?N2N2N2N2N]cl7"}, "image_id": 275, "id": 4065}, {"iscrowd": 0, "category_id": 1, "bbox": [71.0, 412.0, 58.0, 64.0], "area": 1708, "segmentation": {"size": [512, 512], "counts": "[nS12m?2N2N2N2N2N2DCRA?l>CRA?l>CRA?l>CRA?l>CRA?f>[O\\AQ1b>QO\\AQ1b>9M00000010O00000000000000000MaAfN_>Z13000001O2N2N2N2O1N3M2N2N2N2N0000001O2N2N3M2N2O1N2N2N2N2NbRo5"}, "image_id": 275, "id": 4066}, {"iscrowd": 0, "category_id": 1, "bbox": [137.0, 415.0, 13.0, 10.0], "area": 73, "segmentation": {"size": [512, 512], "counts": "QmT21n?2N2O2O0O100N1O02N2N2N2Nnbd5"}, "image_id": 275, "id": 4067}, {"iscrowd": 0, "category_id": 1, "bbox": [302.0, 418.0, 58.0, 56.0], "area": 1449, "segmentation": {"size": [512, 512], "counts": "V^g41n?2O1N2N2N3M2N2N2N2N2O1N2N2N3M2N2N2N0HoNbAQ1^>QO`Ao0`>SO_Al0a>900000001O01O0000002N2N2N2O1N2N3M2N2N2N2N1O0001O0002N2N2N2N2N2N2O2M2N2N\\b[2"}, "image_id": 275, "id": 4068}, {"iscrowd": 0, "category_id": 1, "bbox": [128.0, 420.0, 13.0, 12.0], "area": 82, "segmentation": {"size": [512, 512], "counts": "Z]P21o?1N2N2N2N2N0010O2N2N2N2NiRi5"}, "image_id": 275, "id": 4069}, {"iscrowd": 0, "category_id": 1, "bbox": [166.0, 422.0, 43.0, 46.0], "area": 988, "segmentation": {"size": [512, 512], "counts": "i]c21n?2N2N2N2N2N2N2N2N2O1N3M2N2N2N2N2N2N2N2N2O2O000000N2N2N2N2N3M2N2N2ZOj@`0W?^Ol@?\\?N2N2N2N2N2N2N2NoQg4"}, "image_id": 275, "id": 4070}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 424.0, 29.0, 55.0], "area": 913, "segmentation": {"size": [512, 512], "counts": "[=d1\\>O01O0001O2N2N2N2N2O2M2N2N2N2N2N2N3M2O1N2N2N2N2K`@Gc?65N2N2NoQa7"}, "image_id": 275, "id": 4071}, {"iscrowd": 0, "category_id": 1, "bbox": [218.0, 439.0, 56.0, 70.0], "area": 1809, "segmentation": {"size": [512, 512], "counts": 
"\\_]32m?2O1N2N2N2N3M2EARAa0m>@QAb0m>@QAb0_>\\OiA4Fb0_>\\OiA4Ga0^>DbAg0\\>`0N01O01O0000000000000000010O00KfAeNZ>[1hAcNX>]15001O01O2N2N2N2N2N2N2SOSAd0o>ZOSAe0n>YOTAe0U?N3M2N2N2N2N2N2N2O1N2N^af3"}, "image_id": 275, "id": 4072}, {"iscrowd": 0, "category_id": 1, "bbox": [111.0, 441.0, 30.0, 29.0], "area": 438, "segmentation": {"size": [512, 512], "counts": "Wng11n?2O1N2N2N2N2N3M2N2N2O1N2N2N000001O2N2N2O1N3M2N2N2N2N2N2O2MjQi5"}, "image_id": 275, "id": 4073}, {"iscrowd": 0, "category_id": 1, "bbox": [295.0, 469.0, 29.0, 29.0], "area": 450, "segmentation": {"size": [512, 512], "counts": "Soc42m?2N2N2N2N2N2N2O2M2N2N2N2N1O0001O2O1N2N3M2N2N2N2N2N2O1N3Mn`m2"}, "image_id": 275, "id": 4074}, {"iscrowd": 0, "category_id": 1, "bbox": [355.0, 471.0, 61.0, 41.0], "area": 1334, "segmentation": {"size": [512, 512], "counts": "noa52m?1O1O1O100O1O1O1O1O1O1O1O1O1O1O1O1O1O1F]OVAd0j>^OSAc0l>_OSAa0l>ARA`0m>:O1O1O1O1O1O1O1O1001O1O1O1O1O1O2N1O1O1O1O1O1O1O1O1O1N3M2O1N2N2N2N2N2N2N2N]`_1"}, "image_id": 275, "id": 4075}, {"iscrowd": 0, "category_id": 1, "bbox": [13.0, 476.0, 51.0, 36.0], "area": 1079, "segmentation": {"size": [512, 512], "counts": "co61o?1N2N2N2N2N2N3M3M2N2O0O1O1O1O1O1O1O1O100O1O1O1O1O1O1001O1O2M2N2N2N2N010O000000002N3M2N2N2O1N2N2N2N2Ng`o6"}, "image_id": 275, "id": 4076}, {"iscrowd": 0, "category_id": 1, "bbox": [411.0, 478.0, 26.0, 27.0], "area": 360, "segmentation": {"size": [512, 512], "counts": "Yo]62m?2N2N2N2N2N2N2N2N2N2N20000000N2N2N2N2N2N2N2N2N2N2NePU1"}, "image_id": 275, "id": 4077}, {"iscrowd": 0, "category_id": 1, "bbox": [251.0, 480.0, 45.0, 32.0], "area": 881, "segmentation": {"size": [512, 512], "counts": "com31n?2\\@NW?4a@3^?8O100O1O1O1O1O1O1O1O100O1O1O1O1O1O1O11O1O1O1O1O2N1O1O1O1O1O1O2N1O1O1O1O1O1N2N3M2N2NW`[3"}, "image_id": 275, "id": 4078}, {"iscrowd": 0, "category_id": 1, "bbox": [192.0, 495.0, 25.0, 17.0], "area": 261, "segmentation": {"size": [512, 512], "counts": "h_P32m?2N2N3N1N2N1O1O1O1O1001O1O1O1O1O1O1O1O1O1O2N1O1NTPc4"}, "image_id": 275, "id": 4079}, {"iscrowd": 0, "category_id": 1, "bbox": [324.0, 499.0, 25.0, 13.0], "area": 168, "segmentation": {"size": [512, 512], "counts": "o_R51n?1O1O1O1O1O1O1O1O1O100O1O12N1O1O1O1O1O1O1O1O1O1OQPa2"}, "image_id": 275, "id": 4080}, {"iscrowd": 0, "category_id": 1, "bbox": [317.0, 509.0, 6.0, 3.0], "area": 12, "segmentation": {"size": [512, 512], "counts": "oon41n?1O1001O1OQPn2"}, "image_id": 275, "id": 4081}, {"iscrowd": 0, "category_id": 1, "bbox": [82.0, 317.0, 37.0, 42.0], "area": 651, "segmentation": {"size": [512, 512], "counts": "lZY12m?2N2M2O2N2M3N1O2M3N2N2N1N10000O0LVOUAj0k>XOSAg0n>5O11N3N2N1N3N2M3N2N2M2O2N2N200O1M3NgUT6"}, "image_id": 276, "id": 4082}, {"iscrowd": 0, "category_id": 1, "bbox": [31.0, 356.0, 23.0, 23.0], "area": 277, "segmentation": {"size": [512, 512], "counts": "_k?2m?1O2N2N2M3N2N2N2N1O2O1000N2N2N2M2O2N2N2M3N1OddT7"}, "image_id": 276, "id": 4083}, {"iscrowd": 0, "category_id": 1, "bbox": [423.0, 87.0, 39.0, 40.0], "area": 798, "segmentation": {"size": [512, 512], "counts": "Zcc62m?2N2O1N2N3M2N2N2N2N2O1N2N3M2N2N2N2N0001O00003M2N2O1N2N2N2N2N3M2N2O1N2N2N2N3Mglh0"}, "image_id": 277, "id": 4084}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 52.0, 65.0], "area": 1732, "segmentation": {"size": [512, 512], "counts": "0Q2o=O1O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1N2O1O1O1N2OQPV7"}, "image_id": 278, "id": 4085}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 35.0, 45.0], "area": 925, "segmentation": {"size": [512, 512], "counts": 
"5X1g>0001O0000001O1O1O1O1O1O100O1O1O1O1O1O1O1O1O1O2N2O1N2N2N2N2N2N3M2N2Ne_^7"}, "image_id": 280, "id": 4086}, {"iscrowd": 0, "category_id": 1, "bbox": [166.0, 0.0, 42.0, 22.0], "area": 495, "segmentation": {"size": [512, 512], "counts": "QPc22m?2O1O1O1O1O1O1O2N1O1O1O1O1O1O1O1O2N00O1O1O1O1O100O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1OQPh4"}, "image_id": 280, "id": 4087}, {"iscrowd": 0, "category_id": 1, "bbox": [370.0, 0.0, 41.0, 30.0], "area": 704, "segmentation": {"size": [512, 512], "counts": "aPi51o?1N2_OMRA6l>LQA6n>KQA7m>JQA8n>Jo@9o>HPA9o>In@9Q?;0O1O1O100O1O100O1O100O1O1O100O1O100O1O100O1O1O100O3M2O2M2N3Nh_b1"}, "image_id": 280, "id": 4088}, {"iscrowd": 0, "category_id": 1, "bbox": [422.0, 0.0, 15.0, 8.0], "area": 66, "segmentation": {"size": [512, 512], "counts": "PPc61o?1O1O1O2N1O1OO100O1O1O1O1O1OQ`U1"}, "image_id": 280, "id": 4089}, {"iscrowd": 0, "category_id": 1, "bbox": [453.0, 0.0, 5.0, 2.0], "area": 8, "segmentation": {"size": [512, 512], "counts": "P`R71o?1O0000OQPk0"}, "image_id": 280, "id": 4090}, {"iscrowd": 0, "category_id": 1, "bbox": [430.0, 7.0, 14.0, 14.0], "area": 119, "segmentation": {"size": [512, 512], "counts": "\\Pg62m?2N2N2N3M2O1O01O1N2N2N2N3McoQ1"}, "image_id": 280, "id": 4091}, {"iscrowd": 0, "category_id": 1, "bbox": [295.0, 18.0, 62.0, 59.0], "area": 1644, "segmentation": {"size": [512, 512], "counts": "lac41n?2N2N2N2HHe@:Y?He@:Y?8N2N2N2N2N2N1O2N2N2N1O002N2N1O00000000K_AkNb>U1400O100000O100001O1O2N2N2N2N2M3N2N2N2O1000000O0100000N2N2N2N2N2N2N2Mi^]2"}, "image_id": 280, "id": 4092}, {"iscrowd": 0, "category_id": 1, "bbox": [491.0, 25.0, 21.0, 18.0], "area": 315, "segmentation": {"size": [512, 512], "counts": "Sae76b?8N21O00000001O000001O00000001O00000001OTO"}, "image_id": 280, "id": 4093}, {"iscrowd": 0, "category_id": 1, "bbox": [375.0, 29.0, 29.0, 29.0], "area": 435, "segmentation": {"size": [512, 512], "counts": "\\ak51n?2N2N2N2N2N2N2O2M2N2N2N2N1O000011N2N2N2N2N3M2N2N2N2N2N2Ngne1"}, "image_id": 280, "id": 4094}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 33.0, 45.0, 57.0], "area": 1324, "segmentation": {"size": [512, 512], "counts": "V27h?3M2^@Fk>0`A1_A`0_>B_A`0_>B_A`0_>B_A`0`>A^A`0a>B^A=b>`000000001O0001O0000000000011N2N2N2N2N2N3M2N2O1N2N2N2N2N3M2N2O1N2N2N2NY^Y7"}, "image_id": 280, "id": 4095}, {"iscrowd": 0, "category_id": 1, "bbox": [42.0, 37.0, 21.0, 21.0], "area": 219, "segmentation": {"size": [512, 512], "counts": "`Qe01n?2N2N2N2N2N2N2N2N2N0002N2N2N2N2N2N2N2N2Nc^P7"}, "image_id": 280, "id": 4096}, {"iscrowd": 0, "category_id": 1, "bbox": [446.0, 47.0, 47.0, 43.0], "area": 1068, "segmentation": {"size": [512, 512], "counts": "QRo61n?3M2X@Kb?7\\@Kb?;N2N2N2O1N2N2N2N3M2N2N2N2N2N2N01O1O2N2N2N2N2N2N2O1N2N1O000000001O2N2N2O1N3M2N2N2N2N2NR^9"}, "image_id": 280, "id": 4097}, {"iscrowd": 0, "category_id": 1, "bbox": [374.0, 60.0, 59.0, 66.0], "area": 1767, "segmentation": {"size": [512, 512], "counts": "^Sk52m?2N2N2N2N2EFm@=P?En@=P?En@=P?Eo@nNcAR1]>POaAP1_>RO`An0_>90000001O000000000001O000002N2N2N2N3M2N2N2O1N2N2N2N2N3M2N2N2N2O1N2N2N2N2N\\]W1"}, "image_id": 280, "id": 4098}, {"iscrowd": 0, "category_id": 1, "bbox": [256.0, 70.0, 39.0, 60.0], "area": 1477, "segmentation": {"size": [512, 512], "counts": "dSP42l?3N1N3M2N3M2O2M3M2N3M2J7[Od00100O001O001O001O00O1N2N2N2O1N2N2N2N2O1N2N2N2N2O1]O\\^\\3"}, "image_id": 280, "id": 4099}, {"iscrowd": 0, "category_id": 1, "bbox": [46.0, 74.0, 54.0, 49.0], "area": 1280, "segmentation": {"size": [512, 512], "counts": 
"SSg02m?2N2N2N2N3M2N2N2O1N2N2N2N200000000O1N2N2N2N0000000000000000000000000001O1O2N2N2N2O1N2N2N2N2N2N2N2N2N2N2N2NWm]6"}, "image_id": 280, "id": 4100}, {"iscrowd": 0, "category_id": 1, "bbox": [322.0, 76.0, 29.0, 28.0], "area": 424, "segmentation": {"size": [512, 512], "counts": "iRQ51n?2N2N2O1N2N3M2N2N2N2N2N2N2O0O01O2N2N3M2N2N2N2N2O1N2N2N3MW]`2"}, "image_id": 280, "id": 4101}, {"iscrowd": 0, "category_id": 1, "bbox": [433.0, 91.0, 42.0, 50.0], "area": 1105, "segmentation": {"size": [512, 512], "counts": "\\ch61n?2N2N2N2d@Il>9RAIl>9RAJk>9RAIl>9RAIm>8QAJm>f0N2N2N2N2N2N2N2N10O2N2N2N2N2N2O1IVAUOl>j0UATOm>j06N2N2N2N2N3M2N2N2N2N2N2N2N_\\b0"}, "image_id": 280, "id": 4102}, {"iscrowd": 0, "category_id": 1, "bbox": [1.0, 102.0, 36.0, 35.0], "area": 637, "segmentation": {"size": [512, 512], "counts": "lc02m?2N2N2N2N2N2N2L4N2N2N2N0000000000000000101000N2N2N2N2N2N2N2N2N2N2N2N2N]\\]7"}, "image_id": 280, "id": 4103}, {"iscrowd": 0, "category_id": 1, "bbox": [285.0, 109.0, 29.0, 29.0], "area": 446, "segmentation": {"size": [512, 512], "counts": "jc^42n?1N2N2N2N2N2N2N2N3M2O1N2N2N1O01O2N2N2N3M2N2N2N2O1N2N2N2NWlR3"}, "image_id": 280, "id": 4104}, {"iscrowd": 0, "category_id": 1, "bbox": [81.0, 123.0, 62.0, 49.0], "area": 1568, "segmentation": {"size": [512, 512], "counts": "`dX11n?2N3M2N2N2N2N2O1N2N2N2N2N2N3M2N2N2N2N2N2O1000000000O1N2N2N2N2N3M00001O01O0000000000000000000001O2O1N2N2N3M2N2N2N2N2N2N2N2Nh[h5"}, "image_id": 280, "id": 4105}, {"iscrowd": 0, "category_id": 1, "bbox": [315.0, 125.0, 53.0, 49.0], "area": 1275, "segmentation": {"size": [512, 512], "counts": "idm41n?2N2N2N2N2O1N2N2N2N2N2N2N3M2N2N2N2N2N2N1O00000010O00000000001O2N2N2N3M2N2N2N1O001O2O1N2N2N2N2N2N2N2N2N2NbkW2"}, "image_id": 280, "id": 4106}, {"iscrowd": 0, "category_id": 1, "bbox": [373.0, 126.0, 73.0, 69.0], "area": 2121, "segmentation": {"size": [512, 512], "counts": "[ej51n?2N2N2\\@J[?8c@J[?9c@HV?Mm@b0Q?Al@b0Q?8N2N2N2N2O1N3M0001O2O2M2N2N2N2N2N10O000_OVOmAj0T>WOjAi0V>ZOgAf0Y>\\OfAc0Z>_OdAa0\\>AbA`0]>BaA>_>D_A>_>b00O00002N3M2OO0000000000010O0002N2JVASOl>k0VASOm>j06O1N1O0000002N2N3N1N2N2N2N2N3M2N2OakP1"}, "image_id": 280, "id": 4107}, {"iscrowd": 0, "category_id": 1, "bbox": [421.0, 138.0, 51.0, 49.0], "area": 1022, "segmentation": {"size": [512, 512], "counts": "\\eb62m?2O1N2N2N2N2N3M2N2N2N2N2N1O00010O000000000000000000000000010O00000000000002N2N2N2N2N2N2O2M2N2N2N2N2NYkc0"}, "image_id": 280, "id": 4108}, {"iscrowd": 0, "category_id": 1, "bbox": [128.0, 161.0, 62.0, 47.0], "area": 1519, "segmentation": {"size": [512, 512], "counts": "hUP21n?2JN[@4d?MZ@5d?5N1O0000002N2N2N2N2N3M2N2O1N2N2N2N2O100000001O0000O1N2N2N3M2N000000000001O01O00000000001O2N2N2N2N2N2O1N3M2N2N2N2N2NajP5"}, "image_id": 280, "id": 4109}, {"iscrowd": 0, "category_id": 1, "bbox": [305.0, 165.0, 24.0, 24.0], "area": 286, "segmentation": {"size": [512, 512], "counts": "^eh41n?2N2N2N2N2N2N2N2N2O10001O00O1N2N3M2N2N2N2N2N2O]Zk2"}, "image_id": 280, "id": 4110}, {"iscrowd": 0, "category_id": 1, "bbox": [60.0, 167.0, 32.0, 41.0], "area": 625, "segmentation": {"size": [512, 512], "counts": "]Un01n?2N3c@Kj>7TAKk>6SALk>6SALk>6SAMj>5UALj>6SALm>4QANo>b000010O00000N2O1N2N3M2N2N2N2O1N3M2N2N2N2N2NPja6"}, "image_id": 280, "id": 4111}, {"iscrowd": 0, "category_id": 1, "bbox": [86.0, 170.0, 25.0, 24.0], "area": 295, "segmentation": {"size": [512, 512], "counts": "gU[11n?2N2N2O1N2N3M2N2N2N00000001O010O2N2N3M2N2N2N2N2N^ZX6"}, "image_id": 280, "id": 4112}, {"iscrowd": 0, "category_id": 1, "bbox": [409.0, 173.0, 103.0, 126.0], "area": 3465, "segmentation": {"size": [512, 512], "counts": 
"hh\\61n?2N2O2M2N2N2N2N2N2N2N2N2N3N1N2N2N00000N21O000001O00000000000001O000001O0000000000000001O0001O0000000000000001O000001O00000000000001O000001O0000000003M1O_OROQBn0P>SOnAm0R>VOkAj0U>XOiAi0V>YOiAf0W>\\OgAd0Y>^OeAb0[>@cA`0]>a0000000001O0MbAeN^>[131O000000E_AXOa>i0`AUO`>k0cARO]>n0eAPO\\>o0fAoNZ>Q1nJ"}, "image_id": 280, "id": 4113}, {"iscrowd": 0, "category_id": 1, "bbox": [338.0, 183.0, 45.0, 47.0], "area": 1169, "segmentation": {"size": [512, 512], "counts": "UVY51`00m>2QA0m>2QA0m>2QA0m>2QA0m>2QA0m>2QA0m>2QA0m>c0N2N2O1000000O1N00000000000002N2N2LTAQOn>m04N2N2N10O2N2N02Jl@]OV?a0l@]OV?a06N2N2N2N2N2N2N2NkYP2"}, "image_id": 280, "id": 4114}, {"iscrowd": 0, "category_id": 1, "bbox": [21.0, 188.0, 7.0, 7.0], "area": 30, "segmentation": {"size": [512, 512], "counts": "oe:2m?2N2N10O2N3MRja7"}, "image_id": 280, "id": 4115}, {"iscrowd": 0, "category_id": 1, "bbox": [21.0, 188.0, 50.0, 49.0], "area": 1222, "segmentation": {"size": [512, 512], "counts": "hf:1n?2N2N2N3M2N2N2N2O1N2N2N3M2N2N2N2N2O1N1O00000000001O0001O0000002N2N2O1N2N2N2N2N3M2N2N2O1N2N2N2N3M2NaYl6"}, "image_id": 280, "id": 4116}, {"iscrowd": 0, "category_id": 1, "bbox": [385.0, 188.0, 71.0, 62.0], "area": 1993, "segmentation": {"size": [512, 512], "counts": "nfP61n?2N2N2N2N2N3N1N2e@AR?a0l@AR?a0m@@Q?i0N2N2N2N3N1N2N2N1O0000000010O0000002N2N2N2N3N1N00001O0000000001O000001O0000000001O01O2N2N2N3M1O002O1N2N2N2N3M2N2N2N2O1Neik0"}, "image_id": 280, "id": 4117}, {"iscrowd": 0, "category_id": 1, "bbox": [116.0, 201.0, 30.0, 30.0], "area": 460, "segmentation": {"size": [512, 512], "counts": "hVj11n?2N2N2N2N2N2N2O1N3M2N2N2N2N001O02N2N2N2N2N2N2N2N2N3N1N2N2NZif5"}, "image_id": 280, "id": 4118}, {"iscrowd": 0, "category_id": 1, "bbox": [155.0, 206.0, 53.0, 55.0], "area": 1299, "segmentation": {"size": [512, 512], "counts": "Wg]21n?2N2O1N1O0O1000001O1O2N2N2N2N2SA^OY>d0eA^OY>d0eA_OX>c0fA_OX>c0fA_OX>c0fA_OX>c0fA_OX>c0fA^OY>T100002N2N2N2N2N2O1O1O1N2N2O1N2N2N2N2N2N3M2N2N2N2N2N2N2N2N2Nghg4"}, "image_id": 280, "id": 4119}, {"iscrowd": 0, "category_id": 1, "bbox": [62.0, 222.0, 56.0, 53.0], "area": 1408, "segmentation": {"size": [512, 512], "counts": "nWo01n?3M2N2N2O1N2N2N3M2N2N2N2O1N2N3M2N2N1O01O01O000000000001O01O000000000001O010O2N2N2N3M2N2O1N2N2N3M2N2N2N2O1N2N3M_hT6"}, "image_id": 280, "id": 4120}, {"iscrowd": 0, "category_id": 1, "bbox": [38.0, 228.0, 30.0, 26.0], "area": 364, "segmentation": {"size": [512, 512], "counts": "gWc02n?1N2N2N3JH_@8b?2000001O002N2N2O0O0000000001O2O1N2N2N2N2N3M2O1Nchm6"}, "image_id": 280, "id": 4121}, {"iscrowd": 0, "category_id": 1, "bbox": [8.0, 231.0, 28.0, 27.0], "area": 382, "segmentation": {"size": [512, 512], "counts": "bW41n?2O1N2N2N2N2N3M2N2N2O1N2N2O10N2N2N2N2N2N2N3M2N2O1N2N2N\\h]7"}, "image_id": 280, "id": 4122}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 238.0, 17.0, 33.0], "area": 287, "segmentation": {"size": [512, 512], "counts": "^7Q1P?N2N2N2N2N3N1N2N2N2N2N2N2N3M2N2OPXg7"}, "image_id": 280, "id": 4123}, {"iscrowd": 0, "category_id": 1, "bbox": [245.0, 242.0, 25.0, 25.0], "area": 323, "segmentation": {"size": [512, 512], "counts": "lgj31n?2N2N2N2N2N3M2N2N2O1N200000O2M2N2N2O1N2N2N2N2N2NQhh3"}, "image_id": 280, "id": 4124}, {"iscrowd": 0, "category_id": 1, "bbox": [153.0, 254.0, 23.0, 24.0], "area": 271, "segmentation": {"size": [512, 512], "counts": "Vh\\21n?2N2N3M2N2N2N2O1N3O01O00000O1N2N2N3M2N2O1N2NdgW5"}, "image_id": 280, "id": 4125}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 255.0, 58.0, 49.0], "area": 1395, "segmentation": {"size": [512, 512], "counts": 
"i8>a?2O1N2N20000N2N2N2N10O00000000000001O00000000000020000001OO1N1O0000000000000000000002N2N2O1N3M2N2N2N2N2N2N2N2N2N2NbgR7"}, "image_id": 280, "id": 4126}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 257.0, 57.0, 52.0], "area": 1460, "segmentation": {"size": [512, 512], "counts": "dhm21n?2N2N2N2N2N3]@C^?a0O1j@^Ok>d0SA^Ok>d0SA^Ol>d0QA^OP?i0000001O00000N2N000000002O0O0000000000000000001O01O002N2N2N2N2N2N2N2O1N2N3M2N2N2N2N2N2N2N]gU4"}, "image_id": 280, "id": 4127}, {"iscrowd": 0, "category_id": 1, "bbox": [500.0, 264.0, 12.0, 22.0], "area": 141, "segmentation": {"size": [512, 512], "counts": "bXj71n?2N2N2N2N2N2N2N2O1N2N2gG"}, "image_id": 280, "id": 4128}, {"iscrowd": 0, "category_id": 1, "bbox": [86.0, 268.0, 23.0, 22.0], "area": 260, "segmentation": {"size": [512, 512], "counts": "iX[11n?2N2N2N3M2N2N2O0O1O0000000001O2N2O1N3M2N2N2N]WY6"}, "image_id": 280, "id": 4129}, {"iscrowd": 0, "category_id": 1, "bbox": [97.0, 268.0, 50.0, 49.0], "area": 1186, "segmentation": {"size": [512, 512], "counts": "Xi`11n?2N2N2N3N1N2N2N2N2N2N2N2N2N2N2N2N3M2OO000001O0000000000000001O2N2N2N2N2O1N2N2N2N2N2N2N3M2N2N2N2N2NRWf5"}, "image_id": 280, "id": 4130}, {"iscrowd": 0, "category_id": 1, "bbox": [451.0, 273.0, 48.0, 58.0], "area": 1443, "segmentation": {"size": [512, 512], "counts": "^iQ71n?2N2`@Mh>0RA54Mh>1QA45Mi>0PA55Mi>=TAEj>=TAEj>j0N2N2N3M2N2N000001O0000000001O02N2N2N2N3M2N2N2N2N2N2O1N2N2N2N3M2N2N2N2N2N2N2N2OeV6"}, "image_id": 280, "id": 4131}, {"iscrowd": 0, "category_id": 1, "bbox": [495.0, 278.0, 15.0, 15.0], "area": 104, "segmentation": {"size": [512, 512], "counts": "jhg71n?2O1N2N2N2O2O0001O0N2N2N2N2NRg0"}, "image_id": 280, "id": 4132}, {"iscrowd": 0, "category_id": 1, "bbox": [31.0, 282.0, 56.0, 42.0], "area": 1389, "segmentation": {"size": [512, 512], "counts": "hi?2m?2N2N2N3HGd@;Z?Ge@:Z?7N2N2N1O000000002N2N2O1N1001N2N000000001O01O2N2N2O1001N0O00000001O01O2N2N2N2N2N2N2N3M2Jb@H_?6c@H_?76N2N2NhVd6"}, "image_id": 280, "id": 4133}, {"iscrowd": 0, "category_id": 1, "bbox": [124.0, 294.0, 79.0, 68.0], "area": 2421, "segmentation": {"size": [512, 512], "counts": "WZn12n?1N2N2N2N2N2N3M2O1N2N2N2N2N3M2O10000000010O00000N2N2N2O2M2N0000001O0001O0000000001O0001O0000000001O0001O00000000011N2N2N2N2N3M2N2N2O1N2N2N3M2N2N2N2O1N2N2N3MSVj4"}, "image_id": 280, "id": 4134}, {"iscrowd": 0, "category_id": 1, "bbox": [257.0, 297.0, 47.0, 47.0], "area": 1093, "segmentation": {"size": [512, 512], "counts": "kiP41n?2N2N2O1N2N2N2N3M2N2N2N2O1N2N2N3M2N2N2N2O1000010O00O1N2O1N2N2N3M2N2N2N2N2O1N2N3M2N2N2N2N2N2OkeW3"}, "image_id": 280, "id": 4135}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 311.0, 23.0, 44.0], "area": 540, "segmentation": {"size": [512, 512], "counts": "h9[1e>O002N2N3M2N2O1N2N2N2N2N3M2N2N2O1N2N2N2N3M2NcUd7"}, "image_id": 280, "id": 4136}, {"iscrowd": 0, "category_id": 1, "bbox": [479.0, 311.0, 33.0, 55.0], "area": 960, "segmentation": {"size": [512, 512], "counts": "aj_71n?2N2N2N2N2N2N00012M2N2N2N2N2N2N2N2N3N1N2N2N2N2]AfN]>\\1aAfN]>a1M000001O2N2N3NUF"}, "image_id": 280, "id": 4137}, {"iscrowd": 0, "category_id": 1, "bbox": [51.0, 316.0, 51.0, 53.0], "area": 1349, "segmentation": {"size": [512, 512], "counts": "oji02m?2N2DLi@6U?Li@6V?Kh@7V?Ki@6U?Li@6U?=M2N2N2N2N2O1000N0001O0000000001O00000000000001O02N2N2N2N2N2N2N2N3M2N2N2O1N2N2N2N2N2Nbe\\6"}, "image_id": 280, "id": 4138}, {"iscrowd": 0, "category_id": 1, "bbox": [179.0, 335.0, 84.0, 83.0], "area": 2908, "segmentation": {"size": [512, 512], "counts": 
"mki225Oc?2[@0c?2[@0c?8N2N2N3M2N2N2O1N2N2N2N3M2O11O0000N2N2O1N3N100001M2N2N2N2N2N1O01O00000ZOoN_BP1a=RO]Bn0c=TO[Bl0f=UOXBk0h=WOVBi0j=YOlAG3Q1P>ZOlAF2P1R>\\OjAF2n0T>CjA=V>f000001O000001O000002N2N3M2N2N2O1N2N2N2N3M2N2N2N2O1N2N2N2N3M2N2N2N2O1N2N2NdTl3"}, "image_id": 280, "id": 4139}, {"iscrowd": 0, "category_id": 1, "bbox": [94.0, 340.0, 55.0, 41.0], "area": 1343, "segmentation": {"size": [512, 512], "counts": "T[_11n?2N2N2Y@K`?7^@K`?Y101O00000000000001M2N2N10O000003M2N2^OQA0Q?NQA0Q?NRAOP?0QANQ?0QANQ?0QANQ?0QAOP?OWTT6"}, "image_id": 280, "id": 4144}, {"iscrowd": 0, "category_id": 1, "bbox": [510.0, 370.0, 2.0, 3.0], "area": 4, "segmentation": {"size": [512, 512], "counts": "c[o71n?2]D"}, "image_id": 280, "id": 4145}, {"iscrowd": 0, "category_id": 1, "bbox": [249.0, 373.0, 29.0, 30.0], "area": 447, "segmentation": {"size": [512, 512], "counts": "Rll32m?3N1N2N2N2N2N2N2N2N3M2N2O1N0002N3M2N2N2O1N2N2N2N2N2N3M2Nmcd3"}, "image_id": 280, "id": 4146}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 380.0, 45.0, 62.0], "area": 1498, "segmentation": {"size": [512, 512], "counts": "SN0000010O00000000001O2N2N2O1N002N2N2N2N2N3M2N2O1N2N1O00001O0000002O1N3M2N2N2N2N2N2N2N2N`SY7"}, "image_id": 280, "id": 4147}, {"iscrowd": 0, "category_id": 1, "bbox": [329.0, 381.0, 59.0, 56.0], "area": 1597, "segmentation": {"size": [512, 512], "counts": "`lT51n?2N2N2N2h@He>:YAHe>:ZAGd>;ZAGd>;ZAGe>:YAHe>:YAHe>k0N2N2N2N2N3M1O01O002N2N2N2N3M2O1N2N2N2N2N2N2N3M2N2OO000001O000000000001O02N2N3M2N2N2N2N2N2O1Nacm1"}, "image_id": 280, "id": 4148}, {"iscrowd": 0, "category_id": 1, "bbox": [14.0, 414.0, 58.0, 73.0], "area": 1900, "segmentation": {"size": [512, 512], "counts": "i^72m?2M2O2M3EGl@;R?Fl@POcAP1]>ROaAn0_>TO`Al0_>:00001O000000001O2O1N2N2N2N2N2N2N3M2N2OO000002N2N2N2N2N2N2O2M2N2N2Nkaa1"}, "image_id": 280, "id": 4155}, {"iscrowd": 0, "category_id": 1, "bbox": [126.0, 443.0, 52.0, 47.0], "area": 1235, "segmentation": {"size": [512, 512], "counts": "X^o13l?2O1N2N2N2N2N2N3N1000000001OO1O1N3M2N2N2N2N2N2N2N3N1N2N2N02N3M2O1N2N2N2N2N001O2N2O1N2N3^Og@8[?Fg@9a?O1N2N2O1N2NZaV5"}, "image_id": 280, "id": 4156}, {"iscrowd": 0, "category_id": 1, "bbox": [264.0, 449.0, 50.0, 49.0], "area": 1182, "segmentation": {"size": [512, 512], "counts": "n^T42m?2N2N2N2N2N2N3N1N2N2N2N2N2N2N2N2N2N1O00000001O01O00000000000002N2N2N2N2N3M2N2O1N2N2N2N2N2N2N2N2N2N]aR3"}, "image_id": 280, "id": 4157}, {"iscrowd": 0, "category_id": 1, "bbox": [64.0, 450.0, 30.0, 30.0], "area": 480, "segmentation": {"size": [512, 512], "counts": "`^P12m?2N2N2N2N2N2N2N2N3M2N2N2N2N10O2N2N2N2N2O1N2N2N2N2N2N2N2N2Naa`6"}, "image_id": 280, "id": 4158}, {"iscrowd": 0, "category_id": 1, "bbox": [318.0, 456.0, 29.0, 29.0], "area": 426, "segmentation": {"size": [512, 512], "counts": "g^o42m?2N2O2M2N2N2N2N2N2N2N2N01O000000011N2N2N2N2N2N2N3M2N2O1N]Qb2"}, "image_id": 280, "id": 4159}, {"iscrowd": 0, "category_id": 1, "bbox": [96.0, 461.0, 10.0, 9.0], "area": 50, "segmentation": {"size": [512, 512], "counts": "`^`12m?2N2N2O10O1N2N2N2O^aZ6"}, "image_id": 280, "id": 4160}, {"iscrowd": 0, "category_id": 1, "bbox": [385.0, 477.0, 64.0, 35.0], "area": 1445, "segmentation": {"size": [512, 512], "counts": "doP61n?2N2N2N2N2N2N2N2N2O1N2N1O1O1O1O1O1O1O1O1O1O11O1O1O1O1O1O1O2N00O1O1O1O100O1O1O1O1O1O1O11O1O1O1O1O1O1N2N2N2N2O2M2N2N2N2N2N2N2N2N_Po0"}, "image_id": 280, "id": 4161}, {"iscrowd": 0, "category_id": 1, "bbox": [201.0, 481.0, 76.0, 31.0], "area": 1211, "segmentation": {"size": [512, 512], "counts": 
"ooT31n?1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O100O1O1O1O11O1O1O1O1O1N2N2N2N3M2N2N2O1N2N000002N3M2`@EY?U101O01O01O0001OkAnN\\=Q1bBRO^=n0^BVOb=k0ZBXOg=g0UB^Oj=b0SBAm=Y10010O000010O0000010O00M4K4L4000WO^BUOa=g0cBYO^=c0eB[O]=a0hB[O[=b0hBZO]=a0gB\\O\\=a0hBZO\\=b0hB[O[=b0T1K4M3L5Lk_^1"}, "image_id": 283, "id": 4172}, {"iscrowd": 0, "category_id": 1, "bbox": [117.0, 19.0, 64.0, 71.0], "area": 2173, "segmentation": {"size": [512, 512], "counts": "Sbj11n?1N3N2N1N3N2N2M2O2d@_OU?h0N2N1N3N2M2O2N2M3N1N3N2N1N3N2M3N000O10O2O2M2O000O10O10O10O11N3N1N3N2N2M2O2N2M3N1N3N2N2M2O2N2M2O2M3N2N1N3N2Nm^U5"}, "image_id": 283, "id": 4173}, {"iscrowd": 0, "category_id": 1, "bbox": [466.0, 63.0, 46.0, 69.0], "area": 2058, "segmentation": {"size": [512, 512], "counts": "mRY71n?3M2O2M3M2e@Fn>;QAFm>i0N1N3M2EgNmA[1Q>fNmA\\1Q>gNlA\\1Q>fNnA[1Q>901O01O01O01O01O01O010O00010O102M2N3N1N1N1010O00010O000QN"}, "image_id": 283, "id": 4174}, {"iscrowd": 0, "category_id": 1, "bbox": [188.0, 72.0, 51.0, 38.0], "area": 1140, "segmentation": {"size": [512, 512], "counts": "iRn23k?2M3N3L3N2N3L30010O0k@\\On>d0o@_OR?g0010O01O01O01O010O00010O01O01O010O00O2L3010O00010O0010O0010O00010OZOk@`0[?M4M2M4L3NU]X4"}, "image_id": 283, "id": 4175}, {"iscrowd": 0, "category_id": 1, "bbox": [415.0, 78.0, 55.0, 66.0], "area": 2164, "segmentation": {"size": [512, 512], "counts": "^c_62m?3N3M2M4M3L3N3M2ZOXOoAl0n=VOPBl0n=WOnAm0n=VOPBl0n=WOoAi0P>[OlAe0U>]OiAc0W>b001000O0100O0100O0100O011O2M4M2N0O12M4M3L3N3M2M4M3L3N3M2M4M3L3N3M2M4M3L3N3M3Llkd0"}, "image_id": 283, "id": 4176}, {"iscrowd": 0, "category_id": 1, "bbox": [267.0, 89.0, 51.0, 38.0], "area": 1171, "segmentation": {"size": [512, 512], "counts": "YcU44i?3N2M4L3N3L30010O001j@\\Oo>c0n@@R?g0010O00010O00010O01O01O01O01O010O000O2L30010O00010O00010O00010O01O[Ok@>]?L3M3M4M2MelP3"}, "image_id": 283, "id": 4177}, {"iscrowd": 0, "category_id": 1, "bbox": [324.0, 99.0, 12.0, 19.0], "area": 228, "segmentation": {"size": [512, 512], "counts": "SSR5c0]?000000000000000000000mlg2"}, "image_id": 283, "id": 4178}, {"iscrowd": 0, "category_id": 1, "bbox": [138.0, 103.0, 33.0, 31.0], "area": 587, "segmentation": {"size": [512, 512], "counts": "jSU21n?1N3M2N3M3M2O2M2N3M2O2O010O010O010O010O010O01O0N3M2N3M2N3M2N3M2Na\\Z5"}, "image_id": 283, "id": 4179}, {"iscrowd": 0, "category_id": 1, "bbox": [356.0, 109.0, 57.0, 59.0], "area": 2062, "segmentation": {"size": [512, 512], "counts": "eTb52m?4M2N3L4TODbAO0?\\>EaAN0a0[>EaAN1>]>F`AN1;`>i0O10O010O10O10O01000O010O01000O010O01000O010O01000O010O10O100201O0N3L3N3M2M4M2M4M2N3L3N3L3N3M3LY[a1"}, "image_id": 283, "id": 4180}, {"iscrowd": 0, "category_id": 1, "bbox": [226.0, 137.0, 29.0, 29.0], "area": 573, "segmentation": {"size": [512, 512], "counts": "jTa34i?3L4L5L3M301O01O0002N10O0000010O0000010O0000010L3L4L4L5Ke[P4"}, "image_id": 283, "id": 4181}, {"iscrowd": 0, "category_id": 1, "bbox": [309.0, 139.0, 21.0, 23.0], "area": 466, "segmentation": {"size": [512, 512], "counts": "]dj4e0Y?2000000000O1000000000000000000000000000ekj2"}, "image_id": 283, "id": 4182}, {"iscrowd": 0, "category_id": 1, "bbox": [63.0, 162.0, 51.0, 41.0], "area": 1023, "segmentation": {"size": [512, 512], "counts": "beo02m?1N3M2O2M3M2O2M2N5L1010O10O10O010O10O1O010O010O10O010O10O10O0O2O10O10O010O10O10O0O2M3M2O2M2N3M3N1N3MYjV6"}, "image_id": 283, "id": 4183}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 188.0, 59.0, 71.0], "area": 2163, "segmentation": {"size": [512, 512], "counts": 
"\\7:^?Hf@:X?If@9X?9M2O2M3N1N3N2M2O2M3N1N3N2M2O2M3N1O2M2O0O02O1N3N2M2O1N010O010O010O3N2N1N3N2M2O2M3N1N3N2M2O2M3N1N3N2M2O2M3N1N3N2NeYR7"}, "image_id": 283, "id": 4184}, {"iscrowd": 0, "category_id": 1, "bbox": [69.0, 224.0, 73.0, 57.0], "area": 2261, "segmentation": {"size": [512, 512], "counts": "lgR11m?2N2N3M2N3M2N3M2N3M2M4M2N3N1010O010O010O010O01O001M200010O010O010O010O010O01O0O2O010OhN[AU1i>0O010O0O2N11O013L010O010O010O010O0N3M2ROSAg0U?M2N3L3N3M2N3M2N3MThh5"}, "image_id": 283, "id": 4185}, {"iscrowd": 0, "category_id": 1, "bbox": [223.0, 227.0, 58.0, 53.0], "area": 1897, "segmentation": {"size": [512, 512], "counts": "bg_35g?4K6K4N20001O01O00f@\\OX?f001O01O000001O01OYA\\OR>d0jA@V>a0dAD\\><`AHa>l0O00000010O00000010O000000010O00000010O00000010O000000O2J5L4K5L5J5L4K5KgXc3"}, "image_id": 283, "id": 4186}, {"iscrowd": 0, "category_id": 1, "bbox": [55.0, 239.0, 15.0, 13.0], "area": 116, "segmentation": {"size": [512, 512], "counts": "egk02l?3M2N3O010O010O010O01O0N3M2N^hl6"}, "image_id": 283, "id": 4187}, {"iscrowd": 0, "category_id": 1, "bbox": [418.0, 247.0, 94.0, 130.0], "area": 5346, "segmentation": {"size": [512, 512], "counts": "fXa6212g?6N3M3L3N3L3N3M2M4M2N3L3eBiNi;Z1TDhNi;[1UDhNh;[1TDhNj;Z1TDiNh;[1TDhNj;Z1TDhNi;\\1TDdNl;^1QDbNQ<`1lC`NTgBFo>;SACm>=VA@i>`0EBlA>T>DjA;V>IfA8Z>KcA0FHg>:aAMd>7XAJh>9UAFk>h0000O0101O2M4M3L3N3M2M4M3M2M4VH"}, "image_id": 283, "id": 4188}, {"iscrowd": 0, "category_id": 1, "bbox": [48.0, 249.0, 18.0, 18.0], "area": 172, "segmentation": {"size": [512, 512], "counts": "WXh01m?2JN\\@5a?6N3N1O20N1O2M3M2010O0100M2N3NQhn6"}, "image_id": 283, "id": 4189}, {"iscrowd": 0, "category_id": 1, "bbox": [153.0, 258.0, 70.0, 49.0], "area": 2204, "segmentation": {"size": [512, 512], "counts": "nh\\24h?5K4M3L5L30000O2K4L4O101O0001O01O01O0001O01ON3O0000010O0001O01O0001O01O0001O2OO01O0001O01O01O0001O01O0001O01O0001O01O01mNWAm0n>0010L3L4L4L5L3L4L^W`4"}, "image_id": 283, "id": 4190}, {"iscrowd": 0, "category_id": 1, "bbox": [38.0, 266.0, 21.0, 24.0], "area": 264, "segmentation": {"size": [512, 512], "counts": "jXc02m?2M2N3M2O2M3M2N3M2010N1N3M2O2M3M2N3N1N3MbWR7"}, "image_id": 283, "id": 4191}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 271.0, 30.0, 45.0], "area": 776, "segmentation": {"size": [512, 512], "counts": "`8\\1c>02M2O2N2M3N11000O10O1N1O02M3N2N1N3N2M3N1O2M3N1O2M3N2N1N3NQg`7"}, "image_id": 283, "id": 4192}, {"iscrowd": 0, "category_id": 1, "bbox": [250.0, 281.0, 68.0, 40.0], "area": 2193, "segmentation": {"size": [512, 512], "counts": "eYm36_?;K500000000E;O101O00000000000001O0001O000000000000000010O000000000000000001O01O0000000000000001O000001O000000006KO00000000000001O0N2A[gP3"}, "image_id": 283, "id": 4193}, {"iscrowd": 0, "category_id": 1, "bbox": [41.0, 315.0, 61.0, 84.0], "area": 2857, "segmentation": {"size": [512, 512], "counts": "Wkd02m?1N3N2N1N3N2M3N1O2M3UA[OX>g0eA\\OX>f0gA\\OW>f0gA[On=JQBm0N\\Oo=IPBm00\\On=IPBm0O\\Oo=JPB]1n=Z1QBdNQ>Z1QBcNR>Z1PBeNQ>Z1?eA_O^>a0bA]O_>c0bAZOa>f0^AXOd>h0]AVOe>j0ZATOi>i0ZAUOg>j09M2N3N2M2N3M2O2M2N3N2MfTh4"}, "image_id": 283, "id": 4196}, {"iscrowd": 0, "category_id": 1, "bbox": [228.0, 350.0, 64.0, 61.0], "area": 2686, "segmentation": {"size": [512, 512], "counts": "Y\\b39_?8G9J60000000000010O00000000K5O101O0001O0000000000010OJ6N20004MO0000000000001O01O0000000J6O1O1O2N100O100000000001K4G:G9K5AWAEj>1_AOY?LYd]3"}, "image_id": 283, "id": 4197}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 353.0, 37.0, 48.0], "area": 1048, "segmentation": {"size": [512, 512], "counts": 
"`;e0Y?3N2N1N3N2N1N3N2M3N1O20000O010000M2O2N2M3N1N3N2N1N3N2N2M2O2M3N2N1N3N2N1N_T]7"}, "image_id": 283, "id": 4198}, {"iscrowd": 0, "category_id": 1, "bbox": [100.0, 354.0, 13.0, 15.0], "area": 105, "segmentation": {"size": [512, 512], "counts": "[[b11m?3N1N3M3N1O2000M2O2M3N1NlTW6"}, "image_id": 283, "id": 4199}, {"iscrowd": 0, "category_id": 1, "bbox": [369.0, 398.0, 76.0, 67.0], "area": 2391, "segmentation": {"size": [512, 512], "counts": "dmh52l?3M2N3N1N3M2N3M2N3M3M2100O010O010O010O010O010O010O01000ON3N110O010O010O010O010M2N3M2N3M2N3N2M2N3M2N3M2N1O0003M2O2M2N3M2CcAUO`>i0aAUOa>i0bATOa>i0aAUOa>i0=M2N3M2N3N1N3M2N3M2N3MYSQ1"}, "image_id": 283, "id": 4200}, {"iscrowd": 0, "category_id": 1, "bbox": [474.0, 399.0, 38.0, 83.0], "area": 2017, "segmentation": {"size": [512, 512], "counts": "a^]71n?2N1N3N2nNIXB9f=IXB9f=IWB9b=CiA5d0:`=DiA5e09`=O^B3`=OPB]O8g0f=NPB]O8f0g=NPB^O7f0f=5XBMf=5WBNg=3XBOf=V1O2M3N2NO1000001O2O100000OF]BWNd=g1^BWNd=g1^BWNd=g1]BXNe=e1^BXNd=g1<00OWC"}, "image_id": 283, "id": 4201}, {"iscrowd": 0, "category_id": 1, "bbox": [89.0, 400.0, 83.0, 61.0], "area": 2765, "segmentation": {"size": [512, 512], "counts": "fm\\12l?3M2M4M2N3L3N2N3N1010O0010O010O00010O010O010O0N2N3L3N3M2M3N3M2N3L3N201O010O010O01O01O010O01O010O01O01O0O2M2M4N10010O0nN^Ag0b>VOcAg0]>WOhAf0Y>WOjAi0U>UOnAj0d>10O0010O0010O0010OO1N3M2M4M2M3N3L3N3M\\bY5"}, "image_id": 283, "id": 4202}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 415.0, 29.0, 62.0], "area": 929, "segmentation": {"size": [512, 512], "counts": "oN2M2O2N2M2O2N2M2O2iN\\AP1e>nN\\AS1c>kN`AT1g>mNWAn0h>QOZAn0l>O2M3N1N3N2M2O2M3N1N3N2M2O2MURa7"}, "image_id": 283, "id": 4203}, {"iscrowd": 0, "category_id": 1, "bbox": [333.0, 460.0, 87.0, 52.0], "area": 2705, "segmentation": {"size": [512, 512], "counts": "VoV53k?2N3N1N3M3M2N3M2N3M2N30O0100O010O0100O010O0100O010O0100O0100OO2M210O0100O010M2N3O0010OO2M3M2N3N1N2N2N2O1N21O001O001O001O001O1O001O001O001O001O1TOZAB\\A>e>_O^A`0b>^O`Ab0a>[ObAd0^>[OcA6E5X?Hj@8W?El@:T?Do@;[?01O0O2M3M2N[`]1"}, "image_id": 283, "id": 4204}, {"iscrowd": 0, "category_id": 1, "bbox": [42.0, 471.0, 54.0, 41.0], "area": 1342, "segmentation": {"size": [512, 512], "counts": "o_e01m?2O1N2O1N2O1N2O1N2O1O1N2O1I]Oo@e0o>^Oo@c0P?7O1N2O11O00O1N2O1N2O1O11O001O1O001O1O001O1O001O1O001O1O001Mn@VOS?g0o@YOQ?3n@=X?Ai@=_?L_@Fc?74O2M3Nc`_6"}, "image_id": 283, "id": 4205}, {"iscrowd": 0, "category_id": 1, "bbox": [317.0, 501.0, 30.0, 11.0], "area": 192, "segmentation": {"size": [512, 512], "counts": "oon41n?1O100O1O100O1O1O100O1001O1O1OO100O1O100O1O11O2N1O1O2N1O1ORPb2"}, "image_id": 283, "id": 4206}, {"iscrowd": 0, "category_id": 1, "bbox": [508.0, 502.0, 4.0, 7.0], "area": 17, "segmentation": {"size": [512, 512], "counts": "h_n72n?1N2N2Y@"}, "image_id": 283, "id": 4207}, {"iscrowd": 0, "category_id": 1, "bbox": [148.0, 509.0, 13.0, 3.0], "area": 30, "segmentation": {"size": [512, 512], "counts": "o_Z21n?1O10000000000001O001O00QP_5"}, "image_id": 283, "id": 4208}, {"iscrowd": 0, "category_id": 1, "bbox": [245.0, 0.0, 16.0, 9.0], "area": 78, "segmentation": {"size": [512, 512], "counts": "P`j31o?1O2N1O1O1O1O1OO1O1O1O1O1O1O1OQ`m3"}, "image_id": 284, "id": 4209}, {"iscrowd": 0, "category_id": 1, "bbox": [249.0, 0.0, 39.0, 29.0], "area": 573, "segmentation": {"size": [512, 512], "counts": "c`l31o?1N2N2N2N2N2HEh@>U?Di@>U?Di@h0PAYOn>n0N2N2N3ON3M200000N2N2N2N0001O00000000000001O0002N3M2N2N2N2N2N2N2N2N2N2N2O1N2N3M2N2N2NQ^[1"}, "image_id": 284, "id": 4215}, {"iscrowd": 0, "category_id": 1, "bbox": [269.0, 57.0, 34.0, 34.0], "area": 581, "segmentation": {"size": [512, 512], 
"counts": "ZbV41n?3M2N2N2N2N2N2N2O1N3M2N2N2N2N00000001O2N2N2N3N1N2N2N2N2N2N2N3M2N2Og]X3"}, "image_id": 284, "id": 4216}, {"iscrowd": 0, "category_id": 1, "bbox": [399.0, 87.0, 61.0, 47.0], "area": 1437, "segmentation": {"size": [512, 512], "counts": "YcW61n?2N3M2O1N2N2N2N2N2N2N2O100000001O000O1N2N2N2N2N2N2N2O0O00000000000001O2N2N2O1N2N3M2N2N1O00000000001O2N2N2O2M2N2N2N2N2N2Nkli0"}, "image_id": 284, "id": 4217}, {"iscrowd": 0, "category_id": 1, "bbox": [325.0, 94.0, 45.0, 46.0], "area": 1067, "segmentation": {"size": [512, 512], "counts": "gcR52n?1N2N3M2N2N2N2N2N2O1N3M2N2N2N2N2N2O0O000000001O000001O1O3M1O0001O0002XOPA=R?BPA;R?CPA;R?CPA;R?CPA;\\?O1N2N2N3M^lV2"}, "image_id": 284, "id": 4218}, {"iscrowd": 0, "category_id": 1, "bbox": [366.0, 109.0, 28.0, 27.0], "area": 384, "segmentation": {"size": [512, 512], "counts": "nSg51o?1J0Y@2f?5N3M2N2N2N2N10O000001O000001O1O2N2O2M2N2N2N2N2N3NXlj1"}, "image_id": 284, "id": 4219}, {"iscrowd": 0, "category_id": 1, "bbox": [437.0, 113.0, 56.0, 51.0], "area": 1468, "segmentation": {"size": [512, 512], "counts": "]dj62m?2N2N2N2N2N2O1N2N2N3M2N2N1O000000002N2N3N1N2N2N2N2N2N2N2N1O01O3N1N2N2N2N2N2N2N2N2N2N2N3M2O1N2N2N2N2N2N2N2N2N2Nf[9"}, "image_id": 284, "id": 4220}, {"iscrowd": 0, "category_id": 1, "bbox": [281.0, 115.0, 37.0, 38.0], "area": 686, "segmentation": {"size": [512, 512], "counts": "Pd\\42m?2N2O1N2N2N2N2N2N2N2N2N2N2N2O11O0000000001O00N2O1N2N2N2N2N2N2N2N2N2N2N2NhkP3"}, "image_id": 284, "id": 4221}, {"iscrowd": 0, "category_id": 1, "bbox": [475.0, 148.0, 37.0, 59.0], "area": 1335, "segmentation": {"size": [512, 512], "counts": "[e]72m?3M2N2N2N2N2N2N2O1m@^Oe>d0YA^Oe>d0YA^Oe>e0XA]Of>e0XA]Of>Q1M2N2N2N2N2N2OO000000001O2N3M2O1N2N2N2N2N2N2NRK"}, "image_id": 284, "id": 4222}, {"iscrowd": 0, "category_id": 1, "bbox": [323.0, 154.0, 28.0, 28.0], "area": 424, "segmentation": {"size": [512, 512], "counts": "VeQ52m?2N2N3N1N2N2N2N2N3M2O1N2N1O01O2N2N2O1N3M2N2N2N2N2O2M2NiZ`2"}, "image_id": 284, "id": 4223}, {"iscrowd": 0, "category_id": 1, "bbox": [437.0, 184.0, 27.0, 27.0], "area": 371, "segmentation": {"size": [512, 512], "counts": "Ufj62m?2O1N2N2N2N3M2N2N2N2O1N00000002N2O1N2N2N2N2N3M2N2N2Olig0"}, "image_id": 284, "id": 4224}, {"iscrowd": 0, "category_id": 1, "bbox": [507.0, 211.0, 5.0, 10.0], "area": 30, "segmentation": {"size": [512, 512], "counts": "gfm72m?2N2N2N2\\I"}, "image_id": 284, "id": 4225}, {"iscrowd": 0, "category_id": 1, "bbox": [506.0, 222.0, 6.0, 11.0], "area": 36, "segmentation": {"size": [512, 512], "counts": "SWm71n?2N2N2N2N2QI"}, "image_id": 284, "id": 4226}, {"iscrowd": 0, "category_id": 1, "bbox": [497.0, 251.0, 13.0, 14.0], "area": 93, "segmentation": {"size": [512, 512], "counts": "Phh71n?3M2N2N2N200000N2N2N3M2Nng0"}, "image_id": 284, "id": 4227}, {"iscrowd": 0, "category_id": 1, "bbox": [440.0, 261.0, 61.0, 57.0], "area": 1725, "segmentation": {"size": [512, 512], "counts": "nXl61n?2M3N2N1N3N2M2O2N2M3N1N3N2N1N3N2M3N101O10O1000O10O10O1000O10O10O10O1000O10O10O10O1000M3N1O2M3N1N3N2N2M2O2M3N1O2M3N2M2O2NoV5"}, "image_id": 284, "id": 4228}, {"iscrowd": 0, "category_id": 1, "bbox": [479.0, 316.0, 33.0, 37.0], "area": 699, "segmentation": {"size": [512, 512], "counts": "_j_73m?1N2N2N2N2N2N2N3M2N2O1N2N2N2N1O000000010O0001O2N2N3M2N2N2O1N2N2NkE"}, "image_id": 284, "id": 4229}, {"iscrowd": 0, "category_id": 1, "bbox": [483.0, 376.0, 29.0, 54.0], "area": 845, "segmentation": {"size": [512, 512], "counts": "bla71n?2N2N2N3M2N2N2N2N2O1N2N2N2N2N2N2N2N3M2N2N2N2N2N2O1N1O000YD"}, "image_id": 284, "id": 4230}, {"iscrowd": 0, "category_id": 1, "bbox": [507.0, 
438.0, 5.0, 9.0], "area": 26, "segmentation": {"size": [512, 512], "counts": "imm72n?1N2N2N2YB"}, "image_id": 284, "id": 4231}, {"iscrowd": 0, "category_id": 1, "bbox": [343.0, 0.0, 5.0, 3.0], "area": 10, "segmentation": {"size": [512, 512], "counts": "P`[51o?1O001OOQPb2"}, "image_id": 287, "id": 4232}, {"iscrowd": 0, "category_id": 1, "bbox": [485.0, 9.0, 27.0, 12.0], "area": 317, "segmentation": {"size": [512, 512], "counts": "Z`b7;e?00000000000O100000000000000000000000000000000000000G"}, "image_id": 287, "id": 4233}, {"iscrowd": 0, "category_id": 1, "bbox": [477.0, 180.0, 35.0, 52.0], "area": 702, "segmentation": {"size": [512, 512], "counts": "Qg^72h?0[@2c?O\\@3b?7N1N3N2N00000O2O1O1O00O10O100000O10O100000O10O10000000O01000000]J"}, "image_id": 287, "id": 4234}, {"iscrowd": 0, "category_id": 1, "bbox": [105.0, 0.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "P`d11o_[6"}, "image_id": 289, "id": 4235}, {"iscrowd": 0, "category_id": 1, "bbox": [109.0, 0.0, 21.0, 17.0], "area": 232, "segmentation": {"size": [512, 512], "counts": "Z`f12l?2N2M4M2O2O00001O001O00001OO1N2M3N2M3N2MSPo5"}, "image_id": 289, "id": 4236}, {"iscrowd": 0, "category_id": 1, "bbox": [123.0, 0.0, 36.0, 32.0], "area": 750, "segmentation": {"size": [512, 512], "counts": "c`m13k?3L3N2M4M2N3L3N3O00001O001O001O00001O010O010O0001N1N3M2M4M2N2M4M2N3L3Nk_`5"}, "image_id": 289, "id": 4237}, {"iscrowd": 0, "category_id": 1, "bbox": [185.0, 0.0, 20.0, 17.0], "area": 224, "segmentation": {"size": [512, 512], "counts": "[`l21m?2M4M2M4N10001O001O00001O00N2M3N2M3N2MS`i4"}, "image_id": 289, "id": 4238}, {"iscrowd": 0, "category_id": 1, "bbox": [270.0, 0.0, 13.0, 7.0], "area": 52, "segmentation": {"size": [512, 512], "counts": "PPW41o?1O2N1O1O1OO100O1O1O1O1OQ`b3"}, "image_id": 289, "id": 4239}, {"iscrowd": 0, "category_id": 1, "bbox": [316.0, 0.0, 45.0, 37.0], "area": 1149, "segmentation": {"size": [512, 512], "counts": "iPn44h?4L4L4N30O0000010O000001K4L4K501O0000001O0000001O0000001O0000001O000000M3L4L4L4L4L4L4L4LT`[2"}, "image_id": 289, "id": 4240}, {"iscrowd": 0, "category_id": 1, "bbox": [362.0, 0.0, 67.0, 80.0], "area": 2591, "segmentation": {"size": [512, 512], "counts": "PRe53k?2M4M2N2M4M2N30O010O0001N1M4M2M4M2N2M4M2N3L3N3M2M3N3L3N3M2M3N3M2M3ON3N2N2M3N2M3N2N2M3N2N2M3N2N2M3N2M3N2N2M3N2N2M3N2N2M3N2M3N2N2M3N2NR`Y1"}, "image_id": 289, "id": 4241}, {"iscrowd": 0, "category_id": 1, "bbox": [278.0, 6.0, 24.0, 26.0], "area": 393, "segmentation": {"size": [512, 512], "counts": "fP[44i?3N3L3N2M4M2O2O01O010O01O01O010O0N2M4M2M4M2M3NfoX3"}, "image_id": 289, "id": 4242}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 8.0, 29.0, 53.0], "area": 1473, "segmentation": {"size": [512, 512], "counts": "9d1\\>0000000000000000000000000O100000000000000000000000000b1^NV^a7"}, "image_id": 289, "id": 4243}, {"iscrowd": 0, "category_id": 1, "bbox": [64.0, 9.0, 29.0, 67.0], "area": 1862, "segmentation": {"size": [512, 512], "counts": "YPP1i1W>000000000000:F00000000000000000001O00000000000000000000f_a6"}, "image_id": 289, "id": 4244}, {"iscrowd": 0, "category_id": 1, "bbox": [422.0, 17.0, 59.0, 72.0], "area": 2044, "segmentation": {"size": [512, 512], "counts": "]Rc61n?2M2N3oNIUB:i=HUB9i=IUB:i=HTB:j=ISB:k=GTB;i=HTB:j=ISB:k=GSBa0aAA`>?]ACb>>[AFa>>\\ADb>o0L3N3M2M3N3M2M4M2M4MO10O10O011O2M3N3L3N3M2M4M2N2M4M2M4M2N2M4M2M4M2N3L3N2N3L3N3LnnT5"}, "image_id": 289, "id": 4247}, {"iscrowd": 0, "category_id": 1, "bbox": [198.0, 28.0, 52.0, 40.0], "area": 1051, "segmentation": {"size": [512, 512], "counts": 
"YQS33k?2M4M2M4M2O101O010O01O01O010O01O01f@@R?`0k@CV?c0010O01O01O010O01O01O010O01O01O010O01O01O010O01O01L3N3L3N2M4M2M4MdnR4"}, "image_id": 289, "id": 4248}, {"iscrowd": 0, "category_id": 1, "bbox": [289.0, 32.0, 20.0, 25.0], "area": 294, "segmentation": {"size": [512, 512], "counts": "ca`41l?4L3M3N3L3M4M20010O0010O0M4M2M3N3L3M4Mm^U3"}, "image_id": 289, "id": 4249}, {"iscrowd": 0, "category_id": 1, "bbox": [184.0, 46.0, 28.0, 27.0], "area": 450, "segmentation": {"size": [512, 512], "counts": "PRl21l?3N2M4M2M4L3N210O0010O00010O0010O0010O0010OM3M4M2M3N3L]ne4"}, "image_id": 289, "id": 4250}, {"iscrowd": 0, "category_id": 1, "bbox": [309.0, 71.0, 21.0, 24.0], "area": 307, "segmentation": {"size": [512, 512], "counts": "gbj43k?2M4M2M4M2N3M2010O0010O01O0M3N3M2N3M2M4Memj2"}, "image_id": 289, "id": 4251}, {"iscrowd": 0, "category_id": 1, "bbox": [173.0, 76.0, 58.0, 62.0], "area": 1785, "segmentation": {"size": [512, 512], "counts": "gcf21m?2N3N1N3M3M2O2M201O10O10O010N2M2O2M2N3CTOcAn0Z>UOcAm0\\>TObAo0[>TOcAm0[>>N2M2N30O0100O01O1N1N3M2O2M3M2O2M3M2O2M2O2M3M2O2M2N3N2M2N3N1N3M3N1NT]\\4"}, "image_id": 289, "id": 4252}, {"iscrowd": 0, "category_id": 1, "bbox": [457.0, 78.0, 55.0, 47.0], "area": 1765, "segmentation": {"size": [512, 512], "counts": "acT72i?6I6K5K5M3010O00000000010O000000010O000000010O000000010O00N2K5J6M4O0001O0001O000001O01O000001O0001O00M4I6K5KcM"}, "image_id": 289, "id": 4253}, {"iscrowd": 0, "category_id": 1, "bbox": [393.0, 81.0, 26.0, 26.0], "area": 369, "segmentation": {"size": [512, 512], "counts": "QcT61m?3M2O2M3M2N3M2O2M2010O0100O010OO2M2O2M3M2N3M2O2M2NY]^1"}, "image_id": 289, "id": 4254}, {"iscrowd": 0, "category_id": 1, "bbox": [282.0, 82.0, 26.0, 28.0], "area": 402, "segmentation": {"size": [512, 512], "counts": "lR]41l?4M2N3L31O01c@ES?;j@IV?6h@LX?>010O0010O0010O0010ON2M4M2M3N3L3MYmU3"}, "image_id": 289, "id": 4255}, {"iscrowd": 0, "category_id": 1, "bbox": [345.0, 84.0, 65.0, 50.0], "area": 1478, "segmentation": {"size": [512, 512], "counts": "Uc\\52k?3M4L3M4L3N2010O010O00010O010O010O0010O0010O01n@^Oe>a0YAAg>?WADi>6TALm>1SA2l>KTA8l>FTA=l>_OUAc0k>[OTAh0P?30O01O010O01O01O010OM4M201O00010O010O100O0010O01N1N2N3L3N3L3N2Nalb1"}, "image_id": 289, "id": 4256}, {"iscrowd": 0, "category_id": 1, "bbox": [248.0, 96.0, 37.0, 65.0], "area": 1644, "segmentation": {"size": [512, 512], "counts": "hSl34l00k=5PB0l=4oA1m=3nA2R>NiA7W>JdA;[>e0001O00N3I6L41O01O000001O01O000001O01OK5K6K4K5K5K5L5J5K5K5K5Lo\\a3"}, "image_id": 289, "id": 4257}, {"iscrowd": 0, "category_id": 1, "bbox": [105.0, 99.0, 40.0, 31.0], "area": 692, "segmentation": {"size": [512, 512], "counts": "ccd12k?3N3M2M4M2N3M20010O010O00010O010O010O00010O010O010O00010O010O0001N1M4M2M4M2N2Mc\\g5"}, "image_id": 289, "id": 4258}, {"iscrowd": 0, "category_id": 1, "bbox": [287.0, 104.0, 64.0, 74.0], "area": 2615, "segmentation": {"size": [512, 512], "counts": "Re_42k?3N3M2M3N3M2M4M2M4M2N2M4M2M4M2N3L3N2N2M100O4M2N3N10010O010O0010O0010O010O01`NQBn0n=oNUBQ1k=lNYBS1h=jNZBW1e=fN^BZ1c=bNaB^1P>010O01O0N2N3M2M4M2N3L3N2N3L3N3M2N3L3N2Nh[`2"}, "image_id": 289, "id": 4259}, {"iscrowd": 0, "category_id": 1, "bbox": [211.0, 117.0, 16.0, 24.0], "area": 157, "segmentation": {"size": [512, 512], "counts": "YdY32l?2N3M2N3L3N1O0000O2O3M2N3M2N3MZ\\^4"}, "image_id": 289, "id": 4260}, {"iscrowd": 0, "category_id": 1, "bbox": [431.0, 122.0, 29.0, 35.0], "area": 585, "segmentation": {"size": [512, 512], "counts": "ddg62k?3N3L3N3L3N2M4M2M4M2O110O010O00010OO2L3N2M4M2M4M2M3N3L3NQli0"}, "image_id": 289, "id": 4261}, {"iscrowd": 0, "category_id": 1, "bbox": [455.0, 132.0, 
18.0, 23.0], "area": 227, "segmentation": {"size": [512, 512], "counts": "gdS71l?3N2M4L3N3L3N2O2O00O2M2M4L3N2M4M2Ml[c0"}, "image_id": 289, "id": 4262}, {"iscrowd": 0, "category_id": 1, "bbox": [363.0, 149.0, 55.0, 67.0], "area": 2180, "segmentation": {"size": [512, 512], "counts": "gee52l?2M3N3L3M4M2M3N3QAZO_>i0_AYO_>j0]AZO_>U1N3L3O110O0iA\\NR>j10O01O01O010O01O01O01O010O01O01O010OO2L3N2M4L3N2M4M2M4M2M3N3L010O3N3L3Db@4`?Jb@3h?Mmj^1"}, "image_id": 289, "id": 4263}, {"iscrowd": 0, "category_id": 1, "bbox": [498.0, 160.0, 14.0, 33.0], "area": 290, "segmentation": {"size": [512, 512], "counts": "iUi73j?3N3L3N2M4M2M4M2M3N3O0010OoJ"}, "image_id": 289, "id": 4264}, {"iscrowd": 0, "category_id": 1, "bbox": [421.0, 171.0, 51.0, 68.0], "area": 2008, "segmentation": {"size": [512, 512], "counts": "bfb63k?2N2e@Ji>:SAJj>8TAJj>9SAIk>9SAJi>h0L5L3N2N3L3N3M2M3N3O001O01O01O010OTNSBe1T>10O00010O0O2M2N2M4M2N3L3N3M2M3N3M2N3L3N2N3L3N3M2M4MSjc0"}, "image_id": 289, "id": 4265}, {"iscrowd": 0, "category_id": 1, "bbox": [349.0, 211.0, 23.0, 18.0], "area": 221, "segmentation": {"size": [512, 512], "counts": "lf^51m?3L3N3M21O010O01O010O01O010O01O010O01N1M4M2NWiU2"}, "image_id": 289, "id": 4266}, {"iscrowd": 0, "category_id": 1, "bbox": [497.0, 266.0, 15.0, 28.0], "area": 271, "segmentation": {"size": [512, 512], "counts": "nhh72l?2N3L3N3M2N3L3N2N3O0010O0001dG"}, "image_id": 289, "id": 4267}, {"iscrowd": 0, "category_id": 1, "bbox": [34.0, 342.0, 35.0, 41.0], "area": 837, "segmentation": {"size": [512, 512], "counts": "b[a04j?2N3M2N3L3N2N3M2N3L3N3M2N3O010O01O010O01O01M2N3L3N3M2N3L3N2N3M2N3L3NSUm6"}, "image_id": 289, "id": 4268}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 406.0, 35.0, 85.0], "area": 1379, "segmentation": {"size": [512, 512], "counts": "Q>Z1d>0O10O10O010O10O03N2N2M4MFhAPOU>Q1nAoNo=Q1SBlNn=S1VBjNi=W1YBfNh=[1`0O010O01O010OO2M2M3ZOXA2k>JYA3i>KYA2k>JYA3i>KYA2k>KWA3]?McR^7"}, "image_id": 289, "id": 4269}, {"iscrowd": 0, "category_id": 1, "bbox": [124.0, 407.0, 20.0, 27.0], "area": 299, "segmentation": {"size": [512, 512], "counts": "[]n13j?3M4M2M3M4M2M4N1010O0M4M2N2M4M2N3L3N3MVcg5"}, "image_id": 289, "id": 4270}, {"iscrowd": 0, "category_id": 1, "bbox": [425.0, 421.0, 87.0, 79.0], "area": 3996, "segmentation": {"size": [512, 512], "counts": "Vod62l?3L3N2M4M2N3L3N3L3N3M2M3N3M2M4M2M4M2N2M4M2M4M2N3L301O0000M3N2N1N3N2M3N2N2M3O1O100O1O1O001O1O1O1000000000000000O10O1000000000000000000O10O1000000000000000O100000O10000000000kB"}, "image_id": 289, "id": 4271}, {"iscrowd": 0, "category_id": 1, "bbox": [83.0, 438.0, 58.0, 74.0], "area": 2336, "segmentation": {"size": [512, 512], "counts": "noY12l?2M3N2M3N2N2M3N2M3N2M3N2N2M3N2M3N2M3N2N2M3N2M3N2M3N2N200001O00O1N2N2M30O2M2M4M2N2M4M2M4bNgAQ1[>mNgAP1\\>nNgAo0f>N3L3N3L3N2N3L3N3L3N2MjQi5"}, "image_id": 289, "id": 4272}, {"iscrowd": 0, "category_id": 1, "bbox": [287.0, 476.0, 16.0, 16.0], "area": 160, "segmentation": {"size": [512, 512], "counts": "Uo_43k?3M2M3N30O010O01O01O001M2N3L3NQQX3"}, "image_id": 289, "id": 4273}, {"iscrowd": 0, "category_id": 1, "bbox": [14.0, 501.0, 29.0, 11.0], "area": 166, "segmentation": {"size": [512, 512], "counts": "n_72l?2M3N2N21O001O00001O001O00001O001O00001O001O001O00001O00QPZ7"}, "image_id": 289, "id": 4274}, {"iscrowd": 0, "category_id": 1, "bbox": [269.0, 505.0, 20.0, 7.0], "area": 75, "segmentation": {"size": [512, 512], "counts": "ooV41m?2M3O1001O001O00001O001O00001O001O0000QP_3"}, "image_id": 289, "id": 4275}, {"iscrowd": 0, "category_id": 1, "bbox": [485.0, 0.0, 27.0, 69.0], "area": 912, "segmentation": {"size": [512, 512], 
"counts": "c`b71m?2N3L3_@HX?:f@IV?b0N3M2M4O00001O001O001O00001O0oAPOR=R1iBQOW=Q1eBROZ=Q1aBQO_=Q1\\BSOc=P1XBROh=d1O00"}, "image_id": 291, "id": 4276}, {"iscrowd": 0, "category_id": 1, "bbox": [427.0, 88.0, 85.0, 76.0], "area": 3001, "segmentation": {"size": [512, 512], "counts": "jce64j?2N3M2M310O010O01O05LO00010OO2M2M3M4M2M4M2M3M4M2M4M2O1010O01ON3M2O2O01O01O010O01O01O010O00010O010O00010O010mN`Ad0`>YOdAf0\\>WOgAj0Y>SOjAl0e>1O01O010O01O01OSATOf>m0WAUOj>Q1O0010O0010O0010O001oNVAk0i>ROZAn0l>10O0010O001lK"}, "image_id": 291, "id": 4277}, {"iscrowd": 0, "category_id": 1, "bbox": [405.0, 148.0, 107.0, 70.0], "area": 3489, "segmentation": {"size": [512, 512], "counts": "eeZ62l?3L3M3N3L3M3M4M2M3M4L3N3N10010O00010O00010O01O01O010O00010O00010O01O01O010O00010O00010O00010O010O00010O00010O00010O01O01O010OjNZAR1j>010O01O01O01POUAi0k>TOXAm0n>O01O01O01O01O010O00010O00010O01O01O01O01O010O00010O000M4L3M3M4M2M4L3MSJ"}, "image_id": 291, "id": 4278}, {"iscrowd": 0, "category_id": 1, "bbox": [401.0, 197.0, 65.0, 65.0], "area": 1927, "segmentation": {"size": [512, 512], "counts": "efX61n?2N1N3N2N2M2O2M3N2N1N3N2^AWOk=i0SBYOn=g0nA]OR>c0kA_OV>a0fACZ>:_AIb>k00O010O1O100O100O0010O0010O0010O0010O010kNZAn0f>PO\\AP1j>10O010O00010O010O0POUAk0P?10O01OO2M2M4M2M3N3L3N3L3N2Nghf0"}, "image_id": 291, "id": 4279}, {"iscrowd": 0, "category_id": 1, "bbox": [449.0, 208.0, 38.0, 30.0], "area": 640, "segmentation": {"size": [512, 512], "counts": "jfP74i?4L3M3O2O00010O00010O010`@DZ?a00010O00010O00010O010O00010O00010O0001L3M4M2M3M4LWY<"}, "image_id": 291, "id": 4280}, {"iscrowd": 0, "category_id": 1, "bbox": [331.0, 209.0, 15.0, 14.0], "area": 127, "segmentation": {"size": [512, 512], "counts": "hfU53k?2M3N30O01O01O01O010O01L3N2M^ib2"}, "image_id": 291, "id": 4281}, {"iscrowd": 0, "category_id": 1, "bbox": [331.0, 250.0, 42.0, 31.0], "area": 752, "segmentation": {"size": [512, 512], "counts": "ShU51m?3M2M4O010O0^@J[?5c@M]?;10O001M2M301O010O00010O010O00010O010O00010O010O0001M2N3L3N2M4M2M4MmWU2"}, "image_id": 291, "id": 4282}, {"iscrowd": 0, "category_id": 1, "bbox": [391.0, 267.0, 66.0, 47.0], "area": 1874, "segmentation": {"size": [512, 512], "counts": "TiS62l?2N2M4M2M4M2N2M4M2N3M2010O0010O010O00010O010O0O2L31O010O01O010O01O010O01OPOUAk0P?010N1N3N101M2N210O010O000N03M2N3L3N2M4M2N3L3N3L3N2N3L3N]Wk0"}, "image_id": 291, "id": 4283}, {"iscrowd": 0, "category_id": 1, "bbox": [223.0, 309.0, 56.0, 38.0], "area": 1182, "segmentation": {"size": [512, 512], "counts": "Yj_32l?2M3N3L3N2M4M2M4O01O01O01O01O01O01O010O00010O00010O010O00010O00010O00010O0010O00010O0010O0001O0M3N3L3N2N3L3N2MnUd3"}, "image_id": 291, "id": 4284}, {"iscrowd": 0, "category_id": 1, "bbox": [203.0, 341.0, 160.0, 97.0], "area": 6944, "segmentation": {"size": [512, 512], "counts": "mkU32k?3M4M2M4M2M3M4M2N201O010O01O01O010O00010O000N3M210O000UAPOg>T1010O00010O0010O0010O00010O010O000O2L310O01O01O01O010O01O01OO2L3M4M2M310O01O01O0_Oe@>_?O01O01O010OM3M4M2M4L3M3N3L3M4M2M3M4M2M3M4O001O01O010OQBTNj=R2O01O01O0ZBnMNO\\=S2bBWN]=_1dBjN]=S1eBnNZ=o0jBPOV=n0lBSOT=k0nBTOR=l0nBTOS=l0lBUOS=k0nBTOR=l0nBUOR=k0mBUOS=k0nBTOR=m0mBTOS=k0nBTOR=l0nBUOR=k0mBUOS=k0nBUOQ=l0nBSOT=l0lBROV=n0kBnNY=R1fBlN\\=T1eBiN]=W1cBfNa=Z1^BdNd=\\1]BaNe=7RBm0>kNE3]=2cBo08oNF2_=MfBR13\\Oc=d0YB_Og=a0VBBj=?SBDm=W10010O0010O0010O0010O000O2L3N3L3M3N3L3N3L3N2[OWA0m>LVA1m>MUA1m>LWA0m>MUA1m>LWA0QcZ2"}, "image_id": 291, "id": 4285}, {"iscrowd": 0, "category_id": 1, "bbox": [66.0, 406.0, 28.0, 28.0], "area": 481, "segmentation": {"size": [512, 512], "counts": "W]Q14j?2M4M2M3M4M2N3O01O010O0010O0010O0010O00O2M2M4M2M4L3N2MUc`6"}, 
"image_id": 291, "id": 4286}, {"iscrowd": 0, "category_id": 1, "bbox": [90.0, 420.0, 47.0, 30.0], "area": 629, "segmentation": {"size": [512, 512], "counts": "`]]11l?4M2M3M4N110O01O01O010O01O01O010O01O010O01O010O01O01O010O01O01O010O01O010O01O010O01O0N2N3L3M^Rk5"}, "image_id": 291, "id": 4287}, {"iscrowd": 0, "category_id": 1, "bbox": [136.0, 443.0, 21.0, 18.0], "area": 230, "segmentation": {"size": [512, 512], "counts": "T^T23j?4M2M3O20O01O01O01O01O010O00010O0M3N3L3MRRa5"}, "image_id": 291, "id": 4288}, {"iscrowd": 0, "category_id": 1, "bbox": [154.0, 452.0, 32.0, 20.0], "area": 301, "segmentation": {"size": [512, 512], "counts": "\\^]21l?3N2M4O001O01O010O01O01O010O01O01O010O01O01O010O00010O010M2M3NdaR5"}, "image_id": 291, "id": 4289}, {"iscrowd": 0, "category_id": 1, "bbox": [142.0, 0.0, 76.0, 59.0], "area": 2860, "segmentation": {"size": [512, 512], "counts": "SQW22n?3M3M4K4M3M3L4M0UOZO\\Bf0c=^OYBb0h=AUB?k=DRBLjA4V>OgA1Y>j00100M]NiAb1X>300000O10000O10000O10000O1000000O10000O10000O10000O1000000O10000O10000O10000UO]A8d>G`A6`>JcA3]>MfA0Z>OkALV>4mAIS>7PBFP>:SBBn==k00000O10000O2O3M4Kiob4"}, "image_id": 292, "id": 4290}, {"iscrowd": 0, "category_id": 1, "bbox": [213.0, 33.0, 33.0, 41.0], "area": 869, "segmentation": {"size": [512, 512], "counts": "WaZ34l?3M4K4M3M4K4M4L3M4K01000O01000O10O10O10O10003M3L4M3M3L5L3M3M3L4MPnT4"}, "image_id": 292, "id": 4291}, {"iscrowd": 0, "category_id": 1, "bbox": [34.0, 460.0, 13.0, 13.0], "area": 108, "segmentation": {"size": [512, 512], "counts": "b^a03j?3M4O000010O01O0010L3M3LfQX7"}, "image_id": 293, "id": 4292}, {"iscrowd": 0, "category_id": 1, "bbox": [111.0, 485.0, 13.0, 15.0], "area": 95, "segmentation": {"size": [512, 512], "counts": "]og11n?2M3M3N2M3000M3N1O2M3N2Nf`Q6"}, "image_id": 293, "id": 4293}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 399.0, 77.0, 94.0], "area": 3556, "segmentation": {"size": [512, 512], "counts": "U>Q1`>POmAR1P>POQBo0l=TOTBl0j=WOUBj0k=UOTBl0l=UOQBn0n=ROPBP1Q>POkAS1U>mNiAV1W>901BYNbBf1\\=\\NbBf1[=^NaBf1\\=\\NbBf1[=]NbBg1[=>00O010O03N3L3N3L3N2IVBUNn=h16M4M2M3N3L301O01O01O010O01O01O010O0N3M2N2010O01O01O010O01O01N1M4M2N03M2O110O001L3N2M4M2M4Bg@1[?Mg@1[?Li@0[?Mg@1_bi6"}, "image_id": 294, "id": 4294}, {"iscrowd": 0, "category_id": 1, "bbox": [160.0, 495.0, 55.0, 17.0], "area": 456, "segmentation": {"size": [512, 512], "counts": "m_`21n?3N00000O10000O10000O10000O10000002N3M0000O10000O10000O10000O10000IKc@5]?N_@3a?6O10000O1000000O1001O3M1O0000003M4L3MRPd4"}, "image_id": 295, "id": 4295}, {"iscrowd": 0, "category_id": 1, "bbox": [99.0, 0.0, 81.0, 137.0], "area": 7274, "segmentation": {"size": [512, 512], "counts": "P`a11o?5K5K0^@G]?9c@LX?3i@2R?Nn@7m>IRADVA<1]Ob>7]A<1B]>1cA=OHY>KhA=OMT>FlA=11o=BPB=10P>CoA=10P>BPB>O10^Oi=3XB>O1ODe=f0]BFNI`=a0bBEOOZ=CV1IQOj;[OmD;_OZ1MmNhNFY=MjBLF1:V>0nAAM>U>1\\BJg=2_BIf=2Y1KYmU5"}, "image_id": 296, "id": 4296}, {"iscrowd": 0, "category_id": 1, "bbox": [171.0, 0.0, 42.0, 23.0], "area": 590, "segmentation": {"size": [512, 512], "counts": "P`e21o?3M3M0000O100001O3M3M2N3M2N3M00O100O10000O10000O100O10000O100O10000O100O3N2N3L4M2Nd_e4"}, "image_id": 296, "id": 4297}, {"iscrowd": 0, "category_id": 1, "bbox": [228.0, 0.0, 19.0, 8.0], "area": 80, "segmentation": {"size": [512, 512], "counts": "PPb31o?2N2N2N1OO100O100O100O100O100O100O10P`T4"}, "image_id": 296, "id": 4298}, {"iscrowd": 0, "category_id": 1, "bbox": [276.0, 0.0, 60.0, 56.0], "area": 1791, "segmentation": {"size": [512, 512], "counts": 
"WPZ41o?1N2N3M2O2M2N2O2M2N3N1O2N1O1O2N1O2N1O1WAPO`>R1]APOb>Q1]AQOa>X1O1O2N1O1O2N1O1O1OO1O100O3M2N2O2M2N3M2O1N3M2N3N1N3M2O101N1N3M2O1N3M2O2M2N2Omng2"}, "image_id": 296, "id": 4299}, {"iscrowd": 0, "category_id": 1, "bbox": [323.0, 0.0, 47.0, 32.0], "area": 880, "segmentation": {"size": [512, 512], "counts": "P`Q51o?1O1O2N1O2N1_@IW?8g@JX?8e@JZ?7e@KY??O1O1O2N1O1O2N1OO1O1O100O1O1O100O1O1O100O1O1O100O1O1001O02M2O1N3M2O2M2N2NfoV2"}, "image_id": 296, "id": 4300}, {"iscrowd": 0, "category_id": 1, "bbox": [370.0, 0.0, 10.0, 5.0], "area": 28, "segmentation": {"size": [512, 512], "counts": "PPi51o?1O1O1O1OO1O100O1OQPR2"}, "image_id": 296, "id": 4301}, {"iscrowd": 0, "category_id": 1, "bbox": [241.0, 17.0, 48.0, 58.0], "area": 1824, "segmentation": {"size": [512, 512], "counts": "Tbh31c>4mAO31k=6mAN2Oo=7jA02IT>\\100O1000O1000O1000O10O100000O10O1000O1000OLgAcNY>]150000O013M5K5J6K5K4L000O10O100002M6K5K4LU^_3"}, "image_id": 296, "id": 4302}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 28.0, 46.0, 52.0], "area": 1734, "segmentation": {"size": [512, 512], "counts": "nao22S?3cA1Y>3bA3Y>2bA2Y>3cA0Z>5aAK_>k000O010000000O01000000O01000000O010000001N4M00O10O12N4L5J5L5K000O103M5K4L5J5L5Kf]Y4"}, "image_id": 296, "id": 4303}, {"iscrowd": 0, "category_id": 1, "bbox": [467.0, 30.0, 45.0, 71.0], "area": 1262, "segmentation": {"size": [512, 512], "counts": "YaY72n?1N2N2O2M2N2N3N1N2N2O2M2N2N3N1N10O2N2N3N1N2N3M1O010O01O2O2M2N2N3aATOf=m0YBUOd=n0YBTOe=n0YBUOe=l0ZBUOf=l0WBVOi=j0UBYOj=h0SBZOn=e0QB]On=c0PB_OQ>a0lAAT>W1O0gN"}, "image_id": 296, "id": 4304}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 47.0, 78.0, 91.0], "area": 2593, "segmentation": {"size": [512, 512], "counts": "RRi61n?3M2O2M2N2N3N1N3M2N2O2M2N2OO003M10O01O01O0001O03M2O1N3M2N2O2M2O2O01O01O010O01O01O01gNZAV13jN\\>OcAV10lNa>T1]AnNd>X1fAeNl=[1SBfNm=[1PBgNQ>X1mAkNR>U1mAlNT>Q1lAQOT>n0kAUOT>i0lAYOU>f0jA\\OU>V12N3N1N3M2N2O2M2N2OO2N20N3M2O2M2YOQA:Q?DRA9Q?DQA;P?DQA:R?CQA:\\?N3N1NZL"}, "image_id": 296, "id": 4305}, {"iscrowd": 0, "category_id": 1, "bbox": [399.0, 72.0, 80.0, 93.0], "area": 2509, "segmentation": {"size": [512, 512], "counts": "kbW63l?2O1N3M2N2O2M2N2O2M2N2N3N1OO2N10O01O00010O0001O2O2M2N2O2M2N2N30O010O00010O0010OlNZAm0f>QO\\AP1d>nN]AR1:mNK0l=R1XBoNJ1n=Q1UBPOL1o=n0SBTOK0R>l0QBVOK1S>j0PBAQ>>mADS>V10M2O1N3M2N3N1N2N3M2010OO1N3M2N3N1N2N3N1N3M2N2O2M2N3M2O1N3M2O^[`0"}, "image_id": 296, "id": 4306}, {"iscrowd": 0, "category_id": 1, "bbox": [363.0, 98.0, 80.0, 92.0], "area": 2538, "segmentation": {"size": [512, 512], "counts": "fce51o?1N2N3N1N2N3M2O1N3M2O1N3M2N10O101N1O00010O00000101N2N2O2M2N2N3N1N30O0010O00010O01O01OiN[AR15lN]>T1aAnN`>R1]AQOb>W11OeAgNn=Z1PBgNP>Y1nAjNR>U1lAmNT>R1kAQOU>l0lAUOT>i0lAZOS>f0kA\\OV>U12N3N1N2N3M2O2M2N11O1N11N3M2O2WORA;P?CRA;P?DRA:P?CRA;P?CRA;\\?N1N3M2N3NbZR1"}, "image_id": 296, "id": 4307}, {"iscrowd": 0, "category_id": 1, "bbox": [327.0, 123.0, 81.0, 92.0], "area": 2470, "segmentation": {"size": [512, 512], "counts": "_dS51n?2N2O2M2N2N2O2M2N2N3N1N2N3N01N100O0000010O000002O1N3M2O2M2N2N3O01O010O01O01O01O01jN[An0e>PO]AQ1j>O_APOQ>Q1mAPOJ0P>P1TBROJ0S>m0QBVOJOT>l0oA@Q>`0nAAS>>kAET>T11N1O1N3M2O1N3M2N3N1O110M2N2O2M2N3N1N2N3M2O1N3M2N3N1N2N3N1N2Nlic1"}, "image_id": 296, "id": 4308}, {"iscrowd": 0, "category_id": 1, "bbox": [298.0, 151.0, 74.0, 89.0], "area": 2405, "segmentation": {"size": [512, 512], "counts": 
"VUe43l?2O2M2N3N1N3M2O2M2N3O01ON10O01O01O01O011N2N3N1N3M2O2M2N201O010QBfNU=Z1jBhNV=W1hBkNX=V1eBmN[=R1cBPO]=Q1aBPO`=ITBP1:ZOa=EWBP16]Od=@XBT12^Ol=a0RBAn=`0oABR>=mAER>T12M2O2M2N3N1N3M2N3N1N3M2O20O01O0O2M2N3N1N3M2N3N1N3M2O2M2N3M2OPiU2"}, "image_id": 296, "id": 4309}, {"iscrowd": 0, "category_id": 1, "bbox": [136.0, 170.0, 84.0, 125.0], "area": 6680, "segmentation": {"size": [512, 512], "counts": "`UT2b0^?5K0O1000000000000000a0_O2L2000;D9H00009FjBES==jBGR==kBFR==jBFT==iBBW=a0gB_OY=d0cB]O]=f0`BYO`=k0]BUOc=n0YBSOg=b1010O01000O011O201XNVBW1j=eNZBY1h=dNZBY1j=dNYBY1i=dN[BX1Z>M2N3L4M3M3L10O10O3N3Cc@0a?Lc@1_?Md@0gge3"}, "image_id": 296, "id": 4312}, {"iscrowd": 0, "category_id": 1, "bbox": [476.0, 335.0, 36.0, 68.0], "area": 1258, "segmentation": {"size": [512, 512], "counts": "Q[^71n?2N2O2M2N3M2O1N3M2O2M2N2N3N1N3M2O1N3M201O01O01M2N3M2O1N3M2O2M2N2N3N1N3_E"}, "image_id": 296, "id": 4313}, {"iscrowd": 0, "category_id": 1, "bbox": [426.0, 373.0, 79.0, 63.0], "area": 2341, "segmentation": {"size": [512, 512], "counts": "Q\\e61n?2O1N3M2N2O2M2N2O2M2N2N3N1N3M2N2O2N11O01O01O01O01O01O0N2N3M2O1N3M2N2OO00010O2N3M2O1N3M2N2O2M2N2O2O0001O0O1N3O00010O0001O0O2M1O00102M2N2N3N1N2N3M2O1N3M2N2O2MkR3"}, "image_id": 296, "id": 4314}, {"iscrowd": 0, "category_id": 1, "bbox": [374.0, 401.0, 69.0, 89.0], "area": 2945, "segmentation": {"size": [512, 512], "counts": "]]k53m?1N3M2O1N3M2N2O2M2N3M2O1N3M2O2N10010O01O01O0O2N1YAlN_>V1`AkN^>^1A`NYBb1e=`NZBa1d=bNYBa1e=`NYBb1e=`NZBa1d=bNYBa1d=?O2M2N2O2M2N0010O00101N3M2oNbB]Oa=`0aB_O`=?cB^O_=a0bB]Oa=`0aB_O`=?cB^O_=a0bB]Oa=`0bB]O`=a0bB^O`=?bB_O`=`0bB]O`=a0bB]Oa=`0aB_O`=?cB^O_=a0R1M2N3N1N2N3M2O2M]RR1"}, "image_id": 296, "id": 4315}, {"iscrowd": 0, "category_id": 1, "bbox": [336.0, 437.0, 72.0, 75.0], "area": 2596, "segmentation": {"size": [512, 512], "counts": "^^X52m?2O1N3M2N2O2M2KAf@a0X?6N1N1O011N2N2N3SAPOf>Q1XAQOf>V1N3M2O1N3M2N2O2M2N2N01O0201O01O010O01O01TNoAf1Q>XNQBi1T>N1O1O2N1O1O2M2N2O2M2N2N3NO000101N2N3M2O1N3M2N3N1N2N3M2O1N3M2N2Oe`c1"}, "image_id": 296, "id": 4316}, {"iscrowd": 0, "category_id": 1, "bbox": [493.0, 443.0, 19.0, 39.0], "area": 384, "segmentation": {"size": [512, 512], "counts": "Xnf71n?3N1N2N3M2O2M2N2O2M2N3M2O1N3M2N2O2M2TB"}, "image_id": 296, "id": 4317}, {"iscrowd": 0, "category_id": 1, "bbox": [429.0, 477.0, 57.0, 35.0], "area": 1228, "segmentation": {"size": [512, 512], "counts": "Zof634Nc?4\\@Mc?4[@Ob?9N2N3N1N2N3M2O1N2N100O1O1O100O11O1O1O2N1O1O2N1O1O2N1O0000O1O1O100O1O1O1001O2N1O1O2N1O1O2N1O1D_@7f?O1O2N1O1O2N1OQ`<"}, "image_id": 296, "id": 4318}, {"iscrowd": 0, "category_id": 1, "bbox": [143.0, 493.0, 54.0, 19.0], "area": 638, "segmentation": {"size": [512, 512], "counts": "joW21m?2V@Oc?8M3N2M3O11O001O00001OO1N2001O001O00001O001O00001O001O001O00001O001O001O00001O001O001O00001O001O00001O00SPm4"}, "image_id": 296, "id": 4319}, {"iscrowd": 0, "category_id": 1, "bbox": [267.0, 497.0, 61.0, 15.0], "area": 483, "segmentation": {"size": [512, 512], "counts": "ooU41n?1O100O1O1O100O1O1O100O1O1O100O1O100O11O1O2N1O1O2N1O1O00O1O1O100O1O1O100002N1O2N1O1O00O1O1O100O1O1O100O1001O2N1O2N1O1O2NQ`k2"}, "image_id": 296, "id": 4320}, {"iscrowd": 0, "category_id": 1, "bbox": [342.0, 505.0, 8.0, 7.0], "area": 35, "segmentation": {"size": [512, 512], "counts": "l_[51n?2O2M1O1002M2NU``2"}, "image_id": 296, "id": 4321}, {"iscrowd": 0, "category_id": 1, "bbox": [407.0, 507.0, 11.0, 5.0], "area": 33, "segmentation": {"size": [512, 512], "counts": "oo[61o?0O1O1O100O1001O1O2NQ`^1"}, "image_id": 296, "id": 4322}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 512.0, 512.0], "area": 173470, 
"segmentation": {"size": [512, 512], "counts": "b0X1o0VOk2b6fNdJTNjNT3e6fNlJlM^N^3h6cNcK\\1_4bNbK]1`4aNgKX1Z4gNSLk0P4RO]Lb0e3\\OgL8[3FRMMP31\\MBg2;fMYO[2`0\\H`Nf5c0P2l0ZH`NR68f1W1XH`NY>_15O001O1O1O1O001O1N2O1O1O001O1O1O1O001O1O1O1O0O2O1O1O4L:FPS`3IVm_L;F:F:F:EQKSNY1_1g3;RKVNW1^1i3:RKXNT1^1k38SKZNQ1^1l37UK[Nn0^1n34WK^Nj0^1P42XK`Ng0]1S41WKbNf0]1T4OXKdNc0]1V4MYKfN`0]1W4K\\KhN<\\1Z4J\\KjN9\\1\\4H]KkN7]1]4F]KnN5\\1_4C_KQO1[1a4C`KRON[1c4AaKTOK[1e4_ObKVOH[1g4]OcKWOF[1i4[OdKZOB[1k4YOdK]O@Z1l4XOfK^O]OZ1n4VOgK@ZOY1Q5TOhKCVOY1S5ROiKDTOZ1T5POjKFQOZ1U5oNkKHoNY1W5lNmKKkNX1Z5kNmKMhNX1\\5iNnKOeNX1^5gNoK0cNY1_5eNPL2`NX1a5dNQL5]NW1c5cNQL6[NW1d5cNRL6YNW1e5cNSL5XNW1f5dNSL5VNW1g5dNTL5TNW1h5dNTL6SNV1i5dNUL6QNU1k5eNUL6oMU1l5eNVL5nMV1l5eNWL5lMV1m5eNXL5jMU1o5fNWL6iMT1P6fNXL6gMT1Q6fNYL6eMT1R6fNZL5dMU1R6fN[L5bMT1T6gN[L5`MT1U6gN[L6_MS1V6gN\\L6VMBiNa1e7gN]L5TMDjN_1f7gN^L6PMFkN]1g7gN_L6mLHlN[1h7lN[L1nLKnNX1i7SOTLKSMKPOV1j7TOTLKPMMQOT1k7TOULJmLb1n6dNVLJjLc1P7cNWLJgLc1S7cNWLJdLd1U7bNWLKaLe1X7`NXLK^Lf1Z7_NYLJ\\Lg1[7`NZLIYLh1]7_N[LIULj1`7]N\\LIRLk1b7\\N\\LJPLk1d7[N]LJlKl1h7ZN]LIjKn1i7lMVH0X46gKo1k7kMlL6WKP2m7jMmLd2T3\\MlLd2T3\\MmLc2S3]MnLb2R3^MoL`2R3`MmLa2S3_MlLb2T3^MkLc2U3]MjLc2W3]MhLd2X3\\MgLe2Y3[MeLg2[3YMdLg2]3YMbLh2^3XMaLi2_3WM`Lj2`3VM^Ll2b3TM]Ll2d3TM[Lm2e3SMZLn2f3QMZLP3f3PMXLQ3i3oLVLR3j3nLULS3k3mLTLT3l3lLSLT3m3mLQLU3o3kLPLV3P4jLoKW3Q4iLnKW3S4iLlKX3T4hLjKZ3V4fLiK[3W4eLhK[3Y4eLfK\\3Z4dLeK]3[4cLcK_3]4aLbK`3^4`LaK`3_81O1N21N2O1O1O0O2O1O1O1N2O001O1O1N2O1O001N2O1O1O1N10O1N2O001O1O1N101O1O1O1N101O1O1O0O2O1O1O001N2O1O001O1N2O001O1O1N2O001O1O[O"}, "image_id": 297, "id": 4323}, {"iscrowd": 0, "category_id": 1, "bbox": [469.0, 147.0, 43.0, 27.0], "area": 1161, "segmentation": {"size": [512, 512], "counts": "cdZ7k0U?00000000000000000000000000000000000000000000000000000000000000000000000000000000000]K"}, "image_id": 297, "id": 4324}, {"iscrowd": 0, "category_id": 1, "bbox": [421.0, 200.0, 51.0, 51.0], "area": 2047, "segmentation": {"size": [512, 512], "counts": "lfb6:c?7I5L4M3L3N2N2N2N2O1N2N101N101N10001N1000001O000000000000O10001O0O101O0O2O0O2O1N1O2N2N2N2N3M2M4L5K5IXic0"}, "image_id": 297, "id": 4325}, {"iscrowd": 0, "category_id": 1, "bbox": [281.0, 0.0, 176.0, 93.0], "area": 8386, "segmentation": {"size": [512, 512], "counts": "_`\\42m?2N3M2O1N2N3M2N2O1N3M2N2N2O2M2N2N2N2O2N1O1O1O2N1O1O1O2N1O1O1O2N1O1O1O1O2N1O1O1O2N1O1O1O2N1O1O1O2N1O1O1O1O2fB_MQ=b2nB_MQ=b2mBaMQ=g2000O1O1O100O1O1O1O100O1O1O100O1O1O1O100O1O1O100O1O1O1O100O1O1O100O1O1O1O100O1O1O100O1O1O1O100O1O1O1O100O1O1O100O1O1O1O100O1O1O100O1O1O1O100O1O1O100O1O1O1O100O1O1O100O1O1O1O100O1O1O100O1O1O1O100O1O1O100O1O1O1O100O1O1O100O1O1O1O100O1O1O100OQ`k0"}, "image_id": 298, "id": 4326}, {"iscrowd": 0, "category_id": 1, "bbox": [104.0, 38.0, 211.0, 196.0], "area": 18113, "segmentation": {"size": [512, 512], "counts": "iTd12n?1N2N2N3M2O1N2N3M2N2O2M2N2N2N3N1N2N2N3M2O1N3M2N2N2O2M2N2N2N3N1N2N3M2N2O1N3M2N2N2O2M2N2N3M2O1N2N3M2N2O1N3M2eCdLo;^3oCdLo;_3nCcLQ<^3nCaLRU1SBiNl=W1VBgNj=Y1XBeNh=\\1YBbNg=^1\\B_Ne=`1=00001O01O00[OaNjB`1U=bNiB^1W=eNfB[1[=fNdBY1\\=iNbBW1^=kN`BV1_=mN^BS1b=oN\\BQ1e=POZBo0f=SOXBm0h=f0O0001O2N2O1N3M2N2N2O2M2N2N2N3N1N2N2N3M2O1N2N3M2000010O0O1O1N3M2N2N2O2M2N2N2N3N1N2NihU6"}, "image_id": 298, "id": 4328}, {"iscrowd": 0, "category_id": 1, "bbox": [456.0, 227.0, 56.0, 103.0], "area": 3256, "segmentation": {"size": [512, 512], "counts": "hXT71n?3M2O1N2N3M2N2N3N1N2N2N3M2N2N3N1N2GlNcAV1OkNU>1jAW1NjNV>2iA\\1U>:M2N2N1O00001O3M2N2N3M2N2N2N3M2N2N3M1O1O00eNRCEn<;TCCl<=VCAjYCBi<M2N2O1N2N3M2N2N2O1N3M2N0000101N2N2N2N2N3N1N2N2N2N3M2O1N2N2Nbe:"}, "image_id": 298, "id": 4330}, {"iscrowd": 
0, "category_id": 1, "bbox": [0.0, 314.0, 7.0, 11.0], "area": 62, "segmentation": {"size": [512, 512], "counts": "k9:f?O0100000O6KQVl7"}, "image_id": 298, "id": 4331}, {"iscrowd": 0, "category_id": 1, "bbox": [504.0, 347.0, 8.0, 16.0], "area": 67, "segmentation": {"size": [512, 512], "counts": "Q[l71n?2N3M2O1N2N2N3SE"}, "image_id": 298, "id": 4332}, {"iscrowd": 0, "category_id": 1, "bbox": [336.0, 364.0, 61.0, 68.0], "area": 1892, "segmentation": {"size": [512, 512], "counts": "f[X52m?2O2M2N3N1N2N3M2O2d@]OV?h0N2N3N1O110O0O2M2N2O2M2N201O010O00010O01O01O010O0N10O0201O00010OO2M2N2O2M2POXAe0k>XOXAe0j>ZOWAe0j>YOXAe0T?M2O1N3M2N3N1N2N3M2OnRi1"}, "image_id": 298, "id": 4333}, {"iscrowd": 0, "category_id": 1, "bbox": [467.0, 367.0, 45.0, 63.0], "area": 1764, "segmentation": {"size": [512, 512], "counts": "okY71n?2N2O1N2N3M2d@EP?=n@EP?=n@EQ?g0N2N2N3M2O1N2N3M2N20001O0001O01O0000010ON001O01O000000010O000000010O0`D"}, "image_id": 298, "id": 4334}, {"iscrowd": 0, "category_id": 1, "bbox": [312.0, 383.0, 55.0, 68.0], "area": 1798, "segmentation": {"size": [512, 512], "counts": "Q]l41o?1N3M2N3XOHdA9Z>JcA8\\>IbA:[>IbA9\\>IcA9Z>IdA9[>IbA:[>HdA9Z>JcA9[>g0N000010O2O20O010O00010O010O0010O0O1N3M2O2M2N2O2M2N3N1N2N3N1N3M2O1N3M2N2O2M2N3N1N`RX2"}, "image_id": 298, "id": 4335}, {"iscrowd": 0, "category_id": 1, "bbox": [279.0, 406.0, 54.0, 70.0], "area": 1781, "segmentation": {"size": [512, 512], "counts": "jm[41n?3N1N3UOJgA8W>JhA7W>KfA8W>JgA8W>KgA7W>JgA8W>JgA9V>JhA7W>JgA9V>JgA7X>KgA5Y>h000012O010O010O01O010O01O010O0O2M2O2M2N3M2O2M2N3N1N2N3N1N3M2O2M2N3N1N3M2O2M2NfQi2"}, "image_id": 298, "id": 4336}, {"iscrowd": 0, "category_id": 1, "bbox": [380.0, 425.0, 59.0, 64.0], "area": 1700, "segmentation": {"size": [512, 512], "counts": "e]n52n?1N2N3M2O1N3M2N2O2M2N2N3N1010ON2N3N1N2N3\\AkNX>W1gAjNW>X1gAjNY>W1eAkNZ>^100010O000010O00N3N1N2N3M2O1N3M2N2O1N3M2N2N30OO1O2M2N2N3M2O1N2N3M2O1NZQT1"}, "image_id": 298, "id": 4337}, {"iscrowd": 0, "category_id": 1, "bbox": [244.0, 432.0, 52.0, 70.0], "area": 1763, "segmentation": {"size": [512, 512], "counts": "a^j33m?2M2N3N2M2YOEgA=V>EhA>V>DgA>W>DhA>V>DgA>W>EgA=V>EhA;Y>GdA:[>IcA6^>f0O1O3N2M21000O010O10O10O01M3M2O2M3N1N3M3N1N3N2M2N3N1N3N2M2N3N2M2O2M3M2O2Ml`[3"}, "image_id": 298, "id": 4338}, {"iscrowd": 0, "category_id": 1, "bbox": [207.0, 450.0, 51.0, 62.0], "area": 1697, "segmentation": {"size": [512, 512], "counts": "SoW32m?3N2M3N2M3XOBjA`0S>CjA`0T>BjA`0S>CjA`0T>BjA?T>DiA=W>DhA;X>HeA9[>IcA6]>g002M3N2M3N1N20O2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N1N3N2M3N2M3N2M3N2MW`n3"}, "image_id": 298, "id": 4339}, {"iscrowd": 0, "category_id": 1, "bbox": [344.0, 454.0, 59.0, 58.0], "area": 1786, "segmentation": {"size": [512, 512], "counts": "e^\\51n?2N2O2M2N2N3N1N2N3M2O1N3M2N2O2O000O2M2YAoN]>S1`AoN^>S1`AoN^>[1N3N1N2N2O02N1O1O2N1O2N1O1O2N1N2O2M2N2N3N1N2N3M2N2O20OO1N3M2O1N3M2N2O2M2NaPf1"}, "image_id": 298, "id": 4340}, {"iscrowd": 0, "category_id": 1, "bbox": [167.0, 470.0, 41.0, 42.0], "area": 945, "segmentation": {"size": [512, 512], "counts": "foc21n?4M2M4M1O0O100O1ZOCiA=W>EgA;X>IdA8\\>JbA6]>N`A2`>1\\A0d>2ZANe>g00000O100O10000O12N3M3M2N3M2N3M3M2N3M2N3M2N3M3M2NQ`g4"}, "image_id": 298, "id": 4341}, {"iscrowd": 0, "category_id": 1, "bbox": [311.0, 479.0, 50.0, 33.0], "area": 971, "segmentation": {"size": [512, 512], "counts": "^ok42n?1N3M2O1N3M2N3N1N2N3N1N2N1O1002N00O1O100O1O100O1O1O11O1O2N1O1O2N1O2N1O1O2N1O2N1O1O2N1O2N1O1O2N1O2NQP[2"}, "image_id": 298, "id": 4342}, {"iscrowd": 0, "category_id": 1, "bbox": [97.0, 489.0, 70.0, 23.0], "area": 1222, "segmentation": {"size": [512, 512], "counts": 
"eo`12n?5K4L000O100000000O100000000O100N2O100000000O100000000O100000000O100000000O100000000O100000000O14LO100000000O100000000O100000000O14L5K5K5KTP\\5"}, "image_id": 298, "id": 4343}, {"iscrowd": 0, "category_id": 1, "bbox": [288.0, 504.0, 22.0, 8.0], "area": 92, "segmentation": {"size": [512, 512], "counts": "o_`41n?1O100O11O1O00O1O100O1O1O100O1001O2N1O1O2NQ`T3"}, "image_id": 298, "id": 4344}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 161.0, 2.0, 5.0], "area": 7, "segmentation": {"size": [512, 512], "counts": "Q55k?MRkn7"}, "image_id": 300, "id": 4345}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 169.0, 21.0, 74.0], "area": 849, "segmentation": {"size": [512, 512], "counts": "Y5Y2h=0OO1L5L3L4L4M4K4M3L5L3L4L4M4K4M3L4L5L3LeZe7"}, "image_id": 300, "id": 4346}, {"iscrowd": 0, "category_id": 1, "bbox": [87.0, 174.0, 18.0, 31.0], "area": 346, "segmentation": {"size": [512, 512], "counts": "Xf[12j?4L4L4L5J5L4O2O0001O010N1K5L5J5K6KbZ[6"}, "image_id": 300, "id": 4347}, {"iscrowd": 0, "category_id": 1, "bbox": [25.0, 199.0, 55.0, 53.0], "area": 2269, "segmentation": {"size": [512, 512], "counts": "]g<5f?6L5N2C_OUAc0g>AUAc0g>=L5K4000000010O00000010O0L4O11O01O0001O0001O0001O0O1M301O0001O01O000001O01O0000010O000O1K6]OWAHn>3WAHn>3WAHn>4Rjg6"}, "image_id": 300, "id": 4348}, {"iscrowd": 0, "category_id": 1, "bbox": [85.0, 204.0, 70.0, 57.0], "area": 2289, "segmentation": {"size": [512, 512], "counts": "cgZ11m?2M4L3N3L3N201O00010O010O00010O010O0N2N3L3N2M4M2M4M2M3N3L3O1010O01O01O0O2M21O01O010O01O01O010O01O01O010OO2M2OO3M2M4M2M3N3DVA^Ol>`0VA]On>?;N2N3L3N3L3N[Yb5"}, "image_id": 300, "id": 4349}, {"iscrowd": 0, "category_id": 1, "bbox": [144.0, 225.0, 54.0, 61.0], "area": 1808, "segmentation": {"size": [512, 512], "counts": "eXX22k?4M2N2N3L3N3M2N2O2O010O000N3M2N2M01000O3N3M2M4M2N3L3N2N3O010O0010O0010O010OM3N3M2M4M2N3C[AZOh>d0ZAZOh>c0=M2M4M2M4M2M3Nghl4"}, "image_id": 300, "id": 4350}, {"iscrowd": 0, "category_id": 1, "bbox": [188.0, 243.0, 55.0, 61.0], "area": 1815, "segmentation": {"size": [512, 512], "counts": "WYn22l?2N3L3N3M2N3L3N2N30O010O01O0M3N1O0O011O3L3N3M2N3L3N3M2M3O2O010O010O00010O01M2M4M2N2M4M2D\\AZOg>c0[A[Og>c0\\AYOh>c01O01O01O01O010O00010ON2010O0N2M4M2M3M4N11O01O0N3L3M3M4L3NYe`7"}, "image_id": 300, "id": 4354}, {"iscrowd": 0, "category_id": 1, "bbox": [394.0, 339.0, 67.0, 66.0], "area": 2230, "segmentation": {"size": [512, 512], "counts": "_[U61m?2M4M2N2N3M2M4O01O010O01O01O010O01O0M3N3L3_AVOj=m0RBVOn=j0PBXOQ>g0lA]OS>d0jA^OW>a0fACY>R10010O010O00010O010O00010O010N1M2O0O3N2MKRB^Nn=`1UB`Nk=\\1XBbNj=\\1XBbNj=[1`0M2M3N3L3N3L3N2M4M2M4M2M3N3L3NXUi0"}, "image_id": 300, "id": 4355}, {"iscrowd": 0, "category_id": 1, "bbox": [46.0, 348.0, 76.0, 50.0], "area": 2118, "segmentation": {"size": [512, 512], "counts": "[[g04i?3M3N3a@FS?=j@GR?e0O101O01O010O00010O01M200010O00010O00010O010O00010O00010O00010O0010O0010O0001O0O101N1O1010O010O00011N1TOVA`0l>ZOXAg0Q?O00010O01O01O0O101O0O101L3M3M4L3M3MZdR6"}, "image_id": 300, "id": 4356}, {"iscrowd": 0, "category_id": 1, "bbox": [455.0, 363.0, 57.0, 79.0], "area": 2052, "segmentation": {"size": [512, 512], "counts": "mlS71o?1N2N2N2N2N2N2O1N2N2N2N2N3O00000N2O1N2N2N2N2N2N2O1N3M1O00000001O01O0000000001O0000000000HeNlA[1U>gNhAY1X>iNgAV1Y>8000000010O000000fD"}, "image_id": 300, "id": 4357}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 386.0, 2.0, 6.0], "area": 9, "segmentation": {"size": [512, 512], "counts": "R<6j?MQdn7"}, "image_id": 300, "id": 4358}, {"iscrowd": 0, "category_id": 1, "bbox": [158.0, 387.0, 55.0, 63.0], "area": 1893, "segmentation": {"size": [512, 
512], "counts": "^]_21m?3M3M2O2M2N3M2^O@cAb0Z>@dAc0Y>@dAb0[>_OcAd0Z>_OdAb0Z>@dAc0Y>@dAb0[>c0O1O010O010O01000O010O010O01000O010O01O0N3M3bNaAW1e>O2M2N6J3N1N3M210N1N3M3M2O2M2N3M2ORSe4"}, "image_id": 300, "id": 4359}, {"iscrowd": 0, "category_id": 1, "bbox": [37.0, 398.0, 52.0, 52.0], "area": 1392, "segmentation": {"size": [512, 512], "counts": "\\mb02m?2N1O2N2N2N2N2M3K_Oh@c0V?5N2N2N2M3N1O2N1O1O1O2N2N2N2N2N2OO2N2N2N2N2N2N2N2N2N2M3N2N1O2N2N2N2N2N2N2N2N2M3N2N1OlRc6"}, "image_id": 300, "id": 4360}, {"iscrowd": 0, "category_id": 1, "bbox": [195.0, 417.0, 71.0, 62.0], "area": 1897, "segmentation": {"size": [512, 512], "counts": "VnQ32m?1O2M3N2M2O2N2M2O2M3N2N1O2000O01000N11000O10O10O10O1000ON3N2M2O2N2M3N1N2O00O010O0100O03N1N010O01000O010O3N2N2M2O2M3N1O2M3N2M2O2N2M2O2M3N2N1Ndbj3"}, "image_id": 300, "id": 4361}, {"iscrowd": 0, "category_id": 1, "bbox": [238.0, 438.0, 73.0, 66.0], "area": 2057, "segmentation": {"size": [512, 512], "counts": "n^g32m?2N2M2O2N2M3N1O2M3N1O2M3N20O1000OO200O10O10O1000O10O01N2N2M2O2N2M2O2N2M2O00O01000O010O100O2O000O10O01000O0102M2O2M3N2N1N3N2M3N1O2M3N2M2O2N2M3N1NoQT3"}, "image_id": 300, "id": 4362}, {"iscrowd": 0, "category_id": 1, "bbox": [497.0, 444.0, 15.0, 42.0], "area": 409, "segmentation": {"size": [512, 512], "counts": "Pnh74k?4M3M4K4M3M3M4K4M3M4K2O0000OUB"}, "image_id": 300, "id": 4363}, {"iscrowd": 0, "category_id": 1, "bbox": [99.0, 450.0, 60.0, 62.0], "area": 2021, "segmentation": {"size": [512, 512], "counts": "hoa11n?2N3N1N3M2O0O1O1O100O1O100RO@[Ba0e=AXB`0g=BXB>g=EVBLnA4Q>OlA2T>OjA2U>0jA0U>n0O100O1O100O1O1O1002N1O2N1O1O2N1O2N1O2UObA0_>ObA0_>NcA1_>LdA2]>MdA2^>KeA3\\>KfA4[>JgA5[>IgA5Z>IhA6Z>GiA6R?O1N3MUP`5"}, "image_id": 300, "id": 4364}, {"iscrowd": 0, "category_id": 1, "bbox": [311.0, 457.0, 80.0, 55.0], "area": 2244, "segmentation": {"size": [512, 512], "counts": "onk42l?2M3N3M2N3L3N3O01O01O010O010O0O1N3L3N3N110O0010O0010O010O010O00010O010O001M2N3M21O010O01O010O01O01YAiNd>Y1010O01O010O010OSO]A=d>_O`A`0`>^ObAc0]>[OfAd0[>YOgAh0i>O010O00001O001O001L3N2N3L3N3M2M4MfPl1"}, "image_id": 300, "id": 4365}, {"iscrowd": 0, "category_id": 1, "bbox": [317.0, 502.0, 20.0, 10.0], "area": 110, "segmentation": {"size": [512, 512], "counts": "oon41n?100O1O1O1O100O1O1O100O11O1O1O2N1O1O1ORPg2"}, "image_id": 300, "id": 4366}, {"iscrowd": 0, "category_id": 1, "bbox": [194.0, 503.0, 23.0, 9.0], "area": 123, "segmentation": {"size": [512, 512], "counts": "o_Q31n?1N2O1O1N2000000O11O00001O1O001O1O001O1O001OQPc4"}, "image_id": 300, "id": 4367}, {"iscrowd": 0, "category_id": 1, "bbox": [295.0, 503.0, 17.0, 9.0], "area": 83, "segmentation": {"size": [512, 512], "counts": "ooc41n?1O100O1O1O100O1O1O11O1O2N1O1O1OR`S3"}, "image_id": 300, "id": 4368}, {"iscrowd": 0, "category_id": 1, "bbox": [403.0, 508.0, 8.0, 4.0], "area": 19, "segmentation": {"size": [512, 512], "counts": "ooY61n?100O1O1002N1OQPb1"}, "image_id": 300, "id": 4369}, {"iscrowd": 0, "category_id": 1, "bbox": [446.0, 183.0, 66.0, 151.0], "area": 5507, "segmentation": {"size": [512, 512], "counts": "gXo61m?3]OMVA5h>LWA6f>MXA5f>MXA4g>NVA5h>MVA5h>LWA6f>MXA5f>MXA5f>c0O2N2M3N2N1O2M3N2N2N2N1N3N2N2N2N2M2O2N2N2N2M3N1O2N2N2M3N2N1O2N2M3N2N1O2M3N2N2N2N1N3N2N2N2N2M2O2N2N2N2M3N1O2N2XJ"}, "image_id": 302, "id": 4370}, {"iscrowd": 0, "category_id": 1, "bbox": [397.0, 246.0, 24.0, 24.0], "area": 303, "segmentation": {"size": [512, 512], "counts": "RhV62m?2N2N2M2O2N2M3N1O2N2O1O10N2M3N2N1O2M3N2N1N3N2NRX]1"}, "image_id": 302, "id": 4371}, {"iscrowd": 0, "category_id": 1, "bbox": [307.0, 373.0, 165.0, 139.0], "area": 12261, "segmentation": {"size": 
[512, 512], "counts": "Voi41n?2M3N2N2N1O2N2M3N2bM^O[Ed0c:]O\\Ee0b:]O\\Ed0c:^OZEe0c:^O[Ed0c:^O[Ed0c:^O[Ed0c:]O\\Ee0b:]O\\Ed0c:^OZEe0c:^O[Ed0c:^O[Ed0c:^O[Ed0c:^O[Ed0c:]O[Ef0b:]O\\Ed0c:^O[Ed0c:^O[Ed0c:^O[Ed0c:^O[Ed0c:]O[Ee0c:^O[Ec0f:]OXEd0i:\\OUEe0k:\\OSEe0n:[OoDg0R;YO^DeN5S2^;WO[DhN6R2`;VOXDjN6Q2c;UOUDlN6P2e;^OYDc0h;]OUDe0l;g11O1O1O1O001O1O1O1O1O001O1O1O1O1O1O001O1O1O1O1O001O00O11O1O1O1O1O1O001O1O1O1O1O001O1O1O1O1O1O001O1O1O1O1O001O1O1O1O1O1O001O1O1O1O1O001O1O1O1O1O1O001O1O1O1O1O001O1O1O1O1O1O001O1O1O1O1O001O1O1O1O1O1O001N2M3N2N2N1O2N2M3N2N2N2N1N3N2N2N2N2N1N3N2N2N2Nn`c0"}, "image_id": 302, "id": 4372}, {"iscrowd": 0, "category_id": 1, "bbox": [484.0, 389.0, 28.0, 56.0], "area": 795, "segmentation": {"size": [512, 512], "counts": "V]b71n?2N2N1N3N2N2N2M3N1O2N2N2M3N2N1O2M3N2N2N1O2M3N2N2N2M2O2jC"}, "image_id": 302, "id": 4373}, {"iscrowd": 0, "category_id": 1, "bbox": [207.0, 450.0, 27.0, 28.0], "area": 373, "segmentation": {"size": [512, 512], "counts": "cnW32m?2N1N3N2N2N2N2M2O2N2N2N1N0102N2N2N1N3N2N2N2N2M2O2N2NeaZ4"}, "image_id": 302, "id": 4374}, {"iscrowd": 0, "category_id": 1, "bbox": [454.0, 482.0, 57.0, 30.0], "area": 899, "segmentation": {"size": [512, 512], "counts": "o_S71n?1O1N2O1O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N21O1O001O1O1O1O1O001O1O1O1O1O001O1O1O1O1O1O001O1O1O1O1O001O1O1O1M5"}, "image_id": 302, "id": 4375}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 104.0, 77.0], "area": 5462, "segmentation": {"size": [512, 512], "counts": "0[1e>001O1O1O1O001O1O1O1mA^Nf=b1YB`Nf=a1XB`Nh=a1UBbNj=_1TBcNk=^1SBdNl=h101O1O1O1O001O1O1O1O001O1OO1O1N2O1O1O11O1O001O1O1O1O001OO1O1O1O1N2O1O1O1N2O1O1N2O1O1O1N2O1O1O1N2O1O1N2O11O1O1O001O1O1O1O00O1N2O1O1O1\\OaAEa>:aAD`>;aAD`>;bAC_>;dAB^>=dAA]>>dAA]>=fA@\\>?fA_O[>`0c0O1N2O1O1N2O1O1O1N2O1O1OQP\\6"}, "image_id": 303, "id": 4376}, {"iscrowd": 0, "category_id": 1, "bbox": [189.0, 26.0, 35.0, 51.0], "area": 1207, "segmentation": {"size": [512, 512], "counts": "Pan24l?4K6K4L5K4K6K4L4L5K1N1000O10O1000O1000O1000O10O12N5J5L5K4L4K6K4L5K4Llm_4"}, "image_id": 305, "id": 4377}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 45.0, 8.0, 25.0], "area": 118, "segmentation": {"size": [512, 512], "counts": "]1i0W?N2M4L3M4M2M3Mdnk7"}, "image_id": 305, "id": 4378}, {"iscrowd": 0, "category_id": 1, "bbox": [160.0, 55.0, 27.0, 26.0], "area": 406, "segmentation": {"size": [512, 512], "counts": "XR`21m?2N3L3N2N3L3N3N110O00010O010O010O0010O0N3M2M4M2N3L3NS^R5"}, "image_id": 305, "id": 4379}, {"iscrowd": 0, "category_id": 1, "bbox": [211.0, 78.0, 25.0, 25.0], "area": 437, "segmentation": {"size": [512, 512], "counts": "bbY33m?5K5K5K3L010000000O010000000O010000000O05L5K5K5KolY4"}, "image_id": 305, "id": 4380}, {"iscrowd": 0, "category_id": 1, "bbox": [171.0, 103.0, 50.0, 38.0], "area": 1254, "segmentation": {"size": [512, 512], "counts": "Pde22j?5K4L4L4L5N1000010O00000M4M21O0001O01O0001O01O0000011N0000010O0000010O0000010O0000010N1L4L4L4L5J5Lc\\a4"}, "image_id": 305, "id": 4381}, {"iscrowd": 0, "category_id": 1, "bbox": [107.0, 156.0, 61.0, 62.0], "area": 1920, "segmentation": {"size": [512, 512], "counts": "bfe12k?4L3L4M4HBi@a0T?7O101O01O01O00010N1L4M4L3M2M0100000O03O20010O000010O00010O000010O00010O000010ON2M4L3L4M3M4O000010OO1M4L3L4M4Llj[5"}, "image_id": 305, "id": 4382}, {"iscrowd": 0, "category_id": 1, "bbox": [398.0, 166.0, 26.0, 26.0], "area": 487, "segmentation": {"size": [512, 512], "counts": "ZUW61o?5K4K6K4L3M00O0100000O010000000O010000000O4M5K4L5JZj[1"}, "image_id": 305, "id": 4383}, {"iscrowd": 0, "category_id": 1, "bbox": [211.0, 175.0, 50.0, 63.0], "area": 1940, 
"segmentation": {"size": [512, 512], "counts": "`eY3c0]?002N0000c0]O000000000000000000000000000000000000000O10000000f0ZO000000000000000S1mN0000004L00000000007kAIU>;gAEX>R10O1000O10O100000O10O100000O0100000O10O6K4L5K4L5J5L5K5K4K6K4LTh[1"}, "image_id": 305, "id": 4388}, {"iscrowd": 0, "category_id": 1, "bbox": [486.0, 213.0, 26.0, 64.0], "area": 570, "segmentation": {"size": [512, 512], "counts": "\\Wc71o?5K5K3L1000O1000O1000O10KEe@;[?5000O13L10O15K5]OTOhAQ1S>TOgAR1S>c0K3XI"}, "image_id": 305, "id": 4389}, {"iscrowd": 0, "category_id": 1, "bbox": [104.0, 219.0, 60.0, 40.0], "area": 1369, "segmentation": {"size": [512, 512], "counts": "_Wd13j?3M4L3M3N30O00010O00N3L3M301O01O01O01O01O01O01O01O01O01O01O01O01O01O01O01O01O01O01O01O01O01O01OWOm@e0X?O01O01O0N2M4L3M3M4L3Meh]5"}, "image_id": 305, "id": 4390}, {"iscrowd": 0, "category_id": 1, "bbox": [293.0, 240.0, 53.0, 48.0], "area": 1606, "segmentation": {"size": [512, 512], "counts": "jgb41o?4L5K4L5J5L4L000O01000000O0103M5K4K2O00O1000O1000O10O1000O1000O1000O10O10B^A^Ob>b0cAXO^>h0=0O0100000O10O2O4L5K4K6K5Khgb2"}, "image_id": 305, "id": 4391}, {"iscrowd": 0, "category_id": 1, "bbox": [210.0, 241.0, 56.0, 30.0], "area": 1416, "segmentation": {"size": [512, 512], "counts": "iWY3b0^?4L000O10O100000000000K50000O10000000000000000000O100000000000000000O1000000000000000000000O100000009G0000009Gmgj3"}, "image_id": 305, "id": 4392}, {"iscrowd": 0, "category_id": 1, "bbox": [96.0, 271.0, 66.0, 58.0], "area": 2061, "segmentation": {"size": [512, 512], "counts": "^Y`11m?3N2N1N3N2N2M2O2N2M3N1O2M3N2N1RAROh>U1N1O2M3N2N1N3N2000O0100000O010O1N2M2O2N2M3NO10O10002N1100000O010M3N2N1N101O2M3N2N1N3N2N2M2O2N2M3N1Omf^5"}, "image_id": 305, "id": 4393}, {"iscrowd": 0, "category_id": 1, "bbox": [214.0, 277.0, 74.0, 89.0], "area": 3612, "segmentation": {"size": [512, 512], "counts": "TY[33m?4K4M4L4K5LO1000O10O13M4K5L3M4K5L3fA_No=e1mA^NP>m1K5L3M4L3L1000O10O1000O01000O10O10]NdBb0\\=^OhB=Y=BlB:S=GPC6P=JTC2l01000004K5L3M4K5L3M4LQe_3"}, "image_id": 305, "id": 4394}, {"iscrowd": 0, "category_id": 1, "bbox": [312.0, 295.0, 62.0, 87.0], "area": 3127, "segmentation": {"size": [512, 512], "counts": "R[l45k?4L5J6K5K4L000O10O100000\\NUORDl0m;ZOnCe0S<@hC`0XjBBV=b0eB_O[=b1010000000O01000000O01000001O5J5L5K2N0O10O1002N5J6K5K1O00O03N5K5K4L5J6K5K5K4L`dT2"}, "image_id": 305, "id": 4395}, {"iscrowd": 0, "category_id": 1, "bbox": [374.0, 309.0, 38.0, 30.0], "area": 782, "segmentation": {"size": [512, 512], "counts": "lYk55k?4L5K4K5L1OO10O1000O1000O10O1000O10O1000O10O1000O10O1000O10O10002N4K5L4L4Liea1"}, "image_id": 305, "id": 4396}, {"iscrowd": 0, "category_id": 1, "bbox": [396.0, 328.0, 67.0, 51.0], "area": 2290, "segmentation": {"size": [512, 512], "counts": "hZV64l?5K3L1000003L6K4L5K000O1000O10O100000O13L6K00000O10O10M300O10O100000O0100000O10O100000O0100000O10O1000O10003M00O013J^AhNg>R19K4L5K4L5J5L5K`Th0"}, "image_id": 305, "id": 4397}, {"iscrowd": 0, "category_id": 1, "bbox": [56.0, 339.0, 61.0, 44.0], "area": 2231, "segmentation": {"size": [512, 512], "counts": "R[l0m0S?0000000000000000000000000E;00000000000O10000000O1000000000000N2000000000000000000000000O100000000000000004L`0@0000000000iTU6"}, "image_id": 305, "id": 4398}, {"iscrowd": 0, "category_id": 1, "bbox": [240.0, 363.0, 57.0, 52.0], "area": 1923, "segmentation": {"size": [512, 512], "counts": "l[h36j?6J6J00003L7J6J2N00000O1000O10000000JPO[AQ1e>41000O100000O100000O10I_AoNa>Q13nNZAQ1g>4O10000000O10O10000000O10O1002N6J0O106J6J6J7IdS[3"}, "image_id": 305, "id": 4399}, {"iscrowd": 0, "category_id": 1, "bbox": [422.0, 385.0, 58.0, 49.0], 
"area": 1964, "segmentation": {"size": [512, 512], "counts": "k\\c65f?5N2O2N1O1O1j@@k>b0QABm>?o@EQ?e000O2O00001O0000001O01O01O01OL4L5O000O2O0000N2N2001O000O101O000O2N100O1O2O01N1L4M4O0CZA@f>=]ADb>8bAH_>4eAK[>1iA0V>MmAOX>LlA1RR`0"}, "image_id": 305, "id": 4400}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 402.0, 39.0, 45.0], "area": 1537, "segmentation": {"size": [512, 512], "counts": "fl]1X1h>000000000000000000000000000000000000000L401O00000000000000000004L000ZOf000000Zcn5"}, "image_id": 305, "id": 4401}, {"iscrowd": 0, "category_id": 1, "bbox": [26.0, 405.0, 36.0, 42.0], "area": 1322, "segmentation": {"size": [512, 512], "counts": "[]=d0\\?00000000000ZODhAf000000000000O100000000000000000000000000000000000000000[cP7"}, "image_id": 305, "id": 4402}, {"iscrowd": 0, "category_id": 1, "bbox": [374.0, 407.0, 19.0, 28.0], "area": 365, "segmentation": {"size": [512, 512], "counts": "j\\k52n?4L5K4K6K4L1N1000000O010003M5J5L5K4LdRk1"}, "image_id": 305, "id": 4403}, {"iscrowd": 0, "category_id": 1, "bbox": [331.0, 409.0, 42.0, 37.0], "area": 1102, "segmentation": {"size": [512, 512], "counts": "cmU55[?NRA3m>2n@NQ?8j@HV?a001O4L0O0L50000000O010000000O0100000O0100004L1N1000O1000O0100001O4K6K5K4L5J]RU2"}, "image_id": 305, "id": 4404}, {"iscrowd": 0, "category_id": 1, "bbox": [228.0, 419.0, 63.0, 50.0], "area": 2014, "segmentation": {"size": [512, 512], "counts": "`]b33l?6K6J3M000O1000O12N3L10O12N5K5K5J100J6O01000O1000O1000O11O001N5L0000000O10OI]ASOc>m0800O10O10000000O10O100000O10O100003L6K6J5K5K6JmQ^3"}, "image_id": 305, "id": 4405}, {"iscrowd": 0, "category_id": 1, "bbox": [65.0, 424.0, 20.0, 21.0], "area": 416, "segmentation": {"size": [512, 512], "counts": "YmP1d0\\?000O10000000000000000000000000000001OgRe6"}, "image_id": 305, "id": 4406}, {"iscrowd": 0, "category_id": 1, "bbox": [398.0, 429.0, 31.0, 27.0], "area": 580, "segmentation": {"size": [512, 512], "counts": "c]W62n?5K5K5K4K10O1000O1000O10O100000O10O100000O01000000O04M5K5K5JRRY1"}, "image_id": 305, "id": 4407}, {"iscrowd": 0, "category_id": 1, "bbox": [446.0, 454.0, 56.0, 36.0], "area": 1353, "segmentation": {"size": [512, 512], "counts": "`^o64l?5K5K5K5J3NO100000O10O1000O1000O1000O1000O10O100000O10O10001O00O01000000ON30000000O0100000O10O1000003M5J6K5K4LTa4"}, "image_id": 305, "id": 4408}, {"iscrowd": 0, "category_id": 1, "bbox": [405.0, 457.0, 30.0, 35.0], "area": 462, "segmentation": {"size": [512, 512], "counts": "\\nZ65j?6K4L5K2M10O10O1000O10005K4L5J6K05K5J5L00O1000O1000O6K4L5KhPV1"}, "image_id": 305, "id": 4409}, {"iscrowd": 0, "category_id": 1, "bbox": [78.0, 466.0, 75.0, 44.0], "area": 2598, "segmentation": {"size": [512, 512], "counts": "Z_W1`0T?<0000000000000000000000000000000000000000000004L00000000000000000E;0M300000000000000000000000O100000000000000000000000000000000000000000000=C000000QQc5"}, "image_id": 305, "id": 4410}, {"iscrowd": 0, "category_id": 1, "bbox": [12.0, 472.0, 33.0, 40.0], "area": 1155, "segmentation": {"size": [512, 512], "counts": "k_65]?>E;00000000F:0000001O00000000000000000000000000000000001O000G9AeQY7"}, "image_id": 305, "id": 4411}, {"iscrowd": 0, "category_id": 1, "bbox": [257.0, 474.0, 40.0, 38.0], "area": 1196, "segmentation": {"size": [512, 512], "counts": "PoP42m?7J5K6J6J6J1N100000000O10000000000O10000000000O10000000000O100002N6J6J6J6J5K6JQP[3"}, "image_id": 305, "id": 4412}, {"iscrowd": 0, "category_id": 1, "bbox": [353.0, 486.0, 34.0, 26.0], "area": 605, "segmentation": {"size": [512, 512], "counts": "ko`53l?3N0DMk@3U?1g@OY?<0000O100000000O1000000K50000O1003M1O00000000001O5K5K4L5KRPn1"}, 
"image_id": 305, "id": 4413}, {"iscrowd": 0, "category_id": 1, "bbox": [444.0, 510.0, 5.0, 2.0], "area": 6, "segmentation": {"size": [512, 512], "counts": "o_n61o?00000O1RPo0"}, "image_id": 305, "id": 4414}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 54.0, 84.0], "area": 1020, "segmentation": {"size": [512, 512], "counts": "P2d0Z?1O010O10O010O01O10O0010O010O01O01O010O01O010O010O00010O010O0010O010O00010O010O01O2O1N2O1N2N2O1N2O1N2O1N2NRPU7"}, "image_id": 306, "id": 4415}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 74.0, 512.0, 393.0], "area": 67977, "segmentation": {"size": [512, 512], "counts": "b5k8U71O0000000000000000000001O000000000001O0000000000000000000001O0000000001O00000000000000000000000001O000001O0000000000000000000000000001O000001O0000000000000000000000000001O0001O000000000000000000000000J6\\Od0D<01O00000000000000000eKfGP1Z8^NXHb1h7]NYHc1g7]NYHc1g7]NYHc1g7]NYHc1Y8kMgGU2X8lMhGU2V8lMjGT2V8lMjGT2U8mMkGS2T8nMlGR2T8nMlGR2S8oMmGQ2R8RL]G^1a0`2Q8SL^G]1a0`2Q8RL_G^1`0`2P8RLaG_1>_2P8SLbG^1>_2P8RLcG_1=_2o7RLeG_1<_2n7SLfG^1<_2m7SLhG^1;_2m7RLiG_1;^2k7TLjG^1;^2j7TLlG^1:^2j7SLmG_19^2i7SLoG_18^2h7TLPH^18^2h7SLQH_17_2f7RLTH_16_2e7SLUH^16_2d7SLXH]14`2d7RLYH^13`2c7SLZH]13`2b7SL\\H^11_2c7RL]H_10_2b7SL^H^10_2a7SL`H^1O_2`7SLbH^1N_2`7RLcH_1M_2_7SLdH^1M_2^7SLfH^1L_2^7RLgH_1K_2]7SLhH^1K_2\\7SLjH^1J_2\\7RLkH_1I_2[7SLlH^1I_2Z7SLnH^1I^2X7TLPI^1H^2X7TLQI]1G`2V7SLTI]1F`2U7SLVI]1E`2U7RLWI^1D`2T7SLXI^1C_2T7SLZI^1B_2S7SL\\I^1Ai1i7iLfH^1A^1S8TM]H^1@^1]5hLcL^1gNa0U7PNdI1a0^1fNOh7bNoH29g1POWOW8POaGDh0=a0h1oNWOX8PO^GFh0;d0h1nNWOY8PO[GHh09g0h1mNWOZ8VOQHKh0h1mNiNi8BbGMi0i1Q9XNVFOj0i1Q9WNTF1k0h1R9WNRF0m0i1R9WNoE0P1i1R9WNlE1R1h1S9WNjE0T1i1S9WNgE0W1i1S9WNdE1Y1h1T9WNaE3Z1f1V9WN_E5Y1d1Y9WN\\E7[1a1Z9XNYE:[1^1]9XNWE;[1]1_9XNTE=\\1[1a9XNQE`0\\1X1d9WO[Fi0f9XOXFh0i9WOWFi0j9UOWFl0i9SOWFm0j9QOWFo0i9POXFP1h9POXFP1j9mNWFS1j9kNWFU1j9iNWFW1k9gNUFY1l9eNTF\\1n9aNmEe1T:ZNkEg1V:WNjEj1X:SNiEm1X:QNhEP2Z:nMeES2\\:kMeEU2\\:iMdEX2^:fMaE\\2_:bMaE6gNk1j;lM`E9fNk1k;jM_E;gNk1k;iM]E[91XDK7HV2;\\92VDK7JW28\\93VDK6LX25]94UDJ5OX23_9>WDA[20_9>UDD[2Na9=SDG[2Lb9=RDI[2Jd9o0[FQOf9o0YFQOh9n0XFROi9n0VFROk9n0TFROl9o0SFQOn9m0SFSOn9k0SFUOn9i0SFWOn9g0SFYOm9f0TFZOm9c0UF]Ol9a0UF_Ol9?UF@m9>TFBl9X1m>K4L4L3M001O0Eh@NX?3k@IV?6n@FR?::10O0000010O000010O0001O01O0001O01O01O000100O2N1O01O2N1O1O1O2N1O1O1O1O2NQPX7"}, "image_id": 306, "id": 4417}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 410.0, 35.0, 74.0], "area": 1463, "segmentation": {"size": [512, 512], "counts": "l1000O10O4M4N10010O00N3K4M4L0O10O1000O10O10O5L3M4K4M4L3M4K4M3M4K4M4LTQ^7"}, "image_id": 308, "id": 4418}, {"iscrowd": 0, "category_id": 1, "bbox": [26.0, 477.0, 27.0, 26.0], "area": 474, "segmentation": {"size": [512, 512], "counts": "R_=2n?4L3L5L4L3L2OO1000O10O1000O10O10O10O1000004K4M4L4K4MaPU7"}, "image_id": 308, "id": 4419}, {"iscrowd": 0, "category_id": 1, "bbox": [359.0, 477.0, 49.0, 35.0], "area": 1194, "segmentation": {"size": [512, 512], "counts": "boc51l?4L3N2d@Hm>=PAFm>=PAFn>g0N2001O00001O001O00001O00001O00001O001O00001O00001O0O1O2O0010O00010O00O2L3N2M4L3M4Mf`c1"}, "image_id": 308, "id": 4420}, {"iscrowd": 0, "category_id": 1, "bbox": [410.0, 487.0, 31.0, 25.0], "area": 526, "segmentation": {"size": [512, 512], "counts": "m_]63k?2N2M3N2M3N2N2M3N2O100001O001O00001O00001O001O0O1M4M2M4M2N2MePS1"}, "image_id": 308, "id": 4421}, {"iscrowd": 0, "category_id": 1, "bbox": [253.0, 498.0, 30.0, 14.0], "area": 217, "segmentation": {"size": [512, 512], "counts": "oon31n?1O1N2O1O1O10000N2O1N2O11O1O001O1O1O001O1O1O001O1O1O001O1OQPb3"}, "image_id": 308, "id": 4422}, {"iscrowd": 0, "category_id": 1, 
"bbox": [12.0, 447.0, 71.0, 65.0], "area": 1275, "segmentation": {"size": [512, 512], "counts": "X^61m?2N3M2N3N110O010O010O010O010O010O010O010O010O01O010O010O010ObAGU=:iBGX=8fBJZ=7cBK^=4aBM_=4^BNc=1\\B0d=1YB1h=NWB3j=MSB5m=KQB7P>InA8R>HlA:U>FiA9Y>GeA:]>FaA9a>G]A9f>GXA9i>GUA9n>;1O20O010O010O010O01M2N3M2N3M2N3M2O2M2N3M2NRQf6"}, "image_id": 309, "id": 4423}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 82.0, 140.0], "area": 6337, "segmentation": {"size": [512, 512], "counts": "5W4i;N3N2N2N2NHTL_Di3c;XL\\Dg3e;ZL[Dd3f;\\L[Db3f;_LXDa3i;`LUD_3m;bLQD^3P<81O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2fNeBN\\=1fBM[=2gBLZ=3hBKY=3jBJX=5iBJX=5jBIW=6kBHV=6mBHT=7nBFT=9nBES=:nBES=:oBDR=:QCCQ=SC@n<>TCAm<>UC_Om<`0UC^OlHSA6n>ITA4o>JRA5P?HSA6]?M3N1OfPS6"}, "image_id": 310, "id": 4430}, {"iscrowd": 0, "category_id": 1, "bbox": [158.0, 413.0, 44.0, 41.0], "area": 938, "segmentation": {"size": [512, 512], "counts": "^]_22l?3N1N3N2N1N3N2N2M2k@\\Om>f0QA\\Ol>g0QA\\Om>m0O010O01M3N2N11000O0100000O01000O10O10N2N1N3N2N2M2O2M3N2N1N3N2N^bj4"}, "image_id": 310, "id": 4431}, {"iscrowd": 0, "category_id": 1, "bbox": [98.0, 0.0, 21.0, 5.0], "area": 64, "segmentation": {"size": [512, 512], "counts": "PPa14l?1O000000O1000000O1000000O1000000O100000P`T6"}, "image_id": 311, "id": 4432}, {"iscrowd": 0, "category_id": 1, "bbox": [107.0, 27.0, 20.0, 19.0], "area": 307, "segmentation": {"size": [512, 512], "counts": "Zae12g?7I7O101O000001O00000001O000001O000K5IZ_P6"}, "image_id": 311, "id": 4433}, {"iscrowd": 0, "category_id": 1, "bbox": [95.0, 57.0, 26.0, 38.0], "area": 673, "segmentation": {"size": [512, 512], "counts": "kb_12g?7H8I7I8J50000001O000001O000001O0L4I7O1_On@4_?1O0000O2Hn]S6"}, "image_id": 311, "id": 4434}, {"iscrowd": 0, "category_id": 1, "bbox": [150.0, 202.0, 107.0, 118.0], "area": 6629, "segmentation": {"size": [512, 512], "counts": "PX[27c?6J6I7J7M202N00010O000000000QBoNP=Q1hBVOX=j0bB]O]=c0aB_OY=g0hBXOV=j0jBVOS=m0mBSOmnNTBR1m=fNZBZ1V>01O0001O00000001O0O1I7J6O110TOPAf0V?O000L4J6IfXo3"}, "image_id": 311, "id": 4435}, {"iscrowd": 0, "category_id": 1, "bbox": [49.0, 294.0, 25.0, 44.0], "area": 803, "segmentation": {"size": [512, 512], "counts": "Wih04l?7I3M:F9G9G1O0O1000000000000000O9H2N00004L9G:F9G`ej6"}, "image_id": 311, "id": 4436}, {"iscrowd": 0, "category_id": 1, "bbox": [375.0, 302.0, 13.0, 13.0], "area": 169, "segmentation": {"size": [512, 512], "counts": "^ik5=c?00000000000000000000000bfm1"}, "image_id": 311, "id": 4437}, {"iscrowd": 0, "category_id": 1, "bbox": [26.0, 324.0, 26.0, 16.0], "area": 327, "segmentation": {"size": [512, 512], "counts": "ZZ=1o?6FL\\@4d?7M0000O10O1000000000000000O10O10000000000009GceU7"}, "image_id": 311, "id": 4438}, {"iscrowd": 0, "category_id": 1, "bbox": [163.0, 325.0, 48.0, 16.0], "area": 680, "segmentation": {"size": [512, 512], "counts": "Uja2>b?000000000000000000001O000001O00000000000000000000000000000000000000000001O000001O000000000000iUf4"}, "image_id": 311, "id": 4439}, {"iscrowd": 0, "category_id": 1, "bbox": [39.0, 397.0, 40.0, 40.0], "area": 819, "segmentation": {"size": [512, 512], "counts": "Pmc01n?2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2000O1N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2NPSh6"}, "image_id": 311, "id": 4440}, {"iscrowd": 0, "category_id": 1, "bbox": [311.0, 398.0, 24.0, 32.0], "area": 670, "segmentation": {"size": [512, 512], "counts": "_lk49g?=C9G0000000000O0100000000000000000000000O>C=ChRh2"}, "image_id": 311, "id": 4441}, {"iscrowd": 0, "category_id": 1, "bbox": [61.0, 441.0, 33.0, 35.0], "area": 831, "segmentation": {"size": [512, 512], 
"counts": "Pnn03l?5L4L4L5J5L4L00O0100000O0100000O0100000O0100000O0100000O1Cn@LV?Oo@MU?Oo@MU?OYb`6"}, "image_id": 311, "id": 4442}, {"iscrowd": 0, "category_id": 1, "bbox": [119.0, 459.0, 30.0, 35.0], "area": 859, "segmentation": {"size": [512, 512], "counts": "enk1`0`?9G000000000F:0000000000000O1000000000009G00000000000000?AmPe5"}, "image_id": 311, "id": 4443}, {"iscrowd": 0, "category_id": 1, "bbox": [98.0, 488.0, 17.0, 12.0], "area": 192, "segmentation": {"size": [512, 512], "counts": "X_a17i?5K0000000000000000O100000000000hPV6"}, "image_id": 311, "id": 4444}, {"iscrowd": 0, "category_id": 1, "bbox": [116.0, 509.0, 15.0, 3.0], "area": 29, "segmentation": {"size": [512, 512], "counts": "o_j11o?00000O100000000000000O10000SPn5"}, "image_id": 311, "id": 4445}, {"iscrowd": 0, "category_id": 1, "bbox": [115.0, 0.0, 72.0, 48.0], "area": 1725, "segmentation": {"size": [512, 512], "counts": "P`i11o?1O1O1O1O1O1O1O1O001O1O1O1O1O1O00N2001`@DZ?=d@E[?a0O1m@\\Oh>e0VA\\Oj>e0TA]Ok>d0SA^Ol>l0O1O1O1O001O1O1O1O1O1O1O001O1O1OO1O1O1O1O1O1O1O1O1O1O1AVAGk>8WAFj>8YAFh>9ZAEg>:[ADf>;\\ABf>=?O1N2O1O1O1O1O1O1N2O1O1OQ`R5"}, "image_id": 312, "id": 4446}, {"iscrowd": 0, "category_id": 1, "bbox": [253.0, 0.0, 87.0, 42.0], "area": 2927, "segmentation": {"size": [512, 512], "counts": "P`n3=c?:F000000O10000000000`0@4L000000000000000000O100000000000000000000000000I700000000000000000000000000O1000000000000000000000000000000O100000000000000000000000000000000O1000>Bboe2"}, "image_id": 312, "id": 4447}, {"iscrowd": 0, "category_id": 1, "bbox": [354.0, 0.0, 77.0, 37.0], "area": 1986, "segmentation": {"size": [512, 512], "counts": "PPa54l?7I7I7I6J6J00000000O100JWAUOi>k0600000000O1000000000000O1000000000000O10000000000O1000000000000O1000000000000O10000000000O1000000000000O1000000000000O102N6J7Ia_X1"}, "image_id": 312, "id": 4448}, {"iscrowd": 0, "category_id": 1, "bbox": [171.0, 15.0, 10.0, 11.0], "area": 61, "segmentation": {"size": [512, 512], "counts": "e`e22l?3N1O2N2N02N2N2N2N^_U5"}, "image_id": 312, "id": 4449}, {"iscrowd": 0, "category_id": 1, "bbox": [288.0, 36.0, 153.0, 139.0], "area": 8447, "segmentation": {"size": [512, 512], "counts": "SR`48h?7I8G8I2NO10000000O4M1O000000000O10O100000000000O0100000000000O0100000000000O10O100000000000O0100000000000O10O1000000000O10O1000000000O10O100000000000O10O1000000000O10O1000000000O1000O1000000000O10O1000000000O10004L7]OaNWBg1a=`NXBg1a=c0I8H7I8H7H4M00000000001N9H7I3MO10000000O100000O100000O16J7I8G8TMbCh1eCPA?n>BQA`0m>BQA`0m>=L2N2N2N2N2M3N2N1O011000000000O100000iA_Nm=b1QB`No=`1oAbNQ>^1mAdNR>f1N2N2N2M3N2N2O10000000O100RN]B^1c=`N_B`1a=^NaBa1`=]NbBa1`=]NbBc1^=[NcBf1]=XNeBh1j=00000000N2N2M3N2N2N2N2N2N2N2POTA:00n>DTA:00S?No@0S?MPA1R?MPA1R?MPA1R?MPA1Q\\k3"}, "image_id": 312, "id": 4452}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 45.0, 8.0, 5.0], "area": 35, "segmentation": {"size": [512, 512], "counts": "]QR65k?00000000O102Nani1"}, "image_id": 312, "id": 4453}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 60.0, 3.0, 5.0], "area": 9, "segmentation": {"size": [512, 512], "counts": "l15l?N2NS^n7"}, "image_id": 312, "id": 4454}, {"iscrowd": 0, "category_id": 1, "bbox": [435.0, 85.0, 23.0, 47.0], "area": 720, "segmentation": {"size": [512, 512], "counts": "hbi64k?7J6J7I6J6J7H3N00000000O0100000J1O00000001O01O00000L4M3010O00000001O01O00000001O0001O000001O0001O0001O0000N3J5J6K5K5XOQA:T?@RA:]?K5KQWT7"}, "image_id": 312, "id": 4460}, {"iscrowd": 0, "category_id": 1, "bbox": [301.0, 405.0, 203.0, 107.0], "area": 11559, "segmentation": {"size": [512, 512], "counts": 
"oof41n?1O100O1O1O1O100O1O1O1O100O1O1O1O100O1O1O1O100O1O1O1O100O1O1O1O100O1O1O100O1O1O1O100O1O1O1O100O1O1O1O100O1O1O1O100O1O1O1O100O1O1O1O100O1O1O1O100O1O1O100O1O1O1O100O1O1O1O100O1O1O1O100O1O1O1O100O1O1O1O100O1O1O1O100O1O1O100O1O1O1O100O1O1O1O100O1O1O1O1001O2N00O100O1O100O100O1O100O1O100O1O1002N1O1O1O2N1O1O1O2N1O1O2N1N2N2O2M2N2N2N3N1N2N2N3N1N2N2N3M2O1N2N3M2O1N3M2N2N2O2M2N2N2O2M2N2N2N3N11O00N3M2O1N2N3M2O1N2NSa3"}, "image_id": 312, "id": 4461}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 41.0, 59.0], "area": 1207, "segmentation": {"size": [512, 512], "counts": "a0Z1g>N3M0000000001O000000000000000000000000000002N2N3M2N2N2N2N2N2N2N2N2N2O1N2N2N2N2N^_[7"}, "image_id": 313, "id": 4462}, {"iscrowd": 0, "category_id": 1, "bbox": [330.0, 0.0, 10.0, 2.0], "area": 15, "segmentation": {"size": [512, 512], "counts": "PPU51o?000000001O0000000PPf2"}, "image_id": 313, "id": 4463}, {"iscrowd": 0, "category_id": 1, "bbox": [370.0, 0.0, 52.0, 16.0], "area": 662, "segmentation": {"size": [512, 512], "counts": "XPi55c?9O0000000000000000001O000000000000000000001O00K5000000000000000000001O0000000000000000001O00000000O1FZP]1"}, "image_id": 313, "id": 4464}, {"iscrowd": 0, "category_id": 1, "bbox": [425.0, 0.0, 63.0, 29.0], "area": 1310, "segmentation": {"size": [512, 512], "counts": "Z`d68_?:N10000000000000001O0000000000000000001O000000O1I7000000003M001O0000000000000b@HT?b000000001O0000000000000000001O00000000H8F:00OQP<"}, "image_id": 313, "id": 4465}, {"iscrowd": 0, "category_id": 1, "bbox": [63.0, 10.0, 68.0, 75.0], "area": 2430, "segmentation": {"size": [512, 512], "counts": "o`o01n?2N2N2N2N2N2N2N2N2N2N2N3M2N2N2000000N2N2N2N2N2N3M2N2N2O1000000000000000001N1jA[Nn=g1PB[Nn=g1PB[NP>k1000O1N2N2O10000ZNkAa1T>]NnAa1Z>N2N2N2N2lN[Aj0g>TO[A0Oc0h>[O[A0Od0g>ZO\\A00c0V?N2N2N2N2N2N2N2N2No]n5"}, "image_id": 313, "id": 4466}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 43.0, 80.0, 69.0], "area": 2631, "segmentation": {"size": [512, 512], "counts": "m23l?2N2N2N2N2GEj@=T?Ej@=T?Ej@=T?9N2N2N3M10O000000000000000003M2N0012M2N2N2O100000N2N2N2N3M2N2N2N2N2N000000000001O000000000DfASOZ>m0hAQOX>o0jAoNV>Q1lAnNS>R1oAlNQ>T1QBjNo=V1>O2N2N2N2N2N3M2O1N2N2N2N2N2N2N2N2N2N2NSng6"}, "image_id": 313, "id": 4467}, {"iscrowd": 0, "category_id": 1, "bbox": [136.0, 59.0, 94.0, 79.0], "area": 3451, "segmentation": {"size": [512, 512], "counts": "[ST23l?2N2N2N2N2O1N2JAh@b0U?@i@b0U?6N2N2N2EPOdAQ1Z>QOdAQ1Z>QOdAQ1Z>QOdAR1Y>POeAR1Z>;N2N2N2O11O01ON2NMXNoAh1Q>30001O00000000000000000003M2O1N2N2N2N2N2N2N2N2N2N2N2N2OO00000000000000000000001O0000001O2N0002N2N2N2N2N2N2N2N2O1N2N2N2N2N2N2N2N2Ndm\\4"}, "image_id": 313, "id": 4468}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 60.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "l11Sno7"}, "image_id": 313, "id": 4469}, {"iscrowd": 0, "category_id": 1, "bbox": [78.0, 66.0, 14.0, 12.0], "area": 76, "segmentation": {"size": [512, 512], "counts": "ZRW11n?2N2O1N1O00001O0001O2N2N2Nlma6"}, "image_id": 313, "id": 4470}, {"iscrowd": 0, "category_id": 1, "bbox": [116.0, 76.0, 19.0, 26.0], "area": 216, "segmentation": {"size": [512, 512], "counts": "lRj11n?2N3N1N2JI`@9^?6N2N2N00Gg@LY?4i@JW?6k@HU?8m@FU?8l@GV?7;N2N2N`]l5"}, "image_id": 313, "id": 4471}, {"iscrowd": 0, "category_id": 1, "bbox": [89.0, 77.0, 8.0, 7.0], "area": 32, "segmentation": {"size": [512, 512], "counts": "_b\\12l?3O010O0010M2Nc]_6"}, "image_id": 313, "id": 4472}, {"iscrowd": 0, "category_id": 1, "bbox": [225.0, 85.0, 74.0, 83.0], "area": 3060, "segmentation": {"size": [512, 512], "counts": 
"`c`31n?2N3M2N2N2N2N2N2N2N2N2N3M2N2N2N2O1O1000001O0000000000000O1N2N3M2N2N2N2N2O1N2N2N3M2N2N2N2N2N2N2N2N2N2N01O2N2PO_B^Oc=`0_B^Oc=`0_B^Oc=`0_B^Oc=`0_B^Oc=`0_B^Oc=`0_B^Oc=`0_B^Oc=`0_B^Oc=`0`B]Ob=a0`B]Ob=NjA;f0Eb=NjA;f0Eb=NjA;f0Fi=8YBFi=8Q1N2N2N2N_\\Z3"}, "image_id": 313, "id": 4473}, {"iscrowd": 0, "category_id": 1, "bbox": [44.0, 98.0, 79.0, 83.0], "area": 2911, "segmentation": {"size": [512, 512], "counts": "TUf02m?2N2N2N2O1N2N2N2N2N3M2N2POWOfBk0o<@oBb0o<@oBa0Q=@nB?R=CXBE6kAR1[>lNgAR1\\>kNfAS1\\>kNfAS1e>N1O0010O000000000HPOaAP1_>SO^Am0b>UO\\Ak0d>800101N2N2O10N2O1000N2N2N2FQA@Q?>QA@Q?>QA@Q?>QA@Q?>:N2N2N2N3M2N\\\\R6"}, "image_id": 313, "id": 4474}, {"iscrowd": 0, "category_id": 1, "bbox": [397.0, 114.0, 88.0, 72.0], "area": 3285, "segmentation": {"size": [512, 512], "counts": "VdV61n?2O1N2N3M2N2N2N2N2d@^OW?g0N2N2N2N2N2N2N2N2N2N2N200000N2N2N2O10000000[AkN^>U1`AmN`>S1^AoNb>X100N2N2N2N2N2N2N2N2N2N2N2N2N0002N2N2N2N2N2O1N2N2N2N2N2N2N2G^AQOc>n0_APOa>P1aAnN_>R1700000002N2N2N2N2N2N2N2N2N2O1N2N2N2N2N2N2Nf[="}, "image_id": 313, "id": 4475}, {"iscrowd": 0, "category_id": 1, "bbox": [182.0, 120.0, 10.0, 12.0], "area": 66, "segmentation": {"size": [512, 512], "counts": "mSk22m?3M2N2N2N02N3M2N2NTlo4"}, "image_id": 313, "id": 4476}, {"iscrowd": 0, "category_id": 1, "bbox": [172.0, 130.0, 82.0, 77.0], "area": 3126, "segmentation": {"size": [512, 512], "counts": "bTf22m?2O1N2N2N2N2N2N2N2N3M200000000O1O1N2N3M2N2N2N2N2N2N2O1N3N10000001OdA`NW>`1gAbNY>c10000000000010O00000000000000000N2N0001O00000001O0002N000002N2O1N2N2O1N2N2N2[OWA0k>NWA0k>OVAOl>OVAOl>OWANk>0WANk>0WAOj>OXAOj>OXAOhYQ4"}, "image_id": 313, "id": 4477}, {"iscrowd": 0, "category_id": 1, "bbox": [88.0, 142.0, 80.0, 71.0], "area": 2838, "segmentation": {"size": [512, 512], "counts": "[U\\1=]?600000:G0O00000001O00001O1O0100N2M3M3N2M4M2N2M3N2N2N2N3M2N2N00000002N2N2N3M2N2N2N01O000000JaAlN_>T1cAjN]>V1eAhN[>Y1501O2N2N00001O0000000000101N2N2N2N2N2N2N3M2N2N2N2O1N2N2N2N2NQk[5"}, "image_id": 313, "id": 4478}, {"iscrowd": 0, "category_id": 1, "bbox": [454.0, 162.0, 58.0, 65.0], "area": 2151, "segmentation": {"size": [512, 512], "counts": "UVS72m?3M2N2N2O1N2N2N3M2J\\Om@f0Q?\\Om@f0R?5N000000002N2N2O1N2N2N3M2N2N2N2O1N2N30O000N2N2O1N2N2N0000000001O0001O000HaAPO_>P1cAnN^>R1cAlN_>R1701O00000000oJ"}, "image_id": 313, "id": 4479}, {"iscrowd": 0, "category_id": 1, "bbox": [270.0, 172.0, 76.0, 83.0], "area": 3137, "segmentation": {"size": [512, 512], "counts": "PWW42m?2N2N2N2N3M2O1g@Bm>`0QABm>`0QABm>`0QABm>j0N2N2WOmNbBU1\\=mNbBV1[=lNcBV1[=lNcBV1[=lNcBT1]=nNaBR1_=PO_BP1b=QO\\Bo0d=SO[Bl0e=VOYBj0g=XOWBh0i=g000000001O0002N2N2N2N2N2N2N2N2N2N000001O000001O0000001O2N2N2N3O000O1N2N2N2N2N2ROPAi0U?N2O1ZOg@b0]?N2N2N2N2N3M2N2Nfib2"}, "image_id": 313, "id": 4480}, {"iscrowd": 0, "category_id": 1, "bbox": [141.0, 184.0, 69.0, 65.0], "area": 2119, "segmentation": {"size": [512, 512], "counts": "jfV21o?1N3M2N2N2N2N2N2N2N2N2O2M2N20000001NO001O2N2O1N2N2N2N2N2N3M2N2N1O000000000000001O0001O00000JfAgNZ>Y1hAeN[>X18O1N2N2N2N2N2N2N2N3M2N2O1N2N2N2N2N2N2N3Maif4"}, "image_id": 313, "id": 4481}, {"iscrowd": 0, "category_id": 1, "bbox": [12.0, 202.0, 62.0, 87.0], "area": 2827, "segmentation": {"size": [512, 512], "counts": "lV62l?3N1N3N2M2lADc<>ZCEc<>[CDc<=[CEc<>[CDc<>ZCEc<>[CCd<>[CDd<=YCFg<:WCGj<8TCKk<6SCLm<4PCNQ=1nB1Q=0lB3T=MjB4W=KgB6[=JcB6^=L^B7b=S110M3O01O0N3N2M3O010O10O10O1000O01000O10O10003lNoA1T>JoA3S>LnA3S>AkAK5a0R>DnAE2e0R>EWB9T>]OnAa0j>M2O2M3N1N3N2M2Ofgj6"}, "image_id": 313, "id": 4482}, {"iscrowd": 0, "category_id": 1, "bbox": [312.0, 213.0, 73.0, 73.0], "area": 2574, "segmentation": 
{"size": [512, 512], "counts": "TXl41n?2N2N2N2N2N2N2N2N2J^Ok@e0R?]Ol@e0R?6N2N2N2N2N2N2N2N3M2N2N2O0O0000000000001O0000000001O000000000LgAbNY>^1iAaNV>_1501O2N1O2O1N2N2001O00oNZAg0f>WO\\Ai0d>UO\\Am0d>QO\\Ao0l>N2N3M2N2N2N2N3M2N2N2N2N2N3MZXo1"}, "image_id": 313, "id": 4483}, {"iscrowd": 0, "category_id": 1, "bbox": [496.0, 227.0, 16.0, 33.0], "area": 269, "segmentation": {"size": [512, 512], "counts": "dWh71n?2N2N2N2O1N2N3M2N2K^Oi@d0U?5N2N2N2N1mH"}, "image_id": 313, "id": 4484}, {"iscrowd": 0, "category_id": 1, "bbox": [196.0, 235.0, 67.0, 77.0], "area": 2365, "segmentation": {"size": [512, 512], "counts": "jWR31n?2[@0W?2g@0W?2g@0W?1h@1U?2i@0U?>O2N2N2N2N2M3N110lAjN]=V1`BmN`=S1^BoNb=Q1\\BPOe=P1YBROg=n0WBTOh=l0WBVOi=j0UBXOk=h0RB[On=e0PB]OP>c0nA^OS>X100O10000000O1O1N1O2N2M3N2N2N2N1O2M3N2N2N2N2N1N3N000000000O3N2N2N2N1O2N2M3N2N2N2N1ObWl3"}, "image_id": 313, "id": 4485}, {"iscrowd": 0, "category_id": 1, "bbox": [103.0, 256.0, 19.0, 14.0], "area": 177, "segmentation": {"size": [512, 512], "counts": "Vhc11n?3M4L3M100O01000O010000O10000O3N3M3LigR6"}, "image_id": 313, "id": 4486}, {"iscrowd": 0, "category_id": 1, "bbox": [359.0, 258.0, 69.0, 88.0], "area": 2934, "segmentation": {"size": [512, 512], "counts": "Zjc51n?2N2_OMRA5l>MRA5l>MSA4k>ORA3l>ORA4U>B[B<^O4U>B[B<^O4U>B\\Bo0b=SO\\Bo0c=RO[BP1c=RO[BP1c=RO[BP1c=RO[Bn0e=TOYBl0g=VOWBj0i=XOUBh0k=f000000000000010O000000000000000000000101N2N2N2N2N2N2N2N2N2O1cNaA5Oj0e>TO]Al0c>RO^An0c>PO]AP1j>O2N2N2N2N2N2N2N2N3M2N2N2N2N2N2NffY1"}, "image_id": 313, "id": 4487}, {"iscrowd": 0, "category_id": 1, "bbox": [4.0, 271.0, 6.0, 10.0], "area": 58, "segmentation": {"size": [512, 512], "counts": "_X2:f?000001O00`gj7"}, "image_id": 313, "id": 4488}, {"iscrowd": 0, "category_id": 1, "bbox": [129.0, 277.0, 9.0, 9.0], "area": 57, "segmentation": {"size": [512, 512], "counts": "khP22l?2M3O101O01O0O2L\\gj5"}, "image_id": 313, "id": 4489}, {"iscrowd": 0, "category_id": 1, "bbox": [149.0, 288.0, 54.0, 71.0], "area": 2259, "segmentation": {"size": [512, 512], "counts": "^jZ21m?3N1N3UOKgA8V>JhA8W>JgA8V>JhA8V>KgA8V>JhA8W>JgA8V>KgA8V>JhA8W>JgA8X>h0010O0100O0100O0100O0100O01jAZNo=g1oA\\Nn=l1010O10OO2M201O10O01SOlAMS>1PBNQ>OQB2n=MTB1n=ZOkA=97o=YOkA=98R>FPB8S>FoA7S>GPB6S>GoA8S>FoA7S>GoA7o>M3N1N`Uj4"}, "image_id": 313, "id": 4490}, {"iscrowd": 0, "category_id": 1, "bbox": [250.0, 289.0, 11.0, 11.0], "area": 60, "segmentation": {"size": [512, 512], "counts": "XYm31n?2N1N3N2N0001O2M3N1OnVm3"}, "image_id": 313, "id": 4491}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 290.0, 60.0, 73.0], "area": 2544, "segmentation": {"size": [512, 512], "counts": "b9g0Y?0001O0001O0001O00g@]OU?g001O01O0000YAYOU>h0eA]O[>c0aAA`>P1O00003I4K5K5K5O2O0001O01O2N0001O01O000001O01O0M30M3K5K5_OoAnNW>l0nAoNW>l0a0K5K5K6N1001O0001L3L4KPgQ7"}, "image_id": 313, "id": 4492}, {"iscrowd": 0, "category_id": 1, "bbox": [53.0, 293.0, 66.0, 79.0], "area": 2784, "segmentation": {"size": [512, 512], "counts": "mjj01o?3M2M4M2M4M2M10O01N1O2@\\OaAg0\\>]OaAd0^>^O_Ae0^>_O_Ab0`>a0M2N3O01O010O01O01O010O01O0N3M2M3N3L3N3M2M4N10010O0001M2N3L3N3L2OOHPBdNP>[1SBbNQ>[1QBcNQ>[1;M4F\\ATOf>i0]ATOg>i09N3L3N2M4M2N3L3N3MfVT6"}, "image_id": 313, "id": 4493}, {"iscrowd": 0, "category_id": 1, "bbox": [238.0, 302.0, 105.0, 66.0], "area": 3290, "segmentation": {"size": [512, 512], "counts": "oYg32m?3M2N2N2O1N2N2N3M2N2N2N2N2N2N2O2M2N2N200000001O00000000000010O000000000001O000001O0N2N0000000010O0000000000001O2O10001OO1N1O00001O0001O0002N2N2N2N2N2O101O00000001O0000000N2N2N10O2N2N2N3M2N2O1N2N2N2N2N2N2N2N3MXUd2"}, "image_id": 313, "id": 4494}, {"iscrowd": 0, 
"category_id": 1, "bbox": [399.0, 303.0, 76.0, 96.0], "area": 3336, "segmentation": {"size": [512, 512], "counts": "]kW62m?2N2c@Lk>6SAMj>5TAMj>5TAMk>4SANk>5RAMl>5RAMl>e0N2N2N2N2N3M1IbNkA^1U>dNiA\\1W>fNhAY1f=cNiBd1W=^NgBc1X=_NfBa1Z=aNdB_1\\=cNcB\\1]=fNaBZ1_=hN_BX1a=kN\\BU1e=b00000000001O000000000001O00002N2N2O1N2N2N2N3_NkAR1W>lNkAR1W>lNkAR1W>lNkAR1W>lNkAR1c>N2N3NO002N01O2O1N2N2N2N2N2N2N3M2N2N2N2O1N2NXUb0"}, "image_id": 313, "id": 4495}, {"iscrowd": 0, "category_id": 1, "bbox": [201.0, 345.0, 75.0, 56.0], "area": 2291, "segmentation": {"size": [512, 512], "counts": "ZkT32m?2N3M2N2N2N2N2N2N2N2N2N2N2QAUOf>m0XAUOf>m0XAUOf>T1]AgN]>Z1aAhN]>_1O1000001O00N2N2N20N2N20O1N2O2M2N2N2N2N2N2N2N2N2O10N2N2N2N2N2N2N2O1000O1N2N2N2N2N2N20000O1N2N2N3M2N2N2N2N2N2N2N2NRde3"}, "image_id": 313, "id": 4496}, {"iscrowd": 0, "category_id": 1, "bbox": [102.0, 350.0, 77.0, 57.0], "area": 2241, "segmentation": {"size": [512, 512], "counts": "h[c11n?2N2N2N2N1N3N2N2N2N2N2N1N3N2O1000000000O10O10000M3O01N2M300000O001N0O10002N2N2N2N1N3N2N2N2N20O100ON010000000O10O2O2N1O2N2M3HXAUOj>i0XAUOj>h08O2N2N2N2N2M3N1O2N2N2M3NcTV5"}, "image_id": 313, "id": 4497}, {"iscrowd": 0, "category_id": 1, "bbox": [430.0, 353.0, 82.0, 88.0], "area": 3398, "segmentation": {"size": [512, 512], "counts": "T]g62n?1N2N2N2N2N2N2N2N2N2N2N3N1_OWOhAk0V>XOgAj0W>XOgAj0W>XOgAj0W>XOgAj0W>XOgAi0Y>>0001O00000000001O2N2N2N0001O0001O0000000000000000000001O01O00KnA]NR>c1PB[NP>e1500000002N2000000000000N2kNdAc0^>[OdAc0^>[OdAc0^>[OdAc0^>[OdAd0]>ZOeAd0]>ZOeAd0m>N2N2N3M2N2N2N2N2NgC"}, "image_id": 313, "id": 4498}, {"iscrowd": 0, "category_id": 1, "bbox": [321.0, 378.0, 88.0, 88.0], "area": 3443, "segmentation": {"size": [512, 512], "counts": "_lP51n?2N2GMb@5\\?Mb@5k0Hb=a0\\BAb=b0cA\\Of04e=d0YB^Of=d0WB^Og=d0WB^Og=d0WB^Og=d0WB^Og=d0XB^Oe=d0YB^Oe=d0YB^Oe=d0YB^Of=c0XB_Oh=a0VBAj=[10000000O1N2N2N3M2N2N2N2N2O1N2O100000001O`AcN\\>a10001O0000000000000001O0O1N2N2N2N2N2N2N2O1N2N2N3M000000000000000001O0001O2N2N3M2N2N2N2N2N2N2N2O1NbRc1"}, "image_id": 313, "id": 4499}, {"iscrowd": 0, "category_id": 1, "bbox": [165.0, 395.0, 74.0, 62.0], "area": 2351, "segmentation": {"size": [512, 512], "counts": "hlb22m?2N2N3M2N2O1b@CV??h@CV??h@CV?e0N2N2N2N2N2O112M000000000000000O1N1O02N2000000000001ON2N3M2N2N2N2N00000003N1N2N2N2N2N1O000000001O2N2KTATOm>j06N2N3M2N2N2N2N2O1N2N2N2N2NkRX4"}, "image_id": 313, "id": 4500}, {"iscrowd": 0, "category_id": 1, "bbox": [484.0, 412.0, 28.0, 60.0], "area": 887, "segmentation": {"size": [512, 512], "counts": "R^b72m?2N2N2N2N2N2N2N2N2N2N2N2N2CUObAm0\\>UObAm0\\>UObAm0\\>UObAm0\\>UObAm0\\>=N2N1O00000000000UC"}, "image_id": 313, "id": 4501}, {"iscrowd": 0, "category_id": 1, "bbox": [235.0, 436.0, 96.0, 76.0], "area": 3426, "segmentation": {"size": [512, 512], "counts": "Qoe31n?2N2N2O1N2N2N2N2N2N2i@]On>e0PA]On>e0PA]On>l0N2N2N2O1N2N2N2N1O00001O2N2N2N2N2O0O1O1O1OnNoA7P>ISB5l=KVB4i=LYB3f=M\\B2c=N_B1`=ObB0]=0eBOZ=1hBNX=1iBNW=2jBNU=2kBNU=2kBNU=2kBNU=2kBNU=4iBLX=5fBK\\=5bBK`=5^BKd=5ZBKi=4UBKn=5PBKR>5lAKV>5hAKZ>5eAJ]>k02N2N2N2O1N2N3M2N2N2N00000000000001O01O2N2N2N2N2N2N2N2N2N3O00000O1N2N2N2N^Qj2"}, "image_id": 313, "id": 4502}, {"iscrowd": 0, "category_id": 1, "bbox": [331.0, 454.0, 10.0, 9.0], "area": 47, "segmentation": {"size": [512, 512], "counts": "[nU51n?2N2N2N1O02N2O1N2NgQe2"}, "image_id": 313, "id": 4503}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 457.0, 20.0, 55.0], "area": 763, "segmentation": {"size": [512, 512], "counts": "Y>g1Z>O3M0000000000000B>E;00000000000000000cae7"}, "image_id": 313, "id": 4504}, {"iscrowd": 0, "category_id": 1, "bbox": [28.0, 462.0, 
76.0, 50.0], "area": 2170, "segmentation": {"size": [512, 512], "counts": "k_>1n?2N1O2M3N2N1O1O1O1LBe@?Z?4O1O1N2O1O1O1O1O1O1N2O1O1O1O1O1O1N2O1O1O1O1O1O1N2O1O1001O1O1O1O1O1O001O1O1O1O1O1O1O001O1O1QOSAi0n>UOTAj0R?O1O1O001O1O1O1O1N2N2N2N1O2N2N2M3N2N]`[6"}, "image_id": 313, "id": 4505}, {"iscrowd": 0, "category_id": 1, "bbox": [379.0, 466.0, 95.0, 46.0], "area": 2708, "segmentation": {"size": [512, 512], "counts": "Pom52m?2N2N2N3M2N2N2N2N2O1h@[OR?g0l@[OR?k0N2N2N2O1000001O0000000000O1O1O1O1O2N1O1O1O1O1O1O1O1O1O1O00000000000000O1O1O1O1O1O1O1O1O1O100O1O1O1O1O001O2N1O1O1O1O1O1O1O1002N1O1O1O1O1O1O1O1[OVA2k>LWA3j>LWA3j>KXA4i>JYA5h>IZA6g>H[A7f>G\\A7f>G]A7W?N2N2NW`b0"}, "image_id": 313, "id": 4506}, {"iscrowd": 0, "category_id": 1, "bbox": [472.0, 485.0, 10.0, 10.0], "area": 45, "segmentation": {"size": [512, 512], "counts": "W_\\71n?2N2O10001O00N2N2Oe`>"}, "image_id": 313, "id": 4507}, {"iscrowd": 0, "category_id": 1, "bbox": [301.0, 487.0, 70.0, 25.0], "area": 1080, "segmentation": {"size": [512, 512], "counts": "jof41n?2N2N2N2N2N100O1O1O1O1O1O1O1O1O1O11O1O1O1O1O1O1O1O00O1001O1O1O1O1O1O00O1O1IFg@;X?Gf@:Y?He@9Z?7O11O1O1O00O1O1O1O1O1O1001O1O1O1O1O1O1O1O1O1N2N3M2N2N2N2OXPV2"}, "image_id": 313, "id": 4508}, {"iscrowd": 0, "category_id": 1, "bbox": [367.0, 490.0, 10.0, 9.0], "area": 45, "segmentation": {"size": [512, 512], "counts": "]og51n?2N2N2O1000N2N2N2OaPS2"}, "image_id": 313, "id": 4509}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 510.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "noo72"}, "image_id": 313, "id": 4510}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 17.0, 19.0], "area": 252, "segmentation": {"size": [512, 512], "counts": "0`0`?001O00001O001O000000M3N2M3M3M3NR`g7"}, "image_id": 314, "id": 4511}, {"iscrowd": 0, "category_id": 1, "bbox": [28.0, 0.0, 44.0, 47.0], "area": 1312, "segmentation": {"size": [512, 512], "counts": "^P>3k?3L3M3N3L3O2O00001TA]OY>c0eA@Z>`0cAC]>>_AF`>:^AHb>9ZAJf>i0O001O00001O00001O001O00001O00O1M3N2M3M3DUAAm>=UA@n>=VA@l>>R1^APOb>X10O01O01O01O0M301O010O00010O00010O01O01O01M2M3M4M2M3M4L3N3O000010O0001M2M4M2M3M4L3N]_`5"}, "image_id": 314, "id": 4516}, {"iscrowd": 0, "category_id": 1, "bbox": [311.0, 21.0, 79.0, 57.0], "area": 2958, "segmentation": {"size": [512, 512], "counts": "]ak4232`?9M3M4K4M3M4L11N3O11O01OTAVOb>k0ZAXOf>Q110O00010O00010O0000010O00010O003N0N1M3N30O000010O000010OO1N3O00010O0000010O00010O00010O0001K4M3M3M4K4N201O01O01N1L4M4L3M3L4M4L3MSol1"}, "image_id": 314, "id": 4517}, {"iscrowd": 0, "category_id": 1, "bbox": [438.0, 23.0, 73.0, 60.0], "area": 2441, "segmentation": {"size": [512, 512], "counts": "lQk62j?5L3M3GFi@>S?9L4M4L3M300010O00010O00010O00M4M20010O0001O01O01O01O00010O00010O0001O01OSO_A;b>AaA?_>^OeAb0Z>[OiAe0W>XOlAh0f>10O00010O00010O000010O000010O00010O00010L3M3L5L3M3M4K[>"}, "image_id": 314, "id": 4518}, {"iscrowd": 0, "category_id": 1, "bbox": [194.0, 35.0, 90.0, 75.0], "area": 3376, "segmentation": {"size": [512, 512], "counts": "iQQ33j?3M3L5L3M3L4O20O00010O0004L01O00010O0001ZAVOU>i0gA\\OX>d0eA_O[>b0aAA`>o001O000O2L3M3010O0000010O00010O00010O0001O01O000iNgAh0Y>SOkAm0U>POoAo0Q>nNRBS1]>00010O00010O0001O01O00010O00010O0001O01O01O01O00010XAkNc>U1\\AmNc>S1]AmNd>W10K]AmNc>o0aARO_>j0dAUO]>h0?Dm@HV?5n@FW?6;M3LPna3"}, "image_id": 314, "id": 4519}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 37.0, 2.0, 6.0], "area": 9, "segmentation": {"size": [512, 512], "counts": "U16j?Mnnn7"}, "image_id": 314, "id": 4520}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 52.0, 51.0, 75.0], "area": 2243, "segmentation": {"size": 
[512, 512], "counts": "k25i?2M4L3FFk@=R?Fl@=Q?:M4L3hAlNb=V1[BnNd=S1YBoNg=Q1VBROk=m0RBWOm=j0PBXOP>h0mA[OT>Y1O0M4L3N2N30O00010O01O01O010N1M3N3L3M3N0GmAfNU>X1nAeNV>X1:M3M4M2M4L3M3N3L3M3N3L3M4LW^V7"}, "image_id": 314, "id": 4521}, {"iscrowd": 0, "category_id": 1, "bbox": [58.0, 64.0, 82.0, 74.0], "area": 3603, "segmentation": {"size": [512, 512], "counts": "[Sm04a?Ne@5X?Nf@4W?=M2M3M4JROVAP1g>7M21O010O00010O01O0O101O00010O00gN_AT1g>O00010O0010O0O2L3N2M4L3N2M4L301O00010O00010O010O00010O00010O010O00010OTNoAi1U>O01O010OTOlALU>1mAOS>NPB3o=JUB5l=HVB8j=EZB;e=]OlALb0g0c=ZOmAMb0j0`=VOiBi0W>M3M4M2M3M4M2M4Lali5"}, "image_id": 314, "id": 4522}, {"iscrowd": 0, "category_id": 1, "bbox": [294.0, 76.0, 82.0, 59.0], "area": 2833, "segmentation": {"size": [512, 512], "counts": "USc423Od?8M3M4L3M3M4L3M3QAROk>R100010O00010O00010O00010O00010OO101O00010O00010O00010O00010XAkNc>Z1O01O01O01O01O01O01O01O01O01O01O01O01O01O01O0N2M4M20N3L3M3M4L3M3M4L3M3O2O0O1M4L3M3M4LYmS2"}, "image_id": 314, "id": 4523}, {"iscrowd": 0, "category_id": 1, "bbox": [421.0, 80.0, 77.0, 59.0], "area": 2799, "segmentation": {"size": [512, 512], "counts": "jcb63[?NSA5j>ORA5j>NRA6j>NSA5j>c0L3M3M4M21O01O01O01O01O01O0001O01O01N1M3010O00010O00010O0000010O00iN^AP1i>1O01O01O01O01O00010O0001O01O01O01O01O01O01N1N210O0001O01O0O1M4L3M3L5L3M3M4Lgl6"}, "image_id": 314, "id": 4524}, {"iscrowd": 0, "category_id": 1, "bbox": [171.0, 97.0, 100.0, 49.0], "area": 2758, "segmentation": {"size": [512, 512], "counts": "ice21l?3M4K4M3M3M4O01O01O01O01O01O00010O0001O01O02N10ON2M4O01O01O00010O0001O01O01O01O00010O0001M2000O2K4M3M4N11O03M10O00010O000010O000010O00010jN_Ak0a>RObAn0h>010O00N2M4N11M20010O0000010O00010O00010O0000L5L3M3L5L3M3M\\\\h3"}, "image_id": 314, "id": 4525}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 125.0, 23.0, 73.0], "area": 899, "segmentation": {"size": [512, 512], "counts": "m3Y2h=N1010OEZB\\Nf=b1]B[Nf=a1]B\\Nf=a1>M4M4K3M4M2M4L3N2M4L3N2M4L3N3Lk[d7"}, "image_id": 314, "id": 4526}, {"iscrowd": 0, "category_id": 1, "bbox": [285.0, 134.0, 84.0, 51.0], "area": 2767, "segmentation": {"size": [512, 512], "counts": "hd^44i?3M3L5L3M3N210O000o@\\Og>d0UA_Ok>l0O01O01O01O0001O000L5O01O01ON2O20O00010O00010O0000010O0VAmNg>V110O0001O01O01O01O00010O00010OM30O2L30010O0001O01O01O01O00010O0001O0N2M4L3L4M4L3M3L4M4L3M_[W2"}, "image_id": 314, "id": 4527}, {"iscrowd": 0, "category_id": 1, "bbox": [35.0, 137.0, 91.0, 60.0], "area": 2986, "segmentation": {"size": [512, 512], "counts": "Xea01l?4L3N2M4L3M4M200010O00010O01O01O010O0001M2M3M4M201O01O010O00010O00010O01O01O010O00010OUAoNg>U1010OM3M4L3N3L30001O01O010O01O01O01O01O01O01L3M4M2M3M4L3N2N30O00010ON3Io@[OS?b07M4M2N201O01O01M2M4L\\kP6"}, "image_id": 314, "id": 4528}, {"iscrowd": 0, "category_id": 1, "bbox": [398.0, 138.0, 58.0, 72.0], "area": 2451, "segmentation": {"size": [512, 512], "counts": "\\UW63c?Oa@5\\?Na@6Z?:M3M4L3M310O0000010O000iAoN`=Q1\\BROd=n0XBWOg=i0VBZOk=f0QB]Oo=c0nAAQ>?kAEV>R101O00010O0001O01N1M3M4L3N201O01O0N2M4K4M3M4L3L4M4L3M3L4M4L3L4M4L3M3L5L3Mbkk0"}, "image_id": 314, "id": 4529}, {"iscrowd": 0, "category_id": 1, "bbox": [364.0, 139.0, 8.0, 7.0], "area": 42, "segmentation": {"size": [512, 512], "counts": "_Tf52j?40010O000001NdkU2"}, "image_id": 314, "id": 4530}, {"iscrowd": 0, "category_id": 1, "bbox": [482.0, 139.0, 9.0, 9.0], "area": 55, "segmentation": {"size": [512, 512], "counts": "`Ta72k?4M21O01O0001N1Lg[:"}, "image_id": 314, "id": 4531}, {"iscrowd": 0, "category_id": 1, "bbox": [458.0, 162.0, 54.0, 68.0], "area": 2431, "segmentation": {"size": [512, 512], "counts": 
"iVU72j?4M4L3L4M4L3M3L4M4L3L4M4L3L4M4N11O01O01O00010O0N2010O000010O00L5L3N200010O00010O00010L3M3WOPB_OS>>PB^OU>>nA_OU>=PB_OS>>PB_OT>=i0M3L5LkJ"}, "image_id": 314, "id": 4532}, {"iscrowd": 0, "category_id": 1, "bbox": [1.0, 191.0, 86.0, 73.0], "area": 3032, "segmentation": {"size": [512, 512], "counts": "if01m?2O2M2O2M3M2O2M3N1N3N1N3O1O01000O01000O0N3N1N3M3N1N3N2O0010O01000O01000O0100N1O2M201O10O01000O010O10N2M2O2N20O10O10O10O01ROkA0T>NnA3R>KPB4Q>IQB8n=BmAC7k0l=@[B?f=^O\\Bc0c=[O`Bd0a=ZOaBf0_=WOcBj0\\=UOfBj0Y>0O10M2N3N2M2O2M3M2O2M2O2M3N^Xd6"}, "image_id": 314, "id": 4533}, {"iscrowd": 0, "category_id": 1, "bbox": [166.0, 219.0, 67.0, 72.0], "area": 2788, "segmentation": {"size": [512, 512], "counts": "eXc22j?4M3L5L3N201O01O00010O0001D^OVAd0g>_OUAe0g>6RBJn=2WBMj=OYB2f=J^B6b=GbB8_=CeB=[=@hBa0W=[OmBe0S=XOQCg0T>1O01M2L4M3L5L3L4MmW[4"}, "image_id": 314, "id": 4534}, {"iscrowd": 0, "category_id": 1, "bbox": [240.0, 236.0, 70.0, 84.0], "area": 3339, "segmentation": {"size": [512, 512], "counts": "dYh31l?3M3L4M4kNDaB?\\=D`B`0]=C`Ba0[=DcB=Z=FfB:W=IjB7S=LiB7X=HeB;[=EbB?]=A`Bb0a=^OZBf0f=ZOWBi0i=h00O0001O01OM4L3M300010O00010O0001O01O01O01N1L4M4L3M3TNTB`1P>\\NTBa1X>L3AaAYOc>g0]AVOg>i0YATOj>m040004L010O0001O01O01O01O000L5L3M3L5L3M3MogT3"}, "image_id": 314, "id": 4535}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 245.0, 8.0, 17.0], "area": 72, "segmentation": {"size": [512, 512], "counts": "e7a0`?M3N1N3N2N2M2OWhk7"}, "image_id": 314, "id": 4536}, {"iscrowd": 0, "category_id": 1, "bbox": [325.0, 261.0, 58.0, 83.0], "area": 2926, "segmentation": {"size": [512, 512], "counts": "QiR53i?4M3dAGn<=nBFo<=nBFn<>nBGn<=nBFo<=nBFo<=nBFP==kBHU=7hBLX=4eBO[=1bB3]=N^B6c=S101O01O01O01ON201O01O01O01O01O01O01O01L3M3L4M4L3M3L7J3M4L3M3L5L3M31O01O01OM4L3M3L5L3M3M_WP2"}, "image_id": 314, "id": 4537}, {"iscrowd": 0, "category_id": 1, "bbox": [391.0, 279.0, 59.0, 87.0], "area": 3146, "segmentation": {"size": [512, 512], "counts": "ZiS64h?4nAIX<:eCIX<:\\BEn05e<8XBGP15h<M3L5L3L4M4L3L4M4N1001O01O01O01L3L4M4K4M3Mjfn0"}, "image_id": 314, "id": 4538}, {"iscrowd": 0, "category_id": 1, "bbox": [193.0, 288.0, 11.0, 9.0], "area": 64, "segmentation": {"size": [512, 512], "counts": "SiP33j?40O0001O01O01O01O0LQgi4"}, "image_id": 314, "id": 4539}, {"iscrowd": 0, "category_id": 1, "bbox": [133.0, 289.0, 88.0, 78.0], "area": 3669, "segmentation": {"size": [512, 512], "counts": "hjR21j>1WB1f=3VB0h=2bAJ;7P>2bAI<7P>2aAK;7P>:nAHP>;mAHP>:nAHP>:mAJR>R11O010O010O010O0010O010O01O0N3M2M3O20O010O01O010O010O010O0010O010O0010O010O0010O010O010O01O0hNbAo0^>nNeAQ1[>mNgAT1c>O01O01O010O010O01O010O01O010O01ON3M2N3L3N3M2N3L3N2N3L3N3McUa4"}, "image_id": 314, "id": 4540}, {"iscrowd": 0, "category_id": 1, "bbox": [444.0, 306.0, 60.0, 82.0], "area": 3088, "segmentation": {"size": [512, 512], "counts": "X[n6272Y?1d@2Y?1d@3X?9K4QBSNh=T2M3N3O01O0001O01O01O01O01O0M3010O00010O00010M2M3M4L3M3M4FoA`NT>]19L5L3M3M3M4O01OO2L3M3M4L3M3M4K4MWf3"}, "image_id": 314, "id": 4541}, {"iscrowd": 0, "category_id": 1, "bbox": [285.0, 322.0, 8.0, 8.0], "area": 48, "segmentation": {"size": [512, 512], "counts": "Xj^41l?3M30001O0000MQV]3"}, "image_id": 314, "id": 4542}, {"iscrowd": 0, "category_id": 1, "bbox": [217.0, 340.0, 88.0, 64.0], "area": 2970, "segmentation": {"size": [512, 512], "counts": "fk\\32l?3L3N3M2N2M4M2N3L3N3M200010O010O01O010O01O01N1M40O010O0010O0ZAlN`>U1]AnNc>W1010O00N3N1010O010O01O01O010O01N1N3M20O1N3M2M3N3M2N1N0102N2N2N20N3L3N3M2O2O00010O010O010N1N2M4M2N3M2M4M2N3L3N2NTUW3"}, "image_id": 314, "id": 4543}, {"iscrowd": 0, "category_id": 1, "bbox": [77.0, 349.0, 74.0, 
81.0], "area": 3190, "segmentation": {"size": [512, 512], "counts": "elV12l?2N3M2N2N3M2N3N110O0i@]OQ?d0l@_OT?f0010ON3M2N3M2N3M2N3M2DeNQB]1m=eNQB^1l=eNQB]1m==M2N3M1O0020010O010O010O010O010O0010O010M2N3M2N3M5K2N3M2N3M2N3M2N3M2N3M3M2N3M2N3M2N3M2N3M2N3M3N\\Td5"}, "image_id": 314, "id": 4544}, {"iscrowd": 0, "category_id": 1, "bbox": [321.0, 355.0, 73.0, 78.0], "area": 3366, "segmentation": {"size": [512, 512], "counts": "fkP51=2=Oc=5kA1<0c=5kA1<0d=d0WBAg=b0RBDo=V101O000000010O00000001O01O00000001O01O00O1N201O01O000001O0001O0000jNQB;P>_OUBa0k=YO[Bh0d=ROcBm0X>0000000010O00000000SASOg>T10O0000000TOZA>g>[O_Af0m>0001O000001O0001O0N2J6K5J6J_cj1"}, "image_id": 314, "id": 4545}, {"iscrowd": 0, "category_id": 1, "bbox": [161.0, 366.0, 11.0, 11.0], "area": 71, "segmentation": {"size": [512, 512], "counts": "dk`22m?1N3M2O2O0100O0N3M2NadY5"}, "image_id": 314, "id": 4546}, {"iscrowd": 0, "category_id": 1, "bbox": [151.0, 385.0, 68.0, 86.0], "area": 2852, "segmentation": {"size": [512, 512], "counts": "^m[23k?2N2M4M2M4M2O1010O010O0010O0010OZA]On=e0nA^Oo=d0PB_Om=b0QBAl=c0QB@l=b0QBAl=c0QB_Om=c0PBAl=b0RB@l=\\1M2M301O010O00010O010O00010ON1N10O010O11N4HWBUNk=i17M4M2N3L3N3L3N2M4M2M4M2N2M4M2M4M2M3N3L3NhSb4"}, "image_id": 314, "id": 4547}, {"iscrowd": 0, "category_id": 1, "bbox": [415.0, 395.0, 83.0, 57.0], "area": 2908, "segmentation": {"size": [512, 512], "counts": "`m_66e?5J6K6O000000000010OK5J6L41O01O000000010O00000001O01J5N21O0001O000001O0001O0K5000001O01O00000001O01O000001O01O0aNeAY1a>0000VO^A7a>DdA<]>]OjAb0V>YOoAg0e>000010O00000001O01O000000010O000L4K5J6Kib6"}, "image_id": 314, "id": 4548}, {"iscrowd": 0, "category_id": 1, "bbox": [208.0, 422.0, 58.0, 69.0], "area": 2530, "segmentation": {"size": [512, 512], "counts": "f^X31k?5[@KZ?8b@L[?>N30O00N2L5L3M3L5L3O101O01O00010O0001O01O00010O0O10N3L3M3L4M4L3M310O00010O0001O01O00010O0001L3aNnAl0V>POnAm0V>oNmAm0W>POlAm0f>\\OPA3S?JPA2T?JPA3T?IPA3WQk3"}, "image_id": 314, "id": 4549}, {"iscrowd": 0, "category_id": 1, "bbox": [295.0, 433.0, 59.0, 79.0], "area": 2723, "segmentation": {"size": [512, 512], "counts": "koc41j?5K5K5K6J5J6K5K5L5O000000000J7M2000L4L40001OcAdNV>d1O000001O0001O000001OL4K5J6J6M31O0000000\\O`BhN`=R1fBnN[=l0jBQOY=i0nBPOX=j0nBPOX=m0o001O0001O00K5J7I6K5Jab^2"}, "image_id": 314, "id": 4550}, {"iscrowd": 0, "category_id": 1, "bbox": [366.0, 447.0, 59.0, 65.0], "area": 2643, "segmentation": {"size": [512, 512], "counts": "i_g54a?;F:K50000o@TOn>o0001O000000000000000000001O000000O1E;H800000000000000G9K5000000001O0000000000000000001O000000000^OWBmNi=i0aBWO_=>lBYO]==X1EZR[1"}, "image_id": 314, "id": 4551}, {"iscrowd": 0, "category_id": 1, "bbox": [427.0, 467.0, 63.0, 45.0], "area": 1993, "segmentation": {"size": [512, 512], "counts": "joe66j?000000000H8F:G9F:N20000001O00000000000000001O000000000oN`Ab0o>0001O00000000000000001O0000H800000000000000001O00000000000000000@WAIj>L`AKio:"}, "image_id": 314, "id": 4552}, {"iscrowd": 0, "category_id": 1, "bbox": [428.0, 477.0, 6.0, 7.0], "area": 34, "segmentation": {"size": [512, 512], "counts": "P_f63j?301O0000MVaV1"}, "image_id": 314, "id": 4553}, {"iscrowd": 0, "category_id": 1, "bbox": [267.0, 0.0, 93.0, 42.0], "area": 1942, "segmentation": {"size": [512, 512], "counts": "U`U42l?3L30001O001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001OUOTAb0l>[OXAe0g>YO[Ag0o>010O010O0010O00010O010O00010O01O01M2M4L3N2M4L3N2M`o[2"}, "image_id": 316, "id": 4554}, {"iscrowd": 0, "category_id": 1, "bbox": [422.0, 0.0, 2.0, 1.0], "area": 2, "segmentation": {"size": 
[512, 512], "counts": "PPc61o?0PP\\1"}, "image_id": 316, "id": 4555}, {"iscrowd": 0, "category_id": 1, "bbox": [494.0, 0.0, 18.0, 28.0], "area": 304, "segmentation": {"size": [512, 512], "counts": "\\Pg71n?2N2O2M2N2N2N3M2N2N2N2O2M1O01O1O1O1"}, "image_id": 316, "id": 4556}, {"iscrowd": 0, "category_id": 1, "bbox": [465.0, 6.0, 25.0, 23.0], "area": 351, "segmentation": {"size": [512, 512], "counts": "d`X72k?3N3M2M4M2N210O0010O0010O010O0010O001O0M3N3L3N3Mdo:"}, "image_id": 316, "id": 4557}, {"iscrowd": 0, "category_id": 1, "bbox": [358.0, 22.0, 87.0, 79.0], "area": 3242, "segmentation": {"size": [512, 512], "counts": "YRc51m?2N2N3L3N3M2N3M2N3M2N3M2N3O010O010O010O01O010O010O010O010O01O010O010O010O010O01O010O010O01L3N3M2N3M2N3M2N3L3N3M2N3M2N2N1O0O1003L3N2N3M2N3M2N3M2N3L3N3M2N3M2N3M2Co@IT?4o@IS?5o@HU?5n@HT?6YObAg0]>WOeAi0[>TOiAk0f>1O010O00010O010O00010O01O01O01O0M4M2M3N3L3M3N3L3NT^k2"}, "image_id": 316, "id": 4559}, {"iscrowd": 0, "category_id": 1, "bbox": [467.0, 47.0, 45.0, 100.0], "area": 3282, "segmentation": {"size": [512, 512], "counts": "\\bY73h?5K5K5K6J5QBUOBO\\J5JYN"}, "image_id": 316, "id": 4560}, {"iscrowd": 0, "category_id": 1, "bbox": [344.0, 81.0, 46.0, 42.0], "area": 1164, "segmentation": {"size": [512, 512], "counts": "YS\\52l?3L3N3M2N2M4M2N3M2N3M201UAQOc>o0[ASOf>m0WAUOi>R10O010O0O2M2N30O010O010O0010O010O010O001M2N3M2N3L3N2N3M2N3M2MSml1"}, "image_id": 316, "id": 4561}, {"iscrowd": 0, "category_id": 1, "bbox": [230.0, 86.0, 65.0, 49.0], "area": 1560, "segmentation": {"size": [512, 512], "counts": "^Sc31m?3L3N2N3L3N3M2M4M2N2O2O010O0010O0010O0010O010O0010O0010O010O00010O010O010O00010O010O0WOQAb0n>\\OTAd0U?O010O01O010O01O010O01O01O0N3L3N3M2M3N3M^\\\\3"}, "image_id": 316, "id": 4562}, {"iscrowd": 0, "category_id": 1, "bbox": [331.0, 124.0, 89.0, 60.0], "area": 2682, "segmentation": {"size": [512, 512], "counts": "edU52n?2M3N2M3N2M3N2M3N2M3N2M3N2M3N2N2M2OO010O010O010O010O010O010O010O010O010O010O010O10O010O010O010O010O010O03N2M3N2M3N2M3N2M010O010O0100O010O010O010O010O010O010O02O2M3N2M3N2M3N2M3N]k]1"}, "image_id": 316, "id": 4563}, {"iscrowd": 0, "category_id": 1, "bbox": [197.0, 146.0, 80.0, 54.0], "area": 2174, "segmentation": {"size": [512, 512], "counts": "UeR32k?4M2N3L3N2M4M2N3O00010O0010O0010l@XOn>h0PAZOP?k010O010O00010O010O00010O010O00010O010O0M3N30O010O01O01O010O01O01O010O01O01O010O01O01O010O0VOUA`0j>]OYAc0h>ZOZAf0Q?0O010O0001O0N3L3N2M4M2M4M2M`Ze3"}, "image_id": 316, "id": 4564}, {"iscrowd": 0, "category_id": 1, "bbox": [506.0, 148.0, 6.0, 14.0], "area": 48, "segmentation": {"size": [512, 512], "counts": "lTm73l?1N3M2O2M3[K"}, "image_id": 316, "id": 4565}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 171.0, 50.0, 76.0], "area": 2305, "segmentation": {"size": [512, 512], "counts": "kUR66j?;E6I100000000000O10000000IZOTAf0l>700I70007Ii0XA[Oe>Q1N201O010O01O01O01O02O0O01O[AgNa01^=[1^BiNa=W1]BkNd=U1XBnNh=R1UBROj=n0TBTOm=`1O01O01O01O01O010O01ORJ"}, "image_id": 316, "id": 4567}, {"iscrowd": 0, "category_id": 1, "bbox": [178.0, 187.0, 68.0, 64.0], "area": 2444, "segmentation": {"size": [512, 512], "counts": "RWi21l?4L3M4M2M6J3M4M2M3M40O00010ON3M2M3M4L310O00010O00010O0010O0010O0010O00010O00010O0bNcAZ1a>00010OmN_Ag0a>WOaAj0_>ROdAn0\\>oNhAP1d>10O00010O01O01O0O1N3L3M4L3N2M4L3M3N3LViT4"}, "image_id": 316, "id": 4568}, {"iscrowd": 0, "category_id": 1, "bbox": [293.0, 203.0, 51.0, 58.0], "area": 1756, "segmentation": {"size": [512, 512], "counts": 
"lfb44j?2N2M4M2N3L3ZA^Ol=e0QB^Om=c0QB_Oo=a0nACQ>>lADU>;iAGW>:eAJ[>5cAM]>l00O00010O010O010ON2N3O010O0010O0010ON3M2N2M4M2N3L3N2N3L3N3M2M4M2N2M4M2N3L[ic2"}, "image_id": 316, "id": 4569}, {"iscrowd": 0, "category_id": 1, "bbox": [332.0, 244.0, 77.0, 65.0], "area": 2771, "segmentation": {"size": [512, 512], "counts": "gXV51m?3L3N2M4L3N3L3N3L3N2M4M2M4M20001O010O01M2N3N10010O010O00010O010O00010O01]AmNX>R1eAQO[>P1bARO\\>P1bASO\\>Y1010O00010O010O00010O010O010O00010O010M2N2M4M2N3L3N2O2O001TOo@e0S?YOPAc0X?N3L3N2N3L3NeWc1"}, "image_id": 316, "id": 4570}, {"iscrowd": 0, "category_id": 1, "bbox": [261.0, 245.0, 32.0, 30.0], "area": 543, "segmentation": {"size": [512, 512], "counts": "XhR41m?3L3N2M4M2M4M2N30O0010O0010O010O0010O0010O010N1N3M2M3N3M2M4M2NSX]3"}, "image_id": 316, "id": 4571}, {"iscrowd": 0, "category_id": 1, "bbox": [186.0, 255.0, 70.0, 61.0], "area": 2275, "segmentation": {"size": [512, 512], "counts": "UYm21m?3L3N3L3N3L3N2M4M2M4M2M4M2M7J2O2O00010O01N1O20OPO`A`0`>^ObAb0_>[OdAe0[>XOhAh0Y>UOjAk0U>ROnAn0c>O01O010O010O00010O010O001L3N2M4M2N3M200010O0M4M2N3L3N2N3L3O20O010O00XOl@d0Y?L3N3Bc@5_?Id@3f?NTgo3"}, "image_id": 316, "id": 4572}, {"iscrowd": 0, "category_id": 1, "bbox": [267.0, 277.0, 26.0, 29.0], "area": 450, "segmentation": {"size": [512, 512], "counts": "XiU42l?3L3N3M2M3N3c@^OW?h0M200010O010N1O2O01OO2M2M4M2N2M4M2M4MVW]3"}, "image_id": 316, "id": 4573}, {"iscrowd": 0, "category_id": 1, "bbox": [147.0, 304.0, 78.0, 48.0], "area": 2190, "segmentation": {"size": [512, 512], "counts": "cjY22j?4L4K6K4L4L4L5K4N20000010O000001O01O0001O0001O0nNZAk0e>QO`An0i>0000101N0000010O0000N3O0001O01O0001O01O0001O0001O000L2N003O2010O0000010O00000010O0000010O00000L5K4L4L4LTV_4"}, "image_id": 316, "id": 4574}, {"iscrowd": 0, "category_id": 1, "bbox": [280.0, 304.0, 103.0, 71.0], "area": 3594, "segmentation": {"size": [512, 512], "counts": "mZ\\41m?3L3@LRA7k>LQA7l>LRA6l>MQA6k>a0N3M2M4M2M3O2O010O01O010O0O1N3L3N3M2M4M2O110O00M4M0O13N1010O010O0010O0010O010O0010SOTAd0l>YOXAf0Q?10OZA[OQ>f0lA\\OT>d0jA_OV>a0fABZ>>dAE\\>:bAH^>m0010O010O010O00010O010O0010ON2N3L3N1O0O01101010O01O010O01O010O01O010O01OTOUAc0l>YOWAe0k>YOWAe0T?M2M4M2N2M4M2N]UP2"}, "image_id": 316, "id": 4575}, {"iscrowd": 0, "category_id": 1, "bbox": [468.0, 349.0, 44.0, 86.0], "area": 1937, "segmentation": {"size": [512, 512], "counts": "Q]Z72l?3M2M4M2M3N3O010O0010O0010O001M2M3N3L3N3M2M3N3L3N3L3N2M4E[NWBg1g=[NWBh1f=:N3L3N2N3M201O0001N1M4M2MTE"}, "image_id": 316, "id": 4576}, {"iscrowd": 0, "category_id": 1, "bbox": [206.0, 354.0, 35.0, 35.0], "area": 632, "segmentation": {"size": [512, 512], "counts": "c[W32l?3L3N3L3N2M4M2010O0010O0k@\\Oo>d0n@_OQ?k0MO2N110O0N2N3M210O0Bi@3W?Kl@5T?Go@9\\?10O010O000N3M2M[TW4"}, "image_id": 316, "id": 4577}, {"iscrowd": 0, "category_id": 1, "bbox": [384.0, 359.0, 64.0, 87.0], "area": 2898, "segmentation": {"size": [512, 512], "counts": "l\\P62k?4M2M4m@F\\>;`AI_>7_AKb>5[ANd>2YA1h>c001O010O010N1N2N3L3N3M2M3N3L3N3M2M3NO10O010O13N1010O00010M2N1O0003L3N3L3N2N3L3N3L3N3M2M3N3M2M4XOQA;Q?CRA9Q?DRA:\\?M2M3N3MWdo0"}, "image_id": 316, "id": 4578}, {"iscrowd": 0, "category_id": 1, "bbox": [264.0, 367.0, 75.0, 56.0], "area": 2138, "segmentation": {"size": [512, 512], "counts": "Y\\T42k?4M2M4M2N2010O01N1N2M4M2M4L3O110O010O0010O[AoN[>Q1cAQO^>o0^ATOb>l0\\AWOc>S110O010O00010O010O00010O010O00010O010O01N1N2M4M2M4M2M3O2O010O01O01M2N3O01O01O010O01O01O0N3M2M3N3L3NkSf2"}, "image_id": 316, "id": 4579}, {"iscrowd": 0, "category_id": 1, "bbox": [188.0, 371.0, 30.0, 33.0], "area": 594, "segmentation": {"size": [512, 512], "counts": 
"n[n22l?2N2`@LQ?8k@LR?6l@LR?b0010O01O01O010O010O01O01O010O01ON3M2M4M2M3N3L3N3LVdb4"}, "image_id": 316, "id": 4580}, {"iscrowd": 0, "category_id": 1, "bbox": [120.0, 372.0, 69.0, 51.0], "area": 1760, "segmentation": {"size": [512, 512], "counts": "[\\l12l?2M3M4M2M4M2M4M2M310O0010O010O010O0010O0010O010O0010O0010O010O010O0010O001N1N3M200010O01O010O010O01O010UOUA?k>_OWAb0h>[O\\Ad0e>YO]Ah0n>O00010O010ZOk@?U?_Om@`0[?M2N2N3L3N3MZSQ5"}, "image_id": 316, "id": 4581}, {"iscrowd": 0, "category_id": 1, "bbox": [438.0, 407.0, 24.0, 24.0], "area": 369, "segmentation": {"size": [512, 512], "counts": "W]k61l?3N3M2M6K3M2N3O01O01ON3O0010O010O00N3L3N3M2M3NUch0"}, "image_id": 316, "id": 4582}, {"iscrowd": 0, "category_id": 1, "bbox": [75.0, 409.0, 64.0, 68.0], "area": 2592, "segmentation": {"size": [512, 512], "counts": "_nU13j?3N2M4M2M4M20010O001N1N2M4M2M4HoN]AS1a>oN\\AT1a>8M2M4M2M3N3L3N3O00010O01N1M3N3L3N3O00010O0010O0010O0010O001N1O11M2N2M4M2@^A_Of>>\\A@f>=]A@f>>]A_Of>=]A@f>>`0L3N2M4MmRj5"}, "image_id": 316, "id": 4583}, {"iscrowd": 0, "category_id": 1, "bbox": [237.0, 414.0, 64.0, 50.0], "area": 1840, "segmentation": {"size": [512, 512], "counts": "fmf32k?3N3M2M3N3M2M4M2M4SAWO_>k0_AWOa>i0]AYOc>S10O01O01O010O01O010O01O010O01O01O010O01OO2M0O2O2M4M2M3010O010O00010O010O00010O010O01O0N2M4M2N3L3N2M4M2MdRY3"}, "image_id": 316, "id": 4584}, {"iscrowd": 0, "category_id": 1, "bbox": [140.0, 433.0, 65.0, 76.0], "area": 2782, "segmentation": {"size": [512, 512], "counts": "d^V23j?3N2M4M2b@DU?f0M2M4\\AVOP>j0nAXOR>h0kA\\OU>d0hA^OX>b0eAA[>`0bAC^>o0010O0001N1N3M2N2N3O0010O00010O010O0010O0M1O0010O011N4M201O01O01N1N3L3N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3NWRi4"}, "image_id": 316, "id": 4585}, {"iscrowd": 0, "category_id": 1, "bbox": [505.0, 437.0, 7.0, 21.0], "area": 84, "segmentation": {"size": [512, 512], "counts": "Unl73j?3N3L3M3N3L3[B"}, "image_id": 316, "id": 4586}, {"iscrowd": 0, "category_id": 1, "bbox": [328.0, 449.0, 74.0, 63.0], "area": 2365, "segmentation": {"size": [512, 512], "counts": "m^T52l?2O2M3N1N3N2M3N1O2M3N1N3N2M2O2M3N2M2OO2010O010000O01000O0100fAiNj=V1UBkNk=V1RBlNo=T1oAmNR>S1kAoNV>P1iAQOW>P1fARO[>Y11O001O1O001O1O001O1O1O001O1O001O1O001O1M3N1O2M3N1N3N2M2O2N2M3N1N3N2M2O2Nk`f1"}, "image_id": 316, "id": 4587}, {"iscrowd": 0, "category_id": 1, "bbox": [197.0, 472.0, 71.0, 40.0], "area": 1933, "segmentation": {"size": [512, 512], "counts": "eoR31l?3N3L3O2O010O0M3N3M1N2O2N3L3N2N3O0SAQOi>S1010O00010O001O00001O00O1N21O001O001O00001O001O001O00001O001O00001O001O001O00001O001O001N1N2N3L3N3M2M3N3Li`i3"}, "image_id": 316, "id": 4588}, {"iscrowd": 0, "category_id": 1, "bbox": [63.0, 504.0, 23.0, 8.0], "area": 92, "segmentation": {"size": [512, 512], "counts": "ooo01m?2O1001ON2M31O001O001O00001O001O00001O001O00Q`d6"}, "image_id": 316, "id": 4589}, {"iscrowd": 0, "category_id": 1, "bbox": [195.0, 59.0, 100.0, 87.0], "area": 4233, "segmentation": {"size": [512, 512], "counts": "VcQ31m?3N2N1O2N2N2N2M3N2N1O2N2N2N2M3N2N2N1O2IjNaAX1]>jN`AY1^>6N2N2N2N000O1000O100000001O2N2M2O2N2N2N20000O1000O10O100O1O100O10O01O10fAnNe=Q1YBQOh=n0UBVOk=j0QBYOP>f0nA\\OS>d0iA@V>U100M4M210O00010O010O00010O0010O0010O00N3L3N3L3N2M4L3N3L3N2M4M2M3M4M2M4M2G`@Ohl\\3"}, "image_id": 317, "id": 4590}, {"iscrowd": 0, "category_id": 1, "bbox": [133.0, 66.0, 64.0, 54.0], "area": 2144, "segmentation": {"size": [512, 512], "counts": "ScR22l?3L3N2M4M2N3L3N3L3N2N3L3N3O000010O010O010O00010O010O00010O01M2M4M2O110O0010O0010O010O0010O001M2N2N3L3N3L3N3M2M3N3L3N3M2M3N3L3Nd]m4"}, "image_id": 317, "id": 4591}, {"iscrowd": 0, "category_id": 1, "bbox": [446.0, 132.0, 66.0, 
48.0], "area": 1647, "segmentation": {"size": [512, 512], "counts": "gTo62l?2M4M2M4M2N201O010O01O01O0M4O00010O0N3O00YOk@d0X?0M3N3M210O01O01O010O01O01O010O01O010O01O010O01O01O010O01O01O010O01O0\\OPA9o>ESA;n>BTA?k>^OYAa0S?010O0010O01oJ"}, "image_id": 317, "id": 4592}, {"iscrowd": 0, "category_id": 1, "bbox": [385.0, 166.0, 65.0, 84.0], "area": 3083, "segmentation": {"size": [512, 512], "counts": "ZgP62l?2M4M2M3N3L3N2M4M2M4@ROjAP1S>SOjAP1T>SOiAP1S>SOjAP1T>ROjAQ1R>`0N2M4N1010O0001M2N3L3N2O2O010O01O01O010O00O2L3N3L3N2M4M2M4L3N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N^jn0"}, "image_id": 317, "id": 4593}, {"iscrowd": 0, "category_id": 1, "bbox": [442.0, 181.0, 70.0, 97.0], "area": 2652, "segmentation": {"size": [512, 512], "counts": "oWm61m?3L3N3M2M3N3M2M4M210O0N2VAVO[>n0aAUO]>m0aAUO]>n0`AUO\\>Y1N3M2M3N3M2M4M2N3M21O0N3M2M3N3M0O010O01O10O01003L3N2N3L3N3M2M4M2N2M4M2M4M2N3L3N2N2M01000O01011001O01O0M4M2N[J"}, "image_id": 317, "id": 4594}, {"iscrowd": 0, "category_id": 1, "bbox": [505.0, 218.0, 7.0, 21.0], "area": 83, "segmentation": {"size": [512, 512], "counts": "Ygl73k?3L3N3L3N2M4UI"}, "image_id": 317, "id": 4595}, {"iscrowd": 0, "category_id": 1, "bbox": [503.0, 250.0, 9.0, 48.0], "area": 256, "segmentation": {"size": [512, 512], "counts": "^hk72c02a>2\\A0b>2[A1b>2\\A1a>2[A1b>2\\A1a>2[A1b>h0N3UH"}, "image_id": 317, "id": 4596}, {"iscrowd": 0, "category_id": 1, "bbox": [81.0, 270.0, 64.0, 76.0], "area": 2547, "segmentation": {"size": [512, 512], "counts": "XjX12k?4M2N3L3N2M4M2O2[OZOnAf0P>\\OPBe0l=_OTB`0j=BVB>h=DYBe0QBYOQ>e0RBWOR>f0PBXOR>e0RBXOP>f0RBWOR>f0PBXOR>h0d00O010O00010O01O0N2M4M2N3L3N3L3NQWg5"}, "image_id": 317, "id": 4597}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 332.0, 48.0, 44.0], "area": 857, "segmentation": {"size": [512, 512], "counts": "\\:g0Z?0O010O010O00010O010O0i@ZOU?h001O010O01^Ok@8T?Fo@9R?DPAe0XA_Oe>c0YA_Oe>d0XA_Oe>o01O010O01M2M4M2N3L3N2N2N2M2O0000O0102N3L3O2O01OO2M2M4M210O00010O010O01M2N2N3M2M4M2N3L3^OoAkN1OP>V1RBgN11m=W1c0N2N3M2M4M2N3L3N3M2N2Aa@:d?N3M2MbRd0"}, "image_id": 317, "id": 4599}, {"iscrowd": 0, "category_id": 1, "bbox": [290.0, 426.0, 77.0, 50.0], "area": 2279, "segmentation": {"size": [512, 512], "counts": "Z^a41m?3M2M3N3M2M4M2M3010O010O00010O010O00N3M2M4M000O03N3N10010O000101N010O0N2N3TAQOb>X101O01O010O01O01O010eN]AW1g>N1O2O00O2M21O010OO2M20010O01M2M3M4M2M3N3L3N3L3N2M4L3N3L]RX2"}, "image_id": 317, "id": 4600}, {"iscrowd": 0, "category_id": 1, "bbox": [506.0, 472.0, 6.0, 11.0], "area": 36, "segmentation": {"size": [512, 512], "counts": "k^m71o?2M2N2O2M2WA"}, "image_id": 317, "id": 4601}, {"iscrowd": 0, "category_id": 1, "bbox": [499.0, 485.0, 13.0, 27.0], "area": 181, "segmentation": {"size": [512, 512], "counts": "`oi71n?2N2N2N3M2N2O1N3M2N2g@[OT?i0O1"}, "image_id": 317, "id": 4602}, {"iscrowd": 0, "category_id": 1, "bbox": [270.0, 489.0, 67.0, 23.0], "area": 922, "segmentation": {"size": [512, 512], "counts": "n_W42l?2M3N2M3N2M3N21O00001O00001O001O0000M3M3N2001O00001O001O00001O00001O001O00001O0Gd@N]?Oe@1[?Lh@4b?0M3N2M3001O00001O00001O001O00001O00001O001O0O1MYPg2"}, "image_id": 317, "id": 4603}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 16.0, 28.0], "area": 301, "segmentation": {"size": [512, 512], "counts": "0k0U?00001O00O1N2M3N2M3N2M3N2N2M3NRPh7"}, "image_id": 318, "id": 4604}, {"iscrowd": 0, "category_id": 1, "bbox": [35.0, 0.0, 43.0, 53.0], "area": 1428, "segmentation": {"size": [512, 512], "counts": "Zaa02Y?1WA2f>0WA3f>1WA2f>0WA3f>0XA2f>1VA3g>d0M4M2N201O001O00001O001O001O00N2M3N2M3N2N2M3N2M3N2M3N2N2M3N2M3N2M3N2N2MSPi6"}, "image_id": 318, "id": 4605}, 
{"iscrowd": 0, "category_id": 1, "bbox": [140.0, 0.0, 27.0, 23.0], "area": 403, "segmentation": {"size": [512, 512], "counts": "^PV21m?2N3L3N3M2M301O001O001O00001O001O010O01M2N3L3N2N3L3Nn_\\5"}, "image_id": 318, "id": 4606}, {"iscrowd": 0, "category_id": 1, "bbox": [251.0, 0.0, 10.0, 4.0], "area": 24, "segmentation": {"size": [512, 512], "counts": "P`m31o?001O00001O001O00NR`m3"}, "image_id": 318, "id": 4607}, {"iscrowd": 0, "category_id": 1, "bbox": [264.0, 0.0, 52.0, 33.0], "area": 979, "segmentation": {"size": [512, 512], "counts": "[PT41n?2N2N2N2N1O2N2N2N2N2N2O1O1O1O1O1O1O1O1O1O1O1O00O1O1O1O1O1O1O1O1O1O1O1O1O1O1N2O1O1O1O1O1O1O1O1O1O1O2N2NooQ3"}, "image_id": 318, "id": 4608}, {"iscrowd": 0, "category_id": 1, "bbox": [428.0, 0.0, 84.0, 46.0], "area": 1986, "segmentation": {"size": [512, 512], "counts": "TPf62l?3M201O001O00001O001O001O001O001O00001O001O001O001O001O001O00001O001O001O001O001O00001O001O001O001O001O00001O001O001O001O001O00001O001O00mNVAR1k>010O010O01OTAoNi>U10mNXAl0h>ROZAn0m>TORAe0m>YOUAf0S?01O00ZO"}, "image_id": 318, "id": 4609}, {"iscrowd": 0, "category_id": 1, "bbox": [89.0, 4.0, 43.0, 63.0], "area": 1652, "segmentation": {"size": [512, 512], "counts": "ha\\11l>0fA2H2_>NgA3G1_>0gA1G3_>NgA2H2^>0gA?W>CfAa0W>AgAa0V>g0O00010O010O0010O0010O010O0010M2N3L3N3L3N2N3L3N3M2M4M2N2M4M2M4M2N2M4M`om5"}, "image_id": 318, "id": 4610}, {"iscrowd": 0, "category_id": 1, "bbox": [206.0, 18.0, 81.0, 67.0], "area": 2639, "segmentation": {"size": [512, 512], "counts": "ZQW32l?2M4M2M4f@Fj>;RAHn>9o@JQ?b0010O00010ON3M2M3N3L3N2010O010O00010O010O00010O01O01O010O01O01O010mN`Ad0`>ZObAg0]>VOgAi0Z>TOhAl0X>QOlAo0b>0010O0010O0010O0010O0010O0010O0001UOPAe0o>XOTAh0R?10O0010O0010O001N1M3N3L3N3L3N2MU^`3"}, "image_id": 318, "id": 4611}, {"iscrowd": 0, "category_id": 1, "bbox": [138.0, 30.0, 48.0, 63.0], "area": 1756, "segmentation": {"size": [512, 512], "counts": "aRU21m?3L3N2N3L3N3L3^O\\OfAg0W>[OfAh0W>\\OfAf0X>\\OfAg0V>]OfAf0X>\\OfAg0W>b0N210O010O01O010O01O010O01O0M4M2M3N3M2M3NO0012M4M2N3L3N3L3N2M4M2H_@Od?NSoR5"}, "image_id": 318, "id": 4612}, {"iscrowd": 0, "category_id": 1, "bbox": [399.0, 34.0, 51.0, 61.0], "area": 2056, "segmentation": {"size": [512, 512], "counts": "\\bW63V?N^A4`>O]A4_>O^A4`>O]A4`>N]A5`>O]A3a>O\\A5a>f0010O0010O0M4M2010O00010O010O00010O010O0010O0010O010O00010O010N1M4M2M3N3lNXAk0k>SOWAk0Q?\\Oo@5S?Io@4T?IPA4_?M4MPnn0"}, "image_id": 318, "id": 4613}, {"iscrowd": 0, "category_id": 1, "bbox": [182.0, 71.0, 90.0, 66.0], "area": 2971, "segmentation": {"size": [512, 512], "counts": "XSk22l?2M3N3M2M4M2N2M4M2N3M2M4M2N2O20O01O01O010O0O2N10010O0010O0010O0010O010O00010O010O0010O0010O0010O0010O010O00010O0VAoNf>U100010O010O00010O010O0010O0010O010O000N3M2M4M2N2M4M2M4M2N3L3N2M4Mllg3"}, "image_id": 318, "id": 4614}, {"iscrowd": 0, "category_id": 1, "bbox": [475.0, 71.0, 37.0, 59.0], "area": 1190, "segmentation": {"size": [512, 512], "counts": "ob]74i?3M4M2M3M4M2M4L3N2010O01O01O010O01O01O010O01O01OZATOX>m0eAUO[>k0bAXO_>h0^A[Oa>R110O00010O010]AfN^>^10gNcAo0l;"}, "image_id": 318, "id": 4615}, {"iscrowd": 0, "category_id": 1, "bbox": [363.0, 97.0, 78.0, 55.0], "area": 2227, "segmentation": {"size": [512, 512], "counts": "ice53k?2N3L3N2N3M2M4M210O0010O0010O0N3M2N3L30010O010O010O00010O01POSAl0Q?O01O010O01O01O0N3M2N3O00010O010O010O0010O0010O010OnNZAj0g>SO[An0k>01O010O010O01O01O0O2M2M4M2N3L3N2N3M2M4MT\\S1"}, "image_id": 318, "id": 4616}, {"iscrowd": 0, "category_id": 1, "bbox": [456.0, 127.0, 26.0, 25.0], "area": 420, "segmentation": {"size": [512, 512], "counts": 
"`TT72k?3M3M4L3N2M4O000010O010O00010O00010O000N3L3M3M4L3Nmk>"}, "image_id": 318, "id": 4617}, {"iscrowd": 0, "category_id": 1, "bbox": [336.0, 134.0, 91.0, 67.0], "area": 2993, "segmentation": {"size": [512, 512], "counts": "WUX51m?3M2M3N3M2M4M2N3L3N2N3L3N3L30010O010O0010O0N3O001O01O010O01O01O010O010O00010O010O01O01O010O01O01O010O010O01O01VAmNe>W110O010O00010O010O01O01O010O010O00010O010ON3M2N2M4M2M4M2N2M4M2M4M2N3LmZZ1"}, "image_id": 318, "id": 4618}, {"iscrowd": 0, "category_id": 1, "bbox": [447.0, 164.0, 50.0, 63.0], "area": 1676, "segmentation": {"size": [512, 512], "counts": "efo62l?2M4M2M4M2M3N3L3N3O00010O0IWOWAj0f>XO[Ag0b>\\O^Ad0`>_O`Aa0]>AcA?Z>EeAFhA:V>i0M4M2010O00010O010L3N2M4M2M4M2M3N3L3N3L3N2N3L3N3L3N2M4M2MeZ7"}, "image_id": 318, "id": 4619}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 188.0, 65.0, 83.0], "area": 3116, "segmentation": {"size": [512, 512], "counts": "l6n0R?10O0010O0010O010O0010O001M2N3M2M3N3M2M4M2N3L3N2010O010O01O01O010O01N1M4M2M3N3M2M40O01O01O010oN`B@`=>cBB]=;eBD\\=9gBE[=9hBD[=9gBD\\=9hBD[=9gBE[=9hBC\\=9gBE[=9gBE[=9hBC\\=9gBE[=9X1M2M4M2NlYo6"}, "image_id": 318, "id": 4620}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 189.0, 5.0, 11.0], "area": 30, "segmentation": {"size": [512, 512], "counts": "m5;e?N3M2M3NTZm7"}, "image_id": 318, "id": 4621}, {"iscrowd": 0, "category_id": 1, "bbox": [138.0, 200.0, 70.0, 62.0], "area": 2322, "segmentation": {"size": [512, 512], "counts": "kVU24h?4M4K4M3L5N11O00010O0001O01O00010O`AYOg=g0UB^Oj=c0RB@n=`0oACR>8gAKY>5cAO^>i001O00010O0001O01O01O01O00010O0001O01O00010OM3M4K4M3M3O20O00010ON2M4K4O10001O01O01L3M3L4M4KYig4"}, "image_id": 318, "id": 4622}, {"iscrowd": 0, "category_id": 1, "bbox": [498.0, 213.0, 14.0, 30.0], "area": 233, "segmentation": {"size": [512, 512], "counts": "[Wi72l?3L3N2M4M2N3O00010M2N3L3N3ZI"}, "image_id": 318, "id": 4623}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 265.0, 41.0, 63.0], "area": 1561, "segmentation": {"size": [512, 512], "counts": "Y8g1Y>0010O0010O010O00_NhA[1X>bNkA]1\\>010O0010O0010O0010O0N3L3N3L3N2N3L3N3M2O2O01O01N1N3L3N2M4M2NRW[7"}, "image_id": 318, "id": 4624}, {"iscrowd": 0, "category_id": 1, "bbox": [100.0, 270.0, 74.0, 47.0], "area": 2196, "segmentation": {"size": [512, 512], "counts": "WYb11m?3^OOPA4n>0n@3o>2k@1S??01O001O001O00001O010O0010O0010O010O00010O010O0010O0O2M2O110O010O01O01O01M2M4M2N30O0010O0010O010O00010O010O01O01O0oNXAj0i>ROZAn0m>N101O000N3Ij@@X?<9K4L5KQgX5"}, "image_id": 318, "id": 4625}, {"iscrowd": 0, "category_id": 1, "bbox": [282.0, 288.0, 92.0, 57.0], "area": 2892, "segmentation": {"size": [512, 512], "counts": "hY]42k?3N3M2N2N3M2N30OM3N3L3N3O000010O010O01O01O010O01O01O010O010O00010O010O01O0M3N3N1010O0010O0010O0010O010O0010mNZAj0f>TO\\Al0i>4M2N3N10010O010O00010O010O001M200010O010O0N2M4M2M4M2M3N3M2M4M2M3N3L3N3M_fT2"}, "image_id": 318, "id": 4626}, {"iscrowd": 0, "category_id": 1, "bbox": [365.0, 294.0, 69.0, 134.0], "area": 4779, "segmentation": {"size": [512, 512], "counts": "^kf52l?2M3dAKk<8QCKm<7QCLl<7QCKl<8RCJl<9PCKm<7QCKm<8PCKl<8RCJl<8QCLl<7QCKm<7QCLk<8SCIYiC_OY<>jC@Yb0RAAm>`0o@CQ?f010O010O01O01O0O2M2N1N2O2N3L3N3L3N210O010O01O01O010O010O00010O010O01O01OWOeAM\\>0fA1Y>LkA3V>LjA3W>MjAOY>1gAM\\>3cAK_>5bAGb>9]AEe>8_ADd>:^ADe>9b0M4M2MnUe5"}, "image_id": 318, "id": 4628}, {"iscrowd": 0, "category_id": 1, "bbox": [245.0, 325.0, 104.0, 77.0], "area": 3567, "segmentation": {"size": [512, 512], "counts": 
"Qkj31l?4M2M3N3L3N3L3N2N3L3N3O01O01O010O01O01O010O01O01O010OO2M2M10O3N3N101O01O010O01O01O010O01O0VAoNf>P1XAROh>S110O0010O0010O0010O00eAkNl=7iAb08YOo=3kAd03]OQ>LPBg0L_Oa>a0\\ACc>=[AEf>j0010O01O01O010O01O01O010O3NO01O01L3N3L3N2O2O010O00010O010OM3N3L3QO]A?f>^O\\A?g>_O\\A>f>_O]A>g>_O\\A>T?M3N3L3NbTa2"}, "image_id": 318, "id": 4629}, {"iscrowd": 0, "category_id": 1, "bbox": [11.0, 329.0, 18.0, 20.0], "area": 209, "segmentation": {"size": [512, 512], "counts": "dj52k?4M2N3[@F`?>N3O01O010O010N1N3M2N2M4M2NdUa7"}, "image_id": 318, "id": 4630}, {"iscrowd": 0, "category_id": 1, "bbox": [440.0, 329.0, 63.0, 58.0], "area": 1823, "segmentation": {"size": [512, 512], "counts": "`[l61m?2M3N3L3N3L3N3L3N210O0010O00m@XOo>m0O01O01O010O010O01O01ON3NN3N3M2M4M200010O01O010N1N2N3L010O10O3N2N3L3N3M2M4M2M3N3M2M4M2N3L3N2N3LcU4"}, "image_id": 318, "id": 4631}, {"iscrowd": 0, "category_id": 1, "bbox": [20.0, 360.0, 14.0, 14.0], "area": 131, "segmentation": {"size": [512, 512], "counts": "a[:2k?3N2M4N1010O00010O000N3L3Ngd^7"}, "image_id": 318, "id": 4632}, {"iscrowd": 0, "category_id": 1, "bbox": [43.0, 369.0, 96.0, 62.0], "area": 3237, "segmentation": {"size": [512, 512], "counts": "Yle02k?4M2M3_@FZ?=c@EZ?b0N3M201O01O010L300010N1N2M4L3O2O01O01O01O010O01O01O010O00010O01O01O010OTAQOg>T1010OM3N3O010O0^AeN_>_10O00010O010O00010O00010O010O00010O00010O010N1N2N3M2M4M2N2O20O00010O010O00N3L3M3N3L3M4M2M3M4MjSj5"}, "image_id": 318, "id": 4633}, {"iscrowd": 0, "category_id": 1, "bbox": [225.0, 380.0, 61.0, 65.0], "area": 2219, "segmentation": {"size": [512, 512], "counts": "hl`31l?4M2M4M2M3N3L3N3L3M3N3O0010O00010O01dAnNi=Q1UBROj=n0SBUOm=l0PBWOP>h0mA[OS>f0jA\\OW>c0gA@X>T1010O0010O001M20001O010O00010O01N1N2M4M2N3L3N2M4M2M4M2M3N3L3N3L3N2N3L3Ngc`3"}, "image_id": 318, "id": 4634}, {"iscrowd": 0, "category_id": 1, "bbox": [416.0, 389.0, 63.0, 56.0], "area": 1804, "segmentation": {"size": [512, 512], "counts": "Z]`63k?2N3M2M4M2N3L3N2N3O0n@VOo>n0O0010O010O01O01O010O010O01ON2N02M3N3M200010O010O01O000N3L2O00O10O1003L3N2N3L3N3N101O0M3N3M2M4M2N3L3N2NfS`0"}, "image_id": 318, "id": 4635}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 397.0, 16.0, 44.0], "area": 362, "segmentation": {"size": [512, 512], "counts": "]<\\1d>M4M2N3L3N3L3N2N3L3N3L3N2M4M2N_cg7"}, "image_id": 318, "id": 4636}, {"iscrowd": 0, "category_id": 1, "bbox": [286.0, 408.0, 58.0, 61.0], "area": 1811, "segmentation": {"size": [512, 512], "counts": "a]_41m?2M4M2M4M2M3N3L3N3L30010O0010O010O00010O010O_ASOo=m0oAVOP>k0lAXOU>g0iA[OW>f0fA]OZ>b0cAA]>R10O01O01O010O01O01O010O010O0N2N3L3N3L3N2N3L3N3L3N2M4M2N3L3Ngbc2"}, "image_id": 318, "id": 4637}, {"iscrowd": 0, "category_id": 1, "bbox": [24.0, 425.0, 89.0, 67.0], "area": 3289, "segmentation": {"size": [512, 512], "counts": "Y^<3k?3L3N2M4M2N3L3N2N3O010O00010O010O000N3L3N3L310O01O01O010O01O01O010O000M4M2M4N11O010O01O01O010O01O01O010O01O010O01O01O010O01O010O01O01O010O01OUOdA2]>JfA6Z>HiA8W>DlABnA?Q>_ORB`0o=\\OTBd0l=ZOWBf0a>01N1M3N3M2M4M2M3N3L]QW6"}, "image_id": 318, "id": 4638}, {"iscrowd": 0, "category_id": 1, "bbox": [368.0, 451.0, 75.0, 61.0], "area": 2454, "segmentation": {"size": [512, 512], "counts": "i_h52l?2M3N3L3N3M2M3N3L3N3O0010O00N2N2M3N2M3O1001N1N2M4M201O01O010O01O010O01ORORAi0T?O00010O00O2M2M4M2N2M4GgNgA[1V>:M2M3O1001O0N2N3L3N3M200N3M2M4M2M3ISAYOP?c08N3L3N2M4M2N3LgQR1"}, "image_id": 318, "id": 4639}, {"iscrowd": 0, "category_id": 1, "bbox": [192.0, 460.0, 48.0, 52.0], "area": 1810, "segmentation": {"size": [512, 512], "counts": 
"e_P34j?2M4]OHZA:d>IXA:e>IXA;e>GYA;d>IXA;e>a0M310O010M2M3010O01O01O001O00001O001O00001O001O00001O001O00001O0kN[Ao0f>nN]An0l>N2]OQA2S?KPA2R?KQA2S?Ko@3a?Lf`W4"}, "image_id": 318, "id": 4640}, {"iscrowd": 0, "category_id": 1, "bbox": [1.0, 470.0, 61.0, 42.0], "area": 1704, "segmentation": {"size": [512, 512], "counts": "[o0;d?2a@FT?a0SA_Om>a0RA@m>a0SA_OR?Oi@3W?;000000O100001O001O001O001O001O001O001O0O2O001N101M2N3L3N_`Y6"}, "image_id": 318, "id": 4642}, {"iscrowd": 0, "category_id": 1, "bbox": [250.0, 494.0, 64.0, 18.0], "area": 777, "segmentation": {"size": [512, 512], "counts": "o_m31l?3N2M3N2M3001O00O1N2N21O001O00001O001O00001O001O00001O001O00001O001O00001O001OO1N2M3N2N2N2001O00001O001O00001O001O00001M2N3M2M^`R3"}, "image_id": 318, "id": 4643}, {"iscrowd": 0, "category_id": 1, "bbox": [402.0, 300.0, 110.0, 131.0], "area": 8663, "segmentation": {"size": [512, 512], "counts": "R\\Y61m?3L3N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N2M4N101O01O010O010O00010O010O00010O010O0010O0010N1N3L3N2M4M2M4O010O01O01O010O01O01O010O010O00010O010O00010O010O0010O0010O00QCbM]<^2`CdM`<\\2]CgMc_NnAc1P>8N2N2N2N2N2N1O2N2N2N2N2O10000O1N2N2N2N2N2M3N2N2N1O2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N1O2N2M3N2N2N2N2N2N2N2N2N2N2N2N2N]M\\EMb:3`EM^:3dEL[:4gELW:4kELS:4oELo94SFLk94WFLg94[FLb95`FK^95dFKZ95hFKV95lFKR95PGKn85TGKj84YGLe84]GLa84aGL]84eGLY84iGLU84mGKR85PHKn75THKj75XHKf75\\HKb75`HK^75dHKZ75hHKV75lHKR75PIKn65SILk64WILf65\\IKb65`IK^65dIKZ65hIKV65lIKgM`MX8e2SJNm50UJ0k5NWJ2i5KZJ5f5I\\J7c5H_J8a5FaJ:_5DcJ<]5BeJ>[5@gJ`0Y5^OiJb0W5\\OkJd0U5ZOmJf0S5XOoJh0Q5VOQKj0o4TOSKl0m4SOTKm0l4SOTKm0l4SOTKm0l4ROUKn0k4ROUKn0k4ROUKn0k4ROUKn0k4ROUKn0k4ROUKn0k4ROUKn0k4ROUKm0l4SOTKm0l4SOTKm0k4TOUKl0k4TOUKl0k4TOTKm0l4SOTKm0l4SOTKm0l4SOTKm0l4SOTKm0l4SOTKm0l4SOTKm0l4SOTKm0l4SOTKm0l4SOTKm0l4ROUKn0k4ROSKP1m4POQKR1o4nNoJT1Q5lNmJV1S5jNkJX1U5hNiJZ1W5fNgJ\\1Y5dNeJ^1[5bNbJa1]5`NaJb1]5`NaJb1]5`NaJb1]5`NaJb1]5`NaJb1_5^N_Jd1a5\\N]Je1d5[NZJg1f5YNXJi1h5WNVJk1j5UNTJm1l5SNRJo1n5QNPJQ2P6nMoIT2Q6lMmIV2S6jMkIX2U6hMiIZ2V6gMhI[2X6eMfI]2Z6cMdI_2\\6aMbIa2^6_M`Ic2`6eKRH_1\\1n2b6aKTHa1X1P3d6]KVHc1T1R3f6YKWHf1Q1S3P7mLnHU3R7kLlHW3T7iLjHY3V7gLhH[3X7eLfH]3Z7cLdH_3\\7aLbHa3^7_L`Hc3`7]L^He3b7[L\\Hg3d7XL[Hj3e7VLYHk3h7ULVHm3j7SLSHP4m7PLQHR4o7nKoGT4Q8lKmGV4S8jKkGX4U8hKiGZ4V8i1000000000000000000000000000000000000O10000000O10N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2M3N2N2N2N2N2N2N2N2N2N2N1O2N2N2N0000000000000000000000000000000001O2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N20000000O01N2N2N2000000O1N2N2N2N2N2N2^OeG\\I]8b6eG\\I]8b6eG\\I]8a6fG]I\\8a6fG]I\\8a6fG]I\\8a6fG]I\\8a6c0N2N2N2N2N2N2N2N2N2N1O2N2N2N20000O100O1N2N2N2N2N2N2N2N2N2N2M3N2N2N2IjDaKX;]4jDaKX;]47N`J"}, "image_id": 322, "id": 4646}, {"iscrowd": 0, "category_id": 1, "bbox": [134.0, 252.0, 126.0, 125.0], "area": 5520, "segmentation": {"size": [512, 512], "counts": "hXS22m?2N2N1O2N2N2N2N2N2N2N2N2N2N2N2N2M3N2N2N2N2N2N2N2N2N2N200000000000000000000000O10000000O10000000000000000000000000000000000000000000000000000000000000O1000000000O100000000000000000000000000000000000N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N1O2M3N2N2N2N2N2N2NVem3"}, "image_id": 322, "id": 4647}, {"iscrowd": 0, "category_id": 1, "bbox": [114.0, 291.0, 34.0, 34.0], "area": 599, "segmentation": {"size": [512, 512], "counts": "aYi12m?2N1O2N2N2N2N2N2N2N2N2N2N2N2O1000000000N1O2N2N2N2N2N2N2N2N2N2N2N2M^fe5"}, "image_id": 322, "id": 4648}, {"iscrowd": 0, "category_id": 1, "bbox": [79.0, 325.0, 50.0, 47.0], "area": 1069, "segmentation": {"size": [512, 512], "counts": 
"ejW12m?2N2N2N2N2N2N2N2N2N1O2N2N2M3N2O10000000000000000000O1000000000OO2N2N2M3N2N2N2N2N2N0000002N2N2N2N2NRUo5"}, "image_id": 322, "id": 4649}, {"iscrowd": 0, "category_id": 1, "bbox": [158.0, 337.0, 27.0, 27.0], "area": 364, "segmentation": {"size": [512, 512], "counts": "mZ_21n?2N2N2N2N2N2N2N2N2N2N2N200000N2N2N2N1O2M3N2N2N2N2N2NSUS5"}, "image_id": 322, "id": 4650}, {"iscrowd": 0, "category_id": 1, "bbox": [120.0, 350.0, 56.0, 55.0], "area": 1480, "segmentation": {"size": [512, 512], "counts": "b[l12m?2N2N2N2N2N2M3N1O2N2N2N2N2N2N2N2N2N2N2O10000O100000O1000000000000000000N2N2N2N2N1N3N2N2N2N2N2N2N2N2N2N2N2N2N2NRdW5"}, "image_id": 322, "id": 4651}, {"iscrowd": 0, "category_id": 1, "bbox": [466.0, 350.0, 46.0, 91.0], "area": 2128, "segmentation": {"size": [512, 512], "counts": "]\\Y71n?2N2N2N2N2N2N2N2N2N2M3N2N2N2N2N2N1O2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2M3N2N2N2N2N2N1O2N2N2N2QE"}, "image_id": 322, "id": 4652}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 381.0, 208.0, 131.0], "area": 14161, "segmentation": {"size": [512, 512], "counts": "Rnm22m?2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N1O2N2N2N2N2N2M3N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N1O2N2N2N2M3N2N2N2N2N2N2N2N2N2N2N2N2N2N2N1O1O1O1O100001O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O001O1O1O1O1O1O1O1O1O1O1OO1O1001O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O001O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O001O1O1O1O1OO11O1O1O001O1dN_AW1f>O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O001O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1OQPj1"}, "image_id": 322, "id": 4653}, {"iscrowd": 0, "category_id": 1, "bbox": [326.0, 390.0, 139.0, 122.0], "area": 4475, "segmentation": {"size": [512, 512], "counts": "g\\S52m?1O2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2O0100000000000000000000000000000000O1000000000000000000000O1000000000O10000000000000000000000000000000000O1000000000000000O10000000000YAoN^>Q1`AQO`>o0^ASOb>l0]AVOc>j0[AXOe>R100000000000O10000000000O1O1O1OO1O1O1O1O1O1O1O1O1O1001N2N2N2N2N2N2N2N2N2N2M3N2N1O2N2N2N2N2N2N2N2N2NkPg0"}, "image_id": 322, "id": 4654}, {"iscrowd": 0, "category_id": 1, "bbox": [72.0, 478.0, 115.0, 34.0], "area": 2008, "segmentation": {"size": [512, 512], "counts": "o_T11m?2O1O1O1O1O1O1O1O1O1O1O1O1O1O1N2O1O1O1O1O1O1O1O1O1O1O1O1O1O1O11O1O1O1O1O1O1O1O001O1O1O1O1O1O1O1O1O1OO1O1O1O1O1O1O1O1N2O1O1O11O1O1O001O1O1O1Fg@KZ?3h@LY?2i@MX?1j@NW?0k@OV?Nm@1a?0O1O1O1O1O1N2O1O1O1O1O1O1O1O1O1O100001O1O1O1O1O1O1O1O1O1O1O1O1O1O1O001O1O1O1OQPR5"}, "image_id": 322, "id": 4655}, {"iscrowd": 0, "category_id": 1, "bbox": [499.0, 507.0, 10.0, 5.0], "area": 28, "segmentation": {"size": [512, 512], "counts": "ooi71n?1O1O1O11O1O001O1OQP1"}, "image_id": 322, "id": 4656}, {"iscrowd": 0, "category_id": 1, "bbox": [188.0, 511.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "o_n21P`Q5"}, "image_id": 322, "id": 4657}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 365.0, 311.0], "area": 64019, "segmentation": {"size": [512, 512], "counts": 
"[2[1d>2N2N2N2N2N1O2N2N2N2N2N2M3N2N2N2N2N2N2N2N2N1O2N2N2N2N2M3N2N2N2N2N2N2N2N2N2N1O2N2N2N2M3N2N2N2N2N2N2N2N2N2N2N1O2N2N2M3N2N2N2N2N2N2N2N2N2N2iE_Jl9c5RF_Jl9b5SF`Jk9b5RFaJm9j5O1O1O1O00O1O1001O1O1O001O1O1O1O1O1O1O1O1O1O1O1O1O1O1O001O1O1O1O1O1O1O1O1O1O1O1O1O1O1O001O1O1O1O1O1O1O1O1O1O1O1O1O1O1O001O1O1O1O1O1O1O1O1O1O1O1O1O1O1O001O1O1O1O1O1O1O1O1O1O1O1O1O1O1O001O1O1O1O1O1O1O1O1O1O1O1O1O1O1O001O1O1O1O1O1O1O1O1O100000000000O0100000O1N2N2O1000000O1000000O1N1O2N2N2N2N2N2N2N2N2lG`He7b7YH`He7b7YH`He7b7YH`H>Ka6g7oH`H=Lb6f7oH`H=Lb6S8\\IoGb6S8\\IoGb6S8\\InGc6T8[InGc6T8[InGc6S8\\IoGa6T8]InGa6j8N000000000000O10000000000O10000001O2N2N2N2N2N1O2N2N2N2N2N2N2M3N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N1O2N2N2N2N2N2M3N2N2N2N2N2N2N2N2N2N2N2N2N2N2_OlBSNV=k1lBSNV=k1kBTNW=j1kBTNW=j1kBTNW=j1kBTNW=j1kBTNW=j1kBTNW=i1b0N2N1O2N2N2N2N200000000000000000000O10000000000O1N2N2N2N2N2N2N2N2N2N2N1O2M3N2N2N2N2N2N2N2NiZY2"}, "image_id": 323, "id": 4658}, {"iscrowd": 0, "category_id": 1, "bbox": [250.0, 0.0, 179.0, 95.0], "area": 8845, "segmentation": {"size": [512, 512], "counts": "PPm31o?1O1O1O1O001O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1OO1001O1O1O1O1O1O1O1O1O1O001O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O001O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O001O1O1O1O1O1O1O1O1O1O1O1O00O1N2O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1N2O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O2N2N2N2N2N2N1O2M3N2O10000000000000000N2N2N2N2M3N2N2N2N2N2N2N2N1O2NR_Y1"}, "image_id": 323, "id": 4659}, {"iscrowd": 0, "category_id": 1, "bbox": [427.0, 0.0, 14.0, 7.0], "area": 56, "segmentation": {"size": [512, 512], "counts": "P`e61o?1O1O1O1O1O1O00O1O1O1O1O1OQ`S1"}, "image_id": 323, "id": 4660}, {"iscrowd": 0, "category_id": 1, "bbox": [453.0, 0.0, 59.0, 79.0], "area": 2533, "segmentation": {"size": [512, 512], "counts": "caR71n?2N1O2N2N2N2M3N2N2N1O2N2N2M3N2N2N2N1O2N2M2O000000000000O01000000000000O0100002N1gAeNl=]1RBeNl=]1QBfNl=]1RBeNl=]1RBeNl=i1O1O001O1O1O1O1O1O1O00100O1N"}, "image_id": 323, "id": 4661}, {"iscrowd": 0, "category_id": 1, "bbox": [379.0, 51.0, 76.0, 76.0], "area": 1712, "segmentation": {"size": [512, 512], "counts": "acm51n?2N2N2N2N2N2N2N2N2N2N2N2N2N00000000000000000000000000000000000000000000000O100000000000000000000000000000000000000000000001O2N2N2N2N2N2N2N2N2N2N2N2N2NQ^l0"}, "image_id": 323, "id": 4662}, {"iscrowd": 0, "category_id": 1, "bbox": [478.0, 71.0, 17.0, 16.0], "area": 155, "segmentation": {"size": [512, 512], "counts": "^R_72m?2N2N2N2N2N2N1010O1N2N2N1O2N2N2Nc]8"}, "image_id": 323, "id": 4663}, {"iscrowd": 0, "category_id": 1, "bbox": [449.0, 73.0, 36.0, 36.0], "area": 630, "segmentation": {"size": [512, 512], "counts": "fbP72m?2N2N2N2N2N2N2N2N2N2N2N2N200000000000000000O1N1N3N2N2N2N2N2N2N2N2N2N2NS]="}, "image_id": 323, "id": 4664}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 101.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "Uco72i<"}, "image_id": 323, "id": 4665}, {"iscrowd": 0, "category_id": 1, "bbox": [415.0, 118.0, 88.0, 87.0], "area": 3302, "segmentation": {"size": [512, 512], "counts": "]d_61n?2N2N2N2N2N2N2N2N2N2M3N2N2000000000O1000O1o@VOi>l0UAVOi>R1N2N2N2N2JcNfA_1X>cNfA_1X>6N2N2N10100000000000000000\\NnAY1Q>fNQBZ1o=dNSB\\1m=bNSB`1m=^NSBd1m=ZNUBf1S>0000000000000O10O1000000N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N1N3N2N2N2N2N2NaZ4"}, "image_id": 323, "id": 4666}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 147.0, 199.0, 177.0], "area": 3248, "segmentation": {"size": [512, 512], "counts": 
"c4`0a?O10000000O10O100000O1000O100000O1000O100000O1000O100000O10O100000O1000O100000O1000O100000O1000O100000O1000O100000O10O100000O1000O100000O1000O100000O1000O100000O10O100000O1000Od@CV?=g@EZ?a000O100N2N1N20O2N200000O01_Oh@9X?Ej@;V?Bm@>[?000O01000000000O01000000000O0100000000O01000000000O0100000000O01000000000O01000000000O0100000000O0100000000O01000000000O01000000000O0100000000Oe@BT??j@CV?=h@EX?b0000O10O1000O1N2M2O20000O1N2N1N3N2N2NYVl4"}, "image_id": 323, "id": 4667}, {"iscrowd": 0, "category_id": 1, "bbox": [345.0, 257.0, 30.0, 30.0], "area": 464, "segmentation": {"size": [512, 512], "counts": "^h\\52m?2N2N2N2N2N2N2N2N2N2N2N2N2O1000N2N2N2N2N2N2N2N2N2N2N2N2N2NaWT2"}, "image_id": 323, "id": 4668}, {"iscrowd": 0, "category_id": 1, "bbox": [304.0, 291.0, 16.0, 16.0], "area": 140, "segmentation": {"size": [512, 512], "counts": "ZYh42m?2N2N2N2N2N2N2O01N2N2M3N2N2N1Ohfo2"}, "image_id": 323, "id": 4669}, {"iscrowd": 0, "category_id": 1, "bbox": [500.0, 380.0, 12.0, 23.0], "area": 145, "segmentation": {"size": [512, 512], "counts": "X\\j71n?2N2N2N2N2M3N1O2N2N2N2SD"}, "image_id": 323, "id": 4670}, {"iscrowd": 0, "category_id": 1, "bbox": [410.0, 404.0, 102.0, 108.0], "area": 6679, "segmentation": {"size": [512, 512], "counts": "a_]61n?2N2N2N2N2AGTA;j>GSA;l>GRA;l>GRA;l>GRA;n><00000O1N2N11M3N2N2N1O1O1O1O1OC\\A@c>`0_A_OT>FSBk0K^OQ>IRBi0O]On=LQBg03\\Ok=OPBe06\\Oi=1oAc0:[Of=4nAa0>ZOe=P1]BoNc=P1_BoNV=MbBS19oNT=0cBQ19nNS=3dBo09mNR=6eBm0n=UOQBl0m=VOQBl0h=kNSB;3l0h=kNSB;3l0h=[OVBg0h=j0N2N2M3N2N2N2G]MRCe2l<]MRCe2l<]MRCe2m<800000O01N2N1O1O1O1O1O1O1O11O1O1O1nLZCh2gnN`AR1h>0O010O0010O0010O010O00O2M2N3L3N3L3N2N3L3N3M2M3N^_c5"}, "image_id": 325, "id": 4674}, {"iscrowd": 0, "category_id": 1, "bbox": [280.0, 0.0, 64.0, 24.0], "area": 853, "segmentation": {"size": [512, 512], "counts": "PP\\42n?1O001O001O00001O001O001O00001O001O00001O001O001O00001O001O001O00001O001O001O00001O001O001O00001O001O000000N2N2M3N2N2M3N2N2N2MSPd2"}, "image_id": 325, "id": 4675}, {"iscrowd": 0, "category_id": 1, "bbox": [370.0, 0.0, 7.0, 3.0], "area": 14, "segmentation": {"size": [512, 512], "counts": "PPi51o?001O001O00OQ`S2"}, "image_id": 325, "id": 4676}, {"iscrowd": 0, "category_id": 1, "bbox": [380.0, 0.0, 36.0, 20.0], "area": 481, "segmentation": {"size": [512, 512], "counts": "QPn53l?1V@Oe?7O001O00001O001O00001O001O001O00001O001O00001O001O00O1M3N2N2M3N2M3NRP`1"}, "image_id": 325, "id": 4677}, {"iscrowd": 0, "category_id": 1, "bbox": [452.0, 0.0, 48.0, 20.0], "area": 502, "segmentation": {"size": [512, 512], "counts": "PPR71o?001O00001O00001O001O00001O001O00001O00001O0Z@Ka?:01O00001O00001O001O00001O001O000000N2M3N2M3M3N2MSP6"}, "image_id": 325, "id": 4678}, {"iscrowd": 0, "category_id": 1, "bbox": [437.0, 11.0, 27.0, 29.0], "area": 462, "segmentation": {"size": [512, 512], "counts": "n`j62l?2N2M4M2N3L3N3M2O20O010O01O01O010O01ON3M2M4M2N2M4M2N`og0"}, "image_id": 325, "id": 4679}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 22.0, 20.0, 50.0], "area": 733, "segmentation": {"size": [512, 512], "counts": "i0Z1g>O01O010O0O2L3O20O00010M2XOaALc>1`ALb>2`ALc>GVA5:0c>IUA4;1h>LZA2h>L[A0h]f7"}, "image_id": 325, "id": 4680}, {"iscrowd": 0, "category_id": 1, "bbox": [261.0, 27.0, 64.0, 52.0], "area": 1894, "segmentation": {"size": [512, 512], "counts": "iaR41l?4M2N3L3N3L3N2N3L3N3L3N2O2O010O010O00010O010O01O01O010O01O01O010O010O01O01O010O01O01O010O010O01O01O01O0N3L3N2M4M2N3L3N2M4M2N3Lc^m2"}, "image_id": 325, "id": 4681}, {"iscrowd": 0, "category_id": 1, "bbox": [330.0, 33.0, 28.0, 29.0], "area": 473, "segmentation": {"size": [512, 512], "counts": 
"eQU51m?3M2M3N3M2M4M2N3M20010O010O0010O010O0O2L3N2N3M2M4M2N3Ljn\\2"}, "image_id": 325, "id": 4682}, {"iscrowd": 0, "category_id": 1, "bbox": [49.0, 37.0, 82.0, 57.0], "area": 2709, "segmentation": {"size": [512, 512], "counts": "_ah01j?6J5YAEi=;RBJn=6mAOS>2gA4X>LcA9]>G^A>c>?0001O02N00010O000000010O000000010O0000010O000000010O000lN\\Al0l>010O000000010O0000010O000000010O0000000100O1O0001O0001O0001O01O000001O01O00000L4K6J5L4K5KU^n5"}, "image_id": 325, "id": 4683}, {"iscrowd": 0, "category_id": 1, "bbox": [399.0, 38.0, 63.0, 66.0], "area": 2386, "segmentation": {"size": [512, 512], "counts": "`bW62k?3N3L3N3O000HCn@=n>FRA:l>HTA9g>LYA3d>0\\A0JEc>>cA1Z>2fAOW>3jALS>8lAHR>:nAGQ>:mAGS>9jAKV>4hANX>m0010O010O01O01O010O01O01O010O010O00010O010O01O03L1N3L3N2N3L3N3L3N2N3L3N3L3N3L3N2N3L3NXnh0"}, "image_id": 325, "id": 4684}, {"iscrowd": 0, "category_id": 1, "bbox": [231.0, 86.0, 69.0, 49.0], "area": 2021, "segmentation": {"size": [512, 512], "counts": "acc31m?2M3N3L3N3L3N2N30O01M2M3N3M2010O01O01O010O010O00010O010O00010O010OM4M2O110O0010O0010O0010O0010O001O0M4M01N4M2M4M2N2M4M2M4M2M3N3L3N3M2M3NUmY3"}, "image_id": 325, "id": 4685}, {"iscrowd": 0, "category_id": 1, "bbox": [308.0, 88.0, 30.0, 32.0], "area": 563, "segmentation": {"size": [512, 512], "counts": "\\Sj44j?2N3L3N3M2M3N3M2N30O010O0010O0010O010O0O2L3N2N3L3N3M2M4M2NQmf2"}, "image_id": 325, "id": 4686}, {"iscrowd": 0, "category_id": 1, "bbox": [24.0, 97.0, 87.0, 62.0], "area": 2519, "segmentation": {"size": [512, 512], "counts": "dS<1l?3N3L3N2N3L3N3O000010O010Ok@^Ol>c0PA@P?i00O0010O010O000N3M2M40O01O01O010O01O01O010O010O01O01O010O01O01O010O2O0O010O0nNWAm0n>3M010O010O0010oNSAm0P?0010O010O000SOPAk0R?010O010O00010O01M2M3N3M2M4M2M4M2N2Mm[X6"}, "image_id": 325, "id": 4687}, {"iscrowd": 0, "category_id": 1, "bbox": [362.0, 121.0, 79.0, 31.0], "area": 2017, "segmentation": {"size": [512, 512], "counts": "jSe5>b?`0@000000000000000000000000000000O10000000O10000000000000000000000000000000000000000000000000000000000000O1002N000=C5KO1000000000000000000000000000000000000f[S1"}, "image_id": 325, "id": 4688}, {"iscrowd": 0, "category_id": 1, "bbox": [211.0, 133.0, 73.0, 54.0], "area": 2246, "segmentation": {"size": [512, 512], "counts": "XeY31l?4M2M4M2M3M4M2M4M2M4M2M3N3M2010O0010O0010O010O0010O0010O010L3N3M20010O0010O0010O0010O010O0010O0010O010O0010O0010O010O0010N1M4M2M3N3M2M4M2N3L3N2NUka3"}, "image_id": 325, "id": 4689}, {"iscrowd": 0, "category_id": 1, "bbox": [287.0, 146.0, 27.0, 28.0], "area": 454, "segmentation": {"size": [512, 512], "counts": "Te_43k?3M2M3N3L3N3M2N210O010O0010O0010O0001M2M4M2N3L3N2M4MXkR3"}, "image_id": 325, "id": 4690}, {"iscrowd": 0, "category_id": 1, "bbox": [27.0, 160.0, 58.0, 62.0], "area": 2128, "segmentation": {"size": [512, 512], "counts": "Xf=3k?2M3N3M2N3N1O2L3N2N3IWORAl0l>6L3N3M2M3N3O010O01O01O010O010O00010O010O010O00010O010O010O00010O010ON2N3]O_ACc>:aABc>;_ACc>:aACb>:`ACc>;b0M4M2N3LbZe6"}, "image_id": 325, "id": 4691}, {"iscrowd": 0, "category_id": 1, "bbox": [329.0, 167.0, 64.0, 76.0], "area": 2670, "segmentation": {"size": [512, 512], "counts": "ieT51m?3L3N2M4M2M4M2WA]OT>d0jA^OW>b0fAAY>?dAD]><`AF`>:]AJb>7[AKf>h0O01O010O01O010O01O01dAjNl=V1QBmNP>S1lAQOS>o0kASOV>\\1O01O01O0N3M20010O010O01O01O010O010O00010OTOPBHQ>5RBKm=2VBNk=OXB0h=M[B4d=J^B6c=FaB8`=FbB7b=FaB7a=FbB8`=FbB7b=EbB8d>M4M2MVYk1"}, "image_id": 325, "id": 4692}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 172.0, 24.0, 26.0], "area": 381, "segmentation": {"size": [512, 512], "counts": "j55h?3N3M2M4M2N30O00010O010O00010O010L3N2N3L3N3L3N_jc7"}, "image_id": 325, "id": 4693}, 
{"iscrowd": 0, "category_id": 1, "bbox": [184.0, 181.0, 48.0, 64.0], "area": 1758, "segmentation": {"size": [512, 512], "counts": "WWl22l?2N2M4M2N3L3N3L3N2N3L3N3M2M4M2M3N3M2M4M2N2O20O010O01O01O010O0O2M2M3N3L3N3M2M4M2AWAEk>9XACl>9WAEk>9WADm>9>M3N3MRj[4"}, "image_id": 325, "id": 4694}, {"iscrowd": 0, "category_id": 1, "bbox": [230.0, 187.0, 34.0, 27.0], "area": 528, "segmentation": {"size": [512, 512], "counts": "YVc32k?3N3M2M4M2N2010O01O010O01O010O01O010O01O010O01O010O01O0O2M2N2M4M2Nlik3"}, "image_id": 325, "id": 4695}, {"iscrowd": 0, "category_id": 1, "bbox": [301.0, 190.0, 23.0, 24.0], "area": 325, "segmentation": {"size": [512, 512], "counts": "^ff42k?3N3L3N3M2M4N11O010O010O01O01O0N3L3N3M2M3N3Mmim2"}, "image_id": 325, "id": 4696}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 200.0, 48.0, 57.0], "area": 1743, "segmentation": {"size": [512, 512], "counts": "eWi62k?3M3M4K4M3M4K4M3M3M4K4M3M4L31O01cA^NZ>d110O00010O0001M2000010O000010O00010OL4M3M4K4M3M4K4M3M4L3L4M3Mai>"}, "image_id": 325, "id": 4697}, {"iscrowd": 0, "category_id": 1, "bbox": [492.0, 213.0, 20.0, 57.0], "area": 905, "segmentation": {"size": [512, 512], "counts": "]Wf7b0^??A0000000000000XOEkA;U>h0000000000000000O10[I"}, "image_id": 325, "id": 4698}, {"iscrowd": 0, "category_id": 1, "bbox": [265.0, 214.0, 29.0, 35.0], "area": 599, "segmentation": {"size": [512, 512], "counts": "agT41l?3N3L3N3L3M3N3L3N3L3O101O01O010O01O01L3N3L3N2M4L3N2M4M2MWi\\3"}, "image_id": 325, "id": 4699}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 216.0, 34.0, 46.0], "area": 1111, "segmentation": {"size": [512, 512], "counts": "h6U1k>10O010O00010O010ON3N110O00010O010O0010O001L3N2M4M2N3WOk@b0[?N3L3N3M2M3Nmh^7"}, "image_id": 325, "id": 4700}, {"iscrowd": 0, "category_id": 1, "bbox": [387.0, 233.0, 26.0, 27.0], "area": 426, "segmentation": {"size": [512, 512], "counts": "lgQ61l?4M2M4M2M3N3L3O2O01O01O010O01O01O01N1M3N3L3M4M2M3NcXa1"}, "image_id": 325, "id": 4701}, {"iscrowd": 0, "category_id": 1, "bbox": [226.0, 236.0, 31.0, 23.0], "area": 385, "segmentation": {"size": [512, 512], "counts": "hWa31m?2M3N3M2M4O0010O00010O010O010O0010O0010O010O0010O00N3L3N3M2M]Xo3"}, "image_id": 325, "id": 4702}, {"iscrowd": 0, "category_id": 1, "bbox": [168.0, 246.0, 75.0, 57.0], "area": 2440, "segmentation": {"size": [512, 512], "counts": "XXd21j?5SAKj=:QBKj=;oAKn=8mAMS>3hA3W>MdA8\\>f00010O00000010O000000010O000001O01O0001O000001O01O0001O010O0K5K6J5L5J5L5O01O01O010O00010O010O00010O010O0010O0010O0010O0010N1M4M2N2N3L3NfWV4"}, "image_id": 325, "id": 4703}, {"iscrowd": 0, "category_id": 1, "bbox": [312.0, 246.0, 27.0, 35.0], "area": 473, "segmentation": {"size": [512, 512], "counts": "bXl41l?3N3M2M4M2N2M4M201N1N3M2M4NN4M2N3M2M4M2N2N3O0N3M2N3MVXf2"}, "image_id": 325, "id": 4704}, {"iscrowd": 0, "category_id": 1, "bbox": [259.0, 259.0, 49.0, 67.0], "area": 1859, "segmentation": {"size": [512, 512], "counts": "XiQ42l?2e@Mg>5WANe>6XALf>6XAMe>5XANf>5WANe>5YAMe>i0M2N3L3N2010O01O010O010O00010O000N0O011N3N3MJPB`NP>^1RBcNn=Y1VBdNl=Z1VBdNm=Y1>M4M2M4M2N3L3N2N3L3N3L3N3M2MhgU3"}, "image_id": 325, "id": 4705}, {"iscrowd": 0, "category_id": 1, "bbox": [463.0, 267.0, 25.0, 24.0], "area": 404, "segmentation": {"size": [512, 512], "counts": "lhW71l?4K4M3M4K40001O01O01O01O0001O01O01O01O0N2L5L3M3Mcg;"}, "image_id": 325, "id": 4706}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 283.0, 26.0, 28.0], "area": 600, "segmentation": {"size": [512, 512], "counts": "k8l0T?000000000004L0Mi@]OW?c030000000000000000000000000000000Qgb7"}, "image_id": 325, "id": 4707}, {"iscrowd": 0, "category_id": 1, "bbox": 
[208.0, 285.0, 33.0, 31.0], "area": 618, "segmentation": {"size": [512, 512], "counts": "`YX32k?4M2N3L3N3M2M3N3O010O00010O010O0010O0010O0010O000M4M2M4M2M3M4M2MmVW4"}, "image_id": 325, "id": 4708}, {"iscrowd": 0, "category_id": 1, "bbox": [302.0, 293.0, 54.0, 60.0], "area": 1924, "segmentation": {"size": [512, 512], "counts": "aZg42l?2M4M2N2M4M2N3L3N3M2M3N3M2M4M2N3L3N2N3O0010O010O0010O0010O010O010O000N3M2O2O0010OM3D^AXOe>e0^AXOd>f0^AWOf>e0X18M3M3M4L3M3L5L3M3M4L3M3MhVa1"}, "image_id": 325, "id": 4710}, {"iscrowd": 0, "category_id": 1, "bbox": [510.0, 294.0, 2.0, 30.0], "area": 40, "segmentation": {"size": [512, 512], "counts": "VYo7:f?d0VF"}, "image_id": 325, "id": 4711}, {"iscrowd": 0, "category_id": 1, "bbox": [470.0, 295.0, 34.0, 42.0], "area": 803, "segmentation": {"size": [512, 512], "counts": "YZ[73j?3N2M4L3M4M2M3M4L3N2M4L3O2O0000N2M3N2M3N2M3M3N2M3M3O10000000000O10if3"}, "image_id": 325, "id": 4712}, {"iscrowd": 0, "category_id": 1, "bbox": [401.0, 301.0, 69.0, 103.0], "area": 3075, "segmentation": {"size": [512, 512], "counts": "UlX62k?3M4L3N3L3M3N3L3M4L3N2N3O0010O01GjNgAU1V>oNiAQ1T>ROlAo0Q>TOoAk0n=XORBi0k=YORBj0k=g0M4M2M4L3N3L3M3NjNkBFQ=:RCEm<;VCBiMmA3P?0UT1"}, "image_id": 325, "id": 4714}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 355.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "S;2kdo7"}, "image_id": 325, "id": 4715}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 358.0, 47.0, 64.0], "area": 1610, "segmentation": {"size": [512, 512], "counts": "i;h02Gd>l0N2N2N2O01N2N2N2N3N0O0000000010O00000000010OLZAoNf>R130001O000001O0001O0000012M2N2N2N2Bk@NX?Ok@OV?Ol@OV?Ol@OV?Ol@OV?OlTX7"}, "image_id": 325, "id": 4716}, {"iscrowd": 0, "category_id": 1, "bbox": [57.0, 372.0, 55.0, 70.0], "area": 1910, "segmentation": {"size": [512, 512], "counts": "Zml02m?2M2N3M3N1N3M2O2M3M2N3N1N3M3M2O2M2GiNgAZ1V>iNhAY1V>hNhAZ1V>:M2N3N0O1O01O101N3M3N1N3M2O2M3M2O2M2N3N2M2N30O0100O0Co@JS?4n@JT?4o@IT?4n@KT?3n@JT?4>M2OncW6"}, "image_id": 325, "id": 4717}, {"iscrowd": 0, "category_id": 1, "bbox": [109.0, 390.0, 56.0, 66.0], "area": 2033, "segmentation": {"size": [512, 512], "counts": "mmf12l?2M4L3N2M4M2M4N100010O00010OZO[OoAh0m=[OQBg0m=[OPBh0m=\\OPBg0m=[OPBh0m=[OQBh0k=h000010N1N3L300010O010O00010O0001M2N3L3M3N3L3N3L3N2M4L3N3L3N2M4M2M3M4M_S]5"}, "image_id": 325, "id": 4718}, {"iscrowd": 0, "category_id": 1, "bbox": [27.0, 413.0, 30.0, 34.0], "area": 511, "segmentation": {"size": [512, 512], "counts": "gm=1m?3M2O2M2N3M2O2M3M2N3N1N3M2O1N11N3M3N1N3M2N3N1N3M3M2O2M2N3NlRS7"}, "image_id": 325, "id": 4719}, {"iscrowd": 0, "category_id": 1, "bbox": [164.0, 419.0, 53.0, 67.0], "area": 1880, "segmentation": {"size": [512, 512], "counts": "`^b22l?2M3N3L3N3L3N2M4L3PAVOh>l0VAWO0Lc>Z1N3L3N210O01O01O010O01O01O010O00001M1N10O010O012M3M3N3L3N3L3N2M4M2M4L3N2M4M2M3N3L3NiRc4"}, "image_id": 325, "id": 4720}, {"iscrowd": 0, "category_id": 1, "bbox": [214.0, 436.0, 58.0, 49.0], "area": 1718, "segmentation": {"size": [512, 512], "counts": "b^[32l?3L3N3M2M3N3L3N3L3N2O2O010O0010O0010O0010O0010O001N1M3N3L3010O01O01O010O010O0001L2OO4M2M4M2M3N3M2M4M2M3N3L3N3M2M3NXbg3"}, "image_id": 325, "id": 4721}, {"iscrowd": 0, "category_id": 1, "bbox": [12.0, 439.0, 16.0, 18.0], "area": 156, "segmentation": {"size": [512, 512], "counts": "R^62l?3N2N1N3N2N1N3N01N3N1N3N2N2M2OVba7"}, "image_id": 325, "id": 4722}, {"iscrowd": 0, "category_id": 1, "bbox": [83.0, 445.0, 31.0, 33.0], "area": 607, "segmentation": {"size": [512, 512], "counts": "enY11m?2M4M2N3L3N2M4M2N2M4O001O01O010O01O01O01OO2L3N3L3M3N3L3M3N3LoaV6"}, "image_id": 325, 
"id": 4723}, {"iscrowd": 0, "category_id": 1, "bbox": [264.0, 447.0, 62.0, 65.0], "area": 2056, "segmentation": {"size": [512, 512], "counts": "Y_T42=0Q?2m@0R?2k@0S?2k@1R?1m@0Q?2m@0Q?`000O1O1O1O100O1O1O1O1O100O1O1O0001O00O1N3N1O1N2O1N2O20O2N1O2N1O1O2O0O2N1OO1N2N30O1O1O2N1M4L3N2M4L3N2M4M2M4L3N2Moal2"}, "image_id": 325, "id": 4724}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 456.0, 35.0, 30.0], "area": 571, "segmentation": {"size": [512, 512], "counts": "k>3k?2M3N3M2M4M2M4M20010O010O01O01O010O010O00M4M2M4M2N3N10010O010O0M3N3M_Q^7"}, "image_id": 325, "id": 4725}, {"iscrowd": 0, "category_id": 1, "bbox": [131.0, 477.0, 29.0, 30.0], "area": 519, "segmentation": {"size": [512, 512], "counts": "boQ21m?2M3N3L3N3M2M4M2N2010O01O01O010O01O01O010M2M3N3L3N3L3N2Nn`_5"}, "image_id": 325, "id": 4726}, {"iscrowd": 0, "category_id": 1, "bbox": [332.0, 491.0, 32.0, 21.0], "area": 513, "segmentation": {"size": [512, 512], "counts": "l_V54h?4L4L4M300001O000000M300001O0000001O000^Oe@?^?000001O0000001O0N2L4Lb`Y2"}, "image_id": 325, "id": 4727}, {"iscrowd": 0, "category_id": 1, "bbox": [396.0, 492.0, 36.0, 20.0], "area": 473, "segmentation": {"size": [512, 512], "counts": "o_V61m?2M3N2M3N2N2M3N2001O00001O001O001O00001O001O001O00001O001O00001O0O2M2N[`W1"}, "image_id": 325, "id": 4728}, {"iscrowd": 0, "category_id": 1, "bbox": [99.0, 497.0, 12.0, 15.0], "area": 108, "segmentation": {"size": [512, 512], "counts": "moa11m?3L3N3M2M300001L3N3M2M`PX6"}, "image_id": 325, "id": 4729}, {"iscrowd": 0, "category_id": 1, "bbox": [114.0, 499.0, 19.0, 13.0], "area": 162, "segmentation": {"size": [512, 512], "counts": "o_i11m?2M3N2N2N2O11O001O00001O001O000O2L3NZPm5"}, "image_id": 325, "id": 4730}, {"iscrowd": 0, "category_id": 1, "bbox": [190.0, 501.0, 17.0, 11.0], "area": 123, "segmentation": {"size": [512, 512], "counts": "o_o21l?3N2N2M31O001O00001O001O00001O0MYPh4"}, "image_id": 325, "id": 4731}, {"iscrowd": 0, "category_id": 1, "bbox": [132.0, 509.0, 8.0, 3.0], "area": 18, "segmentation": {"size": [512, 512], "counts": "m_R23m?000001O001O00Q`i5"}, "image_id": 325, "id": 4732}, {"iscrowd": 0, "category_id": 1, "bbox": [32.0, 0.0, 87.0, 33.0], "area": 1866, "segmentation": {"size": [512, 512], "counts": "ZP`04g?6K4O10001O0000001O000000001O0000001O000000001O0000001O0000001O000000001O0000001O000000001O00000010O000000010O0000010O0000010O000000010O0000010O000000010O0000010O0O1K5L4K6Kk_T6"}, "image_id": 326, "id": 4733}, {"iscrowd": 0, "category_id": 1, "bbox": [52.0, 0.0, 52.0, 34.0], "area": 965, "segmentation": {"size": [512, 512], "counts": "PPj01o?1O1O1O1O1O2N1O1O1O1O1O1O2N1O1O1O1O1O1O2N1O1O1O1O1O1O1O2N1OO1O1O1O100O1O1O1O1O1O2N3N1N2N2N2N2N2N2N3N1Ndo[6"}, "image_id": 327, "id": 4734}, {"iscrowd": 0, "category_id": 1, "bbox": [120.0, 0.0, 29.0, 28.0], "area": 430, "segmentation": {"size": [512, 512], "counts": "\\Pl11n?2N2N2N3M2N2O1N2N2N2N2N2N3N00O1O2N3M2N2N2N2N2O1N2N2N3M2Nc_e5"}, "image_id": 327, "id": 4735}, {"iscrowd": 0, "category_id": 1, "bbox": [179.0, 0.0, 60.0, 39.0], "area": 1140, "segmentation": {"size": [512, 512], "counts": "c`i21n?2N2N3M2N2N2N2O1N2N2N2N2N3M2N2N2N2N01O00001O1O1O100O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O1O100O1OQ`X4"}, "image_id": 327, "id": 4736}, {"iscrowd": 0, "category_id": 1, "bbox": [256.0, 0.0, 16.0, 9.0], "area": 79, "segmentation": {"size": [512, 512], "counts": "PPP41o?2N1O1O1O1O1O1OO1O1O1O1O1O1O1OQPh3"}, "image_id": 327, "id": 4737}, {"iscrowd": 0, "category_id": 1, "bbox": [339.0, 0.0, 44.0, 38.0], "area": 935, "segmentation": {"size": [512, 512], "counts": 
"``Y51n?2N2N2N2N3N1N2N2N2N2N2N2N2N2N2N3N1N2O1OO1O1O100O1O1O1O1O1O2N2N2N2N2O1N2N2N2N2N3M2N2N2Nb_P2"}, "image_id": 327, "id": 4738}, {"iscrowd": 0, "category_id": 1, "bbox": [397.0, 0.0, 37.0, 19.0], "area": 405, "segmentation": {"size": [512, 512], "counts": "P`V62n?1V@Oc?2[@0d?7O2N00O1O1O11O1O2N1O1O1O1O1O00O1O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1OQPW1"}, "image_id": 327, "id": 4739}, {"iscrowd": 0, "category_id": 1, "bbox": [153.0, 5.0, 23.0, 25.0], "area": 298, "segmentation": {"size": [512, 512], "counts": "^`\\21n?2S@Oh?7M2N2O1N2N2N2N2N2001M2N2N2N2N2N2N2N2N2N2N_oW5"}, "image_id": 327, "id": 4740}, {"iscrowd": 0, "category_id": 1, "bbox": [418.0, 10.0, 73.0, 67.0], "area": 1963, "segmentation": {"size": [512, 512], "counts": "nPa61n?2N2N2N2UAIj=9TBIj=9UBHi=:UBHi=:UBHi=:UBHj=:SBHk=:SBHk=:SBHk=:SBHk=:SBIl=7RBJo=6oAJS>6kAJW>6hAIZ>m03M2N2N2N2O1N2N2N2N2N2N2N0000000010O0000000000000002N000001O01O000000000000000000011N2N2N2N2N3M2N2N2N2N2N2OY_:"}, "image_id": 327, "id": 4741}, {"iscrowd": 0, "category_id": 1, "bbox": [390.0, 11.0, 21.0, 22.0], "area": 234, "segmentation": {"size": [512, 512], "counts": "cPS62m?2N2N2N2N2N2N2N2000000000N2N2N2N2N2N2N2NZ_b1"}, "image_id": 327, "id": 4742}, {"iscrowd": 0, "category_id": 1, "bbox": [311.0, 15.0, 15.0, 14.0], "area": 117, "segmentation": {"size": [512, 512], "counts": "e`k41n?2N2O2M2N2N2N0100O2N2N3N1N2N\\ol2"}, "image_id": 327, "id": 4743}, {"iscrowd": 0, "category_id": 1, "bbox": [104.0, 31.0, 63.0, 60.0], "area": 1748, "segmentation": {"size": [512, 512], "counts": "aQd12m?2O1N2N2N2N3M2N2N2N2O1N2N2N3M2N2N2N2N2O1N2O2O000000000000010O000000000000O2M2N2N2N2N2O1N2N3M2N2N2N200000001O000N2N2C^@9g?M2N2N2Nc]\\5"}, "image_id": 327, "id": 4744}, {"iscrowd": 0, "category_id": 1, "bbox": [272.0, 32.0, 42.0, 54.0], "area": 1408, "segmentation": {"size": [512, 512], "counts": "XQX43m?3M3L4M3M4K4M3M3L4M3M4L3L4M2NO01000O01000O10O10O10O10O2O3M3M3L4M3M4K4M3M3L4M3M4K4MfmR3"}, "image_id": 327, "id": 4745}, {"iscrowd": 0, "category_id": 1, "bbox": [326.0, 37.0, 11.0, 10.0], "area": 60, "segmentation": {"size": [512, 512], "counts": "XQS52m?2N2N2O1000O1N2N2O1Nf^g2"}, "image_id": 327, "id": 4746}, {"iscrowd": 0, "category_id": 1, "bbox": [382.0, 40.0, 31.0, 29.0], "area": 474, "segmentation": {"size": [512, 512], "counts": "bQo52m?3M2N2N2N2N2N2000000O1N2N2N3M2N1O02N2N3M2N2N2N2N2N2O1N2N2N2N[^a1"}, "image_id": 327, "id": 4747}, {"iscrowd": 0, "category_id": 1, "bbox": [469.0, 41.0, 43.0, 53.0], "area": 1248, "segmentation": {"size": [512, 512], "counts": "kaZ71n?2N2N2N2N2O2M2N2N2N2N2N2N2O1N3M2N2N2N2N21O000001O0001O000000N2N01O1O1O102M2N2N2VOPA90HU?Jm@<0HZ?5h@IW="}, "image_id": 327, "id": 4748}, {"iscrowd": 0, "category_id": 1, "bbox": [202.0, 55.0, 24.0, 23.0], "area": 279, "segmentation": {"size": [512, 512], "counts": "SRU32m?2N2N2O1N2N2N2N3M1O0001O00001O2N2N2O2M2N2N2N2NQn^4"}, "image_id": 327, "id": 4749}, {"iscrowd": 0, "category_id": 1, "bbox": [447.0, 56.0, 31.0, 31.0], "area": 506, "segmentation": {"size": [512, 512], "counts": "Vbo62m?2O1N2N2N2N3M2N2N2O1N2N3M2N1O0010O3M2N2N2N2O1N3M2N2N2N2N2O2Mim`0"}, "image_id": 327, "id": 4750}, {"iscrowd": 0, "category_id": 1, "bbox": [326.0, 64.0, 33.0, 31.0], "area": 527, "segmentation": {"size": [512, 512], "counts": "_RS51o?1N2N2N3M2N2N2N2O1N2N3M2N2N1O00010O01O2N2N2N2N2N3N1N2N2N2N2N2N3Na]\\2"}, "image_id": 327, "id": 4751}, {"iscrowd": 0, "category_id": 1, "bbox": [376.0, 73.0, 37.0, 37.0], "area": 719, "segmentation": {"size": [512, 512], "counts": 
"jRl53l?2N2N2N2O1N2N2N2N2N3M2N2N2N2N2O1N000001O2N2O1N2N2N2N2N2N3M2N2N2N2O1N2N2NW]a1"}, "image_id": 327, "id": 4752}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 81.0, 36.0, 61.0], "area": 1153, "segmentation": {"size": [512, 512], "counts": "a2d1]>000000000000010ON2N2N2N2N2N3M2O1N2N2N2N2N2N3M1O010O2N2N2N3M2N2N2N2O1N]l]7"}, "image_id": 327, "id": 4753}, {"iscrowd": 0, "category_id": 1, "bbox": [171.0, 81.0, 57.0, 64.0], "area": 1586, "segmentation": {"size": [512, 512], "counts": "nce21n?3N1N2N2N2N2N2N2N2N2N2N2N3M2O1N2N0@POQBP1o=ROoAn0Q>TOmAl0S>WOjAi0V>YOhAg0Y>ZOeAf0[>\\OcAd0]>?01O000000000002N2N2N2N2N2O1N2N2N2N2N2N3M2N2N2N2N2N2O1N2N2N2N3Mfl]4"}, "image_id": 327, "id": 4754}, {"iscrowd": 0, "category_id": 1, "bbox": [245.0, 81.0, 35.0, 34.0], "area": 597, "segmentation": {"size": [512, 512], "counts": "Tcj32m?2N2N2N3M2N2O1N2N2N2N2N2N1O000001O01O000002N2N2N2N2N2N2O2M2N2N2N2N2NRmc3"}, "image_id": 327, "id": 4755}, {"iscrowd": 0, "category_id": 1, "bbox": [344.0, 85.0, 32.0, 30.0], "area": 488, "segmentation": {"size": [512, 512], "counts": "QS\\51n?2O2M2N2N2N2N2N2N3NO0001O2N2O101O000000N3N1N2N2N2N2N2N3M2N2O1NjlS2"}, "image_id": 327, "id": 4756}, {"iscrowd": 0, "category_id": 1, "bbox": [410.0, 96.0, 55.0, 68.0], "area": 1941, "segmentation": {"size": [512, 512], "counts": "PT]62m?2N2N3N1N2N2N2N2N2N2N3M2O1N2N2N2N2N2N3M2N2O1N2N2N2N2N3M2N2N2O0O001O000001O2XOoA^OS>`0oA^OS>`0oA^OS>`0oA^OS>a0oA]OR>a0PB]OS>`0oA^OS>`0oA^OS>`0oA^OS>`0oA^OS>`0oA^OS>`0h0Ha@La?2a@Ma?0a@Na?18N^\\g0"}, "image_id": 327, "id": 4757}, {"iscrowd": 0, "category_id": 1, "bbox": [352.0, 114.0, 55.0, 54.0], "area": 1464, "segmentation": {"size": [512, 512], "counts": "gT`51n?2N2N2N2N3M2N2N2O1^O^OcAd0[>^OcAd0[>^OcAd0\\>]ObAf0[>\\OcAf0[>\\OcAe0\\>^OaAc0^>?000000001O00002O1N2N2N2N2N2N3O0001O00000000N3M2N2N2O1N2N2N2N3M2N2N2N2O1N2N^[d1"}, "image_id": 327, "id": 4758}, {"iscrowd": 0, "category_id": 1, "bbox": [211.0, 124.0, 60.0, 51.0], "area": 1492, "segmentation": {"size": [512, 512], "counts": "fdY31o?4K6K4L5K4K3NO1000O1000O1000O10O1000O10O01O1O1O1O001O1O1O001O1O00000001O01O0000000002N2N2N2N2N2O1N2N2N2N3M2N2N2N2N2N2Ob[h3"}, "image_id": 327, "id": 4759}, {"iscrowd": 0, "category_id": 1, "bbox": [506.0, 129.0, 6.0, 12.0], "area": 41, "segmentation": {"size": [512, 512], "counts": "UTm72m?2O1N3M2N2nK"}, "image_id": 327, "id": 4760}, {"iscrowd": 0, "category_id": 1, "bbox": [19.0, 145.0, 55.0, 53.0], "area": 1402, "segmentation": {"size": [512, 512], "counts": "`e91n?2N2N3M2N2N2O1N2N2N2N3M2N2N2N2O1N2N2N2N0000000001O0001O000000000001O00101N2N2N2N2N3M2N2N2O1N2N2N2N3M2N2N2N2Okjj6"}, "image_id": 327, "id": 4761}, {"iscrowd": 0, "category_id": 1, "bbox": [282.0, 152.0, 24.0, 24.0], "area": 287, "segmentation": {"size": [512, 512], "counts": "VU]41n?2N2N2N2N3M2N2N2N1O0000000002N2N2O1N2N2N2N2N2NPkV3"}, "image_id": 327, "id": 4762}, {"iscrowd": 0, "category_id": 1, "bbox": [486.0, 154.0, 26.0, 43.0], "area": 649, "segmentation": {"size": [512, 512], "counts": "]Uc72n?1N2N3M2N2N2N2N2N2O2M2N2N2N2N2N2N2O2M2N000001O2N2NUK"}, "image_id": 327, "id": 4763}, {"iscrowd": 0, "category_id": 1, "bbox": [414.0, 156.0, 20.0, 21.0], "area": 223, "segmentation": {"size": [512, 512], "counts": "UU_62m?2N2N2O2M2N2N2N3N0O0003M2N2O1N3M2N2N2OjjV1"}, "image_id": 327, "id": 4764}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 172.0, 14.0, 22.0], "area": 167, "segmentation": {"size": [512, 512], "counts": "`5b0]?00001O02N3M2N2N2N2N2N2O1N[jh7"}, "image_id": 327, "id": 4765}, {"iscrowd": 0, "category_id": 1, "bbox": [247.0, 181.0, 58.0, 53.0], "area": 1503, 
"segmentation": {"size": [512, 512], "counts": "ffk31m?3N2M2N3M2N3M2N3N110O0100O0100O010O010O010N1N3M2N3M2O2M2N3M3M2N3M2N3O01N1N3N2M2N3M2N3M2N3N1N3M2N3M2N3M3M2O2M2N3M2NQZW3"}, "image_id": 327, "id": 4766}, {"iscrowd": 0, "category_id": 1, "bbox": [55.0, 185.0, 56.0, 56.0], "area": 1671, "segmentation": {"size": [512, 512], "counts": "ffk02m?2N2N2_@JU?9h@IV?9h@IV?9h@IV?b0N2N2N2O0O0000002N2N2N2N2N2N2N2N00000011N00000000001O2N2N2N2N3M2N2N2N2O1N2N2N2N2N2N2N2N2N2N3M2N2N`YX6"}, "image_id": 327, "id": 4767}, {"iscrowd": 0, "category_id": 1, "bbox": [446.0, 208.0, 20.0, 20.0], "area": 178, "segmentation": {"size": [512, 512], "counts": "fVo61n?2N2N2N2N2N200000001O0000O1N2N3M2O1N2NSif0"}, "image_id": 327, "id": 4768}, {"iscrowd": 0, "category_id": 1, "bbox": [495.0, 208.0, 17.0, 17.0], "area": 139, "segmentation": {"size": [512, 512], "counts": "jfg72m?2N2N2N3M1O0001O0000002O1N2N3M2N[I"}, "image_id": 327, "id": 4769}, {"iscrowd": 0, "category_id": 1, "bbox": [348.0, 210.0, 36.0, 26.0], "area": 439, "segmentation": {"size": [512, 512], "counts": "mV^52m?3M2N2N2N2N2N2O1N2N2N3M01O2N2N2N3N1N2N2N2N2N00000002N2N2N0002N2N2N3M2NWio1"}, "image_id": 327, "id": 4770}, {"iscrowd": 0, "category_id": 1, "bbox": [285.0, 211.0, 58.0, 63.0], "area": 1701, "segmentation": {"size": [512, 512], "counts": "lg^42m?2N2O2M2N2N2N2N2N2N2N2N3N1N2N2N2N2N2N1O1DiNnAW1R>kNmAT1S>nNkAR1U>POiAP1W>ROgAn0Y>TOeAl0[><000010O000002N2N2N3M2N2N2O1N2N2N2N2N3M2N2N2N2O1N2N2N3M2N2N2NcXd2"}, "image_id": 327, "id": 4771}, {"iscrowd": 0, "category_id": 1, "bbox": [390.0, 211.0, 56.0, 49.0], "area": 1306, "segmentation": {"size": [512, 512], "counts": "^WS63m?1N2N2N2N2N2N2N2N3N1N2N2N2N2N2N2N3M0001O0000000001O000001O00002N2N2O1N2N2N2N00001O0000011N2N3M2N2N2N2N2N2O1N2NPiP1"}, "image_id": 327, "id": 4772}, {"iscrowd": 0, "category_id": 1, "bbox": [9.0, 217.0, 33.0, 32.0], "area": 548, "segmentation": {"size": [512, 512], "counts": "Wg41o?1N2N3M2N2N2O1N2N2N2N2N2O1N2N2N2N11O2M2N2N2N2N2N2N2O1N3M2N2N2N2NghZ7"}, "image_id": 327, "id": 4773}, {"iscrowd": 0, "category_id": 1, "bbox": [94.0, 227.0, 54.0, 53.0], "area": 1354, "segmentation": {"size": [512, 512], "counts": "SX_12n?2M2N2N3N1N3M2O1N3M2O2M2N3M100O00010O000IRO]Ao0c>SO[Al0e>VOYAj0g>710O00010O0000010O00010O02N3N1N3M2N2O2M2N3N1N3M2N2O2M2N3N1NVhe5"}, "image_id": 327, "id": 4774}, {"iscrowd": 0, "category_id": 1, "bbox": [463.0, 228.0, 19.0, 18.0], "area": 168, "segmentation": {"size": [512, 512], "counts": "]gW71n?2O2M2N2N2N3N0O00000011N2N3M2N2O1N2Neh>"}, "image_id": 327, "id": 4775}, {"iscrowd": 0, "category_id": 1, "bbox": [500.0, 240.0, 12.0, 20.0], "area": 135, "segmentation": {"size": [512, 512], "counts": "kWj71n?2N2N2N2N2N2N2N2N1O000aH"}, "image_id": 327, "id": 4776}, {"iscrowd": 0, "category_id": 1, "bbox": [57.0, 250.0, 28.0, 27.0], "area": 345, "segmentation": {"size": [512, 512], "counts": "[hl01n?2N2N2O1N2N3M2N2N0000001O0001O000000001O2N2O1N2N2N3M2NoWe6"}, "image_id": 327, "id": 4777}, {"iscrowd": 0, "category_id": 1, "bbox": [122.0, 270.0, 65.0, 58.0], "area": 1794, "segmentation": {"size": [512, 512], "counts": "\\Ym12m?2N2N3N1N2N2N2N2N2N2N2N3M2O1N2N2N2N2N2N2O101O00N3M2N000001O000001O000000000001O000001O0000002N2N2N2O1N2N2N3M2N2N2N2N2N2O1N2N2N3MoVR5"}, "image_id": 327, "id": 4778}, {"iscrowd": 0, "category_id": 1, "bbox": [322.0, 275.0, 81.0, 62.0], "area": 2473, "segmentation": {"size": [512, 512], "counts": "XYQ51n?2N2N2N2N3N1N2N2N2N2N2N2N2N2N3N1N2N2N2N2N2000001O000001O0000000001O000001M2N0000001O0001O0000000000000001O01O00000000000002N2N3N1N2N2N2N2N2N2N2N2N3M2O1N2N2N2N2NiVf1"}, 
"image_id": 327, "id": 4779}, {"iscrowd": 0, "category_id": 1, "bbox": [453.0, 276.0, 59.0, 61.0], "area": 1970, "segmentation": {"size": [512, 512], "counts": "[iR72m?2N2N2O1N2N3M2N2N2N2N2N2O1N3M2N2N2N2N2N20010O0000000000000010O000O1N1O000001O0000000001O000001O0000000001O00000001O0]G"}, "image_id": 327, "id": 4780}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 292.0, 61.0, 45.0], "area": 1603, "segmentation": {"size": [512, 512], "counts": "k91l?3N3M2N3M2N3M2N3M2N3M210O010O010O010O00010O010O010O010O010O010O010M2M4M2N3M2010O010O0010OO2M2N3L3N3M2N2N3M2N3M2N3M2M4M2NdVQ7"}, "image_id": 327, "id": 4781}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 306.0, 48.0, 55.0], "area": 1009, "segmentation": {"size": [512, 512], "counts": "mi]11n?2N3M2N2O1N2N2N2N3M2N2N20000000001PAVOg>j0WAXOj>e0VA]Oj>a0VAAj>>VACj>=TAFk>:SAHm>f0001O00000QOSAk0l>SOVAm0o>N1O00002N2N2N2O1N2N3M2N2N2N2N2N2OXUj5"}, "image_id": 327, "id": 4782}, {"iscrowd": 0, "category_id": 1, "bbox": [76.0, 317.0, 17.0, 23.0], "area": 198, "segmentation": {"size": [512, 512], "counts": "QZV12;OV?3h@OV?3h@OV?3h@OV?3h@OW?2g@0Y?:2N2N2N2N2N2N2N2N2N2NiUa6"}, "image_id": 327, "id": 4783}, {"iscrowd": 0, "category_id": 1, "bbox": [171.0, 318.0, 57.0, 43.0], "area": 1370, "segmentation": {"size": [512, 512], "counts": "_je23m?5K6J5J6K2N00O0100000O10O100000O010000000O0100000O0100O1O1O001O100O1O001O1O1O11N2N2N2N2N2N2N2N2N2N2N3N1N2N2N2N2Nae]4"}, "image_id": 327, "id": 4784}, {"iscrowd": 0, "category_id": 1, "bbox": [369.0, 322.0, 86.0, 69.0], "area": 3108, "segmentation": {"size": [512, 512], "counts": "Tkh51n?3M2O1N2N2N2N2N2N2N2N2N3M2N2O1N2N2N2N2N2N2N3M000001O000002N2N2O101O000001O00000000000N2N2N2N000000000000010O0000000000000000000010O00000003M2N2N2N2N2N2O1N2N2]Ok@8W?Fk@8X?Ej@9`?N2N2N3MTUl0"}, "image_id": 327, "id": 4785}, {"iscrowd": 0, "category_id": 1, "bbox": [49.0, 326.0, 27.0, 27.0], "area": 390, "segmentation": {"size": [512, 512], "counts": "bjh02m?2N2N2O1N3M2N2N2N2N2N2N2O0O02N2N2N2N2N2O2M2N2N2N2N2N^ei6"}, "image_id": 327, "id": 4786}, {"iscrowd": 0, "category_id": 1, "bbox": [81.0, 335.0, 23.0, 23.0], "area": 279, "segmentation": {"size": [512, 512], "counts": "jjX13l?2N2N2O1N2N2N2N3M1O000001O2N2N2N2N2O1N2N2N3MXe[6"}, "image_id": 327, "id": 4787}, {"iscrowd": 0, "category_id": 1, "bbox": [140.0, 336.0, 25.0, 25.0], "area": 317, "segmentation": {"size": [512, 512], "counts": "nZV21n?2N2N2N2N2N2N2N2N2N2OO000000011N2N2N2N2N3M2N2N2NWU]5"}, "image_id": 327, "id": 4788}, {"iscrowd": 0, "category_id": 1, "bbox": [458.0, 338.0, 11.0, 10.0], "area": 56, "segmentation": {"size": [512, 512], "counts": "fZU71n?2N2N2O1N2OO3M2N2N2OYUe0"}, "image_id": 327, "id": 4789}, {"iscrowd": 0, "category_id": 1, "bbox": [3.0, 349.0, 30.0, 30.0], "area": 466, "segmentation": {"size": [512, 512], "counts": "Zk12m?2N2N3M2O1N2N2N2N3M2N2O1N2N1O03N1N2N2N2N3M2O1N2N2N2N3M2N2ObT_7"}, "image_id": 327, "id": 4790}, {"iscrowd": 0, "category_id": 1, "bbox": [224.0, 349.0, 70.0, 74.0], "area": 2099, "segmentation": {"size": [512, 512], "counts": "_[`31n?2N2N2N3N1N2N2N2N2N3M2N2O1N2N2N2N3M2N2O1N2000010O00000000010O0000000000010O00000000010O0000000000O2M2N2N2N2O1N2N3M2N2N2N2N2O2M2N2N2N2N2N2O\\c\\3"}, "image_id": 327, "id": 4791}, {"iscrowd": 0, "category_id": 1, "bbox": [195.0, 361.0, 26.0, 27.0], "area": 362, "segmentation": {"size": [512, 512], "counts": "hkQ31n?2N2N2N2N2M2O2N2N2N2N2N2N1O02N2N2N2N2N2N2D^@7g?N2N2N2NZTa4"}, "image_id": 327, "id": 4792}, {"iscrowd": 0, "category_id": 1, "bbox": [80.0, 374.0, 62.0, 45.0], "area": 1531, "segmentation": {"size": [512, 512], "counts": 
"]\\X12l?3L3N2M4M2M4L3N2M4O000010O010O00010O01O01O010O01O01O010O00010O010O00010O01O01O010O01O01O010O01O01O010O0N2N3L3M3N3L3N3L3N2Mjch5"}, "image_id": 327, "id": 4793}, {"iscrowd": 0, "category_id": 1, "bbox": [430.0, 376.0, 82.0, 87.0], "area": 3145, "segmentation": {"size": [512, 512], "counts": "n\\g61n?2N3M2N2N2N2N2N2N2N2N2O1N2N2N2N2N002N2N3M2N2N2N2N2O1N2fA\\NU>i1N3M2N2NQOSBNk=2WBNg=2[BNc=2_BN_=2cBN[=2gBNW=2kBNS=2oBNP=1RCOl<1VCOjm<@SC`0o<^OQCb0Q=\\OoBd0S=ZOmBf0U=XOkBh0W=VOjBi0X=UOhBk0Z=SOfBm0\\=QOdBo0V>00001O00000000000001O001O2N2N2N2O1N3M2N2N2N2N2N2NmB"}, "image_id": 327, "id": 4794}, {"iscrowd": 0, "category_id": 1, "bbox": [196.0, 389.0, 15.0, 16.0], "area": 127, "segmentation": {"size": [512, 512], "counts": "[\\R31n?2N3M2N2N2N200000N2N2O1N2N3McSf4"}, "image_id": 327, "id": 4795}, {"iscrowd": 0, "category_id": 1, "bbox": [165.0, 391.0, 30.0, 29.0], "area": 428, "segmentation": {"size": [512, 512], "counts": "flb22m?2O1N2N2N2N3M2N2N2N2N2OO00000000001O2O1N2N2N3M2N2N2N2N2O1N^Sn4"}, "image_id": 327, "id": 4796}, {"iscrowd": 0, "category_id": 1, "bbox": [209.0, 410.0, 41.0, 43.0], "area": 846, "segmentation": {"size": [512, 512], "counts": "WmX32m?2N2N2N2O2M2N2N2N2N2N3N1N2N2N2O10001O0001O000001ON2O2M2N2N2N2N2N2N3N1N2N2N2N2N3M[bR4"}, "image_id": 327, "id": 4797}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 417.0, 31.0, 39.0], "area": 665, "segmentation": {"size": [512, 512], "counts": "`=c0\\?2N2N2N2N2OO0000000000000000010O002N2N2N2N2N2N2N2N2O1N3M2N2NbR`7"}, "image_id": 327, "id": 4798}, {"iscrowd": 0, "category_id": 1, "bbox": [75.0, 425.0, 33.0, 33.0], "area": 549, "segmentation": {"size": [512, 512], "counts": "fmU11n?2N2N2N2N2N2N2N2N2N2O1N2N2N3O00000000O1N2O2M2N2N2N2N2N2N2N2N2N2NUbY6"}, "image_id": 327, "id": 4799}, {"iscrowd": 0, "category_id": 1, "bbox": [253.0, 426.0, 25.0, 24.0], "area": 334, "segmentation": {"size": [512, 512], "counts": "emn32m?2N3M2N2N2N2N2N2N2O1N1O01O2N2N2O1N1O2N2N2N2N2N2N]bd3"}, "image_id": 327, "id": 4800}, {"iscrowd": 0, "category_id": 1, "bbox": [43.0, 441.0, 13.0, 13.0], "area": 92, "segmentation": {"size": [512, 512], "counts": "ome02m?2O1N2N2N2N1O02N2N2N2N2NSbS7"}, "image_id": 327, "id": 4801}, {"iscrowd": 0, "category_id": 1, "bbox": [280.0, 445.0, 57.0, 49.0], "area": 1391, "segmentation": {"size": [512, 512], "counts": "g^\\42m?2O2M2N3N2M2N3N1N3N2M2N3N1N2N010O010O00010O00010O010O00010O00010O010O00010O00010O0100O2N3N1N3M3N1N3N1N3M3N1N3M2O_Qg2"}, "image_id": 327, "id": 4802}, {"iscrowd": 0, "category_id": 1, "bbox": [173.0, 450.0, 61.0, 62.0], "area": 2224, "segmentation": {"size": [512, 512], "counts": "Znf25j?7J5K6J6J5K1N1000000000O01000000000O010000000O10O1hAmNc=S1\\BoNc=Q1\\BQOc=o0\\BROd=m0\\BTOc=m0\\BTOd=l0[BUOe=k0ZBVOf=j0YBWOg=h0YBYOg=g0XBZOg=g0XBZOh=f0WB[Oi=e0VB\\Oj=d0UB]Ok=b0VB^Oj=b0WB[Oj=f0WBXOj=h0UBXOl=h0SBXOn=g0RBYOo=g0PBYOQ>g0nAYOT>f0kAZO\\>`0cA@_>?`AA`>?`AAa>?^AAc>?\\AAh>i06^OQAHV?2k@L[?Ng@0`?Ja@4g?N2Ng`Z4"}, "image_id": 327, "id": 4803}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 462.0, 21.0, 32.0], "area": 370, "segmentation": {"size": [512, 512], "counts": "f>h0X?O0000000001O000002N2N2N2N2N2O1N2N3M2N2NWQe7"}, "image_id": 327, "id": 4804}, {"iscrowd": 0, "category_id": 1, "bbox": [29.0, 462.0, 27.0, 27.0], "area": 383, "segmentation": {"size": [512, 512], "counts": "kn>2m?2N2N2N2O1N2N3M2N2N2N2N1O0001O2N2N2N2N3M2N2O1N2N2N2NWaS7"}, "image_id": 327, "id": 4805}, {"iscrowd": 0, "category_id": 1, "bbox": [250.0, 468.0, 25.0, 24.0], "area": 307, "segmentation": {"size": [512, 512], "counts": 
"Q_m31n?3M2N2N2N2O1N2N2N2N1O00000001O2N2N2N2N2O1N3M2N2NSQf3"}, "image_id": 327, "id": 4806}, {"iscrowd": 0, "category_id": 1, "bbox": [328.0, 485.0, 38.0, 27.0], "area": 741, "segmentation": {"size": [512, 512], "counts": "o_T51n?1O1O100O1O1O1O1O1O1O1M3C=0000000000001O0000000000000000000000000000000000j`X2"}, "image_id": 327, "id": 4807}, {"iscrowd": 0, "category_id": 1, "bbox": [502.0, 487.0, 10.0, 19.0], "area": 103, "segmentation": {"size": [512, 512], "counts": "__k71n?2N2N3M2N2O1N2N2N2h@"}, "image_id": 327, "id": 4808}, {"iscrowd": 0, "category_id": 1, "bbox": [66.0, 490.0, 37.0, 22.0], "area": 439, "segmentation": {"size": [512, 512], "counts": "o_Q11n?1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O1O100O1O1O12N1O1O1O1O1O1O1N2N2N3M2N2NXP\\6"}, "image_id": 327, "id": 4809}, {"iscrowd": 0, "category_id": 1, "bbox": [21.0, 492.0, 30.0, 20.0], "area": 339, "segmentation": {"size": [512, 512], "counts": "go:170_?2_@O`?4]@Mb?9O1O1O1O1O100O1O1O12N1O1O1O1O1O1O1O1O2N1O1O1O1O1O1O1OQPV7"}, "image_id": 327, "id": 4810}, {"iscrowd": 0, "category_id": 1, "bbox": [65.0, 499.0, 7.0, 7.0], "area": 25, "segmentation": {"size": [512, 512], "counts": "foP11n?2N2N2ON3N2N\\`k6"}, "image_id": 327, "id": 4811}, {"iscrowd": 0, "category_id": 1, "bbox": [174.0, 499.0, 20.0, 13.0], "area": 147, "segmentation": {"size": [512, 512], "counts": "j_g22m?2O1N2N2N1O1O1O1002N1O1O1O1O1O2N1O1O1OQ`n4"}, "image_id": 327, "id": 4812}, {"iscrowd": 0, "category_id": 1, "bbox": [83.0, 431.0, 25.0, 22.0], "area": 358, "segmentation": {"size": [512, 512], "counts": "mmY12j?5L3M3M4N1000010O00010O00010O000010O000M4L3L4M3M_bY6"}, "image_id": 328, "id": 4813}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 458.0, 73.0, 54.0], "area": 3136, "segmentation": {"size": [512, 512], "counts": "\\>Z1f>010O00010O00010O000010O0001L3M301O01OdA`NX>d100001O00001O00001O0000M300001O00001O00001O00001O00001O00001O000dN`AX1e>O00001O00001O00001SOYA?g>^O\\Ab0d>[O_Ae0b>WObAh0k>O1M4L3M3L5L3MjPk6"}, "image_id": 328, "id": 4814}, {"iscrowd": 0, "category_id": 1, "bbox": [18.0, 0.0, 61.0, 35.0], "area": 1265, "segmentation": {"size": [512, 512], "counts": "RP93k?20001O00001O00001O00001O00Mh@Kk>3TA1k>OQA5o>Lm@7S?Ij@;U?80001O00001O00001O00001O00001O00001O00001O00001O0000001OQORAl0Q?0010O00010L3M3M4L3M3M4K4Ml_h6"}, "image_id": 329, "id": 4815}, {"iscrowd": 0, "category_id": 1, "bbox": [179.0, 5.0, 13.0, 13.0], "area": 125, "segmentation": {"size": [512, 512], "counts": "Y`i25j?3M2N101O0O11O000O2O1M5Kgoo4"}, "image_id": 329, "id": 4816}, {"iscrowd": 0, "category_id": 1, "bbox": [30.0, 0.0, 58.0, 33.0], "area": 1060, "segmentation": {"size": [512, 512], "counts": "_P?1n?2N2N2N2N2N2N2M3N2N2K[Ol@g0S?4O1O1O1O1O1O00O1O1O1O1O1O1O1O1O1O1O1O1001O1O1O00O1O1O1O1O1O1O1O1O1O1N2O1O1O1O1O1O1O1O1O1O1OQPd6"}, "image_id": 331, "id": 4817}, {"iscrowd": 0, "category_id": 1, "bbox": [279.0, 0.0, 31.0, 16.0], "area": 261, "segmentation": {"size": [512, 512], "counts": "Q`[41n?2O2N1O1O1O1O2N1O1O1O1O1OO1O1O1O100O1O1O1O1O100O1O1O1O1O100OQPU3"}, "image_id": 331, "id": 4818}, {"iscrowd": 0, "category_id": 1, "bbox": [323.0, 0.0, 57.0, 44.0], "area": 1495, "segmentation": {"size": [512, 512], "counts": "[`Q52<0S?2k@0S?2k@0S?2k@0S?2j@1T?1j@1T??N2N2N2N2N2O1O1O1O1O1O1O00O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O2N2M3N2NmoQ2"}, "image_id": 331, "id": 4819}, {"iscrowd": 0, "category_id": 1, "bbox": [409.0, 0.0, 21.0, 11.0], "area": 121, "segmentation": {"size": [512, 512], "counts": "P`\\61o?1O1O1O1O1O1O1O1O1O1OO1O1O1O1O1O1O1O1O1OQPY1"}, "image_id": 331, "id": 4820}, {"iscrowd": 
0, "category_id": 1, "bbox": [420.0, 15.0, 41.0, 42.0], "area": 818, "segmentation": {"size": [512, 512], "counts": "XQb62m?2N2N2N2N2N2N2N2N2N2M3N2N2N2N2NO10000000000000001O2N2N2N2N2N2N2N2N2M3N2N1O2N2N2NT_i0"}, "image_id": 331, "id": 4821}, {"iscrowd": 0, "category_id": 1, "bbox": [360.0, 16.0, 54.0, 62.0], "area": 1706, "segmentation": {"size": [512, 512], "counts": "iQd52m?2N2N2M3N2N2ECPA>o>Do@>o>CPA?n>CPA?n>;N2FoNbAS1\\>oNbAS1\\>oNbAS1\\>:N2N2N2N2N1O0001O2N2N2N2N2N2N2N2N1O2M3N2N2N2N2N2N2N2N2N2N2N2N2N2N1N3N2N2N2Nfn`1"}, "image_id": 331, "id": 4822}, {"iscrowd": 0, "category_id": 1, "bbox": [79.0, 18.0, 19.0, 19.0], "area": 173, "segmentation": {"size": [512, 512], "counts": "m`W11n?2N2N2N2N2N2N2N00000002N2N2N2N2N2N2NXo^6"}, "image_id": 331, "id": 4823}, {"iscrowd": 0, "category_id": 1, "bbox": [96.0, 21.0, 61.0, 51.0], "area": 1541, "segmentation": {"size": [512, 512], "counts": "]Q`12m?2N2N2N2N2N2N2M3N2N1O2O100000000O1O1N2N2N2N1O2N2N2N2N2M100002N1O2N2M3N2N2N00O100000000000O3N2N2N2N2N2N2N1O2N2N2M3N2N2N2Nn^a5"}, "image_id": 331, "id": 4824}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 33.0, 25.0, 43.0], "area": 589, "segmentation": {"size": [512, 512], "counts": "Q1U1l>000000000000O1N2N2N2N2N2M3N2N2N2N2N2N2N2N2N2N2NX^c7"}, "image_id": 331, "id": 4825}, {"iscrowd": 0, "category_id": 1, "bbox": [46.0, 52.0, 27.0, 27.0], "area": 364, "segmentation": {"size": [512, 512], "counts": "PRg01n?2N2N2N2N2M3N2N1O2N2N2000000000N1O2N2N2M3N2N2N2N1O2NP^k6"}, "image_id": 331, "id": 4826}, {"iscrowd": 0, "category_id": 1, "bbox": [137.0, 56.0, 55.0, 53.0], "area": 1573, "segmentation": {"size": [512, 512], "counts": "abT21o?1N3N2M3N2M3N2M3N2M3N2GVOWAm0g>UOWAm0f>9NO01O01O010O010O010O010O010O010O010O010O010O010O2O2M3N2M2O2M3N2M3N2M3N2M3N2M3M3NZmo4"}, "image_id": 331, "id": 4827}, {"iscrowd": 0, "category_id": 1, "bbox": [468.0, 57.0, 34.0, 32.0], "area": 552, "segmentation": {"size": [512, 512], "counts": "ZRZ71n?2N2N2N2N3N1N2N2N2N2N2N2N2N1O00000001O1O2O1N2N2N2N3M2N2N2N2N2N2N2Oim4"}, "image_id": 331, "id": 4828}, {"iscrowd": 0, "category_id": 1, "bbox": [84.0, 62.0, 18.0, 19.0], "area": 178, "segmentation": {"size": [512, 512], "counts": "YRZ12m?2N2M3N2N1O2N2N1N12N2M2O2N2N2N2M3Nlm\\6"}, "image_id": 331, "id": 4829}, {"iscrowd": 0, "category_id": 1, "bbox": [382.0, 66.0, 55.0, 57.0], "area": 1631, "segmentation": {"size": [512, 512], "counts": "VSo52m?2N2N2N2N2O1N2N2N2N2H]OPAe0n>]OPAe0n>9N1InN]AT1a>nN]AT1a>7N2N01O0001O000000000002N2N2N2O11O00N2N2O1N2N1O00003M2N2N2N2N2N2Bc@6_?Hc@6_?Hc@7d?N2N2NS]U1"}, "image_id": 331, "id": 4830}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 76.0, 10.0, 26.0], "area": 113, "segmentation": {"size": [512, 512], "counts": "\\2j0W?ZOk@=X?Aj@=^?N2N2N2N2N2N2NVmj7"}, "image_id": 331, "id": 4831}, {"iscrowd": 0, "category_id": 1, "bbox": [107.0, 82.0, 30.0, 30.0], "area": 450, "segmentation": {"size": [512, 512], "counts": "nbe11n?2N2N2N2N2N2N2N2N2N2N2N200000000000O1N2N2N2N2N2N2N2N2N2N2Nn\\k5"}, "image_id": 331, "id": 4832}, {"iscrowd": 0, "category_id": 1, "bbox": [19.0, 88.0, 12.0, 13.0], "area": 80, "segmentation": {"size": [512, 512], "counts": "lb92m?2N2N2N20000000N2N2N2NR]`7"}, "image_id": 331, "id": 4833}, {"iscrowd": 0, "category_id": 1, "bbox": [10.0, 89.0, 7.0, 8.0], "area": 29, "segmentation": {"size": [512, 512], "counts": "mR51n?2M3N20M2O2NV]g7"}, "image_id": 331, "id": 4834}, {"iscrowd": 0, "category_id": 1, "bbox": [67.0, 91.0, 54.0, 69.0], "area": 1748, "segmentation": {"size": [512, 512], "counts": 
"ncQ11n?2N2N2^OKUA8i>JUA8i>JUA7j>KTA7j>KTA7j>KTA7i>LUA6i>b0N2N2N2N2N2N2N200000O1000000000000000000kNdAe0\\>YOfAg0Z>WOgAj0X>UOjAk0V>SOlAm0T>POoAn0S>POoAn0S>POoAn0c>N2N2N2N2N2N2N2N2N1O2M3N2N2N2Ne[S6"}, "image_id": 331, "id": 4835}, {"iscrowd": 0, "category_id": 1, "bbox": [432.0, 96.0, 51.0, 60.0], "area": 1571, "segmentation": {"size": [512, 512], "counts": "VTh63m?1N2N2N2BHQA:m>HQA:n>GPA;n>Ho@;n>GPA;f>]O_AP1_>RO_AP1_>RO_AP1`>9N2N2N3M2N1O000001O2O1N2N2N2N3M2N2N2N2O1N2N2N3M2N2N2N2N2O1N3M2N2N2N2N2N2N2OQ\\>"}, "image_id": 331, "id": 4836}, {"iscrowd": 0, "category_id": 1, "bbox": [497.0, 103.0, 15.0, 28.0], "area": 213, "segmentation": {"size": [512, 512], "counts": "cch71o?1N2N2N2N2N2N2N2N3M2O1N2N2N2hL"}, "image_id": 331, "id": 4837}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 105.0, 39.0, 42.0], "area": 892, "segmentation": {"size": [512, 512], "counts": "h38g?2N2M3N2N2N2N2N2N2N2N2N1O2N2O100000000000N1O2N2N2N2N2N2N2M3N2N2N2N2N2N2N2N2NR\\\\7"}, "image_id": 331, "id": 4838}, {"iscrowd": 0, "category_id": 1, "bbox": [238.0, 112.0, 68.0, 62.0], "area": 2059, "segmentation": {"size": [512, 512], "counts": "dTg31n?2N2N2N2N2N2N2N2N2N2N2N2N2N2GUOZAm0d>UOZAm0d>UOZAn0c>9N2N2N2N2N2O1N000000000002N2N2N2N1O001O2N2N2N2N2N3M2N2N2N2N2O1N00000000000000001O2N2N2N2N2N2N2N2N3MokV3"}, "image_id": 331, "id": 4839}, {"iscrowd": 0, "category_id": 1, "bbox": [151.0, 132.0, 9.0, 8.0], "area": 41, "segmentation": {"size": [512, 512], "counts": "Wd[22m?2N2N20O1O1N1O2Nik_5"}, "image_id": 331, "id": 4840}, {"iscrowd": 0, "category_id": 1, "bbox": [141.0, 135.0, 12.0, 12.0], "area": 81, "segmentation": {"size": [512, 512], "counts": "[dV22m?2N2N2O1N2000O1N2N2N2Nd[c5"}, "image_id": 331, "id": 4841}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 137.0, 14.0, 27.0], "area": 198, "segmentation": {"size": [512, 512], "counts": "Y4k0V?O1M3N2N2N2N2N2N2N2N2N2N1O\\kh7"}, "image_id": 331, "id": 4842}, {"iscrowd": 0, "category_id": 1, "bbox": [466.0, 139.0, 46.0, 55.0], "area": 1398, "segmentation": {"size": [512, 512], "counts": "\\UY72n?1N2N3M2N2N2N2N2H_On@c0Q?^Om@d0Q?^Om@d0Q?7N3M2N2N2N2N2N1O01O000001O000000000002N2N2N2N2O2M2N2N2N2N2N2N2N2N3MUK"}, "image_id": 331, "id": 4843}, {"iscrowd": 0, "category_id": 1, "bbox": [33.0, 141.0, 59.0, 52.0], "area": 1508, "segmentation": {"size": [512, 512], "counts": "Qe`02m?2N2N2N1O2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2O10000001O0000000000000O1N2N2N2N2N2N2N1O000001O000001O2N2N2N2N2N2N2N2N3M2N2Nkja6"}, "image_id": 331, "id": 4844}, {"iscrowd": 0, "category_id": 1, "bbox": [510.0, 149.0, 2.0, 3.0], "area": 4, "segmentation": {"size": [512, 512], "counts": "fTo71n?2ZK"}, "image_id": 331, "id": 4845}, {"iscrowd": 0, "category_id": 1, "bbox": [273.0, 158.0, 62.0, 60.0], "area": 1874, "segmentation": {"size": [512, 512], "counts": "deX41n?2N2N2N2N2N2N3M2N2N2N2N2O1WAVOY>l0eAVOY>l0eAWOX>k0fAWOX>k0fAWOX>k0fAWOX>k0fAWOX>Z1O10N2N2N2N2N2O1N3M000000000000000000001O00002O1N2N3M2N2N2N2N2N2N2N2N2N2N2N2O1N3M2N^Zh2"}, "image_id": 331, "id": 4846}, {"iscrowd": 0, "category_id": 1, "bbox": [193.0, 164.0, 45.0, 45.0], "area": 876, "segmentation": {"size": [512, 512], "counts": "]eP31n?2N2N3N1N2N2N2N2N2N3O000001O0001N1N2N2O1N20001O01O00000001O00N3M2N2N2O1N2N3M2N2N2N2N2O1NoiX4"}, "image_id": 331, "id": 4847}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 173.0, 52.0, 58.0], "area": 1559, "segmentation": {"size": [512, 512], "counts": "j5?_?3N2N2N2N2N2N2N2N2N2N2N2O100000000000000000O10O1000000000000000O1N2N2N2N2N2N2N2N2N2N2N2N2M2O2N2N2N2N2N`iU7"}, "image_id": 331, "id": 4848}, {"iscrowd": 0, "category_id": 1, "bbox": 
[240.0, 185.0, 33.0, 32.0], "area": 552, "segmentation": {"size": [512, 512], "counts": "YVh31n?2N2N2N2N2N2N3M2N2N2O1N2N2N2N1O0001O2N2N2O1N2N2N2N2N2N2N3M2N2N2NiYg3"}, "image_id": 331, "id": 4849}, {"iscrowd": 0, "category_id": 1, "bbox": [309.0, 198.0, 61.0, 57.0], "area": 1649, "segmentation": {"size": [512, 512], "counts": "Tgj41n?2N2N2W@Kd?:N2N2N2N2N2O2M200000000N2N2N2N2N2N2N2N1O00000001O00000000000000000001O0001O2N2N3M2N2N2N2N2N2N2N2N2N2O1N2N2N2N2N3MViV2"}, "image_id": 331, "id": 4850}, {"iscrowd": 0, "category_id": 1, "bbox": [234.0, 209.0, 17.0, 17.0], "area": 149, "segmentation": {"size": [512, 512], "counts": "jVe31n?2N2N2N3N1N2N1O000002N2O2M2N2N2NYYR4"}, "image_id": 331, "id": 4851}, {"iscrowd": 0, "category_id": 1, "bbox": [176.0, 210.0, 61.0, 54.0], "area": 1519, "segmentation": {"size": [512, 512], "counts": "RWh22m?2N3M2N2N2N2O1N2N2N2N2N3M2N2N2N2N2O1000001O000000000001O000001O00N2N2N2N2N3M2N000000010O00000000002N2N2N3M2O1N2N2N2N2N2NeXY4"}, "image_id": 331, "id": 4852}, {"iscrowd": 0, "category_id": 1, "bbox": [505.0, 211.0, 7.0, 15.0], "area": 60, "segmentation": {"size": [512, 512], "counts": "ifl72m?2N2N3M2N2N2\\I"}, "image_id": 331, "id": 4853}, {"iscrowd": 0, "category_id": 1, "bbox": [275.0, 229.0, 34.0, 38.0], "area": 623, "segmentation": {"size": [512, 512], "counts": "bgY42m?2O1N2N2N3M2N2N2N2N2N2N2N2O1N30O00000000000O1XOn@`0S?^Oo@`0S?^Oo@`0S?_On@?[?N3M2N2N2N2N2NQXU3"}, "image_id": 331, "id": 4854}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 231.0, 18.0, 35.0], "area": 329, "segmentation": {"size": [512, 512], "counts": "W7S1n>N2N2N2N2N2N2N2N2N2N2N1O2N2N2N2N2M[hf7"}, "image_id": 331, "id": 4855}, {"iscrowd": 0, "category_id": 1, "bbox": [343.0, 239.0, 62.0, 57.0], "area": 1656, "segmentation": {"size": [512, 512], "counts": "Zh[51n?2N2Y@L_?6_@L_?l0[AVOc>m0ZAUOd>V1M2N2N2N10O2N2N2N2001O01O000000000000000N2N3M2N2N2O1N2N2N2N2N2N2N3M2N2N2N2O1N2N2NXWm4"}, "image_id": 331, "id": 4857}, {"iscrowd": 0, "category_id": 1, "bbox": [238.0, 248.0, 39.0, 40.0], "area": 779, "segmentation": {"size": [512, 512], "counts": "WXg31n?2N2N2N2O2M2N2N2N2N2N2N2O2M2N2N2N2000001O00O1N3M2N2N2N2N2O1N2N3M2N2N2N2N2O1NbWe3"}, "image_id": 331, "id": 4858}, {"iscrowd": 0, "category_id": 1, "bbox": [373.0, 268.0, 67.0, 57.0], "area": 1810, "segmentation": {"size": [512, 512], "counts": "eij52m?2N2N2N2N2N2JCf@`0W?6N2N2N00000000000001O1O200000O1N2N2N2N3M2N2N2N000000000001O00000000000001O000002N2N3M2N2N2N2N2N2N2O1N2N2N2N2N3M2N2N2NPgS1"}, "image_id": 331, "id": 4859}, {"iscrowd": 0, "category_id": 1, "bbox": [299.0, 277.0, 31.0, 31.0], "area": 516, "segmentation": {"size": [512, 512], "counts": "Sie42m?2N2N2N2N2N2N2N2N2N2O1N2N2N3M10O2N2O1N2N2N2N2N2N2N3M2N2N2N2Nmfj2"}, "image_id": 331, "id": 4860}, {"iscrowd": 0, "category_id": 1, "bbox": [271.0, 278.0, 39.0, 40.0], "area": 629, "segmentation": {"size": [512, 512], "counts": "ohW41o?1N2N2N2N3M2N2N2N2O1N2O10001O01O000000000000010O0000000N2N2N2N2N3M2O1N2N2N2N^fT3"}, "image_id": 331, "id": 4861}, {"iscrowd": 0, "category_id": 1, "bbox": [202.0, 280.0, 33.0, 33.0], "area": 540, "segmentation": {"size": [512, 512], "counts": "UYU31n?2N2N2N2N2N2N2N2N3M2N2N2N2O1000000000N2N2N2N2N2N3M2N2O1N2N2N2N2NfVZ4"}, "image_id": 331, "id": 4862}, {"iscrowd": 0, "category_id": 1, "bbox": [102.0, 285.0, 56.0, 56.0], "area": 1589, "segmentation": {"size": [512, 512], "counts": "^Yc11n?2O1N3M2N2N2N2N2N2N2N2N2N2O2O000000000000000N2N2N2N3M2N2N2O1N2N2N2N2N2N10O2O1N2[OdAC^>;bAC`>;bAC`>;bAC`>;bAC`>;d0N2N2N2O1N2Naf`5"}, "image_id": 331, "id": 4863}, {"iscrowd": 0, "category_id": 1, 
"bbox": [235.0, 293.0, 14.0, 14.0], "area": 100, "segmentation": {"size": [512, 512], "counts": "Zie31n?2N2N2N2N3O0000O1O1N2N3M2NcVS4"}, "image_id": 331, "id": 4864}, {"iscrowd": 0, "category_id": 1, "bbox": [232.0, 303.0, 10.0, 10.0], "area": 54, "segmentation": {"size": [512, 512], "counts": "cYd31n?2N2N2N2000O1N2N2N]fV4"}, "image_id": 331, "id": 4865}, {"iscrowd": 0, "category_id": 1, "bbox": [335.0, 316.0, 32.0, 32.0], "area": 529, "segmentation": {"size": [512, 512], "counts": "[jW51n?2N2N2N2N2N2N2N3M2N2O1N2N2N2N2N11N2N2N2N2N3M2N2O1N2N2N2N2N2N2NeUX2"}, "image_id": 331, "id": 4866}, {"iscrowd": 0, "category_id": 1, "bbox": [284.0, 320.0, 52.0, 63.0], "area": 1528, "segmentation": {"size": [512, 512], "counts": "kZ^41n?2N2N2N2N2N2N3M2N2N2N2N2N2N2N2O1N2N2N2N2N2N2N3M2N2N2N2N200000hNjAe0V>ZOkAf0U>XOmAh0S>VOPBg0R>WOPBg0R>WOPBg0R>WOPBg0S>VOoAh0S>VOoAh0f>N2N2N2N2N2N2N2N2O2M2N2Nedg2"}, "image_id": 331, "id": 4867}, {"iscrowd": 0, "category_id": 1, "bbox": [328.0, 321.0, 9.0, 9.0], "area": 44, "segmentation": {"size": [512, 512], "counts": "TZT52m?2N2N2000O1N2N2NkUg2"}, "image_id": 331, "id": 4868}, {"iscrowd": 0, "category_id": 1, "bbox": [66.0, 324.0, 55.0, 52.0], "area": 1330, "segmentation": {"size": [512, 512], "counts": "eZQ11m?3N2N2N2N2N2N2N2N2N2N2N2N2N2N2N1100000000000000000000000000000O1N2O1N2OO2N2N2N2N2N2N2N2N2N2N2N1O2N2N2N2N2M3NoTS6"}, "image_id": 331, "id": 4869}, {"iscrowd": 0, "category_id": 1, "bbox": [176.0, 328.0, 29.0, 28.0], "area": 432, "segmentation": {"size": [512, 512], "counts": "eZh21n?2N2N2N2N2N2N2N2N2N2N2N2N2O10O1N2O1N2N2N2N2N2N2N2N2N2N2N[Ui4"}, "image_id": 331, "id": 4870}, {"iscrowd": 0, "category_id": 1, "bbox": [254.0, 329.0, 8.0, 7.0], "area": 35, "segmentation": {"size": [512, 512], "counts": "[Zo32m?2N2O1O01N2N2Neel3"}, "image_id": 331, "id": 4871}, {"iscrowd": 0, "category_id": 1, "bbox": [213.0, 341.0, 19.0, 19.0], "area": 191, "segmentation": {"size": [512, 512], "counts": "ljZ31n?2N2N2N2O1N3M2N2O1000O2M2N2N2N2N2O1NRe[4"}, "image_id": 331, "id": 4872}, {"iscrowd": 0, "category_id": 1, "bbox": [125.0, 353.0, 33.0, 33.0], "area": 556, "segmentation": {"size": [512, 512], "counts": "_kn11n?2O1N2N2N2N3M2N2N2N2N2N2O1N2N2N3OO1N2N2N2N2N2N2N2O1N3M2N2N2N2N2N_d`5"}, "image_id": 331, "id": 4873}, {"iscrowd": 0, "category_id": 1, "bbox": [242.0, 354.0, 56.0, 61.0], "area": 1725, "segmentation": {"size": [512, 512], "counts": "\\[i31;0W?2g@0W?2g@0W?2g@0W?2g@0X?1f@1X?=N2N2N2N2N200000001O01OO1N2N2N2N2O1N2N2O100000001O0000000001O0O1hNbAk0`>SObAk0`>SObAk0`>SObAk0k>N2N2N2N2N2N2N2O1N2N2N3M2N2NdcZ3"}, "image_id": 331, "id": 4874}, {"iscrowd": 0, "category_id": 1, "bbox": [203.0, 358.0, 31.0, 30.0], "area": 451, "segmentation": {"size": [512, 512], "counts": "akU31n?2O1N2N2N2N2N2N2N2N2N2N2O2O00000000O1N2N3M2N2N2N2N2N2O1N2N2NYdZ4"}, "image_id": 331, "id": 4875}, {"iscrowd": 0, "category_id": 1, "bbox": [29.0, 360.0, 64.0, 58.0], "area": 1845, "segmentation": {"size": [512, 512], "counts": "nk>1n?2N2N2M2O2N2M3N1O2M3N2N1N3N2N2N1N30000TAoNh>U10O0100000O0O2N2000O0100000O01000000O01000000O01M1002N1O2N200O0100N2N2^OUAKm>4UAJm>4UAJm>3UALm>2UAKm>4UAJm>3VAKl>3b0NnSa6"}, "image_id": 331, "id": 4876}, {"iscrowd": 0, "category_id": 1, "bbox": [367.0, 362.0, 9.0, 8.0], "area": 43, "segmentation": {"size": [512, 512], "counts": "\\kg52m?2N2O1O10O1N2N2NcdS2"}, "image_id": 331, "id": 4877}, {"iscrowd": 0, "category_id": 1, "bbox": [448.0, 362.0, 64.0, 58.0], "area": 1734, "segmentation": {"size": [512, 512], "counts": 
"W\\P72m?2N2N3N1N2N2N2N3N1N2N2N2N3M2O1000010O0000001M00001O01O00000001O01O00000001O01O000000010O00000001O01O00002N2O1N3M2N2N2N2O2M2N2NZD"}, "image_id": 331, "id": 4878}, {"iscrowd": 0, "category_id": 1, "bbox": [159.0, 388.0, 35.0, 35.0], "area": 636, "segmentation": {"size": [512, 512], "counts": "_l_21n?2N2N2X@Jc?;O1N2N3M2N2N2N2N2O101O01OO1N20O1O1N2N3M2N2N2N2N2O1N3M2N2N2N2NXcn4"}, "image_id": 331, "id": 4879}, {"iscrowd": 0, "category_id": 1, "bbox": [418.0, 394.0, 30.0, 29.0], "area": 437, "segmentation": {"size": [512, 512], "counts": "g\\a61n?2N2O1N2N2N2N2N3M2N2N2O1N2N2N02N2O1N2N2N2N2N2N3M2N2N2O1N2NXco0"}, "image_id": 331, "id": 4880}, {"iscrowd": 0, "category_id": 1, "bbox": [207.0, 396.0, 51.0, 55.0], "area": 1437, "segmentation": {"size": [512, 512], "counts": "QmW31n?2N2N2N2N2N3M2O1N2N2N2N2N3M2N2O1N2N2N2N3M2N2N2O1O10001O0000N2N3M2O1N2N2N2N2N3M2N2YOl@`0U?_Ol@?V?_Om@>\\?N2N2N2N2N2O2Mbbn3"}, "image_id": 331, "id": 4881}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 406.0, 57.0, 51.0], "area": 1713, "segmentation": {"size": [512, 512], "counts": "X=8f?3M2N3M2N3M2O2M2N3N1010O0100O010O010O010O0100O010O010O010O0100O0N3N1N3M2010O010O010O010O0100ON3N1N3[OVA0l>NVA0m>NUAOn>NTA0n>NUAOn>NUAOm>O_SS7"}, "image_id": 331, "id": 4882}, {"iscrowd": 0, "category_id": 1, "bbox": [113.0, 406.0, 31.0, 31.0], "area": 506, "segmentation": {"size": [512, 512], "counts": "Tmh12m?2N2N2N2N2N2M3N2N2N2N2N2N200000O1O1N2N2N2N2N2N1O2N2N2N2N2N2Nkbg5"}, "image_id": 331, "id": 4883}, {"iscrowd": 0, "category_id": 1, "bbox": [492.0, 414.0, 20.0, 42.0], "area": 472, "segmentation": {"size": [512, 512], "counts": "_]f72n?1N3M2N2N2N2N2c@AV?a0h@AV?f0N2N3M2N2N2O1N2N00002QC"}, "image_id": 331, "id": 4884}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 427.0, 63.0, 73.0], "area": 2130, "segmentation": {"size": [512, 512], "counts": "P^R62m?2O2M2N2N2N2N2N2N2N2N2N3M2O1N2N2N2N2N2N2N2N2N3N100000000001O000001O000000000001O00000001O00O1N2N2N2O2SOZA;h>CZA;h>CZA;h>CZA;h>DYA:i>DYA:i>DYA:X?N2N2N2N3NlPn0"}, "image_id": 331, "id": 4885}, {"iscrowd": 0, "category_id": 1, "bbox": [453.0, 429.0, 32.0, 30.0], "area": 508, "segmentation": {"size": [512, 512], "counts": "omR72m?2N2N2N2N2N2N2N2N2N2N0000001O2N2N0001O2N2N2N2N2N2N2N2N2N2N2N3MWR="}, "image_id": 331, "id": 4886}, {"iscrowd": 0, "category_id": 1, "bbox": [163.0, 437.0, 60.0, 55.0], "area": 1567, "segmentation": {"size": [512, 512], "counts": "\\na21n?2M2O2M3N1O2M3N2M2O2N2M2O2M3N2N1O200O0100000O01000O10O1000O01000O10O1000O01000O10O1000ON3N2M2O2N2M3N1O2M3N2M2O2N2M2O2M_Q`4"}, "image_id": 331, "id": 4887}, {"iscrowd": 0, "category_id": 1, "bbox": [84.0, 441.0, 42.0, 36.0], "area": 772, "segmentation": {"size": [512, 512], "counts": "c^Z11m?3N1IM`@5]?Ma@6]?8N1N3N2N2M3N1O2M2OO101O2M2O200O10OO2M3N2N1N3N2N0O01000O0100011O1M3N1O2M3NSbP6"}, "image_id": 331, "id": 4888}, {"iscrowd": 0, "category_id": 1, "bbox": [119.0, 443.0, 34.0, 32.0], "area": 558, "segmentation": {"size": [512, 512], "counts": "[nk11n?2M3N1N3N2N1N3N2M2O2N2N2O01000O0100000O0100N1N3N2M3N1N3N2N1N3N2M2OhQc5"}, "image_id": 331, "id": 4889}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 463.0, 38.0, 38.0], "area": 1008, "segmentation": {"size": [512, 512], "counts": "b>i0W?1O01O01O01O01M2M4O01O01O01O01O01O01O010O00010O01O01O01O0M3M4M2M3M4L3M4M2MZa\\7"}, "image_id": 331, "id": 4890}, {"iscrowd": 0, "category_id": 1, "bbox": [345.0, 467.0, 70.0, 45.0], "area": 1826, "segmentation": {"size": [512, 512], "counts": 
"do\\52m?2N2N2N2N2N2N2N2N2O1N1O1O1O11O1O1O1OO1O1O1O1O1O1O1O1O1O1LROUAo0j>4O1O1O1O1O1O1O1O1O1001O1O1O1O1O1O1O1O1O1O1O1O1O2N1O1O1N2N2N2N2N2N2N2N2N2N2N2N2N`P`1"}, "image_id": 331, "id": 4891}, {"iscrowd": 0, "category_id": 1, "bbox": [490.0, 472.0, 22.0, 30.0], "area": 382, "segmentation": {"size": [512, 512], "counts": "X_e72n?1N2N2N2N2N2N3M2N2N2N2N10O00000000002O1N2NVA"}, "image_id": 331, "id": 4892}, {"iscrowd": 0, "category_id": 1, "bbox": [64.0, 476.0, 29.0, 29.0], "area": 475, "segmentation": {"size": [512, 512], "counts": "]_P12l?2O2M2N3M2N3M3M2O2N110O010O010O0100O010N1N3M2N3M2N3N1N3MlPa6"}, "image_id": 331, "id": 4893}, {"iscrowd": 0, "category_id": 1, "bbox": [133.0, 478.0, 56.0, 34.0], "area": 920, "segmentation": {"size": [512, 512], "counts": "noR21n?2M2N2N2O1N2N2N2O1N2N2N2N2O1N2N2N2O1O1001O001O001O1O001O001^On@4R?JPA6Q?GRA8n>GTA8m>EUA;Y?O001O001O1O001O001O001O1O001O001O001O001OQPQ5"}, "image_id": 331, "id": 4894}, {"iscrowd": 0, "category_id": 1, "bbox": [101.0, 479.0, 9.0, 9.0], "area": 45, "segmentation": {"size": [512, 512], "counts": "Tob12l?2O2M2010N1N3N1NQaX6"}, "image_id": 331, "id": 4895}, {"iscrowd": 0, "category_id": 1, "bbox": [38.0, 490.0, 24.0, 22.0], "area": 327, "segmentation": {"size": [512, 512], "counts": "h_c01l?3N3L3M3N3N101O01O010O00010O01O01O010M2N2M4L3Nb`P7"}, "image_id": 331, "id": 4896}, {"iscrowd": 0, "category_id": 1, "bbox": [89.0, 493.0, 35.0, 19.0], "area": 361, "segmentation": {"size": [512, 512], "counts": "eo\\1181\\?1b@0\\?2b@0\\?3a@O]?;O11O1O001O001O001O1O001O001O001O1O001O001O001O1O001O001O001OR`Q6"}, "image_id": 331, "id": 4897}, {"iscrowd": 0, "category_id": 1, "bbox": [468.0, 499.0, 25.0, 13.0], "area": 175, "segmentation": {"size": [512, 512], "counts": "o_Z71n?1O1O1O100O1O1O1O1O100O1O1O11O1O1O1O1O2N1O1O1O1NSP9"}, "image_id": 331, "id": 4898}, {"iscrowd": 0, "category_id": 1, "bbox": [496.0, 505.0, 12.0, 7.0], "area": 47, "segmentation": {"size": [512, 512], "counts": "o_h71n?1O1O1O1O1O11O1O1O1O2NQ`1"}, "image_id": 331, "id": 4899}, {"iscrowd": 0, "category_id": 1, "bbox": [249.0, 0.0, 263.0, 154.0], "area": 23407, "segmentation": {"size": [512, 512], "counts": "P`l31o?1O1O001O1O1O1O001O1O1O001O1O1PBDZ<=dCE[<;dCG[<:bCI]<8aCI_<8_CJ`<7^CKa<5]CNb<3\\COc<2[COe<2XC1g<0WC0j<0UC0l<1RCOo<2nBNT=3jBMW=4gBLZ=4eBK]=6`BKa=6]BJd=7ZBIg=7XBHj=9SBHn=9PBGQ>:mAFT>;jADX>Q11O1O1N2O1O1O1N2O1O1N21O1O1O1O001O1O1O001O1O1O1O001O1SB[N]=f1aB[N_=f1_B\\N`=d1^B_Na=b1]B`Nb=a1\\BaNc=`1[BaNe=l11N2O1O1O1N_BPNV=o1iBTNV=m1hBUNW=l1gBUNY=l1dBWN[=j1cBXN\\=h1cBZN\\=V2O001O1O1O00N2O1O1O1N2O100001O1O1O1O001O1O1O1O001O1O1O001O1O1O1O001O1O1O1O001O1O1O1O001O1O1O1O001O1O1O001O1O1O1O001O1O1O1O001O1O1O1O001O1O1O1O001O1O1O001O1O1O1O001O1O1O1O001O1O1O1O001O1O1O1O001O1O1O001O1O1O1O001O1O1ON2O1O1O1N2O1O1O1N2O1O1O1N2O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1N2O1O1O1"}, "image_id": 335, "id": 4900}, {"iscrowd": 0, "category_id": 1, "bbox": [306.0, 79.0, 29.0, 28.0], "area": 457, "segmentation": {"size": [512, 512], "counts": "nRi42l?2O2N2N2M3N1O2N2M3N2N1O2O01N20O01N200000O01M3Bc@6_?Hc@6_?Hc@6d?N3N2NP]h2"}, "image_id": 335, "id": 4901}, {"iscrowd": 0, "category_id": 1, "bbox": [25.0, 0.0, 3.0, 2.0], "area": 5, "segmentation": {"size": [512, 512], "counts": "P`<2n?00OQPb7"}, "image_id": 336, "id": 4902}, {"iscrowd": 0, "category_id": 1, "bbox": [72.0, 0.0, 35.0, 22.0], "area": 525, "segmentation": {"size": [512, 512], "counts": 
"\\PT11n?2DNh@3W?Ng@4X?Mf@5Y?Le@7Y?Kd@7[?8O1O00O1O1O1O1O1O100O1O11O1O1O1O2N1OO1O1O1O1O1O1Ea@4`?Kb@3_?Lc@2^?Md@1]oZ6"}, "image_id": 336, "id": 4903}, {"iscrowd": 0, "category_id": 1, "bbox": [320.0, 0.0, 69.0, 46.0], "area": 1535, "segmentation": {"size": [512, 512], "counts": "hPP51n?2N2N2N2N2N2N2N2N2N2N3M2O1N2N2N2N2N2N2N2N0000000001O1O1O1O1O1O100O1O1O1O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1OQ`m1"}, "image_id": 336, "id": 4904}, {"iscrowd": 0, "category_id": 1, "bbox": [381.0, 0.0, 74.0, 56.0], "area": 2110, "segmentation": {"size": [512, 512], "counts": "Zan52n?1IN]@4b?M\\@5b?6N1O0000002N2N2N2N2N3M2N2N2N20O1N2N1O0010O000000000FTOaAl0_>VO_Aj0a>XO]Ai0b>YO\\Ai0b>YO\\Ai0b>=M2N2O1OO1O1O1O1O100O1O1O1O1O1O1O1O2N2N201O0O1N2N2N2N2N2N2N2N2N2N2N2N2N3M2N2OY_l0"}, "image_id": 336, "id": 4905}, {"iscrowd": 0, "category_id": 1, "bbox": [467.0, 0.0, 11.0, 6.0], "area": 39, "segmentation": {"size": [512, 512], "counts": "P`Y72n?1O1O1O1OO1O100O1O1OQPa0"}, "image_id": 336, "id": 4906}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 4.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "T`o72j?"}, "image_id": 336, "id": 4907}, {"iscrowd": 0, "category_id": 1, "bbox": [107.0, 9.0, 46.0, 53.0], "area": 1201, "segmentation": {"size": [512, 512], "counts": "l`e11n?2N2N2N2N2O1N3M2N2N2N2N2N2O2M2N2N2N2N2N2N3N1O10000000001O01M2N5K2N2HSAZOo>d0SA[On>c0TA[On>d08N3M2N2B`@9e?N2N3M2Oh^c5"}, "image_id": 336, "id": 4908}, {"iscrowd": 0, "category_id": 1, "bbox": [497.0, 15.0, 15.0, 30.0], "area": 245, "segmentation": {"size": [512, 512], "counts": "k`h72m?3M2O1N2N2N2N2N3M2N2O1N2N2N2@"}, "image_id": 336, "id": 4909}, {"iscrowd": 0, "category_id": 1, "bbox": [374.0, 22.0, 8.0, 8.0], "area": 32, "segmentation": {"size": [512, 512], "counts": "jPk51n?2N2N2N02N2N2NXoP2"}, "image_id": 336, "id": 4910}, {"iscrowd": 0, "category_id": 1, "bbox": [44.0, 26.0, 37.0, 36.0], "area": 629, "segmentation": {"size": [512, 512], "counts": "_Qf01n?2N3N1N2N2N2N2N2N3M2O1N2N00000000000001O01O000001O2N2N2N2O1N3M2N2N2N2N2Nk^g6"}, "image_id": 336, "id": 4911}, {"iscrowd": 0, "category_id": 1, "bbox": [472.0, 41.0, 28.0, 27.0], "area": 379, "segmentation": {"size": [512, 512], "counts": "eQ\\71o?1N2N2N2N2N2N2N2N2N2N2N2N2O10N2N2N2N2N2N2N2N2N2N2N2N2N[n5"}, "image_id": 336, "id": 4912}, {"iscrowd": 0, "category_id": 1, "bbox": [346.0, 44.0, 55.0, 69.0], "area": 1796, "segmentation": {"size": [512, 512], "counts": "oR]51n?2N2N2N2N2]OG\\A;b>G\\A;b>G\\A;b>G\\A;b>G\\A;b>H[A:c>H\\A:a>H]A:a>d0N2N2N1K^NkAb1U>400000000000000001O00000IjAeNX>Y1jAeNX>Y1jAeNX>Y19N2N2N2N3M2N2N2N2N2N2O1N2N2N2N2N2N2N2N2Nm]g1"}, "image_id": 336, "id": 4913}, {"iscrowd": 0, "category_id": 1, "bbox": [438.0, 52.0, 56.0, 62.0], "area": 1635, "segmentation": {"size": [512, 512], "counts": "WRk61n?2N3M2N2N2N2N2N2N2VA]OT>e0jA^OT>c0kA^OS>d0kA^OS>e0jA]OT>e0jA]OT>e0jA]OT>e0jA]OT>e0jA]OT>e0jA]OT>Y1N2OO2N2N2N3M2N2N2O1N2N3M2N2N2N2N2N3M2N2N10O00000002N2N2N2N3M2N2O1N2N2N2N`m8"}, "image_id": 336, "id": 4914}, {"iscrowd": 0, "category_id": 1, "bbox": [2.0, 57.0, 19.0, 18.0], "area": 176, "segmentation": {"size": [512, 512], "counts": "RR12m?2N2O2M2N2N2N10O01O0002N3M2N2N2O1N2NP^e7"}, "image_id": 336, "id": 4915}, {"iscrowd": 0, "category_id": 1, "bbox": [141.0, 57.0, 65.0, 63.0], "area": 2055, "segmentation": {"size": [512, 512], "counts": "jbV21n?2N2O1N2N2N2N2N3M2N2N2N2O1N2N2N3M2N2N2N2N2O1N2N2N3M2N1O00000010O00000000001O2N2O1N2N2N2N3M2N2N2N2N2O1N2N2N3M2N2N2N2N2O1N2N2N3M2N[mh4"}, "image_id": 336, "id": 4916}, {"iscrowd": 0, "category_id": 1, "bbox": 
[14.0, 69.0, 22.0, 22.0], "area": 247, "segmentation": {"size": [512, 512], "counts": "`R72m?2O1N2N2N2N2N2N2N2N000002N2N2O1N2N2N2N2N2Ncm]7"}, "image_id": 336, "id": 4917}, {"iscrowd": 0, "category_id": 1, "bbox": [75.0, 73.0, 40.0, 40.0], "area": 775, "segmentation": {"size": [512, 512], "counts": "PcU11n?2N2N2N2N2N3M2N2N2N2N2N2N2O1N2N000000000000001O02N2N2N2N2N3M2N2N2N2N2N2N2N2N2NX]V6"}, "image_id": 336, "id": 4918}, {"iscrowd": 0, "category_id": 1, "bbox": [98.0, 83.0, 44.0, 36.0], "area": 831, "segmentation": {"size": [512, 512], "counts": "\\Sa12m?2N2N2N2N2N2N2N2N2N1O00000000000000001O2N2N2N2N2N2O1001M2N2N2N2N2N2N2N2N2N3M2N2N2N2N2Nklh5"}, "image_id": 336, "id": 4919}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 88.0, 30.0, 44.0], "area": 686, "segmentation": {"size": [512, 512], "counts": "V3n0Q?0000000000010O0000000000000003M2O1N2N2N2N2N2N2N3M2N2N2O1Nil`7"}, "image_id": 336, "id": 4920}, {"iscrowd": 0, "category_id": 1, "bbox": [393.0, 95.0, 54.0, 67.0], "area": 1703, "segmentation": {"size": [512, 512], "counts": "bdT61n?2N2N2O1\\OI\\A9b>I\\A9b>I\\A:a>H]A:a>H]A:a>H]A:a>I\\A9b>I\\A9b>d0O1N1O0JbNkA^1U>dNiA\\1X>eNfA[1Z>501O00000000000001O0JhAeNX>[1jAcNX>[18N2N2N2N2N2O1N2N2N3M2N2N2N2N2N2N2O1N2N3M2NZ\\P1"}, "image_id": 336, "id": 4921}, {"iscrowd": 0, "category_id": 1, "bbox": [504.0, 109.0, 8.0, 9.0], "area": 42, "segmentation": {"size": [512, 512], "counts": "_Sl71o?1N2N3N1000O1NaL"}, "image_id": 336, "id": 4922}, {"iscrowd": 0, "category_id": 1, "bbox": [172.0, 112.0, 62.0, 49.0], "area": 1478, "segmentation": {"size": [512, 512], "counts": "WTf21n?2N2N2N2N2N2N2N2N2N0001O01O3N100000000000000O1N2N2O1N3M2N2N2N1O000000000000010O000001O2N2N2N2N2N2N2N2N2O1N3M2N2N2N2N2N2N2NnkZ4"}, "image_id": 336, "id": 4923}, {"iscrowd": 0, "category_id": 1, "bbox": [57.0, 117.0, 20.0, 20.0], "area": 204, "segmentation": {"size": [512, 512], "counts": "Pdl02m?2N2N2N2N2O1N2N0000000001O2O1N2N3M2N2NU\\i6"}, "image_id": 336, "id": 4924}, {"iscrowd": 0, "category_id": 1, "bbox": [17.0, 127.0, 44.0, 44.0], "area": 705, "segmentation": {"size": [512, 512], "counts": "Pe81n?2N2N2N2N2N2N2N2N2N2N0000000000000000000000000001O0000000000000000002N3N1N2N2N2N2N2N2Nh[Q7"}, "image_id": 336, "id": 4925}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 142.0, 18.0, 19.0], "area": 202, "segmentation": {"size": [512, 512], "counts": "e44k?2N2N2N2N2N2N2O10O1N2N2O1N2N2N2N3MYkf7"}, "image_id": 336, "id": 4926}, {"iscrowd": 0, "category_id": 1, "bbox": [475.0, 146.0, 9.0, 10.0], "area": 58, "segmentation": {"size": [512, 512], "counts": "fd]72m?3M2N2N10O2O2M2N[k="}, "image_id": 336, "id": 4927}, {"iscrowd": 0, "category_id": 1, "bbox": [453.0, 151.0, 10.0, 8.0], "area": 44, "segmentation": {"size": [512, 512], "counts": "jdR71n?2O1N2N2O01N2N2O1NV[h0"}, "image_id": 336, "id": 4928}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 153.0, 31.0, 24.0], "area": 395, "segmentation": {"size": [512, 512], "counts": "Pe]12m?2N3M2N2N2N2O1N201O00000001N1N00010O00000000002N2O1N2N3M2N2NojR6"}, "image_id": 336, "id": 4929}, {"iscrowd": 0, "category_id": 1, "bbox": [451.0, 154.0, 56.0, 62.0], "area": 1636, "segmentation": {"size": [512, 512], "counts": "[eQ72m?2N2N2N2N2N2N2N2N2N2N3N1N2YAVOU>l0iAVOU>l0iAVOU>l0iAVOU>l0iAVOU>l0iAVOV>k0hAWOX>Y1000001O0001M2N2N2N2N2N2N2O1N2N3M2N2N2N2O1000000O1N3M2N2N2N2N2N2N2N2O1NPZ2"}, "image_id": 336, "id": 4930}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 157.0, 7.0, 13.0], "area": 49, "segmentation": {"size": [512, 512], "counts": "m4=d?N2N2N2N2N2NnZl7"}, "image_id": 336, "id": 4931}, {"iscrowd": 0, 
"category_id": 1, "bbox": [128.0, 157.0, 70.0, 64.0], "area": 2009, "segmentation": {"size": [512, 512], "counts": "XVP22m?2N2N2N2N2N2N2N2O2M2N2N2GXOWAj0g>XOWAj0g>XOWAj0g>9N2N2N00000001O0001O00000000000002N3M2N2O1N2N1O00000000001O01O00000000000001O2N2N2O1N2N2N3M2N2N2N2N2N2O1N2Ndjl4"}, "image_id": 336, "id": 4932}, {"iscrowd": 0, "category_id": 1, "bbox": [79.0, 165.0, 13.0, 13.0], "area": 87, "segmentation": {"size": [512, 512], "counts": "ZeW12m?2O1N2N2N2N20N2N2N2N2N2Nfja6"}, "image_id": 336, "id": 4933}, {"iscrowd": 0, "category_id": 1, "bbox": [212.0, 167.0, 73.0, 57.0], "area": 1774, "segmentation": {"size": [512, 512], "counts": "nUZ31o?1N3M2N2O2M2N2N2N3N1N2N3M2O1N2N30O00010O0001O01O00001M2N2N10O0000010O000001O01HXOWAh0j>YOTAg0l>601O0001O0001O01O0001O0001O00011KRAUOP?i05N3N1N2N3M20000N3M2N2O2M2NWZa3"}, "image_id": 336, "id": 4934}, {"iscrowd": 0, "category_id": 1, "bbox": [505.0, 172.0, 7.0, 13.0], "area": 50, "segmentation": {"size": [512, 512], "counts": "ael71n?3N1N2N2N2N2cJ"}, "image_id": 336, "id": 4935}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 173.0, 46.0, 60.0], "area": 1445, "segmentation": {"size": [512, 512], "counts": "c6`0o>C^A?`>C^A?`>C^A?`>C^A`0_>B_A`0_>B_A?`>C^A=b>`01O000001O0000000001O000001O000002N2N2N3M2O1N2N2N2N2N2N3M2N2O1N2N2N2N3M2N2NliX7"}, "image_id": 336, "id": 4936}, {"iscrowd": 0, "category_id": 1, "bbox": [53.0, 175.0, 37.0, 39.0], "area": 790, "segmentation": {"size": [512, 512], "counts": "Sfj01o?2M2N2N2N2N2N2N3N1N2N2N2N2N1O01O1001N2N2N2N2N1O0001O2N2@RAKQ?3PAKR?3PAKR?3PALQ?2QALQ?2QALQ?2QALQ?2a0MVjb6"}, "image_id": 336, "id": 4937}, {"iscrowd": 0, "category_id": 1, "bbox": [161.0, 190.0, 42.0, 42.0], "area": 831, "segmentation": {"size": [512, 512], "counts": "gf`22n?1N2N2N2N2N2N2N2N2N2N2N2N2N3M1O0001O0000000000000000002N2N2N2O2M2N2N2N2N2N2N2N2N2NdYj4"}, "image_id": 336, "id": 4938}, {"iscrowd": 0, "category_id": 1, "bbox": [319.0, 199.0, 24.0, 25.0], "area": 304, "segmentation": {"size": [512, 512], "counts": "afo41n?3M2O1N2N2N2N2N2N2N2N200000N2N3M2N2N2N2N2N2N2N]Yd2"}, "image_id": 336, "id": 4939}, {"iscrowd": 0, "category_id": 1, "bbox": [96.0, 204.0, 35.0, 38.0], "area": 667, "segmentation": {"size": [512, 512], "counts": "RW`11n?2N2N3M2N2N2N2N2O1N00003M2N2N2N2N2N000001O0000002O1Bn@KT?3n@KT?3n@KT?3n@KT?4n@IT?5=N2N2N[Yn5"}, "image_id": 336, "id": 4940}, {"iscrowd": 0, "category_id": 1, "bbox": [275.0, 209.0, 47.0, 46.0], "area": 1077, "segmentation": {"size": [512, 512], "counts": "YgY42m?2N2O1N2N2N3M2N2N2N2N2N2N2O1N2N3M2N2N000000001O01O00001O2N2N2N2N3M2O1N2N2N2N2N2N2N2N3M2N2O1Nlhn2"}, "image_id": 336, "id": 4941}, {"iscrowd": 0, "category_id": 1, "bbox": [23.0, 212.0, 54.0, 57.0], "area": 1566, "segmentation": {"size": [512, 512], "counts": "gg;1n?2N2N2N2N2N2N2N3F@o@b0o>@PAa0n>Bo@`0o>:N2N2O1N2N3M2N2N1O00000001O0001O000001O2N3M2N2N2O1N2N2N2N3M2N2N2N2N2O1N2N2N3M2N2N2NdXi6"}, "image_id": 336, "id": 4942}, {"iscrowd": 0, "category_id": 1, "bbox": [74.0, 213.0, 25.0, 24.0], "area": 280, "segmentation": {"size": [512, 512], "counts": "UWU12m?2N2N2N2N2N2N1O00001O0001O000000001O2N2N2N2N2N2NVY^6"}, "image_id": 336, "id": 4943}, {"iscrowd": 0, "category_id": 1, "bbox": [148.0, 223.0, 14.0, 15.0], "area": 100, "segmentation": {"size": [512, 512], "counts": "TWZ22m?2N2N2N2N21O00000N2N2N2N2Nih^5"}, "image_id": 336, "id": 4944}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 228.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "Tgo72j8"}, "image_id": 336, "id": 4945}, {"iscrowd": 0, "category_id": 1, "bbox": [136.0, 245.0, 31.0, 31.0], 
"area": 502, "segmentation": {"size": [512, 512], "counts": "SXT21n?2N2N2N2N2O1N2N2N3M2N2N2N2N2N2OO2O1N2N2N2N3M2N2N2N2N2O1N2N2NmW\\5"}, "image_id": 336, "id": 4946}, {"iscrowd": 0, "category_id": 1, "bbox": [314.0, 246.0, 53.0, 53.0], "area": 1345, "segmentation": {"size": [512, 512], "counts": "eXm41n?2N2N2N2N2N2N3M2N2N2N2N2N2N2O1N2N2N2N2N2N000000001O00000001O0000001O2N2KWAPOk>n05N2N3M2N2N2N2N2N2N2N2N2N2O1N2NhWX2"}, "image_id": 336, "id": 4947}, {"iscrowd": 0, "category_id": 1, "bbox": [273.0, 248.0, 17.0, 17.0], "area": 149, "segmentation": {"size": [512, 512], "counts": "ogX42m?2N2N2N2N2N2N2O11M2N2N2N2O1N2N2NPh^3"}, "image_id": 336, "id": 4948}, {"iscrowd": 0, "category_id": 1, "bbox": [56.0, 249.0, 58.0, 61.0], "area": 1790, "segmentation": {"size": [512, 512], "counts": "nXl01n?2N2N2N3M2O1N2N2FAPAa0n>APAa0n>APAa0o>9N2N2N2N3M2N2N2N2N2N0001O0001O000000000001O2N2N2N2N2N2O1N2N2N3M2N2N2N2N2N2N2N2O1N2N3M2N2N2N^gV6"}, "image_id": 336, "id": 4949}, {"iscrowd": 0, "category_id": 1, "bbox": [255.0, 256.0, 28.0, 28.0], "area": 408, "segmentation": {"size": [512, 512], "counts": "]ho32m?2N2N2N2N2N2N2N2N2N3M2N2N10O2N2N3M2O1N2N2N2N2N2N2N2N2NcWb3"}, "image_id": 336, "id": 4950}, {"iscrowd": 0, "category_id": 1, "bbox": [210.0, 262.0, 51.0, 51.0], "area": 1229, "segmentation": {"size": [512, 512], "counts": "UYY31n?2N2N2N2N2O1N3M2N2N2N2N2N2N2N2N2O2JnNYAR1g>30010O00000000000000000001O2O1N2N2N2N2N2N2N3M2N2N2O1N2N2N2N2N2NXWm3"}, "image_id": 336, "id": 4951}, {"iscrowd": 0, "category_id": 1, "bbox": [206.0, 274.0, 11.0, 10.0], "area": 60, "segmentation": {"size": [512, 512], "counts": "fXW32m?2O1N2N2N01O2O1N2N2N[Wc4"}, "image_id": 336, "id": 4952}, {"iscrowd": 0, "category_id": 1, "bbox": [170.0, 286.0, 30.0, 29.0], "area": 456, "segmentation": {"size": [512, 512], "counts": "\\Ye21n?2N2N2N2N2N2N2N3M2O1N2N2N2N1O01O2N2N2O1N2N2N2N3M2N2N2N2N2Odfk4"}, "image_id": 336, "id": 4953}, {"iscrowd": 0, "category_id": 1, "bbox": [88.0, 292.0, 53.0, 57.0], "area": 1572, "segmentation": {"size": [512, 512], "counts": "WZ\\11n?2N2O1N2N2N3M2N2N2N2N2O1N3M2N2N2N2N2N2N0N2N2M3N3M2001O1O2N1O1O101N1O1O1O2N1O1O1O2O0O1O2N1O1M3M4K4M3L4M4KiVi5"}, "image_id": 336, "id": 4954}, {"iscrowd": 0, "category_id": 1, "bbox": [301.0, 295.0, 30.0, 36.0], "area": 516, "segmentation": {"size": [512, 512], "counts": "Pjf42m?2N2N2N2N3BESAFSAFSAFSA;l>GRA9n><0000001O3M2N2N2O1N2N2N2N2N3M2N2N2N2OYVj2"}, "image_id": 336, "id": 4955}, {"iscrowd": 0, "category_id": 1, "bbox": [346.0, 297.0, 55.0, 55.0], "area": 1554, "segmentation": {"size": [512, 512], "counts": "nY]51n?3M2N2O2M2N2N3M2N3M2n@YOh>i0VAYOh>j0UAXOi>j0VAWOh>R1O1N2N2N2N2N3M000002N2N3M2O1N2N2N2N2N1O00000000011N2N2N2N2N3M2N2N2N2O1N2N2N2N2N3MQVg1"}, "image_id": 336, "id": 4956}, {"iscrowd": 0, "category_id": 1, "bbox": [275.0, 300.0, 14.0, 15.0], "area": 114, "segmentation": {"size": [512, 512], "counts": "`iY43l?2O1N2N2N20000000O1N2N2N3M\\V_3"}, "image_id": 336, "id": 4957}, {"iscrowd": 0, "category_id": 1, "bbox": [232.0, 307.0, 28.0, 29.0], "area": 438, "segmentation": {"size": [512, 512], "counts": "PZd3140e?2Y@0e?7N2N2N2O1N2N2N001O2N2N2N02N2N2N2M3N2N2N2N2N2N2N2N2NQfm3"}, "image_id": 336, "id": 4958}, {"iscrowd": 0, "category_id": 1, "bbox": [255.0, 309.0, 43.0, 51.0], "area": 1125, "segmentation": {"size": [512, 512], "counts": "djo32m?2N2N2N2AHSA:k>HSA:k>HTA9j>ITA9j>ITA9k>HSA:k>?N3M2N2N2N2N2N0002N2N3M2N2N2N2N2N2N2N2N2O1N2N2N3M2N2N2N2N2N2N2NceZ3"}, "image_id": 336, "id": 4959}, {"iscrowd": 0, "category_id": 1, "bbox": [127.0, 323.0, 55.0, 62.0], "area": 1730, "segmentation": {"size": 
[512, 512], "counts": "Zko12m?2N3M2N2N2N2N2H@m@b0Q?@n@a0P?An@a0P?:M2GoN`AS1^>oNaAR1]>POaAR1]>POaAR1]>:N2N2N1O0001O000001O02N2N2N2N3M2N2N2N2O1N2N2N2N2N3M2N2N2N2N2O1N2N2N3M2N2N2NPeT5"}, "image_id": 336, "id": 4960}, {"iscrowd": 0, "category_id": 1, "bbox": [205.0, 324.0, 32.0, 31.0], "area": 497, "segmentation": {"size": [512, 512], "counts": "cjV32m?2N2O1N2N2N2N2N2N3M2N2N2O1N00000001O2N2O1N3M2N2N2N2N2N2N2N2O1N_UY4"}, "image_id": 336, "id": 4961}, {"iscrowd": 0, "category_id": 1, "bbox": [292.0, 334.0, 37.0, 35.0], "area": 649, "segmentation": {"size": [512, 512], "counts": "R[b42m?2N2N2N2N2N2O2M2N2N2N2N2N10O000001O00000001O001O2O1N2N3M2N2N2N2O1N2N3M2NUUk2"}, "image_id": 336, "id": 4962}, {"iscrowd": 0, "category_id": 1, "bbox": [436.0, 334.0, 27.0, 28.0], "area": 384, "segmentation": {"size": [512, 512], "counts": "iZj62m?2N2N2O1N2N2N2N3M2N2N2N21O000O1N2N2N2N2N2N2N2N2N2N2NTUh0"}, "image_id": 336, "id": 4963}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 346.0, 57.0, 56.0], "area": 1500, "segmentation": {"size": [512, 512], "counts": "U[R62m?2N2N2N2N2O1SADS>>kADS>>kADS>?jADS>>kADS>>kADS>>lACS>>kADU>:gAHY>8eAJ[>6cAL]>l00O1N2O1N2N3M2N2N2N2N2N2N2N1O00000001O000001O000000001O2N2N2N2N2O1N2N2N2N2N3McTQ1"}, "image_id": 336, "id": 4964}, {"iscrowd": 0, "category_id": 1, "bbox": [163.0, 359.0, 54.0, 63.0], "area": 1602, "segmentation": {"size": [512, 512], "counts": "ala22m?2N3M2N2O1N2N2N2N2N2N2N2N3\\OTOoAn0o=TOoAn0o=TOPBm0o=TOoAn0o=TOoAm0P>UOnAk0R>WOlAi0T>YOjAh0U>a000000000002N3M2N2N2N2O1N2N2N2N2N2N3M2N2N2N2O1N2N2N2N2N3M2N2N2NmSc4"}, "image_id": 336, "id": 4965}, {"iscrowd": 0, "category_id": 1, "bbox": [227.0, 366.0, 35.0, 34.0], "area": 551, "segmentation": {"size": [512, 512], "counts": "Rla32m?2N2N2N2O1N2N2N2N3M2N2N0001O1O000000000000001O002O1N2N2N2N2N2N2N2N2NYdl3"}, "image_id": 336, "id": 4966}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 376.0, 5.0, 9.0], "area": 25, "segmentation": {"size": [512, 512], "counts": "h;9i?N1N2N2NTTm7"}, "image_id": 336, "id": 4967}, {"iscrowd": 0, "category_id": 1, "bbox": [435.0, 377.0, 54.0, 51.0], "area": 1384, "segmentation": {"size": [512, 512], "counts": "^li61n?2N2N2N2N3M2O1N2N2N2N2N2N2N2N2N2N3M2N2O1N2N2N2000000000O1N2N3M2N2N2N2N2N2OO0001O2N2N2N2N2N2N2N2N3M2O1N2N2N]S;"}, "image_id": 336, "id": 4968}, {"iscrowd": 0, "category_id": 1, "bbox": [315.0, 390.0, 75.0, 61.0], "area": 2297, "segmentation": {"size": [512, 512], "counts": "ilm42m?3M2N2O1N2N2N2N2N2N2N2N3M2N2N2O1N2N20000000001O000001O00000000O1N2N3M2N2N00000000000010O000000000LcAfN]>Z16N2N2N2N2N2N2N2N2N2OO0002N2N2N2N2N2N2N2N2N2O1N3MVcl1"}, "image_id": 336, "id": 4969}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 400.0, 38.0, 58.0], "area": 1245, "segmentation": {"size": [512, 512], "counts": "m<[1d>2N2N1O00000001O0000000000002O1N2N2N2N2N2N3M2N2N2N2N2N2N2O1N2N2N2N3M2N2N2Nhb\\7"}, "image_id": 336, "id": 4970}, {"iscrowd": 0, "category_id": 1, "bbox": [261.0, 406.0, 31.0, 30.0], "area": 416, "segmentation": {"size": [512, 512], "counts": "YmR42m?2N3N1N2N2N2N2N2N1O0001O000001O000000000002N2N3N1N2N2N2N2N2NQc]3"}, "image_id": 336, "id": 4971}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 409.0, 62.0, 55.0], "area": 1622, "segmentation": {"size": [512, 512], "counts": "gmo23l?2N2N2N2N2N2N2O1N2N3M2N2N2N2N2N2O10001M2N000000000001O0001O0000000000000001O2O1N2N3M2N2N2N00000000102M2N2N2N2N2N2N2N2N3N1NgRQ4"}, "image_id": 336, "id": 4972}, {"iscrowd": 0, "category_id": 1, "bbox": [424.0, 422.0, 34.0, 34.0], "area": 572, "segmentation": {"size": [512, 512], "counts": 
"h]d61n?2N2N2N2N2N2N2O1N2N2N2N2N3M2N0000000002N2N2N2O1N2N2N2N2N2N2N2N2N2N]bj0"}, "image_id": 336, "id": 4973}, {"iscrowd": 0, "category_id": 1, "bbox": [313.0, 433.0, 36.0, 33.0], "area": 631, "segmentation": {"size": [512, 512], "counts": "Pnl41n?2N2N2N3M2N2N2N2N2O1N2N2N2N2N2N20N2N2O10001N1N2N2N2O1N2N2N2N2N2N2N2N2NlQa2"}, "image_id": 336, "id": 4974}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 439.0, 50.0, 51.0], "area": 1463, "segmentation": {"size": [512, 512], "counts": "Y^W71n?2N2N2N2N2N2N2N2N2N2N3N1l@XOo>h0o@[OP?k000O1N2O1N2N2N2N2N2N3M001O02N2N2N2N2N2N2N2N2N2N3M01O0000000000000001O00XB"}, "image_id": 336, "id": 4975}, {"iscrowd": 0, "category_id": 1, "bbox": [375.0, 440.0, 56.0, 54.0], "area": 1394, "segmentation": {"size": [512, 512], "counts": "jnk53l?2N2N2N2N2N2N2N2O2M2N2N2N2N2N2N2N1O01O000001O00000000000001O000001O00000001O2N2N2N2N3M2O1N2N2N2N2N2N2N3M2N2O1NfQX1"}, "image_id": 336, "id": 4976}, {"iscrowd": 0, "category_id": 1, "bbox": [15.0, 442.0, 58.0, 54.0], "area": 1585, "segmentation": {"size": [512, 512], "counts": "en71o?1N3M2N2N2N2N2N2N2N2N2N2N2O1N3M2N2N2N2N2N2N2N2001M2N0000001O0000000001O0002N2N2N2N2N2N2N2N2N2N2O2M2N2N2N2N2N2N2N2NbQk6"}, "image_id": 336, "id": 4977}, {"iscrowd": 0, "category_id": 1, "bbox": [229.0, 443.0, 51.0, 59.0], "area": 1522, "segmentation": {"size": [512, 512], "counts": "Qob32m?2N2O1N2N2N2EDo@?n>CPA?n>CPA?n>CPA?o>:EQObAQ1]>POaAR1]>POaAR1]>POaAR1]>:N2N2N00001O0001O2N2N2N2N2N2N2N2N2N3N1N2N2N2N2N2N2N2N2N2N2N2N3N1N2N2NZac3"}, "image_id": 336, "id": 4978}, {"iscrowd": 0, "category_id": 1, "bbox": [279.0, 451.0, 18.0, 17.0], "area": 162, "segmentation": {"size": [512, 512], "counts": "\\n[43l?2N2N2N2N2O0O1O000002N2N2N2N2O1N2NgQ[3"}, "image_id": 336, "id": 4979}, {"iscrowd": 0, "category_id": 1, "bbox": [340.0, 474.0, 36.0, 37.0], "area": 659, "segmentation": {"size": [512, 512], "counts": "^_Z52m?2N2N2N3M2O1N2N2N2N2N2N3M2N01O000000000001O02N2N2N3M2N2N2O1N2N2N2N3M2Nh`S2"}, "image_id": 336, "id": 4980}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 476.0, 9.0, 17.0], "area": 78, "segmentation": {"size": [512, 512], "counts": "l>a0`?N2N2N2N3M2N2N2OkPk7"}, "image_id": 336, "id": 4981}, {"iscrowd": 0, "category_id": 1, "bbox": [57.0, 477.0, 55.0, 35.0], "area": 1281, "segmentation": {"size": [512, 512], "counts": "aol03l?2O1N2N2N2N3M2N2O1N2N2N1O1O1O100O1O1O1O1O1O100O11O1O1O1O1O1O00O1O1O1O1O1O11O1O1N3M2N2O1N2N2N3M2N2N2N2O1N2N3M``W6"}, "image_id": 336, "id": 4982}, {"iscrowd": 0, "category_id": 1, "bbox": [271.0, 480.0, 46.0, 32.0], "area": 869, "segmentation": {"size": [512, 512], "counts": "coW41n?2\\@NW?4g@MX?5f@LY?6f@JY?9d@H[??O1O1O100O1O1O1O1O1O1O1O1O1O11O1O2N1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O2N1O1O1N2N2NVPQ3"}, "image_id": 336, "id": 4983}, {"iscrowd": 0, "category_id": 1, "bbox": [442.0, 486.0, 40.0, 26.0], "area": 546, "segmentation": {"size": [512, 512], "counts": "o_m61n?1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1001O1N2N2N2N2N2N2N2N2N2N2N2N^`>"}, "image_id": 336, "id": 4984}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 491.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "[oo71d0"}, "image_id": 336, "id": 4985}, {"iscrowd": 0, "category_id": 1, "bbox": [27.0, 502.0, 19.0, 10.0], "area": 100, "segmentation": {"size": [512, 512], "counts": "oo=1n?1O1O1O1O1O1O1O1O11O1O1O1O1O1O1O1O1OQ`X7"}, "image_id": 336, "id": 4986}, {"iscrowd": 0, "category_id": 1, "bbox": [382.0, 502.0, 14.0, 10.0], "area": 86, "segmentation": {"size": [512, 512], "counts": "m_o52n?1N1O1O1O1O1O100O11N2O1N2NW`i1"}, "image_id": 336, "id": 4987}, 
{"iscrowd": 0, "category_id": 1, "bbox": [473.0, 504.0, 17.0, 8.0], "area": 77, "segmentation": {"size": [512, 512], "counts": "oo\\71n?1O1O1O100O1O1O1001O1O1O1O1O1O1OQ`:"}, "image_id": 336, "id": 4988}, {"iscrowd": 0, "category_id": 1, "bbox": [395.0, 509.0, 6.0, 3.0], "area": 12, "segmentation": {"size": [512, 512], "counts": "ooU61n?1O1001O1OQPg1"}, "image_id": 336, "id": 4989}, {"iscrowd": 0, "category_id": 1, "bbox": [326.0, 174.0, 60.0, 67.0], "area": 2170, "segmentation": {"size": [512, 512], "counts": "gVS52l?2O2VOLdA7Y>LdA6[>KdA7Y>LdA6Z>LdA7Y>LeA6X>LfA6X>MeA6Z>JeA8[>HbA:_>d0O01N2O010O01000O0100O0100O01000O0100O0100O0100O0100O01000O010M2O2M3M2O2M2N3N2M2Fm@FU?8l@FW?7l@FV?9:M2N3Nein1"}, "image_id": 337, "id": 4990}, {"iscrowd": 0, "category_id": 1, "bbox": [209.0, 26.0, 62.0, 64.0], "area": 1825, "segmentation": {"size": [512, 512], "counts": "^aX31n?2N2N2O1N2N2N2N2N2N3M2N2N2N2O1N2N2N2N2N3M2N2O1000000000001O01O0000000000000001ON3M2N2O1N2N2N2N2N2N2N3M2N2O1N2N2N2N2N2N3M2Nm]h3"}, "image_id": 338, "id": 4991}, {"iscrowd": 0, "category_id": 1, "bbox": [310.0, 201.0, 73.0, 79.0], "area": 2515, "segmentation": {"size": [512, 512], "counts": "lVk41o?1N2N2N3M2N2N2O1N2N3M2N2N2O1N3M2N2N2N2N2O2M2N2O1000001O01O00000001O01O000001O0001O000001O01O00000O1O1N3M2O10000IXAUOh>R11O0O1N2N2N3M2N2O1N2N2Cf@1]?Le@2]?Me@0]?Ne@0]?Ne@1ZgP2"}, "image_id": 338, "id": 4992}, {"iscrowd": 0, "category_id": 1, "bbox": [121.0, 157.0, 82.0, 76.0], "area": 3249, "segmentation": {"size": [512, 512], "counts": "Pfl12j?5J5K5L4O2O003M01O01O000N2K5K60O00M3M301O01O0001OO1K6O00[AnN\\>[1000010O00000010O000001ON3J5NlAnN^=R1]BSOc=n0VBYOi=g0RB^Oo=Y10001O0001O0001O0001O01OYNPB^1P>^NUBa1U>0fA\\NX>f10ROkA2T>IQB7P>DTB0001O0001O0001O0K5K5L4K6JeYj4"}, "image_id": 339, "id": 4993}, {"iscrowd": 0, "category_id": 1, "bbox": [498.0, 458.0, 14.0, 44.0], "area": 322, "segmentation": {"size": [512, 512], "counts": "R_i72j?4M3M4L3M3M3M4M200YAVOY>j0cAYO]>g0`A\\O`>d0]A@c>n0dA"}, "image_id": 339, "id": 4994}, {"iscrowd": 0, "category_id": 1, "bbox": [24.0, 0.0, 18.0, 9.0], "area": 92, "segmentation": {"size": [512, 512], "counts": "PP<2n?1O1O1O1O1O1O1O00O1O1O1O1O1O100O1OQP[7"}, "image_id": 340, "id": 4995}, {"iscrowd": 0, "category_id": 1, "bbox": [198.0, 0.0, 17.0, 9.0], "area": 74, "segmentation": {"size": [512, 512], "counts": "PPS31o?1O1O1O1O1O1O2NO1O1O1O1O1O1O1O10P`d4"}, "image_id": 340, "id": 4996}, {"iscrowd": 0, "category_id": 1, "bbox": [239.0, 0.0, 51.0, 32.0], "area": 845, "segmentation": {"size": [512, 512], "counts": "``g31n?2N2N2N2N2N2O2M2HAl@a0S?@k@b0S?7N2O1O1O0000O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O1O100O1OQP_3"}, "image_id": 340, "id": 4997}, {"iscrowd": 0, "category_id": 1, "bbox": [283.0, 0.0, 69.0, 47.0], "area": 1611, "segmentation": {"size": [512, 512], "counts": "k`]41n?2N2N2N2N2O1N2ICh@?V?Ch@`0U?7N2N2N2N2N2N2N2O1N1O000001O2N2N2N1O001O000001O01O1O1O1O1O1O1O1O1O1O1O1O100O1O1O1O1O11O1OO1O1O1O1O1O1O1O1O1O1O100O2N2Noo_2"}, "image_id": 340, "id": 4998}, {"iscrowd": 0, "category_id": 1, "bbox": [356.0, 0.0, 48.0, 24.0], "area": 598, "segmentation": {"size": [512, 512], "counts": "PPb51o?1O1O1O1O1O1O1O1O2N1O1O1O1O1O1O1O1O1O1O1O2N00O1O1O100O1O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1OQPf1"}, "image_id": 340, "id": 4999}, {"iscrowd": 0, "category_id": 1, "bbox": [2.0, 1.0, 24.0, 24.0], "area": 284, "segmentation": {"size": [512, 512], "counts": "ZP11n?2N3M2O1N2N2N2N2N2N20000000N2N2N2N3M2N2O1N2N2Nbob7"}, "image_id": 340, "id": 5000}, {"iscrowd": 0, "category_id": 1, "bbox": [48.0, 4.0, 64.0, 69.0], "area": 2076, 
"segmentation": {"size": [512, 512], "counts": "fQh01o?1N2N2N2N2N2@EXA=f>EXA=f>FWAFWAFWA=f>EXA=f>`0N2O1N2N2N00000000001O0000000001O00000000000001O000001O2N2N2N3M2N2N2N2N2O1N2N2N2N2N2N3M2N2N2N2N2N2O1NToW6"}, "image_id": 340, "id": 5001}, {"iscrowd": 0, "category_id": 1, "bbox": [112.0, 5.0, 9.0, 9.0], "area": 44, "segmentation": {"size": [512, 512], "counts": "XPh11n?2N2N2O1000N2N2Nh_S6"}, "image_id": 340, "id": 5002}, {"iscrowd": 0, "category_id": 1, "bbox": [165.0, 7.0, 53.0, 57.0], "area": 1417, "segmentation": {"size": [512, 512], "counts": "\\ab21n?3M2N2N2O1N2N2N2N2N2N2N2N2N2FUO\\An0a>UO\\Am0b>UO\\Am0b>800000000001O000000000001O02N2N2N2N2N2N2N2N2N2N2N2N2N2N3N1N2N2N2N2N2NTob4"}, "image_id": 340, "id": 5003}, {"iscrowd": 0, "category_id": 1, "bbox": [400.0, 25.0, 63.0, 52.0], "area": 1668, "segmentation": {"size": [512, 512], "counts": "lQX62m?2N2O1N2N3M2N2N2O2M2N2N2N2O0O00000000010O000000010O00000000010O003M2N2O1N3M2N2N2N2O2M10O2N3N1N2N2N3M2DUAAl>=VAAl>=WA@k>?VA_Om>>QA@Q?>:N2K^@Id?55N2N2Nf\\P5"}, "image_id": 340, "id": 5014}, {"iscrowd": 0, "category_id": 1, "bbox": [354.0, 106.0, 57.0, 50.0], "area": 1511, "segmentation": {"size": [512, 512], "counts": "RTa53m?1N3M2O2M2JFc@=[?5N3M3N1N3M2O2M2O2M00010O00010O00010O0002O1N2OO00010O00010O0002O2M2N3N1N2OO01O012M2N3N2M2N3N1N3M2O2MQ\\b1"}, "image_id": 340, "id": 5015}, {"iscrowd": 0, "category_id": 1, "bbox": [295.0, 124.0, 54.0, 52.0], "area": 1404, "segmentation": {"size": [512, 512], "counts": "idc42n?1N2N3M2KG`@;^?5O1N2N2N2N2N2N2N3M2N2N2N2O1N11N2N000000000001O0001O0000002N2N2N2N3M2O1N2N2N2N2N2N2N2N3M2N2N2O1N`[a2"}, "image_id": 340, "id": 5016}, {"iscrowd": 0, "category_id": 1, "bbox": [214.0, 125.0, 26.0, 34.0], "area": 452, "segmentation": {"size": [512, 512], "counts": "\\T[31n?2N2N2N3N1N2N2N2N2N2N3M2O1N2N2N2N02_ORAMP?1RANo>1RAMQ?0QANQ?0QANQ?0QANQ?0RAMP?1ZlW4"}, "image_id": 340, "id": 5017}, {"iscrowd": 0, "category_id": 1, "bbox": [184.0, 137.0, 44.0, 53.0], "area": 1207, "segmentation": {"size": [512, 512], "counts": "mTl21b01h>1VA1h>2UA0i>2VAOh>3VAOi>2UA1h>1VA1h>1VA1h>1VA1h>e0N2N2N000001O0000000001O0002N2N2N2N2N2N2N2O1N3M2N2N2N2N2N2N2N2N2O2M2NQk]4"}, "image_id": 340, "id": 5018}, {"iscrowd": 0, "category_id": 1, "bbox": [393.0, 144.0, 60.0, 58.0], "area": 1775, "segmentation": {"size": [512, 512], "counts": "ZeT61n?2N2N2N2N2N2N2N2N3M2N2N2N2O1N2N2N2N2N2N2N2N3M2N2N2N2N2O1O10N2N2N2N2N2N2N002N2N2N2N2N2O1N2N2N2N2N3M2N2N2N2N2N2N2N2O1N2NdZm0"}, "image_id": 340, "id": 5019}, {"iscrowd": 0, "category_id": 1, "bbox": [273.0, 146.0, 27.0, 28.0], "area": 343, "segmentation": {"size": [512, 512], "counts": "jdX41n?2N2N3N1N2N2N2N2N2O11O0000000001O00N2N2N2N2N2N3M2N2NmjY3"}, "image_id": 340, "id": 5020}, {"iscrowd": 0, "category_id": 1, "bbox": [332.0, 150.0, 37.0, 38.0], "area": 694, "segmentation": {"size": [512, 512], "counts": "[UV52m?2N2N2N2N2N2O1N2N2N2N2N2N2N3M0000000000000002N2N2O1N2N2N2N2N2N2N2N2N2N3MlZW2"}, "image_id": 340, "id": 5021}, {"iscrowd": 0, "category_id": 1, "bbox": [255.0, 155.0, 29.0, 25.0], "area": 346, "segmentation": {"size": [512, 512], "counts": "Weo31o?1N2N2N3M2N01O0001O001O3N1N2N2O20O00000N2N3M2N2N2O1N3M2Ngja3"}, "image_id": 340, "id": 5022}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 155.0, 34.0, 33.0], "area": 564, "segmentation": {"size": [512, 512], "counts": "\\UW71n?2N2N2N2N2N2N3N1N2N2N2N2N2N2N0001O00001O2N2N3N1N2N2N2N2N2N2N3N1N2Ngj7"}, "image_id": 340, "id": 5023}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 160.0, 15.0, 31.0], "area": 252, "segmentation": {"size": [512, 512], "counts": 
"P5o0R?N3M2N2O1N2N2N2N2N2N2N2N2N3MaZh7"}, "image_id": 340, "id": 5024}, {"iscrowd": 0, "category_id": 1, "bbox": [377.0, 175.0, 14.0, 14.0], "area": 103, "segmentation": {"size": [512, 512], "counts": "del52m?2N2N2N2N200000O1N2N2N2N2OYZl1"}, "image_id": 340, "id": 5025}, {"iscrowd": 0, "category_id": 1, "bbox": [503.0, 177.0, 9.0, 18.0], "area": 92, "segmentation": {"size": [512, 512], "counts": "hek72m?2N2N3M2O1N2N2N2^J"}, "image_id": 340, "id": 5026}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 181.0, 67.0, 64.0], "area": 2031, "segmentation": {"size": [512, 512], "counts": "b6h0T?ZOm@i0P?5N2O1N2N2N2N2N2N2N00001O00000002N3M2N2N2N2N2N1O1O100O1O1O1O000000000000000000010O00000001O3M2N0001O3N1N2N2N2N2N2N2N2N2N3N1N2N2N2NkYn6"}, "image_id": 340, "id": 5027}, {"iscrowd": 0, "category_id": 1, "bbox": [220.0, 186.0, 50.0, 50.0], "area": 1321, "segmentation": {"size": [512, 512], "counts": "eV^32m?2N2N2N3M2N2`@B\\?b0N3M2N2N2N2N2N2O2M1O001O00000001O000001O00000003M2N2N2O1N00001O000000011N2Bk@NW?0k@NW?0k@NW?0l@MV?1l@MW?1Zjh3"}, "image_id": 340, "id": 5028}, {"iscrowd": 0, "category_id": 1, "bbox": [384.0, 187.0, 14.0, 14.0], "area": 108, "segmentation": {"size": [512, 512], "counts": "PVP62m?3M2N2N2O1N2OO2N2N2N3M2O1Noih1"}, "image_id": 340, "id": 5029}, {"iscrowd": 0, "category_id": 1, "bbox": [356.0, 209.0, 54.0, 54.0], "area": 1368, "segmentation": {"size": [512, 512], "counts": "hWb52m?2N2N2N2IHc@:[?Hc@:[?7N2N2N2O1N2N2N1O0000000000001O2N1O0000000001O000001O00000002N2N2N3M2N2N2N2N2N2N2N3M2N2N2N2N2Olhb1"}, "image_id": 340, "id": 5030}, {"iscrowd": 0, "category_id": 1, "bbox": [25.0, 215.0, 73.0, 64.0], "area": 2131, "segmentation": {"size": [512, 512], "counts": "Th<1d?2c@0[?2c@0[?2c@1Z?1d@1Z?X130001O0001O000000000001O0001O000000000001O01O000000000002N2N2O1N2N2N3M2N2N2N2N2O1N2N3M2N2N2N2N2N2Oiff5"}, "image_id": 340, "id": 5039}, {"iscrowd": 0, "category_id": 1, "bbox": [18.0, 281.0, 13.0, 12.0], "area": 85, "segmentation": {"size": [512, 512], "counts": "oX91m?3L3O2O010O010O02O0N1N3MSW`7"}, "image_id": 340, "id": 5040}, {"iscrowd": 0, "category_id": 1, "bbox": [347.0, 302.0, 31.0, 31.0], "area": 495, "segmentation": {"size": [512, 512], "counts": "li]51n?2N2N2N2N2N2N2N2N2N2N2N2N2N2O1000N2N2N2N2N2N2N2N2N2N2N2N2N2NTfR2"}, "image_id": 340, "id": 5041}, {"iscrowd": 0, "category_id": 1, "bbox": [440.0, 307.0, 8.0, 8.0], "area": 36, "segmentation": {"size": [512, 512], "counts": "fYl61n?2O1N2N2OO2N3M[fo0"}, "image_id": 340, "id": 5042}, {"iscrowd": 0, "category_id": 1, "bbox": [450.0, 311.0, 52.0, 53.0], "area": 1299, "segmentation": {"size": [512, 512], "counts": "ZZQ71o?1N2N2N2N2N2N2N2N3M2N2N2N2O1N2N2N2N2N2N2N3O0001O000000000000000N2N2N2N3M2N2N2N2N2N2N2N2O1N2N2N3M2N2N2NYe4"}, "image_id": 340, "id": 5043}, {"iscrowd": 0, "category_id": 1, "bbox": [376.0, 317.0, 50.0, 59.0], "area": 1713, "segmentation": {"size": [512, 512], "counts": "fZl51n?2[@NY?5d@MZ?5d@MZ?5d@MZ?>N2N2N2N2N2O1N3M2N2N2N2N2N2N2N2N2N2N1O000020O1N2N2N2O1N2N2N2N3M2N2N2N2N2N2N2Bl@NU?0n@MT?2m@LU?2m@LU?2m@LV?1>N[eZ1"}, "image_id": 340, "id": 5044}, {"iscrowd": 0, "category_id": 1, "bbox": [100.0, 325.0, 91.0, 90.0], "area": 3024, "segmentation": {"size": [512, 512], "counts": "W\\b12m?2N2O1N2N2N2N2N2N3M2N2N2N2O1N2N2N2N2N3M2N000010O000000MhN_AX1a>3001O0001O0000000000000001O0001O00000000M_AhNa>X1301O00000001O000000000001O000001O00000002N2N2N2N2N2N3N1N2N2N2N2N2N2N2N2N3M2O1N2NVUP5"}, "image_id": 340, "id": 5045}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 327.0, 5.0, 10.0], "area": 27, "segmentation": {"size": [512, 512], "counts": 
"W::g?N3M2N2NeUm7"}, "image_id": 340, "id": 5046}, {"iscrowd": 0, "category_id": 1, "bbox": [8.0, 333.0, 12.0, 12.0], "area": 74, "segmentation": {"size": [512, 512], "counts": "bZ41n?2N2N2N2N2000N2N1O2N2M`ee7"}, "image_id": 340, "id": 5047}, {"iscrowd": 0, "category_id": 1, "bbox": [192.0, 344.0, 10.0, 11.0], "area": 60, "segmentation": {"size": [512, 512], "counts": "lZP33l?2N2N2N20N2N2N2N2NTej4"}, "image_id": 340, "id": 5048}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 356.0, 47.0, 71.0], "area": 1759, "segmentation": {"size": [512, 512], "counts": "g;d1[>000000000000000000010O000000000000000002N2N2N2N2O1N2N2N2N2N2N2N2N2N3M2N2N2N2N2O1N2N2N2N2N2NRTX7"}, "image_id": 340, "id": 5049}, {"iscrowd": 0, "category_id": 1, "bbox": [174.0, 367.0, 6.0, 7.0], "area": 24, "segmentation": {"size": [512, 512], "counts": "b[g22m?2N2N02N2N`dU5"}, "image_id": 340, "id": 5050}, {"iscrowd": 0, "category_id": 1, "bbox": [141.0, 370.0, 93.0, 91.0], "area": 3097, "segmentation": {"size": [512, 512], "counts": "emV21n?2N2N3M2O1N2N2N2N2N3M2N2O1N2N2N2N3M2N2N2O0O00000000001O0MgN`AY1`>3001O00000001O000001O00000001O00000001J`AkN`>U1bAiN^>X140000000001O0001O0000000001O000001O00000001O2N2N3N1N2N2N2N2N2N3M2O1N2N2N2N2N2N3M2NicZ4"}, "image_id": 340, "id": 5051}, {"iscrowd": 0, "category_id": 1, "bbox": [409.0, 376.0, 50.0, 55.0], "area": 1434, "segmentation": {"size": [512, 512], "counts": "`l\\62n?1N2N2N2N3]@F[?GaA:_>DbA<_>BcA=^>BbA?^>_OcAa0^>]OcAc0^>[OdAe0\\>YOeAg0\\>WOeAi0j>0O000010O00010O0001O01O01O01O00010O0001OM4L3L4L5K4Lda[3"}, "image_id": 340, "id": 5058}, {"iscrowd": 0, "category_id": 1, "bbox": [197.0, 431.0, 28.0, 28.0], "area": 409, "segmentation": {"size": [512, 512], "counts": "kmR31n?2N2O1N2N2N2N2N2N2N3N1N2N2N20O1N2N2N2N2N2N2N2N2N2N2N2NUR_4"}, "image_id": 340, "id": 5059}, {"iscrowd": 0, "category_id": 1, "bbox": [503.0, 433.0, 9.0, 17.0], "area": 77, "segmentation": {"size": [512, 512], "counts": "hmk71n?2N2N2O1N2N2N2N3]B"}, "image_id": 340, "id": 5060}, {"iscrowd": 0, "category_id": 1, "bbox": [189.0, 451.0, 24.0, 24.0], "area": 290, "segmentation": {"size": [512, 512], "counts": "[nn22m?2N2N2N2N2O1N2N2N2O1000001OO1N2N3M2N2N2N2N2N2N`Qe4"}, "image_id": 340, "id": 5061}, {"iscrowd": 0, "category_id": 1, "bbox": [464.0, 463.0, 48.0, 49.0], "area": 1292, "segmentation": {"size": [512, 512], "counts": "o_X71n?1O1O1O1O1O1DJm@7R?Ll@4S?Nk@3T?Oj@2U?0i@1V?=O1O100O1O1O1O1O1O1O1O1O100O1O1O1O0000000001O01O0000000002N3M2N2O1N2N2NZA"}, "image_id": 340, "id": 5062}, {"iscrowd": 0, "category_id": 1, "bbox": [140.0, 483.0, 24.0, 25.0], "area": 292, "segmentation": {"size": [512, 512], "counts": "[_V21n?2O2M2N2N2N2N2N3N1N2000001ON3M2O1N2N2N3M2N2N2O^`]5"}, "image_id": 340, "id": 5063}, {"iscrowd": 0, "category_id": 1, "bbox": [183.0, 483.0, 47.0, 29.0], "area": 752, "segmentation": {"size": [512, 512], "counts": "mok22m?2N1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O100O1001O2N1O1O1O1O1O1O1Dd@2]?Le@3\\?Kf@4[?Jg@5c?O1O1O1O1O1OQ`\\4"}, "image_id": 340, "id": 5064}, {"iscrowd": 0, "category_id": 1, "bbox": [94.0, 506.0, 12.0, 6.0], "area": 42, "segmentation": {"size": [512, 512], "counts": "o__11n?1O1O1O1O1001O1O1O1O1OQ`Z6"}, "image_id": 340, "id": 5065}, {"iscrowd": 0, "category_id": 1, "bbox": [4.0, 0.0, 22.0, 24.0], "area": 213, "segmentation": {"size": [512, 512], "counts": "PP22n?1O1O2N1O2N1O2N1O20O010O010O010N1N3M2O2M2N_ob7"}, "image_id": 341, "id": 5066}, {"iscrowd": 0, "category_id": 1, "bbox": [16.0, 0.0, 35.0, 17.0], "area": 323, "segmentation": {"size": [512, 512], "counts": 
"PP81o?1O2N1O2N1O2N1O1O2N1O2NO100O1O1O100O1O100O1O1002NO100O1O100L4O1O100OQ`V7"}, "image_id": 341, "id": 5067}, {"iscrowd": 0, "category_id": 1, "bbox": [62.0, 0.0, 7.0, 4.0], "area": 16, "segmentation": {"size": [512, 512], "counts": "PPo01o?2N1OO1O100OQ`m6"}, "image_id": 341, "id": 5068}, {"iscrowd": 0, "category_id": 1, "bbox": [32.0, 17.0, 15.0, 13.0], "area": 103, "segmentation": {"size": [512, 512], "counts": "gP`01n?3N1N3N1N1O010O0001O2O2M2N3NY_X7"}, "image_id": 341, "id": 5069}, {"iscrowd": 0, "category_id": 1, "bbox": [48.0, 21.0, 81.0, 77.0], "area": 3158, "segmentation": {"size": [512, 512], "counts": "gQh01n?2a@Nm>5PANn>4PAMn>5PANm>5QALm>6QAMm>5PANm>d0N3N1N3M2O2M2N3M3N1N1O01O01O02N3N0O10O00010L_NhAa1U>6O2O00EoAhNR>X1oAgNP>Y1SBdNm=]1TBaNm=^1;O100O1O100O1O2O1N02OO01O010O00010O2N2OO101N3M2O2M2N2O0O1KSAVOn>i0410O2N100O1Li@^OY?a04N2O1N2N3Ln^o5"}, "image_id": 341, "id": 5070}, {"iscrowd": 0, "category_id": 1, "bbox": [378.0, 33.0, 54.0, 73.0], "area": 2747, "segmentation": {"size": [512, 512], "counts": "nRm5:Y?=C=H801O000O1D<0000000000000001O000H8O1O1O1O1O1O100000J6E;0000000002O0O1O2N1O1O000000000001O00B>E;Bl@0b?00000YnW1"}, "image_id": 341, "id": 5071}, {"iscrowd": 0, "category_id": 1, "bbox": [302.0, 36.0, 67.0, 69.0], "area": 2959, "segmentation": {"size": [512, 512], "counts": "dRg4<]?701O00000000000AFXA:]>1dAN\\>2dAN\\>2dANP>>PBBP>>mAES>T101O0000000000000000000000001O00000000000M3000000000005K000000VOXB\\Oh=3iBMW=MoB3Q=MoB3Q=MoB3Q=MoB3Q=MoB3Q=LPC4^>000000001O000001O0000000Fg]W2"}, "image_id": 341, "id": 5072}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 62.0, 43.0, 54.0], "area": 1407, "segmentation": {"size": [512, 512], "counts": "P2b1]>3M0102M2N3NO010O0001K_AkN`>U1cAhN_>V17O2M2N3N0O0003N1N3M2O0O03N1N3M3N1N3M2O2M2N3N1N3N1N3M3N1N3MU]Z7"}, "image_id": 341, "id": 5073}, {"iscrowd": 0, "category_id": 1, "bbox": [332.0, 114.0, 83.0, 58.0], "area": 3126, "segmentation": {"size": [512, 512], "counts": "fTV52i?5K5L4K6J5K5M31O01O000001O01O00000010O000000010O000000010O000000010O00UAROd>U1000010O000000010J5K5N20001O01O0001O0001O0001O01O000001OL5J5K5M300010O00000010N1K5K5L4K6J5K5K\\\\`1"}, "image_id": 341, "id": 5074}, {"iscrowd": 0, "category_id": 1, "bbox": [204.0, 124.0, 38.0, 54.0], "area": 1284, "segmentation": {"size": [512, 512], "counts": "XUV33j?3M4K4]OC`A`0]>C`A`0]>D_A`0\\>D`A`0]>C`A`0`>a0O01O01O0O1N3O0001O01O01O01OO2L3M3L5L3M3M3L5L3M3M4K4M3M4LPlV4"}, "image_id": 341, "id": 5075}, {"iscrowd": 0, "category_id": 1, "bbox": [316.0, 149.0, 12.0, 10.0], "area": 89, "segmentation": {"size": [512, 512], "counts": "iTn43i?5O000000010O0000001OL^kk2"}, "image_id": 341, "id": 5076}, {"iscrowd": 0, "category_id": 1, "bbox": [322.0, 178.0, 70.0, 50.0], "area": 2307, "segmentation": {"size": [512, 512], "counts": "dVQ52j?4L4L4L4L5K4000001O0001O0001O0001O01O000001O01O000001O01O0000001M2L4K5M301O01O0001O01O0000010O0000010O00000010O000001O01O0001O01OYO\\A2e>I_A5c>GbA4b>HbA3c>IaA2jXl1"}, "image_id": 341, "id": 5077}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 187.0, 184.0, 163.0], "area": 15110, "segmentation": {"size": [512, 512], "counts": "g7U2j=3N3M2M4M3M2M4M1N01000O0100O011O0O1000O0100O0gNkBIU=7mBGR=:QCBP==SCAmR1nAPOP>P1PBSOm=m0RBVOk=j0VBYOg=h0WB[Og=h0VB[Of=h0XBZOf=`1O01000O0100O0100O4M2N3L3N3M3L3N3L31000O01N2M2N3L4M2N3L4M2M4mMWBi1k=UNXBh1S>L3N0O10002M4M3M2M4M3L3N3M3L3N3M2M4M3L3N3MYgS5"}, "image_id": 341, "id": 5078}, {"iscrowd": 0, "category_id": 1, "bbox": [291.0, 206.0, 27.0, 16.0], "area": 210, "segmentation": {"size": [512, 512], "counts": 
"efa42j?4M3010O00010O000010O00010O00HZ@6h?1O01O01O01O00010O0N2M\\iP3"}, "image_id": 341, "id": 5079}, {"iscrowd": 0, "category_id": 1, "bbox": [307.0, 240.0, 68.0, 57.0], "area": 2218, "segmentation": {"size": [512, 512], "counts": "]hi44h?5K4K5L4L5L31O0001O0001O0001O01O0001O0001O01O0001O01O002N01O0001O01O000O100010O000L4ZAoNY>V1aAPOY>^1001O01O0001O0001O0001O0ROfA7Y>EkA;U>@PB`0Q>[OSBe0m=WOXBi0_>00VOPAd0W?N1L4L4K5LaWT2"}, "image_id": 341, "id": 5080}, {"iscrowd": 0, "category_id": 1, "bbox": [278.0, 248.0, 11.0, 17.0], "area": 137, "segmentation": {"size": [512, 512], "counts": "hW[474M]?;10O00000010N1J6O1OWX_3"}, "image_id": 341, "id": 5081}, {"iscrowd": 0, "category_id": 1, "bbox": [291.0, 301.0, 73.0, 54.0], "area": 2295, "segmentation": {"size": [512, 512], "counts": "cja41k?4K5L5J5O10000010O00M3K6K4K5M30001O01O0001O01L3K5O100010O000001O01O0000010O000001O01O0001O01O0YAQO]>o0_AUOa>U11O0001O01O0001O0001O01O0001mN^ALNd0d>\\OiAc0j>1O0000K6K4K5L4LheY2"}, "image_id": 341, "id": 5082}, {"iscrowd": 0, "category_id": 1, "bbox": [254.0, 342.0, 10.0, 15.0], "area": 114, "segmentation": {"size": [512, 512], "counts": "Q[o33b?;00001O000001OO1Fcek3"}, "image_id": 341, "id": 5083}, {"iscrowd": 0, "category_id": 1, "bbox": [196.0, 358.0, 61.0, 101.0], "area": 3580, "segmentation": {"size": [512, 512], "counts": "]\\R39f?3L3N2O1d@Fo>f000O100000001O01O000000000001O00000G900000000TBmNh]ObA13b0\\>WOfA32f0m>00010O0000010O0000010O00N2K5L5K4L4Kmc]2"}, "image_id": 341, "id": 5085}, {"iscrowd": 0, "category_id": 1, "bbox": [142.0, 387.0, 59.0, 83.0], "area": 2517, "segmentation": {"size": [512, 512], "counts": "e]W23l?5ZOLVA7h>ORA2n>a0O1O0O2O00000O10O1000ZOTOWBm0i=WOSBi0m=[OoAd0Q>AkA?U>EgA;Y>f00O0101O001N101O002M5L4L4L4K5L0000fMcBQ2\\=oMiBl1X=TNlBh1T=XNPCd1P=\\NTC`1P=[NUC`1j=L4L4L4K5L4L000O01003M4K5L4L4L4KbQk4"}, "image_id": 341, "id": 5086}, {"iscrowd": 0, "category_id": 1, "bbox": [258.0, 392.0, 16.0, 10.0], "area": 118, "segmentation": {"size": [512, 512], "counts": "[\\Q45h?300001O01O00000000010O000000Jlcf3"}, "image_id": 341, "id": 5087}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 410.0, 26.0, 54.0], "area": 740, "segmentation": {"size": [512, 512], "counts": "jN1N3N2N2N2M2O2N2N2M3N1O2N2M3N2N1O2M3N2N2N1N3N2N2Ncbb7"}, "image_id": 341, "id": 5088}, {"iscrowd": 0, "category_id": 1, "bbox": [52.0, 416.0, 61.0, 91.0], "area": 3280, "segmentation": {"size": [512, 512], "counts": "b]j02m?3N2M4Z@H^??M3N3M2M3N2M4M2M3N3M2M3N2M4M2M3N0O100SBWNc=k1[BWNb=l1\\BTNd=T20O21N2M3N3L3N1N10O10O010O010O10O10O010OQOgBXO\\=f0gBWO\\=e0gBYO[=e0gBXO[=f0hBXOY=g0jBUOZ=c0[BnN==Z=c0mBZOV=c0nBZOT=d0nBZOU=b0nB[OU=8kAO[?Og@N]?Oe@O]?Ng@O[?O[RW6"}, "image_id": 341, "id": 5089}, {"iscrowd": 0, "category_id": 1, "bbox": [34.0, 423.0, 15.0, 12.0], "area": 103, "segmentation": {"size": [512, 512], "counts": "\\]a03m?2M3N0O010O010O0100O01002M3NeRW7"}, "image_id": 341, "id": 5090}, {"iscrowd": 0, "category_id": 1, "bbox": [266.0, 423.0, 80.0, 55.0], "area": 2732, "segmentation": {"size": [512, 512], "counts": "Y^U41k?5J5L4L4K6L300000010O0000010O0000010O0M3L40001O0001O01O0001OO1L5M200`AgNZ>_10001O01O0000010O0000010O00000010O0000010O000001K0O3N4K5N201O0M3M310O0O1L4K6N10000O2N2N2Ncbb2"}, "image_id": 341, "id": 5091}, {"iscrowd": 0, "category_id": 1, "bbox": [248.0, 458.0, 11.0, 21.0], "area": 181, "segmentation": {"size": [512, 512], "counts": "k^l33d?9H8000000000001OL4GoQn3"}, "image_id": 341, "id": 5092}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 470.0, 64.0, 42.0], "area": 1679, "segmentation": {"size": [512, 512], "counts": 
"e?9f?2N2N1O1O1O1O1O1O100O1O11O1O1OO1O100O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O100O1O1001O1O1O2N1O1O1O1O1O1O1O1O1O2N1O1^Ok@5X?Ji@4Y?Ji@4Y?Ji@4c?N2NZ`o6"}, "image_id": 341, "id": 5093}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 29.0, 29.0], "area": 608, "segmentation": {"size": [512, 512], "counts": "0f0Z?1O00001O001O00001O001O00001O001OO1N2M3N2M3N2M3N2N2M3N2MS`a7"}, "image_id": 342, "id": 5094}, {"iscrowd": 0, "category_id": 1, "bbox": [33.0, 0.0, 46.0, 37.0], "area": 1105, "segmentation": {"size": [512, 512], "counts": "g``02k?3N2N3M2M4M2N3L30010O0O2L3O2O00001O001O001O00001O001O001O0000N2N2N2M3N2N2M3N2N2M3N2N2M3N2NR`h6"}, "image_id": 342, "id": 5095}, {"iscrowd": 0, "category_id": 1, "bbox": [83.0, 0.0, 4.0, 2.0], "area": 5, "segmentation": {"size": [512, 512], "counts": "P`Y11o?00001o_d6"}, "image_id": 342, "id": 5096}, {"iscrowd": 0, "category_id": 1, "bbox": [97.0, 0.0, 56.0, 54.0], "area": 1739, "segmentation": {"size": [512, 512], "counts": "Wa`11m?2O2M2N3M2N3\\OD`A>^>D`A?^>C`A>^>D`A?]>D`A?]>CaA?_>b0O001O001O001O001O1O001O001OO1N2N2O1N2N2N2N2N2O1N2N2N2N2N2O1N2N2N2N2N2O1N2N2N2N2N2O1NR`c5"}, "image_id": 342, "id": 5097}, {"iscrowd": 0, "category_id": 1, "bbox": [379.0, 0.0, 15.0, 10.0], "area": 106, "segmentation": {"size": [512, 512], "counts": "S`m53k?2O2O001O00001O001O00O1N2M3NRPk1"}, "image_id": 342, "id": 5098}, {"iscrowd": 0, "category_id": 1, "bbox": [408.0, 1.0, 59.0, 64.0], "area": 2062, "segmentation": {"size": [512, 512], "counts": "oP\\62n?1`@On>3PAOn>3PA0m>3PAOn>3PAOn>3PAOn>3PA0n>b0N2N2N2N3M2N10O000001N1N2O1O1N2O2O01O1O2N101N1O2N1O2N100O2N1O2NO1O20O1O2N1O1O2N1N3M2M4M2N3L3N2N3M2M4Mh_f0"}, "image_id": 342, "id": 5099}, {"iscrowd": 0, "category_id": 1, "bbox": [157.0, 16.0, 52.0, 62.0], "area": 1827, "segmentation": {"size": [512, 512], "counts": "oa^22l?2M4M2M4M2N3L3N2N3L3N3M2M4M2M3N3M2M4M2N2010O010O01O01O010O010O0N2M4M2N3L3N3M2M3N3L3N3M2M3N3M2M4M2M4M2NU_g4"}, "image_id": 342, "id": 5100}, {"iscrowd": 0, "category_id": 1, "bbox": [484.0, 17.0, 28.0, 50.0], "area": 707, "segmentation": {"size": [512, 512], "counts": "WQb71m?2M4M2N3L3N3M2M3N3N110O010O00010O010O010VAXOZ>h0cA[O^>e0_A]Oa>c0]A@b>`0[ACf>l0O010VO"}, "image_id": 342, "id": 5101}, {"iscrowd": 0, "category_id": 1, "bbox": [208.0, 38.0, 55.0, 60.0], "area": 1988, "segmentation": {"size": [512, 512], "counts": "cRX31m?2N2M4M2M4M2M3N3L3N3L3N3L3N2M4M2M4M2O110O0010O0010O010O00010O010O00010NOO10O4M2N2M4M2M4M2M3N3L3N3L3N2M4Ca@5f?M4M_^l3"}, "image_id": 342, "id": 5102}, {"iscrowd": 0, "category_id": 1, "bbox": [49.0, 40.0, 26.0, 25.0], "area": 401, "segmentation": {"size": [512, 512], "counts": "hah02l?2M3N3L3N3L3O1010O010O00010O010O00010O0N3M2M3N3L3Nc^j6"}, "image_id": 342, "id": 5103}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 54.0, 21.0, 48.0], "area": 604, "segmentation": {"size": [512, 512], "counts": "f1_1a>0010O0O2M2N3M2M4M2N2N3L3N3M2M4M2N2N3L3NT^e7"}, "image_id": 342, "id": 5104}, {"iscrowd": 0, "category_id": 1, "bbox": [100.0, 59.0, 32.0, 29.0], "area": 544, "segmentation": {"size": [512, 512], "counts": "[Rb13k?2N3M2M4M2N2N3N1010O010O01O010O01O010O010O010O0N2N3M2N3L3N3M2Nmmm5"}, "image_id": 342, "id": 5105}, {"iscrowd": 0, "category_id": 1, "bbox": [126.0, 64.0, 28.0, 31.0], "area": 495, "segmentation": {"size": [512, 512], "counts": "gRo11l?3N2M4M2M4M2M3N3L310O01O01O010O01O01ON3L3N3L3N2M4M2M4Mjmb5"}, "image_id": 342, "id": 5106}, {"iscrowd": 0, "category_id": 1, "bbox": [23.0, 72.0, 63.0, 69.0], "area": 1962, "segmentation": {"size": [512, 512], "counts": 
"Tc;1l?4M2N2M4M2M4M2N2M4M2N3M21O010O000M4M210O00010O010O00010O\\AWOQ>j0lAXOU>g0hA]OW>c0gA_OY>b0cAB0CW>j0gAE0CY>b10O01O01O010N1N2N30O010aAdNX>b10HfAjN[>S1gAnNX>o0lAPOU>m0mATOR>i0RBSOQ>k0QBSOR>i0d0N3L3N2M4M2N3L3N2MWmd6"}, "image_id": 342, "id": 5107}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 74.0, 72.0, 79.0], "area": 2984, "segmentation": {"size": [512, 512], "counts": "VTi62k?3N2M4L3M3M4L3O110O00010O00010O00010O0010O0001N1M3M4I6M3M4L3N210L3M3FZNWBj1f=9M3010O00010O00O20O00010O00010O000N3L3M3M4@PBkNS>R1QBjNR>S1QBjNS>R1?M3M4L3N2M4L3M3M4L3M3M_m2"}, "image_id": 342, "id": 5108}, {"iscrowd": 0, "category_id": 1, "bbox": [252.0, 76.0, 61.0, 76.0], "area": 2504, "segmentation": {"size": [512, 512], "counts": "gSn32k?4M2N2M4M2N3L3N3M2M3N3M2M4M2M4M2N2M40O010O01O01O010O010OjA`Nl=a1QBaNo=_1oAdNQ>e10010O010O0010O0010O010OROoAMQ>1RBOn=NTB2l=KXB4i=IYB8f=F\\B:e=B_B>`=@bB`0^=^OeB`0]=\\OfBb0\\=\\OgBa0\\=\\OfBa0^>N3M2M4M2N3LS\\S3"}, "image_id": 342, "id": 5109}, {"iscrowd": 0, "category_id": 1, "bbox": [101.0, 110.0, 107.0, 82.0], "area": 5582, "segmentation": {"size": [512, 512], "counts": "idb13k?2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3O2O0hA_NQ>a1mAbNS>e1010O00010O010O0001lAVNo=n1N3L30001O010O01O01O010O010O01O01O010O01O01O010OM4O00010O010O00010O010O001N1N2N3O010O00010O010O00010O010O00010O010O01O01N1M4M2M4M2M3kN^Aj0e>SO^Ai0n>N3M2M4M2M3N3L3N3L^kg4"}, "image_id": 342, "id": 5110}, {"iscrowd": 0, "category_id": 1, "bbox": [77.0, 120.0, 12.0, 15.0], "area": 106, "segmentation": {"size": [512, 512], "counts": "TdV11l?3N3L3M3O20O001M2N2M4LX\\c6"}, "image_id": 342, "id": 5111}, {"iscrowd": 0, "category_id": 1, "bbox": [205.0, 126.0, 20.0, 17.0], "area": 189, "segmentation": {"size": [512, 512], "counts": "WdV32k?3N2M4N110O00010O010O00010O010OM3N3L3Nn[_4"}, "image_id": 342, "id": 5112}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 133.0, 10.0, 26.0], "area": 156, "segmentation": {"size": [512, 512], "counts": "U4i0X?0OO1M4M2M4L3N2M4Ljkj7"}, "image_id": 342, "id": 5113}, {"iscrowd": 0, "category_id": 1, "bbox": [11.0, 136.0, 50.0, 64.0], "area": 1777, "segmentation": {"size": [512, 512], "counts": "We53<0P?3m@OP?4m@0P?2n@0P?b0L3N201VAnNe>Q1XASOg>S10010M2M4M2M3N3L3N3M20010O0010L3N3L3N2M4M2M4M2M3UOVA`0k>\\OXAd0h>ZO[Af0o>00010O010OO1N3L3N3L3N2M4M2MU[Q7"}, "image_id": 342, "id": 5114}, {"iscrowd": 0, "category_id": 1, "bbox": [221.0, 151.0, 16.0, 19.0], "area": 173, "segmentation": {"size": [512, 512], "counts": "Te^32l?2M4M2M4M2O110O010OO1M4M2M4M2MX[Y4"}, "image_id": 342, "id": 5115}, {"iscrowd": 0, "category_id": 1, "bbox": [380.0, 151.0, 87.0, 73.0], "area": 3169, "segmentation": {"size": [512, 512], "counts": "UVn52l?2N2M4M2N3L3N2M4M201O01O010O01O010O01O010O01O01O010O01O01O01KWORAh0k>9M2M4N11O01N1M4M2N2M4M2N30O0010O0010OO2M2M3N3N1010O0010O0010O002WOVBXOk=e0WBXOl=e0XBXOj=f0XBWOl=e0XBXOj=f0XBWOl=e0WBYOk=e0XBXOj=f0j0M2010O010O@e@;[?Ch@<^?1O010OM4M2M4MgZf0"}, "image_id": 342, "id": 5116}, {"iscrowd": 0, "category_id": 1, "bbox": [237.0, 158.0, 88.0, 62.0], "area": 2903, "segmentation": {"size": [512, 512], "counts": "lef31l?4M2M3N3L3N3L3N2M4M2N30O00010O010O01O01O010O01O01O010O010O0001M2O2O01O010O01O01ON3L3N3M2N210O010O00010O010O00010O010O0010O0010OTObA6^>HdA9\\>ChABjA>W>_OkAb0T>[OPBd0P>ZORBg0n=UOVBj0`>0010O010O00001M2M4M2M3N3L3N3L3NmYm2"}, "image_id": 342, "id": 5117}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 188.0, 1.0, 3.0], "area": 3, "segmentation": {"size": [512, 512], "counts": "leo73Q:"}, "image_id": 342, "id": 5118}, {"iscrowd": 0, "category_id": 1, "bbox": [130.0, 211.0, 38.0, 26.0], "area": 556, 
"segmentation": {"size": [512, 512], "counts": "QWQ23k?2N2M4O010O00010O00010ON210ON2M4L3M301O01O01O01O01O01O01O01O01ON3L3M4L3M3MZi[5"}, "image_id": 342, "id": 5119}, {"iscrowd": 0, "category_id": 1, "bbox": [347.0, 214.0, 77.0, 62.0], "area": 2497, "segmentation": {"size": [512, 512], "counts": "fg]53k?2N3L7J2N3L3N2O20O01O010O01O010O01ON3M2O20O010O01O01N1M4M2M4M2N2M4M2M4M201O01O010O01O010O01O010O01O010O01O010O01O010O01O010O0oNaAb0^>\\OeAKM?_>CfALMb0\\>_OjALNd0Y>]OUBc0k=[OXBe0a>M4M2N2M4M2N3L3NRh[1"}, "image_id": 342, "id": 5120}, {"iscrowd": 0, "category_id": 1, "bbox": [502.0, 214.0, 10.0, 39.0], "area": 228, "segmentation": {"size": [512, 512], "counts": "fWk73X?NZA4c>O[A4b>N\\A4b>OZA5b>N\\A4c>NZA4g>KWA8h>`010WI"}, "image_id": 342, "id": 5121}, {"iscrowd": 0, "category_id": 1, "bbox": [463.0, 217.0, 35.0, 28.0], "area": 552, "segmentation": {"size": [512, 512], "counts": "VgW74j?2N2M4M2M4N11O010O01O010O01O01O010O01O01O010O01O01O010O01N1M4M2M3N3Lnh6"}, "image_id": 342, "id": 5122}, {"iscrowd": 0, "category_id": 1, "bbox": [219.0, 218.0, 76.0, 49.0], "area": 2169, "segmentation": {"size": [512, 512], "counts": "ag]32l?2M4L3N2M4L3M4M2N210O00010O0010O0010O0010O0010O00010O0010O0010O00010O0010O0010O0010O0010O00010O0M4M2M3M4M2N3O01O010O00010O010O0N2M102[OYANj>OYANj>0YAMj>0XAMk>3VAIn>7QAGQ?6>N3LTY\\3"}, "image_id": 342, "id": 5123}, {"iscrowd": 0, "category_id": 1, "bbox": [56.0, 226.0, 108.0, 96.0], "area": 4840, "segmentation": {"size": [512, 512], "counts": "jXl02l?2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N2N3L3N3L3N2N3O0010O0010O0010O001PNSBl1l=RNVBn1o=0O01O01O010O010O00010ON3M2M3N3M2010O00010bA`NZ>c110O00\\NgAb1\\>O00010O010OjA`Nl=`1RBbNn=_1nAeNR>d10XNoAa1R>\\NPBe1V>O01O01O010O01O01_NfA[1Y>bNjA^1]>oN`A`0`>]OcAd0\\>ZOgAe0Z>WOiAj0V>TOlAl0e>O010O01O01O010O01O01O0O2M2M3N3L3N3L3N2M4M2Mlf]5"}, "image_id": 342, "id": 5124}, {"iscrowd": 0, "category_id": 1, "bbox": [2.0, 257.0, 19.0, 25.0], "area": 260, "segmentation": {"size": [512, 512], "counts": "dX12k?3N2N3L3N3M2M3N3O010ON2M4M2N3L3N2M4MmWe7"}, "image_id": 342, "id": 5125}, {"iscrowd": 0, "category_id": 1, "bbox": [244.0, 261.0, 22.0, 13.0], "area": 120, "segmentation": {"size": [512, 512], "counts": "WXj31m?3O000V@Mh?500010O010O00010O00010O0010O0010M2Mggj3"}, "image_id": 342, "id": 5126}, {"iscrowd": 0, "category_id": 1, "bbox": [174.0, 262.0, 70.0, 89.0], "area": 2730, "segmentation": {"size": [512, 512], "counts": "ZYg22k?4L3M3M4M2M3M40O0010O000k@ZOQ?k0O01O01O01O010O00iAWOW=j0TBVO63g=f0PBZO64i=b0nA]O64l=k0PBXOQ>g0mA\\OR>Y11O01O01O010O0001N1M3N3O00010O0N1N01O001FVB_Nj=a1ZB[Ng=d1\\BYNd=g1:^OnAWOU>f0nAWOU>f0nAWOV>f0mAVOV>k0`0O00010O00001L3N2M4L3M3M4L3MggU4"}, "image_id": 342, "id": 5127}, {"iscrowd": 0, "category_id": 1, "bbox": [323.0, 273.0, 99.0, 50.0], "area": 2491, "segmentation": {"size": [512, 512], "counts": "^iQ52l?3L3M4M2M4M2M4N110O0010O00010O010O01O010O010O00010O01N1N2N3M2N2M40M2N3L3N3L3N3L3N3TAoNf>W1O01O01O010OO2N110O010O0010O01mNWAn0n>O01O010O010O01O010M2N2N3M210O0010O0010O010O010O01O01O010O010O01OO2M2M4M2N3M2N3L3Ncf\\1"}, "image_id": 342, "id": 5128}, {"iscrowd": 0, "category_id": 1, "bbox": [457.0, 283.0, 55.0, 29.0], "area": 1382, "segmentation": {"size": [512, 512], "counts": "ZiT7>b?000O100000000000000CCWA=i>=000000000000000000000000000000000000000000000000000O01000000000000000000000000000000UG"}, "image_id": 342, "id": 5129}, {"iscrowd": 0, "category_id": 1, "bbox": [244.0, 286.0, 59.0, 84.0], "area": 2728, "segmentation": {"size": [512, 512], "counts": 
"mZj36g?5M3N0lNEcB;Z=KbB7Z=OcB1]=0bB1\\=0cB1]=OaB4^=M^B6a=K]B7c=JYB:f=FWB=g=EWB>e=FWB=g=P1L3N2N3N110O010O00010O010ON3M2N2M4M2M4M2N3L3N2N3mNcA>`>@cA>_>^OdAc0[>[OhAd0Y>YOiAh0V>UOnAj0d>0010O010L3N3L3N2M4M2M4M2MSVX3"}, "image_id": 342, "id": 5130}, {"iscrowd": 0, "category_id": 1, "bbox": [302.0, 304.0, 31.0, 33.0], "area": 617, "segmentation": {"size": [512, 512], "counts": "VZg42k?4M2M4M2M3N3L3M4N1010O00010O010O0010O0010N1N3M2M3N3L3N3L3N2M[Vi2"}, "image_id": 342, "id": 5131}, {"iscrowd": 0, "category_id": 1, "bbox": [424.0, 333.0, 88.0, 59.0], "area": 2968, "segmentation": {"size": [512, 512], "counts": "W[d6m0S?00000000000000000000000000003M0000000000001O000000000000000000000000000N200000000HnNaAS1^>oN`AR1_>PO_AQ1`>8O100O1O001O000001O0000002O1N2N1O0000000010O0001O3M2N2N2N2O1N2N3M2N2N2N2N2O1N3M2N2N2NoD"}, "image_id": 342, "id": 5132}, {"iscrowd": 0, "category_id": 1, "bbox": [147.0, 345.0, 68.0, 96.0], "area": 3145, "segmentation": {"size": [512, 512], "counts": "XmY23k?3L3N2M4M2O2O00010O0N3M2M3N3M2M4M2M3N3L3N3L3N2M4CWN_Bk1^=YN^Bk1_=WN_Bk1^==N3L3N3L3N2O20N1N2M4M2M4M2M3N3L3N3L3N2M4M2N3L3N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3NlTd4"}, "image_id": 342, "id": 5133}, {"iscrowd": 0, "category_id": 1, "bbox": [301.0, 355.0, 66.0, 66.0], "area": 2450, "segmentation": {"size": [512, 512], "counts": "nkf41l?4M2N3L3N2M4N1010O010O0010VA^OT>b0jA@V>a0fACZ>:bAI^>7^ALb>4\\ANd>h00O010OO1N3L3N3L30010O001O0N3L3N2O2O010O01O000M4M2N3L3N3L3N2010OoN]Ae0d>WO_Aj0`>TObAl0^>ROeAm0g>10ZOQA9o>ESABWA=j>_OYA?i>_OZA=V?N2M4M2MmSX2"}, "image_id": 342, "id": 5134}, {"iscrowd": 0, "category_id": 1, "bbox": [199.0, 367.0, 73.0, 98.0], "area": 3393, "segmentation": {"size": [512, 512], "counts": "kmS33k?2N2Z@J_?>N100010OO2M200O2M2N3L3O2O01O010OO2L3N2M4M2M4M2\\O`NeBc1Y=_NeBd1W=_NfBd1X=_NeBd1W=_NfBd1X=_NeBc1X=e0M2M4M2O20O001N1M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N2M4M2N3L3N3L3N2M4M2M4M2M3N3L3N3LUdg3"}, "image_id": 342, "id": 5135}, {"iscrowd": 0, "category_id": 1, "bbox": [501.0, 378.0, 11.0, 22.0], "area": 131, "segmentation": {"size": [512, 512], "counts": "Rlj72m?2N2N2O1N3M2N2N2N2O2TD"}, "image_id": 342, "id": 5136}, {"iscrowd": 0, "category_id": 1, "bbox": [458.0, 388.0, 37.0, 32.0], "area": 602, "segmentation": {"size": [512, 512], "counts": "h\\U72m?2N2N3N1N2N2N1O00000000101N3M2N2N2N2N001O01O002N2N3M2N2N2N2O1N2N2N3M2N2N^S8"}, "image_id": 342, "id": 5137}, {"iscrowd": 0, "category_id": 1, "bbox": [377.0, 399.0, 30.0, 43.0], "area": 720, "segmentation": {"size": [512, 512], "counts": "`ml52k?3N2M4M2M4L3N3L3N2M4M2M4M2010O00010O010O0TOZA@[ABZAB[A;V?M3N3M2M4MnRd1"}, "image_id": 342, "id": 5138}, {"iscrowd": 0, "category_id": 1, "bbox": [506.0, 404.0, 6.0, 13.0], "area": 47, "segmentation": {"size": [512, 512], "counts": "h\\m73l?2N2N2O1N3ZC"}, "image_id": 342, "id": 5139}, {"iscrowd": 0, "category_id": 1, "bbox": [271.0, 426.0, 49.0, 74.0], "area": 2018, "segmentation": {"size": [512, 512], "counts": "_nW42l?3L3N3L3N2M4XA^On=d0PB_Ol=e0QB]Om=e0QB^Ok=f0QB]Om=e0QB]Ol=f0RB]Ok=f0QB]Ol=^1M210O00010O010O0O2O01O01ON3L3N3M2M4M2EhAlN[>P1hAmN[>Q1gAmN[>Q1b0jAAS>a0kAAR>c0jAAS>a0kAAS>a0kABS>`0jABV>>hAEX>Q10010O0O2L3N2N2N2N2001O000O2M2N3L3N3M2HdAjN_>R1eAjN^>T18N3M2M4M2N3M2M4M2N2N3L3N3Mja[2"}, "image_id": 342, "id": 5141}, {"iscrowd": 0, "category_id": 1, "bbox": [379.0, 454.0, 80.0, 58.0], "area": 2553, "segmentation": {"size": [512, 512], "counts": 
"[om51m?3a@O_>0\\A33O_>0[A52O_>0\\A33O_>0\\A42O_>P1M3O100N2M3N2001O001O00001O001O00001O001O00001O001O0O2M2M3N3M2M2O3M2M4M2N3L3Gd@L^?1e@M^?080O2O3L3M4O0e@BU?=h@FX?b0O010O00010N1N2O20O01O01O010O01O01O010O0YOl@?W?_Ol@>\\?M4M2M3N`Qj0"}, "image_id": 342, "id": 5142}, {"iscrowd": 0, "category_id": 1, "bbox": [448.0, 478.0, 42.0, 34.0], "area": 1030, "segmentation": {"size": [512, 512], "counts": "g_P72l?2M4M2^@HZ?;c@GZ?a0N2N2M3N2N2N2001O001O001O00001O00O1N200001O001O00001O0O2L3N3M2M3N3M2M4M2Nl`:"}, "image_id": 342, "id": 5143}, {"iscrowd": 0, "category_id": 1, "bbox": [314.0, 0.0, 198.0, 162.0], "area": 3345, "segmentation": {"size": [512, 512], "counts": "PPm41o?001O1O1O1O1W@Ld?4[@Nd?8O001O1O1O1O1O001O00N2O1001O1O1O1O1O001O1O\\Oh@a0W?^Ok@b0Z?00O1000O100000O1000O1000O100000O1000O1000O100000O1000O100000O1000O100000O1000O1000O1000O100000O1000O100000O1000O100000O1000O1000O100000O1000O1000O100000O1000O100000O1000O100000O1000O1000O100000O1000O1000O100000O1000O100000O1000O10_Oc@>`?O10000000O10O100000O1000O100000O1000O100000O1000O100000O10O100000O1000O100000O1000O100000O1000O100000\\K"}, "image_id": 343, "id": 5144}, {"iscrowd": 0, "category_id": 1, "bbox": [487.0, 76.0, 25.0, 41.0], "area": 479, "segmentation": {"size": [512, 512], "counts": "Vcc72m?2N2N2N2N00000O10002N2N2N2N2N1O2N2N2N2N2N2N2M3N2cM"}, "image_id": 343, "id": 5145}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 11.0, 21.0, 29.0], "area": 379, "segmentation": {"size": [512, 512], "counts": ";h0X?010O01000O010O010O01N1N3M3N1N3M2N3M2O2M\\_e7"}, "image_id": 344, "id": 5146}, {"iscrowd": 0, "category_id": 1, "bbox": [56.0, 47.0, 34.0, 30.0], "area": 566, "segmentation": {"size": [512, 512], "counts": "nQl02l?2O2M2N3N2M2N3N1N3O1O010O010O10O010O10O010O10O001M3M2O2M2N3N1N3M2OVnb6"}, "image_id": 344, "id": 5147}, {"iscrowd": 0, "category_id": 1, "bbox": [249.0, 171.0, 15.0, 13.0], "area": 107, "segmentation": {"size": [512, 512], "counts": "ael31n?2N1N3N2N2O10O010O1N2N1O2M3N`jk3"}, "image_id": 344, "id": 5148}, {"iscrowd": 0, "category_id": 1, "bbox": [357.0, 248.0, 15.0, 14.0], "area": 108, "segmentation": {"size": [512, 512], "counts": "mgb52m?2N2N1O2N2O100000O1M3N2N1O2NRhU2"}, "image_id": 344, "id": 5149}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 35.0, 37.0], "area": 885, "segmentation": {"size": [512, 512], "counts": "4g0W?2N30O01O01O010O01O01O010O01N1M3O2O001O0001SOSAf0l>XOWAg0i>VOZAj0o>0OO1M4L3N3L3N2M4M2Mf_^7"}, "image_id": 345, "id": 5150}, {"iscrowd": 0, "category_id": 1, "bbox": [69.0, 0.0, 11.0, 4.0], "area": 26, "segmentation": {"size": [512, 512], "counts": "P`R11o?001O00001O001O00O1NRPh6"}, "image_id": 345, "id": 5151}, {"iscrowd": 0, "category_id": 1, "bbox": [117.0, 0.0, 78.0, 49.0], "area": 2369, "segmentation": {"size": [512, 512], "counts": "b`j12k?4M2N3L3N2M4M2O2O00001O001O00001O001O001O00001O001O00001O010O0010O0010O010O00010O010O00010O010O010M2N2N3L301O0]AgN_>]101O001O0000O1N30O00N3L3N3M2M4M2M3Ck@MX?Ok@NX?0k@MX?Ok@OW?OX`n4"}, "image_id": 345, "id": 5152}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 46.0, 71.0, 65.0], "area": 2714, "segmentation": {"size": [512, 512], "counts": "`2g0Z?O0010O002OO010O010OVO[OYBe0e=]O[Bd0a=@^B`0`=B^Ba0^=C^B`0`=B^B`0a=A\\Bc0d=]OYBd0h=\\OUBe0n=[OoAd0T>\\OjAd0Y>>3N210O0N3M21O01O010O_AhNZ>_1010O010O01O01O010O010O00010O0O2O01OM4M2N3L3N2O2O0010ON2N3L3N3L3N2M4M2M4M2Nn]l6"}, "image_id": 345, "id": 5153}, {"iscrowd": 0, "category_id": 1, "bbox": [44.0, 143.0, 13.0, 14.0], "area": 118, "segmentation": {"size": [512, 512], "counts": "gTf03j?3M4M200010O00010N1M3M4La[S7"}, "image_id": 345, 
"id": 5154}, {"iscrowd": 0, "category_id": 1, "bbox": [53.0, 148.0, 26.0, 27.0], "area": 421, "segmentation": {"size": [512, 512], "counts": "Uej03k?3M2M4M2M3N3L310O01O01O010O010O0001N1M4M2N2M4M2M4MV[h6"}, "image_id": 345, "id": 5155}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 149.0, 38.0, 62.0], "area": 1538, "segmentation": {"size": [512, 512], "counts": "e4`1a>0O00010O010O00010M2N3O000010O010O0005LO0GjAhNW>U1lAkNS>R1PBnNQ>o0QBROn=k0VBTOj=j0XBSOl=i0XBTOj=j0h0L3N2M4M2M4M2M3N3LTk\\7"}, "image_id": 345, "id": 5156}, {"iscrowd": 0, "category_id": 1, "bbox": [45.0, 180.0, 80.0, 63.0], "area": 2701, "segmentation": {"size": [512, 512], "counts": "eff04j?2M4M2M4M2M3N3M2M4M2M3N3L3O2O01O010O01O01O010O01O010O01O0lN\\Am0c>POaAo0h>10O00010OM4M2M3N3M2M4N110OiNiAe0X>XOjAh0V>UOnAk0Q>SOQBm0P>POSBP1^>00010O010O00010O010O001O0N2N3N1001M2M3N3L3N3L3N2N3L3N3L3N2M4McYQ6"}, "image_id": 345, "id": 5157}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 232.0, 28.0, 60.0], "area": 794, "segmentation": {"size": [512, 512], "counts": "a7b1_>O00010M2N3L3N2N3L3N3L100O010O010O4M2M4M20010O0Ed@O`?Nb@2f?01L^ha7"}, "image_id": 345, "id": 5158}, {"iscrowd": 0, "category_id": 1, "bbox": [83.0, 260.0, 17.0, 16.0], "area": 160, "segmentation": {"size": [512, 512], "counts": "^hY11m?3L3N3L31O010O00010O010O0O1M4M2Mjg]6"}, "image_id": 345, "id": 5159}, {"iscrowd": 0, "category_id": 1, "bbox": [20.0, 266.0, 75.0, 64.0], "area": 2546, "segmentation": {"size": [512, 512], "counts": "eY:1m?3L3N3M2M3N3M2M4M2N3L3N201O010O010O01O01O010O01O010O01O010O01O010O01O001M2N2M4M2M4M2N3L3N2N3N11N1N3M2M4M2N2M4N101O010O0M3N3L3N3M2M4M2N2M4M2N3L3N2N3L]W`6"}, "image_id": 345, "id": 5160}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 327.0, 60.0, 65.0], "area": 2101, "segmentation": {"size": [512, 512], "counts": "W:`1a>O010O0010OkN`Aj0`>TObAm06nNl=1RBP1OROo=LTBS1JTO\\>l0bAVO_>U1010O0N3L3N2N3N110O01O01O010O010O01O01O0O2L310O01O010O01O01M2N3M2M4ON2M4M2N2M4M2M4M2N2M4M2N3L3N3MReQ7"}, "image_id": 345, "id": 5161}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 414.0, 512.0], "area": 65686, "segmentation": {"size": [512, 512], "counts": 
"9R7BmH=U7CiH>X7BgH>Z7BeH?[7AdHa0[7_OdHc0[7]OdHe0[7[OcHh0\\7@[H`0f7KnG4T87`G^O]MET;W1TGTOjMCS;d1fFjNXNAS;f1`FlN_N\\OR;h1_FkNaNZOR;k1]FjNn9V1RFhNP:W1QFiNo9W1QFiNo9W1QFiNo9W1QFiNo9W1QFiNo9W1QFiNo9W1WFcNi9f1nEYNS:R2bEnM^:\\2XEdMh:g2mDYMS;R3bDnL^;j300000001O1O001O1O1O1O001O1O1O1O1O001O1O1O1O001O1O1O1O1O001SETKd:m4ZEUKe:l4YEVKf:k4XEWKg:R5O1O001O1O1OO1O1O1O1001O001O1O1O1O001O1O1O1O1O001O1O1O1O001O1O1O1O1O001O1O1O1O1O001O1O1O1O001O1O1O1O1O001O1O1O1O001O1O1O1O1O001O1O1O1O1O001O1O1O1O001O1O1O1O1O001O1O1O1O001O1O1O1O1O001O1O1O1O1O001O1O1O1O001O1O1O1O1O001O1O1O1O001O1O1O1O1O001O1O1O1O001O1O1O1O1O001O1O1O1O1O001O1O1O1O001O1O1O1O1O00^LiH\\NV7b1mH^NR7a1PI_No6_1TIaNk6]1XIbNh6\\1ZIeNe6Z1]IfNb6X1aIhN^6V1eIjNZ6T1iIlNV6R1mImNS6R1oInNP6P1RJQOm5m0VJSOi5k0ZJUOe5j0]JVOb5h0aJWO_5g0dJYO[5e0gJ\\OX5b0kJ^OT5a0nJ_OQ5?RK@n4>UKBj4V3AlL>T3@oL`0P3^OSMb0l2\\OWMd0h2ZO[Mf0d2YO^Mf0b2XO`Mi0_2UOdMk0[2SOhMm0W2ROkMn0T2POoMP1P2nNSNQ1m1XNRJlNS4m2j1WNTJiNU4P3f1WNVJgNV4R3c1WNXJdNX4U3_1WNYJbN[4W3[1VN\\J`N\\4Y3X1WNUOj1j0VNWOj1h0VNYOj1f0VN[Oj1d0VN]Oi1c0WN^Oi1d0TN]Ol1m0iMSOX2W1]MjNc2U1\\MmNd2R1\\MoNc2Q1]MPOc2V1VMkNj23aLmK:P4V31aLYLOg3`3NbLi1]3VNdLk1\\3SNeLn1[3oMgLR2Y3lMhLU2X3iMiLW2X3gMiLZ2V3eMkL\\2U3aMmL`2S3^MnLc2R3[MoLf2Q3XMPMh2Q3VMPMk2o2SMSMn2m2PMTMQ3l2mLUMT3k2jLVMW3j2gLWMY3j2dLXM]3h2aLYM`3V2^H`MP4;c3S2_HaMm3=e3o1`HeMh3>i3k1aHgMd3?l3h1bHiM`3`0n3f1dHjM\\3a0Q4c1eHlMX3b0T4`1eHoMT3c0X4[1gHRNo2d0[4X1hHTNk2e0^4U1iHVNg2e0a4T1jHVNd2f0d4R1jHXN_2f0j4P1hH[N@A`2T1Z5m0iH^N[OC`2R1^5k0iH`NWOE`2P1b5i0iHbNSOG`2n0f5g0iHdNnNJ`2k0k5f0iHQOZ19o5c0iHTOW19R6a0iHVOS19V6?iHXOo08[6>hHZOj09`6;hH\\Of09c6:iH]Ob09g67iHA>8k65iHC98Q73hHE58U71hHF29X7OhHHN9\\7MhHJJ9_7KiHME8e7IhHOA8i7GhH1]O8m7EhH3YO8Q8ChH5UO8U8@iH8oN8Z8_OhH9mN8]8]OhH;iN8a8[OhH=eN8e8ZOgH>bN8i8ZOeH>_N8o8YOcH?\\N8R9ZOaH?[N7V9ZO_H?YN7Z9ZO]H?VN7`9ZOZH>UN8c9ZOXH>SN8g9ZOUH?RN7k9YOTH`0oM7n9ZOSH?lM7T:ZOPH?jM7X:ZOnG?hM7\\:ZOlG?fM7`:ZOiG`0eM6d:ZOgG?cM7h:ZOfGe1\\8[NdGc1^8]NbGa1`8_N`G_1b8aN]G]1f8cNZG[1h8dNYGZ1h8gNXGW1j8iNVGT1m8lNSGR1o8nNPGQ1R9oNnFo0T9QOlFm0U9SOlFj0W9VOiFh0Y9WOhFg0Z9WOgFh0[9UOhFi0Z9UOhFh0Z9WOhFg0Z9WOkFd0W9YOaG0ln`1"}, "image_id": 346, "id": 5162}, {"iscrowd": 0, "category_id": 1, "bbox": [348.0, 0.0, 164.0, 215.0], "area": 22639, "segmentation": {"size": [512, 512], "counts": "jR^51n?1O2M3N2N2M3N2N1O2M3N2N2N2M3N1O2M3N2N2N2M2O2N2M3N2N2N2M2O2N2M3N2N1O2M3N2N2M3N1O2N2M3N2N1N3N2N2N2ROkLmDW3S;jLkDX3T;hLkDZ3T;gLjD[3U;fLhD]3W;dLgD^3X;bLgD_3Y;bLdDa3[;`LcDb3\\;_LbDc3];^LaDd3^;\\L`Df3`;[L^Dg3a;=O1O1O001O1O1O1O1O001O1O1O1O001O1O1O1O001O1O1O1O001O1O1O1O1O001O1O1O1O001O1O1O1O001O1O1O1O1O001O1O1O1O001O1O1O1O001O1O1O1O001O1O1O1O1O001O1O1O1O001O1O1O1O001O1O1O1O1O001O1O1O1O001O1OO1O1N2O1O1O1O1N2O1O1O1"}, "image_id": 346, "id": 5163}, {"iscrowd": 0, "category_id": 1, "bbox": [444.0, 323.0, 68.0, 64.0], "area": 2663, "segmentation": {"size": [512, 512], "counts": "Q[n61m?2M4M2N3L3N2N3L3N3M2M3N3M210O0010O0010O010O0010O0010O010O0010O\\AnNZ>R1cAQO^>o0_ATO`>V11O01O010O010O0M3N3M2N3O01O01O010O010O01O01O01M2M4M2M3N3M2M4N110O00aE"}, "image_id": 347, "id": 5164}, {"iscrowd": 0, "category_id": 1, "bbox": [413.0, 381.0, 75.0, 67.0], "area": 2822, "segmentation": {"size": [512, 512], "counts": "Sm^62k?3N3M2M4M2M3N3L3N3L3N2M4M2N3L3O2O01O01O010OO2O01O01O010O010O01O01O010O0fN^AW1a>gNaAY1c>010O010O0010O0010O1^AfN]>Y1`AjN`>[1010O01N1O110OO1M40O01O010O01O01PO^A`0b>^OaAb0_>ZOdAd0^>ZOeAb0^>\\OdA1E;Z?Bi@;_?M3N3L3Ngb;"}, "image_id": 347, "id": 5165}, {"iscrowd": 0, "category_id": 1, "bbox": [390.0, 449.0, 94.0, 54.0], "area": 2947, "segmentation": {"size": [512, 512], "counts": 
"Q_S61l?3N3L3N2M4M2M4M2M4M2M3N3M201O01O010N1M310O010O01O01O010O01O0UAoNh>U1O010O0010O0010O0010O0010O0010O00M4M2M4M200001M2M3N3L3N3L3O101O001N10O1N200010O010O00010O0N3M2O1010O010OZOk@a0U?]Om@c0X?10O0O2L3N2M4M2M4L^a="}, "image_id": 347, "id": 5166}, {"iscrowd": 0, "category_id": 1, "bbox": [279.0, 482.0, 37.0, 30.0], "area": 754, "segmentation": {"size": [512, 512], "counts": "eo[42l?2M4M2M3N3L3i@^On>e0n@^OP?j0001O00001O001O00001O001O00001O00001O001O000O2L3N3L3N2M4Me`Q3"}, "image_id": 348, "id": 5167}, {"iscrowd": 0, "category_id": 1, "bbox": [363.0, 254.0, 24.0, 40.0], "area": 359, "segmentation": {"size": [512, 512], "counts": "Qie52l?3M2N3M2N2M4M2N1O000O100000O10O13M2N3L3N3M2N2NQXn1"}, "image_id": 349, "id": 5168}, {"iscrowd": 0, "category_id": 1, "bbox": [376.0, 269.0, 16.0, 20.0], "area": 149, "segmentation": {"size": [512, 512], "counts": "mXl51m?2N3M2N3M2N3M1O02N2N2N3M2N3M2Ncgk1"}, "image_id": 349, "id": 5169}, {"iscrowd": 0, "category_id": 1, "bbox": [385.0, 274.0, 16.0, 20.0], "area": 160, "segmentation": {"size": [512, 512], "counts": "RiP61m?3M2N3M2N3M2N2N02N2N3M2N2N3M2N^Wg1"}, "image_id": 349, "id": 5170}, {"iscrowd": 0, "category_id": 1, "bbox": [481.0, 322.0, 31.0, 33.0], "area": 619, "segmentation": {"size": [512, 512], "counts": "dj`72l?2M4M2N2N3L3N3M210O010O00010O010O010O010O00010O010O010O00010cE"}, "image_id": 349, "id": 5171}, {"iscrowd": 0, "category_id": 1, "bbox": [94.0, 0.0, 33.0, 22.0], "area": 477, "segmentation": {"size": [512, 512], "counts": "RP_11m?201O0[@N\\?2a@2^?O_@3a?601O001O001O00001O001O00001O001O00001OO1M3N2M3N2M4M2NQ`P6"}, "image_id": 350, "id": 5172}, {"iscrowd": 0, "category_id": 1, "bbox": [137.0, 0.0, 100.0, 48.0], "area": 3029, "segmentation": {"size": [512, 512], "counts": "j`T22l?3M2M4M2M4M2N2M4M2M4M200010O010O010O00N3N101O00001O00N2N2M3N2N2M3N2M3N200001O001O001O00O10QA]Oa>c0]A@b>?\\ADd>:XAIg>6XALh>1XA3g>KYA7g>GYA;g>EWA>h>=01O00001O001O00001O001O00001O001O00001O001O00001O00N2M3N2M3N2M3N2M3N2M3N2M3N2M3N2M3N2M3NR`Y4"}, "image_id": 350, "id": 5173}, {"iscrowd": 0, "category_id": 1, "bbox": [463.0, 0.0, 27.0, 27.0], "area": 479, "segmentation": {"size": [512, 512], "counts": "c`W71l?4L3N3L3M3M4M20001O001O00001O001O02N0O2L3M4M2M3N3L3Mno:"}, "image_id": 350, "id": 5174}, {"iscrowd": 0, "category_id": 1, "bbox": [491.0, 0.0, 21.0, 26.0], "area": 394, "segmentation": {"size": [512, 512], "counts": "a`e71m?2N3L3N3L3N3L30001O001O00001O001O00001O00"}, "image_id": 350, "id": 5175}, {"iscrowd": 0, "category_id": 1, "bbox": [316.0, 55.0, 53.0, 47.0], "area": 1638, "segmentation": {"size": [512, 512], "counts": "ZRn42k?4M2e@Ij>:TAHi>:QAJn>6PALQ?a0010O01N1M3M4M210O01O01O01O01O010O01O01O01O010O01NN003001O010O010O0010O0QOZAd0f>ZO]Ad0e>XO^Ae0Q?M2M4L3N2M4M2Mg]W2"}, "image_id": 350, "id": 5176}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 65.0, 45.0, 101.0], "area": 2501, "segmentation": {"size": [512, 512], "counts": "a2c2]=10O00M4M2M0010O011N4M2M3M4M2O2O01O01O0N3L3N2ZNSBV1Q>gNRBV1P>gNSBV1Q>gNQBV1_>M2M4O000010O01N1M3N3L3M4M2M3N3L3N3LW]Y7"}, "image_id": 350, "id": 5177}, {"iscrowd": 0, "category_id": 1, "bbox": [39.0, 78.0, 67.0, 101.0], "area": 3479, "segmentation": {"size": [512, 512], "counts": "idc02l?3M2d@Jk>:RAIk>9SAIk>:QAIl>:RAIk>g0N3@kNQBW1m=kNPBY1m=jNPBX1m=kNQBX1`=bNeB9HW1a=cNdBl1Z=VNdBm1Y=UNdBn1Y=?M2N3M2N30O0N3L10000O4M2N3M2O20ON3M2N2N3L3N3M2N3L3N2]NiA[1_>M4M2N0010101O010N1NO10O1000002M4M2Co@IS?5o@HU?5n@HT?6=L3N_mZ6"}, "image_id": 350, "id": 5178}, {"iscrowd": 0, "category_id": 1, "bbox": [381.0, 87.0, 35.0, 51.0], "area": 1309, "segmentation": {"size": 
[512, 512], "counts": "Vcn53e?8I8O000\\AAg=?RBHn=8kAOU>1cA7^>e001O0000000001O01O0000000000010O00000000N2I8G8I7I7I7H]m_1"}, "image_id": 350, "id": 5179}, {"iscrowd": 0, "category_id": 1, "bbox": [297.0, 98.0, 56.0, 50.0], "area": 1706, "segmentation": {"size": [512, 512], "counts": "Tdd42l?3L3DKk@8Q?Kl@8R?Kk@7R?>L3N3L3O110O010O00010O010O00010OO2M210O00010O010O00010O010O00010O010O00010O0N3L3N2M4M2M4M2M3N3L3N3L^\\_2"}, "image_id": 350, "id": 5180}, {"iscrowd": 0, "category_id": 1, "bbox": [102.0, 109.0, 88.0, 61.0], "area": 3109, "segmentation": {"size": [512, 512], "counts": "[Tc13k?2M4M2N2N3L3N3N101O01O010O01N1M4M2N2M4O0010O01O01O010O010O01O01O010O010O01M2N2N3O010O0010O00O2L3O2N101O000O2O001O010O01O01O0102M3N1N0N2M4M2N3L3N3M2M3N3M2O2O0O101N101O0O2M5K5LkkP5"}, "image_id": 350, "id": 5181}, {"iscrowd": 0, "category_id": 1, "bbox": [433.0, 114.0, 46.0, 57.0], "area": 1445, "segmentation": {"size": [512, 512], "counts": "Peh61m?2M4M2N3L3N3M2H]OPAf0n>\\OPAg0m>7M4M2O2O0O0O3M2M4M2N210O0010O010O0N3L3N3M2M3N3M2M4M2N3L3N3M2M3N3M2M4M2NU\\`0"}, "image_id": 350, "id": 5182}, {"iscrowd": 0, "category_id": 1, "bbox": [343.0, 134.0, 28.0, 28.0], "area": 487, "segmentation": {"size": [512, 512], "counts": "gd[53k?3L3N3L3N2M4M201O01O010O01O01O010O01O01N1M4M2N2M4M2M4Md[V2"}, "image_id": 350, "id": 5183}, {"iscrowd": 0, "category_id": 1, "bbox": [399.0, 155.0, 29.0, 29.0], "area": 481, "segmentation": {"size": [512, 512], "counts": "_eW61m?2M4M2M3N3L3N3L31O010O01O01O010O01O010O01L3N2N3L3N3L3N2MPkY1"}, "image_id": 350, "id": 5184}, {"iscrowd": 0, "category_id": 1, "bbox": [281.0, 161.0, 50.0, 30.0], "area": 1445, "segmentation": {"size": [512, 512], "counts": "Re\\4m0S?000000000000000000000000000000000000O10000000O100000000000000000000000000000000000000000000000000oZj2"}, "image_id": 350, "id": 5185}, {"iscrowd": 0, "category_id": 1, "bbox": [352.0, 164.0, 25.0, 25.0], "area": 382, "segmentation": {"size": [512, 512], "counts": "dU`53j?3N2M4L3N3L30010O0010O0010O0010O001M2M3N3L3N3L3NgZS2"}, "image_id": 350, "id": 5186}, {"iscrowd": 0, "category_id": 1, "bbox": [71.0, 167.0, 89.0, 66.0], "area": 3122, "segmentation": {"size": [512, 512], "counts": "TfS12l?2M4M2N2M4M2N3L3N2N3O0010O00010O01O01O01O010O000N3L3N3N10001N101O0O101O001O01O010O01O01O010O010O0010O010^AeN]>_101O0O101N101O0O101O010O01O010O0iNjAGOe0X>@lAGOi0X>ZOlAKOj0W>WOUBj0`>010O01O01O01N1N3L3N2M4M2M4M2M3Nai_5"}, "image_id": 350, "id": 5187}, {"iscrowd": 0, "category_id": 1, "bbox": [474.0, 182.0, 38.0, 68.0], "area": 1617, "segmentation": {"size": [512, 512], "counts": "WW]73k?2N3L3N3M2M4M2N2N3L3N3M2M4M2N2M4M2N3M2M4M2O1010O010O01O01O010O0oNPB3P>JSB5m=IUB8j=EZB:g=C[B=e=A]B`0f7"}, "image_id": 350, "id": 5188}, {"iscrowd": 0, "category_id": 1, "bbox": [376.0, 183.0, 48.0, 56.0], "area": 1670, "segmentation": {"size": [512, 512], "counts": "lVl53k?2M3N3M2_@D[?c0M2N2M4M2M4M2N3L3N2M4N1010O00010O010O00010O010O01O01O010O0N2N3M2M3N3L3N3L3YOn@>T?@o@=[?M3N3L3Nhi[1"}, "image_id": 350, "id": 5189}, {"iscrowd": 0, "category_id": 1, "bbox": [338.0, 201.0, 23.0, 21.0], "area": 286, "segmentation": {"size": [512, 512], "counts": "fVY51m?2M4M2N3M2N3O00010O010O010O00010OO2M2M4M2N2MdY[2"}, "image_id": 350, "id": 5190}, {"iscrowd": 0, "category_id": 1, "bbox": [252.0, 213.0, 51.0, 41.0], "area": 1522, "segmentation": {"size": [512, 512], "counts": "bWn33d?9G9G9N2001O01O0000000001O01O0000000001O01O000000000SAROh>S11O00000001ON2M4O0000000001O01O000000000L4H8I8H[YX3"}, "image_id": 350, "id": 5191}, {"iscrowd": 0, "category_id": 1, "bbox": [322.0, 218.0, 33.0, 54.0], "area": 1066, 
"segmentation": {"size": [512, 512], "counts": "YXQ51^?b0G8M3N3L3M4M2M3N3L3O2O0010O0N3L3N3M2M3N3L3N3M2M4M2N2M4M2N3L3N2NPY^2"}, "image_id": 350, "id": 5192}, {"iscrowd": 0, "category_id": 1, "bbox": [351.0, 239.0, 45.0, 50.0], "area": 1341, "segmentation": {"size": [512, 512], "counts": "ch_52l?3L3M4M2M3N3L3M4M2M4L3N2M4M210O00010O010O00010O010O0010O001N1M4M2M3N3L3N3M2M3N3L3N3M2M4MVhi1"}, "image_id": 350, "id": 5193}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 262.0, 50.0, 57.0], "area": 1488, "segmentation": {"size": [512, 512], "counts": "VYW72l?2M4M2N3M2N2N30O01O01O010O01O010O01O010O01O010O0O1N3L3N3M2M3N3M2M4O001O01O010O01O001M2M3O20OfA]NW>f110O0hG"}, "image_id": 350, "id": 5194}, {"iscrowd": 0, "category_id": 1, "bbox": [383.0, 270.0, 35.0, 35.0], "area": 672, "segmentation": {"size": [512, 512], "counts": "Sio51m?3M2O2M3M2O2M3M2O2M2N3M3O010O01000O01000O01O0N3N2M2N3N1N3M3N1N3M2O2MWg^1"}, "image_id": 350, "id": 5195}, {"iscrowd": 0, "category_id": 1, "bbox": [232.0, 272.0, 51.0, 34.0], "area": 1391, "segmentation": {"size": [512, 512], "counts": "WYd35b?9H9I600000000000010O00000000000010O000000000000010O00000000000010O00000000000010O0000000000000I8G8IaWb3"}, "image_id": 350, "id": 5196}, {"iscrowd": 0, "category_id": 1, "bbox": [299.0, 273.0, 32.0, 30.0], "area": 569, "segmentation": {"size": [512, 512], "counts": "Tie42k?3N3L3N3L3N2M4N110O00010O010O0010O0010O010O0001L3N3M2M4M2M3N3MWWj2"}, "image_id": 350, "id": 5197}, {"iscrowd": 0, "category_id": 1, "bbox": [326.0, 285.0, 23.0, 23.0], "area": 316, "segmentation": {"size": [512, 512], "counts": "]YS51l?3N2M4M2M4M2O2O010O01O010O01O01N1N3M2M4M2N2MPWa2"}, "image_id": 350, "id": 5198}, {"iscrowd": 0, "category_id": 1, "bbox": [457.0, 302.0, 38.0, 31.0], "area": 644, "segmentation": {"size": [512, 512], "counts": "niT72k?3N3M2N3L3N3M21O01O010O010O01O010O01O010O01O010O01O010O01O010M2N2N3L3N3M2MXV8"}, "image_id": 350, "id": 5199}, {"iscrowd": 0, "category_id": 1, "bbox": [310.0, 316.0, 67.0, 89.0], "area": 2846, "segmentation": {"size": [512, 512], "counts": "U\\k42l?2XONaA4\\>NbA5\\>MbA4]>N`A5]>MbA5\\>MbA5[>NbA4]>NaA4]>MaA6\\>MbA4]>i0N2M2O2M3N2N1N3N2M10O10O0100O0100O0100O010O01000O3N2M2O2M3N2N1N3N2M2O2M3N2N1N3N2M3N1N3N2N2M2O2M3N2M2O2N2M3N1N`US2"}, "image_id": 350, "id": 5200}, {"iscrowd": 0, "category_id": 1, "bbox": [210.0, 328.0, 59.0, 43.0], "area": 1479, "segmentation": {"size": [512, 512], "counts": "nZY33k?2N2M4M2M4M2N2M4M2O2O010O01O010O01O01O010O01O010O01O010O01O01O010O01O01O01N1M4M2N3M21O010L3N2M4M2M4M2N3L3N2M4M2N3L3NbUi3"}, "image_id": 350, "id": 5201}, {"iscrowd": 0, "category_id": 1, "bbox": [423.0, 332.0, 52.0, 44.0], "area": 1709, "segmentation": {"size": [512, 512], "counts": "ejc647N\\?7^@O]??J6K5K2N000O100000O10O100000O10O100000O10O100000O10O100000O1000O1000O1000O1000O1000O11O5J6K5K5K5K5JfTb0"}, "image_id": 350, "id": 5202}, {"iscrowd": 0, "category_id": 1, "bbox": [277.0, 335.0, 28.0, 29.0], "area": 496, "segmentation": {"size": [512, 512], "counts": "TkZ41l?3N3L3N2M4M2M4L300010O01O01O01O010O01O0N2M4M2M4L3N2M4M\\UW3"}, "image_id": 350, "id": 5203}, {"iscrowd": 0, "category_id": 1, "bbox": [10.0, 375.0, 67.0, 56.0], "area": 2028, "segmentation": {"size": [512, 512], "counts": "W\\51o?4K4M4L3L5L4L3M4K5L3M3M0O10O1000O01000O10O1000O10O1000O01C_A[Oa>d0cAYO]>g0gAUOX>l0>00O01000O10O1000O10O1000O10O10O10O1000O10O1000O10O10O5L4L3M4K5L3M4K`Si6"}, "image_id": 350, "id": 5204}, {"iscrowd": 0, "category_id": 1, "bbox": [432.0, 377.0, 30.0, 30.0], "area": 439, "segmentation": {"size": [512, 512], "counts": 
"U\\h61n?2N2N2N2N2N2N2N2N2N2N2N200000000000N2N2N2N2N2N2N2N2N2N2O2Mfch0"}, "image_id": 350, "id": 5205}, {"iscrowd": 0, "category_id": 1, "bbox": [403.0, 383.0, 29.0, 29.0], "area": 441, "segmentation": {"size": [512, 512], "counts": "[lY61n?3M2N2O1N3M2N2N2N2O2M2N2N2N10O2N2N3M2N2O1N2N3M2N2N2O1N3MbcW1"}, "image_id": 350, "id": 5206}, {"iscrowd": 0, "category_id": 1, "bbox": [348.0, 387.0, 52.0, 58.0], "area": 1414, "segmentation": {"size": [512, 512], "counts": "Y]^52n?1N2N2N3M2N2N2N2O1N3M2N2N2N2N2AQOkAP1S>ROkAo0T>TOiAl0W>VOgAj0Z>WOdAi0\\>YObAg0^>=01O000001O00002O2M2N2N2N2N2N3M2O1N2N2N2N2N3M2O1N2N2N2N3M2NTcg1"}, "image_id": 350, "id": 5207}, {"iscrowd": 0, "category_id": 1, "bbox": [482.0, 395.0, 30.0, 62.0], "area": 1271, "segmentation": {"size": [512, 512], "counts": "n]a72l?2ROMmA6o=NmA5Q>MmA6P>MlA6Q>MmA5Q>NkA6R>LlA6T>JjA9V>GfADdA?\\>b000010O010O00010O01M2N2N0O11O3L3N3LfC"}, "image_id": 350, "id": 5208}, {"iscrowd": 0, "category_id": 1, "bbox": [452.0, 401.0, 16.0, 20.0], "area": 153, "segmentation": {"size": [512, 512], "counts": "h\\R72m?2N2N2N2N2M3000000000G_@2a?La@3`?Kb@3g?N2NPce0"}, "image_id": 350, "id": 5209}, {"iscrowd": 0, "category_id": 1, "bbox": [427.0, 427.0, 36.0, 29.0], "area": 716, "segmentation": {"size": [512, 512], "counts": "lme65f?5K5K6M2001O01O0000010O000001O01O0001O01O0001O01O0001O01O0000L5K4L4K5LbRh0"}, "image_id": 350, "id": 5210}, {"iscrowd": 0, "category_id": 1, "bbox": [372.0, 439.0, 52.0, 56.0], "area": 1408, "segmentation": {"size": [512, 512], "counts": "W^j52m?2O1N3M2N2N2N3N1N2N3M2N2O1N3M2QAQOj>T1N2O1N2N20O10000010O0000010O00O1O1N3M2N2N2N3N1N2N2N2N3N1N2N2N3M2N2O1N3MTa[1"}, "image_id": 350, "id": 5211}, {"iscrowd": 0, "category_id": 1, "bbox": [163.0, 442.0, 62.0, 36.0], "area": 1724, "segmentation": {"size": [512, 512], "counts": "Una2h0X?1O00000000000000000000000000000000000000000F:000000000000000000000000000000000000O100000000008A700000000000000O10000000c0]O[Q_4"}, "image_id": 350, "id": 5212}, {"iscrowd": 0, "category_id": 1, "bbox": [473.0, 457.0, 29.0, 29.0], "area": 476, "segmentation": {"size": [512, 512], "counts": "Po\\71f?0a@2]?0a@2]?0a@2]?0a@2]?:N1O000000002N2N2N2N11N2N2N2N2N2M3N2N2N2N2N2N2N[a4"}, "image_id": 350, "id": 5213}, {"iscrowd": 0, "category_id": 1, "bbox": [204.0, 479.0, 29.0, 28.0], "area": 468, "segmentation": {"size": [512, 512], "counts": "a_V31m?2N3L3N3M2M4M2N3O01O01O010O010O01O010O01ON3M2M4M2N3M2M3NkP[4"}, "image_id": 350, "id": 5214}, {"iscrowd": 0, "category_id": 1, "bbox": [247.0, 483.0, 52.0, 29.0], "area": 811, "segmentation": {"size": [512, 512], "counts": "ook31n?100O1O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O11O1O1O1O2N1O1O1O1O1O1O1O1O1O1O1N2N2N2N3MXPZ3"}, "image_id": 350, "id": 5215}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 488.0, 31.0, 24.0], "area": 491, "segmentation": {"size": [512, 512], "counts": "c_W73j?4M2M4O000^@E`?>O00010O0N3L300010O00010O00001O001O0O1M4M2M3M4M2MeP9"}, "image_id": 350, "id": 5216}, {"iscrowd": 0, "category_id": 1, "bbox": [144.0, 496.0, 38.0, 16.0], "area": 379, "segmentation": {"size": [512, 512], "counts": "j_X22m?2N2N2N2M2O1O1O1O1001O1O1O1O1O1OO1O1O1O1O11O001O1O1O1O1O1O1O1O1O1O001O1O1OQ`T5"}, "image_id": 350, "id": 5217}, {"iscrowd": 0, "category_id": 1, "bbox": [193.0, 500.0, 24.0, 12.0], "area": 188, "segmentation": {"size": [512, 512], "counts": "noP32k?3N2N2N2O1001O00001O001O001O001O00001O001O0O2MVPc4"}, "image_id": 350, "id": 5218}, {"iscrowd": 0, "category_id": 1, "bbox": [29.0, 0.0, 16.0, 11.0], "area": 104, "segmentation": {"size": [512, 512], "counts": 
"V`>1n?1O2O0O1O1O2N10001O00001OM3M3MS`Y7"}, "image_id": 353, "id": 5219}, {"iscrowd": 0, "category_id": 1, "bbox": [50.0, 0.0, 66.0, 41.0], "area": 1803, "segmentation": {"size": [512, 512], "counts": "aPi02k?4M2M3M4L3M301O00001O00001O00001O00001O00001O001O00001O0PATOl>P101O00001O00001O00001O001O00001O00001O00001O0000M3M3M3M3N2M3M3M3M3M3M3M3MSPV6"}, "image_id": 353, "id": 5220}, {"iscrowd": 0, "category_id": 1, "bbox": [130.0, 0.0, 57.0, 59.0], "area": 2138, "segmentation": {"size": [512, 512], "counts": "SQQ23i?4M3M4K4M3N3O00010o@[Oe>e0XA^Oh>b0TABm>h001O0N2M4K4M301O01N1000M3M4N10001O0000001O00001O00001O00L4M3M3L4M3M3L4M3M3L4M3M3L4M3M3L4MS`R5"}, "image_id": 353, "id": 5221}, {"iscrowd": 0, "category_id": 1, "bbox": [270.0, 0.0, 79.0, 45.0], "area": 2621, "segmentation": {"size": [512, 512], "counts": "QQW43]?`0000000000001O00000000000C=00000000001O00000001O000000000000000000000K5000000000001ZAkN_>[10000000000000000000000000000000000000000000000000M3B>000000O1H8I700000P`a2"}, "image_id": 353, "id": 5222}, {"iscrowd": 0, "category_id": 1, "bbox": [194.0, 17.0, 62.0, 65.0], "area": 2873, "segmentation": {"size": [512, 512], "counts": "oQQ3a0[?400000000000001O0000000000000mNS1N200000000000000001O00000000J60001O00I71O000000000000000001O0000000000000000000SOlANT?O0000000_Oono3"}, "image_id": 353, "id": 5223}, {"iscrowd": 0, "category_id": 1, "bbox": [259.0, 34.0, 6.0, 9.0], "area": 49, "segmentation": {"size": [512, 512], "counts": "RaQ48h?000000001m^k3"}, "image_id": 353, "id": 5224}, {"iscrowd": 0, "category_id": 1, "bbox": [275.0, 54.0, 81.0, 59.0], "area": 3022, "segmentation": {"size": [512, 512], "counts": "nbY44e?7J6J6I7N3O0000000001O01O00000000000010O000000000001O01O0000000001O0001O0000M3L:J001O00000000000010O00000000O1G:O00000001O0001O0000N2E9fAJW>:eAIX>=bAF[>Q1M4N100TB\\N\\=e1`B^Na=a1\\BbNd=^1YBfNf=[1VBhNk=f101O01O01O01O01O000O20O0001O0M3M4L2N00000003M3M3M4L3M3M4L3M3M4L3M3M4L3M3M4L30010L3M3M4L3M3M4L3MTnn6"}, "image_id": 353, "id": 5226}, {"iscrowd": 0, "category_id": 1, "bbox": [45.0, 82.0, 60.0, 95.0], "area": 3022, "segmentation": {"size": [512, 512], "counts": "Tef04i?3M4L3M3M3010O00O2L3M3M4_OQOlAR1Q>QOlAR1Q>ROjAS1R>POkAS1R>?L5M21O0001M2M3M4L3M3L5L3M02M4M4L30001O01O01O0O1M4lN]BEg=8]BFd=7_BIb=2bBN^=OfB1Y=LjB4V=ImB7T=EPC;o:gAIU>;gAIV>:gAJU>:gAIV>:gAIV>:gAJU>R1L4M3M4L3N2010O00010O00010O00010O00010O000N2L5L3M3M4_NjAS1Y>jNjAS1Y>jNkAQ1d>M3M4L31O00010OO1L5L3M3M4L3M3L`lW5"}, "image_id": 353, "id": 5229}, {"iscrowd": 0, "category_id": 1, "bbox": [300.0, 128.0, 99.0, 167.0], "area": 7993, "segmentation": {"size": [512, 512], "counts": "dTf41g?8H8L40000000001O01O0000000000010O00000000\\A]Ok=f0RB_Oi=f0RB^Ok=f0oA@l=e0oA[OQ>BPB_11`No=a1QBcNk=]1UBhNf=Z1XBkNc=i10O_BlMW=U2hBQNS=o1mBVNni0[A_Oe>o0O0001OJ6000001O000001O00000001O0M3000000000010O00000000000010O00000K5I7H8H9H7Hh[\\3"}, "image_id": 353, "id": 5231}, {"iscrowd": 0, "category_id": 1, "bbox": [157.0, 142.0, 74.0, 67.0], "area": 2812, "segmentation": {"size": [512, 512], "counts": "je^21l?4L3M3M4N10010O0000Mm@Fg>5UA4j>IQA?o>]OPAh0U?O00000000E]OYAd0d>^O\\Ab0d>_O[Aa0e>_O\\A`0e>_O[Aa0e>_O[Aa0e>_OWAe0i>9N2H9F9I70000000000010O000000000000010O0000000WNSB_1W>00001O0000_NiAY1_>0000I7H8J7O0000000000000010O00J6H8HT[\\4"}, "image_id": 353, "id": 5232}, {"iscrowd": 0, "category_id": 1, "bbox": [30.0, 160.0, 6.0, 6.0], "area": 21, "segmentation": {"size": [512, 512], "counts": "TU?1l?3O1010N1MRk]7"}, "image_id": 353, "id": 5233}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 168.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": 
"X52fjo7"}, "image_id": 353, "id": 5234}, {"iscrowd": 0, "category_id": 1, "bbox": [2.0, 176.0, 67.0, 76.0], "area": 2914, "segmentation": {"size": [512, 512], "counts": "iV13j?3M4K4M3M3L5L3M3M4N100010O0001O01O0001cAoNj=P1SBSOm=m0PBWOo=j0lAZOU>e0hA^OX>V1O0001O01O01O01O01O01O0N2M3L5L3M3M4O01M2N20010M2M3M4CQBcNS>[1PBaNS>_18O000L4M4L3M3L5L3M3M4K4M3M4K\\Zm6"}, "image_id": 353, "id": 5235}, {"iscrowd": 0, "category_id": 1, "bbox": [65.0, 192.0, 63.0, 79.0], "area": 2920, "segmentation": {"size": [512, 512], "counts": "ngP13j?3L5L3M3M4K4M3M4L31O00010ON2M4L3M3M4N10010O000O1M4L3N201O01O01O01O01O0N2M3M4M2O110L3O101O01O0L4M4L3M3TNQBW12nNV>o0mAnNV>o0nAmNV>o0?L4M4L3M3L5L3M3Miio5"}, "image_id": 353, "id": 5236}, {"iscrowd": 0, "category_id": 1, "bbox": [295.0, 198.0, 9.0, 8.0], "area": 54, "segmentation": {"size": [512, 512], "counts": "Xfc42m?5L00000O1001N5LeiW3"}, "image_id": 353, "id": 5237}, {"iscrowd": 0, "category_id": 1, "bbox": [139.0, 203.0, 9.0, 12.0], "area": 70, "segmentation": {"size": [512, 512], "counts": "afU24i?4L31O01O01N1L4Lgie5"}, "image_id": 353, "id": 5238}, {"iscrowd": 0, "category_id": 1, "bbox": [130.0, 212.0, 55.0, 74.0], "area": 3012, "segmentation": {"size": [512, 512], "counts": "\\WQ2=c?000000001Ob0^O0000000UAYO]>U1000000O10000G9O100000000jAYNS>j10001B=0000000000000N20001O0000000001O000000000000000000gNfBLZ=YOaCg0l=FXXS5"}, "image_id": 353, "id": 5239}, {"iscrowd": 0, "category_id": 1, "bbox": [200.0, 216.0, 45.0, 67.0], "area": 2194, "segmentation": {"size": [512, 512], "counts": "gWT31o?8H8G8XO]OeAc0[>E]A:d>a000DPOgAQ1Y>;07I6J00000O01000;EJ]NoAc1P>700000O100000O10000000O1000007@iAiNZ>T19000004L8G8I8H7IiWU4"}, "image_id": 353, "id": 5240}, {"iscrowd": 0, "category_id": 1, "bbox": [252.0, 219.0, 63.0, 62.0], "area": 2507, "segmentation": {"size": [512, 512], "counts": "SWn34l?6J5J7J6J5K1OO010004L6I6K6J1O000O010000000O0100002N0O0100000O1000O1000O1002GkAcNZ>X1k0`AYO51V>R1fASOU>a1K5K5J10O1000UDjMk9W2ZDjM73P10[:]2`DhMP10[:l2_EYM]:l2^ETMa:R3ZEnLf:U3WEkLi:U3WEjLj:V3UEkLk:U3UEkLj:V3VEjLj:U3hDaL7:Q;b3iD^LX;W4M5L0000O0100000O10O100000O01001O5J5O101K4L1O000O2O5K4]LPDn2U0000000001O01O000000J6H8000000000000000000000000000000010O00000000000000000000000cNXBb0c>0f@^OV?f000000001O00000000000000000000000001O000AbTf5"}, "image_id": 353, "id": 5245}, {"iscrowd": 0, "category_id": 1, "bbox": [155.0, 367.0, 95.0, 62.0], "area": 3233, "segmentation": {"size": [512, 512], "counts": "[l]23m?5K4L5J6K5K4L3M0O1000O1000O1000O10O1000O1000O1000O1000O1CmNmAS1S>ROhAm0Y>WOcAi0]>=00O0100000O10O100000N2OLeAdN\\>\\14O1000O1000IaAlN`>T1510O100000O0100003M3L0103M5K5K4K6K10N4M000000O0100000O10O100000O010000000O3N5K5K4L5JZcR4"}, "image_id": 353, "id": 5246}, {"iscrowd": 0, "category_id": 1, "bbox": [267.0, 389.0, 81.0, 49.0], "area": 2559, "segmentation": {"size": [512, 512], "counts": "UmU45k?5J6K2N0000OAC\\A>d>GWA9h>MSA3m>`00O1000O1000O10KSOWAm0h>6000O1000O1000O1000O1000O1000O1000O1000O12N2M10O100000O10O100000O10O100000O10O100000O10O100000O10O10HUA[Ok>e0ZAVOf>i090O100003L6K5K5K5KTca2"}, "image_id": 353, "id": 5247}, {"iscrowd": 0, "category_id": 1, "bbox": [61.0, 422.0, 80.0, 51.0], "area": 3000, "segmentation": {"size": [512, 512], "counts": "^mn0`0\\?40000000000000000i@]O=0Q>]1O000000000L41O00000000000000000000000000000001O00M3000001O000000000000000hNdAl0i>O00000000000001O000000000000000000000000000001O00000J6H80000000ZRi5"}, "image_id": 353, "id": 5248}, {"iscrowd": 0, "category_id": 1, "bbox": [213.0, 435.0, 57.0, 76.0], "area": 2715, "segmentation": {"size": [512, 512], "counts": 
"knZ31n?7J5@Hn@=m>Hn@=m>;O0100000ISO[Am0e>6100000O10OCmNoAS1Q>ROjAn0V>>O1000O100000O10O100002M7J5K6J00000O10O100000O1003L7J5K5K6J5J6K6J5K6J5K5J7JY`h3"}, "image_id": 353, "id": 5249}, {"iscrowd": 0, "category_id": 1, "bbox": [154.0, 443.0, 51.0, 69.0], "area": 2698, "segmentation": {"size": [512, 512], "counts": "`_]2?a?1VOLfA4Z>i000000000O1ATOjAl0V>?00O10000000000000000000000000000F:0000000000O10000000000000000002N`0@a0_O;E000000=CZPi4"}, "image_id": 353, "id": 5250}, {"iscrowd": 0, "category_id": 1, "bbox": [279.0, 450.0, 80.0, 46.0], "area": 2257, "segmentation": {"size": [512, 512], "counts": "]n[43m?5^@Ln>8n@Ml>9n@Ln>f0MO1000O10O1000O1000O1002M1000O10O1000O1000O1000O10O1000JYAROh>n050O10O1000O1000O1000O10O1000O1002M5L5K000O01000000O01000000N11000O1000O10O1000O1000O2O5K4L5JZQ\\2"}, "image_id": 353, "id": 5251}, {"iscrowd": 0, "category_id": 1, "bbox": [53.0, 483.0, 89.0, 29.0], "area": 1802, "segmentation": {"size": [512, 512], "counts": "Yoj0e0Z?2O00000000001O00000000000000000K500000000000000000000000000000000006J000000000000000000000000007I00000000000000000000001O0000000000002N000000000000000000001O00000000000000000O]`h5"}, "image_id": 353, "id": 5252}, {"iscrowd": 0, "category_id": 1, "bbox": [59.0, 0.0, 82.0, 18.0], "area": 1252, "segmentation": {"size": [512, 512], "counts": "R`m0?_?20000000001O000000000000000000000000000000000000000000000000000000L40000001O000000000000000000000000000000000000M300000000000000000000000000000000001O00000000000P`i5"}, "image_id": 354, "id": 5253}, {"iscrowd": 0, "category_id": 1, "bbox": [155.0, 0.0, 50.0, 9.0], "area": 267, "segmentation": {"size": [512, 512], "counts": "P`]21o?0000008H00000000O100000000000000000000000000000000O100000000LX@0h?0400000000O100000000000000000000000P`i4"}, "image_id": 354, "id": 5254}, {"iscrowd": 0, "category_id": 1, "bbox": [397.0, 38.0, 63.0, 35.0], "area": 1367, "segmentation": {"size": [512, 512], "counts": "_aV64k?5L5K4L5K4K1000000O01000000O01000M3O010000000O01000000O01000000O01000000O01000002M1000O1000O10004L2M100000O10O1000O10O5L4L5KXni0"}, "image_id": 354, "id": 5255}, {"iscrowd": 0, "category_id": 1, "bbox": [412.0, 60.0, 77.0, 102.0], "area": 4672, "segmentation": {"size": [512, 512], "counts": "^R^64l?4L5K4K6K3M0o@WOk>i0UA\\Oe>e0[A_Oa>a0^AD^>;cAIY>7gANT>2lAOR>2mAO2ZOg=f0XB01^Oc=b0\\B01B_=>`BO1IY=9fBN1MU=5iBO2OR=KkB_O2g01OmlNfAo0k>K4L4L4K3N002N4L5Jd[;"}, "image_id": 354, "id": 5256}, {"iscrowd": 0, "category_id": 1, "bbox": [309.0, 65.0, 96.0, 51.0], "area": 2849, "segmentation": {"size": [512, 512], "counts": "cbj44l?5K5K5K5J4M00000O10OJWOWAi0i>7000O0100000O10O100000O0100000O10O10003M000O10O100000O010000000O010000000O010000000O04M4L5K4K100000004010J5L1O0000O01000000O01000000O01000000O01000000O0100003M5J5L5K4Lg\\e1"}, "image_id": 354, "id": 5257}, {"iscrowd": 0, "category_id": 1, "bbox": [200.0, 72.0, 100.0, 71.0], "area": 3587, "segmentation": {"size": [512, 512], "counts": "WST31o?5K5K5J5L5K5K1OO03N5K3M000O0100000OF_AWOa>i0dAQO]>o0:01N100000O10OO20N1100000O010000000O010000000O01000000O01000000O010000000O010000000O0100DVObAj0^>[O]Ad0c>=0000000O010001O5J6K5K1O000O10O100000O0101O5K5K4K6K5K5K4L5J6K5K4LSlY3"}, "image_id": 354, "id": 5258}, {"iscrowd": 0, "category_id": 1, "bbox": [110.0, 97.0, 93.0, 71.0], "area": 3853, "segmentation": {"size": [512, 512], "counts": "ZTg12n?5_OJn@:n>Kl@;o>Im@;o>:10O10005K5J2O000000O0100000O104L00O10O100000ZOnN]BS1c=ROXBk0FlNR>>TBe0o=@lA`0T>f000O010000000O010000000O01000000L30100000O10O100000O10O10005K4K6K5K4L2N0O010000000O0105K5K2N0O1000O1000O13M5J5L5K5K4L5J6KV[j4"}, "image_id": 
354, "id": 5259}, {"iscrowd": 0, "category_id": 1, "bbox": [13.0, 114.0, 82.0, 53.0], "area": 3331, "segmentation": {"size": [512, 512], "counts": "ec6b0^?000000000000a0_O000000000000000000?A00000000000000000000000000000000000M_NgAa1Y>30000000000000000000000002N00000000000000000000002N00000XOh000000000000000000000000000000c0]Og[`6"}, "image_id": 354, "id": 5260}, {"iscrowd": 0, "category_id": 1, "bbox": [305.0, 123.0, 5.0, 6.0], "area": 21, "segmentation": {"size": [512, 512], "counts": "lch44l?1N11O4LPlT3"}, "image_id": 354, "id": 5261}, {"iscrowd": 0, "category_id": 1, "bbox": [285.0, 138.0, 63.0, 76.0], "area": 2753, "segmentation": {"size": [512, 512], "counts": "ge^44l?4K5L5oNAWB?h=GSB9m=KnA5S>OiA1W>4dAL[>9aAG_>l0100000O10OJcNkA]1T>iNgAW1Y>:N000O1000O1000O10O1000OO2O100005J5L5K4L2M10O4M4L5K4L5J5L4L5K4K5L5K2NO0100000001N5L5K4L4Keia2"}, "image_id": 354, "id": 5262}, {"iscrowd": 0, "category_id": 1, "bbox": [353.0, 141.0, 74.0, 72.0], "area": 3130, "segmentation": {"size": [512, 512], "counts": "ae`53m?5K4L5J5mN]OaBg0[=]OaBc0_=A]B?c=FXB:h=JSB6m=0nA0R>4jALV>o00O010002N0O10O100000O010004L4K5L5K4L1MUAoNk>Q12O0100000O010000000O0103M4L5J5L3M0000O01000000O01000000O01003M4K6K4L5K4L4K6K4L5K4KfYZ1"}, "image_id": 354, "id": 5263}, {"iscrowd": 0, "category_id": 1, "bbox": [432.0, 149.0, 70.0, 63.0], "area": 2783, "segmentation": {"size": [512, 512], "counts": "WUh65j?6K5K5K5K0O010000000O010PAWOi>i0VA]Oe>c0[AB`>>`AGZ>9gAGY>9gAGY>9fAHZ>1bAC4;U>ChA?4^OO`0U>8RBHn=8RBHm=8SBIm=7SBHn=8nAXOK`0W>8nALR>4nALQ>4oAMQ>3oALR>4nALR>4nALR>4nALQ>4oAMQ>3oALR>4nALR>6lAJT>;gAFW>R100000O4M00O1000OM_NiAa1W>400000O010000000O014M4K:F5K5K5J6K2N001O5J6Kgi4"}, "image_id": 354, "id": 5264}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 159.0, 21.0, 38.0], "area": 428, "segmentation": {"size": [512, 512], "counts": "Q5T1k>0002N2N2N3M2N2O1N2N2N2N2N002N2O1N3M2N2N`Ze7"}, "image_id": 354, "id": 5265}, {"iscrowd": 0, "category_id": 1, "bbox": [181.0, 161.0, 77.0, 56.0], "area": 2705, "segmentation": {"size": [512, 512], "counts": "Vfj23m?5J5L5K2@A[A?e>EWA:j>>O100000O10O1000O10O4M000000O01000000O01000000O01000000O01MVAPOj>o04000O10O1000O1000O1000FROaAo0_>VO\\Ai0e>904K4M00O1000O1000O1000O10O10001O5K4K6K4L5K4K6K5K4Lein3"}, "image_id": 354, "id": 5266}, {"iscrowd": 0, "category_id": 1, "bbox": [143.0, 163.0, 14.0, 7.0], "area": 96, "segmentation": {"size": [512, 512], "counts": "SeW27i?000000000000000000000002NkZa5"}, "image_id": 354, "id": 5267}, {"iscrowd": 0, "category_id": 1, "bbox": [98.0, 164.0, 11.0, 9.0], "area": 87, "segmentation": {"size": [512, 512], "counts": "UUa18h?00000000000O100005KgZY6"}, "image_id": 354, "id": 5268}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 204.0, 74.0, 72.0], "area": 2975, "segmentation": {"size": [512, 512], "counts": "R7Q1o>O01O2N2N2N2N2N2OO012O00000001O00000N2N2N2N2N1O000001O2N1O001O01O00000000000001KdAeN\\>[1fAdNY>\\150000001O0101000000O1N2N3M2N2N0001O03M2N2N2N2N2N2N2@e@9]?De@:b?N2N3M2Nihj6"}, "image_id": 354, "id": 5269}, {"iscrowd": 0, "category_id": 1, "bbox": [449.0, 229.0, 54.0, 32.0], "area": 1502, "segmentation": {"size": [512, 512], "counts": "XgP7`0`?:F000000000000000000000O100000000000O10000000000004L0000000O100000000000000000000000O10000000O10000000CVADR?4PY4"}, "image_id": 354, "id": 5270}, {"iscrowd": 0, "category_id": 1, "bbox": [79.0, 237.0, 11.0, 9.0], "area": 63, "segmentation": {"size": [512, 512], "counts": "`gW13l?3N1OO010O10O10O103M`hb6"}, "image_id": 354, "id": 5271}, {"iscrowd": 0, "category_id": 1, "bbox": [250.0, 256.0, 88.0, 50.0], "area": 2880, "segmentation": 
{"size": [512, 512], "counts": "iXm3>b?;E000O10000000000000O100000000000XOBnA>R>h0000000000000000K5000000000000000001O3M000000000000000000000000000O:G00000000000000000000000000000000000000O14L6J00000000000O100000000000Zgf2"}, "image_id": 354, "id": 5272}, {"iscrowd": 0, "category_id": 1, "bbox": [356.0, 260.0, 82.0, 41.0], "area": 2582, "segmentation": {"size": [512, 512], "counts": "kXb59g?9GO10000000000000@D\\A`001O000000000000O100000K50000O10000000000000O100000000000O10000002N3M000O1000O100000000000000000000000O10O10000000000000I7O1000000000O1004L>BXgT1"}, "image_id": 354, "id": 5273}, {"iscrowd": 0, "category_id": 1, "bbox": [450.0, 264.0, 55.0, 31.0], "area": 1504, "segmentation": {"size": [512, 512], "counts": "[XQ78h?b0^O0000000O10000000O100000000000000000000000000000O4M00000000000000000000000000000000000O0100000000000000Bo@M^?FiW3"}, "image_id": 354, "id": 5274}, {"iscrowd": 0, "category_id": 1, "bbox": [165.0, 267.0, 80.0, 47.0], "area": 2611, "segmentation": {"size": [512, 512], "counts": "Sib24l?a0_OO010000000000000000H80000000000O10O100000L4000004L00000O01000000000000000004L000000000000000O10DSOeAm0[><0L^AjNb>V14000000000000000000000000O010000000000000000?A5K00=CdVU4"}, "image_id": 354, "id": 5275}, {"iscrowd": 0, "category_id": 1, "bbox": [57.0, 274.0, 90.0, 50.0], "area": 3010, "segmentation": {"size": [512, 512], "counts": "[il02n?e0ZO3N000000O100000000000000000000000000000L31000000000]OUOQBk0o=c0000000000000000O1000000000O10000000000004L00000O10000000000000O10000000000000O1K5000000000c0]O00O100000O100000000000000000iVf5"}, "image_id": 354, "id": 5276}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 395.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "[<1dco7"}, "image_id": 354, "id": 5277}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 395.0, 33.0, 70.0], "area": 1433, "segmentation": {"size": [512, 512], "counts": "k0000000000000000002N2N2N00000001O2nNkA6W>HkA6W>HkA6W>HkA6W>HkA6W>HkA6W>HkA6W>HkA6W>HkA6W>HkA6W>HkA6W>HlA5V>IlA5Q?N2N\\R_7"}, "image_id": 354, "id": 5278}, {"iscrowd": 0, "category_id": 1, "bbox": [412.0, 419.0, 83.0, 78.0], "area": 3820, "segmentation": {"size": [512, 512], "counts": "i]^6152^?2_@1]?k0RBAm=?oAEQ>U10010O0001O01O00010O0000010O0000_NnAT1S>gNQBZ1n=bNWB]1V>000010OaAcN[>a101O01O0001O01O01O0001O01O01O0001O01O0001O01ON2M4K400010O00000N3K4M3L4L5K4L40010O0O1L5K4Bh@1[?Kj@0Z?Lj@0aa8"}, "image_id": 354, "id": 5279}, {"iscrowd": 0, "category_id": 1, "bbox": [36.0, 423.0, 79.0, 77.0], "area": 2948, "segmentation": {"size": [512, 512], "counts": "S^b02m?1O2FLe@6Y?Le@6Y?Ld@7Z?9N2N2N1N3N200O1N2N2N2N1O2N2M3N11N2N2N2N2NeAmNk=R1UBPOk=n0UBTOk=l0RBWOn=i0PBYOP>g0nAZOS>f0kA\\OU>d0iA^OW>b0gA@X>U10000000O1000O1000000000N2N2N1O2N2M3N1OO10O100000O10O10001O20O10N2N2N2M3N2N1Bl@OV?Ol@OV?Ol@NW?0k@NW?Ol@OV?Ok@0RaV6"}, "image_id": 354, "id": 5280}, {"iscrowd": 0, "category_id": 1, "bbox": [267.0, 427.0, 49.0, 69.0], "area": 2655, "segmentation": {"size": [512, 512], "counts": "\\mU44l?c0]Oc0]Oc0]O7I00000000000000000000000000000O16J000O10000000000000000000000000000jNPBB00000]OVOPBj0P>c0000008H0000000000000000O10000000O1000000L4O:G7I000000000000000000000000000000000009Gd0\\Od0\\O=C00>BXPQ4"}, "image_id": 354, "id": 5282}, {"iscrowd": 0, "category_id": 1, "bbox": [326.0, 428.0, 47.0, 82.0], "area": 2587, "segmentation": {"size": [512, 512], "counts": "X^S55k?`0@a0_O`0@0000O010000\\O]NkBc1U=mN[BS1e=d00000000000000000000O1000J6000\\OaBgN_=Y1RCVNnLSA3n>KTA4m>JUA5\\?O1O1O1O1OQ`k6"}, "image_id": 354, "id": 5284}, {"iscrowd": 0, "category_id": 1, "bbox": [407.0, 
502.0, 22.0, 10.0], "area": 135, "segmentation": {"size": [512, 512], "counts": "no[62k?3N2M300001O001O001O00001O001O001O00003M00QPY1"}, "image_id": 354, "id": 5285}, {"iscrowd": 0, "category_id": 1, "bbox": [177.0, 510.0, 6.0, 2.0], "area": 9, "segmentation": {"size": [512, 512], "counts": "noh22n?0001O0000QPT5"}, "image_id": 354, "id": 5286}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 123.0, 178.0], "area": 18108, "segmentation": {"size": [512, 512], "counts": "Z49f?1gKKgH5Y72`HN`79nFEhN1[:b0eFFgNHd:i0^FGfN@l:P1WFIdNWOU;W1oEKdNQOZ;[1kEh9IQF:l9MmE3S:4fELZ:<^EDb:m20000000000000000000000000004L1O00000000000000O1000000000000O100000000000000O100000000000000O1000000000000O100000000000000O100000000000000O1000000000000O100000000000000O108H7I7I7I7I8H7H8I7I7I8H7I7I7I8G8I7I7I7I8H7I7HR[R6"}, "image_id": 355, "id": 5287}, {"iscrowd": 0, "category_id": 1, "bbox": [126.0, 130.0, 22.0, 45.0], "area": 708, "segmentation": {"size": [512, 512], "counts": "TTo17i?7I6J7I6J7H4M0000000000O0100006J6J7H7J7I6Jhje5"}, "image_id": 355, "id": 5288}, {"iscrowd": 0, "category_id": 1, "bbox": [241.0, 253.0, 13.0, 22.0], "area": 218, "segmentation": {"size": [512, 512], "counts": "ngh34l?7I6J4LO10O1000000007H7JfgP4"}, "image_id": 355, "id": 5289}, {"iscrowd": 0, "category_id": 1, "bbox": [223.0, 270.0, 84.0, 112.0], "area": 3309, "segmentation": {"size": [512, 512], "counts": "[k_32n?1N2N3M2N2O2M2L4N2O2M2N2N3M01O0000010O000000010O00_OTOoAl0R>VOkAj0U>XOiAi0V>YOiAf0W>\\OgAd0Y>_OdAa0\\>AbA?^>CaA=_>a000000001O0001O]OcNdB]1]=dNaB\\1_=gN^BZ1a=hN]BX1c=jN[BV1e=lNYBT1h=nNVBR1i=POUBQ1j=c0O01O000001O01XOUBZOk=g0VBWOe=]O]B\\11UOa=A\\BZ15SO_=R1cBlN^=S1dBkN\\=V1fBgNZ=[1fBdN[=\\1eBbN]=\\1eBbN\\=]1gB`NZ=_1c002O2M2N2N3WOYA4i>JYA5h>IZA5h>IZA5i>IYA4i>JYA5h>IZA5h>I[A4Z?N2N^VV3"}, "image_id": 355, "id": 5290}, {"iscrowd": 0, "category_id": 1, "bbox": [88.0, 304.0, 77.0, 120.0], "area": 4381, "segmentation": {"size": [512, 512], "counts": "h\\\\13k?2N3L3N2M4M2M4M2M3N3L3N3M2M4M2M3N3L3N3L3N2N3L3N3L3N2M4M2M4M2N2M4M2M2OO010O01000O010O102M4O001L3N2M4M2M4M2N2M4M2M4M2O101N1M4M2HeAiN]>T1gAhN]>U18M4M2M4M2N2Hj@CZ?9i@EY?99L3N2NUV]5"}, "image_id": 355, "id": 5291}, {"iscrowd": 0, "category_id": 1, "bbox": [261.0, 336.0, 87.0, 75.0], "area": 2066, "segmentation": {"size": [512, 512], "counts": "[lR41n?2N2N2O1N2N3M2N2O1N2N2N2N3M010O00000001O01O000001O01O000001O2N2O2M1O000002N2O1N2N1O000JVOWAj0i>XOVAg0j>[OTAe0m>7O000000010O00000001O01O000001O01O0000JTAYOm>g0TAWOl>i05000001O01O02N3M2N2O1N3M2N2N2N2O2M2NRea2"}, "image_id": 355, "id": 5292}, {"iscrowd": 0, "category_id": 1, "bbox": [285.0, 362.0, 94.0, 92.0], "area": 3418, "segmentation": {"size": [512, 512], "counts": "Wm^41n?3N1N2N2N3M2O1N3M2N2O1N3M2N2N2O2M2N2N2N3N1N2N2N2N10O00000010OYOdAK\\>6eAH[>8gAFY>:iADX>;kACT>>mA@S>`0oA^OQ>b0QB\\Oo=d0TBYOm=i0RBVOm=l0SBROm=P1`02N3N1N2N3M2N2O1N000010O0000000YOeAK[>4gAJY>6iAHW>8lAET>=PBAQ>?PB_OP>c0QBZOo=i0PBUOP>m0PBROo=P1QBnNP>T1<2N2N2N3N1N2N1O01O2N2N3N1N2WOZA5h>IZA5i>HYA6i>HYA7h>HZA5h>IZA5i>HYA6i>HZA6Y?N2N2NcSR2"}, "image_id": 355, "id": 5293}, {"iscrowd": 0, "category_id": 1, "bbox": [332.0, 418.0, 108.0, 94.0], "area": 4447, "segmentation": {"size": [512, 512], "counts": "`^V52n?1N2N3M2O1N3M2N2N3N1N2N2N3N1N1O00010O00000010O0000010O00^AROU>l0kAVOU>j0iAXOW>h0gA[OX>f0fA[O[>d0cA^O]>b0aA@_>a0^ABb>n000N2DaNTBb1j=_NTBc1j=_NUBb1i=`NUBb1i=aNTBb1i=0oA0S>NmA3U>KjA6W>HjA:U>DkA>U>@kAb0V>\\OiAg0V>WOkAj0U>TOkAn0b>3M20010O00010ON2O2M2N2N2N3M2N2N3N1N2N3M2N2N2N3MTaS1"}, "image_id": 355, "id": 5294}, {"iscrowd": 0, "category_id": 1, "bbox": [437.0, 498.0, 37.0, 14.0], "area": 
277, "segmentation": {"size": [512, 512], "counts": "noj62k?3N2N2M3N200001O001O001O00001O001O001O001O001O00001O001O001O001O001O0000Q`b0"}, "image_id": 355, "id": 5295}, {"iscrowd": 0, "category_id": 1, "bbox": [432.0, 510.0, 4.0, 2.0], "area": 6, "segmentation": {"size": [512, 512], "counts": "n_h62n?01O00Q`U1"}, "image_id": 355, "id": 5296}, {"iscrowd": 0, "category_id": 1, "bbox": [431.0, 320.0, 33.0, 18.0], "area": 443, "segmentation": {"size": [512, 512], "counts": "Tjg64l?:F0O01000000000000000O0100000000000000000O01000000000000000O0:Ggeg0"}, "image_id": 356, "id": 5297}, {"iscrowd": 0, "category_id": 1, "bbox": [413.0, 347.0, 31.0, 37.0], "area": 902, "segmentation": {"size": [512, 512], "counts": "ak^68h?000O100000000[Ol0I00000000O1000000000000000000000000000000000UeQ1"}, "image_id": 356, "id": 5298}, {"iscrowd": 0, "category_id": 1, "bbox": [432.0, 405.0, 14.0, 17.0], "area": 209, "segmentation": {"size": [512, 512], "counts": "g\\h6>`?200000000000000000001O00@kcP1"}, "image_id": 356, "id": 5299}, {"iscrowd": 0, "category_id": 1, "bbox": [369.0, 0.0, 143.0, 200.0], "area": 16600, "segmentation": {"size": [512, 512], "counts": "lch51n?2M3N1O2N2N2M3N2N1O2N2M3N2N1O2N2M3N2N2N1O2M3N2N2N1O2M3N2N2N2N1N3N2N2N2N1N3N2N2N2N2M2O2N2N2N2M2O2N2N2N2M3N1O2N2N2M3N1O2N2N2M3N2N1O2N2M3N2N1O2N2M3N2N2N1O2M3N2N2N1O2M3O1O1000O1000O1000O100O1M2O00O1000O10O100000O10O100000O010000000O01000000O0100000001N2O1O1O1O1N2O1O1O1O1N2O1O1O1O2M3N2N2NM"}, "image_id": 357, "id": 5300}, {"iscrowd": 0, "category_id": 1, "bbox": [45.0, 235.0, 401.0, 277.0], "area": 55255, "segmentation": {"size": [512, 512], "counts": "nof02m?1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1\\OaLeDa3Z;aLdD`3[;aLdD`3[;bLcD_3\\;cLbD^3\\;eLaD]3^;eL`D\\3_;fL_D[3`;c0O1001O1O1O001O1O1O1ON2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1001O001N2N2N2^N\\HeJf7Y5\\HeJe7Z5]HcJf7Z5]HdJe7Z5]HdJe7Z5\\H_JFVOP8Y6\\H^JGWOo7Y6\\H^Jl7`5WH^Jk7`5WH^Jk7`5WH^Jk7`5VH^Jm7`5UH^Jm7_5VH_Jk7`5WH^Jk7`5WH^Jk7`5WH]Jl7U5SGoJR1Km7T5SGnJS1Ll7S5TGoJR1Lk7T5TGoJX:o4jEoJX:P5=00000N2N1O2\\EkJY:X5eEjJY:X5eEiJZ:`5N2N2M3N2N1O2N2M3N2N2N1O2M3N2O100OO2M3N2N2TO[FZKg9d4[FZKf9e4\\FXKg9e4\\FYKf9e4\\FYKf9e4\\FYKf9e4[FZKg9d4[FYKg9e4\\FYKf9e4\\FYKf9e4\\FYKf9e4\\FYKf9e4l0M0101O1O2N2M3N2N2N1O2M3N2N2N1OO01002N2N2M3N2N1O2QOREiLP;U3REhLQ;V3PEiLR;U3PEiLR;U3PEiLQ;U3REiLP;U3REhLQ;V3QEhLQ;V3PEiLR;U3PEiLR;T3QEjLP;U3REhLQ;V3QEhLQ;V3PEiLR;U3n0N20O01000000000O01000000000O01N2N2N2N2N1N3N2N2N2N2M2UO_BVOc=h0_BVOc=h0_BVOc=g0`BWOb=g0_BWOd=g0^BWOc=h0_BVOc=h0_BVOc=g0`BWOb=g0`BVOc=h0^BWOd=g0^BWOc=h0l0M3N2O1000O10O10000000O10O100O1M3N1O2N2N2M3N2N1O`fP1"}, "image_id": 357, "id": 5301}, {"iscrowd": 0, "category_id": 1, "bbox": [448.0, 363.0, 64.0, 149.0], "area": 5265, "segmentation": {"size": [512, 512], "counts": "Q_P71m?3N2N1n@JY>8eAJX>9eAJY>8eAJY>8eAJY>7fAJX>9eAIZ>9dAH[>:cAG\\>;bAF]>;aAG]>o0O1O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2aNmMPET2n:oMPER2o:oMPER2o:PNoDQ2P;QNmDQ2Q;RNmDo1R;SNlDn1S;SNlDn1S;TNjDn1T;UNjDl1U;VNiDk1V;WNhDj1W;WNgDk1X;WNfDj1X;YNfDh1Y;ZNeDg1Z;[NcDg1\\;ZNcDg1[;\\NcDe1\\;]NbDd1];^N`Dd1_;^N_Dc1_;`1O1O1O1N21O1O001O1O1O"}, "image_id": 357, "id": 5302}, {"iscrowd": 0, "category_id": 1, "bbox": [458.0, 407.0, 22.0, 24.0], "area": 260, "segmentation": {"size": [512, 512], "counts": 
"W]U71m?3N1O2M3N2N1N3N2N2M010001N2O2N2M2O2N2M3N1NVc?"}, "image_id": 357, "id": 5303}, {"iscrowd": 0, "category_id": 1, "bbox": [76.0, 150.0, 43.0, 34.0], "area": 825, "segmentation": {"size": [512, 512], "counts": "XUV13k?2M4L3N3L3M3N30O00010O0010O0010O00010O0010O0010O00010O0010O0010O00010N1M4L3N2M4L3N2MP[T6"}, "image_id": 358, "id": 5304}, {"iscrowd": 0, "category_id": 1, "bbox": [120.0, 167.0, 24.0, 16.0], "area": 184, "segmentation": {"size": [512, 512], "counts": "]Ul12l?2M3O2O010O010O01O01O010O01O01O010O010O01M2N2Mdjg5"}, "image_id": 358, "id": 5305}, {"iscrowd": 0, "category_id": 1, "bbox": [12.0, 181.0, 20.0, 33.0], "area": 411, "segmentation": {"size": [512, 512], "counts": "gU61o?4L5J6K4L5K5K2M0103M2N0O10001O5K5J5L5Kbi_7"}, "image_id": 358, "id": 5306}, {"iscrowd": 0, "category_id": 1, "bbox": [105.0, 188.0, 51.0, 66.0], "area": 1812, "segmentation": {"size": [512, 512], "counts": "^gd12Q?0hA3V>NiA3U>0iA2U>0hA3V>NiA3U>0hA3V>OhA3U>0hA3V>NiA3V>OhA3U>0hA3V>NiA3U>n0N2M2O2O1000O10O01N2N1N3N2M3N1N3N2N1N3N2M2O2N2M3N1N3N2M2O2N2M3N1N3N2N1N3N2M^ia5"}, "image_id": 358, "id": 5307}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 211.0, 24.0, 66.0], "area": 1166, "segmentation": {"size": [512, 512], "counts": "e6P2P>00O010000000O010000001N6K5K5K5K5J6K6J5K5K5J6Kdgc7"}, "image_id": 358, "id": 5308}, {"iscrowd": 0, "category_id": 1, "bbox": [34.0, 212.0, 36.0, 56.0], "area": 1620, "segmentation": {"size": [512, 512], "counts": "mVa0b0^?00004L0G9000000000h0XO0000000000000000000000000000000018G00000000000000Sil6"}, "image_id": 358, "id": 5309}, {"iscrowd": 0, "category_id": 1, "bbox": [173.0, 227.0, 45.0, 60.0], "area": 1510, "segmentation": {"size": [512, 512], "counts": "bhf23k?3L3N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3O0010O0010O01M2N2M4M2M4M2M4L3N2M4M2M4M2M4M2M3N3L3Nehb4"}, "image_id": 358, "id": 5310}, {"iscrowd": 0, "category_id": 1, "bbox": [242.0, 258.0, 22.0, 16.0], "area": 211, "segmentation": {"size": [512, 512], "counts": "[Xi31l?3N3L3O1010O0010O0010O00010O00010O000O2L3Njgk3"}, "image_id": 358, "id": 5311}, {"iscrowd": 0, "category_id": 1, "bbox": [215.0, 270.0, 16.0, 17.0], "area": 163, "segmentation": {"size": [512, 512], "counts": "ih[32k?3N3M2N3M200010O010O0M4M2N3M2MaW\\4"}, "image_id": 358, "id": 5312}, {"iscrowd": 0, "category_id": 1, "bbox": [78.0, 277.0, 51.0, 91.0], "area": 2658, "segmentation": {"size": [512, 512], "counts": "SYW14l?4L5K4L5B^OSAf0j>800000O02O5K4L5J5L5K5K4L5J100000O03N4L5K4L1N100000O3N4L5K4L5J5L5K5K4K6K4L5K4L5J5L5K5K4K6KgTo5"}, "image_id": 358, "id": 5313}, {"iscrowd": 0, "category_id": 1, "bbox": [205.0, 280.0, 59.0, 58.0], "area": 1607, "segmentation": {"size": [512, 512], "counts": "liV31n?2O1N2N2N3M2O1N2N2N3M2N2O1N2N3M2N2O1N1O1O000001OKmN^AS1b>oN\\AR1c>50000000010O0000000001O2O1N2N2N3M2N2O1N2N2N3M2N2O1N2N3M2N2N2O1Nbfk3"}, "image_id": 358, "id": 5314}, {"iscrowd": 0, "category_id": 1, "bbox": [171.0, 297.0, 30.0, 36.0], "area": 622, "segmentation": {"size": [512, 512], "counts": "Tje22l?3L3N2N3L3N3M2M3N3L3N3N110O00010O00O2M2M4M2N2M4M2N3L3N2M4MbVk4"}, "image_id": 358, "id": 5315}, {"iscrowd": 0, "category_id": 1, "bbox": [276.0, 324.0, 43.0, 53.0], "area": 1286, "segmentation": {"size": [512, 512], "counts": "^[Z41l?4M2M4M2N3L3N2M4M2N3L3N2M4M2M4M2N3N10010O0010O001N1N2N3L3N3L3N2M4M2N3L3N3L3N2N3L3N3LeUP3"}, "image_id": 358, "id": 5316}, {"iscrowd": 0, "category_id": 1, "bbox": [158.0, 327.0, 13.0, 13.0], "area": 124, "segmentation": {"size": [512, 512], "counts": "YZ_22n?3M4K3N00000O01000003L5LbUZ5"}, "image_id": 358, "id": 5317}, {"iscrowd": 0, "category_id": 1, 
"bbox": [138.0, 340.0, 29.0, 28.0], "area": 550, "segmentation": {"size": [512, 512], "counts": "P[U23l?5L4L5CDk@=U?700O0100000O10O1000O10O4M4L1OO0O@d@`0\\?2O1004K5L5K4LgT\\5"}, "image_id": 358, "id": 5318}, {"iscrowd": 0, "category_id": 1, "bbox": [325.0, 346.0, 48.0, 65.0], "area": 1784, "segmentation": {"size": [512, 512], "counts": "`lR51l?3N3M2M4M2ZODfA?W>CfA`0W>CgA?W>DfA?W>CfA`0W>DfA?W>CgA?W>g0L3O2O00010O010O001M21O0O1N3L3N3M2M4M2M3N3L3N3M2M4M2M3N3L3N3M2M4M2M3NnTU2"}, "image_id": 358, "id": 5319}, {"iscrowd": 0, "category_id": 1, "bbox": [171.0, 374.0, 29.0, 26.0], "area": 469, "segmentation": {"size": [512, 512], "counts": "kke24l?5K5K5K2M1000O100000O10O100000O01000000Bg@5Y?Kl@OT?2QAIo>7=O1000O12NXdk4"}, "image_id": 358, "id": 5320}, {"iscrowd": 0, "category_id": 1, "bbox": [357.0, 374.0, 64.0, 72.0], "area": 2168, "segmentation": {"size": [512, 512], "counts": "bmb53k?2M4M2N2M4M2N3L3N30O00010O010O01O01OO2M2M4M2M01000O4M2N3L3N2M4M2N2M1000O103M2M40O00010O01O0N3L3N2M4M2N3L3N2M4M2N3L3N3M2M3N3L3NQT]1"}, "image_id": 358, "id": 5321}, {"iscrowd": 0, "category_id": 1, "bbox": [423.0, 378.0, 56.0, 69.0], "area": 1804, "segmentation": {"size": [512, 512], "counts": "amc63l?2N2N2N2DFo@Fo@Fo@Fo@Fo@nN_AR1a>PO]AP1c>6KgNdAY1Z>kNdAU1\\>7000000N2000000100O000000002N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N3M2N2N2N2N2N^S`0"}, "image_id": 358, "id": 5322}, {"iscrowd": 0, "category_id": 1, "bbox": [64.0, 387.0, 55.0, 46.0], "area": 1528, "segmentation": {"size": [512, 512], "counts": "b\\P13m?2M4M3M3L3N3M3L4M2N3L2O000O01000O010O01000O01000O01000O0100O01000O0100O01000O0100O01003L3N3M3L4M2N3L4M3L3N3MoRT6"}, "image_id": 358, "id": 5323}, {"iscrowd": 0, "category_id": 1, "bbox": [128.0, 391.0, 41.0, 38.0], "area": 1114, "segmentation": {"size": [512, 512], "counts": "]\\P21o?4L5K4L4K6K4L4L1O0O10O1000000O0100000O10O100000O10O1000O1000O10O10004L4L4K6K4L4LPS[5"}, "image_id": 358, "id": 5324}, {"iscrowd": 0, "category_id": 1, "bbox": [202.0, 392.0, 47.0, 34.0], "area": 1280, "segmentation": {"size": [512, 512], "counts": "[\\U3d0\\?7I000O100000O100000000000000000000000000005J10O1000000000000J6000000000000000000000O10000c0]OUSS4"}, "image_id": 358, "id": 5325}, {"iscrowd": 0, "category_id": 1, "bbox": [279.0, 394.0, 28.0, 27.0], "area": 448, "segmentation": {"size": [512, 512], "counts": "kl[42l?2N3L3N3M2M3N3N101O01O010O010O01O01O010M2N2N3L3N3L3N3M`SV3"}, "image_id": 358, "id": 5326}, {"iscrowd": 0, "category_id": 1, "bbox": [479.0, 412.0, 33.0, 59.0], "area": 1182, "segmentation": {"size": [512, 512], "counts": "hm_74l?4L3L5L3M4L3L4M000O4M00O10O10O10O100]OoNWBP1j=TORBl0m=XOoAi0Q>[OkAd0V>_OgAa0X>d0000O10O10O10O100TC"}, "image_id": 358, "id": 5327}, {"iscrowd": 0, "category_id": 1, "bbox": [251.0, 414.0, 18.0, 15.0], "area": 215, "segmentation": {"size": [512, 512], "counts": "Smm3:f?000000000K50000000000005K00000000mRi3"}, "image_id": 358, "id": 5328}, {"iscrowd": 0, "category_id": 1, "bbox": [328.0, 422.0, 26.0, 26.0], "area": 404, "segmentation": {"size": [512, 512], "counts": "g]T52k?3N2N3L3N3L3N2010O01O01O010O01O010O01M2M4M2M3N3L3Neb^2"}, "image_id": 358, "id": 5329}, {"iscrowd": 0, "category_id": 1, "bbox": [214.0, 431.0, 30.0, 14.0], "area": 420, "segmentation": {"size": [512, 512], "counts": "_][3>b?000000000000000000000000000000000000000000000000000000000abU4"}, "image_id": 358, "id": 5330}, {"iscrowd": 0, "category_id": 1, "bbox": [393.0, 436.0, 36.0, 45.0], "area": 827, "segmentation": {"size": [512, 512], "counts": "lnT63k?2N3N110N1M3N1N1O1KCi@O]A1d>K`A4`>JbA7]>FfA:[>ChA=W>@lA?k>M4M2M4L3NaQY1"}, 
"image_id": 358, "id": 5331}, {"iscrowd": 0, "category_id": 1, "bbox": [375.0, 445.0, 26.0, 26.0], "area": 408, "segmentation": {"size": [512, 512], "counts": "^nk52k?3N3M2M4M2M4M20010O010O01O01O010O01O0N2M4M2M4M2N3LnQg1"}, "image_id": 358, "id": 5332}, {"iscrowd": 0, "category_id": 1, "bbox": [230.0, 461.0, 33.0, 47.0], "area": 1033, "segmentation": {"size": [512, 512], "counts": "h^c34h?4001O0000QALR>4gA3Y>N_A9a>GXA`0h>=N1I70000000001O01O00000000010O00000I7I7H8I8H7IgQl3"}, "image_id": 358, "id": 5333}, {"iscrowd": 0, "category_id": 1, "bbox": [320.0, 464.0, 48.0, 48.0], "area": 1294, "segmentation": {"size": [512, 512], "counts": "k^P51n?3N2M3N2M3N2M3N2M3N2N2M3N3L3N2M3N2M2O0O100O1001O1O2N2N2N2N2N2N2N2N2N2N2N2N2N2N1O2N2N2N2N2N2N2MT`W2"}, "image_id": 358, "id": 5334}, {"iscrowd": 0, "category_id": 1, "bbox": [80.0, 474.0, 54.0, 38.0], "area": 1401, "segmentation": {"size": [512, 512], "counts": "T_X13m?4K6K4L4L4L4K010000000O010000000O010000000O0100000000O010000000O010000000O010000000O010000000O014L5K5K5J6K^`l5"}, "image_id": 358, "id": 5335}, {"iscrowd": 0, "category_id": 1, "bbox": [395.0, 489.0, 19.0, 14.0], "area": 151, "segmentation": {"size": [512, 512], "counts": "^oU63k?2M4O0010O0010O00010O0010O0010OO1M4Mc``1"}, "image_id": 358, "id": 5336}, {"iscrowd": 0, "category_id": 1, "bbox": [437.0, 505.0, 13.0, 7.0], "area": 49, "segmentation": {"size": [512, 512], "counts": "ooj61n?100O1O1O1O1O11O1O1O1O2NQ`n0"}, "image_id": 358, "id": 5337}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 24.0, 512.0, 488.0], "area": 127700, "segmentation": {"size": [512, 512], "counts": "j<`0R2LQ;5mDKT;5lDJU;6jDKW;4hDLY;4gDKZ;5eDL[;4dDL];3dDM\\;3cDM^;3aDM`;3`DM`;3_DMb;3]DMb;5^DK`;7_DIZNES=d0bDGZNFR=e0eDGZ;;eDEZ;=eDCZ;?fD@Y;b0fD^OX;e0gD[OX;g0hDXOW;:VCIb1MW;:YCI_1MW;:\\CH^1MU;:`CIZ1MT;;dCHU1OV;9gCHQ10W;8iCIn00X;7lCIj01Y;8mCGh02\\;7lCGe04`;5kCGc05c;4jCFb07e;3hCGa07g;2iCG=9k;0hCG;:n;NhCH8;Q[<@lC1H`0\\<^OnC2Da0_<[OoC4@b0boNf0n`01O1O1O001O1O1OO1N2O1O1O1O1N2O1O1O1N2O1O1O1O1N2O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1N2O1O1O1O1N2O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1N2oK[LfKf3Y4\\LeKe3Z4^LcKc3\\4_LbKb3_4_L]Kc3d4]LZKd3f4]LXKd3g4]LXKd3g4\\LXKf3g4ZLYKg3e4ZL[Kg3d4YL\\Kh3d4XL[Ki3e4XLXKj3i4VLUKk3l4TLSKm3n4SLPKn3Q5RLmJo3T5QLiJQ4W5PLgJQ4Z5nKeJS4\\5mKbJT4_5lK^JV4c5jK[JW4e5jKYJW4h5hKWJY4j5gKTJZ4m5fKPJ\\4Q6dKmI]4S6cKlI^4U6bKiI_4X6aKeIa4\\6_KbIb4_6^K_Ic4a6]K^Id4c6\\K[Ie4f6[KWIg4j6YKTIh4m6XKQIi4P7VKoHk4Q7VKmHk4T7UKiHm4X7SKfHn4[7QKdHP5]7PKaHQ5_7PK^HR5c7nJ[HS5f7mJXHT5i7kJVHV5k7jJSHW5m7jJPHX5Q8hJmGY5T8gJjGZ5W8eJhG\\5Y8dJdG^5]8bJaG_5_8bJ_G_5b8`J]Ga5d8_JZGb5g8^JVGd5k8\\JSGe5m8\\JQGe5P9ZJoFg5R9YJkFi5V9WJhFj5Y9VJeFk5[9=1O1O1O1O001ON2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2oKgHZOZ7e0hHYOY7f0iHXOX7h0hHWOY7h0iHUOY7k0gHTOZ7l0fHSO[7n0cHRO^7n0bHQO_7o0`HQOa7n0_HQOc7o0]HPOd7P1\\HoNe7Q1[HnNf7R1YHnNh7Q1YHmNi7S1WHlNj7T1VHkNk7U1THkNm7U1SHjNn7U1SHiNo7W1QHhNP8X1PHgNQ8Y1nGgNS8Y1mGfNT8Y1mGeNU8[1kGdNV8\\1iGdNX8\\1hGcNY8]1gGbNZ8]1gGULIl1b8o1dGULLj1a8Q2cGTLNi1`8S2bGSL0h1_8U2aGRL1h1_8U2`GRL4f1^8X2^GQL6e1]8Z2]GPL8d1\\8\\2\\GoK:c1[8^2[GnKa1Z8a2XGmK?a1Z8b2WGlK?b1[8b2VGkK?c1\\8b2UGjK?d1]8b2jG^MX8c2fG]M[8d2cG\\M^8d2aG\\M`8e2^G[Mc8f2ZGZMh8g2VGYMk8g2TGYMm8h2TGUMm8l2TGQMm8P3TGmLm8S3UGiLm8X3TGeLm8]3SG`Ln8b3RG[Lo8h3QGTLP9n3PGnKR9T4nFiKS9Y4mFdKT9^4lF_KU9d4jFYKW9i4iFSKY9o4gFnJZ9T5fFiJ[9Y5e03M2N2N2N2N1OO1N2O1O1O1N2O1O1O1N2O1000000000000000000000000N2F:F:F:ZO^DeLk;Y3?00O1M3M3M3MQEXMl8d2iFkMW9R2^F\\Nb9a1`F`N`9]1cFcN]9Y1gFgNY9V1jFjNV9S1[FfM^OW1W:o0[FoM]OR1X:l0[FUN^On0W:j0ZF\\N_Oj0W:IZELo0E@f0W:G]EIm0M_Oc0W:F_EFj05@`0V:CbEEh0;@=V:AeECd0c0A9V:@fE@d0j0@6V:_OhE]Oa0R1A2V:@hEXOa
0Y1AOV:@dFe0VOKV:B^Fg0\\OGV:B[Fj0_ODV:CVFm0D@V:EQFo0H\\OW:EmES1MWOV:FjEV10TOV:FfEZ14POV:FeE[1KYO`:]OcES3]:nLaES3_:PM\\ER3d:V1O0000000000000000000000000000000000001O00000000000000000000000000000000001O00000000000000000000000000000000001O0000000000000000000000000000000000001O00000000000000N2O1O1N2O1N2O1O1N2O1N2O^ESKS:k4mEWKS:h4mEZKR:e4mE]KS:a4nE`KR:_4nEbKR:\\4nEfKR:Y4nEhKR:W4mEkKS:S4nEnKR:Q4nEPLR:n3nETLR:k3nEVLR:i3mEYLS:i2`E]M>KR:g2bE\\MR:Y2mEXM1`0R:W2oEVMOf0Q:R2SFVMLi0Q:P2UFTMJn0Q:l1XFTMGQ1Q:j1YFTMFS1Q:h1jFZNV9d1kF]NU9b1jF`NV9_1jFbNV9\\1kFeNU9Z1jFhNV9V1kFkNU9T1jFnNV9Q1jFPOV9n0kFSOU9l0jFVOV9h0kFYOU9f0jF\\OV9c0jF_OU9?kFCU9h9BVF`0j9_OVFb0j9\\OVFf0j9YOVFh0W8XMeHn1UOk0V8[MaH"}, "image_id": 360, "id": 5338}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 23.0, 34.0], "area": 504, "segmentation": {"size": [512, 512], "counts": "0P1P?001O001O0000M3N2M3M3N2M3O1001O0000_Oe@<`?M3M4M2Mn_d7"}, "image_id": 364, "id": 5339}, {"iscrowd": 0, "category_id": 1, "bbox": [34.0, 0.0, 42.0, 19.0], "area": 452, "segmentation": {"size": [512, 512], "counts": "RPa02l?201O001O001O001O00001O001O001O00001O001O001O00001O001O001O010O01O010O0N2N3L3N3M2Nmoi6"}, "image_id": 364, "id": 5340}, {"iscrowd": 0, "category_id": 1, "bbox": [85.0, 0.0, 31.0, 31.0], "area": 588, "segmentation": {"size": [512, 512], "counts": "f`Z11m?3L3N2M4M2N2M4M2M4O01O010O00010O00010O010O000O2L3M3M4M2M4L3NjoU6"}, "image_id": 364, "id": 5341}, {"iscrowd": 0, "category_id": 1, "bbox": [143.0, 0.0, 36.0, 20.0], "area": 494, "segmentation": {"size": [512, 512], "counts": "X`W23j?3M4M201O00001O00001O001O00001O00001O00001O001O00N2M30000Da@7_?Gc@:a?01O0M3M4Mk_V5"}, "image_id": 364, "id": 5342}, {"iscrowd": 0, "category_id": 1, "bbox": [197.0, 0.0, 11.0, 4.0], "area": 28, "segmentation": {"size": [512, 512], "counts": "P`R31o?001O00001O00001O00OQPh4"}, "image_id": 364, "id": 5343}, {"iscrowd": 0, "category_id": 1, "bbox": [208.0, 0.0, 14.0, 11.0], "area": 111, "segmentation": {"size": [512, 512], "counts": "VPX32k?3M301O001O00001O0000M3M3MSPa4"}, "image_id": 364, "id": 5344}, {"iscrowd": 0, "category_id": 1, "bbox": [222.0, 0.0, 37.0, 22.0], "area": 593, "segmentation": {"size": [512, 512], "counts": "YP_34i?4K4N20001O00001O0000001O00001O00001O0000001O00001O0000001O00M3M3L4M3L4MS`n3"}, "image_id": 364, "id": 5345}, {"iscrowd": 0, "category_id": 1, "bbox": [385.0, 0.0, 25.0, 22.0], "area": 363, "segmentation": {"size": [512, 512], "counts": "]`P62l?3L3N2N3L3O2O001O00001O001O001O00N2N2M3N3M2N3L3NPPc1"}, "image_id": 364, "id": 5346}, {"iscrowd": 0, "category_id": 1, "bbox": [487.0, 0.0, 9.0, 4.0], "area": 19, "segmentation": {"size": [512, 512], "counts": "P`c71o?00001O001O001ONRP8"}, "image_id": 364, "id": 5347}, {"iscrowd": 0, "category_id": 1, "bbox": [426.0, 4.0, 29.0, 41.0], "area": 516, "segmentation": {"size": [512, 512], "counts": "XQe61m?3L3N3M2N3L3N2N3L3N2NO01000O10011010OM3N3M2M4M2N3M2M3N3Mi_l0"}, "image_id": 364, "id": 5348}, {"iscrowd": 0, "category_id": 1, "bbox": [288.0, 10.0, 30.0, 27.0], "area": 564, "segmentation": {"size": [512, 512], "counts": "nP`42j?4M3L4M3L5M20000010O0000010O000001O01O0000010O000M3L5K4K5LeoP3"}, "image_id": 364, "id": 5349}, {"iscrowd": 0, "category_id": 1, "bbox": [347.0, 16.0, 34.0, 54.0], "area": 1011, "segmentation": {"size": [512, 512], "counts": "Pa]53k?2N3M2M4M2WABn=a0nACn=`0PBBQ>=mAES>9hAJY>6dAM[>3`A\\O2a0_>5bAK]>5cAK^>5_AMa>3_ANa>h00ZO_AN`>i01YO_AN`>2`ANa>2_AMa>3_ANa>f02[O^AJd>3_AKd>2_AJd>4^AJe>2_AKc>3_AJe>3]AKe>2f0NW_Q2"}, "image_id": 364, "id": 5350}, {"iscrowd": 0, "category_id": 1, "bbox": [480.0, 
17.0, 13.0, 16.0], "area": 129, "segmentation": {"size": [512, 512], "counts": "kP`73k?2M4M2M4O01O01O01L3N3L3M__9"}, "image_id": 364, "id": 5351}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 21.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "e`o71Z?"}, "image_id": 364, "id": 5352}, {"iscrowd": 0, "category_id": 1, "bbox": [117.0, 32.0, 49.0, 52.0], "area": 1062, "segmentation": {"size": [512, 512], "counts": "\\aj12m?2O1N2N2N2O2M2N2N2O1N3M2N2N2O1N3O0001O0001O0001O0001O0001O0001O00N3M2O1N2N2N2N3M2O1N2N2N3M2N2N2Ojm\\5"}, "image_id": 364, "id": 5353}, {"iscrowd": 0, "category_id": 1, "bbox": [477.0, 33.0, 35.0, 45.0], "area": 909, "segmentation": {"size": [512, 512], "counts": "oa^71m?2M4M2M3N3M2M4M2M4M2M3N3N1010O0010O0010O0SOVAd0k>XOYAh0f>VO\\Aj0n>0O00010O010O01O01O010\\N"}, "image_id": 364, "id": 5354}, {"iscrowd": 0, "category_id": 1, "bbox": [240.0, 35.0, 44.0, 57.0], "area": 1636, "segmentation": {"size": [512, 512], "counts": "URh31l?4K4M3g@Fh>>TAFh>?TAEh>k0L4L4N30O00010O0000010O00010O0000010O0000010O00N3K10O4L4M4K4M3L4M4K4L4M4Ec@Ma?OToa3"}, "image_id": 364, "id": 5355}, {"iscrowd": 0, "category_id": 1, "bbox": [45.0, 41.0, 25.0, 28.0], "area": 424, "segmentation": {"size": [512, 512], "counts": "kaf03k?2N2M4M2M4M2M3O2O010O00010O010O00010ON3Gg@F\\?7g@G[?79L3Ncnl6"}, "image_id": 364, "id": 5356}, {"iscrowd": 0, "category_id": 1, "bbox": [406.0, 44.0, 26.0, 40.0], "area": 637, "segmentation": {"size": [512, 512], "counts": "]R[61l?4L3M3N3L3M3M4L3M4M2M3N2001O010O01O01O0ZOWA4j>IYA4k>HYA4j>IYA5i>HZA5[?LQnW1"}, "image_id": 364, "id": 5357}, {"iscrowd": 0, "category_id": 1, "bbox": [174.0, 46.0, 52.0, 55.0], "area": 1615, "segmentation": {"size": [512, 512], "counts": "URg22n?6J6I7J6J6JO1000O1000O100000O10O10000000O010_OTOnAl0R>ZOhAf0X>@bA`0^>`010O100000O10O100000O4M5K4L5K4L5L4K4M2M00001N101N3LZm^4"}, "image_id": 364, "id": 5358}, {"iscrowd": 0, "category_id": 1, "bbox": [80.0, 49.0, 37.0, 44.0], "area": 882, "segmentation": {"size": [512, 512], "counts": "aRX14j?2M3N3L3O2O01OCBXA?e>C[A=c>F]A:_>IaA7]>KcA5^>K^A9a>G]A;c>FYA>g>=0010O0010O001N1N2M4M2M4M2M3N3L3N3L3N2M4MW^U6"}, "image_id": 364, "id": 5359}, {"iscrowd": 0, "category_id": 1, "bbox": [296.0, 58.0, 45.0, 59.0], "area": 1459, "segmentation": {"size": [512, 512], "counts": "dRd43k?2N3L3N3L3N2N3L3N3]AUOl=n0QBUOm=m0QBUOP>k0lAYOS>g0kA[OV>e0gA^OX>b0eAA\\>Q101O010O010O00010O010ON3M2M3N3M2M4M2M3N3M2M4M2N3L3N2N3M2Mk]e2"}, "image_id": 364, "id": 5360}, {"iscrowd": 0, "category_id": 1, "bbox": [461.0, 63.0, 41.0, 50.0], "area": 781, "segmentation": {"size": [512, 512], "counts": "fbV72l?2i@M^>7_AK_>7_AL]>8`AJ^>8`AK]>8`AJY>F`Ac05JX>=eAD\\>k02M4M2N2N3M2N3O000M4M2N3L3N3L3N2010O010O00010O010O01O01O01L3Nfm4"}, "image_id": 364, "id": 5361}, {"iscrowd": 0, "category_id": 1, "bbox": [351.0, 72.0, 44.0, 57.0], "area": 1386, "segmentation": {"size": [512, 512], "counts": "ec_52l?3M2M4M2N2M4M2N3L3N2N3M2M4M2N2M4N1N3L3N210O010O0001O001M2cNfAR1\\>kNgAm0LSOi>n05O001M2M3N3L3N3L3N2M4M2MY]j1"}, "image_id": 364, "id": 5362}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 75.0, 4.0, 8.0], "area": 19, "segmentation": {"size": [512, 512], "counts": "[28h?N2N3Mfmm7"}, "image_id": 364, "id": 5363}, {"iscrowd": 0, "category_id": 1, "bbox": [10.0, 77.0, 30.0, 35.0], "area": 664, "segmentation": {"size": [512, 512], "counts": "VS53j?4M2M3M4M2M3M4M2M4O00010M20010O0010O001N1M3M4M2M3N3L3E^@5h?N3L]m[7"}, "image_id": 364, "id": 5364}, {"iscrowd": 0, "category_id": 1, "bbox": [107.0, 98.0, 47.0, 52.0], "area": 1468, "segmentation": {"size": [512, 
512], "counts": "Sde13k?2M4M2M4M2M3N3L3N3L3N2M4N101M201O01O01O010O010O01O01O010O0O110O0101N010YO]AOc>O_A1a>MbA3^>IeA7[>GhA7Z>FhA8\\>ChA9Q?N2N3L3Nlkb5"}, "image_id": 364, "id": 5365}, {"iscrowd": 0, "category_id": 1, "bbox": [401.0, 101.0, 43.0, 55.0], "area": 1545, "segmentation": {"size": [512, 512], "counts": "ncX62`00j>2SA1j>3SA0j>2SA1j>2TA1i>2SA1k>c0M4M2M4M20001N101O010O01O01O010O01O01O01O0M4M4K4N11O010OCYA^Ok>?WA_Ok>>YA^Oj>`0=L3N3L3N2N`lQ1"}, "image_id": 364, "id": 5366}, {"iscrowd": 0, "category_id": 1, "bbox": [54.0, 120.0, 32.0, 37.0], "area": 654, "segmentation": {"size": [512, 512], "counts": "[Tk02l?2N3M2M3N3M2N3M2N3O01O010O01O01O01O0k@[OP?k0O100O0001N101FQA@P?>RA@P?:k@Fa?75M3MSld6"}, "image_id": 364, "id": 5367}, {"iscrowd": 0, "category_id": 1, "bbox": [508.0, 120.0, 4.0, 9.0], "area": 20, "segmentation": {"size": [512, 512], "counts": "oSn71m?2M4M2XL"}, "image_id": 364, "id": 5368}, {"iscrowd": 0, "category_id": 1, "bbox": [450.0, 124.0, 47.0, 63.0], "area": 1656, "segmentation": {"size": [512, 512], "counts": "iTQ71m?2M4M2M3N3L3N3TA^OU>e0iA^OT>e0iA]OT>f0iA^OT>e0iA]OU>e0iA^OW>a0gABX>T10O010O01O010O01O011N01O010ON3M2M4M2N2N3L3O20OHTAYOP?d0SAYOo>d09M2N2M4M2N3M2Mg[7"}, "image_id": 364, "id": 5369}, {"iscrowd": 0, "category_id": 1, "bbox": [68.0, 157.0, 68.0, 49.0], "area": 1764, "segmentation": {"size": [512, 512], "counts": "gUR11m?3L3N3L3N2M4N110O00010O010O00010O01O000010M2M4M2M3N3L3O20O0010O0010O0010O0010O0010O0010O0010O0010O0010O00POWAj0i>SOYAm0m>010OO2L3N2M4M2M4M2M3N3L3N_jk5"}, "image_id": 364, "id": 5370}, {"iscrowd": 0, "category_id": 1, "bbox": [44.0, 178.0, 22.0, 21.0], "area": 287, "segmentation": {"size": [512, 512], "counts": "oUf02k?4K4M3M4O001O01O010O010O01O01O0N3L3N2N3L3NZjn6"}, "image_id": 364, "id": 5371}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 186.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "jeo72T:"}, "image_id": 364, "id": 5372}, {"iscrowd": 0, "category_id": 1, "bbox": [225.0, 201.0, 51.0, 51.0], "area": 1480, "segmentation": {"size": [512, 512], "counts": "Xg`31l?4M2M3N3L3N3M2M3N3L3N3N1010O00010O010OWAoNc>Q1ZASOe>T110O010O01O01O010O000NO0103L12N2M4M2M4M2M3N3L3N3M2M4M2M3N3Ldie3"}, "image_id": 364, "id": 5373}, {"iscrowd": 0, "category_id": 1, "bbox": [62.0, 208.0, 51.0, 50.0], "area": 1410, "segmentation": {"size": [512, 512], "counts": "lWo03k?2M4M2M4M2M@e@a0Y?5M_Oj@:U?71000O01Mj@^OV?e0O3N3M2M4N1010O0010O0010O010O010O0010O0010O010O010O010O00010[OUA1m>LVA2m>KVA2l>LVA2m>KVA1m>LVA2^?McXW6"}, "image_id": 364, "id": 5374}, {"iscrowd": 0, "category_id": 1, "bbox": [276.0, 216.0, 51.0, 57.0], "area": 1770, "segmentation": {"size": [512, 512], "counts": "QXZ44j?2M4AGSA;k>GRAHRA;l>FQA=o>;0O00O2L3N3M21O01O010O0M3N3L3N3N100010O010O00010O0N3M2M3N3L3N3L3N2M4M2M4M2O101O010Cd@3]?Jg@2\\?Lf@2Zhl2"}, "image_id": 364, "id": 5375}, {"iscrowd": 0, "category_id": 1, "bbox": [327.0, 236.0, 51.0, 50.0], "area": 1445, "segmentation": {"size": [512, 512], "counts": "^hS51m?2M4M2N3M2M4M2N3M2M3N3O010O0010O010O0001M2N3M2M4M2O20O00010O010O010N1N3L3N2N3M2M4M2N3M2M4M2N2N3L3N3M[hR2"}, "image_id": 364, "id": 5376}, {"iscrowd": 0, "category_id": 1, "bbox": [195.0, 250.0, 25.0, 21.0], "area": 300, "segmentation": {"size": [512, 512], "counts": "UhQ32l?2N3M2M3N3O0010O010O010O01O01O010O010O0M4M2N2N3LQha4"}, "image_id": 364, "id": 5377}, {"iscrowd": 0, "category_id": 1, "bbox": [382.0, 256.0, 48.0, 68.0], "area": 1700, "segmentation": {"size": [512, 512], "counts": 
"VYo52l?2N3L3N3L3N2N3L3N3M2_ATOl=o0QBSOo=m0nAWOQ>i0mAYOT>g0iA[OW>e0fA_OX>c0eA_OY>V1M2M3N3M2N3O00N3M2N3L3N3M2M3N3M2M4M2M4M2N2M4M2N3L10002M4M2M4I[@OaWY1"}, "image_id": 364, "id": 5378}, {"iscrowd": 0, "category_id": 1, "bbox": [24.0, 262.0, 28.0, 30.0], "area": 486, "segmentation": {"size": [512, 512], "counts": "lX<1m?2N3M2M3N3M2N3L3N3M2010O0010O010O01O0N2N3L3N3M2N3L3N3MdgU7"}, "image_id": 364, "id": 5379}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 272.0, 24.0, 35.0], "area": 571, "segmentation": {"size": [512, 512], "counts": "`8m0S?10O00010O010O01O01O010O01OO2M2M4M2M3N3L3N3L3NYgc7"}, "image_id": 364, "id": 5380}, {"iscrowd": 0, "category_id": 1, "bbox": [302.0, 274.0, 26.0, 37.0], "area": 545, "segmentation": {"size": [512, 512], "counts": "`Yg41l?4M2N2M4M2N3L3N2M4M2N2M4N1010N1M4M2M3N3L3N30O00Df@0]?Me@0gVl2"}, "image_id": 364, "id": 5381}, {"iscrowd": 0, "category_id": 1, "bbox": [245.0, 276.0, 29.0, 30.0], "area": 518, "segmentation": {"size": [512, 512], "counts": "Wij32l?3L3N3M2N3M2N2N3M201O010O010O010O00010ON3M2N3L3N3M2N3M2MWgf3"}, "image_id": 364, "id": 5382}, {"iscrowd": 0, "category_id": 1, "bbox": [222.0, 278.0, 21.0, 17.0], "area": 208, "segmentation": {"size": [512, 512], "counts": "oX_31m?3M2N3L3010O0010O0010O010O0010O0001L3N3MUWV4"}, "image_id": 364, "id": 5383}, {"iscrowd": 0, "category_id": 1, "bbox": [65.0, 279.0, 47.0, 37.0], "area": 994, "segmentation": {"size": [512, 512], "counts": "ZiP13k?2N3L3N2N3M2M4M2O2O01O01O010O00010O010O00010O010O00010O010O00010O010O00010OO2M2M3N3L3N3L3N2MnfW6"}, "image_id": 364, "id": 5384}, {"iscrowd": 0, "category_id": 1, "bbox": [189.0, 279.0, 54.0, 35.0], "area": 818, "segmentation": {"size": [512, 512], "counts": "Sin23j?3N2N3M2M4O010O010O01O010O01O010O010O01O010O01O010O010O01O010O01O010O010O01O010O01O010O010O01O001M2M3N3M2NfVV4"}, "image_id": 364, "id": 5385}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 279.0, 48.0, 57.0], "area": 1447, "segmentation": {"size": [512, 512], "counts": "RZi63k?2M3N3L3N2M4M2M4L3N2M4N110O01O01O010O01ON3M201N1M3N3L3N3L12N3L3N3L3N2M4M2M4M2M4L3N3L3N2M4M2M4MSg>"}, "image_id": 364, "id": 5386}, {"iscrowd": 0, "category_id": 1, "bbox": [243.0, 305.0, 26.0, 23.0], "area": 348, "segmentation": {"size": [512, 512], "counts": "nii32l?2N3M2N2N3M2O2O010O00010O010O00010O010ON3L3N2M4M2NZVi3"}, "image_id": 364, "id": 5387}, {"iscrowd": 0, "category_id": 1, "bbox": [353.0, 306.0, 27.0, 26.0], "area": 434, "segmentation": {"size": [512, 512], "counts": "Rj`53k?2M4L3N3L3N2O2O0010O0010O0010O0010O00O2M2M4M2M3N3L3NYfQ2"}, "image_id": 364, "id": 5388}, {"iscrowd": 0, "category_id": 1, "bbox": [27.0, 307.0, 48.0, 53.0], "area": 1411, "segmentation": {"size": [512, 512], "counts": "fj=2l?2N3L3N3M2N2M4M201N1M310O001L3N2N3L3N3N101O0O1010O01O01O010O0PO`Aa0`>[OdAd0\\>ZOfAg0[>UOgAk0Y>ROkAm0d>10O01O01N1N3L3N3Ee@L^?1f@L\\?2;LfUj6"}, "image_id": 364, "id": 5389}, {"iscrowd": 0, "category_id": 1, "bbox": [483.0, 307.0, 29.0, 48.0], "area": 975, "segmentation": {"size": [512, 512], "counts": "eja73g?O[@4a?7N2M4L3N2M4L3N3L3N2N30O0010O0010O00010O0010O0010O0001VF"}, "image_id": 364, "id": 5390}, {"iscrowd": 0, "category_id": 1, "bbox": [269.0, 315.0, 18.0, 15.0], "area": 155, "segmentation": {"size": [512, 512], "counts": "SjV42k?3M3N30O010O00010O010O00010O0N2M4MQV`3"}, "image_id": 364, "id": 5391}, {"iscrowd": 0, "category_id": 1, "bbox": [402.0, 327.0, 24.0, 25.0], "area": 367, "segmentation": {"size": [512, 512], "counts": "iZY61m?3L3N2N3L3N3L3O2O01O01O010O01O001M2M3N3M2M4M2NeeZ1"}, "image_id": 364, "id": 5392}, {"iscrowd": 0, 
"category_id": 1, "bbox": [178.0, 333.0, 42.0, 60.0], "area": 1467, "segmentation": {"size": [512, 512], "counts": "Z[i22k?4M2M3m@G\\>=aAF\\>=`AF^><`AG\\>o0M4M2O2O01O01O010O01O01O010ON2M4L3N3L3N2M4M2M4M2M3M4M2M4M2M3N3L3NZea4"}, "image_id": 364, "id": 5393}, {"iscrowd": 0, "category_id": 1, "bbox": [279.0, 339.0, 27.0, 27.0], "area": 459, "segmentation": {"size": [512, 512], "counts": "Tk[43j?3N2M4M2N2M4M2010O00010O0010O0010O0010O00M4M2M3M4M2MYeV3"}, "image_id": 364, "id": 5394}, {"iscrowd": 0, "category_id": 1, "bbox": [329.0, 341.0, 24.0, 40.0], "area": 518, "segmentation": {"size": [512, 512], "counts": "gkT51l?3N2M4M2M4M2M3N3L3M4M2M3N3N02M2M4L3N2M4M2Hf@G^?5e@I]?0c@Old_2"}, "image_id": 364, "id": 5395}, {"iscrowd": 0, "category_id": 1, "bbox": [256.0, 342.0, 19.0, 20.0], "area": 222, "segmentation": {"size": [512, 512], "counts": "S[P42k?3N3M2N3L3O110O0010O0010ON3M2M3N3M2MXUf3"}, "image_id": 364, "id": 5396}, {"iscrowd": 0, "category_id": 1, "bbox": [453.0, 346.0, 31.0, 36.0], "area": 651, "segmentation": {"size": [512, 512], "counts": "ekR72l?2N3M2M3N3M2N3M2M3N3M2N3N11O010O01O01O0O2L3N2N3L3N3L3N2M4M2MRe="}, "image_id": 364, "id": 5397}, {"iscrowd": 0, "category_id": 1, "bbox": [222.0, 356.0, 46.0, 56.0], "area": 1573, "segmentation": {"size": [512, 512], "counts": "l[_33k?2N2h@Jc>:YAIe>9YAJc>:YAIe>9YAJd>8YAKd>j0N3L301O01O01O010O01O01O010O01O01O010O01O01O01L3IaAkNb>R17M3N3L3N3L3N2M4M2M4M2M3Nadi3"}, "image_id": 364, "id": 5398}, {"iscrowd": 0, "category_id": 1, "bbox": [369.0, 364.0, 39.0, 36.0], "area": 818, "segmentation": {"size": [512, 512], "counts": "Slh52l?2M4M2M4M2M3N3M2010O00010O010O00010O01O0N2N3M2N30O00010O0IUAXOm>e0UAYOm>d09N3L3N3L3N2M4M`dc1"}, "image_id": 364, "id": 5399}, {"iscrowd": 0, "category_id": 1, "bbox": [267.0, 369.0, 53.0, 54.0], "area": 1714, "segmentation": {"size": [512, 512], "counts": "PlU41m?2M4M2M4M2SACT>`0iACW>>fAEY>;aA_OI9f>9]AMd>2ZA1e>e010O2N01O01O0O2O00010O010O00010O010O0010O0010O0010O0O2L3N3M2M3N3L3N3L3N2M4M2M4M2M3NRdo2"}, "image_id": 364, "id": 5400}, {"iscrowd": 0, "category_id": 1, "bbox": [432.0, 385.0, 40.0, 48.0], "area": 1125, "segmentation": {"size": [512, 512], "counts": "V]h61l?3N3L3N2M4M2N3L3N3L3N2M4M2M4M200010O010O01O01O010O0O1M4M2N3L3N2M4M2Ij@BY?;7M4M2M3Nhcc0"}, "image_id": 364, "id": 5401}, {"iscrowd": 0, "category_id": 1, "bbox": [323.0, 394.0, 49.0, 55.0], "area": 1585, "segmentation": {"size": [512, 512], "counts": "VmQ52l?2N2M4M2M4M2M4M2M3N3L3O2O01O^AoNU>R1gAROX>n0fATO[>l0aAWO_>V10O01O01O010O01O01O010O01ON3M2M4O010O00001M2N3L3N2M4@i@3Y?Ki@2[?Jh@4Z?Ji@2d?NPcU2"}, "image_id": 364, "id": 5402}, {"iscrowd": 0, "category_id": 1, "bbox": [496.0, 409.0, 16.0, 21.0], "area": 206, "segmentation": {"size": [512, 512], "counts": "X]h72k?3M4L3N2M4N110O00010OM4M2M3N3LVC"}, "image_id": 364, "id": 5403}, {"iscrowd": 0, "category_id": 1, "bbox": [386.0, 412.0, 40.0, 59.0], "area": 1430, "segmentation": {"size": [512, 512], "counts": "h]Q63j?3N3k@H]>;`AH^>:_AJ]>:`AH]>;`AH^>:`AI\\>;`AH^>o0L3N2010O01O01O010O0010O0001L3M4M2M4M2M3M4M2M4M2M4M2M3M4M2M4MjbZ1"}, "image_id": 364, "id": 5404}, {"iscrowd": 0, "category_id": 1, "bbox": [484.0, 429.0, 22.0, 27.0], "area": 333, "segmentation": {"size": [512, 512], "counts": "P^b73j?3N2M4M2M4M2M4N11O010O010M2N2N3L3N3M2M4M2N_b2"}, "image_id": 364, "id": 5405}, {"iscrowd": 0, "category_id": 1, "bbox": [429.0, 439.0, 47.0, 53.0], "area": 1617, "segmentation": {"size": [512, 512], "counts": 
"dnf68f?4L3L4M3L3M4M2M4L3N3L3O1010O010O00010O010O010O00010O010O00010N1010O01N1D[AZOh>c0\\AZOg>c0[AZOh>c0=M200010ODc@4_?He@4e?Nfaa0"}, "image_id": 364, "id": 5406}, {"iscrowd": 0, "category_id": 1, "bbox": [5.0, 448.0, 44.0, 45.0], "area": 1162, "segmentation": {"size": [512, 512], "counts": "on22l?2M5L2M4M2M4M200010N11O0N3M2M3M4M2M40O00010O0010O0010O00010O0N2M4M2M4M2M3N3L3M3N3L3N2MjQW7"}, "image_id": 364, "id": 5407}, {"iscrowd": 0, "category_id": 1, "bbox": [486.0, 464.0, 26.0, 48.0], "area": 1012, "segmentation": {"size": [512, 512], "counts": "Q_c71l?3j@0Z>2VAL64a>4UAK74b>;ZAIb>:\\AHe>j0O01O01M2M3O2O0001O001O00001O00001O001O"}, "image_id": 364, "id": 5408}, {"iscrowd": 0, "category_id": 1, "bbox": [53.0, 471.0, 54.0, 41.0], "area": 1446, "segmentation": {"size": [512, 512], "counts": "joj01m?2M4M2M3N3M2M4M2M3N3M2M4M2M3O1001O0N3L3N3M210O01O010O01O010O01L3O1UAoNg>U11O001O001O00001O001N1N2VOPAb0S?[Oo@b0Z?M2M3N3M2M4MjPZ6"}, "image_id": 364, "id": 5409}, {"iscrowd": 0, "category_id": 1, "bbox": [114.0, 477.0, 40.0, 35.0], "area": 897, "segmentation": {"size": [512, 512], "counts": "o_i11l?3N2N2M3N2N2M3N2N2M3N2N2M3N2O1001O00001O001O001O00001O001O0N2N3M2M4M2N3L3N2M4Mk`b5"}, "image_id": 364, "id": 5410}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 489.0, 3.0, 6.0], "area": 11, "segmentation": {"size": [512, 512], "counts": "Y?6j?N2MjPn7"}, "image_id": 364, "id": 5411}, {"iscrowd": 0, "category_id": 1, "bbox": [175.0, 505.0, 21.0, 7.0], "area": 82, "segmentation": {"size": [512, 512], "counts": "oog21m?2M3O100001O001O00001O001O00001O001O0000Q`m4"}, "image_id": 364, "id": 5412}, {"iscrowd": 0, "category_id": 1, "bbox": [492.0, 86.0, 20.0, 33.0], "area": 393, "segmentation": {"size": [512, 512], "counts": "YSf72m?2M2O2N2N2M3N1O2M3N2N2N1N3N2O10N1N011O[M"}, "image_id": 366, "id": 5413}, {"iscrowd": 0, "category_id": 1, "bbox": [477.0, 142.0, 27.0, 26.0], "area": 351, "segmentation": {"size": [512, 512], "counts": "jd^71m?3N2N1O2N2M3N2N2N2N110000000000O0O2N2N2N2N2M2O2N2N2NUk3"}, "image_id": 366, "id": 5414}, {"iscrowd": 0, "category_id": 1, "bbox": [245.0, 290.0, 180.0, 215.0], "area": 17698, "segmentation": {"size": [512, 512], "counts": "Vjj31n?2N2N2]BJX;9fDIX;9eDJY;7fDJY;8eDJY;8eDJX;9fDIX;9fDIX;9eDJY;7fDJY;8eDJX;9fDIX;9fDIX;9eDJY;8eDJY;7fDKW;8gDIX;9fDIX;9fDIX;9eDJY;8eDJX;8gDJW;8gDIX;9fDIX;9eDJY;8eDJX;9fDIX;9fDIZ;6eDK\\;5bDM^;3_D0a;0]D2c;N[D4d;MZD5f;JYD7h;IVD9j;o10000O10O10000000O10O10000000O10O10000000O10O10000000O10O10000000O10O10000000O10O1000000000O01000000000O01000000000O01000000000O01000000000O01000000000O10O10000000O10O10000000O01N2fLTD]2n;aMTD]2n;aMTD]2n;aMTD\\2n;bMTD]2n;aMTD]2n;aMTD]2n;aMTD]2n;aMTD\\2o;bMSD\\2n;bMTD]2n;aMTD]2n;aMTD]2iKYA4g>OVA2j>0TAOl>4QAMo>5o@KP?b010O010O0100002N2N2N2N2N2N1O000000001O00000002N2M3M3M3M3M3M3M3M3M3M3L4MWob5"}, "image_id": 367, "id": 5423}, {"iscrowd": 0, "category_id": 1, "bbox": [220.0, 0.0, 50.0, 57.0], "area": 1638, "segmentation": {"size": [512, 512], "counts": "]Q^32l?2M3N3L3N3L3N2N3L3N3HTOWAo0g>6M4M2M40ON3M2O101O001O00001O00N2N2M3N2N2M3N2M3N2M3N2N2M3N2M3N2M3N2M3N2N2M3NRPi3"}, "image_id": 367, "id": 5424}, {"iscrowd": 0, "category_id": 1, "bbox": [472.0, 0.0, 40.0, 33.0], "area": 867, "segmentation": {"size": [512, 512], "counts": "PP\\71o?2N2N1O2N2N2N1O2N2N1O2N2N2N1O2N2N1O2N1OO100O100O100O1O100O100O1O100O100Ik@BV?=m@AS??7O1"}, "image_id": 367, "id": 5425}, {"iscrowd": 0, "category_id": 1, "bbox": [400.0, 2.0, 59.0, 71.0], "area": 2021, "segmentation": {"size": [512, 512], "counts": 
"aPX62m?3N1N3N2M3N1N3M3N1N3N2M3M2O2M3N2M2O2M3M3N1N3N2M3N1N3000O0100000N1N3N2M3N1N3N2O1M2N3N2M3N1N3N2M2N3N2M3N1N3M3N2M2O2M3NV^j0"}, "image_id": 367, "id": 5426}, {"iscrowd": 0, "category_id": 1, "bbox": [2.0, 5.0, 20.0, 26.0], "area": 294, "segmentation": {"size": [512, 512], "counts": "hP12k?3N2N3Z@H_?>N3M2M102010O000N3L3N3M2M4M2N2Mjod7"}, "image_id": 367, "id": 5427}, {"iscrowd": 0, "category_id": 1, "bbox": [24.0, 6.0, 16.0, 16.0], "area": 140, "segmentation": {"size": [512, 512], "counts": "`P<1m?3M2O2M2N3N2O0100O0N3N1N3M3N1Ngo[7"}, "image_id": 367, "id": 5428}, {"iscrowd": 0, "category_id": 1, "bbox": [291.0, 13.0, 47.0, 87.0], "area": 2275, "segmentation": {"size": [512, 512], "counts": "[ba42j?4K5ROFRB?i=ESB?h=FTB>h=FTB>h=GSB>h=FTB>j=m01O01O0001O01O0N21O01O000cBmMnP1O101N100O10000O100O10001N100O1002N3M2N3M3M2O2L3D=CO01O000000N2D@_Aa0`>@^Ac0_>@^Ac0_>_O`Ab0_>@^Ac0_>`0N3N2M201O10O10ON3M2O2M3M2O2M3N1N3M2O2M3M2O2M2N3N2M2N3N1N3N2M2N[mR6"}, "image_id": 367, "id": 5438}, {"iscrowd": 0, "category_id": 1, "bbox": [153.0, 78.0, 26.0, 27.0], "area": 372, "segmentation": {"size": [512, 512], "counts": "nb\\23l?1N3N2M2O2M3N1N3N2M2O200O01N2N1N3N2M2O2M3N1N3N2M2OZ]V5"}, "image_id": 367, "id": 5439}, {"iscrowd": 0, "category_id": 1, "bbox": [445.0, 79.0, 43.0, 23.0], "area": 300, "segmentation": {"size": [512, 512], "counts": "bbn64i?300010O010O00010O010O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010L3NSm;"}, "image_id": 367, "id": 5440}, {"iscrowd": 0, "category_id": 1, "bbox": [489.0, 86.0, 16.0, 17.0], "area": 181, "segmentation": {"size": [512, 512], "counts": "Pcd73k?2M4L3N201O0010O00010O0M4M2M3NY]3"}, "image_id": 367, "id": 5441}, {"iscrowd": 0, "category_id": 1, "bbox": [319.0, 87.0, 26.0, 27.0], "area": 482, "segmentation": {"size": [512, 512], "counts": "Yco44i?3M3L4M4K4O101O01O01O01O0001O01O01O01O0O1L4M4L3L4MW]c2"}, "image_id": 367, "id": 5442}, {"iscrowd": 0, "category_id": 1, "bbox": [463.0, 103.0, 16.0, 14.0], "area": 139, "segmentation": {"size": [512, 512], "counts": "]cW73j?4L30010O00010O01O01O010O00M4Lg\\`0"}, "image_id": 367, "id": 5443}, {"iscrowd": 0, "category_id": 1, "bbox": [486.0, 104.0, 14.0, 13.0], "area": 120, "segmentation": {"size": [512, 512], "counts": "_Sc73k?2M3N3O0010O0010O01M200O3Lel5"}, "image_id": 367, "id": 5444}, {"iscrowd": 0, "category_id": 1, "bbox": [261.0, 108.0, 14.0, 14.0], "area": 121, "segmentation": {"size": [512, 512], "counts": "ccR43k?2N3L30010O0010O010N1N2N3Lc\\f3"}, "image_id": 367, "id": 5445}, {"iscrowd": 0, "category_id": 1, "bbox": [397.0, 114.0, 80.0, 56.0], "area": 2214, "segmentation": {"size": [512, 512], "counts": "[dV63k?2N3L3N3M2M4M2N2M4M2N30O010O0010O0010O010O0010O010ON201O0010O010N1O2O010O00010O010O010O00010O010O010O0SAROi>R101O01BVADj>9YAHg>7ZAHf>8ZAIf>7ZAHf>8ZAIc>:\\AFa>=`AB^>`0bA@^>a0bA\\O`>d0`AYOd>g0\\AVOf>j0810O0O2M2N3L3N3M2M3N3M2Nn[a0"}, "image_id": 367, "id": 5446}, {"iscrowd": 0, "category_id": 1, "bbox": [299.0, 116.0, 33.0, 35.0], "area": 708, "segmentation": {"size": [512, 512], "counts": "[de43k?3L3N2M4M2M4M2M3N3N1010O0010O0010O0010O0010O0O2L3N2M4M2M4M2M3N3LVli2"}, "image_id": 367, "id": 5447}, {"iscrowd": 0, "category_id": 1, "bbox": [135.0, 118.0, 67.0, 63.0], "area": 1980, "segmentation": {"size": [512, 512], "counts": "ReS22k?3N2M4M2M4M2N2M4M2O20O010O00010O010O001YOn@?Q?^ORAb0V?10O0010O010O0O2L3N2N3L3N3L3N2M4M2N3L3N3L3N2N20N3L3N2M4M2M4M2N3L3N2M4M2N3L3N2M4M2M4M2N2M4MSlj4"}, "image_id": 367, "id": 5448}, {"iscrowd": 0, "category_id": 1, "bbox": [239.0, 130.0, 24.0, 25.0], "area": 374, "segmentation": {"size": 
[512, 512], "counts": "ddg31l?4M2M3N3L3N3L30001O01O01O010O01O0N2M4M2M4M2M3Ml[l3"}, "image_id": 367, "id": 5449}, {"iscrowd": 0, "category_id": 1, "bbox": [317.0, 149.0, 18.0, 24.0], "area": 253, "segmentation": {"size": [512, 512], "counts": "Ven44j?2M4M2N2M4M2M40O00001L3N3L3N3M2M3NY[h2"}, "image_id": 367, "id": 5450}, {"iscrowd": 0, "category_id": 1, "bbox": [507.0, 150.0, 5.0, 13.0], "area": 37, "segmentation": {"size": [512, 512], "counts": "odm72l?2M4M2N3YK"}, "image_id": 367, "id": 5451}, {"iscrowd": 0, "category_id": 1, "bbox": [197.0, 151.0, 45.0, 45.0], "area": 1204, "segmentation": {"size": [512, 512], "counts": "eeR32k?3N3L3N2N3L3N3L3N3M2M3N3N110O01O010O01O0N3N100010O010jNZAP1f>nN\\AR1j>0O0010M2N3L3N2N3L3N3L3N3M2M3N3MmjV4"}, "image_id": 367, "id": 5452}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 160.0, 50.0, 37.0], "area": 1130, "segmentation": {"size": [512, 512], "counts": "hUR62l?3L3N3L3N2N3O010O000M4M2N3O000N3M201O00010O01O01O010O01O0M3N3L3N0O0211O01O01O010O01N1M3N3L3N3L3N2MnjT1"}, "image_id": 367, "id": 5453}, {"iscrowd": 0, "category_id": 1, "bbox": [498.0, 166.0, 14.0, 19.0], "area": 180, "segmentation": {"size": [512, 512], "counts": "_Ui73k?2M5L30O0001L310O00010O010fJ"}, "image_id": 367, "id": 5454}, {"iscrowd": 0, "category_id": 1, "bbox": [317.0, 172.0, 50.0, 60.0], "area": 1772, "segmentation": {"size": [512, 512], "counts": "jfn43k?2M3M4M2M3N3L3N3L3N2M4L3N3L3N2M4M2M4O00010O00010O010O00010O01O0N2M4L3N3L3N2M4M2M3M4M2M4M2M3N3L3N3L[ZX2"}, "image_id": 367, "id": 5455}, {"iscrowd": 0, "category_id": 1, "bbox": [246.0, 176.0, 53.0, 47.0], "area": 1518, "segmentation": {"size": [512, 512], "counts": "^Vk32l?3L3N2M4M2M4M2M3N3L3N30O0010O010O00010O010O0010O0010O0O2M2M3N3M210O00010O01N1N2M4M2M4M2M3N3L3N3L3N3L3N2M[ZZ3"}, "image_id": 367, "id": 5456}, {"iscrowd": 0, "category_id": 1, "bbox": [355.0, 193.0, 58.0, 79.0], "area": 2347, "segmentation": {"size": [512, 512], "counts": "Yga53k?3e@Lf>6WAMf>6XAMe>6XALf>6WANe>6XAOc>h0N3L3N2N3M2kAYNQ>k1010O0010O001L3N3M2M3N3MoN^BHb=6aBJ^=4dBK^=1fBL\\=2fBL]=1eBL^=1fBL\\=5cBI]=9dBC_=>`B@c=?^B^Od=b0\\B[Oh=e0XBXOj=h0VBVOm=j0d0010L3N210O01M2N3M2M4M2N2M4M2N3L3N3MjYa1"}, "image_id": 367, "id": 5457}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 204.0, 17.0, 16.0], "area": 206, "segmentation": {"size": [512, 512], "counts": "_VW73m?5J6K0000O10O100000O10O100002N6I]Y`0"}, "image_id": 367, "id": 5458}, {"iscrowd": 0, "category_id": 1, "bbox": [503.0, 207.0, 9.0, 24.0], "area": 120, "segmentation": {"size": [512, 512], "counts": "Qgk73k?2N3L3N2N3L3N2N3`I"}, "image_id": 367, "id": 5459}, {"iscrowd": 0, "category_id": 1, "bbox": [423.0, 220.0, 32.0, 51.0], "area": 1552, "segmentation": {"size": [512, 512], "counts": "lfc6j0V?00;E>B000000000000000000000000O100000000000000000000000000000TYl0"}, "image_id": 367, "id": 5460}, {"iscrowd": 0, "category_id": 1, "bbox": [464.0, 223.0, 17.0, 23.0], "area": 216, "segmentation": {"size": [512, 512], "counts": "_WX73k?3M2M4M2N2M4M201ON3M2N2M4M2N3L3NPY?"}, "image_id": 367, "id": 5461}, {"iscrowd": 0, "category_id": 1, "bbox": [472.0, 229.0, 40.0, 38.0], "area": 859, "segmentation": {"size": [512, 512], "counts": "UX\\72f?0]@4_?8N3O001M2N2M4M1O0O10Jo@^OR?c0o@[OR?f0410O01O01O010O010O01O01O010O010O01O01O0N3M2N3L3N2NcH"}, "image_id": 367, "id": 5462}, {"iscrowd": 0, "category_id": 1, "bbox": [493.0, 269.0, 19.0, 53.0], "area": 749, "segmentation": {"size": [512, 512], "counts": "jif74g?5\\OJYAEVA`0j>@QAe0o>6L4K6J5K50000010O000000010OaG"}, "image_id": 367, "id": 5463}, {"iscrowd": 0, "category_id": 1, "bbox": 
[45.0, 288.0, 47.0, 64.0], "area": 1795, "segmentation": {"size": [512, 512], "counts": "^jf03j?3N3L3N2M4L3N2M4M2M4M2M3M4M2M4M2M3N3O010O00010O01O01O01O0MkA]NT>`1lAcNS>Z1mAiNS>U1mAnNS>n0kAWOU>f0kA]OV>`0iADV>9kAIU>5mAIV>3nAJT>3oAJT>4nAJU>2oAJT>4nAIV>3nAJT>3oAJT>4n0Lhfa6"}, "image_id": 367, "id": 5464}, {"iscrowd": 0, "category_id": 1, "bbox": [100.0, 300.0, 25.0, 27.0], "area": 428, "segmentation": {"size": [512, 512], "counts": "oYb12k?3N2N3L3N3L3N2N30O01N1010O00010O001M2M4M2M3D_@7f?N3L_VQ6"}, "image_id": 367, "id": 5465}, {"iscrowd": 0, "category_id": 1, "bbox": [89.0, 324.0, 60.0, 55.0], "area": 1712, "segmentation": {"size": [512, 512], "counts": "Xk\\13k?2M3N3L3N3M2M4M2M3N3L3O20ON30O0010O010O0010YOWA5i>H[A7f>F\\A;e>@^A`0c>]O`Ac0n>010O000M4M2N3L3N3M2M3N3M2M3NO011O2M2O2N2M4M2N3L3N2N3L3N3M2M4M2M3JY@1`ee5"}, "image_id": 367, "id": 5466}, {"iscrowd": 0, "category_id": 1, "bbox": [149.0, 334.0, 53.0, 64.0], "area": 1526, "segmentation": {"size": [512, 512], "counts": "hkZ22l?2M3N3L3N3L3N2M4M2N30O00010O010O000101N10O01ON3O010ON2N3L3N3\\AiNZ>Z1cAjNZ>_1M4M00GlAhNS>Y1oAdNT>Y1PBdNQ>[1;M3N2N3L3N3L3N2M4M2M4M2M3G[@5j?L^ej4"}, "image_id": 367, "id": 5467}, {"iscrowd": 0, "category_id": 1, "bbox": [54.0, 349.0, 20.0, 24.0], "area": 272, "segmentation": {"size": [512, 512], "counts": "_[k01l?4M2M4M2N2M4M2O20O01O01N1N3L3N3L3N2M4MPej6"}, "image_id": 367, "id": 5468}, {"iscrowd": 0, "category_id": 1, "bbox": [212.0, 370.0, 50.0, 48.0], "area": 1374, "segmentation": {"size": [512, 512], "counts": "d\\Z31m?2N3L3N2M4M2N3L3N3L3N2M4O001O01O010O010O00010O010O01O01O010ON3L3N2M0103L3N2M4M2N2M4M2M4M2M3N3M2M3N[dl3"}, "image_id": 367, "id": 5469}, {"iscrowd": 0, "category_id": 1, "bbox": [126.0, 378.0, 34.0, 29.0], "area": 547, "segmentation": {"size": [512, 512], "counts": "[\\o11m?2N2N3L3N3M2N3M21O010O01O010O01O010O01O010O01O010O01OO2L3N3M2M4M2Nmc_5"}, "image_id": 367, "id": 5470}, {"iscrowd": 0, "category_id": 1, "bbox": [261.0, 379.0, 51.0, 56.0], "area": 1544, "segmentation": {"size": [512, 512], "counts": "ilR41l?3N3L3M4M2M3N3L3N3L3N2N30O01O01O010O01O0_AmNV>R1gAQOY>P1dARO]>m0`AWO_>U1010O0010O0010O0010O00N3M2M4M2M4M2M3N3L3N3L3N2M4M2M4MgcS3"}, "image_id": 367, "id": 5471}, {"iscrowd": 0, "category_id": 1, "bbox": [13.0, 394.0, 59.0, 49.0], "area": 1652, "segmentation": {"size": [512, 512], "counts": "\\m61m?2N2M4M2N3M2M3N3M2N3L3N3M2N2M40O01O01O010O01O01O010O01O01O010O01O01O010N1N3M2M3N3L3010O0010O0010O0001L3N3L3N2N3L3N3LWck6"}, "image_id": 367, "id": 5472}, {"iscrowd": 0, "category_id": 1, "bbox": [109.0, 395.0, 15.0, 18.0], "area": 142, "segmentation": {"size": [512, 512], "counts": "glf12k?4O00010L3N2M4M210OO2L3M3N3LecQ6"}, "image_id": 367, "id": 5473}, {"iscrowd": 0, "category_id": 1, "bbox": [305.0, 404.0, 53.0, 65.0], "area": 1795, "segmentation": {"size": [512, 512], "counts": "Wnh41l?4HL`@6[?Jc@3O5\\?;L3N3O000N3L3O2O01O01O010O010RAoNk>S1N3M2M4M2FbNoAa1n=cNoA`1n=:N11O2O010N1M4M2M4M2N2M4M2M4M2N3L3N2M4M2M4M2N3L3N2M4MSc\\2"}, "image_id": 367, "id": 5474}, {"iscrowd": 0, "category_id": 1, "bbox": [183.0, 407.0, 30.0, 43.0], "area": 656, "segmentation": {"size": [512, 512], "counts": "fmk22k?4X@Kc?910N1N3L3N2M4M2M4M2M3N3L3N2OO2M4M2M4M2N3L3N3L3N3L3N3L4MTSe4"}, "image_id": 367, "id": 5475}, {"iscrowd": 0, "category_id": 1, "bbox": [86.0, 410.0, 31.0, 29.0], "area": 533, "segmentation": {"size": [512, 512], "counts": "[][12l?3M2M4M2N3M2M3O2O0010O010O0010O0010O010O0010ON2N3L3N3M2M4M2NoRU6"}, "image_id": 367, "id": 5476}, {"iscrowd": 0, "category_id": 1, "bbox": [239.0, 425.0, 22.0, 27.0], "area": 347, 
"segmentation": {"size": [512, 512], "counts": "mmg32l?2M4M2N3L3N2M4M2O2O010O0001L3N3L3N3M2M3N3MdRm3"}, "image_id": 367, "id": 5477}, {"iscrowd": 0, "category_id": 1, "bbox": [357.0, 430.0, 55.0, 49.0], "area": 1426, "segmentation": {"size": [512, 512], "counts": "Wnb51m?3M2M4M2N3N1b@BZ?c00O010O0001M2N3L3O20O0010O010O010o@SOm>Q1O010O01O010O01O0O2M2N1N10O13M21O010O0C]A[Of>b0\\A[Og>c0\\AZOf>d0\\AZOg>b0=N3M2N3L3N2N3M]ba1"}, "image_id": 367, "id": 5478}, {"iscrowd": 0, "category_id": 1, "bbox": [406.0, 448.0, 54.0, 56.0], "area": 1591, "segmentation": {"size": [512, 512], "counts": "S_[62k?3N3L3N2M4O010O01O01O010c@CU?=i@EX?b0M2N2M4M201O00001M2M4M2M3M4M2N30M2M4N100010O01M2N2M4M2M4M2M3N3L3N3O01O01N1N3Cc@2`?Lb@2VQj0"}, "image_id": 367, "id": 5479}, {"iscrowd": 0, "category_id": 1, "bbox": [48.0, 452.0, 41.0, 28.0], "area": 564, "segmentation": {"size": [512, 512], "counts": "`^h02k?3M4M2M3O20O01O01O010O01O01O01O01O010O01O01O010O01O01O010O01O01O010O01ON3L3N2M4M`Qc6"}, "image_id": 367, "id": 5480}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 453.0, 44.0, 43.0], "area": 1270, "segmentation": {"size": [512, 512], "counts": "V>m0T?0O01O010O01O01O010O01O010O01ON3L3N3N10010O010O0010O0010O010ON2N3L3N3M2M3N3L3N3M2M3N3LdaY7"}, "image_id": 367, "id": 5481}, {"iscrowd": 0, "category_id": 1, "bbox": [461.0, 462.0, 51.0, 50.0], "area": 1434, "segmentation": {"size": [512, 512], "counts": "doV72l?2N2M4M2M4M2O2O01O01O010O010O0L]Ol@b0R?@m@b0P?@n@b0o>:M20001M2M4M2M3N2O1001O001O00001O0M4M2N3L3N2M4M2N3L3N3M2M3N3M2M\\A"}, "image_id": 367, "id": 5482}, {"iscrowd": 0, "category_id": 1, "bbox": [278.0, 470.0, 29.0, 31.0], "area": 566, "segmentation": {"size": [512, 512], "counts": "Z_[44j?2M4M2M3N3L3N3L300010O01O01O010O01O01O0O2L3N2M4M2M4M2M3NUQV3"}, "image_id": 367, "id": 5483}, {"iscrowd": 0, "category_id": 1, "bbox": [334.0, 470.0, 26.0, 25.0], "area": 396, "segmentation": {"size": [512, 512], "counts": "Z_W51m?3L3N3L3N2N3L3N3NO3M21O010O01O01O010ON3M2M3N3L3N3MUa[2"}, "image_id": 367, "id": 5484}, {"iscrowd": 0, "category_id": 1, "bbox": [326.0, 472.0, 12.0, 15.0], "area": 119, "segmentation": {"size": [512, 512], "counts": "Q_S54j?2M4M2N2010O000N3M2M4MWaf2"}, "image_id": 367, "id": 5485}, {"iscrowd": 0, "category_id": 1, "bbox": [142.0, 480.0, 40.0, 32.0], "area": 874, "segmentation": {"size": [512, 512], "counts": "f_W23j?4M2M3M4M2d@AX?c0001O00001O0000M3M3M3M300001O00001O00001O001O00001O0N2M4L3N2M4L3N3Lk`T5"}, "image_id": 367, "id": 5486}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 499.0, 21.0, 13.0], "area": 190, "segmentation": {"size": [512, 512], "counts": "k?5l?O00M3N2M3O1001O00001O001O00001O001O0N3MYPe7"}, "image_id": 367, "id": 5487}, {"iscrowd": 0, "category_id": 1, "bbox": [197.0, 500.0, 14.0, 12.0], "area": 116, "segmentation": {"size": [512, 512], "counts": "ooR31l?3L4M3O11O0000001O000O2L3M\\Pf4"}, "image_id": 367, "id": 5488}, {"iscrowd": 0, "category_id": 1, "bbox": [225.0, 500.0, 34.0, 12.0], "area": 229, "segmentation": {"size": [512, 512], "counts": "oo`31l?3L4M3O1001O001O00001O00001O00001O00001O00001O00001O00001O001O0000RPn3"}, "image_id": 367, "id": 5489}, {"iscrowd": 0, "category_id": 1, "bbox": [36.0, 502.0, 26.0, 10.0], "area": 141, "segmentation": {"size": [512, 512], "counts": "n_b02l?2N2N2N2001O001O001O00001O001O001O00001O001O001O00Q`P7"}, "image_id": 367, "id": 5490}, {"iscrowd": 0, "category_id": 1, "bbox": [211.0, 503.0, 13.0, 9.0], "area": 93, "segmentation": {"size": [512, 512], "counts": "moY33j?3M30000001O00001O00001LY`_4"}, "image_id": 367, "id": 5491}, {"iscrowd": 0, 
"category_id": 1, "bbox": [391.0, 507.0, 12.0, 5.0], "area": 34, "segmentation": {"size": [512, 512], "counts": "ooS61l?3O11O00001O001O001O00QPf1"}, "image_id": 367, "id": 5492}, {"iscrowd": 0, "category_id": 1, "bbox": [15.0, 0.0, 45.0, 26.0], "area": 731, "segmentation": {"size": [512, 512], "counts": "P`73m?3M3M000000O100O100002N2N3M3M3M2N3M1O0000O100O10000O10000O10000O100O10000O4M2N0O103M3L4MboQ7"}, "image_id": 368, "id": 5493}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 0.0, 64.0, 52.0], "area": 1677, "segmentation": {"size": [512, 512], "counts": "o`o23l?2N2N2N2N2O1N2N2N2N2N2LYOl@i0R?4N00000000000000000000001O002N2N2N2O1O10000000000000000001O000N2N2N2N2N2N2N2N2O1N2N2N2N2N2N2N2N2N2N2NQ_P4"}, "image_id": 368, "id": 5494}, {"iscrowd": 0, "category_id": 1, "bbox": [258.0, 0.0, 31.0, 16.0], "area": 251, "segmentation": {"size": [512, 512], "counts": "PPQ41o?1O1O1O2N1O1O1O1O1O2N1O1O1OO1O1O1O1O1O1O100O1O1O1O1O1O100O1OQ`_3"}, "image_id": 368, "id": 5495}, {"iscrowd": 0, "category_id": 1, "bbox": [347.0, 0.0, 17.0, 8.0], "area": 73, "segmentation": {"size": [512, 512], "counts": "P`]51o?1O1O1O2N1O1O00O1O1O100O1O1O1O10PPZ2"}, "image_id": 368, "id": 5496}, {"iscrowd": 0, "category_id": 1, "bbox": [400.0, 0.0, 17.0, 9.0], "area": 80, "segmentation": {"size": [512, 512], "counts": "PPX61o?1O1O1O2N1O1O1OO1O1O1O1O100O1O1OQ`_1"}, "image_id": 368, "id": 5497}, {"iscrowd": 0, "category_id": 1, "bbox": [425.0, 0.0, 64.0, 46.0], "area": 1577, "segmentation": {"size": [512, 512], "counts": "h`d61n?2N2N3M2N2N2N2N2N2N2N2N2O1N2N3M2N2N2N2N000000001O01O1O1O1O1O1O1O1O1O1O11O1O00O1O2N2N2N1O0000001O1O1O1O100O1O1O1O2N2N3M2N2N2N2Ni_;"}, "image_id": 368, "id": 5498}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 17.0, 17.0, 43.0], "area": 420, "segmentation": {"size": [512, 512], "counts": "b0Z1f>0O02O3M3L3N3M3L3N3L4M2N3L4M2N3L\\^g7"}, "image_id": 368, "id": 5499}, {"iscrowd": 0, "category_id": 1, "bbox": [374.0, 17.0, 26.0, 30.0], "area": 415, "segmentation": {"size": [512, 512], "counts": "PQk52m?2N2N2N2O1N2N2N3M2N2N2N2N0001O00002N2N2N3M2G`@0a?Na@0a?Nb@O`?O_og1"}, "image_id": 368, "id": 5500}, {"iscrowd": 0, "category_id": 1, "bbox": [269.0, 20.0, 32.0, 41.0], "area": 455, "segmentation": {"size": [512, 512], "counts": "l`V43l?2N2N2N2O1N2N2N2N2001O00e@_OV?a0h@AX?d00000000Bh@3X?Kj@5V?Il@7T?Go@8Q?FQA:[?000000N2N2N2N2O1NZ^Y3"}, "image_id": 368, "id": 5501}, {"iscrowd": 0, "category_id": 1, "bbox": [443.0, 44.0, 56.0, 60.0], "area": 1694, "segmentation": {"size": [512, 512], "counts": "dbm61m?3N2N2N1O2M3N2N2N2N1N3N2N2N2M2O2N2N2N2M3N1O2N2N0O10O100001N3N1OO102N2N2M2O2N2N2N2M3N1O2N2N2M3N1O2Ig@D[?:7M3N2N1O2NS^6"}, "image_id": 368, "id": 5502}, {"iscrowd": 0, "category_id": 1, "bbox": [231.0, 48.0, 41.0, 42.0], "area": 863, "segmentation": {"size": [512, 512], "counts": "Vbc31n?2N2N3M2N2N2N2N2N3M2O1N2N2N2N3M2N0000000001O00011N2N2N2N3M2N2N2N2N2O1N3M2N2N2N2Nomg3"}, "image_id": 368, "id": 5503}, {"iscrowd": 0, "category_id": 1, "bbox": [391.0, 67.0, 12.0, 13.0], "area": 81, "segmentation": {"size": [512, 512], "counts": "XbS62m?2N2N2N2N21O0N2N2O1N2Ng]f1"}, "image_id": 368, "id": 5504}, {"iscrowd": 0, "category_id": 1, "bbox": [272.0, 71.0, 65.0, 69.0], "area": 2195, "segmentation": {"size": [512, 512], "counts": "hSX41n?2N2N2N3M2N2WODkA>T>CkA>S>DkA>S>DkA>S>DdAINf0[>CeAINf0[>DdAHOf0[>DeAd0Y>c0O1N2N0000000000010O0001O3M2N2N2N2OOIcAlN]>T1eAjN[>W16000001O000000101N2N3M2N2N2N2O1N001O003M2N2N2O1N2N2N3M2N2NW]g2"}, "image_id": 368, "id": 5505}, {"iscrowd": 0, "category_id": 1, "bbox": [403.0, 78.0, 16.0, 16.0], "area": 139, 
"segmentation": {"size": [512, 512], "counts": "ebY62m?2N2O1N3M2N2N1O01O3M2N2O1N2N2N\\]^1"}, "image_id": 368, "id": 5506}, {"iscrowd": 0, "category_id": 1, "bbox": [495.0, 81.0, 17.0, 33.0], "area": 292, "segmentation": {"size": [512, 512], "counts": "obg71n?2N2N2N2N3N1N2N2N2N2N2N2N2N3M2O1^M"}, "image_id": 368, "id": 5507}, {"iscrowd": 0, "category_id": 1, "bbox": [97.0, 94.0, 56.0, 49.0], "area": 1369, "segmentation": {"size": [512, 512], "counts": "gc`11n?2N2N2N2N2N2N3M2N2N2N2N2N2N000000000000101N2N2N2N2N20000000000000000000N2O1N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2NV\\c5"}, "image_id": 368, "id": 5508}, {"iscrowd": 0, "category_id": 1, "bbox": [378.0, 94.0, 37.0, 36.0], "area": 648, "segmentation": {"size": [512, 512], "counts": "bSm51n?2O1N2N3M2N2N2N2N2N2O1N3M2N1O000000000001O01O02N2N2N3M2N2N2N2O1N2N2N3M2Nd\\`1"}, "image_id": 368, "id": 5509}, {"iscrowd": 0, "category_id": 1, "bbox": [308.0, 112.0, 68.0, 62.0], "area": 1820, "segmentation": {"size": [512, 512], "counts": "kTj42m?2N2N2N2N2N2N2N2N2O1N2N3M2N2N2N2N02N2N2N2N1O00000001O000000002N2N2JQOXAQ1f>6N1O01O00000000000000001O000001O02N2N2N2N3M2N2N2N2N2N2N2Bb@7`?Gb@7f?M2N2NjkS2"}, "image_id": 368, "id": 5510}, {"iscrowd": 0, "category_id": 1, "bbox": [445.0, 114.0, 22.0, 23.0], "area": 245, "segmentation": {"size": [512, 512], "counts": "Qdn61n?2N2M3N2N1N3N2N2N1N010001O2M3N1O2M3N2N2N1NZ\\f0"}, "image_id": 368, "id": 5511}, {"iscrowd": 0, "category_id": 1, "bbox": [474.0, 124.0, 38.0, 66.0], "area": 1457, "segmentation": {"size": [512, 512], "counts": "iT]73l?2N2O1N2N2N2N2N2N2N2N2N3M2N2N2O1N4L2N2N2N2N2N2N3M2N2N2N2O1N2N1O02N2N2N2N2NPL"}, "image_id": 368, "id": 5512}, {"iscrowd": 0, "category_id": 1, "bbox": [48.0, 131.0, 60.0, 54.0], "area": 1686, "segmentation": {"size": [512, 512], "counts": "QUh02m?2N2N3M2N2N2O1N2N2N2N3M2N2N2N2O1N2N2N3M0000000010O0000000000000002O100001O000O1N2N2N2N2N3N1N2N2N2N2N2N2N3M2O1N2N2N2N2NRkY6"}, "image_id": 368, "id": 5513}, {"iscrowd": 0, "category_id": 1, "bbox": [125.0, 147.0, 28.0, 28.0], "area": 415, "segmentation": {"size": [512, 512], "counts": "odn12m?2N2N3M2O1N2N2N2N2N3M2N2O0O02N2O1N2N3M2N2N2N2N2O1N3M2NoZc5"}, "image_id": 368, "id": 5514}, {"iscrowd": 0, "category_id": 1, "bbox": [354.0, 159.0, 62.0, 72.0], "area": 1939, "segmentation": {"size": [512, 512], "counts": "eVa51n?2N2N2O1N2N2^OE\\A=b>F[AF[A=c>D[A>c>D[A>c>D[A>c>D[A>c>a0N2N2N1O00000001O000001O000000000000JcAjN]>W1dAgN\\>Y1fAeN[>Z15K^AmNb>S1`AkN`>U1500000010O2N2N2N2N2N3M2N2N2N2N2O1N2N2N2N2N3M2N^j_1"}, "image_id": 368, "id": 5515}, {"iscrowd": 0, "category_id": 1, "bbox": [158.0, 161.0, 14.0, 14.0], "area": 115, "segmentation": {"size": [512, 512], "counts": "XU_21n?2N2N2N2N2N2N11N2N200O1N2KW@0cZZ5"}, "image_id": 368, "id": 5516}, {"iscrowd": 0, "category_id": 1, "bbox": [152.0, 162.0, 67.0, 71.0], "area": 2052, "segmentation": {"size": [512, 512], "counts": "cV\\21n?2N3M2N2N2N2N2O1N2N2N2N2N2N2N2N2N2N3M2N001O000001O000000CiNRBW1n=kNPBU1P>mNnAS1R>oNlAQ1T>QOjAo0V>SOhAm0X>=1O0001O02N2N2N2N200O1N2N2N2N2N2N2N2N3M2N2O1N2N2N2N2N2N2N2N2N2NSZb4"}, "image_id": 368, "id": 5517}, {"iscrowd": 0, "category_id": 1, "bbox": [442.0, 163.0, 54.0, 52.0], "area": 1356, "segmentation": {"size": [512, 512], "counts": "eUm61n?2N2N2N2_@JV?7h@KV?7h@KV?7h@KV?a0N1O00001O001O100O2N2N2N2010O0000000000000000010O0N2N1O003M2N2N2N2O100O1N2N3M2O1N2N2N2N2N2Nli7"}, "image_id": 368, "id": 5518}, {"iscrowd": 0, "category_id": 1, "bbox": [143.0, 172.0, 21.0, 20.0], "area": 215, "segmentation": {"size": [512, 512], "counts": "feW21n?2N3N1N2N2N2N3M10O0000010O2N3M2N2N2N2O2M\\j]5"}, 
"image_id": 368, "id": 5519}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 175.0, 53.0, 52.0], "area": 1354, "segmentation": {"size": [512, 512], "counts": "]66i?2N2N2N3M2N2N2O1N2N3M2N2N2N2N1ImNaAR1`>oN^AQ1b>QO\\Ao0d>6000001O0001O00001O2N2O1N2N2N3M2N2N2O1N1O000000002N3N1N2N2N2N2N2N3MPZU7"}, "image_id": 368, "id": 5520}, {"iscrowd": 0, "category_id": 1, "bbox": [71.0, 191.0, 29.0, 30.0], "area": 373, "segmentation": {"size": [512, 512], "counts": "VfS12m?2N3M2N2N2N2N2O1000000000000001O000000000N2N2N2N2N2N2O1N]i]6"}, "image_id": 368, "id": 5521}, {"iscrowd": 0, "category_id": 1, "bbox": [378.0, 205.0, 63.0, 58.0], "area": 1778, "segmentation": {"size": [512, 512], "counts": "WWm51n?2O2M2N2N2N2N2N2N2N2N3N1N2N2N2N2N1O0000000001O01O2N2N2N2N201O01O00000000000000010O0N2N2N2N2O100O1N2N3M2Kk@\\OW?b05N2N2N2O1N2N3M2N2N_XS1"}, "image_id": 368, "id": 5522}, {"iscrowd": 0, "category_id": 1, "bbox": [432.0, 205.0, 33.0, 28.0], "area": 478, "segmentation": {"size": [512, 512], "counts": "dVh62m?2O1N2N2N2N2N200001O00O1N2O1N2N2N2N2N20N2N2O1N3M2N2N2N2N2N2N2N2NVYg0"}, "image_id": 368, "id": 5523}, {"iscrowd": 0, "category_id": 1, "bbox": [194.0, 228.0, 60.0, 58.0], "area": 1628, "segmentation": {"size": [512, 512], "counts": "WXQ31n?2N2N2O1N2N2N2N2N3M2N2N2N2N2N2N2N2N2O1N2N2N000000000000000000000001O01O000001O2N2N2N2N2N2N2N2N3M2N2O1N2N2N2N2N2N2N2N2NXhP4"}, "image_id": 368, "id": 5524}, {"iscrowd": 0, "category_id": 1, "bbox": [34.0, 229.0, 25.0, 24.0], "area": 309, "segmentation": {"size": [512, 512], "counts": "`Wa02m?2N2N2N2N2N2N2N2O2M2N1O02N2N2N2N2N2N2O2O0N2N2N2N_XR7"}, "image_id": 368, "id": 5525}, {"iscrowd": 0, "category_id": 1, "bbox": [119.0, 238.0, 48.0, 50.0], "area": 1110, "segmentation": {"size": [512, 512], "counts": "ngk12m?2N2N2O1N2N2N3M2N2N2O1N2N2N3M1O002N2O1O10001O000001O000001O0000mNWAl0P?N2N2N2N2O2M2N2N2N2N2N3M2O1N2N`W\\5"}, "image_id": 368, "id": 5526}, {"iscrowd": 0, "category_id": 1, "bbox": [13.0, 241.0, 31.0, 32.0], "area": 513, "segmentation": {"size": [512, 512], "counts": "Qh62m?2N2N2N2N2N2N2N2N2N3N1N2N2N00000002N2N2O1N2N2N2N2N2N2N3M2N2NRhY7"}, "image_id": 368, "id": 5527}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 242.0, 8.0, 15.0], "area": 62, "segmentation": {"size": [512, 512], "counts": "b7?b?N2N2N2N3M2N2OVhk7"}, "image_id": 368, "id": 5528}, {"iscrowd": 0, "category_id": 1, "bbox": [233.0, 260.0, 58.0, 57.0], "area": 1531, "segmentation": {"size": [512, 512], "counts": "Yid32m?2N2N2N2N3N1N2N2N2N2N3M2N2O1N2N2N1O001O01OKoN\\AQ1d>500000001O01O000000000001O01O1O2N2N2N2N2O1N3M2N2N2N2N2N2O2M2N2N2N2N2NXW^3"}, "image_id": 368, "id": 5529}, {"iscrowd": 0, "category_id": 1, "bbox": [510.0, 265.0, 2.0, 4.0], "area": 6, "segmentation": {"size": [512, 512], "counts": "ZXo72m?2fG"}, "image_id": 368, "id": 5530}, {"iscrowd": 0, "category_id": 1, "bbox": [476.0, 273.0, 29.0, 29.0], "area": 446, "segmentation": {"size": [512, 512], "counts": "nX^71n?3M2N2N2N2O1N3M2N2N2N2N2O1N1O12M2N2N2N2N2N2N3N1N2N2N2N2NQW3"}, "image_id": 368, "id": 5531}, {"iscrowd": 0, "category_id": 1, "bbox": [32.0, 292.0, 39.0, 50.0], "area": 1351, "segmentation": {"size": [512, 512], "counts": "]Z`02j?5J5L4L4K6K4L4L401O01O000001O0K5M310O00000010O00000010O000001L3L4lN\\Ai0o>00GPABQ?8TAEo>7?KdVl6"}, "image_id": 368, "id": 5532}, {"iscrowd": 0, "category_id": 1, "bbox": [447.0, 299.0, 55.0, 50.0], "area": 1457, "segmentation": {"size": [512, 512], "counts": "Wjo61n?2N2N3GId@9Z?Ie@8Y?Je@8Y?9N2O2M2N2N2N2N2N3N1N2N2N00000002O1N2N1O00000010O0001O2N2N3M2O1N1O00001O3M2O1N2N2N2N3M2N2O1N2N2NTf4"}, "image_id": 368, "id": 5533}, 
{"iscrowd": 0, "category_id": 1, "bbox": [260.0, 302.0, 66.0, 64.0], "area": 1906, "segmentation": {"size": [512, 512], "counts": "gZR42m?2N2N2N2N3N1N2N2N2N2N2N2N2N2N3M2N2O1N2N1O001O000000000001O0000000001O000000000001O000000002O2M2N2N2N2N2N2N2N2N2N2O2M2N2N2N2N2N2N2Nmel2"}, "image_id": 368, "id": 5534}, {"iscrowd": 0, "category_id": 1, "bbox": [77.0, 305.0, 21.0, 20.0], "area": 217, "segmentation": {"size": [512, 512], "counts": "liV11n?3M2N2O1N2N2N2N00001O0001O1O2N2N2N2N2O2MYf^6"}, "image_id": 368, "id": 5535}, {"iscrowd": 0, "category_id": 1, "bbox": [508.0, 315.0, 4.0, 8.0], "area": 18, "segmentation": {"size": [512, 512], "counts": "nYn71n?2N3M2TF"}, "image_id": 368, "id": 5536}, {"iscrowd": 0, "category_id": 1, "bbox": [208.0, 328.0, 39.0, 36.0], "area": 583, "segmentation": {"size": [512, 512], "counts": "Q[X31n?2N2O1N2N2N2N2N2N2N2N0000010O000000000000001O000001O00001O2O1N2N2N2N3M2N2N2O^UT4"}, "image_id": 368, "id": 5537}, {"iscrowd": 0, "category_id": 1, "bbox": [73.0, 338.0, 37.0, 49.0], "area": 1318, "segmentation": {"size": [512, 512], "counts": "kkT13g?7J5J6J6J6K501O0001O0001N1J6O1000001O01O00000001O01O000gNbAP1g>00O1K5L5Gl@DT?6RAKm>OYAMk>MieX6"}, "image_id": 368, "id": 5538}, {"iscrowd": 0, "category_id": 1, "bbox": [494.0, 340.0, 18.0, 33.0], "area": 350, "segmentation": {"size": [512, 512], "counts": "lZg73l?2N2W@Je?:N3N1N2N2N2N2O2O0000000N2N3M2ZE"}, "image_id": 368, "id": 5539}, {"iscrowd": 0, "category_id": 1, "bbox": [314.0, 347.0, 60.0, 57.0], "area": 1647, "segmentation": {"size": [512, 512], "counts": "o[m42m?2N2N2N2N2N2N2O1N3M2N2N2N2N2N2N2N2O1N1O0KkN`AU1`>mN^AS1b>500000001O01O00000000002N2N2O0O0000002N2N2N2N2N2O1N2N3M2N2N2N2N2N2N2O1N3MbdT2"}, "image_id": 368, "id": 5540}, {"iscrowd": 0, "category_id": 1, "bbox": [115.0, 349.0, 25.0, 25.0], "area": 322, "segmentation": {"size": [512, 512], "counts": "Zki11n?2N2N2N2N2N2N2N2N2N2N2N0001O2N2N2N2N2N2N2N2N2N2Njdi5"}, "image_id": 368, "id": 5541}, {"iscrowd": 0, "category_id": 1, "bbox": [198.0, 354.0, 22.0, 20.0], "area": 206, "segmentation": {"size": [512, 512], "counts": "U[S32n?1N2N3M2O1O10000000001O0N2O01N2N2N2N2O1N3Mbda4"}, "image_id": 368, "id": 5542}, {"iscrowd": 0, "category_id": 1, "bbox": [137.0, 366.0, 21.0, 20.0], "area": 222, "segmentation": {"size": [512, 512], "counts": "fkT22m?2N2O2M2N2N2N2N3N0O01O2N2N2O1N3M2N2N2N2OXd`5"}, "image_id": 368, "id": 5543}, {"iscrowd": 0, "category_id": 1, "bbox": [109.0, 379.0, 51.0, 52.0], "area": 1316, "segmentation": {"size": [512, 512], "counts": "^lf11n?2N2O1N3M2N2]@D_?`0N3M2N2N2N2N2O1N2N2N1O1O2N2N20001O000001O00000O1N2N2N2N2N2N3M2N2O1N2N2N2N2N2N2N3M2N2N2OUc_5"}, "image_id": 368, "id": 5544}, {"iscrowd": 0, "category_id": 1, "bbox": [167.0, 380.0, 31.0, 28.0], "area": 368, "segmentation": {"size": [512, 512], "counts": "^lc23l?2N2N2N2O1N2N2N001O00000001O2N2N200OIGg@8Y?Je@6[?Mc@2]?82N2N2N2O1N3M2N2Nlcl4"}, "image_id": 368, "id": 5545}, {"iscrowd": 0, "category_id": 1, "bbox": [250.0, 382.0, 31.0, 30.0], "area": 488, "segmentation": {"size": [512, 512], "counts": "\\\\m31o?1N2N2N2N2N2N2N2N3M2O1N2N2N2N1O01O2O1N2N2N3M2N2N2N2N2N2O1N2NeSc3"}, "image_id": 368, "id": 5546}, {"iscrowd": 0, "category_id": 1, "bbox": [189.0, 383.0, 25.0, 22.0], "area": 229, "segmentation": {"size": [512, 512], "counts": "^ln21n?3M2N2O1N2N1O000000010O000000000010O1O2N3M2N2N2Okcd4"}, "image_id": 368, "id": 5547}, {"iscrowd": 0, "category_id": 1, "bbox": [230.0, 390.0, 17.0, 18.0], "area": 168, "segmentation": {"size": [512, 512], "counts": "]\\c32m?2N2N2N2N2N2N2O20N1O1N2N2N2N2N3MaST4"}, "image_id": 368, 
"id": 5548}, {"iscrowd": 0, "category_id": 1, "bbox": [340.0, 396.0, 66.0, 82.0], "area": 2625, "segmentation": {"size": [512, 512], "counts": "Y^Z52m?2N2N3M2O1N2N2N2kN@gBb0W=@gBb0W=@gBc0V=_OhBc0W=^OgBd0W=^OhBc0V=@gBb0W=@gBb0W=@gBb0W=@gB`0Y=BeB?Z=CdB=\\=EbB;_=F_B:a=I\\B7d=K[B4e=NYB2g=S10000010O2N2N3M2N2N2N2N2O1N2N3M2N2000000001O00O2M2nN\\Af0e>XO]Af0e>XO]Af0e>XO^Ae0d>YO^Ae0P?N2N2N2N2N2O1N3M2N2NRbd1"}, "image_id": 368, "id": 5549}, {"iscrowd": 0, "category_id": 1, "bbox": [152.0, 417.0, 64.0, 53.0], "area": 1678, "segmentation": {"size": [512, 512], "counts": "l]\\22m?2N2N2N2O2M2N2N2N2N2N2N3M2OO000001O00000001O000001O2N2N2N2N3N1O1000000000001O01O00000O1N2N2O2M2N2N2N2N2N2N2O2M2N2N2N2N2N2N2N3Nlac4"}, "image_id": 368, "id": 5550}, {"iscrowd": 0, "category_id": 1, "bbox": [212.0, 423.0, 34.0, 32.0], "area": 504, "segmentation": {"size": [512, 512], "counts": "j]Z31n?2N2O1N2N2N3M2N2N2O1N2N00001O01O0000000001O02N2N2N3M2N2O1N2N2N2N2N^bT4"}, "image_id": 368, "id": 5551}, {"iscrowd": 0, "category_id": 1, "bbox": [246.0, 425.0, 52.0, 43.0], "area": 1177, "segmentation": {"size": [512, 512], "counts": "k]k32m?2N2N3M2N2N2N2O1N2N2N2N3M2N2N2N2O1N2N2N2O01N2N2N2N3M2N2N20000000010O000O1N2O1N2N00001O2N3M2N2N2O1N2N2NlaZ3"}, "image_id": 368, "id": 5552}, {"iscrowd": 0, "category_id": 1, "bbox": [180.0, 457.0, 79.0, 55.0], "area": 1868, "segmentation": {"size": [512, 512], "counts": "o_j21o?0O1O1O1IM`@4_?N_@3`?O^@2b?6O1O1O1O1GBo@?P?Cn@>Q?Dm@=S?8O1O1O1O1O1O1O1O100O1O1O1OFYA]Of>c0\\A\\Oc>d0_A[O`>e0bAZO]>g0dAXO\\>g0fAXOY>h0iAWOX>g0kAWOV>g0lAXOU>f0d0N0001O00000001O02N2N2000000N2N2O2M1O001O000002N2N2N3M2N2O1N2N2N2N2N3M2N2N2O1N2NUQn3"}, "image_id": 368, "id": 5553}, {"iscrowd": 0, "category_id": 1, "bbox": [386.0, 462.0, 60.0, 50.0], "area": 1627, "segmentation": {"size": [512, 512], "counts": "f_Q61n?2N2N2N2\\@H]?;`@F_?>O1O1O1O1O1001O1O_O_OdA`0[>BdA>\\>CbA>]>DaA=^>E`A<_>F_A;`>G^A:a>H]A9b>I]A7b>d0O1O1O1O1O1O1001O1O1O1O1O1O1O1N2N2N2N2N2O2M2N2N2N2N2N2N2N2N2N2N2N2N2O2Me`P1"}, "image_id": 368, "id": 5554}, {"iscrowd": 0, "category_id": 1, "bbox": [363.0, 470.0, 19.0, 17.0], "area": 137, "segmentation": {"size": [512, 512], "counts": "Qoe51n?2O1N2N2N2N000001O0001O0001O2N2N1O2NXaP2"}, "image_id": 368, "id": 5555}, {"iscrowd": 0, "category_id": 1, "bbox": [279.0, 471.0, 70.0, 41.0], "area": 1678, "segmentation": {"size": [512, 512], "counts": "eo[42m?2N2N2N2N2N2N3N1N1O1O1O1O1O1O100O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O10O1O2O1N2N2N2O2O000001OO1O1O1O2N1O00O100O11O2N1O1O1O1O1N2N3M2N2N2N2N2N2OYPa2"}, "image_id": 368, "id": 5556}, {"iscrowd": 0, "category_id": 1, "bbox": [167.0, 472.0, 22.0, 22.0], "area": 242, "segmentation": {"size": [512, 512], "counts": "Voc21n?2N2N2N2N2N2N2N1O1O0000000002N2N2N2N3M2N2NQQQ5"}, "image_id": 368, "id": 5557}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 475.0, 4.0, 7.0], "area": 16, "segmentation": {"size": [512, 512], "counts": "k>7j?N2N2NSam7"}, "image_id": 368, "id": 5558}, {"iscrowd": 0, "category_id": 1, "bbox": [12.0, 499.0, 27.0, 13.0], "area": 186, "segmentation": {"size": [512, 512], "counts": "o_61n?1O1O100O1O1O1O1O1O1O1O1O1001O1O1O1O1O1O1O1O1O1O1O1OQP\\7"}, "image_id": 368, "id": 5559}, {"iscrowd": 0, "category_id": 1, "bbox": [461.0, 507.0, 9.0, 5.0], "area": 25, "segmentation": {"size": [512, 512], "counts": "ooV71n?100O1O1O11O1O2NQ`d0"}, "image_id": 368, "id": 5560}, {"iscrowd": 0, "category_id": 1, "bbox": [233.0, 0.0, 77.0, 45.0], "area": 1688, "segmentation": {"size": [512, 512], "counts": 
"\\`d32m?2O2GKb@7\\?Kb@7]?Ja@9\\?7N2N3N1O1O1O2N1O1O2N1O1O2N1O1O1O2N1O1OO1O100O1O1O1O1O100O1O1O1O100_OTAMm>3UAJl>5VAIk>6WAHj>7YAFh>:YAEg>:[ADf>;`0O100O1O1O100O1O11O1O2N1O1O2N1O00O100O1O1O100O1O1O100O1O1KW@2n?NloT3"}, "image_id": 369, "id": 5561}, {"iscrowd": 0, "category_id": 1, "bbox": [303.0, 0.0, 84.0, 77.0], "area": 2916, "segmentation": {"size": [512, 512], "counts": "]ag41n?2N2O2M2N2JHb@9\\?8N1N2N2N3N1N2N2O2M2N2N2O2M21O01O01O001N1N2O2M2N000010O000ZOoN^BQ1b=RO\\Bm0d=UOZBk0f=WOXBj0g=YOVBg0k=ZOTBe0l=]ORBc0n=@oA`0Q>BmA?R>CmAh01O0001O03M2N0001O01O001O100O1O3M2N2O2M2N2N3M2O1N3M2N2N2O2M2N2N3M2N2O2M2N2NT_n1"}, "image_id": 369, "id": 5562}, {"iscrowd": 0, "category_id": 1, "bbox": [137.0, 23.0, 103.0, 99.0], "area": 4251, "segmentation": {"size": [512, 512], "counts": "hbT22n?2M2N2IIc@8[?Kb@8[?8O1N3M2O1N2N3N1N2O2M2N2O1N3O01O01N1N2O1N0001O01O01O00010OEaNVB_1j=dNSB\\1m=fNRBZ1n=hNoAX1Q>jNmAe00AT>i0nAUOR>k0PBTOo=m0SBPOn=o0TBoNl=Q1VBmNj=S1YBkNg=U1ZBiNf=W1a001O3M2OO0001O03M2O1N2N2FfNlA\\1Q>fNmA\\1R>fNkAZ1U>9O01O010O00010O00010O00012IiA`NY>_1hA_N[>^14O001O01O01O000010O2O01N2N3N1N3M2O2]OSANP?ORAOP?OSAOo>ORAOP?OSANP?ORA0o>OSANd]X4"}, "image_id": 369, "id": 5563}, {"iscrowd": 0, "category_id": 1, "bbox": [238.0, 50.0, 84.0, 79.0], "area": 2949, "segmentation": {"size": [512, 512], "counts": "QSg31n?2N2O1IK`@8^?I`@9^?6N3M2O1N3M2N2N3N1N2N2N3N1N201O01O01O01N1N2O2M2N2N1O01O0WOQObBo0_=RO_Bn0a=TO^Bl0a=WO\\Bi0d=YOZBg0g=ZOWBf0i=\\OVBc0j=@SBa0l=ARB?o=BoA>Q>EmA:S>HkA8U>j00O00002N2O0O0000010O00001O3N1N2N2N3M2O1N3M2N2O2M2N2N3M2O1N3M2N2N3N1N2N3M2N_mn2"}, "image_id": 369, "id": 5564}, {"iscrowd": 0, "category_id": 1, "bbox": [317.0, 54.0, 103.0, 99.0], "area": 4172, "segmentation": {"size": [512, 512], "counts": "gcn42n?2M2N2IK`@8]?7N2O2M2N2N2O2M2N2N3N1N2N3N11O0001O0N2N3M01O0001O01O000000HaNQB_1n=cNPB]1Q>eNlA[1T>gNjAY1V>80AkAUOU>j0mATOS>l0oAROQ>n0RBoNn=R1SBlNm=T1UBjNl=U1VBjNi=V1`00011N2N1O10O001O2O1N3M2N2IcNiA^1U>dNiA^1U>610O000001O01O0001O01O0002KhA_N[>^1gAaNZ>^15N1O00010O00000011N2N11N3M2N2N2O2\\OUANm>0UANm>1TAMo>0TANm>0UANm>0UANn>0TAMn>1TANm>0Zn]1"}, "image_id": 369, "id": 5565}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 106.0, 82.0, 78.0], "area": 2863, "segmentation": {"size": [512, 512], "counts": "gTR62n?2M2N2JIa@9]?I`@9^?6N2N3N1N2N2N3N1N2N3N1N2N2N3O01O010O000O2M2N2N3NO01O0000YOPO_BQ1`=QO_Bn0a=TO]Bl0d=VOYBj0g=XOWBh0i=ZOVBf0i=]OTBc0m=^OQBb0o=AoA>Q>DmAEjA;V>h0O0001O002O1N00001O01O0001O1O3N1N2N3M2N2N3N1N3M2N2N3N1N2N3M2N2O2M2N3M2N2Ohkd0"}, "image_id": 369, "id": 5566}, {"iscrowd": 0, "category_id": 1, "bbox": [73.0, 111.0, 78.0, 118.0], "area": 4383, "segmentation": {"size": [512, 512], "counts": "dfT13j?4HKa@7]?7M4L3M4L3N201O01O01O0O1N3L3BiNPBZ1m=jNoAZ1m=iNQBY1l=jNQBZ1k=?N2M4O00010O0N2M4DaMRCb2kX19L3M4TOVA>l>@VAa0i>\\O[Ac0Q?1ON3L3N2M4L3M3Ml[d5"}, "image_id": 369, "id": 5567}, {"iscrowd": 0, "category_id": 1, "bbox": [140.0, 144.0, 81.0, 112.0], "area": 4214, "segmentation": {"size": [512, 512], "counts": "[WV23k?3K4M4M2M4M2M4M2O110O010O0010O[OUOUBl0h=WOXBh0e=[O[Bf0b=]O^Bb0`=@`B`0]=CcB>Z=EfB:W=IiB8V=IjB6W=IgB:W=GgB;V=IfB;W=GgB;W=HeBW1gAgN[>W18M4M2N3O010N1N2N3L3N3L3N2M4M2M4MX[a4"}, "image_id": 369, "id": 5568}, {"iscrowd": 0, "category_id": 1, "bbox": [448.0, 162.0, 50.0, 48.0], "area": 1483, "segmentation": {"size": [512, 512], "counts": "UVP72l?3L3N3M2M4M2N2M4M2N3L3N3M2M3N3O0010O010O00010OO2M21M2N2M4M2N3N11O010O01O01O010O01SOTAd0l>YOWAd0T?N3L3M4M2M3N3L_j6"}, "image_id": 369, "id": 5569}, {"iscrowd": 0, "category_id": 1, "bbox": [244.0, 166.0, 67.0, 83.0], "area": 2921, "segmentation": {"size": [512, 512], 
"counts": "oVj328O[?3c@OZ?4c@0Z?V1hAhNZ>V1:M2M3N30O01L3N3M2N3L3N2K`@Hc?55N3MbiS2"}, "image_id": 369, "id": 5571}, {"iscrowd": 0, "category_id": 1, "bbox": [356.0, 217.0, 80.0, 120.0], "area": 4319, "segmentation": {"size": [512, 512], "counts": "gYb5153_?O_@3^?:M2N3L3N2N3N110O0010ON2N3L3AkNQBX1l=jNRBX1k=lNQBW1m=kNQBX1k=`0N3M2M40O0010N1N3_OeMXC^2f?oA^OT>c0lAZOW>e0iAXOZ>i0fATO\\>l0=O010O0O1M4M2M4M2N3L3N2N3LjhU1"}, "image_id": 369, "id": 5572}, {"iscrowd": 0, "category_id": 1, "bbox": [24.0, 222.0, 77.0, 116.0], "area": 4275, "segmentation": {"size": [512, 512], "counts": "PZ<3c?1a@1]?1`@2]?;M2M4M2M301O0010O0010M2M4M2BkNnAX1P>jNnAY1n=kNnAX1P>jNnAY1n=>N2M4N1010OM3N3CdMQC_2mX18L3N3L3VOUA=m>AVA>k>^OXAc0R?N3L3N3L3N2M4M]X]6"}, "image_id": 369, "id": 5573}, {"iscrowd": 0, "category_id": 1, "bbox": [211.0, 244.0, 67.0, 85.0], "area": 2922, "segmentation": {"size": [512, 512], "counts": "hiY32l?2HO^@4`?8L3N3M2M3N3N1010O01N1M3N3M2BkNnAY1n=jNPBX1n=kNnAX1P>>L3N3L3O20OO1N3L3O2O00010O010O00010OoNdB]O^=`0dB]O_=`0eB]O]=a0eB]O^=?fB]O]=a0eB]O^=?eB^O^=a0dB\\O_=d0`BZOb=f0_BVOd=k0[BSOh=l0f0010O0010O0010OM3N3M2M4M2M4M2M3N3Mogd3"}, "image_id": 369, "id": 5574}, {"iscrowd": 0, "category_id": 1, "bbox": [93.0, 254.0, 81.0, 113.0], "area": 4209, "segmentation": {"size": [512, 512], "counts": "mj^13k?2IM^@6`?7L3N3L3N2M4O001O01O010O01[OROVBn0h=UOXBj0e=YO[Bh0b=ZO_Be0^=_OaBb0\\=@dB`0Y=DgB;W=GiB:U=GlB8U=HjB9T=HiB;T=IiB9U=IhB;nX18N3M2M4O000O2M2N3L3N3L3N2N3L3N3L3NjgX5"}, "image_id": 369, "id": 5575}, {"iscrowd": 0, "category_id": 1, "bbox": [269.0, 312.0, 119.0, 81.0], "area": 4361, "segmentation": {"size": [512, 512], "counts": "mjV41l?4M2N3L3N3M2M3N3M2M4M2N3L3N2N3M201O010O01O010OO2OO1M4M2N3L3O20O0010O010O0010O00ROSAj0R?O01YATOX>l0eAWO\\>h0bAZO^>g0_A\\Oa>c0\\A@d>o00O01O01O010O010O01O01O01L3N30O010O0010O0010M2N3M2M4M2O1010]AoNX>n0eAUO[>l0bAWO^>h0`AZO`>T10O00010O010O010O00010O01O0N30O0010O010O0010O0N3M2N3L3N2N3L3UOn@f0W?N3L3N3M2M4M2N2MPem1"}, "image_id": 369, "id": 5576}, {"iscrowd": 0, "category_id": 1, "bbox": [376.0, 367.0, 80.0, 112.0], "area": 4232, "segmentation": {"size": [512, 512], "counts": "b^l51l?3GMc@5[?Mb@7Z?9N3L3N2M4M2010O001M21O0\\OSOVBl0g=XOXBi0e=YO\\Bf0a=]O_Bd0^=_OaBa0\\=BeB=Y=FfB;V=HkB7U=IkB8U=HiB9W=GfB<[=DbB?[=CbB`0\\=BbBa0P=gNQCl0K`0Q=gNRC\\2lU1jAgNY>W1:M2M4N11O01M2N3L3N3L3Jc@G_?7c@G`?66M4MYdk0"}, "image_id": 369, "id": 5577}, {"iscrowd": 0, "category_id": 1, "bbox": [252.0, 381.0, 113.0, 80.0], "area": 4221, "segmentation": {"size": [512, 512], "counts": "Q]n31l?4M2N2M4M2N3L3N3L3N2N3L3N3M2M4O00010O010O0010O00N3O001O010O01O010O01O01ROaA:^>CeA=\\>@gA?Y>_OiAb0V>[OmAe0T>XOoAh0P>VORBj0c>0O01O01O010O010O01O01O0M4M2N3L3N2N3L31M2010O01O01O010O010O01O01^AdN^>`10O0cNaAZ1b>01O010O01O010O01O010O01O0O1N21O0010O010O00010O010ORObA8a>EaA9a>EaA9a>DcA8a>EaA9a>EbA7U?N3M2M3N]RY2"}, "image_id": 369, "id": 5578}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 441.0, 18.0, 44.0], "area": 421, "segmentation": {"size": [512, 512], "counts": "i=\\1e>L3N2M4M2M4M20010N1N3L3N2M4M2M4M2MSbf7"}, "image_id": 369, "id": 5579}, {"iscrowd": 0, "category_id": 1, "bbox": [27.0, 455.0, 66.0, 57.0], "area": 2500, "segmentation": {"size": [512, 512], "counts": "no=2k?3N2QOKQB7l=LRB6l=MPB6n=LPB6n=MoA5n=NoA5o=MoA5o=NnA4o=OnA4R>MkA5V>j0001O001O00001O001O001O0000O1001O001O001O00001O002N1O00001O001O001O002N001L3N3M2O101O0N3M2M4M2N2M4M2N3M2MTQa6"}, "image_id": 369, "id": 5580}, {"iscrowd": 0, "category_id": 1, "bbox": [99.0, 484.0, 58.0, 28.0], "area": 919, "segmentation": {"size": [512, 512], "counts": 
"doa13j?3N3M2M4M2N2g@@P?c0m@_OS?g01O00001O001O00001O001O001O00001O001O00001O001O00001O001O001O0D`@8`?Fb@:c?O001O001O00001O001O001O00001O001MUPa5"}, "image_id": 369, "id": 5581}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 54.0, 32.0], "area": 1087, "segmentation": {"size": [512, 512], "counts": "03m?1O1O1O1O1O1O1O1O1O1O1a@DX?=f@EY?b0O1O2N1O1O1O1O1O1OO1O1O1O1O1O1001O1O1O1O1O1O1OO1O100O1O2N2N2N2N2N2N2N2N2N2N2N2N2NdoT7"}, "image_id": 370, "id": 5582}, {"iscrowd": 0, "category_id": 1, "bbox": [181.0, 0.0, 48.0, 34.0], "area": 836, "segmentation": {"size": [512, 512], "counts": "P`j21o?1O2N1V@Me?4Z@Me?8O1O1O1O1O1O1O2N1O1O1O1OO1O1O11O1O1O100000001O0000000N2N1O02N2N2N2N3M2N2N2N2N2O1N2N2N^_]4"}, "image_id": 370, "id": 5583}, {"iscrowd": 0, "category_id": 1, "bbox": [277.0, 0.0, 64.0, 53.0], "area": 1991, "segmentation": {"size": [512, 512], "counts": "\\aZ41l?4L3N2M4L3N2M4O001O01OO2L3N2M4L3M4M2M3M4O01O0N1N1O2N3N2N200001O00001O00001O001O00001O00O1N2M3N2001O0000M3M3N2M3M3M4M2M3M4L3N2MQ`e2"}, "image_id": 370, "id": 5584}, {"iscrowd": 0, "category_id": 1, "bbox": [364.0, 0.0, 33.0, 13.0], "area": 225, "segmentation": {"size": [512, 512], "counts": "PPf51o?00001O001O00001O00001O001O004L00001O00001O001OO1N21O0000N2M3N2MS`i1"}, "image_id": 370, "id": 5585}, {"iscrowd": 0, "category_id": 1, "bbox": [501.0, 0.0, 11.0, 16.0], "area": 131, "segmentation": {"size": [512, 512], "counts": "Y`j73k?2M4L3O101O001O00001O"}, "image_id": 370, "id": 5586}, {"iscrowd": 0, "category_id": 1, "bbox": [331.0, 13.0, 63.0, 55.0], "area": 2072, "segmentation": {"size": [512, 512], "counts": "`aU52l?2M4M2M4M2M3N3L3N3L3N2M4M2M4O00010O0\\AgNa>\\110O01O01O010O01O01O010O01O01O01O010O01O01O0O2M2M3N3L3N0O13O010O01O0M3N3L3N3L3N2M4M2M4MWoj1"}, "image_id": 370, "id": 5587}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 19.0, 21.0, 41.0], "area": 445, "segmentation": {"size": [512, 512], "counts": "c0X1j>O0O1N3M2N2O1N3M2N2O1N3M2N2N3N1N2N2N3M2Oc^e7"}, "image_id": 370, "id": 5588}, {"iscrowd": 0, "category_id": 1, "bbox": [231.0, 19.0, 34.0, 24.0], "area": 613, "segmentation": {"size": [512, 512], "counts": "g`c31o?6J6J7I000O1000O10000000O1000O100000O1000O10000000O1000O10006J6J6Jk^k3"}, "image_id": 370, "id": 5589}, {"iscrowd": 0, "category_id": 1, "bbox": [466.0, 26.0, 46.0, 69.0], "area": 1950, "segmentation": {"size": [512, 512], "counts": "nQY73j?4M2M3M4L3N2M4YAZOo=i0oAYOo=j0mAZOS>e0jA^OV>b0gAAV>c0gA@V>V1M3M4M200010O010O00010O00010O001M2M3M4L3N2M4I_AjNd>S16M4L3N3L3M3M4M2M3MoN"}, "image_id": 370, "id": 5590}, {"iscrowd": 0, "category_id": 1, "bbox": [23.0, 29.0, 73.0, 68.0], "area": 2400, "segmentation": {"size": [512, 512], "counts": "Qb;2m?2N3M2N2O1N2N2N2N3M2g@ZOU?j0N2O1N2N3N10N3M2N01O2N2N2N2N2O1N2N3M2N2N2N1O01O0000000001O01O0000000001O00JgAfNZ>Y1hAeNX>[18N2N2O2M2N2N2N2N2N3M2O1N2N2N2N2N3M2N2O1N2N2N[n_6"}, "image_id": 370, "id": 5591}, {"iscrowd": 0, "category_id": 1, "bbox": [260.0, 57.0, 63.0, 68.0], "area": 2242, "segmentation": {"size": [512, 512], "counts": "SSR41m?2O2M2N3M2N3M2N3N110O010N1N3M2o@WOi>l0TAWOi>Q1N3N1N3N110O010O0N3M2N3M2N3M2N3N101O10O010O0O2M2N3M2N3M2N3M2G`APOc>m0_AROb>l0aAQOb>l0:N3M3N1N3M2N3M2N3M2N3M2Nk]n2"}, "image_id": 370, "id": 5592}, {"iscrowd": 0, "category_id": 1, "bbox": [296.0, 79.0, 69.0, 71.0], "area": 2258, "segmentation": {"size": [512, 512], "counts": "WTd42l?2O2M3N2M2O2N2M2O2M3N1N3N2N1N3N2M3N1N3N0O10O10O010O010O10O0101O1N3N2M2O2M3N1OO2O2M3N1N3N2N2M2O2M3N1N3NO10O2O1N3N2M2O2M3N1O2M3N1N3N2M3N1OT]Y2"}, "image_id": 370, "id": 5593}, {"iscrowd": 0, "category_id": 1, "bbox": [61.0, 83.0, 80.0, 
71.0], "area": 2907, "segmentation": {"size": [512, 512], "counts": "mcn01]?b0000;E000000000000000000001N2N2O1N2N2N2N2N2N2N3M2N2N2O1N2N00000002N2N2N2O1N2N2N1O000000000L`AiN`>W1bAgN^>Y1401O101N1O00001O0000000000102M2N2N2N2N2N2N2N2N2N2O2M2N2N2N2N2Nk\\i5"}, "image_id": 370, "id": 5594}, {"iscrowd": 0, "category_id": 1, "bbox": [143.0, 92.0, 10.0, 9.0], "area": 51, "segmentation": {"size": [512, 512], "counts": "nbW22n?1N3M2O10O1N2O1N2NP]c5"}, "image_id": 370, "id": 5595}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 94.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "nbo72P="}, "image_id": 370, "id": 5596}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 103.0, 55.0, 69.0], "area": 2350, "segmentation": {"size": [512, 512], "counts": "nTi61m?3L3M3ZOIYAOM;g>IXA0M:h>IYAb0c>?N201O01ON3L3N3L3M3N3N1010O00010O00010O010O00010O00010O0M4L3N2M4L3N3L3N2M4L3N2M4L3N3L3M3N3L3N2M_\\;"}, "image_id": 370, "id": 5597}, {"iscrowd": 0, "category_id": 1, "bbox": [168.0, 118.0, 13.0, 13.0], "area": 81, "segmentation": {"size": [512, 512], "counts": "iSd22m?2N2N2O1000000000N2N2N2NS\\U5"}, "image_id": 370, "id": 5598}, {"iscrowd": 0, "category_id": 1, "bbox": [99.0, 125.0, 85.0, 71.0], "area": 3137, "segmentation": {"size": [512, 512], "counts": "Wea11n?2N2N2N2^@HY?:e@IX?9f@IX?a0O1N2N2N2N2O110N1N2N2N2JkN^AW1`>5O00000000000001O010O200N3N1N2N2N1O00001O000001O2N2N2N3M2N2O1N2N2N2N2N2N2N0001O000000011N3M2N2N2^OeA\\O]>b0eA\\O]>b0eA\\O]>b0eA\\O]>c0dA[O^>c0dA\\O^>a0dA]O^>a0dA]O^>a0b0N2N2N2N2N2O2M2N\\kS5"}, "image_id": 370, "id": 5599}, {"iscrowd": 0, "category_id": 1, "bbox": [484.0, 125.0, 28.0, 87.0], "area": 1731, "segmentation": {"size": [512, 512], "counts": "ZVb72T?1`A2^>0`A2]>1`A3\\>1`A2T>EgA<31S>FhA<12T>:iAIT>T1L3N2M4L3N2M4N110O0N2M4M20010OO1N1N3M3NUL"}, "image_id": 370, "id": 5600}, {"iscrowd": 0, "category_id": 1, "bbox": [391.0, 186.0, 77.0, 79.0], "area": 3025, "segmentation": {"size": [512, 512], "counts": "]gS62l?2O2M3N1N3N2N1N3N2M2O2M3N1N3N2M2O2M3N1N3N2M2O0O010O010gA`NR>`1nAcNP>^1oAcNo=`1nAcNP>h1M2O2M30O10O10O10O10O10O10TNWB_1j=_NWBb1i=[NZBe1e=ZN]Be1d=XN^Bg1o=M3N1N3N2M2O0O0G^AWOb>i0`ATO`>l0bASO^>m0dAQO[>o01OO1O1O100O1O1VOcA1^>MeA0\\>OfAO\\>OfAO\\>0fAN[>0gAN[>0hAM[>0gAN\\>NgA1V?M2N2N`he7"}, "image_id": 370, "id": 5602}, {"iscrowd": 0, "category_id": 1, "bbox": [180.0, 224.0, 84.0, 69.0], "area": 3074, "segmentation": {"size": [512, 512], "counts": "\\Xj23l?3N3Y@H`??L3N3L4M2NO0100QOVOhBi0Y=ZOcBg0\\=]OaBb0`=@nAF:k0h=BkAF:h0j=KSB4n=NoA3P>0nA0R>0nAOS>2kAOT>n01000O010O10O10O02O2N0O1MhA^NW>c1300O010O10O10O01000O012M3N3M3L3N000O010O10O12M3N3M3L100O01000O0100O0100O01000O010O4M2N3L4M2N3L4M2Mmgk3"}, "image_id": 370, "id": 5603}, {"iscrowd": 0, "category_id": 1, "bbox": [259.0, 236.0, 27.0, 27.0], "area": 461, "segmentation": {"size": [512, 512], "counts": "cgQ42m?4M3M2M4M3M2M3N0O10O10O010O10O10O10O102M4M2N3L4M2M4MRh`3"}, "image_id": 370, "id": 5604}, {"iscrowd": 0, "category_id": 1, "bbox": [454.0, 238.0, 58.0, 85.0], "area": 2624, "segmentation": {"size": [512, 512], "counts": "aXS71n?2M3N1N3XAHe=:XBHf=:YBHe=:bABb07j==TBDj=?SBDk==TBEi=>TBDk=>SBDj=>TBEj==TBDj=>UBDj=;UBHk=7TBJl=V10000O0N3N2M100O010O02O1O2M3N1N3CQBfNQ>X1PBfNR>X1QBfNQ>X1QBeNR>X1PBgNQ>X1;O010O010O01000O3N1N3N2M2O2M3N1N3N2M]H"}, "image_id": 370, "id": 5605}, {"iscrowd": 0, "category_id": 1, "bbox": [3.0, 240.0, 69.0, 81.0], "area": 2689, "segmentation": {"size": [512, 512], "counts": 
"Wi11n?3N2M3N1N3N2M2N3N2M2O2M3M3N1N2OO01O010oNnNSCR1m?XAJ`>6`A3W>MiA6S>KmA5S>KmA5S>OiA1W>l0000OJ70000O10000000O10000000O10000000O1JnA^NR>b1:L1N10000000000000O07J9G8H9G9G]UR4"}, "image_id": 370, "id": 5608}, {"iscrowd": 0, "category_id": 1, "bbox": [39.0, 330.0, 76.0, 63.0], "area": 3218, "segmentation": {"size": [512, 512], "counts": "Wkc03m?6J6J5J7J6J2NO10O10000000O01000000O010000000O10O100000O1000O1000O1^OmNWBS1i=SOQBm0o=YOkAf0V>b0O100000O10O10000000O10O103M4L00000O10O10002N3M000O1000O1000O10000jNlA`0Y>[OlA`0Z>YOmAa0o>I6K6JPTV6"}, "image_id": 370, "id": 5609}, {"iscrowd": 0, "category_id": 1, "bbox": [321.0, 349.0, 89.0, 50.0], "area": 2745, "segmentation": {"size": [512, 512], "counts": "YkP56h?4M2N2N2O1N3N2N2N1O1O001O001O00O1000000000000a0_O0J600000000000000000O10000000000000:F0000000000000G9000000000000000000000000000001O000000000000000O100000:F0000000000001O1O1O1O1OYdb1"}, "image_id": 370, "id": 5610}, {"iscrowd": 0, "category_id": 1, "bbox": [211.0, 366.0, 84.0, 48.0], "area": 2791, "segmentation": {"size": [512, 512], "counts": "RlY36V?d0000000000000001O00000005K000000000000a0_O0000000000000000000000000000000000000000000000dNgAQ1d>H800000000000000000000000000000000000000000000000000001O00000000000001O000000UT\\3"}, "image_id": 370, "id": 5611}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 382.0, 25.0, 45.0], "area": 556, "segmentation": {"size": [512, 512], "counts": "o;\\1c>AcA]Ob>>dA\\O^>b0`0O010000000000O0100000000000O01000006J6J7H\\Sc7"}, "image_id": 370, "id": 5612}, {"iscrowd": 0, "category_id": 1, "bbox": [62.0, 393.0, 9.0, 8.0], "area": 54, "segmentation": {"size": [512, 512], "counts": "Z\\o03m?4LO01000000003LeSl6"}, "image_id": 370, "id": 5613}, {"iscrowd": 0, "category_id": 1, "bbox": [54.0, 400.0, 68.0, 52.0], "area": 2202, "segmentation": {"size": [512, 512], "counts": "g]k0:W??0000000000000000000000M300O10000000000000000000001O000000000000000M5N0000000E;N2O1O100O1000000O11O0000001O001O1O2N8B6@a0O000O2O1N2M5JnbR6"}, "image_id": 370, "id": 5614}, {"iscrowd": 0, "category_id": 1, "bbox": [258.0, 411.0, 16.0, 8.0], "area": 124, "segmentation": {"size": [512, 512], "counts": "m\\Q46i?100O1000000000000000000000000Ucf3"}, "image_id": 370, "id": 5615}, {"iscrowd": 0, "category_id": 1, "bbox": [344.0, 412.0, 92.0, 48.0], "area": 3637, "segmentation": {"size": [512, 512], "counts": "R]\\5`0]?3000000d@5c>d0000000000000000000001O000001O00000000000000000000000000MO400000000000001O000000000000000001O:F000000000000000000001O0000000000J6000001O00000000000000000000000001O00f0ZO00000L[bU1"}, "image_id": 370, "id": 5616}, {"iscrowd": 0, "category_id": 1, "bbox": [216.0, 422.0, 75.0, 57.0], "area": 3167, "segmentation": {"size": [512, 512], "counts": "Z^\\34`?=O:TOb0000000000000000000000000000000000000=C00000000000000000000000000000000000000000000000000000006J000]Oc0000000000000000000000000000000000000000000jR^3"}, "image_id": 370, "id": 5617}, {"iscrowd": 0, "category_id": 1, "bbox": [298.0, 441.0, 13.0, 8.0], "area": 104, "segmentation": {"size": [512, 512], "counts": "i]e48h?00000000000000000000000WRT3"}, "image_id": 370, "id": 5618}, {"iscrowd": 0, "category_id": 1, "bbox": [298.0, 452.0, 11.0, 16.0], "area": 164, "segmentation": {"size": [512, 512], "counts": "T^e4`0`?0000000000000000J60lQU3"}, "image_id": 370, "id": 5619}, {"iscrowd": 0, "category_id": 1, "bbox": [88.0, 454.0, 11.0, 6.0], "area": 56, "segmentation": {"size": [512, 512], "counts": "W^\\15k?000000000O10000004LfQ^6"}, "image_id": 370, "id": 5620}, {"iscrowd": 0, "category_id": 1, "bbox": [481.0, 461.0, 8.0, 6.0], 
"area": 48, "segmentation": {"size": [512, 512], "counts": "]n`76j?0000000000000cQ;"}, "image_id": 370, "id": 5621}, {"iscrowd": 0, "category_id": 1, "bbox": [386.0, 463.0, 9.0, 9.0], "area": 80, "segmentation": {"size": [512, 512], "counts": "_^Q69g?00000000000001O`Qj1"}, "image_id": 370, "id": 5622}, {"iscrowd": 0, "category_id": 1, "bbox": [49.0, 469.0, 64.0, 43.0], "area": 2068, "segmentation": {"size": [512, 512], "counts": "Voh0j0V?0000000000000000000000000000000000000000000000000000000000000_Oa000000000000000000000000000000000006J00000000000I70000000000000UQW6"}, "image_id": 370, "id": 5623}, {"iscrowd": 0, "category_id": 1, "bbox": [335.0, 475.0, 65.0, 37.0], "area": 1550, "segmentation": {"size": [512, 512], "counts": "hoW58`?80000001O0000000000000000001O00000000000000001O000000O1F:N20000000000001OL4H81O00000000000000001O0000000000000000001O00000CSAGm>O]A1c>EgA5aog1"}, "image_id": 370, "id": 5624}, {"iscrowd": 0, "category_id": 1, "bbox": [479.0, 476.0, 9.0, 9.0], "area": 47, "segmentation": {"size": [512, 512], "counts": "Po_72l?2N30O010O010N1NSa;"}, "image_id": 370, "id": 5625}, {"iscrowd": 0, "category_id": 1, "bbox": [413.0, 477.0, 69.0, 35.0], "area": 1326, "segmentation": {"size": [512, 512], "counts": "no^62l?2N2O1N2N2N21O001O001O1O0000N2O1N2N2N21O001O001O1O001OO1O1N2N2N2O1N2N2O1N2N2N2O1O1001O1O001O001O001OO1N2O11O0O2M3M2O2M2N3M2O2M3M2N3N1N3Mk`>"}, "image_id": 370, "id": 5626}, {"iscrowd": 0, "category_id": 1, "bbox": [280.0, 480.0, 8.0, 5.0], "area": 40, "segmentation": {"size": [512, 512], "counts": "P_\\45k?0000000000000Pa_3"}, "image_id": 370, "id": 5627}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 484.0, 23.0, 28.0], "area": 595, "segmentation": {"size": [512, 512], "counts": "T?l0T?000000000000000000000000000000I700000000000lPd7"}, "image_id": 370, "id": 5628}, {"iscrowd": 0, "category_id": 1, "bbox": [210.0, 486.0, 95.0, 26.0], "area": 2052, "segmentation": {"size": [512, 512], "counts": "\\_Y3a0_?000000003M000000000000000000000L400000000000000001O000000000000000[Oi@a0[?0000000000000000000000000000000I70000001O000000000000000000000000000000000000000000000000007I0000000000000000001O000000aPW3"}, "image_id": 370, "id": 5629}, {"iscrowd": 0, "category_id": 1, "bbox": [492.0, 510.0, 3.0, 2.0], "area": 4, "segmentation": {"size": [512, 512], "counts": "o_f71n?11OQP8"}, "image_id": 370, "id": 5630}, {"iscrowd": 0, "category_id": 1, "bbox": [494.0, 467.0, 18.0, 41.0], "area": 458, "segmentation": {"size": [512, 512], "counts": "__g7210h?6M2M4M2M310L3N3M2M4M2N2M4O010O010ZA"}, "image_id": 371, "id": 5631}, {"iscrowd": 0, "category_id": 1, "bbox": [310.0, 61.0, 92.0, 54.0], "area": 3452, "segmentation": {"size": [512, 512], "counts": "PSk41f?9G:F9K50000000000010O00000000000001O0001O0000003M0001O0001O0000000001O00000YAQO^>Y1O00000M31O000001O0000000001O003M010O000000000000000L5O00000000K50001O000H8O100000001O0001O00000N2G9mN]A`0V?G9GQnf1"}, "image_id": 372, "id": 5632}, {"iscrowd": 0, "category_id": 1, "bbox": [170.0, 112.0, 27.0, 28.0], "area": 431, "segmentation": {"size": [512, 512], "counts": "RTe22l?2N3L3N3M2N3M2M4O010O01O010O010O010N1N3M2N3L3N3M2N3MY\\m4"}, "image_id": 372, "id": 5633}, {"iscrowd": 0, "category_id": 1, "bbox": [75.0, 113.0, 41.0, 73.0], "area": 2377, "segmentation": {"size": [512, 512], "counts": "adU1:f?000ROKQB5o=>^ABb>n00:F`0@5K0O100000O1000000000000000000000O100000O1000000000007I`0@6JOoN_Ae0a>[OnA6X>C`lU6"}, "image_id": 372, "id": 5634}, {"iscrowd": 0, "category_id": 1, "bbox": [501.0, 142.0, 11.0, 32.0], "area": 175, "segmentation": {"size": [512, 
512], "counts": "Yej72k?4M2M4L3N2M4L2OO01O3bK"}, "image_id": 372, "id": 5635}, {"iscrowd": 0, "category_id": 1, "bbox": [121.0, 156.0, 34.0, 32.0], "area": 840, "segmentation": {"size": [512, 512], "counts": "mdl15k?e0[O000000O1001O5J100000000000000000000000K5O100000007I2Fb@N^?280O10000lZb5"}, "image_id": 372, "id": 5636}, {"iscrowd": 0, "category_id": 1, "bbox": [471.0, 177.0, 19.0, 19.0], "area": 212, "segmentation": {"size": [512, 512], "counts": "ne[71m?2N3L3N3M2N3O01O010O010O0O1N3M2M4M2N\\j:"}, "image_id": 372, "id": 5637}, {"iscrowd": 0, "category_id": 1, "bbox": [126.0, 205.0, 15.0, 13.0], "area": 127, "segmentation": {"size": [512, 512], "counts": "dVo12k?3N3M2010O00010O00010O0O1N3LbYi5"}, "image_id": 372, "id": 5638}, {"iscrowd": 0, "category_id": 1, "bbox": [507.0, 320.0, 5.0, 15.0], "area": 45, "segmentation": {"size": [512, 512], "counts": "[jm73j?3N3L3M3PF"}, "image_id": 372, "id": 5639}, {"iscrowd": 0, "category_id": 1, "bbox": [431.0, 331.0, 81.0, 80.0], "area": 4101, "segmentation": {"size": [512, 512], "counts": "lkg62l?2M4M2N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N30O01O010O01O01O010O01O01O010O01O01O010O00010O010O0010O0010O0010O00010O010O00010O010O00010O01O01O010O01O01O0M4M2M3N3M2M4MoD"}, "image_id": 372, "id": 5640}, {"iscrowd": 0, "category_id": 1, "bbox": [347.0, 3.0, 44.0, 43.0], "area": 1427, "segmentation": {"size": [512, 512], "counts": "X`]52n?7I7I5K00000O106J7I4L00000O100000O100000O1000O100000O100000O100000O100000O2O6J7I7I7I7Ij^l1"}, "image_id": 377, "id": 5641}, {"iscrowd": 0, "category_id": 1, "bbox": [403.0, 3.0, 50.0, 34.0], "area": 1259, "segmentation": {"size": [512, 512], "counts": "[`Y65j?7J7I6J2N000O1000O10000000O10O10000000O10O10000000O10O10000000O1000O100000O1000O100000O1004K8I6J6JV_m0"}, "image_id": 377, "id": 5642}, {"iscrowd": 0, "category_id": 1, "bbox": [218.0, 36.0, 54.0, 56.0], "area": 2047, "segmentation": {"size": [512, 512], "counts": "hQ]31o?5J5L5K4L5K4K6K3MO10O1000O1000O10O10InN`AR1_>8000O10O1000O1000O1LfNbA[1]>6N00000O0100000O010001O001N101O2\\OaACc>9aABd>9aACc>9aACc>9f0L4KPng3"}, "image_id": 377, "id": 5643}, {"iscrowd": 0, "category_id": 1, "bbox": [162.0, 50.0, 49.0, 58.0], "area": 2051, "segmentation": {"size": [512, 512], "counts": "mQa23m?5K6J4K1I^OPAb0o>8000O11O6J6I6K6J6J000O10000000O103M000000O03N0000000O0100000000O01000000POhA8X>HnA2Q>OUBJn=4WBGn=3YBGm=3V1Jj]f4"}, "image_id": 377, "id": 5644}, {"iscrowd": 0, "category_id": 1, "bbox": [110.0, 59.0, 50.0, 58.0], "area": 2112, "segmentation": {"size": [512, 512], "counts": "VRg12n?6J6J4K1H_OQAa0o>8000O11N7J6J6J6J5K0O100000O1000004L00O01000000000O010000000000O01000000000SOjAOV>2PBHP>8VBBn=:YB_Om=;R1J6I`m_5"}, "image_id": 377, "id": 5645}, {"iscrowd": 0, "category_id": 1, "bbox": [367.0, 62.0, 97.0, 43.0], "area": 2767, "segmentation": {"size": [512, 512], "counts": "gbg51o?5_OMk@8P?Mk@8P?>M010000000O010000000O010IWOWAi0i>700O010000000O010000000O0100000005J2OO100000O10O100000O10O100000O10O1000O10O100000O10O1002N3L10000000O010000000O010000000O01000002N0O01000M32N4K100001N6K5K5D]@1S]h0"}, "image_id": 377, "id": 5646}, {"iscrowd": 0, "category_id": 1, "bbox": [51.0, 68.0, 53.0, 61.0], "area": 2203, "segmentation": {"size": [512, 512], "counts": "\\bi04l?a0_Oa0_O>B00000000000I70000000O10000000000000000000O1000002N00O6Ka0_O=C0Mj@\\OV?d0210000000000000000000000000O10008Hbl[6"}, "image_id": 377, "id": 5647}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 71.0, 37.0, 70.0], "area": 1834, "segmentation": {"size": [512, 512], "counts": 
"W2k1U>000000007I4LO100000003M5K0000003M2N000O010000000POoA1Q>O\\BDd=oNbAR1^>:N0O1000O1000O10O1000O1000O102N4YO^AHg>3^AGg>5]AGh>4]AGg>4h0KPl`3"}, "image_id": 377, "id": 5649}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 103.0, 49.0, 53.0], "area": 1606, "segmentation": {"size": [512, 512], "counts": "bSi64l?3L4M3M4L2M1000O02O3M4K10003M3L4M3M4K10000O01000O10O1000O01000O01000O10IcAjN^>U1fAhN\\>V1:M4K4M3M4K4M3M4L3L4Mf[>"}, "image_id": 377, "id": 5650}, {"iscrowd": 0, "category_id": 1, "bbox": [376.0, 115.0, 50.0, 50.0], "area": 1554, "segmentation": {"size": [512, 512], "counts": "nSl52m?4M4L3L5L3MO10O10004K4M1OO14K4M4L3L0100000O010000O010000O010000O010000O01M302N3L5L3M4L3L5L4L3L5L3MXkZ1"}, "image_id": 377, "id": 5651}, {"iscrowd": 0, "category_id": 1, "bbox": [102.0, 129.0, 27.0, 28.0], "area": 530, "segmentation": {"size": [512, 512], "counts": "VTc14k?5L4L4L3L5L0000O01000O10O1000O10O1000O101O4L4K5L4L4LZ[o5"}, "image_id": 377, "id": 5652}, {"iscrowd": 0, "category_id": 1, "bbox": [146.0, 129.0, 62.0, 43.0], "area": 1772, "segmentation": {"size": [512, 512], "counts": "aTY24k?6K4L4L2M101O4L3M0FUO^Al0b>XOZAh0f>80100000O0100000O0100000O01001O5K4K10O1000O10O1000O10O1000O10O1000O10O1000O10O1000O10001O5J5L4L4L4KVkg4"}, "image_id": 377, "id": 5653}, {"iscrowd": 0, "category_id": 1, "bbox": [39.0, 148.0, 27.0, 28.0], "area": 524, "segmentation": {"size": [512, 512], "counts": "hdc01o?4L4L5K4K5L2N000O10O1000O1000O10O1000O101O5K4K5L4L5Kejn6"}, "image_id": 377, "id": 5654}, {"iscrowd": 0, "category_id": 1, "bbox": [252.0, 166.0, 58.0, 75.0], "area": 2709, "segmentation": {"size": [512, 512], "counts": "[Vn32n?5K4K6K4L5K4L2M1000O10O1000O1000AQOlAP1S>UOiAk0j=ROWB8Je0o=YORBV1n=nNnAR1R>>0O010000000O01000001N6K4L2NO10O1000O101O5K4L2M10001O5J5L5K4L5oNVAb0n>ZOWA`0Y?L5K4LjhT3"}, "image_id": 377, "id": 5655}, {"iscrowd": 0, "category_id": 1, "bbox": [199.0, 170.0, 38.0, 53.0], "area": 1454, "segmentation": {"size": [512, 512], "counts": "`eS34l?4L5J5PABV>c0dABX>b0dABX>b0dA^O\\>S1O10O11O0O1000O1000O1000O10O1000O10O1000O4M4L5K4L4K6K4L5K4K5L\\YY4"}, "image_id": 377, "id": 5656}, {"iscrowd": 0, "category_id": 1, "bbox": [403.0, 174.0, 79.0, 56.0], "area": 2707, "segmentation": {"size": [512, 512], "counts": "neY62<0T?3h@1T?3i@0T?2j@0S?b0M2N3L4M2N3L100O10O10O010O10O10O10O100O1000O01000O010O01000O0100O0100O01000O010O03N2N3L4M1N01000O0100O0100O0100O0100O01000O0100O03N2M4M3M2M4M2M4M3M2Mgi>"}, "image_id": 377, "id": 5657}, {"iscrowd": 0, "category_id": 1, "bbox": [171.0, 175.0, 24.0, 25.0], "area": 434, "segmentation": {"size": [512, 512], "counts": "cee25k?5K4L5J3N00O10O100000O10O1000O1000O1004K5L5K5KoYn4"}, "image_id": 377, "id": 5658}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 180.0, 41.0, 64.0], "area": 1689, "segmentation": {"size": [512, 512], "counts": "g5g0X?5WAYOQ>j0lAYOQ>j0lAZOP>e0PB_Ol=b0TBBh=>XBAi=>XBBj=>SBCl=b0PB]OQ>Y10000O01000O010001O4K4M4L3L5L3M000O10O10O103M4L3L5L3M4K4M4L3MhX[7"}, "image_id": 377, "id": 5659}, {"iscrowd": 0, "category_id": 1, "bbox": [132.0, 193.0, 40.0, 34.0], "area": 983, "segmentation": {"size": [512, 512], "counts": "XVR23m?5K4L4K6K4L2N0O100000O0100000O10O1000O10O100000O0100000O10O1000O101O4L4L5J5L4LYiY5"}, "image_id": 377, "id": 5660}, {"iscrowd": 0, "category_id": 1, "bbox": [72.0, 197.0, 30.0, 47.0], "area": 1268, "segmentation": {"size": [512, 512], "counts": "WVT1?a?b0^OOQA6j>OQA6j>d0L1O0O1000O10O100000O10O100000O10O100000O01001O5J6K3M0000O010000000O010000000O0100000O010000000O0103M5K5K4K6K]Wg4"}, "image_id": 377, "id": 5666}, {"iscrowd": 0, "category_id": 1, "bbox": 
[270.0, 245.0, 67.0, 61.0], "area": 2456, "segmentation": {"size": [512, 512], "counts": "dXW43m?4K4M3M3M4K4M3M3L2O00O01000O01000O10\\OROUBo0k=TORBk0o=XOeAK4m0V>AfA`0Z>c00O01000O10O10O10O11N2OO1000O0103M4L1N10O10O10O102N4K4M3M3L5L3M3M3L5L3M3L4M4L3M0000O05LhVg2"}, "image_id": 377, "id": 5667}, {"iscrowd": 0, "category_id": 1, "bbox": [481.0, 269.0, 31.0, 46.0], "area": 781, "segmentation": {"size": [512, 512], "counts": "Xi`72n?3L3N2M4M2N2M4M0O10O10O010O010A]O`Ad0`>^O^Aa0c>BZA>e>EXA;i>GUA9j>>10O10O10O010OO20O0dG"}, "image_id": 377, "id": 5668}, {"iscrowd": 0, "category_id": 1, "bbox": [436.0, 280.0, 50.0, 51.0], "area": 1470, "segmentation": {"size": [512, 512], "counts": "WYj62n?2M3N2M4M2M3N2M3N2M3N0O102N2M3N3L3N2M100O010O10O010O10O010O010O0102M3N2N2M3N3L3Kl@ZOV?d05M3N2N3L3N2M3N2MWf<"}, "image_id": 377, "id": 5669}, {"iscrowd": 0, "category_id": 1, "bbox": [182.0, 285.0, 23.0, 21.0], "area": 340, "segmentation": {"size": [512, 512], "counts": "RYk23l?5L4L4L1N1000O10O1000O10O100000O10O11O5J5L4LeVi4"}, "image_id": 377, "id": 5670}, {"iscrowd": 0, "category_id": 1, "bbox": [149.0, 306.0, 36.0, 30.0], "area": 795, "segmentation": {"size": [512, 512], "counts": "hiZ25k?5K5J5L5K00000O10O1000O1000O1000O1000O1000O1000O1000O10O10002N5K5J6K5KhUS5"}, "image_id": 377, "id": 5671}, {"iscrowd": 0, "category_id": 1, "bbox": [290.0, 313.0, 72.0, 56.0], "area": 2208, "segmentation": {"size": [512, 512], "counts": "aZa44l?3L4M4L3M3L5L3M3L3N000O0100000O01000O0EoNiAQ1W>ROfAn0Z>UObAk0^><0000O010000O01000O01000O010000O02O4L3L4M3M4K4M3M1O03M000O01000O01000O0100000O01000O04M3M3L5L3M3MPeZ2"}, "image_id": 377, "id": 5672}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 326.0, 1.0, 3.0], "area": 3, "segmentation": {"size": [512, 512], "counts": "Vjo73g5"}, "image_id": 377, "id": 5673}, {"iscrowd": 0, "category_id": 1, "bbox": [453.0, 327.0, 59.0, 56.0], "area": 1854, "segmentation": {"size": [512, 512], "counts": "gjR72n?2M3N2M3N2M3N2M3N1N100O3M3N2N2000000N1O2M3M3NO010O010O010O010O010O010O010O010O01O01O010I^AoNb>R1`AlN`>S1611N3N2M3N2M3N2M2O2M3N2MVE"}, "image_id": 377, "id": 5674}, {"iscrowd": 0, "category_id": 1, "bbox": [165.0, 340.0, 68.0, 50.0], "area": 1868, "segmentation": {"size": [512, 512], "counts": "akb23m?2M4M2N3L3N3L3N1OO010O01000OI\\OSAe0m>]OQAb0o>8000O010O01000O02O3L01000O010O01000O010O10O10O010O10001N10O10O10O010LWAPOj>P1210O012M3N3M2M4M2M4M2N3L3N3L3N_T[4"}, "image_id": 377, "id": 5675}, {"iscrowd": 0, "category_id": 1, "bbox": [237.0, 362.0, 35.0, 15.0], "area": 295, "segmentation": {"size": [512, 512], "counts": "akf31o?4K2MJZ@6f?20002M1000O10O100000O10O13M5J2NI5[@Ke?8000O10O1000O10O100000O4M4L_dg3"}, "image_id": 377, "id": 5676}, {"iscrowd": 0, "category_id": 1, "bbox": [307.0, 374.0, 69.0, 59.0], "area": 2728, "segmentation": {"size": [512, 512], "counts": "Sli43m?3L3N3M2M4M2M3N3M2M4M2M4M2N3M11M3N0O11N4M2M4M0O0100O0100O0100O0100O0100O0100O03N0O10O101N3N3M2M10O010O10O10O2O3L3N3L3N3M2M3YOVA4m>HVA5m>IVA4m>HVA6l>HWA4]?N2MjbS2"}, "image_id": 377, "id": 5677}, {"iscrowd": 0, "category_id": 1, "bbox": [222.0, 381.0, 24.0, 18.0], "area": 298, "segmentation": {"size": [512, 512], "counts": "R\\_34l?4K5L1O00O010000000O0100000O0100000O01001O4L5JjcT4"}, "image_id": 377, "id": 5678}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 391.0, 43.0, 59.0], "area": 1497, "segmentation": {"size": [512, 512], "counts": "S=f0X?3N2M2N3N1N3EoNcAT1Z>nNeAS1Z>oNcAT1Z>:N3N2O0100O00OO1O3N1N3M2O2M3M2O2M2N3N2M2N3N1N3M3N1N3M2O2M3M2O2M2N]SZ7"}, "image_id": 377, "id": 5679}, {"iscrowd": 0, "category_id": 1, "bbox": [498.0, 391.0, 
14.0, 46.0], "area": 394, "segmentation": {"size": [512, 512], "counts": "V]i72V?OaA3\\>0aA3]>OaA2]>0aA3\\>0aA2^>0`A2]>1`A1_>0_A3^>h000O0010OjC"}, "image_id": 377, "id": 5680}, {"iscrowd": 0, "category_id": 1, "bbox": [174.0, 402.0, 49.0, 38.0], "area": 1323, "segmentation": {"size": [512, 512], "counts": "l\\g24k?6K4L4L5K4K3N00O10O1000O10O100000O10O1M30O10O1000O1000O1000O102N000O10O100000O0100002M5L4L5K4K5LgR`4"}, "image_id": 377, "id": 5681}, {"iscrowd": 0, "category_id": 1, "bbox": [332.0, 434.0, 83.0, 72.0], "area": 2598, "segmentation": {"size": [512, 512], "counts": "T_V53m?2M3N2M3N2M3N2M3N2M2O0O0HWO[Ai0e>YOXAg0h>\\OVAd0j>8010O010O010O010O010O010O010O010O010O010O010O010O010O010O01DoNiAP1X>ROeAo0Z>TOdAk0]>WO`Aj0_>;10O010O010O010O010O02O2M3N2M3N2M3N2M3N0O010O102M3N2M3N2M3N2M3N2M]Q`1"}, "image_id": 377, "id": 5682}, {"iscrowd": 0, "category_id": 1, "bbox": [508.0, 451.0, 4.0, 9.0], "area": 22, "segmentation": {"size": [512, 512], "counts": "U^n72m?2N3N2kA"}, "image_id": 377, "id": 5683}, {"iscrowd": 0, "category_id": 1, "bbox": [20.0, 453.0, 69.0, 43.0], "area": 1796, "segmentation": {"size": [512, 512], "counts": "g^:3l?6K4L4L4L4K2O0000O010000L3100000O10O100000O0100000O010001O10O000O100000O010N2O1000O1000O10O1000O10O100N11000O10O1000O10O1IQA]Oo>c0603N4L4L4K6KXQc6"}, "image_id": 377, "id": 5684}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 457.0, 61.0, 40.0], "area": 1642, "segmentation": {"size": [512, 512], "counts": "inm22n?5K5K4L5J4M0000O10O1KZOPAf0P?410O100000O10O100000O0100000O10O100000ON300O1000O10O100000O102N000O010000000O010000000O6K5K4L5K5JPaS4"}, "image_id": 377, "id": 5685}, {"iscrowd": 0, "category_id": 1, "bbox": [380.0, 485.0, 55.0, 27.0], "area": 1045, "segmentation": {"size": [512, 512], "counts": "c_n52n?3L3N2M3N2M10000O100O100O100O100O100O10000O100O100O12N2N2N00O100O100O10000O100O100O100O100O11O2N2Hg@E[?9g@E[?89N3L3N[PV1"}, "image_id": 377, "id": 5686}, {"iscrowd": 0, "category_id": 1, "bbox": [76.0, 487.0, 23.0, 18.0], "area": 282, "segmentation": {"size": [512, 512], "counts": "\\_V14l?4K6K00000O0100000O010000000O0100000O05L4L4L]P^6"}, "image_id": 377, "id": 5687}, {"iscrowd": 0, "category_id": 1, "bbox": [235.0, 507.0, 24.0, 5.0], "area": 71, "segmentation": {"size": [512, 512], "counts": "ooe31o?0000000O1000000O1000000O100000000O1000000004LQPn3"}, "image_id": 377, "id": 5688}, {"iscrowd": 0, "category_id": 1, "bbox": [220.0, 511.0, 5.0, 1.0], "area": 5, "segmentation": {"size": [512, 512], "counts": "o_^31o?0000000QP_4"}, "image_id": 377, "id": 5689}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 102.0, 112.0], "area": 7167, "segmentation": {"size": [512, 512], "counts": "0n1R>1O1O1O1O1O1O1O1O1O1O1O1O1O001O1O1O1O1O1O1O1O1O1O1O1O001O1O1O1O1O1O1O1O1000000000O10O1000000000000000000000O0O2N2N2N2N2N2N2N2N2N2N1N3N2N2N2N2N2N2N2N2000O10000000O100N2N2N2N2N2N1O2N2]OZAIh>5ZAIh>5ZAHi>6YAHi>5ZAIh>5ZAIh>5ZAIh>5c0N1O2NVn\\6"}, "image_id": 378, "id": 5690}, {"iscrowd": 0, "category_id": 1, "bbox": [99.0, 0.0, 108.0, 61.0], "area": 3801, "segmentation": {"size": [512, 512], "counts": "P`a11o?1O1O1O1O1O001O1O1O1O1O1O1O1O1O001O1O1O1O1O1O1O1O1O1O001O1O1O1O1O1O1O1O1O001O1O1O1O1O1O1O1O1O1O001O1O1O1O1O1O1O1O1O001O1O1O1O1O1O1O1OO1O1N2O1O1O1O1O1O1O1O1O1N2O1001O1O1O1O1O1O1O1O001OZOhAEX>9jAGV>7lAIT>5mAKT>3nAKS>4oAJS>3PBKR>3PBJS>4oAJS>4oAJS>4oAJS>4oAJS>4oAJS>4nAKT>3n0Md_h4"}, "image_id": 378, "id": 5691}, {"iscrowd": 0, "category_id": 1, "bbox": [273.0, 62.0, 239.0, 339.0], "area": 38814, "segmentation": {"size": [512, 512], "counts": 
"jeX42m?2N2N2N2N2N2M3N1O2N2N2N2N2N2N2M3N1O2N2N2N2N2N2N2M3N2N1O2N2N2N2N2M3N2N2N1O2N2N2N2N2M3N2N2N2N1O2N2N2M3N2N2N2N2N1O2N2N2M3N2N2N2N2N2N1O2M3N2N2N2N2N2N2N1O2M3N2N2N2N2N2N2N2M2O2N2N2N2N2N2N2N2M3N1O2N2N2N2N2N2N2N2N1O2M3N2N2N2N2N2N2N2O010O10000000000000O1000O10000000000000O10O10000000000000O1000O10000000000000O10O10000000000000O1000O10000000000000O10O1000^LUGNk8OXG1h8MZG3f8K\\G5d8I^G7b8G_G:a8DaG;`8CbG=]8BeG>[8_OhGa0X8]OjGc0V8[OlGe0T8YOnGg0R8YOnGg0R8YOmGh0S8XOmGg0T8YOlGg0T8YOlGg0S8ZOmGf0S8ZOmGf0S8ZOmGf0S8ZOmGf0S8YOnGg0R8YOnGg0R8YOmGh0S8XOmGg0T8YOlGg0S8ZOmGf0S8ZOmGf0S8YOnGg0R8WOPHi0P8UORHk0n7SOTHm0l7QOUHP1k7nNWHQ1j7mNXHS1h7kNZHU1e7jN]HV1c7gN`HY1`7eNbH[1^7cNdH]1\\7aNfH_1Z7_NhHa1X7]NiHd1W7ZNkHe1V7YNlHg1S7XNoHh1Q7UNRIk1n6SNTIm1l6QNVIo1j6oMXIQ2h6mMZIS2f6kM[IV2e6hM]IW2d6gM^IY2b6dMaI\\2^6cMdI]2\\6aMfI_2Z6_MhIa2X6]MjIc2V6[MlIe2T6YMnIg2R6WMoIj2Q6TMQJk2P6RMSJn2>"}, "image_id": 378, "id": 5692}, {"iscrowd": 0, "category_id": 1, "bbox": [38.0, 81.0, 131.0, 132.0], "area": 8545, "segmentation": {"size": [512, 512], "counts": "TUc02m?2N2N2N2N2N2M3N1QA_O^>c0`A^O_>d0^A_O`>c0^A_O`>c0^A_O`>c0^A_O`>c0^A_O`>R1N2M3N2N2N1O2N2N2N2N2N2N2M3N2N2N2N1O1O01O2N1O000O11N3N2N2N2N2N1O2N2N2N2N2M2O0000000000000000O010000000000000000000O01000000000000002N2M3N2N2N2N2N2N1O2N2N2N2M3N2N2N2N2N2N2N1O2N2N2M3N2N2N2N2N2N2N1O2N2M3N2N2N2N2N2N2N2N2N1O2M3N2N2N2N2NR\\[5"}, "image_id": 378, "id": 5693}, {"iscrowd": 0, "category_id": 1, "bbox": [195.0, 172.0, 82.0, 90.0], "area": 3922, "segmentation": {"size": [512, 512], "counts": "PgQ31n?2N2N2N2N2M3N2N2N2N1O2M3N2N2N2N2N2M3N2N2N2N2N1N3N2N2N2N2N2M3N2N2N2N2N1N3N2N2N2N2N0O10000001O2N1O2N2N2N2N2N2N2N1O2N2N2N2N2N2N2N1O2N2N2N2N2N2M3N3L4M3L4M3M2M4M3L4M3K^Ye3"}, "image_id": 378, "id": 5694}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 179.0, 71.0, 115.0], "area": 4629, "segmentation": {"size": [512, 512], "counts": "c5l2U=0000000000O10O100000000000000000O10O10000000000000N2N2N2N2N1N3N2N2N2N2N2O1000000000O10O1N2M3N2N2N2^OjAWOX>g0jAWOX>g0jAWOX>g0jAWOW>h0kAVOW>h0kAVOW>h0jAWOX>g0jAVOY>h0a0N2N2M3N2N2N2N1O2N2N2N2N^Xl6"}, "image_id": 378, "id": 5695}, {"iscrowd": 0, "category_id": 1, "bbox": [492.0, 490.0, 20.0, 22.0], "area": 227, "segmentation": {"size": [512, 512], "counts": "o_f71n?1O1O1O1O1O1N2O1O1O1O1O1O1O1O1N2O1O1O1"}, "image_id": 378, "id": 5696}, {"iscrowd": 0, "category_id": 1, "bbox": [38.0, 0.0, 65.0, 48.0], "area": 1525, "segmentation": {"size": [512, 512], "counts": "RPc01n?2O1N2O1O2N1O1O1O1O1O2N1O1O1O1O2N1O1O1O1O2k@UOQ?n0O1O2O0O1N2O1O2N100000001O01O0O1N2N2N1O011N3M2N2N20001O0001O00O2M2N2O1N2N3M2N2N2N2O1Nm^\\6"}, "image_id": 379, "id": 5697}, {"iscrowd": 0, "category_id": 1, "bbox": [79.0, 0.0, 49.0, 34.0], "area": 857, "segmentation": {"size": [512, 512], "counts": "P`W11o?1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O001O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1OO1O1O1]OQA4P?KRA3o>LSA2n>MTA1m>NUA0l>OVAOk>0WANj>1XAMi>2c0O2NPPP6"}, "image_id": 379, "id": 5698}, {"iscrowd": 0, "category_id": 1, "bbox": [138.0, 0.0, 12.0, 6.0], "area": 42, "segmentation": {"size": [512, 512], "counts": "PPU21o?1O001O1O1O1O00O1O1O1NRPe5"}, "image_id": 379, "id": 5699}, {"iscrowd": 0, "category_id": 1, "bbox": [394.0, 0.0, 56.0, 28.0], "area": 1080, "segmentation": {"size": [512, 512], "counts": "\\PU63l?2M2O2N2M2O2M3N2N101O1O001O1O001O1O1OO1O1N2O1N2O1O1O1001O1O1O001O1O001O1O1O001O1ON2O1N2O1O1N2O1N2O1O1N2O1I]@Oe?0]@Nd?0WPo0"}, "image_id": 379, "id": 5700}, {"iscrowd": 0, "category_id": 1, "bbox": [443.0, 3.0, 67.0, 89.0], "area": 2714, "segmentation": {"size": [512, 512], "counts": 
"o`m61^12n<1PC0o<2nB1o<1PC1n<1PC0o<2nB1o<1PC1n<1VBAb0`0V=2VB@b0`0V=2UBAb0`0V=8iBIV=9gBJV=9hBIW=7hBKX=5eBM\\=3bBO]=2`B1`=N_B3b=T12N1N3N2M2O000O2O2M3N1N3N2M3N1O0O1KaAjN^>V1dAhN]>X14010O010O10O10O010O02O1N3N2N1N3N2M3N1N3N2M2O2M3N1O2M3N1N3Nbo0"}, "image_id": 379, "id": 5701}, {"iscrowd": 0, "category_id": 1, "bbox": [14.0, 10.0, 58.0, 60.0], "area": 1644, "segmentation": {"size": [512, 512], "counts": "dP71o?2M3M2O2M2N3N1N3M2O2N101N1N3N1SASOd>P1YASOd>P1ZAQOe>W1M2O2M201000O010O010M2O2M2N10O02O2M2N3N1N3M2O2O0001N2ON3M2O2M2N3N1N3M2O2M3M2O2MWnk6"}, "image_id": 379, "id": 5702}, {"iscrowd": 0, "category_id": 1, "bbox": [468.0, 118.0, 44.0, 39.0], "area": 899, "segmentation": {"size": [512, 512], "counts": "QTZ73l?2M2O200O01000Z@Hb?8]@Jc?901000OO2M3N1N3N2M2O2M3N110O10O10O10O10O10O10O10N2N1N3N2M2O2M3N10100OkK"}, "image_id": 379, "id": 5703}, {"iscrowd": 0, "category_id": 1, "bbox": [428.0, 158.0, 15.0, 14.0], "area": 116, "segmentation": {"size": [512, 512], "counts": "VUf61n?1O2M3N2M2O2O10O001M3N1N3N2MoZR1"}, "image_id": 379, "id": 5704}, {"iscrowd": 0, "category_id": 1, "bbox": [440.0, 158.0, 41.0, 37.0], "area": 753, "segmentation": {"size": [512, 512], "counts": "^Ul62l?2O2M3N2N1N3N2M2O2N2M210O1000O01000O10O10O10O1000O10O10O1O0O2M3N1O2M3N2M2O2N2M2O_Z?"}, "image_id": 379, "id": 5705}, {"iscrowd": 0, "category_id": 1, "bbox": [498.0, 161.0, 14.0, 30.0], "area": 231, "segmentation": {"size": [512, 512], "counts": "cUi73l?2N2M2O2M3N1O2M3N2M2O2N2M2oJ"}, "image_id": 379, "id": 5706}, {"iscrowd": 0, "category_id": 1, "bbox": [421.0, 170.0, 18.0, 18.0], "area": 186, "segmentation": {"size": [512, 512], "counts": "eeb61m?3N2N2M2O2N2N2M2001N2N2O001D]@8g?N2M3N^ZT1"}, "image_id": 379, "id": 5707}, {"iscrowd": 0, "category_id": 1, "bbox": [392.0, 184.0, 53.0, 54.0], "area": 1328, "segmentation": {"size": [512, 512], "counts": "^VT63l?2M2O2N2M3N1O2M3N2M2O2O10ON3MWOPAg0n>[ORAe0l>]OTAb0j>@WA?i>BWA>i>AWA`0i>@WA`0i>@WA?h>>NVOZA=c>F]A:c>F]A9d>b0OTO^A;b>BaA=`>AaA`0_>WO^A25g0\\>VOaA06j0k>O01000O10O10N2M2O2N2M3N1N3N2N2M2O2NUYQ1"}, "image_id": 379, "id": 5708}, {"iscrowd": 0, "category_id": 1, "bbox": [471.0, 204.0, 41.0, 52.0], "area": 1192, "segmentation": {"size": [512, 512], "counts": "Xg[71n?2M2O2N2N2M2O2N2M3N1O2M3N2N1N3N2N2M2O2N2M30O10O100000O0100000O001lN^Af0e>XO\\Ag0f>WO\\Ag0e>WO^Af0P?N1O2M3NlH"}, "image_id": 379, "id": 5709}, {"iscrowd": 0, "category_id": 1, "bbox": [389.0, 221.0, 14.0, 14.0], "area": 104, "segmentation": {"size": [512, 512], "counts": "RgR62l?3N2N1O200O10O1000OO2M3N2NmXf1"}, "image_id": 379, "id": 5710}, {"iscrowd": 0, "category_id": 1, "bbox": [407.0, 225.0, 16.0, 14.0], "area": 119, "segmentation": {"size": [512, 512], "counts": "Ug[63l?2M2O2O100O0100000O01O1N2N1N3NhX\\1"}, "image_id": 379, "id": 5711}, {"iscrowd": 0, "category_id": 1, "bbox": [383.0, 228.0, 15.0, 14.0], "area": 102, "segmentation": {"size": [512, 512], "counts": "Ygo51n?2M3N2N1100000O010000O0O2M3Nehh1"}, "image_id": 379, "id": 5712}, {"iscrowd": 0, "category_id": 1, "bbox": [370.0, 231.0, 25.0, 21.0], "area": 243, "segmentation": {"size": [512, 512], "counts": "_Wi51n?1O2M3N2M2O2O10O1000O10O10O10O100O0N3000OI\\@3d?K^@4g?O2NYXj1"}, "image_id": 379, "id": 5713}, {"iscrowd": 0, "category_id": 1, "bbox": [423.0, 248.0, 61.0, 39.0], "area": 1442, "segmentation": {"size": [512, 512], "counts": "_hc61l?3L5L3L4M3L5M20010O0001O01O0001Om@XOn>m00010O000010O000010O000010O000M4N1000010O0001O01O00010O0001O01O00010O0001ON3L3L4M4K4L4Mlg="}, "image_id": 379, "id": 5714}, {"iscrowd": 0, "category_id": 1, "bbox": [379.0, 255.0, 32.0, 31.0], 
"area": 523, "segmentation": {"size": [512, 512], "counts": "^hm52m?2N2M3N1O2M3N2N1N3N2M3O01000O0100000O01N2N2N1N3N2N2M2O2M3N1O2MeWb1"}, "image_id": 379, "id": 5715}, {"iscrowd": 0, "category_id": 1, "bbox": [342.0, 262.0, 34.0, 37.0], "area": 603, "segmentation": {"size": [512, 512], "counts": "PY[51n?1N3N2M2O2N2M3N1N3N2N1N3N2M3N1OO01000O3N1N3N2M2O2N2M3N1N3N2N1N3N2MbgS2"}, "image_id": 379, "id": 5716}, {"iscrowd": 0, "category_id": 1, "bbox": [392.0, 290.0, 57.0, 50.0], "area": 1481, "segmentation": {"size": [512, 512], "counts": "kYT61m?3N1N3M3N1N3M2O2M3M2O2N2O010O0N3N2M2N30O10O10O10O010O10O10O010O10O10O010O10O10O01N1N3M3N1N3M2O2M3M2O2M3N1N3M2O2MYVo0"}, "image_id": 379, "id": 5717}, {"iscrowd": 0, "category_id": 1, "bbox": [346.0, 333.0, 47.0, 35.0], "area": 1400, "segmentation": {"size": [512, 512], "counts": "aZ]5?a?>B0000000000000000000L40002N0000N200000000000003M0000000000000000000000000000000000000O10j0VOfTk1"}, "image_id": 379, "id": 5718}, {"iscrowd": 0, "category_id": 1, "bbox": [153.0, 349.0, 92.0, 76.0], "area": 3371, "segmentation": {"size": [512, 512], "counts": "`k\\21n?2N2N2M3N1O2N2N2M3N2N1O2N2N2M3N2000O01000000000O0100000000000O01000000000O010000000000O0N3N2N2N2N2M2O2N2N2N2N2M2O2N2N2N2M300O01000000O1N1O2N2N2M3N2N2N1gNgAj0[>TOgAi0\\>TOgAj0[>TOgAOIe0b>ZOfA0Ie0c>YOfA0Ie0V?M3N2N2N1O2N2M3N2N2NeSU4"}, "image_id": 379, "id": 5719}, {"iscrowd": 0, "category_id": 1, "bbox": [410.0, 368.0, 22.0, 14.0], "area": 267, "segmentation": {"size": [512, 512], "counts": "a[]61o?b?000000000000000000000000000000000000000laR3"}, "image_id": 379, "id": 5726}, {"iscrowd": 0, "category_id": 1, "bbox": [327.0, 108.0, 29.0, 33.0], "area": 527, "segmentation": {"size": [512, 512], "counts": "ecS533Nf?3X@0e?8N1N3N110O0N3I]Ol@f0R?[Om@f0Q?6N10O00011N3N2M2N3N1N3M3N1N3N1N3M3Nok]2"}, "image_id": 380, "id": 5727}, {"iscrowd": 0, "category_id": 1, "bbox": [366.0, 134.0, 56.0, 57.0], "area": 1668, "segmentation": {"size": [512, 512], "counts": "lTg52l?2N3X@Ka?7^@L^?=M4L4L3M4L4L1O20O2N101N101N1O2O00O3L301N1O2O0O2OiN[AS1c>kN`AU1e>2O0O2O0O2OO0O2N102M3M2O2M2O2M2N3N2M2N3N1N3M2O2M2N3Naj\\1"}, "image_id": 380, "id": 5728}, {"iscrowd": 0, "category_id": 1, "bbox": [271.0, 138.0, 14.0, 14.0], "area": 114, "segmentation": {"size": [512, 512], "counts": "^dW42n?2M2N3N1N3M10O1O2O2M2N2N3N_[a3"}, "image_id": 380, "id": 5729}, {"iscrowd": 0, "category_id": 1, "bbox": [281.0, 145.0, 35.0, 38.0], "area": 707, "segmentation": {"size": [512, 512], "counts": "ld\\41n?3N1N3M3N1N3N1N3M3N1N3M2O2M3N1N3M10O1O3N2M2N3N1N3N2M2N3N1N3M3N1N3N1NhjQ3"}, "image_id": 380, "id": 5730}, {"iscrowd": 0, "category_id": 1, "bbox": [321.0, 160.0, 55.0, 66.0], "area": 1759, "segmentation": {"size": [512, 512], "counts": "keP51n?3N1N3@Ko@8o>JPA8n>Jo@8o>Jo@9n>JPA7o>>N1O01O01O3N1N3M2O2M2N30O010O010O01M201O10O1O0N3M2O2M2N3N1N3M2O2M2N3N1N3M2O2M2O2M2N3N1N3M2O`iS2"}, "image_id": 380, "id": 5731}, {"iscrowd": 0, "category_id": 1, "bbox": [268.0, 164.0, 26.0, 27.0], "area": 388, "segmentation": {"size": [512, 512], "counts": "]UV42m?2N3N1N3M2O2M2N3N1N3M2O0O0101N3M2O2M2N3N1N3M2O2M2N]j\\3"}, "image_id": 380, "id": 5732}, {"iscrowd": 0, "category_id": 1, "bbox": [274.0, 199.0, 41.0, 56.0], "area": 1305, "segmentation": {"size": [512, 512], "counts": "_VY44l?3M2M4M3L4M2N3L4M3M2M4M3M3L3N3L4M00O01000O01002M4M3L3N3M3L4M2N3L4M3M2M4M3L4M2N3L\\XR3"}, "image_id": 380, "id": 5733}, {"iscrowd": 0, "category_id": 1, "bbox": [221.0, 200.0, 30.0, 30.0], "area": 477, "segmentation": {"size": [512, 512], "counts": 
"df^32m?2N2O2M2N3N1N3M2O2M2N2N2OO01O01O011N3M2O2M2N3N1N2N3M2O2M2NXYR4"}, "image_id": 380, "id": 5734}, {"iscrowd": 0, "category_id": 1, "bbox": [171.0, 221.0, 32.0, 42.0], "area": 642, "segmentation": {"size": [512, 512], "counts": "age21o?1N3N1N3M3N1DEPA>m>DQA>m>EQA=m>DQA>m>=M2O2M2N03N1N3N2PORAi0P?UOSAh0T?O2M2N3N1N3M3N1N3M2O2MVXj4"}, "image_id": 380, "id": 5735}, {"iscrowd": 0, "category_id": 1, "bbox": [212.0, 239.0, 53.0, 60.0], "area": 1509, "segmentation": {"size": [512, 512], "counts": "nWZ31n?2O2M2N3N1N3N1N3M2O2M2N3N2M2N3N1N3M2O2M2N3N1N3O00100O010O01M3N1N3M2O2M2O2M2N3N1N3M2O2M3M2O2M2N3N1N3M2O2MVWk3"}, "image_id": 380, "id": 5736}, {"iscrowd": 0, "category_id": 1, "bbox": [451.0, 262.0, 54.0, 44.0], "area": 1173, "segmentation": {"size": [512, 512], "counts": "lhQ72m?2O2M2N3N1N3M2O2M2N2O2M2N3N1N1O010O000010O00011N3M2O0O1O00010O00010O00010O00010O00011N3M2O2M2N3M2O2M2N3N1N[W3"}, "image_id": 380, "id": 5737}, {"iscrowd": 0, "category_id": 1, "bbox": [160.0, 268.0, 62.0, 58.0], "area": 1817, "segmentation": {"size": [512, 512], "counts": "dX`22m?2N3N2M2O2M2N3N1N3M3N1N3N10100O010O010N1N3M3N1N3M2O2M3M2O2M21O1M2O2M2N3N1N3M3N1N01O01O01O01O102M2N3N2M2O2M2N3N2M2N3N1N3N1Naf`4"}, "image_id": 380, "id": 5738}, {"iscrowd": 0, "category_id": 1, "bbox": [499.0, 283.0, 13.0, 14.0], "area": 112, "segmentation": {"size": [512, 512], "counts": "Pii72m?3N1N3M2O1N0010O1O3M2O2MQG"}, "image_id": 380, "id": 5739}, {"iscrowd": 0, "category_id": 1, "bbox": [408.0, 298.0, 37.0, 57.0], "area": 1338, "segmentation": {"size": [512, 512], "counts": "\\Z\\64i?3L4L4L5K4L4VAXOY>k0cAYOY>l0cAWOY>[1O0001O01O01O01O0001O01O0001L3L4M4K4L4L4M4O00001Dn@HR?4RAHR?4?LcVQ1"}, "image_id": 380, "id": 5740}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 311.0, 22.0, 25.0], "area": 288, "segmentation": {"size": [512, 512], "counts": "mYW72n?2M2N3N1N3M2O2M3N1N3O00N3N1N3M2O2M3N1N3M2Oie="}, "image_id": 380, "id": 5741}, {"iscrowd": 0, "category_id": 1, "bbox": [448.0, 313.0, 28.0, 35.0], "area": 427, "segmentation": {"size": [512, 512], "counts": "nYP73l?2O2M2O4K2N3N2M2O20O10O010O010O01O0O2M2O2M2N3N2M2N3N1N\\ea0"}, "image_id": 380, "id": 5742}, {"iscrowd": 0, "category_id": 1, "bbox": [478.0, 324.0, 28.0, 29.0], "area": 435, "segmentation": {"size": [512, 512], "counts": "fZ_72l?3N1O2M3N1N3N2M3N1O2M3N1O2O01N2M2O2N2M2O2M3N2M2O2N2M2Ode2"}, "image_id": 380, "id": 5743}, {"iscrowd": 0, "category_id": 1, "bbox": [330.0, 347.0, 48.0, 43.0], "area": 1074, "segmentation": {"size": [512, 512], "counts": "`[U52n?2M2N3N1N3M2O2M2N3N1N3M2O2M0010O00010O0002O1N2N10O000LUAUOk>j0XASOh>n05O00010O1O2O2M2N3N1N3M2O2M3M2O2M2N3NadR2"}, "image_id": 380, "id": 5744}, {"iscrowd": 0, "category_id": 1, "bbox": [386.0, 354.0, 23.0, 25.0], "area": 298, "segmentation": {"size": [512, 512], "counts": "Y[Q61o?1N3N2M2N3N2M2O2M3M2O1N12M2N3N2M2O2M3M2O2M2N_Tc1"}, "image_id": 380, "id": 5745}, {"iscrowd": 0, "category_id": 1, "bbox": [504.0, 354.0, 8.0, 15.0], "area": 90, "segmentation": {"size": [512, 512], "counts": "\\[l73j?3L4M4O000010OmD"}, "image_id": 380, "id": 5746}, {"iscrowd": 0, "category_id": 1, "bbox": [409.0, 356.0, 26.0, 27.0], "area": 403, "segmentation": {"size": [512, 512], "counts": "]k\\61n?2O2M2N3N2M2N3N2M2O2M2N010O0201N1O1N1O3N1N3M3N1L5KU@0YdV1"}, "image_id": 380, "id": 5747}, {"iscrowd": 0, "category_id": 1, "bbox": [481.0, 375.0, 26.0, 28.0], "area": 427, "segmentation": {"size": [512, 512], "counts": "[l`71l?3N2M4M2M4M2M3N30O010O00010O0010O0010L3N2M4M2M4L3NTT2"}, "image_id": 380, "id": 5748}, {"iscrowd": 0, "category_id": 1, "bbox": [445.0, 
396.0, 40.0, 51.0], "area": 1459, "segmentation": {"size": [512, 512], "counts": "emn67j>0eA9R>0fA9X>h0000000001O0001O000000000001O0001O0000000001O00000H8G9H8H8J7O00000000000LdS="}, "image_id": 380, "id": 5749}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 437.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "emo71Z2"}, "image_id": 380, "id": 5750}, {"iscrowd": 0, "category_id": 1, "bbox": [503.0, 444.0, 9.0, 24.0], "area": 122, "segmentation": {"size": [512, 512], "counts": "^nk73k?2M4M2N2N3L3N3M2TB"}, "image_id": 380, "id": 5751}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 451.0, 3.0, 6.0], "area": 10, "segmentation": {"size": [512, 512], "counts": "S>6l?M3NiQn7"}, "image_id": 380, "id": 5752}, {"iscrowd": 0, "category_id": 1, "bbox": [228.0, 461.0, 37.0, 51.0], "area": 1416, "segmentation": {"size": [512, 512], "counts": "n_b32S?OdA4Y>0dA2Z>0cA3[>OcA3Z>1bA2\\>0bA2\\>1`A2[>3cAO]>j00000O10000000000000000O100000000000O1M3N1N3N2N3L;F;D2UA1i>1UA1h>3UAOi>3UAOi>4TANm>1PA2P??1O001O00O1N2N200001O001O001O001O00001O001O001O0Gm@ET?8o@GQ?6RAIP?4RAJP?4?N3MjPn2"}, "image_id": 380, "id": 5755}, {"iscrowd": 0, "category_id": 1, "bbox": [350.0, 507.0, 14.0, 5.0], "area": 42, "segmentation": {"size": [512, 512], "counts": "o__51m?2N200001O001O00001O001O00Q`Y2"}, "image_id": 380, "id": 5756}, {"iscrowd": 0, "category_id": 1, "bbox": [90.0, 509.0, 7.0, 3.0], "area": 13, "segmentation": {"size": [512, 512], "counts": "o_]11o?0O1O1001O1OQP_6"}, "image_id": 380, "id": 5757}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 118.0, 141.0], "area": 9929, "segmentation": {"size": [512, 512], "counts": "0l2T=1O001O1O1O1O1O001O1O1O1O001fCkLg;V3WDlLh;U3UDnLj;S3TDoLk;R3SDPMl;P3SDQMm;P3QDRMn;o2oCTMP<_3O1O1O001O1O1O1O1O001O1O1O1O1O001O00O1O1N2O1O1O1O1N2O1O1O1N2O1O1O1O1N2O1O1O1O1aNeCWO]?^AC`>?]ADa>>]ADa>>]ADa>>]AD`>?^AC`>?^ABa>P1N2N2N2N2N2N2N2O01N2N2N2N2N2N0O1000000000000000BbN[B^1e=dNYB\\1g=fNVB[1j=gNTBY1l=iNRBW1n=kNPBU1o==101O2N2\\OlAYOV>e0lAYOV>e0lAYOV>e0lAYOV>e0lAYOV>e0kAZOW>d0kAZOW>d0kAZOW>d0kAZOW>d0d0N2N2O10O1000O1O1N2N002N2N2N2N^ka4"}, "image_id": 382, "id": 5761}, {"iscrowd": 0, "category_id": 1, "bbox": [309.0, 151.0, 26.0, 26.0], "area": 285, "segmentation": {"size": [512, 512], "counts": "Zej41n?2N2N1O2M3N2N2N1N100000O011O1O1N1000002M3N2O001N2NU[h2"}, "image_id": 382, "id": 5762}, {"iscrowd": 0, "category_id": 1, "bbox": [270.0, 162.0, 44.0, 43.0], "area": 949, "segmentation": {"size": [512, 512], "counts": "fUW42m?2N1N3N2N2M2O2N2M3N1O2M3N2N1N300000O0100000O0100000O01000N2N1N3N2N2M2O2N2M3N1O2M3N2N1N[jR3"}, "image_id": 382, "id": 5763}, {"iscrowd": 0, "category_id": 1, "bbox": [336.0, 165.0, 18.0, 16.0], "area": 125, "segmentation": {"size": [512, 512], "counts": "ZUX52l?2O2M30O10O10O10O10O10O01O01001L2Obj^2"}, "image_id": 382, "id": 5764}, {"iscrowd": 0, "category_id": 1, "bbox": [218.0, 191.0, 54.0, 63.0], "area": 1764, "segmentation": {"size": [512, 512], "counts": "TW]33i?5N2N1N3N2N2M2O2N2IZOPAh03XOd>2XAh02XOd>o0ZAROe>V1M3N2N1O20000N1O2N2M10O01000O3N2N1N3N2N1N3N2M3N1O2M3N2N1N3N2N2M2O2N2M3N1O2M3N2M2O_ig3"}, "image_id": 382, "id": 5765}, {"iscrowd": 0, "category_id": 1, "bbox": [136.0, 207.0, 39.0, 38.0], "area": 720, "segmentation": {"size": [512, 512], "counts": "UWT22m?2N2N2N2N2N2O1N2N3M2N2N2N2N1O000000000000000001O2N2N2O1N2N2N2N2N2N2N2N2N2N2NTYX5"}, "image_id": 382, "id": 5766}, {"iscrowd": 0, "category_id": 1, "bbox": [274.0, 223.0, 21.0, 22.0], "area": 177, "segmentation": {"size": [512, 512], "counts": "_WY41n?2N2N1O2N2N2M1000O100000O1002M3N2N2N1O2NmX\\3"}, "image_id": 
382, "id": 5767}, {"iscrowd": 0, "category_id": 1, "bbox": [278.0, 255.0, 48.0, 43.0], "area": 868, "segmentation": {"size": [512, 512], "counts": "lX[42m?2N2M3N1O2N2N2N2M3N2N1O2N2N2M10O10000000O0100000002N2M3N2N2N2N1N3N2N1O00000002M2O02N1N3N2N2N2Nlgl2"}, "image_id": 382, "id": 5768}, {"iscrowd": 0, "category_id": 1, "bbox": [241.0, 265.0, 19.0, 19.0], "area": 194, "segmentation": {"size": [512, 512], "counts": "ehh32l?3N2N1N3N2N2N1N10O300O0O2N2N2M3N1O2Mbgm3"}, "image_id": 382, "id": 5769}, {"iscrowd": 0, "category_id": 1, "bbox": [230.0, 291.0, 37.0, 35.0], "area": 676, "segmentation": {"size": [512, 512], "counts": "`Yc33l?2N1O4K3N20O1000N2M2O2N2M3N1O2N200O0100000O10M3N2N2M2O1O2N1N3N2N2M2O2N2MaVj3"}, "image_id": 382, "id": 5770}, {"iscrowd": 0, "category_id": 1, "bbox": [431.0, 297.0, 15.0, 16.0], "area": 124, "segmentation": {"size": [512, 512], "counts": "big61m?3N2N2N1O2N2M30N2N2N1N3N2N2NbfP1"}, "image_id": 382, "id": 5771}, {"iscrowd": 0, "category_id": 1, "bbox": [276.0, 323.0, 53.0, 48.0], "area": 1361, "segmentation": {"size": [512, 512], "counts": "lZZ42m?2N1N3N2N2N1N3N2N2N2M2O2N2N2M3N1O2N2M3N20O010000000O010O1N2NO01000002M3N1O2N2M3N2N1O2M3N2N2N1N3N2N2N1N3N]Uk2"}, "image_id": 382, "id": 5772}, {"iscrowd": 0, "category_id": 1, "bbox": [436.0, 354.0, 16.0, 16.0], "area": 133, "segmentation": {"size": [512, 512], "counts": "[[j61n?2M3N2N1O2M3N2O01M3N2N2N1N3N2Nidm0"}, "image_id": 382, "id": 5773}, {"iscrowd": 0, "category_id": 1, "bbox": [391.0, 356.0, 28.0, 32.0], "area": 280, "segmentation": {"size": [512, 512], "counts": "nkS61n?1O2N2M3N2N2NO10O100000O10O100000O10O100000O3N1O2N2N2MjT^1"}, "image_id": 382, "id": 5774}, {"iscrowd": 0, "category_id": 1, "bbox": [414.0, 360.0, 19.0, 21.0], "area": 192, "segmentation": {"size": [512, 512], "counts": "f[_62m?2N2N2M2O2LE`@=^?3O00O012N2N1O2M3O1N2N2McTW1"}, "image_id": 382, "id": 5775}, {"iscrowd": 0, "category_id": 1, "bbox": [441.0, 400.0, 13.0, 14.0], "area": 102, "segmentation": {"size": [512, 512], "counts": "jll61m?2O2M2N3N2M2000N3M3N1N3N^cl0"}, "image_id": 382, "id": 5776}, {"iscrowd": 0, "category_id": 1, "bbox": [358.0, 401.0, 55.0, 49.0], "area": 1366, "segmentation": {"size": [512, 512], "counts": "o\\c51n?2O1000O01O1N2a@JQ?8l@KQ?8m@JQ?8m@JQ?7m@LQ?c00O1000O100M3N1O2000000O1N1N3N000000000O3N02N2M3N1O2N2N2N2M2O2N2N2N2N2M2O2N2N2N2N1N3NoRa1"}, "image_id": 382, "id": 5777}, {"iscrowd": 0, "category_id": 1, "bbox": [411.0, 445.0, 16.0, 17.0], "area": 137, "segmentation": {"size": [512, 512], "counts": "Wn]62m?2N2N2N2N2N1N100002N2M3N1O2N2NoQZ1"}, "image_id": 382, "id": 5778}, {"iscrowd": 0, "category_id": 1, "bbox": [368.0, 454.0, 21.0, 25.0], "area": 269, "segmentation": {"size": [512, 512], "counts": "c^h52m?1O2M3N2N2N2N1N3N2N2N2000O1Kg@@[?>5N2F_@1c?M`@1i?O01[Qm1"}, "image_id": 382, "id": 5779}, {"iscrowd": 0, "category_id": 1, "bbox": [443.0, 456.0, 69.0, 56.0], "area": 1967, "segmentation": {"size": [512, 512], "counts": "mnm63l?2N2N2N1N3N2N2N2N2M2O2N2N2N2M3N1O2N200000O1000O100000O10O10000000O10O10000000O001O1O1O1O1O001O1O1O1O001O1O1O1O1O001O1O1O1O001O1O1O1O1O00"}, "image_id": 382, "id": 5780}, {"iscrowd": 0, "category_id": 1, "bbox": [121.0, 0.0, 155.0, 178.0], "area": 14325, "segmentation": {"size": [512, 512], "counts": 
"\\dl12l?3M2N3M2N3M2N3M2N3M2N3M2M4M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N2N3M2N3M2N3M2N3M2M4M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M201O001OO1N2N2N2N3L3N3M2N3M2N3M2N3M1O000O3N2N2N2N2N21O001O00DYD]Li;`3[D]Lg;a3[D]Lg;a3=N2N2N2N2N2N2N2N2M3N2N2N2N2N2N2N2N2N2N2N2M3N2N2N2N2N2N2N2N2N2N2M3N2N2N2N2N2N2N3M2N3M2M3N3M2N3M2N3M2N3M2Nloe3"}, "image_id": 383, "id": 5781}, {"iscrowd": 0, "category_id": 1, "bbox": [301.0, 16.0, 39.0, 50.0], "area": 977, "segmentation": {"size": [512, 512], "counts": "iaf41m?2N2N3M2M4M2N3M2N3L3N2N3M2N3M2M4M2N2N30O0N2N3L3N3M2M4M2N3L3N2N3L3N3M2N3L3N2NZoe2"}, "image_id": 383, "id": 5782}, {"iscrowd": 0, "category_id": 1, "bbox": [328.0, 58.0, 30.0, 26.0], "area": 438, "segmentation": {"size": [512, 512], "counts": "YRT51m?2N3L3N3M2M4N11O010O01O01O010O01O010O01O010O01ON3M2M4M2N3Lom\\2"}, "image_id": 383, "id": 5783}, {"iscrowd": 0, "category_id": 1, "bbox": [78.0, 76.0, 20.0, 30.0], "area": 264, "segmentation": {"size": [512, 512], "counts": "TSW13k?2N3M2N3L3N3M2N00000001N3N3M2N3M2N2N3Mcm^6"}, "image_id": 383, "id": 5784}, {"iscrowd": 0, "category_id": 1, "bbox": [199.0, 107.0, 82.0, 128.0], "area": 3893, "segmentation": {"size": [512, 512], "counts": "UfS31m?3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3N1N3MPObBB\\=>gBBV=>lBBR=>QCBl<>VCBh<>[CBb<>`CB^<>eCBX<>jCBT<>oCBn;>TDBj;>YDBd;>^DB`;>cDB[;=gDCY;;jDEV;8lDHT;6oDJQ;3QEMo:3REMn:3QEMo:3REMn:3QEMo:2SENm:OUE1k:MXE3h:JZE6f:H]E8c:E_E;a:CbE=^:@dE`0\\:^OgEb0Y:[OiEe0W:YOlEg0T:VOnEj0R:TOQFl0o9QOSFo0`EUA;l>BWA>h>@ZA`0T?0O01M2N3M2N3M2N3MVYc3"}, "image_id": 383, "id": 5785}, {"iscrowd": 0, "category_id": 1, "bbox": [328.0, 133.0, 86.0, 97.0], "area": 4332, "segmentation": {"size": [512, 512], "counts": "cVT51m?2M4M2N3L3N3M2PO_O_Bc0^=@_Bd0^=_O_Bc0_=_O_Bd0^=_O^Bd0_=A]Bb0`=B\\B`0b=DZB?b=Q1N2N3M2M4M2010O00010O010O001L3N2N001O210O01O010O01O010O010O01O010OROaB[O`=a0cB_O]=?fBAZ=C`B885X=@dB878T=^OgB78;R=[OiB0IL?i0ni0V>1O010O010O010O00010O010O0101N0N3M2N2M4M210O010O0M3N3M2Mci`1"}, "image_id": 383, "id": 5786}, {"iscrowd": 0, "category_id": 1, "bbox": [498.0, 350.0, 14.0, 48.0], "area": 384, "segmentation": {"size": [512, 512], "counts": "V\\i72l?2N3L3N3FDl@>`>EhAOF?a>EdAd0\\>@^Ac0`>?00O2O001QE"}, "image_id": 383, "id": 5787}, {"iscrowd": 0, "category_id": 1, "bbox": [470.0, 401.0, 42.0, 66.0], "area": 1948, "segmentation": {"size": [512, 512], "counts": "S^[73\\?OQA4l>NRA4l>OQA4l>NRA4l>OQA4l>a0M3N3M2N3M2N3M2M4M201M2O20O010O0010O010O010O01O0N2]NlAX1W>eNkAY1W>eNlAZ1]>01O010O010O010OkB"}, "image_id": 383, "id": 5788}, {"iscrowd": 0, "category_id": 1, "bbox": [2.0, 495.0, 40.0, 15.0], "area": 449, "segmentation": {"size": [512, 512], "counts": "`_1;d?100001O000000000001O0001O00000000000001O01O00000000000000010O0000000000000000]`Z7"}, "image_id": 383, "id": 5789}, {"iscrowd": 0, "category_id": 1, "bbox": [214.0, 503.0, 21.0, 9.0], "area": 117, "segmentation": {"size": [512, 512], "counts": "m_[33m?000O10000O10000O10000O10000O10000O13M3MSPZ4"}, "image_id": 383, "id": 5790}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 0.0, 23.0, 12.0], "area": 148, "segmentation": {"size": [512, 512], "counts": "PPR61o?1O1O2N1O1O1O1O1O1O1OO1O1O1O100O1O1O1O1O1O1OQ`b1"}, "image_id": 384, "id": 5791}, {"iscrowd": 0, "category_id": 1, "bbox": [423.0, 0.0, 55.0, 54.0], "area": 1651, "segmentation": {"size": [512, 512], "counts": "h`c6;b?F_@<_?4N3N1N2N2N2N2N2N2N2N2N2N2N2O1N3M00000000000001O1O100O1O1O1F[AXOf>g0\\AWOe>i0\\AUOe>j0]ATOd>k09O1O1O1O1O1O1O1O1O1O1O1O100O1Ca@8`?Ha@6`?Ib@5_?Jc@4f?Nio`0"}, "image_id": 384, "id": 5792}, {"iscrowd": 0, 
"category_id": 1, "bbox": [316.0, 7.0, 65.0, 46.0], "area": 1345, "segmentation": {"size": [512, 512], "counts": "fPn41o?1N2N2N2N2N2N2N200O1N3M2N2000N20000_Og@9Z?Gf@7\\?Id@10L]?4c@Na?4_@Ja?822N2N2N2N1O2N2N2N2N2N2000000001O000001N1N2N2N2O1N2N02N2N2CUABm><=N2N2N2N2N2N2OV_Q2"}, "image_id": 384, "id": 5793}, {"iscrowd": 0, "category_id": 1, "bbox": [475.0, 12.0, 16.0, 17.0], "area": 110, "segmentation": {"size": [512, 512], "counts": "``]72m?2N2N2N21O0000000001OO1N2N2N2NY_:"}, "image_id": 384, "id": 5794}, {"iscrowd": 0, "category_id": 1, "bbox": [378.0, 36.0, 29.0, 28.0], "area": 419, "segmentation": {"size": [512, 512], "counts": "aQm51o?1N2N2N2N2N2N2N2N2N2N2N2N2N2O01N2N2N2N2N2N2N2N2N2N2N3M2N_^d1"}, "image_id": 384, "id": 5795}, {"iscrowd": 0, "category_id": 1, "bbox": [411.0, 42.0, 99.0, 111.0], "area": 3728, "segmentation": {"size": [512, 512], "counts": "hc]62m?2N2O1N2N2N2N2N2N2N2N3M2N2N2N2N2N2N2O1N2N2N2N2N2N2O2O000000N3M2N2N2N2N2N2N2N2N2N2N1O0001O00000mNUORCk0n]=DcB:]=HcB6]=LdB1\\=1dBM\\=5dBI\\=9bBG^=;`BF_=<_BDa=>]BBd=?ZBAf=a0XB_Oh=c0VB]Oj=e0TB[Ol=\\10000000000001O2APBkNQ>T1QBkNn=U1TBiNl=W1VBgNj=Y1XBeNh=[1ZBcNf=]1\\BaNf=T1oAROa>l0aAROa>l0aAROa>6[A;6^O`>5\\A;P?CRA;P?CRA;[?O1N2N2N2N2N2Nnm0"}, "image_id": 384, "id": 5796}, {"iscrowd": 0, "category_id": 1, "bbox": [367.0, 56.0, 16.0, 17.0], "area": 132, "segmentation": {"size": [512, 512], "counts": "lag52m?2N2N2N3N100000000001M2O1N2N2Nn]P2"}, "image_id": 384, "id": 5797}, {"iscrowd": 0, "category_id": 1, "bbox": [57.0, 60.0, 53.0, 37.0], "area": 1519, "segmentation": {"size": [512, 512], "counts": "hbl02h?6I7J6K6O00000000001O0001ON2M30001O01O00000004L01O0000000N2O101O0001O00000001O0001O000001O0001O00K5I7I8ITnX6"}, "image_id": 384, "id": 5798}, {"iscrowd": 0, "category_id": 1, "bbox": [298.0, 65.0, 38.0, 56.0], "area": 1551, "segmentation": {"size": [512, 512], "counts": "_Se46e?5K5K5K5K5K6J5K5N2000001O0L41O000001O01O000001O01O0000001M2J6J6J6K6N1Fk@IU?1QAOo>LVA3e\\h2"}, "image_id": 384, "id": 5799}, {"iscrowd": 0, "category_id": 1, "bbox": [425.0, 67.0, 11.0, 10.0], "area": 60, "segmentation": {"size": [512, 512], "counts": "Wbd61n?2O1N2N2N20O1N1O2N2NjmU1"}, "image_id": 384, "id": 5800}, {"iscrowd": 0, "category_id": 1, "bbox": [135.0, 96.0, 55.0, 59.0], "area": 1631, "segmentation": {"size": [512, 512], "counts": "dcS22m?2N2N2N3M2N2N2N2N2N2N2N2N2N2N2N2N2VAlNe>Y1N3M2O10N2N20000000000000000000010O0O1N2N2N2N2N2N2N2N2O1N2N2N3M2N2N2N2H\\@1f?M\\@1b[Q5"}, "image_id": 384, "id": 5801}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 102.0, 28.0, 50.0], "area": 747, "segmentation": {"size": [512, 512], "counts": "Z3^1a>00000002N2N2O1N2N2N2N2N2N2N2N3M2N2N2N2N2N2N2N2N2N2O1NSla7"}, "image_id": 384, "id": 5802}, {"iscrowd": 0, "category_id": 1, "bbox": [78.0, 107.0, 9.0, 8.0], "area": 58, "segmentation": {"size": [512, 512], "counts": "^SW14i?3000001O000001Ij\\d6"}, "image_id": 384, "id": 5803}, {"iscrowd": 0, "category_id": 1, "bbox": [99.0, 117.0, 7.0, 7.0], "area": 28, "segmentation": {"size": [512, 512], "counts": "hca11n?2N2N2OO2N2NZlZ6"}, "image_id": 384, "id": 5804}, {"iscrowd": 0, "category_id": 1, "bbox": [11.0, 133.0, 28.0, 27.0], "area": 393, "segmentation": {"size": [512, 512], "counts": "ad51n?2N2N2N2N2N2N2N2N2N2N2O1N3N01N2N2N2N2N2N2N2N2N2O1N3M2N^[\\7"}, "image_id": 384, "id": 5805}, {"iscrowd": 0, "category_id": 1, "bbox": [82.0, 133.0, 25.0, 24.0], "area": 295, "segmentation": {"size": [512, 512], "counts": "bTY12n?2M2N2N2N2N2N2N2N10O000001O0003M2N2N2N2N2N2O1N2Nb[Z6"}, "image_id": 384, "id": 5806}, {"iscrowd": 0, "category_id": 1, "bbox": 
[0.0, 152.0, 75.0, 74.0], "area": 2470, "segmentation": {"size": [512, 512], "counts": "Y63l?2N2N2N2N2N2KCd@?Z?5N2N2N2N3M2N2N20000N2N2N2N2N1O0000000000000001O0001O00000000000000000000000000000001O00002O1N2N3M2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2O1NbZj6"}, "image_id": 384, "id": 5807}, {"iscrowd": 0, "category_id": 1, "bbox": [109.0, 157.0, 37.0, 37.0], "area": 687, "segmentation": {"size": [512, 512], "counts": "\\ef11n?2N2O1N2N2N2N2N2N2N2N2N2N2N2N2N2O1000000000N2N2N2N2N2N2N2N2N2O1K_@Hc?65N2N2Najf5"}, "image_id": 384, "id": 5808}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 163.0, 13.0, 20.0], "area": 142, "segmentation": {"size": [512, 512], "counts": "Y5>a?000000000002O1N3M2N2N2NgZi7"}, "image_id": 384, "id": 5809}, {"iscrowd": 0, "category_id": 1, "bbox": [328.0, 164.0, 41.0, 40.0], "area": 1303, "segmentation": {"size": [512, 512], "counts": "dUT57i?>B3MODEUA;k>=0000O100000000000M3000000000000O01000000000000000000000000000O013M?A>BlYW2"}, "image_id": 384, "id": 5810}, {"iscrowd": 0, "category_id": 1, "bbox": [186.0, 177.0, 35.0, 54.0], "area": 1433, "segmentation": {"size": [512, 512], "counts": "UVm26j?0f@3a>NSA=n>;I6F:K51O000000000001O01O00000000000000010O00000F:F:L40000N3E:EfZa4"}, "image_id": 384, "id": 5811}, {"iscrowd": 0, "category_id": 1, "bbox": [58.0, 182.0, 27.0, 30.0], "area": 412, "segmentation": {"size": [512, 512], "counts": "RVm02m?2N2N2N2N2a@EV?=h@EV?=h@EV?d0N2N1O0002N2O1N2N2N2N2N2N2N2N2N2N2N2NmYe6"}, "image_id": 384, "id": 5812}, {"iscrowd": 0, "category_id": 1, "bbox": [401.0, 187.0, 22.0, 22.0], "area": 237, "segmentation": {"size": [512, 512], "counts": "WfX62m?2N2N2N2O1N3M2N000000000001O2N1O2N2N2N2N2NPZ\\1"}, "image_id": 384, "id": 5813}, {"iscrowd": 0, "category_id": 1, "bbox": [167.0, 193.0, 9.0, 10.0], "area": 64, "segmentation": {"size": [512, 512], "counts": "Yfc21g?81O0001O000001ISjW5"}, "image_id": 384, "id": 5814}, {"iscrowd": 0, "category_id": 1, "bbox": [46.0, 202.0, 130.0, 130.0], "area": 5789, "segmentation": {"size": [512, 512], "counts": "kWg01n?3M2N2N2N2N2N2N2N2N2N2O1N2N3M2N2N2N2N2N2N2N2N0001O00000000000001O000000000001O000000000001O00000000000001O2N2NWCjNl:S1TEoNl:P1SEROm:n0QETOo:l0oDVOQ;j0mDXOS;h0kDZOU;f0iD\\OW;d0gD^OY;b0fD_OZ;a0dDA\\;?bDC_;<_DFa;;\\DGd;9ZDJe;6YDLg;4WDNi;1VD1j;QOYCf0k0;l;mN[Ch0g0=n;iN]Cj0c0?Xd0YA_Oh>`0UACk>>QAFn>f010O00010O00010OO2M21N1N2N30O01O01O01O01O010O00010O01O01O01O01O010O0N2M4M2M3M4L3N2M4L3M4MgXW2"}, "image_id": 384, "id": 5819}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 235.0, 9.0, 10.0], "area": 49, "segmentation": {"size": [512, 512], "counts": "_WR62l?3N2N2000N2N2N1ObXi1"}, "image_id": 384, "id": 5820}, {"iscrowd": 0, "category_id": 1, "bbox": [382.0, 247.0, 60.0, 65.0], "area": 1845, "segmentation": {"size": [512, 512], "counts": "SXo51n?2N2N2N2N2N2g@Dk>>SADk>>SADk>>SADl>=RAEl>=RAEl>i0N200O100000000000000YAkNb>Z10000N2N2N2N2001O01O0000N2N2N2N2N2N2N2N2N2N2N2N2N2O1N2N2N3M2N2N2N2N2N2N2NSgR1"}, "image_id": 384, "id": 5821}, {"iscrowd": 0, "category_id": 1, "bbox": [87.0, 263.0, 28.0, 29.0], "area": 404, "segmentation": {"size": [512, 512], "counts": "ch[11n?2N2N2N2N3M2N2N2O1N2N2N2N3N10O2M2N2Kg@@[?>5N2N2O1N2N2N2N2N\\WV6"}, "image_id": 384, "id": 5822}, {"iscrowd": 0, "category_id": 1, "bbox": [293.0, 274.0, 56.0, 57.0], "area": 1584, "segmentation": {"size": [512, 512], "counts": "mib41d?0e@2Y?0d@3Y?Oe@3Z?Od@3Y?k05N2N2N2N2Af@6[?Hg@6[?Hg@6c?N2N2NTUd3"}, "image_id": 384, "id": 5828}, {"iscrowd": 0, "category_id": 1, "bbox": [338.0, 328.0, 29.0, 30.0], "area": 440, "segmentation": {"size": [512, 512], "counts": 
"fZY52m?2N2N2N2N2N2N2N2N3M2N2N2N2N02N2N2N2N2N2N2N2N2N2N2N2N2N2N[UX2"}, "image_id": 384, "id": 5829}, {"iscrowd": 0, "category_id": 1, "bbox": [492.0, 331.0, 20.0, 47.0], "area": 940, "segmentation": {"size": [512, 512], "counts": "[Zf7_1a>0000000000000000000000000000000000000eE"}, "image_id": 384, "id": 5830}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 356.0, 48.0, 50.0], "area": 1222, "segmentation": {"size": [512, 512], "counts": "o;9f?2N2N2N2N2N2N2N2N2N2N2N2N2N2N1O000000000000010O0000000001O2N3M2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2OYdW7"}, "image_id": 384, "id": 5831}, {"iscrowd": 0, "category_id": 1, "bbox": [270.0, 359.0, 27.0, 26.0], "area": 388, "segmentation": {"size": [512, 512], "counts": "h[W42f?O`@3^?O`@3^?O`@3^?9N2N2N00000002N2O100001O0000O1N3M2N2O1N2N2N2NYT[3"}, "image_id": 384, "id": 5832}, {"iscrowd": 0, "category_id": 1, "bbox": [310.0, 360.0, 23.0, 22.0], "area": 268, "segmentation": {"size": [512, 512], "counts": "`[k42m?2N2N2N2N2O1N2N2N2O100000O1N2N2N2N2N2N2N2O1N]Ti2"}, "image_id": 384, "id": 5833}, {"iscrowd": 0, "category_id": 1, "bbox": [410.0, 368.0, 25.0, 37.0], "area": 395, "segmentation": {"size": [512, 512], "counts": "c[]61n?3N2M3N3M2M3a@AY?e0O20O000010M20N2NO02O2M3N3M2M3N3L3NaSV1"}, "image_id": 384, "id": 5834}, {"iscrowd": 0, "category_id": 1, "bbox": [186.0, 372.0, 45.0, 47.0], "area": 1185, "segmentation": {"size": [512, 512], "counts": "\\\\m22m?2N2O1N2N2N3JCd@?Z?5N2N2N2N2N2N2N2O1N2N2N2N2N000002N2N2N2N10O2N2N03HWATOk>j0WATOk>j07N2O1N2N2N2Bd@5^?Id@5^?Id@5e?N2NgS\\4"}, "image_id": 384, "id": 5835}, {"iscrowd": 0, "category_id": 1, "bbox": [472.0, 383.0, 9.0, 8.0], "area": 61, "segmentation": {"size": [512, 512], "counts": "P\\\\74l?3L10000000O103MnS?"}, "image_id": 384, "id": 5836}, {"iscrowd": 0, "category_id": 1, "bbox": [500.0, 399.0, 12.0, 32.0], "area": 342, "segmentation": {"size": [512, 512], "counts": "T]j7:Z?ATAa0j>ATAb0i>@UAb0i>@UA`0k>:0000YAoN`>Q1`AQO^>o0bASO\\>o0bASO\\>o0bASO\\>Z1N2N2N2N2O101O00000000O2M2O1N2N2N2N2N2N2N2N2N2N2O1001ON2N2N3M1O000000000000001O2N2N2N2O1N2N2N2N2N2N2N2NYR[3"}, "image_id": 384, "id": 5839}, {"iscrowd": 0, "category_id": 1, "bbox": [110.0, 420.0, 73.0, 72.0], "area": 2288, "segmentation": {"size": [512, 512], "counts": "P^g12m?2N2N2N2N2N2N2N2N2N2N2O1N3M2N1O00000001010N2N2N2N2N3`AgNT>[1jAgNT>[1jAgNT>[1jAgNT>d1N2N2O2O000000aNRBk0n=SOTBm0l=QOVBo0j=oNXBQ1h=mN[BQ1f=mN\\BQ1f=mN\\BQ1f=mN\\BQ1f=mN\\BQ1[>N2N2N000000000001O000001O002N2L4N2N2N2N2N3M2N2N2O1N2NeQT5"}, "image_id": 384, "id": 5840}, {"iscrowd": 0, "category_id": 1, "bbox": [382.0, 425.0, 51.0, 51.0], "area": 1274, "segmentation": {"size": [512, 512], "counts": "U^o51n?2N3N1N2N2N2N2N2N2N2N2N2N2N2N2N2N2N3M2O1N000000000000001O0002N2N2N2N2N2N2N2N2N2N2N3M2N2N2O1N2N2N2N2NSRW1"}, "image_id": 384, "id": 5841}, {"iscrowd": 0, "category_id": 1, "bbox": [346.0, 427.0, 34.0, 33.0], "area": 568, "segmentation": {"size": [512, 512], "counts": "k]]51n?2N2N3N1N2N2N2N2N3M2O1N2N2N2N00001O01O2N2N2N3M2O1N2N2N2N3M2N2O1N2NVbQ2"}, "image_id": 384, "id": 5842}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 428.0, 4.0, 6.0], "area": 14, "segmentation": {"size": [512, 512], "counts": "\\=6k?N2O1Nbbm7"}, "image_id": 384, "id": 5843}, {"iscrowd": 0, "category_id": 1, "bbox": [153.0, 465.0, 73.0, 47.0], "area": 1695, "segmentation": {"size": [512, 512], "counts": "^o\\21n?2N2O1N2N2N3M2N2N2N2N2N2N2N2N2N01O0001O020N2N2N2N2N1O100O1O1O1O1O1O11O1O1XO]A1e>M\\A2e>L]A3d>K^A4c>J_A5b>I`A6a>HaA7`>GbA8_>FcA9^>EdA:]>DeA;Q?O1OO1O1O1O1O1O1O1O100O1001O1O1O1O1O1O1O1N2N3M2N2OV`^4"}, "image_id": 384, "id": 5844}, 
{"iscrowd": 0, "category_id": 1, "bbox": [344.0, 466.0, 51.0, 46.0], "area": 1283, "segmentation": {"size": [512, 512], "counts": "^_\\51n?2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2O0O1O1O1O1O1O1O1O1O1O1O12N1O1O1O1O1FYAYOh>e0ZAYOh>e0ZAYOh>e0ZAYOh>e0;N2N2N2N2O1N2N2N2N2N2NiPj1"}, "image_id": 384, "id": 5845}, {"iscrowd": 0, "category_id": 1, "bbox": [75.0, 475.0, 61.0, 37.0], "area": 1195, "segmentation": {"size": [512, 512], "counts": "ooU11n?1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O11O2N1N2N2O10000O1O1O1N2N2N2N2N2N2N2N2N2N2N3M^`k5"}, "image_id": 384, "id": 5846}, {"iscrowd": 0, "category_id": 1, "bbox": [137.0, 493.0, 19.0, 19.0], "area": 184, "segmentation": {"size": [512, 512], "counts": "boT22m?2N2N2N3M2O1000000001O0O1O1O1O1N2N2NW`a5"}, "image_id": 384, "id": 5847}, {"iscrowd": 0, "category_id": 1, "bbox": [319.0, 495.0, 10.0, 10.0], "area": 56, "segmentation": {"size": [512, 512], "counts": "boo42m?2N2N2O1000N2N2O1N]Pk2"}, "image_id": 384, "id": 5848}, {"iscrowd": 0, "category_id": 1, "bbox": [277.0, 497.0, 22.0, 15.0], "area": 190, "segmentation": {"size": [512, 512], "counts": "ioZ42m?2N2N2X@Id?:O1O1O1O11O1O1O1O1O1O1O1O1O1O1O1O1ORPZ3"}, "image_id": 384, "id": 5849}, {"iscrowd": 0, "category_id": 1, "bbox": [338.0, 510.0, 4.0, 2.0], "area": 6, "segmentation": {"size": [512, 512], "counts": "o_Y51n?1001OQ`d2"}, "image_id": 384, "id": 5850}, {"iscrowd": 0, "category_id": 1, "bbox": [315.0, 511.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "oom41PPR3"}, "image_id": 384, "id": 5851}, {"iscrowd": 0, "category_id": 1, "bbox": [63.0, 0.0, 52.0, 36.0], "area": 1012, "segmentation": {"size": [512, 512], "counts": "[`o02m?2N2N2N2N2N2N2N2N2N2N3N1O1O1O1O1O1O1O1O1O1O1OO1O1O1O1O1O1O1O1O1O1O1O1Hj@EW?:k@EU?:n@CS?=8O1O1O1O1O1O1O1O1O1O1O1O1OQ`V6"}, "image_id": 386, "id": 5852}, {"iscrowd": 0, "category_id": 1, "bbox": [121.0, 0.0, 2.0, 1.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "P`l11o?0P`R6"}, "image_id": 386, "id": 5853}, {"iscrowd": 0, "category_id": 1, "bbox": [149.0, 0.0, 2.0, 2.0], "area": 3, "segmentation": {"size": [512, 512], "counts": "P`Z22n?OQ`d5"}, "image_id": 386, "id": 5854}, {"iscrowd": 0, "category_id": 1, "bbox": [179.0, 0.0, 41.0, 26.0], "area": 633, "segmentation": {"size": [512, 512], "counts": "P`i21o?1X@0^?1`@1_?0_@2`?9N1O1O1O1O1O1O1O1O1O1O1O00O1O100O1O1O1O1O1O1O1O1O1O1O1O100O1O1L[@Lf?34O1O1O1OQPb4"}, "image_id": 386, "id": 5855}, {"iscrowd": 0, "category_id": 1, "bbox": [272.0, 0.0, 33.0, 23.0], "area": 466, "segmentation": {"size": [512, 512], "counts": "VPX42m?2N2N2N3M2N2O1O1O1O1O1O1O1O1O00O1O1O1O1O1O1O1O1O1O1O1O2O1N2N2N2Nl_W3"}, "image_id": 386, "id": 5856}, {"iscrowd": 0, "category_id": 1, "bbox": [305.0, 0.0, 55.0, 46.0], "area": 1379, "segmentation": {"size": [512, 512], "counts": "Y`h41n?2N2N2N2N2N2N2e@BQ?`0m@CP?`0m@BR?g0O1000000001O0000000000N2N2N2N3M2N00001O000001O1O1O1O1O2N2N2N2N2N2N3M2N2N2O1N2N2N2N2N2N2N_o[2"}, "image_id": 386, "id": 5857}, {"iscrowd": 0, "category_id": 1, "bbox": [362.0, 0.0, 17.0, 9.0], "area": 89, "segmentation": {"size": [512, 512], "counts": "PPe52n?1O1O1O1O1O1O1O00O1O1O1O1O1O1O1OQ`R2"}, "image_id": 386, "id": 5858}, {"iscrowd": 0, "category_id": 1, "bbox": [114.0, 8.0, 15.0, 16.0], "area": 120, "segmentation": {"size": [512, 512], "counts": "^Pi11n?2N2N2N2N2N201O00N2O2M2N2N2N`_o5"}, "image_id": 386, "id": 5859}, {"iscrowd": 0, "category_id": 1, "bbox": [105.0, 18.0, 15.0, 16.0], "area": 125, "segmentation": {"size": [512, 512], "counts": 
"g`d13m?1N2N2N2N2N21O000O1N2N2N2N2NVoS6"}, "image_id": 386, "id": 5860}, {"iscrowd": 0, "category_id": 1, "bbox": [272.0, 34.0, 50.0, 48.0], "area": 1306, "segmentation": {"size": [512, 512], "counts": "bQX43l?2N2[@K\\?7b@K\\?7b@K\\?>O1N2N2N3M2O10000000000O1N2N2N2N2N1O000001O000000002N2N2N2N2N2O1N3M2N2N2N2N2N2N2N2N2N2N2N[nn2"}, "image_id": 386, "id": 5861}, {"iscrowd": 0, "category_id": 1, "bbox": [7.0, 36.0, 69.0, 68.0], "area": 2164, "segmentation": {"size": [512, 512], "counts": "^b32m?2O1N2N2N2N2N2N2N2N2N2N3M2N2N2O1N2N2N2N2N2N2N2N000000000000010O000000001O2N0000LdAeN\\>[1fAcNZ>]14000002O1N2N2N2N2N2N2O100O1N2N2Jl@]OV?a07M2N2N2N2N2N2N2O1NUni6"}, "image_id": 386, "id": 5862}, {"iscrowd": 0, "category_id": 1, "bbox": [454.0, 44.0, 48.0, 47.0], "area": 1156, "segmentation": {"size": [512, 512], "counts": "VRS71n?2N2N2N2N2N2N2N2N2N2N3M2N2N2N2N2N2N2N2N10O0000000000001O2N2N2N2N20000N2N2N2Jj@@W?>k@@W?>7N2O1N2N2N3M2NQn4"}, "image_id": 386, "id": 5863}, {"iscrowd": 0, "category_id": 1, "bbox": [242.0, 54.0, 10.0, 11.0], "area": 58, "segmentation": {"size": [512, 512], "counts": "jQi32m?2N2N2N2000N2N3M2NUnQ4"}, "image_id": 386, "id": 5864}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 58.0, 32.0, 31.0], "area": 516, "segmentation": {"size": [512, 512], "counts": "Ybo21o?1N2N3M2N2N2N2O1N3M2N2N2N2N10O000000101N2N3M2N2N2N2O1N3M2N2N2Ni]`4"}, "image_id": 386, "id": 5865}, {"iscrowd": 0, "category_id": 1, "bbox": [234.0, 66.0, 50.0, 52.0], "area": 1434, "segmentation": {"size": [512, 512], "counts": "fRe33l?2[@L[?6c@L[?6c@L[?6c@L[?>O1N2IYORAi0l>YORAi0l>7O1N2N2N2N2N2N2O101O00O1N2N2N2N2OO000001O2N2N2N2N2N2N2N2N2N2N3M2N2N2N2N2O1N2N2NVma3"}, "image_id": 386, "id": 5866}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 88.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "hbo72V="}, "image_id": 386, "id": 5867}, {"iscrowd": 0, "category_id": 1, "bbox": [450.0, 91.0, 35.0, 35.0], "area": 621, "segmentation": {"size": [512, 512], "counts": "_SQ71n?2N2N2N2N2N2N2N2N2N2N2N2N2N2N01O0001O000002N2N2N2N2N2N2N2N2N2N2N2N2Ni\\="}, "image_id": 386, "id": 5868}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 94.0, 51.0, 58.0], "area": 1437, "segmentation": {"size": [512, 512], "counts": "fcm22m?2N2N2N2N2N2N2N2N2N2N2O1N2N2N3M2N2N2N2N2N2N2N2N2N2O1000000001O0O1iNdAg0^>WOdAg0^>WOdAg0^>WOdAg0^>WOdAg0^>WOdAg0l>N2N2N2O1N2N2N3M2N2N2N2NkkX4"}, "image_id": 386, "id": 5869}, {"iscrowd": 0, "category_id": 1, "bbox": [397.0, 102.0, 48.0, 49.0], "area": 1174, "segmentation": {"size": [512, 512], "counts": "PdV61n?2N2N2N2N2N2N2N2N2N2N2N2O1N2N2N2N3M2N2N2N2N000000000002N2N2N20000N2ISAXOo>f0SAXOP?e07N2N2N2N2N2O1N2N2N2N2NV\\Q1"}, "image_id": 386, "id": 5870}, {"iscrowd": 0, "category_id": 1, "bbox": [473.0, 118.0, 28.0, 28.0], "area": 426, "segmentation": {"size": [512, 512], "counts": "Vd\\71m?3N1O2M3N2M2O2N2M2O2M3O10O1000O01O1M2O2N2M3N1N3N2N1N3NP\\5"}, "image_id": 386, "id": 5871}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 128.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "P41oko7"}, "image_id": 386, "id": 5872}, {"iscrowd": 0, "category_id": 1, "bbox": [148.0, 128.0, 59.0, 57.0], "area": 1628, "segmentation": {"size": [512, 512], "counts": "lTZ21n?2N2N2N2N2N2N2N2N2O1N2N2N2N2N2N2N2N3M2N2N2N1O001O00000001O001O2N3M2N2O1000000001O0000^OTAOl>OVA1j>MXA3h>LYA4g>J[A6e>H]A8c>F_A:a>DaA<`>AbA?P?N2N2N2N2N2N3MaZh4"}, "image_id": 386, "id": 5873}, {"iscrowd": 0, "category_id": 1, "bbox": [132.0, 132.0, 14.0, 15.0], "area": 107, "segmentation": {"size": [512, 512], "counts": 
"XTR21n?2O1N2V@Kf?9N20000000O1N2N2N3Mdkf5"}, "image_id": 386, "id": 5874}, {"iscrowd": 0, "category_id": 1, "bbox": [358.0, 141.0, 47.0, 47.0], "area": 1171, "segmentation": {"size": [512, 512], "counts": "UUc52m?2N2N2N2N2N2N3M2N2N2O1N2N2N2N2N2N2N2N2N2N1O00000001O2N2N2N20000N2N2JPAYOR?f0o@XOS?f05N2N2N2N2N2N2N2N3M2NoZe1"}, "image_id": 386, "id": 5875}, {"iscrowd": 0, "category_id": 1, "bbox": [466.0, 141.0, 24.0, 23.0], "area": 287, "segmentation": {"size": [512, 512], "counts": "fTY72m?2N2M3N1O2M3N1010000O010000O01000N1N3N2N2M2O2MZk:"}, "image_id": 386, "id": 5876}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 152.0, 3.0, 6.0], "area": 12, "segmentation": {"size": [512, 512], "counts": "h46k?N2NW[n7"}, "image_id": 386, "id": 5877}, {"iscrowd": 0, "category_id": 1, "bbox": [426.0, 152.0, 11.0, 11.0], "area": 67, "segmentation": {"size": [512, 512], "counts": "lTe61n?2N2N2N2O1000O1N2M3NT[U1"}, "image_id": 386, "id": 5878}, {"iscrowd": 0, "category_id": 1, "bbox": [110.0, 156.0, 51.0, 60.0], "area": 1434, "segmentation": {"size": [512, 512], "counts": "oUg13l?2O1N2N2N2N2N2N2N2N2N2N2^OXOiAk0T>WOjAk0T>WOjAk0T>WOkAj0S>XOkAj0S>XOkAj0S>YOjAi0T>c0000N2N2N2N2N2N2N2N3M2N2N2O1N2N2N2N2N2N2N2N2N3M2N2N2N2O1N2N2NUZ_5"}, "image_id": 386, "id": 5879}, {"iscrowd": 0, "category_id": 1, "bbox": [409.0, 164.0, 24.0, 25.0], "area": 291, "segmentation": {"size": [512, 512], "counts": "]e\\62m?2N2N2N2N2N2N2N2N30O0000000O1N2N2N2N2N2N2N2N2N^ZW1"}, "image_id": 386, "id": 5880}, {"iscrowd": 0, "category_id": 1, "bbox": [320.0, 176.0, 48.0, 48.0], "area": 1170, "segmentation": {"size": [512, 512], "counts": "YVP51n?3M2O1N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N0000000002N2N2N20000N2N2JRAWOP?g06O1N2N2N2N2N2N2N2N2N2N2NliW2"}, "image_id": 386, "id": 5881}, {"iscrowd": 0, "category_id": 1, "bbox": [74.0, 187.0, 19.0, 17.0], "area": 171, "segmentation": {"size": [512, 512], "counts": "PVU11n?2N3U@Kg?8N2N20001NO000000101N2N2N2N3M2OnYa6"}, "image_id": 386, "id": 5882}, {"iscrowd": 0, "category_id": 1, "bbox": [150.0, 189.0, 32.0, 32.0], "area": 518, "segmentation": {"size": [512, 512], "counts": "[V[21n?2O1N2N2N2N2N2N2N2N2N2N2N2N3M2O10N2N2N2N2O1N3M2N2N2N2N2N2N2N2NdiT5"}, "image_id": 386, "id": 5883}, {"iscrowd": 0, "category_id": 1, "bbox": [382.0, 190.0, 15.0, 15.0], "area": 113, "segmentation": {"size": [512, 512], "counts": "VVo51n?2N2O1N3M2N1O000002N2N2N2N2NnYi1"}, "image_id": 386, "id": 5884}, {"iscrowd": 0, "category_id": 1, "bbox": [70.0, 202.0, 56.0, 59.0], "area": 1625, "segmentation": {"size": [512, 512], "counts": "UWS12m?2N2N2N2N2N2N2N2O1N2N2N3M2N2N2N2N2N2N2N2N2N10O01O0000000001O2N2N200000001O0000000_OWAKh>3[ALe>2]ANc>0_A0a>NaA2_>LcA4]>JeA6[>HgA8Y>FiA9Y>DiA:Y>DiA:P?N2N2N3MThP6"}, "image_id": 386, "id": 5885}, {"iscrowd": 0, "category_id": 1, "bbox": [371.0, 203.0, 24.0, 24.0], "area": 292, "segmentation": {"size": [512, 512], "counts": "cfi52m?2O1N2N2N2N2N2N2N2O1000000001N1N2N2N2N2N2N2N2NXYj1"}, "image_id": 386, "id": 5886}, {"iscrowd": 0, "category_id": 1, "bbox": [483.0, 208.0, 29.0, 32.0], "area": 493, "segmentation": {"size": [512, 512], "counts": "Sga71n?1O2M3N2N1N3N2N2M2O2N2M3N1O2N2000N1N3N2M3N1O2M3N2N1N3N2NWI"}, "image_id": 386, "id": 5887}, {"iscrowd": 0, "category_id": 1, "bbox": [30.0, 211.0, 24.0, 25.0], "area": 292, "segmentation": {"size": [512, 512], "counts": "kV?2n?1N2N2N2N2N2N2N2N3O0000000000N2N2N2N2N2N2N2N2NPiT7"}, "image_id": 386, "id": 5888}, {"iscrowd": 0, "category_id": 1, "bbox": [283.0, 217.0, 53.0, 53.0], "area": 1327, "segmentation": {"size": [512, 512], "counts": 
"\\g]41n?2N2O1N2N2N2N2N2N2N2N2N2N3M2N2N2N2N2N2N2O11O000000000000000000000N2N2N2N2N2N2N2O1N2N3M2N2N2N2N2N2N2N2N2NWhg2"}, "image_id": 386, "id": 5889}, {"iscrowd": 0, "category_id": 1, "bbox": [345.0, 222.0, 18.0, 17.0], "area": 165, "segmentation": {"size": [512, 512], "counts": "Wg\\51n?2KOX@3f?5N2N2N2000O1O1N3O0O1N2O1N2N2NgXZ2"}, "image_id": 386, "id": 5890}, {"iscrowd": 0, "category_id": 1, "bbox": [29.0, 240.0, 58.0, 56.0], "area": 1621, "segmentation": {"size": [512, 512], "counts": "Uh>1n?2N2N2O1N2N2N3M2N2N2N2N2N2N2N2N2N2O1N2N3M2N2N200000000000N2O1N2O100010O0O1N10O2N2N2lNWAn0o>N2N2N2N2N2N2N2N3N1N2N2N2N2N2N]Wd6"}, "image_id": 386, "id": 5891}, {"iscrowd": 0, "category_id": 1, "bbox": [483.0, 244.0, 29.0, 50.0], "area": 843, "segmentation": {"size": [512, 512], "counts": "cha71m?3N2N1N3N2N2M2O2N2M3N1O2M3N2N1N3N2N2M2O2N2M3N1010O1N2M2OZH"}, "image_id": 386, "id": 5892}, {"iscrowd": 0, "category_id": 1, "bbox": [426.0, 246.0, 15.0, 15.0], "area": 116, "segmentation": {"size": [512, 512], "counts": "nWe62l?2O2N2N2N2N2N11N2N2M3N2N2N1OUXS1"}, "image_id": 386, "id": 5893}, {"iscrowd": 0, "category_id": 1, "bbox": [344.0, 248.0, 24.0, 25.0], "area": 287, "segmentation": {"size": [512, 512], "counts": "QX\\52m?2N2N2N2N2N2N2N2N2001O0000000N2N2N2N2N2N2N2N2NjgW2"}, "image_id": 386, "id": 5894}, {"iscrowd": 0, "category_id": 1, "bbox": [258.0, 268.0, 46.0, 46.0], "area": 1094, "segmentation": {"size": [512, 512], "counts": "TYQ42m?2N2N2N2N2N3M2N2N2N2N2N2O1N2N2N2N2N2N2N000000000001O2N2N2N2N2N2N20000N2N2N2O2Ic@F_?8c@F_?86N2N2N2NQgW3"}, "image_id": 386, "id": 5895}, {"iscrowd": 0, "category_id": 1, "bbox": [2.0, 283.0, 61.0, 69.0], "area": 2015, "segmentation": {"size": [512, 512], "counts": "gY12m?2N2N2O1N3M2N2N2N2N2N2N2N2N2O2M2N2N2N2N2N2N00000dAfNS>Z1mAhNR>W1nAkNP>U1PBnNm=R1SBPOk=P1VBQOh=o0XBSOf=m0ZBUOg=i0XBYOh=e0XB]Oh=a0XB@i=`0UB@m=`0QB@Q>`0mA@U>U11N2N2N3M2N2RO_A:c>D_A:c>D_A:c>D_A:c>D_A:c>D_A=`>BaA>_>@cA?_>^OcA`0Q?M2N2O1N2N2N2N2NdUP7"}, "image_id": 386, "id": 5896}, {"iscrowd": 0, "category_id": 1, "bbox": [453.0, 286.0, 58.0, 55.0], "area": 1529, "segmentation": {"size": [512, 512], "counts": "miR72m?1N3N2N2M2O2M3N1O2M3N2M2O2N2M2O2M3N2N1N3N2M21000O10N2M2O2M3N2N1N3000O10O1000OASAKn>3SANm>OVA1i>NYA2g>L[A3f>J\\A7d>G^A9a>EbA:_>DbA=Q?0000O010M3N1O2M3N2Mg5"}, "image_id": 386, "id": 5897}, {"iscrowd": 0, "category_id": 1, "bbox": [311.0, 290.0, 22.0, 22.0], "area": 240, "segmentation": {"size": [512, 512], "counts": "[ik41n?2M3N2N2N2N2N2N2000000000N1O2N2N2N2N2N2N2NcVi2"}, "image_id": 386, "id": 5898}, {"iscrowd": 0, "category_id": 1, "bbox": [220.0, 303.0, 47.0, 46.0], "area": 1097, "segmentation": {"size": [512, 512], "counts": "WZ^31o?1N2N2N2N2N2N2N2N2N2N2N2N2N2N3M2N2O1N2N2N000000000010O2N2N2N2N2N2N2O100N2N3M2N2N2Ka@G`?76N2N2N2NnUj3"}, "image_id": 386, "id": 5899}, {"iscrowd": 0, "category_id": 1, "bbox": [273.0, 326.0, 24.0, 24.0], "area": 287, "segmentation": {"size": [512, 512], "counts": "_jX41n?2N2N2N2N2N2N2N2N2O1000000001M2N2N2O1N2N2N2N2N]U[3"}, "image_id": 386, "id": 5900}, {"iscrowd": 0, "category_id": 1, "bbox": [421.0, 329.0, 49.0, 52.0], "area": 1423, "segmentation": {"size": [512, 512], "counts": "Xkb61:0X?2g@0W?2f@1X?1f@1W?2g@OX?>N2M3N1O2M3N2N1N10000O01000O2O2M2O2N2O10O10ON3N2N2M2O2N2M2O2N2M3N1O2M3N2N1N3N2N2M2O2N2MWed0"}, "image_id": 386, "id": 5901}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 330.0, 18.0, 28.0], "area": 265, "segmentation": {"size": [512, 512], "counts": "Z:l0U?N2N2N2N3M2O1000000O1N2N2N2N2N2N2NUef7"}, "image_id": 386, "id": 5902}, {"iscrowd": 0, "category_id": 1, 
"bbox": [184.0, 337.0, 47.0, 47.0], "area": 1105, "segmentation": {"size": [512, 512], "counts": "Z[l22m?2N2N2O1N2N2N2N2N2N2N2N2N2N2N2N2N2N2N3M1O000000000002N2N2N3M2N2N2O100O1N2N2N2N2Jc@F_?86N2O1N2N2NkT\\4"}, "image_id": 386, "id": 5903}, {"iscrowd": 0, "category_id": 1, "bbox": [387.0, 339.0, 31.0, 31.0], "area": 500, "segmentation": {"size": [512, 512], "counts": "VkQ61n?1O2M3N2N1N3N2N2M2O2N2M3N2N1O20O0O2N2M3N1O2N2M3N1O2M3N2N1N3NSe^1"}, "image_id": 386, "id": 5904}, {"iscrowd": 0, "category_id": 1, "bbox": [467.0, 350.0, 18.0, 16.0], "area": 131, "segmentation": {"size": [512, 512], "counts": "SkY72l?3N1N3O10O10O10O10O10O10O10M3N2M2OjT="}, "image_id": 386, "id": 5905}, {"iscrowd": 0, "category_id": 1, "bbox": [135.0, 359.0, 52.0, 53.0], "area": 1334, "segmentation": {"size": [512, 512], "counts": "jkS21n?2N2N2N2N2N2N2N2N4L2N3N1N2N2N002N2N2N2N200000000000000000001O00N2O1N2N2N2N2N2N3M2N2N2N2N2N2N2N2N2N2N2NjSR5"}, "image_id": 386, "id": 5906}, {"iscrowd": 0, "category_id": 1, "bbox": [237.0, 363.0, 24.0, 25.0], "area": 290, "segmentation": {"size": [512, 512], "counts": "dkf32m?2N2N2N2N2N2N2N2N200010O000N2O1N2N2N2N2N2N2N2NWTm3"}, "image_id": 386, "id": 5907}, {"iscrowd": 0, "category_id": 1, "bbox": [400.0, 378.0, 48.0, 54.0], "area": 1389, "segmentation": {"size": [512, 512], "counts": "h\\X63l?2M3N1O2M3N1O2M3N2M2O2N2M2O2M3N2N1N3N2N2M2O2O10O0100000O010O1N1N3N2@[ABg>=ZAAh><[AAh>=YABi>;ZACg>>>N2N1N3N2N2M2Ofco0"}, "image_id": 386, "id": 5908}, {"iscrowd": 0, "category_id": 1, "bbox": [256.0, 380.0, 15.0, 15.0], "area": 122, "segmentation": {"size": [512, 512], "counts": "S\\P42m?2N2N2N2N2N2N10O3M2N2N2N2N2OmSh3"}, "image_id": 386, "id": 5909}, {"iscrowd": 0, "category_id": 1, "bbox": [315.0, 383.0, 28.0, 28.0], "area": 393, "segmentation": {"size": [512, 512], "counts": "[lm41n?2N2N2N3M2N2N2N2N2N2N2N2O1001M2N2N2N2N2N2N2N2O1N2N2N2NcSd2"}, "image_id": 386, "id": 5910}, {"iscrowd": 0, "category_id": 1, "bbox": [355.0, 389.0, 17.0, 15.0], "area": 120, "segmentation": {"size": [512, 512], "counts": "Zla51n?2M3N1O2O10O1000O10O10O10N2N2M2OdcU2"}, "image_id": 386, "id": 5911}, {"iscrowd": 0, "category_id": 1, "bbox": [384.0, 389.0, 10.0, 12.0], "area": 69, "segmentation": {"size": [512, 512], "counts": "[\\P62l?3N2M2O200OO2M3M2Oicj1"}, "image_id": 386, "id": 5912}, {"iscrowd": 0, "category_id": 1, "bbox": [95.0, 391.0, 48.0, 49.0], "area": 1200, "segmentation": {"size": [512, 512], "counts": "Rm_12m?2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N00000000000002N2N2N2N2N2O100O1N2N2N2Jh@AZ?=h@AZ?=6N2N2N2N2N2NVSh5"}, "image_id": 386, "id": 5913}, {"iscrowd": 0, "category_id": 1, "bbox": [246.0, 411.0, 19.0, 20.0], "area": 309, "segmentation": {"size": [512, 512], "counts": "V]k37a?8M3000000010O000000000000010O000J6G\\Sk3"}, "image_id": 386, "id": 5914}, {"iscrowd": 0, "category_id": 1, "bbox": [157.0, 416.0, 39.0, 48.0], "area": 865, "segmentation": {"size": [512, 512], "counts": "am^21n?2N2N2N2N2N2N2N2N2N2N2N3M2N2N2N2O1N20000000000000000000VOUA>k>@XA?h>_OZALL>m>CYAML>m>C^A;V?M2O1N2N2N2Njam4"}, "image_id": 386, "id": 5915}, {"iscrowd": 0, "category_id": 1, "bbox": [312.0, 419.0, 30.0, 29.0], "area": 453, "segmentation": {"size": [512, 512], "counts": "a]l41n?2N2N2O1N2N2N2N2N3M2N2N2N2N100O02N2N2N2N2N3M2N2N2N2O1N2N2N`bd2"}, "image_id": 386, "id": 5916}, {"iscrowd": 0, "category_id": 1, "bbox": [341.0, 419.0, 23.0, 23.0], "area": 264, "segmentation": {"size": [512, 512], "counts": "XmZ521Oj?5O100O1N2N2N2N2N2000001O00O1N2N2N2N2N2N2N3M`bY2"}, "image_id": 386, "id": 5917}, {"iscrowd": 0, "category_id": 1, "bbox": 
[373.0, 425.0, 50.0, 48.0], "area": 1265, "segmentation": {"size": [512, 512], "counts": "nmj51n?2M2O2N2]@HZ?;c@H[?9d@IZ??N3O1O10O1000O10OO2N2N2M2O2N2M3N1O2M3N2OO1O2N2M2O2N2M3N1O2M3N2N1N3N2M3N1O2M3N2N1N3N2NXR\\1"}, "image_id": 386, "id": 5918}, {"iscrowd": 0, "category_id": 1, "bbox": [62.0, 427.0, 58.0, 56.0], "area": 1584, "segmentation": {"size": [512, 512], "counts": "P^o01n?2N2N2N2N2N2O2M2N2N2N2N2N2N2N2N2N2N2N2N2N2N20000000000000O1N2N2000000000O1N2OO2N2kNYAn0i>POYAn0n>N2N2N2N2N2N2N2N2N2N2N2N2N2N2NbaS6"}, "image_id": 386, "id": 5919}, {"iscrowd": 0, "category_id": 1, "bbox": [220.0, 427.0, 30.0, 30.0], "area": 449, "segmentation": {"size": [512, 512], "counts": "i]^31n?2N2N2N2N2O1N2N2N2N3M2N2N2N2N02N2O1N2N3M2N2N2N2N2N2N2N2N2NWbR4"}, "image_id": 386, "id": 5920}, {"iscrowd": 0, "category_id": 1, "bbox": [229.0, 454.0, 66.0, 58.0], "area": 1694, "segmentation": {"size": [512, 512], "counts": "hnb31n?2N2O1N2N2N2N2N2N2N2N2N2N2N2N3M2N2N2N2O100001O0000000000000001O000000000000O2N1O1O1O1O1O1O1O1O1O1O1O1O1]Oi@;X?Cj@WObAi0^>YO`Ag0`>[O^Ae0b>]O]Ab0c>@[Aa0d>=0000001O0002N3M000001O2N2N2N2O1N2N2JSAVOo>h07M2N2N2N2N2N2N2N2O1N3M2No^e4"}, "image_id": 388, "id": 5936}, {"iscrowd": 0, "category_id": 1, "bbox": [231.0, 18.0, 35.0, 34.0], "area": 524, "segmentation": {"size": [512, 512], "counts": "Wac32m?2N2O1N2N2N2N2N2N3M2N000001O000000000001O000001O2N2N2N3M2N2N2O1N2N2NToj3"}, "image_id": 388, "id": 5937}, {"iscrowd": 0, "category_id": 1, "bbox": [422.0, 18.0, 28.0, 27.0], "area": 359, "segmentation": {"size": [512, 512], "counts": "oPc61n?2N3N1O01OO00002N2N2N2N3M2OO01O3M2N2000N2N2Dg@N[?1f@M\\?1f@N[?0h@MZ?1GWA:g>HWA;g>FWAFWAFWAFWA`0N2N2M3N3N0O000000000000001O01O001O3M2N2N2N2N2O1N1O0010O2N000000N20002N2N2N2N3M2O1N2N2N2N2N2N2N3M2N2O1Ni]j3"}, "image_id": 388, "id": 5940}, {"iscrowd": 0, "category_id": 1, "bbox": [349.0, 57.0, 20.0, 18.0], "area": 194, "segmentation": {"size": [512, 512], "counts": "Qb^51n?2O2M3N2M2O1N0010O010O010O002O2M3N2M2Oo]W2"}, "image_id": 388, "id": 5941}, {"iscrowd": 0, "category_id": 1, "bbox": [384.0, 57.0, 56.0, 56.0], "area": 1512, "segmentation": {"size": [512, 512], "counts": "nRP61n?2N2N2N2N2O1N2N2N2N2N3M2N2N2N2N2N2O0O1O00000000000N2N2O1N2N2100O1O2N1O1O2N1O1O2N1O1O2O0O2N1O1O2N1N2N3L3M3M4L3MRnS1"}, "image_id": 388, "id": 5942}, {"iscrowd": 0, "category_id": 1, "bbox": [253.0, 60.0, 38.0, 45.0], "area": 780, "segmentation": {"size": [512, 512], "counts": "hbn31o?1N2N2N2N2N3M2O1N2N2N2N2N2N3N0O00000000001NSOQAl0o>200000000001^OPA3P?MSA0m>0UAOl>OVAOl>OVAOl>OVAOl>OVAOl>OVAOl>Oa^^3"}, "image_id": 388, "id": 5943}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 62.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "nao72P>"}, "image_id": 388, "id": 5944}, {"iscrowd": 0, "category_id": 1, "bbox": [486.0, 98.0, 23.0, 24.0], "area": 260, "segmentation": {"size": [512, 512], "counts": "ZSc71n?2N2N2O2M2N2N2N2N210O000000O1N3M2N2N2N2N2O2M_\\1"}, "image_id": 388, "id": 5945}, {"iscrowd": 0, "category_id": 1, "bbox": [303.0, 100.0, 29.0, 29.0], "area": 436, "segmentation": {"size": [512, 512], "counts": "acg41n?2N2N2N2N2N2N3N1N2N2N2N2N2N2OO2N2N2N3N1N2N2N2N2N2N2N3N1N^li2"}, "image_id": 388, "id": 5946}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 108.0, 13.0, 25.0], "area": 178, "segmentation": {"size": [512, 512], "counts": "\\3i0X?O1N2N2N2N2N2N3M2N2N2O1NX\\i7"}, "image_id": 388, "id": 5947}, {"iscrowd": 0, "category_id": 1, "bbox": [220.0, 110.0, 55.0, 55.0], "area": 1435, "segmentation": {"size": [512, 512], "counts": 
"^T^31n?2O1N2N2N2N2N2N2N3M2N2N2O1N2N2N2N2N2N2N3M0000000LiNaAV1_>lN_AT1a>50001O02N2N2N00003M2N2N2N2N2O1N2N2N2N2N3M2N2N2N2N2O1N2Nm[f3"}, "image_id": 388, "id": 5948}, {"iscrowd": 0, "category_id": 1, "bbox": [357.0, 120.0, 29.0, 30.0], "area": 443, "segmentation": {"size": [512, 512], "counts": "Udb52m?2N2N2N2N2N2N2O2M2N2N2N2N2N20N2N3M2N2N2O1N2N2N2N3M2N2N2Nikn1"}, "image_id": 388, "id": 5949}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 124.0, 30.0, 31.0], "area": 471, "segmentation": {"size": [512, 512], "counts": "YTi63m?1N2N2N2N2N2N3M2O1N2N2N2N2N2N03M2N2N2N2N2O1N3M2N2N2N2N2N3Nckg0"}, "image_id": 388, "id": 5950}, {"iscrowd": 0, "category_id": 1, "bbox": [47.0, 126.0, 22.0, 22.0], "area": 241, "segmentation": {"size": [512, 512], "counts": "Udg02m?2O1N2N2N2N2N2N2O11O0000000O1N2N2N2N2N2N2Nf[m6"}, "image_id": 388, "id": 5951}, {"iscrowd": 0, "category_id": 1, "bbox": [484.0, 135.0, 28.0, 49.0], "area": 741, "segmentation": {"size": [512, 512], "counts": "iTb71o?2M2N2N2N2N2N2N2N2N3M2O1N2N2N2N2N2N2N2N3N100001O000000aK"}, "image_id": 388, "id": 5952}, {"iscrowd": 0, "category_id": 1, "bbox": [256.0, 137.0, 69.0, 57.0], "area": 1984, "segmentation": {"size": [512, 512], "counts": "SUP42m?2N2O1N3M2N2N2N2N2N2N2N3N1O100001O00N2N3M2N2N2N2N2N2O0O1O0000000000000002N2O1N2N1O00000000000010O01O2N2N2N2N2N2N2N3N1N2N2N2N2N2N2N2N3M2OS[m2"}, "image_id": 388, "id": 5953}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 143.0, 50.0, 50.0], "area": 1462, "segmentation": {"size": [512, 512], "counts": "i4k0U?1N2N3M2N2N2N2N2N2N2O2M11N2N2N3M2N2N2O1N2N2N1O0000000010O00000000000003M2O1N2N2N2N2N2N2N3M2O1N2N2NRkV7"}, "image_id": 388, "id": 5954}, {"iscrowd": 0, "category_id": 1, "bbox": [374.0, 162.0, 54.0, 55.0], "area": 1432, "segmentation": {"size": [512, 512], "counts": "dUk52m?2N2N3M2N2N2O1N2N2N2N2N2N3M2N2N2O1N2N2N2O101O00000000000001O00000N2N2N2N2N2N2N3M2O1N2N2N2N2N2N2N2N3M2N2O1NliY1"}, "image_id": 388, "id": 5955}, {"iscrowd": 0, "category_id": 1, "bbox": [447.0, 168.0, 53.0, 51.0], "area": 1236, "segmentation": {"size": [512, 512], "counts": "eeo61n?3N1N2N2N2O1N2N3M2N2O1N2N2N2O2M2N2O10000001O01O01O00001N100O2N1O100O2N100O1O010O00010O000010O01O0M3J7I6Koi5"}, "image_id": 388, "id": 5956}, {"iscrowd": 0, "category_id": 1, "bbox": [17.0, 185.0, 60.0, 52.0], "area": 1559, "segmentation": {"size": [512, 512], "counts": "bf81n?2N2N2N2N3M2N2N2N2N2O1N2N2N2N2N3M2N2N2N2000000000N2N2N00000001O000001O0000000000002N2O1N2N2N2N2N3M2N2N2N2N2N2O1N2N2N2NeYi6"}, "image_id": 388, "id": 5957}, {"iscrowd": 0, "category_id": 1, "bbox": [328.0, 187.0, 60.0, 65.0], "area": 1789, "segmentation": {"size": [512, 512], "counts": "UWT52m?2N2N2N3N1N2N2N2N2N2N2N2N2N3]OSOnAo0P>SOoAn0o=TOoAn0P>SOnAo0P>SOnAo0P>SOnAm0R>VOkAj0U>a0O000000000002N2N2N2N2O1N2N2N2N3M2N2N2N2N2N2O1N2N2N3M2N1O00001O3M2O1N2N2N2NZim1"}, "image_id": 388, "id": 5958}, {"iscrowd": 0, "category_id": 1, "bbox": [332.0, 214.0, 116.0, 110.0], "area": 4397, "segmentation": {"size": [512, 512], "counts": "ZYV52m?2N2N2N2N2N2N2O1N3M2N2N2N2N2N2N2N2N2N2N3N1N2N11N2N2N2O1N2N2N1O0000000000000001O01O00KQOZAo0f>SOXAm0h>510O2N2N2O10000O0O001O0000000000000001O000000AdN[B]1d=eNZB[1f=gNXBY1h=iNVBW1k=jNSBV1m=lNQBT1o=nNoAR1Q>>0000000001O00000001O000000000000001BnAnNS>P1oAnNT>o0nAoNT>o0nAoNT>o0nAoNT>o0nAoNT>o0`0N2N2N2O1N2N3M2N2N2N2N2N2N2N2N2Obho0"}, "image_id": 388, "id": 5959}, {"iscrowd": 0, "category_id": 1, "bbox": [59.0, 217.0, 57.0, 58.0], "area": 1671, "segmentation": {"size": [512, 512], "counts": "egm02m?2N2N3M2_@FY?2m?2N2N2N200000O1N2N2N2NcX[7"}, "image_id": 388, "id": 5962}, 
{"iscrowd": 0, "category_id": 1, "bbox": [351.0, 239.0, 28.0, 25.0], "area": 381, "segmentation": {"size": [512, 512], "counts": "mg_51n?2N2N2N3M2N2O1N2N2N1O0000001O01O2N2O2O0001ON2N2N2F\\@5i?N2NSXR2"}, "image_id": 388, "id": 5963}, {"iscrowd": 0, "category_id": 1, "bbox": [338.0, 252.0, 9.0, 9.0], "area": 47, "segmentation": {"size": [512, 512], "counts": "QXY51n?2N2N2N1O01O2N2NSXb2"}, "image_id": 388, "id": 5964}, {"iscrowd": 0, "category_id": 1, "bbox": [89.0, 260.0, 58.0, 51.0], "area": 1484, "segmentation": {"size": [512, 512], "counts": "kh\\11n?2N2N2N2N2N3N1N2N2N2O11OO1N3M2N2O1N2N2N2N2N201O00OO001O00000001O00000001O02N2N2N2N2N2N3M2O1N2N2N2N2N2N3M2N2O1N2N2NWWf5"}, "image_id": 388, "id": 5965}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 263.0, 33.0, 39.0], "area": 707, "segmentation": {"size": [512, 512], "counts": "f8f0Y?2N2N1O001O1O000001O0000000000010O2N2N2N3M2N2N2O1000000N2N2HZ@3k?N2NWW_7"}, "image_id": 388, "id": 5966}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 271.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "_82_go7"}, "image_id": 388, "id": 5967}, {"iscrowd": 0, "category_id": 1, "bbox": [148.0, 284.0, 52.0, 51.0], "area": 1298, "segmentation": {"size": [512, 512], "counts": "iYZ23l?2N2N2N2N2N2O1N2N3M2N2N2N2N2N2N2O1N2N001O00000000000001O0001O0002N2N2N2N2N2N2N3N1N2N2N2N2N2N2N2N2N3M2O_fk4"}, "image_id": 388, "id": 5968}, {"iscrowd": 0, "category_id": 1, "bbox": [45.0, 294.0, 34.0, 34.0], "area": 582, "segmentation": {"size": [512, 512], "counts": "hif02m?2N2N2N2N2N2N2N2N2N2O1N2N2N2N0000000002N2N2N2N2N2N2N2N2N2O1N2N2N2N]Vh6"}, "image_id": 388, "id": 5969}, {"iscrowd": 0, "category_id": 1, "bbox": [1.0, 297.0, 20.0, 20.0], "area": 214, "segmentation": {"size": [512, 512], "counts": "ci02m?2N3N1N2N2N2N2N1O001O02N2N2N2N2N2N3M2O^Ve7"}, "image_id": 388, "id": 5970}, {"iscrowd": 0, "category_id": 1, "bbox": [114.0, 307.0, 25.0, 25.0], "area": 316, "segmentation": {"size": [512, 512], "counts": "QZi11n?2N2N3M2O1N2N2N2N2N001O01O0000002N3M2O1N2N2N2N2NUVj5"}, "image_id": 388, "id": 5971}, {"iscrowd": 0, "category_id": 1, "bbox": [503.0, 324.0, 9.0, 19.0], "area": 84, "segmentation": {"size": [512, 512], "counts": "]jk71n?2N2N2N2N2N2MD`@>]?4kE"}, "image_id": 388, "id": 5972}, {"iscrowd": 0, "category_id": 1, "bbox": [168.0, 325.0, 58.0, 57.0], "area": 1622, "segmentation": {"size": [512, 512], "counts": "U[d21n?2O1N2N2N3M2N2N2N2N2O1N2N2N3M2N2N2N2N2O1N2N2N001O00000001O00000001O000002N2N2N3M2N2O1N2N2N2N2N3M2N2N2O1N2N2N2N2N3MTe^4"}, "image_id": 388, "id": 5973}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 335.0, 43.0, 49.0], "area": 1169, "segmentation": {"size": [512, 512], "counts": "k:4g0N[>5\\AJG3k>5\\AKF2l>5\\A3b>O\\A3c>N[A4c>N[A4c>N[A4c>d000000003M2O1000N2N2N2N2N2O1N2N3M2N2N2N2N2N2N2N2000001O0N2N2N2N2O1N2NdTZ7"}, "image_id": 388, "id": 5974}, {"iscrowd": 0, "category_id": 1, "bbox": [445.0, 345.0, 60.0, 64.0], "area": 1709, "segmentation": {"size": [512, 512], "counts": "Tln61o?1N2N2N2N3M2IEf@=X?Ef@=Y?6N20001N1O1N20000N2N1O1O0001O000000JPO]AP1c>RO[Ao0d>SOZAm0f>6000000000010O00000000000001O01O0001O2N2N2^OSANo>0SANo>0TANm>0UANm>0UANn>OTAOn>OTAOn>0SANo>0\\U3"}, "image_id": 388, "id": 5975}, {"iscrowd": 0, "category_id": 1, "bbox": [129.0, 356.0, 34.0, 33.0], "area": 578, "segmentation": {"size": [512, 512], "counts": "fkP21n?2N2N2N2N2N2N2N3M2N2N2N2N2O0O001O000001O2N2N2N2N2N3M2N2N2N2N2N2O1N_T^5"}, "image_id": 388, "id": 5976}, {"iscrowd": 0, "category_id": 1, "bbox": [204.0, 368.0, 55.0, 60.0], "area": 1602, "segmentation": {"size": [512, 512], 
"counts": "h\\V32m?2N2N2N3M2N2N2N2E_OTAc0j>_OTAc0j>_OUAb0i>@UAb0i>jNaAX1]>6O0001O0000000001O000002N2N2N2N2N2N2N2N2O2M2N2N2N2N2N2N1O002N2N2O2M2N2N2N2NhSn3"}, "image_id": 388, "id": 5977}, {"iscrowd": 0, "category_id": 1, "bbox": [494.0, 387.0, 18.0, 36.0], "area": 345, "segmentation": {"size": [512, 512], "counts": "b\\g72m?3N1N2N2N2N2N2N2N2N3M2O1N2N2N2N2N2lC"}, "image_id": 388, "id": 5978}, {"iscrowd": 0, "category_id": 1, "bbox": [161.0, 399.0, 33.0, 32.0], "area": 548, "segmentation": {"size": [512, 512], "counts": "ol`21n?2N2N2N2N2N2O1N2N2N3M2N2N2N2N1O000101N2N2N2N2N2N2N2N3M2N2N2N2O1NScn4"}, "image_id": 388, "id": 5979}, {"iscrowd": 0, "category_id": 1, "bbox": [46.0, 404.0, 51.0, 59.0], "area": 1501, "segmentation": {"size": [512, 512], "counts": "l]g01n?2N2N2O1N2@GVA;h>GVA;h>GVAEVA=h>EVA=h>EVA=c>ZO^A;M=c>ZO^AU1a>6N2N2N2N1O000000000002O1N2N3M2N2N2N2N2N2N2N2N2O2M2N2N2N2N2N2N2N2N2N2N3N1N`R_6"}, "image_id": 388, "id": 5980}, {"iscrowd": 0, "category_id": 1, "bbox": [241.0, 407.0, 67.0, 66.0], "area": 2186, "segmentation": {"size": [512, 512], "counts": "omh32m?2N2N2N2N2N2N2N3M2O1N2N2N2N2N2N2N2N2N2N2N3M2N01O000001O00000000001O000001O01O000002N0001O02N200O1N2N2O2M2N2N2KQAVOQ?h05N2N2N2N2N2N2N2O2JZ@Mh?1fbU3"}, "image_id": 388, "id": 5981}, {"iscrowd": 0, "category_id": 1, "bbox": [127.0, 412.0, 20.0, 20.0], "area": 186, "segmentation": {"size": [512, 512], "counts": "Xmo11n?2N2N2N2N3M2N0001O01O000002N2N3M2N2N2NnRf5"}, "image_id": 388, "id": 5982}, {"iscrowd": 0, "category_id": 1, "bbox": [455.0, 414.0, 56.0, 47.0], "area": 1484, "segmentation": {"size": [512, 512], "counts": "fmS71j?1X@1f?6N3M2O100O1N2O1N2N2N2N2N3M2N2N2N2N2N2N10O00001O2N2N0001O00200000000001N1N2N2N2N2O1N2N2N2N2N2_Oc@f0]AXOe>f0]AXOe>f0]AXOe>f0SA^OP?`0RA^OQ?>;N3L3N3M2Mold5"}, "image_id": 389, "id": 6003}, {"iscrowd": 0, "category_id": 1, "bbox": [58.0, 66.0, 54.0, 62.0], "area": 1660, "segmentation": {"size": [512, 512], "counts": "YRm03m?2M3N3L3N2N3L3N2M4M2M3N3M2M3N3L3N2010O00010M2M3N3L3N2NO0100O02O2M4M1N10O10O11N3N3L3N2M4M2N2M4M2M3N3L3N2N3L[lW6"}, "image_id": 389, "id": 6004}, {"iscrowd": 0, "category_id": 1, "bbox": [29.0, 83.0, 51.0, 64.0], "area": 1778, "segmentation": {"size": [512, 512], "counts": "jb>2m?3N2N3L3N3L3N2M4M2M4M2N2M4M2M4M200010O01N1N2N3L100O10O10O010O010O12M3N3L3N2M4M2N2M4ERA^OP?`0RA]OQ?`0;M2N3L3N2M4MPlg6"}, "image_id": 389, "id": 6005}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 89.0, 49.0, 75.0], "area": 2169, "segmentation": {"size": [512, 512], "counts": "o2c0a07`=L^B7_=K^B8_=LYBXOMn0h=LYB9g=P1O10O010O010O10O11N3N3L3N3L3N2N3L3N2M4M2M2OO10O4M0O10O10O10O0102M3N3M2M3N3L3N2M4M2N3L3N_[W7"}, "image_id": 389, "id": 6006}, {"iscrowd": 0, "category_id": 1, "bbox": [199.0, 177.0, 87.0, 60.0], "area": 2930, "segmentation": {"size": [512, 512], "counts": "UfS33l?3N3M2M4M2M4M2N3L010O10003L3N3L3N3M2M4M0O10O10O10O010O10O10O010O10O10O010O10O11N4M2M3N000O010O01000JYAROg>n0\\APOd>P1510O10O010O10O10O010O10O10O010O10O10O010O10O101N4M2N3L3N3L3N3M2M4M2Mfi`3"}, "image_id": 389, "id": 6007}, {"iscrowd": 0, "category_id": 1, "bbox": [64.0, 187.0, 62.0, 54.0], "area": 1651, "segmentation": {"size": [512, 512], "counts": "oVP11o?2N3L3N2M4M2M3N1OO0100O010O0100O01B]O_Ac0a>@[A`0e>CYA=g>EVA;j>>0O01000O010O010O01000O010O010O01000O010O010O01000O010O010O01000O3N2BQAFS?6PAHR?6PAGS?6?M2MfiP6"}, "image_id": 389, "id": 6008}, {"iscrowd": 0, "category_id": 1, "bbox": [222.0, 219.0, 100.0, 101.0], "area": 5059, "segmentation": {"size": [512, 512], "counts": 
"^W_32n?2M4M2N3L3N3L3N2NO01]AYOm=f0TB\\Oi=e0VB^Oh=d0VB_Og=d0VB^Og=e0SBQOH>R>d0RBBk=a0SBBj=`0SBCj=\\1N2NO0100O0100O02O3L3N2gB_Mn0MWOQAi0o>3010O012M3N2N3L3N2N200O1N2M3N3L3NSgn2"}, "image_id": 389, "id": 6009}, {"iscrowd": 0, "category_id": 1, "bbox": [81.0, 221.0, 65.0, 50.0], "area": 1780, "segmentation": {"size": [512, 512], "counts": "ngX11o?2M4M2N2M3^OE[A>b>EZA>d>DZAGXA:h>IUA6k>`000O0100O010O01000O010O010O01000O010O010O01000O010O010O10O10O010O4M2M3N2M10O10O10O010O010002M3N3L3N2M4M2N2M\\hf5"}, "image_id": 389, "id": 6010}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 233.0, 46.0, 52.0], "area": 1341, "segmentation": {"size": [512, 512], "counts": "a7\\1d>0O010O01000O010O01000O010O010O01F]AWOc>i0`ATO_>m0cAPO`>m0i0aAUO_>k0dAQO^>m0>M2M3N3M1N010O01000O010O01000O010O010O3N2N2M4M2M4Mlfn6"}, "image_id": 389, "id": 6012}, {"iscrowd": 0, "category_id": 1, "bbox": [109.0, 286.0, 64.0, 50.0], "area": 1765, "segmentation": {"size": [512, 512], "counts": "oif12n?2M4M2M3^OFZA=c>E[A=c>EZA=d>GYA8h>JUA7j>`00O010O10O010O10O010O10O010O10O010O10O010O10O010O10O010O10O010O102M3N3M2M10O0100O0100O0100O3N3L3N2NO01H^@2b?N`@Oc?N`@0RfY5"}, "image_id": 389, "id": 6013}, {"iscrowd": 0, "category_id": 1, "bbox": [12.0, 313.0, 66.0, 52.0], "area": 1878, "segmentation": {"size": [512, 512], "counts": "YZ62n?2N3L3N3L3N3L3N2N3L3N3L10000O103L3N2N0O10O0100O0100O0100O01000O010O01000O01E^AXOa>i0bASO`>l0cAQO_>l0>M0O10O12M3N2M1000O010O01000O010O0102M4M2N3L3N3Lbeh6"}, "image_id": 389, "id": 6014}, {"iscrowd": 0, "category_id": 1, "bbox": [267.0, 334.0, 67.0, 49.0], "area": 1750, "segmentation": {"size": [512, 512], "counts": "ekU41e?0d@3X?0f@1Y?2c@O\\?4b@K_?;EATA`0k>CSA?k>CSA?j>ERA>l>;0O010O010O01000O010O010O0100O0100O010O0100O0100O010O0100O0100O010O2O2N3L3N2M4MO01000O010O0100O010O103M2M3N3L3N2Mldh2"}, "image_id": 389, "id": 6015}, {"iscrowd": 0, "category_id": 1, "bbox": [136.0, 350.0, 64.0, 50.0], "area": 1766, "segmentation": {"size": [512, 512], "counts": "P\\T22m?3N2M4M2^OEZA>d>EYA>c>E[A;e>GXA:g>JVA5k>`0O10O10O010O010O10O10O010O10O010O10O010O10O010O10O010O10O010O10O02O2N3L3N2M010O01000O010O011N3N3M2M3N3L3N3L[dk4"}, "image_id": 389, "id": 6016}, {"iscrowd": 0, "category_id": 1, "bbox": [30.0, 351.0, 69.0, 53.0], "area": 1967, "segmentation": {"size": [512, 512], "counts": "`[?2n?2M4M2M4M2N3L3N2M4M2N3L100O01000O2O2M4M2N0O010O01000O010O010O10O10O010O01000O01F]AWOb>i0aATO`>l0cAQO_>l0dARO_>k0=M4M2N0O10O01000O010O01000O010O010O13L3N2M4M2N3L]T^6"}, "image_id": 389, "id": 6017}, {"iscrowd": 0, "category_id": 1, "bbox": [44.0, 393.0, 69.0, 52.0], "area": 1925, "segmentation": {"size": [512, 512], "counts": "j\\f02m?4M2M4M2N3L3N2M4M2M4M1O0O010O10O13L3N3L2OO10O10O010O010O10O10O010O10O10O010O010E`AVO`>j0bASO`>l0bAROa>j0=N3L3N1N10O10O010O10O10O010O01000O010O3N3M2M4M2M4MRSW6"}, "image_id": 389, "id": 6018}, {"iscrowd": 0, "category_id": 1, "bbox": [297.0, 401.0, 65.0, 48.0], "area": 1709, "segmentation": {"size": [512, 512], "counts": "^md42m?3N3L3N3ADTA>j>DSA`0i>DTA>j>DSA=l><0O10O010O10O10O010O10O10O010O10O010O10O10O010O10O10O010O10O10O010O4M2N3L3N3L01000O010O01000O010O102N3L3N3L3N3MgbZ2"}, "image_id": 389, "id": 6019}, {"iscrowd": 0, "category_id": 1, "bbox": [163.0, 414.0, 64.0, 51.0], "area": 1786, "segmentation": {"size": [512, 512], "counts": "Pna22m?4M2N2]OGZA=c>E[A=b>GZA=c>E[A;e>GXA9h>KUA5k>`0O0100O010O0100O0100O010O0100O0100O010O01000O010O010O01000O010O013L3N2N3L100O0100O0100O010O2O2N2M4M2M4M2M3NZR^4"}, "image_id": 389, "id": 6020}, {"iscrowd": 0, "category_id": 1, "bbox": [64.0, 430.0, 69.0, 54.0], "area": 1986, 
"segmentation": {"size": [512, 512], "counts": "P^P13l?3N3L3N3M2M4M2M4M2M4M0O10O10O10O04M2M3N2N1N010O010O010O01000O010O010O01000O010OF`AUOa>j0cASO^>l0dAQO`>k0=N2N2M4MO010O010O10O10O010O010O10O100O3N3L3N3M3L3NlQm5"}, "image_id": 389, "id": 6021}, {"iscrowd": 0, "category_id": 1, "bbox": [173.0, 444.0, 62.0, 55.0], "area": 1724, "segmentation": {"size": [512, 512], "counts": "Qof22m?3N2N3L3N3L3N2M10O10O10O010O01A[OcAd0^>^O`Ab0_>B]A?c>C[AFWA;h>>10O01000O010O01000O010O0100O0100O0100O010O01000O010O01000O010O011N3BQAGQ?6SAGP?6RAGQ?7RAFQ?6?N2MdQZ4"}, "image_id": 389, "id": 6022}, {"iscrowd": 0, "category_id": 1, "bbox": [407.0, 65.0, 47.0, 69.0], "area": 1940, "segmentation": {"size": [512, 512], "counts": "jc[64i?3M4SOJlA8R>LiA7T>NgA6V>LgA7X>KeA7Z>JcA:\\>e0O1O2O0N3M2M3N3L30001O010O0001N1M4L3N2M4L3N201M2M3M4L3N3L3M3M4M2M3M4L3N3L3Mhml0"}, "image_id": 390, "id": 6023}, {"iscrowd": 0, "category_id": 1, "bbox": [471.0, 68.0, 23.0, 16.0], "area": 185, "segmentation": {"size": [512, 512], "counts": "\\b[71m?2M3M40O01O01O010O00010O010O00010O01O01M2N3Lgm8"}, "image_id": 390, "id": 6024}, {"iscrowd": 0, "category_id": 1, "bbox": [459.0, 78.0, 29.0, 33.0], "area": 581, "segmentation": {"size": [512, 512], "counts": "UcU73k?3L3M3N3L3M3N3L3N30O0010O00010O0010O001M2M3M4M2M4L3N2M4M\\m;"}, "image_id": 390, "id": 6025}, {"iscrowd": 0, "category_id": 1, "bbox": [439.0, 130.0, 73.0, 59.0], "area": 2631, "segmentation": {"size": [512, 512], "counts": "iek63T?i0000000000000000000000000I700000001O00L4001O00000000000000000000000000000000000000000YOg0000G9000000000000000000001O000000000000A?000000000000000nK"}, "image_id": 390, "id": 6026}, {"iscrowd": 0, "category_id": 1, "bbox": [316.0, 140.0, 90.0, 76.0], "area": 3743, "segmentation": {"size": [512, 512], "counts": "jUn42l?3L3M3M4L3M3M4N100010O00010O0001L3M3M4M2M4L3M3M4L3O1010O00010O0010O00010O00010O0010O00010O00010O00010O0010O00010O0010O0XNlAd1X>1O01O01O01O0jNfAg0Y>VOkAi0V>SOmAm0S>POQBP1n=nNTBR1]>010O00010O0010L3M3N3L3M3M4L3M3M4M2M]jd1"}, "image_id": 390, "id": 6027}, {"iscrowd": 0, "category_id": 1, "bbox": [464.0, 191.0, 11.0, 11.0], "area": 114, "segmentation": {"size": [512, 512], "counts": "UVX75e?600000000000000000ORZb0"}, "image_id": 390, "id": 6028}, {"iscrowd": 0, "category_id": 1, "bbox": [287.0, 227.0, 86.0, 66.0], "area": 3234, "segmentation": {"size": [512, 512], "counts": "Xg_43l?3XAL`=7[BLd=6XBLh=7RBMl=7oAKQ>7kALT>7fALY>7cAK]>m0O001N101O001N100010O4M4K100O010L3N3M2N3O0010O01O01N1N3M2N3N101O01O010O01O010O01O01O010O01gNbAo0]>nNgAQ1d>10O00010O010O01M2N3M2N2M20O3L3N3M2N3L3N3M2M4M2N2N3L3N3M2N3LWXU2"}, "image_id": 390, "id": 6029}, {"iscrowd": 0, "category_id": 1, "bbox": [426.0, 232.0, 86.0, 78.0], "area": 3393, "segmentation": {"size": [512, 512], "counts": "dXe63k?2M4M2N3M2N3M2M3N3N110O0010O010O01KTOSAm0j>6N3M2N3M201O0O0O002N2N3M201O010O010O01O010O010O04L01O010O0bNlAQ1T>lNnAT1R>jNQBV1o=gNSBY1m=eNUB\\1X>O010O010O0010O010ON3O010O000mN\\Ak0d>RO^An0b>POaAP1g>010O010O010O01POSAl0l>ROVAl0Q?M2N3M2M4M2N3MWG"}, "image_id": 390, "id": 6030}, {"iscrowd": 0, "category_id": 1, "bbox": [257.0, 288.0, 83.0, 61.0], "area": 2666, "segmentation": {"size": [512, 512], "counts": "QjP44j?2N3M2N3M2M3N3M201O010M2O2O010O0N3L3N2M100O310O010O010O010O010O0010O010O0010O01hNYAV1h>010O010O010O0O2N10010O010O010O0010O010O010O010fN[AW1h>L3N3M2O20O01O0N3M2N3M2N3M2M3N3M2N3MTfe2"}, "image_id": 390, "id": 6031}, {"iscrowd": 0, "category_id": 1, "bbox": [361.0, 295.0, 96.0, 107.0], "area": 3873, "segmentation": {"size": [512, 512], "counts": 
"Pld51m?3M3M3M3L4M3M3M3L4M3M2N3N2EjNkAV1R>mNmAT1P>nNQBR1k=RORBQ1k=QOSBR1l=oNQBP1S><2M4M2M40N1N2N3M2O2OO0O2N2N1O2M2O2N2N1O2N2M2O2N101000O01000O010O1O0O2N2N3M2N3M2N2N3M2N3M20001O010OO2L3N3M2M4M00O010001O2M4M2N3L3N2N3M2M4M2N3L3N3M2N3L3NdVk0"}, "image_id": 390, "id": 6032}, {"iscrowd": 0, "category_id": 1, "bbox": [224.0, 334.0, 81.0, 70.0], "area": 2814, "segmentation": {"size": [512, 512], "counts": "e[`32l?2M4M2N3L3N3M2M3N3M2M4N110O01O010OO2M2M4M2N2N3L3010O010O01O01O010O010O010O00010O010O010O0nNdA`0\\>^OfAc0Z>ZOiAe0W>XOlAi0T>TOnAl0R>ROQBn0`>01O010O01O010O010O01O01O010O010O01O0O1N3L3N3M2M4M2N2M4M2N3MZTW3"}, "image_id": 390, "id": 6033}, {"iscrowd": 0, "category_id": 1, "bbox": [423.0, 347.0, 80.0, 79.0], "area": 3189, "segmentation": {"size": [512, 512], "counts": "mlc61l?4M2N3M2N3L3N3M2N3M21O010O01O010M2N2N2M3N1O2N1O2M3N1O2O0101O0010O010O01ON3M2N3M2M4M2N2N3L3O2O010O00O2L3N3M2M4M2N2M10000O013M2M4M2N3L3N2N3L3N3L3N3M2M3N3M2M4M2NoT4"}, "image_id": 390, "id": 6034}, {"iscrowd": 0, "category_id": 1, "bbox": [353.0, 358.0, 15.0, 18.0], "area": 137, "segmentation": {"size": [512, 512], "counts": "ck`51m?2N3N1N3M3M2N3OO2M2N3M2N3M3MhdW2"}, "image_id": 390, "id": 6035}, {"iscrowd": 0, "category_id": 1, "bbox": [489.0, 383.0, 23.0, 56.0], "area": 669, "segmentation": {"size": [512, 512], "counts": "]md73k?2N3L3N3M2N2N3L3O2O0JYOTAg0i>[OWAe0g>^OYAb0c>A]A?a>D^A=_>c0N1O0O1001N4MRD"}, "image_id": 390, "id": 6036}, {"iscrowd": 0, "category_id": 1, "bbox": [201.0, 386.0, 70.0, 67.0], "area": 2410, "segmentation": {"size": [512, 512], "counts": "_mT31m?2N2M4M2N3FDk@?S?Ck@`0Q?9N2N3L3N3M2M3N3M2N3O0010O001M2010O00010O010O010O00010OdNeAS1\\>jNgAV1X>hNjAX1`>0O00010O010OlN\\Al0e>QO]AP1i>01O010O01O01O010O010O01ON3M2M4M2N3L3N2N3L3N3L3NjRh3"}, "image_id": 390, "id": 6037}, {"iscrowd": 0, "category_id": 1, "bbox": [324.0, 409.0, 14.0, 15.0], "area": 129, "segmentation": {"size": [512, 512], "counts": "Q]R52l?3M2N3M201O010O010ON3M2N3MUcf2"}, "image_id": 390, "id": 6038}, {"iscrowd": 0, "category_id": 1, "bbox": [314.0, 422.0, 16.0, 16.0], "area": 168, "segmentation": {"size": [512, 512], "counts": "a]m42g?O]@3a?O\\@5a?601O01O001O010O000N3L3N3Mgbj2"}, "image_id": 390, "id": 6039}, {"iscrowd": 0, "category_id": 1, "bbox": [326.0, 433.0, 90.0, 71.0], "area": 3556, "segmentation": {"size": [512, 512], "counts": "b^S51l?3N3M2N2N3L3N3O0010O010O010ON2N3L3N3M2N3M2O20O01O01O010O010O010O010O00010O010O010O\\AjN^>W1_AkNb>Z1O0M4M2O20O00010O010O01O0N3L3N2N3M210O010O0010O010O0010OO2L3N3M2N2M4M2N3_O_A_Oc>?_A_Od>>^A_Oe>?^A^Oe>>^A@d>>a0M2N2N3L3NTb_1"}, "image_id": 390, "id": 6040}, {"iscrowd": 0, "category_id": 1, "bbox": [175.0, 450.0, 102.0, 62.0], "area": 3599, "segmentation": {"size": [512, 512], "counts": "Rog21l?4M2N3M2N3M2M4M2N2N3M2M4M2N3N110O010O01O01O010O010O010O010O010O00010O010O010O010O010O01O01O010O010O010O010O01O01O010O001L3N3M2O11O001O001O001O001O00001O001O001O001O0O2M2N2M4M2O2O010O010O010YOm@?S?_OPA=S?Ao@=\\?M2N3M2MlPe3"}, "image_id": 390, "id": 6041}, {"iscrowd": 0, "category_id": 1, "bbox": [506.0, 453.0, 6.0, 14.0], "area": 45, "segmentation": {"size": [512, 512], "counts": "`^m71l?3N2N3M2N3jA"}, "image_id": 390, "id": 6042}, {"iscrowd": 0, "category_id": 1, "bbox": [269.0, 455.0, 19.0, 15.0], "area": 153, "segmentation": {"size": [512, 512], "counts": "]nV41m?2N3O2OO01N1N30O0010O010O0010M2N3M2Mga_3"}, "image_id": 390, "id": 6043}, {"iscrowd": 0, "category_id": 1, "bbox": [336.0, 498.0, 31.0, 14.0], "area": 266, "segmentation": {"size": [512, 512], "counts": 
"n_X52m?11O001O00N2N2N2N2N2O100O10000O1000000001O001O001O1O2N2N5K00QPX2"}, "image_id": 390, "id": 6044}, {"iscrowd": 0, "category_id": 1, "bbox": [180.0, 503.0, 24.0, 9.0], "area": 123, "segmentation": {"size": [512, 512], "counts": "o_j21m?2N2N2N200001O00001O001O001O001O001O001O001O00Q`i4"}, "image_id": 390, "id": 6045}, {"iscrowd": 0, "category_id": 1, "bbox": [311.0, 507.0, 12.0, 5.0], "area": 36, "segmentation": {"size": [512, 512], "counts": "nok42l?2O1001O001O001O001O00QPn2"}, "image_id": 390, "id": 6046}, {"iscrowd": 0, "category_id": 1, "bbox": [211.0, 492.0, 50.0, 20.0], "area": 525, "segmentation": {"size": [512, 512], "counts": "noY32l?2N2M3N2N2N2N2N2O1001O001O001O001O001O001O001O001O001O001O001O00001O001O001O001O001O001O001O001O00QPm3"}, "image_id": 391, "id": 6047}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 510.0, 6.0, 2.0], "area": 10, "segmentation": {"size": [512, 512], "counts": "noo22n?000001O00QPm4"}, "image_id": 391, "id": 6048}, {"iscrowd": 0, "category_id": 1, "bbox": [506.0, 95.0, 6.0, 16.0], "area": 56, "segmentation": {"size": [512, 512], "counts": "QSm72n?3L3N3L3N2oL"}, "image_id": 392, "id": 6049}, {"iscrowd": 0, "category_id": 1, "bbox": [503.0, 131.0, 9.0, 24.0], "area": 111, "segmentation": {"size": [512, 512], "counts": "Wdk71n?3N3M2M3N3L3N3L3kK"}, "image_id": 392, "id": 6050}, {"iscrowd": 0, "category_id": 1, "bbox": [489.0, 242.0, 23.0, 44.0], "area": 622, "segmentation": {"size": [512, 512], "counts": "kgd71n?4M2M4M2N2M4M2M4M2N2M4M1N10O01000O04M2M3N1O0^H"}, "image_id": 392, "id": 6051}, {"iscrowd": 0, "category_id": 1, "bbox": [509.0, 288.0, 3.0, 7.0], "area": 13, "segmentation": {"size": [512, 512], "counts": "Qin71n?4M2nF"}, "image_id": 392, "id": 6052}, {"iscrowd": 0, "category_id": 1, "bbox": [5.0, 0.0, 43.0, 25.0], "area": 589, "segmentation": {"size": [512, 512], "counts": "P`21o?1O1O1O2N1O1O1O1O1O1O1O2N1O1O1O1O1O1O1O1O2NO1O1O1O100O1O1O1O1O1O1O1O1O100O1O2N2N2N3MloW7"}, "image_id": 393, "id": 6053}, {"iscrowd": 0, "category_id": 1, "bbox": [54.0, 0.0, 24.0, 12.0], "area": 151, "segmentation": {"size": [512, 512], "counts": "PPk01o?1O1O1O1O1O1O1O1O1O1O1OO1O1O1O1O100O1O1O1O1O1OQPi6"}, "image_id": 393, "id": 6054}, {"iscrowd": 0, "category_id": 1, "bbox": [361.0, 0.0, 50.0, 39.0], "area": 1075, "segmentation": {"size": [512, 512], "counts": "Y`d52n?1`@On>4o@No>4o@No>4o@No>4o@On>3QANm>4QANm>d0N2O1N2OO1O1O1O1O1O1O100O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O2O2M2Nm_b1"}, "image_id": 393, "id": 6055}, {"iscrowd": 0, "category_id": 1, "bbox": [215.0, 9.0, 24.0, 24.0], "area": 298, "segmentation": {"size": [512, 512], "counts": "e`[32m?2N2N3M2N2N2O1N2N2N00000001O2O1N2N2N3M2N2N2N2N^_X4"}, "image_id": 393, "id": 6056}, {"iscrowd": 0, "category_id": 1, "bbox": [395.0, 16.0, 63.0, 68.0], "area": 1894, "segmentation": {"size": [512, 512], "counts": "WbU61n?2]ONUA4i>NUA4i>NUA5h>MVA5i>MTA5j>MUA4i>NUA4i>NUA4i>c0N2N001O01O0000000000000001O01O0000000000000001O01O00000000000000101N3M2N2N2N2N2N2N2O2M2N2N2N2N2N2N2N2N3Njnj0"}, "image_id": 393, "id": 6057}, {"iscrowd": 0, "category_id": 1, "bbox": [260.0, 23.0, 29.0, 29.0], "area": 436, "segmentation": {"size": [512, 512], "counts": "TQR42m?2N2O1N2N2N2N2N2N2N2N2N2N3M10O2N2N2N2N2N2N2N2N3M2N2N2O1Nl^_3"}, "image_id": 393, "id": 6058}, {"iscrowd": 0, "category_id": 1, "bbox": [290.0, 30.0, 59.0, 51.0], "area": 1591, "segmentation": {"size": [512, 512], "counts": "fQa41n?3N2M3M3N2M2O2M3N2M3N2M3N1N3N0O1O010O010O010O010O01O01O010O010O010O010O010O01O01O010O02O2M2O2M3N2M3M3N1N3N2M3N2M3N2MX^a2"}, "image_id": 393, 
"id": 6059}, {"iscrowd": 0, "category_id": 1, "bbox": [344.0, 48.0, 43.0, 46.0], "area": 917, "segmentation": {"size": [512, 512], "counts": "^R\\53l?2O1N2N2N00101N2N2N3M2N2N2N2N2OO000001O0000000001O000001O0000000001O002N3N1Ai@2Y?Lj@1X?Mj@1X?Mj@1X?Mj@1_mn1"}, "image_id": 393, "id": 6060}, {"iscrowd": 0, "category_id": 1, "bbox": [276.0, 51.0, 12.0, 12.0], "area": 78, "segmentation": {"size": [512, 512], "counts": "fQZ41o?1N3M2O2M2O10N3N1N2N2OWn_3"}, "image_id": 393, "id": 6061}, {"iscrowd": 0, "category_id": 1, "bbox": [219.0, 53.0, 62.0, 46.0], "area": 1547, "segmentation": {"size": [512, 512], "counts": "\\b]33k?2M4M2M3N3L3N3L3N2O2O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O01M2M4M2M3N3L3N3L3N2Mj]c3"}, "image_id": 393, "id": 6062}, {"iscrowd": 0, "category_id": 1, "bbox": [172.0, 60.0, 39.0, 33.0], "area": 663, "segmentation": {"size": [512, 512], "counts": "\\Rf22m?3M2N2O1N2N2N3M2N2N2O1N2N2N001O000001O1O2N2O1N2N2N3M2O1001O0000O2M2N2N2N2N2Na]f4"}, "image_id": 393, "id": 6063}, {"iscrowd": 0, "category_id": 1, "bbox": [390.0, 61.0, 98.0, 64.0], "area": 2282, "segmentation": {"size": [512, 512], "counts": "`SS61n?2N3M2N2N2N2N2O1N2N3M000000001O0001O000000000000O2O000000000001O0Hi@HW?8l@ET?;n@DR?;PACR?;;O1N2N2N2N2N11N2V@Md?5Z@Md?9O1N2KCd@?Z?5N2O2M2N2N2N2N2N2N2N2N3N1O10N2N2N1O00010O000000000000002N2O1N2N2N2N2N2N2N3M2N2O1N2N2N2N2N2N3M_m;"}, "image_id": 393, "id": 6064}, {"iscrowd": 0, "category_id": 1, "bbox": [367.0, 93.0, 28.0, 27.0], "area": 346, "segmentation": {"size": [512, 512], "counts": "]cg52m?2N2N2N2N2O2M2N2N000000000001O01O0001O2N3M2N2N2N2O1N2Nk\\j1"}, "image_id": 393, "id": 6065}, {"iscrowd": 0, "category_id": 1, "bbox": [470.0, 101.0, 42.0, 51.0], "area": 1255, "segmentation": {"size": [512, 512], "counts": "nS[72m?2O2M2N2N2N2N2N2N2N2O2M2N2N2N2N2N2N2N3M2O11M2N10O000000000000000010O001O2N2N2N2N2NgL"}, "image_id": 393, "id": 6066}, {"iscrowd": 0, "category_id": 1, "bbox": [129.0, 104.0, 60.0, 58.0], "area": 1854, "segmentation": {"size": [512, 512], "counts": "SdP21n?2N2W@Lc?6[@Lc?:N2O1N2N2N2N2N3M2N2N2O1N2N2N2N3M2N2N2N2O1N11N2N2N000000001O01O000002N2N2N2N2N2O2M2N2N2N2N2N2N2N3N1N2N2N2N2N2N2NQ\\Q5"}, "image_id": 393, "id": 6067}, {"iscrowd": 0, "category_id": 1, "bbox": [335.0, 109.0, 16.0, 17.0], "area": 138, "segmentation": {"size": [512, 512], "counts": "acW52m?2N2N2N3N1O10000010N1N2N2N2N3NX\\`2"}, "image_id": 393, "id": 6068}, {"iscrowd": 0, "category_id": 1, "bbox": [211.0, 110.0, 21.0, 22.0], "area": 233, "segmentation": {"size": [512, 512], "counts": "icY31n?2N2N2N2N2N2N3M2N2N0002N2N2N2N2N2N2N2N2NZl[4"}, "image_id": 393, "id": 6069}, {"iscrowd": 0, "category_id": 1, "bbox": [438.0, 113.0, 12.0, 13.0], "area": 83, "segmentation": {"size": [512, 512], "counts": "eSk62m?2O1N3M2N2000N2N3N1N2NYln0"}, "image_id": 393, "id": 6070}, {"iscrowd": 0, "category_id": 1, "bbox": [399.0, 124.0, 14.0, 14.0], "area": 104, "segmentation": {"size": [512, 512], "counts": "SdW62m?2N2N2N2N2N1O02N2N2N2N2N2No[a1"}, "image_id": 393, "id": 6071}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 138.0, 9.0, 19.0], "area": 97, "segmentation": {"size": [512, 512], "counts": "Z4c0^?N2O2M2N3M2O2M2N\\[k7"}, "image_id": 393, "id": 6072}, {"iscrowd": 0, "category_id": 1, "bbox": [313.0, 139.0, 57.0, 73.0], "area": 1851, "segmentation": {"size": [512, 512], "counts": "Uel41^?1QA0m>2QA0m>2QA0m>2QA0m>3PAOn>3PAOn>3PAOn>d0M2N2O1O100000000000001bAiNP>W1nAkNS>T1kAnNU>R1iAPOW>P1gAROY>n0eAUOZ>Y100000000000N3N1N2N2N2N2N2N2N2N2N2N3M2O1N2N2N2N2N2N2N2N2N3M2N2OUjV2"}, "image_id": 393, 
"id": 6073}, {"iscrowd": 0, "category_id": 1, "bbox": [176.0, 146.0, 61.0, 50.0], "area": 1521, "segmentation": {"size": [512, 512], "counts": "\\Uh23m?1N3N2M3M2O2M3N2M2O2M3M3N0O10O010O010O00010O010O010O01O01O010O010O01O01O010O010O01O01O010O010O2O1N3M3N2M3N1N3N2M3M2O2M3NhZY4"}, "image_id": 393, "id": 6074}, {"iscrowd": 0, "category_id": 1, "bbox": [458.0, 146.0, 23.0, 24.0], "area": 268, "segmentation": {"size": [512, 512], "counts": "jTU71n?2N2N2N2N2N2O1N2N201O00000000N2N2N2N3M2N2N2NQ[?"}, "image_id": 393, "id": 6075}, {"iscrowd": 0, "category_id": 1, "bbox": [499.0, 154.0, 13.0, 25.0], "area": 166, "segmentation": {"size": [512, 512], "counts": "Uei71n?2N2N2N2O1N2N2N3M2N2N2N2UK"}, "image_id": 393, "id": 6076}, {"iscrowd": 0, "category_id": 1, "bbox": [446.0, 157.0, 23.0, 24.0], "area": 276, "segmentation": {"size": [512, 512], "counts": "TUo62m?2O1N2N2N3M2N2N2O100000001ON2O1N2N3M2N2N2N2NfZe0"}, "image_id": 393, "id": 6077}, {"iscrowd": 0, "category_id": 1, "bbox": [409.0, 164.0, 46.0, 45.0], "area": 973, "segmentation": {"size": [512, 512], "counts": "ne\\62m?2N2N2N2N2N3N1N2N2N2N2N2N2N3M2N01O0000000000000001O01O00002N2N2N2N2O1N2N2N2N3M2N2N2N2N2O1N\\Zl0"}, "image_id": 393, "id": 6078}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 169.0, 49.0, 51.0], "area": 1347, "segmentation": {"size": [512, 512], "counts": "c5j0W?001O0000000000000N2N2N3M2O0O000000000000001O0001O00000000001O2N2N2O1N2N2N2N2N3M2N2N2N2N2O1N2N2NVZW7"}, "image_id": 393, "id": 6079}, {"iscrowd": 0, "category_id": 1, "bbox": [230.0, 189.0, 55.0, 74.0], "area": 2413, "segmentation": {"size": [512, 512], "counts": "`Vc36i04@Ig=>^B`0Z=H[Bc0e=m0O0000000001O0000000001O000000M3G:O0000000001OO1F:E;E0001O0001O0000000000000001O0001O0K5FPZa3"}, "image_id": 393, "id": 6080}, {"iscrowd": 0, "category_id": 1, "bbox": [444.0, 189.0, 68.0, 55.0], "area": 1783, "segmentation": {"size": [512, 512], "counts": "lVn61n?2N2N2N2N2N2N3M2N2N2N2N2N2O1N2N2N2N2N2N2N3M00000000000001O1O100O1O1O1O2N2N2N1O1O1O1O1O1O0001O0000000002N2O1001ON3M2N2N2N2N2O1N2N2N2N2NcI"}, "image_id": 393, "id": 6081}, {"iscrowd": 0, "category_id": 1, "bbox": [292.0, 199.0, 35.0, 43.0], "area": 797, "segmentation": {"size": [512, 512], "counts": "nVb42m?2N2N2N2N2N3N1N2N2N2N2N2N001O2O1N3M2N2N1O001O0001O002TOVA?l>_OVA?l>_OWA1M3n>JWA1M3n>JWA1M4Y?Ji@4c?N2NlXl2"}, "image_id": 393, "id": 6082}, {"iscrowd": 0, "category_id": 1, "bbox": [359.0, 199.0, 47.0, 35.0], "area": 1057, "segmentation": {"size": [512, 512], "counts": "Sgc52k?3L4L4L5K4M3L4N3O0001O01O0001O01O00010OM3L4M40O0001O01O0001O01O00010O0000010O000001N1L4L5L3Lcid1"}, "image_id": 393, "id": 6083}, {"iscrowd": 0, "category_id": 1, "bbox": [26.0, 209.0, 56.0, 56.0], "area": 1425, "segmentation": {"size": [512, 512], "counts": "eW=2m?2N3M2N2O1N2N2N2N2N2N2N2N3M2N2O1N0GPOcAP1]>ROaAn0_>TO_Al0a>VO]Aj0c>901O000001O0000002N2N2N2O1N2N2N2N3M2N2N1O0000010O2N2N2N2N3M2N2N2N2Nmhf6"}, "image_id": 393, "id": 6084}, {"iscrowd": 0, "category_id": 1, "bbox": [194.0, 214.0, 20.0, 21.0], "area": 217, "segmentation": {"size": [512, 512], "counts": "lVQ31n?2N3M2O1N2N3M2O1O2O0001M2O1N3M2N2O2M2Nnhd4"}, "image_id": 393, "id": 6085}, {"iscrowd": 0, "category_id": 1, "bbox": [465.0, 233.0, 40.0, 39.0], "area": 747, "segmentation": {"size": [512, 512], "counts": "ngX71n?2O1N2N2N3M2N2N2N2O1N2N2N3M2N2N0001O000001O000002N2N3M2N2O1N2N2N2N2N3M2N2O1N2NWX3"}, "image_id": 393, "id": 6086}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 234.0, 14.0, 23.0], "area": 182, "segmentation": {"size": [512, 512], "counts": "_7b0]?0000000002N2O1N3M2N2N2N2N^hh7"}, 
"image_id": 393, "id": 6087}, {"iscrowd": 0, "category_id": 1, "bbox": [389.0, 244.0, 55.0, 58.0], "area": 1460, "segmentation": {"size": [512, 512], "counts": "UhR62m?3M2O1N2N2N2N2N2N2N3M2N2N2O1N2N2N2N2N201O01O0000000000000001O01O0000000N2O1N2N2N2N3M2N2N2N2N2O1N2N2N2N3M2N2NVgQ1"}, "image_id": 393, "id": 6088}, {"iscrowd": 0, "category_id": 1, "bbox": [263.0, 248.0, 57.0, 55.0], "area": 1509, "segmentation": {"size": [512, 512], "counts": "ihS43l?2O1N2N2N2N2N2N2N2N3M2N2O1N2N2N2N2N2N1O00001O01O0000000000000001O01O0000001O2N2N2N2N3N1N2N2N2N2N2N2N2N2N3M2O1N2Nego2"}, "image_id": 393, "id": 6089}, {"iscrowd": 0, "category_id": 1, "bbox": [58.0, 251.0, 49.0, 56.0], "area": 1459, "segmentation": {"size": [512, 512], "counts": "PYm01o?1N2N2N3M2N2N2N2N2O1N3M2N2N2N2N2N10O0001O000K5E;1O1O1O1O101N1O1O1O1O1O1O2O0O1O1O1O1O1O1O2N100O1^OdXZ6"}, "image_id": 393, "id": 6090}, {"iscrowd": 0, "category_id": 1, "bbox": [502.0, 267.0, 10.0, 12.0], "area": 71, "segmentation": {"size": [512, 512], "counts": "aXk72m?3M2N2N1O0002N2N2NcG"}, "image_id": 393, "id": 6091}, {"iscrowd": 0, "category_id": 1, "bbox": [504.0, 279.0, 8.0, 20.0], "area": 85, "segmentation": {"size": [512, 512], "counts": "mXl71n?3M2N2N2\\@H]?:b@G\\?`0O1XG"}, "image_id": 393, "id": 6092}, {"iscrowd": 0, "category_id": 1, "bbox": [379.0, 287.0, 46.0, 51.0], "area": 1162, "segmentation": {"size": [512, 512], "counts": "_im52m?2N3M2N2N2N2O1N3M2N2N2l@YOl>i0RAYOl>P1N1N2N2N2N201O01O00000O1N2N3M2N2N2O1N2N2N3M2N2N2O1N2N3M2N2N2N2O1NSV[1"}, "image_id": 393, "id": 6093}, {"iscrowd": 0, "category_id": 1, "bbox": [11.0, 289.0, 30.0, 31.0], "area": 485, "segmentation": {"size": [512, 512], "counts": "^i52m?3M2N2N2N2N2O1N2N3M2N2N2N2N20O1N2N2N3M2N2N2O1N2N2N3M2N2N2N_V[7"}, "image_id": 393, "id": 6094}, {"iscrowd": 0, "category_id": 1, "bbox": [84.0, 290.0, 65.0, 50.0], "area": 1495, "segmentation": {"size": [512, 512], "counts": "gYZ11n?3M2O2M2N2N3N1N3M2O2M2N2O2M2N3O0010O010N1N3N0O001O01O01O01O00010O0001O01O01O01O00010O0001O01O01O01O0002O2M2N3N1N2N3M2O2M2N3N1N2N]Ve5"}, "image_id": 393, "id": 6095}, {"iscrowd": 0, "category_id": 1, "bbox": [318.0, 300.0, 56.0, 46.0], "area": 1504, "segmentation": {"size": [512, 512], "counts": "oYo4j0V?000000001O0000000000000000000O100O1O1O1O1O1O1O1O100O0000000000000010O00001O2N2N2N2N2O1N2N3M2N2N2N2N2N2O2M2N2NRfT2"}, "image_id": 393, "id": 6096}, {"iscrowd": 0, "category_id": 1, "bbox": [415.0, 301.0, 18.0, 18.0], "area": 148, "segmentation": {"size": [512, 512], "counts": "bi_61n?2N2N2N2O1N201O00000000N3N1N2N2N2NXVW1"}, "image_id": 393, "id": 6097}, {"iscrowd": 0, "category_id": 1, "bbox": [445.0, 311.0, 59.0, 54.0], "area": 1441, "segmentation": {"size": [512, 512], "counts": "Wjn61n?2N2O1N2N2N2N3M2N2N2O1N2N3M2N2N2N2O1N210O0000000000O2N100O1O100O2O0O1O100O100O1O01O01O01O010O00010OM3M4M2M3N2M4L3N2M_e3"}, "image_id": 393, "id": 6098}, {"iscrowd": 0, "category_id": 1, "bbox": [5.0, 314.0, 24.0, 18.0], "area": 244, "segmentation": {"size": [512, 512], "counts": "Rj21n?2N3N1N2N2N2N3N0O1O11O1O1O1O2N1O1O1O1O2N1N2O1OgUa7"}, "image_id": 393, "id": 6099}, {"iscrowd": 0, "category_id": 1, "bbox": [417.0, 320.0, 17.0, 19.0], "area": 166, "segmentation": {"size": [512, 512], "counts": "]j`61n?2N2IN]@4a?N^@4_?70O000000102M2N2N2N3N1NieV1"}, "image_id": 393, "id": 6100}, {"iscrowd": 0, "category_id": 1, "bbox": [129.0, 330.0, 63.0, 54.0], "area": 1508, "segmentation": {"size": [512, 512], "counts": "VkP21n?2N2N2N3M2N2N2O1N2N2N2N2N2N200000001O0000000N2N00001O01O000000000000000000000001O01O00000000002N2N2N2N2N3M2N2O1N2N2N2N2N2N2NWeo4"}, 
"image_id": 393, "id": 6101}, {"iscrowd": 0, "category_id": 1, "bbox": [424.0, 337.0, 27.0, 26.0], "area": 348, "segmentation": {"size": [512, 512], "counts": "P[d62m?2N3M2N2N2N2N2N2N10O0001O00000002N2N2N2N2N2N2N2O1N2NVUn0"}, "image_id": 393, "id": 6102}, {"iscrowd": 0, "category_id": 1, "bbox": [64.0, 339.0, 60.0, 53.0], "area": 1568, "segmentation": {"size": [512, 512], "counts": "W[P12m?2N2N2N2N2O1N3M2N2N2N2N2N2N2O2M2N2N1O00000001101O00000001O000001O00000001M2N000001O01O3M2N2N2N2N2N2N2O1N3M2N2N2N2N2N2N_dQ6"}, "image_id": 393, "id": 6103}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 344.0, 24.0, 37.0], "area": 538, "segmentation": {"size": [512, 512], "counts": "l:Q1n>1O1O1O1001N2N1O2N2M3N2N2N2N2N2N2N2N2N2N2N2N2Nhdc7"}, "image_id": 393, "id": 6104}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 348.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "ljo72R5"}, "image_id": 393, "id": 6105}, {"iscrowd": 0, "category_id": 1, "bbox": [435.0, 350.0, 29.0, 29.0], "area": 440, "segmentation": {"size": [512, 512], "counts": "]ki61n?2N2N2N2N2N2N2N2N2N2N2N3M1O0001O2N2O1N2N2N2N2N2N3M2N2N2Nfdg0"}, "image_id": 393, "id": 6106}, {"iscrowd": 0, "category_id": 1, "bbox": [359.0, 355.0, 56.0, 54.0], "area": 1369, "segmentation": {"size": [512, 512], "counts": "Ulc51n?2N2N2N2N2N2O1N3M2N2N2N2N2N2N2O2M2N00000000001O01O00000000000001O01O000000001O2N2O2M2N2N2N2N2N2N2N3N1N2N2N2N2N\\T`1"}, "image_id": 393, "id": 6107}, {"iscrowd": 0, "category_id": 1, "bbox": [172.0, 368.0, 57.0, 54.0], "area": 1400, "segmentation": {"size": [512, 512], "counts": "b\\f21o?1N2N2N2N2N2N2N2N3M2O1N2N2N2N2N2N2N2N10O0000000000000000010O00000000000000010O2N2N2N3M2N2N2N2N2O1N2N2N3M2N2N2N2NnS]4"}, "image_id": 393, "id": 6108}, {"iscrowd": 0, "category_id": 1, "bbox": [457.0, 384.0, 30.0, 20.0], "area": 374, "segmentation": {"size": [512, 512], "counts": "]lT71l?4K4L4N3O0001O01O01O0001O01O0001O01O01O00M4N103M00010O000LkS<"}, "image_id": 393, "id": 6109}, {"iscrowd": 0, "category_id": 1, "bbox": [404.0, 393.0, 56.0, 63.0], "area": 1637, "segmentation": {"size": [512, 512], "counts": "f]Z61n?2N2N2N2N3M2BGRA;l>GRA;m>GQA:m>HQA:m>>N2N2O1O2NO1OHiNiAV1W>mNhAQ1X>QOhAn0W>TOjAi0V>YOiAf0W>\\OgAd0Y>^OeAb0\\>_ObAa0^>`000000001O02N2N2N2N2N2N2N2N3M2O1N2N2N2N2N2N2N3M2N2O1N2N2N2Nnbi0"}, "image_id": 393, "id": 6110}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 395.0, 18.0, 34.0], "area": 321, "segmentation": {"size": [512, 512], "counts": "[W1nAkNR>U1lAmNT>`1000_NlAU1T>iNnAX1Q>fNQBZ1o=dNTB[1Y>00eN`AU1`>iNbAW1^>gNdAY1b>00010O000000000O0O0001O3M2N2N2N2O1N2N2N2N2N3M2N2N2N2N2O1N2N2NcaZ3"}, "image_id": 393, "id": 6113}, {"iscrowd": 0, "category_id": 1, "bbox": [493.0, 412.0, 19.0, 28.0], "area": 334, "segmentation": {"size": [512, 512], "counts": "Wmf72m?2N2N3M2O1N2N2N2N3M2N2N2O1O01N2N3M2NPC"}, "image_id": 393, "id": 6114}, {"iscrowd": 0, "category_id": 1, "bbox": [44.0, 419.0, 51.0, 67.0], "area": 1679, "segmentation": {"size": [512, 512], "counts": "a^f01n?2N3M2O2M2N2O2M2N2N3N1N2N3M2O1N000010O000001O0K5K5N31N1OO1K6J52N1O100O1O100O1O2N100O1O1O100O2mN_Ad0o>I7H9HcR`6"}, "image_id": 393, "id": 6115}, {"iscrowd": 0, "category_id": 1, "bbox": [476.0, 426.0, 22.0, 21.0], "area": 220, "segmentation": {"size": [512, 512], "counts": "e]^71o?1N2N2N2N2N2N3M2N000001O001O2N3M2N2O1N2N2N_b6"}, "image_id": 393, "id": 6116}, {"iscrowd": 0, "category_id": 1, "bbox": [16.0, 441.0, 24.0, 24.0], "area": 276, "segmentation": {"size": [512, 512], "counts": "R^81n?2N2N2N2N2N2N2N2N200000000000N2N2N2N2N2N2N2N2Nja[7"}, "image_id": 393, "id": 6117}, 
{"iscrowd": 0, "category_id": 1, "bbox": [0.0, 446.0, 28.0, 54.0], "area": 960, "segmentation": {"size": [512, 512], "counts": "n=f1[>N200O1N2N0000001O2N3M2N2N2N2N2N2O1N2N2N2N2N2N2N2Fa@0a?Na@0a?Na@0mPb7"}, "image_id": 393, "id": 6118}, {"iscrowd": 0, "category_id": 1, "bbox": [452.0, 455.0, 52.0, 57.0], "area": 1457, "segmentation": {"size": [512, 512], "counts": "]_R71n?2N2N2N2N2N3M2N2N2N2O1N2N2N2N2N2N2N1O0000000N2O1N2N2O1N200100O2N1O2N1O1O2N1O1O2N1O101N1O2N1L4M4K4M3L5Lda3"}, "image_id": 393, "id": 6119}, {"iscrowd": 0, "category_id": 1, "bbox": [194.0, 458.0, 46.0, 54.0], "area": 1502, "segmentation": {"size": [512, 512], "counts": "o_Q31m?2O1ROOkA3S>0jA2U>OiA3U>0iA1U>1iA1U>2hA0W>1gA1W>2gAOW>4fANX>4fANY>4eAMY>n0O11O001O001O1O001O001O1O001O001N1N3M3N1N3M2O2M2[On@:U?Dm@9V?Dm@:]?N3M2N3N1Nn`W4"}, "image_id": 393, "id": 6120}, {"iscrowd": 0, "category_id": 1, "bbox": [102.0, 466.0, 61.0, 46.0], "area": 1450, "segmentation": {"size": [512, 512], "counts": "i_c11n?2M2O2M3N1N3N2N2M2O1O1001O1O001O1O00O1O1N2O1O1N2O1N2O1N2O1N2O1N2O1N2O1N2O1O1001M2O2M3N1O2M3N1N3N2M2O2M3N2M2O2M3N1N3N2N1NRQ^5"}, "image_id": 393, "id": 6121}, {"iscrowd": 0, "category_id": 1, "bbox": [17.0, 487.0, 9.0, 9.0], "area": 40, "segmentation": {"size": [512, 512], "counts": "[o81n?2N3N1N0002N2N2Ng`b7"}, "image_id": 393, "id": 6122}, {"iscrowd": 0, "category_id": 1, "bbox": [295.0, 488.0, 46.0, 24.0], "area": 577, "segmentation": {"size": [512, 512], "counts": "ooc41n?1O1O1O100O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O11O1O1O1O1O1O1O1O1O2N1O1O1O1O1O1O1O1O1O2MSPe2"}, "image_id": 393, "id": 6123}, {"iscrowd": 0, "category_id": 1, "bbox": [502.0, 508.0, 9.0, 4.0], "area": 24, "segmentation": {"size": [512, 512], "counts": "o_k71n?1O1O100001O1O1O1"}, "image_id": 393, "id": 6124}, {"iscrowd": 0, "category_id": 1, "bbox": [129.0, 0.0, 42.0, 20.0], "area": 440, "segmentation": {"size": [512, 512], "counts": "P`P21o?001O1O1O001O1O1O001O1O001O1O1O001O1O1O001O1O1O001O1O00O1O1N2O1O1N2O1O1N2O1O1N2O1NR`Z5"}, "image_id": 395, "id": 6125}, {"iscrowd": 0, "category_id": 1, "bbox": [168.0, 0.0, 58.0, 45.0], "area": 1364, "segmentation": {"size": [512, 512], "counts": "lPd21n?2N2M2O2N2M3N1O2M3N2N1N3N2N2M2O2N2M3N1O2M3ON2O1O1N2O1O1N2O1O1N2O1O1N2O1O1N200100O0100000O01O1N2N1N3N2N2M2O2N2M3N1Odo^4"}, "image_id": 395, "id": 6126}, {"iscrowd": 0, "category_id": 1, "bbox": [232.0, 0.0, 29.0, 17.0], "area": 294, "segmentation": {"size": [512, 512], "counts": "SPd31n?2N2N2O1O1O1O1O1O1O1O1O1O1O00O1O1O1O1O1O1O1O100O1O1O2N2No_m3"}, "image_id": 395, "id": 6127}, {"iscrowd": 0, "category_id": 1, "bbox": [425.0, 2.0, 38.0, 39.0], "area": 813, "segmentation": {"size": [512, 512], "counts": "e`d62m?2N2N2N2N2JFd@=ZAAh><`0N2N2M2O2N2Mg]X7"}, "image_id": 395, "id": 6130}, {"iscrowd": 0, "category_id": 1, "bbox": [288.0, 66.0, 49.0, 50.0], "area": 1273, "segmentation": {"size": [512, 512], "counts": "mR`41n?1N3N2N2M2O2M3N2N1N3N2M3N1O2M3N1O2O1000O01O1M3N1O2M3000O01000O1N1N3N2oNYAe0i>YOZAe0h>YOZAe0h>YOYAe0S?N2N2M2O2N2M3N1N3N2NU]g2"}, "image_id": 395, "id": 6131}, {"iscrowd": 0, "category_id": 1, "bbox": [359.0, 66.0, 39.0, 40.0], "area": 934, "segmentation": {"size": [512, 512], "counts": "kbc52l?2M4M2M4M2M3N3L3N3L300010O01SAQOh>n0VATOj>R1O01O010O000N3N101O01O01O0M4M2M4M2N2M4M2M4M2Mfmh1"}, "image_id": 395, "id": 6132}, {"iscrowd": 0, "category_id": 1, "bbox": [40.0, 89.0, 48.0, 54.0], "area": 1360, "segmentation": {"size": [512, 512], "counts": "hSd01n?2N2M3N1N3N2N2M2O2N2M2O2M3N2N1N3N2M2O2N2M3N1O2O100O01000O0100M3N1O2M3N2BXA@j>>YA@i>>XAAj>>YA@i>>>M3N1N3N2N1Nilc6"}, 
"image_id": 395, "id": 6133}, {"iscrowd": 0, "category_id": 1, "bbox": [491.0, 96.0, 21.0, 40.0], "area": 462, "segmentation": {"size": [512, 512], "counts": "cce72m?2N2N2N2N2N2N2N2N2N2N2N2N2N2N3M2N2N2O0O0QM"}, "image_id": 395, "id": 6134}, {"iscrowd": 0, "category_id": 1, "bbox": [81.0, 102.0, 21.0, 22.0], "area": 226, "segmentation": {"size": [512, 512], "counts": "ecX11n?2N1N3N2N2M2O2N2N1N10O101O2M3N2N1N3N2N2Nel\\6"}, "image_id": 395, "id": 6135}, {"iscrowd": 0, "category_id": 1, "bbox": [332.0, 111.0, 11.0, 11.0], "area": 63, "segmentation": {"size": [512, 512], "counts": "cSV52m?2M3N110O1000N1N3N1N_\\d2"}, "image_id": 395, "id": 6136}, {"iscrowd": 0, "category_id": 1, "bbox": [256.0, 112.0, 60.0, 53.0], "area": 1601, "segmentation": {"size": [512, 512], "counts": "YTP41m?3N1N3N2N2M2O2M3N2N1N3N2M2O2N2M3N1O20000O0100000O010000O010000O0100O0OO120O01mNVAo0j>oNWAR1l>00O010M3N2N1N3N2M3N1O2M3N1O2M3N2M2O2NckQ3"}, "image_id": 395, "id": 6137}, {"iscrowd": 0, "category_id": 1, "bbox": [357.0, 114.0, 14.0, 24.0], "area": 180, "segmentation": {"size": [512, 512], "counts": "dcb53m?3L4M4L3L4M3NN4M3M3M4K4M3Mj[V2"}, "image_id": 395, "id": 6138}, {"iscrowd": 0, "category_id": 1, "bbox": [11.0, 117.0, 11.0, 11.0], "area": 70, "segmentation": {"size": [512, 512], "counts": "kc52m?1N3M2O2O1O01M3N2M2OYld7"}, "image_id": 395, "id": 6139}, {"iscrowd": 0, "category_id": 1, "bbox": [78.0, 118.0, 51.0, 55.0], "area": 1361, "segmentation": {"size": [512, 512], "counts": "jTW12m?1O2M3N2N1N3N2N2M2O2N2M3N1O2M3N2N1N3N2N1N3N2N2M0100O011O2M3N2N1N3N2N2M2O2N2M3N1O2M3N2N1N3N2N2M2O2N2Mk[o5"}, "image_id": 395, "id": 6140}, {"iscrowd": 0, "category_id": 1, "bbox": [2.0, 148.0, 39.0, 37.0], "area": 709, "segmentation": {"size": [512, 512], "counts": "TU11n?2N2M3N1O2N2M3N2N1O2M3N2N110O100000O10O1000O10O1000N2N2M2O2N2N2M3N1O2N2M3N1OjZ[7"}, "image_id": 395, "id": 6141}, {"iscrowd": 0, "category_id": 1, "bbox": [318.0, 148.0, 15.0, 14.0], "area": 110, "segmentation": {"size": [512, 512], "counts": "kTo41m?3N1N3N2N110O10O100N1O2M3N1NX[i2"}, "image_id": 395, "id": 6142}, {"iscrowd": 0, "category_id": 1, "bbox": [47.0, 156.0, 15.0, 14.0], "area": 117, "segmentation": {"size": [512, 512], "counts": "Reg02m?2M2O2M3O10O0100O01N2M2O2M3NojP7"}, "image_id": 395, "id": 6143}, {"iscrowd": 0, "category_id": 1, "bbox": [68.0, 164.0, 10.0, 11.0], "area": 62, "segmentation": {"size": [512, 512], "counts": "[UR12l?2O2M2N30O0O2M3N1Nkjh6"}, "image_id": 395, "id": 6144}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 164.0, 29.0, 29.0], "area": 448, "segmentation": {"size": [512, 512], "counts": "cUW72m?2M3N1O2M3N2M2O2N2M3N101O10O1000OO2M3N1N3N2N2M2O2M3N2N1NcZ:"}, "image_id": 395, "id": 6145}, {"iscrowd": 0, "category_id": 1, "bbox": [244.0, 167.0, 45.0, 56.0], "area": 1312, "segmentation": {"size": [512, 512], "counts": "TVj31n?2M3N1O2M3N2N2M2O2N2j@[Om>h0PA[On>f0QA\\Om>m0N0O10000O2O2N2M3N101000000O0100000O]OaAE`>9bAG^>7cAJ]>3fAMY>2iAMX>1jAMX>0jAOX>OjAOX>OjANX>1jAMX>0jAOX>OjAOX>OjANX>0V[_3"}, "image_id": 395, "id": 6146}, {"iscrowd": 0, "category_id": 1, "bbox": [76.0, 169.0, 28.0, 27.0], "area": 400, "segmentation": {"size": [512, 512], "counts": "hUV11m?2O2M3N2M2O2N2M2O2M3O0100000O0100O0O2M3N1O2M3N2M2O2N2M]j[6"}, "image_id": 395, "id": 6147}, {"iscrowd": 0, "category_id": 1, "bbox": [395.0, 184.0, 24.0, 26.0], "area": 331, "segmentation": {"size": [512, 512], "counts": "UfU62m?2N2N2M2O2N2M3N2N1N3O1O10O100M2\\Og@`0^?N2M2O2N2M3N1OnY^1"}, "image_id": 395, "id": 6148}, {"iscrowd": 0, "category_id": 1, "bbox": [8.0, 186.0, 12.0, 12.0], "area": 
74, "segmentation": {"size": [512, 512], "counts": "oU41n?2M2O2N20O0100000M2O2NRje7"}, "image_id": 395, "id": 6149}, {"iscrowd": 0, "category_id": 1, "bbox": [444.0, 193.0, 17.0, 17.0], "area": 152, "segmentation": {"size": [512, 512], "counts": "XVn63l?2N1N3N2M21000O10O100N1O2N2M2O2MiYi0"}, "image_id": 395, "id": 6150}, {"iscrowd": 0, "category_id": 1, "bbox": [7.0, 198.0, 54.0, 57.0], "area": 1463, "segmentation": {"size": [512, 512], "counts": "Sg31n?1N3N2N2M2O2N2M3N1N3N2N1N3N2N2M2O2M3N2N1N3O10O1000O10O1000O10O1mN]Ah0c>VO_Aj0`>UObAk0^>ROeAm0\\>QOeAP1Z>oNhAQ1d>O0N3N2N2M2O2N2M3N1O2M3N2M2O2N2MiXQ7"}, "image_id": 395, "id": 6151}, {"iscrowd": 0, "category_id": 1, "bbox": [472.0, 203.0, 40.0, 49.0], "area": 1207, "segmentation": {"size": [512, 512], "counts": "SW\\71n?2N2M3N1O2M3N2N1N3N2N1N3N2M3N1O2M3O10O0100000O0100000O0100N1O2N2M10O10O11N2O2TOZA;[8"}, "image_id": 395, "id": 6152}, {"iscrowd": 0, "category_id": 1, "bbox": [214.0, 209.0, 44.0, 55.0], "area": 1267, "segmentation": {"size": [512, 512], "counts": "]W[31n?2N1N3N2M3N1O2M3N2N1N3N2N2M2O2N2M3N1N3N2N101O1000O10O1000O10O_O^AEb>9`AG`>6bAJ^>5dAK\\>3fAMZ>0hA0Y>NiA0X>OjANY>OjAOX>OiAOZ>OhAOY>OjAOX>OjANXWo3"}, "image_id": 395, "id": 6153}, {"iscrowd": 0, "category_id": 1, "bbox": [285.0, 211.0, 33.0, 33.0], "area": 573, "segmentation": {"size": [512, 512], "counts": "Tg^42l?3N2N1N3N2N2M2O2M3N2N1N300000O01000O10O01N2M3N1O2M3N2N1N3N2M2O2NohP3"}, "image_id": 395, "id": 6154}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 219.0, 2.0, 3.0], "area": 5, "segmentation": {"size": [512, 512], "counts": "k63m?OVin7"}, "image_id": 395, "id": 6155}, {"iscrowd": 0, "category_id": 1, "bbox": [64.0, 234.0, 14.0, 16.0], "area": 117, "segmentation": {"size": [512, 512], "counts": "bWP13l?2N2N1N3N2N2NO3N2N2N2M2O2Nbhh6"}, "image_id": 395, "id": 6156}, {"iscrowd": 0, "category_id": 1, "bbox": [417.0, 240.0, 60.0, 48.0], "area": 1493, "segmentation": {"size": [512, 512], "counts": "Qh`61m?3N2N2M2O2M3O0a@B]?a0O01000O0100000N1N3N2M2O2N2M2O2M3N1010000O010000O01000O01000O010N2N110O1N2M2O0O10O12M3Ij@AW?=l@@W?>7M2O2M3N2M2OPXa0"}, "image_id": 395, "id": 6157}, {"iscrowd": 0, "category_id": 1, "bbox": [312.0, 243.0, 11.0, 12.0], "area": 69, "segmentation": {"size": [512, 512], "counts": "iWl42l?3N1N3N2O10M3N2M2O2NZXn2"}, "image_id": 395, "id": 6158}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 248.0, 20.0, 41.0], "area": 418, "segmentation": {"size": [512, 512], "counts": "h7Y1h>M3N1O2N2M3N1O2M3N2N2M2O2N2M3N1O2M3N2Njge7"}, "image_id": 395, "id": 6159}, {"iscrowd": 0, "category_id": 1, "bbox": [184.0, 250.0, 44.0, 55.0], "area": 1262, "segmentation": {"size": [512, 512], "counts": "fXl21n?2N2M2O2N2N2M3N1O2M3N2N2N1N3N2N2M2O2N2N2M3N1010O1O0100000O10O_O^AEb>8`AI`>5bAK^>3dAL]>2eANZ>0iAOX>OiAOZ>OhAOZ>NiA0X>OjAOX>OiAOZ>OhAOZ>NiA0nU^4"}, "image_id": 395, "id": 6160}, {"iscrowd": 0, "category_id": 1, "bbox": [344.0, 260.0, 66.0, 50.0], "area": 1684, "segmentation": {"size": [512, 512], "counts": "mX\\51n?2N2M2O2N2M3N1O2M3N1O2M3N2N1N3N2M3N1O2O1000O0100000O01000O1N1O2M3N2N1N3O100O010000O01000N2N110O10O1000OO2N2N2M2O2N2M3N1O2M3N2N1N3Nofb1"}, "image_id": 395, "id": 6161}, {"iscrowd": 0, "category_id": 1, "bbox": [400.0, 261.0, 14.0, 14.0], "area": 101, "segmentation": {"size": [512, 512], "counts": "\\XX62m?1N3N2N2M21000O01M3N2M2O2Nfg`1"}, "image_id": 395, "id": 6162}, {"iscrowd": 0, "category_id": 1, "bbox": [115.0, 280.0, 28.0, 28.0], "area": 428, "segmentation": {"size": [512, 512], "counts": "Vii12l?3N2N1N3N2N2M2O2N2N2N101000000O01N2N2N2M2O2N2M3N1O2M3NmVh5"}, "image_id": 
395, "id": 6163}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 281.0, 29.0, 29.0], "area": 448, "segmentation": {"size": [512, 512], "counts": "Y91m?3N1O2M3N2N1N3N2N2M2O2N2O01000O10N2N2N1N3N2N1N3N2M3N1O2MnVa7"}, "image_id": 395, "id": 6164}, {"iscrowd": 0, "category_id": 1, "bbox": [145.0, 289.0, 46.0, 41.0], "area": 1139, "segmentation": {"size": [512, 512], "counts": "jiX23j?3N2M4L3N3L3M3M4M2O1010O0010O0010O00010O0010O00010O0010O00010O0010O0010N1M3M4M2M4L3N2M4L3MeVP5"}, "image_id": 395, "id": 6165}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 290.0, 29.0, 26.0], "area": 369, "segmentation": {"size": [512, 512], "counts": "\\Yi61n?2N1N3N2N2M2O2N2O1O0100000O0100000O010000N1O2M3N2N1N3N2NaVh0"}, "image_id": 395, "id": 6166}, {"iscrowd": 0, "category_id": 1, "bbox": [396.0, 299.0, 66.0, 50.0], "area": 1748, "segmentation": {"size": [512, 512], "counts": "VZV61n?2N1N3N2N2M2O2N2M3N1O2M3N2N1N3N2N2M2O2N2N2O0100000O0100000OO2M3N2N1N3N2N2M20100000O010O1N2O010O1000O10O01M3N2N1N3N2N2M2O2N2M3N1O2Mjeh0"}, "image_id": 395, "id": 6167}, {"iscrowd": 0, "category_id": 1, "bbox": [313.0, 303.0, 59.0, 60.0], "area": 1775, "segmentation": {"size": [512, 512], "counts": "Tjl42m?1O2M3N2N2M2O2N2N2M2O2N2N2M3N1O200000O01000000M2O2WAkNd>Z1N2M30O100000O010000000N1O20000O10oNdA<]>BeA>[>_OgA?[>@gA>[>@gA>[>_OhA?Z>_OhA>[>@fA?[>@gA>o>M3N1O2N2M3N2NYeU2"}, "image_id": 395, "id": 6168}, {"iscrowd": 0, "category_id": 1, "bbox": [84.0, 321.0, 23.0, 24.0], "area": 263, "segmentation": {"size": [512, 512], "counts": "`ZZ11n?2M3N2N2N2N1N3N2N2N0O1002N2M3N2N1O2N2M3N2N1OhUZ6"}, "image_id": 395, "id": 6169}, {"iscrowd": 0, "category_id": 1, "bbox": [114.0, 327.0, 47.0, 41.0], "area": 1190, "segmentation": {"size": [512, 512], "counts": "R[i11l?4L3M3N3L3M3M4L3N2N3O010O00010O01O01O01O01O01O01O01O01O010O00010O01O01O01O0N2M4L3M3M4L3N2M4L_U_5"}, "image_id": 395, "id": 6170}, {"iscrowd": 0, "category_id": 1, "bbox": [174.0, 335.0, 21.0, 16.0], "area": 174, "segmentation": {"size": [512, 512], "counts": "eZg21n?1O2M3N2O10O10O1000OO2M3N0002N2N2M2O2N2N]Un4"}, "image_id": 395, "id": 6171}, {"iscrowd": 0, "category_id": 1, "bbox": [366.0, 343.0, 53.0, 65.0], "area": 1787, "segmentation": {"size": [512, 512], "counts": "a[g53l?1O2N2M3N2N1ECRA?l>CQA`0m>BQA`0m>BQA`0l>;O2N2N2000O10\\AjN]>V1aAkN]>X1aAjN]>^1N200O10O100000O10O100000O10O1000000N1POdA:_>DcA:_>CdA;^>CdA;^>CcA;_>DcA:_>CdA;^>CdA;^>CdA:R?O2N2N2N2MnS^1"}, "image_id": 395, "id": 6172}, {"iscrowd": 0, "category_id": 1, "bbox": [232.0, 351.0, 31.0, 31.0], "area": 490, "segmentation": {"size": [512, 512], "counts": "][d31n?2N2N3M2N2N2N2N2O1N2N3M2N2N2N11N2N2N2N2N2N2N3M2O1N2N2N2N2N2NbTl3"}, "image_id": 395, "id": 6173}, {"iscrowd": 0, "category_id": 1, "bbox": [298.0, 354.0, 77.0, 104.0], "area": 3340, "segmentation": {"size": [512, 512], "counts": "n[e41n?2N2N2N2N3M2N2N2N2O1m@]Of>e0XA]Of>e0YA]Od>e0ZA]Od>e0ZA]Od>Q1N2N2N2N2N2N2N2N2O1N3M2N2TBnMg=W2N3M2O10D^BYNb=e1`B[N`=c1cB\\N]=b1eB]N\\=b1eB]N\\=c1dB[N^=e1bBYNa=f1_BXNc=h1;1O000YBWNV=i1hBYNX=4\\BZ1:dNZ=0^B\\17eN[=N_B]14gN]=JaB9Lh04WO_=FcB4JN2Q10YOj=FVBN2S1L[Ol=B_Bd1o=001O0001O000N2N2N2N2A^A]Od>a0^A^Oc>`0_A^Oc>a0_A\\Oc>b0_A\\Oc>b0_A\\Oc>b0`0M2N2N2N2N2N2N2NhRT2"}, "image_id": 395, "id": 6174}, {"iscrowd": 0, "category_id": 1, "bbox": [55.0, 361.0, 29.0, 29.0], "area": 441, "segmentation": {"size": [512, 512], "counts": "ikk02m?1N3N2N1N3N2M3N1O2M3N1O20000O0100N2N1O2M3N1N3N2N2M2O2N2M]de6"}, "image_id": 395, "id": 6175}, {"iscrowd": 0, "category_id": 1, "bbox": [83.0, 367.0, 46.0, 48.0], "area": 1273, "segmentation": {"size": [512, 512], "counts": 
"XlY14i?3M3M4M2M3M4L3N2M4O010O01O01O01O01O01O01UAROb>n0[AVOe>R10010O00010O00010O0001O0M3M4M2M4L3M3N3L3M3M4L3NVTo5"}, "image_id": 395, "id": 6176}, {"iscrowd": 0, "category_id": 1, "bbox": [150.0, 375.0, 15.0, 16.0], "area": 127, "segmentation": {"size": [512, 512], "counts": "Q\\[21n?1N3N2M2O2M3N1010M3N1N3N2M2OVT]5"}, "image_id": 395, "id": 6177}, {"iscrowd": 0, "category_id": 1, "bbox": [223.0, 397.0, 48.0, 42.0], "area": 1047, "segmentation": {"size": [512, 512], "counts": "Qm_32n?2M2N3N2M2O2M3M2O2M3N1N3M10O010O01O01O010O0101N1O010O01MQAUOn>k0310O0010O02O1N3N2M3M2O2M3N1N3M3N1N3NnRh3"}, "image_id": 395, "id": 6178}, {"iscrowd": 0, "category_id": 1, "bbox": [50.0, 410.0, 50.0, 36.0], "area": 1232, "segmentation": {"size": [512, 512], "counts": "e]i01j?5L4K5L5J5L400010O000000010O000001O01O0000010O000001O01O0000010O000001O01O000N2K6O00004MO01O0K5L4Knb]6"}, "image_id": 395, "id": 6179}, {"iscrowd": 0, "category_id": 1, "bbox": [118.0, 415.0, 15.0, 15.0], "area": 116, "segmentation": {"size": [512, 512], "counts": "X]k11m?3N2N2N1O2M3N01O2N2N2N1N3N2NmRm5"}, "image_id": 395, "id": 6180}, {"iscrowd": 0, "category_id": 1, "bbox": [304.0, 436.0, 20.0, 18.0], "area": 183, "segmentation": {"size": [512, 512], "counts": "j]h42l?3N2N1N3O100O01000O10O1000O01N2N2M2O2NSbm2"}, "image_id": 395, "id": 6181}, {"iscrowd": 0, "category_id": 1, "bbox": [270.0, 438.0, 48.0, 46.0], "area": 1117, "segmentation": {"size": [512, 512], "counts": "\\^W41n?2M3N2N1N3N2N2M2O2N2M3N1O2M3N2N1O200O10O1000O10O1000O10O1000OO2N2M3N1O2M3N2N1N3N2N2M3N1O2M3N2NdaP3"}, "image_id": 395, "id": 6182}, {"iscrowd": 0, "category_id": 1, "bbox": [237.0, 441.0, 29.0, 29.0], "area": 465, "segmentation": {"size": [512, 512], "counts": "Wnf33l?2N2M2O2N2M3N1O2M3N2N1010000O0100O1N1N3N2N2M2O2N2M3N1O2Mmaj3"}, "image_id": 395, "id": 6183}, {"iscrowd": 0, "category_id": 1, "bbox": [19.0, 449.0, 43.0, 36.0], "area": 1143, "segmentation": {"size": [512, 512], "counts": "in95f?5K5K6K4K5000001O01O0001O0001O0001O0001O0001O0001O0001O0001O0001O0001O000N2K5K6J5K5KmaP7"}, "image_id": 395, "id": 6184}, {"iscrowd": 0, "category_id": 1, "bbox": [87.0, 454.0, 10.0, 10.0], "area": 56, "segmentation": {"size": [512, 512], "counts": "[n[12m?1O2M3N1010N1N3N2MiQ_6"}, "image_id": 395, "id": 6185}, {"iscrowd": 0, "category_id": 1, "bbox": [216.0, 454.0, 25.0, 23.0], "area": 290, "segmentation": {"size": [512, 512], "counts": "`^\\31n?2M2O2N2M3N1O2N200O0100000O010000O0O2M3N1N3N2N2M`QW4"}, "image_id": 395, "id": 6186}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 480.0, 26.0, 32.0], "area": 555, "segmentation": {"size": [512, 512], "counts": "R?n0Q?1O1001O1O1O001O1O1O001O1O1O001N2M3N1N3N2N2M2O2N2Ma`b7"}, "image_id": 395, "id": 6187}, {"iscrowd": 0, "category_id": 1, "bbox": [223.0, 480.0, 47.0, 32.0], "area": 873, "segmentation": {"size": [512, 512], "counts": "no_32m?1O1N2O1N2O1O1N2O1O1N2O1N2O1O1N2O1O1N2O1N2O11O001O1O1O001O1O1O001O1O001O1O1M2O2N2M3N1O2M3N1Na`h3"}, "image_id": 395, "id": 6188}, {"iscrowd": 0, "category_id": 1, "bbox": [135.0, 484.0, 42.0, 28.0], "area": 670, "segmentation": {"size": [512, 512], "counts": "ooS21m?2O1O1N2O1O1N2O1O1N2O1O1N2O1O1O1N2O1O1N21O1O001O1O1O001O1O1O001O1O1N1N3N2N2M2O2N2M^PW5"}, "image_id": 395, "id": 6189}, {"iscrowd": 0, "category_id": 1, "bbox": [440.0, 491.0, 23.0, 21.0], "area": 271, "segmentation": {"size": [512, 512], "counts": "i_l62m?2N2N2N2N2N1O1O1O1O1O1O1O0002N2N2N2N2N2N2N2M_Ph0"}, "image_id": 395, "id": 6190}, {"iscrowd": 0, "category_id": 1, "bbox": [457.0, 491.0, 42.0, 21.0], "area": 459, "segmentation": {"size": [512, 
512], "counts": "ooT71n?1O1O1O1N2O1O1O1O1O1O1O1O1O1O1N2O1O1001O1O1O1O1O001O1O1O1O1O1O1O1O1O1O1O001O1O1O1OQP6"}, "image_id": 395, "id": 6191}, {"iscrowd": 0, "category_id": 1, "bbox": [52.0, 493.0, 17.0, 16.0], "area": 132, "segmentation": {"size": [512, 512], "counts": "c_j02m?1N3N2N2N110000O010000OO2N2M3N1N]Pm6"}, "image_id": 395, "id": 6192}, {"iscrowd": 0, "category_id": 1, "bbox": [169.0, 0.0, 104.0, 75.0], "area": 3352, "segmentation": {"size": [512, 512], "counts": "^ad22m?2N2N2M3N2N2N2N1O2N2N2N2N200000000ROVOeBj0Y=XOfBi0X=YOfBi0X=YOfBi0X=YOfBi0X=YOfBi0X=YOfBi0X=YOfBi0Y=XOfBi0Y=XOgBh0X=YOhBg0W=ZOiBe0W=\\OiBb0X=_OhB?Y=BgBd=A\\B?e=@[B`0f=_OZBa0g=^OYBb0h=]OXBc0i=\\OWBd0j=]OTBc0m=^OQBb0P>_OnAa0S>@kA`0V>AhA?Y>BeA>\\>b01O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1OQ`g3"}, "image_id": 397, "id": 6193}, {"iscrowd": 0, "category_id": 1, "bbox": [311.0, 0.0, 201.0, 144.0], "area": 8750, "segmentation": {"size": [512, 512], "counts": "P`k41o?1O00001O00001O0e@0f>1VA2j>NSA5m>KPA9o>Hn@:R?Fk@=U?8O00001O00001O001O00001O00001O000000M3M3M3001O00M3M3N21O00001O00001O00001O001O00001O00001O00001O001O00001O00001m@VOn>o0O00001O00001O001O00WOUA=k>@XAa0g>]O[Ac0e>ZO_Ae0n>1O01O01O010O01O01O01O01O01O000M4M21O0N2M4M2M3M4L3N3O00010O00010O01O01O010O000N30O00010O0010O00010O0010O00010O0010O00010O001nBhN[;W1bDlN^;T1_DoNa;R1[DROe;m0YDUOi;i0TDZOm;f0oC^OPPRDCm;=SDCn;9VDFj;7YDJh;1\\DNe;O]D1c;EbCVOn0V1_;AfCVOo0X1\\;^OnDc0Q;[OQEe0o:\\OQEc0P;\\OPEe0o:[OQEe0o:\\OQEc0P;\\OPEe0o:[OQEe0o:\\OQEc0P;\\OPEe0o:\\OQEc0P;\\OSEa0m:_OTEa0k9"}, "image_id": 397, "id": 6194}, {"iscrowd": 0, "category_id": 1, "bbox": [261.0, 3.0, 29.0, 30.0], "area": 443, "segmentation": {"size": [512, 512], "counts": "a`R41n?2N2N2N2N2N2O1N2N2N2N3M2N2N2N02N2N2N2N3M2O1N2N2N2N2N2N3M_o^3"}, "image_id": 397, "id": 6195}, {"iscrowd": 0, "category_id": 1, "bbox": [268.0, 24.0, 47.0, 48.0], "area": 1127, "segmentation": {"size": [512, 512], "counts": "\\QV41n?2N2N3M2N2N2N2N2O1N2N2N2N2N2N2N2N2N2N2N3M2000000000N2N2N2N2N2N2N2N2N3M2N2N2O1N2N2N2N2N2N2N2N_^R3"}, "image_id": 397, "id": 6196}, {"iscrowd": 0, "category_id": 1, "bbox": [316.0, 43.0, 8.0, 9.0], "area": 40, "segmentation": {"size": [512, 512], "counts": "^Qn42m?2N2N2000N2N2Nbnm2"}, "image_id": 397, "id": 6197}, {"iscrowd": 0, "category_id": 1, "bbox": [226.0, 56.0, 15.0, 14.0], "area": 108, "segmentation": {"size": [512, 512], "counts": "PRa31n?2N2N2N2N2N1O0001O2N2N2N2N2NT^W4"}, "image_id": 397, "id": 6198}, {"iscrowd": 0, "category_id": 1, "bbox": [348.0, 58.0, 23.0, 23.0], "area": 267, "segmentation": {"size": [512, 512], "counts": "SR^51n?2O1N2N2N2N2N3M2N2N200000O1N2N2N2N2N3M2O1N2Nj]V2"}, "image_id": 397, "id": 6199}, {"iscrowd": 0, "category_id": 1, "bbox": [310.0, 64.0, 7.0, 8.0], "area": 33, "segmentation": {"size": [512, 512], "counts": "SRk42m?3M2N02O1N2Nn]Q3"}, "image_id": 397, "id": 6200}, {"iscrowd": 0, "category_id": 1, "bbox": [213.0, 72.0, 62.0, 62.0], "area": 2032, "segmentation": {"size": [512, 512], "counts": "lbZ31n?2j@OZ>3dAOZ>3dAOZ>3dAOZ>3dAOZ>3dAOZ>3dA0Y>2eA0Z>2cA0[>2cA0[>2cA0[>2cA0[>l0N2N00000000000001O2N2N2N2N2N200O1N2N2N3M0000000000000000010O00002N2N2N2N2N2N2N2N2N2N2N3M2N2N2N2NW]f3"}, "image_id": 397, "id": 6201}, {"iscrowd": 0, "category_id": 1, "bbox": [7.0, 73.0, 51.0, 52.0], "area": 1323, "segmentation": {"size": [512, 512], "counts": "lb3120i?5N2N2N20000N2N2N3M2N2N2N2O1N2N2N2N2N2N2N2N2N2N201O01O0N2N2N2N2N2N2N2N2N2N2N2N2N2N2N3N1N2N2N2N2N2N2NllR7"}, "image_id": 397, "id": 6202}, {"iscrowd": 0, "category_id": 1, "bbox": [286.0, 77.0, 
29.0, 29.0], "area": 434, "segmentation": {"size": [512, 512], "counts": "jR_42m?2N2N2N3M2N2N2N2N2N2N2N2N2O01N2N3M2N2N2N2N2N2N2N2N2N2N2OT]R3"}, "image_id": 397, "id": 6203}, {"iscrowd": 0, "category_id": 1, "bbox": [42.0, 112.0, 54.0, 50.0], "area": 1435, "segmentation": {"size": [512, 512], "counts": "ZTe02m?2O1N2N2N3M2N2N2N2N2N01O00000001O2N2l@WOn>o0N2O1N2N10O20O1O1N2N2N2N2N001O03M2N2N2N2N2N2N2N2O2M2N2N2N2Be@4]?Je@4]?Je@4]?Ke@2f?N2Obk_6"}, "image_id": 397, "id": 6204}, {"iscrowd": 0, "category_id": 1, "bbox": [370.0, 113.0, 25.0, 25.0], "area": 313, "segmentation": {"size": [512, 512], "counts": "kSi51n?2N2O1N2N2N2N2N2N2N3M200000O2M2N2O1N2N2N2N2N2N2NR\\j1"}, "image_id": 397, "id": 6205}, {"iscrowd": 0, "category_id": 1, "bbox": [262.0, 115.0, 43.0, 45.0], "area": 978, "segmentation": {"size": [512, 512], "counts": "^TS41n?2N2O1N2N2N2N2N2N2N2N2N001O0000001O2N2N2O1N2N2N2N3M2N000002N3M2N2N2O1GQA^OQ?9n@H3MQ?9n@H3MR?9l@H]?68N2N2No[W3"}, "image_id": 397, "id": 6206}, {"iscrowd": 0, "category_id": 1, "bbox": [398.0, 139.0, 18.0, 17.0], "area": 170, "segmentation": {"size": [512, 512], "counts": "cTW62m?2O1N2N2N2N2N2N1O10O2N2N2N2N2N2N3M^k_1"}, "image_id": 397, "id": 6207}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 157.0, 51.0, 55.0], "area": 1389, "segmentation": {"size": [512, 512], "counts": "he]11n?2O1N2N2N2N2N2N3M2N2N2N2N2N2N2N2N2O1N2N2N2N2N2N3M2N1O1O0002N2N2N2N2N2D[AZOg>d0[AZOg>d0[AZOg>d0[AZOg>d0[AZOg>d0R1N2N2N2O10000000O1\\OZAKh>3ZAKh>3ZAKh>3ZAKh>3ZAKi>2YALi>2YALi>2YALi>2YALi>2d0N^Z:"}, "image_id": 397, "id": 6209}, {"iscrowd": 0, "category_id": 1, "bbox": [308.0, 164.0, 54.0, 52.0], "area": 1452, "segmentation": {"size": [512, 512], "counts": "jUj42m?2N2N2N2Z@Gb?=N2N3N1N2N2N2N2l@VOo>o0N2N2N2N20001O0000O0O00000000000000000000002N2N2N2N2N2N2N2N2N3M2N2N2O1N2N2N2N2N2NYjZ2"}, "image_id": 397, "id": 6210}, {"iscrowd": 0, "category_id": 1, "bbox": [19.0, 165.0, 41.0, 41.0], "area": 800, "segmentation": {"size": [512, 512], "counts": "ne92m?2N2N2N2N2N2N2N2N2N2N2N2N2N2OO00000000000000000001O02N2N2N2N2N2N2N2N2N2N2N2N2N2N^jQ7"}, "image_id": 397, "id": 6211}, {"iscrowd": 0, "category_id": 1, "bbox": [378.0, 167.0, 30.0, 32.0], "area": 517, "segmentation": {"size": [512, 512], "counts": "gUm51n?2N2N2N2N2N2N2N2N2N2N2N2N2O2M000001O2N2O1N2N2N2N2N2N3M2I[@0g?N[@0SZd1"}, "image_id": 397, "id": 6212}, {"iscrowd": 0, "category_id": 1, "bbox": [135.0, 181.0, 20.0, 19.0], "area": 209, "segmentation": {"size": [512, 512], "counts": "PfS22m?2N2N2N2N2N2N1O00000001O2N20000O1N2JY@0i?NY@0lib5"}, "image_id": 397, "id": 6213}, {"iscrowd": 0, "category_id": 1, "bbox": [28.0, 195.0, 31.0, 30.0], "area": 413, "segmentation": {"size": [512, 512], "counts": "hV>1n?2N2N2N2N2N2N2N2N1O000000001O00000000000001O2N2N2N2N2N2N2N2NfYR7"}, "image_id": 397, "id": 6214}, {"iscrowd": 0, "category_id": 1, "bbox": [135.0, 195.0, 64.0, 60.0], "area": 1826, "segmentation": {"size": [512, 512], "counts": "gfS22m?2N2N2O2M2N2N2N2N2N2N2N2N2N2N3N1N2N2N2N2N2N2O100000001O000001O000000000N2N2O1N2N2N3M2N2N2N2N1O000001O01O3M2N2N2N2N2N2N2N2N2O1NmXl4"}, "image_id": 397, "id": 6215}, {"iscrowd": 0, "category_id": 1, "bbox": [358.0, 206.0, 57.0, 61.0], "area": 1671, "segmentation": {"size": [512, 512], "counts": "eWc51n?2N2N2N2N2N2N2N2O1N2^O^OcAd0[>^OcAd0[>^OcAd0[>^OcAd0[>^OcAd0[>^OcAe0Z>]OdAe0Z>b0N2O1N000000000101N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2O1N2N2N2N00002N2N2N2N2N2N2NiX`1"}, "image_id": 397, "id": 6216}, {"iscrowd": 0, "category_id": 1, "bbox": [52.0, 208.0, 55.0, 50.0], "area": 1368, "segmentation": {"size": [512, 512], "counts": 
"YWj02m?2N3M2N2N2N2N2N2N2N2N2N2N2N2O10000O1N2N3M2N1O0000000000001O0001O2N2N2N2N2N2N2N2N3M2N2N2N2N2N00011N2N2N2N2N2NnXZ6"}, "image_id": 397, "id": 6217}, {"iscrowd": 0, "category_id": 1, "bbox": [444.0, 208.0, 64.0, 53.0], "area": 1454, "segmentation": {"size": [512, 512], "counts": "QWn62m?3M2O1N2N2N2N2N2N2N2N3M2N2N2N2N2N2N20010O0000000000000001N1N2N2N2N2N2O1N2N2O2O000001O0000000000N1O00000001O02N3M2N2N2N2N2N2N2OZh1"}, "image_id": 397, "id": 6218}, {"iscrowd": 0, "category_id": 1, "bbox": [110.0, 213.0, 11.0, 11.0], "area": 61, "segmentation": {"size": [512, 512], "counts": "iVg11n?2N2N2N2O2ON2N2N2N2NWYS6"}, "image_id": 397, "id": 6219}, {"iscrowd": 0, "category_id": 1, "bbox": [124.0, 215.0, 15.0, 15.0], "area": 127, "segmentation": {"size": [512, 512], "counts": "mVn12m?2N2N2N2N2N2O10O1N2N2N2N2N2NSYj5"}, "image_id": 397, "id": 6220}, {"iscrowd": 0, "category_id": 1, "bbox": [491.0, 215.0, 21.0, 25.0], "area": 260, "segmentation": {"size": [512, 512], "counts": "mfe72m?2O1N2N2N3M2N2000001O0000000000O1N3M2N2NlH"}, "image_id": 397, "id": 6221}, {"iscrowd": 0, "category_id": 1, "bbox": [131.0, 224.0, 24.0, 25.0], "area": 274, "segmentation": {"size": [512, 512], "counts": "XgQ21n?2N2N2O2M2N2N2N2N200001O0001ON2N2N2N2N2N2N3N1NaXb5"}, "image_id": 397, "id": 6222}, {"iscrowd": 0, "category_id": 1, "bbox": [96.0, 242.0, 54.0, 61.0], "area": 1603, "segmentation": {"size": [512, 512], "counts": "^X`12m?2N2N2N2N2N2N2N2N2N2N2O1N3M2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N3N01N2N2N2@hAUOZ>i0hAUOZ>i0hAUOZ>i0hAUO[>h0gAVO[>i0fAUO\\>i0?N2N2N2N2N2N2N2N2N2N2N2N2Nggd5"}, "image_id": 397, "id": 6223}, {"iscrowd": 0, "category_id": 1, "bbox": [372.0, 256.0, 81.0, 56.0], "area": 2030, "segmentation": {"size": [512, 512], "counts": "jXj52m?2N2N2N2N2N2N2N2N2N2N2N2N3M2N0001O2N2N2N3M2N2N2N2O1N1O0000001Oh@Gj>9VAIh>9VAIh>9VAIh>9VAIh>9VAIh>:UAIh>i0N2N2N2N2N2N2N2N1O0001O2N2N2N2N2N2N2N2N2N2UOSA`0o>^OSA`0o>^OSAa0n>]OSAb0U?000000000002N2N2L`@Eb?:30O1N01O2N2N2N2NZWm0"}, "image_id": 397, "id": 6224}, {"iscrowd": 0, "category_id": 1, "bbox": [129.0, 258.0, 123.0, 100.0], "area": 3947, "segmentation": {"size": [512, 512], "counts": "ajP21n?2N2O1N2N2N2N2N2N2N2N2N3M2JWORAk0l>6N2N2N2N2N01O0000000001O000000000001O2N2N2N2N2N2N1O0001O0000000001O000000000002N2N2N2N2N2N2N2N000001O00000000000000000000000001O0001O000000000000000000000000010O2N2N2N2N2N2N2N2N2N2N2N2N3M2N2N2O1N2O10000000000N2N2N2NSgQ4"}, "image_id": 397, "id": 6225}, {"iscrowd": 0, "category_id": 1, "bbox": [74.0, 261.0, 31.0, 32.0], "area": 426, "segmentation": {"size": [512, 512], "counts": "^XU11n?2N2N2N2N2N2N2N2N2O100000000001O00000001O00N2N2N2N2N2N2N2N2NWW[6"}, "image_id": 397, "id": 6226}, {"iscrowd": 0, "category_id": 1, "bbox": [5.0, 279.0, 57.0, 42.0], "area": 1658, "segmentation": {"size": [512, 512], "counts": "ai25f?5L4K5L5J5M3001O01O0000010O000001O01O2N0010O0000001N10000010O00000010O00000010O00000010O00000010O000000TOQAb0Y?L4K5L4KofP7"}, "image_id": 397, "id": 6227}, {"iscrowd": 0, "category_id": 1, "bbox": [75.0, 286.0, 28.0, 29.0], "area": 382, "segmentation": {"size": [512, 512], "counts": "WiU11n?2N2N3M2N2N2N2N2N2O100000010O000000O1N2N2N2N2N2N3M2N2N`V\\6"}, "image_id": 397, "id": 6228}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 314.0, 13.0, 14.0], "area": 133, "segmentation": {"size": [512, 512], "counts": "j9;e?1O01O0000010O000001OK6JXVi7"}, "image_id": 397, "id": 6229}, {"iscrowd": 0, "category_id": 1, "bbox": [447.0, 314.0, 52.0, 45.0], "area": 1246, "segmentation": {"size": [512, 512], "counts": 
"Ujo63l?2N2O1Z@J_?8_@J_?>M2N2N2N2N2N200001O0000N2N2N3M2N2N2N2000N2N2O1N2N3M2N2N2N2N2N1O00000000002O1N2N3M2N2N2N2N2N2NdU6"}, "image_id": 397, "id": 6230}, {"iscrowd": 0, "category_id": 1, "bbox": [55.0, 318.0, 31.0, 26.0], "area": 555, "segmentation": {"size": [512, 512], "counts": "[jk05k?0000003J4J5K5000001O0001O0001O0001O0001O0001O0001O00M4J5K5KSfd6"}, "image_id": 397, "id": 6231}, {"iscrowd": 0, "category_id": 1, "bbox": [27.0, 322.0, 18.0, 15.0], "area": 200, "segmentation": {"size": [512, 512], "counts": "[j=3i?4K50001O000001O01O0001O01O0001K4LoUY7"}, "image_id": 397, "id": 6232}, {"iscrowd": 0, "category_id": 1, "bbox": [216.0, 323.0, 54.0, 53.0], "area": 1363, "segmentation": {"size": [512, 512], "counts": "R[\\31n?3M2O1N2N2N2N2N2N2N2N2N3M2N2O1N2N2N2N2N00001O00000001O000000000000011N2N2N3M2N2N2N2N2N2N2N2O1N2N3M2N2N2N2NYeh3"}, "image_id": 397, "id": 6233}, {"iscrowd": 0, "category_id": 1, "bbox": [502.0, 340.0, 10.0, 17.0], "area": 96, "segmentation": {"size": [512, 512], "counts": "kZk71n?2N2N2N2O1N3M2N2OO]E"}, "image_id": 397, "id": 6234}, {"iscrowd": 0, "category_id": 1, "bbox": [43.0, 347.0, 12.0, 16.0], "area": 132, "segmentation": {"size": [512, 512], "counts": "Uke03j?4K4M301O0000010OL4M4KWUT7"}, "image_id": 397, "id": 6235}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 359.0, 23.0, 23.0], "area": 276, "segmentation": {"size": [512, 512], "counts": "bkm22m?2N2N3M2N2N2N2N2N2O0O0002N2N2N2N2N3M2N2N2N2O^df4"}, "image_id": 397, "id": 6236}, {"iscrowd": 0, "category_id": 1, "bbox": [495.0, 359.0, 17.0, 44.0], "area": 405, "segmentation": {"size": [512, 512], "counts": "hkg71n?2W@Na?4]@Na?3^@O`?:M3N2N2N2N2N2N2N2QAROi>T1N2N2N2hD"}, "image_id": 397, "id": 6237}, {"iscrowd": 0, "category_id": 1, "bbox": [129.0, 364.0, 35.0, 34.0], "area": 607, "segmentation": {"size": [512, 512], "counts": "gkP22m?2N3M2N2N2N2N2O1N2N2N2N200000000000000O1N11N3N1N2N2N2N2N2N2N2N2N2N2NQd]5"}, "image_id": 397, "id": 6238}, {"iscrowd": 0, "category_id": 1, "bbox": [265.0, 364.0, 62.0, 51.0], "area": 1583, "segmentation": {"size": [512, 512], "counts": "nkT42m?2N2O1N2N2N2l@Da>?\\ACb>?\\ACb>?\\ACb>?\\ACb>?ZAEd>=ZAEd>m0N2N1O0001O2N2N2001OO1N2N2N2N2N2N2O1N1O00001O000000000000000000000000001O2O1N2N2N2N2N2N2N2N2N2N2NYTl2"}, "image_id": 397, "id": 6239}, {"iscrowd": 0, "category_id": 1, "bbox": [50.0, 383.0, 15.0, 16.0], "area": 169, "segmentation": {"size": [512, 512], "counts": "X\\i03i?5K4O101O00010O0000010OM3L4LSTo6"}, "image_id": 397, "id": 6240}, {"iscrowd": 0, "category_id": 1, "bbox": [230.0, 386.0, 33.0, 30.0], "area": 498, "segmentation": {"size": [512, 512], "counts": "b\\c31n?2N2N3N1N2N2N2N2N2N2N2N2N1O00000001O2N2N2N2N2O1N2N1O001O1O2LY@Li?24NfSl3"}, "image_id": 397, "id": 6241}, {"iscrowd": 0, "category_id": 1, "bbox": [309.0, 396.0, 52.0, 49.0], "area": 1342, "segmentation": {"size": [512, 512], "counts": "Vmj41n?2W@O`?4]@Na?:N2N2N3M2N2O1N1O0000002N2N2N2N2N3M2N10O0000000000001O2N2N2N2N2O1N2N2N1O002N2N2N3M2N2N2N2O1N2N2N2NQS[2"}, "image_id": 397, "id": 6242}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 400.0, 32.0, 33.0], "area": 966, "segmentation": {"size": [512, 512], "counts": "`m0UAUOk>Q1O001O00001O001O001O001O001O001O0\\AgNa>]1O001O001O0010O010O001O0N3O010O010O010O0jN_An0a>oNaAQ1_>mNdAS1d>010O01O010O010O0N3L3N2N3M2O20O010M2M4O010O010O010O01O010O010O010M2N2N3L3N3M2N3M2N3MS_k3"}, "image_id": 398, "id": 6257}, {"iscrowd": 0, "category_id": 1, "bbox": [239.0, 0.0, 19.0, 7.0], "area": 74, "segmentation": {"size": [512, 512], "counts": "P`g31o?00001O001O001O001O001O001O0000O1M3NRPo3"}, "image_id": 
398, "id": 6258}, {"iscrowd": 0, "category_id": 1, "bbox": [292.0, 0.0, 91.0, 65.0], "area": 3167, "segmentation": {"size": [512, 512], "counts": "dPb42l?3L3N3M2N3M201O010O010O001O0N3M2N3L301O001O00001O001O001O001O001O010O010O010O010O0O2M2O2O001O00001aAcNY>]1eAfNZ>`101O00001O001O001O001O00001O001OROnANR>0PB1o=MTB2l=KWB6h=H[B7e=G]B9c=E`B;_=CcB=]=AeB`0Z=^OiBa0X=\\OjBe0U=XOoBg0Q=WOQCj0S>O0N3M2M3N3M2N3M2M4M2Nh^P2"}, "image_id": 398, "id": 6259}, {"iscrowd": 0, "category_id": 1, "bbox": [481.0, 22.0, 31.0, 33.0], "area": 774, "segmentation": {"size": [512, 512], "counts": "]a`73i?4K5L5K4L4N201O01O0001O01O0001O01O0001O01O0001O0001O01O0001OTO"}, "image_id": 398, "id": 6260}, {"iscrowd": 0, "category_id": 1, "bbox": [284.0, 50.0, 86.0, 65.0], "area": 3262, "segmentation": {"size": [512, 512], "counts": "dQ^4=c?[OiAg0Q>_OjAf0S>e00004MO000001OWNlAf1W>01O0001O0000N3N1000J6J6K6I6K5J6K501O011N00000001O01O00000001O0O1J6K5M4O0A^B`Nb=Z1dBgN[=S1kBmNU=n0PCROP=h0VCSOP=f0WCSOo<:SB0o<"}, "image_id": 398, "id": 6262}, {"iscrowd": 0, "category_id": 1, "bbox": [106.0, 124.0, 91.0, 69.0], "area": 3007, "segmentation": {"size": [512, 512], "counts": "eTe11m?3M2N3L3N3M2N3L3N3O00010O010O010O0010O010ON3L300010O010O01O010O010O010O0010O0010O0010OO2M2M4M2M4M201O010O01O01O010dA`NV>`1hAcNX>b1010O01O01UOhAMW>WOiAh021W>MkA4U>InA6R>JnA7R>HoA7Q>IoA8P>HQB7P>GQB:n=FSB9n=FRB;m=DUB;l=CUB=l=AVB?k=]OWBc0d>0O01O0N3M2M3N3M2N3M2N3MfZm4"}, "image_id": 398, "id": 6263}, {"iscrowd": 0, "category_id": 1, "bbox": [204.0, 165.0, 68.0, 84.0], "area": 3232, "segmentation": {"size": [512, 512], "counts": "iUV37g?4M3L3gABn<>PCDo<>mBES=;kBHS=:jBHV=8hBJX=7dBM[=3cBO]=1aB1]=2`B1]=1`B2]=2`B1]=1aB1`=O\\B4d=LZB7e=R11O010O00010O010O00010O0010O0000O1M2N3M3N1N3M1O100O1O2N3N2M4L3M3N3L3O110O01O01O0N3M2M3M4L3N2M4L3M3Nfjg3"}, "image_id": 398, "id": 6264}, {"iscrowd": 0, "category_id": 1, "bbox": [69.0, 170.0, 83.0, 71.0], "area": 3135, "segmentation": {"size": [512, 512], "counts": "cfR12l?2M4M2N3L3N3@_O]Ac0a>BYAa0e>?M2N3M201O000O2O001N110O01O010N1N30O0010O010O0010O010N101N10001O001O010O01O010O101N2fNaAP1`>kNdAT1d>010O0010O010O0010O0010O010O0010OROXAd0h>ZOZAg0f>VO]Ai0m>10M2N3L3N2N3L3N3M2M4MYic5"}, "image_id": 398, "id": 6265}, {"iscrowd": 0, "category_id": 1, "bbox": [265.0, 195.0, 65.0, 94.0], "area": 3476, "segmentation": {"size": [512, 512], "counts": "\\hT42l?3UN0`BL?7o<1]BL`06Q=2ZBKc06Q=2WBKe06T==hBFW=;bBTOEe0g=9`BN`=2]B2b=N\\B4d=T1O2O0aBgMW=Y2fBjM[=^2O0010O00100O3M2O1N10O01O010O00ON3M2O2M2N2O1N1O0010O01O103L3M3M4M2M4L3N2O20O0001M2M3M4L3M3M4L3L4M4Lhij2"}, "image_id": 398, "id": 6266}, {"iscrowd": 0, "category_id": 1, "bbox": [301.0, 228.0, 69.0, 79.0], "area": 3019, "segmentation": {"size": [512, 512], "counts": "Sif44h?4M3L5L3L4M301O01O01O01O0001O01O01O01O0001]OXOlAh0P>\\OQBc0l=ASB`0h=DXBn=j0010O0000O2L3M3M4L3000010O0000L5L3L4M4L31O0001O0L4010OM3L4M4N1@\\ADd>9_AHa>3cA_OJ9d>5iAHZ>4jAHZ>5k0K4MghV2"}, "image_id": 398, "id": 6267}, {"iscrowd": 0, "category_id": 1, "bbox": [35.0, 244.0, 93.0, 58.0], "area": 3021, "segmentation": {"size": [512, 512], "counts": "\\ha02l?3L3N2N3M2M4M2N3M2N30O0010O010O0010O010O00010O0N3M2010O0N3M2M4N11O010O01O010O001O0O101N101O010O001O0N201O0O2O001O0O20O00010O010O01O0102M2N2O0O010O010O01O01O010O010iNYAS1k>0O01L3N3M2@n@0U?Mn@OU?Nn@0T?Nn@0U?Mn@0gVP6"}, "image_id": 398, "id": 6268}, {"iscrowd": 0, "category_id": 1, "bbox": [129.0, 245.0, 27.0, 25.0], "area": 414, "segmentation": {"size": [512, 512], "counts": "ShP23k?3L3N3M2M3N3O0010O010O00010O010O010O000O2L3N3L3N3M2MVha5"}, "image_id": 398, "id": 6269}, {"iscrowd": 0, 
"category_id": 1, "bbox": [453.0, 261.0, 59.0, 58.0], "area": 2058, "segmentation": {"size": [512, 512], "counts": "QiR74i?3L4M4K4N2010O000001L3M3L5L3000001O01O01O01O01O0001O01O01O01O01O0001O01O01O01O01O0001O01M2N2010O00aAhNV>X1fAlN[>]1O000010O00010OcG"}, "image_id": 398, "id": 6270}, {"iscrowd": 0, "category_id": 1, "bbox": [10.0, 292.0, 86.0, 70.0], "area": 2582, "segmentation": {"size": [512, 512], "counts": "VZ53k?2M4M2N3M2N2N3M2N3N110O010O010O010O010O010OO2M2N1O03M2M4M2N3M2N3M2N3N110O010O010`AdNZ>\\1dAgN\\>^1010O010O010O010O010O0SOeA6Z>HhA8Y>EjA;U>CmA=T>@oA`0P>^ORBb0o=[OTBe0k=YOWBg0b>O010O010O010O010O010O01O010O01M2N3M2N3M2N3M2N3M2N^e_6"}, "image_id": 398, "id": 6271}, {"iscrowd": 0, "category_id": 1, "bbox": [160.0, 316.0, 90.0, 47.0], "area": 3114, "segmentation": {"size": [512, 512], "counts": "PZ`28h?AcA?^>]OfAb0Z>[OiAe0W>XOlAe0j>L3M3M4L3M3MPU6"}, "image_id": 398, "id": 6273}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 346.0, 75.0, 69.0], "area": 2625, "segmentation": {"size": [512, 512], "counts": "m:a1_>1O0O2O000O2O0O2ON2N5M3N2M3N2M0010O0010O010O010O0010O010O010O010O010O00010O010O010O010O010O01O010M2O2M2N3M2N2N200O10O10O1000O0100O10O13L3N3M2M4M3L3NfSj6"}, "image_id": 398, "id": 6274}, {"iscrowd": 0, "category_id": 1, "bbox": [344.0, 400.0, 60.0, 87.0], "area": 3017, "segmentation": {"size": [512, 512], "counts": "g]\\55e?6I7J6J6O101O000001O000001O00000PBUOlaNkA]1^>M2N3M1O001O000000002N3N1N3M2010O01M2N3M2O2M3M2N3M2N3M2NSSS4"}, "image_id": 398, "id": 6276}, {"iscrowd": 0, "category_id": 1, "bbox": [410.0, 403.0, 59.0, 109.0], "area": 3527, "segmentation": {"size": [512, 512], "counts": "S_]63h0OT>8iAKQ>;oAEj=b0jAZO24Q>e0gA^O8LQ>R1nAnNR>T1lAlNT>U1kAkNU>V1jAjNP>f1J6K5J6001O0000001O0000CjMPCV2n6nADX>=hA[O`>d0`AUOg>l07M2N3M2N3M2N3M2N3M2N3MRSe0"}, "image_id": 398, "id": 6277}, {"iscrowd": 0, "category_id": 1, "bbox": [83.0, 408.0, 70.0, 79.0], "area": 2676, "segmentation": {"size": [512, 512], "counts": "bnY12m?2M2O2N2N2M3N1O2N2M3N2N1EXO]Aj0a>WO]Al0a>VO]Al0`>WO^Ak0`>UOVAj0j>TOXAk0P?M3N2M3N1O2M3N2M2O2N2M3NVRc5"}, "image_id": 398, "id": 6278}, {"iscrowd": 0, "category_id": 1, "bbox": [478.0, 419.0, 34.0, 93.0], "area": 2381, "segmentation": {"size": [512, 512], "counts": "V__71c02_=4hB0B3c=2bB2E2h=N\\Bm0b=VOUBQ1j=c0O1O3N000001O000000000000001O000000000000O1N2I7J6J61O0000"}, "image_id": 398, "id": 6279}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 421.0, 37.0, 54.0], "area": 1127, "segmentation": {"size": [512, 512], "counts": "U=\\1d>010O01jN]An0b>POaAo0`>nNbAS1f>O0010O010O0010O010O010O010O010O001N1N3M2N2N3L3N3M2N3M2N3M2MUR]7"}, "image_id": 398, "id": 6280}, {"iscrowd": 0, "category_id": 1, "bbox": [233.0, 451.0, 78.0, 61.0], "area": 2802, "segmentation": {"size": [512, 512], "counts": "aod32i?5K5K5K5J7J5N200000010O00000000001O01O00000001O0001O000001O000M3000010O0000000J7I6N200000010O00000000014K00000000SOmAMT>LSB3m=GZB8f=BaB=_=]OiBa0[>01O00000000001O000000001O0N2I7HkPT3"}, "image_id": 398, "id": 6281}, {"iscrowd": 0, "category_id": 1, "bbox": [14.0, 393.0, 91.0, 80.0], "area": 2125, "segmentation": {"size": [512, 512], "counts": "i\\71n?2M2O2N2N2M3N2N1O2N2M3N2N1100000O1000O1000O1000O100000O1000O1000O100000O1000O1000O100000O1000O1000O1000O100000O1000O1000O1000O100000O1000O1000O100000O1000OO2N2M3N2N2N1O2M3N2N2N1O2MjQ[6"}, "image_id": 399, "id": 6282}, {"iscrowd": 0, "category_id": 1, "bbox": [138.0, 427.0, 16.0, 20.0], "area": 138, "segmentation": {"size": [512, 512], "counts": "h]U23l?1O2N2M3N2OOO10O10O1002K5M3N1Ocbb5"}, "image_id": 399, "id": 6283}, {"iscrowd": 0, "category_id": 
1, "bbox": [105.0, 457.0, 28.0, 29.0], "area": 412, "segmentation": {"size": [512, 512], "counts": "knd11n?2N2N2M2O2N2N2M3N1O2M3N2N1O00O2O2N2N2M3N1O2M3N2N2N1N3N_Qm5"}, "image_id": 399, "id": 6284}, {"iscrowd": 0, "category_id": 1, "bbox": [124.0, 470.0, 60.0, 42.0], "area": 1125, "segmentation": {"size": [512, 512], "counts": "S_n13l?2N2N1N3N2N2N2M2O2N2N2O10O010000000O010000000O0100n@WOl>i0QAYOP?k01O001O1O1O1O001O1O1O1O001O1O1O1O001O1O1O1O001O1O1O1O001O1O1N2N1NW`S5"}, "image_id": 399, "id": 6285}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 498.0, 17.0, 14.0], "area": 125, "segmentation": {"size": [512, 512], "counts": "b?>c?O1O1O001O1O1O1O001O1O1O1O1O001OQPg7"}, "image_id": 399, "id": 6286}, {"iscrowd": 0, "category_id": 1, "bbox": [31.0, 0.0, 85.0, 26.0], "area": 1008, "segmentation": {"size": [512, 512], "counts": "P`?7i?000000000000000000000000000000000000000000000000000000000000000000000000001O1O001O1O001O001O1O001O1O001O1O0d@_OY?d001O1O0100O0O2O01M2O2O001O00N2N2O1N2O1N2O1N2O1N2N2O1N2O1N2OQPV6"}, "image_id": 400, "id": 6287}, {"iscrowd": 0, "category_id": 1, "bbox": [115.0, 0.0, 70.0, 72.0], "area": 1838, "segmentation": {"size": [512, 512], "counts": "bai13k?2O2M2N3N2M2O2M3O010O01000O010O01000O010O01000N1N3N2O010O0O2eNVO]Cl0b\\OgAINl0\\>[OdAj0^>;1N2N2O1N2N2O1N2N2O1N2O1N3M2O2M3M2O2M3M2O2M2N3Nj_S5"}, "image_id": 400, "id": 6288}, {"iscrowd": 0, "category_id": 1, "bbox": [208.0, 0.0, 44.0, 44.0], "area": 1111, "segmentation": {"size": [512, 512], "counts": "mPX31n?2M2N3N2M2O2M3M2O2M2O2FXOXAk0e>XOXAj0g>WOXAk0g>8O001O001O1O001ON2N2O1N2O1N2N2O1N2O1N2N2O2M2O2M2N3N2M2H[@4j?MjoQ4"}, "image_id": 400, "id": 6289}, {"iscrowd": 0, "category_id": 1, "bbox": [264.0, 0.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "PPT41ook3"}, "image_id": 400, "id": 6290}, {"iscrowd": 0, "category_id": 1, "bbox": [316.0, 0.0, 70.0, 52.0], "area": 1866, "segmentation": {"size": [512, 512], "counts": "]Pn42m?2M2O2M3N1O2M3N2M201O1O001O1O1O001O1O001O1O1O001O1O001O1O1O001O10O10O1000O10O10O10O1000O10O10O1000O10O10OO2N2M3N1N3N2N1N3N2M3N1O2M3N1N3N2NVon1"}, "image_id": 400, "id": 6291}, {"iscrowd": 0, "category_id": 1, "bbox": [399.0, 0.0, 8.0, 3.0], "area": 14, "segmentation": {"size": [512, 512], "counts": "P`W61o?00001O00001OOQ`d1"}, "image_id": 400, "id": 6292}, {"iscrowd": 0, "category_id": 1, "bbox": [260.0, 36.0, 31.0, 31.0], "area": 509, "segmentation": {"size": [512, 512], "counts": "cQR43l?2N1O2M3N2N2N1N3N2N2N2M2010000000O0O2N2N2N2M2O2N2N2M3N1O2N2M`^^3"}, "image_id": 400, "id": 6293}, {"iscrowd": 0, "category_id": 1, "bbox": [173.0, 48.0, 97.0, 77.0], "area": 2742, "segmentation": {"size": [512, 512], "counts": "Tbf22l?3N1O2M3N2N1O2M3N2N2M2O2N2N2o@ROl>S100O10O1N11N2N1100000O10O100mNVAo0i>oNZAQ1k>O10O100000O01000N2N2N1O2O100O0100000O10O100O1N1O2N2N2O01000RAPOk>S10O010N2O1O010000000O0100000O010000000O010M3N1OO03N1O2N2M3N1O2M3N2F]@4i?N2Mjlh3"}, "image_id": 400, "id": 6294}, {"iscrowd": 0, "category_id": 1, "bbox": [323.0, 61.0, 14.0, 15.0], "area": 108, "segmentation": {"size": [512, 512], "counts": "TbQ53l?1O2M3N1N300000N1O2M3N1N3Nn]g2"}, "image_id": 400, "id": 6295}, {"iscrowd": 0, "category_id": 1, "bbox": [498.0, 65.0, 14.0, 33.0], "area": 271, "segmentation": {"size": [512, 512], "counts": "kRi71l?4M2M4M2M3N3M2M4M2M3O2O010mM"}, "image_id": 400, "id": 6296}, {"iscrowd": 0, "category_id": 1, "bbox": [471.0, 69.0, 27.0, 44.0], "area": 534, "segmentation": {"size": [512, 512], "counts": "Zc[73k?3L3N2M4M2M4M2M3N3M2M2O0O010O03N3L3N2M4M2M4M2M3N3M2Mjm6"}, "image_id": 400, "id": 6297}, 
{"iscrowd": 0, "category_id": 1, "bbox": [331.0, 86.0, 32.0, 32.0], "area": 528, "segmentation": {"size": [512, 512], "counts": "WcU52m?2M2O2N2M3N1O2M3N2N1N3N2N20O1000O10O1N2M2O2N2M3N2N1N3N2N2M2O2Nm\\Z2"}, "image_id": 400, "id": 6298}, {"iscrowd": 0, "category_id": 1, "bbox": [274.0, 95.0, 53.0, 58.0], "area": 1484, "segmentation": {"size": [512, 512], "counts": "WTY42l?2O2N2N2M3N1O2M3N2N2N1N3N2N2N1N3N2N2M3N1O2N0O01000O10O1000O0102N2N2O10O01N2N2M2O2N2Io@\\OS?b07M2O2N2M3N2N1O2M3Na\\l2"}, "image_id": 400, "id": 6299}, {"iscrowd": 0, "category_id": 1, "bbox": [496.0, 118.0, 16.0, 34.0], "area": 319, "segmentation": {"size": [512, 512], "counts": "aTh71m?2N2M4M2M4M2N3L3N2N3L3O20O01O0YL"}, "image_id": 400, "id": 6300}, {"iscrowd": 0, "category_id": 1, "bbox": [313.0, 138.0, 53.0, 58.0], "area": 1509, "segmentation": {"size": [512, 512], "counts": "ael43l?2N2M2O2N2M3N2N1O2M3N2N1N3N2N2M2O2N2N2M3N1O1N10O10O10O10O10O2O2N1N3N2N2N1N3N2N2M2O2N2M3N1O2N2M3N2N1N3N2NWkX2"}, "image_id": 400, "id": 6301}, {"iscrowd": 0, "category_id": 1, "bbox": [446.0, 154.0, 31.0, 32.0], "area": 534, "segmentation": {"size": [512, 512], "counts": "^Uo61m?3N1N3N2M2O2M3N1N3N2M2O2M30O01000O001M3N1N3N2M2O2M3N1N3N2M2OlZa0"}, "image_id": 400, "id": 6302}, {"iscrowd": 0, "category_id": 1, "bbox": [413.0, 186.0, 62.0, 83.0], "area": 1980, "segmentation": {"size": [512, 512], "counts": "ng^61l?4M2M3N3L3N3N10010O0010O0010O0010O0010O0dN^OZCc0c<_O^C`0_]Dn@?o>:M4M2O110O0N3L3N2O20O00010O01O01O010O01OM4M2MVI"}, "image_id": 400, "id": 6304}, {"iscrowd": 0, "category_id": 1, "bbox": [32.0, 252.0, 42.0, 61.0], "area": 1730, "segmentation": {"size": [512, 512], "counts": "\\X`02Q10f=5UBOg=5UB0g=3VB0f=4VB0i=2RB3m=MPB6P>JlA:U>h001O00010O000O110O0000010O000010O000010OL4L4M4K4L4M3L5L3L4L5L3L4L4Mogj6"}, "image_id": 400, "id": 6305}, {"iscrowd": 0, "category_id": 1, "bbox": [90.0, 268.0, 52.0, 62.0], "area": 1635, "segmentation": {"size": [512, 512], "counts": "jY]12l?2O2M3N1N3N2M2N3N2M2O2M2FUO^Am0`>UO]An0`>TO_Am0`>UO]An0`>;N1N3M3N1N3N1N20N3N2M2O2M3N1N3N2M2N3N1N3N2M2O2M3M2O2M3N1N3N1N3M3N1N3NTgh5"}, "image_id": 400, "id": 6306}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 289.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "Q92mfo7"}, "image_id": 400, "id": 6307}, {"iscrowd": 0, "category_id": 1, "bbox": [142.0, 304.0, 53.0, 58.0], "area": 1506, "segmentation": {"size": [512, 512], "counts": "iZW21m?2O2N2M3N1N3N2M2O2M3N1O2M3N2M2O2M3N1O2M3N1N3N2M2OO01000O101N3N2M2O2M3N1O2M3N2M2O2M3N1O2M3N1N3N2M2O2M3N2NQVn4"}, "image_id": 400, "id": 6308}, {"iscrowd": 0, "category_id": 1, "bbox": [36.0, 316.0, 23.0, 24.0], "area": 365, "segmentation": {"size": [512, 512], "counts": "]Zb03j?3L4L5L3M31O01O00010O0001O01O0001O0L4M3L5K4MRVR7"}, "image_id": 400, "id": 6309}, {"iscrowd": 0, "category_id": 1, "bbox": [51.0, 325.0, 32.0, 40.0], "area": 736, "segmentation": {"size": [512, 512], "counts": "Yki01k?5K4L40001K4L4L4L5K4L41M2L4L3N31O0001O01O0001O01O00O2K4L4_Oc@b?010OM3L4Modl7"}, "image_id": 400, "id": 6313}, {"iscrowd": 0, "category_id": 1, "bbox": [30.0, 368.0, 35.0, 48.0], "area": 865, "segmentation": {"size": [512, 512], "counts": "f\\?2l?3M2M3N3M2M4M2N3L3O101O010JUOUAk0i>WOXAi0e>ZOZAf0c>]O^Ac0_>_OaAa0]>BbA>]>CdA;^>EaA8b>H\\A8g>>2N3L3N2N3L3N3M2Hf@G^?67N2N\\To6"}, "image_id": 400, "id": 6314}, {"iscrowd": 0, "category_id": 1, "bbox": [151.0, 374.0, 29.0, 29.0], "area": 443, "segmentation": {"size": [512, 512], "counts": "Vl[21n?2N1N3N2M2O2N2M3N1N3N2N20O10O10O100N1O2N2M3N1N3N2N1N3N2NocU5"}, "image_id": 400, "id": 6315}, {"iscrowd": 0, 
"category_id": 1, "bbox": [240.0, 378.0, 65.0, 75.0], "area": 2218, "segmentation": {"size": [512, 512], "counts": "d]h31m?2O2M3N1N3N1N3N2M2O2M3N1N3M3N1N3N2M2O2M3N1N3N2M2O2M2N100O010O01O010O010O010O2O1N3N2M2N3N1N3N2M2O2M3N1N3M3N1N3N2M2O2M3N1N3N2M2N3NgSW3"}, "image_id": 400, "id": 6316}, {"iscrowd": 0, "category_id": 1, "bbox": [47.0, 402.0, 33.0, 38.0], "area": 741, "segmentation": {"size": [512, 512], "counts": "\\mg03k?2N3L3N3M2M4M2M3N3M2M40O01O01O010O010O00010M2N3L3N3M2M3N3M2M4M2MXcg6"}, "image_id": 400, "id": 6317}, {"iscrowd": 0, "category_id": 1, "bbox": [131.0, 413.0, 18.0, 16.0], "area": 152, "segmentation": {"size": [512, 512], "counts": "SmQ23k?2N3N2N110O0100O010O0100O01M2N3M3NlRe5"}, "image_id": 400, "id": 6318}, {"iscrowd": 0, "category_id": 1, "bbox": [306.0, 413.0, 57.0, 56.0], "area": 1546, "segmentation": {"size": [512, 512], "counts": "d]i42m?2N2N2N1O2M3N2N2N2N2N1O2M3N2N2N2N2N2M2O2000000000000O0100000000000O010000N2N2N2N2M2O2N2N2N2N2N2M2O2N2N2N2N2N2M3NTRZ2"}, "image_id": 400, "id": 6319}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 420.0, 33.0, 55.0], "area": 1177, "segmentation": {"size": [512, 512], "counts": "c=34h0g>:N3M2M3N3M2O20O010O01O01O010M2N3M2M4M2N2M4M2N3M2M4M2N2M4M2N3M2MdR_7"}, "image_id": 400, "id": 6320}, {"iscrowd": 0, "category_id": 1, "bbox": [108.0, 433.0, 50.0, 79.0], "area": 2130, "segmentation": {"size": [512, 512], "counts": "S^f11m?3M2M4PBHT<:iCIU<:hCHU<;iCHT<\\=BbB`0^=A^Bb0c=]O[Be0e=[OYBg0h=h0001N1N2N3L3N3M2M4M2N2UOaA2c>K_A3c>JaA3b>J`A3c>K`A2c>J`A5a>IaA8^>FeA1]O4d?J_@5f?1OO2MVa`5"}, "image_id": 400, "id": 6321}, {"iscrowd": 0, "category_id": 1, "bbox": [171.0, 441.0, 29.0, 30.0], "area": 511, "segmentation": {"size": [512, 512], "counts": "^ne21l?3N3L3N2M4M2N3L3O110O0010O010O00010O010N1N2M4M2N3L3N3L3NQbk4"}, "image_id": 400, "id": 6322}, {"iscrowd": 0, "category_id": 1, "bbox": [267.0, 443.0, 38.0, 37.0], "area": 703, "segmentation": {"size": [512, 512], "counts": "]nU41n?1N3N2N2N2M2O2N2N2M2O2N2M3N2O0100000O10O100000ON3N2N2N1N3N2N2N2M2O2N2M3N2NdQW3"}, "image_id": 400, "id": 6323}, {"iscrowd": 0, "category_id": 1, "bbox": [35.0, 456.0, 57.0, 46.0], "area": 1415, "segmentation": {"size": [512, 512], "counts": "Qoa02l?2M4M2N3M2M4M2N2N3L3N30O010O0010O010O0010O010O0010O010O0010O010O0010O010O0010O010O0010O01O0M3N3M2N3L3N3M2N3L3N2NVaa6"}, "image_id": 400, "id": 6324}, {"iscrowd": 0, "category_id": 1, "bbox": [226.0, 467.0, 30.0, 41.0], "area": 610, "segmentation": {"size": [512, 512], "counts": "f_a31l?3N3M2M4M2N2M4M2N3L3N3M2M3N1OO4M2N3N1O1N3M2M4M2N3L3N2N3L3NYao3"}, "image_id": 400, "id": 6325}, {"iscrowd": 0, "category_id": 1, "bbox": [354.0, 469.0, 58.0, 43.0], "area": 1372, "segmentation": {"size": [512, 512], "counts": "__a53m?2M2O2M2N3N1N3M3N1N100O1O100O1O100O100O1O100O1O100O1O010O01O01O010O00010O01O01O01O010O3N1N3M2O2M3M2O2M2O2M3M2O2M2Nf`a1"}, "image_id": 400, "id": 6326}, {"iscrowd": 0, "category_id": 1, "bbox": [312.0, 489.0, 16.0, 17.0], "area": 145, "segmentation": {"size": [512, 512], "counts": "e_l41m?3M2N2N3M2N30O010O0N3M2N3M2N2Ne`k2"}, "image_id": 400, "id": 6327}, {"iscrowd": 0, "category_id": 1, "bbox": [186.0, 493.0, 35.0, 19.0], "area": 389, "segmentation": {"size": [512, 512], "counts": "n_m22m?1N2N2O1N2O1N2O1N2N2O1001O1O001O1O001O001O1O001O1O001O1O001O001O1M2OVPa4"}, "image_id": 400, "id": 6328}, {"iscrowd": 0, "category_id": 1, "bbox": [392.0, 501.0, 23.0, 11.0], "area": 128, "segmentation": {"size": [512, 512], "counts": "o_T61n?100O100O1O100O100O1O100O100O1O12N2N1O2N2N1OQP`1"}, "image_id": 400, "id": 6329}, {"iscrowd": 0, 
"category_id": 1, "bbox": [245.0, 511.0, 2.0, 1.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "ooj31o?0QPT4"}, "image_id": 400, "id": 6330}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 174.0, 8.0, 17.0], "area": 79, "segmentation": {"size": [512, 512], "counts": "^5a0`?N2N2N2N2N2N2M]jk7"}, "image_id": 402, "id": 6331}, {"iscrowd": 0, "category_id": 1, "bbox": [72.0, 185.0, 19.0, 21.0], "area": 187, "segmentation": {"size": [512, 512], "counts": "XVT11n?2M3N1O2M3N2N1N01000O11N2O2N2M3N1O2MUZb6"}, "image_id": 402, "id": 6332}, {"iscrowd": 0, "category_id": 1, "bbox": [81.0, 193.0, 19.0, 21.0], "area": 161, "segmentation": {"size": [512, 512], "counts": "`fX11n?2N2N1N3N2N2NO0100000O012N1O2M3N2N2Nki]6"}, "image_id": 402, "id": 6333}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 206.0, 22.0, 20.0], "area": 207, "segmentation": {"size": [512, 512], "counts": "df]12m?1N3N2N2N2O0010000000O010000000N1O2N2M3N2NWYW6"}, "image_id": 402, "id": 6334}, {"iscrowd": 0, "category_id": 1, "bbox": [110.0, 221.0, 29.0, 27.0], "area": 301, "segmentation": {"size": [512, 512], "counts": "TWg11n?2N2N2N2M3N110000000O10O1000000000O10O1000000N1O2N2N2N2NaXj5"}, "image_id": 402, "id": 6335}, {"iscrowd": 0, "category_id": 1, "bbox": [132.0, 242.0, 39.0, 35.0], "area": 495, "segmentation": {"size": [512, 512], "counts": "jWR21n?2N2N1O2M3N2N2O1O10O100000000000O10O1000000000O10O1000000000OO2N2N2N2N2M3N1OfWZ5"}, "image_id": 402, "id": 6336}, {"iscrowd": 0, "category_id": 1, "bbox": [182.0, 278.0, 19.0, 20.0], "area": 189, "segmentation": {"size": [512, 512], "counts": "SYk22m?2M3N1O2N2N2N1N10O10001O2N1N3N2N2N2NVWk4"}, "image_id": 402, "id": 6337}, {"iscrowd": 0, "category_id": 1, "bbox": [264.0, 287.0, 17.0, 21.0], "area": 166, "segmentation": {"size": [512, 512], "counts": "_YT41m?2O2M3N1N3M3N0O10O002O1N3N2M2O2MPWc3"}, "image_id": 402, "id": 6338}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 290.0, 88.0, 141.0], "area": 6747, "segmentation": {"size": [512, 512], "counts": "R9`3a<000000000O10O1000000000000000O10O1000000000000000O10O10000000000M3N1O2N2N2N2N2N2M3N2N2N1O2N2N2N2N2M3N2N2N1O2N2N2N2N2M3N2N2N2N1O2N2N2M3N2N2N2N2N1O2N2N2M3N2N2N2N2N2N1O2M3N2N2Nadc6"}, "image_id": 402, "id": 6339}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 422.0, 103.0, 90.0], "area": 5273, "segmentation": {"size": [512, 512], "counts": "Y?g0X?1O1O1N2O1O1O1O1O1O1O1O1N2O1O1O1O1O1O1O1O1N2O1O1O1O1O1O1O1N2O1O1O1O1O1O1O1O1N2O1O1O1O1O1O1O1O1N2O1O1O1O1O1O1O1O1N2O000002N2N2N2M3N1O2N2N2N2N2N2N2M3N1O2N2N2N2N2N2N2M3N2N1O2N2N2N2N2M3N2N2N2N1O2N2N2N2M3N2N2NeQ\\6"}, "image_id": 402, "id": 6340}, {"iscrowd": 0, "category_id": 1, "bbox": [51.0, 0.0, 67.0, 63.0], "area": 2383, "segmentation": {"size": [512, 512], "counts": "Y`i02m?2M2O2N2M2O2N2O001a@A[?b001O1O1O001O1O0000O11UAYO[>h0cAYO]>g0bA[O]>f0`A]O_>d0_A^O`>c0]A_Oc>a0\\AAc>o0O001O1O001O1O1O001O1O001O1O1O001O1O001O1O001O1OTOoAJQ>4PBLQ>2QBNn=0UBOl=OUB0m=MVB0l=OVBOl=NVB0m=NUB0m=NUBOm=OVBOl=OUB0m=MVB0l=OVBOl=OUBOn=NUB0a]U6"}, "image_id": 403, "id": 6341}, {"iscrowd": 0, "category_id": 1, "bbox": [142.0, 0.0, 29.0, 10.0], "area": 154, "segmentation": {"size": [512, 512], "counts": "PPW21o?00001O001O00001O001O00001O001O001O00001O001O0000N2M3N2NR`Z5"}, "image_id": 403, "id": 6342}, {"iscrowd": 0, "category_id": 1, "bbox": [288.0, 0.0, 62.0, 42.0], "area": 1417, "segmentation": {"size": [512, 512], "counts": "UP`41n?2M2Y@La?6]@Kb?;O1O001O1O1O001O1O001O1O001O1O1O001O1O001O1O1O001O1O001O1O001O1O1O001O10O1000O0100lNUAR1m>M3N1O2M3N1N3N2M3N1O2M3N1N3N2M2O_o`2"}, "image_id": 403, "id": 6343}, 
{"iscrowd": 0, "category_id": 1, "bbox": [353.0, 0.0, 3.0, 2.0], "area": 5, "segmentation": {"size": [512, 512], "counts": "P``51o?1O0PP^2"}, "image_id": 403, "id": 6344}, {"iscrowd": 0, "category_id": 1, "bbox": [450.0, 0.0, 51.0, 20.0], "area": 624, "segmentation": {"size": [512, 512], "counts": "PPQ71o?5K4L5K5K00O100000000O1000000O100000000O1002N00000000O100If@HZ?870000O1N2000000O1000000O100000000O100000P`5"}, "image_id": 403, "id": 6345}, {"iscrowd": 0, "category_id": 1, "bbox": [398.0, 7.0, 25.0, 28.0], "area": 520, "segmentation": {"size": [512, 512], "counts": "[PW62n?5J6K6J5K1O0O1000O1000O100000O10O100000O6K6J5K5KT_\\1"}, "image_id": 403, "id": 6346}, {"iscrowd": 0, "category_id": 1, "bbox": [423.0, 8.0, 28.0, 27.0], "area": 521, "segmentation": {"size": [512, 512], "counts": "]`c61o?4L4K6K4L4L0O10O100000O0100000O0100000O010003M4L4K5L4LU_n0"}, "image_id": 403, "id": 6347}, {"iscrowd": 0, "category_id": 1, "bbox": [43.0, 19.0, 21.0, 18.0], "area": 166, "segmentation": {"size": [512, 512], "counts": "h`e01n?2M3N1O2O1O0100000O01000O0100000ON3N2N1NUoo6"}, "image_id": 403, "id": 6348}, {"iscrowd": 0, "category_id": 1, "bbox": [477.0, 30.0, 35.0, 29.0], "area": 685, "segmentation": {"size": [512, 512], "counts": "Ua^74l?4K6K1O000O10003M5K0O10O1000O10O1000O10O100000O10O1000O15J2OOGf@NZ?2j@JV?6>KjN"}, "image_id": 403, "id": 6349}, {"iscrowd": 0, "category_id": 1, "bbox": [18.0, 43.0, 57.0, 43.0], "area": 1602, "segmentation": {"size": [512, 512], "counts": "WR93i?4L5K4L4L4L5K41O0001O01O0001O01O0001O01O0001O01O0001O01O0001O01O0001O01O0001O01O0001O0001O01O0001M2L4L4L5K4L4L4L]^j6"}, "image_id": 403, "id": 6350}, {"iscrowd": 0, "category_id": 1, "bbox": [373.0, 47.0, 59.0, 47.0], "area": 1679, "segmentation": {"size": [512, 512], "counts": "Qbj52n?4L4L5J3N00O10O10003M4K5L2N000O10O1000O10O1000N2O01000KoN[AQ1e>50001N1000O10O10I[ASOe>m07O0100000O0010000O0100000O4M4L4L4K5L4L4LfmW1"}, "image_id": 403, "id": 6351}, {"iscrowd": 0, "category_id": 1, "bbox": [476.0, 70.0, 36.0, 54.0], "area": 1222, "segmentation": {"size": [512, 512], "counts": "VS^74k?5L5J5L5K00000O10O1000O10O1000YOZOTBf0k=_OPBb0P>CkAIeA7[>MaA3^>h00O100000O10O1000O1000O1000O^OdABV<"}, "image_id": 403, "id": 6352}, {"iscrowd": 0, "category_id": 1, "bbox": [4.0, 80.0, 31.0, 31.0], "area": 491, "segmentation": {"size": [512, 512], "counts": "RS21n?1N3N2M2O2N2M3N1N3N2N1N3N2O10O1000OO2M3N1N3N2N2M2O2M3N1O2M3NU]^7"}, "image_id": 403, "id": 6353}, {"iscrowd": 0, "category_id": 1, "bbox": [166.0, 87.0, 84.0, 112.0], "area": 4202, "segmentation": {"size": [512, 512], "counts": "UUc22l?2O2N2M2O2M3VACo==PBEo=9lAHU>7jAJV>7gAJ[>6cAJ_>6^AJc>7\\AIc>:ZAFg>>VABh>GYAR1f>40O10O101N3O1O0@gNZBX1e=jN[BV1b=lN^BU1`=mN`BR1^=QObBo0\\=ROcBP1\\=QObBP1^=QO_BR1R=`NTC=IU1P=aNWC:FW1R=`NXC:DX1Q=aN[CP2cT1oAjNS>T1nAkNS>S1PBjNS>T1oAjNS>S1>O2O1O0@SALo>2RAMP?0SAMP?1RAMo>1SAMP?1RAMP?1RAMo>1XmR4"}, "image_id": 403, "id": 6354}, {"iscrowd": 0, "category_id": 1, "bbox": [62.0, 90.0, 32.0, 32.0], "area": 542, "segmentation": {"size": [512, 512], "counts": "]So01m?3N1N3N2M2O2M3N1N3N2M2O2N20O01000O010N2N1N3N2M2O2M3N1N3N2M2O2Mll`6"}, "image_id": 403, "id": 6355}, {"iscrowd": 0, "category_id": 1, "bbox": [17.0, 118.0, 57.0, 47.0], "area": 1351, "segmentation": {"size": [512, 512], "counts": "Yd81m?3N2N1N3N2M2O2M3N2N1N3N2M201000O010000O010000O01000O010000O010000O0100M2O2M3N10O1N3N2M2O2N2M3N1N3N2M2O2N2M2O2M3Ngkj6"}, "image_id": 403, "id": 6356}, {"iscrowd": 0, "category_id": 1, "bbox": [427.0, 119.0, 52.0, 46.0], "area": 1549, "segmentation": {"size": [512, 512], "counts": 
"\\de62g?1]@3_?5000O10O10IZATOf>l070O010000000ON300000O10O2O3M003L6K4L4L4K6K\\[`0"}, "image_id": 403, "id": 6357}, {"iscrowd": 0, "category_id": 1, "bbox": [249.0, 120.0, 24.0, 27.0], "area": 344, "segmentation": {"size": [512, 512], "counts": "Xdl32m?2M3N1N3N2M2O2M3N1N3N200OO2M3N1N3N2M2O2M3N1N3NQ\\g3"}, "image_id": 403, "id": 6358}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 133.0, 16.0, 21.0], "area": 170, "segmentation": {"size": [512, 512], "counts": "U4?b?00O0C`@;b?10O10O10O10O01N2M2O2M3N_kg7"}, "image_id": 403, "id": 6359}, {"iscrowd": 0, "category_id": 1, "bbox": [263.0, 154.0, 81.0, 55.0], "area": 2134, "segmentation": {"size": [512, 512], "counts": "feS41n?3M3N2M2O2M3N2M2N3N2M3N1N1O010O010O010O00010O010O01O01O010O010O01O01O010O2OO010O0OQASOn>n02O010O000102O010O1000O0100000O0100000O0OO01O010O011N3N1N3M3N2M2O2M3N2M2N3N2Mmic2"}, "image_id": 403, "id": 6360}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 162.0, 46.0, 47.0], "area": 1172, "segmentation": {"size": [512, 512], "counts": "R5n0S?0O10O10O10O1000O10O10O1000O10O10O10O1000O10O10ON01000O01000O03N1N3N2N1N3N2M3N1O2M3N1N3N2N^jX7"}, "image_id": 403, "id": 6361}, {"iscrowd": 0, "category_id": 1, "bbox": [225.0, 193.0, 27.0, 37.0], "area": 700, "segmentation": {"size": [512, 512], "counts": "Uf`34l?6I6K5K5K5K3L0100000000O010000000O01001O5K5J6K6J5K5KohQ4"}, "image_id": 403, "id": 6362}, {"iscrowd": 0, "category_id": 1, "bbox": [101.0, 206.0, 53.0, 61.0], "area": 1536, "segmentation": {"size": [512, 512], "counts": "lgb11m?3N1N3N2M2O2M3N1N3N2M2O2N2M2O2M3N2M2O2M3N1N100O0100O010O010O012M3N1OO021OO2N2M3K4O2M3N1N3N2\\Of@>`?N2N2M2O2M3NTib5"}, "image_id": 403, "id": 6363}, {"iscrowd": 0, "category_id": 1, "bbox": [285.0, 220.0, 63.0, 37.0], "area": 1675, "segmentation": {"size": [512, 512], "counts": "Wg^45k?5K4LONEa@;_?3O10003M5K5J10O100000O10O1000O1000O1000O10O1000O1000O102N13LO01000000O010000000O0100000O010000000O010004L5J6K4L5K4LYha2"}, "image_id": 403, "id": 6364}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 230.0, 7.0, 14.0], "area": 57, "segmentation": {"size": [512, 512], "counts": "V7>b?O2M3N2N1N3NgXl7"}, "image_id": 403, "id": 6365}, {"iscrowd": 0, "category_id": 1, "bbox": [504.0, 234.0, 8.0, 15.0], "area": 64, "segmentation": {"size": [512, 512], "counts": "`Wl71n?2N2N2N2O2M2N2eH"}, "image_id": 403, "id": 6366}, {"iscrowd": 0, "category_id": 1, "bbox": [454.0, 239.0, 44.0, 72.0], "area": 2075, "segmentation": {"size": [512, 512], "counts": "eWS72n?5K5b@Gj>>RAFj>?PAFl>g000O105K5K4L5J6K4L4L000O010000000O0100000O10O10004L4K6K5K4L5K5J5L5K5K5K4K6K5K`f6"}, "image_id": 403, "id": 6367}, {"iscrowd": 0, "category_id": 1, "bbox": [174.0, 241.0, 51.0, 49.0], "area": 1238, "segmentation": {"size": [512, 512], "counts": "VXg23l?2M3N1O2M3N1N3N2M2O2M3N1O2M300N1O200TAmNj>T101000O010N20O10O1mNUAP1n>00O010000O0100M2O2M3N2N1N3N2M2O2M3N1N3N2N1NgW_4"}, "image_id": 403, "id": 6368}, {"iscrowd": 0, "category_id": 1, "bbox": [150.0, 245.0, 23.0, 25.0], "area": 281, "segmentation": {"size": [512, 512], "counts": "WX[21n?1N3N2N1N3N2M3N1O2M2O0O10O3N1N3N2N1N3N2M3N1OVXY5"}, "image_id": 403, "id": 6369}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 251.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "kgo72S8"}, "image_id": 403, "id": 6370}, {"iscrowd": 0, "category_id": 1, "bbox": [319.0, 268.0, 51.0, 28.0], "area": 1428, "segmentation": {"size": [512, 512], "counts": "\\ho4l0T?000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000dgV2"}, "image_id": 403, "id": 6371}, {"iscrowd": 
0, "category_id": 1, "bbox": [66.0, 274.0, 47.0, 52.0], "area": 1250, "segmentation": {"size": [512, 512], "counts": "eYQ12l?2O2N2M3N1N3N2N2M2O2N2M2O2HUOWAn0g>TOWAn0f>7O2N2M2O2N2ON2O1O0O2O2M3N1O200N1N3JTAUOn>i06M2O2N2M3N1O2M3N1N3N2N2MQWW6"}, "image_id": 403, "id": 6372}, {"iscrowd": 0, "category_id": 1, "bbox": [136.0, 294.0, 51.0, 58.0], "area": 1455, "segmentation": {"size": [512, 512], "counts": "_ZT23l?1N3N2N1N3N2M3N1O2M3N1N3N2N2M2IoN^AS1`>oN]AT1a>6N3N2M10000O03N1N2O000O102M3N1O2M3N101O1N2M2Lm@ZOU?c06N1N3N2N2M2O2M3N2N\\VR5"}, "image_id": 403, "id": 6373}, {"iscrowd": 0, "category_id": 1, "bbox": [207.0, 294.0, 42.0, 45.0], "area": 1017, "segmentation": {"size": [512, 512], "counts": "TjW31m?3N2M2IJc@8[?Ic@:Z?7O2M3N1N3N2M210O10O01M3N1N3N2M20N3N1N3N2M2O2M3M2O2M3N1N3N2M2O2M3N1N3N2M_VS4"}, "image_id": 403, "id": 6374}, {"iscrowd": 0, "category_id": 1, "bbox": [293.0, 299.0, 23.0, 18.0], "area": 290, "segmentation": {"size": [512, 512], "counts": "_ib46j?4L4LO10O100000O0100000O10O100000O0100004K5L]fQ3"}, "image_id": 403, "id": 6375}, {"iscrowd": 0, "category_id": 1, "bbox": [104.0, 303.0, 31.0, 30.0], "area": 502, "segmentation": {"size": [512, 512], "counts": "PZd11n?2M3N1N3N2M3N1O2M3N1N3N2O1O010000O001M3N1O2M3N1N3N2M3N1O2M3NVVl5"}, "image_id": 403, "id": 6376}, {"iscrowd": 0, "category_id": 1, "bbox": [316.0, 326.0, 62.0, 37.0], "area": 1653, "segmentation": {"size": [512, 512], "counts": "_Zn42n?5K5J6K5K4H6N0000000O01000000O01003M000O1000O1000O10O100000O10O1000O1000O1000O1000O1000O4M1O00O10O100000O10O10001O4K6K5K5KTeR2"}, "image_id": 403, "id": 6377}, {"iscrowd": 0, "category_id": 1, "bbox": [239.0, 331.0, 26.0, 25.0], "area": 462, "segmentation": {"size": [512, 512], "counts": "`jg34k?5L4L4L4K100000O0100000O010000000O0100003L5L4L4L4KSUk3"}, "image_id": 403, "id": 6378}, {"iscrowd": 0, "category_id": 1, "bbox": [42.0, 340.0, 71.0, 59.0], "area": 2351, "segmentation": {"size": [512, 512], "counts": "Z[e0c0]?0000000000=C000000000000000006J0000000000@WOiAi0U>YOkAg0S>\\OlAd0Q>_OoAa0o=AQB?l=ESB;n=DRBBPB>P>BPB>Q>AoA?Q>AoA?R>@nA`0R>_OoAa0R>[OQBe0o=XOTBh0b>00000000000000000000000000000000000000000\\TW6"}, "image_id": 403, "id": 6379}, {"iscrowd": 0, "category_id": 1, "bbox": [178.0, 345.0, 45.0, 43.0], "area": 1044, "segmentation": {"size": [512, 512], "counts": "\\[i22m?2N2M3N1N3N2M2O2N2M2O2M3N2N110O10O1000O01000O1N1O2N20O10O10O100M2O2N2M2YOn@?T?_Om@?\\?N2M2O2N2M2O2McT`4"}, "image_id": 403, "id": 6380}, {"iscrowd": 0, "category_id": 1, "bbox": [487.0, 379.0, 25.0, 69.0], "area": 1060, "segmentation": {"size": [512, 512], "counts": "Ulc74l?5K4G5000O10O5L5K4L5J5L4L5K4L5J5L3M00O01000000O0VD"}, "image_id": 403, "id": 6381}, {"iscrowd": 0, "category_id": 1, "bbox": [334.0, 396.0, 63.0, 39.0], "area": 1719, "segmentation": {"size": [512, 512], "counts": "f\\W51o?5K5K4K6K4IXOn@m0l>4000000O01000000O01001O2N0O1000O10O1000L40001OO01000N5M0000O0100000O10O10002N3L010000000O0100000O01002N5K4K6K5KlRi1"}, "image_id": 403, "id": 6382}, {"iscrowd": 0, "category_id": 1, "bbox": [184.0, 400.0, 45.0, 42.0], "area": 1027, "segmentation": {"size": [512, 512], "counts": "T]l21m?3N2N2M2O2M3N1N3N2N1N3N2M3N101000O01000O01000OO2N2O10O1000O01O1N1N3N2N1N3ZOj@?]?M2O2M3N1O2M3NkR]4"}, "image_id": 403, "id": 6383}, {"iscrowd": 0, "category_id": 1, "bbox": [233.0, 409.0, 30.0, 30.0], "area": 479, "segmentation": {"size": [512, 512], "counts": "Ymd32l?3N1N3N2M2O2M3N1N3N2N101000O10O1000N1O2M3N1O2M3N1N3N2M2O2MmRl3"}, "image_id": 403, "id": 6384}, {"iscrowd": 0, "category_id": 1, "bbox": [135.0, 416.0, 32.0, 32.0], "area": 556, 
"segmentation": {"size": [512, 512], "counts": "bmS22l?2N3N1N3M3N1N3M2O2M3N101000O010O01000O010M2O2M3M2O2M3M2O2M2N3NeR\\5"}, "image_id": 403, "id": 6385}, {"iscrowd": 0, "category_id": 1, "bbox": [31.0, 444.0, 57.0, 68.0], "area": 1891, "segmentation": {"size": [512, 512], "counts": "lm?`0Y1Kc<5]CKc<5]CKc<5]CKc<5]CKc<5]CKc<5]CKc<5]CKc<5]CKc<5]CKc<5]CKc<5]CKc<5]CKc<5]CKc<5]CKc<5]CKc<5]CKc<5]CKc<5XC0h<6RCJn<6RCJn^?11002N4K6K1O00O010000000O0100000O010000000O0100000O2O3MO5L00O10O100000O10O1000O10O100000O10O1000O104L5K4L5J5L\\Q`1"}, "image_id": 403, "id": 6389}, {"iscrowd": 0, "category_id": 1, "bbox": [299.0, 462.0, 16.0, 17.0], "area": 199, "segmentation": {"size": [512, 512], "counts": "hne44h?5K4N200010O00000010O0000M4K4LcQR3"}, "image_id": 403, "id": 6390}, {"iscrowd": 0, "category_id": 1, "bbox": [164.0, 463.0, 52.0, 41.0], "area": 1056, "segmentation": {"size": [512, 512], "counts": "R_b23k?3N1N3M2O2M3N1N3O10O010O10O10O010O10O10O01000O010O10O1N1N3N1N3M2OO01O01O010O2N2O2M2O2M3M2O2M3M2O2M2O2M\\ac4"}, "image_id": 403, "id": 6391}, {"iscrowd": 0, "category_id": 1, "bbox": [320.0, 463.0, 32.0, 25.0], "area": 564, "segmentation": {"size": [512, 512], "counts": "P_P52j?4L4K6K4000001O01O000001O01O0001O01O0001O01O0001O01O000M3K5L5K_a_2"}, "image_id": 403, "id": 6392}, {"iscrowd": 0, "category_id": 1, "bbox": [238.0, 483.0, 34.0, 29.0], "area": 424, "segmentation": {"size": [512, 512], "counts": "Z_g33l?1N3N2N2M210O10O1002M01000O01000N20O01000O10O10O10O01N2N2N1N3N2M2O\\`g3"}, "image_id": 403, "id": 6393}, {"iscrowd": 0, "category_id": 1, "bbox": [378.0, 485.0, 32.0, 27.0], "area": 421, "segmentation": {"size": [512, 512], "counts": "^_m53i?4L5N10000010O0000010O00c@GS?:i@IX??0Gg@LX?4h@LX?5h@JY?5g@LX?4h@LX?2j@NV?Nm@3T?HPA8]?O00001O00001N1M3NY`b1"}, "image_id": 403, "id": 6394}, {"iscrowd": 0, "category_id": 1, "bbox": [510.0, 491.0, 2.0, 8.0], "area": 10, "segmentation": {"size": [512, 512], "counts": "[_o72n?6_@"}, "image_id": 403, "id": 6395}, {"iscrowd": 0, "category_id": 1, "bbox": [97.0, 497.0, 23.0, 15.0], "area": 216, "segmentation": {"size": [512, 512], "counts": "no`12m?1N2N2O1N2O1N2O1O1001O1O001O001O1O001N2M2O2MY`S6"}, "image_id": 403, "id": 6396}, {"iscrowd": 0, "category_id": 1, "bbox": [147.0, 501.0, 25.0, 11.0], "area": 156, "segmentation": {"size": [512, 512], "counts": "noY22m?1N2O1N2N2O100001O001O1O001O1O001O001O1O001O001OQ`Y5"}, "image_id": 403, "id": 6397}, {"iscrowd": 0, "category_id": 1, "bbox": [326.0, 501.0, 23.0, 11.0], "area": 170, "segmentation": {"size": [512, 512], "counts": "o_S51n?1N2O1N2O1O1N21O1O0000N200001O1O001O1O1O0M4NVPa2"}, "image_id": 403, "id": 6398}, {"iscrowd": 0, "category_id": 1, "bbox": [222.0, 503.0, 21.0, 9.0], "area": 104, "segmentation": {"size": [512, 512], "counts": "o__31n?1N2O1N2N200001O001O1O001O1O001O001O1O00QPV4"}, "image_id": 403, "id": 6399}, {"iscrowd": 0, "category_id": 1, "bbox": [76.0, 0.0, 35.0, 55.0], "area": 1452, "segmentation": {"size": [512, 512], "counts": "PPV15k?7I7I8H7I7I4L00O100006J5K00000000O100000000000000O10006J7I7I7I8G8I7I_^X6"}, "image_id": 404, "id": 6400}, {"iscrowd": 0, "category_id": 1, "bbox": [124.0, 0.0, 44.0, 56.0], "area": 1672, "segmentation": {"size": [512, 512], "counts": "dPn12n?>A3N006J6J006J0000000]OnNXBQ1i=]OiAc0W>c000000000000000000000000O1000000]OoAWOQ>i0c000H8000000000000O109G>BYo[5"}, "image_id": 404, "id": 6401}, {"iscrowd": 0, "category_id": 1, "bbox": [196.0, 0.0, 67.0, 52.0], "area": 2598, "segmentation": {"size": [512, 512], "counts": 
"dPR3=c?b0^O1O0000O01000000000000000000000000000000000O0100GQOaAo0_>9000000000000I7000001O1M200000000000000000000000000000O10000000000QOVBHj=8o00000O10:Ff_l3"}, "image_id": 404, "id": 6402}, {"iscrowd": 0, "category_id": 1, "bbox": [355.0, 0.0, 73.0, 25.0], "area": 1525, "segmentation": {"size": [512, 512], "counts": "P`a5f0Z?00000000003M00000000000000000000L4000000000000000000000000000000000000000000000000000000O100000000000000000000000000000000000000000006J00000000joY1"}, "image_id": 404, "id": 6403}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 0.0, 50.0, 40.0], "area": 1786, "segmentation": {"size": [512, 512], "counts": "PQW77o>j000000000000000J6000000000000000000000000000001O000000000000000000000000000000000000000f0ZO00000000ZO"}, "image_id": 404, "id": 6404}, {"iscrowd": 0, "category_id": 1, "bbox": [447.0, 38.0, 11.0, 12.0], "area": 132, "segmentation": {"size": [512, 512], "counts": "Vao6c0000000O1000000000000000000000O1000000000000000000000009G00000000O10000000000000000000000000000000000:F00000000000000000O1000000000000000000000000000000000O102N000000000000000W^Q1"}, "image_id": 404, "id": 6406}, {"iscrowd": 0, "category_id": 1, "bbox": [65.0, 62.0, 28.0, 30.0], "area": 710, "segmentation": {"size": [512, 512], "counts": "oaP1e0[?0000O100000000005K4L00000O10000000000000O1000000G9000R^a6"}, "image_id": 404, "id": 6407}, {"iscrowd": 0, "category_id": 1, "bbox": [459.0, 68.0, 53.0, 67.0], "area": 2155, "segmentation": {"size": [512, 512], "counts": "jcU74h?4K5K6K4K5L4K6K4K5L4K5L50O0000010O000000010O0000010O0000000QOmA2S>HSB7m=EWB;i=@\\B`0d=\\O`Be0`=UOeBk0Y>O0000010O0O1L4O1010O000001O000L4KjL"}, "image_id": 404, "id": 6408}, {"iscrowd": 0, "category_id": 1, "bbox": [182.0, 83.0, 87.0, 58.0], "area": 3506, "segmentation": {"size": [512, 512], "counts": "kRk28h?X1:K0O1000O1=C1O000000000000O10O10000000000000[O@jA`0V>M]A3c>e000000O1000O1000000000004L00000000000O100M`NfA`1Z>300O0100000000000000000000WOkAGU>9WB[Oj=d0V1CYO[Ag0e>YOZAh0f>XOZAg0c>]O]Ac0c>=00O1000000000000000000000O100000O1002N?A>Bo[Z1"}, "image_id": 404, "id": 6410}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 104.0, 79.0, 39.0], "area": 2197, "segmentation": {"size": [512, 512], "counts": "Rd]17\\?=M30000001O0001O000000000000000000000J601O0000000001O0000000000000001O0000000001O00000000000001O0J6O10000000000000000001O01O00000000YOWA7i>\\OdAd0l>0000000000010O00M3BdlZ5"}, "image_id": 404, "id": 6411}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 115.0, 11.0, 12.0], "area": 132, "segmentation": {"size": [512, 512], "counts": "cSi6;0000000000N201O2M0100000000000000L4O10000000000000000000c0]O6J00000000000000000O100000000000O12Ne0[OmYf3"}, "image_id": 404, "id": 6414}, {"iscrowd": 0, "category_id": 1, "bbox": [356.0, 150.0, 87.0, 65.0], "area": 3372, "segmentation": {"size": [512, 512], "counts": "WUb5>b?=C000000000000O10L40000000000000005J10000000000000006M8E000000000000000000000O100000000000O100_OdA^O\\>b0a000O100000000000000000O1000000000000L400H;M0000000O1000000000000009G002Ne0[OZZR1"}, "image_id": 404, "id": 6415}, {"iscrowd": 0, "category_id": 1, "bbox": [493.0, 158.0, 19.0, 48.0], "area": 638, "segmentation": {"size": [512, 512], "counts": "\\ef71m?2j@0Z>3bA0[>3cA0Z>3cAO[>3bA0[>3cA0\\>1aA1_>O^A5a>e010O01O010O01O010O0nJ"}, "image_id": 404, "id": 6416}, {"iscrowd": 0, "category_id": 1, "bbox": [115.0, 160.0, 67.0, 68.0], "area": 2271, "segmentation": {"size": [512, 512], "counts": 
"fei13m?2M3N2M3N2M3N2M3N2M3N2M3N0O10O010O0101N3N2M3N2M3N2M3O100O1M100O010O010O010O010O01TOkAMT>4nAIS>6PBHo=9SBDn=;UBCj=>XB_Oi=`0ZB^Oe=c0]BZOd=e0_BYO`=h0bBUO_=j0k010O010O3N2M3N2M3N2M3N2M3N2M3NZjT5"}, "image_id": 404, "id": 6417}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 218.0, 12.0, 33.0], "area": 210, "segmentation": {"size": [512, 512], "counts": "j6Q1Q?N3L3N3M2M4M2M4M2N3L3N[hi7"}, "image_id": 404, "id": 6418}, {"iscrowd": 0, "category_id": 1, "bbox": [443.0, 221.0, 28.0, 28.0], "area": 459, "segmentation": {"size": [512, 512], "counts": "Ugm61n?3N2M4M2N2M4M2M3N1O0O010O010O01000O0103L3N2M4M2N2M4M2MaXd0"}, "image_id": 404, "id": 6419}, {"iscrowd": 0, "category_id": 1, "bbox": [134.0, 228.0, 27.0, 27.0], "area": 432, "segmentation": {"size": [512, 512], "counts": "\\WS23m?2M3N2M3N2M3N2M3N0O010O010O010O0100O3N2M3N2M3N2M3N2M]X_5"}, "image_id": 404, "id": 6420}, {"iscrowd": 0, "category_id": 1, "bbox": [236.0, 228.0, 50.0, 66.0], "area": 2211, "segmentation": {"size": [512, 512], "counts": "]Wf3i0W?000000005K8H00O10G9000b0^O1O0000000000O100000000000000002i0UO0000000NXAlNh>T12000O100000000000O1CWACi>==0000O6Kmg`3"}, "image_id": 404, "id": 6421}, {"iscrowd": 0, "category_id": 1, "bbox": [482.0, 242.0, 30.0, 64.0], "area": 1339, "segmentation": {"size": [512, 512], "counts": "XXa73k?3L3RAJP>9mAIQ>9lAKP>8nAJP>9mAIS>7jAMU>4hANY>1eA2X>1dA2Z>l0N100010O010O010O00010O010O01M2N2N0O1`H"}, "image_id": 404, "id": 6422}, {"iscrowd": 0, "category_id": 1, "bbox": [183.0, 245.0, 39.0, 77.0], "area": 2003, "segmentation": {"size": [512, 512], "counts": "Whk24^?Ol@7o>Nl@7o>b0K6J5J6K5K5K3M0O01001O5K5J2OO100000O10O10001O5K6I6K5K5K5K5J6K5K5K5K6I6KWf`4"}, "image_id": 404, "id": 6423}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 255.0, 33.0, 49.0], "area": 1020, "segmentation": {"size": [512, 512], "counts": "]8S1m>O010O0100HPO_AP1b>RO\\An0d>6010O0100O0100O010O2O3M1N3N3L3N2M4M2N2M4M2M3N3L3NPW_7"}, "image_id": 404, "id": 6424}, {"iscrowd": 0, "category_id": 1, "bbox": [404.0, 259.0, 55.0, 45.0], "area": 1421, "segmentation": {"size": [512, 512], "counts": "jXZ62l?2M4M2N3M2N3M2N3M2N3M210O010O0010O0010O010N1N3M2010O010O010O010O010O010O010O01OROWAe0j>XOXAh0h>VO[Aj0m>00O2M2N3M2M4M2N3M2N3M2NYWj0"}, "image_id": 404, "id": 6425}, {"iscrowd": 0, "category_id": 1, "bbox": [258.0, 300.0, 55.0, 63.0], "area": 1855, "segmentation": {"size": [512, 512], "counts": "WZQ41n?3N2M2O2M3N2M2N3N2M3N2M2O2M3M3N1N3N2M2OO010O03M2IbNjA_1T>610O010O01O0KjAbNV>^1lA_NT>a1510O01002O1M2O2M3N2M3SOUA`0m>^OUALNa0o>AXA=k>@XA>W?M3N2M2O2M3M[US3"}, "image_id": 404, "id": 6426}, {"iscrowd": 0, "category_id": 1, "bbox": [381.0, 300.0, 60.0, 78.0], "area": 2129, "segmentation": {"size": [512, 512], "counts": "oin51n?2N2N2N2N2N2N2N2dA@U=b0iB@T=c0jB_OT=b0jBAT=a0jBAT=a0jBAT=a0jBAT=a0jBAT=a0bBPOKa0a=a0bBPOK`0b=b0aBPOK`0d=`0_BROK`0f=>]BLc=4[BNe=1ZB1f=T11N2N2M3N1O2N2N2N2N2N2HeAhN]>V1eAhN]>V18N2N1N3N1O0000001O2N2N2N2M3N2N2N2N1O2N2N2N2N2MhUS1"}, "image_id": 404, "id": 6427}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 305.0, 67.0, 45.0], "area": 1627, "segmentation": {"size": [512, 512], "counts": "V:4k?3N2M3M3N1N3N2M3N2M2O2M1O010O01O010O010O01O010O010O01O010O010O01O010O01O010O2O1N3N200000O10O1000O1M0010O010O011N3M3N1N3N2M3N2M2O2M3M_Un6"}, "image_id": 404, "id": 6428}, {"iscrowd": 0, "category_id": 1, "bbox": [485.0, 319.0, 15.0, 15.0], "area": 135, "segmentation": {"size": [512, 512], "counts": "Yjb71m?2M4M2N3N1010O0010O00M4M2N3Moe5"}, "image_id": 404, "id": 6429}, {"iscrowd": 0, "category_id": 1, "bbox": [210.0, 332.0, 49.0, 49.0], "area": 1318, 
"segmentation": {"size": [512, 512], "counts": "fZY33m?1N3N2M3M2O2M3N2M2O2M3M3N1N3N2M3M21000O10O1N0O0010O010O0010O00101N3N1N3N2M3M2O2M3N2M2O2M3M3N1N3NeTn3"}, "image_id": 404, "id": 6430}, {"iscrowd": 0, "category_id": 1, "bbox": [452.0, 348.0, 60.0, 65.0], "area": 2331, "segmentation": {"size": [512, 512], "counts": "Y\\R73j?4K4L4M4K4O10010O0000010O00010O0000010O00010O000L4M4K4M3L4M4K4N201N100O101N100O2O01O00010O0000010O1O2O0O2`NiAS1Y>gNkAY1^>10OOSO^A>_>AfA?U>DlA3hAK[>2hALY>3iAJZ>3hALZ>1iALY>3hAK[>2hALZ>1iALY>3hALZ>1iALY>3l0MmS^2"}, "image_id": 404, "id": 6433}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 373.0, 71.0, 61.0], "area": 2118, "segmentation": {"size": [512, 512], "counts": "V<\\1d>00O3N2M2O1N1O010O010O00010O010O01O01O010O010O00010O010O01O01O010O010O00010O011N3N2M1O10O010O00010O010O010O00010O010O03M2O2M3N1N3N2M3M2O2M3NhSl6"}, "image_id": 404, "id": 6434}, {"iscrowd": 0, "category_id": 1, "bbox": [44.0, 402.0, 73.0, 51.0], "area": 1844, "segmentation": {"size": [512, 512], "counts": "Z]f01o?2M2N3N1N3M2O2M2N3N1N3M010O000100O2N3N1N0010O00010O1O3N1N3N2M2N3N1N3M2O2M2N2OO01O01O01O01O01O01O2O2M2O2M3M2O2M2N3N1N3M010O00011N2N3N1N3M2O2M2O2MdRU6"}, "image_id": 404, "id": 6435}, {"iscrowd": 0, "category_id": 1, "bbox": [255.0, 402.0, 54.0, 63.0], "area": 1893, "segmentation": {"size": [512, 512], "counts": "jlo33l?3N1N3N1N3M3N1N3N1N3M3N1N3O1O0100O01M3N1N3M2O2M3M2O2M2O2O1O010O1M010O0010O0010O02TOjALX>2jAKX>3kAJX>4iAKY>2jAKX>3jAKY>3iAKX>3kAJX>3jALX>2jAKX>3kAJX>4k0NjRU3"}, "image_id": 404, "id": 6436}, {"iscrowd": 0, "category_id": 1, "bbox": [418.0, 419.0, 87.0, 54.0], "area": 2399, "segmentation": {"size": [512, 512], "counts": "l]a64j?2M4M2M3N3L310O00010O010O00010O010O00010O001L3N2N3O010O00010O010O00010O010O00010O010O00010O010O00001M2M4N10010O0010O0010O0O2L3N2M3NO011N3N3L3N3L3N2M4N110O0M3N3L3N3L3N2M4M2MiR3"}, "image_id": 404, "id": 6437}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 433.0, 59.0, 57.0], "area": 1852, "segmentation": {"size": [512, 512], "counts": "R>;d?3N1N3M2O2M2N3N1O01O2M2N12M2N3NO03M2O2M3M2O2M2N3N1N2N01O010O00010O00010O00102M2O2M2KZAmNi>Q14N3M2O2M3M2O2M2N3N1N3M2O2M2N3N`QR7"}, "image_id": 404, "id": 6438}, {"iscrowd": 0, "category_id": 1, "bbox": [345.0, 443.0, 50.0, 51.0], "area": 1449, "segmentation": {"size": [512, 512], "counts": "mn\\52l?2M3N3M2M4M2M4M2N2M4M2N3L3N3N11O01O010O01O01O010O010O01O01OjN[AQ1j>1O01O010O010M2M3N3M2M4M2M3N3M2M4M2M4MdQj1"}, "image_id": 404, "id": 6439}, {"iscrowd": 0, "category_id": 1, "bbox": [284.0, 457.0, 28.0, 29.0], "area": 433, "segmentation": {"size": [512, 512], "counts": "d^^41n?2O2M2O2M3M2O2M3M2O2M2OO01O01O01O03N1N3M2O2M3M2O2M2O2MXaS3"}, "image_id": 404, "id": 6440}, {"iscrowd": 0, "category_id": 1, "bbox": [33.0, 479.0, 82.0, 33.0], "area": 1629, "segmentation": {"size": [512, 512], "counts": "lo`01o?2M2N100O1O1O100O1O100O1O100O1O100O1O1O100O1O100O1O100O1O1001O1O2N1O1OO1O1L]Oj@d0U?^Oj@b0U?500001O1O00O1O1O100O1O100O11O1O1O1O00O1O1O100O1O100O0010O3M2O2M2N3M2O1N3M2O2M2N3N1N_PV6"}, "image_id": 404, "id": 6441}, {"iscrowd": 0, "category_id": 1, "bbox": [148.0, 480.0, 25.0, 20.0], "area": 266, "segmentation": {"size": [512, 512], "counts": "Z_Z21n?2N3N1N3M2O2M0010O000010O000101O2O010O001O3K2O2MaPY5"}, "image_id": 404, "id": 6442}, {"iscrowd": 0, "category_id": 1, "bbox": [308.0, 483.0, 43.0, 29.0], "area": 764, "segmentation": {"size": [512, 512], "counts": "__j42n?2M2N3N1N3M2O2M2N2O2M1O100O1O100O1O12N1O2N1O2N1O1O0000O1O12N1O1OO1O1001O2N1Ga@Oa?Na@1`?Nb@0h?O1OQP`2"}, "image_id": 404, "id": 6443}, {"iscrowd": 0, "category_id": 1, "bbox": 
[412.0, 508.0, 10.0, 4.0], "area": 24, "segmentation": {"size": [512, 512], "counts": "o_^61o?0O100O100O1001O2NQ`\\1"}, "image_id": 404, "id": 6444}, {"iscrowd": 0, "category_id": 1, "bbox": [508.0, 237.0, 4.0, 8.0], "area": 18, "segmentation": {"size": [512, 512], "counts": "aWn72m?1O2M3bH"}, "image_id": 406, "id": 6445}, {"iscrowd": 0, "category_id": 1, "bbox": [407.0, 411.0, 88.0, 82.0], "area": 3881, "segmentation": {"size": [512, 512], "counts": "dn[61l?4M2M3N3M2M4M2M4M2N2M4M2M4M2N3L3N2M4M2N3L3N2N3M2010O010O00010O010O0010O0010O010O0010O0010O0010OUNVB`1j=^NXBc1h=YN\\Bf1d=XN^Bi1m=O0010O0010O0O2M2M3N3M2M4M2M4M2N2M4M2010O010O00010O010O00M4M2N3L3N3L3N2NRR8"}, "image_id": 406, "id": 6446}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 139.0, 11.0, 31.0], "area": 228, "segmentation": {"size": [512, 512], "counts": "\\4l0S?20O01O01L3\\Ol@:V?Cn@9^?M4M2M\\[j7"}, "image_id": 408, "id": 6447}, {"iscrowd": 0, "category_id": 1, "bbox": [75.0, 210.0, 14.0, 15.0], "area": 110, "segmentation": {"size": [512, 512], "counts": "gfU11n?2N2N3N1N2N200000N2N2N2N3MWYc6"}, "image_id": 408, "id": 6448}, {"iscrowd": 0, "category_id": 1, "bbox": [32.0, 220.0, 66.0, 59.0], "area": 1992, "segmentation": {"size": [512, 512], "counts": "WW`03l?2N2N2O2M2N2N2N2j@@j>b0TA_Oj>c0TA_Oj>c0TA_Oj>n0M200001O01O0001O01O01O0001O01OO1N2N001O0001O0001O2N10O000N_AfNa>Z1210O2N2N2N2N3N1N2N2N3M2O1N2N3M2N2O1N2N3M2N2N2OVh^6"}, "image_id": 408, "id": 6449}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 315.0, 10.0, 21.0], "area": 134, "segmentation": {"size": [512, 512], "counts": "n9b0[?3000O2L3N2M4L3N3LUfj7"}, "image_id": 408, "id": 6450}, {"iscrowd": 0, "category_id": 1, "bbox": [9.0, 318.0, 39.0, 34.0], "area": 783, "segmentation": {"size": [512, 512], "counts": "cj41l?3N3L3N3L3N2M4M2O2O01O01O010O01O01O010O01O01O010O01O01O010O01L3N2M4M2M4M2M3NieW7"}, "image_id": 408, "id": 6451}, {"iscrowd": 0, "category_id": 1, "bbox": [221.0, 338.0, 33.0, 35.0], "area": 691, "segmentation": {"size": [512, 512], "counts": "\\k^31l?4M2M3N3L3N3L3M3N3L301O01O010O01O01O010O001L3N2M4M201M2N3L3N2M4MWeP4"}, "image_id": 408, "id": 6452}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 347.0, 97.0, 78.0], "area": 3947, "segmentation": {"size": [512, 512], "counts": "S;T1l>010O0010O0010O0010ON2N3M2M4M2O2O01O01O010O01O01O010O01O01O010O0dA`NY>c1010O0O1N30O01O01O010O010O00010O010O00010O010OiNfAh0Z>VOiAi0X>TOjAm0U>POoAo0R>nNPBS1^>01O010O010O01O01O010O01O01O010O01O01O010O01O01O0O2L3N2M4M2N3L3N3L3N2M4MgS_6"}, "image_id": 408, "id": 6453}, {"iscrowd": 0, "category_id": 1, "bbox": [104.0, 358.0, 97.0, 64.0], "area": 3247, "segmentation": {"size": [512, 512], "counts": "S\\d14i?3M3M4M21O010O01O01O010O01O01O01O01O010O01O01O010OO2L3N2M4L3N2M4M2M4L3N201O010O00010O010O00010O01O01O010O01O01O010O00010O01O01O010O01O01O010O00010O01O00001O010O01O01O01O01O0M4QO[A>h>_O[A?g>_O\\A=h>_O[A?T?M4L3N3L3NhSk4"}, "image_id": 408, "id": 6454}, {"iscrowd": 0, "category_id": 1, "bbox": [193.0, 393.0, 95.0, 64.0], "area": 2994, "segmentation": {"size": [512, 512], "counts": "UmP34i?3M4L3M3M4L3M4L3M3O20O00010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O00010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O00010O01O01O010O01O01O010O01O01M2N3L3N2M4L3N3L3N2M4M2Mib_3"}, "image_id": 408, "id": 6455}, {"iscrowd": 0, "category_id": 1, "bbox": [376.0, 393.0, 51.0, 35.0], "area": 889, "segmentation": {"size": [512, 512], "counts": "g\\l53k?3M2M3N3L3N3O01O01O010O010O01O01O010O01O01O010O010O01O01O010O01O01O010O01O01O010O010O01O0N2M4M2M4M2MXSZ1"}, "image_id": 408, "id": 6456}, {"iscrowd": 0, 
"category_id": 1, "bbox": [509.0, 226.0, 3.0, 5.0], "area": 9, "segmentation": {"size": [512, 512], "counts": "Ugn71n?2M2nH"}, "image_id": 409, "id": 6457}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 237.0, 54.0, 71.0], "area": 2007, "segmentation": {"size": [512, 512], "counts": "VXi63h?5L4L4e@BS?f001O000N20001O01O000001aAWOf=h0WB[Oi=e0SB@l=`0QBCo==nAFR>;jAIV>6fANZ>l000010OO1L5K4L410O00010O01O01OTOWBAj=:[BEe=8^BIa=3dBL]=OgBM]=0gBL]=1eBK_=5aBGc=9^BCf==n000010OM4L3M3M]h;"}, "image_id": 409, "id": 6458}, {"iscrowd": 0, "category_id": 1, "bbox": [351.0, 238.0, 67.0, 76.0], "area": 2867, "segmentation": {"size": [512, 512], "counts": "Si_53j?7I7I7I7I7I3N2M2N3M3M3M2N31O00O0N3N21O0O101O0]OmAYOS>g0QBTOQ>k0SBQOm=n0XBnNh=R1\\BjNe=2lAONj0T?O0O110O000010XAWOW>i0eA\\OZ>d0bA@_>`0]AC`>P1M4L3000010O0N2M3M4L3M3M3N30M2^NWBi0m=TOWBg0n=UOUBg0o=VOTBf0P>WOTBd0Q>ZOPBb0T>^OlA>X>ChA9\\>FdA6`>J`A2d>O\\ALi>3b0N_h^1"}, "image_id": 409, "id": 6459}, {"iscrowd": 0, "category_id": 1, "bbox": [267.0, 267.0, 78.0, 57.0], "area": 2426, "segmentation": {"size": [512, 512], "counts": "UiU42l?2M4M2N2M4M2O2O01O010O010O0001G621O00001O0O2O001WAUO[>k0cAWO]>i0aAZO^>g0^A\\Ob>R1O00001N101O001O00001O001O0O2OO4K5L11O000O2M0O1001O2B`AZOc>c0_AZOd>d0_AYOd>g0[AWOg>i090O01O010O010O0O1N3M210O0@i@8W?Ek@;^?N1N3L3N2NTWc2"}, "image_id": 409, "id": 6460}, {"iscrowd": 0, "category_id": 1, "bbox": [503.0, 275.0, 9.0, 34.0], "area": 193, "segmentation": {"size": [512, 512], "counts": "Wik72j?4L4M4b@FP?>l@Go>g0O11O01[G"}, "image_id": 409, "id": 6461}, {"iscrowd": 0, "category_id": 1, "bbox": [448.0, 318.0, 15.0, 12.0], "area": 117, "segmentation": {"size": [512, 512], "counts": "VZP71l?3M3N3O00010O00010O0001N1M3MRVh0"}, "image_id": 409, "id": 6462}, {"iscrowd": 0, "category_id": 1, "bbox": [472.0, 319.0, 40.0, 52.0], "area": 1447, "segmentation": {"size": [512, 512], "counts": "kZ\\74h?5c@Jk>9QAKk>9QALk>f0000010O000010O0000O2K4M3L4L5O0001L3L4L5L31O0001O01O00010O0001O01O00010jE"}, "image_id": 409, "id": 6463}, {"iscrowd": 0, "category_id": 1, "bbox": [243.0, 326.0, 110.0, 58.0], "area": 3484, "segmentation": {"size": [512, 512], "counts": "Ski31l?4M2N2M4M2M4M2H^Oo@e0o>8O010O0001TOPAg0o>VOUAi0Q?1O01OO2M2M4N10010O0010O010O0010O0010O0010O0010O0010O0010O0010O0010O010O0010O0010O00N3L3N3L3N2M4M2O2O01O010O0O2L3N2M4M2N3L3N2M4M2O2O00010O010O00010O010O00010O010O0010O0001L3N3L3N2M4M2M4M2NZU_2"}, "image_id": 409, "id": 6464}, {"iscrowd": 0, "category_id": 1, "bbox": [393.0, 343.0, 69.0, 73.0], "area": 2574, "segmentation": {"size": [512, 512], "counts": "flT62j?5L3CJl@:P?Jm@:o>=L4L4M4N10001O01O00010O0001O01O00M4K4M3L4M4K10O104K4N200O2L3L4L5M20000010O00010O00N2L5L30001O01O00010O0000010O0001L3L4M3L5K4M3LReh0"}, "image_id": 409, "id": 6465}, {"iscrowd": 0, "category_id": 1, "bbox": [189.0, 392.0, 82.0, 63.0], "area": 2958, "segmentation": {"size": [512, 512], "counts": "Qmn21l?4M2N3_@GV?NjA3U>KnA4S>HPB6R>HPB6S>FQB6R>HPB6R>HQB5R>GQB6R>HPB6o>L3NaRh3"}, "image_id": 409, "id": 6466}, {"iscrowd": 0, "category_id": 1, "bbox": [471.0, 396.0, 41.0, 43.0], "area": 1229, "segmentation": {"size": [512, 512], "counts": "\\m[71k?4L4L5K4L4O110O0000010O0O1L4L5K41O0001O01O0001O01O0001O01O0001O01O0001O01O0001N1_C"}, "image_id": 409, "id": 6467}, {"iscrowd": 0, "category_id": 1, "bbox": [392.0, 437.0, 91.0, 63.0], "area": 3839, "segmentation": {"size": [512, 512], "counts": 
"c^T6:X?>O1O1O1O100O1O1O1O1O1O1O1O1O100O1O1OM400000000000001O0000001O1O1O1O2h0WOO1O000000000000000000000001O01O0000000000000000000000000O1DiAoNW>P1jAPOV>P1jAPOV>P1jAPOV>P1jAPOS>T1lAlNS>U1mAkNR>V1nAjNQ>W1oAiNP>X1<000000000000000001O2M2Hd0C0001OCTR>"}, "image_id": 409, "id": 6468}, {"iscrowd": 0, "category_id": 1, "bbox": [177.0, 453.0, 76.0, 59.0], "area": 2861, "segmentation": {"size": [512, 512], "counts": "boh21m?2M3N3L3N3M2M3N3M2M4M2M4M2N2M4M2M4N110O00010O010O01O01O010M2N3M2N21O00001O001O001O00001O001O001O00001O001O000YOdAJ]>3eAM[>1hANY>NjA2V>LmA3T>IoA7Q>GRB8n=FTB:m=BVB>j=@YB?d>01O00001O001M2N3L3N2N_PQ4"}, "image_id": 409, "id": 6469}, {"iscrowd": 0, "category_id": 1, "bbox": [492.0, 460.0, 20.0, 35.0], "area": 477, "segmentation": {"size": [512, 512], "counts": "W_f72k?3L4M4L3O101N1M3L4M4M20001O00001O00001cA"}, "image_id": 409, "id": 6470}, {"iscrowd": 0, "category_id": 1, "bbox": [269.0, 497.0, 35.0, 15.0], "area": 307, "segmentation": {"size": [512, 512], "counts": "ooV41m?2M3N2N2N2M300001O001O001O001O00001O001O001O001O00001O001O001O001O00S`W3"}, "image_id": 409, "id": 6471}, {"iscrowd": 0, "category_id": 1, "bbox": [502.0, 510.0, 8.0, 2.0], "area": 9, "segmentation": {"size": [512, 512], "counts": "o_k71o?00000000000O1R`0"}, "image_id": 409, "id": 6472}, {"iscrowd": 0, "category_id": 1, "bbox": [449.0, 511.0, 5.0, 1.0], "area": 5, "segmentation": {"size": [512, 512], "counts": "ooP71o?0000000Q`l0"}, "image_id": 409, "id": 6473}, {"iscrowd": 0, "category_id": 1, "bbox": [211.0, 0.0, 37.0, 13.0], "area": 259, "segmentation": {"size": [512, 512], "counts": "P`Y31o?001O00001O001O001O00001O001O001O00001O001O00001O001O001O0000O1N2N2M3N2NRPT4"}, "image_id": 410, "id": 6474}, {"iscrowd": 0, "category_id": 1, "bbox": [263.0, 0.0, 50.0, 68.0], "area": 2517, "segmentation": {"size": [512, 512], "counts": "\\`S42l?2N3M2VAHi=9O10OO20000000000000O1000000000000O10000"}, "image_id": 410, "id": 6477}, {"iscrowd": 0, "category_id": 1, "bbox": [157.0, 21.0, 89.0, 42.0], "area": 2657, "segmentation": {"size": [512, 512], "counts": "_a^21i?6K5K5O1010O03M000K6L3000000010O00000000010O00L40001O000001O0001O000001O01N11O00000001O01O000001O0001O0001O000001O0001O000001O01O00000001O000000000TOWAa0j>YO[Af0P?0000000O010000000005J7J5KZnT4"}, "image_id": 410, "id": 6478}, {"iscrowd": 0, "category_id": 1, "bbox": [123.0, 70.0, 87.0, 66.0], "area": 3177, "segmentation": {"size": [512, 512], "counts": "lbm11m?2N2M4M2M4g@Ck>?SACk>`0QADn>BoA;R>BRB=m=AVB>k=^OXBc0g=[O\\Bd0d=YO_Bg0b=VO`Bi0]>L3N3M2M4M2M3N3Malf4"}, "image_id": 410, "id": 6479}, {"iscrowd": 0, "category_id": 1, "bbox": [510.0, 119.0, 2.0, 10.0], "area": 13, "segmentation": {"size": [512, 512], "counts": "gSo73m?7RL"}, "image_id": 410, "id": 6480}, {"iscrowd": 0, "category_id": 1, "bbox": [446.0, 123.0, 66.0, 47.0], "area": 2207, "segmentation": {"size": [512, 512], "counts": "PTo65k?7I7I3L10000000O11O7H8I6J000000000O10O100000000000O0100000000000O10O10000000002N7H8K5I2NO10000000O1000O1000000AB\\A=e>?000000O1000L4000nK"}, "image_id": 410, "id": 6481}, {"iscrowd": 0, "category_id": 1, "bbox": [105.0, 130.0, 84.0, 61.0], "area": 2872, "segmentation": {"size": [512, 512], "counts": "ndd13j?3N2M4M2M4M2M3N3L3N3O00010O0010O0010O0010O0010O0010O0010O0010O0010O001N1N2M4M2M40O0010O0010O0010O0010O010ON2M4M201O00010O010O0001WOgAJX>3kAMV>0lA1S>LQB3o=KSB5n=GVB9i=EYB;h=A[B`0d=^O_Ba0a=\\ObBd0^>0OO1M4M2M4M2M3N3LhZQ5"}, "image_id": 410, "id": 6482}, {"iscrowd": 0, "category_id": 1, "bbox": [391.0, 136.0, 50.0, 80.0], "area": 2498, "segmentation": {"size": [512, 512], "counts": 
"cdS64l?7I5J10O100000KAi@?W?50O14L6I10007I6J7I6J2M10O1005K6J4L0O01000000000O01005K0000000O010000^NeB?[=AkB9[=AlB8Z=BlB8[=AlB7\\=AlB8d>J^YS1"}, "image_id": 410, "id": 6483}, {"iscrowd": 0, "category_id": 1, "bbox": [327.0, 143.0, 57.0, 80.0], "area": 3031, "segmentation": {"size": [512, 512], "counts": "ReS57h?8I7I6CXOVAg0k>7000000O5L7I2N000000O0100M30000O5L7I7I5K000N110O1000000000O10O1000000000O10O10000000\\NcBe0_=YOhB?`=ZOgB?`=ZOgB?`=ZOgB?c>J0L\\@Ld?44O10`io1"}, "image_id": 410, "id": 6484}, {"iscrowd": 0, "category_id": 1, "bbox": [73.0, 189.0, 97.0, 63.0], "area": 2867, "segmentation": {"size": [512, 512], "counts": "dfT12k?3N3L3M3N3L3N3L3N2010O0010O0010O00010O010O00010O00010O010O00010O0010O0010O00010O010O00010O0010O0010O00N3M2N3L3N2N3M21O010O01O0_AeN^>_1O010O00010O00010OXOdAL]>0fA1Y>MjA2V>KmA5T>HnA9Q>DSB;n=AUB`0j=^OYBa0g=\\O\\Bd0`>10O0010O0010O000M4M2M4M2M3M4MihZ5"}, "image_id": 410, "id": 6485}, {"iscrowd": 0, "category_id": 1, "bbox": [482.0, 191.0, 30.0, 36.0], "area": 838, "segmentation": {"size": [512, 512], "counts": "aVa73m?7I6I3N00000CCWA=i>=O10O1000000000O10O10000000O10O1000000000O1QJ"}, "image_id": 410, "id": 6486}, {"iscrowd": 0, "category_id": 1, "bbox": [256.0, 197.0, 90.0, 66.0], "area": 3047, "segmentation": {"size": [512, 512], "counts": "[WP43j?3N3M2N3M2N3M2N2N3M2M4M2N3M2N3M2N3M2010O010O0N3M2N3O010O010O010O01O010O01O01L3O20O010O010O010O010O010dN`AV1`>hNcAX1b>010O010O01O010mNYAk0g>SO[An0d>PO_Ao0i>10O010O010O010M2N3M2O20O010O010O01O0O1N3M2N3L3N3M2N3Mchb2"}, "image_id": 410, "id": 6487}, {"iscrowd": 0, "category_id": 1, "bbox": [465.0, 227.0, 18.0, 10.0], "area": 135, "segmentation": {"size": [512, 512], "counts": "UgX75k?3M0000000O0100000000000O01000006JgX>"}, "image_id": 410, "id": 6488}, {"iscrowd": 0, "category_id": 1, "bbox": [75.0, 232.0, 71.0, 66.0], "area": 2322, "segmentation": {"size": [512, 512], "counts": "bhU11l?3M4M2M4L31O010O]OBbA`0[>CbAa0[>D_A?^>E_A>`>a00001L300010O010O00010O00010O010O00010O001SOcA6\\>GhA8X>HhA9X>GgA9Y>GhA8X>IgA8Y>GhA8X>HgA9Z>GdA;[>ChAAkA`0U>\\OoAc0Q>[OQBe0o=XOTBi0b>O00010O010O00010O01O01O010O0N2M4M2M3M4M2M4L3N`gf5"}, "image_id": 410, "id": 6489}, {"iscrowd": 0, "category_id": 1, "bbox": [404.0, 248.0, 57.0, 44.0], "area": 1761, "segmentation": {"size": [512, 512], "counts": "YXZ67h?8I7I6J0000O10O1000000GUO]Ak0c>\\OVAc0k>9000000O1000O10000000MmNXAT1g>30O10000000000KXAROg>n0603M000000O10O1000000000O10O100000005K7I8G8I\\Wi0"}, "image_id": 410, "id": 6490}, {"iscrowd": 0, "category_id": 1, "bbox": [483.0, 258.0, 29.0, 30.0], "area": 653, "segmentation": {"size": [512, 512], "counts": "Vha71o?7I7I3M00000O10O10000003M6I100000000000O0100000000000O01nG"}, "image_id": 410, "id": 6491}, {"iscrowd": 0, "category_id": 1, "bbox": [231.0, 261.0, 86.0, 56.0], "area": 2666, "segmentation": {"size": [512, 512], "counts": "Tic32l?3L3N2N3L3N3L3N3N10010O010O0010O0010O0010O0010O010O0010M2N30N1M4M2N3L3N2N3L3N30O0010O0010M201O010O00010O010O010O00010O010O010O00010O01N1O2O0001M2N3L3GWAYOl>d0VAZOl>c0:N3M2M4M2N3L3N2N]WQ3"}, "image_id": 410, "id": 6492}, {"iscrowd": 0, "category_id": 1, "bbox": [48.0, 294.0, 87.0, 66.0], "area": 3023, "segmentation": {"size": [512, 512], "counts": "eYh03k?2N3M2M4g@Fh>;UAGk>g0010O010O0010O0010O0010O01O01M2N3M200010O010O0010O00010O010O0N2N3N110O3M010O010O010O01O01O010OmNgA?Z>^OhAc0W>[OlAd0U>XOnAh0R>VOPBk0o=SOTBl0`>10O010O010O00010O010O010O00010O0M4M2N3M2M4M2N2M4M2NeUl5"}, "image_id": 410, "id": 6493}, {"iscrowd": 0, "category_id": 1, "bbox": [508.0, 301.0, 4.0, 24.0], "area": 55, "segmentation": {"size": [512, 512], "counts": 
"]Yn74l?6J7I7\\F"}, "image_id": 410, "id": 6494}, {"iscrowd": 0, "category_id": 1, "bbox": [413.0, 308.0, 45.0, 71.0], "area": 1929, "segmentation": {"size": [512, 512], "counts": "Sk^66j?7I7[ODXA`0d>>00QOROlBm0U=ZOdBf0\\=B\\B>d=o00000000O0100000000000O0100001XOYBUOh=j0_BoNa=Q1fBhN[=W1h0O1O0O2O1O4L7I7I3M0O1000O100001O7IYej0"}, "image_id": 410, "id": 6495}, {"iscrowd": 0, "category_id": 1, "bbox": [215.0, 310.0, 103.0, 71.0], "area": 3539, "segmentation": {"size": [512, 512], "counts": "Tj[31h02Z>0dA2Z>0cA4Y>0dA2Z>0cA4Y>0dA2Z>0dA2\\>O`A5_>K_A7b>d0O00010O010O0010O0010O010O00N30O010O00010O010O01O01O0102M10O01O03M01O0RO\\Aa0d>\\O^Ad0b>YOaAh0_>UOdAj0i>10O00013L010O00010O010O010N1M3010O010N1N2M4M2N3L3N3L3N2N3O010O01O0N210O0010O010O00hN^AR1i>O0M3N3L3N3M2M4M2M3N3M2M4MUeP3"}, "image_id": 410, "id": 6496}, {"iscrowd": 0, "category_id": 1, "bbox": [324.0, 326.0, 10.0, 10.0], "area": 73, "segmentation": {"size": [512, 512], "counts": "\\ZR52l?3L3O101O01O000N2Mleh2"}, "image_id": 410, "id": 6497}, {"iscrowd": 0, "category_id": 1, "bbox": [313.0, 326.0, 83.0, 77.0], "area": 3440, "segmentation": {"size": [512, 512], "counts": "]ll44b?:0001O00000000000000000H8@`00000000001O00000000000001O000000000000000E;000O1@`0E;000001O00000001O000000000000I7LK9M30001O0000000000000001O00000000000b0XAAe>b0WAAf>b0XAAe>n0M3N3L301O01O01O010O01O01O010O01O01O01O0M4M2M3N3L3N3L3N2]Om@M11V?Ok@N20U?Ol@N20\\dZ7"}, "image_id": 410, "id": 6500}, {"iscrowd": 0, "category_id": 1, "bbox": [59.0, 350.0, 41.0, 57.0], "area": 1519, "segmentation": {"size": [512, 512], "counts": "mkm02k?3N3L3N3i@Cf>`0WADf>>XADe>`0WACg>?WADe>m0M3N3L301O01O010O01O01O010O01O01O010O000M4M1N103L3N3L3\\ORAJ2Oo>5QAI30o>3RAJ10P?4d0Lmd]6"}, "image_id": 410, "id": 6501}, {"iscrowd": 0, "category_id": 1, "bbox": [175.0, 378.0, 102.0, 75.0], "area": 3192, "segmentation": {"size": [512, 512], "counts": "klg23j?3N3M2M3N3L3N3M2N210O010O0010O0010O010O00010O010O0010O0010O001L3N2M4M2N3L3N3L3O110O010O00010O010O0010O0010O010O00010OROgA5Z>HhA9W>DmA;S>CoA>Q>^OSBa0m=]OUBc0l=ZOVBg0i=VO[Bi0^>10O00010O010O010O00010O010O00010O010O00010O010O0[Om@>S?^OQAa0W?010OO1N3L3N3L3N3M]Re3"}, "image_id": 410, "id": 6502}, {"iscrowd": 0, "category_id": 1, "bbox": [495.0, 382.0, 17.0, 42.0], "area": 558, "segmentation": {"size": [512, 512], "counts": "Plg72n?7I6J7H8I6J5KO100000O100000O1000RD"}, "image_id": 410, "id": 6503}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 385.0, 22.0, 23.0], "area": 427, "segmentation": {"size": [512, 512], "counts": "c\\R63`?=K51O01O0000000000000000000000010O000000EXdb1"}, "image_id": 410, "id": 6504}, {"iscrowd": 0, "category_id": 1, "bbox": [1.0, 404.0, 80.0, 64.0], "area": 2594, "segmentation": {"size": [512, 512], "counts": "am02l?2N2M4M2M4M200010ON3L3N2M4M2N3M210O01OO2O010O00010O010O01O01O010O0YAkNd>Y1O0010O0010O0N3M210O01O010O01O01O010O01O01O010O01OROcA9^>DdA<\\>AhA?X>^OjAb0V>\\OlAe0S>XOQBg0d>10O00010O010M2N3L3N2M4M2M4M2NURg6"}, "image_id": 410, "id": 6505}, {"iscrowd": 0, "category_id": 1, "bbox": [417.0, 417.0, 70.0, 56.0], "area": 2515, "segmentation": {"size": [512, 512], "counts": "bm`66j?7I7H8I2N0000O1006I1000O1000CPOjAP1V>VOdAj0\\>=O10O1000000000O3N0000O100000006J000O100000007I3M0LSATOn>l03000O10O1000000000O10O10000007H7J000000O4M7I7I7I7I7I\\Q<"}, "image_id": 410, "id": 6506}, {"iscrowd": 0, "category_id": 1, "bbox": [294.0, 432.0, 98.0, 49.0], "area": 2697, "segmentation": {"size": [512, 512], "counts": 
"P^c47a?8H80000000001O000001O000001O000001O00000001O000001O00000001O000001O00000001i@]Oo>j000001O00000001O0001O00000001O000001O00000001O000001O000001O000001O000001O000QAUOi>R1O01O0000000001O0001O000QO\\Ab0Q?00001O00I7H9Hnak1"}, "image_id": 410, "id": 6507}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 461.0, 56.0, 51.0], "area": 1784, "segmentation": {"size": [512, 512], "counts": "e>W1j>O01O010O010O01O01O0N200N2M3N2M3001O00001O001O00001O001O001O0_O_ACa><`AD`>:bAF^>2jANV>0mAOT>MoA3Q>KRB4o=HTB8k>O00001O001O001O00001O001O001LW`S7"}, "image_id": 410, "id": 6508}, {"iscrowd": 0, "category_id": 1, "bbox": [182.0, 466.0, 102.0, 46.0], "area": 2765, "segmentation": {"size": [512, 512], "counts": "[_k21m?3L3N2O20O01O01O010O0N3M2M12N3M2M3N3L3N3L30010O010O0010O0010O010O00010O010O00010N1N3L3N3O01O01O010O01O010O01O010O0O1O20O01O01O010O01O010O01O001O00001O001OO1N2M3N2M3N20000001O001O000M4M2N3L3N3L3Cl@LV?1m@MV?0m@LV?1m@MV?0eaa3"}, "image_id": 410, "id": 6509}, {"iscrowd": 0, "category_id": 1, "bbox": [19.0, 0.0, 24.0, 17.0], "area": 246, "segmentation": {"size": [512, 512], "counts": "V`91o?2M3N2M3N2M100O010O010O01O100O100O100O102M3N2Mk_Z7"}, "image_id": 411, "id": 6510}, {"iscrowd": 0, "category_id": 1, "bbox": [205.0, 0.0, 41.0, 48.0], "area": 1412, "segmentation": {"size": [512, 512], "counts": "S`V35k?5K5K5J6K5K5K6J4K1000O1000O1000O100000000O100000000O100000000mN_Af0b>YOdAb0\\>^OiA=W>CnA8R>GTB4l=LYBNh=1T10000OQPU4"}, "image_id": 411, "id": 6511}, {"iscrowd": 0, "category_id": 1, "bbox": [268.0, 0.0, 51.0, 70.0], "area": 2268, "segmentation": {"size": [512, 512], "counts": "cPV41o?4K5L4L4L4K5L1O000O0MWOQAi0m>YOSAg0l>7N101N200LjN_AU1a>9L4L4L4L4L4L3M00O1000000O1000000O1000000O100TNVBb1j=^N[B]1g=aN]BZ1Y>L4L4L4_OQAHT?4PAHT?3QAIS?3n^P3"}, "image_id": 411, "id": 6512}, {"iscrowd": 0, "category_id": 1, "bbox": [327.0, 0.0, 54.0, 22.0], "area": 596, "segmentation": {"size": [512, 512], "counts": "U`S52i?0Z@2d?7N2N2N2N2N2N1OO100O100O100O100O100O100O100O100O100O100O100O100O100O100O100O100O100O100O100O1001O00O1O10P`Q2"}, "image_id": 411, "id": 6513}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 0.0, 50.0, 58.0], "area": 1708, "segmentation": {"size": [512, 512], "counts": "_PW72m?2N2N3N1N2N3M2N2K^Oj@c0T?_Oj@d0S?6N2O1N10O1001O1O1O2N1O1O1O2N1O1O2N1O10010O0000N3M10O000003N1O10001O01O01O01O0]O"}, "image_id": 411, "id": 6514}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 0.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "P`o72n?"}, "image_id": 411, "id": 6515}, {"iscrowd": 0, "category_id": 1, "bbox": [48.0, 1.0, 50.0, 52.0], "area": 1393, "segmentation": {"size": [512, 512], "counts": "dPh01o?2M3N2M2N3N2M3N1N3N2M3M2O2M3N2M2O2M3M100O010O01O01O010O010O01O01O01G^ATOb>k0aAROb>l0`AROa>l02m?2N2N2N2N2N2N1N3O10000000N2N2N2N2N1O2N2N2NRoV7"}, "image_id": 411, "id": 6519}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 44.0, 7.0, 15.0], "area": 63, "segmentation": {"size": [512, 512], "counts": "\\1?c?N1N2N2N2N2N^^l7"}, "image_id": 411, "id": 6520}, {"iscrowd": 0, "category_id": 1, "bbox": [252.0, 49.0, 20.0, 34.0], "area": 451, "segmentation": {"size": [512, 512], "counts": "cQn32n?5K6J5K5J6K4L00000O01000003M6I6K5K5K5Kbmg3"}, "image_id": 411, "id": 6521}, {"iscrowd": 0, "category_id": 1, "bbox": [197.0, 59.0, 39.0, 48.0], "area": 1322, "segmentation": {"size": [512, 512], "counts": "QbR32m?6K5K5K5K4K6K5K5K1OO010000000O010000000O010000000O010000001N6K5K5K5K5J5L5K5KmlY4"}, "image_id": 411, "id": 6522}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 65.0, 24.0, 36.0], "area": 614, 
"segmentation": {"size": [512, 512], "counts": "W2n0Q?1MROTAm0m>3O10O010O10002M010O102O2N1M3N3L3N3M2M3N3IZmc7"}, "image_id": 411, "id": 6523}, {"iscrowd": 0, "category_id": 1, "bbox": [385.0, 69.0, 80.0, 52.0], "area": 2312, "segmentation": {"size": [512, 512], "counts": "kbP62m?2M3N1O2N2N2M3N1O2N2N2M300k@VOS?l000O100O1N1N3N2N2N2N2M2O2N2O100000O001N2N2M3N1O2N2N2M3N2O0100000OO1O002N2M2O200O1000O1N2N2N1N3N2N01O1O000001N3N2N2N2N1N3Ka@Fa?85M3N2N1OX]g0"}, "image_id": 411, "id": 6524}, {"iscrowd": 0, "category_id": 1, "bbox": [340.0, 89.0, 12.0, 10.0], "area": 77, "segmentation": {"size": [512, 512], "counts": "lRZ53m?2M3N0O10O010O101N3N2MSm_2"}, "image_id": 411, "id": 6525}, {"iscrowd": 0, "category_id": 1, "bbox": [274.0, 99.0, 13.0, 13.0], "area": 147, "segmentation": {"size": [512, 512], "counts": "TSY45k?7I00000O100000O1000005Kh\\`3"}, "image_id": 411, "id": 6526}, {"iscrowd": 0, "category_id": 1, "bbox": [353.0, 101.0, 60.0, 71.0], "area": 2287, "segmentation": {"size": [512, 512], "counts": "_d`53m?2M3N2M3N2M3N1J^Ok@e0R?7M3N2M3N2M2YOjNbBV1^=lN_BT1a=oN]BQ1c=QOZBo0f=TOXBk0i=WOTBj0k=XOTBg0l=\\OQBe0o=e0010O010O010O011N3N1N3M3N2M3O100OOO1O010O102M3N2M3N1N3N2M3ZOk@=W?Aj@=Y?@j@>^?M3M3N2M3NY[a1"}, "image_id": 411, "id": 6527}, {"iscrowd": 0, "category_id": 1, "bbox": [214.0, 111.0, 40.0, 55.0], "area": 1370, "segmentation": {"size": [512, 512], "counts": "kS[31o?5J6K5K00O10O102N5K5K5J6K5K1O0JgNeAY1[>510O100000O10O100000O10O12N5K5J6K5K5K5K5J6K5KRkP4"}, "image_id": 411, "id": 6528}, {"iscrowd": 0, "category_id": 1, "bbox": [263.0, 140.0, 23.0, 19.0], "area": 255, "segmentation": {"size": [512, 512], "counts": "ddS42m?3N3L3N2N0O100O010O0100O010O0100O02O2M3N3L3NZk`3"}, "image_id": 411, "id": 6529}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 152.0, 45.0, 48.0], "area": 1463, "segmentation": {"size": [512, 512], "counts": "od]14k?6K4L5K5J5L5K5K4L0O1000O1000O1000O10O1000O1000O1000O1000O12M6K5K4L5K0O1000O1000O2O5K5K4KVjk5"}, "image_id": 411, "id": 6530}, {"iscrowd": 0, "category_id": 1, "bbox": [395.0, 155.0, 59.0, 72.0], "area": 2198, "segmentation": {"size": [512, 512], "counts": "\\fU61n?3M2N2O1N3M2N2N2O2M2N2N3N1WOUOZBm0e=TOYBn0e=UOXBn0e=TOYBn0e=TOZBk0g=WOVBi0j=YOTBg0l=[ORBe0n=]OPBd0o=_OoA`0R>AlA?T>f001O0001O0001O0001O1O2O20O000001OOO1O2O1N3M2N2N2QO\\AJO?g>D\\AK0>f>E]AJO?f>EdA9_>EbA:_>DdA9S?N2N3M2Oeil0"}, "image_id": 411, "id": 6531}, {"iscrowd": 0, "category_id": 1, "bbox": [299.0, 168.0, 14.0, 12.0], "area": 101, "segmentation": {"size": [512, 512], "counts": "\\ee42m?3N2N2M10O010O010O101N4M2McZS3"}, "image_id": 411, "id": 6532}, {"iscrowd": 0, "category_id": 1, "bbox": [308.0, 175.0, 24.0, 23.0], "area": 280, "segmentation": {"size": [512, 512], "counts": "kUj42n?2M2N2N2N3N1N2N1O00010O00000002O1N2N2N3M2O1N2NYji2"}, "image_id": 411, "id": 6533}, {"iscrowd": 0, "category_id": 1, "bbox": [242.0, 177.0, 55.0, 42.0], "area": 1313, "segmentation": {"size": [512, 512], "counts": "QVi31o?2M4M2N2M4M2M3N2M4M2M3N00O010O010O010O01000O010O010O010O10O10O010O011N3NO10O010O10O010O0102N3L3N2M3N3L3N2M4MjY[3"}, "image_id": 411, "id": 6534}, {"iscrowd": 0, "category_id": 1, "bbox": [45.0, 186.0, 44.0, 55.0], "area": 1321, "segmentation": {"size": [512, 512], "counts": "Yff02m?3N3M3L4M2N0O10O0102N3L4M3GUOWAm0g>9L4M3M2M3N000O01000O0101N4M2N3L4M3M3L3N3M3L4M2M4M3M3L3N3MiXc6"}, "image_id": 411, "id": 6535}, {"iscrowd": 0, "category_id": 1, "bbox": [108.0, 198.0, 23.0, 19.0], "area": 267, "segmentation": {"size": [512, 512], "counts": "\\Vf13m?3M3L3N2NO0100O01000O01000O0100O01001N4M3M3LaYn5"}, "image_id": 411, "id": 
6536}, {"iscrowd": 0, "category_id": 1, "bbox": [149.0, 201.0, 15.0, 16.0], "area": 144, "segmentation": {"size": [512, 512], "counts": "]fZ21n?4M2N2M4M1N010O010003L3N2M4M]i]5"}, "image_id": 411, "id": 6537}, {"iscrowd": 0, "category_id": 1, "bbox": [327.0, 201.0, 66.0, 78.0], "area": 2300, "segmentation": {"size": [512, 512], "counts": "RgS52m?2N3M2N2N2N2N2N2N2N2N2O10000O1N2N2N2N2N2N2N2N3M2N2N2N2N2N2O1N2N2O10000001O000000000`NnAQ1R>mNPBS1P>kNRBV1m=hNUBX1k=fNWBZ1i=dNYB\\1g=bN[B^1T>mN_Ad0c>ZO_Ad0c>ZO_Ae0b>YO`Ae0b>YO`Ae0c>XO_Af0o>N2N2N2N2N2N2N2N2N2N2NkWk1"}, "image_id": 411, "id": 6538}, {"iscrowd": 0, "category_id": 1, "bbox": [464.0, 202.0, 48.0, 60.0], "area": 1595, "segmentation": {"size": [512, 512], "counts": "QWX71n?3M2O1N3M2N3N1N2N3M2O1N3N11O01O010O0O1N3M2O1N3M2N2O2M000010O0000010O0001O01O0001O01O0002N2O2M2eI"}, "image_id": 411, "id": 6539}, {"iscrowd": 0, "category_id": 1, "bbox": [266.0, 217.0, 46.0, 43.0], "area": 877, "segmentation": {"size": [512, 512], "counts": "dWU42m?2N2N2N3N1N2N3M2O1N2N3M010O00000010O00000010O00000010O00000010O000003N1N2N2N3N1N2N3M2N2O2MihS3"}, "image_id": 411, "id": 6540}, {"iscrowd": 0, "category_id": 1, "bbox": [426.0, 217.0, 10.0, 9.0], "area": 46, "segmentation": {"size": [512, 512], "counts": "mVe61o?1N2N3M1O01O2N3N1NTiU1"}, "image_id": 411, "id": 6541}, {"iscrowd": 0, "category_id": 1, "bbox": [7.0, 228.0, 41.0, 52.0], "area": 1286, "segmentation": {"size": [512, 512], "counts": "\\g32n?3M3L4M3M3L4M3M3L4M3M3L4M3M3L1000O01000O01000O01000O4M3M3L4M3M3L4M3M3L4M3M3L4M3MbgW7"}, "image_id": 411, "id": 6542}, {"iscrowd": 0, "category_id": 1, "bbox": [420.0, 241.0, 68.0, 65.0], "area": 2138, "segmentation": {"size": [512, 512], "counts": "kXb61n?2O1N3SOJkA8S>JkA8T>JjA8S>JkA8S>JkA8T>JiA9T>IkA8S>JkA8S>KjA8T>IjA9T>IkA8S>KjA8S>l0O1N000010O003M2O1N3M2N2O2M1O1O012M2N2N3N1N2N3M2O2M2N0010O0000010O2N2N01O01O01O2N2O2M2N2N3N1N2N3M2O1N3Mbg;"}, "image_id": 411, "id": 6543}, {"iscrowd": 0, "category_id": 1, "bbox": [112.0, 250.0, 61.0, 60.0], "area": 1661, "segmentation": {"size": [512, 512], "counts": "RYh11n?2N2N2N2O1N3M2N2N2N2N2N2N2N2O2M2N2N1O000HnNcAR1]>POaAP1`>QO^Ao0b>8O0000000000000001O01O0002N2N2N1O002N2N2O1N3M2N2N2N2N2N1O1O2O1N2N2N3M2N2NbWY5"}, "image_id": 411, "id": 6544}, {"iscrowd": 0, "category_id": 1, "bbox": [337.0, 250.0, 16.0, 16.0], "area": 124, "segmentation": {"size": [512, 512], "counts": "PhX51n?2N2N2N2N2N20000000N2N2N2N2N2NnW_2"}, "image_id": 411, "id": 6545}, {"iscrowd": 0, "category_id": 1, "bbox": [292.0, 253.0, 62.0, 56.0], "area": 1845, "segmentation": {"size": [512, 512], "counts": "YXb42m?2_@MR?5l@MR?5l@MR?5l@MS?4k@NS?4k@OR?a0N2N2N2N2N2N2000O1N3M2N2N2N2N02N2N2N2N2M3N2N21O0001O0000000000000000N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2NSg^2"}, "image_id": 411, "id": 6546}, {"iscrowd": 0, "category_id": 1, "bbox": [14.0, 286.0, 30.0, 30.0], "area": 465, "segmentation": {"size": [512, 512], "counts": "[Y71n?2N3M2O1N2N2N2N3M2N2O1N2N3M000101N2N2N2N2N3M2O1N2N2N3M2N2OcfY7"}, "image_id": 411, "id": 6547}, {"iscrowd": 0, "category_id": 1, "bbox": [147.0, 291.0, 51.0, 62.0], "area": 1605, "segmentation": {"size": [512, 512], "counts": "RjY22m?2N2N2N2N2O1N2N3M2N2N2N2o@WOh>k0WAVOg>l0WAVOg>l0WAWOf>S1O1N2N2N2N0000003M0001O000000000000010O2N2N2EaAROa>l0aAROa>l0aAROb>6YA;R?CPA;R?CPA;R?Do@:S?Do@:]?N3N1N2N2NSfl4"}, "image_id": 411, "id": 6548}, {"iscrowd": 0, "category_id": 1, "bbox": [449.0, 300.0, 56.0, 63.0], "area": 2113, "segmentation": {"size": [512, 512], "counts": 
"_jP74l?3M4K5L3M4L4K3N00000O0M40000O010000O0100000O\\OPOYBQ1g=ROVBn0j=VORBj0m=[OnAe0S>^OjAb0V>d0O0100000O103M0O10O1000O12M4M4L4L3L4M00O010002N4ZOSA0Q?MRA0R?LRA0R?KSA1^d3"}, "image_id": 411, "id": 6549}, {"iscrowd": 0, "category_id": 1, "bbox": [410.0, 304.0, 11.0, 11.0], "area": 70, "segmentation": {"size": [512, 512], "counts": "dY]62m?3N1N2N2N11N2N2O1N2N\\V]1"}, "image_id": 411, "id": 6550}, {"iscrowd": 0, "category_id": 1, "bbox": [334.0, 305.0, 75.0, 57.0], "area": 2108, "segmentation": {"size": [512, 512], "counts": "WZW51n?3N1N3M2O2M3N1N3M2O2M1O011N2N3N1N3N2N1010O01N2M2O1N0010O00010O01O01O010O00011N3M10O01IYAUOf>k0\\ASOd>m080O00010O0100O2N3N2M0010O00010O0010O001Ki@AV??m@^OU?`08N1N3M2O2M2O2MTVc1"}, "image_id": 411, "id": 6551}, {"iscrowd": 0, "category_id": 1, "bbox": [429.0, 309.0, 10.0, 9.0], "area": 47, "segmentation": {"size": [512, 512], "counts": "iif62m?2O1N2N01O011N2N3NWVT1"}, "image_id": 411, "id": 6552}, {"iscrowd": 0, "category_id": 1, "bbox": [315.0, 323.0, 9.0, 9.0], "area": 45, "segmentation": {"size": [512, 512], "counts": "Vjm41n?2O2M2N2OO2N2N2Nkem2"}, "image_id": 411, "id": 6553}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 329.0, 20.0, 21.0], "area": 206, "segmentation": {"size": [512, 512], "counts": "fj]12m?3M2N2N2N2N1O00000000000001O2N2N2N2N3MbUX6"}, "image_id": 411, "id": 6554}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 331.0, 58.0, 58.0], "area": 1642, "segmentation": {"size": [512, 512], "counts": "^km21o?2M2N2N2JI`@9^?6N3N1N2N2N2N2N2N3M2O1N2N2N2N00000001O0001O00000001O000001O0001O2N2N3M2O1N2N2N2N2N3M2N2O1N2N2N3M2N2N2N2OnTU4"}, "image_id": 411, "id": 6555}, {"iscrowd": 0, "category_id": 1, "bbox": [70.0, 338.0, 14.0, 14.0], "area": 102, "segmentation": {"size": [512, 512], "counts": "iZS11n?2N2N2N2N2N2N11N2N2N3M2N2NXee6"}, "image_id": 411, "id": 6556}, {"iscrowd": 0, "category_id": 1, "bbox": [391.0, 348.0, 55.0, 45.0], "area": 1676, "segmentation": {"size": [512, 512], "counts": "[kS62n?3L4M3M4K4M3M3L4M1OOHTO^Al0b>WOZAj0f>701000O010000O010003L4M00O0100002M4M3MO10O10O1000O01000O010000O011FYAYOk>b0YAZOk>c0P13000O01000O01000OBWAGi>9ZACg>=]A_Ob>b0>O4M4L4K5LmRf0"}, "image_id": 411, "id": 6569}, {"iscrowd": 0, "category_id": 1, "bbox": [472.0, 426.0, 9.0, 10.0], "area": 56, "segmentation": {"size": [512, 512], "counts": "\\]\\72n?3L4MO10O10O4M3M`R?"}, "image_id": 411, "id": 6570}, {"iscrowd": 0, "category_id": 1, "bbox": [292.0, 429.0, 7.0, 5.0], "area": 30, "segmentation": {"size": [512, 512], "counts": "]]b44l?000000001O0cRZ3"}, "image_id": 411, "id": 6571}, {"iscrowd": 0, "category_id": 1, "bbox": [69.0, 433.0, 60.0, 52.0], "area": 1528, "segmentation": {"size": [512, 512], "counts": "anR11n?2N2N2N2N2N2N2N2O2M2N2N2N2N2N2N2N2N2N10O00000001O00000000000001O0000011N2N2N3N1000000000000O1N2N2N3M2N2N2N2N2N2N2N2O1NfQo5"}, "image_id": 411, "id": 6572}, {"iscrowd": 0, "category_id": 1, "bbox": [146.0, 433.0, 23.0, 22.0], "area": 232, "segmentation": {"size": [512, 512], "counts": "n]Y21n?2N2O1N2N3M2N2N001O0000000002N2N2N2N2N2O1N2NXR[5"}, "image_id": 411, "id": 6573}, {"iscrowd": 0, "category_id": 1, "bbox": [484.0, 437.0, 9.0, 7.0], "area": 49, "segmentation": {"size": [512, 512], "counts": "f]b72n?4L0O10O1000001N[R9"}, "image_id": 411, "id": 6574}, {"iscrowd": 0, "category_id": 1, "bbox": [168.0, 447.0, 14.0, 15.0], "area": 111, "segmentation": {"size": [512, 512], "counts": "T^d22n?2M2O2M3M2OO00012M2N3N1N3NiaT5"}, "image_id": 411, "id": 6575}, {"iscrowd": 0, "category_id": 1, "bbox": [230.0, 449.0, 72.0, 34.0], "area": 1861, "segmentation": {"size": [512, 
512], "counts": "X^c3;e?7I0O10009G00O10000000000000O100000000000000000O1000O1000000000000000000000000O0100000O1O100000000000000000000000O1L400000O100002M3N0H8000OG`@4`?LVbX3"}, "image_id": 411, "id": 6576}, {"iscrowd": 0, "category_id": 1, "bbox": [177.0, 475.0, 59.0, 37.0], "area": 1418, "segmentation": {"size": [512, 512], "counts": "ioh25e?6K5J70O000000010O000000001O00000000001N1000002K4J5K72G5K5N2000000GAPA`0P?8000:F000000000000000000O100000000000O10000=C>BZ`Y4"}, "image_id": 411, "id": 6577}, {"iscrowd": 0, "category_id": 1, "bbox": [477.0, 484.0, 35.0, 28.0], "area": 891, "segmentation": {"size": [512, 512], "counts": "no^72n?000VO=WACi>j0000000000000000000000000000000000000000000000000000000000000"}, "image_id": 411, "id": 6578}, {"iscrowd": 0, "category_id": 1, "bbox": [108.0, 489.0, 53.0, 23.0], "area": 986, "segmentation": {"size": [512, 512], "counts": "g_f19_?8000000000000000000001O0000000000000000001O00H81O000000000000000000001O0000000000000000001O000000000N2EoP_5"}, "image_id": 411, "id": 6579}, {"iscrowd": 0, "category_id": 1, "bbox": [400.0, 495.0, 65.0, 17.0], "area": 686, "segmentation": {"size": [512, 512], "counts": "e_X6;_?61O000000000000000000000000001O000000000000000000000000001O00000000000000000000000000M2N3M2M4M2N3M2N3L3N3M2N3L4M2Nbmg7"}, "image_id": 412, "id": 6585}, {"iscrowd": 0, "category_id": 1, "bbox": [234.0, 79.0, 87.0, 69.0], "area": 3139, "segmentation": {"size": [512, 512], "counts": "cSe31m?3M2M3N3L3N3M21O01O0M4M2M3N3[ASOR>P1lAROR>Q1kAROR>P1kASOU>n0hAUOW>Z1010O0010O0010OI701O001O000010O01O00001OVNUB_1k=aNVB_1i=]N[Bc1Q>0M3N01N4M2M4M2M3N3L3N3L3N2N3L3N30O0010O0010O010O00010O010O00010O010O00010OO2L3N2M4M2N3L3Nk\\o2"}, "image_id": 412, "id": 6586}, {"iscrowd": 0, "category_id": 1, "bbox": [146.0, 88.0, 76.0, 84.0], "area": 3444, "segmentation": {"size": [512, 512], "counts": "lTY23j?3BLn@7n>LPA6n>Mn@6P?Ln@7n>?N3L3N3M2M4M2N2M40O010O0O2M2N2MdAdN[>Y1eAiN[>W1bAmN^>Z1010O00010O010ON3L3N2N3L3N3M2N3N11O010O001M2N2M010O1000O0102N2M4M2N2M4M2M4M2N3L3N2N3L3N3L3N3M2M3N3M2M4MQm`4"}, "image_id": 412, "id": 6587}, {"iscrowd": 0, "category_id": 1, "bbox": [343.0, 90.0, 67.0, 80.0], "area": 2765, "segmentation": {"size": [512, 512], "counts": "dc[53k?3M2M4M2N2M4M2O2O0010OjA[OQ=e0mB]OT=c0hBAW=?gBCZ==cBF\\=:aBI_=8^BJc=5[BNd=2YB1h=OVB3i=MWB3j=MUB4j=LWB3f=1YBOe=3YB0d=2ZB0c=4YB0d=W1O2O00010O01L3N3M2M3N3M2M4N101O0O1M2O00O0102N2M4M2M4M2N3L3N2N3L3N3M2M4M2N2M4MQmb1"}, "image_id": 412, "id": 6588}, {"iscrowd": 0, "category_id": 1, "bbox": [167.0, 99.0, 7.0, 8.0], "area": 37, "segmentation": {"size": [512, 512], "counts": "Vcc23j?3010O00010KolX5"}, "image_id": 412, "id": 6589}, {"iscrowd": 0, "category_id": 1, "bbox": [418.0, 106.0, 84.0, 74.0], "area": 3164, "segmentation": {"size": [512, 512], "counts": "]Ta61m?3M2O2M2N3M2N3M2N3O10O010O010O01N1N3M2N3M2O2M2O2O0O2M2O2M2N3O0100O0100O010O010O010O001N1010O010O10O010O010O010O010O010O010O10O010O010ROgA3Y>CgAH3e0V>@jAH2h0T>^OlAH3j0Q>[OZBd0f=ZO\\Bg0d=VO_Bh0b=WO_Bg0_>M2N3M2N3N1N3M2N3MTk4"}, "image_id": 412, "id": 6590}, {"iscrowd": 0, "category_id": 1, "bbox": [218.0, 111.0, 15.0, 14.0], "area": 128, "segmentation": {"size": [512, 512], "counts": "gS]31l?4L3N3O010O00010O0010N1N3M2M`\\[4"}, "image_id": 412, "id": 6591}, {"iscrowd": 0, "category_id": 1, "bbox": [204.0, 147.0, 77.0, 60.0], "area": 2675, "segmentation": {"size": [512, 512], "counts": "fUV31m?2M3N3L3N3L3N2N3L3N5L2O010O00010O010O0010O0010O0010O0010O001M20001O010O01O01O0M4M2M3N1O2M3N30O01O01O010O01O01O010O010O000N3L3N3L3N2M4GUAYOm>d0VAYOn>d08M3N3L3N3L3N2NS[c3"}, "image_id": 412, 
"id": 6592}, {"iscrowd": 0, "category_id": 1, "bbox": [398.0, 168.0, 82.0, 65.0], "area": 2705, "segmentation": {"size": [512, 512], "counts": "SVW63k?2O2M2N3M2N3M2N3N2M2N3M2N3M2O20O010O010O01000O010O01O0N3N110O010O010O10O010O010O10O3NO0100O010O010O010O0100O0100O010O010O01M2N3N1N3M3M2N3M2O2O001M2N3N1N3M3M2N3M2Oji?"}, "image_id": 412, "id": 6593}, {"iscrowd": 0, "category_id": 1, "bbox": [292.0, 170.0, 87.0, 74.0], "area": 3388, "segmentation": {"size": [512, 512], "counts": "iVb42k?3N3M2M4M2M4O00010e@]OW?f010O00010O010O0N2N3M2M4M2M3N3L3N2M12010O0010O010O00010O01N1M3N3M2M40O00010O010O00010O010O01O01O010O01OM10O_OoAVOR>j0QBSOn=m0UBPOl=P1WBmNk=Q1WBlNl=Q1XBlNj=S1c00O01O01O01M2N3L3N3L3N2M4M2M4M2N2MbZR2"}, "image_id": 412, "id": 6594}, {"iscrowd": 0, "category_id": 1, "bbox": [460.0, 172.0, 12.0, 10.0], "area": 73, "segmentation": {"size": [512, 512], "counts": "aUV71m?3M2O2O010O010O01O0N3Mbjc0"}, "image_id": 412, "id": 6595}, {"iscrowd": 0, "category_id": 1, "bbox": [364.0, 211.0, 85.0, 71.0], "area": 3215, "segmentation": {"size": [512, 512], "counts": "iWf52l?2N3M2O2DGm@;Q?Gm@Y1iAjNW>V1fAlNZ>T1eAnN[>\\1O010O010OcNfAT1[>jNfAW1Y>gNjAX1W>eNkA\\1]>O010O010ON3M2O2N110O01000O010O010ON3N1N3M2N3M2@XAFk>7WAGk>7XAFk>7WAGk>7XAFk>7`0N3M2OdXo0"}, "image_id": 412, "id": 6596}, {"iscrowd": 0, "category_id": 1, "bbox": [44.0, 247.0, 81.0, 60.0], "area": 2328, "segmentation": {"size": [512, 512], "counts": "bXf03k?2M4M2M3M4M2M4M2M4L3O110O010O00010O010O010O00010O010O010O00010O010O01O01O010O010O01O010O01O01O010O010O01O01O010O010O01O01O010O0TOTAe0l>XOVAh0Q?010O010O010OO1N3M2M4M2N2M4M2MWWQ6"}, "image_id": 412, "id": 6597}, {"iscrowd": 0, "category_id": 1, "bbox": [123.0, 291.0, 80.0, 67.0], "area": 2845, "segmentation": {"size": [512, 512], "counts": "Vjm12l?2N3L3N3M2M4M2N2M4N110O0010O0010O010O0010O0010N1M4M2N3L3N2N3L3O20O010O0010O0010O010O0010O001N1N3O00010O010O010O00010O010OSOiA1X>LjA5U>@iAJ6e0Q>_OkAJ6g0P>[OnAK5j0l=YO_Bg0b=VO`Bk0[>O010N1M4M2N2M4M2N3L3N3McUj4"}, "image_id": 412, "id": 6598}, {"iscrowd": 0, "category_id": 1, "bbox": [21.0, 319.0, 79.0, 58.0], "area": 2721, "segmentation": {"size": [512, 512], "counts": "cj:2l?2N3L3N2M4M2N3L301O01O01[AXOQ>g0mA\\OR>e0jA^OV>b0hAAX>?eAC[>=bAF_>9_AJ`>k0010O010O0010O0010O0010O010O0010N1N3L3O101O010O010O01O01O010OO2L3N2M4M2N3L3N3O00M4M2N3L3N3L3N2N3L3N3M2M4M2M3N3M2M4M2N3Lke]6"}, "image_id": 412, "id": 6599}, {"iscrowd": 0, "category_id": 1, "bbox": [291.0, 350.0, 97.0, 72.0], "area": 3338, "segmentation": {"size": [512, 512], "counts": "cka42l?2O2M2N3M2N3N1N3M3M2N3N110ZATOW>k0hAVOX>k0eAXO[>g0cA[O]>f0`A]O`>c0]A@b>`0]AAd>n0O0100O01N2N110O0100O010O0100O010O0100O010O010cN`AY1c>010O010O01000O010O010O0100O0100O001NO011N3M2N3N110O010O10O010O010O10O010nNVAl0o>10O010O0O2M2N3M3N1N3M2N3M2O2M3Mdcm1"}, "image_id": 412, "id": 6600}, {"iscrowd": 0, "category_id": 1, "bbox": [98.0, 355.0, 80.0, 61.0], "area": 2939, "segmentation": {"size": [512, 512], "counts": "U\\a13k?2M4M2N2M4M2N3L3N3M2M3N3M2O2O0010O0010O010O0010O0010O010O0010O0010O010L3O20O00010O010O010O00010ON3M2N3L3N2N30O010O000N3M2M4M2N3L3N2N3L3O20ON3ROo@h0W?M2N3L3N2N3L3N3MadV5"}, "image_id": 412, "id": 6601}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 370.0, 65.0, 71.0], "area": 2632, "segmentation": {"size": [512, 512], "counts": "m;;2>f>?N3M2N3M2O20O010O010O010O0N3O0010O010O010O010O010O010O010O010O010O010O010O01fNbAQ1]>mNeAS1\\>jNgAV1a>010O010O010O010O010O01M2QOVAf0m>WOVAf0S?N3M2N3M2N3M2N3M2NSSo6"}, "image_id": 412, "id": 6602}, {"iscrowd": 0, "category_id": 1, "bbox": [83.0, 381.0, 14.0, 13.0], "area": 111, 
"segmentation": {"size": [512, 512], "counts": "UlY11m?3M2M4N110O01O010O000N3M2NQT_6"}, "image_id": 412, "id": 6603}, {"iscrowd": 0, "category_id": 1, "bbox": [69.0, 387.0, 12.0, 11.0], "area": 80, "segmentation": {"size": [512, 512], "counts": "ZlR11m?2N3L310O01O01O01O0N3MkSg6"}, "image_id": 412, "id": 6604}, {"iscrowd": 0, "category_id": 1, "bbox": [254.0, 391.0, 84.0, 81.0], "area": 3268, "segmentation": {"size": [512, 512], "counts": "c]o33k?2O2M2N3M2N3B@YAa0e>AYAb0e>@XAc0e>_OYAc0e>@XAc0e>=N3N1N3M2N3M3O010O01O0N3N11000O010O010O010O01000O010O010O010O01000O010_NgAY1Y>eNiA\\1^>O010O10O010O10O0jN^Ao0b>oN`AP1`>nNbAS1f>O010O01000O010O010O010O0N3QOUAe0n>XOTAf0U?M3N1N3M2N3M2N3N1NRbf2"}, "image_id": 412, "id": 6605}, {"iscrowd": 0, "category_id": 1, "bbox": [376.0, 410.0, 72.0, 70.0], "area": 2897, "segmentation": {"size": [512, 512], "counts": "P^l51l?3N2M4M2M4M2M4M2M3N3L3N3L3N2M4O0010O01O01O010O01O01O010O010O01O01O010O01O01ON3cAbNT>`1iAcNT>g1N101O01O010O010O01O01O010POnA0R>NPB3o=JUB5l=HVB8j=FXB;h=A\\B>d=@^Ba0b=[ObBd0^=ZOdBe0]=YOfBc0^=YOeBe0\\>M4M2M4M2N2M4Mlao0"}, "image_id": 412, "id": 6606}, {"iscrowd": 0, "category_id": 1, "bbox": [64.0, 415.0, 83.0, 63.0], "area": 2867, "segmentation": {"size": [512, 512], "counts": "f]P12l?3M2N3M2N3M2c@BV?e0N3M2N3L3010O010O010O010O010O010O010O010O010O010O010O010O010OVAoNe>V110O010N1N3M2N3M201O010O010O010O001M210O01O0N3M2N3M2N3M2N3M2O2O01O01M5K2N3M2N3M2N3M2N3M2N]Rf5"}, "image_id": 412, "id": 6607}, {"iscrowd": 0, "category_id": 1, "bbox": [467.0, 427.0, 45.0, 75.0], "area": 2434, "segmentation": {"size": [512, 512], "counts": "VoY72_?0l@2Q?1l@2R?1k@2Q?1l@2R?`0L3N2M4M2FkNfAY1W>iNgAY1V>:M4M2M4N10010O0010O0010O0010O0010O0010O0010O0013L001O0N2M4M2M3N3LZB"}, "image_id": 412, "id": 6608}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 432.0, 39.0, 62.0], "area": 1416, "segmentation": {"size": [512, 512], "counts": "`=a1`>0O010O01O010O010O010O010eNcAQ1]>mNeAT1[>iNhAV1a>10O010O010O010O010O010O0oNYAg0j>VOYAg0i>WOYAf0S?M2N3M2N3M2N3M2N^Q\\7"}, "image_id": 412, "id": 6609}, {"iscrowd": 0, "category_id": 1, "bbox": [226.0, 451.0, 89.0, 61.0], "area": 3142, "segmentation": {"size": [512, 512], "counts": "Y_a32l?3N1N3M2N3FCm@?Q?Cm@`0P?Cm@?Q?:N1N3M2N3M3M2N30O010O010O0O2M10O3M2100O010O010O01000O010O010O010O10O010O010O01000O01O001O001hN^AP1b>oN_AQ1b>lNaAS1f>01ON2N2O11O001mNXAl0h>RO[Am0l>01OO1N2O11O1O001O0O2M2N3M3M2O2M2N3M2N3M2N3NiPR3"}, "image_id": 412, "id": 6610}, {"iscrowd": 0, "category_id": 1, "bbox": [324.0, 473.0, 10.0, 10.0], "area": 60, "segmentation": {"size": [512, 512], "counts": "n^R51m?3M2O2O10O10O01M2NVah2"}, "image_id": 412, "id": 6611}, {"iscrowd": 0, "category_id": 1, "bbox": [345.0, 479.0, 73.0, 33.0], "area": 1721, "segmentation": {"size": [512, 512], "counts": "lo\\54i?3L4L4L4M3L4O1001O0000001O00001O0000001OM3M3O1001O0000001O0000001O0000O1L4O1001O0000001O0000001O00001O00000YOo@?R?\\ORAd0U?01O0000001O0000001O0Gf@LZ?0j@0c?01O00000MW`^1"}, "image_id": 412, "id": 6612}, {"iscrowd": 0, "category_id": 1, "bbox": [209.0, 502.0, 30.0, 10.0], "area": 151, "segmentation": {"size": [512, 512], "counts": "noX32l?2N2N2N21O001O001O1O001O001O001O00N2O1001O001O001O1O001O00QPX4"}, "image_id": 412, "id": 6613}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 506.0, 28.0, 6.0], "area": 92, "segmentation": {"size": [512, 512], "counts": "n_i62l?2N21O001O001O00001O001O00N2N2O11O001O001O00001O001O00Q`h0"}, "image_id": 412, "id": 6614}, {"iscrowd": 0, "category_id": 1, "bbox": [494.0, 511.0, 2.0, 1.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "o_g71o?0Q`7"}, 
"image_id": 412, "id": 6615}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 36.0, 20.0], "area": 448, "segmentation": {"size": [512, 512], "counts": "03m?2[@LZ?5d@M[?4c@N\\?3b@O]?;O1O1O00O1O1O1O11O2N1OO1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1OQP^7"}, "image_id": 413, "id": 6616}, {"iscrowd": 0, "category_id": 1, "bbox": [249.0, 0.0, 48.0, 26.0], "area": 686, "segmentation": {"size": [512, 512], "counts": "T`l32m?2N2N2N2O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O00O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1OQ`[3"}, "image_id": 413, "id": 6617}, {"iscrowd": 0, "category_id": 1, "bbox": [360.0, 0.0, 10.0, 5.0], "area": 29, "segmentation": {"size": [512, 512], "counts": "PPd51o?1O1O1O1OO100O1O1OQPW2"}, "image_id": 413, "id": 6618}, {"iscrowd": 0, "category_id": 1, "bbox": [403.0, 0.0, 11.0, 8.0], "area": 55, "segmentation": {"size": [512, 512], "counts": "S`Y61n?2N2N2O1O00O1O1O2N2Ono`1"}, "image_id": 413, "id": 6619}, {"iscrowd": 0, "category_id": 1, "bbox": [229.0, 13.0, 16.0, 15.0], "area": 113, "segmentation": {"size": [512, 512], "counts": "a`b31n?2O1N2N2N3N10000000N2O2M2N2N2OY_U4"}, "image_id": 413, "id": 6620}, {"iscrowd": 0, "category_id": 1, "bbox": [456.0, 17.0, 23.0, 23.0], "area": 266, "segmentation": {"size": [512, 512], "counts": "iPT72m?2N2N2N2N2N2N2N2O1000000000N2N2N2N2N2N2N2N2NS_`0"}, "image_id": 413, "id": 6621}, {"iscrowd": 0, "category_id": 1, "bbox": [486.0, 19.0, 26.0, 45.0], "area": 741, "segmentation": {"size": [512, 512], "counts": "lPc73m?3L3N3L4M2N0O10000O4M3L3N3M3L3N3M1N010O01000O01000]O"}, "image_id": 413, "id": 6622}, {"iscrowd": 0, "category_id": 1, "bbox": [303.0, 20.0, 34.0, 48.0], "area": 1348, "segmentation": {"size": [512, 512], "counts": "m`g4?a?>B:F00000H80000O10O10000000000000000000003M5K0O01000000003M>B3@g@3Y?Mn^g2"}, "image_id": 413, "id": 6623}, {"iscrowd": 0, "category_id": 1, "bbox": [346.0, 22.0, 26.0, 25.0], "area": 308, "segmentation": {"size": [512, 512], "counts": "SQ]52n?1N3M2N2N2N2O2M2N000001O01O000002N2N2O1N2N3M2N2N2OPoU2"}, "image_id": 413, "id": 6624}, {"iscrowd": 0, "category_id": 1, "bbox": [475.0, 37.0, 14.0, 14.0], "area": 104, "segmentation": {"size": [512, 512], "counts": "Ya]72m?2O1N2N2N20000000O1O1N2N3Mc^;"}, "image_id": 413, "id": 6625}, {"iscrowd": 0, "category_id": 1, "bbox": [416.0, 49.0, 27.0, 32.0], "area": 401, "segmentation": {"size": [512, 512], "counts": "iQ`62m?3M2N2N2[@G`?>N2O2M2N2000000000[Oh@b0W?\\Ok@d0Z?O0O1N2N2O1N2N3M2N2N2Ni]R1"}, "image_id": 413, "id": 6626}, {"iscrowd": 0, "category_id": 1, "bbox": [275.0, 53.0, 30.0, 34.0], "area": 550, "segmentation": {"size": [512, 512], "counts": "SbY4270]?2a@0]?2a@0^?1`@1^?:N2N3M2N2N0000001O002N02N2N2N3M2N2N2N2N2N2N2N2N2O1Nm]W3"}, "image_id": 413, "id": 6627}, {"iscrowd": 0, "category_id": 1, "bbox": [400.0, 54.0, 11.0, 11.0], "area": 66, "segmentation": {"size": [512, 512], "counts": "kQX62m?2N2N2N2N02O1N2N2N2OU^b1"}, "image_id": 413, "id": 6628}, {"iscrowd": 0, "category_id": 1, "bbox": [446.0, 54.0, 50.0, 55.0], "area": 1410, "segmentation": {"size": [512, 512], "counts": "`Ro63l?2N2N2N2N2N2N2O1N3M2N2N2N2N2N2N2N2N2O2M2N2N2N2N2N1O000001O2N2N2O1N3M2DYA\\Oi>b0YA\\Oi>b0YA\\Oi>b0YA\\Oi>b0YA\\Oi>b0800O10000000O100000O10000000O1000O12N7IO1000O107I7I8H8H7I`lQ2"}, "image_id": 413, "id": 6630}, {"iscrowd": 0, "category_id": 1, "bbox": [150.0, 68.0, 57.0, 88.0], "area": 2452, "segmentation": {"size": [512, 512], "counts": "fS[21n?2N2N2N2mNH\\B9b=I\\B:a=H]B:a=H]B:b=G\\B;b=GZB=d=EZB=d=FYBUB@m=?TB_On=?UB^Om=`0UB^Om=`0UB^Om=`0UBWO@6\\>a0XB]Ok=`0WB^Ok=`0WB^Ok=`0l0O1N2N2N2N3M2N2Nd\\h4"}, 
"image_id": 413, "id": 6631}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 69.0, 37.0, 35.0], "area": 517, "segmentation": {"size": [512, 512], "counts": "mRR61n?2N2O2M2N2N2N2N2N2N000001O0000000001O00000001O00000000101N2N2N3M2N2N2N2Nc][1"}, "image_id": 413, "id": 6632}, {"iscrowd": 0, "category_id": 1, "bbox": [412.0, 89.0, 22.0, 23.0], "area": 271, "segmentation": {"size": [512, 512], "counts": "PS^62m?2N3M2N2O1N2N2N3N10000000O1N3M2O1N2N2N2N3MjlV1"}, "image_id": 413, "id": 6633}, {"iscrowd": 0, "category_id": 1, "bbox": [200.0, 117.0, 31.0, 31.0], "area": 503, "segmentation": {"size": [512, 512], "counts": "STT32m?3M2N2N2N2N2O1N2N3M2N2N2N2N0001O1O2N2N2O2M2N2N2N2N2N2N2N3M2Ol[\\4"}, "image_id": 413, "id": 6634}, {"iscrowd": 0, "category_id": 1, "bbox": [95.0, 119.0, 101.0, 103.0], "area": 3160, "segmentation": {"size": [512, 512], "counts": "Zd_12m?2N200OO01O1O2N2O1EFm@U1^AnNa>R1]APOc>V11N2O100jNYAR1k>0000001O000001O0000000000YAlNa>T1]AnNc>R1[APOe>V1O0O02N200000001O000000000N2N2N2N2N2N2N2N3M2O1N2N2N2N2N2N2N2N2Ngim4"}, "image_id": 413, "id": 6635}, {"iscrowd": 0, "category_id": 1, "bbox": [382.0, 120.0, 55.0, 51.0], "area": 1417, "segmentation": {"size": [512, 512], "counts": "eTo52m?2N2N2N2N3M2N2N2N2N2N2O1N2N2N2N2N2N3M1O00001O00000001O002N2N2N0000000010O0001O2N3M2N2N2N2N2N2N2N2N2N2O1N2N2Ni[U1"}, "image_id": 413, "id": 6636}, {"iscrowd": 0, "category_id": 1, "bbox": [195.0, 148.0, 40.0, 37.0], "area": 740, "segmentation": {"size": [512, 512], "counts": "SeQ31n?2N2N2N2O1N2N2N2N3M2N2N2N2N2N2N2000000000O2M2N2N2N0000002N2N3M2N2N2O1N2N2N2N2NjZZ4"}, "image_id": 413, "id": 6637}, {"iscrowd": 0, "category_id": 1, "bbox": [75.0, 157.0, 56.0, 54.0], "area": 1693, "segmentation": {"size": [512, 512], "counts": "feU13l?2N2N2N2N2N2N2N2N2N2N2N2N2O2O0O1N2N2N2N2N2N2N2N2N2N01O01O01O2N1O00002N2N2N2N3M200001OO1N2N2N2N2N2N2Fh@IZ?5h@I[?4g@J[?4:N2N]Zn5"}, "image_id": 413, "id": 6638}, {"iscrowd": 0, "category_id": 1, "bbox": [368.0, 157.0, 21.0, 20.0], "area": 209, "segmentation": {"size": [512, 512], "counts": "UUh51n?2N2O1N2N2N2N2N3M2000O1N2N2N2N2O1N2N2N2NiZm1"}, "image_id": 413, "id": 6639}, {"iscrowd": 0, "category_id": 1, "bbox": [216.0, 177.0, 29.0, 28.0], "area": 407, "segmentation": {"size": [512, 512], "counts": "nU\\31n?2N2N2N2N2N2N2N2N2N2N2N2N2O10N2N2N2N2N2N2N2N2N2N2N2N2N2OQZU4"}, "image_id": 413, "id": 6640}, {"iscrowd": 0, "category_id": 1, "bbox": [17.0, 178.0, 56.0, 50.0], "area": 1460, "segmentation": {"size": [512, 512], "counts": "Zf81n?2N2O2M2N2N2N2KCd@?Z?5N2N2N2N2N20000N2N2N3M2N2N2N2N1O0002N2N2N2N2N2N2N2N2N2N2000001O00000O1N2N2N2N2N2N2N2O1N2N2N2NaYk6"}, "image_id": 413, "id": 6641}, {"iscrowd": 0, "category_id": 1, "bbox": [126.0, 190.0, 18.0, 18.0], "area": 167, "segmentation": {"size": [512, 512], "counts": "WVo11n?2O1N2N2N2N2N2N2N0001O2N2N2N2N3M2Nlig5"}, "image_id": 413, "id": 6642}, {"iscrowd": 0, "category_id": 1, "bbox": [214.0, 200.0, 59.0, 58.0], "area": 1455, "segmentation": {"size": [512, 512], "counts": "PW[31n?3M2O1N2N2N2N2N2N2N2N2N3M2N2O1N2N2N2N2N2N2N2N3MWOcA2[>NgA2W>NkA2U>LmA4S>KnA5R>IPB7Q>FQB:o=DTB:m=DTB;n=CRB=P>APB?R>_OnAa0T>]OlAc0V>[OjAe0i>O01O000000000000000002N2N3N1N2N2N2N2N2N2NjXg3"}, "image_id": 413, "id": 6643}, {"iscrowd": 0, "category_id": 1, "bbox": [509.0, 205.0, 3.0, 5.0], "area": 9, "segmentation": {"size": [512, 512], "counts": "_fn71n?2N2bI"}, "image_id": 413, "id": 6644}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 214.0, 44.0, 65.0], "area": 1525, "segmentation": {"size": [512, 512], "counts": 
"m6j0U?2N2N2N2N2`AnNo=T1oAnNo=T1oAnNQ>S1lAoNT>Q1jAQOV>o0hASOX>\\1O00000001O0O1N2N2N2N2N2N2O1N2N3M2N2N2N2N2N2N2N2N2O1N3M2N2N2N2N2NThY7"}, "image_id": 413, "id": 6645}, {"iscrowd": 0, "category_id": 1, "bbox": [113.0, 219.0, 31.0, 36.0], "area": 601, "segmentation": {"size": [512, 512], "counts": "Zgh11n?2N2N2N2N2N3M2N2N2N2N2N2N2N2N0101000000000000O1Am@NU?0m@NU?0m@NU?0m@NU?0m@NU?0m@NU?0Uig5"}, "image_id": 413, "id": 6646}, {"iscrowd": 0, "category_id": 1, "bbox": [469.0, 222.0, 43.0, 57.0], "area": 1358, "segmentation": {"size": [512, 512], "counts": "QhZ72m?2N2N2N3M2O1N2N2N2N3M2O1N2@VOgAl0W>VOgAm0W>UOfAm0X>UOfAm0X>UOfAk0Z>WOeAi0Z>>00001O0000101N2N2N2N3M2O1N2N2N3M2O11OO1N2Ij@AY?h0o@ZOQ?k000001O000TOn@i0U?O1N2N2N2N2N2N2N3M2N2O1N2NQgc6"}, "image_id": 413, "id": 6650}, {"iscrowd": 0, "category_id": 1, "bbox": [210.0, 260.0, 30.0, 31.0], "area": 491, "segmentation": {"size": [512, 512], "counts": "bXY32m?2N2N2N2N2N2N3M2N2N2O1N2N2N2N02N2N2O1N2N2N2N2N2N3M2N2N2N2N^gW4"}, "image_id": 413, "id": 6651}, {"iscrowd": 0, "category_id": 1, "bbox": [272.0, 270.0, 66.0, 65.0], "area": 2142, "segmentation": {"size": [512, 512], "counts": "YYX41o?1N2T@Ng?7N2N2N2i@Ch>?VACh>?VACh>?VACh>=YAEe>l0001OO1N2O2M2N1O00000000001O01O0000000000000001O01O1O2N3M2N2N2N2N2O1N2N2N00001O2N2N2O2M2N2N2N2N2N2N2N3M2Ojff2"}, "image_id": 413, "id": 6652}, {"iscrowd": 0, "category_id": 1, "bbox": [339.0, 287.0, 54.0, 56.0], "area": 1445, "segmentation": {"size": [512, 512], "counts": "aiY52m?2N2N2N3M2O1N2N2N2N2N3M2N2N2O1N2N2N2N3M200000000000010O0000000000O2N1N2N2N2N2N2N2N3M2O1N2N2N2N2N2N3M2N2O1NmUk1"}, "image_id": 413, "id": 6653}, {"iscrowd": 0, "category_id": 1, "bbox": [130.0, 288.0, 30.0, 28.0], "area": 431, "segmentation": {"size": [512, 512], "counts": "^YQ21n?2N1O2N2N2N2O1N2N2N2N2N2N2O1N0010O2N2N3M2N2N2O1N2N3M2N2N2Ncf_5"}, "image_id": 413, "id": 6654}, {"iscrowd": 0, "category_id": 1, "bbox": [188.0, 293.0, 22.0, 23.0], "area": 238, "segmentation": {"size": [512, 512], "counts": "cYn21n?2N3M2N2N2N2N2N00000000000002N2N2N2N2N2N2Neff4"}, "image_id": 413, "id": 6655}, {"iscrowd": 0, "category_id": 1, "bbox": [11.0, 294.0, 27.0, 30.0], "area": 444, "segmentation": {"size": [512, 512], "counts": "hi52m?2GNa@4]?Na@4]?Nb@3\\?:O1N2N2N2N2N1O02N2N3M200N2N2N2N2N2N2N2N2N2N\\f\\7"}, "image_id": 413, "id": 6656}, {"iscrowd": 0, "category_id": 1, "bbox": [208.0, 304.0, 55.0, 54.0], "area": 1371, "segmentation": {"size": [512, 512], "counts": "bZX32m?2N2N2N2N2N2N3M2N2N2O1N2N2N2N2N2N2N1O000000000000001O00000001O00000000002N2N3M2N2N2N2N2N2N2O1N2N2N2N2N2N2N2NnUl3"}, "image_id": 413, "id": 6657}, {"iscrowd": 0, "category_id": 1, "bbox": [106.0, 318.0, 54.0, 54.0], "area": 1403, "segmentation": {"size": [512, 512], "counts": "nZe11n?2N2N2N2N2N2N2N2O1N2N2N2N2N3M2N2N2N2N2N2N0KiNbAW1_>jN_AV1a>400000000001O2O1N002N2N2N2N2N2N2N2N2N2N2N2N3N1N2N2N2N2N2N2N]e_5"}, "image_id": 413, "id": 6658}, {"iscrowd": 0, "category_id": 1, "bbox": [78.0, 322.0, 19.0, 19.0], "area": 175, "segmentation": {"size": [512, 512], "counts": "ZZW11n?2N2N2N2O2M2N2N2N20N2N3M2N2N2N2N2N2OdU_6"}, "image_id": 413, "id": 6659}, {"iscrowd": 0, "category_id": 1, "bbox": [294.0, 325.0, 67.0, 73.0], "area": 2341, "segmentation": {"size": [512, 512], "counts": "j[c41n?2N2N2N2^OIXA9f>HYA:e>HYA:e>HYA:e>HYA:e>HYA:c>\\OYA<2:c>J[A8c>d0N2N2N2N2N2N2N0NXNmAh1S>200000000000000000001O10O2N2N2N2N2aNeAV1]>hNeAV1]>hNeAV1d>N2N2N1O2N2N2N2N00000000001O2N2N2N2N2N2N2N2N2N2N2NQU[2"}, "image_id": 413, "id": 6660}, {"iscrowd": 0, "category_id": 1, "bbox": [28.0, 363.0, 48.0, 47.0], "area": 974, "segmentation": 
{"size": [512, 512], "counts": "[\\>1n?2N2N2N2N2O1N2N2N2N2N2N2N2N2N1O00000000000001O000000000000000001O0002N2N2N2N2N3M2N2N2N2N2N2N2NXdi6"}, "image_id": 413, "id": 6661}, {"iscrowd": 0, "category_id": 1, "bbox": [153.0, 365.0, 63.0, 64.0], "area": 2239, "segmentation": {"size": [512, 512], "counts": "ok\\22m?2O1N2N3M2N2N2O1N2N3M2N2N2m@WOm>j0RAWOl>P1N1O1O002N3N1N2O11O0001O0001O0001O000O1N2N3N1N00000000010O00000002N2N3N1WO`AOb>OaANa>0aANa>0aANa>0aANb>0_AOb>O`AOb>OaANa>0aANa>0aANb>O`AOb>0gdc4"}, "image_id": 413, "id": 6662}, {"iscrowd": 0, "category_id": 1, "bbox": [237.0, 369.0, 57.0, 57.0], "area": 1559, "segmentation": {"size": [512, 512], "counts": "Rlf32m?2N2O1N2N2N3M2N2N2O1N2N2N2N2N3M2O1N2N2N2O10001O00000001O00000001O0001N1N2N2N2O1N2N2N3M2N0000010O3M2N2N2N2N2K\\@Kf?36M_c\\3"}, "image_id": 413, "id": 6663}, {"iscrowd": 0, "category_id": 1, "bbox": [382.0, 385.0, 57.0, 52.0], "area": 1596, "segmentation": {"size": [512, 512], "counts": "j\\o52m?2N2N2N2N2N2N2N2N2N2N3i@YOP?m0N2N2OO01O2N2N2N2N2N2000O1N2N10O0000000000000001O1O2N2N2N2N2N3M2O1N2N2N2N2N2N2N2N2N2N2N2N\\ST1"}, "image_id": 413, "id": 6664}, {"iscrowd": 0, "category_id": 1, "bbox": [97.0, 387.0, 49.0, 45.0], "area": 649, "segmentation": {"size": [512, 512], "counts": "Wm`11n?2N2N2N3M2N2N2OO000001O00000001O000001O00000001O000001O00000001O000001O00000001O002N2O1N3M2N2N2Nfcf5"}, "image_id": 413, "id": 6665}, {"iscrowd": 0, "category_id": 1, "bbox": [132.0, 403.0, 47.0, 56.0], "area": 1295, "segmentation": {"size": [512, 512], "counts": "e]R21n?2O1N2N3M2ZOFcA<[>FcA<[>FcA<[>FcA<[>FcA<[>FcA:]>HaA8_>J_A6a>L^A4a>N]A5a>f0O10000000000000000001M2N2N2N2N2N2POSAj0S?N2N2N2N2O1N2N2N2N2N3M2N2N[RV5"}, "image_id": 413, "id": 6666}, {"iscrowd": 0, "category_id": 1, "bbox": [18.0, 409.0, 56.0, 54.0], "area": 1465, "segmentation": {"size": [512, 512], "counts": "i]91n?2N2N2N2N2N2N2N2N2O2M2N2N2N2N2N2N2N2N2N2O0O00000000000000001O01O0000002N2N2N2N2N2O1N2N2N3M2N2N2N2N2N2N2O1N2N3Mbbj6"}, "image_id": 413, "id": 6667}, {"iscrowd": 0, "category_id": 1, "bbox": [343.0, 410.0, 32.0, 32.0], "area": 517, "segmentation": {"size": [512, 512], "counts": "Xm[51n?3N1N2N2N2N2N2N2N2N2N2N2N2N3M20O1N2N2N2N2N2N2N2N2N2N2O1N3M2N2NfRT2"}, "image_id": 413, "id": 6668}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 413.0, 4.0, 9.0], "area": 22, "segmentation": {"size": [512, 512], "counts": "m<9g?N3M2NTcm7"}, "image_id": 413, "id": 6669}, {"iscrowd": 0, "category_id": 1, "bbox": [474.0, 414.0, 23.0, 25.0], "area": 303, "segmentation": {"size": [512, 512], "counts": "X]]72m?3M2N2N2N2N2O1N3M2N2N20N2N3M2N2N2N2O1N2N3M2NfR7"}, "image_id": 413, "id": 6670}, {"iscrowd": 0, "category_id": 1, "bbox": [276.0, 420.0, 28.0, 27.0], "area": 398, "segmentation": {"size": [512, 512], "counts": "a]Z41n?2N2N3M2O1N2N2N2N2N2N2N2N1O01O2O1N2N2N2N2N3M2N2N2N2N2N`bW3"}, "image_id": 413, "id": 6671}, {"iscrowd": 0, "category_id": 1, "bbox": [188.0, 444.0, 58.0, 37.0], "area": 1595, "segmentation": {"size": [512, 512], "counts": "o]n25j?9H8H7I6J0000000O1000O1000000000O1000O1000006J2N00000O10000000O1000O1000000000O1000O1000000000O1000O10002N7I0Fc@1]?O`0J`aT4"}, "image_id": 413, "id": 6672}, {"iscrowd": 0, "category_id": 1, "bbox": [408.0, 447.0, 55.0, 42.0], "area": 1167, "segmentation": {"size": [512, 512], "counts": "\\^\\62m?2N2N3N1N2N2N2N2N2N2N2N3N1N200001O000001O00000001M0000000010O00000000000000010O0002N2N2N2N2N2N3N1N2N2N2N2N2NdQh0"}, "image_id": 413, "id": 6673}, {"iscrowd": 0, "category_id": 1, "bbox": [449.0, 453.0, 63.0, 52.0], "area": 1899, "segmentation": {"size": [512, 512], "counts": 
"RoP73l?2O1N2IIb@9\\?Ib@9\\?7N2N2N2N3M2O10000OO00000001O00002N2O1N3N1N2N2N2O1N2N1O0002N2N2O2O000O1N2N1O002N2N2O1N3M2N2N0000001O2N2N3N1N2N2N2NWA"}, "image_id": 413, "id": 6674}, {"iscrowd": 0, "category_id": 1, "bbox": [270.0, 477.0, 62.0, 35.0], "area": 1209, "segmentation": {"size": [512, 512], "counts": "g_W42m?2N2N2Z@I`?9^@Ha?g0XOh0YOg0[Oe000000000000000000000000000000000000001O0000000000000001O00000000000000000000000000000001O000000000000000000000L400O1F[LYDe3g;_LUDa3j;;00000000000001O000000000001TLUDa3k;]LWDc3i;ZLZDf3QImA7T>KjA5V>MhA3X>j000O010000000O0100000000O3LdAaN^>]13O2N2M3N2N2N2N1N3N2N2N2N2N1N3N2Jb@G`?76N2N1O2N]eV6"}, "image_id": 414, "id": 6683}, {"iscrowd": 0, "category_id": 1, "bbox": [340.0, 387.0, 25.0, 17.0], "area": 384, "segmentation": {"size": [512, 512], "counts": "`\\Z53`?=0000000000000000001O000001O0000000000000000000lSY2"}, "image_id": 414, "id": 6684}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 395.0, 50.0, 52.0], "area": 1335, "segmentation": {"size": [512, 512], "counts": "Q=5j?1N3N2N2N2N2N2N2M2O2N2N2N2N2N2N2M3N1O2000000000000O01000000M3N2N2N1O2N2N2M3N2N2N2N2N1O2N2M3N2N2N2NkbV7"}, "image_id": 414, "id": 6685}, {"iscrowd": 0, "category_id": 1, "bbox": [467.0, 490.0, 45.0, 22.0], "area": 766, "segmentation": {"size": [512, 512], "counts": "noY72i?5L4L4L4M30000001O000000001O0000001O0000001O0000001O000000001O0000001O0000001O0000001O00"}, "image_id": 414, "id": 6686}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 8.0, 15.0], "area": 70, "segmentation": {"size": [512, 512], "counts": "0?a?O2O2M2N3N2M3Nfok7"}, "image_id": 415, "id": 6687}, {"iscrowd": 0, "category_id": 1, "bbox": [15.0, 0.0, 5.0, 3.0], "area": 10, "segmentation": {"size": [512, 512], "counts": "P`72n?1OO100OQPf7"}, "image_id": 415, "id": 6688}, {"iscrowd": 0, "category_id": 1, "bbox": [41.0, 0.0, 34.0, 29.0], "area": 581, "segmentation": {"size": [512, 512], "counts": "Q`d02n?2M3N2N1O2N2N2N1O2N2N1O2N2N2N1OO100O1O100O100O3M3N1N3N2M2O2M3M3N1N`_j6"}, "image_id": 415, "id": 6689}, {"iscrowd": 0, "category_id": 1, "bbox": [101.0, 0.0, 21.0, 9.0], "area": 101, "segmentation": {"size": [512, 512], "counts": "P`b11o?2N1O2N2N1O00O1O100O100O100O100O1O100O10PPS6"}, "image_id": 415, "id": 6690}, {"iscrowd": 0, "category_id": 1, "bbox": [127.0, 0.0, 12.0, 7.0], "area": 51, "segmentation": {"size": [512, 512], "counts": "P`o12n?1O2N2NO100O100O1O100OQ`j5"}, "image_id": 415, "id": 6691}, {"iscrowd": 0, "category_id": 1, "bbox": [145.0, 0.0, 23.0, 21.0], "area": 265, "segmentation": {"size": [512, 512], "counts": "Q`X21o?1N3N2N2N1O2N2N2N1O2N2N00O1O3N2M2O2M3N1N3M3Nbo[5"}, "image_id": 415, "id": 6692}, {"iscrowd": 0, "category_id": 1, "bbox": [165.0, 0.0, 18.0, 12.0], "area": 136, "segmentation": {"size": [512, 512], "counts": "P`b21o?2N2N1O2N2N2N00O100O100O100O2N3N2Ml_T5"}, "image_id": 415, "id": 6693}, {"iscrowd": 0, "category_id": 1, "bbox": [237.0, 0.0, 51.0, 59.0], "area": 1588, "segmentation": {"size": [512, 512], "counts": "X`f3230f?1Y@1d?8M2O2M3N1N3M3N1O2O1N1N3N2M2N3N2M2O2N2N1O2N2N10100ON3N2M2O2M3M2O2M3N1N3M3N1N3N2M2O2M3M2O2M3N1N3M3Nen_3"}, "image_id": 415, "id": 6694}, {"iscrowd": 0, "category_id": 1, "bbox": [288.0, 0.0, 34.0, 17.0], "area": 296, "segmentation": {"size": [512, 512], "counts": "PP`41o?1O1O1O001O1O1O1O1O1O1O1O1O001O1O1O1OO1O1O1O1O1O1N2O1O1O1O1O1O1O1OQPo2"}, "image_id": 415, "id": 6695}, {"iscrowd": 0, "category_id": 1, "bbox": [498.0, 0.0, 14.0, 6.0], "area": 49, "segmentation": {"size": [512, 512], "counts": "PPi71o?001O00001O001O001O00001O00"}, "image_id": 415, "id": 6696}, {"iscrowd": 
0, "category_id": 1, "bbox": [457.0, 2.0, 43.0, 62.0], "area": 1530, "segmentation": {"size": [512, 512], "counts": "caT73d?Oa@4[?0a@3]?9M4M2M4M2M3N3L3O2O000N3L3N3L3N2M4N101O01O010O0N2M4M2dNeAP1_>mNcAQ1_>lNeAP1g>N3L3N2M4M2M4M2M3N3L3N_o5"}, "image_id": 415, "id": 6697}, {"iscrowd": 0, "category_id": 1, "bbox": [193.0, 17.0, 58.0, 60.0], "area": 1739, "segmentation": {"size": [512, 512], "counts": "RaP32m?2O2M3N2M2O2M3M2O2M3N2M2N3N1N01O010O01O102M3N2M2N3N2M2O2M3N2O0010N2M3M2O2M3N1N3M3N2M2O2M3N2M2N3N2M2O2M3M3N1N3N2M2OR^R4"}, "image_id": 415, "id": 6698}, {"iscrowd": 0, "category_id": 1, "bbox": [74.0, 36.0, 22.0, 25.0], "area": 274, "segmentation": {"size": [512, 512], "counts": "YQU11o?2M2O2M3N2M2O2M3M3N2O01N1N3N2M3N2M2O2M3M3NYn_6"}, "image_id": 415, "id": 6699}, {"iscrowd": 0, "category_id": 1, "bbox": [144.0, 39.0, 52.0, 61.0], "area": 1609, "segmentation": {"size": [512, 512], "counts": "eQX22m?2O2M3N2M2O2M3M2O2M3N2M2O2M3M3N1N3N2M3N1N3M3N2M2O2O10N1N3N2M3M2O2M3N2M2N3N2M2O2M3N2M2N3N2M3N1N3N2M3M2O]mm4"}, "image_id": 415, "id": 6700}, {"iscrowd": 0, "category_id": 1, "bbox": [59.0, 46.0, 22.0, 24.0], "area": 270, "segmentation": {"size": [512, 512], "counts": "cam01o?2M2O2M3N2M3M2O2M3N2N02N2M3N1N3M3N2M3N1N3NP^g6"}, "image_id": 415, "id": 6701}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 49.0, 20.0, 33.0], "area": 409, "segmentation": {"size": [512, 512], "counts": "d1n0R?O010O010O010O2O2M3N2M3N1N3N2M3M3N2M3Nime7"}, "image_id": 415, "id": 6702}, {"iscrowd": 0, "category_id": 1, "bbox": [103.0, 57.0, 54.0, 61.0], "area": 1645, "segmentation": {"size": [512, 512], "counts": "[bc11o?1N3KMY@5d?6N2M2O2M3M3N1N2OO0012M3M3N2M2O2M3N2M3N1N3M3N2M2001M3M3N1N3N2M3N2M2O2M3M3N1N3N2M3N2M2N3N2M3N2M2O2M3Ni\\a5"}, "image_id": 415, "id": 6703}, {"iscrowd": 0, "category_id": 1, "bbox": [436.0, 66.0, 29.0, 25.0], "area": 391, "segmentation": {"size": [512, 512], "counts": "\\Rj61n?2N2N2N1O2N2M3N2O1O1000000O1N2N000002N1O2M3N2N2N2N2N2N2Nf]g0"}, "image_id": 415, "id": 6704}, {"iscrowd": 0, "category_id": 1, "bbox": [11.0, 71.0, 22.0, 20.0], "area": 232, "segmentation": {"size": [512, 512], "counts": "_b51o?2M3M3N1N3N2M0010O0010O010O01O3N1N3N2M2O2M`]_7"}, "image_id": 415, "id": 6705}, {"iscrowd": 0, "category_id": 1, "bbox": [377.0, 79.0, 54.0, 48.0], "area": 1307, "segmentation": {"size": [512, 512], "counts": "Wcl51n?2N2N2N3M2O1N2N2N2N3M2N2O1N2N2N3M2N2N2O1N00001O000001O002N2O2M2N2N2N2N2N3O00000000001M2N2N2O1N3M2N2N2N2N2Od\\X1"}, "image_id": 415, "id": 6706}, {"iscrowd": 0, "category_id": 1, "bbox": [436.0, 99.0, 63.0, 75.0], "area": 2083, "segmentation": {"size": [512, 512], "counts": "nTj61n?2N2N2N2N2N2CDSA>k>DSA>k>DSA>k>DSA>k>DSA>k>=N2N1O2N2N1O000000000000000O1000000000000000O10000000000000000000O1001O2N2N2N2N2N2M3N2]On@5T?In@5T?In@5T?Ho@6R?IPA5_?N2N2NS\\6"}, "image_id": 415, "id": 6707}, {"iscrowd": 0, "category_id": 1, "bbox": [64.0, 104.0, 53.0, 54.0], "area": 1476, "segmentation": {"size": [512, 512], "counts": "fSP11n?3N1N3N2M2N3N2M3N1N3N2M2N3N2M3N1N3M3N1N3N2M2N3N00O2O2M3N1N3N2M2N3N2M3N1N00010O2O2M2N3N2M2O2M3N2M2N3N2M2Oc[U6"}, "image_id": 415, "id": 6708}, {"iscrowd": 0, "category_id": 1, "bbox": [507.0, 118.0, 5.0, 14.0], "area": 43, "segmentation": {"size": [512, 512], "counts": "Pdm73j?3N3M2M3ZL"}, "image_id": 415, "id": 6709}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 134.0, 69.0, 53.0], "area": 1817, "segmentation": {"size": [512, 512], "counts": 
"c4`0^?2M4M2N3L3O2O01O010O010O00010O010O010O00010O010O01O01O010O010O01O01O010O010O01O01O010O010N1O110O0010O@n@2R?LPA5P?HSA7m>FVA;j>BYA=U?010O0010O010O001M2N2N3L3NdZm6"}, "image_id": 415, "id": 6710}, {"iscrowd": 0, "category_id": 1, "bbox": [496.0, 155.0, 16.0, 43.0], "area": 350, "segmentation": {"size": [512, 512], "counts": "oUh71m?2M3N3L3N3L3N2N3L3N3L3N3L3N2N3TK"}, "image_id": 415, "id": 6711}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 169.0, 14.0, 19.0], "area": 186, "segmentation": {"size": [512, 512], "counts": "Y5`0`?010O00010O010O00O2L3N3L3Mejh7"}, "image_id": 415, "id": 6712}, {"iscrowd": 0, "category_id": 1, "bbox": [273.0, 186.0, 85.0, 105.0], "area": 3493, "segmentation": {"size": [512, 512], "counts": "kgX41n?2O1N3M2N2O2M2N3M2O1N3M2O1N3M2N2O2M2N2N3N1N3M2N2O2M2N2VOZNXCh1eb0POg[14N2N3N1N2N3M2O1N3M2N2O2M2N3M2O1N3M2O1N3MQi\\2"}, "image_id": 415, "id": 6713}, {"iscrowd": 0, "category_id": 1, "bbox": [234.0, 191.0, 29.0, 29.0], "area": 445, "segmentation": {"size": [512, 512], "counts": "ZVe31n?3N1N3M2O2M2N3N1N3M2O2M0010O00010O2N3N1N3M2O2M2N3N1N3M2OaYl3"}, "image_id": 415, "id": 6714}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 204.0, 52.0, 37.0], "area": 1347, "segmentation": {"size": [512, 512], "counts": "\\6k0U?1O01O01O01O010O01O01O010O00010O010O00010O0010O0010O001OO1000000000O10O10000000O1000O10000000O5L7I7I7IchU7"}, "image_id": 415, "id": 6715}, {"iscrowd": 0, "category_id": 1, "bbox": [160.0, 213.0, 65.0, 65.0], "area": 1883, "segmentation": {"size": [512, 512], "counts": "mW`23m?1N3M2O2M2N3JBf@=Z?5O01O01O01O01O0G^OVA`0l>BQA?n>CPA=P?Fn@5W?;N2N3N1N3M2O0O00012M2O2M2N3N1N3M2O2M3M2O`NmAU1P>lNQBT1n=kNUBT1k=jNWBU1k=iNWBT1k=jNWBT1l=iNWBU1j=jNWBT1]>M2O2M2N3N2M2N3N1N3M2O2M2N3N1N3M2OiW_4"}, "image_id": 415, "id": 6716}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 240.0, 2.0, 5.0], "area": 8, "segmentation": {"size": [512, 512], "counts": "`75k?Nbhn7"}, "image_id": 415, "id": 6717}, {"iscrowd": 0, "category_id": 1, "bbox": [316.0, 248.0, 105.0, 94.0], "area": 3185, "segmentation": {"size": [512, 512], "counts": "ZZn42m?1N3M2N3N1N3N1N2OO1O010O03d@^OW?f0O2N110O10O010O01M21N1O2O01O010O01O010O010O010O01O0O20M2N3M2N3M2N3M2M01000002M3N3M2N3M2M4M2N3M2N3L3N2_OjNVBY1g=iNWBY1g=jNSBIN`1o=fNQB]1P>710O101O3M2N3O010O01M2N2N3M2N3L3N3M2N3M2N3M000O3HYAUOi>i0YAUOi>i08N2M3N2N2N2N3M5K4LQX]1"}, "image_id": 415, "id": 6718}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 271.0, 16.0, 52.0], "area": 583, "segmentation": {"size": [512, 512], "counts": "_8c1]>000010O00000K5K6J5K5K5K5K6J5Kcgg7"}, "image_id": 415, "id": 6719}, {"iscrowd": 0, "category_id": 1, "bbox": [438.0, 275.0, 47.0, 68.0], "area": 1770, "segmentation": {"size": [512, 512], "counts": "QZk61m?2^@NS?5j@OS?4j@NS?5k@NR?a0M4M2N2M4M2M4M2M3N3L3N3M2M3N3O010O0001L3N3M2M3N3L3N3L3N3L3N2N3L3N3L3N2M4M2M4M2M3N3MTW="}, "image_id": 415, "id": 6720}, {"iscrowd": 0, "category_id": 1, "bbox": [172.0, 280.0, 38.0, 27.0], "area": 476, "segmentation": {"size": [512, 512], "counts": "WYf21n?2M3N2N2N110O1000O1M3N2NO102N2N2N1O2N2M3N2N2N2N1O01O2N2M3N2N2N2N1O2M3N2N2Noff4"}, "image_id": 415, "id": 6721}, {"iscrowd": 0, "category_id": 1, "bbox": [235.0, 296.0, 30.0, 31.0], "area": 579, "segmentation": {"size": [512, 512], "counts": "mie32l?2N2N3M2M4M2N3M2N3M2010OO2O0010O01O01O001L3N3M2N3M2N3M5K2MbVk3"}, "image_id": 415, "id": 6722}, {"iscrowd": 0, "category_id": 1, "bbox": [162.0, 301.0, 29.0, 32.0], "area": 496, "segmentation": {"size": [512, 512], "counts": "mYa23l?1N3N2M2O2N2M2O2M3N2N1O2000O10O1000O010O1M3Gh@GY?8i@EZ?89N1O2M3NXVP5"}, "image_id": 415, 
"id": 6723}, {"iscrowd": 0, "category_id": 1, "bbox": [487.0, 304.0, 25.0, 57.0], "area": 841, "segmentation": {"size": [512, 512], "counts": "mjc72l?2N3L3N2M4M2M4M2N2M4M2M4M2N2M4M2M4M2N3O000010O01^F"}, "image_id": 415, "id": 6724}, {"iscrowd": 0, "category_id": 1, "bbox": [125.0, 316.0, 30.0, 60.0], "area": 1718, "segmentation": {"size": [512, 512], "counts": "nin1f0Z?S1mN000N20000000000000001O0000000000O10000000000000002M106JlUb5"}, "image_id": 415, "id": 6725}, {"iscrowd": 0, "category_id": 1, "bbox": [291.0, 323.0, 27.0, 27.0], "area": 404, "segmentation": {"size": [512, 512], "counts": "\\ja42n?1N3M2O2M3M2O2M2N3N1N00100O2O1O01N3Ii@@Y?>i@AY?=6N3M2O2M2O2M_eP3"}, "image_id": 415, "id": 6726}, {"iscrowd": 0, "category_id": 1, "bbox": [188.0, 325.0, 17.0, 18.0], "area": 166, "segmentation": {"size": [512, 512], "counts": "^Zn23l?2M3N1O2M3N1O2000O1M2O2M3N1O2M3NeUi4"}, "image_id": 415, "id": 6727}, {"iscrowd": 0, "category_id": 1, "bbox": [406.0, 330.0, 30.0, 30.0], "area": 561, "segmentation": {"size": [512, 512], "counts": "U[[61g?2\\@0b?2\\@1a?8N2N3L3N1O001O21O010O01O010O010O01O0O2M2N3M2M4M2N2N3L`eU1"}, "image_id": 415, "id": 6728}, {"iscrowd": 0, "category_id": 1, "bbox": [189.0, 339.0, 48.0, 61.0], "area": 1479, "segmentation": {"size": [512, 512], "counts": "gkn23l?2M2O2M3N1e@Fn>=PADo>=o@Fn>=PADo>h0M3N1N3N2M2O2M3N1N3M2OO0010O0102M2O2M3N1N3N2M2O2M3N1N3N2M2O2M2O2M3N1N3M3N1N3NoTY4"}, "image_id": 415, "id": 6729}, {"iscrowd": 0, "category_id": 1, "bbox": [243.0, 348.0, 57.0, 46.0], "area": 1371, "segmentation": {"size": [512, 512], "counts": "cki31o?2M3N2M3N2N2M3N3L3N2M2O000O010O0100O0100O010O01000O010O010O011O1ITOWAl0i>420O0100O0100O04M2M010O01003L3N2M4M2N2M4M2M4M[dY3"}, "image_id": 415, "id": 6730}, {"iscrowd": 0, "category_id": 1, "bbox": [457.0, 352.0, 30.0, 29.0], "area": 542, "segmentation": {"size": [512, 512], "counts": "dkT71l?3N3L3N3L3N2M4M201O01O01O010O01O01O010O01O0O1N3L3N3L3N2M4MjT<"}, "image_id": 415, "id": 6731}, {"iscrowd": 0, "category_id": 1, "bbox": [356.0, 359.0, 15.0, 12.0], "area": 111, "segmentation": {"size": [512, 512], "counts": "^[b51l?3M3O2O00010O00010O00010OM3MiTV2"}, "image_id": 415, "id": 6732}, {"iscrowd": 0, "category_id": 1, "bbox": [309.0, 377.0, 59.0, 63.0], "area": 1934, "segmentation": {"size": [512, 512], "counts": "glj42m?2N2N1X@J30]?:a@H]??N2N2N2N2N3M2N2O1N2N2N2N2N2N2N2N2N2N2N2N1O0000O1000002N2N2N2N2N2N2N2N2N1O2N2N2N2N2M3N2N2N2N2N2N2N2N2N2N2N2N^cW2"}, "image_id": 415, "id": 6733}, {"iscrowd": 0, "category_id": 1, "bbox": [406.0, 381.0, 39.0, 34.0], "area": 771, "segmentation": {"size": [512, 512], "counts": "b\\[61m?2M4L3M3N3L3M4M2010O0010O0010O0010O010O00010O010O00010O010O00N3M2M4M2M3N3L3NiSQ1"}, "image_id": 415, "id": 6734}, {"iscrowd": 0, "category_id": 1, "bbox": [248.0, 398.0, 42.0, 40.0], "area": 1016, "segmentation": {"size": [512, 512], "counts": "Z]l31m?2M4M2M3M4M2M4M2M3N3L301O01O010O01O01O010O01O01O01O010O01O000N3L3N3L3N2M4M2M4M2M3NYc^3"}, "image_id": 415, "id": 6735}, {"iscrowd": 0, "category_id": 1, "bbox": [443.0, 401.0, 24.0, 20.0], "area": 286, "segmentation": {"size": [512, 512], "counts": "klm63k?2M4M2N2O2O010O0010O0010O010O0010O001M2N3L3N3MYSf0"}, "image_id": 415, "id": 6736}, {"iscrowd": 0, "category_id": 1, "bbox": [291.0, 410.0, 17.0, 21.0], "area": 236, "segmentation": {"size": [512, 512], "counts": "Qma472Jb?;O2O01OO2L3O2O00010ON3L3N3L3N2NUcU3"}, "image_id": 415, "id": 6737}, {"iscrowd": 0, "category_id": 1, "bbox": [371.0, 418.0, 54.0, 63.0], "area": 2215, "segmentation": {"size": [512, 512], "counts": 
"lmi51m?3M2M3^@JX?9e@IY?`0M4M2M4ZATOT>n0eAPON5[>n0cAZO\\>U11OM4N1010O0010O0010O010O0010O0010O010O0O1O2O010O010O00010M2M4M2N3L3N2M4M2N3@l@OW?Ol@NW?Nl@0V?Nm@NV?0lR[1"}, "image_id": 415, "id": 6738}, {"iscrowd": 0, "category_id": 1, "bbox": [323.0, 440.0, 33.0, 39.0], "area": 754, "segmentation": {"size": [512, 512], "counts": "enQ52l?3L3N3M2M3N3L3N3M2M3N3L301O01O01O010O0001M2M3N3L3M3N3L3M3M4M2M3MUb]2"}, "image_id": 415, "id": 6739}, {"iscrowd": 0, "category_id": 1, "bbox": [426.0, 442.0, 55.0, 52.0], "area": 1511, "segmentation": {"size": [512, 512], "counts": "i^e63k?2M3N3L3N3L3N2M4M2N3L3N2N30O01O2O0O010O0001lNYAn0f>nN_AQ1h>10O0O1010O010O01O01O010O01O01O010O010OO1M4M2M4M2M3N3L3N3L3N2NaQ?"}, "image_id": 415, "id": 6740}, {"iscrowd": 0, "category_id": 1, "bbox": [484.0, 467.0, 28.0, 45.0], "area": 835, "segmentation": {"size": [512, 512], "counts": "a_b74j?2N3L3N3M2M3N3M2M4M2N3L3N2010OXAjNf>X10010O010O01lNYAn0f>oN^AP1i>01O001O"}, "image_id": 415, "id": 6741}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 472.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "h>2Vao7"}, "image_id": 415, "id": 6742}, {"iscrowd": 0, "category_id": 1, "bbox": [8.0, 494.0, 41.0, 18.0], "area": 412, "segmentation": {"size": [512, 512], "counts": "o_41m?200001O001OO1N2N2M3N2N2M3N2001O00001O001O001O00001O001O00001O001O00001O001N1N3LZPW7"}, "image_id": 415, "id": 6743}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 46.0, 35.0], "area": 1089, "segmentation": {"size": [512, 512], "counts": "b04j?3M2M3N3L3N3L3O101O001O001O2N001O001O001O00N2001O001O001O00001ON2M3N2N2M3N2M3N2M3N2N2M3N2MSPY7"}, "image_id": 416, "id": 6744}, {"iscrowd": 0, "category_id": 1, "bbox": [509.0, 0.0, 2.0, 1.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "P`n71o?0P`0"}, "image_id": 416, "id": 6745}, {"iscrowd": 0, "category_id": 1, "bbox": [54.0, 6.0, 46.0, 45.0], "area": 1150, "segmentation": {"size": [512, 512], "counts": "VQk01l?3N3M2N3L3N3M2M4M2N2N30O010O01O2OO0010O010ON2010L3N2N3L3N3NN4M2N2N3L3N2N3L3N2N3M2M3N3M2M3Ngo]6"}, "image_id": 416, "id": 6746}, {"iscrowd": 0, "category_id": 1, "bbox": [99.0, 25.0, 50.0, 48.0], "area": 1510, "segmentation": {"size": [512, 512], "counts": "daa14j?2M3N3L3N3L3N2M4M2M4M20010O010O01O010OSAQOi>S110O01O010O010O0010O010M2N2O2M2M12M3N3L3N2M4M2M4M2M3N3L3N3F_@0fne5"}, "image_id": 416, "id": 6747}, {"iscrowd": 0, "category_id": 1, "bbox": [157.0, 42.0, 45.0, 39.0], "area": 1100, "segmentation": {"size": [512, 512], "counts": "Rb^23k?2N3L3N3M2M3N3M2M3N3N110O01O01O01O01O01O010O01O01O01N1M3N30O010O0N2M4L3M3N3L3M4M2M3M4M2Mcnj4"}, "image_id": 416, "id": 6748}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 51.0, 21.0, 31.0], "area": 432, "segmentation": {"size": [512, 512], "counts": "c1k0V?0O00010O010O00010O01O0N2M4M2M4M2M4M2M3NW^e7"}, "image_id": 416, "id": 6749}, {"iscrowd": 0, "category_id": 1, "bbox": [208.0, 51.0, 48.0, 43.0], "area": 1288, "segmentation": {"size": [512, 512], "counts": "\\RX33k?3L3N3L3M3N3L3M4M20010O010O000O2O0010O001QOPAm0R?O01O0O2M2M3N30O01O01O010O010O000DXA_Oj>>XA@j>=ZA_Oj>>XA@j>=>N3L3N3M2MXno3"}, "image_id": 416, "id": 6750}, {"iscrowd": 0, "category_id": 1, "bbox": [19.0, 59.0, 27.0, 28.0], "area": 450, "segmentation": {"size": [512, 512], "counts": "\\b93k?2N3L3N2M4M2M40O010O00010O010O0010O0010M2N3L3N2M4M2NomX7"}, "image_id": 416, "id": 6751}, {"iscrowd": 0, "category_id": 1, "bbox": [101.0, 70.0, 33.0, 36.0], "area": 695, "segmentation": {"size": [512, 512], "counts": "obb12l?3M2M4M2M3N3L3N3M2M3O2O0010O0010O0010O0010OM4M2M3N3M2M4M2M3N3L3Ndml5"}, 
"image_id": 416, "id": 6752}, {"iscrowd": 0, "category_id": 1, "bbox": [72.0, 71.0, 23.0, 25.0], "area": 354, "segmentation": {"size": [512, 512], "counts": "iRT12l?2M3M4M2M4L3N2010O0010O0010O001N1N2M4L3N3L3Mf]`6"}, "image_id": 416, "id": 6753}, {"iscrowd": 0, "category_id": 1, "bbox": [252.0, 78.0, 55.0, 44.0], "area": 1364, "segmentation": {"size": [512, 512], "counts": "bSn31l?3M4M2M3O20O01O01O01M2N2M4L3N3L3M3N3L3N11M4M2M3N3O0010O00010O0010O0010O0N3L3N2010O001M2M4M2M4L3N3L3N3L3M4M2M^]V3"}, "image_id": 416, "id": 6754}, {"iscrowd": 0, "category_id": 1, "bbox": [312.0, 87.0, 51.0, 102.0], "area": 2626, "segmentation": {"size": [512, 512], "counts": "bUl43k?2M4M2lNF_B>^=D_B?^=D`B>^=E^B?^=D`B>]=FWBCLk0b=TO_B`00DMj0a=UO_B`01f0^=T1L3N3L3N2M4M2M4M02M3N3L3N3L3N2M4L3N3L3N2M4CQBeNQ>X1SBdNQ>Y1QBeNQ>X1=N3L3M4M2M3N3L3N2M4L3N3L3N2MQ]Z2"}, "image_id": 416, "id": 6755}, {"iscrowd": 0, "category_id": 1, "bbox": [188.0, 91.0, 29.0, 44.0], "area": 702, "segmentation": {"size": [512, 512], "counts": "mSn21l?3N3L3N3L3M3N3L3N3L3N2M4L30010O0010O0FYA[Og>b0\\A_Od>>_AAa>8dAE_>8eAD^>:dAD_>8f0M4MQ]c4"}, "image_id": 416, "id": 6756}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 98.0, 30.0, 41.0], "area": 769, "segmentation": {"size": [512, 512], "counts": "f301M2M4M2N3L3N2N3L3N3M2Mel`7"}, "image_id": 416, "id": 6757}, {"iscrowd": 0, "category_id": 1, "bbox": [153.0, 98.0, 33.0, 41.0], "area": 751, "segmentation": {"size": [512, 512], "counts": "Qd\\23k?2N2M4M2N3L3N2N3L3N3M2M4M2O1010O010O0O1N3L3N3L3N2M4M2M4M2M3N3L3NilR5"}, "image_id": 416, "id": 6758}, {"iscrowd": 0, "category_id": 1, "bbox": [228.0, 106.0, 21.0, 21.0], "area": 261, "segmentation": {"size": [512, 512], "counts": "gSb31m?3L3N3M2N3M201O01O010O010O010M2N2N3M2M4Mb\\S4"}, "image_id": 416, "id": 6759}, {"iscrowd": 0, "category_id": 1, "bbox": [57.0, 114.0, 37.0, 32.0], "area": 672, "segmentation": {"size": [512, 512], "counts": "Tdl02l?2M4M2M4M2N2M4N1010O010O00010O010O010O00010O010O010O00010L3N3M2M4M2N2M4MTl`6"}, "image_id": 416, "id": 6760}, {"iscrowd": 0, "category_id": 1, "bbox": [361.0, 126.0, 53.0, 45.0], "area": 1448, "segmentation": {"size": [512, 512], "counts": "kdd51m?3L3N3L3N2N3L3N3L3N2M4O001O010O01O01O010O01O010O01O01O010O01O001L3N2M4N11O0N3L3N2N3L3N3L3N3L3N2N3L3N3L3Nmk`1"}, "image_id": 416, "id": 6761}, {"iscrowd": 0, "category_id": 1, "bbox": [235.0, 129.0, 41.0, 50.0], "area": 1160, "segmentation": {"size": [512, 512], "counts": "Wee32l?2N2M4M2M4M2N2M4M2M4M2N2M4M2M4M20010O010O00010O010OM3N3L3N3L3N2M4M2M4M2M3N3L3N3Lhke3"}, "image_id": 416, "id": 6762}, {"iscrowd": 0, "category_id": 1, "bbox": [411.0, 142.0, 51.0, 51.0], "area": 1401, "segmentation": {"size": [512, 512], "counts": "`e]64j?2N2N3L3N3M2N2M4M2N3M2N210O010O0010O0010O010O00010O010O0O1M4M2M4M0O02O3M2M4M2M3N3L3N3M2M3N3L3N3L3N3M]kh0"}, "image_id": 416, "id": 6763}, {"iscrowd": 0, "category_id": 1, "bbox": [111.0, 143.0, 31.0, 34.0], "area": 613, "segmentation": {"size": [512, 512], "counts": "Veg13k?2M4M2N2N3L3N3M2M3N30O01O01O010O010O00010M2M4M2M3N3L3N3L3N2M\\kh5"}, "image_id": 416, "id": 6764}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 147.0, 40.0, 51.0], "area": 1195, "segmentation": {"size": [512, 512], "counts": "g55i?3L3N2M4L3N3L3N3L3N2M4M2M4L30001O010O010O01O010M2N3L3N2M4M2M4M2M4M2M4M2N2M4M2MVk[7"}, "image_id": 416, "id": 6765}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 153.0, 50.0, 75.0], "area": 2087, "segmentation": {"size": [512, 512], "counts": 
"VVW71l?3N2N3L3l@G_>;^AH`>;\\AI`>:_AG_>?cABY>a0gA_OW>d0iA]OR>f0nA[Oo=g0RBXOl=j0SBWOm=i0PB[OP>d0mA_OS>W110O00010O00010O0001O0M2OO010O001O0102M4M2N2M4M2N2M4M2M4M2N2M4M2NTK"}, "image_id": 416, "id": 6766}, {"iscrowd": 0, "category_id": 1, "bbox": [268.0, 155.0, 46.0, 38.0], "area": 999, "segmentation": {"size": [512, 512], "counts": "[UV42k?4M2N3L3N2N3M2010O0010O0010O00h@_OR?g001O01O010O01N1O101O010O01O01O010O01O000M4M2M4M2M3N3L3N3L3NkjR3"}, "image_id": 416, "id": 6767}, {"iscrowd": 0, "category_id": 1, "bbox": [164.0, 160.0, 28.0, 30.0], "area": 508, "segmentation": {"size": [512, 512], "counts": "dUb23k?2M3N3M2M4M2N3L3O110O010O00010O010O0001L3N3L3N3M2M3N3Lkjo4"}, "image_id": 416, "id": 6768}, {"iscrowd": 0, "category_id": 1, "bbox": [45.0, 163.0, 46.0, 57.0], "area": 1566, "segmentation": {"size": [512, 512], "counts": "\\ff03k?2N3IJ_@9_?6M2M3N3L3N3M2M4UAlNf>X1N2N30OM4L3O2O01O01O010O010O01O0N2M4M2N3L3N3M2M3N3M2M4O001N1N2Ce@3^?Jd@3g?M\\Zb6"}, "image_id": 416, "id": 6769}, {"iscrowd": 0, "category_id": 1, "bbox": [341.0, 166.0, 27.0, 29.0], "area": 461, "segmentation": {"size": [512, 512], "counts": "ieZ52l?3L3N3L3N2M4M2N30O010O00010O010O0010ON2M4M2M4M2N2M4MdjW2"}, "image_id": 416, "id": 6770}, {"iscrowd": 0, "category_id": 1, "bbox": [387.0, 184.0, 28.0, 35.0], "area": 595, "segmentation": {"size": [512, 512], "counts": "^fQ6340`?3]@O`?;M2N3L3N2N3L2OO2O2O110O010O0010O00N3L3N3M2M3N3L3N3MSZ`1"}, "image_id": 416, "id": 6771}, {"iscrowd": 0, "category_id": 1, "bbox": [103.0, 193.0, 45.0, 51.0], "area": 1298, "segmentation": {"size": [512, 512], "counts": "Ugc11m?2N3M2N3M2N3L3N2N3M2N3M4K3N3M2N3M20010O010O0010O0010O0010N1M4M2M3N3L3N3L3N2N3L3N3L3N3L3Nfie5"}, "image_id": 416, "id": 6772}, {"iscrowd": 0, "category_id": 1, "bbox": [315.0, 195.0, 16.0, 15.0], "area": 132, "segmentation": {"size": [512, 512], "counts": "[fm41m?3L3N3N110O010O010O0010M2N3M2MkYj2"}, "image_id": 416, "id": 6773}, {"iscrowd": 0, "category_id": 1, "bbox": [159.0, 207.0, 42.0, 50.0], "area": 1224, "segmentation": {"size": [512, 512], "counts": "eg_22k?4M2M3N3L3N3L3N2M4M2M4M2M3N3L301O010O01O01O010O01O0O1M4M2M4M2M3N3L3N3L3N2M4M2M3N3LZYk4"}, "image_id": 416, "id": 6774}, {"iscrowd": 0, "category_id": 1, "bbox": [392.0, 221.0, 13.0, 16.0], "area": 118, "segmentation": {"size": [512, 512], "counts": "XWT63k?2N2N3L3N30O001M2N3M2M4MQYe1"}, "image_id": 416, "id": 6775}, {"iscrowd": 0, "category_id": 1, "bbox": [428.0, 222.0, 29.0, 32.0], "area": 553, "segmentation": {"size": [512, 512], "counts": "hWf62e?Oa@3]?0_@3^?:M2M4M2N2M4O010OO2L3N2N30O01O010O01O01M2N3L3N2N3L3NlXk0"}, "image_id": 416, "id": 6776}, {"iscrowd": 0, "category_id": 1, "bbox": [210.0, 224.0, 50.0, 56.0], "area": 1658, "segmentation": {"size": [512, 512], "counts": "\\XY32k?3N2@JSA:i>JTA8j>JSA:i>IUA9i>`0M4M201O0M3N3L301O01O01O01O010O01O01O010O0O1M4M2M4M2M3N3L3M3N3L2O120O0010OM3N3L3N3L3Nghm3"}, "image_id": 416, "id": 6777}, {"iscrowd": 0, "category_id": 1, "bbox": [404.0, 231.0, 19.0, 13.0], "area": 137, "segmentation": {"size": [512, 512], "counts": "\\WZ62k?3N3O000010O00010O010O00010O010N1M3NfX\\1"}, "image_id": 416, "id": 6778}, {"iscrowd": 0, "category_id": 1, "bbox": [369.0, 235.0, 29.0, 27.0], "area": 490, "segmentation": {"size": [512, 512], "counts": "lgh52k?4M2M3N3M2M4M200010O010O00010O0010O0010O001M2N2N3L3N3L3N_hh1"}, "image_id": 416, "id": 6779}, {"iscrowd": 0, "category_id": 1, "bbox": [286.0, 244.0, 23.0, 19.0], "area": 248, "segmentation": {"size": [512, 512], "counts": "lW_44j?2N2N3M2010O01O01O010O010O00010O010M2N3M2M3NWXU3"}, "image_id": 416, "id": 
6780}, {"iscrowd": 0, "category_id": 1, "bbox": [305.0, 245.0, 27.0, 30.0], "area": 475, "segmentation": {"size": [512, 512], "counts": "\\hh41m?2M3N3M2M4M2N2M4M2N3O001O01O010O0N2N3M2M4Ab@9_?Ec@;b?M2O110IZ@0iWj2"}, "image_id": 416, "id": 6781}, {"iscrowd": 0, "category_id": 1, "bbox": [254.0, 260.0, 43.0, 45.0], "area": 1123, "segmentation": {"size": [512, 512], "counts": "QYo32l?2N3M4CIn@6P?LPA5m>MTA2j>1SA1j>2SA2j>c0O20O010O0010O0010O010O0010O0010O010O0010OO1M4M2N3L3N3M2M3N3M2M4M2N`W[3"}, "image_id": 416, "id": 6782}, {"iscrowd": 0, "category_id": 1, "bbox": [295.0, 278.0, 53.0, 47.0], "area": 1556, "segmentation": {"size": [512, 512], "counts": "Ric43j?3N2e@Jj>9RAKj>8TAJj>9RAJn>6PAMP?a001O010O000N3O010O01O010O01O010O01O01SAoNi>T11O010O010O01O01O010O0N201O0N3O0010O00O2M2Dl@JW?3l@IW?4l@JV?4=L3Nlfa2"}, "image_id": 416, "id": 6783}, {"iscrowd": 0, "category_id": 1, "bbox": [360.0, 285.0, 47.0, 55.0], "area": 1338, "segmentation": {"size": [512, 512], "counts": "TZd52m?2M2O2N2M2EEPA=n>DQA>l>ERA=l>EQA=m>FQA=N1N3N2M3N1O2M3N2M2O01N2O2N2M3N1N3N2N1N3N2M3N1O2M3N1N3N2N2M2O2N2M2O2M3NcVd1"}, "image_id": 416, "id": 6784}, {"iscrowd": 0, "category_id": 1, "bbox": [410.0, 294.0, 26.0, 23.0], "area": 347, "segmentation": {"size": [512, 512], "counts": "dY]63l?2N2M2O2N2M2O2N2M2OO102N10100O10O10O1N1O2M3N2N1N3NafU1"}, "image_id": 416, "id": 6785}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 304.0, 22.0, 53.0], "area": 685, "segmentation": {"size": [512, 512], "counts": "j9Y1g>1O0N2M4L2OO3N3L3M4M2M4L3N3L3N3L3M3N3L3M4M[fd7"}, "image_id": 416, "id": 6786}, {"iscrowd": 0, "category_id": 1, "bbox": [258.0, 304.0, 25.0, 20.0], "area": 280, "segmentation": {"size": [512, 512], "counts": "jYQ42l?2M4M2M301O010O010O01O01O010O01O01O010OO2M2M4M2M[Vb3"}, "image_id": 416, "id": 6787}, {"iscrowd": 0, "category_id": 1, "bbox": [30.0, 322.0, 47.0, 52.0], "area": 1423, "segmentation": {"size": [512, 512], "counts": "X[?3j?3N2M4M2N3L3N2M4M2M40O010ON2N3M2M4M2N2N3O0010O0010O010O0001N1M4M2N2M4M2M4M2M3N3L3N3L3N2M4M2MgUi6"}, "image_id": 416, "id": 6788}, {"iscrowd": 0, "category_id": 1, "bbox": [387.0, 326.0, 57.0, 55.0], "area": 1713, "segmentation": {"size": [512, 512], "counts": "PkQ62m?3N1N3M2N2O2`@CX??f@CX?e0KYOm@h0Q?6M2O1N3M2O2M1O10O0001O01O01O01O01O00010O03M10O01O010O01O02N3N1N2N3N1N3M2O1N3M2O2M2L4N3N1N3M2OQeQ1"}, "image_id": 416, "id": 6789}, {"iscrowd": 0, "category_id": 1, "bbox": [492.0, 339.0, 20.0, 32.0], "area": 429, "segmentation": {"size": [512, 512], "counts": "Z[f72l?2M4L3N2M4M2O2O0O1M4M2O2O01O010O01O01OZE"}, "image_id": 416, "id": 6790}, {"iscrowd": 0, "category_id": 1, "bbox": [78.0, 343.0, 50.0, 50.0], "area": 1483, "segmentation": {"size": [512, 512], "counts": "h[W11m?3M2M3N3L3N3M2O1N3M2N3L3N3M2O101O010O01O01N1M4O001^AdN_>_1O01O01O01O0N3N100O2M2M4M2M3N3M2M4M2N2M4M2M4M2NQeo5"}, "image_id": 416, "id": 6791}, {"iscrowd": 0, "category_id": 1, "bbox": [127.0, 362.0, 54.0, 57.0], "area": 1656, "segmentation": {"size": [512, 512], "counts": "^lo13k?2M4M2N210O001L3N2M4M2M4M2O101O010O01O01O0O2L3N3L3N2N3bAcNV>d1010O001O0M3O2O010M2M3N3L3N3L3N2M4M2N3L3N2M4M2M4M2M_TU5"}, "image_id": 416, "id": 6792}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 364.0, 26.0, 28.0], "area": 425, "segmentation": {"size": [512, 512], "counts": "o;2l?3M2M4M2N2M4M2M4O01O010O010O01O01N1N3L3N2N3L3N3M2M`db7"}, "image_id": 416, "id": 6793}, {"iscrowd": 0, "category_id": 1, "bbox": [43.0, 378.0, 14.0, 14.0], "area": 121, "segmentation": {"size": [512, 512], "counts": "Sle01m?3M2N3L3010O010O001N1N2N3MTTS7"}, "image_id": 416, "id": 6794}, 
{"iscrowd": 0, "category_id": 1, "bbox": [58.0, 383.0, 17.0, 23.0], "area": 208, "segmentation": {"size": [512, 512], "counts": "b\\m01l?3N3L3N3L3N3M2N20O2M2M4M2M3N3L3NPTj6"}, "image_id": 416, "id": 6795}, {"iscrowd": 0, "category_id": 1, "bbox": [423.0, 386.0, 50.0, 29.0], "area": 1346, "segmentation": {"size": [512, 512], "counts": "Rlc6:f?00b0^O000000000001O00000000000000000000000001O00000000000000000000000000000000000000000001O00000000lSc0"}, "image_id": 416, "id": 6796}, {"iscrowd": 0, "category_id": 1, "bbox": [186.0, 387.0, 44.0, 61.0], "area": 1519, "segmentation": {"size": [512, 512], "counts": "`]m2142a?1\\@3a?7M4L3N2M4M2M4L3N2M4L3N2010O010O00M4M2M3N2M04M201O0M3M4M2M4L3N2M4M2M4L3N2M4L3N3L3M3Ngc\\4"}, "image_id": 416, "id": 6797}, {"iscrowd": 0, "category_id": 1, "bbox": [503.0, 401.0, 9.0, 22.0], "area": 174, "segmentation": {"size": [512, 512], "counts": "alk73m??A4L0000000000O`C"}, "image_id": 416, "id": 6798}, {"iscrowd": 0, "category_id": 1, "bbox": [105.0, 402.0, 24.0, 32.0], "area": 410, "segmentation": {"size": [512, 512], "counts": "[md12k?3N3L3N3M2M3N3L3N2N3N1010N1M3M4M2M4M2M3N3L3N2M]So5"}, "image_id": 416, "id": 6799}, {"iscrowd": 0, "category_id": 1, "bbox": [231.0, 404.0, 53.0, 65.0], "area": 1844, "segmentation": {"size": [512, 512], "counts": "cmc32k?4M2M3N3L3010Of@AS??j@ES?e0M3N3L3N3M20010O010bAjNP>V1nAlNS>T1iAPOV>P1hAROX>\\10O0001M2M4M2N21N1N2M4M2M3N3L3N3L3N2M4M2N3L3N2M4M2M3N3L3N3L3NUca3"}, "image_id": 416, "id": 6800}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 413.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "m<2Qco7"}, "image_id": 416, "id": 6801}, {"iscrowd": 0, "category_id": 1, "bbox": [293.0, 432.0, 48.0, 53.0], "area": 1313, "segmentation": {"size": [512, 512], "counts": "inb41l?4M2M4M2M3N3L3N3N10010O010O000O2L3N3L3N2M4M2M4M2O0110O01OEeAQO\\>l0fASO[>j0iASOZ>j0hASO[>j0hATOZ>k0?0O01O0O1N3L3N2M4M2M4M2M3NZRe2"}, "image_id": 416, "id": 6802}, {"iscrowd": 0, "category_id": 1, "bbox": [491.0, 442.0, 21.0, 47.0], "area": 682, "segmentation": {"size": [512, 512], "counts": "bne71l?4M2N2c@Kl>9QAIm>9QAIl>;PAIm>f0N3L3N201O010O010O00010O010QB"}, "image_id": 416, "id": 6803}, {"iscrowd": 0, "category_id": 1, "bbox": [417.0, 444.0, 55.0, 45.0], "area": 1376, "segmentation": {"size": [512, 512], "counts": "fn`63k?2M3N3L3N3N10010O010O00010O010O001M2M3N3L3N3M2M3010O010O010O00010O010O00010O010O001M2N2M4M2M4M2M3N3M2M4M2M3Njac0"}, "image_id": 416, "id": 6804}, {"iscrowd": 0, "category_id": 1, "bbox": [150.0, 450.0, 26.0, 26.0], "area": 404, "segmentation": {"size": [512, 512], "counts": "d^[21m?2M4L3N2M4L3N30O0010O00010O0010O0010O00M4L3N2M4L3NiaW5"}, "image_id": 416, "id": 6805}, {"iscrowd": 0, "category_id": 1, "bbox": [23.0, 460.0, 19.0, 24.0], "area": 269, "segmentation": {"size": [512, 512], "counts": "nn;2k?4M2N2M4M2M4M2O110O0001M2M4M2M3N3L3NbaZ7"}, "image_id": 416, "id": 6806}, {"iscrowd": 0, "category_id": 1, "bbox": [76.0, 460.0, 27.0, 39.0], "area": 526, "segmentation": {"size": [512, 512], "counts": "[_V13j?3N3M2M4M2N2M4M2M4O0010O00KUOUAi0k>4103M0O4M2N2M4M2Jb@Ha?56N3LcQ\\6"}, "image_id": 416, "id": 6807}, {"iscrowd": 0, "category_id": 1, "bbox": [201.0, 464.0, 28.0, 31.0], "area": 521, "segmentation": {"size": [512, 512], "counts": "VoT31m?3L3N3L3N2N3L3N2M4O0010O0010O00010O0010L3N3L3M3N3L3M4M[Q]4"}, "image_id": 416, "id": 6808}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 469.0, 17.0, 43.0], "area": 419, "segmentation": {"size": [512, 512], "counts": "e>[1e>01O0N3L3N2M4M2M4M2M3N3L3N2M4M2MXQg7"}, "image_id": 416, "id": 6809}, 
{"iscrowd": 0, "category_id": 1, "bbox": [134.0, 470.0, 31.0, 32.0], "area": 577, "segmentation": {"size": [512, 512], "counts": "W_S21m?2N2Y@Mb?9O0O2M2M3N3M2N3M210O01O010O01O01O010O0N3M2M3N3M2M4M2M3NTQ]5"}, "image_id": 416, "id": 6810}, {"iscrowd": 0, "category_id": 1, "bbox": [468.0, 473.0, 19.0, 26.0], "area": 414, "segmentation": {"size": [512, 512], "counts": "j^Z73m?b0^O0O100000000000004L000000000000K>GnP<"}, "image_id": 416, "id": 6811}, {"iscrowd": 0, "category_id": 1, "bbox": [405.0, 484.0, 40.0, 28.0], "area": 731, "segmentation": {"size": [512, 512], "counts": "ooZ61m?2M3N2M3N2N2M3N2N2M3N2O11O001O001O00001O001O001O00001O001O001O00001M2M4M2N2M4MbPQ1"}, "image_id": 416, "id": 6812}, {"iscrowd": 0, "category_id": 1, "bbox": [271.0, 490.0, 20.0, 15.0], "area": 157, "segmentation": {"size": [512, 512], "counts": "_oW43i?4O2O010O010O00010O010O01O01O010ON3M2NaP^3"}, "image_id": 416, "id": 6813}, {"iscrowd": 0, "category_id": 1, "bbox": [25.0, 493.0, 43.0, 19.0], "area": 442, "segmentation": {"size": [512, 512], "counts": "no<2l?2N2M3N2N2O1001O00O1N2M300001O001O00001O001O001O000Ea@5f?O00001O001O001O00001O001O00001OQ`m6"}, "image_id": 416, "id": 6814}, {"iscrowd": 0, "category_id": 1, "bbox": [195.0, 496.0, 29.0, 16.0], "area": 249, "segmentation": {"size": [512, 512], "counts": "ooQ31l?3N2M3N2O1O1N2O1001O00001C`@8e?O001O001O00001O001O001O001O0NU`_4"}, "image_id": 416, "id": 6815}, {"iscrowd": 0, "category_id": 1, "bbox": [257.0, 499.0, 34.0, 13.0], "area": 237, "segmentation": {"size": [512, 512], "counts": "ooP41l?3N2M3N2N21O00001O001O00001O001O00001O001O001O00001O001O00001O001OQP^3"}, "image_id": 416, "id": 6816}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 499.0, 29.0, 13.0], "area": 226, "segmentation": {"size": [512, 512], "counts": "o_W71m?2M3N2N2M3001O00001O001O001O00001O001O001O00001O001O001MUP:"}, "image_id": 416, "id": 6817}, {"iscrowd": 0, "category_id": 1, "bbox": [145.0, 510.0, 5.0, 2.0], "area": 6, "segmentation": {"size": [512, 512], "counts": "ooX21n?11O0000Q`d5"}, "image_id": 416, "id": 6818}, {"iscrowd": 0, "category_id": 1, "bbox": [85.0, 511.0, 3.0, 1.0], "area": 3, "segmentation": {"size": [512, 512], "counts": "ooZ11o?000Q`c6"}, "image_id": 416, "id": 6819}, {"iscrowd": 0, "category_id": 1, "bbox": [13.0, 0.0, 24.0, 9.0], "area": 118, "segmentation": {"size": [512, 512], "counts": "P`61o?001O00001O001O001O00001O001O00001O001O00N2N2MS`]7"}, "image_id": 421, "id": 6820}, {"iscrowd": 0, "category_id": 1, "bbox": [76.0, 0.0, 14.0, 13.0], "area": 113, "segmentation": {"size": [512, 512], "counts": "YPV11m?2N3M2N3N101O0000N2N2N2M3NRPc6"}, "image_id": 421, "id": 6821}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 0.0, 19.0, 12.0], "area": 159, "segmentation": {"size": [512, 512], "counts": "T`]13k?2N3O001O00001O001O00001O00O1N2M3N2MSPY6"}, "image_id": 421, "id": 6822}, {"iscrowd": 0, "category_id": 1, "bbox": [124.0, 0.0, 63.0, 46.0], "area": 1959, "segmentation": {"size": [512, 512], "counts": "iPn11m?2N3M3M2N3N1N3M2N3M3M2O2M2O2O001O1O001O001O010O010O10O010O10O010O10O001O001O001O1O001O000000000000000000000001O000000000000@T`R5"}, "image_id": 421, "id": 6823}, {"iscrowd": 0, "category_id": 1, "bbox": [185.0, 0.0, 19.0, 7.0], "area": 71, "segmentation": {"size": [512, 512], "counts": "P`l21o?001O00001O001O00001O001O001O00N2N2NRPj4"}, "image_id": 421, "id": 6824}, {"iscrowd": 0, "category_id": 1, "bbox": [321.0, 0.0, 34.0, 15.0], "area": 299, "segmentation": {"size": [512, 512], "counts": 
"Q`P52m?2O001O00001O001O00001O001O00001O001O00001O001O00001O001ON2M3N2M3NR`^2"}, "image_id": 421, "id": 6825}, {"iscrowd": 0, "category_id": 1, "bbox": [429.0, 0.0, 67.0, 42.0], "area": 1642, "segmentation": {"size": [512, 512], "counts": "^`f61n?4M3M3L4M3M3L4M3M3L10O10O10O10O01000O01000O01000O01000O010O00100O001000O10000O10000O100O1O1O100O1O1O100O1O1O100O1O2N3N1N2N3M2O1N3M2Odo7"}, "image_id": 421, "id": 6826}, {"iscrowd": 0, "category_id": 1, "bbox": [290.0, 21.0, 71.0, 48.0], "area": 2046, "segmentation": {"size": [512, 512], "counts": "RQa41m?2M4M2e@Hk>;SAHj>;SAGl>:RAHn>9n@KR?a0010O01O01O010O010O01O010O01O01O010O010O0010O0010O010O0010O0010O001M2N3L3N3O000O2M03L3N3N11OlNXAP1m>O010O01O010O01O001L4M3M3L4Fb@Na?Ob@Na?Oa@OYn[2"}, "image_id": 421, "id": 6827}, {"iscrowd": 0, "category_id": 1, "bbox": [480.0, 24.0, 32.0, 69.0], "area": 1342, "segmentation": {"size": [512, 512], "counts": "_R`71m?3L3N3M2M4M2M4M2N3L3N2M4M2N3L3N3L3N3M2M4M2M4M210O01O010O001M2NVO"}, "image_id": 421, "id": 6828}, {"iscrowd": 0, "category_id": 1, "bbox": [17.0, 26.0, 51.0, 78.0], "area": 2504, "segmentation": {"size": [512, 512], "counts": "Pb82l?2k@NY>6dAMY>5WAG87_>9_AJ^>9_AI_>9^AJ`>9]AJ_>m0N3M2N3M2M4M2N3M2N3N11O010O01O010O010O010O01N1N3M2M4M2N2N3M2N3HdAgN_>W17M2N3O0VOTA`0m>]OUA:NEP?NVA:MEo>OVA:]?L3N3M_nm6"}, "image_id": 421, "id": 6829}, {"iscrowd": 0, "category_id": 1, "bbox": [73.0, 27.0, 52.0, 33.0], "area": 887, "segmentation": {"size": [512, 512], "counts": "_aT11m?3L3N3L3N2M4M2M40O010O0010O0010O010O00010O010O0010O0010O0O2M2M4M2O1010O01O01O010O01O010O01O01O0N3L3N3Md^Q6"}, "image_id": 421, "id": 6830}, {"iscrowd": 0, "category_id": 1, "bbox": [373.0, 30.0, 43.0, 49.0], "area": 1135, "segmentation": {"size": [512, 512], "counts": "baj52l?3L3N3M2N3L3N2N3M210UAWO]>i0aAYO`>g0]A\\Ob>d0\\A^Oe>b0WAAi>l00O01O010O2OO010O010O01O01M2N3M2_ORAMR?OQAOQ?OQAOR?1k@1U?Oi@3W?:2M2N3Gb@L`?2b@Kb?27Nen_1"}, "image_id": 421, "id": 6831}, {"iscrowd": 0, "category_id": 1, "bbox": [431.0, 49.0, 29.0, 20.0], "area": 285, "segmentation": {"size": [512, 512], "counts": "mag62n?1N2O0O1O10000O1O2N2O1N2N1O01O010O1O00010O1O2N2O2M2N2N3NWni0"}, "image_id": 421, "id": 6832}, {"iscrowd": 0, "category_id": 1, "bbox": [265.0, 67.0, 91.0, 61.0], "area": 2462, "segmentation": {"size": [512, 512], "counts": "hbT41m?2M4M2M3N3L3N3O010O00010O010O010O0N2M4M2O2O010O01O01O010O01O010O01UASOa>m0]AUOd>k0YAWOg>R10O010O01O01O010O01O010O01nNZAh0f>VO\\Ak0l>010O01O01O010O010O01O01O0O2M201O00010O010O01M2N2N3L3N3M2M3O2O010O010O0M3N3M2M4Mnl]2"}, "image_id": 421, "id": 6833}, {"iscrowd": 0, "category_id": 1, "bbox": [108.0, 69.0, 67.0, 44.0], "area": 1664, "segmentation": {"size": [512, 512], "counts": "kRf11m?3M2M3N3L3N2N3L3N3O00010O0010O0010O0010O0010O0010O0010O0010O00010O010O00010O010O0001N1M4M2M3N3M210O00010OM4M2M3N3L3N3L3N2M4M2M4M2M3Nf]X5"}, "image_id": 421, "id": 6834}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 78.0, 13.0, 14.0], "area": 120, "segmentation": {"size": [512, 512], "counts": "^2:f?10O0010O0010O0010OO1M4M_]i7"}, "image_id": 421, "id": 6835}, {"iscrowd": 0, "category_id": 1, "bbox": [63.0, 84.0, 36.0, 29.0], "area": 597, "segmentation": {"size": [512, 512], "counts": "Tco02l?2M4M2M3N3L3O2O01O010O01O01O010O01O010O01O01O010O01O010O0N2N3M2M4M2M3NS]^6"}, "image_id": 421, "id": 6836}, {"iscrowd": 0, "category_id": 1, "bbox": [365.0, 88.0, 30.0, 34.0], "area": 595, "segmentation": {"size": [512, 512], "counts": "ccf51m?3L3N3L3M3N3L3N2M4M2N30O0010O00M4M2M40O0010O0O1M4L3N3L3N2MT]j1"}, "image_id": 421, "id": 6837}, {"iscrowd": 0, "category_id": 1, "bbox": 
[507.0, 91.0, 5.0, 14.0], "area": 43, "segmentation": {"size": [512, 512], "counts": "Tcm73k?3M2M4M2UM"}, "image_id": 421, "id": 6838}, {"iscrowd": 0, "category_id": 1, "bbox": [412.0, 93.0, 48.0, 65.0], "area": 1740, "segmentation": {"size": [512, 512], "counts": "`T^62l?2N3L3N3L3N2M4M2N3L3N3L3N2N3L3N3L3N3M2M3N3N110O010O01OM40L3M3N3L3M3N3L3M4M2M3M4ROQAg0U?M4L3N2M4L3N2Mkli0"}, "image_id": 421, "id": 6839}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 107.0, 33.0, 72.0], "area": 1433, "segmentation": {"size": [512, 512], "counts": "d3k1S>3M2M4M21O010O01ON3M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3M2M4M2M3N3L3N3L\\\\_7"}, "image_id": 421, "id": 6840}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 108.0, 62.0, 50.0], "area": 1700, "segmentation": {"size": [512, 512], "counts": "Yd]12l?2M3N3L3M4M2M3N3L3M4M2O110O0010O0010O0010O0010O00010O010O00010O010O00010OoNUAm0P?0O00010O010O00010O010O0001O0N3L3N2M4M2M4M2M3M4Mn[c5"}, "image_id": 421, "id": 6841}, {"iscrowd": 0, "category_id": 1, "bbox": [473.0, 120.0, 39.0, 74.0], "area": 1841, "segmentation": {"size": [512, 512], "counts": "ce\\71m?3L3N3L3N2M4M2M4M2HWOVAm0g>UOWAm0g>8L3N3M2O101M2N3M2N3M2M4M2N30O0010O010O00N3L3N3M2N3M2MUL"}, "image_id": 421, "id": 6842}, {"iscrowd": 0, "category_id": 1, "bbox": [249.0, 131.0, 63.0, 48.0], "area": 1788, "segmentation": {"size": [512, 512], "counts": "ndl31l?3N3M2M4M2N3L3N2M4M2N3N110O00010O010O010O00010O010O00010O010O010O00010O010O010M2M3N3M210O010O01N1N2N3L3N3M2M3N3M2M4M2M4M2N2MekS3"}, "image_id": 421, "id": 6843}, {"iscrowd": 0, "category_id": 1, "bbox": [325.0, 143.0, 36.0, 36.0], "area": 760, "segmentation": {"size": [512, 512], "counts": "YeR53k?3M2M4M2N2M4M2N3L3N3M2O20O0010O01L3N2N3N1010O010O0010O0N3L3N3M2M3N3M2MZ[[2"}, "image_id": 421, "id": 6844}, {"iscrowd": 0, "category_id": 1, "bbox": [66.0, 146.0, 67.0, 58.0], "area": 2106, "segmentation": {"size": [512, 512], "counts": "dUQ11l?4M2N3L3N2N3L3N3M2M4M2N2N30ON3M2O2O01O01O010O01O010O01O010O01O010O0jN\\AQ1c>lN`AT1g>O010O01O010O01O01O010O010O01O01O010OO2M2M3N3M2M4M2N3L3N2M4M2NfZm5"}, "image_id": 421, "id": 6845}, {"iscrowd": 0, "category_id": 1, "bbox": [398.0, 163.0, 33.0, 25.0], "area": 476, "segmentation": {"size": [512, 512], "counts": "bUW61m?2N3L3N2M4M2O2O01O010O01O010O01O01O010O010O01O0O1N3M210O2O0M2M4MbZX1"}, "image_id": 421, "id": 6846}, {"iscrowd": 0, "category_id": 1, "bbox": [21.0, 172.0, 31.0, 27.0], "area": 488, "segmentation": {"size": [512, 512], "counts": "le:1l?4M2M3N3L3N3N11O01O01O010O01O01O010O01O01O010O01ON3M2M4M2M3N]jU7"}, "image_id": 421, "id": 6847}, {"iscrowd": 0, "category_id": 1, "bbox": [234.0, 178.0, 61.0, 49.0], "area": 1889, "segmentation": {"size": [512, 512], "counts": "^Ve32k?4M2N3L3N3M2N2M4M2N3L3N201O010O010O01O01O010O01O01O010O010N1N2M4N110O01O01O010O010O00010O01N1N3L3N2N3L3N3L3N2N3L3N3L3N3MTZ\\3"}, "image_id": 421, "id": 6848}, {"iscrowd": 0, "category_id": 1, "bbox": [477.0, 193.0, 35.0, 33.0], "area": 700, "segmentation": {"size": [512, 512], "counts": "cf^71m?3M2M3N3L3N3M2N3O01O01O010O01O01O010O01O01O010O01O010O01O010O01O01O0eI"}, "image_id": 421, "id": 6849}, {"iscrowd": 0, "category_id": 1, "bbox": [319.0, 196.0, 30.0, 33.0], "area": 579, "segmentation": {"size": [512, 512], "counts": "kfo42l?2M4M2N3L3N2M4M2M4N11O010O01O01O010O01OO2M2M4M2M3N3L3N3L3NfYa2"}, "image_id": 421, "id": 6850}, {"iscrowd": 0, "category_id": 1, "bbox": [54.0, 198.0, 61.0, 57.0], "area": 1869, "segmentation": {"size": [512, 512], "counts": 
"XWk02l?2N3L3N3M2M3N3M2M4M2N3L3N2N3M2010O010O00010O010O010O00010lN]Ai0c>UO`Ak0`>RObAn0i>O01O010O010O01O01O010O010O0N2M4M210OO2L3N3M2M3N6J2M4M2N2I]@NhhV6"}, "image_id": 421, "id": 6851}, {"iscrowd": 0, "category_id": 1, "bbox": [375.0, 199.0, 70.0, 74.0], "area": 2475, "segmentation": {"size": [512, 512], "counts": "ogk51l?4M2N2M4M2N3M2M4M2N2M4M2N3M2N30O0010O0010O0010O010O0010O0010O010O00010N1N3L3N3L3N2N1N1000O010O011O2M4M2N3L3N2M4M2N3L3N3L3N2N3L3N3M2M3N3L3NcYQ1"}, "image_id": 421, "id": 6852}, {"iscrowd": 0, "category_id": 1, "bbox": [436.0, 200.0, 36.0, 39.0], "area": 579, "segmentation": {"size": [512, 512], "counts": "XWj63k?2M4M2N3M2M4M2N3M2N2M10000001O3M3M2N3M2N3M2N30O10O10O010O010O01N1N3M3M^ic0"}, "image_id": 421, "id": 6853}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 216.0, 35.0, 27.0], "area": 444, "segmentation": {"size": [512, 512], "counts": "m6;b?3N201O010O01O01O010O01Fa@1_?Md@3[?Jh@6a?10O0010O001N1N2M4O010O00010OO2L3N2M4M2NmX^7"}, "image_id": 421, "id": 6854}, {"iscrowd": 0, "category_id": 1, "bbox": [213.0, 228.0, 70.0, 52.0], "area": 2081, "segmentation": {"size": [512, 512], "counts": "mgZ32l?2N3L3N3M2N3L3N2N3M2M4O000010O010O01O01O010O010O00010O010O01O01O010O010O0001N1N3L3O101O010O0N30O0010O0010O0010OSO_A;a>CaA>a>\\OcAc0]>[OeAf0Z>WOiAi0h>M2M4M2M4M2N2M4M2MRXb3"}, "image_id": 421, "id": 6855}, {"iscrowd": 0, "category_id": 1, "bbox": [287.0, 233.0, 37.0, 25.0], "area": 535, "segmentation": {"size": [512, 512], "counts": "ig_41m?3M2M4M2M3N3M201O01O010O01O010O01O01O01M2M4O001O01O010OC`@:d?O01O010O01N1M4MZhm2"}, "image_id": 421, "id": 6856}, {"iscrowd": 0, "category_id": 1, "bbox": [11.0, 249.0, 87.0, 53.0], "area": 2410, "segmentation": {"size": [512, 512], "counts": "^h52l?2M3N3L3N3L3N2N30O010O010O00010O010O00010O010O00010O01N1N3L3N2N3O010O01O010O01O010O01O01O010O01O01O010O01O010O01O010O01O01O010O01O01O010O01O010O01OO2M2M4M2M3N3L3N3M2M3N3L3Ncg^6"}, "image_id": 421, "id": 6857}, {"iscrowd": 0, "category_id": 1, "bbox": [326.0, 257.0, 16.0, 24.0], "area": 221, "segmentation": {"size": [512, 512], "counts": "cXS54i?3M4M2M3M4L3O1000M4L3M3N3L3M3MPhd2"}, "image_id": 421, "id": 6858}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 266.0, 6.0, 16.0], "area": 50, "segmentation": {"size": [512, 512], "counts": "Z8`0`?M4L3N2M4Mfgl7"}, "image_id": 421, "id": 6859}, {"iscrowd": 0, "category_id": 1, "bbox": [495.0, 266.0, 17.0, 29.0], "area": 443, "segmentation": {"size": [512, 512], "counts": "\\hg78h?O`A2_>O`A3]>0UAG81^A1`>1^A0a>2\\A1a>2]A0a>1^A0a>i0M3N2N1N3N2N2M20100O10O1O1N1O2M3N2N1N3N2N2M2O2N2M3N1O2M3N2N1N3N2N2M2O2N2M3N1O2M3N2N1OleY3"}, "image_id": 421, "id": 6867}, {"iscrowd": 0, "category_id": 1, "bbox": [450.0, 320.0, 60.0, 58.0], "area": 1937, "segmentation": {"size": [512, 512], "counts": "S[Q73k?3L3N3M2M3_OB^Aa0^>B_Aa0_>B^Aa0`>@]Ac0c>]O[Ae0e><0O010O01N11O01O010O01O01O010O01O010O01O010OjN`Al0a>QOaAo0_>oNcAR1f>O010O0010O0010O0010O0010O0N3M2M3N3L3N3L3N3M2M3NVe0"}, "image_id": 421, "id": 6868}, {"iscrowd": 0, "category_id": 1, "bbox": [354.0, 324.0, 51.0, 58.0], "area": 1626, "segmentation": {"size": [512, 512], "counts": "Q[a53m?2N3L3N3M2M3N3M2M4M2N2M0100O01O1M4M1N3M3N1N1O3N2N1101N100O011O0O101N101O2L2N2M4L3N1N3M3N3L3M4M2D^@7g?M4MdUe1"}, "image_id": 421, "id": 6869}, {"iscrowd": 0, "category_id": 1, "bbox": [300.0, 325.0, 49.0, 62.0], "area": 1902, "segmentation": {"size": [512, 512], "counts": "R[f42l?2e@Od>5XANf>4XAOd>5YAMe>5XANe>5YANd>h0N3L3N3L3N2010O010O00010O010O01O01O010O010O000O2M2M4M2M3N3M2M4M2M3N3L3N3M2M4M2M3N3L_Ua2"}, "image_id": 421, "id": 6870}, {"iscrowd": 0, 
"category_id": 1, "bbox": [0.0, 332.0, 56.0, 57.0], "area": 1809, "segmentation": {"size": [512, 512], "counts": "k:g0V?4O010M2JTOUAP1g>6N201O010O00010O010O010O00010O010O0001mN[Aj0d>SO`Al0a>QOaAP1g>010O01O01O010O01O01O010O01O01O010ON3L3N2M4M2N3L3N2M4M2MkdS7"}, "image_id": 421, "id": 6871}, {"iscrowd": 0, "category_id": 1, "bbox": [430.0, 335.0, 20.0, 17.0], "area": 197, "segmentation": {"size": [512, 512], "counts": "jZg61l?3N2N3L3O20O0010O0010O0010O0001L3N3L3N]en0"}, "image_id": 421, "id": 6872}, {"iscrowd": 0, "category_id": 1, "bbox": [501.0, 369.0, 11.0, 32.0], "area": 219, "segmentation": {"size": [512, 512], "counts": "\\lj71m?3HM^@6_?M_@6S?JSA=k>ESA=m>Do@`0Q?701ON3M_D"}, "image_id": 421, "id": 6873}, {"iscrowd": 0, "category_id": 1, "bbox": [404.0, 381.0, 19.0, 25.0], "area": 229, "segmentation": {"size": [512, 512], "counts": "`\\Z62k?3N3M2M4M210O001M1O000002N3M2M4M2N3MRT\\1"}, "image_id": 421, "id": 6874}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 394.0, 38.0, 40.0], "area": 1020, "segmentation": {"size": [512, 512], "counts": "Z@[Ad0b>^O\\Ad0a>_O\\Ad0b>?L3N3L3N2M4M2M4O01O010O01O01O0N3L3N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L\\cZ5"}, "image_id": 421, "id": 6876}, {"iscrowd": 0, "category_id": 1, "bbox": [420.0, 396.0, 62.0, 40.0], "area": 1982, "segmentation": {"size": [512, 512], "counts": "a\\b61o?d0\\O:F00003M12M000000O10000000000000000000000000000000000000O1000N20000000L4000O10000000000L40000000000000000000000004L0O"}, "image_id": 421, "id": 6877}, {"iscrowd": 0, "category_id": 1, "bbox": [470.0, 432.0, 29.0, 31.0], "area": 554, "segmentation": {"size": [512, 512], "counts": "T^[74j?2M4M2M3N3L3M4N10010O001O0N30O0010O01M2N2M4M2M4M2N2M4M2N\\R6"}, "image_id": 421, "id": 6878}, {"iscrowd": 0, "category_id": 1, "bbox": [226.0, 435.0, 23.0, 17.0], "area": 224, "segmentation": {"size": [512, 512], "counts": "j]a33k?2M4M2010O00010O010O00010O010O0010O0001M2M4MWRS4"}, "image_id": 421, "id": 6879}, {"iscrowd": 0, "category_id": 1, "bbox": [177.0, 439.0, 65.0, 58.0], "area": 2223, "segmentation": {"size": [512, 512], "counts": "knh23k?2M3N3L3N3M2M3N3L3N3L3N2M4M2N3O01O010O01O01O010O01O01O010O01OfN^AV1g>O010O01O01O01O010O01O01O0M4N11O010O0M3N3L3N3L3N2M4M201O01OM4M2Fb@Ob?N`@0XQW4"}, "image_id": 421, "id": 6880}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 441.0, 17.0, 40.0], "area": 415, "segmentation": {"size": [512, 512], "counts": "i=W1j>0O01O0O1M4M2N3L3N3M2M3N3L3N3M2MSRg7"}, "image_id": 421, "id": 6881}, {"iscrowd": 0, "category_id": 1, "bbox": [408.0, 454.0, 68.0, 50.0], "area": 1854, "segmentation": {"size": [512, 512], "counts": "j^\\62l?2O2M2O2M2N3N2M2N3N1N3M30O010O010O10O10O010O010O10O10OO2M2N3N2M201O010O01000O010O010O01000O010O010N01O2M2N3N2M2N3M2O2M2N3M3N1N3M2O2M2NZaa0"}, "image_id": 421, "id": 6882}, {"iscrowd": 0, "category_id": 1, "bbox": [120.0, 463.0, 41.0, 32.0], "area": 676, "segmentation": {"size": [512, 512], "counts": "m^l11l?3N3L3N2N3M2010O0010O0010O01b@@[?c0O0010O0010O010O00010O010O0010O0010O00N3L3N3L3N3M2MVQ_5"}, "image_id": 421, "id": 6883}, {"iscrowd": 0, "category_id": 1, "bbox": [239.0, 465.0, 45.0, 47.0], "area": 1536, "segmentation": {"size": [512, 512], "counts": "mog31l?4L3L4M3M3L5L3L4M3M3L4M3L4000000001O000000001O0000001O000000001O000N2K5M40O0000^OVANk>LZA5e>G_A9T?M4Jj`a3"}, "image_id": 421, "id": 6884}, {"iscrowd": 0, "category_id": 1, "bbox": [156.0, 475.0, 29.0, 30.0], "area": 534, "segmentation": {"size": [512, 512], "counts": "__^22l?2N2M4M2N2N3M2M3N210O0010O01O00010O0010O010O0N3M2Ae@7]?Gf@5d?N3MiPS5"}, "image_id": 421, "id": 6885}, {"iscrowd": 
0, "category_id": 1, "bbox": [292.0, 475.0, 45.0, 37.0], "area": 924, "segmentation": {"size": [512, 512], "counts": "h_b41m?2O2M2N3N2M2N3N1N5K2N2N2M3N2N2O100001O001O001O00001\\OSA3m>KUA5l>HWA7i>GYA9h>D[A;e>C]A=T?O001O00001O001O001N1M4M2N[Pg2"}, "image_id": 421, "id": 6886}, {"iscrowd": 0, "category_id": 1, "bbox": [105.0, 491.0, 59.0, 21.0], "area": 649, "segmentation": {"size": [512, 512], "counts": "ood11l?3N2M3N2N2M3N2N2O11O001O00001O001O001O00001O001B`@:c?0001O001O00001O001ON2O11O001O00O1O100001O001O001O00001O001O00001O0OU`]5"}, "image_id": 421, "id": 6887}, {"iscrowd": 0, "category_id": 1, "bbox": [371.0, 494.0, 51.0, 18.0], "area": 486, "segmentation": {"size": [512, 512], "counts": "ooi51m?2M3N2M3N2M3N200001O00001O001O001O00001O001O00001O001O00001O001O001O00001O001O00001O001O00001O001O00Q`\\1"}, "image_id": 421, "id": 6888}, {"iscrowd": 0, "category_id": 1, "bbox": [202.0, 501.0, 20.0, 11.0], "area": 141, "segmentation": {"size": [512, 512], "counts": "n_U32k?3N2N2N21O001O001O00001O001O00001O001NU``4"}, "image_id": 421, "id": 6889}, {"iscrowd": 0, "category_id": 1, "bbox": [432.0, 503.0, 27.0, 9.0], "area": 124, "segmentation": {"size": [512, 512], "counts": "m_h63k?2M3O11O00001O001O00001O001O00001O001OO1001O001O0000QPj0"}, "image_id": 421, "id": 6890}, {"iscrowd": 0, "category_id": 1, "bbox": [225.0, 509.0, 11.0, 3.0], "area": 23, "segmentation": {"size": [512, 512], "counts": "no`32m?10000001O00001O0000Q`Y4"}, "image_id": 421, "id": 6891}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 12.0, 25.0], "area": 190, "segmentation": {"size": [512, 512], "counts": "0i0W?0000N2M3N2M3N2M3N2M3NRPj7"}, "image_id": 422, "id": 6892}, {"iscrowd": 0, "category_id": 1, "bbox": [19.0, 0.0, 31.0, 40.0], "area": 638, "segmentation": {"size": [512, 512], "counts": "Pa92k?4M2M4M2N2M4M2M4M2M3N3M2M4O00M3N2M3N2M3N2N2M3N2M3N2M3N2N2M3NRPW7"}, "image_id": 422, "id": 6893}, {"iscrowd": 0, "category_id": 1, "bbox": [230.0, 0.0, 52.0, 40.0], "area": 1435, "segmentation": {"size": [512, 512], "counts": "jPc33T?OaA4]>M`A6`>J^A8b>IZA;e>EYA=g>DUA`0j>@TAb0l>901O001O001O00001O001O00001O001O00001O00N2M3N2N2O100001O001O0000N2M3N2M3N2M3N2M3N2M3N2MSPc3"}, "image_id": 422, "id": 6894}, {"iscrowd": 0, "category_id": 1, "bbox": [411.0, 0.0, 65.0, 30.0], "area": 1146, "segmentation": {"size": [512, 512], "counts": "^`]62l?3L3N3M2M4M20001O001O001O00001O001O00001O001O001O00001O001O001OM3N2N2M3N2M3N2N200001O001O00001O001O001O00001O00N3N4M2O0O01O01M2Mkoa0"}, "image_id": 422, "id": 6895}, {"iscrowd": 0, "category_id": 1, "bbox": [476.0, 0.0, 28.0, 15.0], "area": 290, "segmentation": {"size": [512, 512], "counts": "TP^73k?2N3O00001O001O00001O00001O001O00001O001O0000N2N2M3N2MSP4"}, "image_id": 422, "id": 6896}, {"iscrowd": 0, "category_id": 1, "bbox": [507.0, 0.0, 5.0, 6.0], "area": 21, "segmentation": {"size": [512, 512], "counts": "S`m71l?301O001O"}, "image_id": 422, "id": 6897}, {"iscrowd": 0, "category_id": 1, "bbox": [280.0, 8.0, 91.0, 78.0], "area": 3101, "segmentation": {"size": [512, 512], "counts": "WQ\\41l?3N3M2M4M2N3M2M3N3M2N3M2010O010O0010N1N3L3O2O01O010O01O010O01O010iNZAR1j>10O010O00010O010O010O00010O010lNWAm0P?M2M_AXOQ>f0nA\\OR>e0jA^OV>b0hAAX>>fADZ>=bAG]>9aAI\\>;aAH^>m00010O010O0010O0010O010O0010O0VObA2_>JeA6Z>HhA8Y>EiAAnA>S>_OoAb0P>\\OSBc0m=ZOVBf0c>O0N2N3L3N3M2M4M2NQ^V2"}, "image_id": 422, "id": 6898}, {"iscrowd": 0, "category_id": 1, "bbox": [183.0, 34.0, 46.0, 58.0], "area": 1420, "segmentation": {"size": [512, 512], "counts": 
"cbk21m?3L3N3M2M3N3M2M4M2N3L3N3M2O2O010O0N0O101N3N1O0O10O301O01O010O010O01M2N3M2N2M4M2Cn@JU?2n@LT?2o@JU?3m@KU?2>Nh^]4"}, "image_id": 422, "id": 6899}, {"iscrowd": 0, "category_id": 1, "bbox": [492.0, 43.0, 20.0, 36.0], "area": 455, "segmentation": {"size": [512, 512], "counts": "VRf71m?3M2M3N3M2N3L3N2N3L3N3M2010O0010O0010ObN"}, "image_id": 422, "id": 6900}, {"iscrowd": 0, "category_id": 1, "bbox": [379.0, 46.0, 100.0, 69.0], "area": 3201, "segmentation": {"size": [512, 512], "counts": "ibm52k?4M2M4M2M4L3N2M4M210O010O00010O010O00010O010O0010O0010O0010O010O00010O01O010O01O01O010O010O00010O010O00010O010OO101ON1N3M2O2M3N1N3N1N3N1N3N2M2O2M2O2001N101N10001L3N3L3N2M4M2M3N3L3N3L3N2M4M2M4L3N2M4MZ^`0"}, "image_id": 422, "id": 6901}, {"iscrowd": 0, "category_id": 1, "bbox": [215.0, 47.0, 77.0, 73.0], "area": 2822, "segmentation": {"size": [512, 512], "counts": "Pc[32k?3N3M2N2M4M2N3M2M4M2N2N3M2O2O010M2N00O03N2N2M0100030O00010O010O010O01O01O0fAcNM0o=]1QBfNMOR>[1nAoNR>2mAm0S>POPBQ1P>kNTBT1l=iNWBX1h=eN[B[1V>O010O01O0O2L3N3L3N0O13M2N2M4M2N3M2M4M2N2N3L3N3M2M4M2N3M2M3N3M2Nnm]3"}, "image_id": 422, "id": 6902}, {"iscrowd": 0, "category_id": 1, "bbox": [276.0, 73.0, 77.0, 73.0], "area": 2840, "segmentation": {"size": [512, 512], "counts": "jSZ41m?3M2N3M2M3N3M2N3M2M4M2N3M2N201O010N0O0O1O3M4MO01O1201O010O00010O010O010O0001cAeNOMP>]1oAhNNOR>h11aNnAn0R>POPBQ1P>kNSBU1m=hNWBX1i=eNYB[1W>0O010O000O2M2M4M2N1O01N4M2N3M2M4M2N2M4M2N3L3N3M2N2M4M2N3L3N3MS]_2"}, "image_id": 422, "id": 6903}, {"iscrowd": 0, "category_id": 1, "bbox": [339.0, 98.0, 75.0, 73.0], "area": 2892, "segmentation": {"size": [512, 512], "counts": "cdY53k?2N2M4M2M4M2N3L3N2M4M2N3FmNcAU1[>nNeAQ1X>ROhAo0U>SOkAm0R>WOlAk0Q>WOlAi0X>=201O0O1M2O201O010O00010O010O00010O01kAbNg=]1QBeNM1R>Z1oAnNP>1QBo0P>nNRBR1n=lNTBU1l=gNXBX1h=fNZB[1V>O00010O01O0N2M4M2M3N01O2M4M2M3N3L3N3L3N2N3L3N3L3N2M4M2M4M2N[l`1"}, "image_id": 422, "id": 6904}, {"iscrowd": 0, "category_id": 1, "bbox": [476.0, 105.0, 36.0, 48.0], "area": 1054, "segmentation": {"size": [512, 512], "counts": "XT^72k?4M2M3M4M2M4M2M3M4M2010O01O01O010O01O01O010O010O01O01O010OO2M2M3N3L3N3fL"}, "image_id": 422, "id": 6905}, {"iscrowd": 0, "category_id": 1, "bbox": [404.0, 125.0, 43.0, 60.0], "area": 1411, "segmentation": {"size": [512, 512], "counts": "nTZ63j?3N3L3N2e@EL0l>?TAJj>8TAKh>h0N3M2010O0001O0M4M2N30O0001M1N02O3M2M4M2M3N3L3N3ERA_OQ??RA^OP?`0:O0010OM3N3L3N3L3Nm[P1"}, "image_id": 422, "id": 6906}, {"iscrowd": 0, "category_id": 1, "bbox": [161.0, 126.0, 88.0, 58.0], "area": 2552, "segmentation": {"size": [512, 512], "counts": "jd`21m?3L3N2M4M2N3L3N2N3N1N3M2N2010O01O010O01O010M21O010O01O01O010O000010O010O00010O010O01O01O010O010O00010O010O00010O010O01O01O010O010O00010O010O00010O010O000N3M2M4M2M3N3L3N3M2M4MV[S4"}, "image_id": 422, "id": 6907}, {"iscrowd": 0, "category_id": 1, "bbox": [255.0, 135.0, 24.0, 25.0], "area": 365, "segmentation": {"size": [512, 512], "counts": "bdo32k?3N2M4M2O2b@CX?_1O010O01O01O010O010O00010O010O01O01O010O010O01O0O1N3M2M4M2N1N01003M210O01O01O010O010O0N2M4M2N3L3N2N3L3N3LUZU2"}, "image_id": 422, "id": 6910}, {"iscrowd": 0, "category_id": 1, "bbox": [382.0, 188.0, 35.0, 34.0], "area": 688, "segmentation": {"size": [512, 512], "counts": "aVo52k?4M2N3L3N2N3L3N3M21O010O01O01O010O010O01O01O010O01OM4M2N3L3N3L3N2M4MkY_1"}, "image_id": 422, "id": 6911}, {"iscrowd": 0, "category_id": 1, "bbox": [126.0, 199.0, 95.0, 61.0], "area": 2774, "segmentation": {"size": [512, 512], "counts": 
"PWo12l?2N3L3N3L301O01O010O01O01O010O01N1N3L3N2N3L3N3M21O010O010O01O01O010O01O01O010O010O01O01O010O01O01O010O010O01O01O010O01O01O010O010O01O01O010O01O01O010O010O01O01O010M2N2N3L3N3L3N3M2M3N3L3N3MmXa4"}, "image_id": 422, "id": 6912}, {"iscrowd": 0, "category_id": 1, "bbox": [427.0, 216.0, 61.0, 50.0], "area": 1692, "segmentation": {"size": [512, 512], "counts": "bge64j?2N2M4M2M4M2N3L3N2N3L310O0010O0010O010O0010O0010O010O0010O0010O010O0010O0010O010O0010O0010O010O00O2L3N3M2M4M2N2M4M2N3L3Neh;"}, "image_id": 422, "id": 6913}, {"iscrowd": 0, "category_id": 1, "bbox": [262.0, 244.0, 69.0, 59.0], "area": 2362, "segmentation": {"size": [512, 512], "counts": "dXS44j?2M4M2M4M2M3N3L3N3L3N2N3L3010O00010O010O0010O001ZAgNd>Z10010O0010O010O00010O010O00010O010O00010O010O00010O010O00001L3N3L3N3M2M3N3L3N3L3N2M4M2MiWj2"}, "image_id": 422, "id": 6914}, {"iscrowd": 0, "category_id": 1, "bbox": [358.0, 244.0, 25.0, 25.0], "area": 395, "segmentation": {"size": [512, 512], "counts": "SXc54i?3M3N3L3M4N100010O0010O00010O0010O0O1M4L3M4L3N2MYXP2"}, "image_id": 422, "id": 6915}, {"iscrowd": 0, "category_id": 1, "bbox": [106.0, 246.0, 71.0, 59.0], "area": 2352, "segmentation": {"size": [512, 512], "counts": "jXe11m?3M2N3M2M3N3M2N3M2N201O0010O0010O010O0010O0010IWOTAj0j>8L3N2N3L3N3O01O01O010O01O010O01O010O01O01O010O01O01O010ON3M2M3N3L3N3M2M3N3O0O2L3N3M2M3N3L3NlWW5"}, "image_id": 422, "id": 6916}, {"iscrowd": 0, "category_id": 1, "bbox": [391.0, 276.0, 76.0, 55.0], "area": 2246, "segmentation": {"size": [512, 512], "counts": "[iS64j?2N3L3N3L3N201O010O010O000N3M2M4M2N3O01O01O010O01O010O01O010O01O01O0TAnNk>T1O01O010O01O010O01O010O01N101O01O010O01O010O01O010O01O01M2N3L3N3M2M3N3M2M4M2N3L3NhVf0"}, "image_id": 422, "id": 6917}, {"iscrowd": 0, "category_id": 1, "bbox": [196.0, 283.0, 69.0, 53.0], "area": 2073, "segmentation": {"size": [512, 512], "counts": "oYR32l?2M4M2N3L3N2M4M2N30O0010O0010O010O00010OO2M2M0100O03N2M3VAmNe>Y1O010O01O01O010O010O00010O010O01O01O010O010O00010ON3M2M4M2M3N3M2M4M2M3N3M2M4M2MhVk3"}, "image_id": 422, "id": 6918}, {"iscrowd": 0, "category_id": 1, "bbox": [276.0, 312.0, 56.0, 63.0], "area": 1988, "segmentation": {"size": [512, 512], "counts": "\\[Z41m?3L3N3M2M3N3L3N3CYO]Ai0a>ZO[Aj0a>YO]Ai0a>YO]Ai0a>=L3N3M2M4M21O01O010OO2M2M4M2O110O010O0010O0010N1N3M2M4M2N2M4M2N3L3N3M2M3N3N101N1N2M4Mjei2"}, "image_id": 422, "id": 6919}, {"iscrowd": 0, "category_id": 1, "bbox": [81.0, 316.0, 76.0, 55.0], "area": 2425, "segmentation": {"size": [512, 512], "counts": "gjX13k?2M4M2M4M2M3N3M2010O0010O0010O0010O00010O010O0M3N3N1010O0010O0010O00N3L3N3L3O110O010O00010O010O00010O010O00010O010O00010O010O00N3L3N2M4M2M4M2Dj@KY?2j@LY?1j@KY?2=MgUa5"}, "image_id": 422, "id": 6920}, {"iscrowd": 0, "category_id": 1, "bbox": [367.0, 317.0, 83.0, 59.0], "area": 2538, "segmentation": {"size": [512, 512], "counts": "ijg51l?3N3M2M3N3M2M4I]Om@e0Q?6M4O0010O00010O010O00010OM4N10010O010O00010O010O0WAlNf>W11O010O01O01O0O2O00010O010O000lNXAQ1l>O01O01O010O01O01O010O01O01O01N1O20XOVA8j>EZA:g>C[A>d>_O`A`0`>^ObAc0m>01O010O000M4M2M4M2M3Nmdn0"}, "image_id": 422, "id": 6921}, {"iscrowd": 0, "category_id": 1, "bbox": [153.0, 336.0, 40.0, 46.0], "area": 1138, "segmentation": {"size": [512, 512], "counts": "ck\\24j?2M3N3L3N2M4M2M4O01O01O0O1N3L3M3N3L3N3O0001L3N210O010O00010O000UOYA@[ABZA;j>AYABZA;i>CZA:h>C[A:i>CZA:W?M3N3L3N^d\\4"}, "image_id": 422, "id": 6923}, {"iscrowd": 0, "category_id": 1, "bbox": [353.0, 372.0, 76.0, 55.0], "area": 2333, "segmentation": {"size": [512, 512], "counts": 
"al`51m?2N3L3N3M2M3N3L3N3M2M3N3N110O010O00010O010O00010O010O00010O010O010O00010O010O00010O010O010O00010O0N3L3N2N30O010O0010O0010N1fN]AU1i>M2M4M2M4M2N2M4M2M4M2M3N3MkSY1"}, "image_id": 422, "id": 6924}, {"iscrowd": 0, "category_id": 1, "bbox": [223.0, 376.0, 83.0, 54.0], "area": 2430, "segmentation": {"size": [512, 512], "counts": "]l_32l?3L3N2M4M2M4M2M3O20O01O010O01O01O010O010O0m@WOP?l001O010O010O00010O01ROPAk0R?01O01N1M4M2N3N11O01O010O010O01O01O010O01O010O01O010O01O01O010OO2M201O00010O010N1M4M2N2M4M2If@E^?86N2M4MccV3"}, "image_id": 422, "id": 6925}, {"iscrowd": 0, "category_id": 1, "bbox": [96.0, 425.0, 79.0, 81.0], "area": 3337, "segmentation": {"size": [512, 512], "counts": "V^`11l?3Y@M^?6_@N4LR?`0l@CT?KQB3Q>JSB2P>LRB2Q>JSB2P>LRB3P>JRB6n=GVB9j=DXB110O00010O01N1N3L3N2M4M2NSQX5"}, "image_id": 422, "id": 6926}, {"iscrowd": 0, "category_id": 1, "bbox": [342.0, 425.0, 66.0, 52.0], "area": 1960, "segmentation": {"size": [512, 512], "counts": "S^[53k?2M4M2N3L3N3L3N2N3L3N3O0010O01O01O010O01O010N1M40O0010O010O010O0010O0010O010O0010O0lNXAQ1l>O0010O010O0010O010O0O2M2M4M2N2N3L3N3M2N3L3N3MRbc1"}, "image_id": 422, "id": 6927}, {"iscrowd": 0, "category_id": 1, "bbox": [217.0, 439.0, 53.0, 61.0], "area": 1979, "segmentation": {"size": [512, 512], "counts": "Po\\32l?2N3L3FHj@:T?Hj@;R?Hk@;S?;O00010N110O0O2M2M3N3M2M4M21O01O010O01O01O010N1M4M2O110O010O00UOiAMX>0kA0T>OmA1T>NmA2R>OmA1S>OmA2S>NmANV>0lANW>NmAOU>OmANV>OmAOV>NmANV>OmAOV>NmANV>0lANWPi3"}, "image_id": 422, "id": 6928}, {"iscrowd": 0, "category_id": 1, "bbox": [478.0, 441.0, 34.0, 71.0], "area": 1456, "segmentation": {"size": [512, 512], "counts": "k__73k?2M4M2N2M4M2M3N2N2M3N2M3N2N2M3N2N2M3N2M3N2N2M3N2M3N2N2N200001O00O1"}, "image_id": 422, "id": 6929}, {"iscrowd": 0, "category_id": 1, "bbox": [7.0, 481.0, 47.0, 31.0], "area": 971, "segmentation": {"size": [512, 512], "counts": "oo31l?3N2N2M3N2M3N2M3N2M3N2M300001O00001O001O00001O001O001O00001O001O00001O001O001O0N2N3L3N3L3N2Me`T7"}, "image_id": 422, "id": 6930}, {"iscrowd": 0, "category_id": 1, "bbox": [316.0, 486.0, 67.0, 26.0], "area": 1003, "segmentation": {"size": [512, 512], "counts": "n_n42l?2N2N2N2N2N2N2N2N2N2N2N21O001O001O00001O001O001O001O00001O001O001O00001O001ON2O11O001O001O00001O001O001O00001O001O001O00001O001O0O2MWPP2"}, "image_id": 422, "id": 6931}, {"iscrowd": 0, "category_id": 1, "bbox": [92.0, 494.0, 15.0, 13.0], "area": 114, "segmentation": {"size": [512, 512], "counts": "f_^11m?2N3L3O1010O01O01O010N1N2M4M`PZ6"}, "image_id": 422, "id": 6932}, {"iscrowd": 0, "category_id": 1, "bbox": [199.0, 495.0, 35.0, 17.0], "area": 360, "segmentation": {"size": [512, 512], "counts": "ooS31l?3N2M3N2M3N2O11O00001O001O001O00001O001O00001O001O001O00001O001JZ@0k?000Q`Z4"}, "image_id": 422, "id": 6933}, {"iscrowd": 0, "category_id": 1, "bbox": [66.0, 498.0, 31.0, 14.0], "area": 271, "segmentation": {"size": [512, 512], "counts": "o_Q11m?2M3N2M3N2O100001O001O001O00001O001O001O00001O001O001O001O0NVP_6"}, "image_id": 422, "id": 6934}, {"iscrowd": 0, "category_id": 1, "bbox": [116.0, 509.0, 6.0, 3.0], "area": 10, "segmentation": {"size": [512, 512], "counts": "m_j13n?O001O0000Q`R6"}, "image_id": 422, "id": 6935}, {"iscrowd": 0, "category_id": 1, "bbox": [109.0, 510.0, 6.0, 2.0], "area": 9, "segmentation": {"size": [512, 512], "counts": "nof12n?0001O0000QPV6"}, "image_id": 422, "id": 6936}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 327.0, 64.0, 142.0], "area": 7115, "segmentation": {"size": [512, 512], "counts": 
"T;X3h<00001O0L4K5L4K6J5L4N201O000001O01O000001O01O0001O01O000001O01O000001O01O0001O01O0000O2J5K5L4K5K6K4K5K5PNQCk0T=POPCk0U=POPCk0U=POPCl0T=POPCk0U=POQCj0U=POPCl0T>K5K6K4K5Kcdo6"}, "image_id": 423, "id": 6937}, {"iscrowd": 0, "category_id": 1, "bbox": [443.0, 350.0, 69.0, 75.0], "area": 1491, "segmentation": {"size": [512, 512], "counts": "Ulm61n?2N2N2M2O2N2N2N1O0O010000000O010000000O10O1001O2N2M3N2N2N2N2M3N2N200O10001O000000^OYAJf>5\\AKd>2`AM`>1bAO^>OdA1\\>MfA3Z>KhA5W>IlA7T>GnA9R>EPB;P>CRB=n=ATB?g>0000000000000000000000000000000VC"}, "image_id": 425, "id": 6938}, {"iscrowd": 0, "category_id": 1, "bbox": [421.0, 444.0, 91.0, 59.0], "area": 1828, "segmentation": {"size": [512, 512], "counts": "aob61l?3M4O01ON3L3M3M4L3M3M4L3M00N2000O1000004O00010O00010O00010O00010O00010O00010O0001BVAEi>8ZAHf>5]AKd>1`AO_>NdA2\\>KgA5Z>GjA9n>00010O00010O00010O0010O00010O00010O00010O00010O00010O00010O0001O01O01O01O01O01O01O01Od@"}, "image_id": 425, "id": 6939}, {"iscrowd": 0, "category_id": 1, "bbox": [10.0, 0.0, 36.0, 23.0], "area": 443, "segmentation": {"size": [512, 512], "counts": "PP51o?00001O001O00001O00001O000]@NX?3d@0\\?0b@3]?80001O001O00001O001O001O00001OO1N2Df@0\\?Mh@O[?Og@O[_Y7"}, "image_id": 426, "id": 6940}, {"iscrowd": 0, "category_id": 1, "bbox": [58.0, 0.0, 37.0, 42.0], "area": 988, "segmentation": {"size": [512, 512], "counts": "cPm02k?3N3M2M4d@Eo>=o@Fm>>PADn>i0M201O00001O001O001O00001O001O001O00A\\A@f>>\\A@f>>]A_Oe>?]A^Of>?]A_Oe>??N2M3N2N2M3NR``6"}, "image_id": 426, "id": 6941}, {"iscrowd": 0, "category_id": 1, "bbox": [101.0, 0.0, 37.0, 43.0], "area": 997, "segmentation": {"size": [512, 512], "counts": "o`b14j?2N3L3N2_OC]A`0`>B]Aa0`>C]A`0b>@\\Ab0d>_OXAd0h>;O001O001O00001O001O0000O2L3N2M4M2M4M2M4M2M3N3M2M4M2Mmoj5"}, "image_id": 426, "id": 6942}, {"iscrowd": 0, "category_id": 1, "bbox": [192.0, 0.0, 63.0, 39.0], "area": 1493, "segmentation": {"size": [512, 512], "counts": "_PP32l?2N3L3N2M4M2O2O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O00001O001O0010O00010UOTA`0l>]OWAd0i>YOZAf0P?010O0010O001L3N3M2M3N3L3N`_P4"}, "image_id": 426, "id": 6943}, {"iscrowd": 0, "category_id": 1, "bbox": [311.0, 0.0, 70.0, 34.0], "area": 1421, "segmentation": {"size": [512, 512], "counts": "X`k42l?2N3M2N201O001O001O001O00001O001O001O00001O001O001O00001O001O001O00001O001O001O001O00001O001O001O00001O001O001OM3N2N2M3N2N2M3N2N2M3N2N2N2MS`Q2"}, "image_id": 426, "id": 6944}, {"iscrowd": 0, "category_id": 1, "bbox": [485.0, 0.0, 27.0, 11.0], "area": 165, "segmentation": {"size": [512, 512], "counts": "P`b71o?001O00001O001O00001O001O001O00001O001O00001O001O0000"}, "image_id": 426, "id": 6945}, {"iscrowd": 0, "category_id": 1, "bbox": [439.0, 24.0, 63.0, 90.0], "area": 3349, "segmentation": {"size": [512, 512], "counts": "abk61m?2M4M2N3L3N2M4M2N3L3N2N3L3N2M4M2N3fAbNm=a1PBcNm=_1QBcNm=i1M4M2M4M200010O01O0_BeM^=_2O00010O01O01O010O01O01L3O20O0010OSNjBDNm0X=\\OmBDNP1V=YOnBEOo0U=YOPCDNP1V=[OlBC0P1V=]OZC?i010O010O01O01O010O010O010O00010O01O0N3L3N2N3L3N3M2N3L3N3M[^]2"}, "image_id": 426, "id": 6947}, {"iscrowd": 0, "category_id": 1, "bbox": [145.0, 35.0, 89.0, 64.0], "area": 2940, "segmentation": {"size": [512, 512], "counts": "SbX23j?3N3L3N3M2O2O01O010O01O0N3L3N3L3N201O010O010O01O01O010O010O010O00010O0O2O0010O00VAnNg>U101O010O000O2O0O2N1O2N1N3M2M3N3M2N3OO2M2N3M2M3N3M2N3L3N3M2N3O00010O0010OM4M2N3M2M4M2N2N3M2M4M2NbnZ4"}, "image_id": 426, "id": 6948}, {"iscrowd": 0, "category_id": 1, "bbox": [100.0, 46.0, 23.0, 18.0], "area": 227, "segmentation": {"size": [512, 512], "counts": 
"hQb11m?2M4M2M40O010O00010O010O00010O010O001M2N2M4M\\^R6"}, "image_id": 426, "id": 6949}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 50.0, 96.0, 69.0], "area": 3105, "segmentation": {"size": [512, 512], "counts": "b1T1l>0010O010O0010ON201O010O01O010O01O01O010O010O01O01O010O010O00010O010O0010O0010O0010O010O0010O0010O010O00010O010O01O01O010O01O010O01O010O01O010O01O01O010O010O00010O010OM3N3M2M4M2N3L3N2N3L3N3MYm_6"}, "image_id": 426, "id": 6950}, {"iscrowd": 0, "category_id": 1, "bbox": [92.0, 63.0, 24.0, 19.0], "area": 262, "segmentation": {"size": [512, 512], "counts": "XR^13j?3N2M4N110O00010O010O00010O010O00010O01M2N2M4MkmU6"}, "image_id": 426, "id": 6951}, {"iscrowd": 0, "category_id": 1, "bbox": [267.0, 73.0, 72.0, 59.0], "area": 2325, "segmentation": {"size": [512, 512], "counts": "XcU43k?2ELg@7V?Mf@7V?Lh@6U?=M4K30001O0O2L30001ON3O0102M1O01O01O01O010O01O01O01O0N2010O0N30O0\\AhN`>\\11dNaAU1e>1O01O0N210O00010O001mNUAP1m>01VORAa0m>]OUAc0k>ZOXAf0R?0O01O01O01O01O010OO2L3M3N3L3M3N3Le\\f2"}, "image_id": 426, "id": 6952}, {"iscrowd": 0, "category_id": 1, "bbox": [99.0, 80.0, 113.0, 89.0], "area": 4104, "segmentation": {"size": [512, 512], "counts": "nca12l?2N2M4M2N3L3N3M2M3N3M2M4M2N3L3N2N3L3N3M20010O010O0010O0010O010O00010O010O01TOjAMU>1mAOS>NPB2Q>KRB5m=HVB8k=EXB;g=C[B=e=@^Ba0b=\\OaBc0_=ZOdBf0\\>0O0010O0010O0010O010O0001O0M4M2M3N3L3010O010O00010O010O010O00010O010O00010O010O010O00010O010O0010O0010M2M4M2M3N3L3N3L3N3L3N2M4M2Mnke4"}, "image_id": 426, "id": 6953}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 123.0, 50.0, 52.0], "area": 1687, "segmentation": {"size": [512, 512], "counts": "a4>_?4M2M3N3L3N3L3N2N30O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O00001O010O0BWAFh>8ZAHg>4]AKc>3_AMb>ObAN`>0bAMa>0bANa>ObAMa>0^lV7"}, "image_id": 426, "id": 6954}, {"iscrowd": 0, "category_id": 1, "bbox": [240.0, 142.0, 65.0, 57.0], "area": 2258, "segmentation": {"size": [512, 512], "counts": "]Uh32l?3M2M4M2N3L3N2N3M2M4M201O00010O001N1M4M2O110O0010O010O00010O010O01O010O00010O010O0010O0010O0010O010O00010O010O0N2M4M2N3L3_OQAOQ?OQANS?NPA0R?NQAORjW3"}, "image_id": 426, "id": 6955}, {"iscrowd": 0, "category_id": 1, "bbox": [72.0, 148.0, 73.0, 59.0], "area": 2693, "segmentation": {"size": [512, 512], "counts": "fUT11l?4M2N3M2M4M2N2M4M2N3L3N2N5K2N3O00010O010O010O00010OO2NO30O0O101O010O010O01O01O010ON3M2N3N10010O3M010O01O010O01OO2M2N3L3N3M2M3N3N1ROSAi0R?01N1N2N3Ce@0^?Ne@O^?Me@1Wjg5"}, "image_id": 426, "id": 6956}, {"iscrowd": 0, "category_id": 1, "bbox": [213.0, 162.0, 20.0, 22.0], "area": 257, "segmentation": {"size": [512, 512], "counts": "_eZ33k?3M2M4M2N3M210O01O010O010N1N2M4M2N3M2NjZ[4"}, "image_id": 426, "id": 6957}, {"iscrowd": 0, "category_id": 1, "bbox": [227.0, 177.0, 14.0, 12.0], "area": 106, "segmentation": {"size": [512, 512], "counts": "gea32l?2N2N3O0010O00010O010O0M3N^ZW4"}, "image_id": 426, "id": 6958}, {"iscrowd": 0, "category_id": 1, "bbox": [380.0, 177.0, 74.0, 68.0], "area": 2720, "segmentation": {"size": [512, 512], "counts": "hVn53e?9H7J70O0010O010O00010O010l@ZOl>f0RA\\On>l00O010O0010O0010O010O00010O01M2M3N3L3N3M2M3N3L3N3L3N3O000010O0O2M2M4M2M3N3N101O0JbAhN`>V1bAhNa>V1510O000O2L3N3M2M4M2M3N3L3N3M2M3NUjl0"}, "image_id": 426, "id": 6959}, {"iscrowd": 0, "category_id": 1, "bbox": [238.0, 180.0, 25.0, 22.0], "area": 325, "segmentation": {"size": [512, 512], "counts": "PVg32k?4M2N3L3N3O01O010O01O010O010O010O01O0O2M2M3N3M2NVZl3"}, "image_id": 426, "id": 6960}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 191.0, 34.0, 79.0], "area": 1812, "segmentation": {"size": [512, 512], "counts": 
"o5e1=aN[=`1bBbN^=^1_BfNa=Z1[BiNe=W1YBkNg=h10O0010O0010O00010O00010O010L3QOXBCk=:XBDk=8XBEk=8YBDj=:XBCl=9WBEk=8YBDj=;WBBm==SBAo=>k0L3N2M4Lhi^7"}, "image_id": 426, "id": 6961}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 196.0, 50.0, 72.0], "area": 2208, "segmentation": {"size": [512, 512], "counts": "fVW72V10]=3aB0\\=3aBO\\=4XBHA8T>2XBIB7S>4XBHB7S>3XB:f=IWB9h=HUBm=BPBa0o=h01O01O010O01O01O010O010O01O01O010ON3M2N2M4M2M4M2M3N3M2M4M2M4M2M3N3M2M4M2M3N3L^I"}, "image_id": 426, "id": 6962}, {"iscrowd": 0, "category_id": 1, "bbox": [48.0, 206.0, 74.0, 55.0], "area": 2398, "segmentation": {"size": [512, 512], "counts": "jVh07g?4g@He>:XAHg>:UAJi>8TAJl>6RAMl>d00001O001O00010L3XAoN_>[1M210O010O0010O010N101N101O0O1O2O001O0M4N1O2O0O101N101O0O2O0O2O00001O001M2O02M2010O010O010O00010O0O2L3N3M2N3L3N2N3M2MPiR6"}, "image_id": 426, "id": 6963}, {"iscrowd": 0, "category_id": 1, "bbox": [245.0, 207.0, 62.0, 52.0], "area": 1839, "segmentation": {"size": [512, 512], "counts": "\\gj33k?2M3N3L3N3L3N2M4M2M4M2N2010O01O010O01O010O01O01O010O01O010O01O010O01O01O010O01O010O01O01O010O01O010M2M3N3M2M4M2M3N3M2M4M2MPYV3"}, "image_id": 426, "id": 6964}, {"iscrowd": 0, "category_id": 1, "bbox": [214.0, 234.0, 28.0, 28.0], "area": 470, "segmentation": {"size": [512, 512], "counts": "nW[31m?2M4M2M3M4M2M4M20010O0010O010O0010O0001M2N3L3N3M2M3N3M`hV4"}, "image_id": 426, "id": 6965}, {"iscrowd": 0, "category_id": 1, "bbox": [428.0, 246.0, 19.0, 23.0], "area": 257, "segmentation": {"size": [512, 512], "counts": "VXf62l?3L3N2M4M2M4N110O00010OO2L3N3M2M3N3LXXP1"}, "image_id": 426, "id": 6966}, {"iscrowd": 0, "category_id": 1, "bbox": [241.0, 262.0, 62.0, 55.0], "area": 1916, "segmentation": {"size": [512, 512], "counts": "Sih31m?2M3N3L3N3M2M3N3L3N3L3N201O010O01O01O01O010O01O01O01XAnNa>Q1]AROb>V10010O0010O0010O0010O0010O000N3M2O2ON3L3N2M4L3N2M4M2M4M2M3N3L3M4M2MaWX3"}, "image_id": 426, "id": 6967}, {"iscrowd": 0, "category_id": 1, "bbox": [185.0, 283.0, 37.0, 46.0], "area": 671, "segmentation": {"size": [512, 512], "counts": "Tjl21m?3M2N3M2N3M2N3M2N1L3OKn@@R?`0QA]On>d060O1O1020010O010O01O01O010O01O0M4M2N2M4M2N3M2MPg`4"}, "image_id": 426, "id": 6968}, {"iscrowd": 0, "category_id": 1, "bbox": [376.0, 287.0, 77.0, 57.0], "area": 2471, "segmentation": {"size": [512, 512], "counts": "lYl51l?3N2M4M2M4M2M3N3L3N3L3O101O010O01O01O01O010O01O01O010O01O01O010O01O01O010ON3L3O110O010O00010O01O01O010O000N3N1010O00010O010O0[O_AKa>2bAO^>NeA1[>LhA5W>IlA6U>FnA:R>CQB>o=_OTB?g>N3L3N2M4L3NkUm0"}, "image_id": 426, "id": 6969}, {"iscrowd": 0, "category_id": 1, "bbox": [95.0, 309.0, 67.0, 101.0], "area": 4137, "segmentation": {"size": [512, 512], "counts": "nk_11;0U?4g@0V?2h@0V?3g@0W>IbBf0\\=\\ObBg0[=\\O\\BJDl0m=]O\\BJDl0n=\\O\\Bm0a=VO\\Bl0a=WO\\Bm0b=UO[Bm0e=h0O01O010O010O01O01O010O010OO2M2M3N3M2M4M2N3L3N2NO4M2M3N3M2\\MhB]2_=01N1M4M2N1N102N3L3N30OQOmA1S>OmA2S>MnA2R>OmA1T>LoA4P>JRBBK7V>3SBBK7U>5RBBK7U>5RBBK7V>3^BIe=5]BIf=4T1M4MPf^5"}, "image_id": 426, "id": 6970}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 318.0, 60.0, 62.0], "area": 2085, "segmentation": {"size": [512, 512], "counts": "Y;7_?Jh@9U?Ih@:V?9M4O010O000N3L3N3L3N2N3L3N3L3N2M4O010O0010O0010O0010N1M4M2N1N102M4M2M3N3L3N3L30002O0O010O00010O010M2M3N3M2M4M2M3NgeQ7"}, "image_id": 426, "id": 6971}, {"iscrowd": 0, "category_id": 1, "bbox": [217.0, 318.0, 73.0, 52.0], "area": 1956, "segmentation": {"size": [512, 512], "counts": "gj\\31m?2M4M2N3L3N3M2M3N3M2N3O001O010O01O010O01O010O01O01O010O010O01O01O010O010O01O01O010O010O01O01O010O010O010O00010O010O010O0001M2N3L3N3M2M3N3M2N3L3N[e^3"}, "image_id": 426, 
"id": 6972}, {"iscrowd": 0, "category_id": 1, "bbox": [504.0, 339.0, 8.0, 22.0], "area": 98, "segmentation": {"size": [512, 512], "counts": "T[l72k?4M2N3L3N2N3L3]E"}, "image_id": 426, "id": 6973}, {"iscrowd": 0, "category_id": 1, "bbox": [456.0, 341.0, 32.0, 22.0], "area": 353, "segmentation": {"size": [512, 512], "counts": "jZT71m?3M2O2O0010O01[@Ha?7]@Lb?910N1O2O010ON30O010O0010O0010O010OEa@5e?1O01O0M4Mnd;"}, "image_id": 426, "id": 6974}, {"iscrowd": 0, "category_id": 1, "bbox": [370.0, 344.0, 63.0, 62.0], "area": 2377, "segmentation": {"size": [512, 512], "counts": "R\\i52l?2M3FKg@8U?Ki@7U?^OVAd0g>;aAG_>9^AJb>k00O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010ON0O111010O0EaAUO`>h0bAVO`>g0cAWO_>f0eAVO_>g0>M4M2M3N3L3N3L3NZcW7"}, "image_id": 426, "id": 6980}, {"iscrowd": 0, "category_id": 1, "bbox": [97.0, 402.0, 46.0, 36.0], "area": 920, "segmentation": {"size": [512, 512], "counts": "Sm`13k?2M4M2N3M2N3M2N201O010O010O010O00010k@ZOn>f0PA]OP?i0010O001M2N2N30O010O010O010O00010OO2M2N3M2M4M2N2N3MPSh5"}, "image_id": 426, "id": 6981}, {"iscrowd": 0, "category_id": 1, "bbox": [157.0, 415.0, 83.0, 60.0], "area": 2428, "segmentation": {"size": [512, 512], "counts": "mm^21m?2M4M2M3N3L3N3M2M3N3L3N3N11O01O010O01O01O010O010O00010O010O00010O010O00010O010O00010O010O01O01O010O01O01O010O01O01O010O010O00TOWAa0j>\\OYAd0f>YO]Ag0n>10O0010O0010O001N1N2M4M2M4M2N2M4MlaW4"}, "image_id": 426, "id": 6982}, {"iscrowd": 0, "category_id": 1, "bbox": [441.0, 427.0, 53.0, 57.0], "area": 2202, "segmentation": {"size": [512, 512], "counts": "cnl68W?a0I70007I000G90000001O00000L400000000000N2J7O0000000000000001O00dA_NY>e1O00000000000000UOkAKU>IWB7i=^ObBb0_>O000000000001OM3@Sb8"}, "image_id": 426, "id": 6983}, {"iscrowd": 0, "category_id": 1, "bbox": [58.0, 435.0, 56.0, 75.0], "area": 1999, "segmentation": {"size": [512, 512], "counts": "`_m04j?2N3L3N2N3L3J_Ok@d0Q?7N2N3N101L3N3M2M3N3M2M40O0001M2N3L3NO100O011N3N3M2M4M2M4M2N2M4M2M4M2M4M2N2M4M2O20O01F`@3_?Je@5c?010ON2MnaV6"}, "image_id": 426, "id": 6984}, {"iscrowd": 0, "category_id": 1, "bbox": [116.0, 436.0, 33.0, 33.0], "area": 687, "segmentation": {"size": [512, 512], "counts": "R^j145M_?5^@N_?;N3L3N30O010OM3O20O010O0010O0010O010O00010O0M4M2N3L3N2M4M2N3LURe5"}, "image_id": 426, "id": 6985}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 456.0, 27.0, 52.0], "area": 880, "segmentation": {"size": [512, 512], "counts": "X>a1`>O010O01O01O010N1N001N4M2N3L3N2N3L3N3L3N3M2M3N3M2M4MbQb7"}, "image_id": 426, "id": 6986}, {"iscrowd": 0, "category_id": 1, "bbox": [314.0, 457.0, 23.0, 22.0], "area": 292, "segmentation": {"size": [512, 512], "counts": "g^m41m?2M4M2N3M2M3010O010O01O010O01O010M2N3M2M3N3MbQg2"}, "image_id": 426, "id": 6987}, {"iscrowd": 0, "category_id": 1, "bbox": [319.0, 460.0, 52.0, 52.0], "area": 1709, "segmentation": {"size": [512, 512], "counts": "coo41l?4M2N3a@HQ?;m@GQ?;l@HQ?e0N21O001O00M3N2N2M3N2N2M3N2N2M31O001O001O00001O001O001O00001O0N3M2M3N3L3N3L3N3L3N2M4M2M4M2MZQV2"}, "image_id": 426, "id": 6988}, {"iscrowd": 0, "category_id": 1, "bbox": [399.0, 463.0, 15.0, 15.0], "area": 136, "segmentation": {"size": [512, 512], "counts": "gnW63j?3N3M2O2O010O010O00N3M2N3M2N_a`1"}, "image_id": 426, "id": 6989}, {"iscrowd": 0, "category_id": 1, "bbox": [147.0, 466.0, 64.0, 46.0], "area": 1793, "segmentation": {"size": [512, 512], "counts": "`oY21l?4M2N2M4M2M4M2M3N3M2M4M2O110O0010OO2O00010O0010O0010O0010O0010O0010O010O001O000QOWAg0j>VOXAj0o>01O001O001O00001O001O00001O001M2N2M4M2M4M2NfPf4"}, "image_id": 426, "id": 6990}, {"iscrowd": 0, "category_id": 1, "bbox": [380.0, 472.0, 26.0, 
27.0], "area": 436, "segmentation": {"size": [512, 512], "counts": "Y_n53k?3M2M4M2N2N3L3O2O01O010O010O01O01O001L3N3L3N2N3L3NSad1"}, "image_id": 426, "id": 6991}, {"iscrowd": 0, "category_id": 1, "bbox": [93.0, 483.0, 32.0, 29.0], "area": 591, "segmentation": {"size": [512, 512], "counts": "oo^11m?2M3N2M3N2M3M3N2M3N2M30000001O001N1N201O010O01OO2M2M4M2N3L3N2MiPQ6"}, "image_id": 426, "id": 6992}, {"iscrowd": 0, "category_id": 1, "bbox": [410.0, 483.0, 26.0, 21.0], "area": 491, "segmentation": {"size": [512, 512], "counts": "f_]61e?:G90000000001O00000000000000000001O0000000000000DXaU1"}, "image_id": 426, "id": 6993}, {"iscrowd": 0, "category_id": 1, "bbox": [13.0, 484.0, 32.0, 28.0], "area": 574, "segmentation": {"size": [512, 512], "counts": "oo61m?2N2M3N2M3N2N2M3N2M3N2O11O00001O001O001O00001O001L3N2N3L3N3L3NfPY7"}, "image_id": 426, "id": 6994}, {"iscrowd": 0, "category_id": 1, "bbox": [84.0, 499.0, 11.0, 13.0], "area": 81, "segmentation": {"size": [512, 512], "counts": "n_Z12l?2N2M3N2N21O0N3M2N3M\\P`6"}, "image_id": 426, "id": 6995}, {"iscrowd": 0, "category_id": 1, "bbox": [465.0, 502.0, 21.0, 10.0], "area": 113, "segmentation": {"size": [512, 512], "counts": "ooX71n?100O1O100O1O100O1O100O1O1002N1O2N1O1O2NQ`<"}, "image_id": 426, "id": 6996}, {"iscrowd": 0, "category_id": 1, "bbox": [138.0, 509.0, 7.0, 3.0], "area": 14, "segmentation": {"size": [512, 512], "counts": "o_U21m?200001O1O00QPg5"}, "image_id": 426, "id": 6997}, {"iscrowd": 0, "category_id": 1, "bbox": [33.0, 0.0, 38.0, 16.0], "area": 403, "segmentation": {"size": [512, 512], "counts": "P``03m?3M3M3M3M1O00O1000000O10000O10000O10000O10000O10000O1000000O10000O10002M4Mk_l6"}, "image_id": 427, "id": 6998}, {"iscrowd": 0, "category_id": 1, "bbox": [74.0, 0.0, 53.0, 31.0], "area": 1070, "segmentation": {"size": [512, 512], "counts": "VPU14l?3L3N3M2M4M2M4M3M0O10O010O010O10000O100O100O10000O100O100O10000O100O100O10000O100O100O10000O100O3N2M4M2Ng_P6"}, "image_id": 427, "id": 6999}, {"iscrowd": 0, "category_id": 1, "bbox": [266.0, 0.0, 58.0, 48.0], "area": 1506, "segmentation": {"size": [512, 512], "counts": "hPU42m?3N1N3N2M3M3N2M3N2M2O2M3N0O010O010O0010O010O01EROdAm0]>UO`Al0`>UO_Aj0b>XO[Ah0f>90O100O100O100O100O100O1O100O100O2O2M3N2M2O2M3N2M3N2M3M3N1NZom2"}, "image_id": 427, "id": 7000}, {"iscrowd": 0, "category_id": 1, "bbox": [381.0, 0.0, 31.0, 20.0], "area": 413, "segmentation": {"size": [512, 512], "counts": "P`n54l?2N2N3M2N3M2N2NO100O10000O100O10000O100O10000O100O102N3L3N2Mhoa1"}, "image_id": 427, "id": 7001}, {"iscrowd": 0, "category_id": 1, "bbox": [438.0, 0.0, 53.0, 21.0], "area": 630, "segmentation": {"size": [512, 512], "counts": "PPk61o?3M3M2N3M2N3M3M1O0000O100O10000O100O100O10000O100O100O10000O100O100O10000O100O100O10000O100O10000O100O2Oo_:"}, "image_id": 427, "id": 7002}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 2.0, 17.0, 21.0], "area": 219, "segmentation": {"size": [512, 512], "counts": "7`0`?O010O010O010O010O011N3N2M3N2M3Nc_g7"}, "image_id": 427, "id": 7003}, {"iscrowd": 0, "category_id": 1, "bbox": [362.0, 10.0, 10.0, 10.0], "area": 54, "segmentation": {"size": [512, 512], "counts": "^Pe51n?3N2M2N010O03N2M2OaoU2"}, "image_id": 427, "id": 7004}, {"iscrowd": 0, "category_id": 1, "bbox": [334.0, 12.0, 32.0, 27.0], "area": 436, "segmentation": {"size": [512, 512], "counts": "iPW52n?2M3N2M3M3N2M10O010O010O010O010O010O0010O0010O012M3N2M3N2M2O2MXoX2"}, "image_id": 427, "id": 7005}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 16.0, 29.0, 30.0], "area": 491, "segmentation": {"size": [512, 512], "counts": 
"j02m?2O2M3N2M3N2M3N2M2O2M10O010O010O01O02N3N2M3N1N3N2M3N2M3Nn^a7"}, "image_id": 427, "id": 7006}, {"iscrowd": 0, "category_id": 1, "bbox": [459.0, 17.0, 53.0, 55.0], "area": 1592, "segmentation": {"size": [512, 512], "counts": "XaU73l?3N1N3N2M2O2M2N3N2M2O2M3N110000OO2M2O0O0010O0010O01O01HPO_AP1a>SO]Am0c>TO[Al0e>710O00010O010O01O01O01O2O2M3N1N3M3N2M2N3N2MoN"}, "image_id": 427, "id": 7007}, {"iscrowd": 0, "category_id": 1, "bbox": [98.0, 31.0, 62.0, 48.0], "area": 1603, "segmentation": {"size": [512, 512], "counts": "ZQa13m?2M3N2M3M3N2M3N2M2O2M3N2M3N2O1O1000O1M3N1N10O01O01O010O010O010O010O01O01O010O010O010O010O010O000102M3N2M3N2M2O2M3N2M3N2M2N\\n_5"}, "image_id": 427, "id": 7008}, {"iscrowd": 0, "category_id": 1, "bbox": [361.0, 32.0, 25.0, 22.0], "area": 306, "segmentation": {"size": [512, 512], "counts": "Yad52n?2M3N2M3M3N1N100O010O010O01O010O011N3N2M3N2M2O2Menn1"}, "image_id": 427, "id": 7009}, {"iscrowd": 0, "category_id": 1, "bbox": [291.0, 37.0, 55.0, 55.0], "area": 1442, "segmentation": {"size": [512, 512], "counts": "Uba41o?2M3N2M3N2M3N1N3N2M3M3N1N10O0010O010O010CTOeAl0[>WObAj0^>WOaAh0_>[O^Af0b>\\O\\Ac0d><10O0010O010O010O010O012M3M3N1N3N2M3N2M3N2M2O2M3M3N2M3N1Nmmb2"}, "image_id": 427, "id": 7010}, {"iscrowd": 0, "category_id": 1, "bbox": [418.0, 38.0, 31.0, 28.0], "area": 505, "segmentation": {"size": [512, 512], "counts": "`Qa62m?3N2N2M3N2M3N2M3N0O10O010O010O010O010O010O102M3N2M3N2M3N2M3NZ^o0"}, "image_id": 427, "id": 7011}, {"iscrowd": 0, "category_id": 1, "bbox": [61.0, 40.0, 29.0, 32.0], "area": 528, "segmentation": {"size": [512, 512], "counts": "aan02m?3N2M4M2M3N2N2M3N2M3NO010O010O010O012N2M3N2M3N3L3N2M3N2MTnb6"}, "image_id": 427, "id": 7012}, {"iscrowd": 0, "category_id": 1, "bbox": [363.0, 53.0, 34.0, 28.0], "area": 495, "segmentation": {"size": [512, 512], "counts": "Rbe53m?2M3N2M3N2M3M10O010O010O010O010O010O010O0010O010O0102M3N2M3N2M2O2Mo]i1"}, "image_id": 427, "id": 7013}, {"iscrowd": 0, "category_id": 1, "bbox": [27.0, 59.0, 26.0, 28.0], "area": 370, "segmentation": {"size": [512, 512], "counts": "Tb=1o?2M2N3N1N3N2M2N3N1N3M3NO01O03M2O2M2N3N2M2N3N1N3M2Od]U7"}, "image_id": 427, "id": 7014}, {"iscrowd": 0, "category_id": 1, "bbox": [132.0, 63.0, 57.0, 59.0], "area": 1763, "segmentation": {"size": [512, 512], "counts": "QSR21n?3N1N3M3N2M2O2M3N2M2N3N2M3@SOiAn0V>TOgAm0X>UOgAj0Z>XOcAi0\\>ZObAe0_>=01O010O010O01O01O010O010O010O01O010O2O2M3N2M2N3N2M3N2M2O2M3M3N1N3N2M3N1N3MQ]Q5"}, "image_id": 427, "id": 7015}, {"iscrowd": 0, "category_id": 1, "bbox": [477.0, 69.0, 35.0, 56.0], "area": 1073, "segmentation": {"size": [512, 512], "counts": "Tc^71n?3M2N2O2M2N3N1N2N3M2O2M2N2O2M2N3M2OO0001IlNbAS1^>oN`AQ1a>7O0000010O00010O0000010kM"}, "image_id": 427, "id": 7016}, {"iscrowd": 0, "category_id": 1, "bbox": [7.0, 70.0, 29.0, 29.0], "area": 464, "segmentation": {"size": [512, 512], "counts": "_b33m?1N3N2M2N3N2M2O2M2N3N1N10O00010O102M3M2O2M2O2M3M2O2M3N1NYm]7"}, "image_id": 427, "id": 7017}, {"iscrowd": 0, "category_id": 1, "bbox": [314.0, 84.0, 60.0, 54.0], "area": 1708, "segmentation": {"size": [512, 512], "counts": "bSm42n?2M2O2M3N2M2O2M3M3N1N3EWO[Ak0c>WOZAk0d>XOZAg0g>9O010O010O010O01O010O01O010O01O010O010O010O3N0O00010O010O010O003N2M2O2M3N2M3N2K5M3N2M2O2MelT2"}, "image_id": 427, "id": 7018}, {"iscrowd": 0, "category_id": 1, "bbox": [433.0, 86.0, 34.0, 31.0], "area": 468, "segmentation": {"size": [512, 512], "counts": "Xch61o?2M2N3N1N2N3M2O2M0001O01O01O01O00010O0001O01O01O01O03M2O2M2N3N1N3MP]f0"}, "image_id": 427, "id": 7019}, {"iscrowd": 0, "category_id": 1, "bbox": [71.0, 87.0, 48.0, 
32.0], "area": 721, "segmentation": {"size": [512, 512], "counts": "\\cS11n?3N2M3N2M3N0O10O010O01O010O3N0O10O010O0010O010O010O010O0010O010O010O010O0010O0101N2O2M3N2M3N2Mn\\T6"}, "image_id": 427, "id": 7020}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 95.0, 20.0, 41.0], "area": 488, "segmentation": {"size": [512, 512], "counts": "o2Y1g>O101N3N2M3M3NO010O01[Om@=R?CQA:Q?EPA:R?CQA:R?DPA9^?N2M2O2MUle7"}, "image_id": 427, "id": 7021}, {"iscrowd": 0, "category_id": 1, "bbox": [48.0, 98.0, 29.0, 36.0], "area": 510, "segmentation": {"size": [512, 512], "counts": "bSh03m?2M3N2M2O2M3N2M3M3NO01HZOVAe0k>]ORAd0m>^ORAa0o>8O03N2M5L2M3M3N1N3N2M3N2M3NU\\i6"}, "image_id": 427, "id": 7022}, {"iscrowd": 0, "category_id": 1, "bbox": [376.0, 103.0, 35.0, 28.0], "area": 484, "segmentation": {"size": [512, 512], "counts": "eSl51n?2O2M3N2M3N2M3NO010O01O01O010O010O010O010O010O2OO01O010O010Lb@Ea?95M3N2Mb\\b1"}, "image_id": 427, "id": 7023}, {"iscrowd": 0, "category_id": 1, "bbox": [131.0, 120.0, 83.0, 65.0], "area": 2280, "segmentation": {"size": [512, 512], "counts": "VeQ21o?2M3N2M2N3N2M3N1N3N2M1O010O01O010O01O010O01O010O\\OZOoAg0P>\\OmAd0T>^OjAb0U>AhA?Y>CeA<[>GbA:]>IaA6`>e0O010O01G`AQO`>P1bAnN]>R1eAlN\\>U18010OOO010O010O0010O0010O0101N2O2M2N10O0010O010O0010O0010O010O1O3N2M2O2M3M3N1N3N2M3N1Nfkd4"}, "image_id": 427, "id": 7024}, {"iscrowd": 0, "category_id": 1, "bbox": [424.0, 132.0, 31.0, 32.0], "area": 568, "segmentation": {"size": [512, 512], "counts": "YTd6161a?0]@3`?0]@2a?8N3N1O2O01O01M2O2M2N2OO01O0002N2O2M2N3N1N2N3M2O2M2N2O2M[[l0"}, "image_id": 427, "id": 7025}, {"iscrowd": 0, "category_id": 1, "bbox": [338.0, 134.0, 56.0, 51.0], "area": 1470, "segmentation": {"size": [512, 512], "counts": "RUY51n?2N3N1N3N2M2N3N1N3M2H[OSAg0k>[ORAh0k>8O2M1O010O00010O00010O010O00010O00010O000100O2O2M3M2O2M2N3N1N3M100O010O002O1N3K]@He?65N2MVkj1"}, "image_id": 427, "id": 7026}, {"iscrowd": 0, "category_id": 1, "bbox": [468.0, 135.0, 32.0, 30.0], "area": 444, "segmentation": {"size": [512, 512], "counts": "iTZ72m?3M2N2O2M2N3M2N010O0000010O000000010O00000010O2N2N2O1N3M2N2N3N_k5"}, "image_id": 427, "id": 7027}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 143.0, 46.0, 49.0], "area": 1290, "segmentation": {"size": [512, 512], "counts": "P5o0P?2OO010O010O010O010O010O0010O0010O010O010O010O010O010O0010O0010O102M3N2M3N2M3N2M2O2M3N2M3MkjX7"}, "image_id": 427, "id": 7028}, {"iscrowd": 0, "category_id": 1, "bbox": [14.0, 150.0, 88.0, 88.0], "area": 2813, "segmentation": {"size": [512, 512], "counts": "fV72m?3N1N3M3N2M3N2M2F\\OVAf0h>\\OUAg0h>\\OVAf0h>;M010O01O01O010O010O0NjN\\AV1d>201O01O010O010O010O010O102M3M2O2M3N2M2OO0010O010O01HWOYAh0h>ZOUAf0k>]OSAc0m>7010O01O010\\OPOWBP1j=ROTBn0k=UORBk0o=WOoAh0Q>[OlAf0T>[OkAd0U>_OhAb0W>AgA>Z>d0O010OVOhAMY>3hALW>4lAIU>6nAGR>:PBDP>;SBBm=?UB_Oj=a0XB]Oj=b0XB\\Oj=a0YB\\Oj=b0l0M3N2M2N3N2M3Nhj\\6"}, "image_id": 427, "id": 7029}, {"iscrowd": 0, "category_id": 1, "bbox": [173.0, 163.0, 61.0, 53.0], "area": 1643, "segmentation": {"size": [512, 512], "counts": "lef21o?2M3N2M3M3N2M3N1N3N2M3M3N2M3NO0010O0010O010O0010O010O0010O010O0010O0010O010O010O00010O010O102M2N3N2M3N2M2O2M3N2M2N3N2M3NTjZ4"}, "image_id": 427, "id": 7030}, {"iscrowd": 0, "category_id": 1, "bbox": [370.0, 167.0, 55.0, 61.0], "area": 1621, "segmentation": {"size": [512, 512], "counts": "^Vi51n?2O1N2N3M2O1N3M2N2N2O2M2N2N2DTOaAo0\\>TObAm0W>QOeA42m0W>QOeA42k0Z>?0001O0001O0001O01O00002O1N2N3M2O1N201N1N2N3N1N2N2N3M2O1N2Kd@C_?:5O1N3M2N2NlY[1"}, "image_id": 427, "id": 7031}, {"iscrowd": 0, "category_id": 1, "bbox": [446.0, 168.0, 37.0, 35.0], "area": 
614, "segmentation": {"size": [512, 512], "counts": "mUo62m?2N2O2M2N2N3N1N2N1O01O01O0000011N2N2N3N1N\\Oo@DSA;l>FUA8l>GTA:k>FUA:k>FWA8i>HYA6i>IYA4i>JWA7j>GWA8X?01O2O2M2N2NSZ>"}, "image_id": 427, "id": 7032}, {"iscrowd": 0, "category_id": 1, "bbox": [123.0, 179.0, 39.0, 39.0], "area": 708, "segmentation": {"size": [512, 512], "counts": "Ufm12n?2M3N2AHQA;l>HRA:l>HQA;l>HRA:l>HQA;l>?N1N10O2O2M3N2M3NO02O1N3N2M3N2M3N0E]@8c?J^@4a?N`@O`?3a@J_?840O000002N2N2N2Oli^5"}, "image_id": 427, "id": 7033}, {"iscrowd": 0, "category_id": 1, "bbox": [501.0, 196.0, 11.0, 20.0], "area": 129, "segmentation": {"size": [512, 512], "counts": "[fj72m?2N2N2N3M2N2O100N2N2kI"}, "image_id": 427, "id": 7034}, {"iscrowd": 0, "category_id": 1, "bbox": [85.0, 202.0, 31.0, 28.0], "area": 476, "segmentation": {"size": [512, 512], "counts": "efZ12n?2M3N2M3N2M3N2M2OO010O010O010O010O010O010O011N3N2M3N2M3N2M3NWiU6"}, "image_id": 427, "id": 7035}, {"iscrowd": 0, "category_id": 1, "bbox": [483.0, 210.0, 26.0, 25.0], "area": 348, "segmentation": {"size": [512, 512], "counts": "nfa72n?1N2N2N3M2N2N2N2O1N2N1O001O01O2N2N2N2N3M2O1N2N2N2NTY1"}, "image_id": 427, "id": 7036}, {"iscrowd": 0, "category_id": 1, "bbox": [200.0, 214.0, 59.0, 47.0], "area": 1531, "segmentation": {"size": [512, 512], "counts": "RWT31o?2M2O2M2O2M3M2O2M2O2M3M2O2N110O10OO2N1N3M2O1N010O00010O0101N3M10O01O010O00010O010O00010O1O2O2M3M2O2M3N1N3M3N2M2N3N2MeXn3"}, "image_id": 427, "id": 7037}, {"iscrowd": 0, "category_id": 1, "bbox": [402.0, 216.0, 51.0, 54.0], "area": 1378, "segmentation": {"size": [512, 512], "counts": "jWY61n?2N2N3M2N2N2N2N2G@o@b0o>@o@b0o>@o@b0o>9N2N2O1N2N2N2N00000000001O00000001O2N2N2N2N2N2N2N2N3M2N2N2N2N2N2N2O1N2N2N2N2NcXm0"}, "image_id": 427, "id": 7038}, {"iscrowd": 0, "category_id": 1, "bbox": [141.0, 224.0, 14.0, 15.0], "area": 112, "segmentation": {"size": [512, 512], "counts": "TgV21n?3N2M2O2N2M2OO003N2M2O2M3MhXb5"}, "image_id": 427, "id": 7039}, {"iscrowd": 0, "category_id": 1, "bbox": [41.0, 229.0, 57.0, 58.0], "area": 1970, "segmentation": {"size": [512, 512], "counts": "^hd01o?2M3WOKcA7Z>LcA7[>KcA7Z>MbA6\\>LbA6[>McA5[>MbA4]>OaA0`>2]AOb>g010O010O010O010O010O0100O010O010O010O0100O010O010O0100O3N2M3N0O10O010O0100O3N3]On@1U?Ln@2T?Ln@1U?Ln@2T?Ln@1c?Njg^6"}, "image_id": 427, "id": 7040}, {"iscrowd": 0, "category_id": 1, "bbox": [127.0, 234.0, 51.0, 37.0], "area": 946, "segmentation": {"size": [512, 512], "counts": "mgo12n?2M3N2M3N2N2M3N2M100O010O10O010O010O010O010O010O010O010O010O010O10O010O010O010O10O102M3N2M3N2M3N2M4MUhV5"}, "image_id": 427, "id": 7041}, {"iscrowd": 0, "category_id": 1, "bbox": [469.0, 238.0, 16.0, 16.0], "area": 138, "segmentation": {"size": [512, 512], "counts": "egZ71n?2N2N2O2M2N2N2N11N2N2N2O1N3M2N[X="}, "image_id": 427, "id": 7042}, {"iscrowd": 0, "category_id": 1, "bbox": [439.0, 246.0, 53.0, 61.0], "area": 1460, "segmentation": {"size": [512, 512], "counts": "ohk62n?1N3M2N2N2N2N2N2N2N2N2N2N2N3\\OSOQBn0m=TOQBn0m=TOQBl0o=VOoAj0Q>XOmAh0S>ZOkAf0U>\\OiAd0W>_OfAa0Z>c00O00001O2N2N2N2N2N3M2N2N2N2O1N2N2N2N2N2N2N2N3M2N2N2N2O1N2N`g9"}, "image_id": 427, "id": 7043}, {"iscrowd": 0, "category_id": 1, "bbox": [230.0, 258.0, 59.0, 48.0], "area": 1457, "segmentation": {"size": [512, 512], "counts": "cXc31o?1N3M2N2O2M2N2N3N1N2N3M2O1O20OO1N3N1N2N30O00001NO01O0001O01O0001O01O0001O01O0001O01O2N2O2M2N2N3N1N2N3M2O1N3M2N2O2M2NZW_3"}, "image_id": 427, "id": 7044}, {"iscrowd": 0, "category_id": 1, "bbox": [59.0, 277.0, 54.0, 55.0], "area": 1629, "segmentation": {"size": [512, 512], "counts": 
"dim01n?3N2M3M3N1CDSA?j>DTA>j>DSA?j>DTA>i>?N2M3NO010O010O010O0010O0010O010O010O010O010O010O00102M3N1N3N2M3N2M3N2M2O2M3M3N2M3N2M2O\\VW6"}, "image_id": 427, "id": 7045}, {"iscrowd": 0, "category_id": 1, "bbox": [183.0, 281.0, 29.0, 30.0], "area": 472, "segmentation": {"size": [512, 512], "counts": "Tik23m?1N3M2O2M2N3N1N3M2O2M2N1O10O00011N3M2O2M2N3M2O2M2N3N1N3Mffe4"}, "image_id": 427, "id": 7046}, {"iscrowd": 0, "category_id": 1, "bbox": [130.0, 289.0, 35.0, 31.0], "area": 553, "segmentation": {"size": [512, 512], "counts": "hYQ23m?2ENc@5Z?Nc@4\\?8O010O010O1O10O0010O010O010O00010O010O010O0010O3M3N2M2O2M3N2McV]5"}, "image_id": 427, "id": 7047}, {"iscrowd": 0, "category_id": 1, "bbox": [257.0, 295.0, 62.0, 62.0], "area": 1691, "segmentation": {"size": [512, 512], "counts": "`jP43l?2N2O1N3M2N2N2O2M2N2N3N1N2N2N2N01O0000HQO`AP1_>SO_Al0a>VO]Aj0d>WOZAi0f>9O000001O01O0001O00MiN^AX1a>31O00011N2N2N2N3N1N2N2N010O3M2N2N3N1N2N2N3N1N2N3M2N2OPVP3"}, "image_id": 427, "id": 7048}, {"iscrowd": 0, "category_id": 1, "bbox": [460.0, 302.0, 6.0, 6.0], "area": 25, "segmentation": {"size": [512, 512], "counts": "`YV72m?2N2O00O3Maff0"}, "image_id": 427, "id": 7049}, {"iscrowd": 0, "category_id": 1, "bbox": [482.0, 308.0, 30.0, 68.0], "area": 932, "segmentation": {"size": [512, 512], "counts": "TZa71n?2N2N2N3M2N2N2N2O1N2N2N3M2N2N2N2N2O1eAmNg=T1WBnNi=R1UBPOl=P1QBROo=n0oATOQ>l0nAUOR>k0lAWOT>i0jAYOV>g0hA[OX>X1O01OoE"}, "image_id": 427, "id": 7050}, {"iscrowd": 0, "category_id": 1, "bbox": [113.0, 309.0, 16.0, 15.0], "area": 122, "segmentation": {"size": [512, 512], "counts": "lih13l?3N1N3NO01O01O010O01O02N3N2M2OUVo5"}, "image_id": 427, "id": 7051}, {"iscrowd": 0, "category_id": 1, "bbox": [196.0, 311.0, 9.0, 11.0], "area": 52, "segmentation": {"size": [512, 512], "counts": "kYR32m?2N3M2N21M2N3M2OSVi4"}, "image_id": 427, "id": 7052}, {"iscrowd": 0, "category_id": 1, "bbox": [446.0, 311.0, 30.0, 29.0], "area": 433, "segmentation": {"size": [512, 512], "counts": "TZo61n?2O1N2N2N2N2N2N2N2N2N3M2N2O1N01O2N2N2N3M2N2N2N2N2N2O1N2N2Nlea0"}, "image_id": 427, "id": 7053}, {"iscrowd": 0, "category_id": 1, "bbox": [85.0, 320.0, 68.0, 57.0], "area": 1785, "segmentation": {"size": [512, 512], "counts": "QkZ11o?1N3M3N1N3M2O2M3N1N3M2O2M3M2O1N010O0010O00010O00IPO`AP1_>SO^Am0c>UOZAl0e>71O01O01O01O010O01O01O01O010O2O2M2N3N2M2N3N1N3N2M0010O01O01O01O011N3M3N1N3N1N3M[Uc5"}, "image_id": 427, "id": 7054}, {"iscrowd": 0, "category_id": 1, "bbox": [439.0, 327.0, 10.0, 9.0], "area": 46, "segmentation": {"size": [512, 512], "counts": "[jk62n?1N2N2N1O02N2N2N2OeUo0"}, "image_id": 427, "id": 7055}, {"iscrowd": 0, "category_id": 1, "bbox": [214.0, 334.0, 17.0, 16.0], "area": 144, "segmentation": {"size": [512, 512], "counts": "fZ[32m?2N3N1N2N2N1O01O0000101N3M2N2N2O\\U\\4"}, "image_id": 427, "id": 7056}, {"iscrowd": 0, "category_id": 1, "bbox": [158.0, 336.0, 27.0, 24.0], "area": 337, "segmentation": {"size": [512, 512], "counts": "lZ_23m?1N3M3N1N3M2O0O0010O0010O00010O0010O00102M2N3N1N3M3NVUS5"}, "image_id": 427, "id": 7057}, {"iscrowd": 0, "category_id": 1, "bbox": [297.0, 343.0, 61.0, 52.0], "area": 1746, "segmentation": {"size": [512, 512], "counts": "^kd41o?1N2N2N3a@IP?9n@IP?9n@IP?9n@IP?9n@IP?e0N2N2N2O1000N0000000002N2N2N2O1N2N02N2N2N2N2N2N0001O0000000000000001O1O2N2N2N2N2O1N2N3M2N2N2N2N2N2N2N2Nid\\2"}, "image_id": 427, "id": 7058}, {"iscrowd": 0, "category_id": 1, "bbox": [238.0, 361.0, 55.0, 53.0], "area": 1376, "segmentation": {"size": [512, 512], "counts": 
"f[g32m?2N3M2N2N3M2N3M2N3M2N2N3M20O1NPATOo>j0QAXOo>f0QA]On>a0RAAo>h00CQAHo>6QALo>2QA0P?MPA5P?IPA9P?EPA=P?:1UOo@d0Q?ZOo@h0Q?VOo@l0Q?2N2N2N00000000000000000002ETA@m>>UA@m>>UA@m>>UA@m>>mNoAT1o=nNoAR1Q>POmAP1S>SOjAm0W>TOgAm0X>=00000000001O00002N2O1N3M2N2N2N2N3M2N2N2O1N2N000000001O000001O02N2N2N2N2N3M2N2N2O1N3MhRf4"}, "image_id": 427, "id": 7065}, {"iscrowd": 0, "category_id": 1, "bbox": [277.0, 419.0, 42.0, 31.0], "area": 583, "segmentation": {"size": [512, 512], "counts": "lmZ41n?2N2O1LKZ@6e?200000001O10O1O00000010O2N3M2N2N2N2N2N2N2O1N1OMQAVOQ?i04N2N2N2N2N2N2N2N2N3N1N2N`RP3"}, "image_id": 427, "id": 7066}, {"iscrowd": 0, "category_id": 1, "bbox": [384.0, 425.0, 62.0, 53.0], "area": 1544, "segmentation": {"size": [512, 512], "counts": "Q^P61n?2N2N2N2N2N2N2N2N2N2N2N2O10001O00000000000N2N2O1N2N2N2N000000000000001O00000000000002N2N2N2N2N2N2N2N2N2N2N3M2N2N2N2N2O1N2NTbP1"}, "image_id": 427, "id": 7067}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 432.0, 26.0, 45.0], "area": 717, "segmentation": {"size": [512, 512], "counts": "h=U1j>010O00010O000010O00011N2N3N1N3M2N3Ii@AX?=j@AY?<7O2M2N3N1Nmab7"}, "image_id": 427, "id": 7068}, {"iscrowd": 0, "category_id": 1, "bbox": [186.0, 444.0, 53.0, 60.0], "area": 1583, "segmentation": {"size": [512, 512], "counts": "T_m22m?2N2O1N2N2N2N2N2N3M2N2N2N2N2_OSOlAo0R>SOlAo0S>ROlAo0R>TOkAl0U>VOiAj0W>XOgAh0Y>ZOeAf0[>?0000000000101N2N2N2N3M2N2N2N2N0000000001O0002N2@n@OT?On@OT?On@OT?On@OT?On@OT?On@OT?On@Ol`X4"}, "image_id": 427, "id": 7069}, {"iscrowd": 0, "category_id": 1, "bbox": [351.0, 450.0, 23.0, 23.0], "area": 272, "segmentation": {"size": [512, 512], "counts": "_n_51n?2N2N2N2N3M2N2N2N00000001O010O2N2N2N2N2N2N2NhaT2"}, "image_id": 427, "id": 7070}, {"iscrowd": 0, "category_id": 1, "bbox": [250.0, 453.0, 41.0, 42.0], "area": 861, "segmentation": {"size": [512, 512], "counts": "d^m32n?1N3M2N2N2N2N2N2N2N2N2N2O1N3M2N2N2000000001O000O1N2N2N2N2N2N2N2O2M2N2N2N2N2N2N2NSQ^3"}, "image_id": 427, "id": 7071}, {"iscrowd": 0, "category_id": 1, "bbox": [297.0, 460.0, 60.0, 52.0], "area": 1647, "segmentation": {"size": [512, 512], "counts": "mnd41n?2N2O1N2N2N2N3M2N2N2N2N2N2N2O1N2O101O00000000000O1N2N2N2N2N3M2N100O1O1O11O1O1oNcA=^>AdA>]>@eA?\\>_OfA`0\\>^OeAa0\\>]OgAa0Z>]OhAb0Y>\\OiAc0X>[OjAd0j>O1O1O1O1O1N2N3M2N2N2N2NYP]2"}, "image_id": 427, "id": 7072}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 472.0, 54.0, 40.0], "area": 1306, "segmentation": {"size": [512, 512], "counts": "d?NPA`0P?8000000000001O000000000000000000000000001OM3000000000000000000001O000000000000000000000000001O000000000000000000000000H800000000O1B^`e0"}, "image_id": 429, "id": 7080}, {"iscrowd": 0, "category_id": 1, "bbox": [477.0, 0.0, 35.0, 19.0], "area": 460, "segmentation": {"size": [512, 512], "counts": "P`^7c0]?000000000000000000000000000000000000000000_Od@>\\?Ba00000000000000000000000"}, "image_id": 429, "id": 7081}, {"iscrowd": 0, "category_id": 1, "bbox": [170.0, 6.0, 21.0, 21.0], "area": 406, "segmentation": {"size": [512, 512], "counts": "gPe23\\?a0000000000000000000000000000001O0001O00i_P5"}, "image_id": 429, "id": 7082}, {"iscrowd": 0, "category_id": 1, "bbox": [208.0, 10.0, 15.0, 16.0], "area": 178, "segmentation": {"size": [512, 512], "counts": "dPX34f?6L4001O0001O0001O000001OL4Ki_`4"}, "image_id": 429, "id": 7083}, {"iscrowd": 0, "category_id": 1, "bbox": [489.0, 32.0, 23.0, 53.0], "area": 1072, "segmentation": {"size": [512, 512], "counts": "Sbd7a0_?000L4QOo000000000000001O000000000000000000000PO"}, "image_id": 429, "id": 7084}, {"iscrowd": 0, "category_id": 1, "bbox": [117.0, 38.0, 53.0, 
47.0], "area": 1922, "segmentation": {"size": [512, 512], "counts": "Vbj17Z??F:001O00000001O00000000000000000001O00000001M2I700000000000000\\AiN_>\\100000000000010O000000000000000000B>A?AXoZ5"}, "image_id": 429, "id": 7085}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 38.0, 10.0, 18.0], "area": 166, "segmentation": {"size": [512, 512], "counts": "`QW77_?:01O000000000000Nlnc0"}, "image_id": 429, "id": 7086}, {"iscrowd": 0, "category_id": 1, "bbox": [238.0, 39.0, 56.0, 40.0], "area": 1638, "segmentation": {"size": [512, 512], "counts": "RRg32i?5J7J5J6K51O01O4L0010O00000000010O000000010O00000001O01O000000010O0M300001O01O000001O01O00000001O01M2J6K5J6K6Jdn\\3"}, "image_id": 429, "id": 7087}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 47.0, 77.0, 46.0], "area": 2293, "segmentation": {"size": [512, 512], "counts": "TRR65[?`00000000000000000000010O0000000L4N2001O000000PAA_>P10001O000000000000000001O0000000000015J0000000I7M3000000001O00000001O0000000O1N200000000001O00000001O0H8@g^g0"}, "image_id": 429, "id": 7088}, {"iscrowd": 0, "category_id": 1, "bbox": [203.0, 64.0, 23.0, 16.0], "area": 247, "segmentation": {"size": [512, 512], "counts": "YbU33h?5L4000010O000000010O00000010O00000010OO1K5LPn^4"}, "image_id": 429, "id": 7089}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 79.0, 46.0, 64.0], "area": 1456, "segmentation": {"size": [512, 512], "counts": "`2S1l>21O000001O0000000001O000001M2O10000001OO1O2O000000O1N2N2N2N3M2O1N001O2N3M2N2N2N2O1N2N2I[@0l?NTlX7"}, "image_id": 429, "id": 7090}, {"iscrowd": 0, "category_id": 1, "bbox": [55.0, 85.0, 68.0, 52.0], "area": 1884, "segmentation": {"size": [512, 512], "counts": "obk01o?3M2M4M2M4M3M2M4M2M4M2N3L3N3L3N3M0O1000O010O01000O011O3L3N3L4M2N2M011N3N3M2M4M2M11O2M4M2M2OO10O10O010O10O10O010O10O11N4M2M4M2N3L3N3M2MR\\R6"}, "image_id": 429, "id": 7091}, {"iscrowd": 0, "category_id": 1, "bbox": [129.0, 98.0, 35.0, 36.0], "area": 630, "segmentation": {"size": [512, 512], "counts": "]cP22n?1N2N3M2O1N3M2N2O2M2N2N2N3N1N2O2O0001M2O1N3M2N2N2O2M2N2N3N1N2N3M2O1NXl]5"}, "image_id": 429, "id": 7092}, {"iscrowd": 0, "category_id": 1, "bbox": [179.0, 100.0, 50.0, 75.0], "area": 2295, "segmentation": {"size": [512, 512], "counts": "Uei26g>0PB6j=0PB7i=OQB7k=MoA9Q>k0000001O01O00000001O0O1J6J6N201O000001O0K5J6L4000001O0lNlA]OSB=S>]OSB=S>]OSBd0e>O00000000010O000000000K6I6JU\\]4"}, "image_id": 429, "id": 7093}, {"iscrowd": 0, "category_id": 1, "bbox": [472.0, 100.0, 40.0, 89.0], "area": 2328, "segmentation": {"size": [512, 512], "counts": "^U\\72m?1O2M3N1N3N2M2VNHkC:Q70S=a0lBAS=?kBCU=>iBDV=I7000000000000000000010O0000000000000000000000000000010O000000000000000000000000C=@llP1"}, "image_id": 429, "id": 7095}, {"iscrowd": 0, "category_id": 1, "bbox": [129.0, 119.0, 7.0, 9.0], "area": 39, "segmentation": {"size": [512, 512], "counts": "icP23m?2M3NO012M3NUlk5"}, "image_id": 429, "id": 7096}, {"iscrowd": 0, "category_id": 1, "bbox": [230.0, 124.0, 63.0, 44.0], "area": 1868, "segmentation": {"size": [512, 512], "counts": "cTc34g?5L4K6J5N20000010O00000010O0O1M301O01O0000010O00000001QAUOh>Q1000001O01O0000L50O000000010O000000010O000L4K5K6N1000000010ON2K5K6J5KS\\]3"}, "image_id": 429, "id": 7097}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 130.0, 16.0, 54.0], "area": 419, "segmentation": {"size": [512, 512], "counts": "R4f1[>N2QOeA6]>HeA6^>GdA7^>GdA7^>GdA7^>GdA7^>GdA7^>HcA6_>HcA6_>HcA6_>HcA7U?M2N2Nhjg7"}, "image_id": 429, "id": 7098}, {"iscrowd": 0, "category_id": 1, "bbox": [102.0, 139.0, 45.0, 51.0], "area": 1143, "segmentation": {"size": [512, 512], "counts": 
"hTc11o?2M2N3N2M2O2M2N3N2M2O2M3M2O2M3M2O2M2O2M3M2O20O1M2O2M3N1N3M3N1N3N1N3M3N1N3M3N1N3N2M2N3N1NbZf5"}, "image_id": 429, "id": 7099}, {"iscrowd": 0, "category_id": 1, "bbox": [38.0, 157.0, 11.0, 11.0], "area": 67, "segmentation": {"size": [512, 512], "counts": "QUc02n?1N2N2N2N2OO2N2N2N2NP[W7"}, "image_id": 429, "id": 7100}, {"iscrowd": 0, "category_id": 1, "bbox": [404.0, 164.0, 54.0, 50.0], "area": 1896, "segmentation": {"size": [512, 512], "counts": "hUZ69S?d0000001O0000000001O00000000000000000001O01O00000000000000XAYOW>Y1O0001O00000000000000000000001O01O00000000G9@`0AWkj0"}, "image_id": 429, "id": 7101}, {"iscrowd": 0, "category_id": 1, "bbox": [205.0, 170.0, 70.0, 52.0], "area": 2139, "segmentation": {"size": [512, 512], "counts": "YfV37c?60000000010c@FR?:h@LX?>0000N2H[OUAf0h>\\OYAc0g>^OXAb0h>^OUAe0k>8L4J7K4001O000001O0001O0M3J6J6O1010O000005K0001O01O00000001O01O00000001O01O00000001O01O00000001O01N1J6J6J6J_Zf3"}, "image_id": 429, "id": 7102}, {"iscrowd": 0, "category_id": 1, "bbox": [98.0, 180.0, 28.0, 28.0], "area": 405, "segmentation": {"size": [512, 512], "counts": "QVa11n?2N2N2N2N2O1N3M2N2N2N2N2N2N01O2N2N2N2N2N2O2M2N2N2N2N2NPjP6"}, "image_id": 429, "id": 7103}, {"iscrowd": 0, "category_id": 1, "bbox": [115.0, 189.0, 75.0, 60.0], "area": 2054, "segmentation": {"size": [512, 512], "counts": "Rgi11o?1N2N2N2N2N3M2O1N2N2N2N3M2N2OO2O1N2N2N20N2N2N2N3M2O1JQOXAQ1f>6N1O10O0000000000010O00001O2N1O01O0001O00000001O0001O001O2N3N1N2N2N2N2N3N1N2N2N2N3M2N2O1N2N2N_iP5"}, "image_id": 429, "id": 7104}, {"iscrowd": 0, "category_id": 1, "bbox": [116.0, 201.0, 9.0, 8.0], "area": 39, "segmentation": {"size": [512, 512], "counts": "]Vj12m?2N2N1O01O2N2N2OdYQ6"}, "image_id": 429, "id": 7105}, {"iscrowd": 0, "category_id": 1, "bbox": [401.0, 215.0, 57.0, 67.0], "area": 2153, "segmentation": {"size": [512, 512], "counts": "egX66^?<000000000000000000000000000008H0N2O11O00PAFZ>P10000000000000000000000@`0000001O0000000000000000M3]Oc0]Oc0L5O0001O000000@hij0"}, "image_id": 429, "id": 7106}, {"iscrowd": 0, "category_id": 1, "bbox": [465.0, 215.0, 47.0, 67.0], "area": 2183, "segmentation": {"size": [512, 512], "counts": "fhX71^?a0001O00000000O2]Ob0_Oa0E;001O00000000000000000000000001O00000001SNTBd1T>00000000000000mNTB2l=\\OfBd0[>00001O00000nG"}, "image_id": 429, "id": 7107}, {"iscrowd": 0, "category_id": 1, "bbox": [59.0, 218.0, 54.0, 61.0], "area": 1680, "segmentation": {"size": [512, 512], "counts": "agm02m?2N2N2N2N2N3M2N2O1N2N2N2N2N2N2N201O000O1N2N2N2N2N2N2N2N2N3M2N200000N2N2O1N2N2nNcA>_>@cA>_>AbA=a>@aA>a>@aA>a>@aA>a>@aA>a>@aA>R?N3N1N2N2N2N2NnWW6"}, "image_id": 429, "id": 7108}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 242.0, 59.0, 69.0], "area": 1960, "segmentation": {"size": [512, 512], "counts": "X87h?2N2N2N2O1N2N2N2N3N10000O1N2N2N2N2N2N3N1N2N2N2N2N2N2N2N3M2N2O1O100000cNQBh0o=VOSBj0m=TOUBl0k=ROXBl0j=ROWBl0k=ROWBl0k=ROWBl0k=ROWBl0k=ROWBl0k=ROWBm0_>N2N3M2N2N2N2N2N2N2N2O1N3M2NmVR7"}, "image_id": 429, "id": 7109}, {"iscrowd": 0, "category_id": 1, "bbox": [341.0, 242.0, 11.0, 11.0], "area": 90, "segmentation": {"size": [512, 512], "counts": "fgZ55g?41O01O0001O0000010O[h_2"}, "image_id": 429, "id": 7110}, {"iscrowd": 0, "category_id": 1, "bbox": [114.0, 246.0, 28.0, 27.0], "area": 398, "segmentation": {"size": [512, 512], "counts": "RXi11n?2O2M2N2N2N2N2N2N3N1N2N2N1O0101N2N2N3M2N2N2N2O1N2N3M2Nmgh5"}, "image_id": 429, "id": 7111}, {"iscrowd": 0, "category_id": 1, "bbox": [112.0, 269.0, 10.0, 11.0], "area": 61, "segmentation": {"size": [512, 512], "counts": "bXh13l?2O1N2N000002O1N2NagR6"}, "image_id": 429, "id": 
7112}, {"iscrowd": 0, "category_id": 1, "bbox": [58.0, 283.0, 40.0, 35.0], "area": 739, "segmentation": {"size": [512, 512], "counts": "^Ym01n?2N2N2N2N2N2N2N2O2M2N2N2N2N2N000000010O0001O2N200000000N2N2N3M2N2N2O1N2N2N2N2Ndf^6"}, "image_id": 429, "id": 7113}, {"iscrowd": 0, "category_id": 1, "bbox": [447.0, 283.0, 19.0, 10.0], "area": 174, "segmentation": {"size": [512, 512], "counts": "kho68h?0000000000002N000000000000000001O00Tgf0"}, "image_id": 429, "id": 7114}, {"iscrowd": 0, "category_id": 1, "bbox": [493.0, 288.0, 13.0, 9.0], "area": 104, "segmentation": {"size": [512, 512], "counts": "Qif77h?1000000010O000000000000of2"}, "image_id": 429, "id": 7115}, {"iscrowd": 0, "category_id": 1, "bbox": [439.0, 299.0, 69.0, 65.0], "area": 2774, "segmentation": {"size": [512, 512], "counts": "ajk63j?3L5K4M3o@_Oa>c0\\A@d>a0WADh>e001O01O0001O01O0001O01O00010O0001O01O0001O01O0001M2M2M1O2O4K4L4L4O2O0001O01O0001O01O00010O0001O01O0001O0VOoAFP>6TBJl=3XBKj=0ZBLj=0ZBLj=0ZBMi=0[BKj=0ZBLj=0cg1"}, "image_id": 429, "id": 7116}, {"iscrowd": 0, "category_id": 1, "bbox": [349.0, 311.0, 59.0, 92.0], "area": 2682, "segmentation": {"size": [512, 512], "counts": "^k^52n?1N2N2N2N2N3M2N2N2N2O1N2N3M2N2N2N2N2N2N2O2M2N2N2N2N2^Ob0O2N1O1O1@?0O1O1O1O1TOiBnNX=R1iBlNX=S1jBlNV=S1lBkNU=T1mBjNT=U1oBhNR=W1PCgNQ=X1QCfNP=Y1RCeNo<[1RCcNo<\\1SCbNn<]1TCbNl<^1VC_NP=\\1U1FON2N2N=C1N1O0000000001O01O0000000001O011N00001O00000001O0001O000000010O2N2N2N2N2010O000000O1N3M2N2N2Da@4a?Ja@4g?O1N[UV7"}, "image_id": 429, "id": 7119}, {"iscrowd": 0, "category_id": 1, "bbox": [508.0, 351.0, 4.0, 15.0], "area": 33, "segmentation": {"size": [512, 512], "counts": "[[n72j?4L4L5PE"}, "image_id": 429, "id": 7120}, {"iscrowd": 0, "category_id": 1, "bbox": [400.0, 396.0, 68.0, 75.0], "area": 2516, "segmentation": {"size": [512, 512], "counts": "i]X62m?2N2N2O2M2N2N2N2N2N2N2N2O11O01M2N2N2N2N2N2N2N2N2O2M0000001O00000000000001O00000001O00002N2N2N2O1N1O001O000000YOnAAT>=nAAU>0^A3>KV>0^A3>LU>0^A2?LU>0^A2`0KT>1_A1e?Nmbe0"}, "image_id": 429, "id": 7121}, {"iscrowd": 0, "category_id": 1, "bbox": [276.0, 401.0, 80.0, 68.0], "area": 2555, "segmentation": {"size": [512, 512], "counts": "]]Z41n?2N2N2N2O1N3M2N2N2N2N2N20001O2N0O1N2O1N2N3M2N2N2N0001O2O101O000000N3M2N2N2N2N2000N1O1O00000001O00000001CcAVO]>j0eATO[>l0gAROY>n0iAQOV>o0lAoNT>R1mAlNS>T1<0001O2N2N2N2N3N1N2N2N2N2N2N2N3M2O1N2N2N2Nlb]2"}, "image_id": 429, "id": 7122}, {"iscrowd": 0, "category_id": 1, "bbox": [471.0, 436.0, 17.0, 16.0], "area": 141, "segmentation": {"size": [512, 512], "counts": "mm[71n?2N2N2N2N1O2N2M20O2N2N2N2N2N1N3NWb;"}, "image_id": 429, "id": 7123}, {"iscrowd": 0, "category_id": 1, "bbox": [497.0, 443.0, 15.0, 32.0], "area": 260, "segmentation": {"size": [512, 512], "counts": "_nh73l?1N3N2M2O2N2M3N1N3N2N1N3N2M2UB"}, "image_id": 429, "id": 7124}, {"iscrowd": 0, "category_id": 1, "bbox": [330.0, 461.0, 79.0, 51.0], "area": 2528, "segmentation": {"size": [512, 512], "counts": "[_U53l?2O1BLm@6Q?Lm@6Q?Ml@5S?Lk@6S?Lk@7R?=N2N2N2N2O1N3M2N2N1O1001O1O1O1O1N2N30OO1O1O1O1O2N1O1O1O1O1O00O1O1O1O1O1O1O100O1O1O1O1O1O1O10J\\APOc>Q1^AnNa>R1aAmN^>S1dAlN[>T180001O0001O01O2N2N2N3M2N2O1N2N2Be@4^?Jc@4_?Jc@4f?N3NlPc1"}, "image_id": 429, "id": 7125}, {"iscrowd": 0, "category_id": 1, "bbox": [452.0, 491.0, 60.0, 21.0], "area": 789, "segmentation": {"size": [512, 512], "counts": "j_R71n?2N2N3M2N1O1O1O1O1O1001O1O2N00O1O1O100O1O1O1001O1O2N1O1O1O1O1O1O0000O1O1O1O1O1O1O100O1O1O1O1O1O12N1O1O1O1O1O1O2N1O1O1O"}, "image_id": 429, "id": 7126}, {"iscrowd": 0, "category_id": 1, "bbox": [427.0, 509.0, 7.0, 3.0], "area": 14, 
"segmentation": {"size": [512, 512], "counts": "ooe61n?100O1001O1OQ`V1"}, "image_id": 429, "id": 7127}, {"iscrowd": 0, "category_id": 1, "bbox": [30.0, 0.0, 45.0, 20.0], "area": 548, "segmentation": {"size": [512, 512], "counts": "SP?2k?301O001O00001O001O001O001O00001O001O001O001O00001O001O001O001O00001O0000N2N2M3N2N2M3N2NR`j6"}, "image_id": 430, "id": 7128}, {"iscrowd": 0, "category_id": 1, "bbox": [92.0, 0.0, 40.0, 20.0], "area": 614, "segmentation": {"size": [512, 512], "counts": "RP^11m?200W@4^?8O0000001O0000001O0000001O0000001O000000001O000000O1L40000001O0000O1L5K4LSPn5"}, "image_id": 430, "id": 7129}, {"iscrowd": 0, "category_id": 1, "bbox": [177.0, 0.0, 41.0, 21.0], "area": 595, "segmentation": {"size": [512, 512], "counts": "[`h24g?5J601O000000001O0000001O0000001O0000001O000000001O000000N2M3L4L4L400000000001ONRPc4"}, "image_id": 430, "id": 7130}, {"iscrowd": 0, "category_id": 1, "bbox": [303.0, 32.0, 16.0, 22.0], "area": 162, "segmentation": {"size": [512, 512], "counts": "aag42l?3L3N2O2O0O2L3N0O03N2N3L3N3L3NP_P3"}, "image_id": 431, "id": 7131}, {"iscrowd": 0, "category_id": 1, "bbox": [263.0, 48.0, 44.0, 29.0], "area": 628, "segmentation": {"size": [512, 512], "counts": "laS42l?3L3N2M4M2010O0010O00010O010O00010O010O00010O010O00010O010O00010O010O00010O01N1N2M4M2MT^V3"}, "image_id": 431, "id": 7132}, {"iscrowd": 0, "category_id": 1, "bbox": [253.0, 66.0, 36.0, 38.0], "area": 841, "segmentation": {"size": [512, 512], "counts": "lbn32k?4M2M4M2M3N3L3N3L3N2O2O010O00010O010O00010O010O0001N1N3L3N2Hi@D[?9g@E[?89M2M3Ng]_3"}, "image_id": 431, "id": 7133}, {"iscrowd": 0, "category_id": 1, "bbox": [309.0, 75.0, 95.0, 89.0], "area": 4948, "segmentation": {"size": [512, 512], "counts": "Wdj41m?2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M310O010O00010O01O01O010O01O01O010O01O01O010O00010O010O00010O010O00010O010O00010O010O00010O010OO1M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3M4M2M3N3L3Nkle1"}, "image_id": 431, "id": 7134}, {"iscrowd": 0, "category_id": 1, "bbox": [173.0, 91.0, 19.0, 16.0], "area": 186, "segmentation": {"size": [512, 512], "counts": "Scf22k?4M2M4O000010O010O00010O010O0N2M4M2MSmo4"}, "image_id": 431, "id": 7135}, {"iscrowd": 0, "category_id": 1, "bbox": [233.0, 91.0, 24.0, 18.0], "area": 238, "segmentation": {"size": [512, 512], "counts": "Scd33j?3N2N3N110O01O01O010O01O01O010O010O0001M2N3L3No\\o3"}, "image_id": 431, "id": 7136}, {"iscrowd": 0, "category_id": 1, "bbox": [224.0, 103.0, 39.0, 34.0], "area": 570, "segmentation": {"size": [512, 512], "counts": "iS`31m?2M4M2M4M2N3L3O110O0010O010O0010O0010O0010OGg@KZ?2h@OW?Nm@1S?Mo@4Q?IRA6\\?0010O010O010O00010O0N3MP\\l3"}, "image_id": 431, "id": 7137}, {"iscrowd": 0, "category_id": 1, "bbox": [197.0, 148.0, 68.0, 94.0], "area": 3304, "segmentation": {"size": [512, 512], "counts": "PgR32l?3L3N3M2N2M4M2N3L3N3M2M3N3M2M4M2N3L3N3M2N2M4M2N3L3N3M2M3N3M2010O010O0001ONO1000O103M2M3N3M2M4M2N3L3N3_NjAT1X>jNjAT1X>jNkAR1Y>kNiAS1d>M2M3N3M2M4M2N3L3N2N3L3N3MhZk3"}, "image_id": 431, "id": 7138}, {"iscrowd": 0, "category_id": 1, "bbox": [190.0, 232.0, 27.0, 24.0], "area": 258, "segmentation": {"size": [512, 512], "counts": "cWo21l?3N2M4M2O20O010O0010O0010O00_@D_??OGc@O]?Of@1Z?Lh@4b?010O010O0001O0MXXc4"}, "image_id": 431, "id": 7139}, {"iscrowd": 0, "category_id": 1, "bbox": [184.0, 246.0, 11.0, 17.0], "area": 101, "segmentation": {"size": [512, 512], "counts": "QXl24j?2M3N3L3O20L3N3L3N2M[Xn4"}, "image_id": 431, "id": 7140}, {"iscrowd": 0, "category_id": 1, "bbox": [188.0, 309.0, 33.0, 43.0], "area": 878, "segmentation": {"size": [512, 512], "counts": 
"[Zn22a?1g@2V?1f@2X?0f@26Ih>c0UA_Ok>a0SABl>i001M2N2010O01O01O010O010O01OM4M2N3L3N2M4M2N3L3N3L3N2NTVa4"}, "image_id": 431, "id": 7141}, {"iscrowd": 0, "category_id": 1, "bbox": [219.0, 356.0, 12.0, 19.0], "area": 125, "segmentation": {"size": [512, 512], "counts": "bk]33k?3L3N2M4M2N12M2M4M2M3NlT\\4"}, "image_id": 431, "id": 7142}, {"iscrowd": 0, "category_id": 1, "bbox": [174.0, 385.0, 68.0, 80.0], "area": 2633, "segmentation": {"size": [512, 512], "counts": "Q^g21n?1O2M3N1O2M3N2N1\\OB_AOIb0f>A_ANJc0e>A_Af0^>\\O`Af0_>\\O_Af0_>\\O_Af0^>`0N1O2M3N2M2O2N1N1000O01000O2O1N3N2N2M10O10O10O10O10O10O2O2N2M3N1N3N2O1O01000O1N1N3N2N2M2Fm@FU?8m@FU?7m@GU?8m@FU?7X110O01O01O010O01O01O010ZAhNb>\\10O0fN^AW1a>gNbAX1c>0010O010O000oN[Ah0e>UO]Ak0c>RO`A3Ke0n>XOTAh0R?010O01O01O010OM4M2M3M4M2M3N3Ll^V1"}, "image_id": 432, "id": 7145}, {"iscrowd": 0, "category_id": 1, "bbox": [443.0, 52.0, 53.0, 95.0], "area": 2942, "segmentation": {"size": [512, 512], "counts": "jcm62l?3^@NQ?5m@No>5n@N_>HlA=BN_>HkAk0R>XOlAj0R>XOkAl0Q>XOkAk0l=iNVB?Lk0k=hNWBk1f=9N3L3N2N3L3N3O00010L3M4O000001L3N2M4L3N3L3N2M4L3N3L3ZNnA\\1T>aNoA;Nc0\\>YOhAg0W>VOlAj0T>TOnAl0S>POQBo0`>M4M2M3M4M2M3N3L3M4MZm7"}, "image_id": 432, "id": 7146}, {"iscrowd": 0, "category_id": 1, "bbox": [337.0, 66.0, 89.0, 61.0], "area": 2679, "segmentation": {"size": [512, 512], "counts": "mbX52l?2M3N3L3N3M2M3010O010O010O00010O010O01O0O1N3M2M4M2N2M4O010O010O00010O010O01O01O0O2O010O00010O010O010O00010O0100O1O010O010O01O01O010O0RO[Ab0e>[O]Ae0c>XOaAg0_>WOcAj0i>010O010O00010O01N1N3L3N2M4M2N3L3N3MjlZ1"}, "image_id": 432, "id": 7147}, {"iscrowd": 0, "category_id": 1, "bbox": [495.0, 82.0, 17.0, 82.0], "area": 714, "segmentation": {"size": [512, 512], "counts": "\\dg71m?2M4L3j@Gc>;[AGb>=ZAGc>;[AGb>=ZAF[>GgAe0LGZ>GfAV1n=eNXBg1d=]NXBf1f=\\NXBf1e==M2M4M2^M"}, "image_id": 432, "id": 7148}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 306.0, 13.0, 28.0], "area": 251, "segmentation": {"size": [512, 512], "counts": "c9i0V?110O00010ON2M4L3M3M4K4M]Vi7"}, "image_id": 432, "id": 7149}, {"iscrowd": 0, "category_id": 1, "bbox": [98.0, 24.0, 70.0, 88.0], "area": 2595, "segmentation": {"size": [512, 512], "counts": "VSa15h?3L4L4L5K4L4M301N1O10001O0000001O0001OO2I600O10M2M3N3L3M40O1O2O0O100O1O2O0O010O000N3M2M3M4L3N3L3M3N30XORB]Oo=d0SBXOo=g0SBWOm=i0UBTOl=m0UBQOk=o0XBmNj=S1WBjNj=V1XBhNh=X1`010O000O2K4L4L4_Om@2W?Im@3W?Jl@2[^\\5"}, "image_id": 433, "id": 7150}, {"iscrowd": 0, "category_id": 1, "bbox": [9.0, 56.0, 73.0, 57.0], "area": 2433, "segmentation": {"size": [512, 512], "counts": "eb45k?3M2EJh@6X?9000O0104L3L5L4L3M1N0100000IjNcAV1^>mN_AS1`>7000O01000O10O1000O01000O10OI_AQOa>o0cAmN]>S18O01000O0100000O0100000O01000O014L1OO010000O010000O010003M3L5L4L3L5L4L3MZmf6"}, "image_id": 433, "id": 7151}, {"iscrowd": 0, "category_id": 1, "bbox": [174.0, 58.0, 60.0, 61.0], "area": 2221, "segmentation": {"size": [512, 512], "counts": "]Sg21m?3L3N3M2M4M2M3N3L3N3L3N2M4M2N3L3N2M4M1N10O300010O0010O010O00010O01O01O010O01O01O0O1M4M2M4M2M3N3L3N2N3L3N3L3N3L3N2M4M2MkmZ4"}, "image_id": 433, "id": 7152}, {"iscrowd": 0, "category_id": 1, "bbox": [227.0, 81.0, 69.0, 90.0], "area": 2944, "segmentation": {"size": [512, 512], "counts": "oda32l?2N3M2N3M2N3M2N3M2N3SOWO_Bk0_=WO_Bl0^=WO_Bk0_=WO_Bl0a=TO\\Bo0X=iNdB82Q1X=jNfB5OT1X=iNiB3MV1Y=iNjB1JY1Y=hNmBOH[1Y=iNoBa1oh0RA\\On>k000L5K4L4L4L5K4L4L4L4K6O0001O01O0001O01ON2L3L103M4L4N3O01O0001L3L4L4L5K4L4L4L5K4L4L4L5KYlk6"}, "image_id": 433, "id": 7155}, {"iscrowd": 0, "category_id": 1, "bbox": [306.0, 119.0, 54.0, 45.0], "area": 1311, "segmentation": {"size": [512, 512], "counts": 
"_Ti43k?3M2M2O2N2N2O20O010O010O01L3N2N3M2M4N1010O010O00010O010O010O0010O0010O010O010O0010O00WOSA`0m>\\OVAd0j>ZOYAf0P?00N3L3@c@;c?M2N3L3N`k[2"}, "image_id": 433, "id": 7156}, {"iscrowd": 0, "category_id": 1, "bbox": [184.0, 124.0, 29.0, 31.0], "area": 519, "segmentation": {"size": [512, 512], "counts": "aTl22l?2M4M2M3N3M2M4M2N30O00010O010O00010O010L3N3M2M3N3L3N3M2Mo[e4"}, "image_id": 433, "id": 7157}, {"iscrowd": 0, "category_id": 1, "bbox": [380.0, 131.0, 58.0, 47.0], "area": 1616, "segmentation": {"size": [512, 512], "counts": "hTn53j?3N3L3N2M4M2M4M2O110O0010O0010O0010O0010M2N30O00010O0010O0RASOi>R11O010O01O01O01N1O2O00010O010O00010OO2M2M3N3Ao@JT?4n@JU?2o@JT?4n@JT?3?MckT1"}, "image_id": 433, "id": 7158}, {"iscrowd": 0, "category_id": 1, "bbox": [62.0, 146.0, 66.0, 95.0], "area": 3148, "segmentation": {"size": [512, 512], "counts": "[Vo02l?2M4M2SOHoAElA>T>BjAa0V>^OgAe0Y>a0ZB\\NQ=d1lB^NT=b1iBbNV=_1gBcNZ=\\1dBgN[=Z1aBiN^=X1`BjN^=X1`BkN\\=n1N3L3N201O010O010O01O01O010OfNlBFW=7lBFV=8lBFV=7nBEV=8lBFV=8lBFW=6mBFV=8lBA]OEj=f0mBBZ=ZOWAi0f>:N3L3N3M2M3N3L3BUNeBn1W=VNeBm1Y=UNeBm1X=WNeBl1X=?N3L3N3L3N2M4M2N01O3M2M4M2M3N3L3N3M2M4M2M3N3L3N3M2M3N3L3N3L3N2F^ATOe>i0^ASOe>j0^ATOe>i0:M4N11O01N1M4M2M3N3L3Nljg4"}, "image_id": 433, "id": 7160}, {"iscrowd": 0, "category_id": 1, "bbox": [294.0, 152.0, 33.0, 31.0], "area": 580, "segmentation": {"size": [512, 512], "counts": "[Uc41m?2M4M2N3M2N3L3N3N11O010O010O010O01O010O01O010O010M2N3M2M3N3M2N3LP[l2"}, "image_id": 433, "id": 7161}, {"iscrowd": 0, "category_id": 1, "bbox": [272.0, 154.0, 23.0, 20.0], "area": 207, "segmentation": {"size": [512, 512], "counts": "ZUX42l?3L3N3L3N2N1O02M4L3N30O00010O010O0N2OO3M2M3NV[\\3"}, "image_id": 433, "id": 7162}, {"iscrowd": 0, "category_id": 1, "bbox": [359.0, 179.0, 69.0, 52.0], "area": 1916, "segmentation": {"size": [512, 512], "counts": "[fc51l?3N2M4M2M4M2M3N3L3O2O00010O010O00010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O0PATOm>o0010O0001M2N3O00010O010O00010O0010O001L3^OSAOo>OTAMP?OSAOo>OSANQ?NSAOo>O^jY1"}, "image_id": 433, "id": 7163}, {"iscrowd": 0, "category_id": 1, "bbox": [186.0, 199.0, 69.0, 79.0], "area": 2839, "segmentation": {"size": [512, 512], "counts": "]Wm22`0Ok>3RA0k>3SA0j>3RA0m>1QA2n>On@4S?=O01O01N1N3N11O010O01O01O010OTAQOg>U1O010O0O1M4M2M4M2M3N3L3N3L3N2M4M2N3O001O01L3N3L3N2N3L3N3L3N2M4M2M4M2M3N3L3N3M2M3N3L3N3L3N2M4M2MaYP4"}, "image_id": 433, "id": 7164}, {"iscrowd": 0, "category_id": 1, "bbox": [309.0, 241.0, 55.0, 97.0], "area": 2800, "segmentation": {"size": [512, 512], "counts": "^ij43k?2M3N3PAFm=0dA==Em=1cA=3VA1g>2VA0g>3VA0h>2VA1f>3VA0i>1UA2j>b0N2M4N101O01O010OO2M2010O00010O010O000N3M2M4M2N2M4M2M4M2O20O0010O0010O001O0N30O0010O0010O0010O0010O001L3N3L3N2M4M2N3LPXR1"}, "image_id": 433, "id": 7166}, {"iscrowd": 0, "category_id": 1, "bbox": [249.0, 243.0, 52.0, 67.0], "area": 1874, "segmentation": {"size": [512, 512], "counts": "\\hl32k?4M2M3N3L3N3O01O01O0`A]Oc=d0ZB^Od=d0YB@c=c0[B_Oc=d0YB@e=a0nATO6=m=c0PB@P>a0mABS>U101O01O010O01O01O01O0M100O03N2M4M2M4M2N3L3N2M4M2M4M2M4M2M4M2M4M2M3NWXY3"}, "image_id": 433, "id": 7167}, {"iscrowd": 0, "category_id": 1, "bbox": [349.0, 288.0, 55.0, 68.0], "area": 1975, "segmentation": {"size": [512, 512], "counts": "Rj^53k?2M4M2M4M2M3N3M2010O0010O0010N1M[A\\OS>e0jA_OR>e0kA]OS>e0jA^OS>e0kA^OR>e0jA^OS>e0kA^OR>Y1N2010O010O00010O010O00010O0WNQBb1o=ZNTBe1T>M4M2M4M2M3N3GVAWOm>f0VAXOm>e08M3N3L3N3L3N2M`fe1"}, "image_id": 433, "id": 7168}, {"iscrowd": 0, "category_id": 1, "bbox": [4.0, 298.0, 80.0, 68.0], "area": 2944, "segmentation": {"size": [512, 512], "counts": 
"VZ23k?3M2M3N3L3N3M2M4M2M3N3M210\\APOV>P1hAROX>n0fAUOZ>k0bAXO^>V10O00010O010O001O0M3N3M2M4O00010OM310O010O00010O0_AlNW>T1eAoN[>Q1cARO]>X100010O010O00010O010O010O00010O0`NcA^1_>01O010N1N2N3L3N3L3N2N6I3N2M4M2N3L3N2Mkee6"}, "image_id": 433, "id": 7169}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 350.0, 61.0, 55.0], "area": 2104, "segmentation": {"size": [512, 512], "counts": "Z;P1Q?OO2M2N3L3N2N3N1010O01O01O010O01O010O01O010O01O010O01O001L3N2M4M2N0O2O3M201O01O010O01O010O01O010O01O0O1N3L3N3M2M4M2N2M4MbTQ7"}, "image_id": 433, "id": 7170}, {"iscrowd": 0, "category_id": 1, "bbox": [148.0, 360.0, 51.0, 57.0], "area": 1855, "segmentation": {"size": [512, 512], "counts": "^\\Z22l?2M4M2N3CFo@=n>FPA=M210O0WAQO`>P1^ASO_>P1^ARO`>Y1L31O010O010O00O2MO013M21O010O01O010O01O010O01N1N2N3L3N3N101ITAUOo>5QAdNgA]1X>4010O0010O0010O00102M3N3L3M3N3M2010O0001N1M3N3L3N3L3N2M4L3N3L3Nncl3"}, "image_id": 433, "id": 7173}, {"iscrowd": 0, "category_id": 1, "bbox": [253.0, 401.0, 93.0, 72.0], "area": 3323, "segmentation": {"size": [512, 512], "counts": "Xmn32l?2M4M2M3N3M2M4M2M4N1UAUOa>k0]AWOc>j0YAZOg>o00010O0010O0010O010O00010O010O00010O010ZAkN_>U1_AnN`>Y110O010O01O01O010O01O01O010O01lN_Ag0a>VOcAj0]>SOeAm0[>POiAP1c>0010O0010O0010O0001M2M4M2M4M2N2O20O010O0010OO1N3L3N3L3N2N3N101O0N2M4Gl@BV?;:M2N3L3Nebb2"}, "image_id": 433, "id": 7174}, {"iscrowd": 0, "category_id": 1, "bbox": [123.0, 416.0, 52.0, 55.0], "area": 1711, "segmentation": {"size": [512, 512], "counts": "Vnm13k?2M4M2M3M4M2M4M2M3N3L3N3L3N2M4O010O01O01O010O01O01O010O01O01O010O01O01OO2L3N3L3M3O2O0O2GPA]OS?`08N3L3N3L3N2MeRX5"}, "image_id": 433, "id": 7175}, {"iscrowd": 0, "category_id": 1, "bbox": [444.0, 421.0, 68.0, 65.0], "area": 2410, "segmentation": {"size": [512, 512], "counts": "Z^n61m?3Z@MZ?6c@NY?5d@NY?>M4L3N210O001N1M3N0O001O3M3O2_AdN]>_10010O00010O00010O00010O00010O00gNaAQ1`>kNcAV1d>O00010O00010oNZAf0f>XO]Ag0c>VO`Ak0j>010O00010O00010O00010O00010O00010O01O01O01OgA"}, "image_id": 433, "id": 7176}, {"iscrowd": 0, "category_id": 1, "bbox": [422.0, 471.0, 55.0, 41.0], "area": 1312, "segmentation": {"size": [512, 512], "counts": "`_c61=1Q?1l@2g>MXA4N1g>OWA300g>OWA3O1h>j0M31O001O00001O001O001O00001O001O00001O001O00001O001O3M2N2_Oh@6Z?Ei@;_?O00001O001O00001O001O001O00001O001N1NXPa0"}, "image_id": 433, "id": 7177}, {"iscrowd": 0, "category_id": 1, "bbox": [228.0, 472.0, 79.0, 40.0], "area": 2204, "segmentation": {"size": [512, 512], "counts": "d_b33k?3L3N2M4M2M4M2M301O0010O0010O0010O0010O001L300001O001O00001O001O00N2N2O11O00001O001O00N2N2M3N2N21O00001O001O00001O001O00001O001O00001O001O000GRA@o>:WAEj>7YAIg>5[AHh>5d0M2MoPV3"}, "image_id": 433, "id": 7178}, {"iscrowd": 0, "category_id": 1, "bbox": [104.0, 478.0, 71.0, 34.0], "area": 1505, "segmentation": {"size": [512, 512], "counts": "o_d11l?3N2M3N2M3N2M3FAQAa0n>90000O100O10000O10000O11O001O001O00001O001O001O00001O001O001O00001O001O001O00001O001O00001O001O001O0Ic@K]?2f@N[?Og@1Y?Mj@2b?01O001O00001O00QPX5"}, "image_id": 433, "id": 7179}, {"iscrowd": 0, "category_id": 1, "bbox": [365.0, 465.0, 95.0, 47.0], "area": 2284, "segmentation": {"size": [512, 512], "counts": "oof51n?1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1001O1O001O1O1O1O1O001O1O1O1O1O001O1O1O1O1O001O1O1O1O1O001O1O1O1O1O001O1O1O1O1O001O1O1O1O1O001O1O1O1O1O001O1O1O1OQ`i0"}, "image_id": 435, "id": 7180}, {"iscrowd": 0, "category_id": 1, "bbox": [120.0, 0.0, 37.0, 20.0], "area": 659, "segmentation": {"size": [512, 512], "counts": "PPl12n?0X@8X?800000000000001O0000000000000000001O00000000N20000001O000000000000N2FZ`a5"}, "image_id": 
437, "id": 7181}, {"iscrowd": 0, "category_id": 1, "bbox": [163.0, 0.0, 43.0, 32.0], "area": 953, "segmentation": {"size": [512, 512], "counts": "_`a21f?9J606J1O00000000000000001O000n@YOk>n0000001O0000000000K5O1000000000000001OF:H80000000000MSPi4"}, "image_id": 437, "id": 7182}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 1.0, 9.0, 22.0], "area": 102, "segmentation": {"size": [512, 512], "counts": "1f0\\?N2M2Lb@D`?:5M3M2O2Mb_k7"}, "image_id": 437, "id": 7183}, {"iscrowd": 0, "category_id": 1, "bbox": [291.0, 18.0, 86.0, 45.0], "area": 2592, "segmentation": {"size": [512, 512], "counts": "daa43]?`0000000E;00000000000000000000000000000000I8O000000000000000001O000000000000007I000000000000000000000000000000000000000007I000000000N22OO01O00010N1M3010O000010O00010O00gN^AT1g>10O0M3M4K4N210O000M4L3M3M4K4M3Mbn]7"}, "image_id": 437, "id": 7186}, {"iscrowd": 0, "category_id": 1, "bbox": [121.0, 40.0, 80.0, 53.0], "area": 2646, "segmentation": {"size": [512, 512], "counts": "Tbl16b?8H8H9M200000000N20001O01O0000002N0001O00000007I000001O0001O000000PAZOf>Q1O00000001O000001O00000001O000001O00000001OI700010O000000000000010O0O1H8H8H8000001O00000K5Hf^k4"}, "image_id": 437, "id": 7187}, {"iscrowd": 0, "category_id": 1, "bbox": [52.0, 49.0, 50.0, 86.0], "area": 2864, "segmentation": {"size": [512, 512], "counts": "PTj04Q?NdA6g=LQB264e=OPB1X100000001O0000000M3001O000000000001O0000000000@lAoNZ>c0SB]Om=6`BJS[f2"}, "image_id": 437, "id": 7190}, {"iscrowd": 0, "category_id": 1, "bbox": [357.0, 78.0, 43.0, 68.0], "area": 2054, "segmentation": {"size": [512, 512], "counts": "^cb5:Z?V1001TOZAXOfAh0i>001O000000000001O000N21O00000000000001O0E;DelS5"}, "image_id": 437, "id": 7193}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 139.0, 1.0, 6.0], "area": 6, "segmentation": {"size": [512, 512], "counts": "[46_ko7"}, "image_id": 437, "id": 7194}, {"iscrowd": 0, "category_id": 1, "bbox": [6.0, 148.0, 58.0, 46.0], "area": 1745, "segmentation": {"size": [512, 512], "counts": "kU36\\?>M300000000O1B>I700010O00000000000000000H8G9000001O0000000001O0000000000000001O0000000001O0000000000000001O000H8Bfko6"}, "image_id": 437, "id": 7195}, {"iscrowd": 0, "category_id": 1, "bbox": [292.0, 152.0, 77.0, 51.0], "area": 2724, "segmentation": {"size": [512, 512], "counts": "ZUb4;Y?S10000000000001O0001O0000000000000J600000001OL4J60001O0000000000000001O0009G000000001O00000001O00000000000001O000000015J000000000000000000000EUOQBk0b>1O00J6E;DgZd5"}, "image_id": 437, "id": 7197}, {"iscrowd": 0, "category_id": 1, "bbox": [279.0, 213.0, 80.0, 57.0], "area": 2612, "segmentation": {"size": [512, 512], "counts": "eg[41m?3L3N2M4M2M4L3N210O0010O0010O0M3N3L3N3L300O011N10001O0010O00100O2O2M1O01O010O0O1M4O0O101O001O01O01O010O01O01O01O0iN^AQ1b>lNaAS1f>10O00010OO2O0O10N1N3M3N3L3N3L3M3N3L3N3L3NiX\\2"}, "image_id": 437, "id": 7198}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 238.0, 44.0, 60.0], "area": 1854, "segmentation": {"size": [512, 512], "counts": "^7h1Y>O0000000001O000001O0000000001O0001O000000000001OUOkAKU>MSB3n=C[B=e=ZOeBe0[>0000001O00000001O0000000L4G9HegY7"}, "image_id": 437, "id": 7199}, {"iscrowd": 0, "category_id": 1, "bbox": [53.0, 248.0, 59.0, 70.0], "area": 2391, "segmentation": {"size": [512, 512], "counts": "_ij06b?8L5O000001OI7H8H8H9O0001OL4H8N20001O0000000001O0001O0000000001OPBRNn=P2000YORB\\On=0001O01O0000000000010O00000000M3H8HQgW6"}, "image_id": 437, "id": 7200}, {"iscrowd": 0, "category_id": 1, "bbox": [132.0, 263.0, 42.0, 78.0], "area": 2288, "segmentation": {"size": [512, 512], "counts": 
"ZYR21d?;J6000000m@G_>m00000G9G9001OSBaNY=_1\\BlNd=i1O0000000000000001O0001O000009G0000000000001F9E;E;E;E;EhgX5"}, "image_id": 437, "id": 7201}, {"iscrowd": 0, "category_id": 1, "bbox": [255.0, 264.0, 61.0, 63.0], "area": 1951, "segmentation": {"size": [512, 512], "counts": "Qio31m?3L3N3L3N2M4M2M4M2M310O010O00010O010O00010O0`ATOm=l0oAXOP>h0nAZOR>f0kA^OU>b0hA@X>`0eAC\\>Q1O0010O0010O0010O0010O0010O0O1N0O2O2M4M2M3M4M2M4M2M3N3L3N3L3N2M4MZgQ3"}, "image_id": 437, "id": 7202}, {"iscrowd": 0, "category_id": 1, "bbox": [316.0, 291.0, 52.0, 66.0], "area": 1830, "segmentation": {"size": [512, 512], "counts": "TZn41l?4L3M4M2M3O2O00010O00010O010N1SA_OX>d0eA@W>d0fA^OX>d0eA@W>c0fA@W>V1M4M2O110O00010O0010O0010O00010OM3N3L3M3M4BaAWOa>f0bAWOb>f0`AWOc>f0=M4L3N2M4L3M3NefW2"}, "image_id": 437, "id": 7203}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 311.0, 15.0, 25.0], "area": 288, "segmentation": {"size": [512, 512], "counts": "j9f0Y?1000O10O1000O1000O15K5J5L5KfUh7"}, "image_id": 437, "id": 7204}, {"iscrowd": 0, "category_id": 1, "bbox": [216.0, 340.0, 74.0, 57.0], "area": 2351, "segmentation": {"size": [512, 512], "counts": "a[\\32k?3M4M2M4L3N201N1M3N3L3M4N11O010O00010O00010O010O00O2O0ZAlN`>T1^AnNc>X1O00010O00010O010O00010O0003NO010O00010O00010O010OM3M4O00010nNUAm0P?0O00010OO2M2M3M4M2M3M4M2M4Lcd^3"}, "image_id": 437, "id": 7205}, {"iscrowd": 0, "category_id": 1, "bbox": [288.0, 359.0, 62.0, 71.0], "area": 2066, "segmentation": {"size": [512, 512], "counts": "S]`41m?2M3M4L3M3N3L3M3M4M2010O0001N1M3M4M2M3M4L31O010O01O01O01O01O00M000010O0000003N2M3M4L3M3N3L3M4O00010O00010O0N2N3L3M3M4L3N3Ldd`2"}, "image_id": 437, "id": 7206}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 459.0, 14.0, 28.0], "area": 262, "segmentation": {"size": [512, 512], "counts": "\\>k0T?100000O2O4L4L2M10OJe@I[?7:M4LTah7"}, "image_id": 437, "id": 7207}, {"iscrowd": 0, "category_id": 1, "bbox": [17.0, 510.0, 6.0, 2.0], "area": 7, "segmentation": {"size": [512, 512], "counts": "oo81o?0000000O1RPd7"}, "image_id": 437, "id": 7208}, {"iscrowd": 0, "category_id": 1, "bbox": [35.0, 0.0, 63.0, 36.0], "area": 1205, "segmentation": {"size": [512, 512], "counts": "R`a03k?3O1O2N1O2N1O2N1O2N1O1O2N1OO11O1O2N1O1O2N1O2N1O2N00O1O100O1O1O100O1O100O1O100O1O1O100Fj@IW?6k@HV?8k@GU?8n@ES?::O100O1O100O1O100O1O1O100O1O10PP_6"}, "image_id": 438, "id": 7209}, {"iscrowd": 0, "category_id": 1, "bbox": [319.0, 0.0, 47.0, 36.0], "area": 947, "segmentation": {"size": [512, 512], "counts": "P`o41o?2N1O2N1O2N1O2N1O2N1O1O2N1O2N1O2N1O2N1O2N1O2N1O1O0000O1O100O1O2O1N3M2N3N1N3M2O2M2N3N1N2N3N1N\\oX2"}, "image_id": 438, "id": 7210}, {"iscrowd": 0, "category_id": 1, "bbox": [377.0, 0.0, 61.0, 71.0], "area": 2487, "segmentation": {"size": [512, 512], "counts": "a`l52n?1RA0i=3UBOi=3TB0i=3TB0j=2TB0i=3TB0j=2TB0i=3TBOk=3RB0k=3RBOm=3QBMn=6oAJR>8lAGT>S10O010O010O010O03N2M1O010O010O01O010O0]OQBWOo=i0SBTOn=k0UBSOk=m0WBPOj=o0YBiNG1P>V1`BgNa=X1d000O100O101N3N2M3N2M3N2M3N2M3N2M3N1N3N2M3NRoT1"}, "image_id": 438, "id": 7211}, {"iscrowd": 0, "category_id": 1, "bbox": [78.0, 4.0, 97.0, 71.0], "area": 2995, "segmentation": {"size": [512, 512], "counts": "_QW11n?2N3\\@KY?6f@KX?8e@KX?7f@KY?`0M2N3N2M2N3JnNZAS1d>oNZAQ1f>50O0010O0010O00010O002O1N1O010O0010O00010O0010O00010O00010O02O2M1O01O01OK\\AoNe>Q1400010O03M2O2M2O1N00010O0010O002O1N3M2O0O00010O00010O01O01O0100O2N3N1N3M2O2M2N3N2M2N3N1LY@Lj?1[_X5"}, "image_id": 438, "id": 7212}, {"iscrowd": 0, "category_id": 1, "bbox": [505.0, 7.0, 7.0, 20.0], "area": 76, "segmentation": {"size": [512, 512], "counts": "f`l72l?3L3N2M4L3N3H"}, "image_id": 438, "id": 
7213}, {"iscrowd": 0, "category_id": 1, "bbox": [159.0, 39.0, 76.0, 49.0], "area": 2016, "segmentation": {"size": [512, 512], "counts": "fa_21n?2O2M2N3N1N3M2j@Ai>a0UAAh>a0VAAh>b0UA@j>a0TAAj>a0UA@i>m0O1O11O001M2N0101N1O0010O00010O00010O01O01O01O01O102M2N3N1O2O010O010O01O010O01N1N01O01O01O01O2O2M2N3N1N3M2O1N0010O0003N1N3M2Oi]Z4"}, "image_id": 438, "id": 7214}, {"iscrowd": 0, "category_id": 1, "bbox": [342.0, 39.0, 13.0, 15.0], "area": 106, "segmentation": {"size": [512, 512], "counts": "[Q[52n?2M3N1N3M3NO012M3M2O2M2Oa^^2"}, "image_id": 438, "id": 7215}, {"iscrowd": 0, "category_id": 1, "bbox": [354.0, 62.0, 16.0, 18.0], "area": 146, "segmentation": {"size": [512, 512], "counts": "SRa51n?2O2M3M3N1N3N2M02N3N2M3N1N3N2MgmV2"}, "image_id": 438, "id": 7216}, {"iscrowd": 0, "category_id": 1, "bbox": [105.0, 75.0, 68.0, 59.0], "area": 2032, "segmentation": {"size": [512, 512], "counts": "ccd12n?2M3POKPB7n=KQB7m=KPB7n=LPB6n=KPB8m=KPB7n=LPB6n=KPB8m=KQB5o=LoA4Q>OlA2S>1kANV>3hAMX>n0O0101N2N3N1N3N2M2N3NO01O01OLXAQOi>n0ZAPOe>Q15O01O01O01O010O01O2O2M3M2O0O10O01O01O010O01O101N1O102O1O0N3N2M2N3N1N3NP]Y5"}, "image_id": 438, "id": 7217}, {"iscrowd": 0, "category_id": 1, "bbox": [494.0, 76.0, 18.0, 26.0], "area": 326, "segmentation": {"size": [512, 512], "counts": "lRg73k?3L3N2M4M2M4N1010O0010O0010O0010O0`M"}, "image_id": 438, "id": 7218}, {"iscrowd": 0, "category_id": 1, "bbox": [375.0, 81.0, 47.0, 51.0], "area": 1205, "segmentation": {"size": [512, 512], "counts": "mbk52m?2N3N2M3N2M2O2M3M3N1N3N2M3N1N3M3N2M2O2M3000N1N3M3N2M2O2M3N0O0010O02O2M3N1N3M3N2M2O2M3N2M3M2O[l\\1"}, "image_id": 438, "id": 7219}, {"iscrowd": 0, "category_id": 1, "bbox": [430.0, 107.0, 42.0, 47.0], "area": 1254, "segmentation": {"size": [512, 512], "counts": "`Tg61k?4M3M4K4M3M3M4K4M3M4L3O1010O00010O000010O00010O000010O00010O0M3M4K4M3M3M4K4M3M4L3L`lc0"}, "image_id": 438, "id": 7220}, {"iscrowd": 0, "category_id": 1, "bbox": [155.0, 109.0, 82.0, 76.0], "area": 2700, "segmentation": {"size": [512, 512], "counts": "Se]22n?2M2O2M3POFUBLnA3R>0kA1T>m010O02N2O2M3NO10O010O000DdAVO\\>j0eATO[>l0hAROX>m0kAPOU>Q1lAmNT>S1oAkNQ>U1<01O01O010O01O01O010O01O01O010O01O01O0102M3M2O2M3N1N3M01O010O01O01O0100O3M3N1N3N2M2N3N2M2Om[Y4"}, "image_id": 438, "id": 7221}, {"iscrowd": 0, "category_id": 1, "bbox": [251.0, 114.0, 17.0, 17.0], "area": 159, "segmentation": {"size": [512, 512], "counts": "hcm32n?1N3M2O2M2N3NO00010O1O3N1N3M2O2MWli3"}, "image_id": 438, "id": 7222}, {"iscrowd": 0, "category_id": 1, "bbox": [472.0, 114.0, 25.0, 25.0], "area": 421, "segmentation": {"size": [512, 512], "counts": "RT\\74i?3L4M4K4N201O01O0001O01O01O01O0001O01N1L4M4K4M3L]\\7"}, "image_id": 438, "id": 7223}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 126.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "n32Plo7"}, "image_id": 438, "id": 7224}, {"iscrowd": 0, "category_id": 1, "bbox": [395.0, 128.0, 26.0, 26.0], "area": 388, "segmentation": {"size": [512, 512], "counts": "XdU63m?2M3N2M3N2M3N2M2O0O010O010O010O02O2M3N2M3N2M3N2M2O`[]1"}, "image_id": 438, "id": 7225}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 141.0, 43.0, 49.0], "area": 1189, "segmentation": {"size": [512, 512], "counts": "c4[1d>10O1O1O100O2N101N3M2O2M000010O00010O00010O000010O000010O02N2O1N3M2O2M2N3N1N3M2N2O2MR[Z7"}, "image_id": 438, "id": 7226}, {"iscrowd": 0, "category_id": 1, "bbox": [498.0, 147.0, 14.0, 29.0], "area": 252, "segmentation": {"size": [512, 512], "counts": "ZUi71l?3N3L3N3L3N2M4M2M40O00010O[K"}, "image_id": 438, "id": 7227}, {"iscrowd": 0, "category_id": 1, "bbox": [421.0, 
148.0, 28.0, 26.0], "area": 434, "segmentation": {"size": [512, 512], "counts": "ldb61o?2M4M2M3N2M3N2M3N0O10O010O010O010O2O2M3N0O011N3N2M4L3MoZo0"}, "image_id": 438, "id": 7228}, {"iscrowd": 0, "category_id": 1, "bbox": [186.0, 165.0, 93.0, 66.0], "area": 2921, "segmentation": {"size": [512, 512], "counts": "fVm21b?7b@KY?:f@HW?c0M2O2M2N3N2EnNcAT1\\>nNaAU1\\>mNcAR1]>810O010O00010O00010O000100O3N1N3M2OO00010O0010O00010O0010O0010O00010O00012M2O2M2N3N1N3M2O0O00010O02N3N1N1O10O0010O0010O00010O00010O0010O02O2M2N3N2M2N3N1N3M2O2M3NXZd3"}, "image_id": 438, "id": 7229}, {"iscrowd": 0, "category_id": 1, "bbox": [505.0, 178.0, 7.0, 19.0], "area": 69, "segmentation": {"size": [512, 512], "counts": "Qfl71m?3L3N3L3N2M4]J"}, "image_id": 438, "id": 7230}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 180.0, 61.0, 58.0], "area": 2128, "segmentation": {"size": [512, 512], "counts": "Z6917T?=M2O1N3M2N3N1N0010O000010O000010O2N2N3N1O10O2M2N10O01O01O0001O011N1O01O00010O0001O01O00010O2N2O1KUAROn>l04N3M2N3N1N2N3N1N3M2NjYQ7"}, "image_id": 438, "id": 7231}, {"iscrowd": 0, "category_id": 1, "bbox": [444.0, 213.0, 46.0, 43.0], "area": 1182, "segmentation": {"size": [512, 512], "counts": "_Wn63k?2M3N3M2M4M2N3L3N2N3L301O0010O0010O010O0010O0010O010O0010O0010O010OO1N3M2M4M2N3L3N2N3L3N3Moh:"}, "image_id": 438, "id": 7232}, {"iscrowd": 0, "category_id": 1, "bbox": [213.0, 214.0, 76.0, 65.0], "area": 2375, "segmentation": {"size": [512, 512], "counts": "_gZ31n?3M3N1N3N2M3N1N3c@^OW?h0N2M2O2M1O10O03N2M2O2M3M3N1N3N2M03N2M3N1N2OO00010O010O010O0001D^AZOa>g0`AWOa>h0bAVO]>k0eARO\\>m0fAQOZ>o0;10O010O00010O010O010O00010O010O102M3M2O2M3N2M2N3N2M3N1N3N2MdX_3"}, "image_id": 438, "id": 7233}, {"iscrowd": 0, "category_id": 1, "bbox": [492.0, 240.0, 20.0, 50.0], "area": 597, "segmentation": {"size": [512, 512], "counts": "gXf74j?2M3N3L3N3L3N2M4M2M4M2N2M4M2M4M200010O_H"}, "image_id": 438, "id": 7234}, {"iscrowd": 0, "category_id": 1, "bbox": [458.0, 263.0, 25.0, 26.0], "area": 390, "segmentation": {"size": [512, 512], "counts": "hXU72k?3N3L3N3M2M3N30O010O010O00010O010O0O2L3N2N3L3N3McW>"}, "image_id": 438, "id": 7235}, {"iscrowd": 0, "category_id": 1, "bbox": [254.0, 264.0, 81.0, 64.0], "area": 2789, "segmentation": {"size": [512, 512], "counts": "oXo32n?2N3L3N2M4^@C[?c0M3N3MO010O03N2M3FoNaAS1]>oN`AU1\\>:N2M4MO010O10O010O10O3N1N10O010O10O010O010FfAmN[>R1hAlNW>U1kAiNX>S1=N2N2M10O010O010O01000O010O010O01000O010O010O01000O010O03N3L3N2M4M2N2M4M2M3N2MPWh2"}, "image_id": 438, "id": 7236}, {"iscrowd": 0, "category_id": 1, "bbox": [85.0, 275.0, 60.0, 66.0], "area": 2109, "segmentation": {"size": [512, 512], "counts": "QiZ12m?2N3N1N2N3N1N3M2e@@S?h0O1N3M2O2N10010M2O2M2N2N3N1N3M2O2M2N2O2N101O01O0N3M2O1N3M2O2M2N201O001N1N3M2O1N3M2XOo@>T?@n@>S?@o@>S?@o@>\\?N1N3M2N3N1NlUg5"}, "image_id": 438, "id": 7237}, {"iscrowd": 0, "category_id": 1, "bbox": [467.0, 293.0, 26.0, 26.0], "area": 424, "segmentation": {"size": [512, 512], "counts": "diY73k?3L3N3L3N2M4O001O01O010O01O01O010O01OO2L3N3L3N2M4MfV9"}, "image_id": 438, "id": 7238}, {"iscrowd": 0, "category_id": 1, "bbox": [42.0, 304.0, 62.0, 76.0], "area": 2179, "segmentation": {"size": [512, 512], "counts": "X[e02m?3N2M2O2M3M3N1N3N2B[O\\Ah0b>ZO\\Af0c>\\O[Ad0e>_OYAa0g>;ClNoAU1Q>lNnAS1d=mN]B3LQ1f=oN\\B2Ln0i=ROXB3Lk0l=UOUBY1j=iNUBV1l=?O010O010OLSBVNn=j130010O102M3N1N3M3N2M3N1N3N2M3N2M2N3N2M3N0O010O011N3M2O2M3N2M3N1N3NVe[6"}, "image_id": 438, "id": 7239}, {"iscrowd": 0, "category_id": 1, "bbox": [322.0, 307.0, 57.0, 82.0], "area": 2224, "segmentation": {"size": [512, 512], "counts": 
"YZQ53m?4L4]OITA9k>KQA5o>Om@1R??0O1000O5L4L4L4K2OO10O1000O10O1000O10O1000004K5L4L3N0300000000O1L4L3L5L00000O01004L4K5L4L4K5L4L3M4K5LSTR2"}, "image_id": 438, "id": 7240}, {"iscrowd": 0, "category_id": 1, "bbox": [492.0, 325.0, 20.0, 49.0], "area": 584, "segmentation": {"size": [512, 512], "counts": "W[f72k?4M2N3L3N3M2M3N3L3N3M2UAPOc>X1N3M2O20O0010O0iE"}, "image_id": 438, "id": 7241}, {"iscrowd": 0, "category_id": 1, "bbox": [271.0, 333.0, 50.0, 48.0], "area": 1545, "segmentation": {"size": [512, 512], "counts": "fjW42n?4L3M4K5L2N003L5L4L3L5L2N000O10O1000O01000O10O1000O10O10O10O1000O10N5L5L3M4K100000O0102N3L5L4L3M4KcTo2"}, "image_id": 438, "id": 7242}, {"iscrowd": 0, "category_id": 1, "bbox": [123.0, 340.0, 57.0, 63.0], "area": 2109, "segmentation": {"size": [512, 512], "counts": "akm12n?2M2O2N2M3N2M2O2M3N2M3N2M2O2M3EoNcAS1[>oNbAT1[>oNcAR1\\>9O010O010J^NnAa1S>aNjA_1V>60O010O10O010O0100O1O101O02N01N2N3N2M2O2M3N2M3N1[OSA4P?JRA4P?IRA5Q?HRA5Q?IQA5_?M3NQdU5"}, "image_id": 438, "id": 7243}, {"iscrowd": 0, "category_id": 1, "bbox": [59.0, 365.0, 68.0, 54.0], "area": 2068, "segmentation": {"size": [512, 512], "counts": "Slm01o?3M2M4M3L3N3M2M4M3M1N100O01000O0100O0100O01000O0100O0100O01000O010LSOUAl0k>5000O02O3M2M4M3L3N2N0O01000O010003L4M2M4M3M2M4M2M4M3M2M4M3M2M4M3LYSP6"}, "image_id": 438, "id": 7244}, {"iscrowd": 0, "category_id": 1, "bbox": [290.0, 398.0, 46.0, 45.0], "area": 1478, "segmentation": {"size": [512, 512], "counts": "c\\a47i?7I6JO0106J7I7I1O000O10000000O1000O1000000000O10O10000000N2O1000O100002N7H8I7M4H7IO0100000]bg2"}, "image_id": 438, "id": 7245}, {"iscrowd": 0, "category_id": 1, "bbox": [332.0, 400.0, 46.0, 36.0], "area": 1250, "segmentation": {"size": [512, 512], "counts": "k\\V55k?1N1K412N7I7H5L0000O100000O100000O100000O100000O100000O100000O100000O100000O1000005K7I7I7HgbR2"}, "image_id": 438, "id": 7246}, {"iscrowd": 0, "category_id": 1, "bbox": [160.0, 411.0, 57.0, 64.0], "area": 2329, "segmentation": {"size": [512, 512], "counts": "X]`25k?a0_O3M006Ib0_O3M000000O100000E;0000000000000000O10000000O1000003Ma0_O;E00006J0002N000000000000M30000000000000O1003Ma0_O]Qc4"}, "image_id": 438, "id": 7247}, {"iscrowd": 0, "category_id": 1, "bbox": [96.0, 423.0, 50.0, 56.0], "area": 1942, "segmentation": {"size": [512, 512], "counts": "c]`1:f?;E6J0000O01000000MVOPAj0P?300O10000000O10000000000003L=D4L0K`NjA`1V>500000000000O100000O10000000000009G;D=D:FO10000000000000000000000009G7I0000000000000O100O1000000eCDWiCDV<6mAMR>Q110O1N1N3N1N300O01O0N3N2M2aNoAm0S>QOoAl0S>ROoAl0T>ROnAl0T>QOoAl0S>ROoAl0T>ROnAl0S>ROPBk0d>O2M2N3N2M2N3N1N3N2M2N3N]mg5"}, "image_id": 440, "id": 7258}, {"iscrowd": 0, "category_id": 1, "bbox": [135.0, 0.0, 8.0, 4.0], "area": 20, "segmentation": {"size": [512, 512], "counts": "P`S21o?2N1O00O1O100OQ`h5"}, "image_id": 440, "id": 7259}, {"iscrowd": 0, "category_id": 1, "bbox": [224.0, 0.0, 31.0, 19.0], "area": 392, "segmentation": {"size": [512, 512], "counts": "XP`32k?3N3L30001O001O001O00001O001O00001O001O00001OO1N2M3N2N2M3N2MS`P4"}, "image_id": 440, "id": 7260}, {"iscrowd": 0, "category_id": 1, "bbox": [279.0, 0.0, 39.0, 32.0], "area": 708, "segmentation": {"size": [512, 512], "counts": "[`[41m?2\\@1T?2j@0T?3i@OU?3i@0T??k@WOS?k00001O001O001O001OO1N2M3N2N2O11O000000N2M3N2N2N2N2M3N2O1001O00O1NRPQ3"}, "image_id": 440, "id": 7261}, {"iscrowd": 0, "category_id": 1, "bbox": [337.0, 0.0, 43.0, 49.0], "area": 1291, "segmentation": {"size": [512, 512], "counts": "UaX52k?4M2N2M4M2N3L3N3FXOXAj0f>YOVAj0g>:M2N3O001O00001O001O001O0000O1N2M3N2N3L3N3M2M4M2N2M4M2M4M2N3L3NloQ2"}, "image_id": 440, "id": 
7262}, {"iscrowd": 0, "category_id": 1, "bbox": [196.0, 2.0, 25.0, 29.0], "area": 425, "segmentation": {"size": [512, 512], "counts": "gPR32k?4M2N2N3L3N3M2M4M201O01O01O01N1N3L3N3M2N2M4M2N3Lk_a4"}, "image_id": 440, "id": 7263}, {"iscrowd": 0, "category_id": 1, "bbox": [380.0, 13.0, 46.0, 57.0], "area": 1474, "segmentation": {"size": [512, 512], "counts": "iQn52l?3L3N3M2M3N3L3N3L3N2M4M2M4M2M3N3M2M4O00010O010O00010OO2M2M3N3L3N3L3N2M4M2M4M2M3N3M2M4M2M4MZoZ1"}, "image_id": 440, "id": 7264}, {"iscrowd": 0, "category_id": 1, "bbox": [139.0, 20.0, 22.0, 23.0], "area": 262, "segmentation": {"size": [512, 512], "counts": "j`U21o?2M2N3N2M2O2M2N3N1N3N02M2O2M2O2M3M2O2M2N3Nl^_5"}, "image_id": 440, "id": 7265}, {"iscrowd": 0, "category_id": 1, "bbox": [439.0, 32.0, 46.0, 60.0], "area": 1583, "segmentation": {"size": [512, 512], "counts": "abk61l?3N3M2M3]OF^A=_>E^A>_>F^A=_>E_A=_>F]A=`>F^A=_>c0N3L3O20O0010O010O0010O0010O01N1N2N3L3N3M2M3N3M2M4M2N3L3N2M4M2N3L3N3Md^="}, "image_id": 440, "id": 7266}, {"iscrowd": 0, "category_id": 1, "bbox": [240.0, 35.0, 26.0, 28.0], "area": 433, "segmentation": {"size": [512, 512], "counts": "fQh32k?4M2N2N3L3N3M2M4O0010O00010O010O0N3M2N2M4M2N3M2M4Mhnj3"}, "image_id": 440, "id": 7267}, {"iscrowd": 0, "category_id": 1, "bbox": [19.0, 41.0, 53.0, 54.0], "area": 1469, "segmentation": {"size": [512, 512], "counts": "ga91o?2M3M2O2M3N1N3N2M2N3N2M3N1N3M3N1N3N2M2N3N2M2O2M0101N3N1N3M3N2M2O2M3N1N2N010O011N3M3N1N3N2M2N3N2M2O2M3M2Ocmk6"}, "image_id": 440, "id": 7268}, {"iscrowd": 0, "category_id": 1, "bbox": [332.0, 45.0, 32.0, 30.0], "area": 580, "segmentation": {"size": [512, 512], "counts": "oQV53j?3N2M4M2N3L3N3N100010O010O0010O0010O010O00010O0M4M2M4M2N2M4M2M]nY2"}, "image_id": 440, "id": 7269}, {"iscrowd": 0, "category_id": 1, "bbox": [483.0, 60.0, 29.0, 59.0], "area": 1147, "segmentation": {"size": [512, 512], "counts": "\\ca71m?2N3L3N3M2M3N3L3N3WO[OSBg0l=BiAb0V>d00001O001O0O101O001O001O000OO2N2MWN"}, "image_id": 440, "id": 7270}, {"iscrowd": 0, "category_id": 1, "bbox": [172.0, 63.0, 104.0, 57.0], "area": 2740, "segmentation": {"size": [512, 512], "counts": "hRf23k?2M4M2N2N3L3N3M2M4M2N210O010O010O00010O010O010O010O00010O010O010O00010ON3M201O010O01O010O0N2O2O010O0010O0010O010O0010O010O0010O010O0010O0010O01O0M3N30O01O010O01O010O010O01O010O01O010O01O010M2N2M4M2N3M2M4M2Nole3"}, "image_id": 440, "id": 7271}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 75.0, 21.0, 41.0], "area": 488, "segmentation": {"size": [512, 512], "counts": "\\2X1h>O011N2O2M2N3N2M2N3N2M2O2N110M3M2O2JZ@Mi?14Nj\\e7"}, "image_id": 440, "id": 7272}, {"iscrowd": 0, "category_id": 1, "bbox": [347.0, 82.0, 16.0, 20.0], "area": 173, "segmentation": {"size": [512, 512], "counts": "Rc]51m?2M4M2M3N3L3O20O001L3N2M4M2M4M\\]Z2"}, "image_id": 440, "id": 7273}, {"iscrowd": 0, "category_id": 1, "bbox": [406.0, 82.0, 27.0, 41.0], "area": 537, "segmentation": {"size": [512, 512], "counts": "fS[61h?0\\@2a?2\\@1]?Ke@=Y?Ee@=X?8M2N3M2N3L1000O3N3M2N3N100M4M2N3M2M4M2N3M2NZ]W1"}, "image_id": 440, "id": 7274}, {"iscrowd": 0, "category_id": 1, "bbox": [379.0, 83.0, 32.0, 37.0], "area": 664, "segmentation": {"size": [512, 512], "counts": "_cm51m?3L3N3M2M3N3M2M4M2N3L3N210O010O0010O001M2N3L3N2N3L3N3M2M4M2N2MX]b1"}, "image_id": 440, "id": 7275}, {"iscrowd": 0, "category_id": 1, "bbox": [25.0, 103.0, 32.0, 36.0], "area": 552, "segmentation": {"size": [512, 512], "counts": 
"dc<1n?3N1N3M2O2M3N1N3M2010OJ^Om@`0T?Bl@>NAQA1l>OWANj>4VAIj>:XABh>?XA_Oj>b0VA[Om>b0UA]Ol>a0k0QAWOo>m010O0010O010O0010O0010O0010O0010O010O0010O0010O0010O0010O010O0010O0010M2M4M2M4M2N2M4M2N3L3Nckn1"}, "image_id": 440, "id": 7277}, {"iscrowd": 0, "category_id": 1, "bbox": [455.0, 112.0, 22.0, 27.0], "area": 356, "segmentation": {"size": [512, 512], "counts": "SdS73k?3L3N3L3N2M4M2N3O01O010O000M4M2N3L3N2M4M2M^\\a0"}, "image_id": 440, "id": 7278}, {"iscrowd": 0, "category_id": 1, "bbox": [87.0, 125.0, 167.0, 94.0], "area": 5564, "segmentation": {"size": [512, 512], "counts": "hd[12a?Oh@4V?Og@4U?Oh@4X?;0O00O2M2M4M2N210O010O00010O010O01O01O010O01O01O010O01O01O010O010OO1N3L301O00010O01N1N3M21O01O010O01O01O010O01O01O010O01O01O010O01OWATO^>m0_AVO`>j0]AYOd>Q1010O01O01O010O0UO[AA`A>a>_OaAa0_>\\OeAd0[>YOgAg0OWO]>j0`AXOa>g0]A[Oc>e0ZA_Of>a0WAAi>l0O01O010O01O010O01O0mNXAo0g>oN\\AP1j>10O00010O010O00010O010O00010O010O00010O010N1N2NKVOYAf0g>]OYAa0g>BYA:g>IVA8j>=010O02O3L3N3L3N2M4M2M40O0010O0010M2M4M2M3O2O0N3M2M3N3L3N3L3N2M4M2M4M2M4M2M[kP4"}, "image_id": 440, "id": 7279}, {"iscrowd": 0, "category_id": 1, "bbox": [510.0, 131.0, 2.0, 5.0], "area": 6, "segmentation": {"size": [512, 512], "counts": "VTo71l?4lK"}, "image_id": 440, "id": 7280}, {"iscrowd": 0, "category_id": 1, "bbox": [26.0, 142.0, 30.0, 27.0], "area": 535, "segmentation": {"size": [512, 512], "counts": "eT=1n?4M3M4K4M3M3L100000O01000O01000O0100000O01000O3N3M4K4M3M4LojS7"}, "image_id": 440, "id": 7281}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 171.0, 31.0, 43.0], "area": 819, "segmentation": {"size": [512, 512], "counts": "e5Q1n>1O010O01O01O01O010O01O01O01O0101N2N3N2M2N3N2M2O2M2N3N2M2O2MPZ`7"}, "image_id": 440, "id": 7282}, {"iscrowd": 0, "category_id": 1, "bbox": [350.0, 179.0, 134.0, 71.0], "area": 3922, "segmentation": {"size": [512, 512], "counts": "hV_53k?2N3M2M3N3M2N3M2N3L3O20O010O010O0010O0010O010O010M2N3M2N3M2010M2M4M2N3M2010O010O00010O010O010O010O010N1O20O010O000PATOm>P1O010O010OUOTAb0m>[OUAf0j>XOYAg0P?10O010O0010O0010O010O010O01O0M4M2N2N3M2N3L3N3M2N30O0N3M2N3M2N3L3N0002M3N3M2N0000O01001O3M2N3L3N3O01O010O01O010O010M2N3M2N3L31O01Gb@L`?2b@La?18NTj="}, "image_id": 440, "id": 7283}, {"iscrowd": 0, "category_id": 1, "bbox": [36.0, 183.0, 41.0, 33.0], "area": 671, "segmentation": {"size": [512, 512], "counts": "XVb03m?2M2N3N2M2O2M3M2OO0010O0010O0010O0010O0010O0010O0010O00111O01N2O0O2M3N1N3M3N1N3NfYi6"}, "image_id": 440, "id": 7284}, {"iscrowd": 0, "category_id": 1, "bbox": [115.0, 188.0, 19.0, 17.0], "area": 190, "segmentation": {"size": [512, 512], "counts": "Sfi14j?2N3L31O01O010O01O01O010O01O01OM4M2MQjl5"}, "image_id": 440, "id": 7285}, {"iscrowd": 0, "category_id": 1, "bbox": [128.0, 215.0, 34.0, 31.0], "area": 564, "segmentation": {"size": [512, 512], "counts": "cWP21m?2N2M4M2N3OO0N12N2M4M2N3L3N3O00N3M2N2O20O010O010O00O2M2M4M2N3M2M4MSi^5"}, "image_id": 440, "id": 7286}, {"iscrowd": 0, "category_id": 1, "bbox": [8.0, 220.0, 53.0, 63.0], "area": 1596, "segmentation": {"size": [512, 512], "counts": "XX42m?2M3N2N2N1O2N2N2N2N2N2]OYOiAj0U>XOiAj0U>XOiAj0U>WOjAk0S>XOkAi0T>YOjAi0T>YOjAi0T>YOjAi0T>a000000000002N2M3N2N2N2N2N1O2N2N2N2M3N2N2N2N2N2N1O2N2N2M3N2N2N2N2N\\XQ7"}, "image_id": 440, "id": 7287}, {"iscrowd": 0, "category_id": 1, "bbox": [507.0, 222.0, 5.0, 10.0], "area": 30, "segmentation": {"size": [512, 512], "counts": "Rgm72m?2N2N2N2QI"}, "image_id": 440, "id": 7288}, {"iscrowd": 0, "category_id": 1, "bbox": [101.0, 226.0, 30.0, 33.0], "area": 583, "segmentation": {"size": [512, 512], "counts": 
"jgb12l?2M4M2N2M4M2M4M2M4M20010O0010O0010O01O0N2M4M2M4M2M3N3M2M4MhXn5"}, "image_id": 440, "id": 7289}, {"iscrowd": 0, "category_id": 1, "bbox": [164.0, 241.0, 47.0, 61.0], "area": 1537, "segmentation": {"size": [512, 512], "counts": "SYb21l?3N2M4M2N3L3N3M2M3N3L3N3M2M4M2N2M4M2M4M2N2010O010O0O2M2M3N3M2M4M2M3N3M2M4M2N3L3N2M4M2N3L3N3MVXf4"}, "image_id": 440, "id": 7290}, {"iscrowd": 0, "category_id": 1, "bbox": [54.0, 257.0, 52.0, 57.0], "area": 1565, "segmentation": {"size": [512, 512], "counts": "ZYk02l?3L3N3L3N3M2O1010O01O01O010ON3M2M3N3L3N3M2M3N3L3N3L30010O0010O010O0N2M4M2M4M2M4M2M3N3L3N3L3N2N3L3N3L3NggZ6"}, "image_id": 440, "id": 7291}, {"iscrowd": 0, "category_id": 1, "bbox": [331.0, 259.0, 55.0, 44.0], "area": 1404, "segmentation": {"size": [512, 512], "counts": "lhU52l?2M4M2M4M2M3M4M2M4M21O01O010O00010O01O01O010O01O01O010O01O01M200010O010O00010O010O00010L3N3L3N2M4L3N2M4M2M4M`gn1"}, "image_id": 440, "id": 7292}, {"iscrowd": 0, "category_id": 1, "bbox": [464.0, 259.0, 21.0, 19.0], "area": 190, "segmentation": {"size": [512, 512], "counts": "YXX71n?2M3N1O2N2O1O10O10000000O10O100N2N1O2M3NcW="}, "image_id": 440, "id": 7293}, {"iscrowd": 0, "category_id": 1, "bbox": [394.0, 261.0, 32.0, 31.0], "area": 639, "segmentation": {"size": [512, 512], "counts": "iXU63j?3M4L3M3M4L3N201O01O010O00010O00010O00010O00010O0N2M4L3M3M4L3MfgZ1"}, "image_id": 440, "id": 7294}, {"iscrowd": 0, "category_id": 1, "bbox": [484.0, 292.0, 16.0, 15.0], "area": 138, "segmentation": {"size": [512, 512], "counts": "[Yb72m?2M2O2M3N101O10O10O01M3N2M2O2Ngf5"}, "image_id": 440, "id": 7295}, {"iscrowd": 0, "category_id": 1, "bbox": [510.0, 292.0, 2.0, 5.0], "area": 7, "segmentation": {"size": [512, 512], "counts": "WYo72k?3lF"}, "image_id": 440, "id": 7296}, {"iscrowd": 0, "category_id": 1, "bbox": [96.0, 301.0, 29.0, 32.0], "area": 572, "segmentation": {"size": [512, 512], "counts": "TZ`12l?2M4M2M3N3L3N3L3M301O010O01O01O010O01O0O1M4M2M4M2M3N3L3N^VQ6"}, "image_id": 440, "id": 7297}, {"iscrowd": 0, "category_id": 1, "bbox": [273.0, 306.0, 64.0, 41.0], "area": 1545, "segmentation": {"size": [512, 512], "counts": "]jX41k?4M3M4K4M3M4K4M310O0001O01O00010O00010O0001O01O00010O00010O00010O0001O01O00010O00010OM3M4L31O00010O00010O00010O0000O2L3L4M4L3MPVg2"}, "image_id": 440, "id": 7298}, {"iscrowd": 0, "category_id": 1, "bbox": [124.0, 309.0, 61.0, 35.0], "area": 1482, "segmentation": {"size": [512, 512], "counts": "SZn1d0V?61O0000000000000000000000000000000001O00000000000000000000000O100O100O10O010O010O010O010O010O011N2OO03N2M4M2M3N2M3N2K5NgUS5"}, "image_id": 440, "id": 7299}, {"iscrowd": 0, "category_id": 1, "bbox": [467.0, 311.0, 45.0, 47.0], "area": 1379, "segmentation": {"size": [512, 512], "counts": "bjY72l?3M2M4M2N3M2N3M2N2N3M2M4N110O010O010O010O0010O010O00N3M210O010O010O010O010OO2M2N3L3N2N3MQF"}, "image_id": 440, "id": 7300}, {"iscrowd": 0, "category_id": 1, "bbox": [364.0, 312.0, 18.0, 24.0], "area": 240, "segmentation": {"size": [512, 512], "counts": "ZZf52l?2M4M2M4M2N2M4N1010N1M3N3M2M4M2M3NWfP2"}, "image_id": 440, "id": 7301}, {"iscrowd": 0, "category_id": 1, "bbox": [431.0, 313.0, 20.0, 23.0], "area": 233, "segmentation": {"size": [512, 512], "counts": "nig62n?2M3N1N3M3N1N3N2M21O0O2M3M2O2M3M2O2M3NfUn0"}, "image_id": 440, "id": 7302}, {"iscrowd": 0, "category_id": 1, "bbox": [409.0, 343.0, 42.0, 53.0], "area": 1436, "segmentation": {"size": [512, 512], "counts": "Rl\\61l?3M3M4L3L4M4L3M3M4K4M3M3M4M200010O00010O00010O0001O01O00010O000M4L3M3M4AUADn>9VABn>;>L3M3LUUn0"}, "image_id": 440, "id": 7303}, {"iscrowd": 0, "category_id": 
1, "bbox": [323.0, 345.0, 30.0, 25.0], "area": 462, "segmentation": {"size": [512, 512], "counts": "XkQ53k?3L3N2M4M2M4O00010O0010O0010O0010O000OO00O3N3L3N2M4N110M2MWU_2"}, "image_id": 440, "id": 7304}, {"iscrowd": 0, "category_id": 1, "bbox": [253.0, 349.0, 52.0, 55.0], "area": 1360, "segmentation": {"size": [512, 512], "counts": "^kn31n?2N3M2O1N2N3M2N2O2M2N2N2O2M2N2N2N3N1N2N2N3N11O0001O01O01O00N3N1N2N3M2N2O1N3M2N2N2O2M2N2N2N3N1N2N2N3N1NoSW3"}, "image_id": 440, "id": 7305}, {"iscrowd": 0, "category_id": 1, "bbox": [350.0, 358.0, 50.0, 54.0], "area": 1467, "segmentation": {"size": [512, 512], "counts": "P\\_52k?4M2N2M4M2M4M2M3N3L3O20O00010O010OYASO[>m0cAVO\\>k0`AXOa>g0]A\\Ob>Q11O01O010O01O01O010O010O00O2L3N3L3N2M4M2M4M2M3N3L3N3M2M3N]dg1"}, "image_id": 440, "id": 7306}, {"iscrowd": 0, "category_id": 1, "bbox": [307.0, 391.0, 25.0, 24.0], "area": 348, "segmentation": {"size": [512, 512], "counts": "gli41m?3L3N2N3L3N3M21O010O01O010O01O01O01M2N3M2M3N3M2Meci2"}, "image_id": 440, "id": 7307}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 412.0, 81.0, 68.0], "area": 2790, "segmentation": {"size": [512, 512], "counts": "l0iA_Nm=a1QBaNn=a1oAbNQ>f1010O010O010XOnA@R>>PBCP>:SBEm=9UBHk=5XBJh=4ZBJi=3ZBJh=4ZBJi=3ZBJh=4ZBLg=1\\BIWOO\\>6_BHUO5]>0bB3]=KcB7^=FbB=]=AbBb0\\>3N1010O010M2M4L310O0100O001M3M2N3M3M2N3M2M3N3M2N2N3M2N2M3O2O01O001M2M3N3M2N3L3N2N3M2N0O103M2N3M2M4M2N3M2N2M4MdRg6"}, "image_id": 440, "id": 7308}, {"iscrowd": 0, "category_id": 1, "bbox": [147.0, 445.0, 50.0, 57.0], "area": 1695, "segmentation": {"size": [512, 512], "counts": "[oY22l?2M3YOKaA8[>LaA7]>KaA8[>LaA7]>K`A8]>KaA8\\>g0M40O00010O0010O0010O0010O0010O00001L3N3L3N2M4M2M4M21O010O01O01OO2L3N2M4M2M4M2M3MgQm4"}, "image_id": 440, "id": 7309}, {"iscrowd": 0, "category_id": 1, "bbox": [123.0, 461.0, 30.0, 33.0], "area": 599, "segmentation": {"size": [512, 512], "counts": "Tom12l?2M4M2M4M2M4M2N2M4N110O00010O010O00010O01N1N3L3N2K6M2M4M2M^Qc5"}, "image_id": 440, "id": 7310}, {"iscrowd": 0, "category_id": 1, "bbox": [504.0, 463.0, 8.0, 24.0], "area": 123, "segmentation": {"size": [512, 512], "counts": "c^l725Ob?4\\@Nb?3\\@0a?9N3N2M3N1`A"}, "image_id": 440, "id": 7311}, {"iscrowd": 0, "category_id": 1, "bbox": [465.0, 476.0, 47.0, 36.0], "area": 976, "segmentation": {"size": [512, 512], "counts": "doX71o?2M3N1N3M3N2M100O1O100O1J_Om@a0R?Bk@?T?700O100O1O100O100O1MROTAn0k>4001O2N1O2N2N2N1O2N2N1O2N2N2N1O2N2N1O2N2N"}, "image_id": 440, "id": 7312}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 491.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "[oo71d0"}, "image_id": 440, "id": 7313}, {"iscrowd": 0, "category_id": 1, "bbox": [239.0, 498.0, 32.0, 14.0], "area": 265, "segmentation": {"size": [512, 512], "counts": "nog32l?2M3N2N2M3001O001O001O00001O001O00001O001O00001O001O00001O001NTPh3"}, "image_id": 440, "id": 7314}, {"iscrowd": 0, "category_id": 1, "bbox": [124.0, 505.0, 16.0, 7.0], "area": 61, "segmentation": {"size": [512, 512], "counts": "o_n11o?0O100O100O100O100O100O11O2N2NR`i5"}, "image_id": 440, "id": 7315}, {"iscrowd": 0, "category_id": 1, "bbox": [65.0, 506.0, 15.0, 6.0], "area": 49, "segmentation": {"size": [512, 512], "counts": "noP12k?3O11O001O00001O001O00001O00Q`g6"}, "image_id": 440, "id": 7316}, {"iscrowd": 0, "category_id": 1, "bbox": [31.0, 45.0, 71.0, 185.0], "area": 7955, "segmentation": {"size": [512, 512], "counts": "Rf?001O00L4@b]Q3"}, "image_id": 442, "id": 7319}, {"iscrowd": 0, "category_id": 1, "bbox": [213.0, 62.0, 38.0, 43.0], "area": 1487, "segmentation": {"size": [512, 512], "counts": 
"nbZ3:o>g0G9000000000000000000000000000000010O0000000000000000000000000000c0]Oc0]Oc000000000000000000010O000000000000000000000000000000J6]Oc0\\Oe0\\Obn[1"}, "image_id": 442, "id": 7321}, {"iscrowd": 0, "category_id": 1, "bbox": [134.0, 65.0, 38.0, 55.0], "area": 1489, "segmentation": {"size": [512, 512], "counts": "^SS21d?;E;i@_Oc>U1E;000000000000000001O01O00000K5M30000001O0001O00000M3^OaA^Od>6hAJX>KSB5l>0001O01OLQmY5"}, "image_id": 442, "id": 7322}, {"iscrowd": 0, "category_id": 1, "bbox": [326.0, 70.0, 44.0, 35.0], "area": 1032, "segmentation": {"size": [512, 512], "counts": "`RS53l?5L3M4L4K4M4L000O1000O0100000O01000O0100000O0100000O01000O0100000O01000O3N3M4L4K4M4L4LRmV2"}, "image_id": 442, "id": 7323}, {"iscrowd": 0, "category_id": 1, "bbox": [481.0, 82.0, 31.0, 35.0], "area": 763, "segmentation": {"size": [512, 512], "counts": "Yc`72^?`001O01O000000000000000017_O9000000000000001O000001OI700ZOo@=Z?000TM"}, "image_id": 442, "id": 7324}, {"iscrowd": 0, "category_id": 1, "bbox": [175.0, 107.0, 15.0, 26.0], "area": 334, "segmentation": {"size": [512, 512], "counts": "Pdg24X?d0O100000000000000000010O00]OWmP5"}, "image_id": 442, "id": 7325}, {"iscrowd": 0, "category_id": 1, "bbox": [18.0, 109.0, 5.0, 6.0], "area": 30, "segmentation": {"size": [512, 512], "counts": "]S96j?0000000c\\d7"}, "image_id": 442, "id": 7326}, {"iscrowd": 0, "category_id": 1, "bbox": [218.0, 114.0, 41.0, 33.0], "area": 1077, "segmentation": {"size": [512, 512], "counts": "QT]3a0_?0000000000000000000000000A?00000000000000000000001O0000000001O0000000000000000\\OQ]n3"}, "image_id": 442, "id": 7327}, {"iscrowd": 0, "category_id": 1, "bbox": [20.0, 116.0, 5.0, 6.0], "area": 30, "segmentation": {"size": [512, 512], "counts": "dS:6j?0000000\\\\c7"}, "image_id": 442, "id": 7328}, {"iscrowd": 0, "category_id": 1, "bbox": [124.0, 117.0, 9.0, 16.0], "area": 144, "segmentation": {"size": [512, 512], "counts": "eSn1`0`?000000000000000[\\m5"}, "image_id": 442, "id": 7329}, {"iscrowd": 0, "category_id": 1, "bbox": [322.0, 122.0, 20.0, 20.0], "area": 362, "segmentation": {"size": [512, 512], "counts": "oSQ5>]?500000010O00000000000000000000000000Cbld2"}, "image_id": 442, "id": 7330}, {"iscrowd": 0, "category_id": 1, "bbox": [342.0, 126.0, 27.0, 17.0], "area": 397, "segmentation": {"size": [512, 512], "counts": "RT[5<`?4000000000000000000001O0010O000000000000000001O000C\\\\W2"}, "image_id": 442, "id": 7331}, {"iscrowd": 0, "category_id": 1, "bbox": [501.0, 137.0, 11.0, 23.0], "area": 238, "segmentation": {"size": [512, 512], "counts": "fdj79Z?=01O000000000000000gK"}, "image_id": 442, "id": 7332}, {"iscrowd": 0, "category_id": 1, "bbox": [418.0, 143.0, 6.0, 8.0], "area": 44, "segmentation": {"size": [512, 512], "counts": "_Ta68h?01O000000`k[1"}, "image_id": 442, "id": 7333}, {"iscrowd": 0, "category_id": 1, "bbox": [126.0, 148.0, 15.0, 11.0], "area": 154, "segmentation": {"size": [512, 512], "counts": "dTo1;e?000000O10000000000000000000\\[i5"}, "image_id": 442, "id": 7334}, {"iscrowd": 0, "category_id": 1, "bbox": [206.0, 149.0, 21.0, 25.0], "area": 482, "segmentation": {"size": [512, 512], "counts": "ZUW33X?e0000000000000000000000000000000000001OM^[^4"}, "image_id": 442, "id": 7335}, {"iscrowd": 0, "category_id": 1, "bbox": [418.0, 152.0, 6.0, 6.0], "area": 36, "segmentation": {"size": [512, 512], "counts": "hTa66j?000000000Xk[1"}, "image_id": 442, "id": 7336}, {"iscrowd": 0, "category_id": 1, "bbox": [190.0, 154.0, 11.0, 18.0], "area": 183, "segmentation": {"size": [512, 512], "counts": "jTo2a0_?01O00000001O0000000U[k4"}, "image_id": 
442, "id": 7337}, {"iscrowd": 0, "category_id": 1, "bbox": [330.0, 155.0, 34.0, 46.0], "area": 1366, "segmentation": {"size": [512, 512], "counts": "kTU5d0\\?001O000m@LZ>m000000000000000000000000000001O000000000000000000000A?Li0[O_jY2"}, "image_id": 442, "id": 7338}, {"iscrowd": 0, "category_id": 1, "bbox": [384.0, 164.0, 35.0, 76.0], "area": 2237, "segmentation": {"size": [512, 512], "counts": "SWP6;V??A?A?A?M300000000000000010O00000000000000000000000000010O0H8A?A?A?AY[^1"}, "image_id": 442, "id": 7339}, {"iscrowd": 0, "category_id": 1, "bbox": [288.0, 165.0, 22.0, 24.0], "area": 507, "segmentation": {"size": [512, 512], "counts": "WU`4e0Y?3O0000000000000000000001O000000000000000GSkT3"}, "image_id": 442, "id": 7340}, {"iscrowd": 0, "category_id": 1, "bbox": [127.0, 175.0, 65.0, 39.0], "area": 2289, "segmentation": {"size": [512, 512], "counts": "keo1i0k><1O00000000000000000000000001O01O000000000000000000000000001O000000000000000001O0000000001O00000000000000000000000000001O00000Egjo4"}, "image_id": 442, "id": 7341}, {"iscrowd": 0, "category_id": 1, "bbox": [469.0, 183.0, 18.0, 19.0], "area": 302, "segmentation": {"size": [512, 512], "counts": "keZ7?]?4000000000000000000000J6000000000YZ<"}, "image_id": 442, "id": 7342}, {"iscrowd": 0, "category_id": 1, "bbox": [212.0, 197.0, 94.0, 43.0], "area": 2832, "segmentation": {"size": [512, 512], "counts": "lVZ39Y?>G900000010O000000000000000000000000010O000000000000000000000000016I00001OXOXA8h>[OeAe0k>0000000000000000000010O00000000F:JO7000000000000010O000000000000000000000000010O000000000000000000000001J5BRjV3"}, "image_id": 442, "id": 7343}, {"iscrowd": 0, "category_id": 1, "bbox": [107.0, 212.0, 9.0, 21.0], "area": 174, "segmentation": {"size": [512, 512], "counts": "Rge16\\??O0000000000000\\iU6"}, "image_id": 442, "id": 7344}, {"iscrowd": 0, "category_id": 1, "bbox": [15.0, 227.0, 23.0, 31.0], "area": 647, "segmentation": {"size": [512, 512], "counts": "eg79VAIh>9VAIh>9VAIh>9VAIh>9VAGj>;TAEm>f000000001O1O1O0000000000000000000000000000000000000001O000001O000000000002N2N2N2N2N2N2N2NfG"}, "image_id": 442, "id": 7353}, {"iscrowd": 0, "category_id": 1, "bbox": [17.0, 286.0, 19.0, 20.0], "area": 197, "segmentation": {"size": [512, 512], "counts": "Ui81n?2N2N2N2N3M2N2O10000000N2N2N2N2N2N3Mhf]7"}, "image_id": 442, "id": 7354}, {"iscrowd": 0, "category_id": 1, "bbox": [50.0, 300.0, 31.0, 25.0], "area": 434, "segmentation": {"size": [512, 512], "counts": "dYi02m?2O1N3M2N2N2O1N2N2N3O000000N00011N001O00001O2N2O1N2N2N2N2N3M\\Vg6"}, "image_id": 442, "id": 7355}, {"iscrowd": 0, "category_id": 1, "bbox": [32.0, 313.0, 30.0, 30.0], "area": 479, "segmentation": {"size": [512, 512], "counts": "XZ`01n?2N2N2N2N2N2N2N2N2N2N2N2N2N2N10O2N2N2N2N2N2N2N2N2N2N2N2N2NkeP7"}, "image_id": 442, "id": 7356}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 325.0, 30.0, 52.0], "area": 826, "segmentation": {"size": [512, 512], "counts": "a:X1f>10O100000O1000O10002N2N1O2M3N2N2N2N1N3N2N2N2N1O2M3N2N2N1O]e`7"}, "image_id": 442, "id": 7357}, {"iscrowd": 0, "category_id": 1, "bbox": [52.0, 337.0, 11.0, 12.0], "area": 73, "segmentation": {"size": [512, 512], "counts": "gZj02m?3M2N2N1O0002N2N2N2N\\UP7"}, "image_id": 442, "id": 7358}, {"iscrowd": 0, "category_id": 1, "bbox": [76.0, 354.0, 29.0, 33.0], "area": 479, "segmentation": {"size": [512, 512], "counts": "[[V11o?1N2N2[@K\\?7b@K\\?7b@K\\?>N2N2N2N201O00000000N2N2N2N2N2O1N3M2N2N2N2N2N\\T[6"}, "image_id": 442, "id": 7359}, {"iscrowd": 0, "category_id": 1, "bbox": [14.0, 367.0, 61.0, 50.0], "area": 1482, "segmentation": {"size": [512, 512], 
"counts": "R\\71n?2N2O1N2N2N2N2N2N2N2N2N2N2N2O100000000000000000001O0O1KSOUAn0i>6N1O0000000000000000000002N2N3M2N2N2N2N2N2N2N2N2N2N2N2N2O1N2N2NnSj6"}, "image_id": 442, "id": 7360}, {"iscrowd": 0, "category_id": 1, "bbox": [99.0, 377.0, 28.0, 29.0], "area": 428, "segmentation": {"size": [512, 512], "counts": "Sla12m?2N3M2N2N2N2N2N2N2N2O100000000000O1N2N2N2N2N2N2N2N2N2NhSP6"}, "image_id": 442, "id": 7361}, {"iscrowd": 0, "category_id": 1, "bbox": [163.0, 404.0, 83.0, 52.0], "area": 2718, "segmentation": {"size": [512, 512], "counts": "ima26\\?>A?H80000000001O000000000000O1K50000000000POZAf0P?01O00000001O00000000000000000001O00000001O000000000000000001O00000:F001O000000000000000001O0000000001OL4I71O00000000O2@\\cT4"}, "image_id": 442, "id": 7362}, {"iscrowd": 0, "category_id": 1, "bbox": [122.0, 409.0, 28.0, 28.0], "area": 406, "segmentation": {"size": [512, 512], "counts": "W]m12m?2N2O1N2N2N2N3M2N2N2N2N0001O00001O2O1N2N2N2N3M2N2N2N2Nmbd5"}, "image_id": 442, "id": 7363}, {"iscrowd": 0, "category_id": 1, "bbox": [66.0, 412.0, 55.0, 56.0], "area": 1649, "segmentation": {"size": [512, 512], "counts": "a]Q11n?2N2N2N2`@IT?9j@IT?9j@IT?9j@IT?c0N2N2N2N3M2N2N2000000O1N2N2N2N2N0000000001O1O2N2N2N2N3M2N2O1N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N\\RS6"}, "image_id": 442, "id": 7364}, {"iscrowd": 0, "category_id": 1, "bbox": [311.0, 477.0, 87.0, 35.0], "area": 1828, "segmentation": {"size": [512, 512], "counts": "nok42l?2M3M3M3N2M3M3M3M3N2M3M3001O00001O00001O00001O001O00001O00001O00001O001O00001O00001O00001O00001O001O00001O00001O00001O00001O001O00001O00001O00001O001O00001O00001O00001N1M3N[`h1"}, "image_id": 442, "id": 7365}, {"iscrowd": 0, "category_id": 1, "bbox": [195.0, 494.0, 78.0, 18.0], "area": 695, "segmentation": {"size": [512, 512], "counts": "ooQ31n?1O1O1O1O1O1O1O1O11O1O00O1O1O1O1O1O11O1O1O1OO1O1O1O1O1O1O1O11O1O1O1O1O1O1O1O00O1O11O1O1O001O1O1O1O1O1O1O1OO1O1O1O1O1O1O1O1O1O1O1O11O1O1O1O1O1O1O1O1O1O1O1OQPg3"}, "image_id": 442, "id": 7366}, {"iscrowd": 0, "category_id": 1, "bbox": [377.0, 0.0, 9.0, 5.0], "area": 27, "segmentation": {"size": [512, 512], "counts": "P`l51o?1O1O001O1OO1O1OQPo1"}, "image_id": 443, "id": 7367}, {"iscrowd": 0, "category_id": 1, "bbox": [452.0, 0.0, 60.0, 91.0], "area": 3423, "segmentation": {"size": [512, 512], "counts": "YQR71n?2N2M3N1O2N2N2M3N2N1O2N2WOXOVBk0h=WOVBk0h=WOVBj0i=XOUBj0j=VOTBm0k=TOSBn0l=SORBo0m=ROQBP1n=QOPBP1P>QOmAR1R>nNmAT1R>>O1O1O001O1O1O1O1O001O1O1O1O1O001O1O1O1O001O1O1O1O1O001O1O1O1O1O001O1O1O"}, "image_id": 443, "id": 7368}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 48.0, 53.0], "area": 1579, "segmentation": {"size": [512, 512], "counts": "0m0S?1O2N1O1O1O1O1O1O1O1O1O2N1O1O1O1O1O1O1O101OO1O1N2N3M2N2N2N2N2N2N2N2O2M2N2N2N2N2N2N2N2N3N1N2N2NSoW7"}, "image_id": 444, "id": 7369}, {"iscrowd": 0, "category_id": 1, "bbox": [344.0, 0.0, 61.0, 34.0], "area": 1235, "segmentation": {"size": [512, 512], "counts": "QP\\53l?2O1O1O1O1O1O1O1\\@Ea?>O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O2N1O00O1O1O1O1O1O1O1O1O1O1O1O1O1001O1O1O1O1O1O00Am@0T?On@OS?0o@NR?1PAMQ?3PAKQ?4QAJP?5>O1O1O2NP`e1"}, "image_id": 444, "id": 7370}, {"iscrowd": 0, "category_id": 1, "bbox": [180.0, 4.0, 56.0, 55.0], "area": 1445, "segmentation": {"size": [512, 512], "counts": "WQj22m?2N2N2N2N2N2N2N3M2N2N2N2N2N2N2N2O1N1O00000000000000000000001O000000000001O1O2N2N2N2N2N2N2N2N2N2N2N2O1N2N2N2N3M[oY4"}, "image_id": 444, "id": 7371}, {"iscrowd": 0, "category_id": 1, "bbox": [160.0, 5.0, 29.0, 30.0], "area": 451, "segmentation": {"size": [512, 512], "counts": 
"bP`22m?2N2N2O1N2N3M2N2N2N2N2N2N2N20N2N2O1N3M2N2N2N2N2N2N2N2N2O\\_Q5"}, "image_id": 444, "id": 7372}, {"iscrowd": 0, "category_id": 1, "bbox": [321.0, 14.0, 21.0, 21.0], "area": 221, "segmentation": {"size": [512, 512], "counts": "f`P51n?2N2N2N2N2N2N2N2000000000O1N2N2N2N2N2N2NWod2"}, "image_id": 444, "id": 7373}, {"iscrowd": 0, "category_id": 1, "bbox": [114.0, 22.0, 45.0, 44.0], "area": 1122, "segmentation": {"size": [512, 512], "counts": "^Qi11n?2N2EMf@5Y?Le@6Y?Le@6Y?Le@7X?:N2N2N2N2N2O1N3M2O1000O1N2N000000000010O02N2N3M2N2N2N2N2O1N2N0000002Ea@2a?M`@1b?M`@1i?Nh^`5"}, "image_id": 444, "id": 7374}, {"iscrowd": 0, "category_id": 1, "bbox": [282.0, 30.0, 12.0, 10.0], "area": 65, "segmentation": {"size": [512, 512], "counts": "SQ]41n?1O2N2O1N2N10O2N2N2N1OPo\\3"}, "image_id": 444, "id": 7375}, {"iscrowd": 0, "category_id": 1, "bbox": [293.0, 31.0, 59.0, 56.0], "area": 1511, "segmentation": {"size": [512, 512], "counts": "`ab41n?2O1N2N2N2N2N2N2N2N2N2N2N2N2N3M2O1N2N200000000000000000001O00000001O00000O1N2N2N1O0000002N2N2N2N2O1N2N2N2N2N2N2N2N2NQn_2"}, "image_id": 444, "id": 7376}, {"iscrowd": 0, "category_id": 1, "bbox": [146.0, 39.0, 28.0, 27.0], "area": 344, "segmentation": {"size": [512, 512], "counts": "hQY21n?2N2N2N2N3N1N2N2N0000000010O00000000001O2O1N3M2N2N2N2NbnX5"}, "image_id": 444, "id": 7377}, {"iscrowd": 0, "category_id": 1, "bbox": [207.0, 44.0, 42.0, 57.0], "area": 1238, "segmentation": {"size": [512, 512], "counts": "SbW32m?3M2N2O1N2N2N2N2N2N3M2m@YOj>i0TAYOj>i0TAYOk>o0N2N2N2N3M2N2N2N2N1O0001O2POgA7Z>GhA9X>EjA9X>EjA9X>EkA8W>FkA8X>FiA8Y>FiA8Y>FiA9X>EjA9X>EjA9P?N2N2N3M\\]S4"}, "image_id": 444, "id": 7378}, {"iscrowd": 0, "category_id": 1, "bbox": [23.0, 46.0, 20.0, 20.0], "area": 211, "segmentation": {"size": [512, 512], "counts": "ha;2m?2N2N3M2N2O1N2N1O000002O1N2N3M2N2N2N2OY^Z7"}, "image_id": 444, "id": 7379}, {"iscrowd": 0, "category_id": 1, "bbox": [63.0, 63.0, 50.0, 55.0], "area": 1405, "segmentation": {"size": [512, 512], "counts": "dbo02n?1N2N2N2N2N2N2N3M2N2N2O1N2N2N2N2N3M2N2N2N2N2O1N2000N2O1O10000000010N1N2N2N2TOVA?l>@UA>m>@UA>m>@UA>m>@UA>m>@UA>Y?B^@:e?N2N2N2N2Nj\\W6"}, "image_id": 444, "id": 7380}, {"iscrowd": 0, "category_id": 1, "bbox": [255.0, 73.0, 54.0, 55.0], "area": 1428, "segmentation": {"size": [512, 512], "counts": "mbo31o?1N2N2N2N2N2N2N3M2N2N2O1N2N2N2N2N2N3M2N2N2N20000001O0001O000000O1N2O1N3M2N2N2N2N2N2N2N2N2O2M2N2N2N2N2N2N2Nf\\U3"}, "image_id": 444, "id": 7381}, {"iscrowd": 0, "category_id": 1, "bbox": [113.0, 75.0, 85.0, 81.0], "area": 2644, "segmentation": {"size": [512, 512], "counts": "Wdh12m?3ELe@6Y?Le@6Y?Le@6Y?Lf@5X?;l@ZOk>h0SAZOk>h0SAZOk>o0O0O00001O2N2O20N1N2N1O00001O0001O02N2N3M2N1O000001O0001O0000EXO_Ah0b>YO\\Ag0d>[OZAf0]>VOeA6Ld0_>YObA5Md0_>YObAQ1^>800010O00000000000000011JaAhNa>V16O1N1O000000001O2N2O1N3M2N2N2N2N2N2O0O00001G`@1b?M`@1b?M`@1i?NSml4"}, "image_id": 444, "id": 7382}, {"iscrowd": 0, "category_id": 1, "bbox": [202.0, 90.0, 30.0, 30.0], "area": 474, "segmentation": {"size": [512, 512], "counts": "XSU32m?3M2N2N2O1N2N3M2N2N2N2N2O0O1O0003M2N2N2N2O1N3M2N2N2N2N2N3Nfl[4"}, "image_id": 444, "id": 7383}, {"iscrowd": 0, "category_id": 1, "bbox": [24.0, 98.0, 47.0, 44.0], "area": 1255, "segmentation": {"size": [512, 512], "counts": "bS<2g?O^@4_?N`@3^?O`@3_?8O10000O1N2N2N3M2N2N2O1N2N2N2O10001O0000O1N000001O01O2N20000000001O0VORA`0o>^OSA`0o>^OSA`0o>^OSA`0X?Bb@7`?Gc@6e?N2N2Nm[l6"}, "image_id": 444, "id": 7384}, {"iscrowd": 0, "category_id": 1, "bbox": [464.0, 105.0, 48.0, 49.0], "area": 1310, "segmentation": {"size": [512, 512], "counts": 
"RTX71n?2N2N2N2O1N2N3M2N2N2N2N2N2N2N2N2N2N2O1N3M2N1O0000000002N2N2N2O1N02N1O0101JWAROk>l06N2N2N2N2N2Be@4]?Je@4]?Jf@3e?NRL"}, "image_id": 444, "id": 7385}, {"iscrowd": 0, "category_id": 1, "bbox": [201.0, 128.0, 55.0, 59.0], "area": 1676, "segmentation": {"size": [512, 512], "counts": "idT32m?2N3M2O1N2N2N2N2N2N2N2N2N3M2N2O1N2N2VAlNe>Y1N2N2N001O2N2N2N3N1O10O1N2N2N2N2N3M2gN]AR1e>lN]AR1k>M2N2N2O1N2N2N2N2N2N2L`@Ec?84N2N2N2Oojo3"}, "image_id": 444, "id": 7386}, {"iscrowd": 0, "category_id": 1, "bbox": [162.0, 132.0, 27.0, 27.0], "area": 375, "segmentation": {"size": [512, 512], "counts": "YTa21n?2U@Od?3Z@Oe?7N2N3N10000N2N2N2000001ON2O1N2N3M2N2N2N2N2N2N2O\\[Q5"}, "image_id": 444, "id": 7387}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 146.0, 17.0, 51.0], "area": 558, "segmentation": {"size": [512, 512], "counts": "d4_1`>3M03M2N2N2N2N2N2N2N2VOQA401P?ISA3O2P?ISA3O2Y?Mh@1Z?Mh@1Z?Mh@1oig7"}, "image_id": 444, "id": 7388}, {"iscrowd": 0, "category_id": 1, "bbox": [24.0, 148.0, 86.0, 81.0], "area": 2659, "segmentation": {"size": [512, 512], "counts": "aV<1n?2N2FMd@5Z?Md@6Y?Le@6Y?:N2l@ZOk>h0SAZOl>g0RA[Ol>n0N0000002N2N2000O1N2N1O001O000000101N2N2N2N1O1O00000001O00000FWO^Ai0b>YO\\Ag0d>[OZAe0^>XOcA6Mb0_>ZObA6Mc0_>XObAS1^>oNaAP1_>8000001O0000000001O2N2N2N2L2000000000002N2N2O1N2N2N3M2N2N2N1O0010O01Eb@3`?Kb@3`?Kb@3g?NjjX6"}, "image_id": 444, "id": 7389}, {"iscrowd": 0, "category_id": 1, "bbox": [493.0, 148.0, 19.0, 22.0], "area": 206, "segmentation": {"size": [512, 512], "counts": "Ref72n?1N2N2N2N2N2N1O0000000000000002N2N2NZK"}, "image_id": 444, "id": 7390}, {"iscrowd": 0, "category_id": 1, "bbox": [413.0, 154.0, 49.0, 50.0], "area": 1357, "segmentation": {"size": [512, 512], "counts": "ce^61n?2N2N2N2N2N2N2N3M2N2O1N3M2N2N2N2N2N3M2N2N2N0000000010O2N2N2N2N10O2N1O02N2JVASOl>k06N2N2N2N2N2Cd@3^?Kd@3^?Kd@3^?Kd@3f?Najh0"}, "image_id": 444, "id": 7391}, {"iscrowd": 0, "category_id": 1, "bbox": [470.0, 167.0, 25.0, 28.0], "area": 382, "segmentation": {"size": [512, 512], "counts": "dU[71n?2N2N2N2N2N2N2N2N2N2O100O1N2N1O0002N2N2Ag@4[?Jh@3[?Jg@4d?N2NYZ8"}, "image_id": 444, "id": 7392}, {"iscrowd": 0, "category_id": 1, "bbox": [154.0, 170.0, 55.0, 45.0], "area": 1398, "segmentation": {"size": [512, 512], "counts": "TV]21l?3N3L3N3M2M3N3L3N3L3O20O0010O0010O0010O0010O010O00010O010O0010O0010O010O00010O010O0010O00N3M2N3L3N2M4M2N3L3NVZg4"}, "image_id": 444, "id": 7393}, {"iscrowd": 0, "category_id": 1, "bbox": [110.0, 178.0, 32.0, 31.0], "area": 507, "segmentation": {"size": [512, 512], "counts": "PVg11n?2N2N2N2N2N2N2N2N2O2M2N2N2N2N2O01N2N3M2N2N2N2N2O1N2N2N2N2N2N2Noih5"}, "image_id": 444, "id": 7394}, {"iscrowd": 0, "category_id": 1, "bbox": [357.0, 187.0, 50.0, 52.0], "area": 1304, "segmentation": {"size": [512, 512], "counts": "]fb52m?3M2N2N2N2N2N4L2N2N2N2N2N2N002O1N2N2N200N3N10000000000001O000fN\\AU1i>N2N2N2N2N2N2N3M2N2N2O1N2N2N2N2N2N2NVYd1"}, "image_id": 444, "id": 7395}, {"iscrowd": 0, "category_id": 1, "bbox": [454.0, 196.0, 10.0, 14.0], "area": 80, "segmentation": {"size": [512, 512], "counts": "YVS74k?2N2M3N2001M2M3N2Ngig0"}, "image_id": 444, "id": 7396}, {"iscrowd": 0, "category_id": 1, "bbox": [136.0, 205.0, 16.0, 16.0], "area": 130, "segmentation": {"size": [512, 512], "counts": "cVT21m?3N2N2N2N20000000O10O01M3N2N2N[ic5"}, "image_id": 444, "id": 7397}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 216.0, 17.0, 28.0], "area": 280, "segmentation": {"size": [512, 512], "counts": "h6l0U?N2N100O001O2N2N2N2N2N2N2N2N2N2NnXg7"}, "image_id": 444, "id": 7398}, {"iscrowd": 0, "category_id": 1, "bbox": [60.0, 217.0, 45.0, 
39.0], "area": 913, "segmentation": {"size": [512, 512], "counts": "^Wn02m?2N2N2N2N1O2M3N2N2N2N2N2N2N000000000O201000000O10000000O100N2N2N2M3N2N2N1O2N2N2N2N2N2N2NbX[6"}, "image_id": 444, "id": 7399}, {"iscrowd": 0, "category_id": 1, "bbox": [126.0, 227.0, 52.0, 46.0], "area": 1204, "segmentation": {"size": [512, 512], "counts": "iWo11m?3N2M2N3N1N3N2M2N3N1N3M3N1O200O0100O0100O0100O0100O0100O01000O0100O0100O010N1N3N2M2N3N1N3N2M2N3N2M2N3NWhV5"}, "image_id": 444, "id": 7400}, {"iscrowd": 0, "category_id": 1, "bbox": [333.0, 243.0, 42.0, 55.0], "area": 1218, "segmentation": {"size": [512, 512], "counts": "lgV52m?3N2M3N3M5J3N2M3N2M3N2M4M2M3N2M3N2M3N3MO102M3N2M3N3L3N2M3N2M3N2N3L3N2M3N2M3N2M4M2MSWT2"}, "image_id": 444, "id": 7401}, {"iscrowd": 0, "category_id": 1, "bbox": [34.0, 256.0, 9.0, 8.0], "area": 63, "segmentation": {"size": [512, 512], "counts": "WXa01j?5N2000000000000PXZ7"}, "image_id": 444, "id": 7402}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 263.0, 8.0, 17.0], "area": 78, "segmentation": {"size": [512, 512], "counts": "W8a0`?N3M2N2O1N2N2Nbgk7"}, "image_id": 444, "id": 7403}, {"iscrowd": 0, "category_id": 1, "bbox": [96.0, 270.0, 53.0, 56.0], "area": 1496, "segmentation": {"size": [512, 512], "counts": "XY`12m?2N2N2N3M2N2O1N2N2N2N2N2N3HTOWAn0g>UOVAm0i>6N2N2N2N2N3M2N2O1000N2O1N3M2N2N2N2N2N2N2N2O2M2N2N2N2N2N2N2N2N3N1N2N2N2N2NdVe5"}, "image_id": 444, "id": 7404}, {"iscrowd": 0, "category_id": 1, "bbox": [30.0, 271.0, 39.0, 38.0], "area": 739, "segmentation": {"size": [512, 512], "counts": "UY?1n?2N2N3M2N2N2N2N2N2N2N2N2O1N2N1O0000001O00000002N2N2N2N2N2N2N2N2O1N2N2N3M2N2NRWm6"}, "image_id": 444, "id": 7405}, {"iscrowd": 0, "category_id": 1, "bbox": [352.0, 302.0, 28.0, 27.0], "area": 390, "segmentation": {"size": [512, 512], "counts": "jY`52n?1N2N2N2N2N2N2N2N3M2N2N2N10O1O2N2N2N3M2N2N2N2N2N2O1N2NVfQ2"}, "image_id": 444, "id": 7406}, {"iscrowd": 0, "category_id": 1, "bbox": [284.0, 307.0, 46.0, 65.0], "area": 1587, "segmentation": {"size": [512, 512], "counts": "mY^42m?4M3L3N3M2M4M2N3L4M2M4M2N3L3N3L3N3M3L3N3L2OO10O03N3M2M4M3L3N3M2M4M2N3L4M2M4M2N3L3N3L3N3M3Lhdj2"}, "image_id": 444, "id": 7407}, {"iscrowd": 0, "category_id": 1, "bbox": [474.0, 313.0, 38.0, 52.0], "area": 1145, "segmentation": {"size": [512, 512], "counts": "\\Z]72m?2O1N2N2N2N2N2N2N2N2N3M2N2N2N2O1N2N2N2N2N2O101O000000000000O2M2O1N2N2N2N2NhE"}, "image_id": 444, "id": 7408}, {"iscrowd": 0, "category_id": 1, "bbox": [50.0, 323.0, 31.0, 31.0], "area": 455, "segmentation": {"size": [512, 512], "counts": "gZi01n?2N2N2N2N3M2N2N2N2N1O010O000000000000000002N2Lc@B_?<4N1O00002IZ@1h?M[@0_eg6"}, "image_id": 444, "id": 7409}, {"iscrowd": 0, "category_id": 1, "bbox": [341.0, 325.0, 30.0, 30.0], "area": 464, "segmentation": {"size": [512, 512], "counts": "cjZ51n?2N2N2N2O1N3M2N2N2N2N2O2M2N1O0002N2N2N2N2O1N3M2N2N2N2N2O2M]UV2"}, "image_id": 444, "id": 7410}, {"iscrowd": 0, "category_id": 1, "bbox": [182.0, 328.0, 33.0, 46.0], "area": 1313, "segmentation": {"size": [512, 512], "counts": "`Zk2=c?1O00000HP1XO000000000001O000000000000000000000000000000000000000YO^Vd4"}, "image_id": 444, "id": 7411}, {"iscrowd": 0, "category_id": 1, "bbox": [134.0, 329.0, 36.0, 33.0], "area": 564, "segmentation": {"size": [512, 512], "counts": "kZS21n?2N3M2N2N2N2O1N2N2N2N3M2N1O01O3M2N1O0000000010O000000Lf@C\\?O0000002N2N2O1N2N2N2N002N3M2N2N2O1N2N2N2N2N3M2N2Nhdb7"}, "image_id": 444, "id": 7413}, {"iscrowd": 0, "category_id": 1, "bbox": [231.0, 344.0, 46.0, 55.0], "area": 1491, "segmentation": {"size": [512, 512], "counts": 
"Ykc33m?4K5L3M2NO01GCo@=Q?Fl@9U?900O10O1004K5L3M4K5L3M3M0O10O1000O01000O10O10003M4K5L3M4K5L3M4L4K4M4L4L3LjSe3"}, "image_id": 444, "id": 7414}, {"iscrowd": 0, "category_id": 1, "bbox": [457.0, 346.0, 30.0, 27.0], "area": 370, "segmentation": {"size": [512, 512], "counts": "njT72m?2N2N2N2O1000000000000N2N3N1N2N2000000001M2N2N2N2N2N2O1N2NgT<"}, "image_id": 444, "id": 7415}, {"iscrowd": 0, "category_id": 1, "bbox": [349.0, 352.0, 54.0, 53.0], "area": 1322, "segmentation": {"size": [512, 512], "counts": "Ql^51n?3M2N2N2N2N2O1N2N3M2N2N2N2N2N2O1N2N00000000001O01O00000000000001O01O0000003M2N2N2O1N2N2N2N3M2N2N2N2O1N2N2N_Tf1"}, "image_id": 444, "id": 7416}, {"iscrowd": 0, "category_id": 1, "bbox": [17.0, 361.0, 49.0, 61.0], "area": 1589, "segmentation": {"size": [512, 512], "counts": "Ql82m?2N2N2N2g@Hg>:WAHh>9VAIh>:UAIh>9VAIh>9WAHg>:YAFe>l0N2N2O2M2N2N1O0001O1O001O0001O000001O2N2lNaAd0a>ZOaAd0a>ZOaAe0a>YO`Ae0b>YO`Ae0b>YO^Ag0m>0002N2N2O2_Od@9^?Ed@9c?N3M2N2Ngcn6"}, "image_id": 444, "id": 7417}, {"iscrowd": 0, "category_id": 1, "bbox": [423.0, 366.0, 50.0, 54.0], "area": 1393, "segmentation": {"size": [512, 512], "counts": "alc62m?2M3N1O2M3N1O2M3N2N1N3N2N2M2O2M3N2N1N3N2N2M2O2N0O10O3N1O2N2O1O0N3N2N1N3N2M3N1O2M3Kh@_OZ?>h@AY?>6N2M3N1O2M3NRTc0"}, "image_id": 444, "id": 7418}, {"iscrowd": 0, "category_id": 1, "bbox": [69.0, 372.0, 20.0, 18.0], "area": 168, "segmentation": {"size": [512, 512], "counts": "PlR11n?2N2N2N2N2N1O000000010O0001O2N2N2N2N2NXTc6"}, "image_id": 444, "id": 7419}, {"iscrowd": 0, "category_id": 1, "bbox": [5.0, 380.0, 10.0, 10.0], "area": 56, "segmentation": {"size": [512, 512], "counts": "Ql22m?3N1N1O00000101N2NSTh7"}, "image_id": 444, "id": 7420}, {"iscrowd": 0, "category_id": 1, "bbox": [201.0, 400.0, 17.0, 18.0], "area": 167, "segmentation": {"size": [512, 512], "counts": "flT33l?2N2N2N2N2O1N3N10N2N2N3M2N2O1N2NXcb4"}, "image_id": 444, "id": 7421}, {"iscrowd": 0, "category_id": 1, "bbox": [342.0, 403.0, 29.0, 29.0], "area": 452, "segmentation": {"size": [512, 512], "counts": "R][53l?1O2N2M3N1O2N2M3N2N1N3N2O1000O1O0O2M3N2N2N1N3N2N2M3N1O2NRSV2"}, "image_id": 444, "id": 7422}, {"iscrowd": 0, "category_id": 1, "bbox": [224.0, 410.0, 26.0, 27.0], "area": 378, "segmentation": {"size": [512, 512], "counts": "U]`32m?3M2O1N2N3M2N2O1N3M2N2N1O02O1N2N3M2N2O1N3M2N2N2N2OhbR4"}, "image_id": 444, "id": 7423}, {"iscrowd": 0, "category_id": 1, "bbox": [325.0, 417.0, 28.0, 26.0], "area": 362, "segmentation": {"size": [512, 512], "counts": "[mR52m?2N2M3N1O2M3N2N11000O10O1000O10O1000O01N2N2M2O2N2M3N1ObR_2"}, "image_id": 444, "id": 7424}, {"iscrowd": 0, "category_id": 1, "bbox": [411.0, 421.0, 29.0, 26.0], "area": 498, "segmentation": {"size": [512, 512], "counts": "Ym]65g?401O0001Oc@IQ?7k@MU?`0O01O0001O0001O0001O01O0001O0001O000L4L5J5LibS1"}, "image_id": 444, "id": 7425}, {"iscrowd": 0, "category_id": 1, "bbox": [23.0, 422.0, 59.0, 54.0], "area": 1615, "segmentation": {"size": [512, 512], "counts": "lm;2m?2N2N2N2N2N2N2N2N2O2M2N2N2N2N2N2N2N2N2N3N1O100N2O1N2N2N20O1N2N2O1hNaAl0a>ROaAl0b>RO_Al0c>RO_Al0l>N2N1O1O000001O00000002N3M2N2N2N2N2O1N2N2N2NRbf6"}, "image_id": 444, "id": 7426}, {"iscrowd": 0, "category_id": 1, "bbox": [278.0, 429.0, 51.0, 47.0], "area": 1199, "segmentation": {"size": [512, 512], "counts": "U^[42m?2N3N1N3M2O1N3M2O2M2N2N3N1N3M2OO01O0001O01O01O01O0001O01O01O01O01O010O2N3M2O2M2N2O2M2N3M2O1N3M2O2M2NoQk2"}, "image_id": 444, "id": 7427}, {"iscrowd": 0, "category_id": 1, "bbox": [223.0, 432.0, 12.0, 12.0], "area": 80, "segmentation": {"size": [512, 512], "counts": 
"dm_32m?2N2O1N2N2000O1N2N2N2N[RZ4"}, "image_id": 444, "id": 7428}, {"iscrowd": 0, "category_id": 1, "bbox": [10.0, 436.0, 8.0, 8.0], "area": 37, "segmentation": {"size": [512, 512], "counts": "f]52n?1N2N2O10N2N2NZbf7"}, "image_id": 444, "id": 7429}, {"iscrowd": 0, "category_id": 1, "bbox": [57.0, 445.0, 74.0, 67.0], "area": 2370, "segmentation": {"size": [512, 512], "counts": "dol01n?2O1N3M2N2N2N2N2N2N2N1O1O100O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O00000O1N2O1N2O1O2M2O1N21O1O1O2N1O1O2N101N1O1O2N1O1O2N1O1O2M2M3N3L3M4L3M3N3L3M3MmQn5"}, "image_id": 444, "id": 7430}, {"iscrowd": 0, "category_id": 1, "bbox": [382.0, 450.0, 59.0, 54.0], "area": 1442, "segmentation": {"size": [512, 512], "counts": "d^o51n?2N2N2N2N2N2N2N3M2O1N2N2N2N2N2N2N3M2N21O0000000001O0001O000000000O1N2N2N3M2N2N2N2N10O000000000001O2N2N2O1N2N2N2N3M2NRQS1"}, "image_id": 444, "id": 7431}, {"iscrowd": 0, "category_id": 1, "bbox": [144.0, 454.0, 24.0, 24.0], "area": 287, "segmentation": {"size": [512, 512], "counts": "c^X22m?2O2M2N2N2N2N2N2O0O000000000003N1N2N2N2N2N3M2O`a[5"}, "image_id": 444, "id": 7432}, {"iscrowd": 0, "category_id": 1, "bbox": [20.0, 470.0, 26.0, 26.0], "area": 350, "segmentation": {"size": [512, 512], "counts": "R_:1n?2N2N3M2O1N2N2N2N3M2N2N01O001O2N2N2N3N1N2N2N2N2N3Mo`X7"}, "image_id": 444, "id": 7433}, {"iscrowd": 0, "category_id": 1, "bbox": [57.0, 474.0, 11.0, 10.0], "area": 63, "segmentation": {"size": [512, 512], "counts": "nnl02m?2N2O1N2N10O2N2N2N2NSam6"}, "image_id": 444, "id": 7434}, {"iscrowd": 0, "category_id": 1, "bbox": [50.0, 481.0, 10.0, 9.0], "area": 51, "segmentation": {"size": [512, 512], "counts": "U_i01n?2N2N2N2O01N2N2N2Ok`Q7"}, "image_id": 444, "id": 7435}, {"iscrowd": 0, "category_id": 1, "bbox": [117.0, 488.0, 37.0, 24.0], "area": 434, "segmentation": {"size": [512, 512], "counts": "ooj11n?1O100O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O100O1O1O11O2Bg@1Z?Nh@0Y?Ni@1X?Mj@2W?Lk@3b?O1O1O1OQ`b5"}, "image_id": 444, "id": 7436}, {"iscrowd": 0, "category_id": 1, "bbox": [332.0, 488.0, 41.0, 24.0], "area": 548, "segmentation": {"size": [512, 512], "counts": "g_V51n?1N3M3M2O2M2N3N1e@_OU?d0i@]OX?e0001O1O001O001O1O001O001O1O001O001O001O1O001O001O1O001O001M3MWPU2"}, "image_id": 444, "id": 7437}, {"iscrowd": 0, "category_id": 1, "bbox": [244.0, 493.0, 30.0, 19.0], "area": 335, "segmentation": {"size": [512, 512], "counts": "k_j32m?2N2N2N1O100O1O1O1O1O1O1O100O1O11O2N1O1O1O1O1O1O1O2N1N2N2NU`f3"}, "image_id": 444, "id": 7438}, {"iscrowd": 0, "category_id": 1, "bbox": [281.0, 494.0, 32.0, 18.0], "area": 309, "segmentation": {"size": [512, 512], "counts": "oo\\41n?1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O11O1O1O1O1O1O1O1O2N1O1O1O1N2NTPS3"}, "image_id": 444, "id": 7439}, {"iscrowd": 0, "category_id": 1, "bbox": [403.0, 502.0, 14.0, 10.0], "area": 76, "segmentation": {"size": [512, 512], "counts": "koY62m?2N2N2N1O11O2N1O1O1O1O1O1OQP_1"}, "image_id": 444, "id": 7440}, {"iscrowd": 0, "category_id": 1, "bbox": [493.0, 507.0, 18.0, 5.0], "area": 49, "segmentation": {"size": [512, 512], "counts": "oof71o?00000O10000O1000000O1000000O1004L1"}, "image_id": 444, "id": 7441}, {"iscrowd": 0, "category_id": 1, "bbox": [185.0, 0.0, 52.0, 31.0], "area": 807, "segmentation": {"size": [512, 512], "counts": "P`l21o?001O1O1O1O1O001O1O1O1O1O001O1O1O1O001O1O1O1O1O001O1O1O1O001O1O1O1O1O01000OO2N2N2N2N1N3N2N2N2M3N1O2N2Nd_Y4"}, "image_id": 445, "id": 7442}, {"iscrowd": 0, "category_id": 1, "bbox": [377.0, 0.0, 31.0, 23.0], "area": 428, "segmentation": {"size": [512, 512], "counts": 
"X`l52m?2N2N2N2N2N2N2N2O1O1O1O1O1O00O1O1O1O1O1O1O1O1O2N2O1N2N2N2N2Njoc1"}, "image_id": 445, "id": 7443}, {"iscrowd": 0, "category_id": 1, "bbox": [409.0, 0.0, 65.0, 37.0], "area": 1341, "segmentation": {"size": [512, 512], "counts": "Q`\\61n?2O1O1O1O1O1O1O1O1O1O1O2N1O1O1O1O1O1O1O1O1O1O1O1OO1O1O1O11O1O1O1O1O1O1O1O1O2N1O1O1OO1O1O1O1O1O1O1O1O1O2N2O1N2N3M2N2N2N2N2N2N2N2Ncob0"}, "image_id": 445, "id": 7444}, {"iscrowd": 0, "category_id": 1, "bbox": [156.0, 3.0, 46.0, 49.0], "area": 1119, "segmentation": {"size": [512, 512], "counts": "aP^22m?3N1N2N3N1N3M2N2O2M2N2O2M2N3N1N2N3M2O1N3N1010O001M2O1N3M2N3N1N2N3N1N2N3M2O2M2N2N3N1N2N3N1Nmnj4"}, "image_id": 445, "id": 7445}, {"iscrowd": 0, "category_id": 1, "bbox": [266.0, 7.0, 15.0, 17.0], "area": 113, "segmentation": {"size": [512, 512], "counts": "]PU41n?2N2N1O2N2N2000000000H]@2c?K`@4g?N2N\\_c3"}, "image_id": 445, "id": 7446}, {"iscrowd": 0, "category_id": 1, "bbox": [249.0, 12.0, 32.0, 33.0], "area": 528, "segmentation": {"size": [512, 512], "counts": "h`l32m?2N2N2N3M2N2O1N2N2N2N2N2N3O00000000N2N2N2N2O1N3M2N2N2N2N2N2N2NR_c3"}, "image_id": 445, "id": 7447}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 15.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "?2_oo7"}, "image_id": 445, "id": 7448}, {"iscrowd": 0, "category_id": 1, "bbox": [293.0, 32.0, 13.0, 14.0], "area": 97, "segmentation": {"size": [512, 512], "counts": "Tab42m?2N2N2N2O101O00N2N2O1N3MinV3"}, "image_id": 445, "id": 7449}, {"iscrowd": 0, "category_id": 1, "bbox": [379.0, 34.0, 52.0, 51.0], "area": 1319, "segmentation": {"size": [512, 512], "counts": "nam51n?2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2O1N2N00000000000001O2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2NZ^X1"}, "image_id": 445, "id": 7450}, {"iscrowd": 0, "category_id": 1, "bbox": [206.0, 35.0, 13.0, 12.0], "area": 93, "segmentation": {"size": [512, 512], "counts": "WQW32m?2N2N2O1N2O10O1O1N2N2N2Nh^b4"}, "image_id": 445, "id": 7451}, {"iscrowd": 0, "category_id": 1, "bbox": [263.0, 47.0, 10.0, 10.0], "area": 49, "segmentation": {"size": [512, 512], "counts": "eaS41n?2M3N2N0002N1O2N2N_^g3"}, "image_id": 445, "id": 7452}, {"iscrowd": 0, "category_id": 1, "bbox": [103.0, 55.0, 52.0, 44.0], "area": 1226, "segmentation": {"size": [512, 512], "counts": "Vbc11n?2N2N2N3M2N2N2N2N2N2N2N2N2N3N1N2O10000000000000001NO000000001O0000010O2N2N3M2N2N2N2N2N2N2O1N2N3M2N2N2Ng]b5"}, "image_id": 445, "id": 7453}, {"iscrowd": 0, "category_id": 1, "bbox": [352.0, 65.0, 11.0, 11.0], "area": 64, "segmentation": {"size": [512, 512], "counts": "VR`52m?2N2N2N2N02N2O1N2N2Nk]Z2"}, "image_id": 445, "id": 7454}, {"iscrowd": 0, "category_id": 1, "bbox": [186.0, 68.0, 25.0, 25.0], "area": 328, "segmentation": {"size": [512, 512], "counts": "_Rm22m?2N2O1N2N2N2N2N2N2N2N2N20O1N2N2N2N2N2N2N2N2N2N2Na]f4"}, "image_id": 445, "id": 7455}, {"iscrowd": 0, "category_id": 1, "bbox": [303.0, 74.0, 24.0, 26.0], "area": 296, "segmentation": {"size": [512, 512], "counts": "cbg41n?2N2N2N3M2N2N2N2N21O000001O00N2O1N2N3M2N2N2N2NW]l2"}, "image_id": 445, "id": 7456}, {"iscrowd": 0, "category_id": 1, "bbox": [77.0, 76.0, 56.0, 56.0], "area": 1477, "segmentation": {"size": [512, 512], "counts": "PcV11n?2N2N2N2N2N2N2N2N2N2N3M2N2N2N2N2N2N2N2N2O100000000010O0000000000000O1N2N2N2N2O1N2N3M2N2N2N2N2N2N2N2N2N2N2N2N2O`\\m5"}, "image_id": 445, "id": 7457}, {"iscrowd": 0, "category_id": 1, "bbox": [337.0, 78.0, 50.0, 49.0], "area": 1240, "segmentation": {"size": [512, 512], "counts": 
"XcX5210i?5N2N2N2N2N2N2N2N2O101N1N2N2N2N2N2N1O0000000001O00000002N1O0001O2N2LTAQOn>m04N2N2N2N2N2N2N2N2N2N2N2N3M2NQ]n1"}, "image_id": 445, "id": 7458}, {"iscrowd": 0, "category_id": 1, "bbox": [134.0, 90.0, 20.0, 20.0], "area": 212, "segmentation": {"size": [512, 512], "counts": "USS22m?2N2N2N2N2N2N2N1O000001O2N2N2N2N2N2N2NPmb5"}, "image_id": 445, "id": 7459}, {"iscrowd": 0, "category_id": 1, "bbox": [173.0, 94.0, 32.0, 32.0], "area": 525, "segmentation": {"size": [512, 512], "counts": "\\cf22m?2N2N2N2N2N2N2N2N2N2N2N2O1N2N2000N2N2N2N2N2N2N2N2N2N2N2N2N2N2Nc\\i4"}, "image_id": 445, "id": 7460}, {"iscrowd": 0, "category_id": 1, "bbox": [463.0, 107.0, 49.0, 69.0], "area": 1722, "segmentation": {"size": [512, 512], "counts": "cdW72m?2T@Ng?70000O1O1N3M2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N10O00000000000000000000000001O00000001O0000000000fL"}, "image_id": 445, "id": 7461}, {"iscrowd": 0, "category_id": 1, "bbox": [143.0, 109.0, 10.0, 11.0], "area": 60, "segmentation": {"size": [512, 512], "counts": "acW22m?2N2N2N2000N2N2N2N_\\c5"}, "image_id": 445, "id": 7462}, {"iscrowd": 0, "category_id": 1, "bbox": [279.0, 114.0, 10.0, 9.0], "area": 54, "segmentation": {"size": [512, 512], "counts": "gc[41n?2M3M201O01O1M3N1O\\\\_3"}, "image_id": 445, "id": 7463}, {"iscrowd": 0, "category_id": 1, "bbox": [34.0, 115.0, 50.0, 55.0], "area": 1347, "segmentation": {"size": [512, 512], "counts": "RTa01o?2M3N2M3N1N3N20N00O002O1N3N2J6N1N3M3N2M2O2M3M3N1N3M3NO3N2M3N1N3M3N2M2O2M3N2M2O2M3M3N2M2O2M3N2M2N3NVke6"}, "image_id": 445, "id": 7464}, {"iscrowd": 0, "category_id": 1, "bbox": [282.0, 115.0, 39.0, 42.0], "area": 828, "segmentation": {"size": [512, 512], "counts": "_T]41n?2M3N1N3N2N1N3N2M3N1N3N2N1N3N2M3N1O2M1001N3N1N3N2N2M2O2M3N1N3N2N1N3N2M3N1O2MS\\o2"}, "image_id": 445, "id": 7465}, {"iscrowd": 0, "category_id": 1, "bbox": [118.0, 124.0, 15.0, 16.0], "area": 117, "segmentation": {"size": [512, 512], "counts": "UTk12m?2N2N2N2N2OO000000003M2N2N2NP\\m5"}, "image_id": 445, "id": 7466}, {"iscrowd": 0, "category_id": 1, "bbox": [185.0, 137.0, 28.0, 28.0], "area": 424, "segmentation": {"size": [512, 512], "counts": "edl22m?2N2N2N2N2N3M2N2N2O1N2N2N2O01N2N2N2N2N2N2N2N2N2N2N2N2N[[e4"}, "image_id": 445, "id": 7467}, {"iscrowd": 0, "category_id": 1, "bbox": [112.0, 141.0, 14.0, 14.0], "area": 105, "segmentation": {"size": [512, 512], "counts": "bTh12n?1N2N2N2N2N2000O1N2N2N3N1N\\kP6"}, "image_id": 445, "id": 7468}, {"iscrowd": 0, "category_id": 1, "bbox": [115.0, 150.0, 35.0, 35.0], "area": 628, "segmentation": {"size": [512, 512], "counts": "Uei11n?2N2O1N3M2N2N2N2N2N2N2N2N2N2O2M2N2OO2N2N2N2N2N3M2O1N2N2N2N2N2N2N2N2Njjd5"}, "image_id": 445, "id": 7469}, {"iscrowd": 0, "category_id": 1, "bbox": [104.0, 159.0, 10.0, 10.0], "area": 56, "segmentation": {"size": [512, 512], "counts": "SUd12m?2N2N2N20O1N2N2N2OljV6"}, "image_id": 445, "id": 7470}, {"iscrowd": 0, "category_id": 1, "bbox": [234.0, 161.0, 39.0, 44.0], "area": 934, "segmentation": {"size": [512, 512], "counts": "mUe32m?1O2M3N1N3N2N2M2O2M3N1N3N2N1N3N2M3N1O2M3O01N1O2M3N1N3N2N2M2O2M3N2M2O2N2G_@Oc?O`@Ob?OkZg3"}, "image_id": 445, "id": 7471}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 164.0, 39.0, 45.0], "area": 1049, "segmentation": {"size": [512, 512], "counts": "X5l0S?2N2N2N2001O0001O00000000OO000001O00000000101N2N2N2N2N3M2N2N2N2N2N3M2N2O1N2NXZ\\7"}, "image_id": 445, "id": 7472}, {"iscrowd": 0, "category_id": 1, "bbox": [505.0, 170.0, 7.0, 14.0], "area": 57, "segmentation": {"size": [512, 512], "counts": "_el72m?2N2N3N1N2N2eJ"}, "image_id": 445, "id": 7473}, {"iscrowd": 0, 
"category_id": 1, "bbox": [430.0, 180.0, 51.0, 53.0], "area": 1344, "segmentation": {"size": [512, 512], "counts": "WVg61n?2N2N2N2N2N2N2N2a@@[?d0N2N2N3M2N2N002N2N2O100O1N2N20001O0000000001O0N2N2jNXAQ1m>N2N3M2N2N2O1N2N2N2N2N2N2N3M2N2N\\Y?"}, "image_id": 445, "id": 7474}, {"iscrowd": 0, "category_id": 1, "bbox": [45.0, 189.0, 31.0, 31.0], "area": 512, "segmentation": {"size": [512, 512], "counts": "[ff01n?3M2N2N2N2N2N2N2N2N2O1N2N2N2N20O1N2N2O1N2N2N2N2N3M2N2N2N2N2Ndii6"}, "image_id": 445, "id": 7475}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 202.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "Zfo71e9"}, "image_id": 445, "id": 7476}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 204.0, 38.0, 44.0], "area": 851, "segmentation": {"size": [512, 512], "counts": "Zgm22l?3N1N3N2M2O2M3N1N3M3N1N3N2M2O2M3N1N3NO102M2O2M2N3N2M2O2M2O2M2O4K3N2M2O2M3NXY_4"}, "image_id": 445, "id": 7477}, {"iscrowd": 0, "category_id": 1, "bbox": [182.0, 205.0, 11.0, 10.0], "area": 64, "segmentation": {"size": [512, 512], "counts": "cVk21m?2O2M3N1010O0O2N1N3MbYo4"}, "image_id": 445, "id": 7478}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 224.0, 5.0, 9.0], "area": 24, "segmentation": {"size": [512, 512], "counts": "P79g?N3N2M2OoXm7"}, "image_id": 445, "id": 7479}, {"iscrowd": 0, "category_id": 1, "bbox": [473.0, 227.0, 11.0, 10.0], "area": 59, "segmentation": {"size": [512, 512], "counts": "Wg\\71n?2N2N2O1N2OO2N2N2N2Njh="}, "image_id": 445, "id": 7480}, {"iscrowd": 0, "category_id": 1, "bbox": [16.0, 233.0, 35.0, 37.0], "area": 641, "segmentation": {"size": [512, 512], "counts": "fW82m?2N2N2N2N2N3M2N2N2N2N2N2N2000001O00000000O1N3M2N2N2N2N2N2N2O1N2N3M2NQXV7"}, "image_id": 445, "id": 7481}, {"iscrowd": 0, "category_id": 1, "bbox": [393.0, 234.0, 53.0, 49.0], "area": 1326, "segmentation": {"size": [512, 512], "counts": "VhT61n?2N2N2N2N2N2N2N2N2N2O1N2N2N2N2N2N3M2N2N1O1O000001O00000000002N2N2N2N0000002O1N2N2N2N2N2N2N2N2N2N2N3M2N2NVhP1"}, "image_id": 445, "id": 7482}, {"iscrowd": 0, "category_id": 1, "bbox": [99.0, 242.0, 26.0, 27.0], "area": 363, "segmentation": {"size": [512, 512], "counts": "mga12m?2N2N2N2N2N2N2N2N2N2N20000000O1N2N2M3N2N2N2N2N2N2NQXQ6"}, "image_id": 445, "id": 7483}, {"iscrowd": 0, "category_id": 1, "bbox": [138.0, 246.0, 39.0, 49.0], "area": 1064, "segmentation": {"size": [512, 512], "counts": "iXU22l?2O2ZOMo@O67i>Ko@076h>3UA0i>1UA1i>2UA0i>e0M2N3O10O010O01000O01000N1N3M2O2M3N1N3M3N1N3N1N3M3N1N3M3N1N3NjWW5"}, "image_id": 445, "id": 7484}, {"iscrowd": 0, "category_id": 1, "bbox": [326.0, 253.0, 27.0, 24.0], "area": 244, "segmentation": {"size": [512, 512], "counts": "^XS51o?1N2N3M2N2N10O00000000010O00000000010O00001O2N2O2M2NoW_2"}, "image_id": 445, "id": 7485}, {"iscrowd": 0, "category_id": 1, "bbox": [422.0, 274.0, 19.0, 17.0], "area": 135, "segmentation": {"size": [512, 512], "counts": "nXc61n?2N2N2N2N1O0001O00000000010O2N2N2N2N[WS1"}, "image_id": 445, "id": 7486}, {"iscrowd": 0, "category_id": 1, "bbox": [454.0, 277.0, 40.0, 39.0], "area": 775, "segmentation": {"size": [512, 512], "counts": "ZYS71n?2N3M2N2N2N2N2N2N2O1N2N2N3M2N2N000000000001O010O2N2N2N2N2N2N2N3M2N2N2O1N2N2N2Nlf8"}, "image_id": 445, "id": 7487}, {"iscrowd": 0, "category_id": 1, "bbox": [349.0, 280.0, 46.0, 47.0], "area": 1187, "segmentation": {"size": [512, 512], "counts": "bi^51n?2N2N2N2M3N2N2N2N2d@]OX?g0N2N2N2N2N2N1O1O2N2N2N0000000002N2N2N2N11N2N01O2KTASOn>k05N2N2N2N2Be@4]?Je@4]?Je@4]?Jd@5e?N2NdVj1"}, "image_id": 445, "id": 7488}, {"iscrowd": 0, "category_id": 1, "bbox": [51.0, 289.0, 22.0, 24.0], "area": 
269, "segmentation": {"size": [512, 512], "counts": "Yii03l?2N2N2N2N2N2N2N210O000000O1O1N2N2N3M2N2N2NaVk6"}, "image_id": 445, "id": 7489}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 292.0, 13.0, 15.0], "area": 88, "segmentation": {"size": [512, 512], "counts": "[98g?00000000001O02N1O1O2N2NjVi7"}, "image_id": 445, "id": 7490}, {"iscrowd": 0, "category_id": 1, "bbox": [89.0, 295.0, 50.0, 55.0], "area": 1377, "segmentation": {"size": [512, 512], "counts": "ki\\11n?2N2N2N2N2N2N2N2O1N3M2N2N2N2N2N2N2N2N2N2N20000000000000001O000000N3M2N2N2N2N2N2N2N2N2N2N2N2N2G_@0c?N_@0c?N_@0^ej5"}, "image_id": 445, "id": 7491}, {"iscrowd": 0, "category_id": 1, "bbox": [286.0, 316.0, 58.0, 60.0], "area": 1567, "segmentation": {"size": [512, 512], "counts": "T[_41n?2N2N2N2N2N2N2N2N3M2O1N2N2N2N2N2N2N2N0ImNbAS1^>oN`AQ1`>700010O000000000000000000002N2N2N2N2O1N2N2N2N2N3M2N2N2N2N2N2N2N2N2N2O1N^ec2"}, "image_id": 445, "id": 7492}, {"iscrowd": 0, "category_id": 1, "bbox": [414.0, 319.0, 39.0, 31.0], "area": 680, "segmentation": {"size": [512, 512], "counts": "cZ_61n?2N2N2LJ[@8c?4N2N2N2N2N2N000000001O2N2N2OO2N2N2N2O10000000000N2N2N2N2N2N2N2N2N2N_Um0"}, "image_id": 445, "id": 7493}, {"iscrowd": 0, "category_id": 1, "bbox": [41.0, 342.0, 52.0, 53.0], "area": 1365, "segmentation": {"size": [512, 512], "counts": "Ykd02m?2N2N2N3M2N2O1N2N2N2J[On@g0P?[On@g0P?6N200O1N2N2N2O1000001O00000001O00000O1O1N2N2N2N2N2N2N2N2N2N2N2N2N2N2N3M2N2N2NZTa6"}, "image_id": 445, "id": 7494}, {"iscrowd": 0, "category_id": 1, "bbox": [467.0, 346.0, 16.0, 16.0], "area": 143, "segmentation": {"size": [512, 512], "counts": "ojY72m?3N1N2N2N2N2O1000O1N2N3N1N2N2NnT>"}, "image_id": 445, "id": 7495}, {"iscrowd": 0, "category_id": 1, "bbox": [453.0, 357.0, 13.0, 11.0], "area": 79, "segmentation": {"size": [512, 512], "counts": "YkR71n?2N2N2O2M2O01O1N2N2N2O1Nfdf0"}, "image_id": 445, "id": 7496}, {"iscrowd": 0, "category_id": 1, "bbox": [475.0, 359.0, 22.0, 22.0], "area": 256, "segmentation": {"size": [512, 512], "counts": "^k]72m?2N2N3M2O1N2N2N2O10000000N2O1N2N2N2N2N2N3M]T7"}, "image_id": 445, "id": 7497}, {"iscrowd": 0, "category_id": 1, "bbox": [213.0, 364.0, 24.0, 24.0], "area": 288, "segmentation": {"size": [512, 512], "counts": "ikZ32m?2N2N2N2N2N2N2N2O1N0000000002O1N2N3M2N2N2N2N2N[TY4"}, "image_id": 445, "id": 7498}, {"iscrowd": 0, "category_id": 1, "bbox": [442.0, 366.0, 36.0, 35.0], "area": 646, "segmentation": {"size": [512, 512], "counts": "R\\m61n?2N2N2N2N2N2N2N2N2N2N2N2N2N2N10O00000000001O2N2N2N2N2N2O1N2N2N2N2N2N3MUd`0"}, "image_id": 445, "id": 7499}, {"iscrowd": 0, "category_id": 1, "bbox": [239.0, 377.0, 48.0, 48.0], "area": 1307, "segmentation": {"size": [512, 512], "counts": "alg32m?3M2N2N2N2N2N2N2N2O1N2N2N2N2N2N2N2N2N3M2N1O00000001O2O1N2N2N01O2N10O2O1IWATOk>j0WATOk>j07N2N2N2O1Be@4]?Je@4]?Je@4e?N2NbS`3"}, "image_id": 445, "id": 7500}, {"iscrowd": 0, "category_id": 1, "bbox": [508.0, 384.0, 4.0, 7.0], "area": 18, "segmentation": {"size": [512, 512], "counts": "R\\n71n?3M2O1oC"}, "image_id": 445, "id": 7501}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 385.0, 17.0, 31.0], "area": 308, "segmentation": {"size": [512, 512], "counts": "Rl05N2N2N2N2N2N2N2N2N2N2N2N2N2NiRZ4"}, "image_id": 445, "id": 7505}, {"iscrowd": 0, "category_id": 1, "bbox": [402.0, 408.0, 24.0, 25.0], "area": 303, "segmentation": {"size": [512, 512], "counts": "P]Y62m?2N2N2N2N2N3M2N2O100000000000N2N2N2N2N2N2N2N2NkbZ1"}, "image_id": 445, "id": 7506}, {"iscrowd": 0, "category_id": 1, "bbox": [431.0, 410.0, 29.0, 28.0], "area": 459, "segmentation": {"size": [512, 512], "counts": 
"Umg62m?3M2N2N2O1N2N2N2N2N2N2N2O1000OO0002N2N2N2N2N2N2D_@6c?I^@6g?N2N2Nhbi0"}, "image_id": 445, "id": 7507}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 420.0, 41.0, 45.0], "area": 1115, "segmentation": {"size": [512, 512], "counts": "]=325`?M_@4_?8001OO1N2N2N3M2N2O1N2N2N2N2N201O00000000N0000010O002N2N2N3M2N2N2N2Bg@2[?Lg@3Z?Kh@3Z?Lg@2[?Lh@1e?NRR[7"}, "image_id": 445, "id": 7508}, {"iscrowd": 0, "category_id": 1, "bbox": [287.0, 422.0, 50.0, 44.0], "area": 1158, "segmentation": {"size": [512, 512], "counts": "lm_42m?3N1N3N2M3N2M3N2M2O2M3NO01O010O01O010O010OJVOWAk0h>XOVAg0k>7O010O3N1N0101N3N1N10OLRAWOo>h0TAVOk>j0601N3N2M3N2M3N2M3N1N3N2MURg2"}, "image_id": 445, "id": 7509}, {"iscrowd": 0, "category_id": 1, "bbox": [246.0, 436.0, 25.0, 25.0], "area": 307, "segmentation": {"size": [512, 512], "counts": "m]k31n?2N2N2N2N3M2O1N2N2N2000000000N3N1N2N2N2N2N2N2N2NnQh3"}, "image_id": 445, "id": 7510}, {"iscrowd": 0, "category_id": 1, "bbox": [336.0, 441.0, 33.0, 33.0], "area": 548, "segmentation": {"size": [512, 512], "counts": "V^X51n?2N2N2O1N3M2N2N2N2N2N2N2N2N2O10001OO1N3M2N2N2N2N2N2N2O1N2N2N2N2NfQW2"}, "image_id": 445, "id": 7511}, {"iscrowd": 0, "category_id": 1, "bbox": [401.0, 446.0, 54.0, 53.0], "area": 1387, "segmentation": {"size": [512, 512], "counts": "mnX61n?2N3M2O1N2N2N2N2N2N2N2N2N2N2N2N3M2N2O1N1O0000000000000000000000000100O3M2N2N2N2N2N2N2N2N2N2N2N2N2O2M2N2N2N_Ql0"}, "image_id": 445, "id": 7512}, {"iscrowd": 0, "category_id": 1, "bbox": [142.0, 454.0, 51.0, 53.0], "area": 1337, "segmentation": {"size": [512, 512], "counts": "i^W21n?2N2N2N2N3JEb@=\\?5N2N2N2N2N2N2N2N2N2O100O1N3O01O0000000000000000000N2N2N2N3M2N2O1N2N2N2N2N2N2N2N2N2N2N3MjPo4"}, "image_id": 445, "id": 7513}, {"iscrowd": 0, "category_id": 1, "bbox": [391.0, 459.0, 11.0, 11.0], "area": 61, "segmentation": {"size": [512, 512], "counts": "anS61n?1O2N2N2N2N02N2N2N2Nbaf1"}, "image_id": 445, "id": 7514}, {"iscrowd": 0, "category_id": 1, "bbox": [366.0, 493.0, 37.0, 19.0], "area": 367, "segmentation": {"size": [512, 512], "counts": "o_g51n?1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O1O1O11O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O2NQPf1"}, "image_id": 445, "id": 7515}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 498.0, 20.0, 14.0], "area": 179, "segmentation": {"size": [512, 512], "counts": "b?>c?O1O1O1O00O1O1O11O1O1O1O1O1O1O2N1O1O1OQ`e7"}, "image_id": 445, "id": 7516}, {"iscrowd": 0, "category_id": 1, "bbox": [346.0, 502.0, 19.0, 10.0], "area": 100, "segmentation": {"size": [512, 512], "counts": "o_]51n?1O1O1O1O1O1O1O1O11O1O1O1O1O1O1O1O1OQPY2"}, "image_id": 445, "id": 7517}, {"iscrowd": 0, "category_id": 1, "bbox": [421.0, 215.0, 40.0, 32.0], "area": 607, "segmentation": {"size": [512, 512], "counts": "Tgb61m?2N3M3N1N3M2N3O1O010O0100O0100O010O0100O0100O010O010O01000O010OO2M2O2M3M2N3N1NjXi0"}, "image_id": 448, "id": 7518}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 235.0, 46.0, 39.0], "area": 861, "segmentation": {"size": [512, 512], "counts": "lWW72m?1N3M2N3M3N1N3M2N3N11000O010O010O0100O0100O010O010O01000O010O010O01000ON3N1N3M2N3N2M2N3M2ORh1"}, "image_id": 448, "id": 7519}, {"iscrowd": 0, "category_id": 1, "bbox": [448.0, 316.0, 53.0, 83.0], "area": 1722, "segmentation": {"size": [512, 512], "counts": "V\\P71n?1N3M2N3M2N3M2N3M2N3N1N3]OUOmAm0Q>UOmAn0P>UOmAm0Q>UOmAn0P>UOmAl0S>UOkAk0U>?00000001O00000001O0000003N1N3IcAgN_>X1bAfNa>W15N3M2N3M2N3M2N3M2O2M3M2N3M2N3M2NkU5"}, "image_id": 448, "id": 7520}, {"iscrowd": 0, "category_id": 1, "bbox": [446.0, 412.0, 50.0, 43.0], "area": 1055, "segmentation": {"size": [512, 512], "counts": 
"]]o63k?3M2O2M2N3N2M2N3M3N1010O10O10O01000O010O01000O01000O0100O0100O0100O01000O01O0O2M3M2O2M2O2M3M2O2M2N_b7"}, "image_id": 448, "id": 7521}, {"iscrowd": 0, "category_id": 1, "bbox": [108.0, 0.0, 5.0, 2.0], "area": 7, "segmentation": {"size": [512, 512], "counts": "PPf11o?001O00OQ`W6"}, "image_id": 450, "id": 7522}, {"iscrowd": 0, "category_id": 1, "bbox": [185.0, 0.0, 56.0, 52.0], "area": 1410, "segmentation": {"size": [512, 512], "counts": "j`l21n?3M2X@Kb?7\\@Kb?;N2O1N2N2N2N2N2O11O0001M2N2N2N2N1O01O0000000000000000000001O0001O001O2N2N2N2N2N2N2N2N2N2O1N2N2N2N2N3M2N__W4"}, "image_id": 450, "id": 7523}, {"iscrowd": 0, "category_id": 1, "bbox": [260.0, 0.0, 28.0, 23.0], "area": 375, "segmentation": {"size": [512, 512], "counts": "XPR41n?2N2N3M2N2N2O1N2N2O2N1O1O00O1O1O2O1N1O1O2N2N2N2O1N2N3Mgo_3"}, "image_id": 450, "id": 7524}, {"iscrowd": 0, "category_id": 1, "bbox": [306.0, 0.0, 64.0, 29.0], "area": 929, "segmentation": {"size": [512, 512], "counts": "PPi41o?1O1O1O1O1O1O1O2N1O1O1O1O1O1O1O1O1O1O1O1OO1O1O1O1O1O1O1O1O1O1O1O100O1O1O1O1Oa@LT?3l@OS?3k@NT?3j@OU?2i@1U?0i@2V?Oh@3W?8000O01000O0100000O0100000O0100003L5L4L4L3L5L4LckZ6"}, "image_id": 450, "id": 7542}, {"iscrowd": 0, "category_id": 1, "bbox": [317.0, 118.0, 18.0, 17.0], "area": 164, "segmentation": {"size": [512, 512], "counts": "ncn42m?2N2N2N2N2N2N2N10O2N2N2N2N2N2O1N2NS\\h2"}, "image_id": 450, "id": 7543}, {"iscrowd": 0, "category_id": 1, "bbox": [144.0, 127.0, 52.0, 35.0], "area": 1369, "segmentation": {"size": [512, 512], "counts": "TTX25j?6K5K6J5K4L0O100000O10O100000O10O101O1O000O15K1N0100000Mn@XOR?h0210O100000O10O100000O10O10000000O04M5K5K5K5JZkm4"}, "image_id": 450, "id": 7544}, {"iscrowd": 0, "category_id": 1, "bbox": [423.0, 129.0, 9.0, 10.0], "area": 49, "segmentation": {"size": [512, 512], "counts": "Udc61n?2N2N2N2000N2N2NlkW1"}, "image_id": 450, "id": 7545}, {"iscrowd": 0, "category_id": 1, "bbox": [99.0, 135.0, 38.0, 41.0], "area": 760, "segmentation": {"size": [512, 512], "counts": "lda127O^?3_@0^?3`@N_?3`@O^?;M3N2N1O2N2000O0100OO00O1000O01000O0100002M3N2N1N3N2N2M2O2N2N2M2O2N`[k5"}, "image_id": 450, "id": 7546}, {"iscrowd": 0, "category_id": 1, "bbox": [5.0, 136.0, 29.0, 28.0], "area": 446, "segmentation": {"size": [512, 512], "counts": "bd22m?2O2M2N3N1N3N1N3M2O2M2N100O000100O3M2O2M2N3N1N3M2O2M2N3NWk^7"}, "image_id": 450, "id": 7547}, {"iscrowd": 0, "category_id": 1, "bbox": [313.0, 140.0, 51.0, 51.0], "area": 1235, "segmentation": {"size": [512, 512], "counts": "mdl42m?2N2N2N2N3N1N2N2N2N2N2N2N2N2N2N3M2N2000000000000001O01O0000N2N2N2N2N2O1N1O001O2N2N2N2N2N3M2N2O1N2N2NgjY2"}, "image_id": 450, "id": 7548}, {"iscrowd": 0, "category_id": 1, "bbox": [499.0, 169.0, 13.0, 26.0], "area": 179, "segmentation": {"size": [512, 512], "counts": "dei72m?2N2N2O1N2N2N3M2N2N2N2N2fJ"}, "image_id": 450, "id": 7549}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 171.0, 19.0, 35.0], "area": 346, "segmentation": {"size": [512, 512], "counts": "\\5R1m>02N2N2N2N2N2O1N2N2N2N3M2N2N2N2N2N2OSZf7"}, "image_id": 450, "id": 7550}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 172.0, 51.0, 52.0], "area": 1240, "segmentation": {"size": [512, 512], "counts": "^fo21n?2N2N2O1N2N2N2N2N2N2N3M2N2N2N2N2N1HPO_AP1a>RO]An0c>TO[Am0d>71O0000000000001O001O2N2N2N2N2N2N2N2N2N2N2N2N2N3N1N2N2N2N2NQjV4"}, "image_id": 450, "id": 7551}, {"iscrowd": 0, "category_id": 1, "bbox": [156.0, 184.0, 29.0, 30.0], "area": 454, "segmentation": {"size": [512, 512], "counts": "UV^22m?2N2N2N2N2N3N1N2N2N2N2N2N3M0101N3M2N2N2N2N2N2O2M2N2N2N2NjYS5"}, "image_id": 450, "id": 7552}, {"iscrowd": 
0, "category_id": 1, "bbox": [139.0, 190.0, 15.0, 14.0], "area": 118, "segmentation": {"size": [512, 512], "counts": "TfU21n?3M2N2N2N2N2O00O2N2N2N2N2N2Nmib5"}, "image_id": 450, "id": 7553}, {"iscrowd": 0, "category_id": 1, "bbox": [463.0, 200.0, 49.0, 53.0], "area": 1279, "segmentation": {"size": [512, 512], "counts": "mfW71m?2O2M3N1N3N2M3M2O2M3N1N3N2N110O10O10O10O10O10O10O10O1000O01000O01000O10O10O3NO10O10O10OO2BQAGR?6QAHP?7QAGR?6QAHQ8"}, "image_id": 450, "id": 7554}, {"iscrowd": 0, "category_id": 1, "bbox": [235.0, 203.0, 34.0, 32.0], "area": 545, "segmentation": {"size": [512, 512], "counts": "nfe32m?2N2N2N1O00001O2N2N2N3M2N2N2N2N2N1O0002N2N2N2N2N2N2N2O1N3M2N2N2N2NWYi3"}, "image_id": 450, "id": 7555}, {"iscrowd": 0, "category_id": 1, "bbox": [6.0, 208.0, 61.0, 63.0], "area": 1833, "segmentation": {"size": [512, 512], "counts": "lW32m?2N2N2N3M2N2BBWA`0g>BXA?f>CXA?f>CXA?f>CXA?f>CXA?g>?M2N2N001O0000000LcNgA\\1Y>fNeAZ1[>50001O000001O2N2N001O2N2O2M2N2N2N2N2N2N2N3M01O000002N2N2N3M2N2O1N2N2N2N2NkXn6"}, "image_id": 450, "id": 7556}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 210.0, 45.0, 56.0], "area": 1288, "segmentation": {"size": [512, 512], "counts": "Rg]12m?2N3N2M2N3N1N3N1N3M3N1N3M2O2M2N3N2M2O2M2N3N1N3M3N00O2N3N2M2mN`Ad0b>ZO_Ad0c>ZO`Ac0c>ZO_Ae0b>ZO`Ac0c>ZO`Ac0P?O2M2N3N1N3N2M2N3NThk5"}, "image_id": 450, "id": 7557}, {"iscrowd": 0, "category_id": 1, "bbox": [219.0, 231.0, 63.0, 62.0], "area": 2042, "segmentation": {"size": [512, 512], "counts": "mg]33l?2O1N2N2N2N3M2N2RA_OZ>c0dA_OZ>c0eA^OY>e0dA]O[>d0cA^O[>d0cA_OZ>c0dA_OZ>c0dA_OZ>U1N2N1O0011N2N2N2N3M2N2O100N2N3M2N2OJQO\\Am0d>VO[Ah0e>ZOYAf0g>9001O01O00000001O01O000002N2N2N2O1[Oo@8T?En@9T?Fm@8U?Fn@7T?Gn@7_?N2O1N2NQhb3"}, "image_id": 450, "id": 7558}, {"iscrowd": 0, "category_id": 1, "bbox": [446.0, 242.0, 64.0, 57.0], "area": 1682, "segmentation": {"size": [512, 512], "counts": "kWo62n?2M3X@Ka?@RA>m>ERA9o>HRA6m>LTA1o>OQAOP?1QALQ?4PAIS?7m@GT?9m@DU?<90O00010O00010O00010O00O1N3M2N2O2Mof0"}, "image_id": 450, "id": 7559}, {"iscrowd": 0, "category_id": 1, "bbox": [65.0, 248.0, 50.0, 48.0], "area": 1161, "segmentation": {"size": [512, 512], "counts": "YhP12m?2N2N2N3M2O1N2N2N3M2N2O1N2N3M2N0010O0000101000000010O000000010O000001N1N2N2N2N3N1N2N2N2N3M2O1N2N2NXWV6"}, "image_id": 450, "id": 7560}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 261.0, 17.0, 28.0], "area": 283, "segmentation": {"size": [512, 512], "counts": "W8i0V?2N10O2N2N2N2N2N2N2O1N2N2N2N2N2N_Wg7"}, "image_id": 450, "id": 7561}, {"iscrowd": 0, "category_id": 1, "bbox": [258.0, 266.0, 85.0, 82.0], "area": 3038, "segmentation": {"size": [512, 512], "counts": "PZQ42m?2N3M2N2O1N2N2N2N2N2N2N3M2N2O1N2N2N2N2N2N2N3M2N1O01O0001O0000000000000001O0001O0000000000000001O0001O0000000000000001O010O3M2N2N2N2N2N3M2N2N2O1N2N2N3M2N2N2N2N2N3M2O1N2NlVd2"}, "image_id": 450, "id": 7562}, {"iscrowd": 0, "category_id": 1, "bbox": [119.0, 273.0, 37.0, 31.0], "area": 590, "segmentation": {"size": [512, 512], "counts": "jhk12m?2N2N2N2N2N1O1010001O00000O1O1N2N2N2N2N2N2N02N2N2O1N3M2N2N2N2N2N2N2N2N2NQga5"}, "image_id": 450, "id": 7563}, {"iscrowd": 0, "category_id": 1, "bbox": [162.0, 284.0, 54.0, 52.0], "area": 1626, "segmentation": {"size": [512, 512], "counts": "SYa21o?2M4M2M3N2M4M2M3N2M4M2N2M3N2M4O01O0001M1N10O010O0100O0100O010O010O010I^AoNb>R1aAkN_>T1610O102M3N3M2M3N2M4M2M3N2M4M2N2MXfc4"}, "image_id": 450, "id": 7564}, {"iscrowd": 0, "category_id": 1, "bbox": [393.0, 293.0, 52.0, 51.0], "area": 1267, "segmentation": {"size": [512, 512], "counts": 
"fiT62m?2O1N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2O10000O1000000000000000000000N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2NmUQ1"}, "image_id": 450, "id": 7565}, {"iscrowd": 0, "category_id": 1, "bbox": [473.0, 293.0, 37.0, 37.0], "area": 672, "segmentation": {"size": [512, 512], "counts": "ii\\71n?2N3N1N2N2N2N2N2N2N2N3M2O1N2N0000000000001O02N2N2N2N2N2N2N2N3N1N2N2N2N2N]f0"}, "image_id": 450, "id": 7566}, {"iscrowd": 0, "category_id": 1, "bbox": [509.0, 300.0, 3.0, 6.0], "area": 12, "segmentation": {"size": [512, 512], "counts": "^in72m?2N2cF"}, "image_id": 450, "id": 7567}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 304.0, 25.0, 55.0], "area": 726, "segmentation": {"size": [512, 512], "counts": "l9[1f>N2N00000000000001O01WOZA7f>I\\A5d>K^A3b>MaA0_>0cAN]>3dAK\\>5fAI\\>5fAJ[>4gAJ[>4gAJ[>4gAJ[>4k0M2NYVc7"}, "image_id": 450, "id": 7568}, {"iscrowd": 0, "category_id": 1, "bbox": [68.0, 306.0, 9.0, 9.0], "area": 43, "segmentation": {"size": [512, 512], "counts": "dYR12m?2O2M2O11M2N2O1NZVi6"}, "image_id": 450, "id": 7569}, {"iscrowd": 0, "category_id": 1, "bbox": [143.0, 307.0, 39.0, 39.0], "area": 587, "segmentation": {"size": [512, 512], "counts": "iiW21n?2N2N2N2O1N2N3N10000N2N20001O0000000001O01O0000000001O01OO1N2N2N3M2N2N2N2O1NaeT5"}, "image_id": 450, "id": 7570}, {"iscrowd": 0, "category_id": 1, "bbox": [229.0, 311.0, 10.0, 10.0], "area": 55, "segmentation": {"size": [512, 512], "counts": "jib32m?2N2N2O2OO1N2N2N2NUVX4"}, "image_id": 450, "id": 7571}, {"iscrowd": 0, "category_id": 1, "bbox": [121.0, 313.0, 36.0, 31.0], "area": 579, "segmentation": {"size": [512, 512], "counts": "Vjl12m?3N1N2N2N2N2N2N2N2N3M2O1N2N2N11O1N2N2N2N2N2N3M2O10000000000N2N3M2N2N2NbUa5"}, "image_id": 450, "id": 7572}, {"iscrowd": 0, "category_id": 1, "bbox": [435.0, 325.0, 41.0, 35.0], "area": 679, "segmentation": {"size": [512, 512], "counts": "lji62m?2N2N2N20OO000000002N2N2N2N2O1N3M2N2N1O0000001O01O00002N2N2N2N3M2N2O1N2N2N2N2N2N^ea0"}, "image_id": 450, "id": 7573}, {"iscrowd": 0, "category_id": 1, "bbox": [325.0, 328.0, 72.0, 67.0], "area": 2083, "segmentation": {"size": [512, 512], "counts": "jjR51n?2N2N3M2N2O1N2N2N2N2N2N2N2N3M2N2N2O1N2N20000000000010O000000000000O1N2N201O000001O0000000N2N2N2N2N2N2N2N3N1N2N2N2N2N1O000000001O2O1N2N2N2N2N3MaTi1"}, "image_id": 450, "id": 7574}, {"iscrowd": 0, "category_id": 1, "bbox": [57.0, 331.0, 38.0, 37.0], "area": 613, "segmentation": {"size": [512, 512], "counts": "Tkl01n?3M2N2N2N2N2N2N2N2N2N1O00000000000001O000000000000011N2N2N2N2N2N2N2N2N2N2N[U`6"}, "image_id": 450, "id": 7575}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 334.0, 86.0, 83.0], "area": 3098, "segmentation": {"size": [512, 512], "counts": "Ulm21n?2N2N2N3M2N2O1N2N2N2N2N2N3M2N2N2O1N2N2N2N3M2N2N2N0010O0000000000000000010O000000000000000010O000000000000000010O00000000002N2N2N2O1N2N3M2N2N2N2N2N2N2O1N3M2N2N002N2F\\@5i?N2O1NhTg3"}, "image_id": 450, "id": 7576}, {"iscrowd": 0, "category_id": 1, "bbox": [92.0, 348.0, 57.0, 52.0], "area": 1459, "segmentation": {"size": [512, 512], "counts": "j[^13l?2N2N2N2N2N2O1N2N2N3M2N2N2N2N2N2O1N2N1O00001O00000001O0000000001O02N3M2N2N2N2N2N2N100O000000001O3M2N2N2L4N2N2N2NgTe5"}, "image_id": 450, "id": 7577}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 349.0, 48.0, 50.0], "area": 1292, "segmentation": {"size": [512, 512], "counts": "b;5j?2N2N2N2N2N2N3N1N2N2N2N2N2N3N1N2N2N2N2N3M2N10O001O2N2N3N1N2N2N2N2N2N3M2O1N2N2N2N3M2N2N2O1N2N2N[dW7"}, "image_id": 450, "id": 7578}, {"iscrowd": 0, "category_id": 1, "bbox": [268.0, 363.0, 28.0, 28.0], "area": 406, "segmentation": {"size": [512, 512], "counts": 
"h[V42m?2N2N2N2N2N2N2N2N2N2N2N2O1N02N2N2N2N2N2O2M2N2N2N2N2N2NXd[3"}, "image_id": 450, "id": 7579}, {"iscrowd": 0, "category_id": 1, "bbox": [459.0, 363.0, 44.0, 43.0], "area": 1133, "segmentation": {"size": [512, 512], "counts": "RlU72m?2CMa@165V?Oh@3V?Oh@3V?Oh@3W?j0UAXOj>i0UAXOj>o0O0000002O2O00O1O1N00000000001O1O2N2N2N2N2N2O1N2N2N2N3M2N2N2N2N2N2NTT4"}, "image_id": 450, "id": 7580}, {"iscrowd": 0, "category_id": 1, "bbox": [293.0, 377.0, 46.0, 45.0], "area": 1093, "segmentation": {"size": [512, 512], "counts": "`lb43l?2N2N2N2N2O1N3M2N2N2N2N2N2O2M2N2N2N1O1O0001O00000000101N2N2N2N3M2N1O100O2N2N2N2Ab@9d?N2N2O2M2NcSf2"}, "image_id": 450, "id": 7581}, {"iscrowd": 0, "category_id": 1, "bbox": [499.0, 387.0, 13.0, 27.0], "area": 186, "segmentation": {"size": [512, 512], "counts": "^li72m?2N3M2N2O1N2N2N2N2N2N2N3kC"}, "image_id": 450, "id": 7582}, {"iscrowd": 0, "category_id": 1, "bbox": [47.0, 390.0, 56.0, 53.0], "area": 1379, "segmentation": {"size": [512, 512], "counts": "Xmg02m?2N2O1N3M2N2N2N2N2J]Ol@e0R?]Ol@e0S?5N3M2N2N1O00000001O0001O0000000002N2N2N01O000000000001O002O2M2N2N2N2N2N2N2N3M2O1N2N2N2NZS\\6"}, "image_id": 450, "id": 7583}, {"iscrowd": 0, "category_id": 1, "bbox": [440.0, 392.0, 24.0, 25.0], "area": 287, "segmentation": {"size": [512, 512], "counts": "a\\l62m?2N2N2N2N2N2N2N3M20000001O0N2N2N2N2N2N2N2N2O1NZcg0"}, "image_id": 450, "id": 7584}, {"iscrowd": 0, "category_id": 1, "bbox": [224.0, 398.0, 60.0, 54.0], "area": 1645, "segmentation": {"size": [512, 512], "counts": "`]`32m?2O1N2N2N2N2N2N2c@BU?f0N2N2N00002N2O1N1O0000001O000000002N2N2O1N1O00000000000001O2N2N2N2N2N2O1N3M2N2N2N2N2N2N2N2N2N2N2N2O2Mlba3"}, "image_id": 450, "id": 7585}, {"iscrowd": 0, "category_id": 1, "bbox": [400.0, 409.0, 51.0, 50.0], "area": 1243, "segmentation": {"size": [512, 512], "counts": "f]X63l?2N2N2O1N2N3M2N2N2N2N2O1N3M2N2N2N000001OJoN^AQ1b>RO[Ao0e>52N2N1O1O0001O00001O10O2N2O1KRAUOP?i05O2M2N2N2N2N2N2N3N1N2N2NfRn0"}, "image_id": 450, "id": 7586}, {"iscrowd": 0, "category_id": 1, "bbox": [364.0, 410.0, 22.0, 22.0], "area": 251, "segmentation": {"size": [512, 512], "counts": "Q]f52m?2N2N2N2N2N2O1N3N10000000O1N2N3M2N2N2N2O1Njbn1"}, "image_id": 450, "id": 7587}, {"iscrowd": 0, "category_id": 1, "bbox": [387.0, 412.0, 8.0, 9.0], "area": 40, "segmentation": {"size": [512, 512], "counts": "PmQ62m?2N2N2N02N2N2NRSj1"}, "image_id": 450, "id": 7588}, {"iscrowd": 0, "category_id": 1, "bbox": [326.0, 414.0, 46.0, 46.0], "area": 1097, "segmentation": {"size": [512, 512], "counts": "e]S52m?3M2N2N2N2O1N2N2N2N3M2N2N2N2N2O1N2N2N2N0000000001O001O2N2O1N2N2N3M2N2N2N2N2O1N2N2N3M2N2N2N_bU2"}, "image_id": 450, "id": 7589}, {"iscrowd": 0, "category_id": 1, "bbox": [116.0, 417.0, 29.0, 29.0], "area": 435, "segmentation": {"size": [512, 512], "counts": "^]j13l?2O1N2N2N2N2N2N3M2N2N2O1N000001O2N2O2M2N2N2N2N2N2N2N3M2OaRg5"}, "image_id": 450, "id": 7590}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 430.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "^mo71a2"}, "image_id": 450, "id": 7591}, {"iscrowd": 0, "category_id": 1, "bbox": [509.0, 435.0, 3.0, 6.0], "area": 11, "segmentation": {"size": [512, 512], "counts": "emn71n?3M2\\B"}, "image_id": 450, "id": 7592}, {"iscrowd": 0, "category_id": 1, "bbox": [432.0, 445.0, 51.0, 50.0], "area": 1232, "segmentation": {"size": [512, 512], "counts": "k^h63l?2N2N2O1N2N2N2N3M2N2N2O1N2N2N3M1O00001O0KnN^AQ1b>QO\\Ao0d>SOZAm0f>62N3M1O010O00000000002N2O1N2N2N2N2N3M2N2N2O1N2N2N3M2NbQ>"}, "image_id": 450, "id": 7593}, {"iscrowd": 0, "category_id": 1, "bbox": [280.0, 446.0, 54.0, 
57.0], "area": 1543, "segmentation": {"size": [512, 512], "counts": "S_\\42m?2N2N2N2GHg@:W?Hg@:W?Hg@:W?9N2N2N2N3M2O1N2N2N2N1O0000000000000010O00000000002N2N2N2N3M2N2N2O1N2N2N2N2N2N2N2N2N3M2N2N2O[ah2"}, "image_id": 450, "id": 7594}, {"iscrowd": 0, "category_id": 1, "bbox": [149.0, 449.0, 21.0, 15.0], "area": 189, "segmentation": {"size": [512, 512], "counts": "YnZ21n?3M2O1N2N2N1O1O0001O2N2N11N2N2000F^@5g?N2N2OeaZ5"}, "image_id": 450, "id": 7595}, {"iscrowd": 0, "category_id": 1, "bbox": [392.0, 452.0, 14.0, 14.0], "area": 104, "segmentation": {"size": [512, 512], "counts": "Y^T62m?2N2N2N2N2O1000N2N2N2N2N2Nfad1"}, "image_id": 450, "id": 7596}, {"iscrowd": 0, "category_id": 1, "bbox": [100.0, 456.0, 45.0, 43.0], "area": 977, "segmentation": {"size": [512, 512], "counts": "h^b11n?2N2N2N2N2N2O1N2N2N2N3i@ZOo>h0o@[On>m0000000N2N2N2N00000001O2N2N2N2O2M2N2N2N2N2N001O2N2N2N2N3N1N2N2NVQg5"}, "image_id": 450, "id": 7597}, {"iscrowd": 0, "category_id": 1, "bbox": [456.0, 482.0, 23.0, 23.0], "area": 264, "segmentation": {"size": [512, 512], "counts": "__T72m?2N3M2N2N2O1N2N1O000000000002N2O1N2N2N2N3M2NfP`0"}, "image_id": 450, "id": 7598}, {"iscrowd": 0, "category_id": 1, "bbox": [478.0, 484.0, 24.0, 24.0], "area": 283, "segmentation": {"size": [512, 512], "counts": "a__72m?2N2O1N2N2N2N2N3M000000000010O2N2N2N2N2N2N2N3Md`4"}, "image_id": 450, "id": 7599}, {"iscrowd": 0, "category_id": 1, "bbox": [130.0, 488.0, 24.0, 22.0], "area": 265, "segmentation": {"size": [512, 512], "counts": "e_Q22m?2N2N2N2N2O1N2N1O00001O000001O1O2N2N3M2O1N2N2Na`b5"}, "image_id": 450, "id": 7600}, {"iscrowd": 0, "category_id": 1, "bbox": [359.0, 491.0, 43.0, 21.0], "area": 502, "segmentation": {"size": [512, 512], "counts": "loc521Oj?4O1O1O100O1O1001OO100O1O1O1O1O1O1O1O1O100O1O1001O1O1O1O1O2N1O1O1O1O1O1O1O1O1O2N1O1OQ`f1"}, "image_id": 450, "id": 7601}, {"iscrowd": 0, "category_id": 1, "bbox": [241.0, 492.0, 40.0, 20.0], "area": 437, "segmentation": {"size": [512, 512], "counts": "ooh31n?1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O1O100001O1O1O1O1O1O1O2N1O1O1O1O1O1O1O1O1ORPc3"}, "image_id": 450, "id": 7602}, {"iscrowd": 0, "category_id": 1, "bbox": [502.0, 496.0, 10.0, 16.0], "area": 103, "segmentation": {"size": [512, 512], "counts": "g_k72m?2N2N2O1Z@Ha?j05O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O1O100O1O1O1O2N2Nooa3"}, "image_id": 451, "id": 7608}, {"iscrowd": 0, "category_id": 1, "bbox": [346.0, 0.0, 61.0, 34.0], "area": 1244, "segmentation": {"size": [512, 512], "counts": "[P]51n?2N2N2O1N2N2N2N2N2N3M2N2O1O1O1O1O1O1O2N1OO1O1O1O1O100O1O1O1O11O1O1O2NO1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O1O100O2N2N2N2Nm_d1"}, "image_id": 451, "id": 7609}, {"iscrowd": 0, "category_id": 1, "bbox": [412.0, 0.0, 38.0, 23.0], "area": 503, "segmentation": {"size": [512, 512], "counts": "UP^63l?2O1N2N2N2N2O1O1O2N1O1O1O1O1O00O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O100O2N2Noon0"}, "image_id": 451, "id": 7610}, {"iscrowd": 0, "category_id": 1, "bbox": [450.0, 0.0, 38.0, 31.0], "area": 642, "segmentation": {"size": [512, 512], "counts": "bPQ71n?2N2IN]@4a?7N2N2N000000002N2f@\\OU?i0O1O1O1O1O00O1O1O1O1El@IU?6m@HT?7n@GS?9n@ES?:o@DR?;:O1O1O1O1O1O1O2N2Noo;"}, "image_id": 451, "id": 7611}, {"iscrowd": 0, "category_id": 1, "bbox": [495.0, 0.0, 17.0, 28.0], "area": 278, "segmentation": {"size": [512, 512], "counts": "Y`g71n?3M2N2O1N2N2N2N2N2N3N1O1O1O1O1O1O"}, "image_id": 451, "id": 7612}, {"iscrowd": 0, "category_id": 1, "bbox": [292.0, 15.0, 54.0, 49.0], "area": 1383, "segmentation": {"size": [512, 512], "counts": 
"iPb42m?2N2N2[@I_?8_@J_?=N2N002N2O1N2N3O000000010O0000000N3M2N2N2O1N3M2N2N2O01N2N2O2M2N2N2N2O2N1N2N2N2N3M2Mc@A^?>3N3M2N2N2N2Oenb2"}, "image_id": 451, "id": 7613}, {"iscrowd": 0, "category_id": 1, "bbox": [181.0, 26.0, 54.0, 60.0], "area": 1572, "segmentation": {"size": [512, 512], "counts": "gaj22?0n>1PA1n>1PA1n>1PA1n>1PA2m>0QA2m>0RA1l>2QA0m>c0N2O1N2N000000001O0001O000000000001O0001O00000000002O1N2N2N2N2N2N2N3M2O1N2N2N2N2N2N3M2N2N2O`^Z4"}, "image_id": 451, "id": 7614}, {"iscrowd": 0, "category_id": 1, "bbox": [433.0, 39.0, 68.0, 57.0], "area": 2387, "segmentation": {"size": [512, 512], "counts": "fbh68S?e0J600I71O000001O000000000000000000000000000000000B>000001OAlAlNZ>T1fAlNZ>T1fAlNZ>T1fAlNZ>T190000001O00000000000000WO^A4W?01O003M000000000000000001O00000000000000IW^5"}, "image_id": 451, "id": 7615}, {"iscrowd": 0, "category_id": 1, "bbox": [273.0, 64.0, 66.0, 63.0], "area": 2203, "segmentation": {"size": [512, 512], "counts": "obX42m?3M2N2N2_OHWA:g>HXA9f>IXA9f>IXA9f>IXA9g>HWA:g>HWA:g>b0M2N2N2N2N200000000000001O0N2N2N000001O2O1N2N2N3M2N2N2N2N2N2N2N2N2O1N2N0000001O0000002N2N2N2O1N2N2N2N2N2N2NX]f2"}, "image_id": 451, "id": 7616}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 67.0, 5.0, 8.0], "area": 24, "segmentation": {"size": [512, 512], "counts": "S28i?O1N2N2Nj]m7"}, "image_id": 451, "id": 7617}, {"iscrowd": 0, "category_id": 1, "bbox": [361.0, 81.0, 38.0, 36.0], "area": 528, "segmentation": {"size": [512, 512], "counts": "Zcd51n?2N2O2M2N2N2N2N2N2N0001O00000000000001O01O000000000000011N2N2N2N2N2N2N2N3NU]h1"}, "image_id": 451, "id": 7618}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 82.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "b21]mo7"}, "image_id": 451, "id": 7619}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 95.0, 63.0, 54.0], "area": 1894, "segmentation": {"size": [512, 512], "counts": "P4>a?2N2N2N2N000001O01O2N2N2N2N2N2N2N000001O00000000000002N2N2N2N2O1N2N2N2N01O2O1N3M2N2N2N2N2N2N2N2N2N2O2M2N2N2N2N2N2N2N2N2N2N3NU\\P7"}, "image_id": 451, "id": 7620}, {"iscrowd": 0, "category_id": 1, "bbox": [412.0, 111.0, 20.0, 20.0], "area": 147, "segmentation": {"size": [512, 512], "counts": "mS^63l?2N2N2N01O000001O000001O000001O03M2N2N^lW1"}, "image_id": 451, "id": 7621}, {"iscrowd": 0, "category_id": 1, "bbox": [53.0, 134.0, 50.0, 51.0], "area": 1311, "segmentation": {"size": [512, 512], "counts": "ldj01n?3M2N2N2N2N2N2N3N1N2N2N2N2N2N2N3N1N2N2N2N2N2N3M2OO2N2N2O1N2N3M2N2N2N2N2O1N2N3M2N2N2N2N2N2O1N3M2N2NP[\\6"}, "image_id": 451, "id": 7622}, {"iscrowd": 0, "category_id": 1, "bbox": [303.0, 137.0, 56.0, 58.0], "area": 1690, "segmentation": {"size": [512, 512], "counts": "Xeg41n?2N2O1N2HJc@9Z?Id@9[?7N2N2N2N2N3M2N2O1N2N2N2N2N3M2N2O1N0000001O0001O2N2N2N2N2O2M2N2N2N2N2N2N2O2M2N2N2N2N2N2N3N1N2N2N2NlZ\\2"}, "image_id": 451, "id": 7623}, {"iscrowd": 0, "category_id": 1, "bbox": [382.0, 137.0, 37.0, 36.0], "area": 671, "segmentation": {"size": [512, 512], "counts": "mTo52m?2N2N2N2N2N2N2N2O1N3M2N2N2N1O000000000010O001O3M2N2N2N2N2N2N2O1N2N3M2N2NY[^1"}, "image_id": 451, "id": 7624}, {"iscrowd": 0, "category_id": 1, "bbox": [485.0, 144.0, 27.0, 50.0], "area": 751, "segmentation": {"size": [512, 512], "counts": "Qeb72m?2HN_@4_?N_@4_?8N2N2O101O000N2N2N2N2N2N2O1N2N3O000YAiNd>Z1000000N2ZK"}, "image_id": 451, "id": 7625}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 157.0, 12.0, 23.0], "area": 141, "segmentation": {"size": [512, 512], "counts": "m4g0Z?N3M2O1N2N2N2N2N3M2N2Ofji7"}, "image_id": 451, "id": 7626}, {"iscrowd": 0, "category_id": 1, "bbox": [433.0, 165.0, 63.0, 42.0], 
"area": 1407, "segmentation": {"size": [512, 512], "counts": "eeh61n?2N2N2N2N3M2N2N2N2O1N2N2N2O2O0000000000000N2N2N0001O0000000000000101000N2N2N2O1N2O20O00000000N2O0O000000002N2N2N2N2N3N1N2N2NWj7"}, "image_id": 451, "id": 7627}, {"iscrowd": 0, "category_id": 1, "bbox": [338.0, 178.0, 59.0, 56.0], "area": 1670, "segmentation": {"size": [512, 512], "counts": "aVY52m?2N2N2N2N2N2N2N2N3N1N2N2N2N2N2N2N2N2N3M2N2O0O0000001O0001O2N2N00001O000001O0003M2N2N2N2N2N002N2N2O1Kg@@[?>5N2N2N2N3M2N2NlYi1"}, "image_id": 451, "id": 7628}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 181.0, 5.0, 10.0], "area": 28, "segmentation": {"size": [512, 512], "counts": "e5:g?N2N3M2NWZm7"}, "image_id": 451, "id": 7629}, {"iscrowd": 0, "category_id": 1, "bbox": [18.0, 182.0, 46.0, 47.0], "area": 971, "segmentation": {"size": [512, 512], "counts": "gV91n?3M2N2N2O1IGd@;Z?Gd@;Z?7N2N2N3N0O00000000000000000010O00000000000000001O2O1N2N3M2N2N2N2N2N2N2N2O1Nmio6"}, "image_id": 451, "id": 7630}, {"iscrowd": 0, "category_id": 1, "bbox": [290.0, 189.0, 32.0, 31.0], "area": 514, "segmentation": {"size": [512, 512], "counts": "\\Va42m?2O1N3M2N2N2N2N2N2N2N2N2N2N100O0002N2N2N2N3M2N2N2N2N2N2N2O1N2Nein2"}, "image_id": 451, "id": 7631}, {"iscrowd": 0, "category_id": 1, "bbox": [62.0, 191.0, 65.0, 44.0], "area": 1602, "segmentation": {"size": [512, 512], "counts": "iVo02m?2N2M3N2N2N2M3K@g@c0V?5N2N2N2N2N2N2N0001O000000001011O0000000000O1N2N2O1O1N2N2O0O1O00000000000000000001O0001O2N2N2N2N2N2N2N2O1N2N2N2NdYP6"}, "image_id": 451, "id": 7632}, {"iscrowd": 0, "category_id": 1, "bbox": [389.0, 195.0, 61.0, 47.0], "area": 1414, "segmentation": {"size": [512, 512], "counts": "mfR61n?3M2N2N2N2N2N2N2N2O1N2N2N3M2N2N2N2N2OO2N2N2N01O0000000000000001O0000010O3M2O10000001O00O1N2N2N2N01O0001O3M2N2N2N2N2N2N2N[in0"}, "image_id": 451, "id": 7633}, {"iscrowd": 0, "category_id": 1, "bbox": [481.0, 200.0, 31.0, 58.0], "area": 1147, "segmentation": {"size": [512, 512], "counts": "gf`71n?2h@O^>3`AO_>3^AO`>3^AO`>3^AO`>3^AO`>3^AO`>3^AO`>3_AN_>4_AO^>3`AO^>k0N2N2N2N2000N2N2N2N3M2N2N2N2N2N2N2N2NZI"}, "image_id": 451, "id": 7634}, {"iscrowd": 0, "category_id": 1, "bbox": [106.0, 223.0, 53.0, 48.0], "area": 1330, "segmentation": {"size": [512, 512], "counts": "cWe12m?2N2N2N2N2O1N2N3M2N2N2N2O1QAVOg>j0WAXOi>h0UAZOk>f0SA\\Om>k00O0001O2N2N2N10O000000000000010O2N2N2N2N2N2N2N2N2N3M2N2N2N2O1N2N2N2N2N^X`5"}, "image_id": 451, "id": 7635}, {"iscrowd": 0, "category_id": 1, "bbox": [23.0, 224.0, 32.0, 31.0], "area": 507, "segmentation": {"size": [512, 512], "counts": "`g;1n?3M2N2N2N2N2O1N2N2N3M2N2N1O000001O011N2N2N2N2N2N2N2N3M2O1N2N2NcXT7"}, "image_id": 451, "id": 7636}, {"iscrowd": 0, "category_id": 1, "bbox": [444.0, 224.0, 30.0, 30.0], "area": 465, "segmentation": {"size": [512, 512], "counts": "]Wn61n?2N2N2N2N3M2O1N2N2N2N2N2N3M2OO2N2O1N2N2N2N2N2N3M2N2O1N2N2Nbhb0"}, "image_id": 451, "id": 7637}, {"iscrowd": 0, "category_id": 1, "bbox": [345.0, 235.0, 61.0, 47.0], "area": 1414, "segmentation": {"size": [512, 512], "counts": "Uh\\52m?3M2N2N2N2N2N2N2N2O2M2N2N2N2N2N2N2N11N2N2N00000000001O01O000000000002N2N2O10000000001O0O1N2N2OO00000002N3M2N2N2N2N2O1N2NRhd1"}, "image_id": 451, "id": 7638}, {"iscrowd": 0, "category_id": 1, "bbox": [4.0, 237.0, 10.0, 10.0], "area": 55, "segmentation": {"size": [512, 512], "counts": "`W21o?2M2N2N2O10N2N2N2N`hh7"}, "image_id": 451, "id": 7639}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 241.0, 8.0, 16.0], "area": 72, "segmentation": {"size": [512, 512], "counts": "a7`0a?N2N2N2N2N2N2NYhk7"}, "image_id": 451, "id": 7640}, {"iscrowd": 0, "category_id": 
1, "bbox": [143.0, 257.0, 59.0, 48.0], "area": 1434, "segmentation": {"size": [512, 512], "counts": "ghW23j?NW@4g?4N00010O2N2N2N3M2N2N2N2O1N2N2N3M2N2N2N2N20001O00N3M2N2N2N2N2O1N1O1O0000000001O000001O002N2N2N2O1N2N3M2N2N2N2N2N2O^gj4"}, "image_id": 451, "id": 7641}, {"iscrowd": 0, "category_id": 1, "bbox": [492.0, 257.0, 20.0, 29.0], "area": 342, "segmentation": {"size": [512, 512], "counts": "^Xf71n?2N2N2N3M2O1N2N2N2N3M2N2O1N0003M2O1N2NkG"}, "image_id": 451, "id": 7642}, {"iscrowd": 0, "category_id": 1, "bbox": [472.0, 261.0, 14.0, 14.0], "area": 106, "segmentation": {"size": [512, 512], "counts": "YX\\72m?2O1N2N2N2O2O00O1N2N2N2N2Neg<"}, "image_id": 451, "id": 7643}, {"iscrowd": 0, "category_id": 1, "bbox": [218.0, 262.0, 23.0, 23.0], "area": 268, "segmentation": {"size": [512, 512], "counts": "`X]31n?2N2N2N2N2O2M2N2N2N2N20O1N3M2N2N2N2N2O2M2N2N^WW4"}, "image_id": 451, "id": 7644}, {"iscrowd": 0, "category_id": 1, "bbox": [58.0, 263.0, 30.0, 29.0], "area": 461, "segmentation": {"size": [512, 512], "counts": "eXm02m?2N2N2N2O2M2N2N2N2N2N2N2N1O01O001O2N2N2N2O1N2N2N3M2N2N2N2N]gc6"}, "image_id": 451, "id": 7645}, {"iscrowd": 0, "category_id": 1, "bbox": [395.0, 265.0, 49.0, 58.0], "area": 1491, "segmentation": {"size": [512, 512], "counts": "\\iU62m?2N2N2N2N2N2JCf@?X?Cf@?X?7M2N2N2N2N2O1N2N2N2N2N2N00001O000000000000000002N2N2N2N2N2N2N2N2VOo@b0S?\\Oo@c0R?[OPA6M4[?Jg@4[?Jg@4d?N2N2NifQ1"}, "image_id": 451, "id": 7646}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 272.0, 23.0, 42.0], "area": 514, "segmentation": {"size": [512, 512], "counts": "c8W1h>000002N2N3M2O1N2N2N2N2N2N3M2N2O1N2N2N2N2N2NlVd7"}, "image_id": 451, "id": 7647}, {"iscrowd": 0, "category_id": 1, "bbox": [466.0, 290.0, 12.0, 11.0], "area": 66, "segmentation": {"size": [512, 512], "counts": "XYY72m?1O2O1N2N001O02N2N2N2Nkf`0"}, "image_id": 451, "id": 7648}, {"iscrowd": 0, "category_id": 1, "bbox": [97.0, 295.0, 31.0, 31.0], "area": 496, "segmentation": {"size": [512, 512], "counts": "gi`11n?2N2N2N2N2N2N3M2N2N2N2O1N2N00001O01O3M2N2N2N2N2N2N2N2N2O1N3M\\fo5"}, "image_id": 451, "id": 7649}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 295.0, 70.0, 67.0], "area": 2099, "segmentation": {"size": [512, 512], "counts": "dZi61n?2N2N2O1N2N3M2N2N2N2N2N2N2N2O1N3M2N2N2N1O00000010O000000000001O2N2N2N2O0O0000GdAoN\\>Q1fAmNZ>S1iAjNW>V191O0001O000000002N2N2O1N2N2N2N2N2N3M2N2N2N2O1N2N2N2N3MUf3"}, "image_id": 451, "id": 7650}, {"iscrowd": 0, "category_id": 1, "bbox": [17.0, 299.0, 21.0, 20.0], "area": 223, "segmentation": {"size": [512, 512], "counts": "ei81n?2N2N2N3M2N2N2N2N1O01O2O1N2N2N2N2N2N2N2N]f\\7"}, "image_id": 451, "id": 7651}, {"iscrowd": 0, "category_id": 1, "bbox": [171.0, 308.0, 57.0, 44.0], "area": 1293, "segmentation": {"size": [512, 512], "counts": "oie22m?3M2N2N2N2N2N2O1N3M2N2N2O1o@VOk>j0SAXOm>n000000001O0001O0O1N2N2O1OO01O01O0000000000000001O01O000001O2N2N2N2N2O1N2N3M2N2N2N2Noe]4"}, "image_id": 451, "id": 7652}, {"iscrowd": 0, "category_id": 1, "bbox": [68.0, 329.0, 13.0, 13.0], "area": 97, "segmentation": {"size": [512, 512], "counts": "_ZR11n?3M2N2N2O1N1O11N3M2N2N2OaUg6"}, "image_id": 451, "id": 7653}, {"iscrowd": 0, "category_id": 1, "bbox": [14.0, 331.0, 47.0, 56.0], "area": 1536, "segmentation": {"size": [512, 512], "counts": "X[71o?4L5J5L5K4L4K010000000O0100000O010[OVOTBj0l=[OoAe0Q>_OkAa0T>EfA;[>e000000O01000000O01000000O01003M5K4K6K4L5K4K6K4L5KYTQ7"}, "image_id": 451, "id": 7654}, {"iscrowd": 0, "category_id": 1, "bbox": [58.0, 338.0, 13.0, 13.0], "area": 99, "segmentation": {"size": [512, 512], "counts": 
"fZm02m?3M2N2O1N2O10N2O1N2N2N2NYUl6"}, "image_id": 451, "id": 7655}, {"iscrowd": 0, "category_id": 1, "bbox": [475.0, 341.0, 37.0, 62.0], "area": 1262, "segmentation": {"size": [512, 512], "counts": "nk]72m?2N3M2N2N2N2N2N2O1N3M2N2N2N2N2@QOlAQ1R>QOlAQ1R>QOlAo0T>TOjAk0V>WOhAi0X>YOfAg0Z>?0010O0000000002N3M2N2N10O000YE"}, "image_id": 451, "id": 7656}, {"iscrowd": 0, "category_id": 1, "bbox": [113.0, 343.0, 54.0, 59.0], "area": 1534, "segmentation": {"size": [512, 512], "counts": "nkh11n?2N2N2N3N1N2N2N2N2N2N2N3[OWOoAj0o=YOnAi0P>YOnAi0P>YOnAi0P>YOnAi0P>YOnAh0Q>ZOmAf0S>\\OkAd0V>b000002O1N2N2N2N2N3M2N10O0001O003M2N2N2N2O1N2N2N3M2N2N2N2O1N2N3M2N`T\\5"}, "image_id": 451, "id": 7657}, {"iscrowd": 0, "category_id": 1, "bbox": [169.0, 349.0, 20.0, 20.0], "area": 194, "segmentation": {"size": [512, 512], "counts": "[kd21n?2N2N2K5O1N0000001O001O01O2N2N2N2N2N2NnTQ5"}, "image_id": 451, "id": 7658}, {"iscrowd": 0, "category_id": 1, "bbox": [278.0, 356.0, 45.0, 59.0], "area": 1325, "segmentation": {"size": [512, 512], "counts": "R\\[41n?3N1N2N2N2N2N2N3B_OXAc0f>_OYAb0e>@YAb0e>@YAb0f>@WAb0g>>YAjN_>]1N2N2N2N2N2O0O02N2N2N3M2N2O1N2B\\A]Of>a0\\A]Of>b0[A\\Oh>a0ZA]Oh>a0ZA]Oh>a0=N2N2N3M2O1N2N2N2NTTn2"}, "image_id": 451, "id": 7659}, {"iscrowd": 0, "category_id": 1, "bbox": [71.0, 364.0, 23.0, 23.0], "area": 273, "segmentation": {"size": [512, 512], "counts": "hkS12m?2N2O1N2N2N2N2N2N2N00000001O2N2N2N2N2O1N2N2N]d`6"}, "image_id": 451, "id": 7660}, {"iscrowd": 0, "category_id": 1, "bbox": [94.0, 380.0, 9.0, 8.0], "area": 62, "segmentation": {"size": [512, 512], "counts": "m[_16i?1000010O0000000ST\\6"}, "image_id": 451, "id": 7661}, {"iscrowd": 0, "category_id": 1, "bbox": [132.0, 389.0, 29.0, 30.0], "area": 432, "segmentation": {"size": [512, 512], "counts": "f\\R22m?2N2N2N2N2N2N2N2N2N2N2N000000000002N2N2N2N2N2N2N2N2N2N2NaS_5"}, "image_id": 451, "id": 7662}, {"iscrowd": 0, "category_id": 1, "bbox": [236.0, 394.0, 45.0, 60.0], "area": 1343, "segmentation": {"size": [512, 512], "counts": "X]f32m?3M2N2O1N2N2N2N2C_OXAd0e>^OYAd0f>]OXAe0f>]OXAe01VO_>8^Ac02VO^>W1`AkN^>^1M2N2N2N2O0O0002N3M2N2N2O1N2N2B^A[Od>c0^A[Oe>c0\\A[Of>c0\\A[Of>c0\\A[Of>c0=N2N2N2N3M2O1N2N2NnRc3"}, "image_id": 451, "id": 7663}, {"iscrowd": 0, "category_id": 1, "bbox": [46.0, 407.0, 65.0, 58.0], "area": 2061, "segmentation": {"size": [512, 512], "counts": "Z]g01n?2N2N2N3M2N2N2N2O1N2N3M2N2N2N2N2N3M200O1N2N2N2O2O0000000000001O0000N3M2N2N2OO0000001O2N2O1001O0000N01@]ABe><]ABe><]ACe>:]ADe>:]ADe>;\\ACf>;\\ACf>;\\ACf>;a0N2N2N2N2N_RX6"}, "image_id": 451, "id": 7664}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 421.0, 4.0, 7.0], "area": 16, "segmentation": {"size": [512, 512], "counts": "U=7j?N2N2Nibm7"}, "image_id": 451, "id": 7665}, {"iscrowd": 0, "category_id": 1, "bbox": [300.0, 429.0, 61.0, 52.0], "area": 1495, "segmentation": {"size": [512, 512], "counts": "U^f41n?3M2N2N2N2N2O1N2N3M2N2N2O10000000001O000O1N2N2N2N1O0000000001O0001O000000000001O0000012M2N2N2N2N2N2N2N2O2M2N2N2N2N2N2N2NQR[2"}, "image_id": 451, "id": 7666}, {"iscrowd": 0, "category_id": 1, "bbox": [112.0, 432.0, 38.0, 37.0], "area": 696, "segmentation": {"size": [512, 512], "counts": "T^h11o?1N3M2N2N2N2N2N2N2O2M2N2N2N2N0000000001O01O0001O2N2N2N2N2N2O1N3M2N2N2N2N2NSbd5"}, "image_id": 451, "id": 7667}, {"iscrowd": 0, "category_id": 1, "bbox": [194.0, 432.0, 44.0, 60.0], "area": 1333, "segmentation": {"size": [512, 512], "counts": "^^Q33l?2O1N2N2N2N2N3M2C_OXAc0g>^OXAc0f>@WAb0OZOb>6]Ab0OZOb>6]Ah0a>=N2N2N3N1N2N1O0002N3M2N2N2O1N2A_A\\Oc>b0_A\\Od>b0^A[Od>c0^A[Od>c0^A[Od>c0^A\\Oc>b0?N2N3M2O1N2N2N2NiaX4"}, 
"image_id": 451, "id": 7668}, {"iscrowd": 0, "category_id": 1, "bbox": [250.0, 455.0, 43.0, 43.0], "area": 931, "segmentation": {"size": [512, 512], "counts": "l^m32m?2N2N2N2N2N2O2M2N2N2N2N2N2N2N2O2M2N2N0000000011N2N2N2N2N2N2N2N3N1N2N2N2N2N2N2N3M2O1NUQ]3"}, "image_id": 451, "id": 7669}, {"iscrowd": 0, "category_id": 1, "bbox": [348.0, 475.0, 25.0, 25.0], "area": 335, "segmentation": {"size": [512, 512], "counts": "X_^52m?2N2N2N2N2N3M2N2O1N1O00000010O2N2N2N2N2N3M2N2N2NlPU2"}, "image_id": 451, "id": 7670}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 476.0, 43.0, 36.0], "area": 912, "segmentation": {"size": [512, 512], "counts": "V?e0Z?3M2N2N2O0O1O1O1O1O1O11O1O2N1O1O1O1O1O1O1O2N1O1O1O1O1O1O1O2N1O1O1O1O1O1O1O2N1O1O1O1OQPZ7"}, "image_id": 451, "id": 7671}, {"iscrowd": 0, "category_id": 1, "bbox": [68.0, 479.0, 23.0, 24.0], "area": 295, "segmentation": {"size": [512, 512], "counts": "Z_R13l?2N2N2O1N2N2N2N3M2N1O0011N3M2N2N2N2N2N2N2O2MfPb6"}, "image_id": 451, "id": 7672}, {"iscrowd": 0, "category_id": 1, "bbox": [47.0, 491.0, 33.0, 21.0], "area": 404, "segmentation": {"size": [512, 512], "counts": "gog02m?2N2N2N2N2N3M1O1O1O100O1O1001O1O1O1O1O1O1O1O2N1O1O1O1O1O1O1O1O1NS`g6"}, "image_id": 451, "id": 7673}, {"iscrowd": 0, "category_id": 1, "bbox": [373.0, 495.0, 34.0, 17.0], "area": 305, "segmentation": {"size": [512, 512], "counts": "ooj51n?1O1O1O1O1O100O1O1O1O1O1O1O1O100O1O1002N1O1O1O1O1O1O1O2N1O1O1O1O1OQPd1"}, "image_id": 451, "id": 7674}, {"iscrowd": 0, "category_id": 1, "bbox": [337.0, 497.0, 12.0, 12.0], "area": 79, "segmentation": {"size": [512, 512], "counts": "foX51n?2N2N2N2N2O10O2M2N2N2NZPa2"}, "image_id": 451, "id": 7675}, {"iscrowd": 0, "category_id": 1, "bbox": [303.0, 0.0, 24.0, 23.0], "area": 237, "segmentation": {"size": [512, 512], "counts": "V`g41m?3N2M2O200]@G^?8`@J`?<000O010O001O10O0100E^@7a?Ha@7d?10O1M2N3N`_l2"}, "image_id": 452, "id": 7676}, {"iscrowd": 0, "category_id": 1, "bbox": [317.0, 0.0, 13.0, 6.0], "area": 42, "segmentation": {"size": [512, 512], "counts": "P`n41o?001O1O001O1O001OO1O1N2OQPk2"}, "image_id": 452, "id": 7677}, {"iscrowd": 0, "category_id": 1, "bbox": [223.0, 222.0, 20.0, 23.0], "area": 212, "segmentation": {"size": [512, 512], "counts": "Sg_31n?3N1N3M2O2M3N1N3O0100O0N3N1N3M3N1N3M2OaXV4"}, "image_id": 452, "id": 7678}, {"iscrowd": 0, "category_id": 1, "bbox": [288.0, 29.0, 57.0, 148.0], "area": 8017, "segmentation": {"size": [512, 512], "counts": "mP`4d4\\;00000000000000000000000000000000000000000000000000000000000000000000000000000000000000oKZEh2f:XMZEh2f:XMZEh2f:XMZEh2f:XMZEh2f:XMZEh2o;0000000000000j]c2"}, "image_id": 453, "id": 7679}, {"iscrowd": 0, "category_id": 1, "bbox": [6.0, 48.0, 39.0, 52.0], "area": 1150, "segmentation": {"size": [512, 512], "counts": "jQ32m?3_@MQ?5m@MMMk>8VANLMl>7UAOMLk>8VA6g>LWA6h>c0M3N1N3N0O00010O010O02O2M2N3N2M3N1N3M3N1N3N2M2N3N2M3N1N3N2M2N3N]]Y7"}, "image_id": 453, "id": 7680}, {"iscrowd": 0, "category_id": 1, "bbox": [492.0, 51.0, 20.0, 130.0], "area": 1829, "segmentation": {"size": [512, 512], "counts": "]Uf77S?f0ZOf00000gN`NRD`1X;VOhDj0i:EWE;i:FVE:j:FVE:j:FnDb0R;^OXDX1h;hNUD[1k;eNUD[1k;eNUD[1k;eNUD[1k;eNUD[1k;]10001\\N"}, "image_id": 453, "id": 7681}, {"iscrowd": 0, "category_id": 1, "bbox": [428.0, 58.0, 37.0, 40.0], "area": 1139, "segmentation": {"size": [512, 512], "counts": "SRf6b0U?90000000001O0000000000000000000000000000000000ROkAn0U>TOiAl0X>UOfAk0Z>WOdAj0[>XOcAh0]>=0000000KcAhN]>X1eAfN[>Z150001O000001O0000000000000001O0000101N2N2N2N2N3M2N2N2N000010O02N2N2N2N2N2N2N2N2N2N2Ob\\a6"}, "image_id": 453, "id": 7684}, 
{"iscrowd": 0, "category_id": 1, "bbox": [0.0, 98.0, 23.0, 35.0], "area": 475, "segmentation": {"size": [512, 512], "counts": "Y3j0U?2N2N0000000001O101N3M2N2N2N2N2N2N2N2N2N2N2Na\\d7"}, "image_id": 453, "id": 7685}, {"iscrowd": 0, "category_id": 1, "bbox": [4.0, 116.0, 30.0, 31.0], "area": 424, "segmentation": {"size": [512, 512], "counts": "XT22m?2N2N2N2N2N3M2N2N1O0000000000000000000002N2N2N2N2N2N2N2N2OSl^7"}, "image_id": 453, "id": 7686}, {"iscrowd": 0, "category_id": 1, "bbox": [444.0, 122.0, 29.0, 33.0], "area": 805, "segmentation": {"size": [512, 512], "counts": "\\Tn64[?a0000000000000000000g@Dn>g0O10000000000001O00000000000000000OV\\c0"}, "image_id": 453, "id": 7687}, {"iscrowd": 0, "category_id": 1, "bbox": [104.0, 123.0, 59.0, 58.0], "area": 1650, "segmentation": {"size": [512, 512], "counts": "^Td12m?3M2N2N2N2N2N2O1N2N2N2N2N3M2N2N2N2N2N2N200000010O0000000O1O1000000000N2N2N2N2N2N2O2O000O1N2N2N2N2N2N2N2N3M2N2O1N2N2NoZ^5"}, "image_id": 453, "id": 7688}, {"iscrowd": 0, "category_id": 1, "bbox": [475.0, 128.0, 9.0, 17.0], "area": 153, "segmentation": {"size": [512, 512], "counts": "Pd]7a0_?000000000000000Pl="}, "image_id": 453, "id": 7689}, {"iscrowd": 0, "category_id": 1, "bbox": [367.0, 139.0, 24.0, 24.0], "area": 514, "segmentation": {"size": [512, 512], "counts": "gdg5;Y?<000000000000001O000000000001O000000000000N2@T\\l1"}, "image_id": 453, "id": 7690}, {"iscrowd": 0, "category_id": 1, "bbox": [93.0, 160.0, 33.0, 31.0], "area": 517, "segmentation": {"size": [512, 512], "counts": "`e^11n?2N2O1N2N2N2N2N2N2N3M2N2N2N1O01O01O01O2N2N2N3M2N2N2N2N2N2N2O1N2NcjP6"}, "image_id": 453, "id": 7691}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 163.0, 23.0, 36.0], "area": 477, "segmentation": {"size": [512, 512], "counts": "Z5m0R?001O00000001O001O2O2M2N2N2N2N2N2N2N3N1N2N2N_Zd7"}, "image_id": 453, "id": 7692}, {"iscrowd": 0, "category_id": 1, "bbox": [59.0, 180.0, 56.0, 56.0], "area": 1598, "segmentation": {"size": [512, 512], "counts": "^fm02m?2N2N3M2N2N2N2N2N2N2O1N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N20N2N2N2N2N2N2O1N3M2N2N2N2N2N2O1N2N2N2N2N2N2N2O1N2N2N2N2N`YV6"}, "image_id": 453, "id": 7693}, {"iscrowd": 0, "category_id": 1, "bbox": [140.0, 193.0, 56.0, 50.0], "area": 1464, "segmentation": {"size": [512, 512], "counts": "PWV22m?2N2N2N2N2KFa@=\\?5N2N2N1O000000000000001O2N2N2N2N2N2N2N2N2N2O10000000O1N2N2N2O1N2N3M2N2N2N2N2N2N2N2N2N2N2N2N2N2N2NUim4"}, "image_id": 453, "id": 7694}, {"iscrowd": 0, "category_id": 1, "bbox": [23.0, 216.0, 47.0, 52.0], "area": 1301, "segmentation": {"size": [512, 512], "counts": "[g;1n?2N3M2^@JW?8g@JW?8g@JW?8g@JW?a0N2N2N2N3M2N2N2N2N2N2N2N2O1000O1N3M2N2N2N2N2N2N2N2N2N2N2N2O1N2N2N2N2N2N2N3M2N2N]hl6"}, "image_id": 453, "id": 7695}, {"iscrowd": 0, "category_id": 1, "bbox": [366.0, 217.0, 33.0, 19.0], "area": 591, "segmentation": {"size": [512, 512], "counts": "iVg5c0]?000000000001O0000000000000000000000000000001O000000000000000000UYh1"}, "image_id": 453, "id": 7696}, {"iscrowd": 0, "category_id": 1, "bbox": [204.0, 220.0, 24.0, 25.0], "area": 313, "segmentation": {"size": [512, 512], "counts": "WWV32m?2N2N2N2N2N2N2N2N2N2N3NO2N2N2N2N2N2N2N2N2N3N1Nih]4"}, "image_id": 453, "id": 7697}, {"iscrowd": 0, "category_id": 1, "bbox": [94.0, 232.0, 34.0, 33.0], "area": 556, "segmentation": {"size": [512, 512], "counts": "jW_11n?2N2N2N2O1N2N2N3M2N2N2N2N2N1O001O000002N2N2N3M2N2N2N2N2O1N2N2N2N2NZho5"}, "image_id": 453, "id": 7698}, {"iscrowd": 0, "category_id": 1, "bbox": [470.0, 238.0, 37.0, 37.0], "area": 653, "segmentation": {"size": [512, 512], "counts": 
"kW[71n?2N2N2N2N2N2N2N3M2N2O1N2N2N200000000001O00000O1N2N2N2N2N2N2N2N2N2N2N2N2NlW2"}, "image_id": 453, "id": 7699}, {"iscrowd": 0, "category_id": 1, "bbox": [391.0, 243.0, 58.0, 49.0], "area": 1331, "segmentation": {"size": [512, 512], "counts": "XhS61n?2N2N3N1N2N2N2N2N2N2N20000000000000000N2O1N2N2N2N2N2N00000000000000000000000002N2O1N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2NlWo0"}, "image_id": 453, "id": 7700}, {"iscrowd": 0, "category_id": 1, "bbox": [142.0, 251.0, 18.0, 17.0], "area": 160, "segmentation": {"size": [512, 512], "counts": "TXW21n?2N2N2N2N2N2N2N1O01O2N2N2N2N2N2N2Nog_5"}, "image_id": 453, "id": 7701}, {"iscrowd": 0, "category_id": 1, "bbox": [167.0, 255.0, 52.0, 46.0], "area": 1238, "segmentation": {"size": [512, 512], "counts": "Yhc21n?2N2N2N2N2O100^@E^?;`@G`?=0000N2N2N2N2N2N2N2o@TOk>R1N2O10O1N20N2N2N2N2O1001N1N2N2N2N2N2N2N2N3M2N2N2N2N2N2N2N3M2N2N2NYWb4"}, "image_id": 453, "id": 7702}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 258.0, 26.0, 34.0], "area": 507, "segmentation": {"size": [512, 512], "counts": "X8a0^?2N2N2O1N2N2N2000000000N2N2N3M2N2N2N2N2N2N2N2N2N2N\\gb7"}, "image_id": 453, "id": 7703}, {"iscrowd": 0, "category_id": 1, "bbox": [43.0, 262.0, 24.0, 23.0], "area": 272, "segmentation": {"size": [512, 512], "counts": "che02m?2N2N2N2N2N2N2N2N00000001O001O2N2N2N2N2O1N2N2NcWn6"}, "image_id": 453, "id": 7704}, {"iscrowd": 0, "category_id": 1, "bbox": [118.0, 263.0, 26.0, 25.0], "area": 342, "segmentation": {"size": [512, 512], "counts": "cXk11n?2N3M2N2N2N2N2N2N2N2N2N01O1O2N2N2O1N2N2N2N2N2N2N2N_gg5"}, "image_id": 453, "id": 7705}, {"iscrowd": 0, "category_id": 1, "bbox": [71.0, 267.0, 16.0, 17.0], "area": 113, "segmentation": {"size": [512, 512], "counts": "^hS11n?3M2N2O1000000000001O01N1N2N2NZWd6"}, "image_id": 453, "id": 7706}, {"iscrowd": 0, "category_id": 1, "bbox": [28.0, 274.0, 16.0, 15.0], "area": 117, "segmentation": {"size": [512, 512], "counts": "fX>1n?2O1N3M2N2O10000000O1N2N3M2N2OTgY7"}, "image_id": 453, "id": 7707}, {"iscrowd": 0, "category_id": 1, "bbox": [436.0, 276.0, 59.0, 50.0], "area": 1436, "segmentation": {"size": [512, 512], "counts": "_Yj63l?2N2N2O1N2N2N2N2N2N2N2N0000002O100000000O1N2N2N2O1N2N0000000000000000000000000101N2N2N2N2N2N2N2N2N2N2N2N2N2O1N2N2N2NkV8"}, "image_id": 453, "id": 7708}, {"iscrowd": 0, "category_id": 1, "bbox": [220.0, 280.0, 54.0, 46.0], "area": 1274, "segmentation": {"size": [512, 512], "counts": "^Y^31n?2N2N3M2N2N2O1N2N2J_Oj@c0T?6N0000000000001O2000000000000O1N2N3M2N2000000000N2N2N2N2N2N2N2N2N2N2O1N3M2N2N2N2N2N2N_ff3"}, "image_id": 453, "id": 7709}, {"iscrowd": 0, "category_id": 1, "bbox": [159.0, 304.0, 29.0, 30.0], "area": 444, "segmentation": {"size": [512, 512], "counts": "ni_21n?2N2N2N2N2N3M2N2N2N2N2N2N2N20N2N2N2N2N2N2N2N2N2N2N2N2N3NQfQ5"}, "image_id": 453, "id": 7710}, {"iscrowd": 0, "category_id": 1, "bbox": [238.0, 310.0, 83.0, 87.0], "area": 2638, "segmentation": {"size": [512, 512], "counts": "i[g31h?1\\@1b?2[@0c?8N2N2N2N20001O0N2N2N2N2N2N2N2N2N1O00000000000LkN^AU1b>41O000000000000000000000000000001O00000000K]AnNc>R1_AlNa>T15000NkNZAU1f>2000000000000002N1O00002N3M2N2O1N2N2N2N2N2N2N2N2N2N3M2N2NhUo2"}, "image_id": 453, "id": 7711}, {"iscrowd": 0, "category_id": 1, "bbox": [64.0, 324.0, 9.0, 8.0], "area": 59, "segmentation": {"size": [512, 512], "counts": "UZP16j?1N100000O10006JfUk6"}, "image_id": 453, "id": 7712}, {"iscrowd": 0, "category_id": 1, "bbox": [66.0, 333.0, 15.0, 23.0], "area": 243, "segmentation": {"size": [512, 512], "counts": "cZQ17d?J`@7_?412N5K3M0000O01001O5K6J5JSUg6"}, "image_id": 453, "id": 7713}, 
{"iscrowd": 0, "category_id": 1, "bbox": [490.0, 338.0, 22.0, 40.0], "area": 512, "segmentation": {"size": [512, 512], "counts": "T[e73l?2U@Lg?8N2N2N2M2O2N2N2N2O1000OO2N2N2M3N2N1O2N1^E"}, "image_id": 453, "id": 7714}, {"iscrowd": 0, "category_id": 1, "bbox": [54.0, 339.0, 11.0, 12.0], "area": 101, "segmentation": {"size": [512, 512], "counts": "dZk06j?5KO100000O10O101O5KWUo6"}, "image_id": 453, "id": 7715}, {"iscrowd": 0, "category_id": 1, "bbox": [19.0, 346.0, 20.0, 27.0], "area": 304, "segmentation": {"size": [512, 512], "counts": "nj91o?3L4M3M3L4M3M4K1000O012N3M3L4M3M3L4M3M_T\\7"}, "image_id": 453, "id": 7716}, {"iscrowd": 0, "category_id": 1, "bbox": [213.0, 350.0, 15.0, 14.0], "area": 108, "segmentation": {"size": [512, 512], "counts": "TkZ31n?2N2O1N2N2N2N2OO2N2N2N2N2N2Nmd]4"}, "image_id": 453, "id": 7717}, {"iscrowd": 0, "category_id": 1, "bbox": [60.0, 362.0, 23.0, 32.0], "area": 534, "segmentation": {"size": [512, 512], "counts": "f[n0d0\\?000000000O100000000000000E;0000000000000[Oe00fTf6"}, "image_id": 453, "id": 7718}, {"iscrowd": 0, "category_id": 1, "bbox": [207.0, 366.0, 10.0, 10.0], "area": 60, "segmentation": {"size": [512, 512], "counts": "bkW32m?3M2N2N10O2O1N3M2N^Tc4"}, "image_id": 453, "id": 7719}, {"iscrowd": 0, "category_id": 1, "bbox": [5.0, 368.0, 23.0, 25.0], "area": 373, "segmentation": {"size": [512, 512], "counts": "dk23m?4L4L5J5L1OO10O100000O0100000O015K4L4L0O1N6Loca7"}, "image_id": 453, "id": 7720}, {"iscrowd": 0, "category_id": 1, "bbox": [189.0, 369.0, 15.0, 15.0], "area": 125, "segmentation": {"size": [512, 512], "counts": "hkn23l?2N2N2N2N2N1O01O2N2N2N2N2N2NZdi4"}, "image_id": 453, "id": 7721}, {"iscrowd": 0, "category_id": 1, "bbox": [291.0, 372.0, 52.0, 52.0], "area": 1357, "segmentation": {"size": [512, 512], "counts": "dla41n?2N2N2N2N2N2N2N2N2N2N2N2N2N2JTOUAn0i>TOUAn0j>5N2N2N1O000000000000000001O1O2N200N2N2N2N2N2N2N2N2N2N2N2La@Da?;3N2N2N2N2NiSd2"}, "image_id": 453, "id": 7722}, {"iscrowd": 0, "category_id": 1, "bbox": [93.0, 376.0, 6.0, 8.0], "area": 48, "segmentation": {"size": [512, 512], "counts": "hk^18h?000000000XT^6"}, "image_id": 453, "id": 7723}, {"iscrowd": 0, "category_id": 1, "bbox": [18.0, 386.0, 45.0, 55.0], "area": 1572, "segmentation": {"size": [512, 512], "counts": "]\\9160b?3[@0b?;L3L4M3M4K4M3M3L5L3M000O10OM4000O01000O03N0O010001N4M3M2M102N3L3N3M3L4M3M3L4M3M3L4M2NaRP7"}, "image_id": 453, "id": 7724}, {"iscrowd": 0, "category_id": 1, "bbox": [260.0, 392.0, 23.0, 22.0], "area": 253, "segmentation": {"size": [512, 512], "counts": "e\\R41n?2N2N2N2N2N2N2N2N1O01O0000002N2N2N2N2O1N2N2NaSb3"}, "image_id": 453, "id": 7725}, {"iscrowd": 0, "category_id": 1, "bbox": [195.0, 413.0, 73.0, 82.0], "area": 2826, "segmentation": {"size": [512, 512], "counts": "bmQ32m?3N1N2N2N2N2N2N2N2N2N2N2N2N2N2N2O2M2N2N2N2N2N200000000000000000001O00000000000N2N2N2jA[Nn=g1PB[Nn=m1N20000000N2N3M200001OlNmA:S>EnA:S>DoA:S>DoA;R>CPB;R>CPB;R>CPB;R>CPB;R>CPB;R>CQB:Q>DQB:R>CPB;R>CPB;l>N2N2N2N2NSai3"}, "image_id": 453, "id": 7726}, {"iscrowd": 0, "category_id": 1, "bbox": [130.0, 422.0, 9.0, 11.0], "area": 67, "segmentation": {"size": [512, 512], "counts": "[]Q24h?4O110O00010ON2LlRj5"}, "image_id": 453, "id": 7727}, {"iscrowd": 0, "category_id": 1, "bbox": [292.0, 425.0, 10.0, 9.0], "area": 52, "segmentation": {"size": [512, 512], "counts": "]]b42l?2O2N200O01O1N2N2NcbX3"}, "image_id": 453, "id": 7728}, {"iscrowd": 0, "category_id": 1, "bbox": [89.0, 433.0, 51.0, 40.0], "area": 1361, "segmentation": {"size": [512, 512], "counts": 
"[n\\13i?4L4L5L3L4L4M4O0001O01O0001O01O0001O01O00010O0000010O0001O01O0001O01O0001O01O0001O01O00N3K4L4M3L5K4LXbi5"}, "image_id": 453, "id": 7729}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 457.0, 4.0, 7.0], "area": 16, "segmentation": {"size": [512, 512], "counts": "Y>7j?N3N1Ndam7"}, "image_id": 453, "id": 7730}, {"iscrowd": 0, "category_id": 1, "bbox": [156.0, 460.0, 53.0, 49.0], "area": 1368, "segmentation": {"size": [512, 512], "counts": "V_^21m?2O2M3N1N3M2O2M3N1N3M3N1N3N1N3M3O010O10O010O10O10O01000O010O01000O0100O01M3N1N3N1N3M3N1N3N1N3M3N1N3N1N3MQQg4"}, "image_id": 453, "id": 7731}, {"iscrowd": 0, "category_id": 1, "bbox": [380.0, 469.0, 59.0, 43.0], "area": 1424, "segmentation": {"size": [512, 512], "counts": "h_n51n?2N2N2N2N2N2O1N1O1KBg@?X?5O100O1O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O000002N2O1N2N2N2N2N2N2N2N2N2N000002O1N2N2O2ODj@LX?Ni@I17X?Nm@0U?Nm@0U?Nm@0R`T1"}, "image_id": 453, "id": 7732}, {"iscrowd": 0, "category_id": 1, "bbox": [448.0, 500.0, 31.0, 12.0], "area": 221, "segmentation": {"size": [512, 512], "counts": "o_P71n?100O1O1O1O1O1O1O1O1O12N1O1OO1O100O1O1O1001O1O1O1O1O2N1O1O1NSP`0"}, "image_id": 453, "id": 7733}, {"iscrowd": 0, "category_id": 1, "bbox": [177.0, 243.0, 22.0, 18.0], "area": 194, "segmentation": {"size": [512, 512], "counts": "jgh22l?2O2M3M210O010O10O10O010O10O10O01O0N3N2M2NUXl4"}, "image_id": 454, "id": 7734}, {"iscrowd": 0, "category_id": 1, "bbox": [121.0, 252.0, 19.0, 14.0], "area": 136, "segmentation": {"size": [512, 512], "counts": "Shl11n?3N2M3N0O010O0100O010O010O01002M3N2Mogi5"}, "image_id": 454, "id": 7735}, {"iscrowd": 0, "category_id": 1, "bbox": [215.0, 264.0, 21.0, 18.0], "area": 179, "segmentation": {"size": [512, 512], "counts": "_h[31m?2O2N2M2O200O01000O01000O01000O01N2M2O2M`gY4"}, "image_id": 454, "id": 7736}, {"iscrowd": 0, "category_id": 1, "bbox": [170.0, 268.0, 15.0, 14.0], "area": 122, "segmentation": {"size": [512, 512], "counts": "bXe22m?3N2M3N0O10O0010O0010O3N2M3N^WS5"}, "image_id": 454, "id": 7737}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 268.0, 40.0, 39.0], "area": 807, "segmentation": {"size": [512, 512], "counts": "nhm23l?1N3N2M3N1O2M3N1N3N2M2O2N20O01000O10O10O10O1000O01000M2O2M3N1O2M3N1N3N2M3N1N3NSW^4"}, "image_id": 454, "id": 7738}, {"iscrowd": 0, "category_id": 1, "bbox": [38.0, 346.0, 14.0, 14.0], "area": 110, "segmentation": {"size": [512, 512], "counts": "P[c03l?2O1N3M2O0O00010O03M2O2M2NQeU7"}, "image_id": 454, "id": 7739}, {"iscrowd": 0, "category_id": 1, "bbox": [75.0, 402.0, 15.0, 16.0], "area": 116, "segmentation": {"size": [512, 512], "counts": "mlU12m?1O2N2M3N1O1N010001N3N1N3N2N\\cb6"}, "image_id": 454, "id": 7740}, {"iscrowd": 0, "category_id": 1, "bbox": [116.0, 0.0, 21.0, 9.0], "area": 123, "segmentation": {"size": [512, 512], "counts": "PPj1110l?20001O001O001O00001O001O001O000000N2M3NR`k5"}, "image_id": 455, "id": 7741}, {"iscrowd": 0, "category_id": 1, "bbox": [171.0, 0.0, 47.0, 38.0], "area": 996, "segmentation": {"size": [512, 512], "counts": "f`e21m?3N1N3M3N1N3N2M2O2M2N3N2M2O2O1O001O001O1O001O1ON2N2O1N2O1N2O1N2N2O1N2O1N2N2O1N2O1N2O1N2N2O1NRPc4"}, "image_id": 455, "id": 7742}, {"iscrowd": 0, "category_id": 1, "bbox": [234.0, 0.0, 49.0, 67.0], "area": 1910, "segmentation": {"size": [512, 512], "counts": "hQe31m?2M3N3L3M4oNA]Ba0a=A\\Bb0a=A\\Bc0a=@\\Bb0a=A\\Bb0c=@ZBc0e=]OXBf0h=ZOUBj0j=WOSBk0m=UOPBn0Q>a0O0010O0010O00010O0010O001N1M3N3L3M4M2M3N3L3M3N3L3M4M2M3N3L3M3N3Lf_b3"}, "image_id": 455, "id": 7743}, {"iscrowd": 0, "category_id": 1, "bbox": [364.0, 0.0, 16.0, 8.0], "area": 66, "segmentation": {"size": 
[512, 512], "counts": "PPf52n?1O2N2N1OO1O100O1O100O1O100O10PPR2"}, "image_id": 455, "id": 7744}, {"iscrowd": 0, "category_id": 1, "bbox": [389.0, 0.0, 26.0, 12.0], "area": 175, "segmentation": {"size": [512, 512], "counts": "Q`R61n?2O2N2N1O2N2N0000O1O100O100O1O100O100O1O100O100O1OQ``1"}, "image_id": 455, "id": 7745}, {"iscrowd": 0, "category_id": 1, "bbox": [283.0, 10.0, 49.0, 42.0], "area": 1206, "segmentation": {"size": [512, 512], "counts": "Ta]41l?3N2N3L3N3M2M4M2N2M4M2010O01O01O010O010O01O010O011N10O010O0N210O010O0010O0001L3N3M2N3L3N2N3L3N3MXoi2"}, "image_id": 455, "id": 7746}, {"iscrowd": 0, "category_id": 1, "bbox": [207.0, 18.0, 30.0, 29.0], "area": 514, "segmentation": {"size": [512, 512], "counts": "SaW33k?2M4M2N3M2N3M2N3O0010O0010O010O0010O010O0N3M2N3M2M4M2N3M2NW_Y4"}, "image_id": 455, "id": 7747}, {"iscrowd": 0, "category_id": 1, "bbox": [266.0, 53.0, 49.0, 41.0], "area": 1187, "segmentation": {"size": [512, 512], "counts": "[RU44j?2M4M2M3N3L3N3L3N201O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O0N3L3N2M4M2M4M2M3N3Lo]R3"}, "image_id": 455, "id": 7748}, {"iscrowd": 0, "category_id": 1, "bbox": [396.0, 56.0, 50.0, 43.0], "area": 1558, "segmentation": {"size": [512, 512], "counts": "TRV64l?5K6J6I7J4L0000000O0100000KQOYAo0g>50O100000O1000O100000O1000O100000O1000O1000O1000O100000O14L6I7J5K6J6JWmP1"}, "image_id": 455, "id": 7749}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 59.0, 11.0, 51.0], "area": 384, "segmentation": {"size": [512, 512], "counts": "l1b1^>N2M3N1N3N2N2M9H:E:Gb]j7"}, "image_id": 455, "id": 7750}, {"iscrowd": 0, "category_id": 1, "bbox": [326.0, 61.0, 29.0, 32.0], "area": 472, "segmentation": {"size": [512, 512], "counts": "YRS52m?2N2N2N2N2O1b@FS?=j@ET?=j@ET?e0N1O0001O001O2N2O2M2N2N2N2N2N3N1N2N2N2Ne]^2"}, "image_id": 455, "id": 7751}, {"iscrowd": 0, "category_id": 1, "bbox": [20.0, 75.0, 46.0, 58.0], "area": 1516, "segmentation": {"size": [512, 512], "counts": "hS:2l?3L3N2M4L3I@l@c0Q?_Ol@d0Q?701O0N3M2M3N3L3N3L3N2O2O0010O0010O0010N1M4M2M3N3L3N3L3N2M4M2M3N3L3N3L3N2M4M]mn6"}, "image_id": 455, "id": 7752}, {"iscrowd": 0, "category_id": 1, "bbox": [95.0, 96.0, 44.0, 57.0], "area": 1349, "segmentation": {"size": [512, 512], "counts": "_d_11l?3N2M4M2M4M2M3N3M2M4M2M3N3L3N3L3N2M4M210O010O0001M2N3M2M3N3L3N3L3N2M4M2M4M2N2M4M2M4M2Mi\\j5"}, "image_id": 455, "id": 7753}, {"iscrowd": 0, "category_id": 1, "bbox": [378.0, 103.0, 29.0, 30.0], "area": 504, "segmentation": {"size": [512, 512], "counts": "aSm52m?3M3N1N3N2M2N3N2M3N0O2O1N1O010O01O2O2M2O2M3M3N1N3N2M2O2MX\\d1"}, "image_id": 455, "id": 7754}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 121.0, 13.0, 18.0], "area": 154, "segmentation": {"size": [512, 512], "counts": "Q47f?3M4M21O010O00010L3M3M4MU\\i7"}, "image_id": 455, "id": 7755}, {"iscrowd": 0, "category_id": 1, "bbox": [124.0, 147.0, 28.0, 28.0], "area": 480, "segmentation": {"size": [512, 512], "counts": "VUn11l?4M2M4M2M3N3L3O2O00010O010O00010O010O00O2L3N3L3N2M4M2MYkc5"}, "image_id": 455, "id": 7756}, {"iscrowd": 0, "category_id": 1, "bbox": [221.0, 152.0, 46.0, 55.0], "area": 1378, "segmentation": {"size": [512, 512], "counts": "Qf^32m?1N3M2N3N1N3M2O2M2N3ZO[OnAf0R>@fAc0Y>]O_AJ2k0_>_O_Ad0`>>01O001O1M2O2O010O01N1N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3N1N3M2N3MnZj3"}, "image_id": 455, "id": 7757}, {"iscrowd": 0, "category_id": 1, "bbox": [72.0, 160.0, 54.0, 58.0], "area": 1783, "segmentation": {"size": [512, 512], "counts": 
"dUT12l?3M2m@KY>8dAKX>8fAJX>9dAKY>7eAKY>8dAKX>8eAKY>7eAK[>n0O010O010O01O01O010O010O010O01O01OO2L3N3M2N3L3N3M2N2M40O010O010O0010O0N3M2N3M2M4M2N2N3L`jP6"}, "image_id": 455, "id": 7758}, {"iscrowd": 0, "category_id": 1, "bbox": [27.0, 164.0, 32.0, 37.0], "area": 653, "segmentation": {"size": [512, 512], "counts": "oe=2l?3M2N3M2N3M2M4M2N3M2N3M2N2010O010O001M2N3L3N3M2N3M2N3M2N3L3N2NfZR7"}, "image_id": 455, "id": 7759}, {"iscrowd": 0, "category_id": 1, "bbox": [275.0, 199.0, 12.0, 13.0], "area": 105, "segmentation": {"size": [512, 512], "counts": "^fY43j?4M2N2010O00010O0N3M2MiY`3"}, "image_id": 455, "id": 7760}, {"iscrowd": 0, "category_id": 1, "bbox": [280.0, 200.0, 43.0, 68.0], "area": 1755, "segmentation": {"size": [512, 512], "counts": "RX\\42W?0[A3c>O[A3U>JgA703V>IgA613U>JgA702W>JfA604V>T1L3N201O010O00010O010O01O01OM4M2M4M2M3N3L3N3L3N3L3N2M4M2M4M2M3N3L3N3L^Yn2"}, "image_id": 455, "id": 7761}, {"iscrowd": 0, "category_id": 1, "bbox": [402.0, 205.0, 26.0, 40.0], "area": 644, "segmentation": {"size": [512, 512], "counts": "\\WY62k?4M2M3M4L3M3M4L3M3M4M200010O00010O000@XAIh>3\\ALd>1_AOa>NbA2_>JeA2^>KeA2^>KeA2mWZ1"}, "image_id": 455, "id": 7762}, {"iscrowd": 0, "category_id": 1, "bbox": [50.0, 218.0, 45.0, 56.0], "area": 1446, "segmentation": {"size": [512, 512], "counts": "SXi04j?2N3DIk@9S?Jj@9S?Ij@:T?:M4M2N3M2M4M2N3M2N210O010O010O010O0010O0N3L3N3M2N3M2M3N3M2N3L3N3M2N3M2M4M2N2NkX`6"}, "image_id": 455, "id": 7763}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 222.0, 26.0, 29.0], "area": 373, "segmentation": {"size": [512, 512], "counts": "n6;f?0\\@F`?:_@Ha?<0000000O10N2N2O1O1000O1000000N1N3N2N2N2N2N2Mchb7"}, "image_id": 455, "id": 7764}, {"iscrowd": 0, "category_id": 1, "bbox": [202.0, 227.0, 50.0, 31.0], "area": 853, "segmentation": {"size": [512, 512], "counts": "gWU31m?2M3M4M2M4M2M3N3O0010O0010O0010O0010O00010O010O0001ZOi@b0Z?01O0M3M4O0010O001M21O010ON30O00010O0N3M2M3N3L`hQ4"}, "image_id": 455, "id": 7765}, {"iscrowd": 0, "category_id": 1, "bbox": [320.0, 232.0, 56.0, 53.0], "area": 1400, "segmentation": {"size": [512, 512], "counts": "WXP52m?2N2O1N2N3M2N2N2O2M2N2N2N3N1N2N2N2N00010O000000010O000000010O00000000010O0002N2N2O2M2N2N2N3M2O1N2N3M2N2O1N2N3MThS2"}, "image_id": 455, "id": 7766}, {"iscrowd": 0, "category_id": 1, "bbox": [393.0, 252.0, 45.0, 57.0], "area": 1444, "segmentation": {"size": [512, 512], "counts": "XiT63k?2N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3O010O01O01O010O01M2M3N3L3N3L3N2N3L3N3L3N3L3N2M4M2M4MkgT1"}, "image_id": 455, "id": 7767}, {"iscrowd": 0, "category_id": 1, "bbox": [202.0, 263.0, 14.0, 12.0], "area": 105, "segmentation": {"size": [512, 512], "counts": "]XU32k?4M2O1010O010O00010O0O2L3Nggc4"}, "image_id": 455, "id": 7768}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 266.0, 58.0, 49.0], "area": 1680, "segmentation": {"size": [512, 512], "counts": "R98e?3]@F^??M4N1010O0001M2N3M2M3N3L3N3O00010O010O00O2M2O20O00010O010O010O0001oNTAm0P?O010O00010O010O00010O010M2M3N3L3N3L3N3L3N2MTgR7"}, "image_id": 455, "id": 7769}, {"iscrowd": 0, "category_id": 1, "bbox": [268.0, 268.0, 35.0, 30.0], "area": 629, "segmentation": {"size": [512, 512], "counts": "mXV43j?3N3L3N2M4M2N3O010O00010O010O01O01O010O01O01O010O01O0O1N3L3N3L3N2N3L\\WX3"}, "image_id": 455, "id": 7770}, {"iscrowd": 0, "category_id": 1, "bbox": [180.0, 279.0, 56.0, 44.0], "area": 1512, "segmentation": {"size": [512, 512], "counts": "`Yj22l?2M4M2M3N3L3N3L3N2M4O0010O00010O010O00010O010O00010O01L3O1010O010O00010O010O00010O010O0N2N3L3N3L3N2M4M2M4M2M3NofY4"}, "image_id": 455, "id": 7771}, {"iscrowd": 0, "category_id": 1, "bbox": 
[302.0, 279.0, 28.0, 28.0], "area": 485, "segmentation": {"size": [512, 512], "counts": "YYg42k?4L3N3L3M3M4N100010O00010O0010O0010O00010L3M3M4M2M4L3MUgj2"}, "image_id": 455, "id": 7772}, {"iscrowd": 0, "category_id": 1, "bbox": [310.0, 308.0, 27.0, 23.0], "area": 373, "segmentation": {"size": [512, 512], "counts": "QZk42l?3L3N3L3N2O20O010O00010O010O00010O010O001O0N2M4M2M4MUVg2"}, "image_id": 455, "id": 7773}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 321.0, 2.0, 5.0], "area": 7, "segmentation": {"size": [512, 512], "counts": "Q:5l?MPfn7"}, "image_id": 455, "id": 7774}, {"iscrowd": 0, "category_id": 1, "bbox": [233.0, 322.0, 36.0, 29.0], "area": 644, "segmentation": {"size": [512, 512], "counts": "bjd33j?3N3L3N2M4L30001O010O01O01O010O00010O01O01O010O01O01O01O000M4L3N3L3N2MgUi3"}, "image_id": 455, "id": 7775}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 327.0, 6.0, 15.0], "area": 55, "segmentation": {"size": [512, 512], "counts": "W:?a?N2N3M2N3Ljel7"}, "image_id": 455, "id": 7776}, {"iscrowd": 0, "category_id": 1, "bbox": [162.0, 335.0, 59.0, 46.0], "area": 1500, "segmentation": {"size": [512, 512], "counts": "Y[a21l?3N2M4M2M4M2N2M4M2M4N10010O010O0010O0010O0010O0010O010O00010O010O00010O010O0010O0010O0010O0010O01L3N2N3L3N3L3N2M4M2NPUa4"}, "image_id": 455, "id": 7777}, {"iscrowd": 0, "category_id": 1, "bbox": [332.0, 335.0, 58.0, 49.0], "area": 1451, "segmentation": {"size": [512, 512], "counts": "T[V53k?2M4M2M4L3N2M4M2O2O01O01O01O010O01O01O010O01O01On@YOk>n0010O0010O00010O010O0001RORAi0m>TOWAk0o>1O01O01O01O010O01OO2M2M4L3N2M4M2M3N3Lldl1"}, "image_id": 455, "id": 7778}, {"iscrowd": 0, "category_id": 1, "bbox": [264.0, 349.0, 33.0, 42.0], "area": 785, "segmentation": {"size": [512, 512], "counts": "o[T41l?3N3L3N3L3N2M4L3N3L3N2M4L30010O0010O000N3M2M4L3N2M4M2M4M2M3M4M2MoT[3"}, "image_id": 455, "id": 7779}, {"iscrowd": 0, "category_id": 1, "bbox": [310.0, 365.0, 18.0, 13.0], "area": 135, "segmentation": {"size": [512, 512], "counts": "d[k41l?3N2N3O010O00010O010O00010O01O0M3N`dk2"}, "image_id": 455, "id": 7780}, {"iscrowd": 0, "category_id": 1, "bbox": [295.0, 386.0, 27.0, 26.0], "area": 438, "segmentation": {"size": [512, 512], "counts": "alc44j?2M3N3L3N3L30010O010O00010O010O00010O01O0M3N3L3N3L3Nhcn2"}, "image_id": 455, "id": 7781}, {"iscrowd": 0, "category_id": 1, "bbox": [324.0, 387.0, 57.0, 44.0], "area": 1390, "segmentation": {"size": [512, 512], "counts": "i\\R54j?2M3N3L3N3L3N2M4M210O00010O010O00010O010O00010O010O00010O010O00010O010O00010O010O00010O010O0001L3N3L3N2M4M2M4M2M]SQ2"}, "image_id": 455, "id": 7782}, {"iscrowd": 0, "category_id": 1, "bbox": [224.0, 390.0, 24.0, 17.0], "area": 233, "segmentation": {"size": [512, 512], "counts": "_\\`31l?3N2M4N101O01O010O01O01O010O00010O010O000N3L3NecS4"}, "image_id": 455, "id": 7783}, {"iscrowd": 0, "category_id": 1, "bbox": [147.0, 391.0, 58.0, 47.0], "area": 1494, "segmentation": {"size": [512, 512], "counts": "PmY24i?3N2M4M2M4M2M3M4M2N3O01O01O01O010O01O01O010O01O01O01O01O010O01O01O0QOTAk0P?010O00010O010O00010O01O01O01M2M3N3L3N3L3N2M4LUSi4"}, "image_id": 455, "id": 7784}, {"iscrowd": 0, "category_id": 1, "bbox": [249.0, 397.0, 30.0, 34.0], "area": 618, "segmentation": {"size": [512, 512], "counts": "Tml33k?2M4M2N3L3N2M4M2M4N110O01O01O010O01O01O01L3N3L3N3M2M3N3L3N]Sd3"}, "image_id": 455, "id": 7785}, {"iscrowd": 0, "category_id": 1, "bbox": [212.0, 404.0, 31.0, 37.0], "area": 688, "segmentation": {"size": [512, 512], "counts": "_]Z33j?3N3L3N2N3L3N3L3N3M2M301O0010O0010O01N1N2M4M2N3L3N2M4M2N3L3NWSV4"}, "image_id": 455, "id": 7786}, {"iscrowd": 
0, "category_id": 1, "bbox": [482.0, 414.0, 30.0, 42.0], "area": 819, "segmentation": {"size": [512, 512], "counts": "h]a72k?3N2M4M2N3L3N3M2M3N3M201O010O00010O010O01O03M10O0010O0010OkB"}, "image_id": 455, "id": 7787}, {"iscrowd": 0, "category_id": 1, "bbox": [79.0, 419.0, 64.0, 54.0], "area": 1553, "segmentation": {"size": [512, 512], "counts": "_mW11n?3M3N1N3M2O0O01O011N3N2M21O0O2M3N1N3M2O2M3M2O2M2O2M3M2O2M2N3N2M2O2M2N3N02M3M2O2M2O2M3M2O2M2N3N1N10O0002O2M3M2O2M2O2M3M2O2M2N3NiQh5"}, "image_id": 455, "id": 7788}, {"iscrowd": 0, "category_id": 1, "bbox": [243.0, 429.0, 15.0, 13.0], "area": 116, "segmentation": {"size": [512, 512], "counts": "dmi32l?3M2N2O2O010O010O01O0N3M2N2Nabn3"}, "image_id": 455, "id": 7789}, {"iscrowd": 0, "category_id": 1, "bbox": [306.0, 430.0, 58.0, 49.0], "area": 1604, "segmentation": {"size": [512, 512], "counts": "\\^i41m?2N2M4M2M4M2M3N3L301L3N3L31O010O01O01O010O01O01O010O010O00010O010O01O01O010O01O01O010O01O01M2N3L3N3L3N2M4M2N3L3N2MSbY2"}, "image_id": 455, "id": 7790}, {"iscrowd": 0, "category_id": 1, "bbox": [41.0, 443.0, 50.0, 58.0], "area": 1441, "segmentation": {"size": [512, 512], "counts": "Ynd02n?1N3M3N1N3N2M2N3N2M2O2M3M2O2M3N1N3M3N1N3N2M2N3O1O01O1M2O2M3N1N3M3N1N3N2M2N3N2M2O2M3M2O2M3N1N3M3N1NlPb6"}, "image_id": 455, "id": 7791}, {"iscrowd": 0, "category_id": 1, "bbox": [254.0, 449.0, 26.0, 21.0], "area": 334, "segmentation": {"size": [512, 512], "counts": "\\^o32k?4L3N3L30001O01O010O01O01O01O010O01O01O01N1M4L3N2Mkac3"}, "image_id": 455, "id": 7792}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 456.0, 48.0, 56.0], "area": 1733, "segmentation": {"size": [512, 512], "counts": "^>k01XOP?n0M1O011N2O2M3N2M2N3N2N2O010OOO100O12N1O2N2M3N1N3N2M2N01O010O010O01O01O01BVAFj>:XACk>:XADj>:WADk>:XACk>:`0N2M2O2M3Nm`W7"}, "image_id": 455, "id": 7793}, {"iscrowd": 0, "category_id": 1, "bbox": [124.0, 458.0, 54.0, 51.0], "area": 1445, "segmentation": {"size": [512, 512], "counts": "Q_n11o?2M3M3N1N3N2M3M2O2M3N2M2O2M3M3N0O01O01O010O0NlNZAS1g>3O010O01O01O010O010O0002O0O102M3M2O2M3N1N3N2M3M2O2M3N1N3M3Nk`V5"}, "image_id": 455, "id": 7794}, {"iscrowd": 0, "category_id": 1, "bbox": [205.0, 462.0, 30.0, 30.0], "area": 470, "segmentation": {"size": [512, 512], "counts": "nnV32m?2N1N3N2N2N2N2N2N2N2N2N2M2O20O0O2N2N2M3N2N2N2N2N2N2N1O2N2NVQZ4"}, "image_id": 455, "id": 7795}, {"iscrowd": 0, "category_id": 1, "bbox": [468.0, 473.0, 44.0, 39.0], "area": 1227, "segmentation": {"size": [512, 512], "counts": "c_Z71l?4M2M4M2N2M4M2M4M2N2N3O010O010O00010O010O01O01M2010O010O00001O001O00001O001O001O00001O"}, "image_id": 455, "id": 7796}, {"iscrowd": 0, "category_id": 1, "bbox": [269.0, 474.0, 56.0, 38.0], "area": 1170, "segmentation": {"size": [512, 512], "counts": "^oV42m?2N2N1N3N2N2N2N2N2N2N2N2M2O2N2N2N2N2O01O1O1O1O1O1O1O001O1O1O1O1O1O1O1O1O001O1O1O1O1O1O1O1O1O001O1O1O1O1O1N2N2NVPm2"}, "image_id": 455, "id": 7797}, {"iscrowd": 0, "category_id": 1, "bbox": [97.0, 492.0, 27.0, 20.0], "area": 329, "segmentation": {"size": [512, 512], "counts": "go`13m?1N3N2M2O0O100L400O100O100O100O12N2N2N1O2N2N2N1O2N2NR`Q6"}, "image_id": 455, "id": 7798}, {"iscrowd": 0, "category_id": 1, "bbox": [161.0, 498.0, 18.0, 14.0], "area": 170, "segmentation": {"size": [512, 512], "counts": "go`23l?3N2M2O1N1O100O100001O2N2N2N1O2N2NRPV5"}, "image_id": 455, "id": 7799}, {"iscrowd": 0, "category_id": 1, "bbox": [45.0, 504.0, 19.0, 8.0], "area": 90, "segmentation": {"size": [512, 512], "counts": "oof01o?0O1O100O100O1O100O100O100001O2N2N1OR`o6"}, "image_id": 455, "id": 7800}, {"iscrowd": 0, "category_id": 1, "bbox": [125.0, 504.0, 
13.0, 8.0], "area": 63, "segmentation": {"size": [512, 512], "counts": "mon12n?1N1O100O1O100O12N1O2N1OR`j5"}, "image_id": 455, "id": 7801}, {"iscrowd": 0, "category_id": 1, "bbox": [149.0, 507.0, 10.0, 5.0], "area": 28, "segmentation": {"size": [512, 512], "counts": "ooZ21o?0O1O100O100O12N1ORP`5"}, "image_id": 455, "id": 7802}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 510.0, 2.0, 2.0], "area": 3, "segmentation": {"size": [512, 512], "counts": "n?2o?OQ`n7"}, "image_id": 455, "id": 7803}, {"iscrowd": 0, "category_id": 1, "bbox": [257.0, 510.0, 5.0, 2.0], "area": 8, "segmentation": {"size": [512, 512], "counts": "ooP41n?100001OQ`l3"}, "image_id": 455, "id": 7804}, {"iscrowd": 0, "category_id": 1, "bbox": [197.0, 0.0, 66.0, 34.0], "area": 1687, "segmentation": {"size": [512, 512], "counts": "U`R33l?5L5K4L5K4K5L00000O1000O1000O10O1000O10O100000000O10001O1N0100000O010000000O1000000O1000000O100000000O1000000O100000000O104L4L5J5L__l3"}, "image_id": 456, "id": 7805}, {"iscrowd": 0, "category_id": 1, "bbox": [386.0, 0.0, 49.0, 24.0], "area": 732, "segmentation": {"size": [512, 512], "counts": "PPQ62n?3M2N2N2N2N2N2N3M2N2N0000O100O10000O100O100O100O100O100O10000O100O100O100O100O100O10000O102M3N2Mk_V1"}, "image_id": 456, "id": 7806}, {"iscrowd": 0, "category_id": 1, "bbox": [29.0, 3.0, 60.0, 36.0], "area": 1614, "segmentation": {"size": [512, 512], "counts": "[`>1o?5K4K6K5K4L4K1000O1000O1000O10O100000O10O1000O1000O1000O4M04L1O000O010000000O01000000O01000000O010000000O05L5K4L5J6K4LR_c6"}, "image_id": 456, "id": 7807}, {"iscrowd": 0, "category_id": 1, "bbox": [329.0, 9.0, 54.0, 61.0], "area": 1971, "segmentation": {"size": [512, 512], "counts": "XaT54l?5K4K6K5K4L3M0O1000O10O100000O10O100JSOYAl0h>6000000_OoNRBQ1o=TOlAl0T>XOhAh0X>`0O0100000O10O100000O10O100000O03N5K5K4L5J5L5K4L5J6K4LW^P2"}, "image_id": 456, "id": 7808}, {"iscrowd": 0, "category_id": 1, "bbox": [400.0, 12.0, 63.0, 61.0], "area": 2176, "segmentation": {"size": [512, 512], "counts": "VQX62n?1N3N1N3M2O2M3M2O2M2O2M3M2O2M2N3N1N3M3N1N3N0FbNQB^1P>dNmA]1R>eNmAZ1T>9O000102M00010O2N2O2M2O2M2N3N0O03N1N1O01O01O02N2O2M2N3N1ZORA7Q?FQA9Q?EQA8Q?FQA8R?EQA9P?FRA7]?N3N1N_^h0"}, "image_id": 456, "id": 7809}, {"iscrowd": 0, "category_id": 1, "bbox": [64.0, 45.0, 73.0, 121.0], "area": 3723, "segmentation": {"size": [512, 512], "counts": "dQP14l?4L5K2M10O1001N6K4L5K4L5J5L5K1O0O010000000O012N4L5J5L5K3M00O0100004L5N1010dBnMdJcA6^>O]A1b>5YAKg>:TAFl>g00O010000000O010000000O010000005J6K5K5K5K4Ko\\Y2"}, "image_id": 456, "id": 7813}, {"iscrowd": 0, "category_id": 1, "bbox": [450.0, 75.0, 62.0, 80.0], "area": 2943, "segmentation": {"size": [512, 512], "counts": "SSQ72m?3N1l@KZ>8dAJY>9dAJZ>7eAJY>9dAJY>9dAJZ>7eAJW>;fAHX>:fAGX>;fAHW>S1N1N3N2M2N3N2M010O0010O0010O0010O0010O0010O00010O102M2N3N2M2O2M3M2O2M3M2O2M3N1N3M3N1WORA=Q?AQA=Q?@QA>Q?AQA<\\?N1N3N2MTL"}, "image_id": 456, "id": 7814}, {"iscrowd": 0, "category_id": 1, "bbox": [229.0, 99.0, 70.0, 38.0], "area": 1764, "segmentation": {"size": [512, 512], "counts": "acb32m?6K4L5K4K5L000000OK60000000O0100000O010000000O0100000O01005K000O01000000O01000000O01000000O01000000O01000000O01000000O010000000O6K4L5K4K6KV\\Z3"}, "image_id": 456, "id": 7815}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 107.0, 3.0, 10.0], "area": 16, "segmentation": {"size": [512, 512], "counts": "[3:k?K4L\\\\n7"}, "image_id": 456, "id": 7816}, {"iscrowd": 0, "category_id": 1, "bbox": [347.0, 113.0, 64.0, 40.0], "area": 1741, "segmentation": {"size": [512, 512], "counts": 
"ic]56j?4L5K5J5L5K3M000O0100000O10O100000O0100000O10O100000O0100000O10O100000O01000003M5L3OOM01000000O01000000O01000000O0101O4L5J5L5Kb[b1"}, "image_id": 456, "id": 7817}, {"iscrowd": 0, "category_id": 1, "bbox": [5.0, 129.0, 29.0, 26.0], "area": 328, "segmentation": {"size": [512, 512], "counts": "bd21n?2O1N2N3M2N2N2O0O1O00000001O01O0000000002N2O1N2N3M2N2N2Ofk^7"}, "image_id": 456, "id": 7818}, {"iscrowd": 0, "category_id": 1, "bbox": [485.0, 148.0, 27.0, 51.0], "area": 938, "segmentation": {"size": [512, 512], "counts": "[eb73i?NZ@3e?NY@5d?5N2b@DV?>g@DW?>h@DU?e0HVOXAl0f>UOXAm0f>VOWAm0f>8O1N3M1O10O000010O02N3N1NN2010O]K"}, "image_id": 456, "id": 7819}, {"iscrowd": 0, "category_id": 1, "bbox": [254.0, 153.0, 63.0, 39.0], "area": 1697, "segmentation": {"size": [512, 512], "counts": "WUo32n?5K5AEn@`0n>Em@`0m>>LO100000O010000000O0100000O10O6K3M000O010000000O010000000O0100000O010000000O010000000O0100000O010000000O3N4L5K5K4KcZQ3"}, "image_id": 456, "id": 7820}, {"iscrowd": 0, "category_id": 1, "bbox": [361.0, 158.0, 76.0, 38.0], "area": 1876, "segmentation": {"size": [512, 512], "counts": "ded54l?5J6K2N000O1ECSA=m>Hn@7S?;0000O10O100000O10O100000O10O100000O10O1000O10O100000O10O100000O10O100000O10O100000O10O101O3M000O10O10N2000O010000000O01000001O5J6K5KaZU1"}, "image_id": 456, "id": 7821}, {"iscrowd": 0, "category_id": 1, "bbox": [28.0, 159.0, 55.0, 51.0], "area": 1450, "segmentation": {"size": [512, 512], "counts": "kU>1n?2N2N2N2O1N2^@E\\?=b@E\\?a0N2N2N2N2N1O002N2N2N2N2N2N2N1O01O0001O00000001O2N2N2N2N2N2N201O0OO01O2N2WOk@d0Z?N2N2N2N2N2N2N2N2N2NZZf6"}, "image_id": 456, "id": 7822}, {"iscrowd": 0, "category_id": 1, "bbox": [299.0, 181.0, 34.0, 30.0], "area": 659, "segmentation": {"size": [512, 512], "counts": "nee42n?5K5K4K6KO1000O1000O1000O1000O1000O10O1000L4000O1000O1001N6K5K4L5JhYi2"}, "image_id": 456, "id": 7823}, {"iscrowd": 0, "category_id": 1, "bbox": [4.0, 184.0, 24.0, 26.0], "area": 297, "segmentation": {"size": [512, 512], "counts": "oU21o?1N3M2N2N2O1N3M2N2O101O01O000N201O0Ja@I`?6a@Ha?67M2N2Niia7"}, "image_id": 456, "id": 7824}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 184.0, 45.0, 52.0], "area": 1720, "segmentation": {"size": [512, 512], "counts": "SVi63m?4L5K5K5J5L5K5K4DjNfAV1Y>90O100000O10O100003M05K2N0O0100000O10O100000O010000000O011O5RO[A7j>D[A7j>C\\A7[?L5KjX`0"}, "image_id": 456, "id": 7825}, {"iscrowd": 0, "category_id": 1, "bbox": [379.0, 200.0, 49.0, 85.0], "area": 2611, "segmentation": {"size": [512, 512], "counts": "_fm52m0OR>6jAOQ>6jANIJh=e060O10O100000O102NO10O1000O1N20O10O1000O1000O1000O10O103M4L5K4K6Kgh[3"}, "image_id": 456, "id": 7829}, {"iscrowd": 0, "category_id": 1, "bbox": [505.0, 212.0, 7.0, 15.0], "area": 59, "segmentation": {"size": [512, 512], "counts": "hfl73m?1N2N3N1N2N3ZI"}, "image_id": 456, "id": 7830}, {"iscrowd": 0, "category_id": 1, "bbox": [318.0, 231.0, 37.0, 28.0], "area": 724, "segmentation": {"size": [512, 512], "counts": "^Wo42m?5L5K4L5K1N0100000000O0100000000O010000000O010000000O010000000O02O6J5K5KXX^2"}, "image_id": 456, "id": 7831}, {"iscrowd": 0, "category_id": 1, "bbox": [449.0, 236.0, 50.0, 55.0], "area": 1912, "segmentation": {"size": [512, 512], "counts": "lgP74l?5K4L5J5L5K4L5K3L0100000O10O1000O10O100000O10O1000O10OJjNdAX1Z>700O10O1000O10O100000O10O10002N4YO\\AIj>2ZAJj>2[AIj>2ZAJj>1_X6"}, "image_id": 456, "id": 7832}, {"iscrowd": 0, "category_id": 1, "bbox": [124.0, 244.0, 47.0, 70.0], "area": 2007, "segmentation": {"size": [512, 512], "counts": 
"nWn13m?5K4L1N1M2010004L5J5L5K4L5K4K6K4L5K3L01000000O01000000O01000000O2O5K4L5J5L5K4L5K4K6K4L5K4K6K`VZ5"}, "image_id": 456, "id": 7833}, {"iscrowd": 0, "category_id": 1, "bbox": [258.0, 259.0, 59.0, 42.0], "area": 1629, "segmentation": {"size": [512, 512], "counts": "_XQ43m?4L4K6K4L4L5J2OO100000O0100000O10O100000O0100000O0100000O10O100000O0100000O0100000O10O100000O0100000O02O4L5K4L4K6K4LRWQ3"}, "image_id": 456, "id": 7834}, {"iscrowd": 0, "category_id": 1, "bbox": [329.0, 287.0, 25.0, 24.0], "area": 441, "segmentation": {"size": [512, 512], "counts": "SiT55k?5K5K4L1N10000000O01000000O010000000O0104L5K5K5J_f^2"}, "image_id": 456, "id": 7835}, {"iscrowd": 0, "category_id": 1, "bbox": [70.0, 298.0, 45.0, 43.0], "area": 1064, "segmentation": {"size": [512, 512], "counts": "RZS11n?2N2N2O1GJe@8Y?Je@8Y?Je@85Cm>f0QA\\Om>f0QA\\Om>m0N0000002N2O1000O1N1O0000010O0000002N2N2N2N2N2N2N2O1N2N3M2N2N2N2N2N2NUVV6"}, "image_id": 456, "id": 7836}, {"iscrowd": 0, "category_id": 1, "bbox": [265.0, 314.0, 60.0, 39.0], "area": 1523, "segmentation": {"size": [512, 512], "counts": "VjT41o?4L4K6K4L5K4K1000O1000O1000O10O1000O1000O1000O10O1000O10O100000O10O1000O10O100000O10O1000O10O100000O10O1003L6K4L5K4L4K^Um2"}, "image_id": 456, "id": 7837}, {"iscrowd": 0, "category_id": 1, "bbox": [400.0, 314.0, 72.0, 49.0], "area": 2654, "segmentation": {"size": [512, 512], "counts": "\\ZX65k?=C?00000000O1000O100000003M000000000000000O100000O102N3M0000O10000000000000O10000;E8G10000000006J=Codc0"}, "image_id": 456, "id": 7838}, {"iscrowd": 0, "category_id": 1, "bbox": [102.0, 330.0, 34.0, 34.0], "area": 612, "segmentation": {"size": [512, 512], "counts": "jZc11n?2N2N2N2N2N2N3M2N2N2N2N2N2N2N2O1N11N2N2N2N2N2N2O1N2N2N2N2N2N2N3M2NVek5"}, "image_id": 456, "id": 7839}, {"iscrowd": 0, "category_id": 1, "bbox": [29.0, 343.0, 45.0, 44.0], "area": 1127, "segmentation": {"size": [512, 512], "counts": "`k>1o?1N2N2N2FJg@8W?Jg@8W?Kf@7X?:k@ZOm>h0QAZOm>n0O1N0000001O3M2O10O1N2N1O00001O01O0002N2N2N2N2N2N2N3M2N2O1N2N2N2N2N2KV@1cTk6"}, "image_id": 456, "id": 7840}, {"iscrowd": 0, "category_id": 1, "bbox": [274.0, 362.0, 52.0, 33.0], "area": 1191, "segmentation": {"size": [512, 512], "counts": "c[Y42n?5K5K4K6K3M00000O10O100000O10O1000O10O100000O10O1002N0O10O10000N11000O1000O1000O1000O1000O103M5K5K5J5LPdl2"}, "image_id": 456, "id": 7841}, {"iscrowd": 0, "category_id": 1, "bbox": [346.0, 364.0, 25.0, 26.0], "area": 468, "segmentation": {"size": [512, 512], "counts": "`[]548M[?8`@L\\?>M10003MO0100000O10O100000O0100000O4M4L5K4K5LPTV2"}, "image_id": 456, "id": 7842}, {"iscrowd": 0, "category_id": 1, "bbox": [420.0, 370.0, 64.0, 52.0], "area": 2273, "segmentation": {"size": [512, 512], "counts": "T\\b67i?5K6J6J6J4K100000O1000O1000O1000O100000O1000O100000O1000O100000O10GSO^An0a>9000O100000O1000O100000O1000O1000O1000O10003M6J5K0O1^Om@7T?HQA3U?GQA3hR>"}, "image_id": 456, "id": 7843}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 378.0, 21.0, 68.0], "area": 1006, "segmentation": {"size": [512, 512], "counts": "k;S2m=0000O0100001N5L4L5K4L4K01000nNaAc0a>[OdA?a>]OcA?S?L5K4L\\Re7"}, "image_id": 456, "id": 7844}, {"iscrowd": 0, "category_id": 1, "bbox": [68.0, 380.0, 48.0, 54.0], "area": 1349, "segmentation": {"size": [512, 512], "counts": "a\\R11n?2N2IN]@5`?7N2O10000O1N2N2N2N3M2O1N2N2N2N2N2N2N2N3NO2O100001O000000N3N1N2N2N2ROVAc0l>\\OUAb0m>\\OUAb0m>\\OUAb0V?N3@b@9d?N3N1N2N2NnbU6"}, "image_id": 456, "id": 7845}, {"iscrowd": 0, "category_id": 1, "bbox": [156.0, 381.0, 42.0, 40.0], "area": 1157, "segmentation": {"size": [512, 512], "counts": 
"[\\^21?1o>3h@LN4V?5h@0X?;1000O4M3MOKYOSAg0m>6000O0100000O0100000O01000000O01000000O0100000O011O4L5J5L4L4L4KZcl4"}, "image_id": 456, "id": 7846}, {"iscrowd": 0, "category_id": 1, "bbox": [285.0, 408.0, 54.0, 44.0], "area": 1328, "segmentation": {"size": [512, 512], "counts": "Zm^43m?3M2M3N3L3N2M3N3L3N2N0O10O0100O0100O010O010O0100O0100O010O0100O0100O010O010O0100O0102M3N2M4M2N2M4M2M3N2M4M_Rf2"}, "image_id": 456, "id": 7847}, {"iscrowd": 0, "category_id": 1, "bbox": [356.0, 411.0, 31.0, 26.0], "area": 577, "segmentation": {"size": [512, 512], "counts": "o\\b53m?5K5K4L5J1000000O010000000O010000000O03N3MO10O100O1O014L4L5JfRn1"}, "image_id": 456, "id": 7848}, {"iscrowd": 0, "category_id": 1, "bbox": [124.0, 414.0, 20.0, 24.0], "area": 270, "segmentation": {"size": [512, 512], "counts": "S]n12m?3N2M3N3L3N2M4M1O0O0102M4M2N2M3N3L3N2Mabg5"}, "image_id": 456, "id": 7849}, {"iscrowd": 0, "category_id": 1, "bbox": [427.0, 425.0, 67.0, 52.0], "area": 2271, "segmentation": {"size": [512, 512], "counts": "Qne62m?6K5K6J5K3L10002NO1000O1000N2000O1000OLVOTAj0l>50000O01000000000O010000000O10O100000HRO^An0b>712M6K000000O10O10000000O10O1000QOcA;]>EeA8\\>HdA8[>IeA7[>IdA8\\>GfA8Z>HlA2Z>HlA1Z>JkA1X?JYa8"}, "image_id": 456, "id": 7850}, {"iscrowd": 0, "category_id": 1, "bbox": [147.0, 441.0, 61.0, 33.0], "area": 1608, "segmentation": {"size": [512, 512], "counts": "RnY23l?8I2N000LEc@;]?400O6K7I000000O1000O10000000O1000O10000000O1000O100022MO00000O100000O100000O100000O100000O100000O10002N7I7I8H^ag4"}, "image_id": 456, "id": 7851}, {"iscrowd": 0, "category_id": 1, "bbox": [326.0, 451.0, 53.0, 42.0], "area": 1329, "segmentation": {"size": [512, 512], "counts": "e^S51n?6K4L5K5K4K0100000O010000000O010000000O0100000O0100000H[OUAe0k>70100000O010000000O01000000O010002N5J6K4L5K4L5JPQR2"}, "image_id": 456, "id": 7852}, {"iscrowd": 0, "category_id": 1, "bbox": [14.0, 453.0, 60.0, 34.0], "area": 1592, "segmentation": {"size": [512, 512], "counts": "]^75k?6J3L100N2O101O6J5KO10O10000000O10O1000000000O01000000000O10O100031L00O10O10000000O10O1000000000O01000000000O07J6JOCc@9]?Gj@2fPk6"}, "image_id": 456, "id": 7853}, {"iscrowd": 0, "category_id": 1, "bbox": [151.0, 476.0, 22.0, 11.0], "area": 242, "segmentation": {"size": [512, 512], "counts": "ln[2;e?00000000000000000000000000000000000000000TQY5"}, "image_id": 456, "id": 7854}, {"iscrowd": 0, "category_id": 1, "bbox": [296.0, 479.0, 29.0, 26.0], "area": 538, "segmentation": {"size": [512, 512], "counts": "T_d43m?5K4K6K4L00O10O1000O10O100000O10O100000O10O1000O4M5K4L5K`Pm2"}, "image_id": 456, "id": 7855}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 488.0, 21.0, 17.0], "area": 289, "segmentation": {"size": [512, 512], "counts": "[?>b?0O10O100000O1000O100000O1000O1000002N5JbPe7"}, "image_id": 456, "id": 7856}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 495.0, 27.0, 17.0], "area": 296, "segmentation": {"size": [512, 512], "counts": "h_W73m?2M2O2M1O100O100O1O100O100O1O100002N1O2N2N1O2N2N1O2NRP;"}, "image_id": 456, "id": 7857}, {"iscrowd": 0, "category_id": 1, "bbox": [341.0, 502.0, 45.0, 10.0], "area": 414, "segmentation": {"size": [512, 512], "counts": "hoZ58h?000000000O10000000000000000000000000000000000000000000000O10000000000000000000000000000Z`n1"}, "image_id": 456, "id": 7858}, {"iscrowd": 0, "category_id": 1, "bbox": [35.0, 506.0, 34.0, 6.0], "area": 158, "segmentation": {"size": [512, 512], "counts": "loa04l?0000000N21O000000000000000000000000000000000000001O00000000000000TPm6"}, "image_id": 456, "id": 7859}, {"iscrowd": 0, "category_id": 1, "bbox": [147.0, 509.0, 
14.0, 3.0], "area": 37, "segmentation": {"size": [512, 512], "counts": "moY23m?0000000000000001O00000000RP_5"}, "image_id": 456, "id": 7860}, {"iscrowd": 0, "category_id": 1, "bbox": [505.0, 509.0, 6.0, 3.0], "area": 13, "segmentation": {"size": [512, 512], "counts": "ool71n?100O1001O2"}, "image_id": 456, "id": 7861}, {"iscrowd": 0, "category_id": 1, "bbox": [135.0, 68.0, 97.0, 87.0], "area": 4108, "segmentation": {"size": [512, 512], "counts": "jbS23i?5K4K5L4L5N11O0000010O0000010O0000010O0000010O0000010O00oAZOhjBFV=:fBJZ=6bBO]=2^B2c=MXB8h=HTBeNlA]1R>eNlA]1R>:N2N2N2N200000000N2N2N0000000000000000000000000000000000000000000eChMo:X2QEjMm:V2SEmMj:S2VEoMh:Q2XEQNf:o1ZESNd:m1\\EUNb:k1^EWN`:S1WDQOY1N^:Q1[DoNW12oN^OQ;a1kDmNU16mN^OQ;_1oDkNS1:kN^OQ;]1SEiNQ1>iN^OQ;[1WEiNm0`0iN^OQ;Y1[EiNi0n0j99_EiNe0P1j97cEiNa0R1j95gEiN=T1j93kEiN9V1j91oEiNAE:c1T:OSFiN]OF;d1S:MVFjNZOG;d1S:KZFjNVOI;d1S:I^FjNROK;e1R:FcFSOYOY1R:DgFSOUO[1R:BkFSOQO]1R:@oFSOmN_1R:^OSGSOiNa1R:\\OWG\\1g8dN[G\\1d8cN^G]1c8`N_G`1a8^NaGb1_8\\NaGf1_8ZN_Gh1a8XN]Gj1c8VN[Gl1e8TNYGn1g8RNWGP2i8PNUGR2k8nMSGT2m8lMQGV2o8jMoFV2S9jMkFV2W9jMgFV2[9jMcFV2_9jM_FV2c9jM[FV2g9jMWFV2k9jMSFV2o9jMoEV2S:jMkEY1BXNe:?gEY1FQNN_Og:W1cEY1JoMN@f:X1`EY1NmMN@f:Z1\\EY12kMj:l0REY16iMj:n0nDY1:gMj:P1jDY1>eMk:d3WEZLk:d3XEYLj:e3XEZLi:e3XEYLj:e3j0N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N3M2N2N2N2O1N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N3M2N2N2N2N2O1N]ZP5"}, "image_id": 457, "id": 7863}, {"iscrowd": 0, "category_id": 1, "bbox": [343.0, 175.0, 31.0, 29.0], "area": 606, "segmentation": {"size": [512, 512], "counts": "Tf[52j?4L4L5K4L4O110O0000010O0000010O0000010O000010O000O1L5K4L4L4L_jT2"}, "image_id": 457, "id": 7864}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 178.0, 69.0, 143.0], "area": 5165, "segmentation": {"size": [512, 512], "counts": "b5\\4e;000000M3N1O2N2N2N2N2N2N2N2N2N2N2N2N2O1000000^O]CfMc`00000000000000000000000000001O00000000000000000000000000_Oa0\\Od`m6"}, "image_id": 459, "id": 7873}, {"iscrowd": 0, "category_id": 1, "bbox": [140.0, 0.0, 68.0, 38.0], "area": 1955, "segmentation": {"size": [512, 512], "counts": "mPV22e?9G9H8N21O01M20000000000000001O00000003M000001O01O00000000000001O01O000000000000010O00000000000001O01O000000000000010O000000000M3G9H8GQPh4"}, "image_id": 459, "id": 7874}, {"iscrowd": 0, "category_id": 1, "bbox": [314.0, 0.0, 78.0, 36.0], "area": 2040, "segmentation": {"size": [512, 512], "counts": "\\Pm4b0]?7J00O10000000000000000000000000000000000000KZOPAf0P?50000J1500000000000000O10000000000000000000000000000000000000000000000O100000000L40000000000O101O00000000c0]O\\ok1"}, "image_id": 459, "id": 7875}, {"iscrowd": 0, "category_id": 1, "bbox": [445.0, 0.0, 67.0, 48.0], "area": 2029, "segmentation": {"size": [512, 512], "counts": "e`n63m?2M2O2M3M2O2M3N1N3DZOZAh0d>YOZAi0d>ZOYAi0d>ZOZAh0d>Z1O01O01ON3N100010O01O0N2M4M2M4L3N2M4L3N2M4M2M`^c2"}, "image_id": 459, "id": 7879}, {"iscrowd": 0, "category_id": 1, "bbox": [387.0, 44.0, 43.0, 38.0], "area": 1112, "segmentation": {"size": [512, 512], "counts": "haQ65j?6K4L5K5J3N00OJVOXAj0h>[OSAe0m>7O010000000O010000000O01000003M4K10O1000O10O100002M6K2N00OJd@J\\?5;M5KomX1"}, "image_id": 459, "id": 7880}, {"iscrowd": 0, "category_id": 1, "bbox": [127.0, 50.0, 58.0, 33.0], "area": 1621, "segmentation": {"size": [512, 512], "counts": "Sbo1;Z?;K50001O00000001O000000000001O0000000001O000002N001O0M300000N2000001O00000001O0000000003M000000000010O00000000H8De^S5"}, "image_id": 459, "id": 7881}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 61.0, 46.0, 51.0], "area": 1417, "segmentation": {"size": [512, 512], "counts": 
"P2R1k>310O010O00010O010O0010mNWAm0o>O01O010O01O01O010O01O01O010O010O01O01O0N3L2OO4M2N3L3N2M4M2M4M2G`@0b?Ma@0V]Y7"}, "image_id": 459, "id": 7882}, {"iscrowd": 0, "category_id": 1, "bbox": [345.0, 69.0, 31.0, 40.0], "area": 698, "segmentation": {"size": [512, 512], "counts": "Sc\\52k?3N3M2M4M2M3N3M2M4M2M4M2O110O010O00010OQOWAg0j>VOYAh0P?N2N3L3N3M2M3N3L3N^mS2"}, "image_id": 459, "id": 7883}, {"iscrowd": 0, "category_id": 1, "bbox": [166.0, 86.0, 29.0, 30.0], "area": 563, "segmentation": {"size": [512, 512], "counts": "[Sc23j?3M4L3M3M4L3M300010O00010O00010O00010O000O2L3M3M4L3M3M4LV]n4"}, "image_id": 459, "id": 7884}, {"iscrowd": 0, "category_id": 1, "bbox": [465.0, 89.0, 47.0, 37.0], "area": 1393, "segmentation": {"size": [512, 512], "counts": "ecX74d?8H8H8O101O00000001O000000O2L300001O00000001O000001O000000000RORAk0R?O0000000000000010O00000000000oL"}, "image_id": 459, "id": 7885}, {"iscrowd": 0, "category_id": 1, "bbox": [114.0, 108.0, 63.0, 48.0], "area": 1612, "segmentation": {"size": [512, 512], "counts": "TTi11m?3L3N3M2M4M2M3N3L3N3O000010O010O00010O010O00o@SOo>o010O010O001TAQOf>l0ZAVOf>S10O010O0kNXAR1l>0O01M2N2M40O01O01O010O01O01O001M2001M2N3L3N2M4M2M4M2M4MQ\\W5"}, "image_id": 459, "id": 7886}, {"iscrowd": 0, "category_id": 1, "bbox": [397.0, 108.0, 41.0, 52.0], "area": 1315, "segmentation": {"size": [512, 512], "counts": "edV64[?MRA7k>KRA8l>KPA8n>Jo@:m>>N2N3M200010OO1N3M2M3N3O01O010O0N2M4L3010O01M2N3L3NO02O3M2M4]Og@;[?Bh@;a?M2N3L3N[lT1"}, "image_id": 459, "id": 7887}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 115.0, 45.0, 51.0], "area": 1369, "segmentation": {"size": [512, 512], "counts": "e3S1k>3O001O010O01O010O01OnNXAl0i>QOZAo0k>00010O010O0010O0010O010O0010O0010O010O0001M2N3L3N3M2M3N3M2M4M2Nh[Y7"}, "image_id": 459, "id": 7888}, {"iscrowd": 0, "category_id": 1, "bbox": [269.0, 138.0, 95.0, 60.0], "area": 2679, "segmentation": {"size": [512, 512], "counts": "jdV42k?3[@L^?4_@0]?Lb@<\\?Fe@:W?Ii@7T?S1O010O0001oNUAl0j>ROXAn0m>O2M20001O010O0VOl@i0V?O0010O00010O001k@XOQ?k00010O0010O00010O0010O0010O0010O000O2M2O2O00010O010O00010O00010O01QOSAg0o>VOUAg0S?N210O0Fj@IX?4j@HZ?5:MgjY2"}, "image_id": 459, "id": 7889}, {"iscrowd": 0, "category_id": 1, "bbox": [473.0, 138.0, 39.0, 61.0], "area": 1810, "segmentation": {"size": [512, 512], "counts": "ne\\78X?`0@`0@`0001O00000000000001O0000000000000001MOC=12000000001O0000000000000000M4_OUL"}, "image_id": 459, "id": 7890}, {"iscrowd": 0, "category_id": 1, "bbox": [90.0, 153.0, 62.0, 44.0], "area": 1613, "segmentation": {"size": [512, 512], "counts": "cU]12k?4M2M3N3L3N3L3N2M4M201O01O010OO2M21O010O01O01O010O01O04L10O0010O0010O0010O0010O00O2L3010O00010O010O00010O01M2N2M4M2M4M2M3Ndjc5"}, "image_id": 459, "id": 7891}, {"iscrowd": 0, "category_id": 1, "bbox": [380.0, 163.0, 18.0, 28.0], "area": 350, "segmentation": {"size": [512, 512], "counts": "UUn54l?4L4K6K4L5K0O10000000O104L5K4K6K4LWjh1"}, "image_id": 459, "id": 7892}, {"iscrowd": 0, "category_id": 1, "bbox": [404.0, 180.0, 41.0, 38.0], "area": 1115, "segmentation": {"size": [512, 512], "counts": "PVZ62m?5L5K4L5J5L2N0HXOXAh0g>90O100000O010002N000O0100000O101O3M000O1000O10O1000004L5J5L5K4LbYQ1"}, "image_id": 459, "id": 7893}, {"iscrowd": 0, "category_id": 1, "bbox": [9.0, 181.0, 24.0, 20.0], "area": 276, "segmentation": {"size": [512, 512], "counts": "Pf41l?4M2N3L3O20O0010O010O0010O0010O010O0001M2N3L3NUZ_7"}, "image_id": 459, "id": 7894}, {"iscrowd": 0, "category_id": 1, "bbox": [85.0, 183.0, 15.0, 13.0], "area": 124, "segmentation": {"size": [512, 512], "counts": 
"leZ14j?2M4O00010O010O0010O001M2N3LWj]6"}, "image_id": 459, "id": 7895}, {"iscrowd": 0, "category_id": 1, "bbox": [39.0, 193.0, 40.0, 44.0], "area": 1070, "segmentation": {"size": [512, 512], "counts": "Rgc01d?2a@1]?1`@2]?;N1010OO1M4M2M3M4M2M4O01O01O010O01O01O01O010O01O01L3M3N3L3N3L3M3N3L3N3L3MhYh6"}, "image_id": 459, "id": 7896}, {"iscrowd": 0, "category_id": 1, "bbox": [256.0, 195.0, 60.0, 47.0], "area": 1633, "segmentation": {"size": [512, 512], "counts": "^VP43N2M4M2M4M2M3N3L3M4M2M3NPYi7"}, "image_id": 459, "id": 7899}, {"iscrowd": 0, "category_id": 1, "bbox": [101.0, 223.0, 37.0, 64.0], "area": 1486, "segmentation": {"size": [512, 512], "counts": "ehb11o>0mA4o=OoA3o=0mA3P>0nA3o=0mA3P>0nA3o=OnA4o=0nA2P>0nA3o=0mA3S>n00O00010O010O00010O010O000O2M2M4QOdA6^>GeA7]>GeA6_>FeA7]>GeA6_>FeA7]>GeA6T?N3L3NShj5"}, "image_id": 459, "id": 7900}, {"iscrowd": 0, "category_id": 1, "bbox": [29.0, 230.0, 29.0, 30.0], "area": 530, "segmentation": {"size": [512, 512], "counts": "lg>1m?2M3M4L3M3N3L3M40O0010O00010O00010O0010O000M4M2M4L3M3N3LehR7"}, "image_id": 459, "id": 7901}, {"iscrowd": 0, "category_id": 1, "bbox": [498.0, 238.0, 14.0, 56.0], "area": 500, "segmentation": {"size": [512, 512], "counts": "UXi75b?9L5O000N2XABn=>iAKW>5`A4`>f000K5M301O0bH"}, "image_id": 459, "id": 7902}, {"iscrowd": 0, "category_id": 1, "bbox": [199.0, 258.0, 67.0, 99.0], "area": 2506, "segmentation": {"size": [512, 512], "counts": "kjS31l?4M2N3L3\\OF`A=]>F_A=^>F`A<^>F_A>^>E_A=b>a0O01O01O03NO01O010O01O01O0N3M0O10O010FnNgAR1Z>QOcAo0\\>TOaAl0`>9O0_OgN_BY1a=iN\\BX1c=kN[BT1f=oNVBT1i=mNUBU1l=`0O010O0001O0N3L3N2M4M2M4M2N2M4M2M4M2M4M2M3N3L3N3L3N2M4Mdgj3"}, "image_id": 459, "id": 7903}, {"iscrowd": 0, "category_id": 1, "bbox": [453.0, 259.0, 39.0, 61.0], "area": 1565, "segmentation": {"size": [512, 512], "counts": "_hR72n?5BJh@:T?Kg@:S??K4L5K4L5J5L5K1O0O15K3L010000000O0100000O0100003M4K6K4L5K4K6K4L5K5K4KYf9"}, "image_id": 459, "id": 7904}, {"iscrowd": 0, "category_id": 1, "bbox": [415.0, 261.0, 19.0, 32.0], "area": 374, "segmentation": {"size": [512, 512], "counts": "Yh_62m?4M4L4L4K5LO1000O10O13M3L4NO4L4L4Ae@0`?Ld@0fVW1"}, "image_id": 459, "id": 7905}, {"iscrowd": 0, "category_id": 1, "bbox": [12.0, 270.0, 45.0, 40.0], "area": 1087, "segmentation": {"size": [512, 512], "counts": "WY61l?3N3L3N3M2M3N3L3N3M2O1010O01O010O01O01O010O01O01O010O0M3N30O010O01ON3M2TORAc0R?ZOQAc0W?M3N3M2M4M2M3NWWS7"}, "image_id": 459, "id": 7906}, {"iscrowd": 0, "category_id": 1, "bbox": [309.0, 286.0, 14.0, 24.0], "area": 313, "segmentation": {"size": [512, 512], "counts": "Pij4f0Z?0N200000000000000002N00=CcVn2"}, "image_id": 459, "id": 7907}, {"iscrowd": 0, "category_id": 1, "bbox": [67.0, 287.0, 49.0, 47.0], "area": 1255, "segmentation": {"size": [512, 512], "counts": "kiQ13k?3L3N2M4M2M4M2M3N3L3N3N11O01O010O01O01O010O01O01O01QOVAg0i>WOYAi0P?0O00010O010O00010O010O00N3L3N3M2M3N3L3N3L]fU6"}, "image_id": 459, "id": 7908}, {"iscrowd": 0, "category_id": 1, "bbox": [294.0, 289.0, 13.0, 22.0], "area": 286, "segmentation": {"size": [512, 512], "counts": "QYc4f0Z?00000000000000000000000oVV3"}, "image_id": 459, "id": 7909}, {"iscrowd": 0, "category_id": 1, "bbox": [382.0, 295.0, 62.0, 49.0], "area": 1952, "segmentation": {"size": [512, 512], "counts": "aYo51o?4L4L4K5EAm@c0o>;L4L00O10O1000O1000O12M5LO1000O10O1000O10O1003L32L1N01000O103M4L4K2O00000O0100000O0100000O04M4L4L4K5L4L4L4K5L4L`eQ1"}, "image_id": 459, "id": 7910}, {"iscrowd": 0, "category_id": 1, "bbox": [501.0, 314.0, 11.0, 23.0], "area": 185, "segmentation": {"size": [512, 512], "counts": 
"lij71o?4L5J5L5K2N0O1000O10VF"}, "image_id": 459, "id": 7911}, {"iscrowd": 0, "category_id": 1, "bbox": [323.0, 323.0, 57.0, 38.0], "area": 1608, "segmentation": {"size": [512, 512], "counts": "]jQ52n?5K5J6ECi@b0R?;J1000000O10O100000O14K3N0000000O010000000O010031L1O00O010000000O010000000O0100000O010000001N6K5K4L5K5JReQ2"}, "image_id": 459, "id": 7912}, {"iscrowd": 0, "category_id": 1, "bbox": [252.0, 336.0, 54.0, 40.0], "area": 1647, "segmentation": {"size": [512, 512], "counts": "gZn34l?f0ZO3L1000000K5000000O100000O100000005K0000000001O4L00000005K0000000000000O1000000000000000000000O1000000c0]OddV3"}, "image_id": 459, "id": 7913}, {"iscrowd": 0, "category_id": 1, "bbox": [150.0, 454.0, 48.0, 42.0], "area": 1166, "segmentation": {"size": [512, 512], "counts": "o^[22k?3M4M2M4L3N2M4L3N2O2O0010O00010O0010O00102M0001oNTAm0o>01O0O1010O00010O01O01O010O0001L3M4M2M3M4L3N2M\\al4"}, "image_id": 459, "id": 7914}, {"iscrowd": 0, "category_id": 1, "bbox": [482.0, 457.0, 30.0, 36.0], "area": 821, "segmentation": {"size": [512, 512], "counts": "c^a73m?4K5L4L4L4K4M0LUOSAk0m>4O0100000O0100000O0100000O010001O3L010000dA"}, "image_id": 459, "id": 7915}, {"iscrowd": 0, "category_id": 1, "bbox": [431.0, 469.0, 38.0, 43.0], "area": 1164, "segmentation": {"size": [512, 512], "counts": "Xog61o?4L4BJi@;S?Ii@;S?;NO010001O4K2O00000O100MlNZAT1e>4000000O1000000O010002N4K5L4LO10^Oo@4S?JRA2S?IQA3b?LUPe0"}, "image_id": 459, "id": 7916}, {"iscrowd": 0, "category_id": 1, "bbox": [363.0, 476.0, 42.0, 36.0], "area": 1085, "segmentation": {"size": [512, 512], "counts": "ioe51^?5l@0P?4l@KU?9g@GY?`000O4M000I[OSAe0m>700O1000000O1000000O010000000O0104L2N0O10O1000O10003M5K4K5L5K4LVPe1"}, "image_id": 459, "id": 7917}, {"iscrowd": 0, "category_id": 1, "bbox": [145.0, 490.0, 30.0, 22.0], "area": 441, "segmentation": {"size": [512, 512], "counts": "noX21m?3L3N2M3N2M3N2M300001O001O00001O001O00001O001O0O1N3L3N3L3N`PX5"}, "image_id": 459, "id": 7918}, {"iscrowd": 0, "category_id": 1, "bbox": [299.0, 493.0, 38.0, 19.0], "area": 558, "segmentation": {"size": [512, 512], "counts": "goe41o?5J4M00000O100KHb@8^?5O100000000O100000000O1000000001O3M00O100000000O1003M5K5KTPg2"}, "image_id": 459, "id": 7919}, {"iscrowd": 0, "category_id": 1, "bbox": [206.0, 501.0, 32.0, 11.0], "area": 203, "segmentation": {"size": [512, 512], "counts": "m_W33k?2M3O100001OM31O00001O001O00001O001O00001O00001O001O00001O001OQ`X4"}, "image_id": 459, "id": 7920}, {"iscrowd": 0, "category_id": 1, "bbox": [488.0, 72.0, 24.0, 21.0], "area": 375, "segmentation": {"size": [512, 512], "counts": "\\Rd71o?7H2OO1000O100O6K4L000000000O10O1000000000007HbM"}, "image_id": 461, "id": 7921}, {"iscrowd": 0, "category_id": 1, "bbox": [442.0, 112.0, 60.0, 79.0], "area": 2881, "segmentation": {"size": [512, 512], "counts": "gTm61o?6J3_OGTA:l>Ln@4R?=0002M7J6J5KO100000K5000O100000@fNYB[1g=lNRBT1n=7SB\\Nn=d17O10O104L0000000O8I6J5K0000O10O10000000O104L6J7I6J6I7J7I6J4L00006I7J6JZj4"}, "image_id": 461, "id": 7922}, {"iscrowd": 0, "category_id": 1, "bbox": [335.0, 129.0, 55.0, 87.0], "area": 2880, "segmentation": {"size": [512, 512], "counts": "fdW53m?6J6J6J6J6I7J6J000000O01000000000O01AdNZB\\1f=jNTBV1l=POnAP1Q>`00O10000000O106J6J6J6J0O10O10000000002M7J6J6J7I6J6I7J6J6J6J6J7H7J]il1"}, "image_id": 461, "id": 7923}, {"iscrowd": 0, "category_id": 1, "bbox": [392.0, 129.0, 53.0, 75.0], "area": 2818, "segmentation": {"size": [512, 512], "counts": "\\UT61n?8I6J6J7I5K0O1lNSOUCl0l\\I6I4M000000O1000fNYOZCh0f<^OTCb0ln0>I6J6I8I6J[hd5"}, "image_id": 461, "id": 7928}, {"iscrowd": 0, "category_id": 1, "bbox": 
[33.0, 198.0, 77.0, 84.0], "area": 2794, "segmentation": {"size": [512, 512], "counts": "if`03m?1N3N1N3M3N1N3M010O010O1O3N2M2N3N1N3N2M2N3N1N3M3N1N3N1N3M2O2M3N110O010OO2N2N1100O010O0100O0100O01N2N1N3M2O2M3N1N3M2O2M3M2O2M2O0O1O2O2M3M2O2M2N3N2M2O2M2NigX6"}, "image_id": 461, "id": 7929}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 219.0, 16.0, 19.0], "area": 181, "segmentation": {"size": [512, 512], "counts": "Q77h?2N2M2O2N200000O0O2N2M3N1O2M3Nnhg7"}, "image_id": 461, "id": 7930}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 276.0, 69.0, 50.0], "area": 2187, "segmentation": {"size": [512, 512], "counts": "d8Q1o>010O0001O01O0001O01L30001O01O00010O0001OM4N10000010L3O100010O000103L001O01O01O0001O01O01hN[AS1i>010O00010OVOXAA\\A>d>^O`Ab0o>10O0001O01O0001O01O0M3M4K4L4MaVm6"}, "image_id": 461, "id": 7931}, {"iscrowd": 0, "category_id": 1, "bbox": [373.0, 290.0, 41.0, 96.0], "area": 2833, "segmentation": {"size": [512, 512], "counts": "]jj53m?7I7I7aNYOWCn0bbNoAW1d>H8I8H7I7IWd`1"}, "image_id": 461, "id": 7932}, {"iscrowd": 0, "category_id": 1, "bbox": [418.0, 297.0, 54.0, 75.0], "area": 2689, "segmentation": {"size": [512, 512], "counts": "UZa62e?9H8G9N201O00000001O00000001O00000QBWOgIeA1b>HeA1a>IeA1TSY3"}, "image_id": 461, "id": 7935}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 325.0, 15.0, 37.0], "area": 343, "segmentation": {"size": [512, 512], "counts": "U:U1k>01O0N2L4M4L3001ON3L3L4L5L3LkUh7"}, "image_id": 461, "id": 7936}, {"iscrowd": 0, "category_id": 1, "bbox": [164.0, 326.0, 66.0, 82.0], "area": 2863, "segmentation": {"size": [512, 512], "counts": "m[b22l?3M2N3L3O2N100O2O0O2O0O2O001N100O2nNYOiBg0W=\\OeBf0Y=]OeBc0[=@aBb0^=@`B`0_=C_B>`=E\\B9RBEP>8RBFP>8SBFn=7UBGk=9XBDj=9WBEm=9TBDQ>8PBFT>8mADY>9i0M4M2M]d\\4"}, "image_id": 461, "id": 7937}, {"iscrowd": 0, "category_id": 1, "bbox": [16.0, 334.0, 56.0, 52.0], "area": 1834, "segmentation": {"size": [512, 512], "counts": "_[84i?3L5K4M3M310O000O2K4M3L401O00000UAPOg>T100010O0000010O0001K4M3O110O0000010O00010O00M3M4K4M3L4O2O000010OL4M3L5L3L4L5L]ek6"}, "image_id": 461, "id": 7938}, {"iscrowd": 0, "category_id": 1, "bbox": [493.0, 358.0, 19.0, 36.0], "area": 371, "segmentation": {"size": [512, 512], "counts": "hkf71m?3M2M3N3M2M4M2N201O010O010O0m@\\Ok>c0RA@n>j0O010eD"}, "image_id": 461, "id": 7939}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 389.0, 50.0, 64.0], "area": 2295, "segmentation": {"size": [512, 512], "counts": "]0]AiN\\>^1L4M3010O00000N3M2000010O05K0001O01O_NjAX1V>dNoA[1[>001O01O0001N1K50001O0001O01O0001K4L4L4M4K4L4L4L5K4L4M3LbcV7"}, "image_id": 461, "id": 7940}, {"iscrowd": 0, "category_id": 1, "bbox": [129.0, 416.0, 93.0, 51.0], "area": 2852, "segmentation": {"size": [512, 512], "counts": "hmP25g?4M3L4M4K4O1010O00o@XOj>h0RA\\On>k010O0001O01OM3M4O0000010O000010O000010O00O1M4K4M3O101O01O01O01O0001O01O01O0001O01O01O000mNWAP1l>00010O0001O01O0001O01O00010O0001O01O000O1O2O000010O00000L5L3L4L5L3L4L4M4Kdb`4"}, "image_id": 461, "id": 7941}, {"iscrowd": 0, "category_id": 1, "bbox": [438.0, 420.0, 69.0, 52.0], "area": 2290, "segmentation": {"size": [512, 512], "counts": "W^k63m?6SON_A:Y>MaA4^>3[AMe>g00000000O01000000000O10O1KhNbAX1^>5O1000O100000O1000O10000KbAhN^>W15100000K\\AnNd>R150O100005K6I1000000000O10O102NO4M1O000002N3M0O010000001O7I6I8IhQ2"}, "image_id": 461, "id": 7942}, {"iscrowd": 0, "category_id": 1, "bbox": [344.0, 423.0, 69.0, 70.0], "area": 2679, "segmentation": {"size": [512, 512], "counts": 
"k]\\52n?7H8H7I001O1000000O10O100000000000O10O1000000000O3N00000000000O0100000000005K7H9@dNlAc1m=;M0O10000000O1000O1000000002M8I0000O12N7I8G8I7I8H7I7IlPa1"}, "image_id": 461, "id": 7943}, {"iscrowd": 0, "category_id": 1, "bbox": [255.0, 436.0, 71.0, 62.0], "area": 2902, "segmentation": {"size": [512, 512], "counts": "ono35d?7J6I7O1010O0000I7J6I7M4hA`No=h100000001O0001O00000001O0001O000001O0001O0I7I7J6000010O0000000000010O000000000010O000000000010O0000000XOWA9i>@^A`0Q?1O00L5H7Ikal2"}, "image_id": 461, "id": 7944}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 452.0, 39.0, 53.0], "area": 1476, "segmentation": {"size": [512, 512], "counts": "\\>S1m>0VAnNf>V1O2N100O1O2N1O100O2N11O01O0001O01O01O01O00N3K4M3L5L3L4O110O000L4M4K4M3L5KhQ\\7"}, "image_id": 461, "id": 7945}, {"iscrowd": 0, "category_id": 1, "bbox": [122.0, 469.0, 75.0, 43.0], "area": 2041, "segmentation": {"size": [512, 512], "counts": "j_m16b?8001O000000000000001O00000000000000ALSA3i>1WAOi>d01O00I7J6001O00000000000000001O000000000000001O000000000000001O0000000000000^OZAJg>MaA3_>DjA0000000001O00000000000L4HfPm4"}, "image_id": 461, "id": 7946}, {"iscrowd": 0, "category_id": 1, "bbox": [445.0, 488.0, 57.0, 24.0], "area": 920, "segmentation": {"size": [512, 512], "counts": "^on66j?6J5J2O000000000O100000000O10000000000O10000000000O10000000000O1002N6J6J2N0000O10000000000O10000000000O100004L6JQ`4"}, "image_id": 461, "id": 7947}, {"iscrowd": 0, "category_id": 1, "bbox": [378.0, 501.0, 21.0, 11.0], "area": 201, "segmentation": {"size": [512, 512], "counts": "g_m55k?4L0000000O1000000000000O10000000000007ITPh1"}, "image_id": 461, "id": 7948}, {"iscrowd": 0, "category_id": 1, "bbox": [278.0, 506.0, 49.0, 6.0], "area": 193, "segmentation": {"size": [512, 512], "counts": "l_[44l?000000000O10000000000003M0000000000O10000000000000000O10000000000000000O10000000000000000O1004LRPl2"}, "image_id": 461, "id": 7949}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 509.0, 19.0, 3.0], "area": 33, "segmentation": {"size": [512, 512], "counts": "o?1o?0000000000000O100000000000000O10000SPf7"}, "image_id": 461, "id": 7950}, {"iscrowd": 0, "category_id": 1, "bbox": [69.0, 180.0, 15.0, 11.0], "area": 106, "segmentation": {"size": [512, 512], "counts": "ieR12k?4M20001O01O010O01O01O0O1N3L[je6"}, "image_id": 464, "id": 7951}, {"iscrowd": 0, "category_id": 1, "bbox": [25.0, 191.0, 14.0, 14.0], "area": 102, "segmentation": {"size": [512, 512], "counts": "Uf<1n?2M3N2N2N2O1000N1N3N2N2N2NlY\\7"}, "image_id": 464, "id": 7952}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 217.0, 82.0, 73.0], "area": 2602, "segmentation": {"size": [512, 512], "counts": "\\79f?2M3N2N1N3N2N2M2O2N2M3N1O2M300O010000O0100000O0100000O010000O0100000O010000O0100000O0100000O010000O0100000O010000O0100000O001N2N1N300O10OO2N2N2Ik@_OW??k@@W?>7N2M2O2N2M3N1Oggf6"}, "image_id": 464, "id": 7953}, {"iscrowd": 0, "category_id": 1, "bbox": [75.0, 217.0, 30.0, 29.0], "area": 439, "segmentation": {"size": [512, 512], "counts": "WgU11n?2M3N2M2O2N2M3N1O2M30O1000O10O1000O10O1N2N1N3N2M2O2N2M3N1OjX[6"}, "image_id": 464, "id": 7954}, {"iscrowd": 0, "category_id": 1, "bbox": [489.0, 310.0, 23.0, 22.0], "area": 268, "segmentation": {"size": [512, 512], "counts": "Qjd72k?3L4O2O00010O00010O000010O000101N00010M2M3M4YF"}, "image_id": 464, "id": 7955}, {"iscrowd": 0, "category_id": 1, "bbox": [408.0, 173.0, 20.0, 15.0], "area": 144, "segmentation": {"size": [512, 512], "counts": "cU\\61n?1N3M2O2O010O010O0100O010O010O10O01M2N]jY1"}, "image_id": 465, "id": 7956}, {"iscrowd": 0, "category_id": 1, "bbox": [363.0, 203.0, 26.0, 
25.0], "area": 353, "segmentation": {"size": [512, 512], "counts": "ife51m?3N2N1N3N2M3N1O2M3N101000O010O1N1O2M3N1N3N2M3N1O2M]Ym1"}, "image_id": 465, "id": 7957}, {"iscrowd": 0, "category_id": 1, "bbox": [330.0, 220.0, 60.0, 64.0], "area": 2108, "segmentation": {"size": [512, 512], "counts": "PXU51n?2M3N1N3N2N1N3N2M2O2M3N1N3N2M2O2M3N2M2O2N2M2O2M3N101000O01000O01000O01000O01000N1N3N2M2O2M30O10O1WO\\A5d>I^A6b>H`A7b>G`A6c>BUA2::b>CVA0:;W?N2M3N1N3Njgl1"}, "image_id": 465, "id": 7958}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 16.0, 2.0], "area": 30, "segmentation": {"size": [512, 512], "counts": "02n?00000000000000000000000000O10PPh7"}, "image_id": 466, "id": 7959}, {"iscrowd": 0, "category_id": 1, "bbox": [49.0, 0.0, 56.0, 10.0], "area": 281, "segmentation": {"size": [512, 512], "counts": "P`h01o?0000000000000000000000000000000000000000000000000000000000009G00000000000000000000000000000000000000000000000P`[6"}, "image_id": 466, "id": 7960}, {"iscrowd": 0, "category_id": 1, "bbox": [215.0, 0.0, 90.0, 11.0], "area": 666, "segmentation": {"size": [512, 512], "counts": "P`[34l?000000000000000000003M00000000000000000000002N0000000000000000000000O10000000000000000003M000000000000000000000000000000K50000000000000000000000000000000000000000000000000000000P`W3"}, "image_id": 466, "id": 7961}, {"iscrowd": 0, "category_id": 1, "bbox": [334.0, 0.0, 65.0, 19.0], "area": 810, "segmentation": {"size": [512, 512], "counts": "RPW52l?201O0000000000000000001O000\\@L\\?<000001O00000000002N001O0000000000000000001O00000000000000O1H8001O0000000000000000001O000000000000HX`h1"}, "image_id": 466, "id": 7962}, {"iscrowd": 0, "category_id": 1, "bbox": [408.0, 0.0, 59.0, 43.0], "area": 1692, "segmentation": {"size": [512, 512], "counts": "gP\\63k?2N3]OIZA9d>HZA:f>GWADVA>j>CTA?k>BRAa0m>_OQAc0o>8O001O001O1O001O001O001OO1N2O1001O1O001O001O001O1O001O001O1ON2N2O1N2N2O1N2N2N2O1N2Fk@GW?8j@FX?8k@FV?8:N2O1N2NR`f0"}, "image_id": 466, "id": 7963}, {"iscrowd": 0, "category_id": 1, "bbox": [474.0, 0.0, 38.0, 83.0], "area": 2038, "segmentation": {"size": [512, 512], "counts": "RR]72m?2N2gNMbB5Q=JTB3i05Q=JTB3i05Q=JTB3i05Q=JTB3i05Q=JTB30Fe0?V=;hBGV=;hBGV=;hBGV=;hBGV=;hBGV=;iBFU=eBB\\=?bBA_=_11O2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2NA"}, "image_id": 466, "id": 7964}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 25.0, 23.0, 13.0], "area": 293, "segmentation": {"size": [512, 512], "counts": "i`]1=c?00000000000000000000000000000001O0000000000VoV6"}, "image_id": 466, "id": 7965}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 32.0, 27.0, 58.0], "area": 936, "segmentation": {"size": [512, 512], "counts": "P1e1[>004MO0000000000000nNTB0n>0000000000000000010O0000000000A[^b7"}, "image_id": 466, "id": 7966}, {"iscrowd": 0, "category_id": 1, "bbox": [31.0, 43.0, 83.0, 46.0], "area": 2613, "segmentation": {"size": [512, 512], "counts": "la?j0V?0000000000000000000000000000000000000I700000000000000000000K50002N0000000000000000000000000000001O00000K500000_O`AB`>>a00000000000000000000000000000000000F:01O0000000000dnV6"}, "image_id": 466, "id": 7967}, {"iscrowd": 0, "category_id": 1, "bbox": [489.0, 86.0, 18.0, 18.0], "area": 158, "segmentation": {"size": [512, 512], "counts": "lbd71n?2N2N3M2N2N2000000000O1N2N2N2N2N2NP]2"}, "image_id": 466, "id": 7968}, {"iscrowd": 0, "category_id": 1, "bbox": [203.0, 88.0, 39.0, 78.0], "area": 2529, "segmentation": {"size": [512, 512], "counts": "fcU35_?>aAO_>1aAO_>1aAOR>V10000N2Mf0]O000000000000000000000001O00000000000000000000000^Ob0000000bNenV4"}, "image_id": 466, "id": 7969}, {"iscrowd": 0, "category_id": 1, 
"bbox": [263.0, 90.0, 38.0, 83.0], "area": 2424, "segmentation": {"size": [512, 512], "counts": "hcS4Q1]>b000000000000000L4H80000WB\\NV=W2000000000000000000000000001O00000001ObNWCEiG9000000000000iNmAa0i>00XOm\\j7"}, "image_id": 466, "id": 7973}, {"iscrowd": 0, "category_id": 1, "bbox": [20.0, 107.0, 45.0, 73.0], "area": 2253, "segmentation": {"size": [512, 512], "counts": "\\U:7X?a0_Oa0_Oa0B>000000010O00000000000000000000004L000M3_Oa0M30001O00000000000DC_A<`>J\\A6b>e0O1N1O2O1N1O2N2O1N1O2O1N101O2O2M1000O1000000O10001N101O001N2O2N2`NgAT1c>TO`A8a>FdA6\\>HiA5X>HnA3T>KPB2P>LVB0k=MZBOg=0R]]1"}, "image_id": 466, "id": 7975}, {"iscrowd": 0, "category_id": 1, "bbox": [76.0, 111.0, 37.0, 73.0], "area": 2105, "segmentation": {"size": [512, 512], "counts": "gTV1g0Y?0000000TOl0000:F000D<0000000000000000000000000000000000000_Oa00000cNkAo0c>00DG900000000000000000000000000001O000000000000000001O000000000000000C_jb3"}, "image_id": 466, "id": 7979}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 184.0, 52.0, 62.0], "area": 2285, "segmentation": {"size": [512, 512], "counts": "YWR6=b?1O100O100O1TOl0E;O1O1O1O1O10000O10000000000001O001O001O1O1O2N:F00000000000000000000000[Oe0000000000000000giS1"}, "image_id": 466, "id": 7980}, {"iscrowd": 0, "category_id": 1, "bbox": [308.0, 202.0, 54.0, 77.0], "area": 2680, "segmentation": {"size": [512, 512], "counts": "fWj42m?2N2O1N2N2N2a@EV?=i@DU?>i@ET?e0N2H8^Ob0O1O1O1O1O1O1O1DM3L^hZ2"}, "image_id": 466, "id": 7981}, {"iscrowd": 0, "category_id": 1, "bbox": [457.0, 210.0, 55.0, 64.0], "area": 2422, "segmentation": {"size": [512, 512], "counts": "XhT79_?8000000000000iNW1I7000001O0000000000000000000000000000000007I0000000000000000Q1oN000000000000000000000000000000VH"}, "image_id": 466, "id": 7982}, {"iscrowd": 0, "category_id": 1, "bbox": [29.0, 219.0, 76.0, 43.0], "area": 2633, "segmentation": {"size": [512, 512], "counts": "fg><^?7O00000O1E;00001O000000000000000000000000000000000000000000F:0000000001O000000000000000000001O00000000001O000001O0000000000000000000000000B>M300001OLXY[6"}, "image_id": 466, "id": 7983}, {"iscrowd": 0, "category_id": 1, "bbox": [369.0, 235.0, 9.0, 12.0], "area": 108, "segmentation": {"size": [512, 512], "counts": "[gh509UOb0000000000000000000000000000001O00000=C00000000000000000000000000000001O00000000000000000000004L000001O0000000000000000000000000000000000000000Sh`3"}, "image_id": 466, "id": 7985}, {"iscrowd": 0, "category_id": 1, "bbox": [373.0, 257.0, 56.0, 81.0], "area": 3128, "segmentation": {"size": [512, 512], "counts": "Xij5b0\\?3O00000F:K5000000000000000002N00000I700000000000000000OB?00oAgN[=o1000000000000000000000000000000000000000000000TNdC4T>0WVY1"}, "image_id": 466, "id": 7986}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 258.0, 8.0, 24.0], "area": 161, "segmentation": {"size": [512, 512], "counts": "R8h0X?01O0000000L4_O^hk7"}, "image_id": 466, "id": 7987}, {"iscrowd": 0, "category_id": 1, "bbox": [73.0, 269.0, 14.0, 9.0], "area": 125, "segmentation": {"size": [512, 512], "counts": "^hT18g?1000000000000000000000000cWd6"}, "image_id": 466, "id": 7988}, {"iscrowd": 0, "category_id": 1, "bbox": [34.0, 274.0, 74.0, 46.0], "area": 2348, "segmentation": {"size": [512, 512], "counts": "^Ya0`0`?0000000J6ZOf0001O00000001O0000000000000000000000000000000;E000000000000000000000000000000001O0000008H1O000000000001O000000000000000000000000000000[O]gY6"}, "image_id": 466, "id": 7989}, {"iscrowd": 0, "category_id": 1, "bbox": [496.0, 279.0, 11.0, 11.0], "area": 121, "segmentation": {"size": [512, 512], "counts": "gXh7;e?0000000000000000000YW2"}, 
"image_id": 466, "id": 7990}, {"iscrowd": 0, "category_id": 1, "bbox": [292.0, 280.0, 6.0, 11.0], "area": 66, "segmentation": {"size": [512, 512], "counts": "hXb4;e?000000000XgZ3"}, "image_id": 466, "id": 7991}, {"iscrowd": 0, "category_id": 1, "bbox": [299.0, 283.0, 22.0, 9.0], "area": 180, "segmentation": {"size": [512, 512], "counts": "khe49g?000000000000000000000001N1000000000000000TWo2"}, "image_id": 466, "id": 7992}, {"iscrowd": 0, "category_id": 1, "bbox": [440.0, 288.0, 58.0, 53.0], "area": 2207, "segmentation": {"size": [512, 512], "counts": "WZl68^?:01O00000000000001O000000000004L0E;A?L4000000000001O000001O00000000000000000M300001O0001O000000000000000UOeA1[>@TB`0f>00I7Acf6"}, "image_id": 466, "id": 7993}, {"iscrowd": 0, "category_id": 1, "bbox": [198.0, 299.0, 50.0, 61.0], "area": 2064, "segmentation": {"size": [512, 512], "counts": "oZS39]?:00000000000000TOl0000000000000000000000000000J60000000000000B>0000000000000000000000000000P1PO000000eeS4"}, "image_id": 466, "id": 7994}, {"iscrowd": 0, "category_id": 1, "bbox": [277.0, 299.0, 75.0, 44.0], "area": 2453, "segmentation": {"size": [512, 512], "counts": "fiZ4Q1o>00000000000000000000000000000000000000000000K230000000000000000000000000000001OG:O0000000000000000000000000000M3N20000000000000:F00001O00000000000\\Onf_2"}, "image_id": 466, "id": 7995}, {"iscrowd": 0, "category_id": 1, "bbox": [506.0, 306.0, 6.0, 22.0], "area": 93, "segmentation": {"size": [512, 512], "counts": "SZm74g?6J5K5N200^F"}, "image_id": 466, "id": 7996}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 335.0, 24.0, 35.0], "area": 622, "segmentation": {"size": [512, 512], "counts": "k:c0Y?4K5M4O01O0001O01O0001O0001O0001OM4K4L4L4L5K4Kaec7"}, "image_id": 466, "id": 7997}, {"iscrowd": 0, "category_id": 1, "bbox": [25.0, 336.0, 75.0, 70.0], "area": 2779, "segmentation": {"size": [512, 512], "counts": "ek<3j?3L4M4K4M3M4N10000010O0001O01O01O0001O01O00010O0000010O00010O0000010O00001M2L4M3L5K4M301O2N2O1N2N2N2O1N2N1O01OM2N30O2O0O100O2O0O100VOSB@o=8ZBFf=;[BBf=1oABc0<^=NnBOT=LRC2nN2Mgc]6"}, "image_id": 466, "id": 7998}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 414.0, 46.0, 70.0], "area": 1850, "segmentation": {"size": [512, 512], "counts": "j=R1m>2CnNhAU1U>mNjAT1U>nNhAU1V>mNhAU1U>P10000000000000000000000000000O1O10000000000000000000000000009G00000000000000000000000001O1O2N1O1O000000001O00000XOh000O1O2N2N2N2N3MXbl1"}, "image_id": 466, "id": 8000}, {"iscrowd": 0, "category_id": 1, "bbox": [471.0, 435.0, 41.0, 77.0], "area": 2242, "segmentation": {"size": [512, 512], "counts": "]o[7a0_?0cN@ZC`0el<9SCGl<O1O"}, "image_id": 466, "id": 8001}, {"iscrowd": 0, "category_id": 1, "bbox": [269.0, 441.0, 50.0, 71.0], "area": 2422, "segmentation": {"size": [512, 512], "counts": "bnV4b0^?0000000[Oe0O100O1O1fAVO]=g1000000000000001O1O1O1O4L00000000002N1O1O;E0000000000000000000ZOf0000000000000000`QP3"}, "image_id": 466, "id": 8002}, {"iscrowd": 0, "category_id": 1, "bbox": [16.0, 449.0, 65.0, 63.0], "area": 1896, "segmentation": {"size": [512, 512], "counts": "a_82m?2M3N2N1N3N2M2O2N2M3N1N3N2M2O2N2M3N0O10O10O010O10O10O03N2O0100M3N1N3N1O0O010O10O2O2M2O2N2M2O2O1000OO1N100KPA[OQ?d0PAZOS?c07N1N3N2N1N3N2M3N1OeQg6"}, "image_id": 466, "id": 8003}, {"iscrowd": 0, "category_id": 1, "bbox": [167.0, 468.0, 75.0, 44.0], "area": 1568, "segmentation": {"size": [512, 512], "counts": "noc22k?3L4N200O100O100O1001O00001O0000M3M300001O00001O00001O00001O00N2O100O100O100O10000N2M3M3M3M3N2001O0000O1L4O100O1O100O1001O0J\\APOd>m0`AROa>j0bAVO^>g0eAYO\\>c0hA\\OY>`0jA@X>2YA1b0M^>OfA0U?0001O0000Q`V4"}, "image_id": 
466, "id": 8004}, {"iscrowd": 0, "category_id": 1, "bbox": [1.0, 486.0, 11.0, 10.0], "area": 60, "segmentation": {"size": [512, 512], "counts": "[o01m?3N2N1O20O10O01M3N1Nh`i7"}, "image_id": 466, "id": 8005}, {"iscrowd": 0, "category_id": 1, "bbox": [356.0, 497.0, 28.0, 15.0], "area": 336, "segmentation": {"size": [512, 512], "counts": "o_b51o?000000000B>000000000000000000000000000000000000000000_`o1"}, "image_id": 466, "id": 8006}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 505.0, 5.0, 7.0], "area": 21, "segmentation": {"size": [512, 512], "counts": "i?7j?O2N1O1NSPm7"}, "image_id": 466, "id": 8007}, {"iscrowd": 0, "category_id": 1, "bbox": [106.0, 0.0, 57.0, 37.0], "area": 1237, "segmentation": {"size": [512, 512], "counts": "\\Pe13l?2O2M2N3N1N3M3N1N3N1N3M3N1N3M10O00010O1O100O100O1O100O1O100O1O100O100O1O100000000O100Lg@@Z?`03O1O100O1O100O1E^@7c?I^@6b?Ia@4h?N1Nh_^5"}, "image_id": 467, "id": 8008}, {"iscrowd": 0, "category_id": 1, "bbox": [201.0, 0.0, 18.0, 9.0], "area": 91, "segmentation": {"size": [512, 512], "counts": "P`T31o?2N1O1O1O1O2N00O100O1O1O1O1O100O1OQ`b4"}, "image_id": 467, "id": 8009}, {"iscrowd": 0, "category_id": 1, "bbox": [309.0, 0.0, 6.0, 3.0], "area": 12, "segmentation": {"size": [512, 512], "counts": "P`j41o?1O1O00O1OQ`R3"}, "image_id": 467, "id": 8010}, {"iscrowd": 0, "category_id": 1, "bbox": [344.0, 0.0, 48.0, 27.0], "area": 717, "segmentation": {"size": [512, 512], "counts": "PP\\51o?1O1O1O1O2N1O1O1O1O1O1O1O1O1O2N1O1O1O1O1O1O1O1O1OO1O1O100O1O1O1O1O1O1O1O1O1O1O1O100O1O2N2N2N2Nmok1"}, "image_id": 467, "id": 8011}, {"iscrowd": 0, "category_id": 1, "bbox": [299.0, 11.0, 57.0, 53.0], "area": 1454, "segmentation": {"size": [512, 512], "counts": "m`e42m?2O1N2N2N2N3M2N2N2N2N2N2N3N1N2N2N2N2N3M2O1001O000001O000000N2N2N2O2M2N2N2N2N2N1O00000000011N2N2N2N2N2N2N2N2N3M2Nkn]2"}, "image_id": 467, "id": 8012}, {"iscrowd": 0, "category_id": 1, "bbox": [222.0, 13.0, 101.0, 76.0], "area": 3133, "segmentation": {"size": [512, 512], "counts": "lP_32m?2N2N2N2N2O1N2N2N2N2N2N2XAXOU>j0iAXOU>j0iAYOT>j0iAXOW>h0gAZOY>HbAj03A^>?`AC`>=^ACe>j02N2N2N2N2N2N2O1O10000000000N3YOi@`0i>@gA>Y>DgA:Z>GfA7Z>LeA2[>0eAN[>4eAJ[>8eAF[>>aAB_>`0_AB_>`0_AB_>`0_AB`>`0]ABa>`0]ABa>P1O10001O01O00000000000001O01O00000000000001O01O000000000O1N2N2N2N3M2N2N2N2O1N2N2N2N3M2N2N2N2N2O1N2N2Nn]n2"}, "image_id": 467, "id": 8013}, {"iscrowd": 0, "category_id": 1, "bbox": [132.0, 28.0, 58.0, 60.0], "area": 1761, "segmentation": {"size": [512, 512], "counts": "RRR22m?2N2N2N2N2O1N2GBm@a0P?Bm@`0Q?Bm@`0R?8N2EQObAQ1\\>QObAQ1]>POaAR1]>POaAR1]>:N2N2N1O0000000002N2N2N2N2N2N2N2O1N2N000000001O2N2N2N2N2N2N2N3M2O1N2N2N2N2N2N2N2N]nP5"}, "image_id": 467, "id": 8014}, {"iscrowd": 0, "category_id": 1, "bbox": [205.0, 31.0, 24.0, 24.0], "area": 292, "segmentation": {"size": [512, 512], "counts": "]aV31n?2N2N2N2N2N2N2N2N2O0O0000000002N2N2N2N2N2N3M2Ni^]4"}, "image_id": 467, "id": 8015}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 36.0, 7.0, 15.0], "area": 55, "segmentation": {"size": [512, 512], "counts": "T1?b?N3M3N2M2N3Nb^l7"}, "image_id": 467, "id": 8016}, {"iscrowd": 0, "category_id": 1, "bbox": [473.0, 38.0, 39.0, 75.0], "area": 1497, "segmentation": {"size": [512, 512], "counts": "Yb\\71n?2N2N2N2N2N2N2O1N2N2N2N2N3M2N2N2N2N2O1N2N2N2N2N2N2N2N3M2O1N2N2N2N2N2N2N2N2N1jN"}, "image_id": 467, "id": 8017}, {"iscrowd": 0, "category_id": 1, "bbox": [212.0, 58.0, 14.0, 12.0], "area": 86, "segmentation": {"size": [512, 512], "counts": "oQZ31m?2N3N2O010O01000O010ON3N1NSn^4"}, "image_id": 467, "id": 8018}, {"iscrowd": 0, "category_id": 1, "bbox": [174.0, 
63.0, 56.0, 60.0], "area": 1635, "segmentation": {"size": [512, 512], "counts": "ZSg21m?2N3M2N3M3M2N3O0010O010O010OO2M2N3M2N3M2O2M3M2N3M2N3M2N3M2N3N1O02N101IkAbNU>\\1lAdNT>Z1oAfNQ>W1QBgNQ>W1>M2N3M2N3M2N3N1N3M2N3M2N3M3M2N3Mfm\\4"}, "image_id": 467, "id": 8019}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 65.0, 26.0, 47.0], "area": 680, "segmentation": {"size": [512, 512], "counts": "T2\\1c>010O0001O3N1N3M2O1N3M2N3N1N2N3N1N3M2N2O2M2N3N1N2NTmb7"}, "image_id": 467, "id": 8020}, {"iscrowd": 0, "category_id": 1, "bbox": [257.0, 79.0, 12.0, 12.0], "area": 86, "segmentation": {"size": [512, 512], "counts": "cbP42m?2N2N2N2O1O10O1N2N3M2N\\]i3"}, "image_id": 467, "id": 8021}, {"iscrowd": 0, "category_id": 1, "bbox": [256.0, 88.0, 36.0, 36.0], "area": 638, "segmentation": {"size": [512, 512], "counts": "TSP41o?1N2N2N2N3M2O1N2N2N2N3M2N2O1N2O101O000000O2M2N2N2N2N3M2N2N2N2N2O2M2N2Ncl]3"}, "image_id": 467, "id": 8022}, {"iscrowd": 0, "category_id": 1, "bbox": [7.0, 104.0, 51.0, 60.0], "area": 1598, "segmentation": {"size": [512, 512], "counts": "_d31n?3M2N2\\OJ[A8d>IZA9d>IZA9d>IZA9d>IZA9d>IZA9d>IZA9d>JYA8e>c0N3M2N2N2N2O1N0000000001O3N1N2N2N2N2N2N2N2N2N2N2N2N3M2O1N2N2N2N2N2N2N2N2N2N3MlkR7"}, "image_id": 467, "id": 8023}, {"iscrowd": 0, "category_id": 1, "bbox": [214.0, 111.0, 53.0, 61.0], "area": 1594, "segmentation": {"size": [512, 512], "counts": "fT[31n?2N2N3N1N2N2N2N3C@UAb0i>@UAb0i>@UAb0i>@UAb0i>=M2N2N2JeNeA\\1Y>fNeA\\1Y>50001O000001O00002N2N2N3M2N2N2N2N2N2N2N2N2N2N3M2N2N2O1N2N2N2N2N2N2Ng[j3"}, "image_id": 467, "id": 8024}, {"iscrowd": 0, "category_id": 1, "bbox": [41.0, 143.0, 52.0, 59.0], "area": 1463, "segmentation": {"size": [512, 512], "counts": "]ed01n?2N2N2N2N2N3N1N2N2N2N2n@ZOg>i0VAYOh>i0WAXOh>i0VAYOb>KaAV1]>mN`AU1^>9M0000000001O01O000002N3M2N2N2O1N2N2N3M2N2N2O1N2N2N3M2N2N2N2O1N3M2N2NfZa6"}, "image_id": 467, "id": 8025}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 175.0, 34.0, 46.0], "area": 716, "segmentation": {"size": [512, 512], "counts": "S6f0Y?2JYOPAi0n>YOPA=0Kn>9TAGj>9XAFg>;ZACf>?ZA_Of>b0\\A[Od>e0^AYOb>g0;01O000002N2N200000N2N3M2N2N2N2N2N2N2N2N2N2O1NQj^7"}, "image_id": 467, "id": 8026}, {"iscrowd": 0, "category_id": 1, "bbox": [78.0, 182.0, 24.0, 24.0], "area": 288, "segmentation": {"size": [512, 512], "counts": "SVW11n?2N2N3M2N2O1N2N2N2N0000000001O2N2N3N1N2N2N2N2NRj\\6"}, "image_id": 467, "id": 8027}, {"iscrowd": 0, "category_id": 1, "bbox": [94.0, 192.0, 52.0, 52.0], "area": 1407, "segmentation": {"size": [512, 512], "counts": "kV_12l?2O2M2O2M3M2O2M3N1N3N2M2O2M3N1N3M30O010O10O10O01000O01000O01000O010O10O10O0O2oNVAh0m>VOUAg0m>WOVAg0R?N3N2M2O2M2O2M3N1N3MVif5"}, "image_id": 467, "id": 8028}, {"iscrowd": 0, "category_id": 1, "bbox": [59.0, 202.0, 27.0, 28.0], "area": 380, "segmentation": {"size": [512, 512], "counts": "ffm01n?2N2N2N2N2N2N2N2N2N2N2N20000000N2N2N2N3M2N2N2N2N2N2NXid6"}, "image_id": 467, "id": 8029}, {"iscrowd": 0, "category_id": 1, "bbox": [46.0, 235.0, 53.0, 63.0], "area": 1602, "segmentation": {"size": [512, 512], "counts": "UXg02m?2N2N2N2N2N2N2O2M2N2N2N2N2N2N2N2N2O2M2N2N2N2N2N2N2N2N3N1N200001eNkAh0U>VOmAi0T>VOmAh0U>VOmAh0U>VOmAi0T>UOnAi0T>UOoAh0S>VOoAh0S>VOoAh0f>N2N2N2N2N2N3N1N2N2N2N2NZW^6"}, "image_id": 467, "id": 8030}, {"iscrowd": 0, "category_id": 1, "bbox": [5.0, 244.0, 32.0, 31.0], "area": 558, "segmentation": {"size": [512, 512], "counts": "\\h21n?3M2DLi@6U?Mh@5V?Mh@5V?Mh@4W?:01O01O00000001O3N100000N2N2N2N2O2M2N2N2N2N2N2N2NmW]7"}, "image_id": 467, "id": 8031}, {"iscrowd": 0, "category_id": 1, "bbox": [2.0, 276.0, 53.0, 62.0], "area": 1688, "segmentation": 
{"size": [512, 512], "counts": "^Y11o?1N2N2N2N2N2N3M2N2N2N2N2N2N3M2O1\\APOU>R1iAPOU>R1iAPOU>R1iAPOU>R1iAPOU>R1iAnNW>]1000001O001O2000000000N2N2O1N2aNdAW1_>fNcAX1d>N2N2TOVA?l>_OWA>l>@UA>m>@UA>m>@UA?W?N2N2N2N2N2N3MRVT7"}, "image_id": 467, "id": 8032}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 321.0, 29.0, 56.0], "area": 845, "segmentation": {"size": [512, 512], "counts": "Q:h1Y>O1N2N2O1N2N3M2N2N2N2N2N2N2O2M2N2N2N2N2N2N2N2O2M2N2N2N2NQUa7"}, "image_id": 467, "id": 8033}, {"iscrowd": 0, "category_id": 1, "bbox": [38.0, 485.0, 36.0, 27.0], "area": 516, "segmentation": {"size": [512, 512], "counts": "`_c021Oj?5N2N2N3M2N2N2N2N2N2N20O1O1O1O1O1O1O1O2N1O1O1O1O1O1O1O1O1O1O1O1O2N1O1OQ`j6"}, "image_id": 467, "id": 8034}, {"iscrowd": 0, "category_id": 1, "bbox": [241.0, 0.0, 35.0, 36.0], "area": 860, "segmentation": {"size": [512, 512], "counts": "i`h33j?4L3M3N3L3M3M4L3N201O0000001O00001O00001O00001O00M3L4M3M3L4M3M3M3L4MSPf3"}, "image_id": 471, "id": 8035}, {"iscrowd": 0, "category_id": 1, "bbox": [288.0, 0.0, 34.0, 60.0], "area": 898, "segmentation": {"size": [512, 512], "counts": "eQ`43j?3M3M4L3M3M4L3M3M4L3M3M4L3M00000001O3M3M3N2M3M3M3M3M3M3M3M3M3M3M3MSPo2"}, "image_id": 471, "id": 8036}, {"iscrowd": 0, "category_id": 1, "bbox": [337.0, 0.0, 49.0, 88.0], "area": 1272, "segmentation": {"size": [512, 512], "counts": "`bX53k?2N3L3N3L3N2N3L3N3L3N3M1N1000O010O01000O010O01000O01000O010O01000O102M3N2N2M3N2N2M3N2M3N2N2M3N2MSPo1"}, "image_id": 471, "id": 8037}, {"iscrowd": 0, "category_id": 1, "bbox": [188.0, 8.0, 24.0, 38.0], "area": 668, "segmentation": {"size": [512, 512], "counts": "[Pn23m?6I7J6J5K6J2NO01000002N000O100000O105H9J6J6J6Jkne4"}, "image_id": 471, "id": 8038}, {"iscrowd": 0, "category_id": 1, "bbox": [489.0, 45.0, 23.0, 40.0], "area": 627, "segmentation": {"size": [512, 512], "counts": "Ybd73k?2M4M2M4L3N3L3N2M4M2M4O010O00010O010O0010O00_N"}, "image_id": 471, "id": 8039}, {"iscrowd": 0, "category_id": 1, "bbox": [201.0, 120.0, 87.0, 56.0], "area": 2479, "segmentation": {"size": [512, 512], "counts": "cdT32k?4L3M3M4L3M3M4L3M310O0010O0010O00010O00010O00010O0010O0010O00010O00010O00010O00010O0010O0010O00010O00010O00010O00010O0010O0010O00010O00010O00010O00010O00O2L3M4L3M3M4M2M3M4L_k_3"}, "image_id": 471, "id": 8040}, {"iscrowd": 0, "category_id": 1, "bbox": [509.0, 147.0, 3.0, 9.0], "area": 19, "segmentation": {"size": [512, 512], "counts": "hdn74j?2M3]K"}, "image_id": 471, "id": 8041}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 157.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "mdo72Q;"}, "image_id": 471, "id": 8042}, {"iscrowd": 0, "category_id": 1, "bbox": [432.0, 163.0, 61.0, 84.0], "area": 2614, "segmentation": {"size": [512, 512], "counts": "aVh61l?3N3L3N2M4M2M4L3N2M4M2M4M2M3N3L3M4M20010WB\\NT=d1jB^NW=b1fBaNY=_1eBcN[=]1cBfN]=Z1_BiNa=W1]BlNc=h100010O010O01O010O010O0010RO\\B\\Of=a0^B\\Oe=a0]B\\Of=a0^B\\Od=a0_B]Od=`0_B]Oc=`0`B^Oc=?`B]Oc=`0`B^Oc=`0^B^Od=c0l001O010N1N2M4M2M4M2M_Z9"}, "image_id": 471, "id": 8043}, {"iscrowd": 0, "category_id": 1, "bbox": [491.0, 189.0, 21.0, 55.0], "area": 627, "segmentation": {"size": [512, 512], "counts": "Xge72k?3N3M2M3N3L3N2N3L3N3M2M3N3M2M3\\AgN_>^1M4M210O0RJ"}, "image_id": 471, "id": 8044}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 260.0, 1.0, 3.0], "area": 3, "segmentation": {"size": [512, 512], "counts": "Tho73i7"}, "image_id": 471, "id": 8045}, {"iscrowd": 0, "category_id": 1, "bbox": [389.0, 267.0, 52.0, 69.0], "area": 1874, "segmentation": {"size": [512, 512], "counts": 
"RjR62l?3N1010O0001N1\\OI\\A;`>H^A:`>I]A:`>H]A;`>H^A:`>I]A:`>d0M4M2N3L3N2N3L3O20O01O01O010O0N3L3N2N3L3N3M2M4M2N2M4M2N3L3N3M2M3N3L3N3M2M3N[WS1"}, "image_id": 471, "id": 8046}, {"iscrowd": 0, "category_id": 1, "bbox": [443.0, 294.0, 53.0, 75.0], "area": 2203, "segmentation": {"size": [512, 512], "counts": "Vjm61m?2N3L3N3M2M3N3M2M3N3M2M4RAQOi>S1010O2N010O0010O0[AmN]>S1aAPO_>P1]ASOc>U1hAeNi=^1SBfNj=\\1SBgNj=]1SBeNj=k1O010O01O01O01O01O01O01OXOVBZOk=c0WB^Oh=?\\B@e=<^BDb=DUB:;3_=@YB;;4]=]O\\B?61a=^O[B`05OP>MTB0n=NTBOo=NUBNo=NTB0n=MUB0ST8"}, "image_id": 471, "id": 8047}, {"iscrowd": 0, "category_id": 1, "bbox": [421.0, 327.0, 12.0, 14.0], "area": 105, "segmentation": {"size": [512, 512], "counts": "_jb63k?2N3L3O2O010O01M2N2N3LiUW1"}, "image_id": 471, "id": 8048}, {"iscrowd": 0, "category_id": 1, "bbox": [140.0, 331.0, 83.0, 77.0], "area": 3670, "segmentation": {"size": [512, 512], "counts": "n[V21l?4L3M3N3L3N210O00010O00010O01OM4L3N2M4M21O01O01O01O01O010OXASO]>m0`AWO]>l0_AWO^>W1N3L3M3M4M1N0000102O101O01O01O010O00010O01O01O01O01O010O00010O01O01O01O01ON3M2WOSB\\OP>a0SB\\OQ>`0RB]OQ>a0RB]OP>?SBBl=8fA^Oa0:i=5^BJc=3_BN`=OcB1]=LgB1\\=KgB3[=KhB1bb`4"}, "image_id": 471, "id": 8049}, {"iscrowd": 0, "category_id": 1, "bbox": [110.0, 350.0, 15.0, 43.0], "area": 350, "segmentation": {"size": [512, 512], "counts": "V\\g12g?7J6J6J6J7I6M30N2J6J6J6J7I6JWUQ6"}, "image_id": 471, "id": 8050}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 364.0, 1.0, 4.0], "area": 4, "segmentation": {"size": [512, 512], "counts": "\\ko74`4"}, "image_id": 471, "id": 8051}, {"iscrowd": 0, "category_id": 1, "bbox": [444.0, 366.0, 48.0, 49.0], "area": 1395, "segmentation": {"size": [512, 512], "counts": "f\\n63j?3N2N3L3N3M2M4M2N2M4M2M4M2N3L3N2N12L3N3N1010O0010O0010O010O00010O01M2N2M4M2N3L3N3M2M3N3M2M4M2NWd9"}, "image_id": 471, "id": 8052}, {"iscrowd": 0, "category_id": 1, "bbox": [358.0, 372.0, 73.0, 71.0], "area": 3101, "segmentation": {"size": [512, 512], "counts": "U]c53k?2N3L3N3M2HBk@a0S?Ak@b0Q?8N3M2M4M2N2M4M2N3L3O2O01O010O010O01O01O010O01O01O010O010O01O01O010O010O01O01O0O2M2M4M2N1N01000O10O021O2N10O1M2F^AVOd>h0^AVOd>h0;M4M2N3L4M3M4KYTX1"}, "image_id": 471, "id": 8053}, {"iscrowd": 0, "category_id": 1, "bbox": [224.0, 380.0, 27.0, 28.0], "area": 470, "segmentation": {"size": [512, 512], "counts": "^\\`33k?3M2M3N3M2M4M2N3O01O010O010O010OO1OO2N3M2N3L3N2N3M2MRTR4"}, "image_id": 471, "id": 8054}, {"iscrowd": 0, "category_id": 1, "bbox": [493.0, 392.0, 19.0, 29.0], "area": 384, "segmentation": {"size": [512, 512], "counts": "klf73j?3N3L3N2M4L3N3N10010O0010O0010O0010OdC"}, "image_id": 471, "id": 8055}, {"iscrowd": 0, "category_id": 1, "bbox": [508.0, 431.0, 4.0, 10.0], "area": 24, "segmentation": {"size": [512, 512], "counts": "f]n72l?2M4M2aB"}, "image_id": 471, "id": 8056}, {"iscrowd": 0, "category_id": 1, "bbox": [477.0, 446.0, 35.0, 58.0], "area": 1397, "segmentation": {"size": [512, 512], "counts": "dn^71m?3M2M4M2o@FY>=eAEY>>dAEY>=dAFY>>dADZ>>cAF[>;cAG]>o00O01O01O010O010O01O01O010O010O00010O010O01O01OhA"}, "image_id": 471, "id": 8057}, {"iscrowd": 0, "category_id": 1, "bbox": [316.0, 451.0, 114.0, 61.0], "area": 3174, "segmentation": {"size": [512, 512], "counts": "c^n41a02g>2VA0h>2VA0g>4VAOg>3WAOj>0TA3k>NRA4o>>01O010N1N2M4M2O2O01O010O00010O010O00010O0mN\\Ak0d>QO_Ao0a>oNbAP1g>1O010O01O0O1M4M201O00010O010O00010O010O00010O010O00010O010O00010O010O00n@YOk>h0QA[OP?j0001O00001O001O00001O001O00001O001O00001O001O00001OO1M3N2O1001O00001O001O000O2M2M4M2M3N3L3Nh`X1"}, "image_id": 471, "id": 8058}, {"iscrowd": 0, "category_id": 1, "bbox": 
[468.0, 511.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "o_Z71P`e0"}, "image_id": 471, "id": 8059}, {"iscrowd": 0, "category_id": 1, "bbox": [447.0, 0.0, 26.0, 9.0], "area": 126, "segmentation": {"size": [512, 512], "counts": "P`o61o?00001O001O00001O001O001O00001O001O00001O00O1N2M3NR`c0"}, "image_id": 472, "id": 8060}, {"iscrowd": 0, "category_id": 1, "bbox": [487.0, 0.0, 25.0, 24.0], "area": 450, "segmentation": {"size": [512, 512], "counts": "\\`c73j?3N2N3L3N3O00001O001O00001O001O00001O001O00001O00"}, "image_id": 472, "id": 8061}, {"iscrowd": 0, "category_id": 1, "bbox": [413.0, 7.0, 25.0, 20.0], "area": 291, "segmentation": {"size": [512, 512], "counts": "a`^62k?3N3M2M4O0010O00010O010O010O010O010O000O2M2M4M2NcoT1"}, "image_id": 472, "id": 8062}, {"iscrowd": 0, "category_id": 1, "bbox": [454.0, 23.0, 23.0, 18.0], "area": 214, "segmentation": {"size": [512, 512], "counts": "PQS73d?1_@1b?7O0010O0010O010O0010O0010O01M2N3M20001O0NR_a0"}, "image_id": 472, "id": 8063}, {"iscrowd": 0, "category_id": 1, "bbox": [437.0, 56.0, 73.0, 101.0], "area": 3725, "segmentation": {"size": [512, 512], "counts": "]dj61m?2N2M4M2N3L3N2N3L3N3M2M3N3L3N3M2M3N3M2M4M2N2M4M2N3L3N2N3L3N3L3N3M2M3N3M2O2OO2M2M4M2M3N3L3N3L3N2M4M2M4M2N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3M2M4M2Mmm0"}, "image_id": 472, "id": 8064}, {"iscrowd": 0, "category_id": 1, "bbox": [171.0, 204.0, 22.0, 65.0], "area": 1131, "segmentation": {"size": [512, 512], "counts": "]fe2=c?=C>B=C;E000000000000000O100000O5L>B=C>B=CjWo4"}, "image_id": 472, "id": 8065}, {"iscrowd": 0, "category_id": 1, "bbox": [247.0, 230.0, 24.0, 20.0], "area": 269, "segmentation": {"size": [512, 512], "counts": "agk32l?2N2N3L3N30O01O010O01O010O010O01O01O0N3M2M4M2NdXh3"}, "image_id": 472, "id": 8066}, {"iscrowd": 0, "category_id": 1, "bbox": [335.0, 250.0, 118.0, 103.0], "area": 4013, "segmentation": {"size": [512, 512], "counts": "`hW51n?2N2N2M3N2N1O2N2N2M3N2N2N1O2N2M3N2N2N11000000000O10O10000000O1000O10000000O10O1000000000O10O10000000O1000O10000000O10O1000000000O10O1000000000O10O10000000O1000O10000000O1N1O2N2N2M3N2000O10O01M3N2N2N2N2N1KWAQOl>m05M3N2N2N1O2M3N2N2N2N2N1N3N2NPVm0"}, "image_id": 472, "id": 8067}, {"iscrowd": 0, "category_id": 1, "bbox": [275.0, 285.0, 28.0, 28.0], "area": 566, "segmentation": {"size": [512, 512], "counts": "[iY42j?6K5L3N3N2M1O101N100O2O00001O01N100O101N1O101N3M3L4K5LjVX3"}, "image_id": 472, "id": 8068}, {"iscrowd": 0, "category_id": 1, "bbox": [288.0, 324.0, 50.0, 85.0], "area": 2112, "segmentation": {"size": [512, 512], "counts": "_\\`42k?4L3M3M4M2M3M4L3M3M4L3M3M4L3M3M4L3M3M4L3M3M4L3M3M11N4L3M3M4L3M3M4L3M3M4L3M3M4L3M3M4L3M3M4L3M3M4L3Mgef2"}, "image_id": 472, "id": 8069}, {"iscrowd": 0, "category_id": 1, "bbox": [339.0, 337.0, 53.0, 97.0], "area": 2403, "segmentation": {"size": [512, 512], "counts": "XmY53k?2M3M4L3M3N3L3M3M3M4L3N2M4L3M3M4L3N2M4L3M3M3M4M2M3M2N0000O2O003L5L3M3L5L3M3M3L5L3M3L5L3M3L5L3M3M3L5L3M3L]ek1"}, "image_id": 472, "id": 8070}, {"iscrowd": 0, "category_id": 1, "bbox": [371.0, 365.0, 74.0, 147.0], "area": 2152, "segmentation": {"size": [512, 512], "counts": "ooi51m?2M3N2N2M3N2N2M3N2M3N2N2M3N2M2O00O01000O010O01000O010O01000O01000O010O01000O010O10O10O01000O010O01000O010O10O10O01000O010O2O3M2M4M2M4M2N2M4M2N3L3N`TQ1"}, "image_id": 472, "id": 8071}, {"iscrowd": 0, "category_id": 1, "bbox": [251.0, 372.0, 27.0, 27.0], "area": 534, "segmentation": {"size": [512, 512], "counts": "Rlm32k?5J6L3O2M3M101N1O100O100010O00000O1O101N1O3M2O2M2M4JVdd3"}, "image_id": 472, "id": 8072}, {"iscrowd": 0, "category_id": 1, "bbox": 
[250.0, 438.0, 47.0, 74.0], "area": 1754, "segmentation": {"size": [512, 512], "counts": "o_m31l?3N2M3M3M3M3M3M3M3N2M3M3M3M3M3M3N2M3M3M3M3M3M3N2M3N20M3L5L3M3L5L3M3M3L5L3M3M4K4M3M4K4M3M4L3LXR[3"}, "image_id": 472, "id": 8073}, {"iscrowd": 0, "category_id": 1, "bbox": [306.0, 442.0, 47.0, 70.0], "area": 1277, "segmentation": {"size": [512, 512], "counts": "o_i41l?3M3M3M3M3M3M3M3M3M3M3L4M3M3M3OO2M4LJjNhAS1X>SOeAk0Z>ZOeAb0[>CcA:_>a02YO^AOe>1[ALg>d03BVACl>f03M2M41O0001O0O101O0O101O0O2O2L4K5K5LlQ_2"}, "image_id": 472, "id": 8074}, {"iscrowd": 0, "category_id": 1, "bbox": [195.0, 0.0, 76.0, 50.0], "area": 2283, "segmentation": {"size": [512, 512], "counts": "[aQ31j?5K5K5L5O01O000001L3K5L4K5K6L3000001O000000001O000000001OK5K5K5L4O11O000000001OL4L4001O000000001O000000001O000000001O0000001O000000001O0000O1K5K5K5L4KU`h3"}, "image_id": 473, "id": 8075}, {"iscrowd": 0, "category_id": 1, "bbox": [20.0, 33.0, 64.0, 31.0], "area": 1283, "segmentation": {"size": [512, 512], "counts": "aQ:3i?4K5L5N100000010O000000010O0000010O000000010O000000010O0000010O000000010O000O1L5K400000010O000001O01O0000010O00000001K4K5L4K6Knne6"}, "image_id": 473, "id": 8076}, {"iscrowd": 0, "category_id": 1, "bbox": [104.0, 44.0, 78.0, 100.0], "area": 4461, "segmentation": {"size": [512, 512], "counts": "lSd15f?5K5K6M200F\\OXAd0c>B]A=_>GaA:Y>KgA5T>0iA3R>P1K5K6J5K5L40010O0000000N3K4000000010O000000010O0000000100O00001O01O000nNoBUOQ=f0TC[Ok<`0[C_Of<<^CDb<7cCI]cNeA^1^>010O01O0N2N3M2M4M1OO010O3N2M2O2M3N1N3O0100O2O0O110O010O00010O0N3L3N2CWA@m><=N2M4M2Mk_:"}, "image_id": 474, "id": 8082}, {"iscrowd": 0, "category_id": 1, "bbox": [296.0, 3.0, 96.0, 60.0], "area": 2977, "segmentation": {"size": [512, 512], "counts": "lPd41l?4L3N2M4L3N3L3M3N3N110O00010O010O0010O003NO010O01O010O01O0O2M2M3O2O010O00010O010O0010O0010O010O00010O010O00010O010O010O00010O010O00010O010O0010O0010O010O00010O010O0N00O102M4M2N2M4M2M4Bd@4^?Jd@4f?L3NSok1"}, "image_id": 474, "id": 8083}, {"iscrowd": 0, "category_id": 1, "bbox": [377.0, 63.0, 82.0, 103.0], "area": 3978, "segmentation": {"size": [512, 512], "counts": "idl51m?3L3N3M2M3N3M2N3L3N2N30O01O000N3M2M3N00002M3N3M2N3O001O0N3XO[NQCg1m<[NPCi1l<[NQCg1m<[NQCg1m<\\NPCg1m<[NQCg1m<[NPCi1mGUA;i>HSAGUA;i>`0L3N2M4M2M4M2YL"}, "image_id": 474, "id": 8087}, {"iscrowd": 0, "category_id": 1, "bbox": [195.0, 184.0, 76.0, 98.0], "area": 3278, "segmentation": {"size": [512, 512], "counts": "VhQ32l?2M4M2M3N3N1010O01O01O010O010O01O01O01O0N3L3N2M4M2M4eNnNdCT1Za0[A\\Oh>a0\\A\\Og>`0\\A]Og>`0?M2M3M4L3NgY;"}, "image_id": 474, "id": 8089}, {"iscrowd": 0, "category_id": 1, "bbox": [274.0, 221.0, 37.0, 51.0], "area": 1600, "segmentation": {"size": [512, 512], "counts": "VXY48]?;E;E;H810O00000000000001O000000000000001O01O000000000001O000000000F:_Oa0^OaYT3"}, "image_id": 474, "id": 8090}, {"iscrowd": 0, "category_id": 1, "bbox": [509.0, 223.0, 3.0, 8.0], "area": 14, "segmentation": {"size": [512, 512], "counts": "Ugn71l?4L3QI"}, "image_id": 474, "id": 8091}, {"iscrowd": 0, "category_id": 1, "bbox": [305.0, 242.0, 67.0, 80.0], "area": 3008, "segmentation": {"size": [512, 512], "counts": "`ih41m?2M3Z@M]?L3N3L3O11N1M4M2M4M2M3N3L3N3L3N2M4M2M40O00GiAiNX>T1kAjNV>S1mAkNV>R1=M3N3L3N3L3N2M4M2M4M2M3NWg]1"}, "image_id": 474, "id": 8095}, {"iscrowd": 0, "category_id": 1, "bbox": [175.0, 285.0, 55.0, 46.0], "area": 1550, "segmentation": {"size": [512, 512], "counts": "nig22l?3L3N3L3O1JBk@>R?El@0ZA3b>1ZA2d>0ZA2c>1[A2a>2[A1c>1[A2b>g0010O00010O010O00010O010O010O00010O010O0N2M4M2N3L3N3L3N2N3L3N3L3N2N3Lnea3"}, "image_id": 474, "id": 8098}, {"iscrowd": 0, 
"category_id": 1, "bbox": [162.0, 337.0, 48.0, 55.0], "area": 1691, "segmentation": {"size": [512, 512], "counts": "i[a22k?4M2M3VOMeA6X>NcA5\\>MaA6]>L_A7`>K]A7c>d0N101O00001O0010O00010O010O00010O010O00010O010M2M3N3L3N3L3N2M4M2N3L3N2M4M2M4MTef4"}, "image_id": 474, "id": 8099}, {"iscrowd": 0, "category_id": 1, "bbox": [505.0, 349.0, 7.0, 21.0], "area": 86, "segmentation": {"size": [512, 512], "counts": "\\kl73j?3N3L4M2M4M2SE"}, "image_id": 474, "id": 8100}, {"iscrowd": 0, "category_id": 1, "bbox": [449.0, 353.0, 34.0, 30.0], "area": 629, "segmentation": {"size": [512, 512], "counts": "ckP73j?3N3L3M3N3L3N201O010O01O01O01O010O00010O01O01O010O000M4L3N3L3N2M4LhT>"}, "image_id": 474, "id": 8101}, {"iscrowd": 0, "category_id": 1, "bbox": [215.0, 360.0, 51.0, 59.0], "area": 1824, "segmentation": {"size": [512, 512], "counts": "bl[33j?3FLf@7W?Kg@7W?;L3N2N3L3N3M2M3N3M2M4N110O01O010O01O010O01O010O01O010O01O010L3N2N3L3N3M2M4M2M3N3M2M4M2N3L3N2N[dj3"}, "image_id": 474, "id": 8102}, {"iscrowd": 0, "category_id": 1, "bbox": [265.0, 377.0, 71.0, 69.0], "area": 2103, "segmentation": {"size": [512, 512], "counts": "[mT41n?2N3M2O1N2N2N3M2N2O1N2N3M2N2N2O0O001O000001OHRO_An0a>TO]Am0c>TO[Al0e>7000000010O0002N1O000001O01O00000000010O000000000101N2N2N2N3M2O1N2N2N3M2N2Ke@C\\?;6O2M2N2N2N2Nbcg2"}, "image_id": 474, "id": 8103}, {"iscrowd": 0, "category_id": 1, "bbox": [486.0, 387.0, 26.0, 37.0], "area": 636, "segmentation": {"size": [512, 512], "counts": "m\\c71l?3N3L3N2M4L3N3L3N2M4O010O01O01O010O01O01O010O01O01fC"}, "image_id": 474, "id": 8104}, {"iscrowd": 0, "category_id": 1, "bbox": [130.0, 390.0, 73.0, 58.0], "area": 2238, "segmentation": {"size": [512, 512], "counts": "]]Q22l?3L3N2N3L3N3L3O2O01ON3M2M4M2M3N3M2M4M2O11M2M4M2O14MO010O0010O0010O0N30O000O2M2010O000UO]A:c>C_A=a>@bAa0^>\\OeAc0[>[OgAe0Y>XOkAh0f>0010O010O00010O010O0010O0010O0O2M2M3N3L3N3L3N2MkRj4"}, "image_id": 474, "id": 8105}, {"iscrowd": 0, "category_id": 1, "bbox": [455.0, 398.0, 27.0, 17.0], "area": 208, "segmentation": {"size": [512, 512], "counts": "clS72k?3N3O0010O010O00010O010O010O010O00010O010O010O001M2NZc>"}, "image_id": 474, "id": 8106}, {"iscrowd": 0, "category_id": 1, "bbox": [431.0, 400.0, 31.0, 28.0], "area": 524, "segmentation": {"size": [512, 512], "counts": "Qmg62f?1\\@2b?8L31O01O0N3L3010O0010O0010O0010O0010O0010OO1M4M2M4M2M3N3MYch0"}, "image_id": 474, "id": 8107}, {"iscrowd": 0, "category_id": 1, "bbox": [366.0, 414.0, 47.0, 66.0], "area": 2016, "segmentation": {"size": [512, 512], "counts": "k]g53j?3N3M2M4M2TAAU>a0iAAT>c0hAAU>a0iAAU>b0gABU>a0iAAU>b0gAAW>V1N110O0010O0010O010O0010O00N3M201O0010O0M4L3N3L3M3N3L3M4M2]OXAKl>2VAKm>3UAKm>2WAJm>3a0NlRa1"}, "image_id": 474, "id": 8108}, {"iscrowd": 0, "category_id": 1, "bbox": [455.0, 426.0, 57.0, 54.0], "area": 1830, "segmentation": {"size": [512, 512], "counts": "QnS72l?3M2M4M2_@FZ?a010O01O010OO2L3N2N3L3N3M210O00010O010O010O00010O010O010O00010O010O01O01O010M210O01O010O01O010O01O01O010VB"}, "image_id": 474, "id": 8109}, {"iscrowd": 0, "category_id": 1, "bbox": [99.0, 442.0, 60.0, 70.0], "area": 2205, "segmentation": {"size": [512, 512], "counts": "foa135O_?3_@O_?:M3N2M3N2O11O001O00001O00M3001O00FYO\\Af0a>]O^Ad0`>_O]Ac0T>WOQB9Hc0T>WORB8Hc0T>WOQBY1m=jNPBX1m=kNQBW1m=?M3O11O00001O001O000O2M2M4M2M3N3L3N3L3N3L3N2M4M2N3L3N2M4M2M4M2M3NlQ`5"}, "image_id": 474, "id": 8110}, {"iscrowd": 0, "category_id": 1, "bbox": [417.0, 448.0, 34.0, 31.0], "area": 630, "segmentation": {"size": [512, 512], "counts": "bn`62l?3L3N2N3M2M4M2N3O00010O010O010O00010O010O0010O0010OO2M2M3N3L3N3M2MiQn0"}, "image_id": 474, "id": 
8111}, {"iscrowd": 0, "category_id": 1, "bbox": [229.0, 465.0, 17.0, 20.0], "area": 215, "segmentation": {"size": [512, 512], "counts": "nnb33j?4M2M4M2M301O010O0001M2N3L3N2M4M]aT4"}, "image_id": 474, "id": 8112}, {"iscrowd": 0, "category_id": 1, "bbox": [243.0, 478.0, 45.0, 34.0], "area": 1065, "segmentation": {"size": [512, 512], "counts": "ooi31m?2N2N2N2HHf@:T?Fj@3O9U?Fj@b0T?6N2N2001O00N2N2001O001O001O001O001OO1N2O11O001O001O001O0XOn@a0T?\\On@b0Z?M2N3M2N3M2N3Me`_3"}, "image_id": 474, "id": 8113}, {"iscrowd": 0, "category_id": 1, "bbox": [165.0, 480.0, 52.0, 32.0], "area": 1171, "segmentation": {"size": [512, 512], "counts": "job22k?4M2M3M4M2M4M2M3N3L3001O00001O001O00001O00001O001O00001O001O00001O00N2O11O00001O001O00001M2N3L3N2M4M2MgPc4"}, "image_id": 474, "id": 8114}, {"iscrowd": 0, "category_id": 1, "bbox": [449.0, 481.0, 62.0, 31.0], "area": 1331, "segmentation": {"size": [512, 512], "counts": "koP73`?Nj@4S?Oj@5T?Mi@5W?Kg@7Y?90O00N2N2M3N2O100001O001O00001O001O00001O001O00001O001O001O00001O001O00001OO1N200001O001O00001O001O00001Bb@8^?Fd@7d?N3L3N="}, "image_id": 474, "id": 8115}, {"iscrowd": 0, "category_id": 1, "bbox": [142.0, 496.0, 17.0, 16.0], "area": 184, "segmentation": {"size": [512, 512], "counts": "m_W23k?2M3M3M3N20000001O001O0O1M4M2M4M^P`5"}, "image_id": 474, "id": 8116}, {"iscrowd": 0, "category_id": 1, "bbox": [336.0, 497.0, 50.0, 15.0], "area": 391, "segmentation": {"size": [512, 512], "counts": "m_X53j?3M31O001O001O00001O001O001O001OO1O1M3N2N2M3O11O00001O001O001O00001O001O00001O001O001O00001O001O0NV`n1"}, "image_id": 474, "id": 8117}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 45.0, 46.0, 100.0], "area": 2224, "segmentation": {"size": [512, 512], "counts": "]1T3mg0mA]OP>d0oA^Oo=d0nA^OQ>d0mA^OP>e0mA]OR>Y1O001N10000O010000O0100XBSN^=n1aBTN]=n1`BTN^=o1`BSN^=Y2N2M2O2N2N2M2O2N2N2M2O2N2M3N1O2N2M3N1O2NO011O1N3O1O1000O10O1000O10OTD_LA0e;a3hDaLA0g;_3eDhLZ;Y3dDiL\\;V3cDkL^;U3_DnLa;R3]DPMb;Q3\\DQMd;n2ZDTMg;c30000O10O1000O10O1000O01M3O1O001O1O1O1O001N2O1O001O1O1O001N2M3N1010000O10O1000O10O1000O10O1000O10O1000O10O1000O1000O10O1000O1oCgLZ;X3eDjL[;V3bDmL^;S3_DPM`;Q3^DPMc;o2[DSMf;c31O0O2O1M3N2N1N3N2N2M2O2N2000O0100000O01000000O0100000O01000O1N1N3N2N2M2O2N2M3N2N1N3N2N2M2O2N2M3N1O2M3N2N1N3N2N2M3N1O2M3N2N1N3N2N2M2O2N2M3N1O2M3N2N2N1N3N2N2M2O2N2IZ@0ThR4"}, "image_id": 477, "id": 8119}, {"iscrowd": 0, "category_id": 1, "bbox": [121.0, 351.0, 57.0, 93.0], "area": 2484, "segmentation": {"size": [512, 512], "counts": "`ml12l?3L3M4M2M3M4M2M3M4M2M4L3N2M4L3N2M4L3N3L3M3N3L3M3N3L1O10O00011N3M4M2M3M4M2M3M4M2M4L3N2M4L3N2M4L3N3L3M3N3L3M3N3L3MkdV5"}, "image_id": 477, "id": 8120}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 397.0, 6.0, 15.0], "area": 61, "segmentation": {"size": [512, 512], "counts": "]OPA4P?Mm@5S?Kj@9U?Gi@;W?8O00001O001O001O00001O001O001O00001O001O00M3001O00N2N2N2M3N2N2M3N2N2M3NR`n6"}, "image_id": 478, "id": 8124}, {"iscrowd": 0, "category_id": 1, "bbox": [79.0, 0.0, 40.0, 42.0], "area": 1050, "segmentation": {"size": [512, 512], "counts": "]`W11m?2b@Ok>4QA0k>3SAOk>3RA0l>3QA0l>c001O001O00010O010O010O00010O010O010O0010O0N3M2M3N2M103M2M4M2N2M4M2Mj_T6"}, "image_id": 478, "id": 8125}, {"iscrowd": 0, "category_id": 1, "bbox": [129.0, 0.0, 52.0, 67.0], "area": 1902, "segmentation": {"size": [512, 512], "counts": "\\aP22l?3M2M4M2M3N3M2M4FYOWAi0g>YOWAi0f>;M2aAiNR>[1kAgNS>[1kAgNS>e1M201O001O000010O010O01ON3M2N3L3N2M4M2N3L3N3M2M3N3L3N3M2M4M2N2N3O010M2N2Mg_U5"}, "image_id": 478, "id": 8126}, {"iscrowd": 0, "category_id": 1, "bbox": [190.0, 0.0, 33.0, 23.0], "area": 508, "segmentation": {"size": 
[512, 512], "counts": "\\Po22k?3N3L3N3M201O001O00001O001O001O001O00001O0000N2N2N2M3N2N2M3N2N2NR``4"}, "image_id": 478, "id": 8127}, {"iscrowd": 0, "category_id": 1, "bbox": [255.0, 0.0, 36.0, 11.0], "area": 232, "segmentation": {"size": [512, 512], "counts": "S`o31m?2O101O001O00001OO1N200001O00001O001O00001O001O00001O001O0000O1N2M3N2NR`^3"}, "image_id": 478, "id": 8128}, {"iscrowd": 0, "category_id": 1, "bbox": [401.0, 0.0, 38.0, 23.0], "area": 606, "segmentation": {"size": [512, 512], "counts": "X`X63k?2N3L3O2O00001O001O00001O001O001O00001O001O001O00001O001OO1N2O100Fd@1[?Mh@2X?Lj@4V?In@3a?Nf_T1"}, "image_id": 478, "id": 8129}, {"iscrowd": 0, "category_id": 1, "bbox": [461.0, 0.0, 28.0, 13.0], "area": 217, "segmentation": {"size": [512, 512], "counts": "Q`V72m?2O00001O001O001O00001O001O001O00001O001O001OO1N2M3N2NR`;"}, "image_id": 478, "id": 8130}, {"iscrowd": 0, "category_id": 1, "bbox": [493.0, 1.0, 13.0, 13.0], "area": 107, "segmentation": {"size": [512, 512], "counts": "X`f72k?4M2N3O00010O010OO1N3M2Moo2"}, "image_id": 478, "id": 8131}, {"iscrowd": 0, "category_id": 1, "bbox": [383.0, 26.0, 53.0, 53.0], "area": 1600, "segmentation": {"size": [512, 512], "counts": "jao54i?3N2BIPA;m>Ho@;o>Gn@=o>;M4M2N210O010O010O00010O010O010O00010O010O010O0001O001O010O0010O00ZOWA6i>GZA8f>F\\A;d>A`A>`>@bA`0_>]OdAc0l>00N3M2N3L3N3M2MUnU1"}, "image_id": 478, "id": 8132}, {"iscrowd": 0, "category_id": 1, "bbox": [440.0, 36.0, 37.0, 28.0], "area": 594, "segmentation": {"size": [512, 512], "counts": "hQl61m?2GO`@5]?Ma@5\\?9N3M201O00010O010O00010O010OO1M4M2M4O01O010O01O010O01O01N1M4M2M3Nc^a0"}, "image_id": 478, "id": 8133}, {"iscrowd": 0, "category_id": 1, "bbox": [179.0, 38.0, 45.0, 54.0], "area": 1449, "segmentation": {"size": [512, 512], "counts": "kai22k?3i@L`>7\\ALa>7]AKb>6[ANd>3YAOh>0UA4j>MSA5m>?10O010O0M3N3O0O2M2M3N3M210O00010O010OO1N3L3N3M21OIYAROj>l0XAROi>l07N3L3M3N3L3N3L3N2Mfn_4"}, "image_id": 478, "id": 8134}, {"iscrowd": 0, "category_id": 1, "bbox": [215.0, 41.0, 80.0, 60.0], "area": 2361, "segmentation": {"size": [512, 512], "counts": "ob[31l?3M3N3L3N3L3J^Ok@e0R?7M2M4L3N201O00N3L3NO3O2O010O00010O00010O010N1N2010O0N3M2M3M4M0O4M2M3]Of@=`?010O0N2M4L3N3I6O101N1N2M4O010O0001N1N3M20010O010O00010O000TOUAb0m>ZOVAc0U?N3L3M3N3L3MY^\\3"}, "image_id": 478, "id": 8135}, {"iscrowd": 0, "category_id": 1, "bbox": [380.0, 83.0, 26.0, 26.0], "area": 429, "segmentation": {"size": [512, 512], "counts": "USn51l?4M2M3M4L3M3O2O00010O010O00010O00010O00N3L3M3M4L3NYmd1"}, "image_id": 478, "id": 8136}, {"iscrowd": 0, "category_id": 1, "bbox": [403.0, 86.0, 27.0, 32.0], "area": 531, "segmentation": {"size": [512, 512], "counts": "]cY62k?4IL^@6`?7L3N2N3L3N3O00010O0010O010O0N2N3M2M4M2M3N3M2M4MUmX1"}, "image_id": 478, "id": 8137}, {"iscrowd": 0, "category_id": 1, "bbox": [490.0, 86.0, 22.0, 38.0], "area": 551, "segmentation": {"size": [512, 512], "counts": "bSe72l?3L3N2N3L3N3L3N2M4M2N3M210O01O01O010O01M2NXM"}, "image_id": 478, "id": 8138}, {"iscrowd": 0, "category_id": 1, "bbox": [334.0, 93.0, 44.0, 69.0], "area": 1789, "segmentation": {"size": [512, 512], "counts": "gTW51m?2M4M2N2M4M2B@ZAc0c>_OZAd0c>_O[Ad0b>_OZAd0c>>N3M2M4M2N3L3N2N3O001O010O0001N0N101O2M4M2N3L3N2M4M2XO[A3h>J[A2h>K[A3h>FQAO99h>FQAO:7[?N3M\\lR2"}, "image_id": 478, "id": 8139}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 125.0, 25.0, 53.0], "area": 829, "segmentation": {"size": [512, 512], "counts": "m3b1^>010O00010O010O01O0cNbAU1a>iNbAT1f>N2M4M2N3L3N3L3N2N3L3N3L3Nf[c7"}, "image_id": 478, "id": 8140}, {"iscrowd": 0, "category_id": 1, "bbox": [398.0, 134.0, 44.0, 
54.0], "area": 1418, "segmentation": {"size": [512, 512], "counts": "\\UW61m?3M2M4M2M3N3L3N3j@YOo>i0o@YOn>o0M2M4M2M4M2O101O010O00010O010O0001M2M4M1N010O4M2N2M4M2M4M2M3N3Db@2`?Kc@2g?NakR1"}, "image_id": 478, "id": 8141}, {"iscrowd": 0, "category_id": 1, "bbox": [502.0, 136.0, 10.0, 27.0], "area": 160, "segmentation": {"size": [512, 512], "counts": "kTk74j?2N3L3N3L3N2M4M210fK"}, "image_id": 478, "id": 8142}, {"iscrowd": 0, "category_id": 1, "bbox": [28.0, 147.0, 45.0, 47.0], "area": 1237, "segmentation": {"size": [512, 512], "counts": "cU>4j?2M3N3L3N3L3N2M40O010O00010O01L3N2N3L3N3L3010O0010O0010O001O0M3N3L3N3L3N2M4M2M4M2M4M2N2MW[k6"}, "image_id": 478, "id": 8143}, {"iscrowd": 0, "category_id": 1, "bbox": [442.0, 154.0, 46.0, 53.0], "area": 1392, "segmentation": {"size": [512, 512], "counts": "PVm63k?2M4M2N3L3N2N3L3N3L3N3M2M3N3M2M40O010O00010O010O010O0010OO1M4M2N3L3N3L3N2N3L3N3M2M4M2N2M4Mkj;"}, "image_id": 478, "id": 8144}, {"iscrowd": 0, "category_id": 1, "bbox": [483.0, 160.0, 29.0, 40.0], "area": 726, "segmentation": {"size": [512, 512], "counts": "oea71l?3N2N3L3N3M2M3N3M2N3L3N3M2O101O010O01O01OM4M2N3L3N3L3N2NmJ"}, "image_id": 478, "id": 8145}, {"iscrowd": 0, "category_id": 1, "bbox": [79.0, 163.0, 47.0, 50.0], "area": 1253, "segmentation": {"size": [512, 512], "counts": "XfW12k?3N2M4M2M4M2M3N3L3N30O00010O010O01N1N201O010O01O01N1N3L3N2M10O2O3L3N3M2M4M2M3N3L3N3L3N3L3N2NijP6"}, "image_id": 478, "id": 8146}, {"iscrowd": 0, "category_id": 1, "bbox": [127.0, 186.0, 44.0, 56.0], "area": 1398, "segmentation": {"size": [512, 512], "counts": "Zgo11m?3L3JL^@7^?7N2M4L3N2M4M2M4L3N2M4M2M4L21N2M3M4O010O00010O010N0N2O3L3N3L3M3N3L3N3L3M4M2M4M2MQZZ5"}, "image_id": 478, "id": 8147}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 207.0, 27.0, 33.0], "area": 595, "segmentation": {"size": [512, 512], "counts": "n6=a?2N2M4M2N2NO12m@XOn>h0PAZOQ?j0010O01O01N1N3O01O01M2M4M2M3N3L3M3N\\Yb7"}, "image_id": 478, "id": 8148}, {"iscrowd": 0, "category_id": 1, "bbox": [168.0, 209.0, 56.0, 61.0], "area": 2020, "segmentation": {"size": [512, 512], "counts": "nWd21m?3M2M4M2N2N3L3N3M2M3N3M2M4M2N3M2M3N3M2N3O00010O010O00001N1010O00010O010N1N2M4M2M4M2M3N3L3N3M2M3N3L3N3L3N3L3N2NVi_4"}, "image_id": 478, "id": 8149}, {"iscrowd": 0, "category_id": 1, "bbox": [42.0, 214.0, 34.0, 36.0], "area": 728, "segmentation": {"size": [512, 512], "counts": "aWe01l?3N2N3L3N3L3N2N3L3N3L30001O010O01O01O010O01O0N2N3L3N3L3N2M4M2M3M4MTii6"}, "image_id": 478, "id": 8150}, {"iscrowd": 0, "category_id": 1, "bbox": [234.0, 226.0, 49.0, 57.0], "area": 1620, "segmentation": {"size": [512, 512], "counts": "QXe31m?2M4M2M3N3L3N3L3N2YAXOS>l0iAXOT>j0jAXOU>j0gAZOY>e0eA]O[>c0cA_O]>S10O001M0201O01O01O010O010O00N3M2M010O012M3N3L3N3M2M3N3L3N3L3N2M4M2MlXb3"}, "image_id": 478, "id": 8151}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 239.0, 32.0, 74.0], "area": 1556, "segmentation": {"size": [512, 512], "counts": "_7U2k=01O010O010O01O01O010O010O0N2N3L3N3eNjAh0X>VOjAg0Z>VOhAh0Z>UOjAh0X>VOjAg0Z>VOiAg0i>N3L3N2N3L3N3M2Mgg_7"}, "image_id": 478, "id": 8152}, {"iscrowd": 0, "category_id": 1, "bbox": [101.0, 239.0, 25.0, 27.0], "area": 417, "segmentation": {"size": [512, 512], "counts": "Phb13j?3M4L3L4M4N110O00010O010O00010O010ON2N3M2M4M2M4M[hP6"}, "image_id": 478, "id": 8153}, {"iscrowd": 0, "category_id": 1, "bbox": [286.0, 253.0, 46.0, 81.0], "area": 1991, "segmentation": {"size": [512, 512], "counts": 
"QZ_44j?2mNKYB8d=JYB9TOE^>6[B9c=IZB;b=H\\B:b=IZB:c=I[B:b=IZB:d=H[B:a=I`B6^=MaB3\\=0dB1Y=2eBOY=3eB0[=0aB3_=W1010N10010N1EaBSNb=j1`BSNc=j1;N3L3N3L3N2M4M2N3L3N2M4M2M4M2M3N3L3N2M4Migi2"}, "image_id": 478, "id": 8154}, {"iscrowd": 0, "category_id": 1, "bbox": [137.0, 261.0, 32.0, 40.0], "area": 744, "segmentation": {"size": [512, 512], "counts": "SiT23j?3N2M4M2M4M2M3N3L3N3L3N201O0010O0010OO1M4M2M4M2M3N3L3N3L3N2M4MfW[5"}, "image_id": 478, "id": 8155}, {"iscrowd": 0, "category_id": 1, "bbox": [336.0, 267.0, 46.0, 82.0], "area": 1811, "segmentation": {"size": [512, 512], "counts": "^YX53k?2M4L3N2M4M2M3M4WAWOW>k0fAXOX>k0eAXO;K[=l0WB\\OT1RBiNQ>U1RBhNP>X1:0O11N4L3N2M4M2M4L3N2M4L3N3L3N2McgP2"}, "image_id": 478, "id": 8156}, {"iscrowd": 0, "category_id": 1, "bbox": [26.0, 282.0, 74.0, 67.0], "area": 2486, "segmentation": {"size": [512, 512], "counts": "bZ=231c?1[@2b?7N3L3N2N3L3N2NOO203M2N3L3N3M2N3L3N1O01O2N3L3N3M2N2M4M2010O01O010O01O001M2N3O00010O010O010O010O0O1N3L3N2NO10O12N2M4M2N3M3L3N3M2N3L3N3M2N3L3N3M2MQg]6"}, "image_id": 478, "id": 8157}, {"iscrowd": 0, "category_id": 1, "bbox": [259.0, 286.0, 29.0, 38.0], "area": 596, "segmentation": {"size": [512, 512], "counts": "biQ43k?2N3L3N3M2M3N3L3O2O001PAVOi>i0UAZOj>f0TA\\Om>l0O00010OO2M2JQAXOS?d06N2M4M2M4M2M3Nmf_3"}, "image_id": 478, "id": 8158}, {"iscrowd": 0, "category_id": 1, "bbox": [112.0, 289.0, 29.0, 29.0], "area": 460, "segmentation": {"size": [512, 512], "counts": "cYh11m?2O2M3M2O2M2N3N1N3M2O20O0100O0100O010N1O2M2N3M2N3M3N1N3MgVi5"}, "image_id": 478, "id": 8159}, {"iscrowd": 0, "category_id": 1, "bbox": [166.0, 290.0, 13.0, 13.0], "area": 105, "segmentation": {"size": [512, 512], "counts": "YYc23k?2N2N3N1010O0010OO2M2N3MlVV5"}, "image_id": 478, "id": 8160}, {"iscrowd": 0, "category_id": 1, "bbox": [380.0, 290.0, 46.0, 63.0], "area": 1667, "segmentation": {"size": [512, 512], "counts": "gZn51m?2M4M2N2M4XODhA>V>DgA`0U>DhA>V>DhA>V>DgA`0U>DhA>V>DgA`0U>g0O2O01O010O01O0O1010O000N3M2M4M2N3L3N2M4M2N3L3N2M4M2N3L3N3L3N2N3LffZ1"}, "image_id": 478, "id": 8161}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 294.0, 37.0, 31.0], "area": 706, "segmentation": {"size": [512, 512], "counts": "kio21l?3M4M2M3N3L3M4M200010O01O010O01O01O010OO2M2010O0010O005LO0010O0O2M2N3G]@0f?N\\@0QV^4"}, "image_id": 478, "id": 8162}, {"iscrowd": 0, "category_id": 1, "bbox": [133.0, 302.0, 52.0, 51.0], "area": 1356, "segmentation": {"size": [512, 512], "counts": "_jR21l?3N3L3N3L3N2M4M2N3M21O01O0M4M2M3N3O010O01O01O010O01O0SO[A`0d>]O_Ac0b>ZOaAf0^>WOeAi0i>010O010O0010O0010O0010O0O2L3N2M4M2M4M2M4MhUS5"}, "image_id": 478, "id": 8163}, {"iscrowd": 0, "category_id": 1, "bbox": [316.0, 310.0, 22.0, 27.0], "area": 336, "segmentation": {"size": [512, 512], "counts": "YZn43j?3N2N3L3N3L3N2N30O01O010OO2L3N2N3L3N3L3N3MVff2"}, "image_id": 478, "id": 8164}, {"iscrowd": 0, "category_id": 1, "bbox": [453.0, 320.0, 57.0, 47.0], "area": 1514, "segmentation": {"size": [512, 512], "counts": "kjR71l?4L3M3M4L3M4L3M4L30010O010O010O00010QAROj>R110O010OO1M4O010O010O00010O010O010O00010O010O010O00010O0M4M2N3L3N2N3L3N3M2M_e0"}, "image_id": 478, "id": 8165}, {"iscrowd": 0, "category_id": 1, "bbox": [424.0, 322.0, 26.0, 27.0], "area": 424, "segmentation": {"size": [512, 512], "counts": "eZd61k?4M4L3L5L3N30O0010O0010O010O0010O0001L3N3M2M4M2N2Mien0"}, "image_id": 478, "id": 8166}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 324.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "T:2jeo7"}, "image_id": 478, "id": 8167}, {"iscrowd": 0, "category_id": 1, "bbox": [103.0, 332.0, 28.0, 28.0], 
"area": 438, "segmentation": {"size": [512, 512], "counts": "ljc12m?2M2N3M2N3N1N3M3M201O01000O010O010OO2M3N1N3M2O2M3M2N3N[Un5"}, "image_id": 478, "id": 8168}, {"iscrowd": 0, "category_id": 1, "bbox": [352.0, 343.0, 28.0, 28.0], "area": 456, "segmentation": {"size": [512, 512], "counts": "Y[`51m?3M2N3M2N2N3M2N3M210O01O010O01O010O010O01M2M3N3M2M4M2NSeQ2"}, "image_id": 478, "id": 8169}, {"iscrowd": 0, "category_id": 1, "bbox": [185.0, 345.0, 69.0, 54.0], "area": 2160, "segmentation": {"size": [512, 512], "counts": "nkl22l?3M2[@J^?9_@I`?=M3M2N3M2N3M2N3N1010O010O010O010O001M0012M2N3M2N3M2N20O2M2N3M2N3M3O010O01M2N3M2O2M2O2O010O01O0N3M3M2N3M2N3N1N3M2N3M2N3M2N3M2N3N1NndP4"}, "image_id": 478, "id": 8170}, {"iscrowd": 0, "category_id": 1, "bbox": [123.0, 348.0, 46.0, 50.0], "area": 1292, "segmentation": {"size": [512, 512], "counts": "mkm12l?3M2N3M2N3N2M2N3M2N3M2O2M3M2N3M2O2M210O10O010O10O10O0O2N110O10OO2D[AZOf>e0[AYOh>d0[AYOh>d0;N3N1N3M2N3N2M2N3NeT[5"}, "image_id": 478, "id": 8171}, {"iscrowd": 0, "category_id": 1, "bbox": [415.0, 349.0, 24.0, 22.0], "area": 320, "segmentation": {"size": [512, 512], "counts": "Zk_62k?3N3L3N2M4O010O00010O010O00010O010O00N3L3N3L3NnTT1"}, "image_id": 478, "id": 8172}, {"iscrowd": 0, "category_id": 1, "bbox": [255.0, 352.0, 51.0, 32.0], "area": 967, "segmentation": {"size": [512, 512], "counts": "fko31m?2N3M2N3M2N3M2N3O010O0O2M2N3O010O0O1N02O2M2N3N1^Of@?^?O010O010O01M2N3O010O010O010O0010O001M2N3M2N3M2N3M2NedV3"}, "image_id": 478, "id": 8173}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 353.0, 56.0, 62.0], "area": 1851, "segmentation": {"size": [512, 512], "counts": "QnNdAT1[>nNcAT1Z>;M2N3N1N3N11O0N3M2O2M3M2O2M2N3M2O2M3M2N3N1N3O10O010O010O01000O010O010O01O1M2N3M2O2M2N3M3N1N3MYdS7"}, "image_id": 478, "id": 8174}, {"iscrowd": 0, "category_id": 1, "bbox": [398.0, 378.0, 28.0, 28.0], "area": 469, "segmentation": {"size": [512, 512], "counts": "]\\W62l?2M3N3M2N3M2N2M4N1010O010O0010O0010O001M2N2M4M2N3L3N3LQdZ1"}, "image_id": 478, "id": 8175}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 378.0, 53.0, 45.0], "area": 1391, "segmentation": {"size": [512, 512], "counts": "c\\i63k?2M4M2M3N3L3N3L3N3L300010O01O01O010O01O010O01O010O01O010O01O010O01O010O01O010O01O01N1N3L3N3M2M3N3L3N3M2MhS<"}, "image_id": 478, "id": 8176}, {"iscrowd": 0, "category_id": 1, "bbox": [287.0, 381.0, 43.0, 51.0], "area": 1307, "segmentation": {"size": [512, 512], "counts": "Rm_43k?3L3N3L3N2M4H^On@d0P?8L3M3N3M210O00010O001M2M3010O010O0010ON2N3L3M4M2M4M2M4M2M4M2M4M2M3N3Llcj2"}, "image_id": 478, "id": 8177}, {"iscrowd": 0, "category_id": 1, "bbox": [81.0, 390.0, 61.0, 61.0], "area": 2125, "segmentation": {"size": [512, 512], "counts": "WmX13k?2AMPA5m>MQA5n>Mo@6n>LPA7m>LPA6n>LQA6m>?N3M2O2O010O10O010O10O010OO1OO2N3M2100O0100O010`AcN[>]1cAfN]>_1O010O0100O010O0100OO2N1FcAoN`>n0bAPO`>n0;N2M2N3M2N3N1N3M2N3N1N3M3MXch5"}, "image_id": 478, "id": 8178}, {"iscrowd": 0, "category_id": 1, "bbox": [331.0, 394.0, 46.0, 48.0], "area": 1348, "segmentation": {"size": [512, 512], "counts": "nlU53k?2M4M2e@Hk>;SAGk>9o@JP?c010O010O01OO2L3N3L3N201O010O010O01O01O010O01O01OO2L3N3M2M3N3L3N3M2M4M2M3N3L3N]SS2"}, "image_id": 478, "id": 8179}, {"iscrowd": 0, "category_id": 1, "bbox": [153.0, 414.0, 64.0, 56.0], "area": 2028, "segmentation": {"size": [512, 512], "counts": "om\\22l?3M2N3N1N3M2N3M2O2M3M2N3M2O2M2N3M3[AfN`>^10100O0100O010O010O01000O010O010O0N1O1O01O0003M2O20ON3N1N3N101000OO2M2N3WOk@c0Z?O2M2N3M2N3O1O0N4LbRc4"}, "image_id": 478, "id": 8180}, {"iscrowd": 0, "category_id": 1, "bbox": [377.0, 425.0, 63.0, 45.0], "area": 1630, 
"segmentation": {"size": [512, 512], "counts": "nml52l?3L3N2N3M2M4M2N2M4O001O010O01O01O010N1N3O00010O010O010O00010O010O01O01O010O010O00010O010O010O00010O010O01N1N2VOn@c0Y?N3M2M4M2N3L3NTbS1"}, "image_id": 478, "id": 8181}, {"iscrowd": 0, "category_id": 1, "bbox": [50.0, 433.0, 65.0, 55.0], "area": 1914, "segmentation": {"size": [512, 512], "counts": "V^i01n?2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N200O1N2N2N2N2N2O1000O1N2N2N2O1000000000000O0O2000000000N1O0000000002N2N2N2N1O0O1Cn@I0KT?:n@I0KT?;m@HY?8g@F[?Oc@6h?N2N2NhQV6"}, "image_id": 478, "id": 8182}, {"iscrowd": 0, "category_id": 1, "bbox": [337.0, 438.0, 27.0, 27.0], "area": 465, "segmentation": {"size": [512, 512], "counts": "XnX52k?4L3N2M4L3M3O20O00010O00010O00010O00010O0N2M4L3M3M4LVbY2"}, "image_id": 478, "id": 8183}, {"iscrowd": 0, "category_id": 1, "bbox": [128.0, 440.0, 23.0, 19.0], "area": 239, "segmentation": {"size": [512, 512], "counts": "R^P21n?1N3M2N3N1N3O010O010O010O10O010O001M2N3M2O2MRRd5"}, "image_id": 478, "id": 8184}, {"iscrowd": 0, "category_id": 1, "bbox": [252.0, 446.0, 47.0, 53.0], "area": 1482, "segmentation": {"size": [512, 512], "counts": "R_n31m?2M4M2N3DGm@;Q?Hl@;Q?Gm@;Q?m0cARO^>l0eAQO^>l0dARO^>3ZAd0S?YOPAd0W?N3L3N3M2N3M2NdQZ3"}, "image_id": 478, "id": 8185}, {"iscrowd": 0, "category_id": 1, "bbox": [303.0, 462.0, 44.0, 50.0], "area": 1267, "segmentation": {"size": [512, 512], "counts": "dog42l?3M2N3M2N3M2M3N3M2N3n@VOj>l0TAVOj>Q1N2N2N2N2M3N21O001O001O001O001M^AfNb>X1`AhNa>T17N2N3M2N3M2N3M2N3M2M4M2N3M2NXQb2"}, "image_id": 478, "id": 8186}, {"iscrowd": 0, "category_id": 1, "bbox": [441.0, 464.0, 26.0, 25.0], "area": 382, "segmentation": {"size": [512, 512], "counts": "Qol61m?2M3N3L3N3L3N201O010O01O010O01O010O01M2N2M4M2M4M2M\\Qf0"}, "image_id": 478, "id": 8187}, {"iscrowd": 0, "category_id": 1, "bbox": [355.0, 468.0, 44.0, 44.0], "area": 1290, "segmentation": {"size": [512, 512], "counts": "goa51_?1m@2o>2m@1Q?1m@2o>2m@1S?>01O010O0N2M4M2M4M20010O0010O001O00001O001O00001O001N1N2N3L3N3L3N2M4M2M3N3L3NSQh1"}, "image_id": 478, "id": 8188}, {"iscrowd": 0, "category_id": 1, "bbox": [120.0, 485.0, 68.0, 27.0], "area": 1215, "segmentation": {"size": [512, 512], "counts": "j_l13l?1N3M2O2M3M2O1N2O10000O1N2O1N2O1001O1O001O1O001O00O11O1O001O001O1O001O001O1O001O001O1ON2O1N2O1N2N2O1N21O1O001O001O1O0O2N1N3M2O2M3N1N3Ma`Q5"}, "image_id": 478, "id": 8189}, {"iscrowd": 0, "category_id": 1, "bbox": [400.0, 494.0, 46.0, 18.0], "area": 572, "segmentation": {"size": [512, 512], "counts": "h_X6240b?2[@1b?8O1001ON2O1N20000001O001O00001O001OO1N20000001O001O00001O001O0E`@6e?01O00001O001O00001O001MU`P1"}, "image_id": 478, "id": 8190}, {"iscrowd": 0, "category_id": 1, "bbox": [229.0, 499.0, 40.0, 13.0], "area": 313, "segmentation": {"size": [512, 512], "counts": "nob32m?1N2N2N2N200001O1O001O001O001O001O001ON2N2N2N2O11O001O001O001O1O001O001O001M2NWPi3"}, "image_id": 478, "id": 8191}, {"iscrowd": 0, "category_id": 1, "bbox": [341.0, 503.0, 23.0, 9.0], "area": 126, "segmentation": {"size": [512, 512], "counts": "noZ52k?3M3O100001O001O00001O001O00001O001O00001O0OS`Y2"}, "image_id": 478, "id": 8192}, {"iscrowd": 0, "category_id": 1, "bbox": [12.0, 0.0, 31.0, 20.0], "area": 528, "segmentation": {"size": [512, 512], "counts": "PP68h?000000:F0000000000001O00000000000000000000000000001O000000A_`Z7"}, "image_id": 481, "id": 8193}, {"iscrowd": 0, "category_id": 1, "bbox": [263.0, 0.0, 37.0, 21.0], "area": 489, "segmentation": {"size": [512, 512], "counts": "P`S45k?000000000000O1000000002N6J6J3M0000000000O100000000O10000000000O105K6J5K`oY3"}, "image_id": 481, "id": 
8194}, {"iscrowd": 0, "category_id": 1, "bbox": [354.0, 0.0, 36.0, 19.0], "area": 510, "segmentation": {"size": [512, 512], "counts": "PPa52n?5K5KO1000000001O4L3M0000O100000000O100000000O100000000O1000000O4M4L5Kdol1"}, "image_id": 481, "id": 8195}, {"iscrowd": 0, "category_id": 1, "bbox": [424.0, 0.0, 29.0, 25.0], "area": 554, "segmentation": {"size": [512, 512], "counts": "SPd62n?4L5K5K5J2O000000O010000000O010000000O100000000O3N5K5K4L`_m0"}, "image_id": 481, "id": 8196}, {"iscrowd": 0, "category_id": 1, "bbox": [61.0, 6.0, 115.0, 52.0], "area": 3732, "segmentation": {"size": [512, 512], "counts": "]`n04l?3M4K5L4L4L4K5L4L3MO0100000O0100000O0100000O0100000O01004L4K4M3M00O0100000O00O1100000O0104L4L4K3N0000O010000O010000O0100000O0100000O104L4L4K2O00O10O100Nj@ZOT?h0200O01000002M5L4L4LO010000O0100000O010000O010000O010001O4K5L4L3M4K5L4L4L4K`nW5"}, "image_id": 481, "id": 8197}, {"iscrowd": 0, "category_id": 1, "bbox": [311.0, 20.0, 24.0, 25.0], "area": 448, "segmentation": {"size": [512, 512], "counts": "h`k41o?6I6K6J3M000O01000000000O0100000000O01005K5K6Jl^h2"}, "image_id": 481, "id": 8198}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 22.0, 13.0, 25.0], "area": 207, "segmentation": {"size": [512, 512], "counts": "f0g0Y?010O010OO1M4M2M3M4M2M3MY_i7"}, "image_id": 481, "id": 8199}, {"iscrowd": 0, "category_id": 1, "bbox": [466.0, 24.0, 46.0, 44.0], "area": 1358, "segmentation": {"size": [512, 512], "counts": "ZQY73l?5L4L5K4L4K102N0000O0100000O0100000O1O01000OHYOYAg0g>]OUAc0k>90O0100000O03N00O10O100000O10O1000O102N4JTO"}, "image_id": 481, "id": 8200}, {"iscrowd": 0, "category_id": 1, "bbox": [356.0, 28.0, 27.0, 18.0], "area": 331, "segmentation": {"size": [512, 512], "counts": "QQb54l?5K4L0O10O100000O10O100000O10O100000O10O100000O04M5Kl^P2"}, "image_id": 481, "id": 8201}, {"iscrowd": 0, "category_id": 1, "bbox": [406.0, 38.0, 18.0, 28.0], "area": 323, "segmentation": {"size": [512, 512], "counts": "YQ[63m?4K5L4L5K4K2OO1000O10O3N4L4L4K5L4LTn[1"}, "image_id": 481, "id": 8202}, {"iscrowd": 0, "category_id": 1, "bbox": [44.0, 45.0, 22.0, 15.0], "area": 275, "segmentation": {"size": [512, 512], "counts": "cQf07c?600000000000001O01O000000000000001O01O00Jgnn6"}, "image_id": 481, "id": 8203}, {"iscrowd": 0, "category_id": 1, "bbox": [258.0, 45.0, 58.0, 31.0], "area": 1552, "segmentation": {"size": [512, 512], "counts": "dQQ4;e?=C000000000000000000000J60000000000000O10000000000000000000000000O1000000000000000000000O105K000000000000000000?AomQ3"}, "image_id": 481, "id": 8204}, {"iscrowd": 0, "category_id": 1, "bbox": [365.0, 50.0, 41.0, 36.0], "area": 1068, "segmentation": {"size": [512, 512], "counts": "laf51?4l>1o@0o>5l@KU?9g@GY?`002M4M00OLXORAh0n>500O010000000O0100000O01000003L1000000O10O100000O104L4L5J5L4Lgmd1"}, "image_id": 481, "id": 8205}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 58.0, 29.0, 84.0], "area": 1490, "segmentation": {"size": [512, 512], "counts": "m1l05?\\=EcB<\\=DdB<\\=DeB;\\=EcB<\\=DbB>_=R1O00010O01^N`Bc0_=ZOdBe0]=XOgBd0]=XOfBf0\\=XOgBd0]=XOfBf0\\=WOgBf0\\=WOhBf0[=VOhBi0Y=UOjBj0V>N2Jl@^OW?>7N3L3N2M4Mn\\a7"}, "image_id": 481, "id": 8206}, {"iscrowd": 0, "category_id": 1, "bbox": [323.0, 76.0, 22.0, 23.0], "area": 435, "segmentation": {"size": [512, 512], "counts": "^bQ55k?`0@000000000O0100000000000000000O1000000`0@T]c2"}, "image_id": 481, "id": 8207}, {"iscrowd": 0, "category_id": 1, "bbox": [441.0, 91.0, 24.0, 25.0], "area": 421, "segmentation": {"size": [512, 512], "counts": "Pcl61n?6K5K4L5J0100000O0100000O10O1000O1000O3N5K5K4Kf\\g0"}, "image_id": 481, "id": 8208}, {"iscrowd": 
0, "category_id": 1, "bbox": [261.0, 93.0, 55.0, 30.0], "area": 1649, "segmentation": {"size": [512, 512], "counts": "nbR4m0R?10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000SmQ3"}, "image_id": 481, "id": 8209}, {"iscrowd": 0, "category_id": 1, "bbox": [48.0, 99.0, 57.0, 72.0], "area": 2427, "segmentation": {"size": [512, 512], "counts": "iTh02k?4VOLcA7Z>LcA7Z>LdA7X>MdA6Y>MdA7Y>KdA8Y>LcA7Z>j0L3O1010O00010O00M4N11O01O01O01O010O00010O01O01OoNPB2Q>JRB6n=GVB9i=DZB00010ON2M4N10010L3M3M4L3M3M4Ld[[6"}, "image_id": 481, "id": 8210}, {"iscrowd": 0, "category_id": 1, "bbox": [354.0, 101.0, 23.0, 20.0], "area": 309, "segmentation": {"size": [512, 512], "counts": "ZSa51n?5L5K5K00O01000000O01000000O01000000O05L5K5K]\\S2"}, "image_id": 481, "id": 8211}, {"iscrowd": 0, "category_id": 1, "bbox": [135.0, 102.0, 45.0, 82.0], "area": 2984, "segmentation": {"size": [512, 512], "counts": "VeS2a0_?0]NCcC=P<0PD0P<0mC3S[O]Ae0c>XO`Ai0l>O00010O00010O0[Om@=S?@PAa0W?01O01OO2L3M3M4M2M3McZc2"}, "image_id": 481, "id": 8216}, {"iscrowd": 0, "category_id": 1, "bbox": [488.0, 149.0, 24.0, 81.0], "area": 1119, "segmentation": {"size": [512, 512], "counts": "gVd733Oc?3[@Oc?9L3N2N3M2M4oNXOfBj0X=YOeBj0X=XOeBk0X=XOfBj0X=YOeBj0X=XOfBj0X=YOdBk0[=UOcBm0]=TO`Bo0`=PO]BR1d=c03N10N3M2M4ZK"}, "image_id": 481, "id": 8217}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 167.0, 49.0, 88.0], "area": 2677, "segmentation": {"size": [512, 512], "counts": "b6U1i>3L3N3M2M3N3B[N]Bg1`=\\N^Bg1_=\\N]Bg1`=\\N^Bg1_=>M3N3N110O010O01M2N2N3M2M4M2N3L3N3M2M3N3M2N3O0010OO1N3M2M4M2N3L3BQAIQ?5RAHQ?5QAIQ?4`0M2N\\ZW7"}, "image_id": 481, "id": 8218}, {"iscrowd": 0, "category_id": 1, "bbox": [380.0, 168.0, 54.0, 48.0], "area": 1277, "segmentation": {"size": [512, 512], "counts": "RVn52n?1N2N3M2O1N3M2O2M2N2N3N1N3M2O1N1O10O0000010O00010O0001O01O00010O00010O0000101N3M2O2M2N2O2M2N3M2O1N3M2O2M2NTjV1"}, "image_id": 481, "id": 8219}, {"iscrowd": 0, "category_id": 1, "bbox": [58.0, 183.0, 68.0, 47.0], "area": 1818, "segmentation": {"size": [512, 512], "counts": "dVm03k?3L3N3L3LBd@`0Y?6M2M4M2M3O20O01O010O0N2O20O010O00010O010O0010O0010O0010O0010O0M4M2M3N3L3N3L3N2O2O01L3M4O0010O0010O0010O0010O00N3M2M4M2M3N3LoiP6"}, "image_id": 481, "id": 8220}, {"iscrowd": 0, "category_id": 1, "bbox": [255.0, 200.0, 78.0, 61.0], "area": 2333, "segmentation": {"size": [512, 512], "counts": "Wgo32k?4M2N3L3N3M2M3N3M2M4M2N3L310O01O010O010O01O01O010O010jNYAQ1k>10O010O00010O010O010O0010O0010O010O0010O0010O010O010O00010O010O01N1N210ON3M2N3M2M3N3M2M4M2N3M2M3N3MmXi2"}, "image_id": 481, "id": 8221}, {"iscrowd": 0, "category_id": 1, "bbox": [352.0, 205.0, 23.0, 18.0], "area": 256, "segmentation": {"size": [512, 512], "counts": "gV`52k?4K4M3010O00010O00010O000010O00010O00O2L3M3M`YT2"}, "image_id": 481, "id": 8222}, {"iscrowd": 0, "category_id": 1, "bbox": [103.0, 209.0, 72.0, 95.0], "area": 3254, "segmentation": {"size": [512, 512], "counts": "Pic11m?3L3N2M4M2M4O000010OM4M2N2M4M2M4M2M3N3L3N3L3N2N3L3N2M4DoMeBS2Y=PNcBT2Z=:M3N3L301O00010O00M4L3N3L3HcBkM_=R2dBkM`=R28M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3M21O010ON3M2M3M4M2MTYX5"}, "image_id": 481, "id": 8223}, {"iscrowd": 0, "category_id": 1, "bbox": [38.0, 226.0, 53.0, 46.0], "area": 1480, "segmentation": {"size": [512, 512], "counts": "UXc02k?3N2N3L3N2M4M2N3L3N2M4M2N2M4N1001M2N210O010O00010O00010O010O00010O0010O0010O0O1M4L3N3L3M3N3O0Ic@L\\?1g@OZ?Ni@1W?Ll@5`?O0MWXb6"}, "image_id": 481, "id": 8224}, {"iscrowd": 0, "category_id": 1, "bbox": [408.0, 226.0, 60.0, 47.0], "area": 1616, "segmentation": 
{"size": [512, 512], "counts": "oW\\61l?4L3M3M4L3L4M4M200010O00010O00M4L3O1010O000101N00010O0001N10010O000QOWAh0i>TOZAl0m>010O0010O00010O00010O00010O001M2M3M4M2M3M4L3M3M\\he0"}, "image_id": 481, "id": 8225}, {"iscrowd": 0, "category_id": 1, "bbox": [470.0, 229.0, 27.0, 30.0], "area": 523, "segmentation": {"size": [512, 512], "counts": "lW[71l?4L3L4M4L3M3L5O01O00010O00010O0001O01O0O1L5L3M3L5L3MhX7"}, "image_id": 481, "id": 8226}, {"iscrowd": 0, "category_id": 1, "bbox": [333.0, 246.0, 48.0, 37.0], "area": 1118, "segmentation": {"size": [512, 512], "counts": "[hV51l?3N3M2M3`@FX?=e@EX?d0M2N2N02N2010O010O00010O010O0010O0010O010N1N2N3O010O00010O010O0001TOPAf0P?WOTAf0T?N2M4M2M4M2M3NnWQ2"}, "image_id": 481, "id": 8227}, {"iscrowd": 0, "category_id": 1, "bbox": [245.0, 254.0, 58.0, 50.0], "area": 1754, "segmentation": {"size": [512, 512], "counts": "[hj33j?3l@K[>7bAL\\>7aAL[>7YAD38b>7XAC48c>;ZAIe>i010O01O01O010O01O01O010ON3L3O1010O010O00010O010O00010O010O00010O010O00010O010OO1M4M2M4M2M3N3L3N3L3N2MaWX3"}, "image_id": 481, "id": 8228}, {"iscrowd": 0, "category_id": 1, "bbox": [305.0, 268.0, 27.0, 27.0], "area": 446, "segmentation": {"size": [512, 512], "counts": "ohh41l?3M3N3L3M4L3N2010O010O00010O010O00010O0O2L3N2M4M2M4M^gi2"}, "image_id": 481, "id": 8229}, {"iscrowd": 0, "category_id": 1, "bbox": [458.0, 293.0, 51.0, 54.0], "area": 1364, "segmentation": {"size": [512, 512], "counts": "ZZU71n?2M2O2N2M3N1O2M3N2E]OVAe0h>]OUAe0j>\\OUAf0i>\\OUAf0h>;N2N1O20000O10ON1000O10O1000O102N2N2M2O2N2N2M3N1O2N2M3N2N1N3N2N2N1N3N2N2N[V1"}, "image_id": 481, "id": 8230}, {"iscrowd": 0, "category_id": 1, "bbox": [398.0, 295.0, 23.0, 17.0], "area": 285, "segmentation": {"size": [512, 512], "counts": "[YW65k?4L4K10000000O0100000O01000000O01000001N6K4L_V]1"}, "image_id": 481, "id": 8231}, {"iscrowd": 0, "category_id": 1, "bbox": [291.0, 297.0, 64.0, 86.0], "area": 2705, "segmentation": {"size": [512, 512], "counts": "_ka41l?4M2N2M4M2M4N100M4M2O20O0010O001O0M3N3L3N3YOjN_BY1^=kN_BW1_=kN_BX1]=lN_BW1_=kN_BX1]=kN`BX1^=f0010O01O0O2M2O110ON3M2M3N3L3N3L3TNRBd1P>ZNRBc1W>N3M2M4M2M4M2M4M2M3N3L3N3L3N3L3N3L3NWV^2"}, "image_id": 481, "id": 8232}, {"iscrowd": 0, "category_id": 1, "bbox": [223.0, 300.0, 70.0, 53.0], "area": 2094, "segmentation": {"size": [512, 512], "counts": "bj_32l?3L3N2N3L310O010O00010M2M4M2M3M4L3N3L3M3N3L21M30010O010O0O20O00010O010O00010O010O00010O010ON2N3L3N3M2M4M2O1010O010O00010O01N1N2M4M2N3L3N2MUV]3"}, "image_id": 481, "id": 8233}, {"iscrowd": 0, "category_id": 1, "bbox": [401.0, 307.0, 42.0, 63.0], "area": 1600, "segmentation": {"size": [512, 512], "counts": "kiX61o?4K5L5K4L4K6K4L4L5J5L00O1000O1000O10O10001O4K5L4L000O0100BQBkNo=U1UBgNj==RB5]>KeA3[>MeA3[>LfA4Z>LeAKLJc>8aAN1Ec>8\\A3Q?Io@7U?Dl@<_?L6K]UR1"}, "image_id": 481, "id": 8234}, {"iscrowd": 0, "category_id": 1, "bbox": [357.0, 322.0, 54.0, 81.0], "area": 2646, "segmentation": {"size": [512, 512], "counts": "^jb51o?5K4L5J4MO100000O0100001eA^OU=h0fB\\OV=h0fB]OU=h0eB]OW=g0eB\\OQ=BbBW18WOV=T1fBlNZ=Y1aBgN^=n10O1000O10O101O5K000O10O1000O10O100000O6K4L5K4K6K4L5K3MO0100ZOXA4j>J[A1i>J\\A1j>J[A1Rcb1"}, "image_id": 481, "id": 8235}, {"iscrowd": 0, "category_id": 1, "bbox": [453.0, 341.0, 19.0, 24.0], "area": 268, "segmentation": {"size": [512, 512], "counts": "ijR71o?3M3L4M3M3L4M1O0O10O011O3L4M3M3L4M4Lgdc0"}, "image_id": 481, "id": 8236}, {"iscrowd": 0, "category_id": 1, "bbox": [504.0, 361.0, 8.0, 21.0], "area": 94, "segmentation": {"size": [512, 512], "counts": "j[l72k?3N2M4M2N3L3N2gD"}, "image_id": 481, "id": 8237}, {"iscrowd": 0, "category_id": 1, "bbox": [454.0, 
362.0, 39.0, 39.0], "area": 725, "segmentation": {"size": [512, 512], "counts": "V\\S73l?2N2N1N3L2000O10O100d@DU?=i@FU?m0ZAVOg>R1O0001O01O00010O0O1K6M200O2N100O101O00001O00L4M3M4L3L5M201O2K4L6\\Oh@7b?L4MbR6"}, "image_id": 481, "id": 8240}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 443.0, 16.0, 46.0], "area": 386, "segmentation": {"size": [512, 512], "counts": "k=^1b>M4M2M3N3L3N3L3N2M4M2M4M2M4M2MRbg7"}, "image_id": 481, "id": 8241}, {"iscrowd": 0, "category_id": 1, "bbox": [59.0, 469.0, 121.0, 43.0], "area": 2677, "segmentation": {"size": [512, 512], "counts": "oom01m?2M3N2M3N2M3N2M3N2M3N2M3N2M3N2M3N200001O00001O001O00001ON2N200001O001O00001O001O00001mNYAk0g>SO[Am0l>01O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O0000Q`U5"}, "image_id": 481, "id": 8242}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 96.0, 35.0], "area": 2211, "segmentation": {"size": [512, 512], "counts": "0?a?0000001O000000001O0000001O0000001O0000001O000000001O0000001O0000001O0000001O000000001O0000001O0000001O000000001O01O0001O0001O01O0001O01O0001O0001O01O0001O0001O01O0001O01O0001O00000K6K4L4L4Klo_6"}, "image_id": 483, "id": 8243}, {"iscrowd": 0, "category_id": 1, "bbox": [103.0, 13.0, 47.0, 37.0], "area": 1131, "segmentation": {"size": [512, 512], "counts": "Uac13i?4L4L4L5K4L41O01O0001O01O0001O01O0001O01O0001O01O0001O01O0001O01O0001O01O0001O01OL4L5K4L4L4L]od5"}, "image_id": 483, "id": 8244}, {"iscrowd": 0, "category_id": 1, "bbox": [154.0, 313.0, 56.0, 45.0], "area": 1676, "segmentation": {"size": [512, 512], "counts": "iZ]21k?4L4K6K4L4L401O01O0001O01O0001O0O1L4L5O00000010O00000010O0000010O0000010O0000010O00000010O0SOXAb0h>ZO\\Af0d>VOaAj0j>00O1L5J5L4L4Lhef4"}, "image_id": 483, "id": 8245}, {"iscrowd": 0, "category_id": 1, "bbox": [214.0, 326.0, 48.0, 67.0], "area": 2024, "segmentation": {"size": [512, 512], "counts": "X[[35g?4L4K5L5K4L4L4L5N1000010O000kAjN`=V1\\BnNe=Q1VBTOj=m0QBWOo=^1O0001O01O0001O01O0001O01O0001O0N2L4L5K4K5L4L4L5K4L4K5L5K4Leel3"}, "image_id": 483, "id": 8246}, {"iscrowd": 0, "category_id": 1, "bbox": [149.0, 353.0, 13.0, 17.0], "area": 149, "segmentation": {"size": [512, 512], "counts": "]kZ23i?5K4L41O01O0001O0O1L4L4LQe^5"}, "image_id": 483, "id": 8247}, {"iscrowd": 0, "category_id": 1, "bbox": [144.0, 370.0, 15.0, 22.0], "area": 213, "segmentation": {"size": [512, 512], "counts": "R\\X24h?4L4L5K4000010O0000M4K4L4L4L`T`5"}, "image_id": 483, "id": 8248}, {"iscrowd": 0, "category_id": 1, "bbox": [159.0, 374.0, 15.0, 22.0], "area": 205, "segmentation": {"size": [512, 512], "counts": "Vl_23j?3M4K4M3M301O01O01OM3L5L3M3L\\dX5"}, "image_id": 483, "id": 8249}, {"iscrowd": 0, "category_id": 1, "bbox": [151.0, 398.0, 19.0, 40.0], "area": 333, "segmentation": {"size": [512, 512], "counts": "am[23j?3M4K4M3M4L3M3M1OO10002N4L3M3M4L3M3LdcZ5"}, "image_id": 483, "id": 8250}, {"iscrowd": 0, "category_id": 1, "bbox": [247.0, 404.0, 13.0, 21.0], "area": 167, "segmentation": {"size": [512, 512], "counts": "Tmk33i?4M4K4M3N30O000N3L3L4M3L^cm3"}, "image_id": 483, "id": 8251}, {"iscrowd": 0, "category_id": 1, "bbox": [498.0, 405.0, 14.0, 23.0], "area": 268, "segmentation": {"size": [512, 512], "counts": "W]i73e?8H8N3O01O000000000001O01OYC"}, "image_id": 483, "id": 8252}, {"iscrowd": 0, "category_id": 1, "bbox": [496.0, 447.0, 16.0, 38.0], "area": 596, "segmentation": {"size": [512, 512], "counts": "o]h7j0V?00000000000001O0000000000002N2N2N2N2N2N2N2O1N2N2N2N2N2N2N2N3M[o_7"}, "image_id": 485, "id": 8260}, {"iscrowd": 0, 
"category_id": 1, "bbox": [187.0, 19.0, 44.0, 40.0], "area": 889, "segmentation": {"size": [512, 512], "counts": "Vam22m?2N2N2N2N2N2N2O1N3M2001OO1N2N2N2O1N2N1O00000000000001O002O1N2N2N2N2N2N3M2N2N2O1N2N2N2Nn^\\4"}, "image_id": 485, "id": 8261}, {"iscrowd": 0, "category_id": 1, "bbox": [106.0, 21.0, 31.0, 31.0], "area": 505, "segmentation": {"size": [512, 512], "counts": "TQe12m?2N2O1N3M2N2N2N2N2N2N2N3N0O00000001O2O1N2N3M2N2N2N2N2N2O1N2No^k5"}, "image_id": 485, "id": 8262}, {"iscrowd": 0, "category_id": 1, "bbox": [55.0, 27.0, 28.0, 28.0], "area": 413, "segmentation": {"size": [512, 512], "counts": "Yak02m?2N2N2N2N2N2N2N2N2N2O1N2N000010O3M2N2N2N2N2N2N2N2N2N2Nj^f6"}, "image_id": 485, "id": 8263}, {"iscrowd": 0, "category_id": 1, "bbox": [262.0, 33.0, 17.0, 16.0], "area": 144, "segmentation": {"size": [512, 512], "counts": "XQS41n?2N2N2N2N2N2N2O10O1N2N2N2N2N2N2Nh^d3"}, "image_id": 485, "id": 8264}, {"iscrowd": 0, "category_id": 1, "bbox": [279.0, 36.0, 29.0, 29.0], "area": 423, "segmentation": {"size": [512, 512], "counts": "ba[41n?2N2N2N2N2N2N2N3M2N2O1N2N2N0002N2N2N2N2N2N2O2M2N2N2N2N2N_nU3"}, "image_id": 485, "id": 8265}, {"iscrowd": 0, "category_id": 1, "bbox": [320.0, 36.0, 64.0, 42.0], "area": 1649, "segmentation": {"size": [512, 512], "counts": "]QP51o?3L3N3L3N3g@Ak>a0SAAj>c0SA@j>b0SAAj>l0O00O0101O0O010O10O3N3L3N3M0ON3O010O10O10O010O10O10O010O10O10O03N000O10O010O10O10O010O10O10003L3N3L3N3M2M4M2MYno1"}, "image_id": 485, "id": 8266}, {"iscrowd": 0, "category_id": 1, "bbox": [113.0, 44.0, 59.0, 60.0], "area": 1629, "segmentation": {"size": [512, 512], "counts": "ebh11n?2N2N2N3M2N2N2N2N2O1G\\OSAf0k>\\OSAf0k>\\OSAf0k>9N2N2O1N00000000001O000001O000000000000000001O01O2N2N2N2N2N2N2N3M2N2N2N2O1N2N2N2N2N2N2N2NPnY5"}, "image_id": 485, "id": 8267}, {"iscrowd": 0, "category_id": 1, "bbox": [17.0, 70.0, 58.0, 55.0], "area": 1591, "segmentation": {"size": [512, 512], "counts": "Zc81n?2N2N2N2N2N2N2N2N2N2N2D[OZAg0d>[OZAg0d>[OZAg0d>[OZAg0d>l0SAVOk>Q1N2O1N2001M2N20000000000000001O000001O00000XAnNa>R1^AoNb>Q1\\AQOd>V1001O0000O1N3M2N2N2O1N00UOhAOX>1jAMX>1jAMX>1jANW>0kANW>1jAMX>1jAMX>1jAMX>1jAMX>1jAMX>1jAMY>0jAMX>1jAMX>1UmT3"}, "image_id": 485, "id": 8271}, {"iscrowd": 0, "category_id": 1, "bbox": [220.0, 92.0, 11.0, 11.0], "area": 61, "segmentation": {"size": [512, 512], "counts": "QS^31n?2N2N2N2N20N2N2N2N2NP]\\4"}, "image_id": 485, "id": 8272}, {"iscrowd": 0, "category_id": 1, "bbox": [138.0, 94.0, 21.0, 21.0], "area": 224, "segmentation": {"size": [512, 512], "counts": "YSU22m?2N2N2N2O2M2N2N1O00000002O1N2N3M2N2N2N2Nj\\`5"}, "image_id": 485, "id": 8273}, {"iscrowd": 0, "category_id": 1, "bbox": [361.0, 96.0, 31.0, 31.0], "area": 488, "segmentation": {"size": [512, 512], "counts": "_cd52m?2N2N2N3N1N2N2N2N2N2N2N3M00010O0002N2N2N2N2N2N3N1N2N2N2N2N2Nclk1"}, "image_id": 485, "id": 8274}, {"iscrowd": 0, "category_id": 1, "bbox": [57.0, 103.0, 57.0, 52.0], "area": 1333, "segmentation": {"size": [512, 512], "counts": "Xdl01n?2N3M2O1N2N2N2N2N2N2N2N3M2N2O1N2N1O001O000000000003M2N000001O01O0000000000000000010O2N2N2N2N2N2N2N2N2O2M2N2N2N2N[lV6"}, "image_id": 485, "id": 8275}, {"iscrowd": 0, "category_id": 1, "bbox": [201.0, 106.0, 58.0, 46.0], "area": 1398, "segmentation": {"size": [512, 512], "counts": "ncT32m?2N2N2N2N2N2N2N2N3O000O1N2N2N2N1O000001O01O002N2N2N201OO2M2O10N3M2N20000000O1N2N2N2N2N2N3M2N2O1N2N2N2N2N2N2N2N2N2Nl[n3"}, "image_id": 485, "id": 8276}, {"iscrowd": 0, "category_id": 1, "bbox": [121.0, 123.0, 60.0, 52.0], "area": 1495, "segmentation": {"size": [512, 512], "counts": 
"ndl13l?2O1ZOK^A7`>K^A7`>K^A7a>J]A8a>J]A9`>I^A9`>I_A8_>K^A7`>K^A7`>K^A7a>e0N0000001O03M2N2N2N2N2O2M2N2N2N2N2N100O00000000000010O0000000000010O0001O2N2N2N2N2O1N3M2N2N2Nk[U5"}, "image_id": 485, "id": 8277}, {"iscrowd": 0, "category_id": 1, "bbox": [168.0, 134.0, 60.0, 47.0], "area": 1476, "segmentation": {"size": [512, 512], "counts": "gTd21n?2N2U@Mf?8N2N20001N1N2N2O1O100N2N3M2N2N00000001O0001O2N2N3O01O0N2N21N1N2N2N2001O000O1N2N2N2N2N2N3N1N2N2N2N2N2N2N3M2O1N2N2Nnj]4"}, "image_id": 485, "id": 8278}, {"iscrowd": 0, "category_id": 1, "bbox": [331.0, 139.0, 34.0, 35.0], "area": 507, "segmentation": {"size": [512, 512], "counts": "ddU51n?2O1N2N2N2N2N2N2N2N3N1000000000000000010O000000N2N2N2N2N3M2N2O1N2NnZY2"}, "image_id": 485, "id": 8279}, {"iscrowd": 0, "category_id": 1, "bbox": [376.0, 142.0, 50.0, 59.0], "area": 1505, "segmentation": {"size": [512, 512], "counts": "cTl52n?4K6K5K5K5K1O1N2O2N1O1O1N2O1O1O1O2M2O1O1O1O1N2O1O1103L4L5K1O0O1N2N2N2N2N2N3M2N2O1N2N2N2N2N2N3M2N2NZjZ1"}, "image_id": 485, "id": 8280}, {"iscrowd": 0, "category_id": 1, "bbox": [47.0, 146.0, 26.0, 27.0], "area": 377, "segmentation": {"size": [512, 512], "counts": "ndg03l?2N2N2N2N2N2N2N2N2N2N2N2OO2N2N2N2N2N2N2N2N2N2N2N2NS[k6"}, "image_id": 485, "id": 8281}, {"iscrowd": 0, "category_id": 1, "bbox": [139.0, 163.0, 58.0, 47.0], "area": 1436, "segmentation": {"size": [512, 512], "counts": "heU22m?2N2N2N2N2N2N3M2N2O100N2N2N2N2N0001O00000002N2N2N2000O2M2000N2N2N2O100001O000N2N2O1N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2NSZm4"}, "image_id": 485, "id": 8282}, {"iscrowd": 0, "category_id": 1, "bbox": [230.0, 171.0, 60.0, 65.0], "area": 1780, "segmentation": {"size": [512, 512], "counts": "kVc31n?2N2N3M2N2N2ACXA?f>CYA>f>DWA>g>DWA?f>CXA?f>CXA?f>?N2N1O0010O000000000000010O0000000M_AhNa>X130001O0001O000001O2N3M2N2O1N2N2N2N3M2N2N2O1N2N2N2N3M2N2NPj^3"}, "image_id": 485, "id": 8283}, {"iscrowd": 0, "category_id": 1, "bbox": [329.0, 180.0, 66.0, 58.0], "area": 1832, "segmentation": {"size": [512, 512], "counts": "UfT51n?2N3M2N2N2O1N2N2N2N2N3M2N2N2N2RAQOi>P1UAROi>T1N2N01O2001O0001O000000000001O0001O00N2O0O00000002N2N2O1N2N3M2N2N1O00001O102M2N2N2N2N2N2N2N2N3N1N]Yj1"}, "image_id": 485, "id": 8284}, {"iscrowd": 0, "category_id": 1, "bbox": [109.0, 187.0, 46.0, 59.0], "area": 1413, "segmentation": {"size": [512, 512], "counts": "dff11n?2O1N2N2N3M2N2a@BY?`0e@CX?d0N2N2O1N2JSOVAo0h>6N2N3N1N2N2N2N2N2N200000O1N2N2N2N2N2N3QO\\A=f>A\\A>e>@]A>e>A]AB]AKH>m>E`A9b>E`A9U?N2N2N2N2NmXb5"}, "image_id": 485, "id": 8285}, {"iscrowd": 0, "category_id": 1, "bbox": [259.0, 220.0, 25.0, 24.0], "area": 309, "segmentation": {"size": [512, 512], "counts": "YgQ41n?2N3M2N2N2N2N2O1N2N1O00000001O2O1N2N3M2N2N2N2N2Nkha3"}, "image_id": 485, "id": 8286}, {"iscrowd": 0, "category_id": 1, "bbox": [218.0, 224.0, 24.0, 22.0], "area": 279, "segmentation": {"size": [512, 512], "counts": "YW]31n?2N2N2O1N3M2N2N2N2N2O1O01N2N2O1N2N2N3M2N2N2N2OdhV4"}, "image_id": 485, "id": 8287}, {"iscrowd": 0, "category_id": 1, "bbox": [278.0, 232.0, 90.0, 86.0], "area": 2784, "segmentation": {"size": [512, 512], "counts": "WY[41n?2N2N2O1N2N3M2N2N2N2N2N2N2N2O1N3M2N2N2N2N00001O0MhN_AX1a>300001O000000000001O00000001O00002A\\A_Oe>`0]A^Oc>b0_A\\Oa>d0aA[O^>e0dAYO]>f0eAXO[>h0>001O0000001O2N2N2N2O1N2N1O00000000000010O2N2N2N3M2N2N2N2N2N2O1N2N2N2N3M2N2N2N2NThW2"}, "image_id": 485, "id": 8288}, {"iscrowd": 0, "category_id": 1, "bbox": [168.0, 233.0, 51.0, 52.0], "area": 1263, "segmentation": {"size": [512, 512], "counts": 
"YXd22m?2N2O1N2N2N2N2N3M2N2N2N2N2N2O1HSOZAo0e>ROYAP1e>6O0000000000001O000001O003M2N2N2N2N2N2O1N2N2N2N3M2N2N2N2N2N2N2O1NRXb4"}, "image_id": 485, "id": 8289}, {"iscrowd": 0, "category_id": 1, "bbox": [255.0, 252.0, 30.0, 29.0], "area": 453, "segmentation": {"size": [512, 512], "counts": "Yho32m?2N2O1N2N2N2N2N3M2N2N2O1N2N1O01O2N2N2N2N3M2N2O1N2N2N2N2N2NgWa3"}, "image_id": 485, "id": 8290}, {"iscrowd": 0, "category_id": 1, "bbox": [482.0, 258.0, 30.0, 57.0], "area": 871, "segmentation": {"size": [512, 512], "counts": "kXa71n?2N2N2O1N2N2N2N2N3M2N2O1N2N2N2N2N2N2N2N2O1N2N2N2N2N3M2N2O1mG"}, "image_id": 485, "id": 8291}, {"iscrowd": 0, "category_id": 1, "bbox": [196.0, 270.0, 48.0, 58.0], "area": 1487, "segmentation": {"size": [512, 512], "counts": "VYR31n?2O1N2N2N3M2N2N2g@@P?a0n@Bo>`0o@Bo>i0N2N2N2N3N1N2N2N2N2N3NO1O01O01O00000000101N3M2N2N2N2WOWA8k>FWA9k>DWA:k>DWA:k>DXA9j>EXA9j>EXA9Y?N2O1N3MafU4"}, "image_id": 485, "id": 8292}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 286.0, 31.0, 50.0], "area": 877, "segmentation": {"size": [512, 512], "counts": "o8`1_>20O1N2N2O1N2N2N001O00002N2N2N2N2N2O2M2N2N2N2N2N2N2N2N2N2N2N\\V`7"}, "image_id": 485, "id": 8293}, {"iscrowd": 0, "category_id": 1, "bbox": [414.0, 298.0, 59.0, 58.0], "area": 1673, "segmentation": {"size": [512, 512], "counts": "jY_61o?1N2N2N2N3M2N2N2N2N2N2N2O1O101O00000001O0000000001M2N2N2N2N2N2N2N2O1N2N3M2N1O001O000002XOgAF[>8hAE[>8gAF[>8gAF[>9fAE\\>9fAE\\>9fAE\\>9fAF[>8gAF[>8gAF[>8h0N2N3M2NVVc0"}, "image_id": 485, "id": 8294}, {"iscrowd": 0, "category_id": 1, "bbox": [223.0, 301.0, 62.0, 53.0], "area": 1616, "segmentation": {"size": [512, 512], "counts": "[j_32m?2N2N2N2N2N2N3N1N2N2N2N2N2N1O02N2N2N2OO000001O2Ng@^OU?b0l@_OR?a0n@AP??PACn>?PACn>i0N2N2N2N3N1LfN_A\\1_>2001O010O0000002N2N3M2N2N2N2O1VOQA`0Q?^OQA`0Q?^ORA?Y?N2N2N2N2N2N2O1NhUa3"}, "image_id": 485, "id": 8295}, {"iscrowd": 0, "category_id": 1, "bbox": [472.0, 305.0, 40.0, 44.0], "area": 1028, "segmentation": {"size": [512, 512], "counts": "UZ\\71n?2N3M2N2N2N2N2O1N2N2N3M2N2N2N2N2N2O1N2N1O002O1000N2N2N2N2N2N2O1N3M2N2N2N2N2N2NoE"}, "image_id": 485, "id": 8296}, {"iscrowd": 0, "category_id": 1, "bbox": [15.0, 321.0, 54.0, 52.0], "area": 1330, "segmentation": {"size": [512, 512], "counts": "Pk71n?2N2N2N2O2M2N2N2N2N2N2N2O2M2N2N2N2N2N1O0001O0001O000000000001O0001O0003M2N2N2N2N2N2O1N3M2N2N2N2N2N2N2O2M2N[Um6"}, "image_id": 485, "id": 8297}, {"iscrowd": 0, "category_id": 1, "bbox": [502.0, 337.0, 10.0, 21.0], "area": 119, "segmentation": {"size": [512, 512], "counts": "iZk73l?2N2N2N2N2O1N3M2N2^E"}, "image_id": 485, "id": 8298}, {"iscrowd": 0, "category_id": 1, "bbox": [359.0, 339.0, 62.0, 59.0], "area": 1568, "segmentation": {"size": [512, 512], "counts": "nkc51n?2N2N2N2N2N2O1N3M2K^Oi@d0V?4N2N2N2N00000001O00000001O0000002O1N1LoNXAQ1h>31O000001O0000000001O00002O1N2N2N2N2N2N2N3M2O1N2N2N2N2N2N3M2OiT]1"}, "image_id": 485, "id": 8299}, {"iscrowd": 0, "category_id": 1, "bbox": [99.0, 346.0, 24.0, 24.0], "area": 283, "segmentation": {"size": [512, 512], "counts": "Rka12m?2N2N2N2N2N3M2O1N2000000001N1N2N2N2N2N2N2N2N2OgTR6"}, "image_id": 485, "id": 8300}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 349.0, 13.0, 26.0], "area": 178, "segmentation": {"size": [512, 512], "counts": "m:j0W?N2N2N2N2N3M2N2N2N2O1N2NgTi7"}, "image_id": 485, "id": 8301}, {"iscrowd": 0, "category_id": 1, "bbox": [479.0, 354.0, 9.0, 9.0], "area": 42, "segmentation": {"size": [512, 512], "counts": "Wk_72m?2N2N1O0002N2N2Nld;"}, "image_id": 485, "id": 8302}, {"iscrowd": 0, "category_id": 1, "bbox": [49.0, 360.0, 52.0, 50.0], "area": 
1322, "segmentation": {"size": [512, 512], "counts": "Slh02n?1N2N2N2N3M2N2`@B[?c0N3M2O1N00002N2N2N2N2N2O1N0000001O00000001O00001O2N2O1N2N2N2N2N3M2N2N2O1N2N2N2N3M2N2N2NTT]6"}, "image_id": 485, "id": 8303}, {"iscrowd": 0, "category_id": 1, "bbox": [486.0, 367.0, 26.0, 39.0], "area": 562, "segmentation": {"size": [512, 512], "counts": "U\\c72m?2N2N2N2N2N2O1N3M2N2N2N1O000001O0001O2N2N2N3M2N2O1`D"}, "image_id": 485, "id": 8304}, {"iscrowd": 0, "category_id": 1, "bbox": [315.0, 382.0, 52.0, 52.0], "area": 1399, "segmentation": {"size": [512, 512], "counts": "hlm42m?2N2O1N2N3M2N2N2N2N2N2N2N2N2N2N2N2N2N2N1O002O1N2N2N2N00GdAoN^>o0dAoN^>o0dAoN^>o0dAoN^>o0;N20000000001O0O1N2N2N2N2N2N2Bb@7`?Gb@7e?O1N3MUSX2"}, "image_id": 485, "id": 8305}, {"iscrowd": 0, "category_id": 1, "bbox": [378.0, 391.0, 29.0, 29.0], "area": 445, "segmentation": {"size": [512, 512], "counts": "d\\m52m?2N2O1N3M2N2N2N2N2O2M2N2N1O0001O2N2N2O2M2N2N2N2N2N3N1N2N\\Sd1"}, "image_id": 485, "id": 8306}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 401.0, 59.0, 70.0], "area": 2107, "segmentation": {"size": [512, 512], "counts": "S=`1_>3M2N2O0O0001O2N2N3M00000000000000000000101N2N2N2N2N2N00000000001O000000000001O0001O2N2N2N3M2N2N2N2N2N2N2N2N3M2N2O1NnRR7"}, "image_id": 485, "id": 8307}, {"iscrowd": 0, "category_id": 1, "bbox": [423.0, 403.0, 23.0, 23.0], "area": 257, "segmentation": {"size": [512, 512], "counts": "jlc62n?1N3M2N2N2N2N2N2O10000010N1N2N2N2N2N2N2N2N2OobP1"}, "image_id": 485, "id": 8308}, {"iscrowd": 0, "category_id": 1, "bbox": [103.0, 407.0, 64.0, 56.0], "area": 1670, "segmentation": {"size": [512, 512], "counts": "cmc11n?2N2N2N2N2N2N2N2N3N1N2N2N2N2001O00000000N2N3M2N2N2N000001O00000000000001O000000000001O001O2N2N2N2N2O1N2N2N2N2N2N2N3M2N2N2N2N2NgR\\5"}, "image_id": 485, "id": 8309}, {"iscrowd": 0, "category_id": 1, "bbox": [425.0, 407.0, 46.0, 49.0], "area": 1013, "segmentation": {"size": [512, 512], "counts": "hmd61n?2N2N2N2O1N2N2N3M2N2N2N2N2N2N0001O000001O0000000000000001O000001O00000001O2N2N2N2N2Ee@N^?Od@O^?Od@0]?Ne@0\\bd0"}, "image_id": 485, "id": 8310}, {"iscrowd": 0, "category_id": 1, "bbox": [294.0, 409.0, 29.0, 29.0], "area": 446, "segmentation": {"size": [512, 512], "counts": "V]c42m?2N2N2N2N2N3M2N2N2N2N2O1N2N10O2N2N2N2N3M2N2N2N2N2N2N2N2NjRn2"}, "image_id": 485, "id": 8311}, {"iscrowd": 0, "category_id": 1, "bbox": [496.0, 413.0, 16.0, 24.0], "area": 239, "segmentation": {"size": [512, 512], "counts": "V]h72m?3M2N2O1N2N2N2N2N2N2O2OO1N2N2NPC"}, "image_id": 485, "id": 8312}, {"iscrowd": 0, "category_id": 1, "bbox": [65.0, 425.0, 37.0, 39.0], "area": 702, "segmentation": {"size": [512, 512], "counts": "gmP12m?2N2O1N2N2N3M2N2N2N2N2N3N1N2N2N21O0001O00N2N2N3M2N2N2O1N2N3M2N2N2N2N2O1NQb\\6"}, "image_id": 485, "id": 8313}, {"iscrowd": 0, "category_id": 1, "bbox": [354.0, 425.0, 59.0, 53.0], "area": 1570, "segmentation": {"size": [512, 512], "counts": "W^a51n?2N2N2N3N1N2N2N2N2N2N2N2K5O2M2N2N2N2N2N2N00000001O000001O0001O2N2N2N2N2N3M0001O000001O1O2N2N2N2N2N3N1N2N2N2N2N2N2N2NWRa1"}, "image_id": 485, "id": 8314}, {"iscrowd": 0, "category_id": 1, "bbox": [138.0, 443.0, 75.0, 69.0], "area": 2398, "segmentation": {"size": [512, 512], "counts": "Y_U21n?2N3M2N2N2N2N2N2O1N3M2N2N2N2N2000000N3N0O1O1O1O1O1O1O1O1O10O00000000IfNiAZ1X>gNfAY1Z>jNcAW1\\>60000000000001O0001O000002N2N2N2N2N2O1N3M2N2N2N2N2N2N2N3N1N2N2N2N2N2N2N2N\\Qe4"}, "image_id": 485, "id": 8315}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 458.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "Zno72d1"}, "image_id": 485, "id": 8316}, {"iscrowd": 0, 
"category_id": 1, "bbox": [493.0, 461.0, 19.0, 38.0], "area": 383, "segmentation": {"size": [512, 512], "counts": "nnf72m?2N2N2N3M2N2O1N2N2N2N2N2N2N2N2N2N2N2bA"}, "image_id": 485, "id": 8317}, {"iscrowd": 0, "category_id": 1, "bbox": [24.0, 464.0, 69.0, 48.0], "area": 1796, "segmentation": {"size": [512, 512], "counts": "]_<1n?2N2N2N2L4N2N2N2N2N2N2N2N2N000000011N2N2N2N2N2N2N2N2N1O11O1O1O1O1O1O2N1O1O1O1O1O1O1O1O1O1O1O1O1O2N1O1O1O1O1O1O1O1O1O1O1O1O1O2N1N2O1N2N2NVPa6"}, "image_id": 485, "id": 8318}, {"iscrowd": 0, "category_id": 1, "bbox": [393.0, 466.0, 52.0, 46.0], "area": 1367, "segmentation": {"size": [512, 512], "counts": "aoT62m?2N2N2N2N3M2N2N2N2J]Ol@e0R?6N2O0O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1001N2N2N2N2N2N2N2N2N3M2O1N2N2N2N2N2N2N2N2N2N2NhPQ1"}, "image_id": 485, "id": 8319}, {"iscrowd": 0, "category_id": 1, "bbox": [110.0, 474.0, 23.0, 22.0], "area": 250, "segmentation": {"size": [512, 512], "counts": "V_g11n?3M2N2N2O1N3M2N000010O0000001O2O1N3M2N2N2N2OnPm5"}, "image_id": 485, "id": 8320}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 497.0, 22.0, 15.0], "area": 191, "segmentation": {"size": [512, 512], "counts": "m?2m?2N1O100O1O1O1O1O100O1O1O1O12N1O1O1N2N3M2NW`d7"}, "image_id": 485, "id": 8321}, {"iscrowd": 0, "category_id": 1, "bbox": [489.0, 499.0, 23.0, 13.0], "area": 180, "segmentation": {"size": [512, 512], "counts": "ood71n?1O1O1O1O1O1O1O1O100O1O1O1001O1O1O1O1O1O2N1O"}, "image_id": 485, "id": 8322}, {"iscrowd": 0, "category_id": 1, "bbox": [430.0, 509.0, 6.0, 3.0], "area": 14, "segmentation": {"size": [512, 512], "counts": "o_g61n?1O100001OR`U1"}, "image_id": 485, "id": 8323}, {"iscrowd": 0, "category_id": 1, "bbox": [2.0, 0.0, 15.0, 7.0], "area": 57, "segmentation": {"size": [512, 512], "counts": "PP11o?1O1O1O1O1O1O00O1O1O1O1O1O10P`g7"}, "image_id": 486, "id": 8324}, {"iscrowd": 0, "category_id": 1, "bbox": [51.0, 0.0, 77.0, 73.0], "area": 2431, "segmentation": {"size": [512, 512], "counts": "P`i01o?1O1O2N1O1O1O1O1O1O1O1O1O1k@Ce>>YADf>=XAEg>:WAHh>7XAKg>6WALh>5VANh>f0O1O1O1O1O1O1O1O1O1O1O1O1O_NeA_1Y>`NiA`1\\>000000000001O000000000001O0000000000N2N3M2N2N2N2N2N2N2N2N2N2N2O1N2N3M2N2N2N2N2N2N`no5"}, "image_id": 486, "id": 8325}, {"iscrowd": 0, "category_id": 1, "bbox": [158.0, 0.0, 9.0, 4.0], "area": 21, "segmentation": {"size": [512, 512], "counts": "PP_21o?1O1O1O00O1O1O10P`\\5"}, "image_id": 486, "id": 8326}, {"iscrowd": 0, "category_id": 1, "bbox": [406.0, 0.0, 16.0, 8.0], "area": 70, "segmentation": {"size": [512, 512], "counts": "PP[61o?1O1O1O1O1O1O1OO1O100O1O1O1O1OQP]1"}, "image_id": 486, "id": 8327}, {"iscrowd": 0, "category_id": 1, "bbox": [426.0, 0.0, 15.0, 11.0], "area": 98, "segmentation": {"size": [512, 512], "counts": "SPe61n?2N2N3N1O1O1O00O1O1O2N2N2N2Ol_S1"}, "image_id": 486, "id": 8328}, {"iscrowd": 0, "category_id": 1, "bbox": [489.0, 0.0, 23.0, 18.0], "area": 257, "segmentation": {"size": [512, 512], "counts": "P`d71o?1O2N1O1O1O1O1O1O1O1O2N1O1O1O1OO1O1O100O1O1O1"}, "image_id": 486, "id": 8329}, {"iscrowd": 0, "category_id": 1, "bbox": [453.0, 1.0, 29.0, 32.0], "area": 444, "segmentation": {"size": [512, 512], "counts": "^`R71n?2N2N2N2N2N2N2O2M2N2N2N2N2N2O100001M2ZOk@>W?@l@=V?Al@=]?N2N2N2N2N2O1N[o>"}, "image_id": 486, "id": 8330}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 9.0, 84.0, 115.0], "area": 3248, "segmentation": {"size": [512, 512], "counts": 
"2M2N2N2O11O00000001O0000000001O000001O000000000001O01O00000000000000010O000000000000000010O0000000000000001O01O0000aAfNW>Z1hAgNX>Y1fAiNZ>W1dAkN\\>]10N2N2E`ATOb>i0`AUOb>i0`AUOb>j0_ATOc>j0_ATOc>j0;N2N2N2N2N3M2O1N2N2N2N2N2NRme6"}, "image_id": 486, "id": 8331}, {"iscrowd": 0, "category_id": 1, "bbox": [177.0, 10.0, 66.0, 63.0], "area": 2015, "segmentation": {"size": [512, 512], "counts": "jah22j>0WB2g=0WB2g=0WB2g=0WB2g=1VB1i=0VB1h=1VB1h=1VB2g=0WB2g=0WB2g=0WB2g=0WB2g=1VB0i=2UBOk=2TBMm=4QBLQ>4mALU>n03M2N2N2N2O1N2N2N3M2N2N2N2N2N01O0000000001O000001O000000000001O0000011N3M2N2N2N2N2N2N2O1N3M2N2NX_V4"}, "image_id": 486, "id": 8332}, {"iscrowd": 0, "category_id": 1, "bbox": [402.0, 12.0, 66.0, 51.0], "area": 1619, "segmentation": {"size": [512, 512], "counts": "PQY62m?2N2N2N2N2N2N3M2N2O1N2N2N2N2N2N2N2N2N002N2O10001O0000000000O1N2N3M2N2O1N2N2N2N2N2N2N2N1O00000001O3M2N2N20O1N2O1N2N2N2N2N3M2N2N2N2Nine0"}, "image_id": 486, "id": 8333}, {"iscrowd": 0, "category_id": 1, "bbox": [122.0, 13.0, 29.0, 29.0], "area": 438, "segmentation": {"size": [512, 512], "counts": "kPm11n?2N2N2N2O1N2N2N2N2N2N2N3M2N1O02N2N2O1N2N2N2N2N2N2N2N2N2NW_d5"}, "image_id": 486, "id": 8334}, {"iscrowd": 0, "category_id": 1, "bbox": [489.0, 19.0, 23.0, 53.0], "area": 747, "segmentation": {"size": [512, 512], "counts": "n`d71n?2a@Nm>4QANm>5PANm>4QANm>4QANm>4QANn>3PAOn>c0N2N2N2N3N10000001O0000N3M2N2XO"}, "image_id": 486, "id": 8335}, {"iscrowd": 0, "category_id": 1, "bbox": [123.0, 57.0, 19.0, 16.0], "area": 167, "segmentation": {"size": [512, 512], "counts": "Tbm12l?3N1O2N2N1O0000001O1010O1N2N1O2N2N2NRnh5"}, "image_id": 486, "id": 8336}, {"iscrowd": 0, "category_id": 1, "bbox": [447.0, 61.0, 65.0, 55.0], "area": 1639, "segmentation": {"size": [512, 512], "counts": "cbo62m?2N3M2N2O1N2N2N2N2N2N2N3M2N2O1N2N2N2N2N2N3M2N2N2001O00000N2N2N2YO]ANe>0]ANe>0]ANf>O\\AOf>O\\A0e>N]A0e>N]A0e>O]ANe>2[ALg>4YAJh>8>2N2O1N2N2N2N2N2N3M1O01O3M2N2O1N2N2N2N2N2N2N`M"}, "image_id": 486, "id": 8337}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 62.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "n12Pno7"}, "image_id": 486, "id": 8338}, {"iscrowd": 0, "category_id": 1, "bbox": [142.0, 69.0, 37.0, 36.0], "area": 658, "segmentation": {"size": [512, 512], "counts": "kRW21n?2N2N2N2N2N2N2N2N2N2N2N1O2N2N1O00000000000002N2N2N2N2N2N1O2N2N2N2N2N2N2N_]V5"}, "image_id": 486, "id": 8339}, {"iscrowd": 0, "category_id": 1, "bbox": [249.0, 72.0, 46.0, 50.0], "area": 1182, "segmentation": {"size": [512, 512], "counts": "lbl32m?2N2N2N2d@Hn>:o@In>9PAIn>9QAHm>:QAHm>:QAHm>h0M2O1N2N000000010O0000000002N2O2M2N2N2N2N2O0O001O2N2N2O2M2N2N2N2N3M2O1NT]\\3"}, "image_id": 486, "id": 8340}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 85.0, 36.0, 36.0], "area": 646, "segmentation": {"size": [512, 512], "counts": "Zco21n?2N2N2N2N2N2N3M2N2N2N2N2N2N1O00000000000011N2N2N2N2N2N2N2N2N2N2N2N2N2Nn\\^4"}, "image_id": 486, "id": 8341}, {"iscrowd": 0, "category_id": 1, "bbox": [277.0, 89.0, 76.0, 60.0], "area": 2240, "segmentation": {"size": [512, 512], "counts": "RdZ41n?2O1N2N2N2N2N2N3M2N2N2O1N2N2N2N3M1O001GnNcAR1]>POaAP1`>QO_An0a>TO]Al0c>801O2N2N2N2N2N21M2N2N00001O0[OcAH]>8eAGZ>:gADY>>kA@W>>kA@X>=jAAX>>iA@X>a0hA]OX>e0`03M2N2N2N2O100000001O00N2N3M2N2N2N2N2N2N2O1N3M2N2N2N2N2N`\\_2"}, "image_id": 486, "id": 8342}, {"iscrowd": 0, "category_id": 1, "bbox": [26.0, 99.0, 31.0, 34.0], "area": 429, "segmentation": {"size": [512, 512], "counts": "ZS=2m?2N2N2N2O2M2N2N2O1010O000001O01O0000010O000O1N2N3N1N2N2N3M2NT\\S7"}, "image_id": 486, "id": 8343}, {"iscrowd": 0, "category_id": 1, 
"bbox": [66.0, 113.0, 61.0, 55.0], "area": 1551, "segmentation": {"size": [512, 512], "counts": "[TQ12m?2N2O1N2N2N2N3M2N2N2O1N2N2O2l@UOP?n00001O000N2N2N000001O00000001O000001O00000001O000001O002N2O1N2N2N2N3M2N2N2O1N2N3M2N2N2N2N2Ok[P6"}, "image_id": 486, "id": 8344}, {"iscrowd": 0, "category_id": 1, "bbox": [251.0, 120.0, 14.0, 15.0], "area": 115, "segmentation": {"size": [512, 512], "counts": "ncm32m?2N3M2O1N2N2N02N2N2N2N2O2MR\\k3"}, "image_id": 486, "id": 8345}, {"iscrowd": 0, "category_id": 1, "bbox": [8.0, 122.0, 37.0, 36.0], "area": 672, "segmentation": {"size": [512, 512], "counts": "^T41n?2N2N2N2N3M2N2N2O1N2N2N2N3M1O000001O01O000001O2N2N2N2N2N2O2M2N2N2N2N2N2Ni[Y7"}, "image_id": 486, "id": 8346}, {"iscrowd": 0, "category_id": 1, "bbox": [262.0, 125.0, 14.0, 15.0], "area": 99, "segmentation": {"size": [512, 512], "counts": "UTS42n?1N2N2N3M0000001O03M2N2N2Noke3"}, "image_id": 486, "id": 8347}, {"iscrowd": 0, "category_id": 1, "bbox": [315.0, 129.0, 75.0, 63.0], "area": 2236, "segmentation": {"size": [512, 512], "counts": "Yem42n?1N2N2N2N2N3M2N2N2N200000000N3M1O0001O0000000002N2N2O10000@QOnAo0P>SOPBm0n=UOSBk0j=WOTBk0j=WOTBj0k=XOSBh0m=ZOQBf0o=]OnAc0R>_OlAb0T>_OjAa0V>d0002N3M2N2N00000001O01O002N2N2N3M2N2N2O1N2N2N2N2N2N3M2N2N2O1N2N2N2N2NXkl1"}, "image_id": 486, "id": 8348}, {"iscrowd": 0, "category_id": 1, "bbox": [202.0, 140.0, 16.0, 16.0], "area": 145, "segmentation": {"size": [512, 512], "counts": "cTU32m?2N2N2N3N1N2N1O01O2N2N2O1N3M2N^kb4"}, "image_id": 486, "id": 8349}, {"iscrowd": 0, "category_id": 1, "bbox": [237.0, 145.0, 32.0, 31.0], "area": 520, "segmentation": {"size": [512, 512], "counts": "odf32n?1N2N2N2N3M2N2N2N2N2N2N2O1N2N1O01O2N2N2N2N2N2N2O2M2N2N2N2N2N2NQ[i3"}, "image_id": 486, "id": 8350}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 161.0, 4.0, 8.0], "area": 20, "segmentation": {"size": [512, 512], "counts": "Q58i?N2N2Nmjm7"}, "image_id": 486, "id": 8351}, {"iscrowd": 0, "category_id": 1, "bbox": [361.0, 165.0, 61.0, 74.0], "area": 2134, "segmentation": {"size": [512, 512], "counts": "lfd51n?2N2DLi@7T?Kj@7T?Kj@7T?Kj@7T?Kk@6S?=N2N2O1N2N3M2N2N2N0011N2NBiNVBU1k=lNUBR1k=POVBm0j=UOUBj0k=XOSBh0m=ZOQBf0o=\\OoAd0Q>_OlAa0T>AjA`0U>BiA>W>e000001O000000001O2O1N3M2N2N2N2N2N2N2N2N2N3N1N2N2Ei@JY?4i@JY?4i@JY?4;N2NUj\\1"}, "image_id": 486, "id": 8352}, {"iscrowd": 0, "category_id": 1, "bbox": [132.0, 170.0, 47.0, 48.0], "area": 1133, "segmentation": {"size": [512, 512], "counts": "lUR23l?2N2N2N2N2N2O1N2N3M2N2N2N2N2N2N2O1N3M2N2O1001O00000N2N2N2N2N2N3M2N2O1N2N2N2N2N2N3M2N2N2O1N2NkYV5"}, "image_id": 486, "id": 8353}, {"iscrowd": 0, "category_id": 1, "bbox": [303.0, 172.0, 22.0, 20.0], "area": 237, "segmentation": {"size": [512, 512], "counts": "beg42m?3M2O1N2N2N3M1001O11O0001N1OO01O2N2N2N2O2MYZm2"}, "image_id": 486, "id": 8354}, {"iscrowd": 0, "category_id": 1, "bbox": [34.0, 174.0, 44.0, 34.0], "area": 764, "segmentation": {"size": [512, 512], "counts": "bUa02m?2N2N2N2Z@Gc?=O000000000O1N2N2N2O10000000000O1N2N2N2N2N1O02N2N2N2N2N2N2N2N2N2N2O1N2N2N2N2NRjh6"}, "image_id": 486, "id": 8355}, {"iscrowd": 0, "category_id": 1, "bbox": [246.0, 191.0, 32.0, 35.0], "area": 549, "segmentation": {"size": [512, 512], "counts": "ZVk33l?2N2N2N2O1N2N2N3M2N2N2N20000010O0000000N2N2N2N3M2O1N2N2N2N2N2N]id3"}, "image_id": 486, "id": 8356}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 197.0, 38.0, 66.0], "area": 1248, "segmentation": {"size": [512, 512], "counts": "d6c1\\>00IeAjN[>V1gAhNY>X1iAfNW>Z17001O000001O00000000002N2N2N2N2N2N2O1N2N2N2N2N2N2N3M2N2N2N2N2N2NVi\\7"}, "image_id": 486, "id": 8357}, {"iscrowd": 
0, "category_id": 1, "bbox": [340.0, 203.0, 7.0, 8.0], "area": 32, "segmentation": {"size": [512, 512], "counts": "]VZ52m?2N3O00O1N2NbYb2"}, "image_id": 486, "id": 8358}, {"iscrowd": 0, "category_id": 1, "bbox": [293.0, 207.0, 61.0, 54.0], "area": 1541, "segmentation": {"size": [512, 512], "counts": "ffb42m?2N2O1N2N2N3M2N2O11O00000001O00000001OO1N2N3M2N2N2N2N2O1N2N2N3M2N2O11O00000001O00N2N3M2N2N2O1N2N2TOo@f0S?XOo@f0X?M2N2N2N2O1N2N2N2N3M`h^2"}, "image_id": 486, "id": 8359}, {"iscrowd": 0, "category_id": 1, "bbox": [118.0, 213.0, 37.0, 35.0], "area": 628, "segmentation": {"size": [512, 512], "counts": "YWk11n?2N2N3M2N2O1N2N2N2N2N2N3M1O01O01O000000000001O2N2N2O1N2N3M2N2N2N2N2N2N2OmXb5"}, "image_id": 486, "id": 8360}, {"iscrowd": 0, "category_id": 1, "bbox": [352.0, 216.0, 6.0, 6.0], "area": 18, "segmentation": {"size": [512, 512], "counts": "iV`52m?200000O1NUi\\2"}, "image_id": 486, "id": 8361}, {"iscrowd": 0, "category_id": 1, "bbox": [247.0, 230.0, 57.0, 60.0], "area": 1757, "segmentation": {"size": [512, 512], "counts": "bgk32m?3M2]@JY?8e@JY?8e@JY?8e@JY?`0N3M2N2N2N2N20000000000000000N2N2N2N2N2O1N200000000000000010M2N2N2N2N2N2N2N2N2GPA_OR??PA_OR??PA_OR??9N2N2O1N2N2N2N2NkgW3"}, "image_id": 486, "id": 8362}, {"iscrowd": 0, "category_id": 1, "bbox": [145.0, 237.0, 20.0, 20.0], "area": 211, "segmentation": {"size": [512, 512], "counts": "fgX21n?2N2N2N3M2N2N2N2N2OO2O1N2N2N2N3M2N2N2NZX]5"}, "image_id": 486, "id": 8363}, {"iscrowd": 0, "category_id": 1, "bbox": [217.0, 242.0, 31.0, 31.0], "area": 495, "segmentation": {"size": [512, 512], "counts": "og\\32m?2N3N1N2N2N2N2N2N2N2N2N3M2O1N11N2N2N2N2N3M2N2N2N2O1N2N2N2N2NogS4"}, "image_id": 486, "id": 8364}, {"iscrowd": 0, "category_id": 1, "bbox": [127.0, 259.0, 61.0, 51.0], "area": 1332, "segmentation": {"size": [512, 512], "counts": "dho12m?2N2N2N2N2N2O1N2N2N3M2N2N2N2N2N2N2N2000001O0000000000000001O0N2N2N2N2N2N00001O2N2O1N3M2N2N2N2N2N200000N2N1O000002N2N2N2NQgQ5"}, "image_id": 486, "id": 8365}, {"iscrowd": 0, "category_id": 1, "bbox": [207.0, 271.0, 66.0, 63.0], "area": 1812, "segmentation": {"size": [512, 512], "counts": "jhW32m?2N2N2N2N2N3M2O1N2N2N2N201O000000000001O01O0000000000010O0N2N2N2N2N2N2O2M2N2N2N2000001O00O1N2N3M2N2N2N2O1N2N2N3XOl@?V?_Ol@?V?_Ol@?\\?N3M2O1N2N2N2NWVg3"}, "image_id": 486, "id": 8366}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 293.0, 23.0, 23.0], "area": 267, "segmentation": {"size": [512, 512], "counts": "\\YW72m?2N2N3N1N2N2N2N2O1000001OO1N3M2O1N2N2N2N2N2N^V="}, "image_id": 486, "id": 8367}, {"iscrowd": 0, "category_id": 1, "bbox": [485.0, 297.0, 27.0, 46.0], "area": 698, "segmentation": {"size": [512, 512], "counts": "Pjb72m?2N2O1N2N2N2N2N2N2N2N2N3M2N2O1N2N2N2N2N2N1O00000001OhF"}, "image_id": 486, "id": 8368}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 301.0, 22.0, 34.0], "area": 375, "segmentation": {"size": [512, 512], "counts": "]9h0Y?01O00000000000000000N2N2N2N3M2O1N2N2N2N2Nmed7"}, "image_id": 486, "id": 8369}, {"iscrowd": 0, "category_id": 1, "bbox": [167.0, 302.0, 30.0, 31.0], "area": 488, "segmentation": {"size": [512, 512], "counts": "kic22m?2N2N2N2N2N3M2N2N2N2O1N2N2N2000N2N2N2O1N2N3M2N2N2N2N2N2N2NSVm4"}, "image_id": 486, "id": 8370}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 318.0, 46.0, 52.0], "area": 1211, "segmentation": {"size": [512, 512], "counts": "gjo21n?2N2N2N2N3M2N2N2N2N2N2O1N2N3M2N2N2N2N2N2N2N2O2M001O0001O2N3M2N2N2UOWACVA;l>CVABWABXA;j>CXA;X?N2N2N2N2N2N2ORUY4"}, "image_id": 486, "id": 8371}, {"iscrowd": 0, "category_id": 1, "bbox": [271.0, 321.0, 20.0, 20.0], "area": 164, "segmentation": 
{"size": [512, 512], "counts": "^jW41n?2N2N2N3N1N000000000000000011N2N3M2N2NjU^3"}, "image_id": 486, "id": 8372}, {"iscrowd": 0, "category_id": 1, "bbox": [448.0, 330.0, 47.0, 47.0], "area": 1123, "segmentation": {"size": [512, 512], "counts": "Q[P71n?2N2N2N2N3N1N2N2N2N2N3M2O1N2N2N2N3M2N2O1N000000000002O2N11OO1N2O1ISAXOP?e0RAYOP?e07N2N2N2O2M2N2N2N2N2N3NnT8"}, "image_id": 486, "id": 8373}, {"iscrowd": 0, "category_id": 1, "bbox": [111.0, 332.0, 36.0, 34.0], "area": 583, "segmentation": {"size": [512, 512], "counts": "cjg12m?2N2N2N2O1[@Fa?>N2N2000000000000N3M2N2O1000000000N2N2N2N2N2N2N2N2N2N2N2N2NQUf5"}, "image_id": 486, "id": 8374}, {"iscrowd": 0, "category_id": 1, "bbox": [82.0, 334.0, 35.0, 34.0], "area": 545, "segmentation": {"size": [512, 512], "counts": "jZY12m?2N2N2O0O002N2N2N2N2O1N3M2N2O11O000001O0001M2N2N2O1N2N2N3M2N2N2N2O1NmTU6"}, "image_id": 486, "id": 8375}, {"iscrowd": 0, "category_id": 1, "bbox": [17.0, 340.0, 31.0, 31.0], "area": 520, "segmentation": {"size": [512, 512], "counts": "Qk82m?2N3M2N2O1N2N2N2N3M2N2O1N2N2N10O2N2N2N2N2O1N3M2N2N2N2N2O2M2NmdW7"}, "image_id": 486, "id": 8376}, {"iscrowd": 0, "category_id": 1, "bbox": [506.0, 347.0, 6.0, 11.0], "area": 36, "segmentation": {"size": [512, 512], "counts": "P[m71n?2N2N2N2N2TE"}, "image_id": 486, "id": 8377}, {"iscrowd": 0, "category_id": 1, "bbox": [154.0, 350.0, 47.0, 52.0], "area": 1284, "segmentation": {"size": [512, 512], "counts": "f[]21n?3M2N2N2N2N2N2O2M2N2N2N2N2N3N1N2N2N2N2N2N3M2O1N1O001O01O2N2N2N2N3M2O1N2CTACn>;TACn>;UABn>;TACn>;=N2N2O1N2N3M^Tk4"}, "image_id": 486, "id": 8378}, {"iscrowd": 0, "category_id": 1, "bbox": [409.0, 369.0, 46.0, 44.0], "area": 1091, "segmentation": {"size": [512, 512], "counts": "Vl\\62m?2N3M2N2O1N2N2N3M2N2O1N2N2N3M2N2N2O1N1O1O00000001O1010000O1N3N1N2IRAYOP?e07N2N3M2N2O1N2N3M2N2N2OiSl0"}, "image_id": 486, "id": 8379}, {"iscrowd": 0, "category_id": 1, "bbox": [499.0, 369.0, 13.0, 25.0], "area": 167, "segmentation": {"size": [512, 512], "counts": "lki71n?2N2O1N2N3M2N2N2N2N2N2N2^D"}, "image_id": 486, "id": 8380}, {"iscrowd": 0, "category_id": 1, "bbox": [13.0, 377.0, 29.0, 29.0], "area": 364, "segmentation": {"size": [512, 512], "counts": "Ql61n?3M2N2N2N2N2N2N200000000000000000000000O1N2N2N3M2N2N2N2ObcZ7"}, "image_id": 486, "id": 8381}, {"iscrowd": 0, "category_id": 1, "bbox": [101.0, 378.0, 53.0, 56.0], "area": 1464, "segmentation": {"size": [512, 512], "counts": "^lb11n?2N2N2L4N3M2O100N2N2N2O1N3M2N2N2N2N2N2N2N2O2M20000000000000001ON3M2N2O1N2N2N2N2N3M2WOk@d0Z?N2O2M2N2N2N2N2N2N2NTcb5"}, "image_id": 486, "id": 8382}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 392.0, 1.0, 3.0], "area": 3, "segmentation": {"size": [512, 512], "counts": "X<3eco7"}, "image_id": 486, "id": 8383}, {"iscrowd": 0, "category_id": 1, "bbox": [358.0, 392.0, 56.0, 49.0], "area": 1375, "segmentation": {"size": [512, 512], "counts": "P]c52m?3M2N2N2O1N2N3M2N2N2N2O0O00000000000010O02N3M2N2N2N2N2O1O2O0001O0000000N2N2N3M2N2N2O1N2N2N3M2N2N2N2O1N3M2N2N2Nkb`1"}, "image_id": 486, "id": 8384}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 402.0, 23.0, 46.0], "area": 573, "segmentation": {"size": [512, 512], "counts": "b<[1f>000000N2N2N2N2N2N2O1N2XOl@a0V?]Ol@a0[?N2N2N2N2N2N2N2N2NdRd7"}, "image_id": 486, "id": 8385}, {"iscrowd": 0, "category_id": 1, "bbox": [52.0, 404.0, 61.0, 59.0], "area": 1725, "segmentation": {"size": [512, 512], "counts": "o\\j01n?2N2O1N3Y@I`?=N2N2N2N2N2N2O2O01O000000N2N2N2N3N1N2N2N2000000000001O000001O000000000N2N2N2O1N3M2N2N2N2N2N2N2N2N2O2M2N2N2N2N2NWRW6"}, "image_id": 486, "id": 8386}, {"iscrowd": 0, 
"category_id": 1, "bbox": [506.0, 409.0, 6.0, 11.0], "area": 42, "segmentation": {"size": [512, 512], "counts": "m\\m73m?1N2N2N2N1WC"}, "image_id": 486, "id": 8387}, {"iscrowd": 0, "category_id": 1, "bbox": [429.0, 419.0, 24.0, 25.0], "area": 285, "segmentation": {"size": [512, 512], "counts": "[mf61n?2N2N3M2O1N2N2N2N2000001O0000N3M2N2N2N2N2O1N2N_Rm0"}, "image_id": 486, "id": 8388}, {"iscrowd": 0, "category_id": 1, "bbox": [319.0, 431.0, 56.0, 53.0], "area": 1448, "segmentation": {"size": [512, 512], "counts": "Uno41n?2N2N2N2N2N2N2N2N2N2N2N2N002N2N2N2N2N2N2N2N20000000000000N2O10000000000O1N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2NaQT2"}, "image_id": 486, "id": 8389}, {"iscrowd": 0, "category_id": 1, "bbox": [445.0, 440.0, 10.0, 10.0], "area": 58, "segmentation": {"size": [512, 512], "counts": "lmn62m?2N2N2N2OO1O3M2O1NURl0"}, "image_id": 486, "id": 8390}, {"iscrowd": 0, "category_id": 1, "bbox": [12.0, 442.0, 63.0, 54.0], "area": 1492, "segmentation": {"size": [512, 512], "counts": "\\^61n?2N3M200000000O1N2O10N2N3M20O1HKb@7\\?Kb@7\\?Kb@7\\?8O1N2N2N3M2N2N2N2N2N2N2N2N2N2O1000000002N0O2M2N2N2N2N2N2N2N2N2N2N2N2N2N2O1N2N2N2N2N2N2NXQj6"}, "image_id": 486, "id": 8391}, {"iscrowd": 0, "category_id": 1, "bbox": [385.0, 450.0, 23.0, 24.0], "area": 269, "segmentation": {"size": [512, 512], "counts": "ZnP61n?3M2N2N2N2N2N2O1N201O000001M2N2N2N2N2N2N2N2O`ac1"}, "image_id": 486, "id": 8392}, {"iscrowd": 0, "category_id": 1, "bbox": [401.0, 471.0, 9.0, 10.0], "area": 48, "segmentation": {"size": [512, 512], "counts": "lnX62l?3N2N2N02N2N2N2NVab1"}, "image_id": 486, "id": 8393}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 478.0, 36.0, 34.0], "area": 688, "segmentation": {"size": [512, 512], "counts": "o>Q1n>11O1O1O00O11O1O1O1O1O1O2N1O1O1O1O1O1O1O1O1O1O1O1O1O1O2N1O1O1O1O1O1O1OQ`]7"}, "image_id": 486, "id": 8394}, {"iscrowd": 0, "category_id": 1, "bbox": [254.0, 482.0, 52.0, 30.0], "area": 841, "segmentation": {"size": [512, 512], "counts": "o_o31n?1O1O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O11O1O1O1O1O1O1O1O1O1O1O1O1O1N2N2N2N2N2N2N2NZ`V3"}, "image_id": 486, "id": 8395}, {"iscrowd": 0, "category_id": 1, "bbox": [491.0, 486.0, 21.0, 24.0], "area": 255, "segmentation": {"size": [512, 512], "counts": "^oe72m?2N2N2N2N2N2N2N2000001O0000N2N3M2N2N2N2N^@"}, "image_id": 486, "id": 8396}, {"iscrowd": 0, "category_id": 1, "bbox": [350.0, 493.0, 23.0, 19.0], "area": 242, "segmentation": {"size": [512, 512], "counts": "e__51n?2N2N2N2N2O1N3M2N20O1O1O1O2N1O1O1O1O1O1O1N3MUPU2"}, "image_id": 486, "id": 8397}, {"iscrowd": 0, "category_id": 1, "bbox": [407.0, 510.0, 4.0, 2.0], "area": 6, "segmentation": {"size": [512, 512], "counts": "oo[61n?1001OQPb1"}, "image_id": 486, "id": 8398}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 232.0, 40.0, 62.0], "area": 1619, "segmentation": {"size": [512, 512], "counts": "_7f1W>3N3M2001O0N3L3M3N3L3N3M200010O0010O001kNYAP1f>mN^AR1h>1O01O01O01O010O01O01O010O0[OVA1m>MUA0o>LTA2n>KVA1m>MUA1RW\\7"}, "image_id": 487, "id": 8399}, {"iscrowd": 0, "category_id": 1, "bbox": [19.0, 275.0, 91.0, 97.0], "area": 3587, "segmentation": {"size": [512, 512], "counts": "Zk91l?3N2M4M2M4M2M3N3L3N3L3M3N3N110O01O01O01ON3M2M4M0O010O01O010O102M4M2M4M21O01O01O01O01O0M013L3N201O010ON3N01N3L3N2M4M2M4L3N2M4M20010O01N1N2M4M2M4L3N2M4M2M4M2M3N3L3N3L3M3N3L3N3L3N2M4MTgX6"}, "image_id": 487, "id": 8400}, {"iscrowd": 0, "category_id": 1, "bbox": [102.0, 302.0, 61.0, 96.0], "area": 3398, "segmentation": {"size": [512, 512], "counts": 
"h[c12k?3N3L3lNH]B;`=H^B;_=G^B<_=H^B;_=G^B<_=H]B;`=H^B;_=G^BM2M3N3L3M4M2M3N3L3M4M2M3N3L3M4M2M3NXV^5"}, "image_id": 487, "id": 8401}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 318.0, 13.0, 35.0], "area": 255, "segmentation": {"size": [512, 512], "counts": "n9S1m>01L3N2M4M2M4L3N2M4M2M4MoUi7"}, "image_id": 487, "id": 8402}, {"iscrowd": 0, "category_id": 1, "bbox": [153.0, 337.0, 65.0, 94.0], "area": 3149, "segmentation": {"size": [512, 512], "counts": "Tl\\21m?3L3EMf@6X?Lf@7V?;M3N3M201O01ON3L3QBlNP=V1mBmNP=W1mBlNo7YAFj>7YAGj>6YAFj>7YAGj>6a0M3Nicj3"}, "image_id": 487, "id": 8404}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 387.0, 18.0, 65.0], "area": 721, "segmentation": {"size": [512, 512], "counts": "SM2M3N3M2M4O010N1N2M4TO\\A8f>F\\A11Df>7\\A3m>KVA2m>KUA2n>KVA2]?MXcf7"}, "image_id": 487, "id": 8405}, {"iscrowd": 0, "category_id": 1, "bbox": [253.0, 444.0, 68.0, 66.0], "area": 2605, "segmentation": {"size": [512, 512], "counts": "eon34d?8H801O00000001O0000\\O]OkAd0S>^OlAb0S>AgAc0X>_O`Ah0`>YO_Ag0`>=O1O101N1O1O100O1O1O100O2N1O100O1O1O11O101N1O1K6F7I7100O2N2N2OO1O2N1O1O1O1KTAUOj>m0XAQOd>T140010O00000000000oNZAd0S?H8I7HRRo2"}, "image_id": 487, "id": 8406}, {"iscrowd": 0, "category_id": 1, "bbox": [130.0, 448.0, 87.0, 64.0], "area": 2804, "segmentation": {"size": [512, 512], "counts": "Z_Q23k?3L3M3N3L3N3L3M3N3N11O010O01N1M3N3L3N3L3M3N01N3O1010O010O00010O0010O0lN`Ah0a>TObAm0]>POgAo0Y>oNiAQ1c>10O010O00010O0010O0010O00010O010O00010O0010O0010O0nNWAm0n>1O010O01O00001O00001O001O0N2N3L3M3N3L3N3L3MkPc4"}, "image_id": 487, "id": 8407}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 453.0, 21.0, 30.0], "area": 424, "segmentation": {"size": [512, 512], "counts": "U>j0V?10O01O01O010O010O00001M2M4M2N2M4M2N3L3NeQe7"}, "image_id": 487, "id": 8408}, {"iscrowd": 0, "category_id": 1, "bbox": [339.0, 453.0, 91.0, 59.0], "area": 2302, "segmentation": {"size": [512, 512], "counts": "QoY51n?1N3N2M2O2M5L2M3I\\Oo@e0o>^On@e0o>8M2N3M30O010O01O10hN\\AR1j>0O1WAlNd>T1ZAoNf>U10100O0010mN[Aj0d>TO_Ak0b>QOaAP1h>0O01000O01O1O001O001O1O001O1O001O001O1O001O001O1ON2O1N2O1001O001O1O001O001O1O001O1O001O001O1O001O001O1O001O1F]@5c?J^@6g?O1N1NW`X1"}, "image_id": 487, "id": 8409}, {"iscrowd": 0, "category_id": 1, "bbox": [319.0, 456.0, 27.0, 27.0], "area": 427, "segmentation": {"size": [512, 512], "counts": "kno41m?2N2N3M2M4M2N3M2N3O010O00010O010O01O0N3M2M4M2N2N3M2Mdab2"}, "image_id": 487, "id": 8410}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 502.0, 10.0, 10.0], "area": 73, "segmentation": {"size": [512, 512], "counts": "f?:f?01O001O00001MX@Lh?2Z@Nl?0Q`j7"}, "image_id": 487, "id": 8411}, {"iscrowd": 0, "category_id": 1, "bbox": [103.0, 503.0, 32.0, 9.0], "area": 146, "segmentation": {"size": [512, 512], "counts": "noc12k?3N2N2001O00001O001O00001O001O00001O001ON200001O00001O001O0000QPl5"}, "image_id": 487, "id": 8412}, {"iscrowd": 0, "category_id": 1, "bbox": [162.0, 504.0, 13.0, 8.0], "area": 80, "segmentation": {"size": [512, 512], "counts": "l_a24i?3O11O0000001O00001O0000UPX5"}, "image_id": 487, "id": 8413}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 22.0, 54.0], "area": 594, "segmentation": {"size": [512, 512], "counts": "0f1Z>M4M2M4M2M3N3L3N3L3N2O2O001N1N2M4M2M4M2M3Njod7"}, "image_id": 490, "id": 8414}, {"iscrowd": 0, "category_id": 1, "bbox": [309.0, 0.0, 14.0, 5.0], "area": 40, "segmentation": {"size": [512, 512], "counts": "P`j41o?001O00001O00001O001O00O1MS`n2"}, "image_id": 490, "id": 8415}, {"iscrowd": 0, "category_id": 1, "bbox": [339.0, 0.0, 37.0, 16.0], "area": 325, "segmentation": {"size": [512, 512], 
"counts": "P`Y51o?001O00001O001O00001V@Lf?701O001O00001O001O00001O001O00001O001O00N2N2M3N2N2MSPT2"}, "image_id": 490, "id": 8416}, {"iscrowd": 0, "category_id": 1, "bbox": [398.0, 0.0, 53.0, 45.0], "area": 1235, "segmentation": {"size": [512, 512], "counts": "TPW61m?3M201O004L1O1O00000000O10000000TAES>8hAJX>7dAM[>3cAO]>1XAD5>c>2ZA1e>e001O00001O001O00001O001ON2M3N2M3N2M3N2N2M3N2M3N2M3N2M3N2M3NR`n0"}, "image_id": 490, "id": 8417}, {"iscrowd": 0, "category_id": 1, "bbox": [458.0, 0.0, 54.0, 70.0], "area": 2250, "segmentation": {"size": [512, 512], "counts": "gPU73k?2M4M2M4M20001O010O01O_AB`=?]BD`=>]BE`=?]BCa=?TBWOM>k=>VBVOM>k=?TBWOM>n=;SBOm=R1010O010O00010O01O0ON3N12O000O2O0O2O00M3]OSBQOo=m0TBoNo=n0TBPOn=n0TBoNo=n0UBoNm=o0c0M3N2M3N2M3N2M3N2D`@5c?"}, "image_id": 490, "id": 8418}, {"iscrowd": 0, "category_id": 1, "bbox": [97.0, 1.0, 60.0, 57.0], "area": 2013, "segmentation": {"size": [512, 512], "counts": "Ya`11l?3N3L3N3L3N2M4M2N3L3N2M4M2M4M2M301O0010O0010O0010O0010O0010O0010O010O00010O010OaNbAZ1d>L3N3L3N2010O01O01ON3L3N3M2M3N3L3N3L3N]_a5"}, "image_id": 490, "id": 8419}, {"iscrowd": 0, "category_id": 1, "bbox": [19.0, 28.0, 67.0, 92.0], "area": 3541, "segmentation": {"size": [512, 512], "counts": "Sb92l00Q>3^AN11_>3^AN03^>3^AM22^>3^AN02_>a0_AB^>Q1N3L3QBZNb=h1[B[Nb=i1[BZNb=h1[B[Nb=S2N3M2M4M210O00010O010O00010O01L3N2N3L3N3L3N2M4O0010OM3N3FkAdNX>Z1kAcNV>[170O0103L3N3M200010O01O01ON3M2M4M2M3N3L3N3M2M4M2Mlnd6"}, "image_id": 490, "id": 8420}, {"iscrowd": 0, "category_id": 1, "bbox": [150.0, 28.0, 75.0, 87.0], "area": 3232, "segmentation": {"size": [512, 512], "counts": "RR[22l?2M4M2M3N3M2M4M2M3lAVOP=m0mBUOP=n0mBUOS=l0jBWOV=h0gB[OY=f0dB]O[=c0bB@_=?_BD`==]BEd=:YBIg=8VBKg=7VBLj=V10O00010O010O00010O010O010O00001L3N3L3N2N3L3N3L3N0O0100O0100O010O01210O00010O0O2M2M4M2M3N3L3N3M2M3N3L3N3L3Nn^_4"}, "image_id": 490, "id": 8421}, {"iscrowd": 0, "category_id": 1, "bbox": [197.0, 47.0, 69.0, 104.0], "area": 3493, "segmentation": {"size": [512, 512], "counts": "hcR31l?3N3L3N2M40O010O00010OO2M2O1g@_OS?h0O0nNXOSBNa0l0Y=XOTBNc0j0W=ZOTBOc0h0DTOX=?TC>AVOX=?UC<@XOY=?UCf0g<^OWCc0ge0SBWOT>d0mAXOW>f0lAVOX>g0jAUOZ>M^Ai0:WO^>h0?M2N2N3N1N2N3M2O1N2N3M_mj3"}, "image_id": 490, "id": 8422}, {"iscrowd": 0, "category_id": 1, "bbox": [496.0, 51.0, 16.0, 30.0], "area": 308, "segmentation": {"size": [512, 512], "counts": "XRh72l?2M4M2M4M2M4M2M3O20O010O01O010YN"}, "image_id": 490, "id": 8423}, {"iscrowd": 0, "category_id": 1, "bbox": [71.0, 62.0, 63.0, 59.0], "area": 1958, "segmentation": {"size": [512, 512], "counts": "mbS12l?3L3N3L3N2M4M2M4M2010O0010O0010O0010O0O2M0O012M4M21O010O010O00010O01O0^AkNW>W1fAmNX>U1eAmN\\>\\1O01O01O010O01O01OUOeA1\\>LgA4X>IkA7V>FlA:T>CPB=o=ASB?n=]OVBc0i=[OYBd0b>N3L3N3L3N2N3L3Noll5"}, "image_id": 490, "id": 8424}, {"iscrowd": 0, "category_id": 1, "bbox": [264.0, 70.0, 60.0, 89.0], "area": 3145, "segmentation": {"size": [512, 512], "counts": "ZST41W10^=3^B0_=3_BO_=4]B0_=3_BO_=3^B1^=3_BO_=3^B0_=4^BO_=3^B0_=3_B0^=3^B0_=3_BO_=[1L3O2O00010O010O00010O010O00010O010ON2N3L3N3L3N2M4M2M4M2M3aNgAU1\\>gNgAV1c>N3M2010O00N3L3N3L3N2M4M2M4M2M3NTmm2"}, "image_id": 490, "id": 8425}, {"iscrowd": 0, "category_id": 1, "bbox": [318.0, 97.0, 51.0, 63.0], "area": 1754, "segmentation": {"size": [512, 512], "counts": "gTo42l?2M3N3L3M3N3L3N3L3N2M4M2^OnNSBV1i=mNUBU1i=nNSBU1l=lNRBW1m=iNPBZ1P><0O0O1N1N3N2M4M2M4M20010O010O00010O010M2M3N3L3N2M4M2M4L3N2M4M2M4Mf\\W2"}, "image_id": 490, "id": 8426}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 123.0, 46.0, 67.0], "area": 1644, "segmentation": {"size": [512, 512], "counts": 
"l3]1b>20000000000000000000000000O100000000000O1000O1N2N2N2N2N2N2N2N00002N2N2N2N2^Of@;\\?Cf@;a?N2N2N2N2N2NjjX7"}, "image_id": 490, "id": 8427}, {"iscrowd": 0, "category_id": 1, "bbox": [67.0, 128.0, 61.0, 85.0], "area": 3244, "segmentation": {"size": [512, 512], "counts": "ndQ13i?4M4K4M3f@_OQ?e0n@]On>g0PA\\Oj>h0VAYOf>T1L3O2N1O2O0O0001NeA`NX>d101O01O00[B]Nmj0^AYOa>h0\\AZOd>f0YA^Og>m00010N1M4M2O2O00010O010O01O01O010O01L3[O^AJd>3`AJc>3_AJd>4_AId>3_AJd>4_AId>3f0NjkZ1"}, "image_id": 490, "id": 8429}, {"iscrowd": 0, "category_id": 1, "bbox": [29.0, 134.0, 14.0, 14.0], "area": 111, "segmentation": {"size": [512, 512], "counts": "\\d>2m?2N2N2N2N2N20O0O2N2N2N2N2Ne[Z7"}, "image_id": 490, "id": 8430}, {"iscrowd": 0, "category_id": 1, "bbox": [132.0, 168.0, 69.0, 67.0], "area": 2170, "segmentation": {"size": [512, 512], "counts": "iVR21n?2M3N2N2N2N2N2N2N2N2N2EYOZAi0d>YOZAi0c>ZO[Ah0c>ZO[Ah0c>l0\\AWOd>h0YA[Og>P10O00010O010OM3N3M2010O01O01O010O001L3N2N3L3N3ESA^OP?`0SA]Oo>`0;M2N3L3N2MWZc1"}, "image_id": 490, "id": 8432}, {"iscrowd": 0, "category_id": 1, "bbox": [509.0, 184.0, 3.0, 7.0], "area": 12, "segmentation": {"size": [512, 512], "counts": "men71l?3N3WJ"}, "image_id": 490, "id": 8433}, {"iscrowd": 0, "category_id": 1, "bbox": [25.0, 190.0, 27.0, 27.0], "area": 390, "segmentation": {"size": [512, 512], "counts": "Zf<2m?2N2N2N2N2N2N2N2N2N2N2N2O10O1N2N2N2N2N2N2N2N2N2N2N2MgiU7"}, "image_id": 490, "id": 8434}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 194.0, 81.0, 85.0], "area": 3495, "segmentation": {"size": [512, 512], "counts": "kgo21l?4M2N3M2M4M2N2M4M2O2QAVOf>j0XAXOi>h0TAZOl>n0O010O010O010O01O01O010O010O01O010O0O1NYOROaBm0]=XO]Bk0a=YO[Bj0b=ZOZBh0f=XOXBj0g=XOUBl0j=TOTBn0k=e0O000O20O010O00N3M2M4M2M40O00010O0M4M2N3L3N2M4_OTBjNn=T1TBjNo=R1UBjNn=T1TBjNn=S1b0M2N3L3N3L3N2M4M2N3L3N2M4Mdig3"}, "image_id": 490, "id": 8435}, {"iscrowd": 0, "category_id": 1, "bbox": [492.0, 209.0, 20.0, 28.0], "area": 371, "segmentation": {"size": [512, 512], "counts": "TWf71l?3N3M2M4M2M3N3M201O01O010O01O01O010O01ZI"}, "image_id": 490, "id": 8436}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 210.0, 8.0, 16.0], "area": 72, "segmentation": {"size": [512, 512], "counts": "b6`0a?N2N2N2N2N2N2NXik7"}, "image_id": 490, "id": 8437}, {"iscrowd": 0, "category_id": 1, "bbox": [6.0, 210.0, 33.0, 34.0], "area": 583, "segmentation": {"size": [512, 512], "counts": "QW31n?2N2N2N2N2N2N2N2N2N3M2N2N2N2N2O1000N2N2N2N2N2N2N2N2N2N2N2N2N2N2NoX\\7"}, "image_id": 490, "id": 8438}, {"iscrowd": 0, "category_id": 1, "bbox": [294.0, 218.0, 101.0, 70.0], "area": 3555, "segmentation": {"size": [512, 512], "counts": "jWc41m?2M4M2M3N3L3N3M2M4O01O010O01O01O010O01O0N2N3L3N3M2010O00010O010O00010O010ON2M4M2O2O010O01O01O010O01O01O010O01O01O010O01O010O01O010O01O01O010O01O01O01ROdA7[>GhA8Y>DjABlA?T>]OPBb0P>\\ORBe0m=YOVBf0b>1O01O010O010O00010O0M4M2M3N3M2M4M2M3NiWj1"}, "image_id": 490, "id": 8439}, {"iscrowd": 0, "category_id": 1, "bbox": [474.0, 250.0, 38.0, 38.0], "area": 1013, "segmentation": {"size": [512, 512], "counts": "]X]73m?2J6N3L3N3L3N2N0O10O010O10O10O01N1N3N110001N01N3O01O010O010O01O01O010O01O0QH"}, "image_id": 490, "id": 8440}, {"iscrowd": 0, "category_id": 1, "bbox": [280.0, 266.0, 57.0, 50.0], "area": 1574, "segmentation": {"size": [512, 512], "counts": "PY\\41n?1O2M3GJe@8Y?Je@8X?Jg@8W?Jg@8W?9O2N2O1O1000OO2N2N2N2N2000O010000000N0O10002O10O1000O10000000O10O1000000O1N1O2N2M3N2N3M2Bd@5^?Id@5^?Id@4f?N1OeVg2"}, "image_id": 490, "id": 8441}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 268.0, 7.0, 13.0], "area": 49, "segmentation": {"size": [512, 
512], "counts": "\\8=d?N2N2N2N2N2N_Wl7"}, "image_id": 490, "id": 8442}, {"iscrowd": 0, "category_id": 1, "bbox": [128.0, 283.0, 81.0, 67.0], "area": 2771, "segmentation": {"size": [512, 512], "counts": "XYP24i?3N2M4M2O20O00010OWACo==oAER>;jAHV>8hAKW>6eAM\\>2bA0^>0_A4a>L[A7e>c0L3N2N3O010O01O01O010O01O0O101O01O01O010ON3M200010O010O00010O0ROgA5Z>HhA9W>DlAAPB?P>^ORBb0n=[OVBd0k=YOWBh0h=UO[Bk0^>0O01O01O010O01O01O01O01O010O01O000M4M2M4M2M3N3L3MmUg4"}, "image_id": 490, "id": 8443}, {"iscrowd": 0, "category_id": 1, "bbox": [507.0, 303.0, 5.0, 15.0], "area": 45, "segmentation": {"size": [512, 512], "counts": "iim73k?3L3N3L3aF"}, "image_id": 490, "id": 8444}, {"iscrowd": 0, "category_id": 1, "bbox": [461.0, 329.0, 51.0, 49.0], "area": 1574, "segmentation": {"size": [512, 512], "counts": "kjV72k?3N2M4M2M4M2j@_Oj>d0TA_Ol>a0PABP?h00O00010O010O0001M2O20O0010O0010O0010O0010O0010O0010O0010O00010O010O00010O010O00010XE"}, "image_id": 490, "id": 8445}, {"iscrowd": 0, "category_id": 1, "bbox": [222.0, 333.0, 102.0, 57.0], "area": 2771, "segmentation": {"size": [512, 512], "counts": "S[_31m?3M2M4M2M3N3M2M4N110O00010O010O010O00010O010O010OO1N30O01O01O010N1N3O000010O010O01O01O010O010O00010O010O010O00010O010O01O01O010O010O01OYOm@a0Z?O01O0N3L3N2M4IQOYAQ1d>8M2O2O01ON3M2M4N1010O0O1N3[OeA@^>>dA@_>=dA@^>=eA@^>>eA_O^>>dA_O_>>d0M2N3L3N2M`em2"}, "image_id": 490, "id": 8446}, {"iscrowd": 0, "category_id": 1, "bbox": [2.0, 341.0, 83.0, 77.0], "area": 3086, "segmentation": {"size": [512, 512], "counts": "d[17h?2N2N2N2N2O1O1O1O1N200l@[Oh>OUAf01]Oh>OUAf01]Oh>Q1N2N2N2N2N2N2N2N2N2N2O10000000N200000000N2N2N0000002N1O0000000000000NcAbN]>^120000LbAgN^>Y140000000000000002N2N2N2N2M3N2N2N2N2N2N2N2N2N2N2N2N2N2N2N1OhTe6"}, "image_id": 490, "id": 8447}, {"iscrowd": 0, "category_id": 1, "bbox": [96.0, 343.0, 111.0, 71.0], "area": 3294, "segmentation": {"size": [512, 512], "counts": "W[`13l?1N3N2M2O2M3N1N3N2N1N3O100O01000O01000O01000O01000O01000k@VOS?l0O1000O010N2M2O200O0100N1N3N2O001000O010000O01000O010000OO2N2N1N3N2N11000O01000O10O10O10O10O10O1000O001N2M2O0O010O0100O010O3N2M2O2N2M2O2M3N1N3N2M3N1N3N2N1N3N2M2O2M_Th4"}, "image_id": 490, "id": 8448}, {"iscrowd": 0, "category_id": 1, "bbox": [89.0, 365.0, 16.0, 15.0], "area": 118, "segmentation": {"size": [512, 512], "counts": "ck\\11n?2M3N1N3O1000O10O1000O1M2O2M3N\\T[6"}, "image_id": 490, "id": 8449}, {"iscrowd": 0, "category_id": 1, "bbox": [228.0, 386.0, 72.0, 62.0], "area": 2440, "segmentation": {"size": [512, 512], "counts": "j\\b31m?3M2M3N3M2M4M2j@]Om>f0PA\\OQ?i001O010O010O001M2N2N3O010O010O01O01O010O010OO2M2N201O010O010O01O01OWATO^>m0_AUOb>j0\\AXOc>j0ZAYOc>S1M4M2N3L3O110O010O010O00010OVOmAGT>6nAKQ>3QBLQ>0SBMo=1SBMo=1TBLo=0TBMo=1TBLo=1SBMo=0TBMP>0Q1NgcY3"}, "image_id": 490, "id": 8450}, {"iscrowd": 0, "category_id": 1, "bbox": [53.0, 389.0, 75.0, 76.0], "area": 2866, "segmentation": {"size": [512, 512], "counts": "hmj02l?3M2O2M2N3N2M2O2M3M2O2M2N3N20O01M2O2M3N110O0N3N2M2N3N1N3M3N1N3O00100O0100O010M2N3N0O1O01O01O01O01O011N2N3N2M2O2M3M2O2M2N3N2M2N3N1N3M3N1N3M2O2M3M2O2M_co5"}, "image_id": 490, "id": 8451}, {"iscrowd": 0, "category_id": 1, "bbox": [111.0, 428.0, 67.0, 79.0], "area": 2813, "segmentation": {"size": [512, 512], "counts": "dng13j?3N2M4M2M4M2M3N3L3N3L3N2N3L3N3L31O010OSBbNX=_1eBcN\\=\\1aBhN^=W1aBkN_=R1aBRO_=l0`BVO`=k0\\BXOe=g0YB\\Of=d0WB_Oi=]10O0PNWBi1j=SNZBm1l=0010O0010O010O00O2L10O010O0100O3N3L3N3L3N2N3L3N3L3N2M4M2M4M2M3N3L3N3L[bV5"}, "image_id": 490, "id": 8452}, {"iscrowd": 0, "category_id": 1, "bbox": [165.0, 447.0, 75.0, 65.0], "area": 2613, "segmentation": {"size": [512, 
512], "counts": "gob23k?2N3L3N3M2M4M2N2M4M2N3M2010O001O00001O001O001O00001O001OO1M3N2N2M3N2N2M3N2N2M3N2O100001O001O001O00O1N0O010001N3N2N3L3N3M2M4M2N2M4M2N3L3N3M2M3N3M2M4MkaW4"}, "image_id": 490, "id": 8453}, {"iscrowd": 0, "category_id": 1, "bbox": [233.0, 449.0, 68.0, 63.0], "area": 2291, "segmentation": {"size": [512, 512], "counts": "nod32k?3N2M3M3N2M31O001O00001O00N2N2N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2O1001O00001O001O00001M2N3L3N2M4M2M4L3O110O0FTAAl>9ZAFf>7]AHe>6\\AGg>9ZADi>;>1OCa@9c?1OO2M2MbQY3"}, "image_id": 490, "id": 8454}, {"iscrowd": 0, "category_id": 1, "bbox": [385.0, 452.0, 66.0, 60.0], "area": 2360, "segmentation": {"size": [512, 512], "counts": "ooP61l?3M3N2M3N2M3N2M3N2M3FXOZAj0c>YOZAj0d>:M3N2M3N2N21O00001O00001O001O00001O001O00001O001O00001O001M]AgN`>Y130O010O012O1000M4M2M3N3L3N3L3N2M4M2M4M2M3N3LiQn0"}, "image_id": 490, "id": 8455}, {"iscrowd": 0, "category_id": 1, "bbox": [461.0, 470.0, 46.0, 42.0], "area": 1313, "segmentation": {"size": [512, 512], "counts": "]oV73k?3L3M4h@Eg>=VAFg>=VAFh>=UAEk>h0001O001O00001O00M3M3N2M3001O001O00001O001O00001O00001M2M4M2M3N3L3M3N3L3M4M2MSQ2"}, "image_id": 490, "id": 8456}, {"iscrowd": 0, "category_id": 1, "bbox": [59.0, 0.0, 57.0, 33.0], "area": 1246, "segmentation": {"size": [512, 512], "counts": "T`m02n?3M3L4M2M4M3M2M4M3M2M0100O10000O10000O100O10000O10000O10000O100O10000O10000O100O10000O10000O10000O101N3N3M3L3N3MboU6"}, "image_id": 494, "id": 8457}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 57.0, 64.0], "area": 1857, "segmentation": {"size": [512, 512], "counts": "0P2P>O1O1N2O1O1O1O1O1O1O1O1N2O1O1O1O1O1O1O1N2O1O1O1O1O1O1O1O1N2O1O1O1O1O1O1O1O1N2O1O1O1O1O1O1O1O1N2O1O1O1O1O1O1O1N2OQ`S7"}, "image_id": 496, "id": 8458}, {"iscrowd": 0, "category_id": 1, "bbox": [31.0, 228.0, 114.0, 117.0], "area": 6965, "segmentation": {"size": [512, 512], "counts": "\\h?2m?2N2N2M3N1O2N2N2M3N2N2N1_AWOj=k0TBWOj=k0SBXOm=h0QBZOo=f0oA\\OP>e0nA]OP>d0oA^Oo=d0nA_OP>c0nA_OP>c0nA_OP>Y1N3N2N2N2N2N2M3N2N2N2N1O2M3N2N2N2N2N2000000O10O100000000000O10000000O100000OO2N2N2M3N2N2N2N1N3N20000000000O010O1N2N2N2M3N2N1O2N2M3N2N2N2N2M2O2aNbAY1d>N2N1O2M3N2N2N2N1N3N2N2N2N2M2O2N2N2N2M3NjVg5"}, "image_id": 496, "id": 8459}, {"iscrowd": 0, "category_id": 1, "bbox": [138.0, 0.0, 71.0, 61.0], "area": 2404, "segmentation": {"size": [512, 512], "counts": "VPU22l?3M2N3O0]@H^?=01O001O001O001f@^OT?b0j@@V?f0O001O001O00001_AXOh=h0VBZOj=g0SB\\Ol=d0RB^On=c0oA_OQ>a0lACS>>jADV>Q101O001O001O00001O001O001O0000N2N2M3N2N2N2N2M3N2N2N2N2M3N2N2N2M3N2N2N2N2M3N2N2N2N2MS`g4"}, "image_id": 497, "id": 8460}, {"iscrowd": 0, "category_id": 1, "bbox": [213.0, 2.0, 63.0, 86.0], "area": 2892, "segmentation": {"size": [512, 512], "counts": "WbZ32l?3M2M4M2N2N3L3N3M2N3L3N3WOnN`BT1^=nN`BT1^=oN_BT1]=oN`BT1^=nN`BU1]=nN`BT1^=nN`BU1\\=oN`BT1^=i0O000010O010O010O010O01O01O0O2M2M4M2N3M2ESB`NP>]1TB`No=]1SBaNo=]1^=S1000O01N2N10O2M2O2M3N2M2O2M3N1N3N2M2O2N2M2O2M3NO010O010O010O010O0100O0100O010O03N2M2O2M3N1N3N2M2O2M3N1O2M3N1N3N2M2O2M3NRo[2"}, "image_id": 497, "id": 8462}, {"iscrowd": 0, "category_id": 1, "bbox": [109.0, 54.0, 95.0, 64.0], "area": 3058, "segmentation": {"size": [512, 512], "counts": "Zbf11m?3N2M2O2N2M2O2M3f@^OS?d0j@_OS?i0N1O2M3UAnNe>S1ZAoNf>U110O10O10O1000O0100O0N02O2000O01000O010000O010000O0100N1O2N2O10O1000O01000O001N2M20N2O2M3N2N1N3N2N11000O01000O10O1N1N100O102N2M2O2M3N2M2O2N2M2O2M3N1O2M3N2M2O2M^mi4"}, "image_id": 497, "id": 8463}, {"iscrowd": 0, "category_id": 1, "bbox": [371.0, 57.0, 51.0, 94.0], "area": 3059, "segmentation": {"size": [512, 512], "counts": 
"bbi54h?7I5UABm=c0nABn=b0nABn=b0nABn=b0mAFP>U100fBWN^;aAOU>m01000000000000000K507I=C7IO1000O100000000000000000O100000O100009G=C=Cc0SA@j>b0QA\\O13m>m0M3N1O2eAjNk=V1RBlNo=T1oAnNP>R1nAQOR>o0lAROU>l0kAVOT>h0lA[OT>c0lA^OU>`0jACV>=hAEW>S10O10O10O10O10O10O10O1N2M2O2M3N1O2M3N110O1000O10O10O10O10O10O10O10O100O0N3N1N010O0100O010O02O2M3N2M2O2N2M2O2M3N1N3N2N1N3Ng[a3"}, "image_id": 497, "id": 8468}, {"iscrowd": 0, "category_id": 1, "bbox": [70.0, 114.0, 73.0, 75.0], "area": 2683, "segmentation": {"size": [512, 512], "counts": "hTS11m?3N1N3N2M2O2N2M2O2M3N2GZOTAh0k>ZOSAh0j>9N1N3N2M2O2N1N10101000O10O10O10O10O10O10O10O1000O10O10O10O10O10O10O10O1000O10O10iNdAh0]>UOeAl0Z>SOhAm0X>POkAn0W>POjAn0X>POkAn0e>M2O2N2M3N1N3Kc@D_?:4N3N2M2O2NoZh5"}, "image_id": 497, "id": 8469}, {"iscrowd": 0, "category_id": 1, "bbox": [289.0, 133.0, 105.0, 60.0], "area": 3168, "segmentation": {"size": [512, 512], "counts": "Pe`45k?5K4L5K4K6K4L1O0O100000O10O100000O10O1000O10O100000O10^OoNTBR1l=SOoAm0Q>WOkAi0T>]OgAb0Z>b00000O10O100000O10O1000O1000O102N4O2M2K6K5K4L2NO010000000O0100000O10O100000O10O1000O1000O1000O1000O1000O1000O3N1O1O1N2O1O2N1O1N2O1O1O1O2M2O1O1O1O^jj1"}, "image_id": 497, "id": 8470}, {"iscrowd": 0, "category_id": 1, "bbox": [32.0, 167.0, 76.0, 77.0], "area": 2738, "segmentation": {"size": [512, 512], "counts": "WV`02m?2M3N1O2M3N1N3N2M2O2O1O0100000O01000O01000O0EZO]Af0`>]O`Ac0^>_OaAa0]>AdA?Z>CcA`0[>BcA`0Z>BdA`0[>BcA`0Z>e0N1O2000O01000O10O1000O01000O01000O01000O10O10O10O1000O0fNiAl0W>QOlAo0T>oNmAQ1S>mNPBS1P>kNRB4Hc0Y>VOQB6Hb0g>[O\\Ab0g>\\OZAc0g>[O\\Ab0S?N1N3N2N2M2O2M3NRiY6"}, "image_id": 497, "id": 8471}, {"iscrowd": 0, "category_id": 1, "bbox": [414.0, 175.0, 28.0, 34.0], "area": 652, "segmentation": {"size": [512, 512], "counts": "cU_61o?4K6K4L5K4L5J3N0000O010000000O0100000O102N5K4L5J5L5K4LdiR1"}, "image_id": 497, "id": 8472}, {"iscrowd": 0, "category_id": 1, "bbox": [142.0, 176.0, 89.0, 66.0], "area": 2615, "segmentation": {"size": [512, 512], "counts": "TVW21n?2N1N3N2M2O2N2M3N1N3N2N1N3N2N20O1000O01000O10O1000O01000O10O10O10O10M3N1O2M3N20O10O10O10O1000O10O10O10O1000O10O10O10O1000O10O10M3N1O2M100O01000O012M2O2N2M2O2M3N2N1N3N2M2O2M3N2NcY\\4"}, "image_id": 497, "id": 8473}, {"iscrowd": 0, "category_id": 1, "bbox": [505.0, 183.0, 7.0, 20.0], "area": 86, "segmentation": {"size": [512, 512], "counts": "iel74k?4M4L1O0O3N4UJ"}, "image_id": 497, "id": 8474}, {"iscrowd": 0, "category_id": 1, "bbox": [471.0, 210.0, 41.0, 61.0], "area": 1616, "segmentation": {"size": [512, 512], "counts": "Pg[71g?3]@1_?=K3MO02O4L5K4K6K4L3M000O01005K4K2OO100000O10O1000O10O4M4L5K4L5J5L5K4L4K6K4L5KjG"}, "image_id": 497, "id": 8475}, {"iscrowd": 0, "category_id": 1, "bbox": [400.0, 221.0, 38.0, 61.0], "area": 1636, "segmentation": {"size": [512, 512], "counts": "TWX62m?6K4L5H7K6K5K4L5J5L5K5K1O0O10O3N1O00O0100000O10O100000O13L6K4L5K5J5ASACR?8RADR?8`0KkgT1"}, "image_id": 497, "id": 8476}, {"iscrowd": 0, "category_id": 1, "bbox": [357.0, 227.0, 37.0, 55.0], "area": 1303, "segmentation": {"size": [512, 512], "counts": "]gb55k?4L5FFd@9\\?60003M4L5J5L4L5K4K4MO1000O1000O1000O10O10002N5J5L4L5K4L4K6K4L5K4K`gj1"}, "image_id": 497, "id": 8477}, {"iscrowd": 0, "category_id": 1, "bbox": [119.0, 234.0, 66.0, 67.0], "area": 2296, "segmentation": {"size": [512, 512], "counts": "Shk12l?2N3M2N3L3N3M2N3M2N2M4N110O0010O010O01dAPOg=o0WBSOi=n0TBUOl=j0RBXOn=i0oAZOQ>e0lA^OT>b0jAAV>?gACY>S1O010O010O01O010O010O010O010O0N2N3M1O000O10001O3M2N3L3N3M2N3M2N3L3N3M2N2N3M2N\\XS5"}, "image_id": 497, "id": 8478}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 252.0, 60.0, 60.0], "area": 
2168, "segmentation": {"size": [512, 512], "counts": "l7m0S?1000O01000O01000O01M3N2M2O200O010000O01000O010000O01000O010000O01N2N1N3N2M3N1O2M3N11000OO2UOhAJ[>3gALZ>3hAJ[>3hAKZ>3gAK[>3hAKZ>3hAJ[>3gAL[>2gALZ>3hAJ[>3gAL[>2j0N3NhgQ7"}, "image_id": 497, "id": 8479}, {"iscrowd": 0, "category_id": 1, "bbox": [346.0, 270.0, 18.0, 25.0], "area": 293, "segmentation": {"size": [512, 512], "counts": "aX]52n?4L5J5L5K2N00O0100000O013M4L5K4K5LnfY2"}, "image_id": 497, "id": 8480}, {"iscrowd": 0, "category_id": 1, "bbox": [58.0, 293.0, 74.0, 71.0], "area": 2622, "segmentation": {"size": [512, 512], "counts": "PZm02l?3N2N1N3N2M2O2N2M3N1N3N2N1N3N2M3N1O2M30O010000O010000O010000OmN\\Aj0e>TO]Al0c>QO_AP1a>nNaAQ1h>0O1000O10O10O10O1M3N1N3N2N1N3N2M3N1O2M3O01000O10O1POkA2W>LkA2V>MkA2W>KlA2W>LkA2V>LlA2W>LkA2W>LkA2W>KlA2V>MkA2W>KlA2W>LkA2V>MlA1R?N[em5"}, "image_id": 497, "id": 8481}, {"iscrowd": 0, "category_id": 1, "bbox": [228.0, 300.0, 64.0, 56.0], "area": 1759, "segmentation": {"size": [512, 512], "counts": "oYb32m?2M3N2N1N3N2M2O2N2M2O2M3N2N101O10O1000O10O10O10O1000O10O10O10O1000O10O10O10O10O1000OO2N2N1N3N2000N1O2M3N1O2M3N2M2O2N2M2Fd@O^?Ne@O^?Oc@0_?Nc@OZU^3"}, "image_id": 497, "id": 8482}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 301.0, 25.0, 68.0], "area": 793, "segmentation": {"size": [512, 512], "counts": "]9l1U>00O0WNlA?1e0V>XOlAi0T>UOnAj0R>UOPBk0P>ROSBn0m=POTBP1m=mNVB4Bg0W>TOYB3Bi0g>UO[Al0e>RO]Al0m>N2M2O2M3N1N3N2M3M2O2M3NUUc7"}, "image_id": 497, "id": 8483}, {"iscrowd": 0, "category_id": 1, "bbox": [155.0, 311.0, 53.0, 51.0], "area": 1494, "segmentation": {"size": [512, 512], "counts": "aj]22l?3N1O2M3N1N3N2M3N1N3N2N1N3N2M2O2M3N11000O10O1000O01000O01000M2O2O1O01N2N1O2M3N2M2O2M3N1N3N2M2O2@d@9b?N3N2M2Oceg4"}, "image_id": 497, "id": 8484}, {"iscrowd": 0, "category_id": 1, "bbox": [214.0, 345.0, 53.0, 51.0], "area": 1489, "segmentation": {"size": [512, 512], "counts": "c[[32l?2O2M3N1O2M3N1N3N2M3N1O2M3N1N3N2M2O20000O01000O01000O01000O1M2O2O10O1N2N1N3N2M2O2M3N1O2M3N2M2Ad@9^?Ec@9d?N2M2O2NaTj3"}, "image_id": 497, "id": 8485}, {"iscrowd": 0, "category_id": 1, "bbox": [31.0, 353.0, 73.0, 60.0], "area": 2474, "segmentation": {"size": [512, 512], "counts": "ak?3j?4[@KY?9c@LX?`0M3N201O01O01O01O0001O01O0001O01O01O01O0001O01O0001O0^ATOQ>k0kAZOT>g0gA]OY>c0dA@\\>`0`ADa>n0O0001O01O00010O0000010O0001O01O00010K4M12L4M3L5K41O0001O01ON3K4M3L4L5L3L4L4Mhd[6"}, "image_id": 497, "id": 8486}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 372.0, 22.0, 20.0], "area": 222, "segmentation": {"size": [512, 512], "counts": "kkm23l?1N3N2M2O2O10O10O10O10O10O10O10N2N1N3N2M2OSTg4"}, "image_id": 497, "id": 8487}, {"iscrowd": 0, "category_id": 1, "bbox": [412.0, 373.0, 29.0, 27.0], "area": 568, "segmentation": {"size": [512, 512], "counts": "j[^64l?5K4L4K6K00000O01000000O01000000O01000000O0100004K6K4L5KiSS1"}, "image_id": 497, "id": 8488}, {"iscrowd": 0, "category_id": 1, "bbox": [453.0, 374.0, 39.0, 59.0], "area": 1371, "segmentation": {"size": [512, 512], "counts": "WmR73i?4M3M3L5L3M3M4K4M3M4K4M3M3M4O00010O00010O00010ON2M2N001N5L3M3L5L3M3M4K4M3M3F_@1lS:"}, "image_id": 497, "id": 8489}, {"iscrowd": 0, "category_id": 1, "bbox": [384.0, 383.0, 36.0, 60.0], "area": 1371, "segmentation": {"size": [512, 512], "counts": "Y\\P66j?4L4L5K4K6K4L4L5J5FcNiAb1R>600O0100000O01000001O4K2O003M5J5L5K4L4L5J5L5K4L4K]b]1"}, "image_id": 497, "id": 8490}, {"iscrowd": 0, "category_id": 1, "bbox": [175.0, 393.0, 53.0, 45.0], "area": 1236, "segmentation": {"size": [512, 512], "counts": 
"mlg21n?2M2O2N2M3N1N3N2M2O2M3N1O2M3O0010000O01000O01000O010000O01M3N1010000O01N2M2O2M3N1O2M3N1N3N2M2O2M3N2N1N3NTc]4"}, "image_id": 497, "id": 8491}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 399.0, 4.0, 9.0], "area": 24, "segmentation": {"size": [512, 512], "counts": "_<9h?N1N3N`cm7"}, "image_id": 497, "id": 8492}, {"iscrowd": 0, "category_id": 1, "bbox": [102.0, 405.0, 51.0, 51.0], "area": 1276, "segmentation": {"size": [512, 512], "counts": "^]c11n?2M3N1O2M3N2M2O2N2M2O2M3N2N1N3N2M2O2O1O10O10O10O1000O10O10OoNZAi0f>TO\\Al0e>RO]An0k>000O01O1N1N3N2N2M2O2M3N1O2M3N2M2O2N^Rc5"}, "image_id": 497, "id": 8493}, {"iscrowd": 0, "category_id": 1, "bbox": [68.0, 413.0, 16.0, 14.0], "area": 137, "segmentation": {"size": [512, 512], "counts": "S]R13k?2M3O2O0010O010O010O00010M2N3MPce6"}, "image_id": 497, "id": 8494}, {"iscrowd": 0, "category_id": 1, "bbox": [503.0, 421.0, 9.0, 20.0], "area": 100, "segmentation": {"size": [512, 512], "counts": "bmk72l?3N1N3M2O2M3M2O2jB"}, "image_id": 497, "id": 8495}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 423.0, 66.0, 86.0], "area": 2811, "segmentation": {"size": [512, 512], "counts": "f=m1Q>210000O010000O010000O010000O01M3N2N1N3N2M2O2M2O00O0100O0100O0100O0100O010O0100O0100O0100O3N1O2M3N2M2O2M3N1O2M3N2M2O2N2M2O2M3N2N1N]bn6"}, "image_id": 497, "id": 8496}, {"iscrowd": 0, "category_id": 1, "bbox": [312.0, 425.0, 54.0, 60.0], "area": 1596, "segmentation": {"size": [512, 512], "counts": "b^l42m?2N2M2O2N2M3N1N3N2N1N3N2M3N1O2M3EoNdAR1[>oNcAT1[>nNcAT1Z>;N1O2M3NO3N1O2M2O0O2O1O2M3N2M2O2N2M2O2N2M3N1N3N2N1N3N2M3N1O2M3N1N3NWbX2"}, "image_id": 497, "id": 8497}, {"iscrowd": 0, "category_id": 1, "bbox": [479.0, 436.0, 17.0, 24.0], "area": 253, "segmentation": {"size": [512, 512], "counts": "Xn_71l?4K4M3M4K4M30010O0001N1M3M3L5L3L]b7"}, "image_id": 497, "id": 8498}, {"iscrowd": 0, "category_id": 1, "bbox": [152.0, 440.0, 50.0, 52.0], "area": 1252, "segmentation": {"size": [512, 512], "counts": "a^\\22m?2N2M2O2N2M3N1N3N2N2M2O2N2M2O2M3N2N1O200000O01000O0100000nNYAi0g>VO[Aj0e>SO^Am0b>QO_AP1i>O10OO2N2M3N1O2M3N2N1N3N2M2O2N2M3NZaj4"}, "image_id": 497, "id": 8499}, {"iscrowd": 0, "category_id": 1, "bbox": [60.0, 454.0, 28.0, 28.0], "area": 416, "segmentation": {"size": [512, 512], "counts": "e^n02l?3N1O2M3N1N3N2M3N1N300O010000O010O1M2O2M3N1N3N2N2M2O2M`ac6"}, "image_id": 497, "id": 8500}, {"iscrowd": 0, "category_id": 1, "bbox": [448.0, 465.0, 25.0, 17.0], "area": 277, "segmentation": {"size": [512, 512], "counts": "f^P73m?4K5L1OO10O1000O10O1000O10O1000O10O1000O102N4L4LUQc0"}, "image_id": 497, "id": 8501}, {"iscrowd": 0, "category_id": 1, "bbox": [101.0, 470.0, 76.0, 42.0], "area": 1727, "segmentation": {"size": [512, 512], "counts": "Xob12k?3N2M4M2M4M2M3010O01O01O01j@[OP?d0n@_OQ?h00010O010O0010O0010O0010O0010O0010O0010O0010O0010O01O00001O001O00001O001O00001O001O00001O001O00001O001O00001O0N3M2M3N3L3NaPW5"}, "image_id": 497, "id": 8502}, {"iscrowd": 0, "category_id": 1, "bbox": [299.0, 479.0, 25.0, 23.0], "area": 289, "segmentation": {"size": [512, 512], "counts": "Xoe42l?3N1N3N2N1N3O1O10O1000O01000O10O100M2O2M3N1O2M3Nf`m2"}, "image_id": 497, "id": 8503}, {"iscrowd": 0, "category_id": 1, "bbox": [449.0, 486.0, 51.0, 26.0], "area": 933, "segmentation": {"size": [512, 512], "counts": "joP72h?1[@3`?700000000O1000000O100000000O100000000O100KAi@?W?50000O100000000O1000000O100000000O100004L00003M5K5K4LU`5"}, "image_id": 497, "id": 8504}, {"iscrowd": 0, "category_id": 1, "bbox": [337.0, 491.0, 25.0, 21.0], "area": 278, "segmentation": {"size": [512, 512], "counts": 
"doX51n?2N2M3N1N3N2N110O1000O0100000O001O1O1O0O2M3N1O2M[`Z2"}, "image_id": 497, "id": 8505}, {"iscrowd": 0, "category_id": 1, "bbox": [58.0, 498.0, 44.0, 14.0], "area": 304, "segmentation": {"size": [512, 512], "counts": "o_m01n?1O1N2O1N2O1O11O001O00O1N2O1O1O1001O1O1O001O1O001O1O1O001O1O001O1ON2O100001O1O001O1O00Q`\\6"}, "image_id": 497, "id": 8506}, {"iscrowd": 0, "category_id": 1, "bbox": [292.0, 500.0, 34.0, 12.0], "area": 177, "segmentation": {"size": [512, 512], "counts": "o_b41n?1N2O1001O001O1OO1N2O1O1N2O1N21O1O001O1O001O1O1O001O1O001O1O000000Q`l2"}, "image_id": 497, "id": 8507}, {"iscrowd": 0, "category_id": 1, "bbox": [325.0, 0.0, 142.0, 15.0], "area": 2039, "segmentation": {"size": [512, 512], "counts": "P`R5?a?0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000O100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000P`f0"}, "image_id": 498, "id": 8508}, {"iscrowd": 0, "category_id": 1, "bbox": [133.0, 0.0, 43.0, 49.0], "area": 1218, "segmentation": {"size": [512, 512], "counts": "UaR21m?3L3N2M4M2M40O0010L3N3L3N2M4DPOdAR1\\>nNaAV1^>jN`AX1`>401O001O00001O001OO1M3N2M3N2M3N2SOSAd0P?ZORAd0V?O2O0Ie@I\\?4f@I]?4f@J\\?4:LjoW5"}, "image_id": 499, "id": 8509}, {"iscrowd": 0, "category_id": 1, "bbox": [195.0, 0.0, 42.0, 46.0], "area": 1175, "segmentation": {"size": [512, 512], "counts": "RaQ33k?2M4M2M3M4M2M3N3DWO\\Al0a>WO]Al0b>UOZAn0f>601O001O00001O001O00001ON2M3N2M3M3N2M3N2M3O100O1J6M3N2M3N2MS`Y4"}, "image_id": 499, "id": 8510}, {"iscrowd": 0, "category_id": 1, "bbox": [301.0, 0.0, 38.0, 16.0], "area": 469, "segmentation": {"size": [512, 512], "counts": "P`f42n?5K5K4L00000000O100000000O100000000O100000000O100000000O100000000O10001O5Kj_f2"}, "image_id": 499, "id": 8511}, {"iscrowd": 0, "category_id": 1, "bbox": [369.0, 0.0, 10.0, 2.0], "area": 15, "segmentation": {"size": [512, 512], "counts": "P`h52n?00000000O10000000P`R2"}, "image_id": 499, "id": 8512}, {"iscrowd": 0, "category_id": 1, "bbox": [442.0, 0.0, 11.0, 3.0], "area": 21, "segmentation": {"size": [512, 512], "counts": "PPm63m?0000O1000000O100000P`m0"}, "image_id": 499, "id": 8513}, {"iscrowd": 0, "category_id": 1, "bbox": [505.0, 3.0, 7.0, 31.0], "area": 147, "segmentation": {"size": [512, 512], "counts": "``l74k?6@Im@Im@;o>:10O1M"}, "image_id": 499, "id": 8514}, {"iscrowd": 0, "category_id": 1, "bbox": [445.0, 6.0, 28.0, 27.0], "area": 512, "segmentation": {"size": [512, 512], "counts": "[`n63m?4L4L4K6K2N00O0100000O10O1000O10O1000O10O1004L4K6K4L4LU_c0"}, "image_id": 499, "id": 8515}, {"iscrowd": 0, "category_id": 1, "bbox": [246.0, 9.0, 42.0, 57.0], "area": 1371, "segmentation": {"size": [512, 512], "counts": "eQk34j?2M4M2M3N3L3N3L3EWO]Ak0`>XO]Al0`>WO]Ak0`>=M21O010ON3L30010O0010O0N3L3N2M4L3N3L3N2M4M2M4M2M3N3L3M4M2Mao_3"}, "image_id": 499, "id": 8516}, {"iscrowd": 0, "category_id": 1, "bbox": [391.0, 16.0, 29.0, 30.0], "area": 617, "segmentation": {"size": [512, 512], "counts": "e`S63m?5K4K5L5K4LO0100000O010000000O0100000O0100000O4M4L5K4L4Kmn]1"}, "image_id": 499, "id": 8517}, {"iscrowd": 0, "category_id": 1, "bbox": [347.0, 26.0, 41.0, 37.0], "area": 1040, "segmentation": {"size": [512, 512], "counts": "Ta]55k?4L4L4K5L4L2NOLWORAj0n>400O0100000O0100000O0100000O010004K1000O10O1000O11N4M4L4L4K5L4L\\nm1"}, "image_id": 499, "id": 8518}, {"iscrowd": 0, "category_id": 1, "bbox": [34.0, 34.0, 78.0, 77.0], "area": 3574, "segmentation": {"size": [512, 512], "counts": 
"eRa01o?<000O010000000000POjNVCV1jb=P100O01000000000000000000000O01000000000000000000000O010j0a019G4k@MU?7g@HZ??O105K1O0JXOTAh0k>6100000O10O100000O0100000O10O105K000O100000O10O10003M4K6K4L5K4L]l="}, "image_id": 499, "id": 8525}, {"iscrowd": 0, "category_id": 1, "bbox": [145.0, 88.0, 40.0, 52.0], "area": 1235, "segmentation": {"size": [512, 512], "counts": "ocX23k?3L3N3M2M3N3C]OXAf0f>]OWAf0e>]OYAe0e>=L3N2O2O0010O0010O0010O0010ON2N3L3N3L3O2O0JQAZOP?c0SA[OP?b08N3L3N2M4M2MQ]S5"}, "image_id": 499, "id": 8526}, {"iscrowd": 0, "category_id": 1, "bbox": [78.0, 109.0, 22.0, 33.0], "area": 523, "segmentation": {"size": [512, 512], "counts": "hSW1:f?9O1000000002N7I00000O10009G;Efk]6"}, "image_id": 499, "id": 8527}, {"iscrowd": 0, "category_id": 1, "bbox": [206.0, 115.0, 39.0, 50.0], "area": 1153, "segmentation": {"size": [512, 512], "counts": "iTW32k?4M2M4M2M3N3L3F\\OVAg0f>\\OWAg0g>:M4M2010O00010O010O00010O010M2M3N3L3N3N100JPA[OR?b07M4M2M3N3L3NU\\U4"}, "image_id": 499, "id": 8528}, {"iscrowd": 0, "category_id": 1, "bbox": [289.0, 121.0, 42.0, 68.0], "area": 1739, "segmentation": {"size": [512, 512], "counts": "Vd`43m?4EKc@9Y?8O000O5L5K4L4K5L4L5K4L4K5L1OO10O1000O10O1000O10O14L5K4K5L4L4L4K6K4L4L4K5L4L5K\\Zj2"}, "image_id": 499, "id": 8529}, {"iscrowd": 0, "category_id": 1, "bbox": [244.0, 126.0, 22.0, 26.0], "area": 323, "segmentation": {"size": [512, 512], "counts": "aTj32l?3L3N2N3L3N3M2M4O00010O01M2N3M2M3N3M2M4M2Nokj3"}, "image_id": 499, "id": 8530}, {"iscrowd": 0, "category_id": 1, "bbox": [44.0, 143.0, 33.0, 60.0], "area": 1518, "segmentation": {"size": [512, 512], "counts": "bTf02n?:F;E;D`>BkA3U>LWBIo=1i[i6"}, "image_id": 499, "id": 8531}, {"iscrowd": 0, "category_id": 1, "bbox": [463.0, 156.0, 49.0, 36.0], "area": 1248, "segmentation": {"size": [512, 512], "counts": "WeW73l?100_@0R?3k@OS?6h@JW??1001O4K2O000000O0100000O010000000O0100000O01000000O01000000O0100000O010000000O0100000OUK"}, "image_id": 499, "id": 8532}, {"iscrowd": 0, "category_id": 1, "bbox": [121.0, 167.0, 30.0, 54.0], "area": 1433, "segmentation": {"size": [512, 512], "counts": "Yel16j?f0ZOg0YO1OO1000O100000000000000000000O1000000000000000O10c0]Og0YO_Yd5"}, "image_id": 499, "id": 8533}, {"iscrowd": 0, "category_id": 1, "bbox": [224.0, 177.0, 26.0, 28.0], "area": 444, "segmentation": {"size": [512, 512], "counts": "UV`32l?2M4M2N2M4M2N3L3O2O01O010O01OO2O010N1M4M2N2M4M5K2MYjR4"}, "image_id": 499, "id": 8534}, {"iscrowd": 0, "category_id": 1, "bbox": [89.0, 184.0, 29.0, 28.0], "area": 504, "segmentation": {"size": [512, 512], "counts": "\\f\\11l?3N3L3M3N3L3M4N100010O00010O01O01O010O000O2L3M3N3L3M4M2MTjT6"}, "image_id": 499, "id": 8535}, {"iscrowd": 0, "category_id": 1, "bbox": [176.0, 192.0, 58.0, 62.0], "area": 1688, "segmentation": {"size": [512, 512], "counts": "[Wh22m?2N2N2N2N2N1O2N2M3N2N2N2N2N2_OTOkAn0S>TOkAn0S>TOkAm0T>TOkAn0S>TOgAH0W1W>>0000O10O10002N2N2N2N2N1O000O3N2N2N1O2N2N2N2N2N2M3N000000002N2N2L`@Ea?:4N2N2N2MbiZ4"}, "image_id": 499, "id": 8536}, {"iscrowd": 0, "category_id": 1, "bbox": [470.0, 199.0, 37.0, 43.0], "area": 1116, "segmentation": {"size": [512, 512], "counts": "hV[71o?5K4@Kl@;o>Jl@9Q?;0O10O102N4L3L10000000O010000000O0100000O0100004L5J5L5K5K5K4K6KdX2"}, "image_id": 499, "id": 8537}, {"iscrowd": 0, "category_id": 1, "bbox": [152.0, 205.0, 29.0, 33.0], "area": 549, "segmentation": {"size": [512, 512], "counts": "UW\\21m?3L3N2N3L3N3L3N2N3L3010O010O0010O0010M2N3M2M3N3L3N3M2M4M]YU5"}, "image_id": 499, "id": 8538}, {"iscrowd": 0, "category_id": 1, "bbox": [326.0, 216.0, 49.0, 35.0], "area": 1189, "segmentation": 
{"size": [512, 512], "counts": "RWS52m?5L5K4L4K6K1O00000O0100000O010000000O0100000O0100002N0O100000M2010000000O0100000O010004L4K6K4L5KbXT2"}, "image_id": 499, "id": 8539}, {"iscrowd": 0, "category_id": 1, "bbox": [223.0, 227.0, 28.0, 36.0], "area": 596, "segmentation": {"size": [512, 512], "counts": "og_32k?3M3N3L3M3N3L3O20O00010O00M4M2M4M2000O2M2M3M4M2Eh@K\\?1g@L\\?2g@K[?2UYR4"}, "image_id": 499, "id": 8540}, {"iscrowd": 0, "category_id": 1, "bbox": [254.0, 240.0, 48.0, 47.0], "area": 1382, "segmentation": {"size": [512, 512], "counts": "_Xo33k?2N3L3N3M2N2M4M2N3M2M4M2N3M20001O010O010O001M20010O010O0010O001M2N3L3N3M2N2M4M2N3M2M4M2N3M2M3NXhX3"}, "image_id": 499, "id": 8541}, {"iscrowd": 0, "category_id": 1, "bbox": [497.0, 241.0, 15.0, 28.0], "area": 237, "segmentation": {"size": [512, 512], "counts": "Qhh71m?3N2N2N1N3N2N2M3N1O2N2M3O10O^H"}, "image_id": 499, "id": 8542}, {"iscrowd": 0, "category_id": 1, "bbox": [296.0, 256.0, 20.0, 29.0], "area": 314, "segmentation": {"size": [512, 512], "counts": "fXd43k?2M4M2M3N3M2M4M2M310ON2N3L3N3L3N2M4M2MogQ3"}, "image_id": 499, "id": 8543}, {"iscrowd": 0, "category_id": 1, "bbox": [299.0, 267.0, 53.0, 51.0], "area": 1309, "segmentation": {"size": [512, 512], "counts": "Xie41n?2N2N2O2M2N2N2N2N2O2M2N2N2N2N2N3N1N2N1O000000010O0000000000010O000002N2N2N3N1N2N2N2N2N3M2O1N2N2N2N3M2N2OPg_2"}, "image_id": 499, "id": 8544}, {"iscrowd": 0, "category_id": 1, "bbox": [442.0, 303.0, 33.0, 33.0], "area": 568, "segmentation": {"size": [512, 512], "counts": "nYm62m?2N2N2O1N3M2N2N2N2N2N2O2M2N2N1O0002O1N2N2N2N2N3M2N2O1N2N2N2N3M2NQVb0"}, "image_id": 499, "id": 8545}, {"iscrowd": 0, "category_id": 1, "bbox": [474.0, 315.0, 38.0, 91.0], "area": 2110, "segmentation": {"size": [512, 512], "counts": "Y\\]73k?2M4M2M3N3M201O0M3[O\\OlAg0Q>[OmAg0P>]OlAg0Q>[OmAg0P>]OlAf0R>\\OlAg0P>f0ESNaBP2[=TNaBo1]=SNaBo1\\==M2M4N110O0001N1N3L3N2N3L3N3L3N2XOSB[OP>b0SB^Ok3"}, "image_id": 499, "id": 8546}, {"iscrowd": 0, "category_id": 1, "bbox": [113.0, 337.0, 32.0, 48.0], "area": 1518, "segmentation": {"size": [512, 512], "counts": "ojh1Q1a>>000001O00000000000000000000000000000000000000000000000000000_Ug5"}, "image_id": 499, "id": 8547}, {"iscrowd": 0, "category_id": 1, "bbox": [160.0, 348.0, 46.0, 56.0], "area": 1443, "segmentation": {"size": [512, 512], "counts": "V\\`23j?3N2N3J5M4M2N3L3N2N3L310M2N3M2M3N3M2O20O00010O010O010M2N2M4M2M4M2N3L3N2N3L3N3M2M4M2N2M4M2Nkdh4"}, "image_id": 499, "id": 8548}, {"iscrowd": 0, "category_id": 1, "bbox": [221.0, 374.0, 55.0, 50.0], "area": 1424, "segmentation": {"size": [512, 512], "counts": "Zl^32m?2N2M3N2N2N1O2N2N2M3N2N2N2N1O2N2M3O1O100000O100000O100000O1000000OO000O11O2N2N2M3N2N2N1O2N2N2M3N2N2N2N1O2M3Ncce3"}, "image_id": 499, "id": 8549}, {"iscrowd": 0, "category_id": 1, "bbox": [92.0, 382.0, 13.0, 23.0], "area": 223, "segmentation": {"size": [512, 512], "counts": "_\\^14e?7I8L300001O000001ON2I8HWT[6"}, "image_id": 499, "id": 8550}, {"iscrowd": 0, "category_id": 1, "bbox": [200.0, 397.0, 18.0, 23.0], "area": 218, "segmentation": {"size": [512, 512], "counts": "o\\T31m?3L3N3M2M4M2N2N30O0N2N3M2M4M2N3L3Nacb4"}, "image_id": 499, "id": 8551}, {"iscrowd": 0, "category_id": 1, "bbox": [95.0, 408.0, 31.0, 29.0], "area": 562, "segmentation": {"size": [512, 512], "counts": "Ym_14j?2M3N3L3M4M200010O01O0M301O010O00010O01O0YOk@d0X?01O0N3L3N2M4M2M4MobP6"}, "image_id": 499, "id": 8552}, {"iscrowd": 0, "category_id": 1, "bbox": [176.0, 411.0, 18.0, 22.0], "area": 235, "segmentation": {"size": [512, 512], "counts": "]]h21m?2M4L3M3M4M2N201O01O001L3M3N3L3M3MUcn4"}, "image_id": 
499, "id": 8553}, {"iscrowd": 0, "category_id": 1, "bbox": [479.0, 420.0, 18.0, 14.0], "area": 133, "segmentation": {"size": [512, 512], "counts": "[m_71m?2N3M2O2O010O010O010O010O01M2N3M2OgR7"}, "image_id": 499, "id": 8554}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 421.0, 29.0, 41.0], "area": 797, "segmentation": {"size": [512, 512], "counts": "U=S1n>N1010O001RAoNl>S1O010O01O00001O0010O01N1N2N3L3N3M2M3N3L3N3M2MbRa7"}, "image_id": 499, "id": 8555}, {"iscrowd": 0, "category_id": 1, "bbox": [34.0, 421.0, 37.0, 32.0], "area": 686, "segmentation": {"size": [512, 512], "counts": "g]a02l?2M4M2N2M4M2M4N101O01O010O010O01O01O010O010O00010O010O010N1M3N3M2M4M2N2McRl6"}, "image_id": 499, "id": 8556}, {"iscrowd": 0, "category_id": 1, "bbox": [270.0, 421.0, 45.0, 59.0], "area": 1558, "segmentation": {"size": [512, 512], "counts": "_]W42n?3L4M3M2M4M3L4M2N3L4M3M3L3N3M3L4M2N0O10O01000O010O10O12M4M2N3L4M3M3L3N3L4M3M2M4M3M3L3N3M]QR3"}, "image_id": 499, "id": 8557}, {"iscrowd": 0, "category_id": 1, "bbox": [431.0, 443.0, 23.0, 21.0], "area": 255, "segmentation": {"size": [512, 512], "counts": "Ung62n?1N3M2O2M2N3N0O0010O00010O00010O2N3N1N3M2O2Mmal0"}, "image_id": 499, "id": 8558}, {"iscrowd": 0, "category_id": 1, "bbox": [108.0, 466.0, 47.0, 35.0], "area": 1372, "segmentation": {"size": [512, 512], "counts": "h^f12n?=CK5000000000000001O01O0000000000000K5E;Fl\\e7"}, "image_id": 502, "id": 8570}, {"iscrowd": 0, "category_id": 1, "bbox": [79.0, 111.0, 38.0, 46.0], "area": 1192, "segmentation": {"size": [512, 512], "counts": "hdW13d?9F:F:F:N30O0000000000000001OI80O00000000mN_Ag0m>000000001O00000001O0000000M3F:F\\\\U6"}, "image_id": 502, "id": 8571}, {"iscrowd": 0, "category_id": 1, "bbox": [126.0, 112.0, 37.0, 45.0], "area": 1233, "segmentation": {"size": [512, 512], "counts": "`To1;Y?0001O0000000001O000000000001I6D]\\^5"}, "image_id": 502, "id": 8572}, {"iscrowd": 0, "category_id": 1, "bbox": [33.0, 114.0, 36.0, 36.0], "area": 1043, "segmentation": {"size": [512, 512], "counts": "]d`02e?9`@FQ?i0G9000000000000000000001O01O0000J60000000000010O00000000000000L4F:Eg\\m6"}, "image_id": 502, "id": 8573}, {"iscrowd": 0, "category_id": 1, "bbox": [204.0, 143.0, 37.0, 53.0], "area": 1751, "segmentation": {"size": [512, 512], "counts": "]UV3c0f>g0J60000001OO4L1000001O0001O1O0M3000000001O0001O000000000000000000000M4YOS\\W4"}, "image_id": 502, "id": 8574}, {"iscrowd": 0, "category_id": 1, "bbox": [130.0, 163.0, 25.0, 16.0], "area": 294, "segmentation": {"size": [512, 512], "counts": "_UQ21g?8L400001O0001O0000000001O0001O0000000001O01OO1HR[b5"}, "image_id": 502, "id": 8575}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 166.0, 14.0, 23.0], "area": 287, "segmentation": {"size": [512, 512], "counts": "V5f0Z?000001O000000000001O00O1CVkh7"}, "image_id": 502, "id": 8576}, {"iscrowd": 0, "category_id": 1, "bbox": [402.0, 179.0, 61.0, 56.0], "area": 1745, "segmentation": {"size": [512, 512], "counts": "^VY61n?2O1N3M2N2O2M2DBSA`0k>BSAa0j>AUA`0i>CTA?k>BSAa0j>SOdAn0[>UObAk0_>VO`Ai0`>YO^Ag0b>g0`AXO`>g0bAWO^>i0eATO[>l0gAROY>o0U1\\AmNc>S1\\AoNd>V11000O10000000N2N2N11000000000OXAmNc>Y10000000O1000O10O1N2N2O1000O10000YAjNc>V1[AlNe>X10000O1000O10000N2N1010000O100000O1000M3N2N2N2N1O2M3N2N2N2N2N1N3N2N2N2N2NRgn4"}, "image_id": 502, "id": 8582}, {"iscrowd": 0, "category_id": 1, "bbox": [429.0, 239.0, 46.0, 48.0], "area": 1116, "segmentation": {"size": [512, 512], "counts": "[hf62m?2N2M3N1O2M3N2N1N3N2N2M2O2N2M3N1O2M3N2M2O200O10M3N2N1N3N2N2M2O2N2N2M2O2N2M3N1O2M3N2N1N3N2NPXb0"}, "image_id": 502, "id": 8583}, {"iscrowd": 0, "category_id": 1, 
"bbox": [337.0, 240.0, 74.0, 82.0], "area": 2903, "segmentation": {"size": [512, 512], "counts": "_iX51m?2O2N2M3N2M2O2SOAVBa0h=AUBb0h=AVB`0i=AUBb0h=AVBa0h=AQBGHj0U>AQBe0m=]ORBe0l=]OQBf0l=]ORBe0l=h0O2M3N2M2O2N2M0102M3N1O2M3N1N3N2N2M2O2M3N1O2M3N2M2O2N2M2O2M3NO10O10O0100020O1000O0O2N2N2M2O2M3N1O2M3N2M2O2N2M2OlWb1"}, "image_id": 502, "id": 8584}, {"iscrowd": 0, "category_id": 1, "bbox": [69.0, 259.0, 95.0, 88.0], "area": 3265, "segmentation": {"size": [512, 512], "counts": "mhR12m?2L4N1N3N2N2N2N2N2N2N0O10001O2N2N2N2UAoNd>S1ZAoNd>X1M3O1000000O010000N2M3O100000O1]AgN^>X1aAjN_>V1_AlNa>Z100000O1000O1000N2N2N20000000O1\\AhN_>X1_AjNa>[100000000000O10O10M3N2O1000000000O100000N2N2N2N2M3N1O2N2N2N2N2N2N2M3N2N1O2N2N2Nne]5"}, "image_id": 502, "id": 8585}, {"iscrowd": 0, "category_id": 1, "bbox": [500.0, 266.0, 12.0, 24.0], "area": 171, "segmentation": {"size": [512, 512], "counts": "iXj71n?1T@0f?7N2N2M2O2N1N10002M3eG"}, "image_id": 502, "id": 8586}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 282.0, 9.0, 16.0], "area": 78, "segmentation": {"size": [512, 512], "counts": "l8>`?12N2N2N2N2N2N2NPWk7"}, "image_id": 502, "id": 8587}, {"iscrowd": 0, "category_id": 1, "bbox": [42.0, 282.0, 20.0, 21.0], "area": 213, "segmentation": {"size": [512, 512], "counts": "PYe02m?2N3N1N2N2N2O2M2001O000O1N2N3M2N2O1N2NjfP7"}, "image_id": 502, "id": 8588}, {"iscrowd": 0, "category_id": 1, "bbox": [411.0, 294.0, 50.0, 90.0], "area": 3197, "segmentation": {"size": [512, 512], "counts": "`k]62l?3M2N3nNIXB8Q=?mBDfRCDn<WOaWi0"}, "image_id": 502, "id": 8589}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 312.0, 37.0, 39.0], "area": 728, "segmentation": {"size": [512, 512], "counts": "U:d0[?2M3N2N2N2N2N000001O2N2N1O2M3N2N2N2N000000000000000000001O2N2N2N1O2M3N2NSV]7"}, "image_id": 502, "id": 8590}, {"iscrowd": 0, "category_id": 1, "bbox": [67.0, 313.0, 22.0, 23.0], "area": 257, "segmentation": {"size": [512, 512], "counts": "QjQ12m?2N2N2N2N2N2M3O101O000000O1N2N2N2N2N2N2N2NkUc6"}, "image_id": 502, "id": 8591}, {"iscrowd": 0, "category_id": 1, "bbox": [272.0, 332.0, 38.0, 35.0], "area": 663, "segmentation": {"size": [512, 512], "counts": "mZX41o?2M2N3N1N3M2O2M2N3N1N3M1O01O01O01O01O01O01O01O00011N2N3N1N3M2O2M2N2O2M2N3NSeT3"}, "image_id": 502, "id": 8592}, {"iscrowd": 0, "category_id": 1, "bbox": [17.0, 334.0, 29.0, 29.0], "area": 444, "segmentation": {"size": [512, 512], "counts": "lj81n?2N2N2N2N2N2N2N2N2N2N2N2N2N2O01N2N2N2N2N2N2N2N3M2N2N2N2NUeX7"}, "image_id": 502, "id": 8593}, {"iscrowd": 0, "category_id": 1, "bbox": [53.0, 338.0, 10.0, 9.0], "area": 46, "segmentation": {"size": [512, 512], "counts": "gjj01n?3N1N1O1O0002N2N2N\\UP7"}, "image_id": 502, "id": 8594}, {"iscrowd": 0, "category_id": 1, "bbox": [97.0, 344.0, 40.0, 36.0], "area": 773, "segmentation": {"size": [512, 512], "counts": "Tk`11n?2N2N2N4L2N2N2N2M3N1O3O0000000O1N2N2N101000O100O1N2N2N2N2N2N2N2N2N2N2N2N1O2N2MgTk5"}, "image_id": 502, "id": 8595}, {"iscrowd": 0, "category_id": 1, "bbox": [296.0, 348.0, 31.0, 31.0], "area": 493, "segmentation": {"size": [512, 512], "counts": "\\[d42m?2N2N2N2N2N2N2N2N2N2N2N2N2N1O0002N2N2N2N2N2N2N2N2N2N2N2N2N2NgTl2"}, "image_id": 502, "id": 8596}, {"iscrowd": 0, "category_id": 1, "bbox": [325.0, 365.0, 17.0, 23.0], "area": 262, "segmentation": {"size": [512, 512], "counts": "lkR55g?4L4L5L30000010O0000010L3L4K5L4Ledd2"}, "image_id": 502, "id": 8597}, {"iscrowd": 0, "category_id": 1, "bbox": [442.0, 367.0, 22.0, 24.0], "area": 278, "segmentation": {"size": [512, 512], "counts": "Q\\m61m?2N3M2N3M2N3M2N3N1O2O01M2O2M2N3M2N3M2N3N1N^dg0"}, 
"image_id": 502, "id": 8598}, {"iscrowd": 0, "category_id": 1, "bbox": [56.0, 371.0, 47.0, 39.0], "area": 838, "segmentation": {"size": [512, 512], "counts": "i[l01n?2N2N2N2N2N2O3N0000000000000000000O1N2N2N2N2N2N2N2N2000000000N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2NhS\\6"}, "image_id": 502, "id": 8599}, {"iscrowd": 0, "category_id": 1, "bbox": [340.0, 372.0, 23.0, 22.0], "area": 372, "segmentation": {"size": [512, 512], "counts": "Q\\Z55f?6K4L4001O01O0000010O00000010O0000000N3J5L4K]TZ2"}, "image_id": 502, "id": 8600}, {"iscrowd": 0, "category_id": 1, "bbox": [391.0, 375.0, 37.0, 39.0], "area": 762, "segmentation": {"size": [512, 512], "counts": "VlS61o?1N2T@Mh?7N3M2N2N2N3N1N2N2N3M2N2N2N2O2M10O2N2N2N3M2O1N2N3M2N2N2N3M2O1N2N3M2NecY1"}, "image_id": 502, "id": 8601}, {"iscrowd": 0, "category_id": 1, "bbox": [238.0, 389.0, 80.0, 63.0], "area": 2730, "segmentation": {"size": [512, 512], "counts": "`]g31l?4M2M3M4L3M3N3M20010O00010O01O01O010O00010O01O01O01M2001[OZOkAi0Q>_OhAd0W>^OfAe0X>]OdAf0[>`0O2O000O2O010O00010O00010O01O01O01O01O010O00010O00010O01lNgA>Z>_OiA>Z>_OjA>Y>^OjA?Y>_OiA>Z>_OjAa0j>00010O010O0001M2M3M4M2M3MkbP3"}, "image_id": 502, "id": 8602}, {"iscrowd": 0, "category_id": 1, "bbox": [33.0, 393.0, 46.0, 45.0], "area": 936, "segmentation": {"size": [512, 512], "counts": "dl`01n?2N3M2N20000Z@Gd?;0000N2N2M3N2N2N2N2N2N2N2N2O101O00000000mNVAo0n>0000N2N2N2N2N2N2N2N2N2N2N2N2N2N3N1NkRh6"}, "image_id": 502, "id": 8603}, {"iscrowd": 0, "category_id": 1, "bbox": [460.0, 417.0, 52.0, 37.0], "area": 1675, "segmentation": {"size": [512, 512], "counts": "h]V7:Y?=F:00000000001O0001O000000000000000001O0001O00000000000000000001O0001O000000000000000001O0001O0000000kB"}, "image_id": 502, "id": 8604}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 420.0, 42.0, 58.0], "area": 1410, "segmentation": {"size": [512, 512], "counts": "]=i0V?2PAWOf>k0XAWOf>k0XAWOf>S1N2N2N2O1N2N200001O0000000O1N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2O1N3M2N2N2N2N2NlaZ7"}, "image_id": 502, "id": 8605}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 439.0, 61.0, 63.0], "area": 2165, "segmentation": {"size": [512, 512], "counts": "mnm23j?3N2M4L3M3M4M2N2RAXOe>h0XAZOh>f0WA\\Oh>d0WA^Oi>l001O10O01O10O01O10O01O10OIjNdAT1[>PObAQ1\\>92O2M00O2M3O210O00010O00010O00010O0001O0M3M4L3M3M4L3M3M4L3M3M4L3M3MSbS4"}, "image_id": 502, "id": 8606}, {"iscrowd": 0, "category_id": 1, "bbox": [318.0, 445.0, 62.0, 67.0], "area": 2219, "segmentation": {"size": [512, 512], "counts": "m_o43j?3M3M3N2M3M300001O00001O001OM3N2M3M3M3M3M3M3M3M3M3M3M3N2M3M3M3O11O00001O00001O001O0M3M4L3N2M4L3M3N3L3M3M4M2M4L3M3N3L3M3M4MjaQ2"}, "image_id": 502, "id": 8607}, {"iscrowd": 0, "category_id": 1, "bbox": [261.0, 456.0, 47.0, 56.0], "area": 1471, "segmentation": {"size": [512, 512], "counts": "eoR41m?3M2]OLYA6d>LZA6e>LXA7e>KYA7e>LYA6d>MYA6e>KYA7e>d0M2N3N1N3M3N1100O001O001N1N3M3M2O2M2N1O010O2N3M2O2M2N3N2M2N3M2O2M2N3M3N1N_aU3"}, "image_id": 502, "id": 8608}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 476.0, 6.0, 12.0], "area": 42, "segmentation": {"size": [512, 512], "counts": "l>9RAEo>:o0000000000000000000000001O0001O00000000000000;E00000000000010O0000000000Ib_P1"}, "image_id": 503, "id": 8619}, {"iscrowd": 0, "category_id": 1, "bbox": [126.0, 10.0, 65.0, 57.0], "area": 2009, "segmentation": {"size": [512, 512], "counts": "PQo12n?1N2N2N2N2N3M2N2O1N2N2N201N1N2N2N2N2N2N3N1N2N2N2N1O1O003N1000000000O2M000001O0001O1O2N2N2I\\AoNf>P1[AoNf>o0\\AoNg>n07N200N2N2O1N3M2N2N2N2N2N2O2M2N2Nl^P5"}, "image_id": 503, "id": 8620}, {"iscrowd": 0, "category_id": 1, "bbox": [219.0, 13.0, 29.0, 29.0], "area": 437, 
"segmentation": {"size": [512, 512], "counts": "j`]31n?3M2N2N2N2N2N2N2N2N2O2M2N2N01O2N2N2O1N2N2N3M2N2N2N2N2N2NVoS4"}, "image_id": 503, "id": 8621}, {"iscrowd": 0, "category_id": 1, "bbox": [200.0, 18.0, 8.0, 8.0], "area": 35, "segmentation": {"size": [512, 512], "counts": "dPT32m?2N2O1000N2N2N[og4"}, "image_id": 503, "id": 8622}, {"iscrowd": 0, "category_id": 1, "bbox": [234.0, 30.0, 29.0, 29.0], "area": 429, "segmentation": {"size": [512, 512], "counts": "[Qe31n?2N2N2N3M2N2N2N2N2O1N2N2N3M01O2N2N2N2N2N2O1N3M2N2N2N2N2Ne^l3"}, "image_id": 503, "id": 8623}, {"iscrowd": 0, "category_id": 1, "bbox": [348.0, 36.0, 40.0, 47.0], "area": 1398, "segmentation": {"size": [512, 512], "counts": "bQ^5?a?01O0000000000000000m@Db>m0G9J6001O00000000000000000000000001O0000000000000001O0_Oa0YOaom1"}, "image_id": 503, "id": 8624}, {"iscrowd": 0, "category_id": 1, "bbox": [490.0, 36.0, 22.0, 55.0], "area": 1102, "segmentation": {"size": [512, 512], "counts": "_Re7;T?a0^Ob0H8000000001O000000000000000001O0000000kN"}, "image_id": 503, "id": 8625}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 45.0, 55.0, 47.0], "area": 1233, "segmentation": {"size": [512, 512], "counts": "iao22m?3^@LS?6k@LS?6k@MR?5l@MS?4k@NS?4k@NS?a0N2N2N3M2N2N2O1000000000N2N2O1N3M2N2N2N2N2N2OO00000000000001O2O1N2N2N3M2N2N2N0001O003M02N3M2O1NRnT4"}, "image_id": 503, "id": 8626}, {"iscrowd": 0, "category_id": 1, "bbox": [132.0, 57.0, 24.0, 26.0], "area": 306, "segmentation": {"size": [512, 512], "counts": "QRR22m?2N2N3M2N2N2N2O1N21O01O000000N2O1N3M2N2N2N2N2Nhma5"}, "image_id": 503, "id": 8627}, {"iscrowd": 0, "category_id": 1, "bbox": [409.0, 57.0, 48.0, 34.0], "area": 1220, "segmentation": {"size": [512, 512], "counts": "Vb\\65^?=00000;E00000000000000000000000001O000000000001O00000000000000VOTA`0V?3M00000000000000001O00000000Mo]k0"}, "image_id": 503, "id": 8628}, {"iscrowd": 0, "category_id": 1, "bbox": [324.0, 62.0, 22.0, 22.0], "area": 424, "segmentation": {"size": [512, 512], "counts": "VRR5<\\?800000001O000001O0000000000000000000001OB_nb2"}, "image_id": 503, "id": 8629}, {"iscrowd": 0, "category_id": 1, "bbox": [459.0, 68.0, 23.0, 22.0], "area": 475, "segmentation": {"size": [512, 512], "counts": "dbU76Z?`000000000000000000000000000001O00000000000HSn>"}, "image_id": 503, "id": 8630}, {"iscrowd": 0, "category_id": 1, "bbox": [289.0, 75.0, 12.0, 18.0], "area": 206, "segmentation": {"size": [512, 512], "counts": "[b`4b0^?0000000001O000000000Lh]Y3"}, "image_id": 503, "id": 8631}, {"iscrowd": 0, "category_id": 1, "bbox": [160.0, 77.0, 43.0, 51.0], "area": 1166, "segmentation": {"size": [512, 512], "counts": "`S`21]?0SA2l>ORA3l>ORA4k>Nn@KO9Q?Nn@KO9Q?Nn@9P?=N2N2N2N2N2N2N2N10O000000002N2N2N2KYAnNi>P15N2N2N2N2N2N001O2N2N2O1N3M2N2N2N2NP]j4"}, "image_id": 503, "id": 8632}, {"iscrowd": 0, "category_id": 1, "bbox": [390.0, 78.0, 12.0, 9.0], "area": 105, "segmentation": {"size": [512, 512], "counts": "aRS66g?300000000000000000000bmf1"}, "image_id": 503, "id": 8633}, {"iscrowd": 0, "category_id": 1, "bbox": [39.0, 80.0, 59.0, 52.0], "area": 1508, "segmentation": {"size": [512, 512], "counts": "\\cc01n?2N2N2N2N2N2N2N2N3M2N2O1N2N2N2N2N2N2N2N3M2N11N3M2N2N1O001O000001O0000000000000001O1O2N2O1N2N2N3M2N2N2N2N2N2N2N2O1N2NPm^6"}, "image_id": 503, "id": 8634}, {"iscrowd": 0, "category_id": 1, "bbox": [229.0, 85.0, 59.0, 50.0], "area": 1407, "segmentation": {"size": [512, 512], "counts": "Zcb31n?2N3M2N2N2N2N2N2O1N2N20000000000000000N2N2O1N2N2N3M2N0000001O000000000000000001O2N2N2N2N2N2N2N2N2N2N2N2N2O1N2N2N2N2Njl_3"}, "image_id": 503, "id": 8635}, {"iscrowd": 0, 
"category_id": 1, "bbox": [104.0, 88.0, 28.0, 31.0], "area": 455, "segmentation": {"size": [512, 512], "counts": "ZSd11n?2N2HM`@5^?N_@4_?8O1N2N2N2N3M2N0001O2N2N2N2N2N2N2N3M2N2O1N2N2Nklm5"}, "image_id": 503, "id": 8636}, {"iscrowd": 0, "category_id": 1, "bbox": [422.0, 91.0, 23.0, 23.0], "area": 467, "segmentation": {"size": [512, 512], "counts": "WSc6:Z?<0000000000000000001O01O0000000000000000I7D`]Q1"}, "image_id": 503, "id": 8637}, {"iscrowd": 0, "category_id": 1, "bbox": [190.0, 102.0, 31.0, 30.0], "area": 466, "segmentation": {"size": [512, 512], "counts": "gSo21n?2N2N2N2N2N2N2N2N2N2N2N2N1O00000001O2N2N2N2N2N3M2N2O1N2N2N2N^\\a4"}, "image_id": 503, "id": 8638}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 110.0, 73.0, 102.0], "area": 2983, "segmentation": {"size": [512, 512], "counts": "^3a1`>01O000000000001O00000001O000000000001O00000001O000000000001O00000001O000000000001O00000001O0000000N2N2O1N2N2N2N3M2N2N2N2N2N2N2O1N2N2N3M2N2N2N2NTZk6"}, "image_id": 503, "id": 8639}, {"iscrowd": 0, "category_id": 1, "bbox": [140.0, 114.0, 29.0, 29.0], "area": 445, "segmentation": {"size": [512, 512], "counts": "oSV22m?2N2O1N2N2N2N2N2N2N2N3M2N2N10O2N2N2N2N2N2N2O1N2N2N3M2N2NQ\\[5"}, "image_id": 503, "id": 8640}, {"iscrowd": 0, "category_id": 1, "bbox": [372.0, 117.0, 58.0, 52.0], "area": 1373, "segmentation": {"size": [512, 512], "counts": "QUj51n?2M2O2N2M3N1O2M3000OO0O10O10O10O10O102N1N3N2N2M2O2N2M3N1O2M3N2N0O10O10O12M3N2N1N3N2N2M2O2N2M3N1O2M3N2N1O2M3N2N2M2OlkX1"}, "image_id": 503, "id": 8641}, {"iscrowd": 0, "category_id": 1, "bbox": [80.0, 126.0, 17.0, 17.0], "area": 160, "segmentation": {"size": [512, 512], "counts": "UTX11n?2N3M2O1N2N2N2N2O01N2N2N2N3M2N2Nk[_6"}, "image_id": 503, "id": 8642}, {"iscrowd": 0, "category_id": 1, "bbox": [100.0, 129.0, 41.0, 43.0], "area": 918, "segmentation": {"size": [512, 512], "counts": "iTb12m?3N1N3M2O1N3M2N3G_Oo@b0P?_On@c0P?9M2O0O000010O00010O0000010O003N1N2N3M2O2M2N2O2M2N3N1N2N3M2OZ[i5"}, "image_id": 503, "id": 8643}, {"iscrowd": 0, "category_id": 1, "bbox": [284.0, 136.0, 58.0, 48.0], "area": 1482, "segmentation": {"size": [512, 512], "counts": "PU^43l?4M3M4K4M4L3L2O00000O01000O0100000O01000O01000O01000O1O0JYOTAh0k>7O001O1O4L2O0O0102N1JZAoNh>o06N2N2N2N2O2M2N2N2N2N2N2N2N3N1N2NSkd2"}, "image_id": 503, "id": 8644}, {"iscrowd": 0, "category_id": 1, "bbox": [252.0, 137.0, 28.0, 29.0], "area": 427, "segmentation": {"size": [512, 512], "counts": "eTn32m?2N2N2N2N2N2N2N2O1N2N3M2N2O10N2N2N2N2N2N2N2N3M2N2N2N2NZkc3"}, "image_id": 503, "id": 8645}, {"iscrowd": 0, "category_id": 1, "bbox": [75.0, 153.0, 10.0, 11.0], "area": 65, "segmentation": {"size": [512, 512], "counts": "mdU12m?2N2N2N2O10N2N2N2NT[e6"}, "image_id": 503, "id": 8646}, {"iscrowd": 0, "category_id": 1, "bbox": [449.0, 154.0, 16.0, 16.0], "area": 134, "segmentation": {"size": [512, 512], "counts": "PeP72m?2N3N1N3N1N2N01O012M2N3N1N3M2OmZg0"}, "image_id": 503, "id": 8647}, {"iscrowd": 0, "category_id": 1, "bbox": [233.0, 156.0, 27.0, 27.0], "area": 377, "segmentation": {"size": [512, 512], "counts": "Yed32m?2N2N2N2N2N2N2N2N2N2N2N2N01O2N2N2N2N2N2N2N2N2N2N2N2Nijm3"}, "image_id": 503, "id": 8648}, {"iscrowd": 0, "category_id": 1, "bbox": [495.0, 156.0, 17.0, 61.0], "area": 624, "segmentation": {"size": [512, 512], "counts": "Reg72n?3U@Na?U1lAlNS>`11000O3RK"}, "image_id": 503, "id": 8649}, {"iscrowd": 0, "category_id": 1, "bbox": [457.0, 162.0, 35.0, 38.0], "area": 700, "segmentation": {"size": [512, 512], "counts": "\\eT71o?2M3M2O2M2O2M3M2O2M3M2O2M2O2M3M2O1OO2O2M2O2M3M2O2M3M2O2M2O2M3M2O2M2OUj9"}, "image_id": 503, 
"id": 8650}, {"iscrowd": 0, "category_id": 1, "bbox": [421.0, 163.0, 8.0, 8.0], "area": 36, "segmentation": {"size": [512, 512], "counts": "Veb62m?2N2N2OO2N2N2NkZY1"}, "image_id": 503, "id": 8651}, {"iscrowd": 0, "category_id": 1, "bbox": [399.0, 170.0, 37.0, 33.0], "area": 658, "segmentation": {"size": [512, 512], "counts": "QfW61n?2N3M2HId@9Z?Id@9[?Ic@8[?7O0000001O0000101N2N2N2N200000O1N2N2N2N3M2N2N2N2O1N2N2N3M2NTjU1"}, "image_id": 503, "id": 8652}, {"iscrowd": 0, "category_id": 1, "bbox": [133.0, 171.0, 61.0, 51.0], "area": 1500, "segmentation": {"size": [512, 512], "counts": "PfR21n?2N2O1N2N2N2N3M2N2N2O1N2000001O01O0000N2N2O2M2N2N2N2N1O000001O00000001O000001O00001O2N2O1N2N3M2N2N2N2N2N2O2M2N2N2N2N2N2NRjn4"}, "image_id": 503, "id": 8653}, {"iscrowd": 0, "category_id": 1, "bbox": [78.0, 174.0, 23.0, 24.0], "area": 265, "segmentation": {"size": [512, 512], "counts": "fUW11o?1N2N3M2N2N2N2O1N3M21O00000N2N2N3M2N2N2O1N2NUZ]6"}, "image_id": 503, "id": 8654}, {"iscrowd": 0, "category_id": 1, "bbox": [95.0, 189.0, 32.0, 31.0], "area": 534, "segmentation": {"size": [512, 512], "counts": "[f_13l?2N2N2N2N2N2O1N3M2N2N2N2N2N1O01O1O2O1N2N2N2N2N2N3M2N2O1N2N2N2NeYP6"}, "image_id": 503, "id": 8655}, {"iscrowd": 0, "category_id": 1, "bbox": [446.0, 189.0, 46.0, 72.0], "area": 1603, "segmentation": {"size": [512, 512], "counts": "UVo61n?3N2M2O2i@I`>9^AJ_>9_AJ^>8_AK^>7aAK]>7`AL]>7`AM\\>5cAM[>n0M3N3O0O100O2O000ON3N1O2M3N1N3N2M2O2M4M2N2M3N3L3N2M3N3L3N2N2M4M2M3NWh9"}, "image_id": 503, "id": 8656}, {"iscrowd": 0, "category_id": 1, "bbox": [349.0, 191.0, 32.0, 31.0], "area": 409, "segmentation": {"size": [512, 512], "counts": "df^53l?2N2N2N2N2N2O1N0000000000001O01O000000000000010O2N2N2N2N2N2N3MjYQ2"}, "image_id": 503, "id": 8657}, {"iscrowd": 0, "category_id": 1, "bbox": [272.0, 196.0, 15.0, 14.0], "area": 109, "segmentation": {"size": [512, 512], "counts": "ZVX41n?2N2N2N2N2N2O10N2N2N2O1N2N2NfY`3"}, "image_id": 503, "id": 8658}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 200.0, 36.0, 38.0], "area": 694, "segmentation": {"size": [512, 512], "counts": "e62m?2N2N2O1N2N2N2N3M2N2N2N2N2N2O1O10001O01O000N2N2N2N2N2O1N2N3M2N2N4L3M2NQi]7"}, "image_id": 503, "id": 8659}, {"iscrowd": 0, "category_id": 1, "bbox": [268.0, 208.0, 33.0, 34.0], "area": 574, "segmentation": {"size": [512, 512], "counts": "SWV41n?2N2N2N2N2N2N2N2N2N2N2N2N2O1N00000001O2N0001O01O2N2_Oh@7Z?Gh@7Z?Gh@7b?N2N2N2NQYY3"}, "image_id": 503, "id": 8660}, {"iscrowd": 0, "category_id": 1, "bbox": [192.0, 213.0, 49.0, 47.0], "area": 1101, "segmentation": {"size": [512, 512], "counts": "aWP31n?2N2N2N2N2O2M2N2N2N2N2N2N2N2N2N2N2N1O01O000000000000000001O01O2N2N2N2N2N3M2N2N2N2N2N2O1N2N2N2N2NjXW4"}, "image_id": 503, "id": 8661}, {"iscrowd": 0, "category_id": 1, "bbox": [389.0, 219.0, 29.0, 29.0], "area": 444, "segmentation": {"size": [512, 512], "counts": "WgR62m?2N3N1N2N2N2N2N3M2O1N2N2N2N10O2N2N3M2N2N2O1N2N3M2N2N2N2Ngh^1"}, "image_id": 503, "id": 8662}, {"iscrowd": 0, "category_id": 1, "bbox": [328.0, 224.0, 17.0, 16.0], "area": 151, "segmentation": {"size": [512, 512], "counts": "XWT51n?2N2N3M2N2O1N1O0010O2N2N2N2N2N2NkXc2"}, "image_id": 503, "id": 8663}, {"iscrowd": 0, "category_id": 1, "bbox": [363.0, 224.0, 10.0, 10.0], "area": 51, "segmentation": {"size": [512, 512], "counts": "Uge52m?2N2N2N1O03M2N2N2OkXU2"}, "image_id": 503, "id": 8664}, {"iscrowd": 0, "category_id": 1, "bbox": [57.0, 225.0, 63.0, 62.0], "area": 1804, "segmentation": {"size": [512, 512], "counts": 
"Xhl02m?2N2N2N2N2N2O2M2N2N2N2N2N2N2N3N1N2N2N2N1O00000001O0000000001O000001O0000000001O00000001O2N3M2N2O1N2N2N2N2N3M2N2N2O1N2N2N2KX@Oj?O_hS6"}, "image_id": 503, "id": 8665}, {"iscrowd": 0, "category_id": 1, "bbox": [392.0, 251.0, 56.0, 54.0], "area": 1426, "segmentation": {"size": [512, 512], "counts": "_XT61n?2N2N2N2N1O2N2N2N2N2N2M3N2N2N2N2N2N2N1010000O100000000000000000O10000000OO2M3N2N2N2N2N2N2N2N2N2N2N2N1N3N2N2N2NVgo0"}, "image_id": 503, "id": 8666}, {"iscrowd": 0, "category_id": 1, "bbox": [380.0, 253.0, 14.0, 14.0], "area": 110, "segmentation": {"size": [512, 512], "counts": "SXn52m?2N2N2O1N2N2N11N2N2N2O2M2Nmgj1"}, "image_id": 503, "id": 8667}, {"iscrowd": 0, "category_id": 1, "bbox": [113.0, 256.0, 16.0, 15.0], "area": 126, "segmentation": {"size": [512, 512], "counts": "Xhh11n?2N2N2N2N2N2N1O01O2N2N2N2N2N2NkWo5"}, "image_id": 503, "id": 8668}, {"iscrowd": 0, "category_id": 1, "bbox": [233.0, 256.0, 53.0, 50.0], "area": 1265, "segmentation": {"size": [512, 512], "counts": "nhd32m?2O1N2N2N2N2N2N3M2N2N2N2N2O1N2N2N2N1O0101N000000000000001O000001O00000001O2N2N2N3M2N2N2N2O1N2N2N2N2N2N2Nag`3"}, "image_id": 503, "id": 8669}, {"iscrowd": 0, "category_id": 1, "bbox": [319.0, 257.0, 34.0, 34.0], "area": 571, "segmentation": {"size": [512, 512], "counts": "_ho41n?2N2N3M2N2N2O1N2N2N2N2N2N3M2N200000N2N2N2N2O1N2N3M2N2N2N2N2N2N2N2O\\W_2"}, "image_id": 503, "id": 8670}, {"iscrowd": 0, "category_id": 1, "bbox": [83.0, 269.0, 36.0, 35.0], "area": 655, "segmentation": {"size": [512, 512], "counts": "ohY11n?2N3M2N2O1N2N2N3M2N2N2N2O1N2N1O00001O0001O2N2N2N2N3N1N2N2N2N2N2N3M2O1NTWT6"}, "image_id": 503, "id": 8671}, {"iscrowd": 0, "category_id": 1, "bbox": [359.0, 274.0, 59.0, 66.0], "area": 1831, "segmentation": {"size": [512, 512], "counts": "Tic51n?2N2O1N2N2N2N2N2N2N2N2N3M2N2N2O1ZAQOX>Q1fAQOX>Q1fAROW>P1gAROW>P1gAROX>o0fASOZ>Z10000000000000001O0001N1N2N2N2N2N2N2N2N2N2N2N2N3M2O1N2N2N2N2N2N2N2N2N2N3MWf^1"}, "image_id": 503, "id": 8672}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 276.0, 2.0, 3.0], "area": 4, "segmentation": {"size": [512, 512], "counts": "d83n?N\\gn7"}, "image_id": 503, "id": 8673}, {"iscrowd": 0, "category_id": 1, "bbox": [14.0, 278.0, 52.0, 50.0], "area": 1284, "segmentation": {"size": [512, 512], "counts": "aY72m?2N2O1N2N3M2N2EBQA`0m>BQA`0n>APAa0n>AQA`0m>Q1N2N2N2N2001O000000000N2N2N2N2N00000000002N2O1N3M2N2N2N2N2N2N2N2N2O2M2N2Nncl5"}, "image_id": 503, "id": 8686}, {"iscrowd": 0, "category_id": 1, "bbox": [503.0, 364.0, 9.0, 18.0], "area": 89, "segmentation": {"size": [512, 512], "counts": "ckk72m?2N2N2O1N3M2N2N2cD"}, "image_id": 503, "id": 8687}, {"iscrowd": 0, "category_id": 1, "bbox": [463.0, 388.0, 49.0, 52.0], "area": 1205, "segmentation": {"size": [512, 512], "counts": "dlW71n?2N2N2N2N3M2N2O1N2N2N2N2N2N2N2N2N2O2O00000000000000000001O0001O00N2N2N2N2N2O1N2N2N2N3M2N2N2N2N2NmB"}, "image_id": 503, "id": 8688}, {"iscrowd": 0, "category_id": 1, "bbox": [113.0, 400.0, 68.0, 58.0], "area": 1856, "segmentation": {"size": [512, 512], "counts": "dmh12m?2O1N2N2N2N2N2N2EARAa0l>ARAa0l>ARAa0m>@QAb0m>;M2N2N2N2N2N2N00000000000001O000000101N2N2N2N3M2N2N2N2N2N2N2N2N0001O0000000000000001O0000002N2N2O1N3M2N2N2N2NVSU5"}, "image_id": 503, "id": 8689}, {"iscrowd": 0, "category_id": 1, "bbox": [174.0, 417.0, 27.0, 28.0], "area": 376, "segmentation": {"size": [512, 512], "counts": "\\]g21n?2N2N2O1N2N2N2N3M2N2N2N20000001M2N2N2N2N2O1N2N2N3M2N`Rk4"}, "image_id": 503, "id": 8690}, {"iscrowd": 0, "category_id": 1, "bbox": [168.0, 422.0, 122.0, 90.0], "area": 3411, "segmentation": {"size": [512, 512], "counts": 
"o_d21n?1O100O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O000010O0000000000000000010O0000000000000000010O0000000000000000010O000000000000000001O01O0000000000000000010O000000002N2N2N3M2O0O00000000002N2N2N2O1N2N3I_@Jc?46N2O1N^b^3"}, "image_id": 503, "id": 8691}, {"iscrowd": 0, "category_id": 1, "bbox": [429.0, 425.0, 53.0, 50.0], "area": 1351, "segmentation": {"size": [512, 512], "counts": "jmf62m?2N3N1N2N2N2N2N2N2N3M2N2O1N2N2N2N2N2N3N10000001O000001OO1N2N01O01O002N2N2N2N2N2N2O2M2N2N2N2N2N2N2N2O2M2Nla>"}, "image_id": 503, "id": 8692}, {"iscrowd": 0, "category_id": 1, "bbox": [147.0, 439.0, 49.0, 67.0], "area": 1572, "segmentation": {"size": [512, 512], "counts": "^oY21n?2O1\\ONWA4g>OVA3h>OVA3h>OVA3h>OVA4g>NWA4g>NWA4g>NWA4g>c0O01O0000CkNPBU1P>mNnAS1R>oNlAR1S>POkAP1U>ROiAn0W>TOgAl0Z><00000000001O2N2O1N2N2N2N2N2N2N3M2ZOo@:S?Do@:S?Do@;R?CPA;R?CPA;\\?N3M2N2N2NZam4"}, "image_id": 503, "id": 8693}, {"iscrowd": 0, "category_id": 1, "bbox": [491.0, 457.0, 21.0, 23.0], "area": 268, "segmentation": {"size": [512, 512], "counts": "`ne73l?2N2N2N2N2N2N2O1O2O000001N1N2N2N2N2N2O1N\\A"}, "image_id": 503, "id": 8694}, {"iscrowd": 0, "category_id": 1, "bbox": [390.0, 461.0, 52.0, 51.0], "area": 1505, "segmentation": {"size": [512, 512], "counts": "k^S62m?2N2N2N2N3N1N2N2N2N2N2N2N3M2O1N2O1000001O00000001O00O1N3M2N2N2N1O100O1O11O1\\O`AFa>8aAG`>8aAGa>6aAI`>5cAI^>5dAJ]>4eAK\\>3fAL[>2gAMZ>1hANY>0iANZ>0hANY>0iANY>0laR1"}, "image_id": 503, "id": 8695}, {"iscrowd": 0, "category_id": 1, "bbox": [441.0, 483.0, 32.0, 29.0], "area": 491, "segmentation": {"size": [512, 512], "counts": "bol61n?2N2N2N2O2M2N2N2N2N2N2N2O0O1O1O1O11N3M2N2N2N2O1N2N3M2N2N2N2N2O]Pc0"}, "image_id": 503, "id": 8696}, {"iscrowd": 0, "category_id": 1, "bbox": [364.0, 484.0, 41.0, 28.0], "area": 669, "segmentation": {"size": [512, 512], "counts": "c_f51n?3M2N3N1N2N3N1N3M100O1O1O100O1O100O1O100O11O2N1O1O2N1O2N1O2N1O1O2N1O2N1O2N1O1O2NQPe1"}, "image_id": 503, "id": 8697}, {"iscrowd": 0, "category_id": 1, "bbox": [7.0, 503.0, 16.0, 9.0], "area": 78, "segmentation": {"size": [512, 512], "counts": "oo31n?1O1O1O1O1O1O1O11O1O1O1O1O2N1OQPd7"}, "image_id": 503, "id": 8698}, {"iscrowd": 0, "category_id": 1, "bbox": [40.0, 0.0, 74.0, 38.0], "area": 1430, "segmentation": {"size": [512, 512], "counts": "RPd01m?201O1O001O1O001O1O001O1O001O1O001O1O001O1O001O1O001O1O1O001O1O001O1O0000N2001O1O001O1O001O1O1O00100O01000O01000O0100O0N3N2M2O2M3N1N3N2M2O2M3N2M2OaoV6"}, "image_id": 504, "id": 8699}, {"iscrowd": 0, "category_id": 1, "bbox": [143.0, 0.0, 63.0, 73.0], "area": 2780, "segmentation": {"size": [512, 512], "counts": "l`W22l?2b@MW>8hANNMX>7hAMOMX>8fANOMZ>5eA1NL]>4bA2OM^>1bAb0^>a0O1O001mA^Nf=b1XBaNg=`1VBbNj=^1UBdNj=]1SBfNl=g101O010O010O10O010O10O010O01M2N002N2O2M2N3M2O1N1O00011N2N3M2O2M2N3M3N1N3M2N3N1N3M2N3N2M2N3M2Oeoh4"}, "image_id": 504, "id": 8700}, {"iscrowd": 0, "category_id": 1, "bbox": [317.0, 0.0, 72.0, 61.0], "area": 2342, "segmentation": {"size": [512, 512], "counts": "P`n42n?1O2Y@L^?5`@N^?4_@N;Lg>>XADf>>WADh>=VAEi>;UAFj>i0O2N1O1O2N1O1O2N1O1O0000O1O100O1O1O100002O0010M2N2O2M200010O0010O00010O01O01M2O2M1O10O00010O0001O3N1N3M2O1N3M2N3N1N3M2O1N3Mg^m1"}, "image_id": 504, "id": 8701}, {"iscrowd": 0, "category_id": 1, "bbox": [382.0, 0.0, 34.0, 15.0], "area": 256, "segmentation": {"size": [512, 512], "counts": "PPo51o?001O001O1O001O001O001O001O1O001O001O001O1O001O001ON2N2N2O1N2N2N2OQP`1"}, "image_id": 504, "id": 8702}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 5.0, 78.0, 69.0], "area": 2891, "segmentation": {"size": [512, 512], 
"counts": "oPi61n?2M2O2N2N2N2N2N2N1O2N2N2M3N2N1100000O1N2N2N1O2N2M3N200000O1000000000N1O2N2N2N2N2N2M2O2N0001O2N1O2N20000000O01000000000O10O100fNjAi0V>UOQBe0P>YORBd0Q>ZOPBe0Q>ZOQBd0Q>ZOQBd0g>N2N2M3N1O2N2N2N2M]N"}, "image_id": 504, "id": 8703}, {"iscrowd": 0, "category_id": 1, "bbox": [98.0, 38.0, 10.0, 11.0], "area": 64, "segmentation": {"size": [512, 512], "counts": "^Qa11m?3L3N2O2O00O2L3N2MlnY6"}, "image_id": 504, "id": 8704}, {"iscrowd": 0, "category_id": 1, "bbox": [221.0, 42.0, 68.0, 79.0], "area": 2765, "segmentation": {"size": [512, 512], "counts": "Xc^32l?3M2N3M2N3M2N3M2N3M2N3M2N3M2N2AkNQBX1l=jNRBX1l=kNQBX1k=kNSBW1k=lNRBW1k=`0N3M2N30O010O010O010O0O2M2N3M2N2N3M2N3M2N3M2N3M2N3jN\\Al0f>RO\\Am0l>N30RAVOd>OWAh03[Oi>f0TA]Ok>c0UA]Ol>c0TA]Ok>c0UA]Ol>c0TA\\Ol>d0TAZOo>i02K6M2N3M2N3M2N3M2N3Mj]_3"}, "image_id": 504, "id": 8705}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 59.0, 157.0, 159.0], "area": 15196, "segmentation": {"size": [512, 512], "counts": "V3h0g1D\\;>bDD\\;?aDD\\;>bDD\\;?bDC\\;>bDD\\;?jClN3h0PkClN4g0PkClN3h0PkClN0k0S<1QBOP>NRB1o=LSB5m=IUB6m=FVB:j=DWBM2N3L3N3L4M2MY\\d0"}, "image_id": 504, "id": 8707}, {"iscrowd": 0, "category_id": 1, "bbox": [297.0, 77.0, 75.0, 84.0], "area": 2840, "segmentation": {"size": [512, 512], "counts": "bdd42l?2N3N1N3M2N3M2N3M2N3YOYORBh0l=ZORBi0k=ZORBi0k=ZORBh0m=YOQBj0l=YOQBi0m=YOQBj0l=f0N3M2N3M210O010O01N1N3M2O2M2N3M2N3M2N3M2N3N1N3M2N00001O000001O000003M2N3M3N110O010O010O010N1N3M2O2M2N3M3M2N3M2N3NWmU2"}, "image_id": 504, "id": 8708}, {"iscrowd": 0, "category_id": 1, "bbox": [344.0, 129.0, 82.0, 73.0], "area": 3090, "segmentation": {"size": [512, 512], "counts": "ZU\\51m?2M4M2M3N3L3N3N11O010O01O01O01f@\\OV?g01O010O01O01O01N1N3L3N2M4M2M4L3N2M4M2M4O01O010O01O01O010O01O01O010O01O01O010O01O01O010O0ROmA0R>OoA1R>OnA0R>0nA1R>NnA2R>KRB5m=IUB7l=EXB:h=DZB=f=_O]Ba0c=]O`Bc0_=ZOdBf0]=WOfBh0Y>N2M4M2M4M2M3N3L3N`jZ1"}, "image_id": 504, "id": 8709}, {"iscrowd": 0, "category_id": 1, "bbox": [433.0, 145.0, 68.0, 77.0], "area": 2752, "segmentation": {"size": [512, 512], "counts": "meh61m?3M2N3M2N3M2a@BZ?d0M2O2O010O010O010O0O2M2N3M3M2O2M2O20O010OO2N1N3M2N3M2N3N1N3M2N300O010O010O010O010O010O01QOUBHj=6XBJi=4YBLf=5YBKh=WOVBe044e=UOYBe046k=GXB6j=DkAE>d0j=D\\B:f=D]B9f=D\\B:f=D]B9f=D\\B:g>M3N1N3MjY5"}, "image_id": 504, "id": 8710}, {"iscrowd": 0, "category_id": 1, "bbox": [451.0, 217.0, 28.0, 29.0], "area": 441, "segmentation": {"size": [512, 512], "counts": "[gQ71m?3N1N3M2N3N1N3M3N1N3N1100O0100O010OO2M3M2O2M2N3M2O2M3MoX`0"}, "image_id": 504, "id": 8711}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 293.0, 33.0, 82.0], "area": 1663, "segmentation": {"size": [512, 512], "counts": "`9n1Q>2N2M3N1O2M3N2M201000mM`Bd1`=[NbBe1^=XNeBf1]=XNdBg1^=WNdBg1]=WNfBf1]=XNdBg1l=M3QOeA6]>HeA6\\>IeA5^>HeA6]>HeA6]>HdA6^>HeA6]>HeA6]>HeA5^>HdA7]>HeA6T?M3N1NWU_7"}, "image_id": 504, "id": 8712}, {"iscrowd": 0, "category_id": 1, "bbox": [173.0, 0.0, 47.0, 30.0], "area": 787, "segmentation": {"size": [512, 512], "counts": "Y`f22m?2N2M3N1O2N2N2N2O1O001O1O1O1O1O1O001O1O1O1O1OO1N2O1O1O1O1N2O1O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2OQPb4"}, "image_id": 506, "id": 8713}, {"iscrowd": 0, "category_id": 1, "bbox": [376.0, 0.0, 71.0, 38.0], "area": 1503, "segmentation": {"size": [512, 512], "counts": "PPl51o?1O1O1O1O1O1O001O1O1O1O1O1O1O1O1O1O1O1O001O1O1O1O1O1O1O1O1O1O1O1O001O1O1O1O1O1O1OO1O1O1O1O1O1O1N2001O1O001OO1N2O1O2N2N2N2N2N2N2N2N1O2M3N2N2Ne_P1"}, "image_id": 506, "id": 8714}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 12.0, 25.0, 46.0], "area": 635, "segmentation": {"size": [512, 512], "counts": 
"<[1f>000000O0O2M3N2N2N2N2N2N2N2N2N1N3N2N2N2N2N2N2N2No^c7"}, "image_id": 506, "id": 8715}, {"iscrowd": 0, "category_id": 1, "bbox": [343.0, 16.0, 75.0, 63.0], "area": 2293, "segmentation": {"size": [512, 512], "counts": "`a[51n?2HO^@3`?O^@3`?7O2HAl@a0R?@m@b0Q?@m@b0Q?8N2M100000001O2N2N2N200^AhN[>W1dAkN\\>U1aAnN_>Y1100000O1000000000000000O1O1N2N1N3N2N2N2N2N00O10000000O1000000001100N1O2N2N2N2N2N2N2O100O1N2N1O2N2N2N2Nen^1"}, "image_id": 506, "id": 8716}, {"iscrowd": 0, "category_id": 1, "bbox": [409.0, 33.0, 72.0, 79.0], "area": 2956, "segmentation": {"size": [512, 512], "counts": "kb\\61n?2N2N2N2N2N2N2N1O2N2M3N2N2N2N2N2[OQOVBQ1h=QOVBQ1h=QOVBQ1h=QOVBQ1h=QOVBQ1h=QOVBP1i=QOUBP1k=ROSBn0l=UORBj0o=c0000000000000000000000O2O2N2N2O10000000000000N2N0O10O10000XOlAET>;nACR>=PBAQ>>QB@Q>>QB@Q>=RBAP>=RBAP>=RBAP>9aAAa04P>9aAAa03Q>:UBDl=:VBEl=9o0N2N2N2Nd^?"}, "image_id": 506, "id": 8717}, {"iscrowd": 0, "category_id": 1, "bbox": [1.0, 50.0, 23.0, 24.0], "area": 268, "segmentation": {"size": [512, 512], "counts": "Qb01n?2N1O2M3N2N2N2N2N2N0O100001N3N2N2N2N2N1O2M3NXnc7"}, "image_id": 506, "id": 8718}, {"iscrowd": 0, "category_id": 1, "bbox": [89.0, 56.0, 18.0, 18.0], "area": 179, "segmentation": {"size": [512, 512], "counts": "Qb\\12m?2N2N2N2N2N2N1N3O01N2N2N2N2N2N2N2NQ^Z6"}, "image_id": 506, "id": 8719}, {"iscrowd": 0, "category_id": 1, "bbox": [61.0, 58.0, 28.0, 28.0], "area": 425, "segmentation": {"size": [512, 512], "counts": "Xbn02m?2N1O2N2N2N2N2N2M3N2N2N2N1010O1N2N2N1N3N2N2N2N2N2N2N2Nk]c6"}, "image_id": 506, "id": 8720}, {"iscrowd": 0, "category_id": 1, "bbox": [13.0, 62.0, 53.0, 52.0], "area": 1378, "segmentation": {"size": [512, 512], "counts": "cb62m?2N2N2N2N2N3L3N2N2N2N2N2N2N1N3N2N2N002O100000000O010000000000000OO2N2M3N2N2N2N2N2N1O2M3N2N2N2N2N2N2N1N3NWmn6"}, "image_id": 506, "id": 8721}, {"iscrowd": 0, "category_id": 1, "bbox": [296.0, 66.0, 71.0, 78.0], "area": 2831, "segmentation": {"size": [512, 512], "counts": "YSd42m?2N2N2N2CHo@:o>Ho@:o>Ho@:o>Ho@:o>Ho@:o>=N2N2\\AlNY>V1eAlNX>W1fAjNY>W1fAkNY>V1eAlN[>]100000000O10000N2N2N2N2N2N0000O10000000000000O100001O2HoA^NS>`1oA^NS>`1oA^NS>`18N2N2M3N2N2N2N2N2N2N2N1O2N2N2N2N2N2N2N2N2N2N2MV]X2"}, "image_id": 506, "id": 8722}, {"iscrowd": 0, "category_id": 1, "bbox": [461.0, 69.0, 51.0, 85.0], "area": 2317, "segmentation": {"size": [512, 512], "counts": "RdV72m?2N2N2N2M3N2N2N2N1O2N2N2N2N2DSObAo0\\>RObAQ1[>ROcAP1[>ROcAP1[>ROcAP1[>m0^AUOb>k0\\AWOd>i0YAYOh>o01EXA[Oj>c0WA\\Ok>b0WA\\Ok>b0WA[Ol>c0:N2M2O2N2N2N2N2N2NS\\R6"}, "image_id": 506, "id": 8725}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 112.0, 40.0, 49.0], "area": 1226, "segmentation": {"size": [512, 512], "counts": "e3S1k>3N1O2N2O1000000O010000N2M3N1O00O1000O1000001O2N2M3N2N1O2N2N2M3N2N2N1O2N2N2M3NSl[7"}, "image_id": 506, "id": 8726}, {"iscrowd": 0, "category_id": 1, "bbox": [337.0, 117.0, 72.0, 77.0], "area": 2485, "segmentation": {"size": [512, 512], "counts": "\\eX51o?2M3N1N3M2O2M2oNA^Bb0_=@`Bb0^=@_Bb0_=A^Bb0_=@`Ba0_=AlAF?l0b=D\\Bo00010O00010O010OFQBdNo=\\1SBbNn=^1TB`Nk=`1WB^Ni=b1;0O010O02N2O2M2N3N2M2N3N1N3N1N3M3N1N1O10O01O01O010O000101N2N3N1N3M2O2M3N1N_[c1"}, "image_id": 506, "id": 8727}, {"iscrowd": 0, "category_id": 1, "bbox": [30.0, 143.0, 15.0, 16.0], "area": 129, "segmentation": {"size": [512, 512], "counts": "gT?3l?2N2N2N2N2N000001O2N2N2N2N2N][Y7"}, "image_id": 506, "id": 8728}, {"iscrowd": 0, "category_id": 1, "bbox": [101.0, 148.0, 58.0, 53.0], "area": 1567, "segmentation": {"size": [512, 512], "counts": 
"\\eb11n?2N2N2N2N1N3N2N2N2N2N2N2N2N2M3N1O2N2N2N2N2N20000000O01000000000O1N2N2N0O2O2N2N20O1000N2N2N2N2N2N4K3N2N2N1O2N2N2N2N^Z`5"}, "image_id": 506, "id": 8729}, {"iscrowd": 0, "category_id": 1, "bbox": [12.0, 157.0, 23.0, 23.0], "area": 254, "segmentation": {"size": [512, 512], "counts": "[U61n?2N2N2N3M2N2N2N1O01O00000000002N2N2N2N3N1N2NlZ^7"}, "image_id": 506, "id": 8730}, {"iscrowd": 0, "category_id": 1, "bbox": [477.0, 166.0, 14.0, 15.0], "area": 105, "segmentation": {"size": [512, 512], "counts": "[e^72m?2M3N2N2O0101O00000N2N2N2NbZ:"}, "image_id": 506, "id": 8731}, {"iscrowd": 0, "category_id": 1, "bbox": [156.0, 178.0, 58.0, 54.0], "area": 1556, "segmentation": {"size": [512, 512], "counts": "YV^21n?2N2N2N2N2N2M3N2N2N2N1O2N2N2N2N2N2M3N2N2N2O10O10000000O1000000000N2N2N2N2M3N2N11000O1O1N2N2N2N2N2N2N1O2M3N2N2N2N2N_id4"}, "image_id": 506, "id": 8732}, {"iscrowd": 0, "category_id": 1, "bbox": [400.0, 178.0, 71.0, 67.0], "area": 2568, "segmentation": {"size": [512, 512], "counts": "nVX62n?2M3N3L3N2M3N2M3N2POZOSBN1k0j=YOSBN1i0k=]OPBN1f0o=^OnAN1c0Q>BkAN1a0T>l0010O010O010O010O010O10O10O010O010O2O2M3N2M4M2M10O010O010O0102N2M3N2M4M0O010O010O010O010O102M3N3L3N2N2M3N2M3N2MZYd0"}, "image_id": 506, "id": 8733}, {"iscrowd": 0, "category_id": 1, "bbox": [38.0, 217.0, 7.0, 8.0], "area": 30, "segmentation": {"size": [512, 512], "counts": "lVc01n?2M3O1000M3NUYY7"}, "image_id": 506, "id": 8734}, {"iscrowd": 0, "category_id": 1, "bbox": [69.0, 217.0, 33.0, 34.0], "area": 570, "segmentation": {"size": [512, 512], "counts": "XgR12m?2N2N2N2N2N3M2N2O1N2N2N2N2N2N2N20N2N2N2N2N3M2N2N2N2N2N2N2N2N2N2Ngh\\6"}, "image_id": 506, "id": 8735}, {"iscrowd": 0, "category_id": 1, "bbox": [137.0, 233.0, 44.0, 44.0], "area": 975, "segmentation": {"size": [512, 512], "counts": "ogT22m?2N2N2N2N1O2M3N2N2N2N2N2N2N2N2N1N3N2N2N200000O1N2M3N2N2N1O2N2N2N2N2M3N2N2N2N1O2N2N2N2NSXU5"}, "image_id": 506, "id": 8736}, {"iscrowd": 0, "category_id": 1, "bbox": [466.0, 233.0, 16.0, 24.0], "area": 178, "segmentation": {"size": [512, 512], "counts": "_WY71n?2N2N2N2^@IX?9f@JW?8g@JZ?>000N2O1Bc@6_?Hc@6e?N2N2NXh>"}, "image_id": 506, "id": 8737}, {"iscrowd": 0, "category_id": 1, "bbox": [482.0, 237.0, 30.0, 65.0], "area": 925, "segmentation": {"size": [512, 512], "counts": "mWa71n?2N2N3M2O1N2N3M2N2N2O1N3M2O100N3M2N2O1N2bAjNo=Y1nAiNP>Y1nAiNR>W1lAlNS>T1kAnNU>S1iAnNW>^10010O0ZH"}, "image_id": 506, "id": 8738}, {"iscrowd": 0, "category_id": 1, "bbox": [88.0, 240.0, 21.0, 21.0], "area": 214, "segmentation": {"size": [512, 512], "counts": "lW\\12m?2N2N2N2N3M2OO0000000001O01O2N2N2N2N2N2N[XY6"}, "image_id": 506, "id": 8739}, {"iscrowd": 0, "category_id": 1, "bbox": [180.0, 243.0, 58.0, 54.0], "area": 1570, "segmentation": {"size": [512, 512], "counts": "[Xj21n?2N2M3N2N2N2N2N2N2N1O2N2M3N2N2N2N2N2N2N2N1O200000000000000O1000O1N2N2N2N2N2N2N2O01000N2N2N2N2N2N2N2M3N1O2N2N2N2N2N^gX4"}, "image_id": 506, "id": 8740}, {"iscrowd": 0, "category_id": 1, "bbox": [339.0, 245.0, 80.0, 72.0], "area": 2991, "segmentation": {"size": [512, 512], "counts": "SiY52n?4L5K4K5L5K2N000O0100000O0100000O010000000\\OXOoAh0R>\\OjAd0V>AeA?i=YO^B312M3N2M3N2M2OO010O010O010O0010O011N0hA_NR>b1nA`NP>_1PBdNm=]1SBdNl=[1TBeNl=\\1SBeNm=Y1UBgNj=[1TBeNm=]1RBbNP>^1oA`NT>`1lA^NV>b1jA[NY>e1200000OO2M3N2M3N2M3N2M3M3N2M3N2M2O2M3N2M3N2M3N2M3NdU2"}, "image_id": 506, "id": 8743}, {"iscrowd": 0, "category_id": 1, "bbox": [160.0, 277.0, 9.0, 10.0], "area": 53, "segmentation": {"size": [512, 512], "counts": "iX`23l?2N2M2100O0N3N2NXW[5"}, "image_id": 506, "id": 8744}, {"iscrowd": 0, "category_id": 1, "bbox": 
[144.0, 280.0, 11.0, 10.0], "area": 61, "segmentation": {"size": [512, 512], "counts": "kXX22m?2N1O2O100O10O1N2M3NTWb5"}, "image_id": 506, "id": 8745}, {"iscrowd": 0, "category_id": 1, "bbox": [148.0, 293.0, 11.0, 10.0], "area": 57, "segmentation": {"size": [512, 512], "counts": "YYZ22m?2N1O2N2000O1N2N2N1OgV`5"}, "image_id": 506, "id": 8746}, {"iscrowd": 0, "category_id": 1, "bbox": [359.0, 313.0, 68.0, 56.0], "area": 2383, "segmentation": {"size": [512, 512], "counts": "Wjc51o?4L4K6b@Em>?o@Ec>JaAm0_>XO\\Ag0e>=M4K5L000000O0100000O010004LO2O4L3M0O104L2NO0100000O01000001O4K5LO1000O10O1000O10O1000O10O1000LQAWOo>i04O0100000O011O4L4K6K4L4LRUZ1"}, "image_id": 506, "id": 8747}, {"iscrowd": 0, "category_id": 1, "bbox": [209.0, 317.0, 58.0, 54.0], "area": 1601, "segmentation": {"size": [512, 512], "counts": "djX32m?2N2N2N2N2N2N1O2M3N2N2N2N2N2N2N2N2N2N1N3N2O100O10000000000000O100O1M3N1O2N2N2N2O10000O1N2N2N2M2O2N2N2N2N2N2N2N2N2NTUj3"}, "image_id": 506, "id": 8748}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 327.0, 66.0, 66.0], "area": 1986, "segmentation": {"size": [512, 512], "counts": "`:c0\\?2N2N2N2N2N2N1O0001O2O100000001O00000001O0N2N2N2N2N2O1000000000000000000000000001O000000000O1N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2O1N2N2N\\dn6"}, "image_id": 506, "id": 8749}, {"iscrowd": 0, "category_id": 1, "bbox": [171.0, 329.0, 30.0, 30.0], "area": 497, "segmentation": {"size": [512, 512], "counts": "hje21n?2M3N2N2N2N1O2M3N2N2N2N2N10100000O0O00O03N2N2^Oh@9Z?Eg@:a?N2M3N2N2NYUk4"}, "image_id": 506, "id": 8750}, {"iscrowd": 0, "category_id": 1, "bbox": [440.0, 330.0, 13.0, 13.0], "area": 86, "segmentation": {"size": [512, 512], "counts": "^Zl61n?2O1N3M2O2M20O1N3M2N3N1N_Um0"}, "image_id": 506, "id": 8751}, {"iscrowd": 0, "category_id": 1, "bbox": [242.0, 360.0, 33.0, 34.0], "area": 570, "segmentation": {"size": [512, 512], "counts": "k[i32m?2M3N2N2N2N2N2N1O2N2N2N2N2N2N0O1001O2N2N2M3N2N2N2N2N2N2N1O2N2N2N\\Tf3"}, "image_id": 506, "id": 8752}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 380.0, 12.0, 41.0], "area": 241, "segmentation": {"size": [512, 512], "counts": "l;Y1h>IZAROh>k0ZASOh>k08Am@NU?0m@NU?0m@NU?0m@NU?0m@NU?0m@NU?1>Nici7"}, "image_id": 506, "id": 8753}, {"iscrowd": 0, "category_id": 1, "bbox": [59.0, 395.0, 68.0, 71.0], "area": 2443, "segmentation": {"size": [512, 512], "counts": "jlm02m?2N2N2N3M2N2N2N2O1N2N2N2N2N2N2N2O2O000000000000000N2N2N2O110O0000000O1N2O10000001O0001M2N2N2N2N2N2N2OO1O1O1O1O1VOiAJY>4iAJY>4jAJV>5lAIV>5lAIV>HaA4;2_>LcA2_>LcA2_>LcA2_>LcA2_>LcA2W?NQRP6"}, "image_id": 506, "id": 8754}, {"iscrowd": 0, "category_id": 1, "bbox": [265.0, 396.0, 16.0, 11.0], "area": 94, "segmentation": {"size": [512, 512], "counts": "alT42m?4M1O0O0100O01000O01000O012M4M_Sc3"}, "image_id": 506, "id": 8755}, {"iscrowd": 0, "category_id": 1, "bbox": [465.0, 420.0, 47.0, 72.0], "area": 2121, "segmentation": {"size": [512, 512], "counts": "]nX72n?3_OMo@5o>Mn@7n>Mo@6n>Mo@53Bd>o0YASOd>X1M2M2O00O010O10O10GaAPOS>LUBT1LmNl=5UBn01jNh=>TBh0T>a001000O010O01000O2O0O2O000O3N0O01000O010O012N2M4M2N3L`B"}, "image_id": 506, "id": 8756}, {"iscrowd": 0, "category_id": 1, "bbox": [386.0, 430.0, 67.0, 82.0], "area": 3156, "segmentation": {"size": [512, 512], "counts": 
"m]Q61n?4M2N3L3N3L3N3M2M4M2M4M2NO0102M3N3fAgNg=[1VBhNg=\\1VBgNg=[1WBgNf=]1VBgNg=k1N2M100O10000O100O10000O100O1001O2N3M2N3M2N3M2N3M2N3L3N3M2CbASOb>j0`ATOb>j0aAROc>j01n?2N2N1O2N2N2N2M3N2N2N2N2N2N2N2N10100O10000000000000000000O01N2N2N2M3N2N2`AbN[>c1N2N2O1000000000000O0100000O1N2N2N2N2M3N2N0001O2N2N2N2N2N2N2]OjAYOX>e0jAYOX>e0iAYOY>b0eATO48Y>b0mA\\OU>b0mA\\OU>b0mA\\OU>b0mA\\OU>b0f0M3N2N2N2N2N2N1O2NfQe6"}, "image_id": 506, "id": 8758}, {"iscrowd": 0, "category_id": 1, "bbox": [455.0, 470.0, 18.0, 28.0], "area": 242, "segmentation": {"size": [512, 512], "counts": "inS71o?3L3N3M2M3N3L3N3O01O0N3L3N3M2M4M2MePc0"}, "image_id": 506, "id": 8759}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 480.0, 39.0, 32.0], "area": 754, "segmentation": {"size": [512, 512], "counts": "\\?d0[?1N2O1N2O1N2O1N21O1O001O1O001O1O001O1O0]Oi@=X?Aj@>]?O001O1O001O1O001O1O001O1O0O2N2M2OWP\\7"}, "image_id": 506, "id": 8760}, {"iscrowd": 0, "category_id": 1, "bbox": [233.0, 484.0, 63.0, 28.0], "area": 1027, "segmentation": {"size": [512, 512], "counts": "mod31n?2N2N100O1O1O1O1O1O1O1O1O1O12N1O1O00O1O1O1O1O1O1O1O1O1O1O1O1001O1O1O1O1OO1O1O1O1O1O1O1O1O11O1O1O1O1O1O1O1O1N2N2N2N3M2N2N2N2N[`[3"}, "image_id": 506, "id": 8761}, {"iscrowd": 0, "category_id": 1, "bbox": [81.0, 0.0, 42.0, 29.0], "area": 636, "segmentation": {"size": [512, 512], "counts": "[`X12m?2N2N2N2N2N2N2O1N00000010O1O1O1O11O1O1O1O1O1O2N1O1000000001O0N2N2N2N2N2N2N2O1N3M2N__R6"}, "image_id": 507, "id": 8762}, {"iscrowd": 0, "category_id": 1, "bbox": [185.0, 0.0, 51.0, 26.0], "area": 712, "segmentation": {"size": [512, 512], "counts": "P`l21o?1O1O1O1O1O1O1O1O1O1O1OO1O1O11O1O1O1O1O1O1O2N1O1O1O1O1O1O1O1O00O1O1O1O1O1O1O1O1O1O1O1O1O1O2N2N2N2N2NloY4"}, "image_id": 507, "id": 8763}, {"iscrowd": 0, "category_id": 1, "bbox": [458.0, 0.0, 34.0, 22.0], "area": 441, "segmentation": {"size": [512, 512], "counts": "TPU72m?2N2N2N3N1O1O1O1O1O1O1O2N1O1OO1O1O1O1O1O100O1O1O1O1O1O1O1O1O100O1KX@1i_:"}, "image_id": 507, "id": 8764}, {"iscrowd": 0, "category_id": 1, "bbox": [139.0, 9.0, 56.0, 57.0], "area": 1515, "segmentation": {"size": [512, 512], "counts": "k`U22n?1N3M2N2N2N2N2N2N2N2N2N2O2M2N2N2N2N2N2N2O10001O00000001O0000000000000O1N2N2N3M2N2N2N2N2N2N2N2O1N3M2N2N2N2N2N2Nc^n4"}, "image_id": 507, "id": 8765}, {"iscrowd": 0, "category_id": 1, "bbox": [502.0, 11.0, 10.0, 20.0], "area": 110, "segmentation": {"size": [512, 512], "counts": "cPk72m?2N2N2N2N2N2N2O2M2D"}, "image_id": 507, "id": 8766}, {"iscrowd": 0, "category_id": 1, "bbox": [307.0, 29.0, 57.0, 53.0], "area": 1613, "segmentation": {"size": [512, 512], "counts": "nai424Od?3Z@Od?8N2N2O1N2N2N3M2JXOQAj0m>5O01O01O0000002N2N2N2N3N1N001O0000000000001100O1N2N2N3M2N2O1N2N2N2N2N2N2N3M2N2O1N2N2N2N2N2NZnY2"}, "image_id": 507, "id": 8767}, {"iscrowd": 0, "category_id": 1, "bbox": [104.0, 42.0, 48.0, 57.0], "area": 1315, "segmentation": {"size": [512, 512], "counts": "RRd11o?1N3M2N2N2N2N2N2N2N2N2O2M2N2N2N2N2N2N2N2N3M2O1N2N2N2N200000jNgAc0Z>[OhAc0[>ZOgAd0[>ZOgAd0[>[OfAc0\\>[OfAc0\\>[OfAc0\\>[OgAc0Z>[OhAc0l>N2N2N2N2N2N2N2N3M_mc5"}, "image_id": 507, "id": 8768}, {"iscrowd": 0, "category_id": 1, "bbox": [64.0, 46.0, 24.0, 23.0], "area": 267, "segmentation": {"size": [512, 512], "counts": "fQP11n?2N2N2N2N2N2O1N2N2O11O0000000N2N2O1N2N2N2N2N2NUnc6"}, "image_id": 507, "id": 8769}, {"iscrowd": 0, "category_id": 1, "bbox": [318.0, 62.0, 93.0, 87.0], "area": 2527, "segmentation": {"size": [512, 512], "counts": 
"RTo42m?2O1N2N2N2N3M2N2N2N2O1N2N3M2N2N1O000010O000000000000001O0001O000000000001O00000001O00000001O0000000001O000001O000000000000010O00000000000000001O01O00000001O2N2N3M2N2O1N2N2N2N3M2N2N2N2Nc]b1"}, "image_id": 507, "id": 8770}, {"iscrowd": 0, "category_id": 1, "bbox": [271.0, 64.0, 24.0, 26.0], "area": 326, "segmentation": {"size": [512, 512], "counts": "[bW42m?3M2N2O1N2N3M2N2N2O2M000003M2O1N2N2N3M2O1N2N3Md]\\3"}, "image_id": 507, "id": 8771}, {"iscrowd": 0, "category_id": 1, "bbox": [2.0, 68.0, 39.0, 37.0], "area": 776, "segmentation": {"size": [512, 512], "counts": "lR11n?2N3N1FLe@5Z?Md@5Z?Md@4[?9O1O2N1O100O2N1O1O2N1O11O100O10001O0N2N2N2N2N2N2N2N2N2N2N2N2N2NX][7"}, "image_id": 507, "id": 8772}, {"iscrowd": 0, "category_id": 1, "bbox": [59.0, 81.0, 57.0, 45.0], "area": 1325, "segmentation": {"size": [512, 512], "counts": "Pcm01n?2O1N2N2N3M2N2N2O1N2N3M2N2N2O1N2N2N100100O1O2O0O1O100O1O100O001O01O01O01O01O01O01O01O01O01O0001O01ON3K4M3M4L3L4MnlU6"}, "image_id": 507, "id": 8773}, {"iscrowd": 0, "category_id": 1, "bbox": [249.0, 96.0, 45.0, 45.0], "area": 1034, "segmentation": {"size": [512, 512], "counts": "gcl32m?3M2N2N2N2O1N2N2N2N3M2N2N2N2N2O1N2N2N1O000000000002N2N2O1N2N2N2N3M2N2N2N2N2O1N2N2N3M2N2N\\l\\3"}, "image_id": 507, "id": 8774}, {"iscrowd": 0, "category_id": 1, "bbox": [383.0, 102.0, 50.0, 49.0], "area": 1255, "segmentation": {"size": [512, 512], "counts": "jco51n?2O1N2N2N2N2N2N2N3M2N2O1N2N2N2N2N2N3M2N2N2O1N2O100000O1N2O2M2N2N2N2N2N2N2N2N3N1N2N2N2N2N2N2N2N2N3Nn[W1"}, "image_id": 507, "id": 8775}, {"iscrowd": 0, "category_id": 1, "bbox": [19.0, 112.0, 61.0, 51.0], "area": 1536, "segmentation": {"size": [512, 512], "counts": "Ud93l?2M2N3M2O2M2N3M3N1N3M2O2N2O010O010O01000O010O010O01000O010O010O01000O010O01000O010O010O01000O01M2N3N1N3M3M2O2M2N3M2O2M3Mgkg6"}, "image_id": 507, "id": 8776}, {"iscrowd": 0, "category_id": 1, "bbox": [242.0, 129.0, 21.0, 22.0], "area": 243, "segmentation": {"size": [512, 512], "counts": "XTi32m?2N2N2N2N3M2N2O10000001O0O1O1N2N2N2N2N2Nc[l3"}, "image_id": 507, "id": 8777}, {"iscrowd": 0, "category_id": 1, "bbox": [342.0, 132.0, 29.0, 28.0], "area": 355, "segmentation": {"size": [512, 512], "counts": "fT[52m?2O1N2N2N3M2N2N1O01O0000000001O0001O0000012M2N2N2N2N2N3Nc[V2"}, "image_id": 507, "id": 8778}, {"iscrowd": 0, "category_id": 1, "bbox": [396.0, 133.0, 87.0, 80.0], "area": 2586, "segmentation": {"size": [512, 512], "counts": "kUV62m?2N2N2N3M2N2N2N2N2N2N2N2O2M2N2O1000000O1N2N2N00000101N2N2N000000001O000001HPO_AP1a>RO]An0c>UOZAk0f>7001O0000000001O00000001O0000000001O0000000001O0000003M2O1N2N2N2N2N2N2N2N3M2N2O1N2N2N2N2N2NW[>"}, "image_id": 507, "id": 8779}, {"iscrowd": 0, "category_id": 1, "bbox": [271.0, 136.0, 29.0, 29.0], "area": 435, "segmentation": {"size": [512, 512], "counts": "edW42m?2N2N2O1N2N2N2N2N3M2N2N2N2N11O1N2N3M2N2N2N2N2N2N2N2O1N3MYkY3"}, "image_id": 507, "id": 8780}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 151.0, 34.0, 57.0], "area": 1189, "segmentation": {"size": [512, 512], "counts": "S5n0Q?2N2N2N2N2N2N2N2N3N1N2N2N2O10O1N2N2N2oNbA=`>AbA=`>AbA=`>AbA=`>AbA=a>@bA>_>@cA>_>@cA>Q?N2N2N2N2N2N2NTj^7"}, "image_id": 507, "id": 8781}, {"iscrowd": 0, "category_id": 1, "bbox": [204.0, 151.0, 60.0, 53.0], "area": 1506, "segmentation": {"size": [512, 512], "counts": "WUV32m?2N3M2N2N2O1N2N2N2N3M2N2N2N2O1N2N2N3N100000000001O01O0000000000O2M2N2N2N2N2N1O0001O000001O00002N2N3N1N2N2N2N2N2N2N3M2O]jk3"}, "image_id": 507, "id": 8782}, {"iscrowd": 0, "category_id": 1, "bbox": [294.0, 159.0, 23.0, 24.0], "area": 288, "segmentation": {"size": [512, 512], "counts": 
"[Uc43l?2N2N2N2N2O2M2N2N000001O01O02N2N3M2N2N2O1N3MhZQ3"}, "image_id": 507, "id": 8783}, {"iscrowd": 0, "category_id": 1, "bbox": [439.0, 173.0, 73.0, 73.0], "area": 1999, "segmentation": {"size": [512, 512], "counts": "Sgk61n?2N2N3N1N2N2N2N2N2N2N2N2N3M2O1N2N00000000001O0001O0000000000000001O0001O0000000000000001O0001O0000000000000001O0001O00002N2N2N2N2O1N2N3M2N2N2N2NWJ"}, "image_id": 507, "id": 8784}, {"iscrowd": 0, "category_id": 1, "bbox": [267.0, 179.0, 31.0, 31.0], "area": 380, "segmentation": {"size": [512, 512], "counts": "jeU41n?2N2N2O1N2N2N2N2O100000000000001O01O000000000N2N2N2N2N2N2N2NhiZ3"}, "image_id": 507, "id": 8785}, {"iscrowd": 0, "category_id": 1, "bbox": [328.0, 181.0, 32.0, 32.0], "area": 526, "segmentation": {"size": [512, 512], "counts": "TVT51n?2N3M2N2N2N2N2N2O1N3M2N2N2N2N1O02N2N2N2N2N2N2O1N3M2N2N2N2N2N2Nli[2"}, "image_id": 507, "id": 8786}, {"iscrowd": 0, "category_id": 1, "bbox": [162.0, 187.0, 58.0, 54.0], "area": 1435, "segmentation": {"size": [512, 512], "counts": "^Va22l?3N2N2N1N3N2N2N2M3N1O2N2N2M3N1010000000O0100000000O010000000O0100000000O010000000O0O2N2N2N2M2O2N2N2M3N1O2N2N2M3N2NTia4"}, "image_id": 507, "id": 8787}, {"iscrowd": 0, "category_id": 1, "bbox": [380.0, 208.0, 28.0, 29.0], "area": 404, "segmentation": {"size": [512, 512], "counts": "jVn51n?2N2N2N2N3N1N2N2N2N2N2O101O000000N3M2N2N2N2N2N2O1N3M2NPic1"}, "image_id": 507, "id": 8788}, {"iscrowd": 0, "category_id": 1, "bbox": [225.0, 214.0, 26.0, 26.0], "area": 361, "segmentation": {"size": [512, 512], "counts": "Qg`31n?2N2N2N2N2N2N2N3M2O1N2N2O10O1N3M2N2N2N2O1N2N2N2N2NnXR4"}, "image_id": 507, "id": 8789}, {"iscrowd": 0, "category_id": 1, "bbox": [333.0, 216.0, 52.0, 49.0], "area": 1305, "segmentation": {"size": [512, 512], "counts": "[gV52m?2N2N2N3N1N2N2N2N2N2N2N2N2N2N3M2O1N2N2N2N200000000000O2M2N2O1N2N2N2N0000002N2N2N2N2N2O1N2N2N2N2N2N3M2N_Xo1"}, "image_id": 507, "id": 8790}, {"iscrowd": 0, "category_id": 1, "bbox": [477.0, 222.0, 35.0, 54.0], "area": 1119, "segmentation": {"size": [512, 512], "counts": "gg^71n?3M2N2N2N2N2N2N2N2N2O2M2N2N2N2N2N2N2N2N2N3N1N2N2N2N0002N2N2O1N2N2N2NlH"}, "image_id": 507, "id": 8791}, {"iscrowd": 0, "category_id": 1, "bbox": [135.0, 226.0, 33.0, 52.0], "area": 1541, "segmentation": {"size": [512, 512], "counts": "XgS2a0Y?7O000UAFP>U1000000000000000000000001O0001O0000000000000000000000000dNYj[5"}, "image_id": 507, "id": 8792}, {"iscrowd": 0, "category_id": 1, "bbox": [427.0, 234.0, 22.0, 23.0], "area": 251, "segmentation": {"size": [512, 512], "counts": "age63m?1N2N2N2N2N2N3M2000000001N1N2N2N2N2N2N2O2MXXo0"}, "image_id": 507, "id": 8793}, {"iscrowd": 0, "category_id": 1, "bbox": [253.0, 237.0, 14.0, 13.0], "area": 87, "segmentation": {"size": [512, 512], "counts": "bgn31o?1N2N2N2N2N2000N2N2N2N1O2N^Xj3"}, "image_id": 507, "id": 8794}, {"iscrowd": 0, "category_id": 1, "bbox": [391.0, 245.0, 46.0, 45.0], "area": 1007, "segmentation": {"size": [512, 512], "counts": "^hS62m?2N3N1N2N2N2N2N3M2N2N2O1N2N2N3M1O00000001O01O00000000000102M2N2N2N2N2N3M2O1N2N2N2N2N3M2O1NiWU1"}, "image_id": 507, "id": 8795}, {"iscrowd": 0, "category_id": 1, "bbox": [444.0, 247.0, 29.0, 29.0], "area": 403, "segmentation": {"size": [512, 512], "counts": "WXn62m?2N3M2O1N2N2N2N2N3M0001O00000001O0001O3M2N2N2O1N2N2N3M2NoWc0"}, "image_id": 507, "id": 8796}, {"iscrowd": 0, "category_id": 1, "bbox": [256.0, 250.0, 35.0, 35.0], "area": 622, "segmentation": {"size": [512, 512], "counts": "XXP42m?2O1N2N2N2N2N2N2N2N2N2N2N2N2N3N1001O0O1N2N2N2N2N2N2O1N2N2N2N2N2N2N2NdW^3"}, "image_id": 507, "id": 8797}, {"iscrowd": 0, 
"category_id": 1, "bbox": [0.0, 254.0, 1.0, 3.0], "area": 3, "segmentation": {"size": [512, 512], "counts": "n73ogo7"}, "image_id": 507, "id": 8798}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 258.0, 18.0, 44.0], "area": 447, "segmentation": {"size": [512, 512], "counts": "R8\\1e>N2O1N2N3M2N2N2N2O1N3_Oh@5Z?Ih@5Z?Jh@3Z?Kh@3Z?Kh@3d?N2OQgf7"}, "image_id": 507, "id": 8799}, {"iscrowd": 0, "category_id": 1, "bbox": [224.0, 259.0, 31.0, 31.0], "area": 498, "segmentation": {"size": [512, 512], "counts": "aX`31n?3M2N2N2N2N2N2O2M2N2N2N2N2N2N11N2N2N2N2N2O1N3M2N2N2N2N2N2O2M]WP4"}, "image_id": 507, "id": 8800}, {"iscrowd": 0, "category_id": 1, "bbox": [361.0, 259.0, 27.0, 27.0], "area": 387, "segmentation": {"size": [512, 512], "counts": "^hd52m?2O1N3M2N2N2O1N3M2N2N2N2OO001O2N2N2O2M2N2N2N2N3N1N2Nagm1"}, "image_id": 507, "id": 8801}, {"iscrowd": 0, "category_id": 1, "bbox": [302.0, 269.0, 58.0, 46.0], "area": 1729, "segmentation": {"size": [512, 512], "counts": "aYg42f?8H8H8O101O0O1H8N2001O01O000000000001OXAmNc>X101O00000001mN^Af0b>ROfAn0f>00L40000010O000000000000010O000000000000010O00000000N2H8I8GXg[2"}, "image_id": 507, "id": 8802}, {"iscrowd": 0, "category_id": 1, "bbox": [163.0, 272.0, 42.0, 36.0], "area": 761, "segmentation": {"size": [512, 512], "counts": "Qia21n?2N2N2N2N2N3M2N2O1N2N2N2N2N2N3M2N0001O2N2N3M2N2N2O1N2N00000000001O002O1N2N2N3M2N2NVWi4"}, "image_id": 507, "id": 8803}, {"iscrowd": 0, "category_id": 1, "bbox": [194.0, 298.0, 56.0, 55.0], "area": 1397, "segmentation": {"size": [512, 512], "counts": "]ZQ31n?2N2N2N2N2N2N3M2O1N2N2N2N2N2N2N2N2N2N00000001O000001O0000000000000000000002O1N2N2N2N2N2N3M2N2N2N2O1N2N3M2N2N2NSfR4"}, "image_id": 507, "id": 8804}, {"iscrowd": 0, "category_id": 1, "bbox": [372.0, 313.0, 38.0, 37.0], "area": 679, "segmentation": {"size": [512, 512], "counts": "_Zj52m?3M2N2N2N2N2O1N2N2N3M2N1O0000001O01O0000000000001O2N2O1N2N2N2N3M2N2N2N2N2Ojeb1"}, "image_id": 507, "id": 8805}, {"iscrowd": 0, "category_id": 1, "bbox": [276.0, 315.0, 29.0, 29.0], "area": 434, "segmentation": {"size": [512, 512], "counts": "ZZZ41n?2N2N2N2N2N2N2N2N2N2N2N2N2N1O02N2N2N2N2N2N2N2N2N2N2N2N2NiUW3"}, "image_id": 507, "id": 8806}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 321.0, 72.0, 86.0], "area": 2505, "segmentation": {"size": [512, 512], "counts": "\\;[1d>01O00000000001O2N01O000000000GjNiAV1X>kNfAU1Z>mNeAR1[>POcAP1]>90BcA[O\\>e0fAYOZ>g0hAWOX>i0jAUOV>k0lASOT>m0nAQOS>n0oAPOQ>P1?00001O002N2N2N201O0O0O0000000001O2N2O1N2N00001O0000002N2N2O2M2N2N2N2N2N2N2N2N3M2O1N2N^ek6"}, "image_id": 507, "id": 8807}, {"iscrowd": 0, "category_id": 1, "bbox": [336.0, 327.0, 23.0, 28.0], "area": 326, "segmentation": {"size": [512, 512], "counts": "kZX51n?2N2DMh@5V?Mh@5V?Ng@4W?Ng@4W?\\OiAg0U>ZOiAh0U>ZOiAh0U>ZOiAh0U>[OhAg0V>[OhAf0W>\\OgAd0Y>^OfAa0Z>b000001O002O1N2N2N2N2N2N2N3M2N2O1N2N2N2N2N2N3M2N2N2O1N2N2N2NgRd2"}, "image_id": 507, "id": 8817}, {"iscrowd": 0, "category_id": 1, "bbox": [485.0, 402.0, 27.0, 37.0], "area": 528, "segmentation": {"size": [512, 512], "counts": "Pmb72m?2N2N2N2N2O1N2N2N1O00001O2N2N200000000000001M2N2N2O1YC"}, "image_id": 507, "id": 8818}, {"iscrowd": 0, "category_id": 1, "bbox": [357.0, 412.0, 24.0, 22.0], "area": 277, "segmentation": {"size": [512, 512], "counts": "Xmb52m?2N2N2O1N2N2N2N2N1O1O000001O1O2N2N2N3M2O1N2N2NlRQ2"}, "image_id": 507, "id": 8819}, {"iscrowd": 0, "category_id": 1, "bbox": [48.0, 417.0, 111.0, 95.0], "area": 4013, "segmentation": {"size": [512, 512], "counts": 
"f_h02m?3M2N2O1N2N2N2N1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1000000O1O1O1O1O1O00000FfNoAZ1Q>hNmAX1T>iNkAV1U>mNhAS1X>oNfAQ1Z>;O0000000000N201O0001O0000000000000001O01O0000000000000001O0001O000000000001O2N2N2N3N1N2N2N2N2N2N2N2N2N3N1N2N2N2N2N2N2N2N2N3MUR`5"}, "image_id": 507, "id": 8820}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 418.0, 39.0, 73.0], "area": 1833, "segmentation": {"size": [512, 512], "counts": "V=[1e>100aAfNX>Y1fAiNZ>W1dAkN\\>U1cAlN]>]1O0O1N2N2N2N2N2O2M2N2N1O1O0002N2N3M2N2N2N2eNSBa0n=]OTBHFb0X>DUBGEc0Y>C[B:g=EZB9h=EZB9h=EZB9h=EZB9h>N2O1N3MZQ\\7"}, "image_id": 507, "id": 8821}, {"iscrowd": 0, "category_id": 1, "bbox": [327.0, 445.0, 61.0, 48.0], "area": 1609, "segmentation": {"size": [512, 512], "counts": "bnS52n?3L3N2M3N3M2M3N3L3N2M3N000O10O010O010O010O10O10O010O010O01000O010O010O010O10O10O010O010O010O10O10O4M2M3N2M4M2M3N2N3L3N2M[am1"}, "image_id": 507, "id": 8822}, {"iscrowd": 0, "category_id": 1, "bbox": [447.0, 449.0, 55.0, 54.0], "area": 1643, "segmentation": {"size": [512, 512], "counts": "nno61n?2FOb@3\\?Ob@3\\?Oc@2[?0c@2[?SO\\Ao0c>RO[AP1c>600000000000000010O001O2N2N2N2N2N2N2N2O1N3M2N2N2N2N2N2N2N2O2M2N\\ni6"}, "image_id": 512, "id": 8843}, {"iscrowd": 0, "category_id": 1, "bbox": [164.0, 36.0, 29.0, 29.0], "area": 445, "segmentation": {"size": [512, 512], "counts": "aQb21n?2N2N2O1N3M2N2N2N2N2N2N2N2N2OO1O2N2N3M2N2O1N2N2N2N2N2N2N`^o4"}, "image_id": 512, "id": 8844}, {"iscrowd": 0, "category_id": 1, "bbox": [398.0, 45.0, 56.0, 56.0], "area": 1582, "segmentation": {"size": [512, 512], "counts": "mQW62m?2N2N3M2N2N2N2l@Ad>a0ZAAd>a0ZAAd>a0ZAAd>a0ZAAe>`0YABe>n0N2N2N3M200000000000O1N2N3M2N2N2N2N2N2N2N1O01O000001O0000003M2N2N2N2N2N2N2N2O1N2N2N2Nlml0"}, "image_id": 512, "id": 8845}, {"iscrowd": 0, "category_id": 1, "bbox": [140.0, 50.0, 42.0, 39.0], "area": 891, "segmentation": {"size": [512, 512], "counts": "URV22m?2N2N3M2N2O1N2N2N2N2N3M2N1O01O01O2N3M2N2N2O02M2N2O1N2N200N2N3M2N2N2N2O1N2N2N3M2N2NimT5"}, "image_id": 512, "id": 8846}, {"iscrowd": 0, "category_id": 1, "bbox": [305.0, 50.0, 20.0, 20.0], "area": 166, "segmentation": {"size": [512, 512], "counts": "Pbh42m?2N2N2N2N1O0000001O01O00000M]@Jc?74N2N2N2N[^m2"}, "image_id": 512, "id": 8847}, {"iscrowd": 0, "category_id": 1, "bbox": [58.0, 61.0, 56.0, 61.0], "area": 1597, "segmentation": {"size": [512, 512], "counts": "oRm01n?2N2N2X@Kb?;O1N3M2N2N2O100001OO1N2N2_OUOjAn0S>TOkAn0S>TOkAn0S>TOkAn0T>SOjAm0V>UOhAl0W>VOhAi0X>?0000000000003M2N2N2O1N2N2N2N2N2N2N2N3M2N2N2N2O1N2N2N2N2N2N3M2NXmV6"}, "image_id": 512, "id": 8848}, {"iscrowd": 0, "category_id": 1, "bbox": [327.0, 70.0, 32.0, 32.0], "area": 529, "segmentation": {"size": [512, 512], "counts": "fbS51n?3M2N2N2N2N2N2O2M2N2N2N2N2N1O01O00002N2N3M2N2O1N2N2N2N2N3M2N2O[]\\2"}, "image_id": 512, "id": 8849}, {"iscrowd": 0, "category_id": 1, "bbox": [362.0, 70.0, 29.0, 24.0], "area": 333, "segmentation": {"size": [512, 512], "counts": "gRe52n?1N2N2N2N2N1O00000000002N2N2N2OO0000002N2N2N2N2N3M2O1N2Na]l1"}, "image_id": 512, "id": 8850}, {"iscrowd": 0, "category_id": 1, "bbox": [425.0, 82.0, 62.0, 54.0], "area": 1748, "segmentation": {"size": [512, 512], "counts": "^cd63m?5K5J6K5K3M0O01000000O0100000O0100000OFYO\\Ah0c>ZO\\Af0c>\\O\\Ad0c>^O\\Aa0d>A[A?e>BYA?f>>O001O1O001O100O001001O1N2O2N1O1N2N2O1N2N3M2N2O1N2N2N2O1N3M2N2N2O1N2N2Nc\\<"}, "image_id": 512, "id": 8851}, {"iscrowd": 0, "category_id": 1, "bbox": [164.0, 88.0, 12.0, 11.0], "area": 70, "segmentation": {"size": [512, 512], "counts": "nRb21n?2N2N2N2N1O01O2N2N2N2NUmW5"}, "image_id": 512, "id": 8852}, {"iscrowd": 0, "category_id": 
1, "bbox": [264.0, 98.0, 45.0, 61.0], "area": 1640, "segmentation": {"size": [512, 512], "counts": "aTT44j?2N3YOI`A:]>IaA9]>IaA:\\>I`A:]>IaA:\\>IaA9]>I`A;\\>f0N2N30O01O010O01O010O01O010O01O0N3L3N2N3L3N3M2M4M2N2M4M2N3L3N3M2M3N3Mb\\U3"}, "image_id": 512, "id": 8853}, {"iscrowd": 0, "category_id": 1, "bbox": [318.0, 98.0, 28.0, 27.0], "area": 401, "segmentation": {"size": [512, 512], "counts": "_So41n?3M2N2N2N2N2N2N2N2O1N2N2N1O10O3M2N2N2N2N2N2N2N2N2N2N2Nblb2"}, "image_id": 512, "id": 8854}, {"iscrowd": 0, "category_id": 1, "bbox": [93.0, 99.0, 55.0, 62.0], "area": 1688, "segmentation": {"size": [512, 512], "counts": "]d^12m?3M2N2N2O1N2N2N2N2N2N2N2N2GUOZAm0d>UOZAn0c>TO[An0c>8O000000000001O00000001O000000000000000001O2N2N2N1O010O001O2N3M2_Oo@0S?No@0S?LQA2Q?LQA2Q?LQA2`?NUle5"}, "image_id": 512, "id": 8855}, {"iscrowd": 0, "category_id": 1, "bbox": [189.0, 104.0, 32.0, 31.0], "area": 475, "segmentation": {"size": [512, 512], "counts": "kcn22m?2N2N2N2N2N2N2N2N2N2N1O0000000001O0000002N2N2N2N2O1N2N2N2N2N2N^\\a4"}, "image_id": 512, "id": 8856}, {"iscrowd": 0, "category_id": 1, "bbox": [149.0, 117.0, 12.0, 10.0], "area": 52, "segmentation": {"size": [512, 512], "counts": "kcZ21n?2N2N2N02N0000011N2O1NY\\_5"}, "image_id": 512, "id": 8857}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 119.0, 30.0, 31.0], "area": 471, "segmentation": {"size": [512, 512], "counts": "TTR62n?1N3M2N2N2N2N2O1N3M2N2N2N2N2N03M2N2O1N2N2N3M2N2N2N2O1N2N3Mik^1"}, "image_id": 512, "id": 8858}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 129.0, 50.0, 52.0], "area": 1368, "segmentation": {"size": [512, 512], "counts": "nTW72m?2N3N1N3M2O1N3M2O2M2N3N1N3M2O1N00010O00010O00010O00010O00010O0001O01O01O01O01O01O01O010O2N3N1N3M2OhK"}, "image_id": 512, "id": 8859}, {"iscrowd": 0, "category_id": 1, "bbox": [146.0, 146.0, 57.0, 63.0], "area": 1669, "segmentation": {"size": [512, 512], "counts": "mUY22m?2O1N2N3M2N2N2N2N2N2N2O1N3M2N2N2_OoNPBR1o=POoAP1Q>ROmAo0S>ROkAn0U>TOiAl0W>WOgAh0Y>ZOeAf0[>?00000000010O00001O2N2N2N2N3M2O1N2N2N2N2N2N2N3M2N2O1N2N2N2N2N2N3MeZj4"}, "image_id": 512, "id": 8860}, {"iscrowd": 0, "category_id": 1, "bbox": [218.0, 151.0, 31.0, 30.0], "area": 479, "segmentation": {"size": [512, 512], "counts": "TU]32n?1N2N2N2N2N2N2N2N3M2N2N2N2N2OO1O2O1N2N2N2N2N2N3M2N2N2N2N2N2OjZS4"}, "image_id": 512, "id": 8861}, {"iscrowd": 0, "category_id": 1, "bbox": [298.0, 159.0, 57.0, 64.0], "area": 1838, "segmentation": {"size": [512, 512], "counts": "iUe42m?2N3N1N2N2N2N2N2N2N2N2N3N1N2N2N2N2N2N2N2N2NTO_A=_>BcA>[>BgA>W>BiA`0U>@kAb0S>^OmAd0Q>^OmAd0Q>^OmAd0S>e000010N1N2N2O1N2N2N2N001A`A]Ob>a0`A]Oc>`0_A^Oc>`0_A^Oc>`0_A^Oc>a0^A^Oc>`0_A^Oc>`0`0N2N2N2N3M2N2NWZ^2"}, "image_id": 512, "id": 8862}, {"iscrowd": 0, "category_id": 1, "bbox": [417.0, 163.0, 36.0, 36.0], "area": 633, "segmentation": {"size": [512, 512], "counts": "ge`62m?2N2N3M2N2N2N2N2O1N2N2N2N2N0000001O00000002N2N3M2N2N2N2N2N2N2N2O1N2N2N_Zm0"}, "image_id": 512, "id": 8863}, {"iscrowd": 0, "category_id": 1, "bbox": [500.0, 180.0, 12.0, 24.0], "area": 160, "segmentation": {"size": [512, 512], "counts": "nUj72m?2N2N3M2N2N2O1N2N2N2N2[J"}, "image_id": 512, "id": 8864}, {"iscrowd": 0, "category_id": 1, "bbox": [271.0, 182.0, 10.0, 9.0], "area": 53, "segmentation": {"size": [512, 512], "counts": "jeW42m?2N2N2N10O2O1N2N2NWZc3"}, "image_id": 512, "id": 8865}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 187.0, 4.0, 7.0], "area": 17, "segmentation": {"size": [512, 512], "counts": "k57j?N2N2ORjm7"}, "image_id": 512, "id": 8866}, {"iscrowd": 0, "category_id": 1, "bbox": [180.0, 189.0, 53.0, 59.0], 
"area": 1580, "segmentation": {"size": [512, 512], "counts": "TWj22m?2N2N2N3M2N2N2O1N2N2N2]OZOiAi0T>YOjAi0T>YOjAi0T>YOjAi0U>XOiAj0U>XOjAh0U>ZOiAf0W>\\OgAd0Y>a001O000002N2N2N3M2N2O1N2N2N2N2N0000001O01O000002N2Cm@JU?4m@JU?4m@JU?4m@JU?4m@KT?3n@KT?3?MeY[4"}, "image_id": 512, "id": 8867}, {"iscrowd": 0, "category_id": 1, "bbox": [251.0, 189.0, 31.0, 31.0], "area": 514, "segmentation": {"size": [512, 512], "counts": "\\fm31n?2N3M2N2N2N2O1N3M2N2N2N2N2N10O00011N2N2N2N2N2N3M2O1N2N2N2N3Meib3"}, "image_id": 512, "id": 8868}, {"iscrowd": 0, "category_id": 1, "bbox": [472.0, 191.0, 24.0, 23.0], "area": 284, "segmentation": {"size": [512, 512], "counts": "ZV\\72n?1N2N3M2N2O1N3M2N1O01O0000010O2N2N3M2N2O1N3M2Nhi7"}, "image_id": 512, "id": 8869}, {"iscrowd": 0, "category_id": 1, "bbox": [353.0, 202.0, 23.0, 24.0], "area": 425, "segmentation": {"size": [512, 512], "counts": "if`56c?7J6N20000010O0000000001O01O00000001O01I6I7JiiS2"}, "image_id": 512, "id": 8870}, {"iscrowd": 0, "category_id": 1, "bbox": [288.0, 216.0, 42.0, 41.0], "area": 836, "segmentation": {"size": [512, 512], "counts": "`W`42m?2N2N2N2N2N2O1N2N2N3M2N2N2N2N1O1O01O00000000000000003N1N2N2N2N2N2N2N2N2N2N3N1N2N2Nhhj2"}, "image_id": 512, "id": 8871}, {"iscrowd": 0, "category_id": 1, "bbox": [381.0, 223.0, 58.0, 38.0], "area": 1632, "segmentation": {"size": [512, 512], "counts": "hgn56c?7H8J7O000001O000001O000001O00000001O000001O000001O000001O000001O000001O000001J5L41O0000000001O0001O00000O1I7I7I8GVYT1"}, "image_id": 512, "id": 8872}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 229.0, 25.0, 38.0], "area": 536, "segmentation": {"size": [512, 512], "counts": "V7U1l>N3M2N2N1O01O0000000002N2N2N3M2N2N2O1N2N2N2N2N2N^Xc7"}, "image_id": 512, "id": 8873}, {"iscrowd": 0, "category_id": 1, "bbox": [208.0, 229.0, 63.0, 61.0], "area": 1740, "segmentation": {"size": [512, 512], "counts": "\\XX32m?2N2O1N2N2N2N2N2N2N2N3M2N2N2O1N2N2N2N2N0000001O00000001O00000000000001O00000001O0000001O2N2N2O1N2N2N3M2N2N2N2N2N2N2N2N2O1N2NYXh3"}, "image_id": 512, "id": 8874}, {"iscrowd": 0, "category_id": 1, "bbox": [496.0, 239.0, 16.0, 20.0], "area": 172, "segmentation": {"size": [512, 512], "counts": "mWh71n?2N3M2N2N010O0000001O2N2N2N2N0bH"}, "image_id": 512, "id": 8875}, {"iscrowd": 0, "category_id": 1, "bbox": [326.0, 243.0, 19.0, 18.0], "area": 180, "segmentation": {"size": [512, 512], "counts": "kWS51n?2N2N2N2N2N2N2N2O10O1N2N2N2N2N2N2N2NUXc2"}, "image_id": 512, "id": 8876}, {"iscrowd": 0, "category_id": 1, "bbox": [383.0, 266.0, 37.0, 37.0], "area": 620, "segmentation": {"size": [512, 512], "counts": "oho52m?3M2N2N2O1N2N2N3M2N2N2N2OO2N2N2NO1L5KBRA3n>NSA0n>1RAMP?2RAKo>5RAIo>7RAGo>9RAEo>;RADn><;010O000000000000Lhg]1"}, "image_id": 512, "id": 8877}, {"iscrowd": 0, "category_id": 1, "bbox": [243.0, 269.0, 60.0, 51.0], "area": 1509, "segmentation": {"size": [512, 512], "counts": "Vii33m?2M4M2N3L3N3L3N3M2M2OO010O10O10O010O010O10O10O010O10O001O1N101O001N101O0O2O10O101N2O1N101M3N2N1N3N2N2M3N1O2N2M3N1O2M3NUWX3"}, "image_id": 512, "id": 8878}, {"iscrowd": 0, "category_id": 1, "bbox": [440.0, 275.0, 66.0, 63.0], "area": 2096, "segmentation": {"size": [512, 512], "counts": "iYl62m?2N2N2N2N2N2N3N1N2N2N2N2N20O1Om@\\Ol>k01O00O1O1O1O1O100O1O1O1O1FiNjAW1V>kNiAU1V>mNhAT1W>nNgAS1X>;00OJ6O11O0001O0001O012O0000O2M2N2N2N3M2O1N2ETA_On>`0SA^OP??RA_OP??RA@o>>;N2N3N1N2N2Nef2"}, "image_id": 512, "id": 8879}, {"iscrowd": 0, "category_id": 1, "bbox": [26.0, 277.0, 45.0, 47.0], "area": 1197, "segmentation": {"size": [512, 512], "counts": 
"SY=7h?4L2N2N2N2O1N2N2N2N3M2N2N2N2O1N2O1O1O1O1O1O1000N2N2N2N3M2N2N2N2N3M2N2N2N2N2N3M2O1N2N2N3MbVl6"}, "image_id": 512, "id": 8880}, {"iscrowd": 0, "category_id": 1, "bbox": [397.0, 282.0, 43.0, 42.0], "area": 947, "segmentation": {"size": [512, 512], "counts": "biV61n?2N2N2N3N1N2N2N2N2N2N2N3M2N2N2N1O00001O000001O0000002N2N2O1N201O00000N2N2N4L4L3N3L4L]fS1"}, "image_id": 512, "id": 8881}, {"iscrowd": 0, "category_id": 1, "bbox": [322.0, 292.0, 30.0, 29.0], "area": 452, "segmentation": {"size": [512, 512], "counts": "bYQ52m?2O1N2N3M2N2N2N2N2O1N3M1O1O000001O2N2N2N2O2M2N2N2N2N2N2N2N`f_2"}, "image_id": 512, "id": 8882}, {"iscrowd": 0, "category_id": 1, "bbox": [86.0, 314.0, 45.0, 41.0], "area": 912, "segmentation": {"size": [512, 512], "counts": "XZ[11n?2N3M2N2N2O1N2N2N2N3M2N2N2N2O1N2O20O00000000000O2M2N2N2N2N01O000002N2N2N3M2O1N2N2N2N2N2NaUn5"}, "image_id": 512, "id": 8883}, {"iscrowd": 0, "category_id": 1, "bbox": [361.0, 326.0, 29.0, 28.0], "area": 421, "segmentation": {"size": [512, 512], "counts": "djd51n?2N2N2N2N2N2N2N2N2N2N2N2N2N10O2N2N2N2N2N2N2N2N2N2O2M2N2N]el1"}, "image_id": 512, "id": 8884}, {"iscrowd": 0, "category_id": 1, "bbox": [284.0, 328.0, 52.0, 53.0], "area": 1335, "segmentation": {"size": [512, 512], "counts": "R[^41n?2N2N2O1N3M2N2N2N2N2N2N2O2M2N20000N2O1N3M2N2N2N00000010O0000000000002N2GZAVOh>g0ZAWOh>g0ZAWOh>g0:N2O1N2N2N3M2N2N2N2O1N2NVeg2"}, "image_id": 512, "id": 8885}, {"iscrowd": 0, "category_id": 1, "bbox": [4.0, 333.0, 40.0, 33.0], "area": 694, "segmentation": {"size": [512, 512], "counts": "kZ21n?2N2N2N2O1N3M2N2N2N2N2N2N2N2O2O00O1N2N2O2M0000000000001O00011N2K6M2N2N2O1N2N2NZeY7"}, "image_id": 512, "id": 8886}, {"iscrowd": 0, "category_id": 1, "bbox": [475.0, 349.0, 37.0, 33.0], "area": 976, "segmentation": {"size": [512, 512], "counts": "Tk]7c0]?7I00000000000000000000000O010000000000000000000000000000000O1O1O1O1O1O1SE"}, "image_id": 512, "id": 8887}, {"iscrowd": 0, "category_id": 1, "bbox": [29.0, 355.0, 52.0, 50.0], "area": 1365, "segmentation": {"size": [512, 512], "counts": "ik>1n?2N2N2N2N2N2N3M2N2N2N2N2N2N2O100000000N2N2N2N2N2N2N2N1O000001O001O2N2N3N1N2N2N2N2XOn@?T?_On@?T?_On@?T?@m@>\\?N2N2N2N2N2N2NTTg6"}, "image_id": 512, "id": 8888}, {"iscrowd": 0, "category_id": 1, "bbox": [381.0, 355.0, 22.0, 23.0], "area": 260, "segmentation": {"size": [512, 512], "counts": "]kn51n?2N2N2N3M2N2N2N2N2N2000N2N2N2N2N2N2N2N2N2NcTf1"}, "image_id": 512, "id": 8889}, {"iscrowd": 0, "category_id": 1, "bbox": [102.0, 356.0, 9.0, 9.0], "area": 46, "segmentation": {"size": [512, 512], "counts": "X[c12m?2N2N2OO001O2N2NkTX6"}, "image_id": 512, "id": 8890}, {"iscrowd": 0, "category_id": 1, "bbox": [330.0, 362.0, 53.0, 47.0], "area": 1268, "segmentation": {"size": [512, 512], "counts": "P\\U54l?5K6I6K5KO1000O01000000O0100O010O1O1O001O100O001O1O00100O1O000000010O000002N2N2N2N3M2N2O1N2N3M2N2N2N2N3MUTP2"}, "image_id": 512, "id": 8891}, {"iscrowd": 0, "category_id": 1, "bbox": [117.0, 378.0, 52.0, 52.0], "area": 1332, "segmentation": {"size": [512, 512], "counts": "flj12m?2N3M2N2N2N2O1N2N3M2N2N2N2N2N2O1N3M2N2N0000000001O01O0000000002N2N2N2O1N2N2N3M2N2N2N2N2O1N3M2N2N2N2N2NaS[5"}, "image_id": 512, "id": 8892}, {"iscrowd": 0, "category_id": 1, "bbox": [407.0, 378.0, 32.0, 32.0], "area": 520, "segmentation": {"size": [512, 512], "counts": "Zl[61n?2N2N2N2N2O2M2N2N2N2N2N2N3M10O0000003N1N2N2N2N2N2N2N3N1N2N2N2NhST1"}, "image_id": 512, "id": 8893}, {"iscrowd": 0, "category_id": 1, "bbox": [493.0, 392.0, 19.0, 35.0], "area": 358, "segmentation": {"size": [512, 512], "counts": 
"elf71n?2N2N2N3M2O1N2N2N2N2N3M2N2O1N2O10001dC"}, "image_id": 512, "id": 8894}, {"iscrowd": 0, "category_id": 1, "bbox": [32.0, 394.0, 25.0, 25.0], "area": 304, "segmentation": {"size": [512, 512], "counts": "b\\`02m?2N2N2N2N2N2N2N2O10000000000000N2N2N2N2N2N2N2N2NXSS7"}, "image_id": 512, "id": 8895}, {"iscrowd": 0, "category_id": 1, "bbox": [473.0, 400.0, 14.0, 14.0], "area": 112, "segmentation": {"size": [512, 512], "counts": "fl\\72m?2N2N2N2N2N2O01N2N2N2N2N2N[S<"}, "image_id": 512, "id": 8896}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 404.0, 26.0, 42.0], "area": 630, "segmentation": {"size": [512, 512], "counts": "e2O1000000000O1O1N2N2N3M2N2N2N2N2N2N2N2N2N2O1N2N3Mdbb7"}, "image_id": 512, "id": 8897}, {"iscrowd": 0, "category_id": 1, "bbox": [74.0, 408.0, 33.0, 33.0], "area": 558, "segmentation": {"size": [512, 512], "counts": "X]U13m?1N2N2N2N2N2N3M2O1N2N2N2N2N0000010O0002N2N2N3M2N2O1N2N2N2N3M2N2NjRZ6"}, "image_id": 512, "id": 8898}, {"iscrowd": 0, "category_id": 1, "bbox": [127.0, 408.0, 91.0, 84.0], "area": 2650, "segmentation": {"size": [512, 512], "counts": "hno12m?2N2N2N2N2O1N2N2N2N2N3M2N2N2O1N2N2N1O000000000001O0001O000000000001O0000000001O00000001O000000000001O000001O000000000000000101N1O00000000000001O2O1N2N2N2N2N2N2N2N3M2N2N2O1N2N2N2N2Nfbb4"}, "image_id": 512, "id": 8899}, {"iscrowd": 0, "category_id": 1, "bbox": [377.0, 414.0, 54.0, 53.0], "area": 1341, "segmentation": {"size": [512, 512], "counts": "nml51n?3M2O1N2N2N2N2N2N2N2N2N3M2O1N2N2N2N2N000000001O01O000000000000000001O02N2N2N2N2N2N2N2N2N2O2M2N2N2N2N2N2N2N`RX1"}, "image_id": 512, "id": 8900}, {"iscrowd": 0, "category_id": 1, "bbox": [440.0, 419.0, 33.0, 33.0], "area": 561, "segmentation": {"size": [512, 512], "counts": "c]l62m?2N2N2N2N2N2N2N2N2N2N2N3N1N2N000001O2N2N2N2N2N2N2N2N2O1N2N2N2N3M_Rc0"}, "image_id": 512, "id": 8901}, {"iscrowd": 0, "category_id": 1, "bbox": [111.0, 431.0, 12.0, 13.0], "area": 84, "segmentation": {"size": [512, 512], "counts": "cmg12m?2N2N3M200000O1N2N2N2N[RR6"}, "image_id": 512, "id": 8902}, {"iscrowd": 0, "category_id": 1, "bbox": [495.0, 442.0, 17.0, 63.0], "area": 633, "segmentation": {"size": [512, 512], "counts": "\\og71\\?0j@042P?0j@042P?1i@O52Q?6n@K[>JSB=@K[>JSB=@K[>JSBk0j=WOTBk0j=WOTBk0k=VOSBl0k=VOSBl0k=VOSBj0m=XOQBi0P>WOPBg0R>YOmAf0U>a0TB"}, "image_id": 512, "id": 8903}, {"iscrowd": 0, "category_id": 1, "bbox": [403.0, 456.0, 54.0, 50.0], "area": 1319, "segmentation": {"size": [512, 512], "counts": "WoY61o?1N2N2N2N2N2N3M2N2N2N2N2N2IWOTAk0j>WOTAk0j>7N2N0000000000000001O00002N2N2N2O10000000000O2M2N2N2N2N2O1N2N2N2N2N2N2N2N2NoPk0"}, "image_id": 512, "id": 8904}, {"iscrowd": 0, "category_id": 1, "bbox": [236.0, 460.0, 31.0, 31.0], "area": 476, "segmentation": {"size": [512, 512], "counts": "h^f31n?2N2N2N2O1N2N2N2N2N3M2N2N2O10000000N3M2N2N2N2N2N2N2N2O1N2N3MSQj3"}, "image_id": 512, "id": 8905}, {"iscrowd": 0, "category_id": 1, "bbox": [194.0, 473.0, 54.0, 39.0], "area": 1232, "segmentation": {"size": [512, 512], "counts": "d_Q31n?2N2O1N2N2N2N2N2N3M2N1O1O100O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1001O1O1O1O1O2N1O1N2N2N2N2N2O1N2N3M2N2N2N2N2N2N``S4"}, "image_id": 512, "id": 8906}, {"iscrowd": 0, "category_id": 1, "bbox": [461.0, 489.0, 42.0, 23.0], "area": 512, "segmentation": {"size": [512, 512], "counts": "ooV71n?1O1O1O1O100O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O11O1O2N1O1O1O1O1O1O1O1O1O2N1O1N2O1NUP4"}, "image_id": 512, "id": 8907}, {"iscrowd": 0, "category_id": 1, "bbox": [84.0, 492.0, 40.0, 20.0], "area": 432, "segmentation": {"size": [512, 512], "counts": 
"o_Z11n?1O1O1O1O1O100O1O1O1O1O1O1O1O1O1O1O100O1O1001O1O1O1O1O1O1O1O2N1O1O1O1O1O1O1O1NS`Q6"}, "image_id": 512, "id": 8908}, {"iscrowd": 0, "category_id": 1, "bbox": [270.0, 499.0, 26.0, 13.0], "area": 173, "segmentation": {"size": [512, 512], "counts": "o_W41o?0O1O1O1O1O1O1O100O1O1O1O1O11O1O1O1O1O1O1O2N1O1O1OQ`[3"}, "image_id": 512, "id": 8909}, {"iscrowd": 0, "category_id": 1, "bbox": [1.0, 511.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "oo01PPo7"}, "image_id": 512, "id": 8910}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 71.0, 26.0, 65.0], "area": 981, "segmentation": {"size": [512, 512], "counts": "W2k1V>00O10000000000N2N1mNgA>Y>AjA=W>BkABkABkABjABiABiABiABiABiAN2N2N2N2M`lb7"}, "image_id": 513, "id": 8911}, {"iscrowd": 0, "category_id": 1, "bbox": [3.0, 134.0, 74.0, 83.0], "area": 3136, "segmentation": {"size": [512, 512], "counts": "Sf11n?2M3N1N3N2]@E^?`0N2M3N2N2N1mNVOoBl0oSOnAm0R>UOlAm0R>b0N2N2N2NO10001O2N00000000000000O10O3N2N2N1O0001N3N2N2N2N1O000001ROUBFm=8UBFm=8UBFm=8UBFm=8UBEn=9TBEn=9TBEn=9TBEn=9SBFn=5cABa07n=4YBJi=4YBJi=4YBJi=4YBJi=4S1N2NUZR6"}, "image_id": 513, "id": 8913}, {"iscrowd": 0, "category_id": 1, "bbox": [48.0, 188.0, 10.0, 10.0], "area": 57, "segmentation": {"size": [512, 512], "counts": "QVh01n?2N2N2N2N10O2N2M3NRjR7"}, "image_id": 513, "id": 8914}, {"iscrowd": 0, "category_id": 1, "bbox": [22.0, 218.0, 9.0, 10.0], "area": 48, "segmentation": {"size": [512, 512], "counts": "oV;2m?2N2N2M21N2N2M3NSY`7"}, "image_id": 513, "id": 8915}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 243.0, 46.0, 66.0], "area": 1713, "segmentation": {"size": [512, 512], "counts": "e7j1U>2N3N100000N3M2N2N2N2O2M2N2N2N3NO000000003N11M2N2N2N2O2M2N2N1O10O0003M2N2N2Ld@B]?<6M2N2N2O2M2N]gX7"}, "image_id": 513, "id": 8916}, {"iscrowd": 0, "category_id": 1, "bbox": [125.0, 252.0, 72.0, 62.0], "area": 2194, "segmentation": {"size": [512, 512], "counts": "nhn11n?2N2N2N2N1N3N1OO10a@FY?:g@HW?8i@JU?6k@LS?4m@NQ?4m@MR?`000PAVOk>j0UAXOh>i0XAYOf>g0ZA[Od>e0\\A]Ob>d0\\A_Ob>P1O2N2N2N2N2N2N2M11O2N2N2N2N2M3N2N2N1O2N2N2N2N0O10000000O10000000O10002N2N1O2N2N2N2M3N2N2N2N2N2N2N2NgWm4"}, "image_id": 513, "id": 8917}, {"iscrowd": 0, "category_id": 1, "bbox": [69.0, 262.0, 12.0, 12.0], "area": 77, "segmentation": {"size": [512, 512], "counts": "[hR12m?2N2M3N20O100O1N2N2N2NdWg6"}, "image_id": 513, "id": 8918}, {"iscrowd": 0, "category_id": 1, "bbox": [173.0, 288.0, 69.0, 61.0], "area": 2227, "segmentation": {"size": [512, 512], "counts": "Ujf22m?2N2N2BIPA9n>IPA9n>IPA9n>IPA9n>IPA9n>IPA9n>>N1O2N2N2N2M3N2N2N2N2N2N2N2N11N2N2N2N2N2N2N2N2N2N2N1O2M10000000000000001O2N2N2N000002N10O2N2N2N2N2N2N2M3N2N2N2N2N1O2NafV4"}, "image_id": 513, "id": 8919}, {"iscrowd": 0, "category_id": 1, "bbox": [458.0, 314.0, 54.0, 88.0], "area": 2480, "segmentation": {"size": [512, 512], "counts": "d[U71n?2N2N2N1O2N2N2N2N2N2N1O2M3N2O1000O10O100N2N0SOSOhBm0X=UOfBm0X=UOfBm0X=UOeBn0X=TOgBn0W=TOgBn0W=TOgBm0X=UOfBm0X=UOfBm0X=UOfBm0X=UOfBm0W=VOgBl0W=VOgBl0W=UOhBm0X=n0OO2N2N2N2N2N2N2N2N1O2N2@SBjNo=T1SBjNo=T1SBjNo=S1TBkNn=S1TBkNd3"}, "image_id": 513, "id": 8920}, {"iscrowd": 0, "category_id": 1, "bbox": [66.0, 319.0, 68.0, 77.0], "area": 2392, "segmentation": {"size": [512, 512], "counts": "U[Q11n?2N2N2N2N2j@Gb>;\\AFc>;SAA66e>=ZAEd>=ZAEd>=ZAEd>m0N2N2N2N2N1O2M3N2N2N2N00000000000O010000BSBjNm=V1TBiNl=W1VBgNj=Y1XBeNj=Y1XBeNj=Y1XBeNj=Y1`0N2N2N2N2M3N1O2O1000000000000N2N00002N2N2N2M3N2N2N1O2N2N2NXel5"}, "image_id": 513, "id": 8921}, {"iscrowd": 0, "category_id": 1, "bbox": [221.0, 323.0, 66.0, 59.0], "area": 2123, "segmentation": 
{"size": [512, 512], "counts": "[k^31n?2_ONQA4m>NPA5m>NQA4m>NQA4m>NQA4m>NQA4h>AYA=M3i>BXAo0f>7M3N2N2N2N2N2N2N2O10O1O1N2N2N2N2N2N2N1O2N2N2N2N2N2N2N000000O102N2N1O000000000001O2N2N2M3N2N1O2N2N2N2N2N2N2N2N2M_U`3"}, "image_id": 513, "id": 8922}, {"iscrowd": 0, "category_id": 1, "bbox": [113.0, 366.0, 67.0, 75.0], "area": 2612, "segmentation": {"size": [512, 512], "counts": "Xmh11n?2nNNSB4k=NSB4k=NSB4k=NSB3l=ORB3l=ORB3l=OQB4m=NQB4i=^OjA`0;4i=2UB0i=2UB0i=2UB0i=2UB0i=2UBOj=3TBOj=U1N2N1N1002N2N2N1N3N2N2N2N1O00O1000O1JkAbNU>^1mA`NU>^17O2N2N2N2N1O2M101O000000001O2N2N2N2M3N2N2N2N2N2N1O2N2N2M3NTdU5"}, "image_id": 513, "id": 8923}, {"iscrowd": 0, "category_id": 1, "bbox": [264.0, 367.0, 77.0, 81.0], "area": 3248, "segmentation": {"size": [512, 512], "counts": "^\\T41>1o>2o@0o>2o@0o>2o@06Kf=2YB5H17Je=4YB3I17Jd=5ZB2I17Jd=5ZBa01\\Oc=5ZBa01\\Od=4YBb01[Oe=Z1YBhNf=j1O1O1O1O1O1O10N1O2N2N2N2N2N2N2N2N1N3N2N2N2N2N1O2N2N2M3N1O2N2NO1000000000O1000O10000000000000O01001O1O2N2N2N2N2N2M3N2N2N2N2N2N2N1O2NTTe2"}, "image_id": 513, "id": 8924}, {"iscrowd": 0, "category_id": 1, "bbox": [157.0, 401.0, 69.0, 79.0], "area": 2726, "segmentation": {"size": [512, 512], "counts": "]n^23l?2N2mNKXB7f=JYB8e=JYB8e=JYB7f=KXB7e=LYB6e=LXB7f=KXB7c=ZOnAa0=7c=YOoAb0<7c=N[B4c=N[B4c=M\\B5b=M\\B5b=M\\B4c=N[B4b=U11O1O1O1O1O1N3N2N1O2N2N000ImAbNS>^1nAaNR>^1QB_NP>a170001O2N2N2N1O2N2M3N0000000000002N2M3N2N2N2N2N2N2N2N2N2M2O2N2N2N2NPc^4"}, "image_id": 513, "id": 8925}, {"iscrowd": 0, "category_id": 1, "bbox": [305.0, 412.0, 70.0, 61.0], "area": 2249, "segmentation": {"size": [512, 512], "counts": "Qnh41n?2N1N3N2N2N2FEl@=R?El@=R?El@=R?:N2N2M3N2N1O2N2N2N2N2N2N2N2M3N2N01O2N2N2N2N2N2N2N2N2N1N3N1OO1000000000000002M3N2N1O0002N20N2N2M3N2N2N2N2N2N2N2N1O2N2M3NfRT2"}, "image_id": 513, "id": 8926}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 435.0, 31.0, 42.0], "area": 854, "segmentation": {"size": [512, 512], "counts": "c=R1Q?L3N1OO3N3L4M11N2N0O0100O0100O01000O011N4M2N3L4M2M4M2N3L3N3L_Q`7"}, "image_id": 513, "id": 8927}, {"iscrowd": 0, "category_id": 1, "bbox": [199.0, 439.0, 63.0, 66.0], "area": 2306, "segmentation": {"size": [512, 512], "counts": "jnS32m?2N2FLe@6Y?Le@6Y?Le@63EP?e0n@]OP?e0n@]OP?j0O2N2N2N2N2O1O10N2N1OO3N2N1O2N2N2N2O10000O1N1O2N2N0000O1000O11O2N2N2N2N2N2M3N2N2GYAVOi>h0YAVOi>h0XAWOj>f0YAXOi>f0:N1O2N2M3N2N2N2N2M3Ndal3"}, "image_id": 513, "id": 8928}, {"iscrowd": 0, "category_id": 1, "bbox": [281.0, 443.0, 14.0, 14.0], "area": 78, "segmentation": {"size": [512, 512], "counts": "Un\\41n?2N2N2N000O10000001O2N2N2NSR\\3"}, "image_id": 513, "id": 8929}, {"iscrowd": 0, "category_id": 1, "bbox": [272.0, 445.0, 10.0, 10.0], "area": 55, "segmentation": {"size": [512, 512], "counts": "Q^X42m?2N2N2N20O1N2N2N1NQbb3"}, "image_id": 513, "id": 8930}, {"iscrowd": 0, "category_id": 1, "bbox": [345.0, 448.0, 76.0, 64.0], "area": 2667, "segmentation": {"size": [512, 512], "counts": "Zo\\51n?2N2N2N2N2N2N1N3N2N2N2N2_OXOgAj0W>XOgAj0W>XOfAk0X>WOfAj0Y>XOeAj0Y>WOfAk0X>WOfAk0W>a0N2N2N2N1O11O001O1O1O1O1O1O1O1O001O1N2N2N2N2N2M3N1O1O000000O10O100000000000O12M3N2N2N2N2N2N2M3N2N2N2N2N2N2N1N3N^Q]1"}, "image_id": 513, "id": 8931}, {"iscrowd": 0, "category_id": 1, "bbox": [267.0, 452.0, 9.0, 8.0], "area": 39, "segmentation": {"size": [512, 512], "counts": "WnU41n?1O2N2O1000O1N2Niae3"}, "image_id": 513, "id": 8932}, {"iscrowd": 0, "category_id": 1, "bbox": [28.0, 479.0, 65.0, 33.0], "area": 1189, "segmentation": {"size": [512, 512], "counts": 
"i_>2m?2N3N1N2N1O1EFo@;k>LSA5l>NQA3n>OQA1n>a000O1000000000000001O2N1O1O1O1O1O2N1O1O1O1O1O2N00O1O1001O1O1O1O1OO1O1O1O100O11O1O1O1O1O2N1O1O1O1O1O2N1O1O1OQPa6"}, "image_id": 513, "id": 8933}, {"iscrowd": 0, "category_id": 1, "bbox": [259.0, 490.0, 52.0, 22.0], "area": 1144, "segmentation": {"size": [512, 512], "counts": "ZoQ4f0Z?00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000fPT3"}, "image_id": 513, "id": 8934}, {"iscrowd": 0, "category_id": 1, "bbox": [415.0, 492.0, 53.0, 20.0], "area": 609, "segmentation": {"size": [512, 512], "counts": "bo_649MW?7f@JX?9f@HY?:e@G[??O10000000000001O001O1O1O1O1O1O001O1O1O1OO1O1O11O1O1O001O1O1O00O1O1O1N2O11O1O1O1O1O1O001O1O1O1O1OQ`e0"}, "image_id": 513, "id": 8935}, {"iscrowd": 0, "category_id": 1, "bbox": [150.0, 104.0, 17.0, 24.0], "area": 342, "segmentation": {"size": [512, 512], "counts": "XS[2b0^?000000000000000000006J000000000h\\\\5"}, "image_id": 514, "id": 8936}, {"iscrowd": 0, "category_id": 1, "bbox": [129.0, 131.0, 22.0, 13.0], "area": 152, "segmentation": {"size": [512, 512], "counts": "XdP21m?3L30001O010O00010O01O01O010O00010O01O0N2Mj[d5"}, "image_id": 514, "id": 8937}, {"iscrowd": 0, "category_id": 1, "bbox": [234.0, 133.0, 11.0, 15.0], "area": 165, "segmentation": {"size": [512, 512], "counts": "UTe3?a?0000000000000000000k[U4"}, "image_id": 514, "id": 8938}, {"iscrowd": 0, "category_id": 1, "bbox": [42.0, 179.0, 9.0, 13.0], "area": 104, "segmentation": {"size": [512, 512], "counts": "cUe0IZAg0e>[OXAh0e>ZOYAh0e>ZOZAg0d>[OZAg0g>9N2N10OUOgNPCY1P=iNnBW1R=lNkBT1U=nNkBP1U=ROkBl0V=UOjBi0V=YOfBDBR1g=\\OeBh0[=n002OO2N2N2N1O0NRBTNm=l1300000SBTNg=m1XBUNg=j1ZBWNe=h1[BZNe=f1YB]Nf=o100O1O1NYO^BSO`=m0bBTO[=l0gBTOW=l0jBUOT=NoBLM6T=NQCKJ8V=KRCKJ8V=KRCKJ8V=KRCKJ8V=KRCKK7V=LQCJK8V=LQCJK8V=LQCJK8V=LQCKJ8V=KjC3XM2Nkei5"}, "image_id": 514, "id": 8942}, {"iscrowd": 0, "category_id": 1, "bbox": [167.0, 325.0, 51.0, 48.0], "area": 2090, "segmentation": {"size": [512, 512], "counts": "ejc2:b?4000000i@Kc>i0D<001O000000000000000000000001O000000000000000000000001O000000000000000000000001O000000\\Od0WObfb4"}, "image_id": 514, "id": 8943}, {"iscrowd": 0, "category_id": 1, "bbox": [46.0, 363.0, 18.0, 12.0], "area": 159, "segmentation": {"size": [512, 512], "counts": "`[g04g?50001O000001O0001O000001O01O00N2Jido6"}, "image_id": 514, "id": 8944}, {"iscrowd": 0, "category_id": 1, "bbox": [95.0, 0.0, 54.0, 62.0], "area": 1800, "segmentation": {"size": [512, 512], "counts": "``_13l?2N2N2N2N2N2_@C\\?b0N2N2N2N2N2N2N2N2N3N1O1O1O1O1O1O1O1O1O1O1O100000000000000000000000O1N2UO`A3a>F_AG2a0a>F_AG2a0a>F_AG2a0a>FgA8[>FgA8[>FgA8[>FgA8R?N3M2N2Na^e5"}, "image_id": 515, "id": 8945}, {"iscrowd": 0, "category_id": 1, "bbox": [136.0, 0.0, 23.0, 15.0], "area": 200, "segmentation": {"size": [512, 512], "counts": "PPT21o?1O1O1O2N1O1O2N1O1O1O2N0000O1O1O1O2O1N2N3M2Nk_`5"}, "image_id": 515, "id": 8946}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 78.0, 41.0, 57.0], "area": 1170, "segmentation": {"size": [512, 512], "counts": "o2X1g>000000000000000001O2N2N2N00000000000000000000002N2N2N2N2N2N2N2N2N2O1N2N2N2N2N2NS][7"}, "image_id": 515, "id": 8947}, {"iscrowd": 0, "category_id": 1, "bbox": [502.0, 103.0, 10.0, 22.0], "area": 114, "segmentation": {"size": [512, 512], "counts": "gSk71m?3N1N3M2N3N1N3M3M2iL"}, "image_id": 515, "id": 8948}, {"iscrowd": 0, "category_id": 1, "bbox": [259.0, 110.0, 20.0, 20.0], "area": 210, "segmentation": {"size": [512, 512], "counts": "fcQ42m?2N2N2N2N2M3N2O100O10N2N2N2N2N2N2N2N2NY\\d3"}, "image_id": 515, "id": 8949}, 
{"iscrowd": 0, "category_id": 1, "bbox": [3.0, 119.0, 73.0, 76.0], "area": 2418, "segmentation": {"size": [512, 512], "counts": "de11m?3N2N2M2O2M3N2N1N3N2N1N3N2N2M2O2M3N2N1N2OO10O10O010O10O10O10O10O10O10O02O1O2M3N1O2M3N2M20N3N1O2M3N2N1N3N2M3N1O2M3N1O2M3N2N1N3N2M3N1O2M3N1O2M3N2Mgki6"}, "image_id": 515, "id": 8950}, {"iscrowd": 0, "category_id": 1, "bbox": [229.0, 150.0, 15.0, 12.0], "area": 91, "segmentation": {"size": [512, 512], "counts": "hdb33l?2N101O1000O01000O010O1N1N3NTkU4"}, "image_id": 515, "id": 8951}, {"iscrowd": 0, "category_id": 1, "bbox": [209.0, 179.0, 30.0, 30.0], "area": 480, "segmentation": {"size": [512, 512], "counts": "SfX31n?2M2O2N2M3N1O2M3N2N1N3N2O10O10O1000O0N3N2N2M2O2M3N2N1N3N2NRZX4"}, "image_id": 515, "id": 8952}, {"iscrowd": 0, "category_id": 1, "bbox": [508.0, 183.0, 4.0, 9.0], "area": 24, "segmentation": {"size": [512, 512], "counts": "lUn72l?3M3N1YJ"}, "image_id": 515, "id": 8953}, {"iscrowd": 0, "category_id": 1, "bbox": [140.0, 199.0, 51.0, 54.0], "area": 1396, "segmentation": {"size": [512, 512], "counts": "ZWV21n?2N2M2O2N2M3N1O2M3N2M2O2N2M3N1O2M3N1O2M3N2N1N3N2M2O02N2M2O2N2M3N1O2M3N2N1N3N2M3N1O2M3N1O2M3N2N1N3N2NXYP5"}, "image_id": 515, "id": 8954}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 205.0, 6.0, 11.0], "area": 36, "segmentation": {"size": [512, 512], "counts": "]6;f?N1N3N2N1Nbil7"}, "image_id": 515, "id": 8955}, {"iscrowd": 0, "category_id": 1, "bbox": [268.0, 219.0, 30.0, 29.0], "area": 449, "segmentation": {"size": [512, 512], "counts": "ZWV41n?2N2M3N1O2M3N2M2O2N2M201O1000O10O10N2M3N1O2M3N1N3N2N2M2O2NjhZ3"}, "image_id": 515, "id": 8956}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 220.0, 11.0, 21.0], "area": 122, "segmentation": {"size": [512, 512], "counts": "l6e0[?O2N2M3N1N3N2N2M2O2NnXj7"}, "image_id": 515, "id": 8957}, {"iscrowd": 0, "category_id": 1, "bbox": [205.0, 221.0, 54.0, 51.0], "area": 1395, "segmentation": {"size": [512, 512], "counts": "egV32m?1N3N2N2M2O2M5L2N1N3N2M2O2N2M3N1N3O10O1N20O10O10O10O1000O10O10O10O1000ON3N2N1N3N2M3N1O2M3N1N3N2N2M2O2M3N1O[Xn3"}, "image_id": 515, "id": 8958}, {"iscrowd": 0, "category_id": 1, "bbox": [320.0, 260.0, 30.0, 30.0], "area": 482, "segmentation": {"size": [512, 512], "counts": "cXP52m?2M2O2N2M3N1O2M3N2N1N3O1000O10O1000O1N1O2M3N2N1N3N2N2M2O2N`g`2"}, "image_id": 515, "id": 8959}, {"iscrowd": 0, "category_id": 1, "bbox": [260.0, 262.0, 53.0, 50.0], "area": 1411, "segmentation": {"size": [512, 512], "counts": "mXR42m?2N1N3N2M3N3M2M2O2N2M3N1N3N2N1N3N2O10O0O2000O10O1000O10O1000O010N2O10O1N1N3N2N2M2O2M3N1O2\\Of@>_?O2N2M2O2M3N2NRWS3"}, "image_id": 515, "id": 8960}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 267.0, 13.0, 11.0], "area": 79, "segmentation": {"size": [512, 512], "counts": "ah]11n?1N3N2N2N1010O0O2N2M2O2Nbg[6"}, "image_id": 515, "id": 8961}, {"iscrowd": 0, "category_id": 1, "bbox": [112.0, 276.0, 11.0, 12.0], "area": 83, "segmentation": {"size": [512, 512], "counts": "gXh12m?5L3MO01000O011O3L4MUWR6"}, "image_id": 515, "id": 8962}, {"iscrowd": 0, "category_id": 1, "bbox": [119.0, 283.0, 23.0, 23.0], "area": 283, "segmentation": {"size": [512, 512], "counts": "Xik11n?2N1N3N2N2M2O2N2M2O200000O001M3N1O2M3N2N1N3Nmfh5"}, "image_id": 515, "id": 8963}, {"iscrowd": 0, "category_id": 1, "bbox": [147.0, 288.0, 75.0, 89.0], "area": 3294, "segmentation": {"size": [512, 512], "counts": "RjY21n?1N3N2N1N3N2M3N1O2aA^O\\=e0aB^O]=d0iAZOd04a=c0jA[Oc04b=f0\\B\\Oe=d0YB^Og=b0WB@h=a0UBBk==TBDm=0000O1000000000000000000O10000000000000000O100000000000009G4L0oNcA>^>BkA5U>KUBKk=5X1H[_]7"}, "image_id": 516, "id": 
8981}, {"iscrowd": 0, "category_id": 1, "bbox": [118.0, 0.0, 36.0, 51.0], "area": 1383, "segmentation": {"size": [512, 512], "counts": "ZPk11o?:E70O1000N2000IgAgNY>Y1700000000O100000001O003M;E:F;Dmnb5"}, "image_id": 516, "id": 8982}, {"iscrowd": 0, "category_id": 1, "bbox": [181.0, 0.0, 44.0, 31.0], "area": 1126, "segmentation": {"size": [512, 512], "counts": "``j22m?9ZOJRA;i>>000000O1000O100000000000000O10000000000000000O103M1O0000O100000O10000000O14L9G8GZ__4"}, "image_id": 516, "id": 8983}, {"iscrowd": 0, "category_id": 1, "bbox": [249.0, 0.0, 46.0, 88.0], "area": 2814, "segmentation": {"size": [512, 512], "counts": "Pbl31n?;aNIaB:\\=0ZB0f=:PBFP>c0gA\\OZ>U1O5L1VO^NTCb1lS140000000000000000000O10O9Hb0I2O6J00000000000000O010000000J600000000000O1000O1]OeAA[>?oAWOQ>i0c000O1000O10000000000000O100000O1002N;E;Ee^Z1"}, "image_id": 516, "id": 8991}, {"iscrowd": 0, "category_id": 1, "bbox": [176.0, 36.0, 7.0, 8.0], "area": 49, "segmentation": {"size": [512, 512], "counts": "UQh21n?7J000000000l^T5"}, "image_id": 516, "id": 8992}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 38.0, 44.0, 33.0], "area": 1162, "segmentation": {"size": [512, 512], "counts": "ham26j?9XOJTA6l>?00O1000O100000000000O1000O100000000000O1000O10005K000000000000O010000000004L9G9GP^\\4"}, "image_id": 516, "id": 8993}, {"iscrowd": 0, "category_id": 1, "bbox": [108.0, 51.0, 12.0, 17.0], "area": 180, "segmentation": {"size": [512, 512], "counts": "dQf1;e?5J10000000000000O3N:FQnS6"}, "image_id": 516, "id": 8994}, {"iscrowd": 0, "category_id": 1, "bbox": [317.0, 59.0, 62.0, 36.0], "area": 1669, "segmentation": {"size": [512, 512], "counts": "]bn49g?9G000000000CGSA9m>=0O100000000000O1000O100000000000O100O0100000000000O1000O100000000000O1002M5L0000O10000000O10000000O18H8H9GX]R2"}, "image_id": 516, "id": 8995}, {"iscrowd": 0, "category_id": 1, "bbox": [20.0, 72.0, 21.0, 16.0], "area": 271, "segmentation": {"size": [512, 512], "counts": "ZR:4l?:FO1000O100000000000000000O1000O100007Ia][7"}, "image_id": 516, "id": 8996}, {"iscrowd": 0, "category_id": 1, "bbox": [180.0, 77.0, 65.0, 34.0], "area": 1366, "segmentation": {"size": [512, 512], "counts": "USj24l?3M000000000000O0100000000M5N0O1000000000OO3AFVALPA4o>>10000000000000O0100000000000000O0100000000001O5K0O10000000O100000O100000009G8Hm\\U4"}, "image_id": 516, "id": 8997}, {"iscrowd": 0, "category_id": 1, "bbox": [292.0, 80.0, 21.0, 22.0], "area": 390, "segmentation": {"size": [512, 512], "counts": "bRb47i?:F3M0000000O10O1000000000000000O10O4M:FS]S3"}, "image_id": 516, "id": 8998}, {"iscrowd": 0, "category_id": 1, "bbox": [118.0, 86.0, 46.0, 31.0], "area": 1220, "segmentation": {"size": [512, 512], "counts": "gRk13m?;E;E5K00000000000O0100000000000002N1O00O01000000000000000000O010000000000000000000O018H;Egl]5"}, "image_id": 516, "id": 8999}, {"iscrowd": 0, "category_id": 1, "bbox": [165.0, 90.0, 15.0, 23.0], "area": 296, "segmentation": {"size": [512, 512], "counts": "kbb29g?;E2N00000O100000000000O8I;EdlU5"}, "image_id": 516, "id": 9000}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 99.0, 39.0, 33.0], "area": 1036, "segmentation": {"size": [512, 512], "counts": "W3k0U?O10000000O10000000O100000O10004L00000000O10O10L400000000000O1000O10005K8H9GW\\\\7"}, "image_id": 516, "id": 9001}, {"iscrowd": 0, "category_id": 1, "bbox": [78.0, 100.0, 33.0, 24.0], "area": 664, "segmentation": {"size": [512, 512], "counts": "WSW12n?9G:F00000O10000000O10000000O1000000000O10000000O10000000O102N9Ga\\X6"}, "image_id": 516, "id": 9002}, {"iscrowd": 0, "category_id": 1, "bbox": [485.0, 106.0, 27.0, 52.0], 
"area": 747, "segmentation": {"size": [512, 512], "counts": "ocb72m?2O1N3M2N2N2N2N2N2N2N2N2N3N1N2N2N2N2N2N2N2N2N201O000bL"}, "image_id": 516, "id": 9003}, {"iscrowd": 0, "category_id": 1, "bbox": [174.0, 123.0, 146.0, 45.0], "area": 4029, "segmentation": {"size": [512, 512], "counts": "[Tg24l?:F9G6JO100000O100000000000O100000O10000O100000O10001O00O100000001O00000000O0010001O5J100000O100000000000O100000O100K50000000O10O1000001O000000O1000O01000N200000O01002N0000000000000O10O100006JO10000JZORAf0n>5100000KWOSAi0m>500000001N100004L0O100O10000O1000O10000000000000O10O1000000000000000O10O100:F:F`ko2"}, "image_id": 516, "id": 9004}, {"iscrowd": 0, "category_id": 1, "bbox": [100.0, 128.0, 50.0, 48.0], "area": 1713, "segmentation": {"size": [512, 512], "counts": "XTb12n?6J5K5J7J5K5K6J0O1000O1000O1000O100000O1000O1000O1000K\\AmNe>S14000O10O100000O10O100000O1000O4M6J5K6J5J6KRkd5"}, "image_id": 516, "id": 9005}, {"iscrowd": 0, "category_id": 1, "bbox": [27.0, 132.0, 53.0, 59.0], "area": 1861, "segmentation": {"size": [512, 512], "counts": "_d=2n?4K6K4L2NO14K5L5K4L4K6K4L2N00O0100000O0100000O010000000O0100000O010SOdA6\\>JiA1W>OmAMS>3RBGn=:VBBj==[B_Oe=a0m00O10O1000O1000O4M5K4L5J\\kg6"}, "image_id": 516, "id": 9006}, {"iscrowd": 0, "category_id": 1, "bbox": [6.0, 133.0, 9.0, 8.0], "area": 51, "segmentation": {"size": [512, 512], "counts": "VT31o?6JO1000000000O1k[h7"}, "image_id": 516, "id": 9007}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 143.0, 20.0, 54.0], "area": 805, "segmentation": {"size": [512, 512], "counts": "`4e1[>MfA`NZ>`1300O010000000000005K9G:E2OO100:F9GVje7"}, "image_id": 516, "id": 9008}, {"iscrowd": 0, "category_id": 1, "bbox": [294.0, 156.0, 12.0, 10.0], "area": 103, "segmentation": {"size": [512, 512], "counts": "mTc49g?000000000000000O1008HljV3"}, "image_id": 516, "id": 9009}, {"iscrowd": 0, "category_id": 1, "bbox": [446.0, 174.0, 55.0, 54.0], "area": 1437, "segmentation": {"size": [512, 512], "counts": "jUo61n?2N2N2N2N2N3M2O1N2N2N2O1001O0001O0N2N2N2N2O1N2N2N2N2O2O00000001O0000000001ON2N2N2O2M2N2N2N2N2]Oi@:Y?Di@:Y?Ei@8Y?Fi@8a?N2N2O1N^Y5"}, "image_id": 516, "id": 9010}, {"iscrowd": 0, "category_id": 1, "bbox": [410.0, 201.0, 58.0, 47.0], "area": 1456, "segmentation": {"size": [512, 512], "counts": "PW]62n?1N3M2N2N2N2N2N2O2M2N2N2N2N2N2O2O000O1N000001O000001O0000000001O02O1001O0001O00O1N2N2N3N1N2N2N2N2N2N3M2O1N2N2N2N2Nnhe0"}, "image_id": 516, "id": 9011}, {"iscrowd": 0, "category_id": 1, "bbox": [194.0, 206.0, 66.0, 51.0], "area": 1700, "segmentation": {"size": [512, 512], "counts": "YWQ32m?2N2N2N2N3M2N2N2O1N2N2N2N2N2N2N2N3M2N2N2O0O0000000000000002O1N2N2N2N3M2N2N2N2N000001O0000001O3M2N10O1O2N2N2N2O1N2N2N2N2N2N3M2N2N2NSim3"}, "image_id": 516, "id": 9012}, {"iscrowd": 0, "category_id": 1, "bbox": [151.0, 220.0, 41.0, 62.0], "area": 1797, "segmentation": {"size": [512, 512], "counts": "nf[25k?:F8H000O10O11O:F:F;E6J0000O01002N6J00000000000O1000O1000000000000000O1SOgA3X>NRBHP>6ZB@P>6X1EWho4"}, "image_id": 516, "id": 9013}, {"iscrowd": 0, "category_id": 1, "bbox": [489.0, 227.0, 16.0, 17.0], "area": 137, "segmentation": {"size": [512, 512], "counts": "Xgd72m?2N2N2N2N2O2O000000N2N2N3M2N2NdX3"}, "image_id": 516, "id": 9014}, {"iscrowd": 0, "category_id": 1, "bbox": [100.0, 231.0, 41.0, 51.0], "area": 1722, "segmentation": {"size": [512, 512], "counts": "lWb1=c?>B3MO10\\O_OiAa0W>d0000000000000O100000O100000000000005K2NO100000O10000000000000004L=C=CeWi5"}, "image_id": 516, "id": 9015}, {"iscrowd": 0, "category_id": 1, "bbox": [444.0, 231.0, 31.0, 30.0], "area": 360, "segmentation": {"size": 
[512, 512], "counts": "lWn61n?2O1N3M2N2N2N2N000000001O01O0000000000000000002O1N2N2N2N2N3MbXb0"}, "image_id": 516, "id": 9016}, {"iscrowd": 0, "category_id": 1, "bbox": [470.0, 235.0, 23.0, 25.0], "area": 295, "segmentation": {"size": [512, 512], "counts": "jW[72m?2N2N2O1N2N3GDi@>V?Ch@>W?6O0001O2N2N2N2N2N2N2O1N3M2N[X9"}, "image_id": 516, "id": 9017}, {"iscrowd": 0, "category_id": 1, "bbox": [40.0, 240.0, 54.0, 51.0], "area": 1913, "segmentation": {"size": [512, 512], "counts": "aXd06j?:TOG]A;a>OUA1_>AiAQ1W>;10000000O10000000O1000000000O1000O10000000000000O06K:F8HO1000000000O10000000O100000O100000006J:F[g`6"}, "image_id": 516, "id": 9018}, {"iscrowd": 0, "category_id": 1, "bbox": [245.0, 242.0, 57.0, 48.0], "area": 1474, "segmentation": {"size": [512, 512], "counts": "Whj3:f?:F7I00O1000000000000000O010O100O1O1O1O100O001O1O010O00000010O0000010O000001O01O1O2O1N3M2N2O2M2N2N3N1N2N3M2O1N3MkgX3"}, "image_id": 516, "id": 9019}, {"iscrowd": 0, "category_id": 1, "bbox": [464.0, 257.0, 48.0, 49.0], "area": 1173, "segmentation": {"size": [512, 512], "counts": "lXX72m?2O1N2N2N2N2N3M2N2N2O1N2N2N2N3M2N2N2N0001O000000000001O0001O002N2N2N3M2N2O1N2N2N2N2N2N3M2N2O1N^G"}, "image_id": 516, "id": 9020}, {"iscrowd": 0, "category_id": 1, "bbox": [194.0, 259.0, 41.0, 41.0], "area": 821, "segmentation": {"size": [512, 512], "counts": "jXQ32m?2N3M2N2N2N2N2O1N2N2N2N2N2N3M1O000001O0000000000002N2O1N2N3M2N2N2N2N2N2N2N2N2N2O]WZ4"}, "image_id": 516, "id": 9021}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 269.0, 30.0, 45.0], "area": 817, "segmentation": {"size": [512, 512], "counts": "]8]1c>O1000000000002N9G:F2M100000000000O1000O100000000000O15K9G`f`7"}, "image_id": 516, "id": 9022}, {"iscrowd": 0, "category_id": 1, "bbox": [292.0, 281.0, 55.0, 60.0], "area": 1523, "segmentation": {"size": [512, 512], "counts": "hYb41n?2N2N2N2N2N3M2O1N2N2N2KZOm@h0Q?5N2N2N2YAmN^>U1`AmN^>U1`AmN^>\\1N2N000002N1HbAmN^>S1dAkN\\>U1fAiNZ>W171O000001O2N2N2N2N2N3M1O00101N2N2N2N2N3M2N2N2N2N2N2NeVb2"}, "image_id": 516, "id": 9023}, {"iscrowd": 0, "category_id": 1, "bbox": [150.0, 285.0, 6.0, 8.0], "area": 47, "segmentation": {"size": [512, 512], "counts": "mX[27i?1O0000000Sga5"}, "image_id": 516, "id": 9024}, {"iscrowd": 0, "category_id": 1, "bbox": [256.0, 291.0, 12.0, 12.0], "area": 75, "segmentation": {"size": [512, 512], "counts": "YYP41n?2N2N2N2N2N01O2N2N3M2Nifi3"}, "image_id": 516, "id": 9025}, {"iscrowd": 0, "category_id": 1, "bbox": [99.0, 294.0, 50.0, 50.0], "area": 1243, "segmentation": {"size": [512, 512], "counts": "Pja11n?2N2N2O1N2N2N2N3M2N2N2N2O1N2N2N3M2N2N2N2N2OO0000000001O0002N2N3M2N2N2N2N2O1N2N3M2N2N2N2N2N2O1N3M2NTVe5"}, "image_id": 516, "id": 9026}, {"iscrowd": 0, "category_id": 1, "bbox": [267.0, 300.0, 23.0, 22.0], "area": 255, "segmentation": {"size": [512, 512], "counts": "hiU42m?2N2N2O2M2N2N2N1O01O000000010O2N3M2N2N2O1N3M\\f^3"}, "image_id": 516, "id": 9027}, {"iscrowd": 0, "category_id": 1, "bbox": [188.0, 301.0, 10.0, 11.0], "area": 60, "segmentation": {"size": [512, 512], "counts": "bYn22m?2N2N2N2N02N2N2N2N`fl4"}, "image_id": 516, "id": 9028}, {"iscrowd": 0, "category_id": 1, "bbox": [46.0, 304.0, 32.0, 49.0], "area": 1263, "segmentation": {"size": [512, 512], "counts": "bYg03m?:F:F:F:F4L0000000O10O1000000000000000O10O10000000005K:F9G:F:EUeh6"}, "image_id": 516, "id": 9029}, {"iscrowd": 0, "category_id": 1, "bbox": [368.0, 324.0, 27.0, 26.0], "area": 367, "segmentation": {"size": [512, 512], "counts": "_Zh51n?2N2O1N2N2N3M2N2N2N2O1N2N2OO2N2O1N2N2N2N3M2N2N2O1N2N`Uj1"}, "image_id": 516, "id": 9030}, {"iscrowd": 0, 
"category_id": 1, "bbox": [143.0, 325.0, 61.0, 62.0], "area": 1925, "segmentation": {"size": [512, 512], "counts": "jjW2130g?2W@0g?6N2N2N2N2N2O2M2N2N2N2O100O1N2N2N2N2N2N2N3N1N2N2N2N2000000000000000N3M2N2N2N2N2N2N2N2N2N2N2O2M2N2N2N2N2N2N2N2N2N2N2N3Mhdi4"}, "image_id": 516, "id": 9031}, {"iscrowd": 0, "category_id": 1, "bbox": [92.0, 333.0, 28.0, 28.0], "area": 409, "segmentation": {"size": [512, 512], "counts": "iZ^11n?2N2N2N3N1N2N2N2N2N2N2N3M2O01N2N2N2N2N2N2N3N1N2N2N2N2NVeS6"}, "image_id": 516, "id": 9032}, {"iscrowd": 0, "category_id": 1, "bbox": [326.0, 333.0, 54.0, 51.0], "area": 1342, "segmentation": {"size": [512, 512], "counts": "Z[S52m?3N1N2N2N2N2N2N3M2O1N2N2N2N2N3M2O1N1O1O0000000001O0001O0000000001O00101N2N2N2N3M2N2N2O1N2N2N3M2N2N2N2O1N2NPeQ2"}, "image_id": 516, "id": 9033}, {"iscrowd": 0, "category_id": 1, "bbox": [216.0, 336.0, 54.0, 55.0], "area": 1417, "segmentation": {"size": [512, 512], "counts": "T[\\31n?2N2N2N2O1N3M2N2N2N2N2N2N2O2M2N2N2N2N2N2N2N3O01O000000000001OO2N1N2N2N2N2N2N2N3M2O1N2N2N2N2N2N3M2N2O1N2N2N^dh3"}, "image_id": 516, "id": 9034}, {"iscrowd": 0, "category_id": 1, "bbox": [19.0, 337.0, 16.0, 34.0], "area": 412, "segmentation": {"size": [512, 512], "counts": "Sk9:f?3\\OIYA7g>a001O2N00000O10000007H=D=C_T^7"}, "image_id": 516, "id": 9035}, {"iscrowd": 0, "category_id": 1, "bbox": [266.0, 338.0, 27.0, 27.0], "area": 384, "segmentation": {"size": [512, 512], "counts": "nZU41n?2N3M2N2O1N2N2N3M2N2O1N2N1O02N2O2M2N2N2N2N3N1N2N2N2NQU]3"}, "image_id": 516, "id": 9036}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 352.0, 4.0, 37.0], "area": 82, "segmentation": {"size": [512, 512], "counts": "P;U1U?FGSA:k>HSA:k>HSA:k>IRA9m>HQA:m>?M2N2N2N2N2N2N001O01O002N2N3M2N2N2N2O100000010O0N2N2O1N2N2N2N2N2N3M2N2N2N2O1N2N2NScQ4"}, "image_id": 516, "id": 9042}, {"iscrowd": 0, "category_id": 1, "bbox": [253.0, 384.0, 23.0, 23.0], "area": 261, "segmentation": {"size": [512, 512], "counts": "]ln32m?2N2N2N2O1N2N3M1O000000000002O1N2N2N2N2N2N2Nice3"}, "image_id": 516, "id": 9043}, {"iscrowd": 0, "category_id": 1, "bbox": [270.0, 385.0, 52.0, 50.0], "area": 1214, "segmentation": {"size": [512, 512], "counts": "n\\W41n?2N2N2O1N2N3M2N2N2N2N2O1N2N3M2N2N2N2N01O000000000001O01O00000000002N2O1N2N2N3M2N2N2N2O1N2N3M2N2N2N2N2O[cn2"}, "image_id": 516, "id": 9044}, {"iscrowd": 0, "category_id": 1, "bbox": [331.0, 386.0, 10.0, 11.0], "area": 60, "segmentation": {"size": [512, 512], "counts": "VlU53l?2N2N2N20N2N2N2N2NjSe2"}, "image_id": 516, "id": 9045}, {"iscrowd": 0, "category_id": 1, "bbox": [39.0, 410.0, 24.0, 24.0], "area": 236, "segmentation": {"size": [512, 512], "counts": "Zmc02m?2O1N2N2N2N2N0000000000000000000001O2N2N2N2N2NRSP7"}, "image_id": 516, "id": 9046}, {"iscrowd": 0, "category_id": 1, "bbox": [58.0, 411.0, 65.0, 65.0], "area": 1766, "segmentation": {"size": [512, 512], "counts": "Z^m02m?2N2N2N2N2N2O1N2N2N2N3M2N2N2N2N1O00000@QOPBo0P>SOoAl0Q>VOmAj0S>XOkAh0U>ZOiAf0W>\\OgAd0Y>^OeAc0Z>_OdAa0\\>a01O000000001O2N3M2N2N2N2O1N2N2N2N2N2N2N2N001O00000001O01O2N2N2N2N2N2N2N3MdRR6"}, "image_id": 516, "id": 9047}, {"iscrowd": 0, "category_id": 1, "bbox": [328.0, 412.0, 24.0, 24.0], "area": 264, "segmentation": {"size": [512, 512], "counts": "Z]T51n?2N2N2N2N2N3N1N2N00000000000003M2O1N2N2N2N2N2Nlb_2"}, "image_id": 516, "id": 9048}, {"iscrowd": 0, "category_id": 1, "bbox": [399.0, 421.0, 54.0, 55.0], "area": 1461, "segmentation": {"size": [512, 512], "counts": "XnW62m?2O1N2N2N2N2N3M2N2N2N2O1N2N3M2N2N1O0000001O01O00N2N2M3N2010O1O2N1O1O2N1O1O101N1O1O2N1O1O2N100O2L3M3M4K4M3MgRm0"}, "image_id": 516, "id": 9049}, 
{"iscrowd": 0, "category_id": 1, "bbox": [122.0, 429.0, 30.0, 29.0], "area": 444, "segmentation": {"size": [512, 512], "counts": "j]m11n?2N2N2O1N2N3M2N2N2N2N2N2O1N2N11N2N2N2N2N2N3M2N2O1N2N2N2N2NUbc5"}, "image_id": 516, "id": 9050}, {"iscrowd": 0, "category_id": 1, "bbox": [246.0, 437.0, 27.0, 26.0], "area": 363, "segmentation": {"size": [512, 512], "counts": "S^k31n?2N2N3M2N2N2O1N2N2N2N1O00000001O1O3M2N2O1N2N2N2N2N3MQRg3"}, "image_id": 516, "id": 9051}, {"iscrowd": 0, "category_id": 1, "bbox": [349.0, 442.0, 12.0, 13.0], "area": 86, "segmentation": {"size": [512, 512], "counts": "Pn^52m?2N2N2N2N2N11N2N3M2N2MRR[2"}, "image_id": 516, "id": 9052}, {"iscrowd": 0, "category_id": 1, "bbox": [438.0, 460.0, 61.0, 51.0], "area": 1610, "segmentation": {"size": [512, 512], "counts": "o^k62n?1N3M2N2N2N2N2N2N3N1O1000000O1l@YOl>i0RAYOl>o0N2N1O001O101N2O2O00N2O1N1O0000001O000001O00000001O2N2N2O1N2N3M2N2N2N2N2N2O1N3M2N2N2N2NQQ6"}, "image_id": 516, "id": 9053}, {"iscrowd": 0, "category_id": 1, "bbox": [89.0, 462.0, 72.0, 50.0], "area": 1862, "segmentation": {"size": [512, 512], "counts": "_o\\12m?2N2N2N2N2O1_@D[?>c@D[?b0N2N2N20O1O1O1O1O1O1O1O1OO1O1O1O1O1O1O100O1O1O1M3L4L4M3000000001N1O101N1O1O1O2N1OO1M3M300100O2M2L4L5O01O00010O000001M2L4M3L5K4M`Q_5"}, "image_id": 516, "id": 9054}, {"iscrowd": 0, "category_id": 1, "bbox": [337.0, 463.0, 32.0, 29.0], "area": 461, "segmentation": {"size": [512, 512], "counts": "lnX51o?1N2N2N2N2N2N2N2O1N2N3M2N2N2N01O2O1N2N2N2N3M2N010O2N3M2N2N2N2OSQW2"}, "image_id": 516, "id": 9055}, {"iscrowd": 0, "category_id": 1, "bbox": [158.0, 483.0, 32.0, 29.0], "area": 510, "segmentation": {"size": [512, 512], "counts": "a__21n?3N1N2N2N2N2N2N3M2O1N2N2N1O1O1O1001O2M2N2N2N2N2O1N3M2N2N2N2N2N^`P5"}, "image_id": 516, "id": 9056}, {"iscrowd": 0, "category_id": 1, "bbox": [288.0, 485.0, 42.0, 27.0], "area": 603, "segmentation": {"size": [512, 512], "counts": "m_`41n?2N2N1O1O1FJi@7V?Kh@6X?Le@5Z?9O1O1O1O1O1O1O1001O1O2N1O1O1O1O1O1O1O1O1O1O1O2N1O1O1O1O1O1O1O1O1OQ`j2"}, "image_id": 516, "id": 9057}, {"iscrowd": 0, "category_id": 1, "bbox": [148.0, 503.0, 14.0, 9.0], "area": 69, "segmentation": {"size": [512, 512], "counts": "o_Z21n?1O1O1O1O1O1O1O11O1O1N2N2NU`^5"}, "image_id": 516, "id": 9058}, {"iscrowd": 0, "category_id": 1, "bbox": [437.0, 508.0, 8.0, 4.0], "area": 20, "segmentation": {"size": [512, 512], "counts": "ooj61n?1O1O1001O1O1OQPQ1"}, "image_id": 516, "id": 9059}, {"iscrowd": 0, "category_id": 1, "bbox": [262.0, 90.0, 34.0, 35.0], "area": 832, "segmentation": {"size": [512, 512], "counts": "^SS43_?>J60001O000001O000000000000000001O00009HO00000001O000000L4K500I7Cam[3"}, "image_id": 517, "id": 9060}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 92.0, 4.0, 18.0], "area": 53, "segmentation": {"size": [512, 512], "counts": "l2b0^?00M3Camm7"}, "image_id": 517, "id": 9061}, {"iscrowd": 0, "category_id": 1, "bbox": [305.0, 96.0, 33.0, 34.0], "area": 839, "segmentation": {"size": [512, 512], "counts": "nch43a?G90000000001O00000001O00000J600001O000001O000000000001O0000H8FQmV1"}, "image_id": 517, "id": 9064}, {"iscrowd": 0, "category_id": 1, "bbox": [450.0, 105.0, 34.0, 31.0], "area": 879, "segmentation": {"size": [512, 512], "counts": "PTQ77Z??H9O00000000000001O00000000000K50000000000000001O0000000001O00L4ATm="}, "image_id": 517, "id": 9065}, {"iscrowd": 0, "category_id": 1, "bbox": [497.0, 107.0, 15.0, 37.0], "area": 485, "segmentation": {"size": [512, 512], "counts": "Udh74a?;a@Eo>k0K5001O01O000000000000000dL"}, "image_id": 517, "id": 9066}, {"iscrowd": 0, "category_id": 1, "bbox": [192.0, 
119.0, 57.0, 48.0], "area": 1435, "segmentation": {"size": [512, 512], "counts": "VTP32m?2N2N2N2N2N2N2N2N2N2N2N2N2000000000000000000O2M2N2N2N2O1N2N2N1O00000002N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N3M2Nd[S4"}, "image_id": 517, "id": 9067}, {"iscrowd": 0, "category_id": 1, "bbox": [382.0, 120.0, 13.0, 17.0], "area": 162, "segmentation": {"size": [512, 512], "counts": "WTo51i?6I7N3O000001O00000O1I7I^\\j1"}, "image_id": 517, "id": 9068}, {"iscrowd": 0, "category_id": 1, "bbox": [488.0, 131.0, 6.0, 10.0], "area": 55, "segmentation": {"size": [512, 512], "counts": "STd79g?00000010Olk8"}, "image_id": 517, "id": 9069}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 137.0, 6.0, 23.0], "area": 113, "segmentation": {"size": [512, 512], "counts": "Y4g0Z?O0000000\\OZll7"}, "image_id": 517, "id": 9070}, {"iscrowd": 0, "category_id": 1, "bbox": [459.0, 146.0, 53.0, 42.0], "area": 1205, "segmentation": {"size": [512, 512], "counts": "ndU7:Z?<00000010O0000000000000000000001O0001O00000000000000000001Ol@Dd>l000000TO\\AN2N1O00003M2N2N2N2N2000000000O1N2N2N2N2N3M2N2N2N000000000000002N2N2N2N2N2NXi[1"}, "image_id": 517, "id": 9075}, {"iscrowd": 0, "category_id": 1, "bbox": [416.0, 229.0, 46.0, 38.0], "area": 822, "segmentation": {"size": [512, 512], "counts": "jW`61n?2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N00000002N2N2N2N2N2N2N2N1O0000000000000000001M4N2N2N2N2N2Nfhh0"}, "image_id": 517, "id": 9076}, {"iscrowd": 0, "category_id": 1, "bbox": [381.0, 251.0, 16.0, 19.0], "area": 169, "segmentation": {"size": [512, 512], "counts": "Qhn52m?2T@Mh?7N2N2N2N20000000O1N2N2N2IZ@1l?NiWi1"}, "image_id": 517, "id": 9077}, {"iscrowd": 0, "category_id": 1, "bbox": [312.0, 253.0, 25.0, 26.0], "area": 340, "segmentation": {"size": [512, 512], "counts": "VXl42m?2N2N3N1N2N2N2N3M2N2O100000N3N1N2N2N2N2N3N1N2N2NeWg2"}, "image_id": 517, "id": 9078}, {"iscrowd": 0, "category_id": 1, "bbox": [447.0, 254.0, 46.0, 39.0], "area": 822, "segmentation": {"size": [512, 512], "counts": "dho61n?2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N000001O2N2N2N2N2N2N2N1O0000000000000000000Md@C]?<5N2N2N2N2N2NmW9"}, "image_id": 517, "id": 9079}, {"iscrowd": 0, "category_id": 1, "bbox": [338.0, 267.0, 77.0, 80.0], "area": 2500, "segmentation": {"size": [512, 512], "counts": "eXY52m?2N2N2N2N2N2N3M2N2N2O11O0000O1N2N2O1000000010OO1N2N2N2N2N2N2N2000000N2fAdNn=]1PBeNP>\\1mAfNS>Z1kAhNU>b101O0000_OkAVOU>h0mAXOS>f0oAZOQ>d0QB\\Oo=c0RB]Oo=`0SB@m=>SBDm=:SBHm=6SBLm=2SB0m=NSB4m=JSB8m=FSB;n=CRB=P>APB?R>_OoA`0S>^OmAb0U>\\OkAd0W>ZOiAf0Y>YOfAg0]>VOcAj0k>N2N2O1N2N2N2N2N2N2N2N2N2NmU`1"}, "image_id": 517, "id": 9080}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 268.0, 8.0, 16.0], "area": 72, "segmentation": {"size": [512, 512], "counts": "\\8`0a?N2N2N2N2N2N2N^gk7"}, "image_id": 517, "id": 9081}, {"iscrowd": 0, "category_id": 1, "bbox": [474.0, 285.0, 38.0, 37.0], "area": 729, "segmentation": {"size": [512, 512], "counts": "bY]71n?2N2N2N2N2N2N2N2N2N2M3N2N1O2N2N2NO10001O2N2N2N2N2N2N2N2N000000000000000000TG"}, "image_id": 517, "id": 9082}, {"iscrowd": 0, "category_id": 1, "bbox": [502.0, 326.0, 10.0, 18.0], "area": 97, "segmentation": {"size": [512, 512], "counts": "_Zk71n?2N2N2N2N2N2N1O2N2iE"}, "image_id": 517, "id": 9083}, {"iscrowd": 0, "category_id": 1, "bbox": [441.0, 383.0, 57.0, 56.0], "area": 1350, "segmentation": {"size": [512, 512], "counts": "cll61n?2N2N2N2N2N2M3N2N2N2N2N2N2N2N2N2N2N2N20O1N2N2N2N200000000001O0000000000000000ROQAj0S?00001OO2M2N2N2N2N2O1N2N2N2N2N2N3Mgb6"}, "image_id": 517, "id": 9084}, {"iscrowd": 0, "category_id": 1, "bbox": [501.0, 430.0, 11.0, 23.0], "area": 135, 
"segmentation": {"size": [512, 512], "counts": "hmj72m?2N2N2N2N2N2N2N3M2N2aB"}, "image_id": 517, "id": 9085}, {"iscrowd": 0, "category_id": 1, "bbox": [503.0, 474.0, 9.0, 15.0], "area": 85, "segmentation": {"size": [512, 512], "counts": "Pok72m?2N2N2N2N2N2O10OUA"}, "image_id": 517, "id": 9086}, {"iscrowd": 0, "category_id": 1, "bbox": [486.0, 192.0, 16.0, 14.0], "area": 135, "segmentation": {"size": [512, 512], "counts": "VVc74j?2N2N3O010O010O00010O01M2N3M2Nmi4"}, "image_id": 519, "id": 9087}, {"iscrowd": 0, "category_id": 1, "bbox": [460.0, 224.0, 52.0, 35.0], "area": 814, "segmentation": {"size": [512, 512], "counts": "\\WV72k?3M4L3M4O000010O010O010O00010O010O00010O010O0010O0010O010O00010O010O00010O010O010O00010O010O00010O010O]H"}, "image_id": 519, "id": 9088}, {"iscrowd": 0, "category_id": 1, "bbox": [442.0, 266.0, 70.0, 43.0], "area": 1111, "segmentation": {"size": [512, 512], "counts": "gXm61m?3L3N2M4M2O2O01O010O010O01O01O010O01O01O010O01O01O010O010O01O01O010O01O01O010O01O01O010O010O01O01O010O01O01O010O01O01O010O010O01O01O010O01kF"}, "image_id": 519, "id": 9089}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 31.0, 42.0], "area": 1042, "segmentation": {"size": [512, 512], "counts": "3n0R?1O5J4M000000O010000000O0100000O1002N000000O4K6K6K4L5K5K4K6Km^`7"}, "image_id": 520, "id": 9090}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 52.0, 2.0, 8.0], "area": 11, "segmentation": {"size": [512, 512], "counts": "d18m?KWnn7"}, "image_id": 520, "id": 9091}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 69.0, 41.0, 36.0], "area": 1064, "segmentation": {"size": [512, 512], "counts": "_2a0_?4L4L1N1JXOTAh0l>5010000000O0100000O10O100000O10005K000O0100000O10O1003M5J5L00OJa@M_?3:M4LV][7"}, "image_id": 520, "id": 9092}, {"iscrowd": 0, "category_id": 1, "bbox": [20.0, 123.0, 35.0, 53.0], "area": 1278, "segmentation": {"size": [512, 512], "counts": "fT:2n?4BLg@:g>E^A5G:g>F]Ae0a>_OZAb0e>=0003M4L2M0100000O0100000O10O1000004K6K2N1O4K6K4L5K4L4K6KjZT7"}, "image_id": 520, "id": 9093}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 155.0, 7.0, 28.0], "area": 114, "segmentation": {"size": [512, 512], "counts": "k4l0V?N5K4L4K6K4L]Zl7"}, "image_id": 520, "id": 9094}, {"iscrowd": 0, "category_id": 1, "bbox": [28.0, 206.0, 44.0, 37.0], "area": 1113, "segmentation": {"size": [512, 512], "counts": "gV>5k?4L4L5J5L4L2NOL50000O0100000O0100000O10O1000000O5L0000O0100000O10O102N4L5J2O0I`@N_?3e@I[?7;L4Lmhk6"}, "image_id": 520, "id": 9095}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 243.0, 15.0, 28.0], "area": 241, "segmentation": {"size": [512, 512], "counts": "c7k0V?0O01N2N2M3N1O2N2M3N2N1N3N2NSXh7"}, "image_id": 520, "id": 9096}, {"iscrowd": 0, "category_id": 1, "bbox": [40.0, 262.0, 44.0, 37.0], "area": 1115, "segmentation": {"size": [512, 512], "counts": "aXd02m?5L4L4L4K5L4GUOUAk0k>5O0100000O0100000O0100000O01000000O5L000000O0100000O0103M4L5J2OOJb@L^?4704LXge6"}, "image_id": 520, "id": 9097}, {"iscrowd": 0, "category_id": 1, "bbox": [13.0, 288.0, 24.0, 18.0], "area": 293, "segmentation": {"size": [512, 512], "counts": "Yi64h?4L401O01O0000O2N11O0001O01O0001O0001O01OL4K6KPW]7"}, "image_id": 520, "id": 9098}, {"iscrowd": 0, "category_id": 1, "bbox": [64.0, 296.0, 26.0, 18.0], "area": 319, "segmentation": {"size": [512, 512], "counts": "]YP13l?5L5K1OO10O100000O0100000O10O100000O0100000O2O5K4L^fb6"}, "image_id": 520, "id": 9099}, {"iscrowd": 0, "category_id": 1, "bbox": [56.0, 316.0, 41.0, 51.0], "area": 1203, "segmentation": {"size": [512, 512], "counts": 
"S[l02l?3M2M4M2N3M2E_OTAd0j>^OTAe0h>_OUAc0i>M200O2IPAZOR?c08M2M3N3L3N3L3NlUi7"}, "image_id": 520, "id": 9101}, {"iscrowd": 0, "category_id": 1, "bbox": [11.0, 359.0, 21.0, 27.0], "area": 316, "segmentation": {"size": [512, 512], "counts": "lk51m?3L3N2M4M2M4M2M3O20O010O0N2M4M2M4M2M3N3Lgd_7"}, "image_id": 520, "id": 9102}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 364.0, 4.0, 19.0], "area": 42, "segmentation": {"size": [512, 512], "counts": "\\;c0^?Ge@I]?Na@5i?Mddm7"}, "image_id": 520, "id": 9103}, {"iscrowd": 0, "category_id": 1, "bbox": [15.0, 393.0, 48.0, 43.0], "area": 1154, "segmentation": {"size": [512, 512], "counts": "Qm74j?2M3M4M2M3M4M2M4L30010O01O01O010O00010PAROl>Q11O013L0010ON2010O0010O0010O00010O0010L3M4M2M3M4M2M3M4MWSP7"}, "image_id": 520, "id": 9104}, {"iscrowd": 0, "category_id": 1, "bbox": [206.0, 451.0, 16.0, 22.0], "area": 191, "segmentation": {"size": [512, 512], "counts": "c^W34j?2M3N3L3N3L3O11N1N2M4M2M4M2M3Nla`4"}, "image_id": 521, "id": 9105}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 0.0, 50.0, 72.0], "area": 2254, "segmentation": {"size": [512, 512], "counts": "QQW71n?2M3N2N2N1O2N2N2N2N2M3N2N2N2N1O2N2N2M3N2N2N2N2N2N1O2N2M3N2O1O1O1O1O001O1O1O1O1O1O1O1O00N2O1O1O1O1O1"}, "image_id": 522, "id": 9106}, {"iscrowd": 0, "category_id": 1, "bbox": [322.0, 16.0, 139.0, 133.0], "area": 7895, "segmentation": {"size": [512, 512], "counts": "hQQ52m?2N2N2M3N2N2N2N2N1O2M3N2N2N2N2N2N2M3N2N1O2N2N2N2M3N2N2N2N2N1O2M3N2N2N2O1O10000000O100000O1000000000000O010000000000000O100000O1O1N2N1N3N2O1O10000000O010YBPN]=P2aBRN_=n1_BTNa=l1\\BWNd=h1[BZNe=Q2O100000000000000O0100000000000O10000000O2N3M4M2M2N00O100000O100O1O2M3N2N2N2M3N2N2M3N1O2N2M3N2N2N2M3N2N2M3N2N2N1N3Nf\\i0"}, "image_id": 522, "id": 9107}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 350.0, 75.0, 78.0], "area": 3144, "segmentation": {"size": [512, 512], "counts": "];?`?2N2N2N2N2N2N2M2O2N2N2N2N2N2O1000O100000000000N2N2N2N1O2N2N2N2000000000O10000000000000000O1000000000O100000O1N2N2N2N2N2N2N2N2N2N2XOUA8m>FUA8m>FUA8m>FUA8m>FUA8m>FUA8[?N2N2N2M\\Sj6"}, "image_id": 522, "id": 9108}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 392.0, 76.0, 120.0], "area": 5329, "segmentation": {"size": [512, 512], "counts": "XX14000000O10000K\\AnNd>Q1600000001N10O10GYAYOg>g09000000O100000000O10005K5K6J5J\\_^5"}, "image_id": 523, "id": 9112}, {"iscrowd": 0, "category_id": 1, "bbox": [194.0, 0.0, 62.0, 62.0], "area": 2582, "segmentation": {"size": [512, 512], "counts": "PPQ35;OQ?6j@OQ?6j@OQ?f0J00000000003MO100000000O100L4000000004L5K5K5K00000000O100000000O10003020J5L0000O10O100000O10O100000SOkANV>2oAIP>8UBCk=<[B_Oh=>]B]Oh=>R1K5K5JVoo3"}, "image_id": 523, "id": 9113}, {"iscrowd": 0, "category_id": 1, "bbox": [264.0, 0.0, 62.0, 71.0], "area": 2694, "segmentation": {"size": [512, 512], "counts": "PPT42n?5K5K5K5K5K3M0000O100000000O1000000005K5K5K5K5K5K5K5K3MO100000000O100000000O100000000O10002N5K5J6K5K5K5K5J5L00O1004K6K5K5KUnl2"}, "image_id": 523, "id": 9114}, {"iscrowd": 0, "category_id": 1, "bbox": [337.0, 0.0, 35.0, 26.0], "area": 494, "segmentation": {"size": [512, 512], "counts": "P`X51o?1O001`@NP?3m@OS?1l@1S?0k@2T?Oi@4V?Mh@4X?Lg@6X?:O001O1O1O0000O1N2O1N2O1O1N2O1N2O1O1N2O1O1N2O1N2OQPV2"}, "image_id": 523, "id": 9115}, {"iscrowd": 0, "category_id": 1, "bbox": [389.0, 0.0, 79.0, 47.0], "area": 1883, "segmentation": {"size": [512, 512], "counts": "S`R62l?3N101O1O001O1O1O001O1O001O1O001O1O1O001O1O00100O1N101O1O001O1O1O0010O01N101O0100O010O100O010O10O0N3M2N3N110O010O010O010O010O010O01\\OVA1i>MYA3h>J[A6d>G_A9b>DaA<^>BdA>]>_OfA?n>N3M2N3M2N3MRoe0"}, 
"image_id": 523, "id": 9116}, {"iscrowd": 0, "category_id": 1, "bbox": [449.0, 0.0, 22.0, 9.0], "area": 110, "segmentation": {"size": [512, 512], "counts": "P`P71o?001O001O001O001O001O001O001O001O00O1N2N2NR`d0"}, "image_id": 523, "id": 9117}, {"iscrowd": 0, "category_id": 1, "bbox": [484.0, 0.0, 3.0, 1.0], "area": 3, "segmentation": {"size": [512, 512], "counts": "PPb71o?000P`<"}, "image_id": 523, "id": 9118}, {"iscrowd": 0, "category_id": 1, "bbox": [52.0, 7.0, 53.0, 68.0], "area": 2364, "segmentation": {"size": [512, 512], "counts": "ZPj01c01g>4TA1g>3TA2h>2TA3f>e0OO100000O01004L4K5L5K3M02N1N0100000O05L4L4LO3N4L4L5K1N1000O10O1000O10O1000O1000L400O5L4WOVA4n>HVA4o>GUA5o>GVA3`?LQ^[6"}, "image_id": 523, "id": 9119}, {"iscrowd": 0, "category_id": 1, "bbox": [376.0, 17.0, 77.0, 57.0], "area": 1979, "segmentation": {"size": [512, 512], "counts": "TQl52l?2O2M3N2N1N3N2N2M2O20000O01000N2N1O2M3N2N1N3000O0100000O0100000O0100000O0100000O0100000O010O1N2N1N3N2N2O010O10O1000O10O1000O10O10N2N2M2O2N2M3N1O2M3N2N1NY^m0"}, "image_id": 523, "id": 9120}, {"iscrowd": 0, "category_id": 1, "bbox": [325.0, 43.0, 15.0, 15.0], "area": 124, "segmentation": {"size": [512, 512], "counts": "caR52l?2O2N2M2O2N200O01N2N1N3N2M3N`ne2"}, "image_id": 523, "id": 9121}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 44.0, 86.0, 62.0], "area": 2385, "segmentation": {"size": [512, 512], "counts": "jb]12m?2N2N2N2N2N2N2N2N2M3N2N2N2N2N1O00000002N2N1O000000000001O2N2N2N2N2N2N2O10000000N2N2N00000000000000000000000000000000002N2N2N2N1O00000000000000002N2N2N2N2N2N2N2N2N2N2N2N2NX^W5"}, "image_id": 523, "id": 9122}, {"iscrowd": 0, "category_id": 1, "bbox": [356.0, 46.0, 76.0, 56.0], "area": 1962, "segmentation": {"size": [512, 512], "counts": "QRb52m?2M3N1O2M3N2M2O2N2M21000O10O10N2N1N3N2M3N1O200000O01000O0100000O0100000O01000O0100000O01000O0O2N2M3N1O2O1O10O1000O01000O10O1000N1N3N2N2M2O2M3N2N1N3N2N\\mW1"}, "image_id": 523, "id": 9123}, {"iscrowd": 0, "category_id": 1, "bbox": [208.0, 52.0, 15.0, 15.0], "area": 119, "segmentation": {"size": [512, 512], "counts": "jQX31n?2N2N2N2N2N2O1000N2N2N2N2N2NV^`4"}, "image_id": 523, "id": 9124}, {"iscrowd": 0, "category_id": 1, "bbox": [132.0, 57.0, 80.0, 67.0], "area": 1994, "segmentation": {"size": [512, 512], "counts": "_SR21n?2N2N2N2N2N2N2N2N2N2N2N2N1O0000000000000000000000001O2N2N2N2N2N2N000002N000000000000000000001O2N2N2N2N2N0000000000000000000000000000002N2N2N2N2N2N2N2N2N2N2N2Nlme4"}, "image_id": 523, "id": 9125}, {"iscrowd": 0, "category_id": 1, "bbox": [181.0, 75.0, 60.0, 54.0], "area": 1612, "segmentation": {"size": [512, 512], "counts": "_cj22m?2N2N2N2N2N2N2N1O2N2N2M3N2N2N2N000000002N2N2N2N2N2N1O000O100000000001O2M2O2N2N2N2JTAUOn>i06N1O000000002N2N2N2N2N2N2M3N2N1O2NY]W4"}, "image_id": 523, "id": 9126}, {"iscrowd": 0, "category_id": 1, "bbox": [335.0, 77.0, 75.0, 51.0], "area": 1922, "segmentation": {"size": [512, 512], "counts": "mbW53l?2M2O2M3N2N1N3N2M2O2N2N20O1000O0RAWOf>i0XAXOi>h0TA[Ok>e0TA]Ol>k0100OO000O01002O10O10O10O1000O01000O10O1000O01000ON3N2M2O2N2O1O010000O010000O01000O010O1N1N3N2M3N1O2M3N1N3N2Nalb1"}, "image_id": 523, "id": 9127}, {"iscrowd": 0, "category_id": 1, "bbox": [213.0, 96.0, 70.0, 57.0], "area": 1939, "segmentation": {"size": [512, 512], "counts": "XdZ31n?2N2N2N2N2HFg@i0TAUOl>k0400000001O2N2N2N2N2N2N2N2N2N2N2N2N2Nd\\b3"}, "image_id": 523, "id": 9128}, {"iscrowd": 0, "category_id": 1, "bbox": [308.0, 100.0, 80.0, 54.0], "area": 2033, "segmentation": {"size": [512, 512], "counts": 
"eSj41m?3N1O2M3N1N3N2N2M2O2g@[OS?k0N2000O01000O0100000O01n@SOP?o0M3N11000O10O10O1000O10O10O10O1000O10O01N2M3N1N3N20O1N1N300O10O10O10O100N1N3N2N110O1000O01000OO2M3N2M2O2N2M3N1N3Nfkm1"}, "image_id": 523, "id": 9129}, {"iscrowd": 0, "category_id": 1, "bbox": [243.0, 125.0, 77.0, 56.0], "area": 2244, "segmentation": {"size": [512, 512], "counts": "Rei31n?2N2N2N2N2M3N2N2N2N2N2N2N2N2N1O2N2N00000000000O1000002N20000000000000000N2N1O0000000000000000000O1002N2N2N2M3N2N1O2N2N1O0000O1001O2N2N2N2N2N2M3N2N2N2N2Nfko2"}, "image_id": 523, "id": 9130}, {"iscrowd": 0, "category_id": 1, "bbox": [295.0, 165.0, 72.0, 68.0], "area": 2358, "segmentation": {"size": [512, 512], "counts": "bec42m?2N2N2N2N2N2N2N2N2N2N2N2N2O10000000000000000N2N2N2O10O10000000000000O1N2N2N2N2000000N2N2N2N2N2N2N2N2N20000000O1lNnA7T>GnA7T>GnA7T>ChAC6h0T>CRB;P>CRB;P>CRB;P>CRB;P>CRB;P>CRB;P>CRB;k>N2N2N2N2NZYX2"}, "image_id": 523, "id": 9131}, {"iscrowd": 0, "category_id": 1, "bbox": [259.0, 195.0, 81.0, 64.0], "area": 2413, "segmentation": {"size": [512, 512], "counts": "VgQ41n?2N2O10N10ZON_A2_>0`A1^>1`A1^>1`A1^>1`A1^>1`A1^>1`A1^>1`A1^>1`A1^>1`A1^>1`A1^>j0N2O1O10000000N2N2N2N2N2N20000000000000000000000000000O1N2N2N2N2N2N2O1000000000N2N000000000000000001O2N2N2N2N2N2N2N2N2N2N2N2N2Nghe2"}, "image_id": 523, "id": 9132}, {"iscrowd": 0, "category_id": 1, "bbox": [487.0, 243.0, 25.0, 38.0], "area": 558, "segmentation": {"size": [512, 512], "counts": "Uhc73l?2M2O2M3N1O2M3N2M2O2M3N1O2000O01000O10O10O10O100TH"}, "image_id": 523, "id": 9133}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 245.0, 34.0, 45.0], "area": 1068, "segmentation": {"size": [512, 512], "counts": "e7\\1e>M2N2N3O010O010O010O01O0N3N100010O010O0N3M2N3M2N3M2N3M2Jf@C^?:5N3L3N3MRh^7"}, "image_id": 523, "id": 9134}, {"iscrowd": 0, "category_id": 1, "bbox": [21.0, 264.0, 60.0, 46.0], "area": 1607, "segmentation": {"size": [512, 512], "counts": "\\i:1m?2N3M2M4M2N3L31O010O01O0N3M2N3L3N1O001N4O010O0010O001O0N3O01N1N201O010O010O010O000N1N1001N3N3M2N3L3N3M2N2M4M2N3M2M4M2NdWg6"}, "image_id": 523, "id": 9135}, {"iscrowd": 0, "category_id": 1, "bbox": [65.0, 274.0, 56.0, 62.0], "area": 1895, "segmentation": {"size": [512, 512], "counts": "fiP13k?2N3M2N3M2N3M2N2M4N110O0RAWOf>i0WAYOi>g0TA\\Om>l0M2N3M2N3M2N3M2N3M2O2O010O010O0010M2N3M2M4M2N3M2N3M1O00003M2M4M2N3M2N3M2N3M2N3L3NXWS6"}, "image_id": 523, "id": 9136}, {"iscrowd": 0, "category_id": 1, "bbox": [105.0, 282.0, 57.0, 64.0], "area": 1981, "segmentation": {"size": [512, 512], "counts": "^jd13k?2N3M2N3M2N3L3N3M2N3M2N3L3N2N3M2N3M2N3L1000001O2O20O010O010O010O0N3M2N2N3M2M10002N2N3M2N3L3N3M2N3M2N2M4M2N3M2N3Mof^5"}, "image_id": 523, "id": 9137}, {"iscrowd": 0, "category_id": 1, "bbox": [459.0, 289.0, 53.0, 71.0], "area": 2013, "segmentation": {"size": [512, 512], "counts": "niU72m?2N2M2O2N2M2O2M3N20O1000O010N2M2O2M3N2N1N3O10O01M3N2M2O2N2M2O2M3O10O10O10O1000O10O10O10OO2iAYNS>k1000O01000O10bF"}, "image_id": 523, "id": 9138}, {"iscrowd": 0, "category_id": 1, "bbox": [138.0, 308.0, 54.0, 61.0], "area": 1687, "segmentation": {"size": [512, 512], "counts": "jZU21l?3N3M2N2N3L3N3M2O2O010O0PAZOf>g0WA[Oj>d0TA_Ok>a0SAAn>h001M2N3M2M4M2N3M2M3N3M210O00O2M2N3L3N3M2N3L2O003M2N2M4M2N3M2M4M2N3M2N2M4M2NVfo4"}, "image_id": 523, "id": 9139}, {"iscrowd": 0, "category_id": 1, "bbox": [174.0, 334.0, 54.0, 60.0], "area": 1697, "segmentation": {"size": [512, 512], "counts": 
"Y[g23k?3M2N3M2N3M2M4M201O001QAXOe>g0YA[Od>h0ZA[Oc>h0ZAZOd>S1M2N2010O010O010O010O010O010O001N101lN^Ai0a>UObAj0_>SOcAn0\\>POgAo0Y>oNiAR1c>O012M0VOQAc0R?XOQAh0S?0010O010[Ok@_OVAc0h>_OVAc0g>_OWAc0h>eNmA[1T>cNnA]1Q>bNQB^1X>0100M3N1N3N2O10O1000O01000O10O1000O0100000O01000O0100000O001N2N2M2O2000O10O1000O01000O10O1000O0100O1M2O2N2M2O2M3N2N1NdC"}, "image_id": 523, "id": 9141}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 384.0, 34.0, 72.0], "area": 1464, "segmentation": {"size": [512, 512], "counts": "Y2N2N2O1N2N2N20O1N3M2N0TOoAJQ>7PBGP>9RBEn=;UBCj==XBAh=?ZB_Of=a0\\B]Od=c0^B[Od=c0^B[Od=c0^B[Od=c0^B[Od=c0^B[Od=c0^B[Od=c0^B[Od=c0n0N2Ic@H`?5b@J_?48O1N2Nbc^7"}, "image_id": 523, "id": 9142}, {"iscrowd": 0, "category_id": 1, "bbox": [397.0, 403.0, 74.0, 70.0], "area": 2073, "segmentation": {"size": [512, 512], "counts": "WmV62l?2O2M3N2M2O2M3N1O2000O10d@]OY?e0O2M3N1N3N2M2O2O1000O01000O01000O10O01O1O01000O01001O0O10O10O10UAPOd>Q1YAROg>S11000O01000O10O1kNZAo0e>PO]AP1j>O010TOSAd0m>ZOUAe0k>YOWAh0Q?00O0O2N2M2O2M3N1N3N2M3N1OjQd0"}, "image_id": 523, "id": 9143}, {"iscrowd": 0, "category_id": 1, "bbox": [84.0, 429.0, 59.0, 61.0], "area": 1932, "segmentation": {"size": [512, 512], "counts": "h^Z11n?2M3N2FKf@7R?Hm@2O8S?Hl@2O8R?`0N2N1L5N2N2M2O2N2N2M3N000O2O2O0010000000M2O2N2M3N1O2N2M3N2N1N3N2N2M2O2N00O010000O3N2N2N1N3N2N2M3N1O2N2MSRh5"}, "image_id": 523, "id": 9144}, {"iscrowd": 0, "category_id": 1, "bbox": [355.0, 435.0, 71.0, 68.0], "area": 2500, "segmentation": {"size": [512, 512], "counts": "ina51m?3N1O2M3N2M2O2M3N1g@]OT?e0j@]OS?i0O2M30O10PAQOn>Q1O01000O10O1000O0O2N2M1GjNiAU1X>mNeAT1Z>nNeAT1Y>nNdAU1Z>9N3N2M2O2M3O1O01000O01N2N2M2O2N2M2O2M3N1O2M3N2M2JYAROi>l07N1N3N2M3N1N3N2N1N3N2M3N1OhaZ1"}, "image_id": 523, "id": 9145}, {"iscrowd": 0, "category_id": 1, "bbox": [496.0, 439.0, 16.0, 59.0], "area": 513, "segmentation": {"size": [512, 512], "counts": "V_h71n?2SONhA5V>LiA5V>MgA6V>MhA5V>L`AJN<_>M`AIO<`>M_AIO<_>N_A>_>b0O2M3N2M2O2XB"}, "image_id": 523, "id": 9146}, {"iscrowd": 0, "category_id": 1, "bbox": [121.0, 448.0, 62.0, 64.0], "area": 2071, "segmentation": {"size": [512, 512], "counts": "bol11n?2M3N1O2N2M3N2N2N1N3N2N2N2AUOeAm0Z>UOdAm0Y>VOeAl0Y>VOeAk0Z>VOdAl0[>VOcAk0[>>O1O1O1N2O1O1O1N2O1O01O2M3N1O2N2M3N2N1O2M3N2N2N1N30000O1N1TOQAe0R?XOQAf0V?N1O2M3N2N2N2N1N3N2N2NTQT5"}, "image_id": 523, "id": 9147}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 454.0, 6.0, 11.0], "area": 36, "segmentation": {"size": [512, 512], "counts": "V>;f?N2N2N2N2Nfal7"}, "image_id": 523, "id": 9148}, {"iscrowd": 0, "category_id": 1, "bbox": [403.0, 477.0, 64.0, 35.0], "area": 1279, "segmentation": {"size": [512, 512], "counts": "ooY61n?1O1N2O1N2O1O1N2O1N2O1O1N2O1N21O1O1O001O1O001O00N2O1N2O1O1N2O1N2O1N2O1O1N2001O1O001O1O001O00N2OO102N1N3N2M2O2N2M3N1N3N2M2O2N2MjPf0"}, "image_id": 523, "id": 9149}, {"iscrowd": 0, "category_id": 1, "bbox": [59.0, 479.0, 67.0, 33.0], "area": 1040, "segmentation": {"size": [512, 512], "counts": "]om01n?2N2N2N2N2N2N2N2N2N2N2N1O2N200000000000O1O1O1OO1O1O11O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1OO1O1O1O1O1O1O11O1O1O1O1O1O1O1O1O1O1O1O1OQ`P6"}, "image_id": 523, "id": 9150}, {"iscrowd": 0, "category_id": 1, "bbox": [42.0, 502.0, 20.0, 10.0], "area": 115, "segmentation": {"size": [512, 512], "counts": "o_e01n?1O1O1O100O1O1O1O1O100001O2N1O1O1O1O2NQ`P7"}, "image_id": 523, "id": 9151}, {"iscrowd": 0, "category_id": 1, "bbox": [66.0, 511.0, 3.0, 1.0], "area": 3, "segmentation": {"size": [512, 512], "counts": "o_Q11o?000QPm6"}, "image_id": 523, "id": 9152}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 29.0, 32.0], 
"area": 481, "segmentation": {"size": [512, 512], "counts": "0P1P?O1O1O1O1N2O1O1O1O1O1O1O1O1O1N2O1O1O1O1O1O1O1O1O1O1N2O1OQ`a7"}, "image_id": 525, "id": 9153}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 224.0, 296.0], "area": 28541, "segmentation": {"size": [512, 512], "counts": "c6Q2n=2N2N2M3N2N2N2N2N2N2N2N1O2N2M3N2N2N2N2N2N2N1O00000O10000000O10000000000000O3N2N1O2N2N2N2N2N1O000O010000000000000000000O01000000000ZLSLZKm3f4ULXKk3h4WLVKi3j4YLTKg3l4[LRKe3n4]LoJc3R5_LlJa3T5aLjJ_3V5cLhJ]3X5eLfJ[3Y5hLeJX3[5iLdJW3\\5kLbJU3^5mL`JS3`5oL^JQ3b5QM\\Jo2d5SMZJl2g5VMVJk2j5WMTJi2l5YMRJg2n5[MPJe2Q6\\MmId2T6]MjIc2W6^MgIb2Z6^MeIb2\\6_MbIa2_6`M_I`2b6aM\\I_2e6bMXI^2j6cMTI]2m6dMQI\\2P7eMnH[2S7fMkHZ2V7gMhHY2Y7hMeHX2\\7iMbHW2_7iM`HW2a7jM]HV2d7kMZHU2g7lMVHT2l7mMRHS2o7nMoGR2R8oMlGQ2U8PNiGP2X8QNfGo1[8RNcGn1^8SN`Gm1a8TN]Gl1d8TN[Gl1f8UNXGk1i8VNTGj1n8WNPGi1Q9XNmFh1T9YNjFg1W9ZNgFf1Z9[NdFe1]9\\NaFd1`9]N^Fc1c9\\21O1O1N2O1O1O1O1O1O1O1O1O1O1N2O1O1O1O1O1O1O1O1O1O1O1N2O1O1O1O1O1O1O1O1O1O1N2O1O1O1O1O1O1O1O1O1RLjDQ3W;nLkD^2NXMY;9kD]2NXMX;:lD\\2NXMW;;mD[2NXMV;QEX2OXMQ;?REW2a;hMaDV2`;iMbDU2_;jMcDT2^;kMdDS2];lMeDR2\\;mMfDQ2[;nMgDP2Z;oMhDo1Y;oMjDo1W;PNkDm1W;RNjDm1W;RNkDl1V;SNlDk1U;TNmDj1fi0eAYOY>i0eAZOW>j0fAXOX>j0eAZOX>i0eAYOY>Y1L3N2N3L3N3M210O01O010O01O010OO0O03M2O2O010O01O010O01O010O01O010O010O01O01OO2L3N3M2N1N10O11N3N3M2M3N3M2N3L3010O010M2N2M4M2N3M2M4M2N2M4M2N1N1000O01000O10O1000O01000O03N2N3M2M4M2N3L3N2N3]OgAZO\\>d0fAZO]>c0fAZO\\>c0gAZO]>c0fAZO\\>d0a0M4M2N3L3N3M2NV\\i0"}, "image_id": 525, "id": 9155}, {"iscrowd": 0, "category_id": 1, "bbox": [332.0, 192.0, 17.0, 23.0], "area": 196, "segmentation": {"size": [512, 512], "counts": "bVV51m?3L3N3M2N3L3N2N3ON3L3N3M2M4M2N2NoYa2"}, "image_id": 525, "id": 9156}, {"iscrowd": 0, "category_id": 1, "bbox": [273.0, 200.0, 33.0, 38.0], "area": 406, "segmentation": {"size": [512, 512], "counts": "WgX41m?3N2N2N1N3N2N2N00O01000000O01000000O010000000O01002N2M3N2N2N1O2MdiV3"}, "image_id": 525, "id": 9157}, {"iscrowd": 0, "category_id": 1, "bbox": [70.0, 353.0, 36.0, 35.0], "area": 668, "segmentation": {"size": [512, 512], "counts": "a[S12m?2N2M3N1O2N2N2N2N2N2N2N2N2N1O2O10000000O1N2N1O2N2N2N2N2N2N2N2N2N2N2N2N^dZ6"}, "image_id": 525, "id": 9158}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 381.0, 18.0, 18.0], "area": 161, "segmentation": {"size": [512, 512], "counts": "Wl]12m?2N1O2M3N2N2N2N0001O2N2M3N2N2N2N1OnSY6"}, "image_id": 525, "id": 9159}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 491.0, 19.0, 21.0], "area": 298, "segmentation": {"size": [512, 512], "counts": "[?e0\\?O00001O00001O00001O001O000O2L3M3M4LaPf7"}, "image_id": 525, "id": 9160}, {"iscrowd": 0, "category_id": 1, "bbox": [262.0, 495.0, 23.0, 17.0], "area": 268, "segmentation": {"size": [512, 512], "counts": "m_S41l?4M2N3M2N2M300001O001O001O00001O001O0O2M2M4M\\Pa3"}, "image_id": 525, "id": 9161}, {"iscrowd": 0, "category_id": 1, "bbox": [170.0, 503.0, 25.0, 9.0], "area": 128, "segmentation": {"size": [512, 512], "counts": "n_e22l?2M3N200001O00001O001O001O00001O001O00001O001O00QPn4"}, "image_id": 525, "id": 9162}, {"iscrowd": 0, "category_id": 1, "bbox": [350.0, 308.0, 22.0, 17.0], "area": 198, "segmentation": {"size": [512, 512], "counts": "lY_51m?3L3N2O2O010O010O010O01O01O010O010OO2M2N3MVfU2"}, "image_id": 527, "id": 9163}, {"iscrowd": 0, "category_id": 1, "bbox": [392.0, 381.0, 24.0, 22.0], "area": 340, "segmentation": {"size": [512, 512], "counts": "Y\\T64i?3N3L3N2N3O00010O010O00010O01O01O01O0M3N3L3M4Mnc_1"}, "image_id": 527, "id": 9164}, {"iscrowd": 0, "category_id": 1, "bbox": [315.0, 405.0, 110.0, 60.0], "area": 
2448, "segmentation": {"size": [512, 512], "counts": "_mm41m?3L3N2M4M2N3L3N3M2M3N30O01O010O01O01O010OO2M2N2M4O0010O010O00010O010O0010O0010O0010O010O00010O010O01O01O010O01O01O010O01O010O01O01O010O010O00010O010O0010O0010O010O0010O0010O0010O010O00010O010O01O01O010O0O1N3L3N3M2M3N3MUR[1"}, "image_id": 527, "id": 9165}, {"iscrowd": 0, "category_id": 1, "bbox": [57.0, 0.0, 72.0, 23.0], "area": 835, "segmentation": {"size": [512, 512], "counts": "P`l02n?001O001O001O00001O001O00001O001O001O00M3N2M31O001V@Me?3Y@Og?501O001O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O001O0000M3N2N2M3N2M3N2NR`o5"}, "image_id": 530, "id": 9166}, {"iscrowd": 0, "category_id": 1, "bbox": [226.0, 0.0, 5.0, 2.0], "area": 7, "segmentation": {"size": [512, 512], "counts": "PPa31o?00001O0P`\\4"}, "image_id": 530, "id": 9167}, {"iscrowd": 0, "category_id": 1, "bbox": [248.0, 0.0, 62.0, 13.0], "area": 421, "segmentation": {"size": [512, 512], "counts": "PPl31o?0000001O00001O00001O00001O00001O00001O00001O00001O00001O00001O00001O00001O0000O1M3O100000000O100000000O100000000O1000004LloT3"}, "image_id": 530, "id": 9168}, {"iscrowd": 0, "category_id": 1, "bbox": [349.0, 0.0, 44.0, 22.0], "area": 502, "segmentation": {"size": [512, 512], "counts": "P`^51o?00001O001O00001O00001O001O00001\\@J\\?6a@N^?2`@0`?9O00001O001O00001O00001O001O00001O00M3M3N3L3N3L3MQ`k1"}, "image_id": 530, "id": 9169}, {"iscrowd": 0, "category_id": 1, "bbox": [406.0, 0.0, 60.0, 70.0], "area": 2451, "segmentation": {"size": [512, 512], "counts": "iQ[62k?4L3N2dNGnB=o8[AKc>7[ALa>7\\ALb>7[ALa>7\\ALb>7[AKb>l0M2kAbNi=^1SBeNm=[1QBhNn=X1oAkNR>a1010O01O0O1N3L3N3O01O01O01O0M4M2M3N0O10O010O010O010O0101N3"}, "image_id": 530, "id": 9171}, {"iscrowd": 0, "category_id": 1, "bbox": [16.0, 6.0, 115.0, 89.0], "area": 3578, "segmentation": {"size": [512, 512], "counts": "dQ81l?3N2M4M2N3L3N2M4M2M4M2M3N3M2M4M2M3N3M210O00010O010O01oNgA:X>ClABnA>S>^OQBb0n=\\OTBd0m=XOVBi0i=UOZBj0f=SO]Bm0\\>0O0010O0010O0010RORAh0S?010O010O0010O0010O0010O0010O0010O0010O0010O0010O010O00010O010O0M4M21O010O01O01O010O01O01O01M2N3L3N2M40O01O010O01O01O010ON3M2SO[ABZA@[ABZAN2M4M2JhAaN\\>[16N2M4M2M4M2M3N3L3M4M2M3N3L3N3Ld_e7"}, "image_id": 530, "id": 9173}, {"iscrowd": 0, "category_id": 1, "bbox": [209.0, 13.0, 75.0, 77.0], "area": 3387, "segmentation": {"size": [512, 512], "counts": "VaX33f0N]>6_AN^>4`AN]>5`AN^>5_AN]>5`AN]>5`AO]>4`AN]>l0N30O00010O010O00010OO2L3N210O01O01O010O00010O010O00010O01O01M2O20O00010O010hNRBO00010O0010O0010O00010O010O0[OTA6l>GWA7l>FWA6l>GWA7k>FXA7[?M2Mkma3"}, "image_id": 530, "id": 9174}, {"iscrowd": 0, "category_id": 1, "bbox": [303.0, 49.0, 69.0, 62.0], "area": 2457, "segmentation": {"size": [512, 512], "counts": "Ubg41o?5K4@In@=m>Hn@=m>`0KO10O100000O15J6K5K4L0O1N200O010000000O010000000O0103M5_NgAQ1^>jNeAR1e>00000O10O4M5K3M000O1000O1000O1000O1000O1000O1000O1000L4O01[On@IXA2h>M^ANg>MXnU2"}, "image_id": 530, "id": 9175}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 68.0, 113.0, 73.0], "area": 3715, "segmentation": {"size": [512, 512], "counts": "k2;b?3N2N3L3010O010O0010O0010O00N3L3N3L3N2N3L3010O010O0010O0001M2N30O0010O010O0010O0010O0010O0010O0010O0010O010O001TOUAb0j>\\OXAd0h>YO\\Ag0d>VO^Aj0l>010O010O0O2L3RAQOi>U1N11O010O01O01O010O010O01N100010O010O0010O0010O010O00010iNYAS1k>0O00010OO2M2M3N3M2M4M2M3N3L3N3M2Mf\\W6"}, "image_id": 530, "id": 9176}, {"iscrowd": 0, "category_id": 1, "bbox": [406.0, 89.0, 70.0, 66.0], "area": 2521, "segmentation": {"size": [512, 512], "counts": 
"ST[62l?2N3M2CIo@:n>HPA:n>Io@:n>HQA9m>?N1N3M2N3N10100O010ON1O000001O3O0010O010N1N3M2N3O0010bAmNm=S1QBPOo=P1oAQOQ>o0mATOQ>n0lATOR>n0lAUOR>m0kAUOV>[1O010O010O10O0_NmAV1S>gNoAY1Q>eNRB[1n=bNTB]1Y>M2N3M2N3M2O2M2N3M2N3M2N3M2N3M2N3M2O\\la0"}, "image_id": 530, "id": 9177}, {"iscrowd": 0, "category_id": 1, "bbox": [198.0, 95.0, 59.0, 47.0], "area": 1715, "segmentation": {"size": [512, 512], "counts": "QTS32l?2N2M4M2GEj@?S?Ck@?R?Dl@?Q?9M3N3M2N30O00010O010O0O2M20010O001L3N210O0010O0QASOl>P101O010M2N2N3O010O01O010O0M3N3M2M4M2M3N3M2M4M2M4M2N2Mh\\o3"}, "image_id": 530, "id": 9178}, {"iscrowd": 0, "category_id": 1, "bbox": [381.0, 95.0, 12.0, 17.0], "area": 111, "segmentation": {"size": [512, 512], "counts": "Zcn53k?3M2M3N3M210OM4M2M3N3MP]k1"}, "image_id": 530, "id": 9179}, {"iscrowd": 0, "category_id": 1, "bbox": [491.0, 125.0, 21.0, 43.0], "area": 467, "segmentation": {"size": [512, 512], "counts": "bde72m?2N2N2N2N2N2N2N2N2N2N2N2N2N2N2M3N2N2N2N2RL"}, "image_id": 530, "id": 9180}, {"iscrowd": 0, "category_id": 1, "bbox": [236.0, 127.0, 68.0, 74.0], "area": 2250, "segmentation": {"size": [512, 512], "counts": "oUf33k?2M4M2M3M4M2M4M2O110O0010O0010O00010O01M2M3N3N1000O2M2M4L3N2M4\\OoNUBS1i=oNTBU1h=nNVBT1h=oNWBQ1f=RO[Bn0a=UO_Bk0_=XO`Bh0_=YObBg0]=ZO_Bh0c=WOZBj0h=VOVBi0m=c03L3N3O00010O000YNlA_1\\>N3L3N2M4M2M301O001O0N2M4Ck@KW?3k@JY?2j@LX?2nkW3"}, "image_id": 530, "id": 9181}, {"iscrowd": 0, "category_id": 1, "bbox": [175.0, 139.0, 58.0, 52.0], "area": 1708, "segmentation": {"size": [512, 512], "counts": "[eg23k?2M3N3L3N3O01O01M2M4M2M4M2N2M4O010O000N2M12010O01O01O010O01mNYAk0g>RO]An0j>0010O010O00010O01L30010O010O00010O01mNUAP1n>M2N3M2M3Bj@0Y?Mi@0Z?Mi@1Xj[4"}, "image_id": 530, "id": 9182}, {"iscrowd": 0, "category_id": 1, "bbox": [423.0, 152.0, 36.0, 39.0], "area": 753, "segmentation": {"size": [512, 512], "counts": "aec63k?2O2M3M2O2M2N3N2M2N3N1N3M3N1N3O001000O0N3M2O2M3M2O2M2N3N2M2N3N1N3N2M2NoZj0"}, "image_id": 530, "id": 9183}, {"iscrowd": 0, "category_id": 1, "bbox": [297.0, 166.0, 65.0, 70.0], "area": 2553, "segmentation": {"size": [512, 512], "counts": "ffd42m?1N3M3N1N3JEd@iNcAW1]>60O010O00011NkAbNl=]1SBfNm=W1TBjNm=T1RBoNm=o0TBSOl=j0TBXOm=f0RB]Om=a0TB@m=>RBEm=:SBHm=8PBJQ>6lAMS>3lAOT>n0010O01M3N1N3M03M3N1N3N1N3ChAnN[>o0hAoNY>P1hAnN[>o0hAnN[>o0iNhAU1Z>iNhAU1Z>iNhAU1c>N2N2N2N2N2N2N2N2N2N`I"}, "image_id": 530, "id": 9185}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 202.0, 39.0, 93.0], "area": 2204, "segmentation": {"size": [512, 512], "counts": "\\6[15KS=7kBKU=a1O010O00010O0010O0010OO1N3eMgBj1\\=TNfBj1]=RNgBj1\\=TNfBj1j=L3N2M4L3N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2MnX\\7"}, "image_id": 530, "id": 9186}, {"iscrowd": 0, "category_id": 1, "bbox": [358.0, 205.0, 92.0, 85.0], "area": 3528, "segmentation": {"size": [512, 512], "counts": "fWc51n?2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2O10000000000000000000000N2N2N2N2N2N2N2N2N2N2N2N2N00000000000000000000JRB[Nn=e1TBYNl=g1VBWNj=i160000000000LlA]NT>c1nA[NS>d16N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2Njhn0"}, "image_id": 530, "id": 9187}, {"iscrowd": 0, "category_id": 1, "bbox": [34.0, 232.0, 66.0, 87.0], "area": 2926, "segmentation": {"size": [512, 512], "counts": "`Ya02l?2N3L3N3L3N2O2O001YO\\OoAe0o=]OoAe0n=^OoAf0n=]OoAe0n=^OoAf0n=\\OPBf0m=ZOSBi0k=YOSBj0i=YOTBj0j=h0O00010M201O01O010O01O01O010O0O1N1N010O10O010O03N3L3N2M4M2N3L3N2M4M2M4M2M4M2M3N3L3N3L3N2N3L3Nah]6"}, "image_id": 530, "id": 9188}, {"iscrowd": 0, "category_id": 1, "bbox": [84.0, 260.0, 68.0, 87.0], "area": 2841, "segmentation": {"size": [512, 512], "counts": 
"VYZ13k?2M3N3L3l@Db>?ZADg>;WAHh>8UAKl>5QAMo>c0O010O01O01M2M4M2M3010O01lAgN`=X1]BkNc=U1[BmNe=T1WBPOi=o0UBSOk=n0RBUOn=^100010O010O00010OO2M1N010O010O010O0102M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N2M4Megc5"}, "image_id": 530, "id": 9189}, {"iscrowd": 0, "category_id": 1, "bbox": [431.0, 261.0, 61.0, 72.0], "area": 2372, "segmentation": {"size": [512, 512], "counts": "Sig62m?2AMn@5P?Mo@4o>No@5n>MPA5n>No@4>Cn=;bA4>Cn=m0PBUOn=m0PBVOm=l0QBVOm=l0RBUOl=m0RBUOl=m0RBUOm=l0QBVOo=j0oAXOQ>\\1000000N0000000001O2N1O000000001O3N1N1O000ChASOX>m0jAQOV>o0lAoNU>P1mAnNU>P1mAnNU>P1mAnNU>Q1>N2N2N2N00001O3XOi@b0\\?N2N2N2N2N2N2N2N2NYg9"}, "image_id": 530, "id": 9190}, {"iscrowd": 0, "category_id": 1, "bbox": [502.0, 267.0, 10.0, 17.0], "area": 101, "segmentation": {"size": [512, 512], "counts": "cXk71n?2N3M2N2N2N2N2N01OfG"}, "image_id": 530, "id": 9191}, {"iscrowd": 0, "category_id": 1, "bbox": [153.0, 272.0, 59.0, 86.0], "area": 2797, "segmentation": {"size": [512, 512], "counts": "bi\\23k?2YAM^=7QBHJ4R>6QBIK3Q>7RBII3S>7PBIK3R>6QBJI3S>7QB:l=HQB;l=n0N30O010O01O01O010O01O01O010O01O01O010M2M3N3L3N2N0O010O010O010O103M2M4M2M3N3L3N3L3N3L3N2N3L3N3L3N2M\\ge4"}, "image_id": 530, "id": 9192}, {"iscrowd": 0, "category_id": 1, "bbox": [203.0, 297.0, 62.0, 88.0], "area": 2779, "segmentation": {"size": [512, 512], "counts": "bkU31m?2N3M2O2M2N3QOBXBa0f=@YBa0e=BXBa0e=AnALNe0R>BnAJOf0Q>BmAKOf0Q>AnALNe0R>o0N1N3M2N3N2M2N3O00100N0O0001O010O3M2O2M2N3M3N1N3M2O2M2N3M3N1N3M2O2M3kNXAn0o>M2O2M2N3N1N3M3M2O2M2N3M3NQVk3"}, "image_id": 530, "id": 9193}, {"iscrowd": 0, "category_id": 1, "bbox": [14.0, 299.0, 18.0, 19.0], "area": 195, "segmentation": {"size": [512, 512], "counts": "gY72l?3M2M3N3M2O20O010O0010ON2N3M2M4M2Nbf_7"}, "image_id": 530, "id": 9194}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 300.0, 6.0, 10.0], "area": 39, "segmentation": {"size": [512, 512], "counts": "\\99g?10O0M3N3Mdfl7"}, "image_id": 530, "id": 9195}, {"iscrowd": 0, "category_id": 1, "bbox": [1.0, 300.0, 15.0, 20.0], "area": 156, "segmentation": {"size": [512, 512], "counts": "ki02l?3M2N2N3M2N3M2OO3M2N3M2N3M2Mdfg7"}, "image_id": 530, "id": 9196}, {"iscrowd": 0, "category_id": 1, "bbox": [30.0, 314.0, 29.0, 19.0], "area": 300, "segmentation": {"size": [512, 512], "counts": "PZ?1l?4M2^@JY?8c@K]?5a@N^?:10O01O01OO2L3N3O0010O0010O0010O0010O0010OO1N3LoUR7"}, "image_id": 530, "id": 9197}, {"iscrowd": 0, "category_id": 1, "bbox": [223.0, 320.0, 100.0, 119.0], "area": 3592, "segmentation": {"size": [512, 512], "counts": "ml_31n?2M2N3M3M2O2M2N3O01000O010O010O01000O010O010O0100O01O1M2N3M2O2M2N3M3N1N3O010O010O1M2N3NO0001O0001O01O0001O01O0HaNQB_1P>bNoA]1Y=bNXC4\\O[1[=dNVCd1j<^NTCb1m<`NQC_1oPOYAP1g>32M3O0010O1O0O2M3N2N2M310O010N1N2M4M2M4M2N2M4M2M4N100O2O000ON3N1N3N2M2O2GRB^NP>_1TB]Nn=b19N3L3N2M4M2M4M2M3N3M2M4M2M4M2M3N3L3N\\UR7"}, "image_id": 530, "id": 9199}, {"iscrowd": 0, "category_id": 1, "bbox": [300.0, 353.0, 89.0, 85.0], "area": 3261, "segmentation": {"size": [512, 512], "counts": "o\\f42m?2N2N2N2N2N2N2N2N2N2N2N2N2N2DSObAo0\\>SObAo0\\>ROcAP1[>ROcAP1[>ROcAP1[>k0cAXOZ>k0cAWO[>l0cAVO[>k0cAWO[>Y1M3N1N3M2N3N1N3M3N1N3M2N3N2M2N3N1N3M2O2M001O01O011N2N3M3N1N3M2O2M2E_BUNd=h1_BVNc=h1^BVNd=h1_BUNd=h1;O2M3M2O2M2N3M2O2M3M2O2M2N3M3N1N3M2O2M2N3M3N1N3M2OncU6"}, "image_id": 530, "id": 9201}, {"iscrowd": 0, "category_id": 1, "bbox": [171.0, 391.0, 16.0, 14.0], "area": 130, "segmentation": {"size": [512, 512], "counts": "]le22l?3N1N3N1010O01000O010O001M3M2OdSR5"}, "image_id": 530, "id": 9202}, {"iscrowd": 0, "category_id": 1, "bbox": [371.0, 398.0, 14.0, 15.0], "area": 116, "segmentation": 
{"size": [512, 512], "counts": "fli52m?2N2M3N2N2N0001O1O2N2N2M3N_So1"}, "image_id": 530, "id": 9203}, {"iscrowd": 0, "category_id": 1, "bbox": [83.0, 409.0, 85.0, 92.0], "area": 3594, "segmentation": {"size": [512, 512], "counts": "RoY11n?2M2N3M2O2M3M2O2M2N3M3N1N1O011N2BSOgAP1V>ROhAP1V>SOhAo0V>ROhAQ1U>>01O010O10O10O010O10O01M3N1N3M2O2N2OO2M1O10O0010O0001O01O01O01O010M5M3N1N3M2N3N2M2N3N1N3M2N10O0003N1N3M2N3N1N3M3N1N3M2N3N2M2N3Nmb[5"}, "image_id": 530, "id": 9204}, {"iscrowd": 0, "category_id": 1, "bbox": [333.0, 413.0, 82.0, 78.0], "area": 2793, "segmentation": {"size": [512, 512], "counts": "gnV51n?2N2N2N2N2N2N2N2N2N2SO\\O[Bf0c=\\O[Bf0c=\\O[Bf0c=\\O[Bf0c=\\O[Bf0c=\\O[Bf0c=\\O[Bf0c=\\O[Bf0c=\\O[Bf0c=\\O[Bd0e=^OYBb0g=@WB`0i=BUB>k=k0000000000000002N2N2N2N2N2N2N2N2N2N2N2mN]Af0e>XO]Af0e>XO]Af0e>XO]Af0e>XO]Af0n>000H[OVAe0j>]OTAc0l>_ORAa0n>8000000000000000000000000002N2N2N2N2N2N2N2N2N2N2N2N2N2N2NeR`1"}, "image_id": 530, "id": 9205}, {"iscrowd": 0, "category_id": 1, "bbox": [143.0, 431.0, 75.0, 81.0], "area": 2768, "segmentation": {"size": [512, 512], "counts": "XoW23k?2O2M2N3N2M2N3N1N3M3N1N3M2O2N2OO2M[ARO]>l0cAUO]>i0cAZO]>f0`A\\O`>d0_A^Oa>b0\\AAd>m0010O0000N2O1N2N2O1N2N2O1N2N2O1N0010O00010O00010O00010O1O3N2M2N3N1IcAiN`>T1cAiN_>U18N2M2N3N1N3M3N1N3M2O2M3M2O2M2N3NTbb4"}, "image_id": 530, "id": 9206}, {"iscrowd": 0, "category_id": 1, "bbox": [233.0, 431.0, 13.0, 14.0], "area": 101, "segmentation": {"size": [512, 512], "counts": "hmd32m?1N3M2N3M201OO2N1N3M2N3M`bT4"}, "image_id": 530, "id": 9207}, {"iscrowd": 0, "category_id": 1, "bbox": [374.0, 446.0, 65.0, 66.0], "area": 2153, "segmentation": {"size": [512, 512], "counts": "o_k51n?1O1O1N2O1O1O1O1O1]OF_A;_>H_A9`>I^A8a>J]A7b>K\\A6c>K\\A6c>L[A5d>MZA4e>NYA3f>d0O1O1001O1O00O1O1O1O1N2O1O1O1O1O1O1O00001O2N2N1O000O10O1ZOlAAT>?nA_OR>a0PB]OR>a0PB]OR>a0PB]OR>a0PB]OR>a0PB]OQ>b0QB\\OQ>a0RB]OP>a0RB]OP>a0RB]OP>a0i0Ic@H_?6c@H_?67N2N2NeQT1"}, "image_id": 530, "id": 9208}, {"iscrowd": 0, "category_id": 1, "bbox": [204.0, 458.0, 80.0, 54.0], "area": 2031, "segmentation": {"size": [512, 512], "counts": "g_V32m?2N2N2N2N2N2N2N1O1O1O1O1O1O1O11O1O1O1O1OO1O1O1O1O1O1O1O1O1O1O1O1O11O1O00O1O1O1O1O1O1O1O1O1O000000000000000000000000000002N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2NTaa3"}, "image_id": 530, "id": 9209}, {"iscrowd": 0, "category_id": 1, "bbox": [284.0, 460.0, 17.0, 17.0], "area": 153, "segmentation": {"size": [512, 512], "counts": "f^^42m?2N2M3N1O2N2N1O0002N2N2N2N1O2N2M`QY3"}, "image_id": 530, "id": 9210}, {"iscrowd": 0, "category_id": 1, "bbox": [437.0, 477.0, 48.0, 35.0], "area": 857, "segmentation": {"size": [512, 512], "counts": "ooj61n?1O1O1O1O1O1O1N2O1O1O1O1O1O1O1O1O1O1I]OPAd0o>^Oo@c0P?_On@b0Q?7O1O1N2O11O1O1O1O1O1O1O1O1O1N2N2M3N2N2N2N2N2N1O2N2N`P="}, "image_id": 530, "id": 9211}, {"iscrowd": 0, "category_id": 1, "bbox": [273.0, 507.0, 9.0, 5.0], "area": 25, "segmentation": {"size": [512, 512], "counts": "ooX41n?1O1O1O11O1O1O1OQ`b3"}, "image_id": 530, "id": 9212}, {"iscrowd": 0, "category_id": 1, "bbox": [190.0, 122.0, 67.0, 51.0], "area": 1934, "segmentation": {"size": [512, 512], "counts": "fTo22l?3M2M3N3L3N3L3N3L3N2M4N110O01O01O0VAPOe>U10010O010O00010O010O00010O010O000N3O0010M2N3L3N2M4M2N20O2M2N0O04O001O010O01O01N1N3M2M3N3L3N3L3N2MP\\o3"}, "image_id": 531, "id": 9213}, {"iscrowd": 0, "category_id": 1, "bbox": [242.0, 164.0, 69.0, 99.0], "area": 3465, "segmentation": {"size": [512, 512], "counts": 
"hVi32k?3M4M2M4L3N2M4M2M4L3N2O2O0RBmNoZBDg=;VBIi=W10001L3N3L3N2M4L3O1010O01O01O0N3L3N3L0010O010O03E_BTNd=j1_BSNc=j1?lAES>:gAIZ>6cAN\\>3aAO`>i0O00010O0010O0010O0010O0N3L3O110O0010O00010O0010OM3N3L3N3N11O010O0O1M4M2M4L3N2M4L3N3L3M3N3L3N2M4L3N`jl1"}, "image_id": 531, "id": 9215}, {"iscrowd": 0, "category_id": 1, "bbox": [170.0, 173.0, 60.0, 62.0], "area": 1957, "segmentation": {"size": [512, 512], "counts": "QWe23j?3N3L3N2M4M2M4N11O010ON3M2M3N3L3N3L3N2M2N01O102M3N3M210O00010O01M2N2M4M21O01O010O01O000M4L3N3L3N2M4M2M4M2M3N3L3N2M4L3N\\j\\4"}, "image_id": 531, "id": 9216}, {"iscrowd": 0, "category_id": 1, "bbox": [445.0, 216.0, 63.0, 64.0], "area": 2245, "segmentation": {"size": [512, 512], "counts": "bgn62l?2N3L3N2N3L3O2O010O000N3M2M4M2N2O2cAPOh=P1VBROj=o0RBUOn=j0PBXOP>i0lAZOU>e0iA^OV>b0hA@X>U10O010O0010O0010O0010O010O0010O001N1N2N0O10003L3N3L3N2N3L3N3M2M4M2M3N3M2M4M2Noh1"}, "image_id": 531, "id": 9217}, {"iscrowd": 0, "category_id": 1, "bbox": [309.0, 219.0, 57.0, 59.0], "area": 2331, "segmentation": {"size": [512, 512], "counts": "Rgj41W10^=2_B2^=1nALO5Q>1mAM05R>OkAOO5V>LiA1O6W>JfA404Z>HdA6O5\\>o0O001O010O00010O010O010O00O2M2M4N110O00010O010O010O00010O010O010O00010O010O001M20001YO[A2d>L^A4c>H`AGI;i>LeA0_>McA1_>LeA1]>MeA0PWY2"}, "image_id": 531, "id": 9218}, {"iscrowd": 0, "category_id": 1, "bbox": [494.0, 241.0, 18.0, 51.0], "area": 491, "segmentation": {"size": [512, 512], "counts": "jXg74j?2N3L3N2M4M2N3L3N2M4M2M4M2N2M4M2M3_H"}, "image_id": 531, "id": 9219}, {"iscrowd": 0, "category_id": 1, "bbox": [136.0, 277.0, 61.0, 81.0], "area": 2687, "segmentation": {"size": [512, 512], "counts": "^ZT24j?2M4M2nNE\\B>a=F\\B=a=E\\B>a=F[B>b=D\\B>a=E\\B>b=E[B>c=CZB`0MPO_=a0aBa0NPOb=>]BP1b=PO\\BR1e=d001O010O01O01O010O01O01O01M2N3L3N1N2O3L3N3L3N2M4M2M4M2M3N5K30O01O01O010O01M2N2M4M2M4M2M3N3LjVm4"}, "image_id": 531, "id": 9220}, {"iscrowd": 0, "category_id": 1, "bbox": [219.0, 282.0, 92.0, 71.0], "area": 3586, "segmentation": {"size": [512, 512], "counts": "ki]31l?4M2N3L3N2M4M2N3L3N2M4M201O010O0N2N3N110O01O01O010O010O00010O0010O0010O0010O010O00010O01O0]AgN`>\\10001O001O0O101O001O001O0O101O001O00001O010O010O01O010O01O02nNhA8Y>EiAAlA>U>^OnAc0R>ZOQBe0P>WOSBi0o=SOTBi0c>N2M4M2N3L3N2M4M2NkUT3"}, "image_id": 531, "id": 9221}, {"iscrowd": 0, "category_id": 1, "bbox": [422.0, 284.0, 59.0, 66.0], "area": 2284, "segmentation": {"size": [512, 512], "counts": "kYc61m?2N3L3N3L3N3M2M3N3L3`ATOj=o0SBTOj=n0TBTOi=o0TBUOk=l0RBVOo=i0nA[OQ>f0lA\\OT>X110O010O00010O010O010O00010O010O0001N1N3L2OO010000O3N3L3N2N3L3N3L3N3M2M3N3L3N3L3N2NPW?"}, "image_id": 531, "id": 9222}, {"iscrowd": 0, "category_id": 1, "bbox": [466.0, 300.0, 46.0, 63.0], "area": 1807, "segmentation": {"size": [512, 512], "counts": "oZY73k?3L3N3L3M3N3L3N3L3N2M4L3N3L3N2M4M2M3N3L3010O001L3N2M4M2M4L300010O00010O01jN\\Ao0c>oN`AP1h>010O0010O0010QF"}, "image_id": 531, "id": 9223}, {"iscrowd": 0, "category_id": 1, "bbox": [195.0, 337.0, 91.0, 70.0], "area": 3166, "segmentation": {"size": [512, 512], "counts": "_kQ31l?3N3M2M3N3M2M4N101O01O010OO2M2N3L3N2N30O01O010O01O010O01O010O01O010O01O010L3N2N3L310O010O01O01O010O010O01O01O010O01bNbAY1b>0_AfN\\>Z1aAiN`>\\1O010O01O01O010O010O01O0UO`A7`>FbA:^>CfA=Y>AiA?X>^OkAb0T>[OoAe0R>XOQBg0o=WOSBj0a>01N1N3L3N3M2M3N3M2MSd`3"}, "image_id": 531, "id": 9224}, {"iscrowd": 0, "category_id": 1, "bbox": [375.0, 358.0, 88.0, 81.0], "area": 3685, "segmentation": {"size": [512, 512], "counts": 
"]lk51o?:F9G1O000O10000000O1000000000H\\OSAe0m>7O1000O1000000000000000O0100000000000000000WOWOZBi0g=AoA?Q>HhA8X>HhA8X>HhA8X>HhA8X>HhA8X>HhA8X>HhA8X>i0N;F:F4L00000000000O0100000000000000000O010007I:F7nNiAMW>3RBDn=<\\BZOd=f0k0O10003M6J0000000000:E`Sh0"}, "image_id": 531, "id": 9225}, {"iscrowd": 0, "category_id": 1, "bbox": [79.0, 376.0, 69.0, 77.0], "area": 3183, "segmentation": {"size": [512, 512], "counts": "YmW11a01i>4QAOm>5o@Nn>6m@MQ?b0O2O001N100O2O001O00000N0O3N2N30O0010O010O0M3N2N2N1N3N1O2N1N3N2N1N3N2N3O01O010O01O01O010O010O00010O001ROYB_Oi=?ZB^Oh=?[B^Oi=>ZB@h=>[B^Oi=>ZB_Oi=?ZB^Oh=?[B^Oi=>ZB@h=>o0L3N3L3Moce5"}, "image_id": 531, "id": 9226}, {"iscrowd": 0, "category_id": 1, "bbox": [460.0, 385.0, 52.0, 94.0], "area": 2516, "segmentation": {"size": [512, 512], "counts": "W^V72k?4M2N2M4M2N3L3N2N3O0010O010O00010O010O01M2N2M4M2N3L3N3M2M3N3M2M4M2M4M2N2M40O010OO2M2M3N3M2M4M2010O0010lC"}, "image_id": 531, "id": 9227}, {"iscrowd": 0, "category_id": 1, "bbox": [173.0, 388.0, 90.0, 76.0], "area": 3396, "segmentation": {"size": [512, 512], "counts": "Xmf21m?2M3N3L3N3L3O110O010O00012M010O000A^OaAb0\\>C`A>^>F^A=_>c0O101O0O2O000O2O0010O00010O010O0010O0010O0010O01N11O01O010O01gA`NQ>_1lAdNT>\\1jAfNV>c10O010O0010O0010O0010O010OTOiAOW>OkA1V>KmA6R>HQB7P>GQB:n=ETB:m=ESB;m=DTB=m=_OVB`0m=ZOVBg0k=UOXBj0_>00O2L301O0M3N3L3N3L3N2MZRl3"}, "image_id": 531, "id": 9228}, {"iscrowd": 0, "category_id": 1, "bbox": [151.0, 414.0, 17.0, 12.0], "area": 110, "segmentation": {"size": [512, 512], "counts": "Qm[24j?2O110O0010O0010O0010O00010O0N3Lob[5"}, "image_id": 531, "id": 9229}, {"iscrowd": 0, "category_id": 1, "bbox": [345.0, 432.0, 74.0, 61.0], "area": 2021, "segmentation": {"size": [512, 512], "counts": "Qn\\52l?2M4M2N3M2M3N3N110O010O01O010O01OO2O0010O010O001e@]OX?e0\\AZOn=g0nA\\OS>c0kA_OU>b0hAAX>>eAE[>;cAH\\>9aAI`>6]ANb>h01O010O010O01N11O010O01O01M2N3M2M4M2N2N3L3N03M2N3N1010O0010OO1N3M2M4M2N3L3N3M2M3NPR^1"}, "image_id": 531, "id": 9230}, {"iscrowd": 0, "category_id": 1, "bbox": [133.0, 444.0, 62.0, 54.0], "area": 2189, "segmentation": {"size": [512, 512], "counts": "QoR21m?3M2N3L3N3M2N2N3M2M4M2N3M2N3M2N3O010O010O01OO2M2N3N110O01O0M3O20O01O010O010O010O0010M2N3M2O1010O0O2L3N3L3_O[AEh>8[ADh>9[AEh>8[ADh>:ZADh>:a0M2N3LoQn4"}, "image_id": 531, "id": 9231}, {"iscrowd": 0, "category_id": 1, "bbox": [190.0, 465.0, 58.0, 47.0], "area": 1576, "segmentation": {"size": [512, 512], "counts": "m_o23j?3N2M3N2M3N2M3001O00001O0000O1N2M3M3O1001ON2M3N2M1O04N1010O00010L3M4M2O1010O01O01O01OO2L2O2M4L3N2M4M2M4L3N2M4M2M4L[aS4"}, "image_id": 531, "id": 9232}, {"iscrowd": 0, "category_id": 1, "bbox": [428.0, 481.0, 42.0, 31.0], "area": 795, "segmentation": {"size": [512, 512], "counts": "n_f62l?2M3N2M3N2M3N2M3N2N2M3N21O001O00001O001O00001O001O00001O0^Ol@8U?Em@;]?O00001O001O000O2M2M4M[`d0"}, "image_id": 531, "id": 9233}, {"iscrowd": 0, "category_id": 1, "bbox": [331.0, 489.0, 66.0, 23.0], "area": 1110, "segmentation": {"size": [512, 512], "counts": "loU53j?3M3N3L3M3N2N21O00001O001O00001O00001O00001O001O00001O00O1N2M3M30000001O00001O00001O001O00001O00001O001O00001O00001O00001O0O2L3N2M_Pi1"}, "image_id": 531, "id": 9234}, {"iscrowd": 0, "category_id": 1, "bbox": [36.0, 493.0, 29.0, 19.0], "area": 368, "segmentation": {"size": [512, 512], "counts": "n_b02k?3N2N2M3N2N2M300001O001O00001O001O001O00001O001N1N3L3N2N]Po6"}, "image_id": 531, "id": 9235}, {"iscrowd": 0, "category_id": 1, "bbox": [495.0, 500.0, 17.0, 12.0], "area": 146, "segmentation": {"size": [512, 512], "counts": "nog72n?0O1N2M3N2N2001O001O00001OO1O11O"}, "image_id": 531, "id": 9236}, 
{"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 7.0, 11.0], "area": 51, "segmentation": {"size": [512, 512], "counts": "0;e?O1O1O2N2N2Om_l7"}, "image_id": 533, "id": 9237}, {"iscrowd": 0, "category_id": 1, "bbox": [8.0, 0.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "PP41ook7"}, "image_id": 533, "id": 9238}, {"iscrowd": 0, "category_id": 1, "bbox": [29.0, 0.0, 48.0, 32.0], "area": 907, "segmentation": {"size": [512, 512], "counts": "W`>1o?1N2N2N2N2N2N3M2O1O1O1O1O1O1O1O1O2N1O1O1O1O1OO100O1O1O1O1O1O1O1O1O100O1O1O1O1O1O2N2N3M2N2O1N2Ni_i6"}, "image_id": 533, "id": 9239}, {"iscrowd": 0, "category_id": 1, "bbox": [285.0, 0.0, 54.0, 43.0], "area": 1367, "segmentation": {"size": [512, 512], "counts": "X`^42m?2U@Mf?8N2N2N2O1N3M2N2O1O00001O1O1O1O2N1O1O1O1O1O1O2N1O1OO1O11O1O1O1000O1N3M2N2N2N2O1N2N3M2N2N2N2N2N2O2M2N2N2NY_f2"}, "image_id": 533, "id": 9240}, {"iscrowd": 0, "category_id": 1, "bbox": [344.0, 0.0, 24.0, 10.0], "area": 131, "segmentation": {"size": [512, 512], "counts": "PP\\51o?2N1O1O1O1O1O1O1O00O1O100O1O1O1O1001OO1O1O100OQPX2"}, "image_id": 533, "id": 9241}, {"iscrowd": 0, "category_id": 1, "bbox": [354.0, 15.0, 23.0, 23.0], "area": 251, "segmentation": {"size": [512, 512], "counts": "gPa51n?2N2N2N2N2N2N2N2000001O0000O1N2N2N2N2N2N2O2MS_S2"}, "image_id": 533, "id": 9242}, {"iscrowd": 0, "category_id": 1, "bbox": [79.0, 16.0, 22.0, 23.0], "area": 274, "segmentation": {"size": [512, 512], "counts": "i`W12m?2N3M2N2O1N2N2N2N2N3N01N2N3M2N2N2N2N2O1N2NV_]6"}, "image_id": 533, "id": 9243}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 22.0, 29.0, 55.0], "area": 871, "segmentation": {"size": [512, 512], "counts": "f0c1^>00000000O1N2N2N3M2O1N2N2N2N2N2N2N3M2Lg@^O[?`04N2N2N2N2N3M2O1N\\^a7"}, "image_id": 533, "id": 9244}, {"iscrowd": 0, "category_id": 1, "bbox": [450.0, 22.0, 37.0, 34.0], "area": 809, "segmentation": {"size": [512, 512], "counts": "oPQ71n?4M3M3L4M4L3L4M2N0O01000O01000O010000O01000O01000O01000O104L3M3L4M3M3L4Md^<"}, "image_id": 533, "id": 9245}, {"iscrowd": 0, "category_id": 1, "bbox": [247.0, 28.0, 55.0, 54.0], "area": 1484, "segmentation": {"size": [512, 512], "counts": "aak32m?2N2N2N2N2N2N2N2N2O2M2N2N001O2N3M2N2N2N2N2O1N200000001OO1N2O2O000000000N2N2N2N2N2N3M2N2N2O1N2N2N2N2N2N2N3M2NSnX3"}, "image_id": 533, "id": 9246}, {"iscrowd": 0, "category_id": 1, "bbox": [499.0, 38.0, 13.0, 22.0], "area": 165, "segmentation": {"size": [512, 512], "counts": "\\ai73l?2N2O1N2N3M2N2O11O0001OOeN"}, "image_id": 533, "id": 9247}, {"iscrowd": 0, "category_id": 1, "bbox": [167.0, 49.0, 97.0, 101.0], "area": 4667, "segmentation": {"size": [512, 512], "counts": "Sbc21n?3M2N2N2O1N3M2N2N2N2O2M2N2N2N2N3N1O1000O1N3M2NnATOS=k0lBWOT=g0lB[OT=d0lB]OU=b0iB@W=`0gBCX=>eBD[=5nAMP>T1IPNYBR2e=PNYBR2e=6N2N3N1N2N2N2N1O011N2N001O0001O000fNoBHo<8SCHk<9VCGh<9ZCFf<9\\CEd<;_CCb<;`CCb<=^CAda0eA]O[>b0hA[OY>e0jAXOU>i0mAUOS>j0a010O010O10O0102N3L3N2M4M2M3N3M2M3NfXV2"}, "image_id": 533, "id": 9272}, {"iscrowd": 0, "category_id": 1, "bbox": [163.0, 214.0, 10.0, 10.0], "area": 56, "segmentation": {"size": [512, 512], "counts": "ifa21n?3M2O1N2000O1N2N2NVYY5"}, "image_id": 533, "id": 9273}, {"iscrowd": 0, "category_id": 1, "bbox": [258.0, 227.0, 6.0, 6.0], "area": 35, "segmentation": {"size": [512, 512], "counts": "SWQ45k?1O0000000mhk3"}, "image_id": 533, "id": 9274}, {"iscrowd": 0, "category_id": 1, "bbox": [263.0, 230.0, 16.0, 15.0], "area": 128, "segmentation": {"size": [512, 512], "counts": "\\gS41o?1N2N2N2N2N2N2O10O1N2N2N2N2N2NdXd3"}, "image_id": 533, "id": 9275}, {"iscrowd": 0, "category_id": 1, 
"bbox": [103.0, 233.0, 24.0, 24.0], "area": 294, "segmentation": {"size": [512, 512], "counts": "agc11n?2N2N2N2N3M2N2N2O1O1000000001N1N2N2N2N2N2N2N2NZXP6"}, "image_id": 533, "id": 9276}, {"iscrowd": 0, "category_id": 1, "bbox": [14.0, 249.0, 42.0, 56.0], "area": 1318, "segmentation": {"size": [512, 512], "counts": "WX74l?3M3L5L2N0O10O10O3N3M3L4GSOYAP1d>TOXAo0e>9L5L3M0O01000O01000O0102N3L5L3M3L4M3M3L4M3M3L4M3M3L4MjfS7"}, "image_id": 533, "id": 9277}, {"iscrowd": 0, "category_id": 1, "bbox": [249.0, 252.0, 59.0, 45.0], "area": 1423, "segmentation": {"size": [512, 512], "counts": "]hl31o?3M2M3N3L3N2M4M2N2M4M2M010O0100O0100O0100O010O0100O0N3O0100O0100O0100O30010O00010M2M3N3M2M4MO0100O0100O02O3L3N2M4M2NPgU3"}, "image_id": 533, "id": 9278}, {"iscrowd": 0, "category_id": 1, "bbox": [119.0, 254.0, 11.0, 11.0], "area": 66, "segmentation": {"size": [512, 512], "counts": "Rhk12m?2N2N2N2O10N2N2N2N2Nngn5"}, "image_id": 533, "id": 9279}, {"iscrowd": 0, "category_id": 1, "bbox": [60.0, 270.0, 23.0, 24.0], "area": 281, "segmentation": {"size": [512, 512], "counts": "fXn02m?2N2N2N2N2N2N2N2O2O00000000O2M2N2N2N2N2N2N2NUWf6"}, "image_id": 533, "id": 9280}, {"iscrowd": 0, "category_id": 1, "bbox": [474.0, 270.0, 38.0, 44.0], "area": 1107, "segmentation": {"size": [512, 512], "counts": "eX]73m?3M4L3L4M4L3L5L3M4L3L10000O0100000O01000O010000O0100003L4M4L3L4M4L3M4K4M4L`F"}, "image_id": 533, "id": 9281}, {"iscrowd": 0, "category_id": 1, "bbox": [126.0, 283.0, 52.0, 66.0], "area": 1536, "segmentation": {"size": [512, 512], "counts": "kYo12m?2N2N2N2N3M2N2N2N2O1N2N2N2N2N3M2N2N2N2N2O1N2^AcN^>a1N3M2N2N2N1O0000UOoAHR>7PB\\ONMR>g0RBZONMP>i0TBXONMn=l0VBTONNm=m0WBSONOl=l0XBSOV>k0a0N2N3M2N2N2N2N2O1N2N2N2N3MefV5"}, "image_id": 533, "id": 9282}, {"iscrowd": 0, "category_id": 1, "bbox": [182.0, 284.0, 31.0, 31.0], "area": 493, "segmentation": {"size": [512, 512], "counts": "YYk21n?2N2N3M2N2N2N2O1N2N2N2N3M2N2000O1N2O1N2N3M2N2N2N2N2N2N2O1N3McVe4"}, "image_id": 533, "id": 9283}, {"iscrowd": 0, "category_id": 1, "bbox": [78.0, 286.0, 14.0, 15.0], "area": 118, "segmentation": {"size": [512, 512], "counts": "TYW12m?2N2N2N3N1N2N11N2N2N2N3M2Nlfa6"}, "image_id": 533, "id": 9284}, {"iscrowd": 0, "category_id": 1, "bbox": [219.0, 295.0, 47.0, 55.0], "area": 1375, "segmentation": {"size": [512, 512], "counts": "^i]31n?4M2M3[@H^??N3L3N2M4M2M3NO10O103L310O01O01N1M4MO010O10O10O010O012M3N2N3L3N2M4M2M4M2N2M4M2M3N3L3N^ej3"}, "image_id": 533, "id": 9285}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 297.0, 21.0, 42.0], "area": 563, "segmentation": {"size": [512, 512], "counts": "Z9W1j>0000O1N000LVATOi>l0YAROg>o0401O2N2N2N2Dj@KX?3j@KX?3j@KX?4i@JY?4i@JY?4"}, "image_id": 533, "id": 9287}, {"iscrowd": 0, "category_id": 1, "bbox": [173.0, 338.0, 51.0, 51.0], "area": 1427, "segmentation": {"size": [512, 512], "counts": "Pkf21o?2M3N3L3N2N2M4M2M3N0O10O010O010O02O3M2M3N2M3N3L3N2M100O0100O010O2O2N2M3N3L3N2M3N3L3N2N2M3N3L3N2M3N3LXd_4"}, "image_id": 533, "id": 9288}, {"iscrowd": 0, "category_id": 1, "bbox": [86.0, 344.0, 32.0, 32.0], "area": 519, "segmentation": {"size": [512, 512], "counts": "T[[11n?2O2M2N2N2N3N1N2N2N2N3M2O1N2N3OO1N2O1N3M2N2N2N2O2M2N2N2N3N1N2NfdT6"}, "image_id": 533, "id": 9289}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 346.0, 10.0, 15.0], "area": 91, "segmentation": {"size": [512, 512], "counts": "k:5N2O1N2N2N2N3MdT3"}, "image_id": 533, "id": 9291}, {"iscrowd": 0, "category_id": 1, "bbox": [66.0, 359.0, 26.0, 26.0], "area": 343, "segmentation": {"size": [512, 512], "counts": 
"b[Q11n?2N2N2N2N2N3M2N2O1N2N2N2000O1N3M2N2N2N2N2N2N2N2N2N\\da6"}, "image_id": 533, "id": 9292}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 367.0, 23.0, 36.0], "area": 485, "segmentation": {"size": [512, 512], "counts": "a;j0U?3M2O100001O000O1N2N2N2N2N2O2M2N2N2N2N2N2N2NmSd7"}, "image_id": 533, "id": 9293}, {"iscrowd": 0, "category_id": 1, "bbox": [339.0, 382.0, 55.0, 48.0], "area": 1304, "segmentation": {"size": [512, 512], "counts": "flY51o?1N2N2N2N3M2N2N2N2O1N2N2N2N0000000001O0001O2N2N2N2N3M2O10000000000010O0N2N2O1N2N2N2N3M2N2N2N2O1N2N2N3M2N2N2NVcj1"}, "image_id": 533, "id": 9294}, {"iscrowd": 0, "category_id": 1, "bbox": [137.0, 387.0, 47.0, 41.0], "area": 1142, "segmentation": {"size": [512, 512], "counts": "alT21o?2M4M2M4M2N3L3N2M4M2M3N000O010O01000O010O01000O010O0100O0100O010O010001N3N3L3N3M2M4M2M3N3M2MUcS5"}, "image_id": 533, "id": 9295}, {"iscrowd": 0, "category_id": 1, "bbox": [510.0, 387.0, 2.0, 4.0], "area": 5, "segmentation": {"size": [512, 512], "counts": "T\\o71n?3kC"}, "image_id": 533, "id": 9296}, {"iscrowd": 0, "category_id": 1, "bbox": [506.0, 393.0, 6.0, 12.0], "area": 40, "segmentation": {"size": [512, 512], "counts": "]\\m72m?2O1N2N3M2fC"}, "image_id": 533, "id": 9297}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 405.0, 10.0, 14.0], "area": 79, "segmentation": {"size": [512, 512], "counts": "h<;d?01O01O02N2N2N2N2NWcj7"}, "image_id": 533, "id": 9298}, {"iscrowd": 0, "category_id": 1, "bbox": [33.0, 413.0, 25.0, 26.0], "area": 328, "segmentation": {"size": [512, 512], "counts": "Wm`01n?2N2N2N2N3M2N2N2N2N2000000000O1N2N3M2N2N2N2N2N2NebR7"}, "image_id": 533, "id": 9299}, {"iscrowd": 0, "category_id": 1, "bbox": [90.0, 415.0, 47.0, 45.0], "area": 1066, "segmentation": {"size": [512, 512], "counts": "f]]11o?1N2N2N2N2N3M2N2N2O1N2N2N2N3M2N2N2O1N2N1O000000001O01O2N2N2N2N2N2N2O1N3M2N2N2N2N2N2N3N1N2N2N]Rk5"}, "image_id": 533, "id": 9300}, {"iscrowd": 0, "category_id": 1, "bbox": [380.0, 423.0, 29.0, 30.0], "area": 378, "segmentation": {"size": [512, 512], "counts": "n]n52m?2N2N2N2IGd@;Z?Hc@8^?4000000000001O2N00000001O002N2O1N2N3M2N2N2NaRc1"}, "image_id": 533, "id": 9301}, {"iscrowd": 0, "category_id": 1, "bbox": [299.0, 438.0, 51.0, 51.0], "area": 1439, "segmentation": {"size": [512, 512], "counts": "cne42m?2N2N2N2N2N3M2N2N2N2O1N2N2N2N2N2N3M2N00001O01O00000000000000001101O000000N2N2N2N3N1N2N2N2N2Cg@0[?Ng@0[?Ng@0[?Ng@0[?Ng@0QQa2"}, "image_id": 533, "id": 9302}, {"iscrowd": 0, "category_id": 1, "bbox": [46.0, 449.0, 52.0, 52.0], "area": 1331, "segmentation": {"size": [512, 512], "counts": "m^g02n?1N3M2N2N2N2N2N2N2N2N2N2N2N2O1N3M2N2N2N2N000000000000000010O002N2N2N3M2N2N2N2N2N2N2N2O1N2N2N2N3M2N2N2NZa^6"}, "image_id": 533, "id": 9303}, {"iscrowd": 0, "category_id": 1, "bbox": [355.0, 459.0, 22.0, 23.0], "area": 243, "segmentation": {"size": [512, 512], "counts": "cna51n?2N2N2N2N2N2O2M2N21O0000000N2N2N2N2N3M2O1NXQS2"}, "image_id": 533, "id": 9304}, {"iscrowd": 0, "category_id": 1, "bbox": [389.0, 463.0, 56.0, 49.0], "area": 1475, "segmentation": {"size": [512, 512], "counts": "coR62m?2N2N2N3M2N2N2O1N2N2N1O1O1O1O1KWORAj0m>5O1O100O1O1O1O1O1O1O1O1O000001O0000002O1N2N2N2N2N2N2N3M2N2N2N2N2O1N2N2N2N2N3MlPQ1"}, "image_id": 533, "id": 9305}, {"iscrowd": 0, "category_id": 1, "bbox": [380.0, 469.0, 18.0, 17.0], "area": 153, "segmentation": {"size": [512, 512], "counts": "j^n51n?2N2N2N2N3N1O100001O0O1O1N2N2N2N2NQah1"}, "image_id": 533, "id": 9306}, {"iscrowd": 0, "category_id": 1, "bbox": [254.0, 476.0, 54.0, 36.0], "area": 1081, "segmentation": {"size": [512, 512], "counts": 
"j_o32m?3N1N2N1O1O1O1O100O1O1O1O1O1O100O1J]On@d0Q?^Om@c0R?6O1O1O1O100O1O1O1O11O1O1O1O1O1O2M2N2N2OO000001O1O2N3N1N2N2N2N2N3M2Nb`U3"}, "image_id": 533, "id": 9307}, {"iscrowd": 0, "category_id": 1, "bbox": [8.0, 487.0, 44.0, 25.0], "area": 800, "segmentation": {"size": [512, 512], "counts": "^_42n?3L3N3M3L4M2M10000O100O13M2N00O10000O10000O100O10000O10000O10000O1001O2N3M3M3M2N3M3M3MR`U7"}, "image_id": 533, "id": 9308}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 500.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "d?2Z`o7"}, "image_id": 533, "id": 9309}, {"iscrowd": 0, "category_id": 1, "bbox": [347.0, 501.0, 20.0, 11.0], "area": 116, "segmentation": {"size": [512, 512], "counts": "oo]51o?0O1O1O1O100O1O1O1O1O100O12N1O1O1O2M2NTPX2"}, "image_id": 533, "id": 9310}, {"iscrowd": 0, "category_id": 1, "bbox": [452.0, 505.0, 13.0, 7.0], "area": 49, "segmentation": {"size": [512, 512], "counts": "o_R71n?1O1O1O1O1O11O1O1O1O1O1OQPg0"}, "image_id": 533, "id": 9311}, {"iscrowd": 0, "category_id": 1, "bbox": [473.0, 508.0, 8.0, 4.0], "area": 20, "segmentation": {"size": [512, 512], "counts": "oo\\71n?1O1O1001O1O1OQP?"}, "image_id": 533, "id": 9312}, {"iscrowd": 0, "category_id": 1, "bbox": [222.0, 511.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "o__31P``4"}, "image_id": 533, "id": 9313}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 39.0, 41.0], "area": 899, "segmentation": {"size": [512, 512], "counts": "1X1i>N1O00001O1O1O1O100O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O100O1O1O1O1O2N3M2N2Nl_\\7"}, "image_id": 534, "id": 9314}, {"iscrowd": 0, "category_id": 1, "bbox": [114.0, 0.0, 65.0, 32.0], "area": 1175, "segmentation": {"size": [512, 512], "counts": "QPi12m?2O001O001O001O1O001O001O1O001O001O1O001O001O001O1O001O001O1O001O001O1O001O001O001O1O001O001O1O001O0000N2O1N2N2O1N2N3M2O2M2N3H[@2k?Mj_V5"}, "image_id": 534, "id": 9315}, {"iscrowd": 0, "category_id": 1, "bbox": [276.0, 0.0, 28.0, 13.0], "area": 183, "segmentation": {"size": [512, 512], "counts": "PPZ41o?1O1O2N1O1O1O1O1O1O1O1OO1O1O1O1O1O1O1O1O1001O00O1O1O1OQPX3"}, "image_id": 534, "id": 9316}, {"iscrowd": 0, "category_id": 1, "bbox": [328.0, 0.0, 56.0, 49.0], "area": 1433, "segmentation": {"size": [512, 512], "counts": "jPT51n?2N2N2N2N2N2N2N3M2O1N2N2N2N2N2N2N3M2N2O1N0000000000001O01O1O1O1O1O1O1O1O100O1O1O2N2N2N2N2N2N2O1N3M2N2N2N2N2N2Naoo1"}, "image_id": 534, "id": 9317}, {"iscrowd": 0, "category_id": 1, "bbox": [59.0, 22.0, 29.0, 29.0], "area": 431, "segmentation": {"size": [512, 512], "counts": "Tam01n?2N2N2N2N2N2N2N2N2N2N2N2N2N2OO2N2N3M2N2N2N2O1N2N2N2N2N2Nmnc6"}, "image_id": 534, "id": 9318}, {"iscrowd": 0, "category_id": 1, "bbox": [3.0, 25.0, 53.0, 48.0], "area": 1268, "segmentation": {"size": [512, 512], "counts": "ba12n?2M2N3N1N3M2O2M2N3N1N3M2O2M2N10O00010O00010O00010O00010O00010O00010O00010O002O1N3M2O2M2N3N1N3M2O2M2N3N1NdnS7"}, "image_id": 534, "id": 9319}, {"iscrowd": 0, "category_id": 1, "bbox": [243.0, 27.0, 37.0, 37.0], "area": 679, "segmentation": {"size": [512, 512], "counts": "`ai32m?2N2N2N3M2N2O1N2N2N2N2N2N2N00001O0001O000000002N2N2N2N2O2M2N2N2N2N2N2N2Nhnc3"}, "image_id": 534, "id": 9320}, {"iscrowd": 0, "category_id": 1, "bbox": [103.0, 33.0, 36.0, 36.0], "area": 655, "segmentation": {"size": [512, 512], "counts": "aac11o?1N2N2N2N2N2N3M2N2N2O1N2N2N2N2N3M2N01O2N2O2M2N2N2N2N2N2N2N2N3N1N2N2N2N^^j5"}, "image_id": 534, "id": 9321}, {"iscrowd": 0, "category_id": 1, "bbox": [145.0, 37.0, 29.0, 30.0], "area": 445, "segmentation": {"size": [512, 512], "counts": 
"baX22m?2N2O1N2N3M2N2N2O1N3M2N2N2N0002N2N2N3N1N2N2N2N2N3N1N2N2N]nX5"}, "image_id": 534, "id": 9322}, {"iscrowd": 0, "category_id": 1, "bbox": [323.0, 38.0, 22.0, 22.0], "area": 262, "segmentation": {"size": [512, 512], "counts": "]aQ52m?2N2N2N3M2N2O1N2O10000000O1N2O1N2N3M2N2N2N^^c2"}, "image_id": 534, "id": 9323}, {"iscrowd": 0, "category_id": 1, "bbox": [258.0, 47.0, 74.0, 51.0], "area": 1920, "segmentation": {"size": [512, 512], "counts": "URQ42m?8H8H8I2M2N2N3M1O1O001O1OM[AmNe>S1400O0100000O100000O10O1000001N6K4LO10O1000O1000O1000O10O10O1O1O00100OO100001O01O00000O10001O001O3N1N2N2N2N3M2O1N2N2N2NUni2"}, "image_id": 534, "id": 9324}, {"iscrowd": 0, "category_id": 1, "bbox": [347.0, 50.0, 58.0, 55.0], "area": 1676, "segmentation": {"size": [512, 512], "counts": "gb]51`?1l@2Q?0m@2Q?0m@2Q?0n@1P?1n@1P?1n@1P?a0N2O1N3M2N2N2N2N2N000001O000001O000001O2N2N3M2N2O1N2N2N2N000000000001O01O001O2N2N2N2N3M2O1N2N2N2N2N2NP^e1"}, "image_id": 534, "id": 9325}, {"iscrowd": 0, "category_id": 1, "bbox": [162.0, 52.0, 37.0, 36.0], "area": 672, "segmentation": {"size": [512, 512], "counts": "URa21n?2N2N2O1N2N2N3M2N2N2N2N2N2N2N2O1N2N0002N2N2O1N2N2N2N2N2N2N2N2N3M2N2O1N2Nk]l4"}, "image_id": 534, "id": 9326}, {"iscrowd": 0, "category_id": 1, "bbox": [183.0, 68.0, 45.0, 45.0], "area": 1054, "segmentation": {"size": [512, 512], "counts": "fbk21n?2N2N2N2N2O1N3M2N2N2N2N2N2N2N2N2O2M2N2N2000000000O1N2N2O1N2N3M2N2N2N20000O1N2N3M2I^@Md?1^@Md?17NSm]4"}, "image_id": 534, "id": 9327}, {"iscrowd": 0, "category_id": 1, "bbox": [391.0, 79.0, 59.0, 46.0], "area": 1349, "segmentation": {"size": [512, 512], "counts": "nbS62m?2N2O1N2N3M2N2N2N2N2N2N2O1N3M20000001O0000000001O0000N1O00000000001O01O000000000000000001O2O2M2N2N2N2N2N2N2N2N3N1N2NSmn0"}, "image_id": 534, "id": 9328}, {"iscrowd": 0, "category_id": 1, "bbox": [59.0, 82.0, 57.0, 50.0], "area": 1354, "segmentation": {"size": [512, 512], "counts": "[cm01n?2N3M2O1N2N2N2N2N3M2N2N2O1O10000010OO1N2N2N0010O000000000000010O00000002N2N1O10IXAUOh>k0ZASOi>j08N2N2O1N2N2N3M2N2N2N2O1N2N2NnlU6"}, "image_id": 534, "id": 9329}, {"iscrowd": 0, "category_id": 1, "bbox": [295.0, 96.0, 54.0, 52.0], "area": 1325, "segmentation": {"size": [512, 512], "counts": "ncc41n?2O1N2N3M2N2N2N2N2N2N2O1N3M2N2N2N2N2N2N000001O00000000000001O0001O001O3M2N2N2N2O1N2N2N2N2N3M2N2N2N2N2O1N2N]\\a2"}, "image_id": 534, "id": 9330}, {"iscrowd": 0, "category_id": 1, "bbox": [281.0, 99.0, 28.0, 27.0], "area": 375, "segmentation": {"size": [512, 512], "counts": "bc\\42m?2N2N3N1N2N2N2N2N2N1O0001O00000000102M2N2N2N2N2N2N2N3Nb\\U3"}, "image_id": 534, "id": 9331}, {"iscrowd": 0, "category_id": 1, "bbox": [430.0, 107.0, 59.0, 56.0], "area": 1814, "segmentation": {"size": [512, 512], "counts": "nSg62400O^?3`@01N]?:a@H]??N2N2N2N2N3N1N3M2N3M2N2N2N2N2O1N2N3M01O2N2N2N2O2M2N2N2N2N2N2N2N01O000001O00000000003N1N2N2N2N2N2N2N2N3N1N2N2N2NR\\;"}, "image_id": 534, "id": 9332}, {"iscrowd": 0, "category_id": 1, "bbox": [341.0, 111.0, 38.0, 34.0], "area": 619, "segmentation": {"size": [512, 512], "counts": "PdZ52m?2N3M2000000N2N2N3M2O1N2N2N1O001O000001O00000000010O2N2N2N2N2N2N3N1N2N2N2NV\\R2"}, "image_id": 534, "id": 9333}, {"iscrowd": 0, "category_id": 1, "bbox": [130.0, 117.0, 13.0, 14.0], "area": 104, "segmentation": {"size": [512, 512], "counts": "kSQ22m?2N2M3N2N2O10N1O2N2N2M3NW\\h5"}, "image_id": 534, "id": 9334}, {"iscrowd": 0, "category_id": 1, "bbox": [85.0, 126.0, 33.0, 26.0], "area": 486, "segmentation": {"size": [512, 512], "counts": "^dZ12m?2O2KKZ@7d?4N2N2N1O00000000101N2N2N2O10O1N2N2N2N2O2O000N2O1N2N2N2N2NckT6"}, "image_id": 534, "id": 
9335}, {"iscrowd": 0, "category_id": 1, "bbox": [233.0, 126.0, 67.0, 68.0], "area": 2054, "segmentation": {"size": [512, 512], "counts": "cdd32m?2N2N2N2N2O1N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N3O00000000000000000000000000000000000000000001N1N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2NfjY3"}, "image_id": 534, "id": 9336}, {"iscrowd": 0, "category_id": 1, "bbox": [385.0, 127.0, 20.0, 19.0], "area": 200, "segmentation": {"size": [512, 512], "counts": "XdP61n?2O1N2N2N3M2N2N2OO01O01O2N2N2N2N3N1N2Nj[e1"}, "image_id": 534, "id": 9337}, {"iscrowd": 0, "category_id": 1, "bbox": [461.0, 140.0, 39.0, 38.0], "area": 755, "segmentation": {"size": [512, 512], "counts": "odV72m?2N2N2N2O1N3M2N2N2N2N2N2O2M2N2N0000000010O001O2N2N2N2N2O1N3M2N2N2N2N2N2O2M2NTk5"}, "image_id": 534, "id": 9338}, {"iscrowd": 0, "category_id": 1, "bbox": [314.0, 146.0, 30.0, 34.0], "area": 340, "segmentation": {"size": [512, 512], "counts": "hTm41o?1N2N2N2N3M2N2O1O101O000001O000Ca@9^?Ed@;a?01O000001O00N3M2N2O1N2Ncjc2"}, "image_id": 534, "id": 9339}, {"iscrowd": 0, "category_id": 1, "bbox": [164.0, 150.0, 63.0, 56.0], "area": 1565, "segmentation": {"size": [512, 512], "counts": "XUb22l?3N2N1O2M3N2N2N1N3N2N2N2M2O2000000O010000000O010000000O010000000O0100000N2M210O100000O10O10O1N2N2M2O2N2N2M3N1O2N2M3N2N1O2M3NYZ^4"}, "image_id": 534, "id": 9340}, {"iscrowd": 0, "category_id": 1, "bbox": [376.0, 153.0, 33.0, 33.0], "area": 559, "segmentation": {"size": [512, 512], "counts": "ZUl52m?2N2N2N2N2N2O2M2N2N2N2N2N2N01O000001O02N2N2N2N2N2N2O1N3M2N2N2N2NjZc1"}, "image_id": 534, "id": 9341}, {"iscrowd": 0, "category_id": 1, "bbox": [74.0, 158.0, 111.0, 100.0], "area": 4073, "segmentation": {"size": [512, 512], "counts": "cUU12m?2N2N2N2N2N2N2N2N2N2N2O2M2N2N2N2N2N2N2N2N2N20000000000000000`AdN[>\\1cAfN]>_11N1N2O1000000000000N2N2N2N2N2N2N2000RAQOj>o0TASOl>Q10O2M20000000000000N2O1N2N2N2N2N3O0N2N2N2O1N2N2N2O1mAUNP>n1001N1N2000000000000O1bNjAo0X>oNjAo0X>oNjAo0X>oNjAo0X>oNjAo0X>oNjAo0e>N3M2O1N2N2N2N2N2N2N2N2N2N2N2N2NeXS5"}, "image_id": 534, "id": 9342}, {"iscrowd": 0, "category_id": 1, "bbox": [501.0, 173.0, 11.0, 24.0], "area": 136, "segmentation": {"size": [512, 512], "counts": "cej71o?2M2N3N2M3N1N3N2M3M2bJ"}, "image_id": 534, "id": 9343}, {"iscrowd": 0, "category_id": 1, "bbox": [236.0, 178.0, 33.0, 37.0], "area": 644, "segmentation": {"size": [512, 512], "counts": "jUf33m?1N3N2Y@Ha?:^@H_?>O2M3M2O2M2O2M3M2010N1N3N1N3M010O2O2M2N3N1N3M3N1N3N2M2NfYi3"}, "image_id": 534, "id": 9344}, {"iscrowd": 0, "category_id": 1, "bbox": [355.0, 190.0, 17.0, 17.0], "area": 147, "segmentation": {"size": [512, 512], "counts": "Tfa51n?3N1N2N2N2N2N3N11N1N2N2N2N2N3N1NiiU2"}, "image_id": 534, "id": 9345}, {"iscrowd": 0, "category_id": 1, "bbox": [439.0, 190.0, 54.0, 62.0], "area": 1756, "segmentation": {"size": [512, 512], "counts": "Zfk63m?2M3N1N3N2M2N3N2M3N1o@ZOe>i0YAYOd>j0YAXOf>S1M3N2M2O2M3M2O2M3N10N3N2M3N1N3N2M2N010O010O2O2M3M3N1N3N2M2N3N2M3N1N3N2M3M2O2M3N1NhX9"}, "image_id": 534, "id": 9346}, {"iscrowd": 0, "category_id": 1, "bbox": [10.0, 195.0, 67.0, 56.0], "area": 1834, "segmentation": {"size": [512, 512], "counts": "nV51n?2N2N2N2N3M2O1N2N2N2N2N2N2N3N11ON2N2N2N2N20001O0N2N0001O000001O000002N2N2N2N1O1O01O0000000000000001O2O2M2N2N2N2N2N2N2N2O2M2N2N2N2N2N\\Yi6"}, "image_id": 534, "id": 9347}, {"iscrowd": 0, "category_id": 1, "bbox": [277.0, 197.0, 22.0, 25.0], "area": 289, "segmentation": {"size": [512, 512], "counts": "ZfZ42n?2M2O2M3M3N1N3N2M2O2N11M2N3N2M3N1N3N2M2N3NZYZ3"}, "image_id": 534, "id": 9348}, {"iscrowd": 0, "category_id": 1, "bbox": [412.0, 197.0, 26.0, 25.0], 
"area": 383, "segmentation": {"size": [512, 512], "counts": "^V^63m?2M2N3N2M2O2M3M2O0O10O00010O010O1O2O2M3N1N3M3N1N3N^iT1"}, "image_id": 534, "id": 9349}, {"iscrowd": 0, "category_id": 1, "bbox": [172.0, 200.0, 31.0, 27.0], "area": 438, "segmentation": {"size": [512, 512], "counts": "_Vf23l?2N2N1Y@Ie?:00000O01N2N2N2M3N1O2O100O10O100N2N2M2O2N2N2N2M2O2N2N]Yj4"}, "image_id": 534, "id": 9350}, {"iscrowd": 0, "category_id": 1, "bbox": [343.0, 201.0, 25.0, 27.0], "area": 366, "segmentation": {"size": [512, 512], "counts": "`f[51o?1N3N2M3N2M2O2M3M3N1N3N0O10O3M3N1N3N2M3N1N3M3N2MViW2"}, "image_id": 534, "id": 9351}, {"iscrowd": 0, "category_id": 1, "bbox": [326.0, 202.0, 14.0, 11.0], "area": 83, "segmentation": {"size": [512, 512], "counts": "_VS51n?3N2M2O0O010O01O010O2O2M3Naie2"}, "image_id": 534, "id": 9352}, {"iscrowd": 0, "category_id": 1, "bbox": [305.0, 208.0, 16.0, 14.0], "area": 126, "segmentation": {"size": [512, 512], "counts": "efh43m?1N3N2M2OO010O01O0100O3N2M2O2MZYo2"}, "image_id": 534, "id": 9353}, {"iscrowd": 0, "category_id": 1, "bbox": [501.0, 220.0, 11.0, 21.0], "area": 143, "segmentation": {"size": [512, 512], "counts": "Qgj72m?3N2M2O2M3N2M3N1N11NTI"}, "image_id": 534, "id": 9354}, {"iscrowd": 0, "category_id": 1, "bbox": [393.0, 224.0, 47.0, 55.0], "area": 1470, "segmentation": {"size": [512, 512], "counts": "`gT61o?2M3N2M3M2O2M3N2M2O2M3M3N2M2O2M3N2M2O2O100N00O010O010O00010O010O010O0L_AjNb>U1aAiN^>X14[O\\AOf>0\\AMf>1]AMe>0]ANf>0\\ANf>O]ANf>0\\ANe>0]ANf>0\\ANWWT1"}, "image_id": 534, "id": 9355}, {"iscrowd": 0, "category_id": 1, "bbox": [251.0, 230.0, 89.0, 114.0], "area": 3864, "segmentation": {"size": [512, 512], "counts": "agm32n?2M3M2O2M3N2M2O2M3M3N101000OO2N2M3N1N3M3N2M2O2M3N2M2O2M3M3N1N3N2M3NO3N2M3N1N30000O01UBTN`=l1^BVNb=j1[BYNe=f1YB\\Ng=e1WB]Ni=n1000O010000O0100000O0100TNWB\\1l=bNVB\\1k=bNWB\\1l=aNWB\\1l=bNVB\\1Y>M2O2M3NO01O010O011N2O2M3M3N1N3N2M3N1N3M3N1N3N2M3Nkee2"}, "image_id": 534, "id": 9356}, {"iscrowd": 0, "category_id": 1, "bbox": [330.0, 232.0, 47.0, 65.0], "area": 1650, "segmentation": {"size": [512, 512], "counts": "kWU52n?2M3M2O2M3N2M3N1N3M3FYOWAi0g>YOVAi0h>:M3N2M2O2M3N2M210O100N1O2M1O10O012M3M3N1N3ZOcAE_>8dAE_>9bAF_>8dAE_>9cAD`>9cAE_>9cAD_>:cAE_>8g0N2M2O2MiWS2"}, "image_id": 534, "id": 9357}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 252.0, 32.0, 70.0], "area": 1310, "segmentation": {"size": [512, 512], "counts": "Q8k1T>2N2N2N2N2O10N3M2N2N2O1N2aNkAP1W>nNkAP1X>nNiAP1Y>nNiAP1Y>nNiAP1Y>nNiAP1f>M2O3L2N2N2N2N2N2N2O2G[@2g?L[@2k?Nff_7"}, "image_id": 534, "id": 9358}, {"iscrowd": 0, "category_id": 1, "bbox": [178.0, 258.0, 13.0, 14.0], "area": 81, "segmentation": {"size": [512, 512], "counts": "UXi21n?3M2O1N2010O00000O2M2N2NeWP5"}, "image_id": 534, "id": 9359}, {"iscrowd": 0, "category_id": 1, "bbox": [169.0, 277.0, 29.0, 30.0], "area": 451, "segmentation": {"size": [512, 512], "counts": "Qid21o?1N3M2N2O2M2N2N3N1N2N3M2O1N1O0003M2O1N3M2N2N3N1N2N3M2O1Nlfl4"}, "image_id": 534, "id": 9360}, {"iscrowd": 0, "category_id": 1, "bbox": [60.0, 281.0, 60.0, 41.0], "area": 1718, "segmentation": {"size": [512, 512], "counts": "cYn03j?4M2M3M4L3M3M4N100010O00010O00010O00010O00010O00010O00010O00010OF\\OXAd0h>;O0000O10O10000000O10O10000000O1000O100005J7J7I6J7IXfS6"}, "image_id": 534, "id": 9361}, {"iscrowd": 0, "category_id": 1, "bbox": [37.0, 295.0, 14.0, 15.0], "area": 110, "segmentation": {"size": [512, 512], "counts": "_ib03l?2N2N2N2N00000011N2N2N2N2NeVV7"}, "image_id": 534, "id": 9362}, {"iscrowd": 0, "category_id": 1, "bbox": [147.0, 298.0, 11.0, 11.0], "area": 62, 
"segmentation": {"size": [512, 512], "counts": "_iY21n?2N2N2N2N20O1M3N2N2Nbf`5"}, "image_id": 534, "id": 9363}, {"iscrowd": 0, "category_id": 1, "bbox": [117.0, 310.0, 67.0, 71.0], "area": 2002, "segmentation": {"size": [512, 512], "counts": "Wkj12m?2N2N2N3M2N2O1N2N2N2N3M2N2N2O1N2N2N2N1O0001O0000000001O0001OHkNfAU1Z>mNdAS1\\>oNbAQ1^>QO`Ao0`>9O000001O00000001O000001O2N2N2G[ATOg>j0\\ATOe>j0]ATOe>j0:N3M2N2O1N2N2N2N3M2N2N2O1NfeS5"}, "image_id": 534, "id": 9364}, {"iscrowd": 0, "category_id": 1, "bbox": [195.0, 310.0, 19.0, 20.0], "area": 204, "segmentation": {"size": [512, 512], "counts": "miQ31n?2N2N2O2M2N2N2N2O2N10N2N2N3M2O1N2N5Koed4"}, "image_id": 534, "id": 9365}, {"iscrowd": 0, "category_id": 1, "bbox": [209.0, 315.0, 52.0, 54.0], "area": 1500, "segmentation": {"size": [512, 512], "counts": "`jX31n?2N2N3M2N2O1N2N2N3M2N2001ON2N3M2O1N2N2N3M2N2N2O1N2N3M2N0002N2N2N2N2O101O0001OO1UOZA9h>FYA8j>EYA8i>FYA8i>FYA9h>EZA9h>EZA9h>EZA9Y?M2N2N2OlTm3"}, "image_id": 534, "id": 9366}, {"iscrowd": 0, "category_id": 1, "bbox": [53.0, 317.0, 21.0, 21.0], "area": 222, "segmentation": {"size": [512, 512], "counts": "Wjj02m?2N3M2N2O1N2N2N2N000002N2O1N3M2N2N2N2N2Oiej6"}, "image_id": 534, "id": 9367}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 323.0, 3.0, 11.0], "area": 21, "segmentation": {"size": [512, 512], "counts": "S:;f?J\\@Nf?OPVn7"}, "image_id": 534, "id": 9368}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 323.0, 49.0, 38.0], "area": 865, "segmentation": {"size": [512, 512], "counts": "`:g0X?2N10O01O002N2N21M2N2N2O1N2N3M2N2N1O01O01O0002N2N1O00010O0000000000010O000002N2O20O00N2N2O1N2N3MbUW7"}, "image_id": 534, "id": 9369}, {"iscrowd": 0, "category_id": 1, "bbox": [64.0, 326.0, 31.0, 32.0], "area": 517, "segmentation": {"size": [512, 512], "counts": "cZP12m?2N2N3M2O1N2N2N2N2N2N2N3M2N2O10N2N2N3N1N2N2N2N2N2N2N2N3N1N2NZU`6"}, "image_id": 534, "id": 9370}, {"iscrowd": 0, "category_id": 1, "bbox": [147.0, 351.0, 64.0, 83.0], "area": 2606, "segmentation": {"size": [512, 512], "counts": "blY22m?2N2N2O1N2N2N3M2b@_OZ?e0N2N3M2N2O1N2001O00N2WOPO_BS1^=oN`BS1^=oN`BS1^=oN`BS1^=oN`BS1_=nN_BT1_=nN`BR1_=PO_BP1a=RO]Bo0b=QO^Bo0b=QO^Bo0b=QO^Bo0b=QO^Bo0b=QO^Bo0b=QO^Bo0b=SO\\Bm0d=UOZBk0f=g0011N2N2N2N3M2N002N2N2O1N2XOeAH_>4cAJ^>5dAI^>5dAI^>5dAI^>5eAH]>6eAH^>5dAI^>5dAJ]>5dAI^>5dAI_>4h0N2NWTf4"}, "image_id": 534, "id": 9371}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 361.0, 30.0, 56.0], "area": 976, "segmentation": {"size": [512, 512], "counts": "^;Y1f>2N2N2N3M2O11O00000N2N2N2N2N3nNYAf0i>XOYAf0i>XOZAf0g>XO[Af0Q?N2N2N2N2N2N2N3M2O1N2Ndc`7"}, "image_id": 534, "id": 9372}, {"iscrowd": 0, "category_id": 1, "bbox": [69.0, 367.0, 53.0, 58.0], "area": 1560, "segmentation": {"size": [512, 512], "counts": "alR11n?2N3M2N2N2O1N2N2N2N3M2N2N2N2O1N2N3HnN]AR1c>PO[AP1e>6O01O02N2N2N2N2N3M01O01O0002N3M2N2N2@]A@e>>^A_Od>?^A@c>>_A@d>>]A@e>>]A@e>>]A@e>>`0N2N2N2N3M2O1NlcR6"}, "image_id": 534, "id": 9373}, {"iscrowd": 0, "category_id": 1, "bbox": [498.0, 371.0, 14.0, 32.0], "area": 235, "segmentation": {"size": [512, 512], "counts": "k[i72n?2M2N3N2M3N1N3M3N1N3N2M2N3[D"}, "image_id": 534, "id": 9374}, {"iscrowd": 0, "category_id": 1, "bbox": [447.0, 408.0, 40.0, 57.0], "area": 1508, "segmentation": {"size": [512, 512], "counts": "Tmo62n?4L4K5L4L5K4K5FTOXAP1d>:L5K1N010000000O0100000O0100000O010001O4K5L4L5K4K5L4L4L5K4K5LiQ<"}, "image_id": 534, "id": 9375}, {"iscrowd": 0, "category_id": 1, "bbox": [218.0, 413.0, 18.0, 18.0], "area": 168, "segmentation": {"size": [512, 512], "counts": "W]]31n?2N2N2N2N2N2N2N1O0002N2N2N2N2N2N2NmbY4"}, "image_id": 534, 
"id": 9376}, {"iscrowd": 0, "category_id": 1, "bbox": [60.0, 414.0, 28.0, 32.0], "area": 411, "segmentation": {"size": [512, 512], "counts": "R]n02m?2N2\\@LY?6e@LY?6e@LZ?5d@M\\?=O0001O000000N20000000O1N2N2N2N2N2N3M2N2O1N]bc6"}, "image_id": 534, "id": 9377}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 418.0, 7.0, 15.0], "area": 58, "segmentation": {"size": [512, 512], "counts": "R=?b?N3M2N2N2N2NhRl7"}, "image_id": 534, "id": 9378}, {"iscrowd": 0, "category_id": 1, "bbox": [106.0, 418.0, 52.0, 54.0], "area": 1406, "segmentation": {"size": [512, 512], "counts": "f]e12m?2N2N2N3M2O1N2N2N2N2N2N2N2N3M2N2O1N2N2N2N2N2O10001O0001O000O1N2N2N2N2N2N2N2N3SOm@h0W?N2N2O1N2N2N2N3M2N2N2N2Nna`5"}, "image_id": 534, "id": 9379}, {"iscrowd": 0, "category_id": 1, "bbox": [490.0, 424.0, 21.0, 24.0], "area": 274, "segmentation": {"size": [512, 512], "counts": "^]e72n?2M3N1N3M3N2M3N2M2O0O12M3M3N2M2O2M3N2M3NW2"}, "image_id": 534, "id": 9380}, {"iscrowd": 0, "category_id": 1, "bbox": [393.0, 439.0, 41.0, 58.0], "area": 1570, "segmentation": {"size": [512, 512], "counts": "SnT62n?5J5L4L4L5J5GTOVAP1f>9L5K4K2O00O10O1000O10O1000O1000O10O1000O12N5J5L4L4L5J5L4L5K4K5L4Lh`V1"}, "image_id": 534, "id": 9381}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 448.0, 33.0, 50.0], "area": 922, "segmentation": {"size": [512, 512], "counts": "Z>X1g>0000001O00000002N2N000000002N2N2N2N2O1N2N2N2N2N2N2N2N3M2N2N2N2N^Q_7"}, "image_id": 534, "id": 9382}, {"iscrowd": 0, "category_id": 1, "bbox": [497.0, 450.0, 11.0, 11.0], "area": 68, "segmentation": {"size": [512, 512], "counts": "Unh71o?2M3N1N3M0101N2O2M2Nja1"}, "image_id": 534, "id": 9383}, {"iscrowd": 0, "category_id": 1, "bbox": [64.0, 458.0, 49.0, 53.0], "area": 1314, "segmentation": {"size": [512, 512], "counts": "P_P11n?2N2N2N2N3M2N2N2N2N2N2O1N2N2N2N2N2N3M2N2N2N2N200000000000O1N3M2N2N2N2N2N2N2VOo@b0S?]On@a0T?]On@b0Z?M2N2N2N2N2N2N2NfPW6"}, "image_id": 534, "id": 9384}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 458.0, 26.0, 35.0], "area": 462, "segmentation": {"size": [512, 512], "counts": "`^i62n?2M3N2M3M3N2M3N1h@\\OQ?g0l@\\OR?k0M3O01N2M3N2M2O2M3N2M3N2M3N2M3Nl`i0"}, "image_id": 534, "id": 9385}, {"iscrowd": 0, "category_id": 1, "bbox": [25.0, 468.0, 30.0, 29.0], "area": 441, "segmentation": {"size": [512, 512], "counts": "Qo<1n?2N2N2N2N2O1N3M2N2N2N2N2N2N2OO2O1N2N2N3M2N2N2N2N2N2N2O1N2NnPT7"}, "image_id": 534, "id": 9386}, {"iscrowd": 0, "category_id": 1, "bbox": [143.0, 471.0, 23.0, 21.0], "area": 253, "segmentation": {"size": [512, 512], "counts": "SoW21n?2N3M2N2N2N2O1N1O1O00000001O1O2O1N2N3M2N2N2NRa\\5"}, "image_id": 534, "id": 9387}, {"iscrowd": 0, "category_id": 1, "bbox": [332.0, 475.0, 50.0, 37.0], "area": 1178, "segmentation": {"size": [512, 512], "counts": "c_V51n?2O1N2N3M2N2N2N2N2N2O1N1O1O1O1O1O1O1O100O1O1O1O1O1O1O11O1O1O1O2NO1OORAROm>n02000010O2N2N2N2N2Eg@L\\?2e@L]?2e@L]?2e@M\\?1f@M\\?1TaP2"}, "image_id": 534, "id": 9388}, {"iscrowd": 0, "category_id": 1, "bbox": [22.0, 495.0, 8.0, 9.0], "area": 42, "segmentation": {"size": [512, 512], "counts": "d_;1m?3N2N2N01O2N2N```7"}, "image_id": 534, "id": 9389}, {"iscrowd": 0, "category_id": 1, "bbox": [38.0, 497.0, 31.0, 15.0], "area": 247, "segmentation": {"size": [512, 512], "counts": "o_c01n?100O1O1O1O1O1O1O1O1O1O100O1O1O1001O1O1O1O1O1O1O2N1O1O1O1O1OQPm6"}, "image_id": 534, "id": 9390}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 499.0, 14.0, 13.0], "area": 104, "segmentation": {"size": [512, 512], "counts": "g?2m?2N2N2N2O10000O01O1N2N2N2NW`h7"}, "image_id": 534, "id": 9391}, {"iscrowd": 0, "category_id": 1, "bbox": 
[295.0, 501.0, 22.0, 11.0], "area": 132, "segmentation": {"size": [512, 512], "counts": "ooc41n?1O1O100O1O1O1O1O1O1O1001O1O1O1O1O1O2N1O1OQPQ3"}, "image_id": 534, "id": 9392}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 509.0, 3.0, 3.0], "area": 6, "segmentation": {"size": [512, 512], "counts": "m?3n?O1OQPn7"}, "image_id": 534, "id": 9393}, {"iscrowd": 0, "category_id": 1, "bbox": [292.0, 346.0, 20.0, 20.0], "area": 210, "segmentation": {"size": [512, 512], "counts": "S[b42m?2O1N2N3M2N2O1N2N000011N2N2N2N3N1N2N2NndS3"}, "image_id": 535, "id": 9394}, {"iscrowd": 0, "category_id": 1, "bbox": [348.0, 358.0, 19.0, 14.0], "area": 133, "segmentation": {"size": [512, 512], "counts": "][^51o?2M3N2M10O010O010O010O010O010O2O2M3NeTX2"}, "image_id": 535, "id": 9395}, {"iscrowd": 0, "category_id": 1, "bbox": [405.0, 384.0, 31.0, 21.0], "area": 628, "segmentation": {"size": [512, 512], "counts": "PlZ6:f?;E00000000000000000000000000000000000000000000O10000000005KkcU1"}, "image_id": 535, "id": 9396}, {"iscrowd": 0, "category_id": 1, "bbox": [298.0, 403.0, 25.0, 41.0], "area": 993, "segmentation": {"size": [512, 512], "counts": "c\\e4X1h>00000000000001O00000000000001O00000000000000000\\Sn2"}, "image_id": 535, "id": 9397}, {"iscrowd": 0, "category_id": 1, "bbox": [325.0, 437.0, 142.0, 75.0], "area": 5750, "segmentation": {"size": [512, 512], "counts": "VoR5j0V?00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000_Na10000000000000000000000000000000000000000000000000000000000000000000000000000000000[Rf0"}, "image_id": 535, "id": 9398}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 407.0, 9.0, 22.0], "area": 171, "segmentation": {"size": [512, 512], "counts": "g00O100000000000000000:F`0@000000000000000000000000000000006JQQ`7"}, "image_id": 536, "id": 9400}, {"iscrowd": 0, "category_id": 1, "bbox": [73.0, 262.0, 16.0, 20.0], "area": 150, "segmentation": {"size": [512, 512], "counts": "ZhT11n?3N1N3M3N2M20100O01M3N2M2O2M3N[Wc6"}, "image_id": 538, "id": 9401}, {"iscrowd": 0, "category_id": 1, "bbox": [90.0, 298.0, 16.0, 38.0], "area": 519, "segmentation": {"size": [512, 512], "counts": "\\Z]13Q?l0J60000000001O000000000000I7TObgZ6"}, "image_id": 538, "id": 9402}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 336.0, 34.0, 58.0], "area": 1752, "segmentation": {"size": [512, 512], "counts": "`:i1W>00000000000000000010O00000000000L40000000001O000000000001OE;E;DX110O0010O0010O010ON210O001nN[Ah0d>UO_Ak0a>SOaAn0i>O0010O0010O0010O0010O0N3L3N2M4M2N3L3N2M4MP_c5"}, "image_id": 539, "id": 9409}, {"iscrowd": 0, "category_id": 1, "bbox": [473.0, 13.0, 39.0, 46.0], "area": 1074, "segmentation": {"size": [512, 512], "counts": "ba\\73j?3M4M2M3M4L3M3M4L3N3L3M3M4O00010O00O2L3M3N3L3M4M2N2010O00010O00010O010O00010YO"}, "image_id": 539, "id": 9410}, {"iscrowd": 0, "category_id": 1, "bbox": [220.0, 23.0, 56.0, 46.0], "area": 1496, "segmentation": {"size": [512, 512], "counts": "lP^33l?2f@Lg>3WA0f>3WAOf>4WAOi>1UA2j>ORA4n>LPA7P?<0010O0O2O01O010O01O2OO0010O010O0010O0010OO1010O01O010O01O01O010O01O010O01O0M3N3M2M4M2M4M2M3N3Mhne3"}, "image_id": 539, "id": 9411}, {"iscrowd": 0, "category_id": 1, "bbox": [504.0, 64.0, 8.0, 25.0], "area": 108, "segmentation": {"size": [512, 512], "counts": "dRl73k?2M3M4L3M3M4L3PN"}, "image_id": 539, "id": 9412}, {"iscrowd": 0, "category_id": 1, "bbox": [173.0, 65.0, 69.0, 44.0], "area": 1705, "segmentation": {"size": [512, 512], "counts": 
"nbf21m?2N3M2N3N10010O010O00010@FXA:e>JZA7c>K[A7c>KZA8f>IWA:i>ETA>l>;10O001MJYAUOh>l0610O00010O01O10O0010O0010O0010O0001O010O00010O010O00010O010O00010O001M2M3N3L3N3L3M3N3L3N`mV4"}, "image_id": 539, "id": 9413}, {"iscrowd": 0, "category_id": 1, "bbox": [458.0, 71.0, 41.0, 51.0], "area": 1188, "segmentation": {"size": [512, 512], "counts": "^SU73k?3L3N2N3L3N3L3N2M4M2N3L3N2M4M2N2O2O0010O0010O00N3L3N3M2M3N3L3N3M2M3N3M2M3N3L3N3Mb]6"}, "image_id": 539, "id": 9414}, {"iscrowd": 0, "category_id": 1, "bbox": [498.0, 93.0, 14.0, 44.0], "area": 319, "segmentation": {"size": [512, 512], "counts": "STi71l?3N2M4M2M4M2FBo@a0o>Ao@b0m>:N3L3N2O2RM"}, "image_id": 539, "id": 9415}, {"iscrowd": 0, "category_id": 1, "bbox": [432.0, 131.0, 49.0, 41.0], "area": 1237, "segmentation": {"size": [512, 512], "counts": "iTh64i?3N2M4L3N3L3M3N3N104L010O010O01O01O010O010O01O01O010O010ON2O20O01O010O01O01O0N3M2M4M2N2M4M2M4M2N_[?"}, "image_id": 539, "id": 9416}, {"iscrowd": 0, "category_id": 1, "bbox": [486.0, 134.0, 26.0, 27.0], "area": 440, "segmentation": {"size": [512, 512], "counts": "gTc72l?3M2M3N3L3N3M2O110O0010O0010O00010O0010N1M4M2M3M4MeK"}, "image_id": 539, "id": 9417}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 158.0, 5.0, 14.0], "area": 41, "segmentation": {"size": [512, 512], "counts": "n4>c?M2M3N3LS[m7"}, "image_id": 539, "id": 9418}, {"iscrowd": 0, "category_id": 1, "bbox": [424.0, 173.0, 52.0, 49.0], "area": 1671, "segmentation": {"size": [512, 512], "counts": "aVd61l?4ANl@4R?Oj@4S?Ok@4R?>O20O0010M2N3L3N2M4N101O01O0O2M200010O01O01O010O01O01O010O010O00010O010O0001M2M4M2M4Bk@MW?1k@LX?1l@LW?1k@LX?1bja0"}, "image_id": 539, "id": 9419}, {"iscrowd": 0, "category_id": 1, "bbox": [3.0, 182.0, 47.0, 58.0], "area": 1676, "segmentation": {"size": [512, 512], "counts": "nf13k?3L3N3L3N2M4M2M4M2M4QAPOj>T1N2M4M1N3N3N11O010O01O01O010O010O01O01O010O01O01O001oNcA;_>BdA;`>BcA;_>BdA;_>CdA:_>CcA;R?M4M2M4M\\iV7"}, "image_id": 539, "id": 9420}, {"iscrowd": 0, "category_id": 1, "bbox": [78.0, 212.0, 41.0, 50.0], "area": 1178, "segmentation": {"size": [512, 512], "counts": "jWW12l?2N3L3N2M4M2M4M2M3N3L3N3M2M3N3M201O01O010O00010O010L3N2M4M2M4M2M3N3L3N3L3N2M4L3NUYT6"}, "image_id": 539, "id": 9421}, {"iscrowd": 0, "category_id": 1, "bbox": [494.0, 212.0, 18.0, 35.0], "area": 340, "segmentation": {"size": [512, 512], "counts": "^Wg72l?2M3N3M2M4M2N3O00010O010M2M4M2N2M4[I"}, "image_id": 539, "id": 9422}, {"iscrowd": 0, "category_id": 1, "bbox": [121.0, 215.0, 79.0, 63.0], "area": 2564, "segmentation": {"size": [512, 512], "counts": "igl13j?3M4M2M4L3N2M4L3N3L3N2M4M201O01O01O010O010O00010O0010O001jN]An0b>POaAo0h>10O01O01O01O010O01O01O010O00010O010O0010O0010O0010O00010O01L3M310O00O2L3N3L3N2M4M2M3N3L3N3L3N2Mbhk4"}, "image_id": 539, "id": 9423}, {"iscrowd": 0, "category_id": 1, "bbox": [19.0, 237.0, 30.0, 42.0], "area": 672, "segmentation": {"size": [512, 512], "counts": "\\h93V?N^A4`>N]A6`>M]A5`>N^A5`>M]A5d>JYA:f>GWA;j>DTA>l><0O01OROUAg0l>UOWAl0o>O010O010O0O1N3M2N3M2M4M2N3M2M3NTXW7"}, "image_id": 539, "id": 9424}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 240.0, 72.0, 105.0], "area": 3039, "segmentation": {"size": [512, 512], "counts": "Vjm23k?2N3L3N2N3O0010O01O000N3M2M4M2N3L3N2N3L3N3M2M4M2N2M4M2ZOUNUCn1h0O01O010O0N2N3M2M4M2N3L3N201O0010ON3M2N2M4M2NoWn3"}, "image_id": 539, "id": 9425}, {"iscrowd": 0, "category_id": 1, "bbox": [44.0, 245.0, 24.0, 31.0], "area": 414, "segmentation": {"size": [512, 512], "counts": "\\Xf02l?3L3N3L3N2M4M2M4M2O110O0010M2N3L3N2M4M2M3N3L3NXhm6"}, "image_id": 539, "id": 9426}, {"iscrowd": 0, "category_id": 1, 
"bbox": [79.0, 290.0, 57.0, 74.0], "area": 2044, "segmentation": {"size": [512, 512], "counts": "ijW12l?2N3L3N3N1010O_@D^?`0O010O0O2M2M4M2N2M4M2N3L3N3M2EbNQBa1l=cNQB`1l=bNRB`1l=m=EQB>l=DQB?l=EQB>l=DRB>k=FQB=m=EQB>o=BmAa0S>g0L3N2M4M2M4N11O01O010O010ON2N3L3N3L3N3L3N2M4M2M4M2M3N3M2M4M2M3N3L3N3L3N3L3N2M`S[6"}, "image_id": 539, "id": 9436}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 400.0, 18.0, 22.0], "area": 372, "segmentation": {"size": [512, 512], "counts": "a010O0N3M2M4M2M2OO10O0102N2M4M2M4M2N3L3N2N3M201N1N2M4M2N3L3N3M2M3N`b^7"}, "image_id": 539, "id": 9438}, {"iscrowd": 0, "category_id": 1, "bbox": [115.0, 450.0, 50.0, 41.0], "area": 1266, "segmentation": {"size": [512, 512], "counts": "_ni11l?3N3]@MU?5i@MT?7i@LU?`01O01O010O0N2N3O0010O01O01O010O010O00010O010O00010OO2M201O01O010O010N1N2RORAh0T?M4M2M4M2M3N3M2McQ]5"}, "image_id": 539, "id": 9439}, {"iscrowd": 0, "category_id": 1, "bbox": [79.0, 462.0, 19.0, 23.0], "area": 252, "segmentation": {"size": [512, 512], "counts": "nnW12l?3M2M3N3M2N3M2O1010O010M2N2M4M2M4M2Maa^6"}, "image_id": 539, "id": 9440}, {"iscrowd": 0, "category_id": 1, "bbox": [92.0, 473.0, 29.0, 21.0], "area": 329, "segmentation": {"size": [512, 512], "counts": "T_^11m?2N3M2N2N3N1010O0010O010O00010O010O00010O010O00O2M2M4M2MQQS6"}, "image_id": 539, "id": 9441}, {"iscrowd": 0, "category_id": 1, "bbox": [110.0, 505.0, 18.0, 7.0], "area": 69, "segmentation": {"size": [512, 512], "counts": "m_g13k?2N21O00001O001O00001O001O00001O00Q`o5"}, "image_id": 539, "id": 9442}, {"iscrowd": 0, "category_id": 1, "bbox": [67.0, 0.0, 3.0, 1.0], "area": 3, "segmentation": {"size": [512, 512], "counts": "P`Q11o?000PPm6"}, "image_id": 540, "id": 9443}, {"iscrowd": 0, "category_id": 1, "bbox": [85.0, 0.0, 12.0, 7.0], "area": 53, "segmentation": {"size": [512, 512], "counts": "R`Z12l?3O001O00001O00M3N2O11o__6"}, "image_id": 540, "id": 9444}, {"iscrowd": 0, "category_id": 1, "bbox": [111.0, 0.0, 41.0, 21.0], "area": 551, "segmentation": {"size": [512, 512], "counts": "V`g11l?3N3N101O001O00001O001O00001O001O001O00001O0a@B\\?a001O00O1O11O001O0010O1O0N3L3N2N3L3Nnoc5"}, "image_id": 540, "id": 9445}, {"iscrowd": 0, "category_id": 1, "bbox": [164.0, 0.0, 42.0, 44.0], "area": 1318, "segmentation": {"size": [512, 512], "counts": "lPb23j?3N2M4M2M4L3N2M4M2M4M20001O001O00001O00001O001O00001O001O00001O0010O0O1M4XOYA3i>KYA2k>JYAKH3Q?0YAKI1R?0^AN`^i4"}, "image_id": 540, "id": 9446}, {"iscrowd": 0, "category_id": 1, "bbox": [500.0, 0.0, 12.0, 5.0], "area": 34, "segmentation": {"size": [512, 512], "counts": "PPj71o?00001O001O00001O001O00"}, "image_id": 540, "id": 9447}, {"iscrowd": 0, "category_id": 1, "bbox": [28.0, 9.0, 28.0, 31.0], "area": 516, "segmentation": {"size": [512, 512], "counts": "PQ>1l?3N3L3N2M4M2M4L3N30O00010O010O0010O00O2L3N3M2M3N3L3N3MaoS7"}, "image_id": 540, "id": 9448}, {"iscrowd": 0, "category_id": 1, "bbox": [218.0, 17.0, 47.0, 56.0], "area": 1630, "segmentation": {"size": [512, 512], "counts": "`Q]31l?3^@OS?3k@OR?5j@OS?3k@OR?a0N3L3N3O01O01UAnNf>V11ON3L3N3L3N01011O01O010O01O01O010N1M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N2MX_k3"}, "image_id": 540, "id": 9449}, {"iscrowd": 0, "category_id": 1, "bbox": [83.0, 23.0, 17.0, 31.0], "area": 232, "segmentation": {"size": [512, 512], "counts": "caY11l?4L3N2M4L3M3M3M0001O3M3M4L3M3N3LZo]6"}, "image_id": 540, "id": 9450}, {"iscrowd": 0, "category_id": 1, "bbox": [258.0, 27.0, 56.0, 63.0], "area": 1791, "segmentation": {"size": [512, 512], "counts": 
"_RQ44j?2M3N3L3N3L3N2M4M2M4M2N2M4M2M4M2M3N3L3NO013L3N2M4M2M4M2M4M2O101O010O010O00010O010O00010O010O001M2N2M4Di@KY?2j@KZ?2i@KY?2h0\\AZOb>i0[AZOb>h0[A[Od>T1NO010O010O00010O010O0010O0010O010O00010O0N0O103L3N2N3L3N3M2M3N3L3N3M2M3N3MkmS2"}, "image_id": 540, "id": 9455}, {"iscrowd": 0, "category_id": 1, "bbox": [284.0, 72.0, 26.0, 39.0], "area": 591, "segmentation": {"size": [512, 512], "counts": "aR^43k?2i@M^>6_AM^>7_AK`>6]ANc>1[A1e>0WA4h>LVA6k>?010O000ROTAi0Q?0010N101O0O1N3L3N3L3N2F]@4i?NZmT3"}, "image_id": 540, "id": 9456}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 73.0, 20.0, 27.0], "area": 294, "segmentation": {"size": [512, 512], "counts": "obo21l?3N2M4M2M4L3N2M4N110O00M4M2M4M2M3N3L3Ne]f4"}, "image_id": 540, "id": 9457}, {"iscrowd": 0, "category_id": 1, "bbox": [383.0, 78.0, 51.0, 45.0], "area": 1272, "segmentation": {"size": [512, 512], "counts": "Wco52l?2M4M2N3L3N3M2M4M2M3O20O010O01O010O01O010O010O01RAQOi>o0UATOk>P1010O010O00O0O03N110O0N3L3N2N3L3N3M201O0Ea@5e?1ON3M2MRmV1"}, "image_id": 540, "id": 9458}, {"iscrowd": 0, "category_id": 1, "bbox": [250.0, 87.0, 26.0, 21.0], "area": 331, "segmentation": {"size": [512, 512], "counts": "TSm31l?3N2M4M2M4O00010O01O01O010O01O01O010O01O000M4M2M4MSme3"}, "image_id": 540, "id": 9459}, {"iscrowd": 0, "category_id": 1, "bbox": [93.0, 91.0, 43.0, 48.0], "area": 1233, "segmentation": {"size": [512, 512], "counts": "lc^14j?2M4M2M3M4M2M4M2M3M4M2M4M20010O01O01O010O01O01O010O01O010M2N2M4M2M4M2M3M4M2M4M2M4L3Nklk5"}, "image_id": 540, "id": 9460}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 96.0, 42.0, 61.0], "area": 1793, "segmentation": {"size": [512, 512], "counts": "\\3a02;_>c0J6N20001O0000000000000000000L401O00000000000000O1C=C=F:0019F00000000001O00000000elZ7"}, "image_id": 540, "id": 9461}, {"iscrowd": 0, "category_id": 1, "bbox": [429.0, 107.0, 58.0, 49.0], "area": 1699, "segmentation": {"size": [512, 512], "counts": "Ydf61m?3L3N3L3N2M4O001M2M3O20OO2M2M4M2N2010O010O00010O010O0001XAkNd>X1010O00010NO0O3N2010O0O2L3N3L3N2M4M2M4M2O110O001L3N5K3L3N\\\\<"}, "image_id": 540, "id": 9462}, {"iscrowd": 0, "category_id": 1, "bbox": [122.0, 109.0, 66.0, 51.0], "area": 1758, "segmentation": {"size": [512, 512], "counts": "fTm12l?2M3N3L3N3L3N2M4O0010O0010O0O2M2M3N3L3N3L100O02O3L3N2M1200010O010O0POTAl0Q?0O0010O0010O0010O0010O0010O0010O0010O0010ON2M4M2M4M2M3M4M2M4MRlQ5"}, "image_id": 540, "id": 9463}, {"iscrowd": 0, "category_id": 1, "bbox": [239.0, 109.0, 24.0, 22.0], "area": 318, "segmentation": {"size": [512, 512], "counts": "jcg32l?3M2M3N3M2N3O001O01O010O01O01O010N1N3L3N2M4M2N_\\l3"}, "image_id": 540, "id": 9464}, {"iscrowd": 0, "category_id": 1, "bbox": [296.0, 112.0, 26.0, 30.0], "area": 470, "segmentation": {"size": [512, 512], "counts": "UTd43j?3N3L3N2M4M2M4M2O101O01O010O01O01M2M3M4M2M4M2M3M4M\\ln2"}, "image_id": 540, "id": 9465}, {"iscrowd": 0, "category_id": 1, "bbox": [192.0, 113.0, 28.0, 26.0], "area": 440, "segmentation": {"size": [512, 512], "counts": "STP31l?3N3L3N2M4M2M4O01O010O01O01O010O01O01O0O2M0O3N3L3N3L3N[la4"}, "image_id": 540, "id": 9466}, {"iscrowd": 0, "category_id": 1, "bbox": [489.0, 123.0, 23.0, 51.0], "area": 694, "segmentation": {"size": [512, 512], "counts": "edd72l?3M2M3N3M2M4M2M3N3l@WOm>o0O20O00\\AoNY>R1cAQO^>l0bAVO^>h0aA\\O\\>g0aA[O]>V1O010O000TL"}, "image_id": 540, "id": 9467}, {"iscrowd": 0, "category_id": 1, "bbox": [75.0, 129.0, 26.0, 27.0], "area": 429, "segmentation": {"size": [512, 512], "counts": "bdU13k?2M4M2M3N3L3N30O00010O010O00010O010O0N2N3L3N3L3N2Mk[]6"}, "image_id": 540, "id": 9468}, {"iscrowd": 0, "category_id": 
1, "bbox": [206.0, 131.0, 64.0, 72.0], "area": 2370, "segmentation": {"size": [512, 512], "counts": "QUW34i?3M4K4M3M4K41O01O01O0001O01O01O01O01O0001O01O01O01O01O0001O0_AXOk=g0QB]Oo=d0mA_OR>b0jACR>`0kACQ>b0kAAR>X1M4K40001O0001O01OWNVB\\1k=`NXB`1h=aNXB_1g=aNYB_1g=aNYB_1h=`NXBa1g=`NYB_1g=`NZB`1g=[N]Bd1Q>L3L4^OaA@b>=bA_Ob>=aA_Oc>=aA@b>=c0K4M3M]kh3"}, "image_id": 540, "id": 9469}, {"iscrowd": 0, "category_id": 1, "bbox": [326.0, 135.0, 23.0, 24.0], "area": 347, "segmentation": {"size": [512, 512], "counts": "gTS53k?2M3N3M2N3L3N3O00010O0010O001O0N2N3L3N3L3N2Mg[a2"}, "image_id": 540, "id": 9470}, {"iscrowd": 0, "category_id": 1, "bbox": [348.0, 141.0, 37.0, 31.0], "area": 702, "segmentation": {"size": [512, 512], "counts": "PU^52k?3N2M4M2M4M2M3O2O01O01O01O010O01O01O01O01O010O01O01O01O01O0O1M4L3M4M2M3M\\[o1"}, "image_id": 540, "id": 9471}, {"iscrowd": 0, "category_id": 1, "bbox": [38.0, 152.0, 21.0, 17.0], "area": 313, "segmentation": {"size": [512, 512], "counts": "oTc08a?701O0000000001O0000000000000001O000000J][R7"}, "image_id": 540, "id": 9472}, {"iscrowd": 0, "category_id": 1, "bbox": [398.0, 153.0, 30.0, 40.0], "area": 666, "segmentation": {"size": [512, 512], "counts": "lUW61i?0Z@2c?8L3N2M4M2M3M4M2M4L3N21M2N30O0001N1N3L3N2M4L3N3L3M4M2M3NRkY1"}, "image_id": 540, "id": 9473}, {"iscrowd": 0, "category_id": 1, "bbox": [277.0, 166.0, 73.0, 64.0], "area": 2614, "segmentation": {"size": [512, 512], "counts": "WfZ41l?3W@Na?;M2M4L3N2M4L3N0O1O3N3N11O01O010O01O01O01O01O010O01O01O01O01O010O01O]AmNY>T1cAoN]>Q1aAQO`>X1O010O00010O0O110O0010O0010O00010O01N1N2M4L3N2M4M201O01ON3Em@EU?9m@DV?9;L3N3LWj`2"}, "image_id": 540, "id": 9474}, {"iscrowd": 0, "category_id": 1, "bbox": [453.0, 170.0, 27.0, 38.0], "area": 557, "segmentation": {"size": [512, 512], "counts": "YfR72k?3N3L3M3M4M2M3M4L3M3N3N110O00O2L3N3L3M4L3N2M4L3M4M2Mcj?"}, "image_id": 540, "id": 9475}, {"iscrowd": 0, "category_id": 1, "bbox": [26.0, 172.0, 22.0, 16.0], "area": 295, "segmentation": {"size": [512, 512], "counts": "eU=5b?901O000001O0000000000000000000001O000001CnjW7"}, "image_id": 540, "id": 9476}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 176.0, 13.0, 40.0], "area": 260, "segmentation": {"size": [512, 512], "counts": "`5X1i>M2N2M4M2UOPAc0Y?M3M2M4M2M4M2NWZi7"}, "image_id": 540, "id": 9477}, {"iscrowd": 0, "category_id": 1, "bbox": [351.0, 177.0, 36.0, 34.0], "area": 710, "segmentation": {"size": [512, 512], "counts": "Tf_51c?1e@1Y?2c@1Z?2d@1\\?9010OM3O2O010O01O01O010g@[OU?i00O01O01O010O01O01O010N1M3N3L3N3L3N2M4MVZn1"}, "image_id": 540, "id": 9478}, {"iscrowd": 0, "category_id": 1, "bbox": [67.0, 178.0, 73.0, 59.0], "area": 2386, "segmentation": {"size": [512, 512], "counts": "[fQ11l?3M4M2M4L3M3N3L3M4O010O01O01O010O010O01O01O010O010O00010O010O010O0N2YAROY>R1dAQOY>Q1dARO]>X101O01O010O00010O01M21O010O0010O00010O01N1M3M4L3N3L3M4O000M4Gj@DX?:j@CZ?98M4L3NPji5"}, "image_id": 540, "id": 9479}, {"iscrowd": 0, "category_id": 1, "bbox": [380.0, 190.0, 28.0, 31.0], "area": 522, "segmentation": {"size": [512, 512], "counts": "cVn53j?3N3L3N3L3M3N3L31O010O01O01O01O010O01N1M3N3L3N3L3M3N3Lmic1"}, "image_id": 540, "id": 9480}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 210.0, 75.0, 48.0], "area": 2644, "segmentation": {"size": [512, 512], "counts": "Wgm25e?6k@G]>?]AH\\>>^AH_>n000001O01O00000000010O000000000010O0000000L4K501O0001O00000001OM4O00000001O01O00000001O01O00nN\\Ah0n>0000010O00000000010O00000000010O00000000L5I6J6JRil3"}, "image_id": 540, "id": 9481}, {"iscrowd": 0, "category_id": 1, "bbox": [397.0, 224.0, 66.0, 56.0], "area": 2347, 
"segmentation": {"size": [512, 512], "counts": "`gV63e?8H800000000m@^Oh>b0PAFQ?d00000000001O0001O0000000001O000N200000000013L00000000L400aASOk=n0lAZOU>X100000000010O00000000001O01O0000000001O01O00H8H8H9G8H8IRYh0"}, "image_id": 540, "id": 9482}, {"iscrowd": 0, "category_id": 1, "bbox": [509.0, 224.0, 3.0, 10.0], "area": 18, "segmentation": {"size": [512, 512], "counts": "Wgn72j?OW@4e?5PI"}, "image_id": 540, "id": 9483}, {"iscrowd": 0, "category_id": 1, "bbox": [476.0, 229.0, 36.0, 78.0], "area": 1558, "segmentation": {"size": [512, 512], "counts": "PX^73k?2N2M4M2N3L3QA_O]>d0`A^O^>d0`A^O]>e0`A_O]>d0`A^O^>d0`A_O]>S1010O00010O010O010O00010O010O010ORB`N\\=`1bBcN]=^1_BeNb=Z1\\BhNd=Y1YBjNg=U1VBnNj=e10OaH"}, "image_id": 540, "id": 9484}, {"iscrowd": 0, "category_id": 1, "bbox": [38.0, 232.0, 81.0, 58.0], "area": 2588, "segmentation": {"size": [512, 512], "counts": "YXc02j?5J5L4K5K6K4K5O11O01O000001O01O0000010O000000010O00POXAh0Q?O01O000001O01O0001O0001O0001O01O000SAUOf>j0UA\\Oe>R1O11O01O0001O0001O0001OO2O0001O0001O01O000K5L4K6K4K5O110O0000000L5K4K[XT6"}, "image_id": 540, "id": 9485}, {"iscrowd": 0, "category_id": 1, "bbox": [288.0, 246.0, 49.0, 51.0], "area": 1903, "segmentation": {"size": [512, 512], "counts": "RX`44l?:Ed0c004L:F;DbWg2"}, "image_id": 540, "id": 9486}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 248.0, 9.0, 10.0], "area": 64, "segmentation": {"size": [512, 512], "counts": "h78h?10O010O001O0N2MXXk7"}, "image_id": 540, "id": 9487}, {"iscrowd": 0, "category_id": 1, "bbox": [274.0, 254.0, 10.0, 12.0], "area": 108, "segmentation": {"size": [512, 512], "counts": "nWY4k0aARO`>m0bARO]>n0fAoNZ>R1:O010O3M3N2M2O200O0O2M3N1N3M3H\\@0f?N[@0g?N\\@0QVh3"}, "image_id": 540, "id": 9490}, {"iscrowd": 0, "category_id": 1, "bbox": [389.0, 284.0, 72.0, 72.0], "area": 2519, "segmentation": {"size": [512, 512], "counts": "gjR61l?3N3M2M4M2N2M4M2N3L3N3M2M3N3M2M4M2O2O00010OO2M10O3M2M4M2N3L31O010O01O010O01OO2M1OO11100HSB]Nn=V1QBoN2Ho=W1RBQONFS>V1QBTOR>i0RBTOQ>i0SBROP>k0d0N3M2M4M2N3L3N2N3L3NO2O2N2N3O01O010M2N3L3NSWi0"}, "image_id": 540, "id": 9491}, {"iscrowd": 0, "category_id": 1, "bbox": [18.0, 291.0, 59.0, 50.0], "area": 2457, "segmentation": {"size": [512, 512], "counts": "nY9?h>i0000000000000000000000001O00000000000000000O1000000O100\\AkN]>\\100000000001O00001O000000000000000000000000000_Oa0M300000000VOeWi6"}, "image_id": 540, "id": 9492}, {"iscrowd": 0, "category_id": 1, "bbox": [305.0, 302.0, 51.0, 54.0], "area": 2093, "segmentation": {"size": [512, 512], "counts": "cjh4O1O1[OSOUBm0j=^OlAb0T>e0000000000O1000O100000000000O100000O10000000O1000000JgAeNY>[15100000O1000009G8H4L00O10O8I8H_e]2"}, "image_id": 540, "id": 9493}, {"iscrowd": 0, "category_id": 1, "bbox": [451.0, 308.0, 61.0, 80.0], "area": 2464, "segmentation": {"size": [512, 512], "counts": "ajQ71m?3N1N3M2N3M2N3M2O2M2N1O0PBXOmX1O10O01M2N3M2O2M2N3M2OO3M2O2M3M2N3M2EPBdNS>Z1oAcNS>[1PBcNR>Z1PBdNR>Z1XO[Aj0c>XO[Aj0c>XO[Ai0d>901O01O00000000003N1N2N2N20O1N2N2N3N1DhNmAX1S>jNkAW1T>kNjAU1V>mNhAS1X>:1O00000001O00LiA`NX>_1501N3M2N2N2N2N2N2N3N1N2N2O100O1O2M2N2N2N2N2N2N3N1N2Nad\\3"}, "image_id": 540, "id": 9496}, {"iscrowd": 0, "category_id": 1, "bbox": [15.0, 352.0, 81.0, 67.0], "area": 2775, "segmentation": {"size": [512, 512], "counts": "Ul71n?2N2N2N2O2M2N2N2N2O2N1J]Om@d0Q?^On@c0Q?7N3M2N2N2N2O1N3M2N2N2N2O0O001O000001O0001O0001O1O1O10O01O000000100O1O1O1O2N1O102M2N2N2N2N3N1N1O001O02N0001O3M2N2O1N2N3M2N2N2O1N3M2N2N[d_6"}, "image_id": 540, "id": 9497}, {"iscrowd": 0, "category_id": 1, "bbox": [498.0, 385.0, 14.0, 22.0], "area": 190, "segmentation": {"size": 
[512, 512], "counts": "\\\\i71n?1N3M3M2O2\\@E_?`0N10100O010O0O2mC"}, "image_id": 540, "id": 9498}, {"iscrowd": 0, "category_id": 1, "bbox": [83.0, 398.0, 64.0, 66.0], "area": 2007, "segmentation": {"size": [512, 512], "counts": "hmY13m?1N2N2N3M2N2N2O1N3M2N2N2N3M2O1N2N3M1O00001O0HiNiAV1W>lNgAT1Y>oNdAQ1\\>QObAP1]>91O000001O01O00000001O01O002N2O1O2O000O1O1N3M2POUAh0m>VOUAh0S?O1N3M2N2N2O1N3M2N2N2N3N`Rf5"}, "image_id": 540, "id": 9499}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 409.0, 21.0, 62.0], "area": 673, "segmentation": {"size": [512, 512], "counts": "j01O2N2N2XOiADY>:iADZ>9hAFY>8iAFY>8iAFY>8iAFY>9iADY>:iADZ>9hAFY>8iAFY>8iAFY>8i0N3N1N2NcRe7"}, "image_id": 540, "id": 9500}, {"iscrowd": 0, "category_id": 1, "bbox": [49.0, 439.0, 26.0, 24.0], "area": 336, "segmentation": {"size": [512, 512], "counts": "Qnh01n?3M2N2N2O1N2N3M2N2N1O11N2N2O11O0000O2M2N2O1N2N2N3MjQj6"}, "image_id": 540, "id": 9501}, {"iscrowd": 0, "category_id": 1, "bbox": [353.0, 440.0, 65.0, 72.0], "area": 2518, "segmentation": {"size": [512, 512], "counts": "ao`51m?2N3M2N3N1N3DBRA`0l>BRAa0k>BRA`01[O_>0cA7Ka01ZO0OZ>1hAZ1V>iNhAX1V>:N2O1N2N2001O00N2N2N2N200O1000N2N101N3M3M2N3M2O2M2N3M2N3M2N3N1N3M201O013L100O010O001M2N3M2N3M2N3M2N3M2Nea^1"}, "image_id": 540, "id": 9502}, {"iscrowd": 0, "category_id": 1, "bbox": [109.0, 444.0, 67.0, 55.0], "area": 1743, "segmentation": {"size": [512, 512], "counts": "Rof11n?2N2N2N2O2M2N2N2N2N2O2M2N2N2N1O01O000000010O0000001O3N0O02N2N3N1N2N1O000010O00000000010O02N0000000010O2N3M2N2N2O1N3M2N2N2N2Cb@6`?Gb@7e?N3M2N]aW5"}, "image_id": 540, "id": 9503}, {"iscrowd": 0, "category_id": 1, "bbox": [418.0, 464.0, 69.0, 48.0], "area": 1906, "segmentation": {"size": [512, 512], "counts": "i_a61n?1O2M3N2N1O2M3N2N1O1N2A]O`Ad0_>^O_Ac0`>_O^Ab0`>@^Ab0a>@]Aa0b>A\\A`0b>`0O1O1O1O11O001O1O1O1O001O1O1O001O1O1O1O001O1O1O001O1O1O000O0O0100000O01001N3N1O2N2M3N2N1N3N2N2M2O2N2NnP<"}, "image_id": 540, "id": 9504}, {"iscrowd": 0, "category_id": 1, "bbox": [43.0, 466.0, 56.0, 46.0], "area": 1476, "segmentation": {"size": [512, 512], "counts": "Woe02m?2N2N2N2N2N2N3HAj@a0T?Aj@a0T?7N2N2O1N2N2N2N2000001O0N11O1O1O1O1O1O1O1O1O1O1O2N1O1O1O1O1O1O00O1O11O1O1O1O1O1O2N1O1La@E`?1a@31K_?0b@3i?NYP^6"}, "image_id": 540, "id": 9505}, {"iscrowd": 0, "category_id": 1, "bbox": [269.0, 475.0, 75.0, 37.0], "area": 1509, "segmentation": {"size": [512, 512], "counts": "ooV41n?100O1O1O1ILa@5^?N`@2_?0_@1`?8O100O1O1O1O1O1O1001O2N00O1O100O1O1O1O1O1O100O1O1O1O1001O1O1O1O1O2N1O00O00000001O01O00001O3N1N1O001O0002N2N2N2O2M2N2N2N2N2O2M2N2N2Nf`c2"}, "image_id": 540, "id": 9506}, {"iscrowd": 0, "category_id": 1, "bbox": [15.0, 501.0, 31.0, 11.0], "area": 178, "segmentation": {"size": [512, 512], "counts": "oo71n?1001OO1N2O1N2O1N2O11O001O1O001OO1O11O001O1O001O1O001O1O001OQ`X7"}, "image_id": 540, "id": 9507}, {"iscrowd": 0, "category_id": 1, "bbox": [283.0, 0.0, 229.0, 163.0], "area": 19560, "segmentation": {"size": [512, 512], "counts": "P`]41o?1O001O1O1O001O1O1O1O001O1O1O001O1O1O001O1O1O001O1O1O001O1O1O1O001O1O1O001O1O1O001O1O1O001O1O1O001O1O1O1O001O1O1O001O1O1O001O1O1O001O1XB_No5QB3KCZ>4QB8E_O_>4QB=k>O1K5K6JikX4"}, "image_id": 543, "id": 9509}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 174.0, 472.0, 269.0], "area": 7541, "segmentation": {"size": [512, 512], "counts": 
"^5S1P?O000000000001O0001O000000000001O01O00000000000O1N2O2N1N2O1O1N2O1O1N3N1OO010O0010O010O010O00010O010O00010O010O00010O010O0010O0010O010O00010O010O00010O010O0010O0010O010O00010O010O0010O0010O0010O0010O010O00010O010O0010O0010O0010O0010O010O00010O010O0010O0010O0010O0010O010O00010O010O0010O0010O0010O0010O010O00010O010O0010O0010O0010O0010O0010O0010O010O0010O0010O0010O0010O0010O0010O010O0010O0010O0010O0010O0010O0010O010O0010O0010O0010O0010O0010O0010O010O0010O0010O0010O0010O0010O0010O010O0010O0010O0010O0010O0010O010O0010O0010O0010O0010O0010O0010O010O0010O0010O0010O0010O0010O0010O010O00010O010O0010O0010O0010O0010O010O00010O010O0010O0010O0010O0010O010O00010O010O0010O0010O0010O0010O010O00010O010O0010O0010O0010O0010O010O00010O010O0010O0010O0010O0010O010O00010O010O0010O0010O0010O0010O010O00010O01B_@]?0010O00010O010O00010O01O01O010O01O01O01O010O01O01O010O01O01O010O00010O010O00010O0010O0010O0010O00010O010O00010O010O00010O01O01O010O01M2N2M4M2M4N100010O010O00010O010O00010O010O00010O010O00010O010O00010O010O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O010O00010O010O00010O010O00010O010O00010O010O00010O010O00010O010O00010O010O00010O01O0M3N3L3N3L3N2M4M2N3Loeg2"}, "image_id": 543, "id": 9511}, {"iscrowd": 0, "category_id": 1, "bbox": [452.0, 319.0, 20.0, 16.0], "area": 308, "segmentation": {"size": [512, 512], "counts": "oYR78h?8H0000000000000000000000000000O100000Qfc0"}, "image_id": 543, "id": 9512}, {"iscrowd": 0, "category_id": 1, "bbox": [402.0, 338.0, 29.0, 36.0], "area": 614, "segmentation": {"size": [512, 512], "counts": "\\[Y64j?2M3N3L3N3M2M3N3L3N3M21O010O01O01O0O2L3N3L3N2M4M2M4M2M4MXUX1"}, "image_id": 543, "id": 9513}, {"iscrowd": 0, "category_id": 1, "bbox": [447.0, 344.0, 55.0, 67.0], "area": 1843, "segmentation": {"size": [512, 512], "counts": "jko61m?2M4M2CHo@GQA;m>GQAGPA=N3M201O0010O0010O010O010O00010O010O0iAiNf=W1VBmNi=S1UBoNk=Q1SBQOn=o0nAUOQ>k0mAWOT>[1O01O0M3N3M2M4M2N3L3N2N3L3N3M2M4M2N2M4M2N3L3Nhd4"}, "image_id": 543, "id": 9514}, {"iscrowd": 0, "category_id": 1, "bbox": [361.0, 355.0, 19.0, 17.0], "area": 192, "segmentation": {"size": [512, 512], "counts": "\\kd52k?3N3M2N3O0010O00010O010O010N1N3L3N3MidQ2"}, "image_id": 543, "id": 9515}, {"iscrowd": 0, "category_id": 1, "bbox": [420.0, 385.0, 44.0, 26.0], "area": 420, "segmentation": {"size": [512, 512], "counts": "Y\\b61m?2N3L3O2O01O010O010O010O01O01O010O010O010O01O01O010O010O010O01O01O010O010O010O0O1N3M2Macg0"}, "image_id": 543, "id": 9516}, {"iscrowd": 0, "category_id": 1, "bbox": [499.0, 394.0, 13.0, 18.0], "area": 153, "segmentation": {"size": [512, 512], "counts": "fli71m?2M4M2M4M20010O00010O010bC"}, "image_id": 543, "id": 9517}, {"iscrowd": 0, "category_id": 1, "bbox": [35.0, 476.0, 24.0, 24.0], "area": 391, "segmentation": {"size": [512, 512], "counts": "[oa04i?3L4M3L5N10001O01O01O01O0001O01O01O01L3L4M3L5LRQR7"}, "image_id": 543, "id": 9518}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 492.0, 26.0, 20.0], "area": 379, "segmentation": {"size": [512, 512], "counts": "]?c0\\?1001O001O00001O001O00001O001O00001O001O000O2L3N3L]`b7"}, "image_id": 543, "id": 9519}, {"iscrowd": 0, "category_id": 1, "bbox": [70.0, 494.0, 34.0, 18.0], "area": 317, "segmentation": {"size": [512, 512], "counts": "o_S11m?2N2M3N2N200001O00M3N2N21O001O001O00001O001G`@0`?Nb@2f?01O001O001O00001O00Q`[6"}, "image_id": 543, "id": 9520}, {"iscrowd": 0, "category_id": 1, "bbox": [71.0, 0.0, 65.0, 102.0], "area": 3102, "segmentation": {"size": [512, 512], "counts": 
"lbS11d?0c@3[?0b@3Z?0c@3[?:M40O01GYOWAg0f>]OWAe0g>]OVAg0f>;N3L3O101O010M2M3N3L3EWN]Bl1`=VN]Bm1`=;N2OO2M4MIjMgBT2Y=PNcBQ2]=8O40O0N1O0O100O3I^BnMd=o18N2M3N2M3N2M3N2M3N2M3N2O10000N2M3N2ERAAQ?=RA@Q?<;N3L3M3NPPl5"}, "image_id": 545, "id": 9521}, {"iscrowd": 0, "category_id": 1, "bbox": [239.0, 0.0, 273.0, 231.0], "area": 37322, "segmentation": {"size": [512, 512], "counts": "Wdg31m?2M3N3L3N3L3N2M4M2M4M2M3N3M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N2N3L301O01O01O010O010O00010O010O00010O010O00010O010O00010M2N3L3N2M4M2M4M2M3N3L3N3TElKg9W4WFkKg9W4VFmKf9W4WFkKg9W4VFmKg9V4VFlKg9W4VFmKg9U4XFlKe9X4ZFhKd9Z4YFjKc9Z4ZFhKd9Z4[FhKa9[4_FeK_9]4_FeKa9Y4^FkKa9R4`FPL`9n3_FVL`9g3aF[L_9c3`F`L`9^3`FeL_9X3aFkL_9S3aFPM^9m2bFVM^9h2bFZM^9c2bFaM]9]2cFeM]9X2dFkM[9S2dFPN\\9m1eFUN[9i1dF[N[9b1fF`NZ9^1eFfNZ9W1gFkNY9S1fFPOZ9m0gFVOX9h0gF[OY9b0hFAW9=hFFX98hFJX93hF1W9MiF5W9HiFXOXAh0h>VO[Ai0n>O1O2M2M40O0010O00D"}, "image_id": 545, "id": 9522}, {"iscrowd": 0, "category_id": 1, "bbox": [73.0, 30.0, 12.0, 19.0], "area": 109, "segmentation": {"size": [512, 512], "counts": "]aT13j?3N3M2M3N1N13M2M4M2M4MQ_e6"}, "image_id": 545, "id": 9523}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 82.0, 14.0, 35.0], "area": 220, "segmentation": {"size": [512, 512], "counts": "k2j0V?N2N1N010O012M4M2N3L3N3L3N]mh7"}, "image_id": 545, "id": 9524}, {"iscrowd": 0, "category_id": 1, "bbox": [3.0, 93.0, 34.0, 42.0], "area": 772, "segmentation": {"size": [512, 512], "counts": "Sd11m?2H0]@4`?7N3L3N2N3L3N000O011O3L300010O010O010O0M3N3M2M4M2N3L3N2M4M2N3Ln\\]7"}, "image_id": 545, "id": 9525}, {"iscrowd": 0, "category_id": 1, "bbox": [40.0, 105.0, 34.0, 26.0], "area": 430, "segmentation": {"size": [512, 512], "counts": "cSd02k?4M2N3M2O20O010O010O01O010O010O010O01O010O010O010O010O010N1M4M2N2N\\lj6"}, "image_id": 545, "id": 9526}, {"iscrowd": 0, "category_id": 1, "bbox": [81.0, 116.0, 29.0, 29.0], "area": 479, "segmentation": {"size": [512, 512], "counts": "QdX12l?3L3N2N3M2N3O000e@@W?e0O01O010O01O02N010O01O010ON3M2M3N3M2M4MTlX6"}, "image_id": 545, "id": 9527}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 137.0, 20.0, 30.0], "area": 389, "segmentation": {"size": [512, 512], "counts": "Y4j0W?0O01O01O010O010O01O0N2M4M2N3L3N3M2M3Nake7"}, "image_id": 545, "id": 9528}, {"iscrowd": 0, "category_id": 1, "bbox": [25.0, 137.0, 51.0, 43.0], "area": 1336, "segmentation": {"size": [512, 512], "counts": "[e<2k?3HO_@4]?9N3L3N2M4M2M4L3N3O00O2M2N2M4O0010O01O01O010O010O00010O010O010OROQAk0R?010O0010O010N1N3L3N2N3L3N3M2M4MVki6"}, "image_id": 545, "id": 9529}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 166.0, 5.0, 12.0], "area": 34, "segmentation": {"size": [512, 512], "counts": "V5>[AEf>:XAIg>7VALj>5SAMn>c0O010O00010O010O00010O010O00010O010O00010O010O00010O010O00010OO2L3N2M4M11N_Og@`0U?5O1ODm@G02S?POhAR1X>oNdAT1\\>80000N2M3N2M3M3N2M3O11O001O00001O001O001O00001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O00001O001O001O00001O001O00001O001M2M4M2M3N3M2M4M2M3N3L3N3M2MWQ2"}, "image_id": 545, "id": 9536}, {"iscrowd": 0, "category_id": 1, "bbox": [114.0, 470.0, 105.0, 42.0], "area": 2470, "segmentation": {"size": [512, 512], "counts": "o_i11l?3N2M3N2M3N2N2M3N2M3N2N21O001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O0000M3N2M3N2M3N2N2M3N2M3N2M3O11O001O00001O001O00001O001O00001O001O001O000N3M2M4M2M3N3M2M4M2M3NoPb4"}, "image_id": 545, "id": 9537}, {"iscrowd": 0, "category_id": 1, "bbox": [263.0, 507.0, 15.0, 5.0], "area": 47, "segmentation": {"size": [512, 512], "counts": "moS43k?20000001O001O00001O001O0000Q`d3"}, "image_id": 
545, "id": 9538}, {"iscrowd": 0, "category_id": 1, "bbox": [192.0, 377.0, 13.0, 14.0], "area": 110, "segmentation": {"size": [512, 512], "counts": "P\\P33k?3M2M4O00010O010ON3L3N2MWTi4"}, "image_id": 546, "id": 9539}, {"iscrowd": 0, "category_id": 1, "bbox": [227.0, 394.0, 129.0, 118.0], "area": 10503, "segmentation": {"size": [512, 512], "counts": "ena31l?3M4L3M3M4cA_OV=d0hB^OU=e0hB^OU=f0gB^OV=d0gB_OY=a0dBB\\=?`BE`=:]BIc=7ZBLf=5VBOj=0SB3m=P1010O00FPNdBQ2X=SNhBl1U=WNkBi1R=ZNnBf1o<^NQCa1l8PBES>U?Eh@=V?8N2N200000N2N3M2N2N2N2N2N2N2N2N2N2N2Nbnc1"}, "image_id": 552, "id": 9558}, {"iscrowd": 0, "category_id": 1, "bbox": [93.0, 47.0, 28.0, 30.0], "area": 423, "segmentation": {"size": [512, 512], "counts": "ma^12m?2N2N2N2N2N2N3M2N2N2N2N2N2N02N2N2N2M3N3M2N2N2N2N2N2N2NT^S6"}, "image_id": 552, "id": 9559}, {"iscrowd": 0, "category_id": 1, "bbox": [10.0, 58.0, 23.0, 24.0], "area": 282, "segmentation": {"size": [512, 512], "counts": "SR52m?2N2N2N2N2N2N2N2N2000000000O1N3M2N2N2N2N2N2Ni]_7"}, "image_id": 552, "id": 9560}, {"iscrowd": 0, "category_id": 1, "bbox": [385.0, 63.0, 55.0, 57.0], "area": 1515, "segmentation": {"size": [512, 512], "counts": "cbP61n?2N2N2N2Y@Hc?VO_Al0`>UO^Am0b>SO\\Ao0d>70000O1O100000000000000000N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2F\\@5i?N2NolS1"}, "image_id": 552, "id": 9561}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 66.0, 4.0, 6.0], "area": 15, "segmentation": {"size": [512, 512], "counts": "R26k?O1N2Nlmm7"}, "image_id": 552, "id": 9562}, {"iscrowd": 0, "category_id": 1, "bbox": [60.0, 79.0, 55.0, 52.0], "area": 1459, "segmentation": {"size": [512, 512], "counts": "[Sn01n?2N2N2O1N2N2N2N2N2N2N2k@[Ol>g0RA[Ol>g0RA[Ol>n0N2N2N1O00001O21O0N000001O01O000000000000002N2N2N2N2N2N2N2N2N2N2N2N2N2O1N2N2N2N3Mn\\V6"}, "image_id": 552, "id": 9563}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 84.0, 25.0, 44.0], "area": 622, "segmentation": {"size": [512, 512], "counts": "j2V1i>000000000001O2N2N2N2N2N2N2N2N2N2N2N2N2N3M2N2N2Nk\\c7"}, "image_id": 552, "id": 9564}, {"iscrowd": 0, "category_id": 1, "bbox": [25.0, 84.0, 17.0, 18.0], "area": 147, "segmentation": {"size": [512, 512], "counts": "nb<2m?2N2O1N2N3M000000001O02N2N2N3M2NWmZ7"}, "image_id": 552, "id": 9565}, {"iscrowd": 0, "category_id": 1, "bbox": [308.0, 89.0, 83.0, 84.0], "area": 3081, "segmentation": {"size": [512, 512], "counts": "cSj41n?2N2N2O1N2N2N2N3M2N2N2N2N2N2N2O1ZAROW>P1gAROW>P1gAROW>o0hASOV>n0iATOU>l0kAVOS>j0mAXOQ>h0oAZOo=h0oAZOo=h0PBYOn=_1M2O1N20O1N2N3iNQB:o=DSB[OPBg0P>WOPBk0P>SOPBo0P>oNPBS1Q>jNoAY1P>eNPB]1P>aNPBa1W>11O00000000000O1N2N2N2N2N2N1O0000002O1N2N2N2N2N2N2N3M2N2N2N2N2O1N2N2N2N_[l1"}, "image_id": 552, "id": 9566}, {"iscrowd": 0, "category_id": 1, "bbox": [100.0, 114.0, 64.0, 46.0], "area": 1505, "segmentation": {"size": [512, 512], "counts": "PTb11n?2N2N2N2O1N2N2N2N2N2N2N2N2N3M20000000000000000000000N2N2N2N2N00000001O00002N2O1N2N2N2N2N2N2N001O00000000001O2N2O1N3M2N2N2N2N2NQl]5"}, "image_id": 552, "id": 9567}, {"iscrowd": 0, "category_id": 1, "bbox": [5.0, 124.0, 72.0, 59.0], "area": 1973, "segmentation": {"size": [512, 512], "counts": "Qe22n?1N2N2EKh@7V?Kh@8U?Ji@8U?Ji@8V?:N2N2N2N2N2N2N2N201O00O1N10O000000000001O000000000000101N2N2N2N2N2N3M000000001O01O000000002N2N2N0002N2N2N2O1N2N2N2N2N2N2N2N2N3Mf[i6"}, "image_id": 552, "id": 9568}, {"iscrowd": 0, "category_id": 1, "bbox": [178.0, 159.0, 32.0, 32.0], "area": 529, "segmentation": {"size": [512, 512], "counts": "^Ui21n?2N2N3M2N2N2N2O1N2N2N2N2N3M2N01O001O2N3N1N2N2N2N2N2N2N3M2O1N2Ncjf4"}, "image_id": 552, "id": 9569}, {"iscrowd": 0, "category_id": 1, "bbox": [85.0, 165.0, 
37.0, 36.0], "area": 674, "segmentation": {"size": [512, 512], "counts": "heZ12m?2N2O1N2N3M2N2N2N2O2M2N2N2N1O0001O000001O0001O2N2N3N1N2N2N2N3M2N2O1N2N2N]jR6"}, "image_id": 552, "id": 9570}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 186.0, 11.0, 19.0], "area": 129, "segmentation": {"size": [512, 512], "counts": "l5a0^?0010O2N3N1N3M2O2M2NmYj7"}, "image_id": 552, "id": 9571}, {"iscrowd": 0, "category_id": 1, "bbox": [109.0, 190.0, 63.0, 55.0], "area": 1781, "segmentation": {"size": [512, 512], "counts": "kff12m?2O1N3M2L4N2N2N2O1N2N3M2N2N2N2N2N2O1O20ON2N1O01O0001O001O2N000001O0000002O1N2N3M2N2N2N2N2N2O0O0000002N2N2N2N2O1N3M2N2N2N2N2N^iY5"}, "image_id": 552, "id": 9572}, {"iscrowd": 0, "category_id": 1, "bbox": [6.0, 193.0, 32.0, 33.0], "area": 543, "segmentation": {"size": [512, 512], "counts": "]V31n?3N1N2N3M2O1N3M2N2O2M2N2N3N1N2N02N2O1N3M2N2O2M2N2N3N1N2N3M2O1N]i\\7"}, "image_id": 552, "id": 9573}, {"iscrowd": 0, "category_id": 1, "bbox": [83.0, 199.0, 10.0, 10.0], "area": 53, "segmentation": {"size": [512, 512], "counts": "\\fY11n?2N2N2N2OO002N2N2NgYa6"}, "image_id": 552, "id": 9574}, {"iscrowd": 0, "category_id": 1, "bbox": [178.0, 203.0, 68.0, 65.0], "area": 2147, "segmentation": {"size": [512, 512], "counts": "cWi21n?2N2N2O1N2N2N3M2N2N2N2N2N2N2O1N2N3M2FlNeAV1Y>lNeAV1Z>kNdAW1Z>9N0000000000000000010O00000002N2N2N2N0000010O2N2N2N2N2N2N2N001O2O1N2N2N2N3M2N2N2N2N2N2O1N2NPiT4"}, "image_id": 552, "id": 9575}, {"iscrowd": 0, "category_id": 1, "bbox": [47.0, 205.0, 40.0, 28.0], "area": 456, "segmentation": {"size": [512, 512], "counts": "gfg02m?2N2N2N2N2N2N2000000001O01O0000N2N01O01O00000000000000000000010O001O2N2N2N2N2N_Yd6"}, "image_id": 552, "id": 9576}, {"iscrowd": 0, "category_id": 1, "bbox": [500.0, 222.0, 12.0, 24.0], "area": 155, "segmentation": {"size": [512, 512], "counts": "XWj72m?2N2N2N2N2N2N2O1N3M2N2QI"}, "image_id": 552, "id": 9577}, {"iscrowd": 0, "category_id": 1, "bbox": [119.0, 244.0, 38.0, 36.0], "area": 579, "segmentation": {"size": [512, 512], "counts": "`hk12m?2N2N2N2N001O2N2LBd@?Z?4O00000001O00000002N0000000000000001O2N2O1N2N2N2N2N2N2NTXa5"}, "image_id": 552, "id": 9578}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 253.0, 7.0, 24.0], "area": 96, "segmentation": {"size": [512, 512], "counts": "m7h0[?L4M3M3L4M3MaWl7"}, "image_id": 552, "id": 9579}, {"iscrowd": 0, "category_id": 1, "bbox": [456.0, 253.0, 56.0, 58.0], "area": 1689, "segmentation": {"size": [512, 512], "counts": "_XT72n?1N3M2N2N2N2N2N3M2N2N2N2N2N2VASO^>P1_ARO`>o0^ASO`>o0^ASO`>X1N1O2000000000000000O2M2N2N2N2N2N2O101N10000O010O010M3M2N3M2N3M2N3M2N3M2M4MWG"}, "image_id": 552, "id": 9580}, {"iscrowd": 0, "category_id": 1, "bbox": [218.0, 256.0, 59.0, 54.0], "area": 1589, "segmentation": {"size": [512, 512], "counts": "jX]31n?2N2N2N2N2O1N3M2N2N2N20000N3M2O1N2N2N2N2N2N3M2O1N1O00000001O2N2N1O0000000000002O2M2N2N2N2N2N2N2N3M2N2N2N2N2N2O2M2N2N[We3"}, "image_id": 552, "id": 9581}, {"iscrowd": 0, "category_id": 1, "bbox": [95.0, 258.0, 24.0, 23.0], "area": 305, "segmentation": {"size": [512, 512], "counts": "Xh_12m?2N2X@La?6]@La?;N2N2000000000O0O0000002N2N2N2N2O1N3M2NdWT6"}, "image_id": 552, "id": 9582}, {"iscrowd": 0, "category_id": 1, "bbox": [48.0, 259.0, 47.0, 35.0], "area": 1008, "segmentation": {"size": [512, 512], "counts": "]Xh02m?5L3M3L4M3M3L5L000O01000O010000O01000O010000O01000O010001N10O10O1000OKl@_OW??l@^OU?`07O2N1O1N3N1O2M2O1O^W`6"}, "image_id": 552, "id": 9583}, {"iscrowd": 0, "category_id": 1, "bbox": [268.0, 269.0, 60.0, 56.0], "area": 1588, "segmentation": {"size": [512, 512], "counts": 
"^YV41n?2N2N3M2N4L2N2N2N2O1N3M2N2O11O000000O0O0001O01O00000000000000010O0000000000000000010O0000001O3M2O1000O1N2N2N2N2N3M9G9Hjfk2"}, "image_id": 552, "id": 9584}, {"iscrowd": 0, "category_id": 1, "bbox": [131.0, 271.0, 52.0, 49.0], "area": 1071, "segmentation": {"size": [512, 512], "counts": "`iQ21n?2N2N2O1N2N2N2N2N3M2N2N2N2O1N000000001O00000001O000000000001O000001O0000001O2N2O1N3M2N2N2N2N2N2N2N2O2MSWT5"}, "image_id": 552, "id": 9585}, {"iscrowd": 0, "category_id": 1, "bbox": [134.0, 279.0, 16.0, 16.0], "area": 126, "segmentation": {"size": [512, 512], "counts": "QYS21n?2N2N2N2N2N1O00000002N2N2N2N2NUgd5"}, "image_id": 552, "id": 9586}, {"iscrowd": 0, "category_id": 1, "bbox": [413.0, 282.0, 62.0, 54.0], "area": 1684, "segmentation": {"size": [512, 512], "counts": "hi^62m?2N2N2N2N2N2N2N2N2N2N2N2N2N3N1N2N2N2N2N2N2N00000000000000010O2N20001ON3M2N2E[AXOg>f0\\AXOe>f0]AXOe>f0]AXOe>f00001O00000001O000L4L4001O000000000001O000000J>C50001O00000001O00000000FcV]7"}, "image_id": 552, "id": 9589}, {"iscrowd": 0, "category_id": 1, "bbox": [149.0, 309.0, 25.0, 24.0], "area": 314, "segmentation": {"size": [512, 512], "counts": "oiZ221Oj?5N3M2N200O1N2N2N2N000001O001O2N2N2N2N2N2N2O1N2NSfX5"}, "image_id": 552, "id": 9590}, {"iscrowd": 0, "category_id": 1, "bbox": [216.0, 325.0, 53.0, 58.0], "area": 1681, "segmentation": {"size": [512, 512], "counts": "lZ\\31o?3M3L4M3M3L3N004K4M2NO01000O0103M3L4M1OOHjNhAV1X>nNcAS1]>701000O01000O10O10O10OHbAPO^>o0fAmN[>S1hAjNW>W19O10O10O1_OXAJg>6]AGc>9`ACa>=cA_O\\>a0hA\\O[>a0e0M4L3L4M3MXUi3"}, "image_id": 552, "id": 9591}, {"iscrowd": 0, "category_id": 1, "bbox": [372.0, 338.0, 16.0, 22.0], "area": 169, "segmentation": {"size": [512, 512], "counts": "S[j52l?3M2N3M2N3M2N1O01O3M2N3M2N3M2N]em1"}, "image_id": 552, "id": 9592}, {"iscrowd": 0, "category_id": 1, "bbox": [80.0, 340.0, 46.0, 56.0], "area": 1379, "segmentation": {"size": [512, 512], "counts": "\\[X11m?2O2M2N3M2N3M2N3N1N3M2O200O010O010O0O2M2N3N101_AmNT>R1jAPOV>Q1gAROY>n0dATOZ>n0dAUO\\>W110O010O010O10O001N1hNdA6N4a>CcA7N4a>CdA6N4a>DcA5O4l>KUA3n>JUA4l>JVA4]?MXdP6"}, "image_id": 552, "id": 9593}, {"iscrowd": 0, "category_id": 1, "bbox": [142.0, 351.0, 23.0, 27.0], "area": 331, "segmentation": {"size": [512, 512], "counts": "U[W21o?2M3N2M3N2M3N2M3N2M3NO00102M3N2M3N2M3N2M3N2M`T]5"}, "image_id": 552, "id": 9594}, {"iscrowd": 0, "category_id": 1, "bbox": [27.0, 352.0, 26.0, 26.0], "area": 451, "segmentation": {"size": [512, 512], "counts": "`k=4i?3M4K4M3M4O01O01O0001O01O01O01O01O0001O01K4M3L5L3LnTU7"}, "image_id": 552, "id": 9595}, {"iscrowd": 0, "category_id": 1, "bbox": [64.0, 353.0, 10.0, 10.0], "area": 65, "segmentation": {"size": [512, 512], "counts": "X[P11l?3L5O0001O00010M2LQej6"}, "image_id": 552, "id": 9596}, {"iscrowd": 0, "category_id": 1, "bbox": [390.0, 353.0, 68.0, 63.0], "area": 1947, "segmentation": {"size": [512, 512], "counts": "R\\S63k?2N3M2N3M3M2010O010O010O010O01O0N3M3M2N3M2N3N1N3M2N3M2N3M2010O010O0N3N1N3M2NaAiNZ>T1gAnNX>Q1gAQOZ>n0dAUO[>l0bAVO_>V10XO`AN`>0cA0]>MeA4Z>JiA5X>HkA8T>FnA:S>DoA
    BRB>o=_OTBa0k=]OWBc0d>0O010O010OO2M3M2N3N1N3M2N3Mccj0"}, "image_id": 552, "id": 9597}, {"iscrowd": 0, "category_id": 1, "bbox": [125.0, 360.0, 13.0, 15.0], "area": 101, "segmentation": {"size": [512, 512], "counts": "akn12m?2M3N1N3N2M11O2M3N1N3N2Mfdj5"}, "image_id": 552, "id": 9598}, {"iscrowd": 0, "category_id": 1, "bbox": [453.0, 364.0, 27.0, 22.0], "area": 264, "segmentation": {"size": [512, 512], "counts": "ckR71m?6K1N3M3OO2N110O01000O01000O01000O010O10O10M2O2M3M2OXd?"}, "image_id": 552, "id": 9599}, {"iscrowd": 0, "category_id": 1, "bbox": [93.0, 384.0, 9.0, 8.0], "area": 44, "segmentation": {"size": [512, 512], "counts": "Sl^12l?3N20O010O001M2Onc\\6"}, "image_id": 552, "id": 9600}, {"iscrowd": 0, "category_id": 1, "bbox": [76.0, 386.0, 16.0, 14.0], "area": 122, "segmentation": {"size": [512, 512], "counts": "X\\V12l?3N1N3N110O01000O010O01N1N3N1Njca6"}, "image_id": 552, "id": 9601}, {"iscrowd": 0, "category_id": 1, "bbox": [138.0, 387.0, 58.0, 49.0], "area": 1391, "segmentation": {"size": [512, 512], "counts": "i\\U21n?2M2N3M3O010O0100O0100O010O0O2M3M2N3N1N3M2O2M3M2N3N101O010O10O10O010O010O10O10O010O10O10OO2N1N5K2YOo@>S?_OPA>S?@o@=[?N3N1N3M2N3NScm4"}, "image_id": 552, "id": 9602}, {"iscrowd": 0, "category_id": 1, "bbox": [212.0, 391.0, 32.0, 33.0], "area": 501, "segmentation": {"size": [512, 512], "counts": "`\\Z32m?2N3M2N2N2N2N2N2O1N2O100000001O00000001O0N2N2N2N2N2O1N2N2N2N2NUcU4"}, "image_id": 552, "id": 9603}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 393.0, 31.0, 24.0], "area": 420, "segmentation": {"size": [512, 512], "counts": "Y<`0a?O00010O000010O00010O000010O00010O003M01O01O01O00010O00L5L3M^S`7"}, "image_id": 552, "id": 9604}, {"iscrowd": 0, "category_id": 1, "bbox": [121.0, 393.0, 11.0, 10.0], "area": 64, "segmentation": {"size": [512, 512], "counts": "]ll12l?2O2N1010O10O010OO2Mecm5"}, "image_id": 552, "id": 9605}, {"iscrowd": 0, "category_id": 1, "bbox": [96.0, 398.0, 29.0, 26.0], "area": 419, "segmentation": {"size": [512, 512], "counts": "j\\`13l?1N3M2N3N1N3M201O10O10O010O010O10O10O010O01N1N3M2N3M3N1NYSQ6"}, "image_id": 552, "id": 9606}, {"iscrowd": 0, "category_id": 1, "bbox": [459.0, 402.0, 53.0, 74.0], "area": 2251, "segmentation": {"size": [512, 512], "counts": "RmU72l?3M2N3M2O200PAC[>=SAE<07Oe=?RBE6Nh=>oAF7Oj=:mAJ7Ml=:jAK8Nn=6iAN6NQ>e0mA^OP>e0mA]OS>c0kA@T>V110O10O010O01M2O2M2N3M2000N3M2N00003M2N3M2IiAcNZ>Z1hAdNZ>Z18M2N3M2O2M2N3M2N3M2N3M2N3MVC"}, "image_id": 552, "id": 9607}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 417.0, 33.0, 48.0], "area": 854, "segmentation": {"size": [512, 512], "counts": "h=d0Z?3M201O0N3M2N3M2N3M1O01O00002N2N3N1N3M2O20OO2M2N3M2N3N1N3M3M2N3MhR_7"}, "image_id": 552, "id": 9608}, {"iscrowd": 0, "category_id": 1, "bbox": [350.0, 421.0, 48.0, 58.0], "area": 1708, "segmentation": {"size": [512, 512], "counts": "`^_52k?3N2M4M2e@Do>`0n@BP?`0m@Do>h0N3O00010O0N1OO0101O3L3N3L3N2M4O010O0010O0010OM4M2M4M2M3O2O010O0010O010OWOVABYAKM:m>GYAMM9l>HbA3b>JbA2PQi1"}, "image_id": 552, "id": 9609}, {"iscrowd": 0, "category_id": 1, "bbox": [334.0, 426.0, 26.0, 26.0], "area": 400, "segmentation": {"size": [512, 512], "counts": "j]W52l?3M2N2M4M2N3L301O010O01O010O01O010OO2M2N3L3N2N3M2Mbb[2"}, "image_id": 552, "id": 9610}, {"iscrowd": 0, "category_id": 1, "bbox": [404.0, 427.0, 77.0, 85.0], "area": 2828, "segmentation": {"size": [512, 512], "counts": "e^Z61m?2VOOcA4Z>NeA4X>OeA3Z>NdA5Y>NeA34Da=0O010O0100nNXAk0g>SO[Am0f>PO]AP1i>01000O010O001M3N101O010O10O10O010O0N2O1N2O11N1O2M2N3N2M2N3N1N3M3N1N3M2O2M3M2O2M2N3NRQ?"}, "image_id": 552, "id": 9611}, {"iscrowd": 0, 
"category_id": 1, "bbox": [116.0, 437.0, 54.0, 47.0], "area": 1342, "segmentation": {"size": [512, 512], "counts": "[^j13k?2N3N1N3M2N3O1O010O010O010O10O10O0N3M2N3N1N3M3M2N3N1010O010O10O10O010O010O10O10O0N3N1N3M2N3Fn@BU?TOWAm0h>UOWAj0i>6001O01O00000001O1O1O1O100O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1OQP]5"}, "image_id": 554, "id": 9620}, {"iscrowd": 0, "category_id": 1, "bbox": [308.0, 0.0, 36.0, 18.0], "area": 338, "segmentation": {"size": [512, 512], "counts": "PPj41o?1O1O1O1O1O1O2N1O1O1O1O1O1O1O1O1OO1O100O1O1O1O1O1O1O1O1O1O100O1O1O1O1OQPd2"}, "image_id": 554, "id": 9621}, {"iscrowd": 0, "category_id": 1, "bbox": [352.0, 0.0, 62.0, 42.0], "area": 1170, "segmentation": {"size": [512, 512], "counts": "fP`53m?1N2N2N2N3M2O1N2N2N3M2N2O1N2000N000001O0001O0000000000010O00001O1O1O1O1O100O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O11O2N01N2N3M2Oko`1"}, "image_id": 554, "id": 9622}, {"iscrowd": 0, "category_id": 1, "bbox": [448.0, 0.0, 22.0, 8.0], "area": 102, "segmentation": {"size": [512, 512], "counts": "PPP71o?001O001O00001O001O00001O001O001O0000O1N2MSPe0"}, "image_id": 554, "id": 9623}, {"iscrowd": 0, "category_id": 1, "bbox": [35.0, 9.0, 9.0, 8.0], "area": 43, "segmentation": {"size": [512, 512], "counts": "[`a02m?2N2O1O10O1N2N2NdoY7"}, "image_id": 554, "id": 9624}, {"iscrowd": 0, "category_id": 1, "bbox": [165.0, 12.0, 56.0, 54.0], "area": 1401, "segmentation": {"size": [512, 512], "counts": "]ab21o?1N2N2N2N2N2N2N2N2N2N2N2O1N2N2N2N3M2N2N00001O01O000000000000000000000010O01O2N3M2N2N2N2N2N2N3M2N2N2N2O1N2N3M2NQ_a4"}, "image_id": 554, "id": 9625}, {"iscrowd": 0, "category_id": 1, "bbox": [495.0, 13.0, 17.0, 33.0], "area": 288, "segmentation": {"size": [512, 512], "counts": "l`g71n?2N2N2N2N2N2N2O1N3M2N2N2N2N2N2N2B"}, "image_id": 554, "id": 9626}, {"iscrowd": 0, "category_id": 1, "bbox": [8.0, 20.0, 57.0, 55.0], "area": 1526, "segmentation": {"size": [512, 512], "counts": "UQ43l?2N2N2N2N2O1N2N2N2N2N3M2N2N2O1N2N2N2N21O01O0000000000O2O0O1O100O1O100O2N100O1O100O00010O0001O0001N1M3L4M4K4M3M3Ld^o6"}, "image_id": 554, "id": 9627}, {"iscrowd": 0, "category_id": 1, "bbox": [329.0, 33.0, 21.0, 21.0], "area": 223, "segmentation": {"size": [512, 512], "counts": "]aT52m?2N2N2N2N2N2N2N0000000001O1O2N2N2N2N2N2Njn`2"}, "image_id": 554, "id": 9628}, {"iscrowd": 0, "category_id": 1, "bbox": [379.0, 33.0, 53.0, 60.0], "area": 1685, "segmentation": {"size": [512, 512], "counts": "nam52m?2N2N2_@JU?8i@JU?8i@JU?8i@JU?b0N3M2N2DPOeAR1Y>POfAQ1Y>POeAR1Y>POeAR1Y>POeAR1Y>dNiA\\1W>fNhAY1X>71O00000001O00000000011N2N2N2N2N2N1O001O01O002N2N2N2N2N3\\OgA\\O[>b0gA\\O[>b0gA]OZ>a0hA]OZ>a0hA]OZ>a0hA]OZ>a0iA\\OY>c0hA[OZ>c0d0M2N2N2N2N2N2N2N2OZmi3"}, "image_id": 554, "id": 9632}, {"iscrowd": 0, "category_id": 1, "bbox": [96.0, 67.0, 32.0, 33.0], "area": 470, "segmentation": {"size": [512, 512], "counts": "]R`11n?2N2N2N2N2N2N3M2N2O1N20000001O0000000001ON2N2N2N2N2N3N1N2N2N2NXmo5"}, "image_id": 554, "id": 9633}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 71.0, 3.0, 6.0], "area": 12, "segmentation": {"size": [512, 512], "counts": "W26k?N2Nh]n7"}, "image_id": 554, "id": 9634}, {"iscrowd": 0, "category_id": 1, "bbox": [133.0, 73.0, 30.0, 30.0], "area": 454, "segmentation": {"size": [512, 512], "counts": "gbR21o?1N2N2N2N2N2N3M2N2N2N2N2O1N2N0003N1N2N2N2N2N2N2N2N2N2N3M2OX]^5"}, "image_id": 554, "id": 9635}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 83.0, 36.0, 50.0], "area": 996, "segmentation": {"size": [512, 512], "counts": "g2n0Q?2N2N2N2O10000000001O0000000000O1N2N2N2N3M2N2N2N2N2N2N2N2N2O1N2N2N2N2N_l]7"}, "image_id": 
554, "id": 9636}, {"iscrowd": 0, "category_id": 1, "bbox": [429.0, 89.0, 54.0, 64.0], "area": 1837, "segmentation": {"size": [512, 512], "counts": "_cf62m?2N2\\@LY?6e@LY?6e@LY?6e@LY??N2N2RAWOc>j0[AYOb>j0[AXOc>j0[AXOc>T1N2N2N2N2O2M2N2N00001O0002N2N2N2N2N2N2O2M2N2N2N2N2N2N2O2M2N2N2N2N2N2N3M2O1N2N2N2N2NX\\>"}, "image_id": 554, "id": 9637}, {"iscrowd": 0, "category_id": 1, "bbox": [249.0, 102.0, 83.0, 84.0], "area": 3031, "segmentation": {"size": [512, 512], "counts": "Udl32m?2N2N3M2N2N2N2N2O1N2N2N3M2N2N2N2N2O1N2N2N3M2N2N2N2N2N2O1N2N3M2N2N2N2O1000001OZNZBQ1f=mN\\BS1d=kN^BU1c=hN_BX1a=gN`BY1`=eNbB[1^=cNdB]1\\=aNgB^1Y=`NiB_1X=_NjB_1m=N2N2N2N2N3M2O1N2N2N00001O00000001O00000003M2N2N2N2O1N2N2N3M2N2N2N2NXki2"}, "image_id": 554, "id": 9638}, {"iscrowd": 0, "category_id": 1, "bbox": [62.0, 108.0, 55.0, 53.0], "area": 1345, "segmentation": {"size": [512, 512], "counts": "\\To01n?2N2N2N2O1N3M2N2N2N2N2N2O2M2N2N2N2N2N000001O0000000001O000001O00000001O01O3M2N2N2N2N2N2O1N3M2N2N2N2N2N2N2O2MQ\\U6"}, "image_id": 554, "id": 9639}, {"iscrowd": 0, "category_id": 1, "bbox": [460.0, 135.0, 52.0, 55.0], "area": 1618, "segmentation": {"size": [512, 512], "counts": "UUV71n?2N2N2N2N2N2N2N2N2N3N1N2N2N2N2N2N2N2N0000002N2N2N2O1N2N2N2N01O2N1O00010O002N2N2N2N2N2ROWAb0k>\\OWAb0k>\\OWAb0k>\\OXAa0U?N2N2N2O1NQK"}, "image_id": 554, "id": 9640}, {"iscrowd": 0, "category_id": 1, "bbox": [213.0, 144.0, 57.0, 45.0], "area": 1216, "segmentation": {"size": [512, 512], "counts": "[eZ31n?2N2N2N2N2N2N3M2N2N2O1N2N2N2N2N2N10O2N2N2N1O000001O0000000002N0003M2N2N1O0002N2N2N2N2N2N2N2N2O10000N3M2N2N2N2N2Nnjh3"}, "image_id": 554, "id": 9641}, {"iscrowd": 0, "category_id": 1, "bbox": [5.0, 147.0, 32.0, 32.0], "area": 531, "segmentation": {"size": [512, 512], "counts": "Se22m?2N2N3M2O1N2N2N2N2N2N2N2N2N1O000001O2N3M2N2N2N2N2N2N2O1N2N2N2NP[]7"}, "image_id": 554, "id": 9642}, {"iscrowd": 0, "category_id": 1, "bbox": [59.0, 170.0, 10.0, 11.0], "area": 56, "segmentation": {"size": [512, 512], "counts": "]em02m?2N2N2O110ON2N2O1NaZm6"}, "image_id": 554, "id": 9643}, {"iscrowd": 0, "category_id": 1, "bbox": [352.0, 173.0, 9.0, 9.0], "area": 54, "segmentation": {"size": [512, 512], "counts": "bU`51m?3M2O2O000010M2NdZ[2"}, "image_id": 554, "id": 9644}, {"iscrowd": 0, "category_id": 1, "bbox": [265.0, 176.0, 60.0, 95.0], "area": 2208, "segmentation": {"size": [512, 512], "counts": "leT42m?2N2N2N2N3M2hABk<`0SCCjSCDk<>SCDk<>SCDk<>SCDm<N2N2N2N2N2N2N3M2N2N2N2N2N2N2N2N2N2N2N2NYXm2"}, "image_id": 554, "id": 9645}, {"iscrowd": 0, "category_id": 1, "bbox": [308.0, 180.0, 55.0, 53.0], "area": 1407, "segmentation": {"size": [512, 512], "counts": "VVj41n?2N3M2O1N2N2N2N2N2N3M2N2O1N2N2N2N2N2N3M2O11O0000000001O0001O0O1O1N2N2N2N2N3M2N0001O02N2N2N2N2N2N3M2O1N2N2N2N^YZ2"}, "image_id": 554, "id": 9646}, {"iscrowd": 0, "category_id": 1, "bbox": [26.0, 197.0, 16.0, 16.0], "area": 130, "segmentation": {"size": [512, 512], "counts": "^V=2m?2N2N2N2N2N1O000002N2N2N2N2N2NfiZ7"}, "image_id": 554, "id": 9647}, {"iscrowd": 0, "category_id": 1, "bbox": [211.0, 200.0, 31.0, 30.0], "area": 497, "segmentation": {"size": [512, 512], "counts": "ffY31n?2N2N2N2N2N3M2N2N2N2N2N2N2N2O00O2N2N2N2N2N2N2O1N2N2N2N2N2N2N[iV4"}, "image_id": 554, "id": 9648}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 206.0, 26.0, 33.0], "area": 457, "segmentation": {"size": [512, 512], "counts": "_6P1Q?N2N2N2O1N2N1O00001O00000001O01O2N2N2N3M2N2N2N2O1NYib7"}, "image_id": 554, "id": 9649}, {"iscrowd": 0, "category_id": 1, "bbox": [483.0, 213.0, 29.0, 62.0], "area": 1023, "segmentation": {"size": [512, 512], "counts": 
"Pha71n?3M2N2N2N2N2]OC`A?^>C`A?^>C`A?^>CaA>]>DaA>]>DaA>]>E^A?`>b0N2O2M000000001O000000000001O\\I"}, "image_id": 554, "id": 9650}, {"iscrowd": 0, "category_id": 1, "bbox": [158.0, 216.0, 53.0, 57.0], "area": 1293, "segmentation": {"size": [512, 512], "counts": "RW_22m?3M2N2N2N2]@D_?`0N2N2N2O1N2j@XOS?k000000000XAUO[>j0cAXO]>i0`AYO`>g0^A[Ob>e0\\A]Od>c0ZA_Of>n00000000O1N2N2N2N3M2N2O1000000000000O1N2N2N2N3N1N2N2N2N2N2N2NmWf4"}, "image_id": 554, "id": 9651}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 218.0, 79.0, 85.0], "area": 2363, "segmentation": {"size": [512, 512], "counts": "a8f0Y?2N2O2M2N2N2N2N000000000000000001O00101N2N00001O00000000000000KSOXAn0g>TOWAl0j>4000000000000000000000001O01O0000000000000000000001O01O2N2N2N2N2N2N2N3M2N2N2O1N2N2N2N2N2NeXh6"}, "image_id": 554, "id": 9652}, {"iscrowd": 0, "category_id": 1, "bbox": [342.0, 218.0, 26.0, 26.0], "area": 324, "segmentation": {"size": [512, 512], "counts": "XW[52n?1N2N2N2N3M2N2N2N2OO0000000000002N2O1N2N2N3M2N2N2NmhW2"}, "image_id": 554, "id": 9653}, {"iscrowd": 0, "category_id": 1, "bbox": [454.0, 230.0, 14.0, 15.0], "area": 107, "segmentation": {"size": [512, 512], "counts": "[WS71n?2N2N2N3M2O100001M2N2N2N2Nche0"}, "image_id": 554, "id": 9654}, {"iscrowd": 0, "category_id": 1, "bbox": [223.0, 245.0, 60.0, 59.0], "area": 1680, "segmentation": {"size": [512, 512], "counts": "[h_31n?2N2N2N3M2O1N2N2N2N2N2N2N3M2N2O1N2N2N2N2N2N3M2N2001O00000001O0000N2N3M2N2O1N2N2N2N2N3M2N2N100O001O2N2N2N2N2N2N3N1N2N2NZWb3"}, "image_id": 554, "id": 9655}, {"iscrowd": 0, "category_id": 1, "bbox": [118.0, 247.0, 16.0, 16.0], "area": 125, "segmentation": {"size": [512, 512], "counts": "mWk11n?2N2N2N2N2N3O01O0O1O1N2N2N2N2NPhl5"}, "image_id": 554, "id": 9656}, {"iscrowd": 0, "category_id": 1, "bbox": [442.0, 260.0, 40.0, 61.0], "area": 1611, "segmentation": {"size": [512, 512], "counts": "fXm61n?2O1N2N2N2N2N2N2N2N2_A_O`=c0kA1T>n0O1O1O1O100O1O1O1010O1O1O1O2N1O1O1O1O1O1O1O1O1O1O1O1O2oNWh>"}, "image_id": 554, "id": 9657}, {"iscrowd": 0, "category_id": 1, "bbox": [98.0, 263.0, 53.0, 63.0], "area": 1244, "segmentation": {"size": [512, 512], "counts": "oYa12m?2O2M2WOJeA8Y>JeA6[>LcA5]>M`A3`>O^A1b>1\\AOd>3ZAMf>5YAJg>8WAHi>:UAFk>=TAAl>a0TA]Ol>e0TAYOl>m01M2001O000000000000000001O0001O00000002N2N2N2N2N2N3M2O1N2N2N2N2N2N2NZWd5"}, "image_id": 554, "id": 9658}, {"iscrowd": 0, "category_id": 1, "bbox": [157.0, 266.0, 30.0, 32.0], "area": 411, "segmentation": {"size": [512, 512], "counts": "bh^21n?2N3M2N2O1N2N2N2N3O0001O00000001O0001O000O1N2O1N2N3M2N2N2NPWR5"}, "image_id": 554, "id": 9659}, {"iscrowd": 0, "category_id": 1, "bbox": [186.0, 279.0, 43.0, 57.0], "area": 1279, "segmentation": {"size": [512, 512], "counts": "bYm22m?2N2N3M2N2N2N2N2N2N2O1N2N2N3M2N2N2N2N2N2N2O1N2N2N3M00000011N2N2UObA1a>LbA1`>MbA1`>MbALIHg>:bALIHg>:bALe>2]ALe>2]ALe>2]ALe>2f0NlV]4"}, "image_id": 554, "id": 9660}, {"iscrowd": 0, "category_id": 1, "bbox": [39.0, 288.0, 57.0, 66.0], "area": 2069, "segmentation": {"size": [512, 512], "counts": "iic01n?2N2O1N2N2^@GZ?;d@GZ?a0N2N2o@ZOe>h0YAZOe>i0XAYOOKe>m0ZA]Od>Q1N3M2N2N2O1001OOO00000010O000002N2N2N1O0001O000000000001O01O01O2N3M2N2[OWA0k>NWA0k>NWA0k>NWA1j>MXA1j>MXA1k>LWA2k>MWA0k>NWA0\\U`6"}, "image_id": 554, "id": 9661}, {"iscrowd": 0, "category_id": 1, "bbox": [496.0, 296.0, 16.0, 28.0], "area": 245, "segmentation": {"size": [512, 512], "counts": "bYh71n?2N3M2N2N2N2N2N2N2O1N200000001bF"}, "image_id": 554, "id": 9662}, {"iscrowd": 0, "category_id": 1, "bbox": [379.0, 304.0, 60.0, 58.0], "area": 1655, "segmentation": {"size": [512, 512], "counts": 
"^jm52m?2N2N2N2N2N2N2N2N3M2N2N2O10000000000N2N2N2N2N3M2IiNbAW1^>kN`AU1`>50001O01O0000000000003M2N2N2N2N2N2O1N2N2N2N2N2N2N3M2N2N2N2N2N2N2OhUT1"}, "image_id": 554, "id": 9663}, {"iscrowd": 0, "category_id": 1, "bbox": [363.0, 309.0, 22.0, 23.0], "area": 253, "segmentation": {"size": [512, 512], "counts": "Rje52m?2N2N2N2N3M2N2N1O0000000002N2N2N2N2N2N3M2NSVo1"}, "image_id": 554, "id": 9664}, {"iscrowd": 0, "category_id": 1, "bbox": [107.0, 313.0, 28.0, 28.0], "area": 419, "segmentation": {"size": [512, 512], "counts": "Uje12m?2N3M2O1N2N2N2N2N2N2N2N2N2OO2O1N2N2N2N2N2N2N2N2N2N3M2NjUl5"}, "image_id": 554, "id": 9665}, {"iscrowd": 0, "category_id": 1, "bbox": [176.0, 328.0, 35.0, 32.0], "area": 613, "segmentation": {"size": [512, 512], "counts": "hZh22m?2N2N2Z@J_?8_@J_?=O1N2N3M00001O000000002N201O00N2N2N2O1N2N2N2N200O1N3M2N3M2NVUf4"}, "image_id": 554, "id": 9666}, {"iscrowd": 0, "category_id": 1, "bbox": [86.0, 349.0, 50.0, 50.0], "area": 1310, "segmentation": {"size": [512, 512], "counts": "h[[12m?2N2N2O1N2N3M2N2I@k@b0S?@k@b0S?7N3M2N2O1N2N2N3M1O01O2N3M01O000000000002O1N2N3M2N2N2N2O1N2N3M2N2N2N2N2O2M2N2N^dk5"}, "image_id": 554, "id": 9667}, {"iscrowd": 0, "category_id": 1, "bbox": [345.0, 349.0, 52.0, 59.0], "area": 1462, "segmentation": {"size": [512, 512], "counts": "Ul\\52m?2N2N2N2N2N2N2N2N3N1N2N2^OXOiAj0U>XOiAj0U>XOiAj0V>WOhAk0V>WOhAj0W>XOgAh0Y>ZOeAf0[>?0001O00000001O2N2N2N2N2N2N3M2N2N2N2O1N2N2N2N2N2N2N2N2N2N3M2N2NZTi1"}, "image_id": 554, "id": 9668}, {"iscrowd": 0, "category_id": 1, "bbox": [427.0, 354.0, 62.0, 57.0], "area": 1678, "segmentation": {"size": [512, 512], "counts": "Ule61o?1N2N2N2N2N2N2N2N2N2J^Ok@d0S?6N2N2N3M2N2N2N2N2N000001O2N2N00000000001O002N2O1N1O00000000000000001O2N2N2N3M2N2O1N2N2N2N2N2N2N2N2N_T;"}, "image_id": 554, "id": 9669}, {"iscrowd": 0, "category_id": 1, "bbox": [124.0, 367.0, 64.0, 72.0], "area": 2050, "segmentation": {"size": [512, 512], "counts": "X]n11n?2N2N2N2UOHkA:S>HjA;T>GjA:T>IjA9T>HkA:S>HkA:S>HkA:S>HjA;T>GjA;S>HkA9T>IjA9T>HkA:U>h01N2N2N2M2O2N2N2NO10O10000000O10O10000000O10O10000000002M3N1O2N2N2N2M3N2N1O2N2N2M3N2N2N1O2N2MSdQ5"}, "image_id": 554, "id": 9670}, {"iscrowd": 0, "category_id": 1, "bbox": [476.0, 378.0, 36.0, 66.0], "area": 1215, "segmentation": {"size": [512, 512], "counts": "^\\^72m?2N3M2N2N2N2N2N2N2N2N3M2N2N2N2O1N2N2N2N2N3N100000000000000cAbNW>_1fAcNZ>b1000cNfAS1Z>lNgAT1Y>jNiAV1o1"}, "image_id": 554, "id": 9671}, {"iscrowd": 0, "category_id": 1, "bbox": [276.0, 402.0, 82.0, 77.0], "area": 2554, "segmentation": {"size": [512, 512], "counts": "W]Z41n?2N2N2O1N3M2N2N2N2N2N3M2N2N2N2N3M2N2N2N2N1O02N2N2O1N2N2N201O0000[ASOV>n0fATO[>l0bAWO^>i0_AZOa>R1100O100O2O0hNZAU1i>O100O10O0O200O0100O010O0100O010O010O010O10O010O0O2M2N3M3M2N3M2N3M2N3M2N3M2N3MRb\\2"}, "image_id": 554, "id": 9672}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 407.0, 23.0, 22.0], "area": 264, "segmentation": {"size": [512, 512], "counts": "Rm]11n?2O1N3M2N2N2N2N2N2N10O00002N2O1N2N2N2N2N2N3MPcV6"}, "image_id": 554, "id": 9673}, {"iscrowd": 0, "category_id": 1, "bbox": [442.0, 414.0, 53.0, 68.0], "area": 1809, "segmentation": {"size": [512, 512], "counts": "e]m62m?2N2N2N2N2N2N2O1N3M2N2O100O1N2N2N2N2N2N2N2YAjNa>\\1N2N20001N1N2N2N2N2N2N200000QOPBMP>1RBOn=OTBOn=OTBOn=OTB0m=NUB0m=NUB0m=NUB0m=NVBOl=OVBOl=OVBOm=NUB0m=NUB0m=NUB0m=NUB0m=OTBOn=O`S8"}, "image_id": 554, "id": 9674}, {"iscrowd": 0, "category_id": 1, "bbox": [376.0, 426.0, 35.0, 35.0], "area": 625, "segmentation": {"size": [512, 512], "counts": "i]l51n?2N2O1N2N2N2N2N3M2N2O1N2N2N2N2N2N2000O1N3M2N2N2N2O1N2N3M2N2N2N2N2O1NTRb1"}, 
"image_id": 554, "id": 9675}, {"iscrowd": 0, "category_id": 1, "bbox": [226.0, 436.0, 23.0, 22.0], "area": 248, "segmentation": {"size": [512, 512], "counts": "P^a31n?2O1N2N2N2N2N3M2N1O01O0000002N2N2N2O1N2N2N3MTRS4"}, "image_id": 554, "id": 9676}, {"iscrowd": 0, "category_id": 1, "bbox": [229.0, 444.0, 58.0, 57.0], "area": 1689, "segmentation": {"size": [512, 512], "counts": "fnb31o?1N2N2N2N2N3M2N2N2N2O1N2N2N2N3M2N2N2N2N2O1N2N2N2N3M2N2N1O01O2O1N2N2N2N2N2N2N3M2N2O1N2N2N2N2N2N3M2N2N2O1N2N2N2N2N2NXQ`3"}, "image_id": 554, "id": 9677}, {"iscrowd": 0, "category_id": 1, "bbox": [404.0, 445.0, 66.0, 67.0], "area": 2066, "segmentation": {"size": [512, 512], "counts": "h^Z61n?2N2N2N2N2N3M2N2N2N2N2O1N2N2N2N2N3M2N2N2\\AgN^>[1aAfN]>`1N2N2O1N2N2N2N3N100000O1O1O1O1ROmAMT>1nANS>1nANT>OnA0S>NPBOR>OPB1P>MRB2o=LSB3n=KTB4m=JUB5l=IVB6k=HWB7j=GXB8i=FYB9i=EXB:i=DZB:e>1O11O1O1O1O1O1O1O1O1O2N1OQ`d0"}, "image_id": 554, "id": 9678}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 468.0, 3.0, 6.0], "area": 12, "segmentation": {"size": [512, 512], "counts": "d>6k?N2N[Qn7"}, "image_id": 554, "id": 9679}, {"iscrowd": 0, "category_id": 1, "bbox": [489.0, 479.0, 23.0, 33.0], "area": 449, "segmentation": {"size": [512, 512], "counts": "Xod72m?2N2N2N2N2N3M2N2N3N1000000000000N2N2O1N2N1O1"}, "image_id": 554, "id": 9680}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 484.0, 37.0, 28.0], "area": 607, "segmentation": {"size": [512, 512], "counts": "^??`?2N2O1N1O1O1O1O1O1O1O11O2N1O1O1O1O1O1O1O1O1O1O1O1O2N1O1O1O1O1O1O1O1O1O1OQP]7"}, "image_id": 554, "id": 9681}, {"iscrowd": 0, "category_id": 1, "bbox": [192.0, 487.0, 45.0, 25.0], "area": 625, "segmentation": {"size": [512, 512], "counts": "j_P33m?1N2N2O0O1O100O1O1O100O1O100O1O1O100O1O100O1O100O1O1O11O2N1O2N1O1O2N1O2N1O2N1O1O2N1O2N1OQPY4"}, "image_id": 554, "id": 9682}, {"iscrowd": 0, "category_id": 1, "bbox": [318.0, 494.0, 34.0, 18.0], "area": 368, "segmentation": {"size": [512, 512], "counts": "h_o41n?2N2N2N3M2N2N1O1O100O1001O1O1O1O1O1O1O1O1OO1O000000001O02N2N2N2N2O[`_2"}, "image_id": 554, "id": 9683}, {"iscrowd": 0, "category_id": 1, "bbox": [273.0, 498.0, 29.0, 14.0], "area": 249, "segmentation": {"size": [512, 512], "counts": "ooX41n?1O1O1O1O1O1O1O1O1O1O1O1O1001O00O1001O1O2N1O1O1O1O1O1O1NT`X3"}, "image_id": 554, "id": 9684}, {"iscrowd": 0, "category_id": 1, "bbox": [261.0, 507.0, 9.0, 5.0], "area": 25, "segmentation": {"size": [512, 512], "counts": "ooR41n?1O1O1O11O1O1O1OQ`h3"}, "image_id": 554, "id": 9685}, {"iscrowd": 0, "category_id": 1, "bbox": [377.0, 511.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "ool51PPS2"}, "image_id": 554, "id": 9686}, {"iscrowd": 0, "category_id": 1, "bbox": [481.0, 511.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "oo`71PP?"}, "image_id": 554, "id": 9687}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 20.0, 38.0], "area": 407, "segmentation": {"size": [512, 512], "counts": "0V1j>O2N2N2N2N2N2O1N2N2N2N2N2N2N3M2N2N2N2N^oe7"}, "image_id": 555, "id": 9688}, {"iscrowd": 0, "category_id": 1, "bbox": [41.0, 0.0, 81.0, 59.0], "area": 1940, "segmentation": {"size": [512, 512], "counts": "Vad02m?2N2N2N2O1N2N3M2N2N2N2N2N2N2N2N2O1N2N000000000000000001O00000001O000000000001O0000001O1O1O1O100O1O1O1O1O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O1O1O1O1O1O100O1O1OQPS6"}, "image_id": 555, "id": 9689}, {"iscrowd": 0, "category_id": 1, "bbox": [205.0, 0.0, 27.0, 14.0], "area": 191, "segmentation": {"size": [512, 512], "counts": "P`V31o?1O1O1O1O1O1O1O2N1O1O1O1OO1O1O1O1O1O1O1O1O1O100O1O1OQP\\4"}, "image_id": 555, 
"id": 9690}, {"iscrowd": 0, "category_id": 1, "bbox": [184.0, 0.0, 123.0, 85.0], "area": 3895, "segmentation": {"size": [512, 512], "counts": "kQl21n?2N2N2N2N2N2N3N1N2N2N2N2N2N3M2O1N2N2N2N2N2N3M10O0000000001O000001O00000001O000002N2N2N2N2N2O2M00000000000010O000000000002IQAZOQ?d0QA[On>f0SAXOm>h050000000001O01O002N2N2N2N3M2O1N2N2N2N1O0001O1O1O1O1O100O1O1O1O1O1O1O100O1O1O1O1O1O1O100O1O1O1O1O2N2N3M2O1N2N2N2N2N2Nf_V3"}, "image_id": 555, "id": 9691}, {"iscrowd": 0, "category_id": 1, "bbox": [445.0, 0.0, 54.0, 34.0], "area": 950, "segmentation": {"size": [512, 512], "counts": "^`n61n?2N2N2N3M2N2N2N2N2N2O1N2N2N3M2O1O1OO1O1O1O1O100O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1OQ`6"}, "image_id": 555, "id": 9692}, {"iscrowd": 0, "category_id": 1, "bbox": [179.0, 6.0, 29.0, 29.0], "area": 434, "segmentation": {"size": [512, 512], "counts": "c`i21n?2N3M2N2N2N2O1N2N2N2N3M2N2N01O2N2N2O1N2N2N2N3M2N2N2N2N2O\\og4"}, "image_id": 555, "id": 9693}, {"iscrowd": 0, "category_id": 1, "bbox": [485.0, 18.0, 27.0, 54.0], "area": 847, "segmentation": {"size": [512, 512], "counts": "jab71n?2N2^OLUA6i>MTA5j>MTA5j>MTA5j>NSA4k>NSA4k>NSA4l>a0N2N2N2N0000000000000001O000L[AnNe>R1]AlNc>T1C"}, "image_id": 555, "id": 9694}, {"iscrowd": 0, "category_id": 1, "bbox": [301.0, 50.0, 63.0, 65.0], "area": 1937, "segmentation": {"size": [512, 512], "counts": "ibf42m?3M2N2N2O1N2N2N2N2N3M2N2N2N2O1N2N2N2JlN]AW1`>6N2N0001O0000000001O00000001O00000000011N2N2N2L[AjNg>T14O2M2N2N2N2N2N2N2N2N3N1N2N2N2N2N2N2NfmY2"}, "image_id": 555, "id": 9695}, {"iscrowd": 0, "category_id": 1, "bbox": [295.0, 51.0, 17.0, 18.0], "area": 163, "segmentation": {"size": [512, 512], "counts": "mac42m?1O2N2N2N2N2N2N1O0002N2N3M2N2N2NWnS3"}, "image_id": 555, "id": 9696}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 57.0, 42.0, 63.0], "area": 1438, "segmentation": {"size": [512, 512], "counts": "R2f1[>N2N1O00000000001O0000000000001O2N2N2N2O1N2N2N00002N2N2N2N3M2N2N2N2N2N2N2N2N2N2N2NbmZ7"}, "image_id": 555, "id": 9697}, {"iscrowd": 0, "category_id": 1, "bbox": [246.0, 81.0, 64.0, 63.0], "area": 1941, "segmentation": {"size": [512, 512], "counts": "gSk31n?2N2N2N2N2N3M2N2N2N2N2N2N2O1N2N2N2N3M2N2N2N2N1O00000001O000000000000000001O000001O1O2N2N3M2N2N2N2N2N2O1N2N2N2N2N2N3M2N2N2N2N2NilT3"}, "image_id": 555, "id": 9698}, {"iscrowd": 0, "category_id": 1, "bbox": [168.0, 90.0, 22.0, 22.0], "area": 242, "segmentation": {"size": [512, 512], "counts": "QSd22n?1N2N2N3M2N2N2O1N20001OO1N2N2N2N3M2N2O1N2NjlP5"}, "image_id": 555, "id": 9699}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 90.0, 27.0, 27.0], "area": 377, "segmentation": {"size": [512, 512], "counts": "Uco22n?1N2N2N2N3M2N2N2N2N2O1N2N2OO2N3M2N2N2N2N2N2O1N2N3M2Nilb4"}, "image_id": 555, "id": 9700}, {"iscrowd": 0, "category_id": 1, "bbox": [90.0, 92.0, 86.0, 81.0], "area": 2780, "segmentation": {"size": [512, 512], "counts": "bT]12m?2N3M2N2N2N2N2N2N2O1N2N3M2001ON2N2N2N2N2N2N3M01O000000000000000001O01O00002N3M2N1O000010O000CPOkAP1U>ROiAn0W>TOgAl0Y>VOeAj0[>YObAg0^>=0011N2N2N2N0002N2N2N2N2N3M2N2N2O1N2N2N2N2N2N3M2N2N2N2O1N2N2N2N2N3MXlW5"}, "image_id": 555, "id": 9701}, {"iscrowd": 0, "category_id": 1, "bbox": [290.0, 96.0, 113.0, 104.0], "area": 3491, "segmentation": {"size": [512, 512], "counts": "`Ua41n?2N2O2M2N2N2N2N2N2N2L4N2N2O2M2N2N2N2N2N2N2N1O02N2O1N2N2N2N1O00000000001O01O000000000000000001O01HPO_AP1a>RO]An0c>TO[Al0e>7001O001O2N2N1O01O01O2N1O00MROUAn0k>30000000001O00000001O0MUAROk>n03001O2N1JoN\\AQ1d>QOZAP1e>50000000000001O2N2N2N2O2M2N2N2N2N2N2N2N2N2N3N1N2N2N2N\\\\f1"}, "image_id": 555, "id": 9702}, 
{"iscrowd": 0, "category_id": 1, "bbox": [14.0, 119.0, 63.0, 48.0], "area": 1522, "segmentation": {"size": [512, 512], "counts": "YT71n?2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N00002000000000000000000000000N2N2N2N01O0000001O2N2O1N2N2N2N2N2N1O000000002N2N2N2N2N2N2N2N2N2Ng[i6"}, "image_id": 555, "id": 9703}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 136.0, 11.0, 21.0], "area": 121, "segmentation": {"size": [512, 512], "counts": "X4e0\\?N2N2N2N2N2N2N2N2N2N_[j7"}, "image_id": 555, "id": 9704}, {"iscrowd": 0, "category_id": 1, "bbox": [243.0, 145.0, 45.0, 50.0], "area": 957, "segmentation": {"size": [512, 512], "counts": "ndi31n?2N2N2N3M2N2O1N2N2N2N2N3M2N2O10000000010O000000000000010O00000000TOPAf0Q?XOQAf0V?N2N3M2N2N2N2O1N2N2N[j_3"}, "image_id": 555, "id": 9705}, {"iscrowd": 0, "category_id": 1, "bbox": [422.0, 145.0, 31.0, 31.0], "area": 492, "segmentation": {"size": [512, 512], "counts": "oTc61o?1N2N2N2N2N2N2N2N3M2O1N2N2N2N2N11N2N3M2N2N2O1N2N2N2N2N3M2N2NP[m0"}, "image_id": 555, "id": 9706}, {"iscrowd": 0, "category_id": 1, "bbox": [73.0, 149.0, 12.0, 12.0], "area": 75, "segmentation": {"size": [512, 512], "counts": "idT11n?2O1N3M2N2000O2M2O1N2NU[e6"}, "image_id": 555, "id": 9707}, {"iscrowd": 0, "category_id": 1, "bbox": [45.0, 158.0, 37.0, 35.0], "area": 656, "segmentation": {"size": [512, 512], "counts": "_ef02m?2N2N2O1N2N3M2N2N2N2N2O1N3M2O01N2N1O0001O00000001O002N2Jg@CZ?;h@C[?:7N2N2N2N2O1Nhjf6"}, "image_id": 555, "id": 9708}, {"iscrowd": 0, "category_id": 1, "bbox": [129.0, 159.0, 22.0, 20.0], "area": 247, "segmentation": {"size": [512, 512], "counts": "XeP22m?3N1N3M2O2M2N100O00010O000100O3M2O2M2N3N1NiZd5"}, "image_id": 555, "id": 9709}, {"iscrowd": 0, "category_id": 1, "bbox": [146.0, 160.0, 88.0, 79.0], "area": 2830, "segmentation": {"size": [512, 512], "counts": "`VY21n?2N2N3M2N2N2N2O1N2N2N2N3M2000000000000O2M2N2N2N2O1N2N2N000000000001O01O0000000000000001O01O0MfNaAZ1_>3000000001O01O00000000000001O00010O2N2N2N2N2N2N2N3N1N2N2N2N2N2N2N2N3M2O1N2N2N2NYjZ4"}, "image_id": 555, "id": 9710}, {"iscrowd": 0, "category_id": 1, "bbox": [231.0, 160.0, 23.0, 22.0], "area": 262, "segmentation": {"size": [512, 512], "counts": "Xec31n?2N3M2N2O1N2N2N2N3N1000O1O1N2N2N3M2N2N2N2O1NdjP4"}, "image_id": 555, "id": 9711}, {"iscrowd": 0, "category_id": 1, "bbox": [373.0, 163.0, 54.0, 52.0], "area": 1455, "segmentation": {"size": [512, 512], "counts": "oej53l?2N2N2N2N2N2N2N2N2j@\\Om>f0QA\\Om>f0QA\\Om>m0N2N2N1O0000002O1000N1O00000000000001O00000002N2N2N2N2N2N2N2N2N2N2N2N3M2N2N2N2O1N2NZZZ1"}, "image_id": 555, "id": 9712}, {"iscrowd": 0, "category_id": 1, "bbox": [125.0, 180.0, 23.0, 21.0], "area": 252, "segmentation": {"size": [512, 512], "counts": "nen11o?1N2N3M2O2M2N2O1N1O0001O01O01O2N2O2M2N3N1N2NTje5"}, "image_id": 555, "id": 9713}, {"iscrowd": 0, "category_id": 1, "bbox": [48.0, 192.0, 57.0, 55.0], "area": 1473, "segmentation": {"size": [512, 512], "counts": "QWh02m?2N2N2N2O1N3M2N2N2N2N2N2N2N2N2N2O2M2N1O000000000000000010O000000000000001O2N2N2O1N2N2N2N2N3M2N2N2N2N2N2O1N2N2N2N]Y[6"}, "image_id": 555, "id": 9714}, {"iscrowd": 0, "category_id": 1, "bbox": [286.0, 194.0, 26.0, 26.0], "area": 345, "segmentation": {"size": [512, 512], "counts": "\\V_41n?2N2N3M2N2N2N2N2N2N2O1O2O00N2N2N2N2N2N3M2O1N2N2N2NaiS3"}, "image_id": 555, "id": 9715}, {"iscrowd": 0, "category_id": 1, "bbox": [202.0, 201.0, 75.0, 73.0], "area": 1942, "segmentation": {"size": [512, 512], "counts": 
"nWU32m?3N1N2N2N2N1O110O1N2N2O1N2N2N2N2N2N2N2N0000000000000000000001O000000000001O0000000000000000000001O0000NPOUAP1k>20001O01O00002N2N2N2WOm@b0V?[Ol@c0Z?N2N2N2N2N2N2N2N2N2OSYe3"}, "image_id": 555, "id": 9716}, {"iscrowd": 0, "category_id": 1, "bbox": [403.0, 204.0, 23.0, 21.0], "area": 258, "segmentation": {"size": [512, 512], "counts": "jfY61l?1U@0j?4N2N2N3M1O011N1O1O000001O002O2M2N2N2N2N3N[iZ1"}, "image_id": 555, "id": 9717}, {"iscrowd": 0, "category_id": 1, "bbox": [420.0, 207.0, 62.0, 53.0], "area": 1575, "segmentation": {"size": [512, 512], "counts": "UWb62n?1N2N2N2N2N2N2N3M2N2O1N2N20000010O00000000N2N2N2N3M2O0O00000000000000010O00000000000002N2N2O1N3M2N2N2N2N2N2N2N2O2M2N2N2N2Nnh>"}, "image_id": 555, "id": 9718}, {"iscrowd": 0, "category_id": 1, "bbox": [307.0, 208.0, 80.0, 69.0], "area": 2798, "segmentation": {"size": [512, 512], "counts": "`gi43l?2N2f@Jh>7VALg>6WALg>6WAL[>IfA=ML[>JeAJdA>NJ\\>JdA>NJ\\>JdAo0Z>=N3M2N2N2N10O00000000002N2N2N2O2M1O1O1O1O1O1O2N1O100O1O1O1O001O3M2N2N2O1N2N00000000000000000100O2N2N2N1O02O1N2N3M2N2N2N2N2N2N2O1N2N3M2N2NoXn1"}, "image_id": 555, "id": 9719}, {"iscrowd": 0, "category_id": 1, "bbox": [382.0, 224.0, 30.0, 30.0], "area": 472, "segmentation": {"size": [512, 512], "counts": "]Wo52m?2N2N2O1N2N2N3M2N2N2N2N2N2N2OO2O1N2N2N2N2N2N2N3M2N2N2N2O1Nbha1"}, "image_id": 555, "id": 9720}, {"iscrowd": 0, "category_id": 1, "bbox": [132.0, 225.0, 13.0, 13.0], "area": 95, "segmentation": {"size": [512, 512], "counts": "WWR22m?2M3N2N2N2O01N2N2N2N2N2NjXg5"}, "image_id": 555, "id": 9721}, {"iscrowd": 0, "category_id": 1, "bbox": [76.0, 228.0, 30.0, 29.0], "area": 418, "segmentation": {"size": [512, 512], "counts": "dWV11o?1N2N2N2N2N3M2N2N2N2N2N0001O000000001O2N2N2O1N3M2N2N2N2N2NbhZ6"}, "image_id": 555, "id": 9722}, {"iscrowd": 0, "category_id": 1, "bbox": [260.0, 230.0, 30.0, 31.0], "area": 484, "segmentation": {"size": [512, 512], "counts": "dWR42m?3M2N2N2N2N2N2N2O1N3M2N2N2N1O03M2N2N2N2O1N2N2N2N3M2N2N2N2OZh^3"}, "image_id": 555, "id": 9723}, {"iscrowd": 0, "category_id": 1, "bbox": [95.0, 235.0, 54.0, 61.0], "area": 1540, "segmentation": {"size": [512, 512], "counts": "ch_12m?2N2N2N3M2N2O1N2N2N2N2N2N2N2N3_OQOlAQ1R>ROkAP1S>ROkAo0T>SOkAl0U>VOiAj0X>WOfAi0Z>>0000000000002N2O1N2N2N2N2N2N2N3M2N2N2N2O1N2N2N2N2N2N3M2N2N2N2NkWe5"}, "image_id": 555, "id": 9724}, {"iscrowd": 0, "category_id": 1, "bbox": [454.0, 253.0, 55.0, 45.0], "area": 1291, "segmentation": {"size": [512, 512], "counts": "[XS71n?2N2N2N2N3M2N2N2N2N200l@[Ol>e0RA]On>c0PA_OP?a0n@AQ?h0N2N0000001O2N2N200000000000N2N2N2N2N3M2N0000000000001O003M2N2N2O1N2N2N2N2N2N2NbW1"}, "image_id": 555, "id": 9725}, {"iscrowd": 0, "category_id": 1, "bbox": [262.0, 263.0, 27.0, 28.0], "area": 392, "segmentation": {"size": [512, 512], "counts": "dXS42m?2N2N2N2N2N3N1N2N2N2N2N2N0002N2N2N2N2N2N2O2M2N2N2N2N]W_3"}, "image_id": 555, "id": 9726}, {"iscrowd": 0, "category_id": 1, "bbox": [421.0, 270.0, 28.0, 28.0], "area": 380, "segmentation": {"size": [512, 512], "counts": "nhb61n?2N2N2N2N2N2N2N2N2N2N2N0000000002N2N2N2N2N2N2N2N2N2N2NXWo0"}, "image_id": 555, "id": 9727}, {"iscrowd": 0, "category_id": 1, "bbox": [323.0, 271.0, 14.0, 14.0], "area": 93, "segmentation": {"size": [512, 512], "counts": "ghQ52m?2N2N2N2N01O0000002N2N2N2O]Wg2"}, "image_id": 555, "id": 9728}, {"iscrowd": 0, "category_id": 1, "bbox": [120.0, 275.0, 40.0, 39.0], "area": 736, "segmentation": {"size": [512, 512], "counts": "[Yl12m?2N3M2N2N2N2O1N2N2N3M2N1O000001O01O000000000001O01O0001O3M2N2N2N2N2O1N3M2N2N2NQg_5"}, "image_id": 555, "id": 9729}, {"iscrowd": 0, "category_id": 
1, "bbox": [213.0, 282.0, 14.0, 14.0], "area": 104, "segmentation": {"size": [512, 512], "counts": "PiZ31n?2N2N2N2N2N2000O1N2N2N2N2NPW^4"}, "image_id": 555, "id": 9730}, {"iscrowd": 0, "category_id": 1, "bbox": [140.0, 285.0, 57.0, 65.0], "area": 1755, "segmentation": {"size": [512, 512], "counts": "YZV22n?1N2N2N3M2N2N2N2N2N2N2O1N2N2N2N2N2N2N000001O00O1M3N2M3N2M4M2001O1O2N1O1O2N1O1O2O0O1O2N1O1O1O2N1N2M4L3L4M4L3M3M4LnVm4"}, "image_id": 555, "id": 9731}, {"iscrowd": 0, "category_id": 1, "bbox": [487.0, 288.0, 25.0, 46.0], "area": 599, "segmentation": {"size": [512, 512], "counts": "cic71n?2N3M2N2N2N2O1N2N2N2N2N2O100N3M2N2O1N2N2N2N2N2N2oF"}, "image_id": 555, "id": 9732}, {"iscrowd": 0, "category_id": 1, "bbox": [279.0, 289.0, 49.0, 49.0], "area": 1242, "segmentation": {"size": [512, 512], "counts": "hi[42m?2O1N2N2N3M2N2N2N2O1N2N3M2N2N2N2N2N2O2M2N2N1O001O0001O1O3M2N3M2N3N1N3M3M2N3N10000O1N2Ha@L`?3b@K`?4a@K`?38N2NWfk2"}, "image_id": 555, "id": 9733}, {"iscrowd": 0, "category_id": 1, "bbox": [195.0, 299.0, 16.0, 16.0], "area": 128, "segmentation": {"size": [512, 512], "counts": "ciQ32m?2N3M2N2N2OO0000002N2N2O1N2N2N`Vf4"}, "image_id": 555, "id": 9734}, {"iscrowd": 0, "category_id": 1, "bbox": [221.0, 301.0, 36.0, 36.0], "area": 554, "segmentation": {"size": [512, 512], "counts": "Uj^32m?2N3M2N2N2N2N2N2O1N00000000001O000001O000000000001O03M2N2N2N2N2N2N2N2OYVo3"}, "image_id": 555, "id": 9735}, {"iscrowd": 0, "category_id": 1, "bbox": [484.0, 330.0, 27.0, 28.0], "area": 386, "segmentation": {"size": [512, 512], "counts": "hZb73m?1N2N2N2N2N2M4M2N2N2N001O01O003M2N2N2N2O1N3M2N2N2N2NZ5"}, "image_id": 555, "id": 9736}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 338.0, 16.0, 28.0], "area": 255, "segmentation": {"size": [512, 512], "counts": "c:k0U?O001O2N2O1N2N2N2N2N3M2N2N2N2OQeg7"}, "image_id": 555, "id": 9737}, {"iscrowd": 0, "category_id": 1, "bbox": [431.0, 338.0, 29.0, 29.0], "area": 460, "segmentation": {"size": [512, 512], "counts": "njg62m?3M2N2N2O1N2N2N2N3M2O1N2N2N1001N2N3M2N2N2N2N2O1N3M2N2N2NPei0"}, "image_id": 555, "id": 9738}, {"iscrowd": 0, "category_id": 1, "bbox": [495.0, 347.0, 17.0, 30.0], "area": 285, "segmentation": {"size": [512, 512], "counts": "Ykg71n?2N2N2N3M2O1N2N2N2N2N2N2N2N2N11NUE"}, "image_id": 555, "id": 9739}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 348.0, 57.0, 56.0], "area": 1475, "segmentation": {"size": [512, 512], "counts": "oko23l?2N2N2N2N2N2O2M2N2N2N2N2N2N2N3M2N1O000010O00000000000001O00000000000001O01O02N2N3M2N2N2N2N2O2M2N2N2N2N3M2N2O1N2N`dS4"}, "image_id": 555, "id": 9740}, {"iscrowd": 0, "category_id": 1, "bbox": [466.0, 351.0, 27.0, 28.0], "area": 327, "segmentation": {"size": [512, 512], "counts": "d[Y71n?2N2O1N2JJ_@8_?6N00000000001O0000000001O001O2N2N2N2N2N3MkT9"}, "image_id": 555, "id": 9741}, {"iscrowd": 0, "category_id": 1, "bbox": [264.0, 352.0, 28.0, 27.0], "area": 390, "segmentation": {"size": [512, 512], "counts": "\\[T41n?2N2N2N2N2N3M2N2N2N2N2O1N2O01N2N3M2N2N2N2N2N2N2N2O1N2Ncd]3"}, "image_id": 555, "id": 9742}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 356.0, 53.0, 57.0], "area": 1727, "segmentation": {"size": [512, 512], "counts": "T<9f?2N2N2N2N2N2N2O1N2N2N3M2N2N2N2N1O00000000010O00000000002N2N2N2N2N1O02N2N2N2N2N2N3M2N2N2N2N2O1@k@2W?Ll@1V?Nk@0W?Nk@0W?Nk@0W?Nk@0_cU7"}, "image_id": 555, "id": 9743}, {"iscrowd": 0, "category_id": 1, "bbox": [352.0, 356.0, 79.0, 82.0], "area": 2630, "segmentation": {"size": [512, 512], "counts": 
"h[`51n?2O1N3M2N2N2N2N2N2N2N2N2O2M2N2N2N2N2N2N2N2N2O2O00000000000001O0001O0000000000000001O01O0000000000000001O01O0000O1N2N2N2O1N3M2N2N2N2N2N2N2N2N2O2M2N2N2N2N2N2NQSX1"}, "image_id": 555, "id": 9744}, {"iscrowd": 0, "category_id": 1, "bbox": [311.0, 381.0, 23.0, 24.0], "area": 270, "segmentation": {"size": [512, 512], "counts": "[lk42m?2N2N2N2N2N2N2N2N000000000002N2N2N2N2N2N2N2Nlch2"}, "image_id": 555, "id": 9745}, {"iscrowd": 0, "category_id": 1, "bbox": [233.0, 383.0, 54.0, 62.0], "area": 1566, "segmentation": {"size": [512, 512], "counts": "Ymd32m?2N2N2N2N2N2N2N2N2N2N2N2N2^OUOmAm0Q>TOmAn0Q>TOmAn0Q>TOmAn0Q>TOmAm0R>UOlAk0T>XOiAh0W>ZOgAf0Y>`0000001O0002N2N2N2N2N2N2N2N2N3M2N2N2O1N2N2N2N2N2N2N2N2N2N3M2N2NVS`3"}, "image_id": 555, "id": 9746}, {"iscrowd": 0, "category_id": 1, "bbox": [56.0, 388.0, 51.0, 57.0], "area": 1505, "segmentation": {"size": [512, 512], "counts": "S]l02m?2N2N2O1N2N3M2CBUA`0i>BUA`0i>BUA`0j>ATAa0j>ATAa0j>BSA>m>EPA;P?Gn@9R?:002N2N2N2N2O1N2N2N2N2N2N3M2Nfbl0"}, "image_id": 555, "id": 9749}, {"iscrowd": 0, "category_id": 1, "bbox": [262.0, 414.0, 83.0, 78.0], "area": 2619, "segmentation": {"size": [512, 512], "counts": "e^S42m?2N2N2N2N2N2O1N3M2N2N2N2N2N2N2N2O2M2N2N1O0000000010O00002N2N00KlN_AT1a>nN]AR1c>PO\\Ao0d>7O0000000001O000001O00000000N201O02N2N00000000000100O2N2N2N2N2N2N2N3N1N2N2N2N2N2N2N2N3M2O1N2N2N\\Rc2"}, "image_id": 555, "id": 9750}, {"iscrowd": 0, "category_id": 1, "bbox": [509.0, 420.0, 3.0, 5.0], "area": 9, "segmentation": {"size": [512, 512], "counts": "Vmn71n?2N2kB"}, "image_id": 555, "id": 9751}, {"iscrowd": 0, "category_id": 1, "bbox": [500.0, 427.0, 12.0, 39.0], "area": 266, "segmentation": {"size": [512, 512], "counts": "T^j72^?OPA3n>OPA3n>OQA3l>ORA3l>0QA2n>OPA3n>OPA3n>a0N2N2N2dB"}, "image_id": 555, "id": 9752}, {"iscrowd": 0, "category_id": 1, "bbox": [2.0, 428.0, 25.0, 25.0], "area": 314, "segmentation": {"size": [512, 512], "counts": "i]13l?2O1N2N2N3M2N2O1N00001O0001O00002O2M2N2N2N2N3M2OZRb7"}, "image_id": 555, "id": 9753}, {"iscrowd": 0, "category_id": 1, "bbox": [349.0, 428.0, 28.0, 28.0], "area": 412, "segmentation": {"size": [512, 512], "counts": "hm^52m?2N2N2N2N3M2O1N2N2N2N2N3M10O2N2N2N2O1N3M2N2N2N2N2N2O2MVRS2"}, "image_id": 555, "id": 9754}, {"iscrowd": 0, "category_id": 1, "bbox": [439.0, 431.0, 29.0, 28.0], "area": 408, "segmentation": {"size": [512, 512], "counts": "lmk61n?2N2N2N2O1N3M2N2N2N2N2O2M1O0002N2N2N2O1N2N2N3M2N2N2N2O1NTbe0"}, "image_id": 555, "id": 9755}, {"iscrowd": 0, "category_id": 1, "bbox": [15.0, 434.0, 44.0, 54.0], "area": 1301, "segmentation": {"size": [512, 512], "counts": "cn72m?3ELe@6Z?Kd@7Z?Kd@7Z?Kd@7Z?9N2N2N2N3M2N2O1N2N2N2N2N1O000000010O0000000002N2N2N2N2O1N3DRAAP?>RA_OP??RA_OP?2l@7^?Gd@7d?N2N2N2NiQR7"}, "image_id": 555, "id": 9756}, {"iscrowd": 0, "category_id": 1, "bbox": [70.0, 439.0, 60.0, 73.0], "area": 1896, "segmentation": {"size": [512, 512], "counts": "^_S11n?2N2N2N2N3M2N2N2O1N2N2N2N2N2N3M1O1O100O1O1O0000000@mNTBS1l=oNRBQ1n=QOPBo0P>SOnAn0R>TOlAk0T>WOjAi0V>YOhAg0X>`00000000000010O0000002N2ZOcAF_>8cAF_>8cAF_>8cAF_>8cAF`>7bAH_>6cAH_>7bAG`>7cAF_>8cAF_>8f0N2N2N2Nian5"}, "image_id": 555, "id": 9757}, {"iscrowd": 0, "category_id": 1, "bbox": [387.0, 460.0, 10.0, 9.0], "area": 48, "segmentation": {"size": [512, 512], "counts": "anQ61n?2N2N2N1O01O2N2N2NbQi1"}, "image_id": 555, "id": 9758}, {"iscrowd": 0, "category_id": 1, "bbox": [306.0, 471.0, 55.0, 41.0], "area": 1172, "segmentation": {"size": [512, 512], "counts": 
"n_i41n?2N1O1O100O1O1O1O1O1O1O1O1O1O100BBYA?f>DWA=h>EVAFUA;j>GTA:k>>O1O1O1O100O1O1001O1O1O1O1O1O1O1O1N3M2N2N2O1N2N2N2N2N2N3M2N2N2N2O_P[2"}, "image_id": 555, "id": 9759}, {"iscrowd": 0, "category_id": 1, "bbox": [433.0, 472.0, 51.0, 40.0], "area": 1302, "segmentation": {"size": [512, 512], "counts": "ioh61m?3M2M4M2M4M2N2M40O01N1N3L3N2M3N2N2001O001O001O00001O001O00001O001O001O00001M2M4M2N3M21OO2M2M4M2N3L3Nl`="}, "image_id": 555, "id": 9760}, {"iscrowd": 0, "category_id": 1, "bbox": [126.0, 485.0, 48.0, 27.0], "area": 687, "segmentation": {"size": [512, 512], "counts": "o_o11n?1O100O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O11O1O1O1O1O2N1O1O1O1O1O1O1N3M2N2N2N2NX`X5"}, "image_id": 555, "id": 9761}, {"iscrowd": 0, "category_id": 1, "bbox": [401.0, 488.0, 10.0, 10.0], "area": 53, "segmentation": {"size": [512, 512], "counts": "]oX61n?2N2N2O1N0002N2O2MePb1"}, "image_id": 555, "id": 9762}, {"iscrowd": 0, "category_id": 1, "bbox": [384.0, 499.0, 25.0, 13.0], "area": 168, "segmentation": {"size": [512, 512], "counts": "o_P61n?1O100O1O1O1O1O100O1O1O1O1O11O1O1O2N1O1O1O1O2N1OQPc1"}, "image_id": 555, "id": 9763}, {"iscrowd": 0, "category_id": 1, "bbox": [33.0, 504.0, 14.0, 8.0], "area": 68, "segmentation": {"size": [512, 512], "counts": "mo`03l?1O1O1O100O11O1O1O1O1O1O1OQPX7"}, "image_id": 555, "id": 9764}, {"iscrowd": 0, "category_id": 1, "bbox": [6.0, 506.0, 7.0, 6.0], "area": 26, "segmentation": {"size": [512, 512], "counts": "m_31n?2N2N10O2O1NUPi7"}, "image_id": 555, "id": 9765}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 21.0, 36.0], "area": 633, "segmentation": {"size": [512, 512], "counts": "0T1l>00000000O1000000000000O10000BYACg>>00000000000O1000O10000000O10000000IWOWAi0i>70O1000000000000O10O100000000N101O1O1HoN`AR1`>7000O1000000000000O10000000000D]A[Oc>c0fAUO[>j0>O1O1O1O1O1O1O1O1000000000000O1000000000000000000O10000000007I9G`_k2"}, "image_id": 556, "id": 9768}, {"iscrowd": 0, "category_id": 1, "bbox": [364.0, 0.0, 42.0, 74.0], "area": 2133, "segmentation": {"size": [512, 512], "counts": "bPf56f00X>7aA0X>7`A0Z>6`AJ_>l0000O17I3M0O100D[N]Be1b=bNXB^1h=71000000000O1000O100000000000O10O100000000000O1000O1000000000O1007H8I000000O100000O10EROdAn0\\>;O10000000L400O10O1001O8@RA^OV?:`0Ga^l4"}, "image_id": 556, "id": 9771}, {"iscrowd": 0, "category_id": 1, "bbox": [229.0, 69.0, 52.0, 100.0], "area": 3132, "segmentation": {"size": [512, 512], "counts": "hcb33m?7I6J7H7J3M0cNVOdCj0[<^O^Cb0bDhAd0P>m0I6JO10000000O100000O100000O100000O10000000OQOXBHh=8_BAa=?fBZOZ=f0nBQOnf0cASO]>l0<000006J8H7I7Inlh2"}, "image_id": 556, "id": 9773}, {"iscrowd": 0, "category_id": 1, "bbox": [120.0, 87.0, 91.0, 56.0], "area": 3263, "segmentation": {"size": [512, 512], "counts": "cSl13m?7I8H7I000O10O100000000000O01000000000LYOo@g0Q?4O010K7N4LO10000000O100000O100000O100000O1000003M2NBROiAo0W>YOaAf0`>=00000000M3000O0100000000000O013M1OO10000000O1000O10000000O101O8H7I7I3M000Hi@FX?:70004KZ\\f4"}, "image_id": 556, "id": 9774}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 110.0, 41.0, 54.0], "area": 1879, "segmentation": {"size": [512, 512], "counts": "f3\\1d>0000O100000O10000KgNcAX1^>50000O1000004L0000000O1005J100000O100000O1000000MaAeN^>\\1306UO\\ANk>K]AMk>K\\AN^j[7"}, "image_id": 556, "id": 9775}, {"iscrowd": 0, "category_id": 1, "bbox": [402.0, 133.0, 62.0, 58.0], "area": 1974, "segmentation": {"size": [512, 512], "counts": "WUY61n?2M3N1N3N2M2O2M3K^Oi@c0U?6N2M3N1N3N2M2O2N2M2O2M3OO2M2O2000O01000O010O1N1O2000O01000O01000N1N3N2M2O2M3POTAi0m>UOUAj0R?M3N1N3N2M2O2M3N1N3N2M2OQkg0"}, "image_id": 556, "id": 9776}, {"iscrowd": 
0, "category_id": 1, "bbox": [185.0, 144.0, 41.0, 63.0], "area": 1885, "segmentation": {"size": [512, 512], "counts": "Rel24^?Nn@9k>Nm@:l>d0I6J00O01000000000002M8I6JO10000000O1000O100000O1000O10002N7I7I7I6I8I7I7I7Igi^4"}, "image_id": 556, "id": 9777}, {"iscrowd": 0, "category_id": 1, "bbox": [136.0, 152.0, 39.0, 61.0], "area": 1816, "segmentation": {"size": [512, 512], "counts": "VUT23c?0d@6V?0d@7U?`0UAQOY>o0fAYOR>n0hAXOR>[100000O1000O10000000O1000O10000000O1000O10000000CQBhNP>X1<06J7I7H8I7I7IoYX5"}, "image_id": 556, "id": 9778}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 172.0, 62.0, 51.0], "area": 2079, "segmentation": {"size": [512, 512], "counts": "o5P1`>WOiAi0W>^ObAb0^>`000000O0100000000000O010000000000O010000000000O7J7I6J4L00000O10O10000000O1000O10000000O1000O10000000O10O100001O7H8I6J\\iP7"}, "image_id": 556, "id": 9779}, {"iscrowd": 0, "category_id": 1, "bbox": [375.0, 176.0, 61.0, 52.0], "area": 1775, "segmentation": {"size": [512, 512], "counts": "^fk51n?2M3N1JJ`@9^?Ha@:\\?6O2M3N1N3N2M2O2M3N1O2M3N110O1000O0N3N2M2100O0N3O10O01000O0100O01000O0100O0O2M3N1N3N2M2O2M3N1N3N2M2O2M3N1N3N2MjiU1"}, "image_id": 556, "id": 9780}, {"iscrowd": 0, "category_id": 1, "bbox": [443.0, 181.0, 62.0, 89.0], "area": 2727, "segmentation": {"size": [512, 512], "counts": "Qhm63[>OTC2k<0RC3k<0mAKP17Q=OnALn07S=6kBLS=5kBNU=2iBOX=1eB2Z=OcB3^=LaB6X=ZOVBc0?6S=VO`Bk0;0T=WO_Bk0:1U=5jBMS=6jBLU=6iBLU=6hBKW=7hBHY=_10O3N2M2O2M3N2M100O2O2N2M3N1N3N2M101N2O1EdAPO^>o0dAoN]>o0eAoN\\>S1dAkN^>T1710N2N2M2O2M3N1N3N2M2O2M3N1O2M3N1N3NiY3"}, "image_id": 556, "id": 9781}, {"iscrowd": 0, "category_id": 1, "bbox": [503.0, 228.0, 9.0, 33.0], "area": 144, "segmentation": {"size": [512, 512], "counts": "`gk71n?2M2V@Mf?7d@Im>:QAHm>:PAHn>:QAHm>:PAIm>f0lH"}, "image_id": 556, "id": 9782}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 231.0, 63.0, 55.0], "area": 2378, "segmentation": {"size": [512, 512], "counts": "h737i0c>:O100DnNjAR1V>;10O1000000000O10O1000000000O10O1000000000O10O1000000000O107I7I7I5K000O0100000000000O0100000000000O01000000005K7H8I_WP7"}, "image_id": 556, "id": 9783}, {"iscrowd": 0, "category_id": 1, "bbox": [223.0, 265.0, 86.0, 84.0], "area": 3169, "segmentation": {"size": [512, 512], "counts": "Ui_31o?3M3L5L3M3L3N000O0100000O01000aA^O_=b0`B@`=`0^BCa==]BEb=;]BHb=8\\BJd=9WBJg=;SBGm=T101N101O01O4J3N1001N101O0O2O001O0O2O00O3L5K4L3M2N3MJkNeAS1[>oNeAo0[>TObAl0^>VO`Aj0`>:00000000000000002N2N3M2N3N1010O010O01N1N3M2N3M2N3M2N3M2N3M2N3M^WU3"}, "image_id": 556, "id": 9784}, {"iscrowd": 0, "category_id": 1, "bbox": [498.0, 265.0, 14.0, 30.0], "area": 224, "segmentation": {"size": [512, 512], "counts": "mXi71m?3N2M2N3N2M2O2M3N1N3N2M2O2fG"}, "image_id": 556, "id": 9785}, {"iscrowd": 0, "category_id": 1, "bbox": [291.0, 281.0, 84.0, 84.0], "area": 3116, "segmentation": {"size": [512, 512], "counts": "\\ja42m?2O1b@Ml>6QAMl>5RAMl>5SALk>7RALl>5RAMl>5RAMl>f0M2O1N2N1O01O01O0_OgN\\BY1d=jNYBV1h=kNWBT1i=nNUBS1j=POSBP1m=ROQBn0P>SOoAl0Q>b0O0001O01O000001O01O0001O01O0001O01O0001gNnAc0T>[OnAd0S>ZOoAd0T>ZOnAc0T>[OnAc0T>[OnAc0U>ZOkAg0V>XOjAg0X>WOkAf0W>XOkAf0h>0O000000102M2N2N2OO0001O01O001O3N1N2N3M2O3L3M2N2O2M2N2N2NSVT2"}, "image_id": 556, "id": 9786}, {"iscrowd": 0, "category_id": 1, "bbox": [160.0, 287.0, 55.0, 68.0], "area": 2333, "segmentation": {"size": [512, 512], "counts": "iY`25f?401N7J9F2O1O1O1000O0100O1000005K6J6J6J3L10000N2O1N1O2O10000000O1000O1001N2O1O001O4L6eNiA`0Z>^OjA>X>@gAa0_>YOaAg0l>0000O2O5K6J6J6JTUd4"}, "image_id": 556, "id": 9787}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 295.0, 67.0, 51.0], "area": 2502, 
"segmentation": {"size": [512, 512], "counts": "]9]1c>00O01000000000O10O1000000000O0100000000000O01000000000O0100000000000O01000000005J7J6J000000000O10O1000000000O01000000000O102N6J7I7IdUn6"}, "image_id": 556, "id": 9788}, {"iscrowd": 0, "category_id": 1, "bbox": [493.0, 333.0, 19.0, 53.0], "area": 653, "segmentation": {"size": [512, 512], "counts": "\\kf77i?7I8H00O1000O1SO^O\\Bb0d=EUB;k=MmA3S>5eAK[>l01000O1000000cE"}, "image_id": 556, "id": 9789}, {"iscrowd": 0, "category_id": 1, "bbox": [322.0, 346.0, 80.0, 55.0], "area": 2445, "segmentation": {"size": [512, 512], "counts": "_[Q52n?5K5J6K5K5K000O10O100000O14K6KO10002N5K2M010000000O010000000O0100000O10O100000O10O100000O10O11@VOhAm0U>XOfAh0Z>?O010000000O010000000O010000]OaAE_>;fA@Y>`0mAZOT>f0QBUOo=k0c0000O01000004L5J6K5K5K^df1"}, "image_id": 556, "id": 9790}, {"iscrowd": 0, "category_id": 1, "bbox": [172.0, 368.0, 80.0, 60.0], "area": 2803, "segmentation": {"size": [512, 512], "counts": "a\\f22n?7I7H8I4L000000000O01000000000O10O1000000000O10O1000000000O10O1000000000O10O10000000YOVOXBj0h=]OPBd0P>CiAf000O10000000O1000O10000000O1000O100000O100000O100000O5L6J7I7I7I7I7HTcQ4"}, "image_id": 556, "id": 9791}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 372.0, 81.0, 50.0], "area": 2642, "segmentation": {"size": [512, 512], "counts": "m;Y1g>0007I2M100000O100000O1000O100000O100000O100000O100000O100000O1000O10000000O1000O100000O100000O100000O100000O10000IVOXAi0i>700O100000O100000O100000O100000O16J6J7H8I7I[Sg6"}, "image_id": 556, "id": 9792}, {"iscrowd": 0, "category_id": 1, "bbox": [510.0, 394.0, 2.0, 14.0], "area": 20, "segmentation": {"size": [512, 512], "counts": "[\\o76i?8_C"}, "image_id": 556, "id": 9793}, {"iscrowd": 0, "category_id": 1, "bbox": [285.0, 414.0, 104.0, 61.0], "area": 3358, "segmentation": {"size": [512, 512], "counts": "`m^41o?6J7I6J7H7J7I0000O1000O10000000O10O10000000O10O10000003M6O2J5J00000O100000O10L4O10000000O10O10000000O1000O10000000O10O100000VO_OUBa0k=FmA;R>LhA4X>3aAL`>j000000000O01000000000O10O1000000000O01000000000O0100XOcAM]>3iAGW>9PB@P>`0h0O0100006J7I6I`Rm1"}, "image_id": 556, "id": 9794}, {"iscrowd": 0, "category_id": 1, "bbox": [489.0, 419.0, 23.0, 27.0], "area": 521, "segmentation": {"size": [512, 512], "counts": "Vmd75k?7I7I5J1000O10000000O1000O10000000O10O100000mB"}, "image_id": 556, "id": 9795}, {"iscrowd": 0, "category_id": 1, "bbox": [16.0, 429.0, 74.0, 48.0], "area": 2058, "segmentation": {"size": [512, 512], "counts": "S^87i?7I00O10O108H5K00O100000O10000000O1000O100001O0OG[OYAe0g>:00HSO]Am0c>8000O100000O10000000O1000O10000001N9H7I6J000002N0000000O10O1000000000O1000O100000001O7I8G`ab6"}, "image_id": 556, "id": 9796}, {"iscrowd": 0, "category_id": 1, "bbox": [184.0, 436.0, 83.0, 52.0], "area": 2977, "segmentation": {"size": [512, 512], "counts": "_^l24U?0^A6\\>0]A8[>0^A6\\>i0N00000000O01000000000O0100000000000O0100002N7I6I2OO1000000000O01000000000O10O1000000000O01000000000O10O1000000000O01000000000O10O1000000000O10O1001O7I7H7J7IXQj3"}, "image_id": 556, "id": 9797}, {"iscrowd": 0, "category_id": 1, "bbox": [307.0, 474.0, 77.0, 38.0], "area": 1983, "segmentation": {"size": [512, 512], "counts": "hoi46j?2N0O10000000000O1000000000000O10000000000O1FJj@6V?:000000O100000000DZO^Af0b>@WAa0i>;O1000000000000O10000000000O10000000000001O2N0000O10000000000O1000000000000O15DSA[OS?6m@La?Mm`o1"}, "image_id": 556, "id": 9798}, {"iscrowd": 0, "category_id": 1, "bbox": [170.0, 483.0, 16.0, 9.0], "area": 133, "segmentation": {"size": [512, 512], "counts": "T_e28h?0000000000000000000O100000000m`R5"}, "image_id": 556, "id": 9799}, 
{"iscrowd": 0, "category_id": 1, "bbox": [18.0, 485.0, 78.0, 27.0], "area": 1050, "segmentation": {"size": [512, 512], "counts": "o_91o?000000000O1000000000000O10000000000O1000000000000O100_OOSA1m>5m@KS?a0000000O1000000000000O10000000000O1000000000000O10000000000O14L6J7IO10000000000O100000000003M7IR`_6"}, "image_id": 556, "id": 9800}, {"iscrowd": 0, "category_id": 1, "bbox": [192.0, 499.0, 75.0, 13.0], "area": 657, "segmentation": {"size": [512, 512], "counts": "h_P36j?2M10000000000O1000000000000O10000002N4L0000000000O10000000000O1000000000000O1000000000000O10000000000O1000000000000O10000000000O1000000000000O16J6JQPj3"}, "image_id": 556, "id": 9801}, {"iscrowd": 0, "category_id": 1, "bbox": [9.0, 510.0, 7.0, 2.0], "area": 13, "segmentation": {"size": [512, 512], "counts": "no42n?0000000001OQ`g7"}, "image_id": 556, "id": 9802}, {"iscrowd": 0, "category_id": 1, "bbox": [132.0, 0.0, 54.0, 40.0], "area": 1252, "segmentation": {"size": [512, 512], "counts": "ePR21n?2N2N2N2N2M3N2N2N2N1O2N2N2N2N2M3LoNVAS1i>3O1O0000O1O1O1O1O1O1O1O1N2O1O1O1001O001O00N2O1O1O1Hi@FX?9j@EW?:k@DV?;8O1N2O1O2N2N2NnoR5"}, "image_id": 560, "id": 9803}, {"iscrowd": 0, "category_id": 1, "bbox": [190.0, 0.0, 25.0, 14.0], "area": 233, "segmentation": {"size": [512, 512], "counts": "UPo22k?3N201O001O001O00001O001O001O001O0000O1M3N2N2M3NR`d4"}, "image_id": 560, "id": 9804}, {"iscrowd": 0, "category_id": 1, "bbox": [229.0, 0.0, 64.0, 45.0], "area": 1423, "segmentation": {"size": [512, 512], "counts": "k`b32m?2N2N3N1N2N2BDUA>j>DSA>k>DSA>k>DSA>k>DSA>k>=N2N3M1O0001O1O1O1O100O1O1O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O1O1O1O100O1O1OQ`]3"}, "image_id": 560, "id": 9805}, {"iscrowd": 0, "category_id": 1, "bbox": [482.0, 0.0, 4.0, 2.0], "area": 6, "segmentation": {"size": [512, 512], "counts": "PPa71o?001O0PP="}, "image_id": 560, "id": 9806}, {"iscrowd": 0, "category_id": 1, "bbox": [430.0, 3.0, 38.0, 56.0], "area": 1412, "segmentation": {"size": [512, 512], "counts": "aQg64h?4K5L5K4K5L4K6K4K5L4N3O0001O0001O01O0001O0001O0001O01K4K5L4K5L5K4K5L4K6K4Kloe0"}, "image_id": 560, "id": 9807}, {"iscrowd": 0, "category_id": 1, "bbox": [487.0, 21.0, 25.0, 43.0], "area": 651, "segmentation": {"size": [512, 512], "counts": "Uac72l?2N3L3d@GQ?:l@IS?7j@LW?3g@OY?=0O010OO2L3N2N3M2N3O010UAmNg>W1O010O01O01WO"}, "image_id": 560, "id": 9808}, {"iscrowd": 0, "category_id": 1, "bbox": [269.0, 28.0, 57.0, 71.0], "area": 2033, "segmentation": {"size": [512, 512], "counts": "oaV41n?2N2N2N2N2N2N2N2N2N2O1N2N2N2IVOUAl0i>VOUAl0i>7N2N2N2N2N2N2N2N2N2O10000N2N2N2N2N2N2N02QOWBDk=:WBDk=:WBDk=:WBDk=:WBDk=:WBDk=:UBFm=8SBHo=6SBHo=6SBHo=6SBHo=6SBHo=6SBHo=6SBHo=6SBHo=6SBHo=6o0N2N2Nanl2"}, "image_id": 560, "id": 9809}, {"iscrowd": 0, "category_id": 1, "bbox": [204.0, 29.0, 27.0, 25.0], "area": 404, "segmentation": {"size": [512, 512], "counts": "\\QV32k?3N3L3N2M4M2010O0010O0010O0010O0010O0010ON2N3L3N3L3Nm^\\4"}, "image_id": 560, "id": 9810}, {"iscrowd": 0, "category_id": 1, "bbox": [470.0, 36.0, 17.0, 17.0], "area": 168, "segmentation": {"size": [512, 512], "counts": "`Q[71m?2M3M4M2N3O01O01O01O010M2N2M4L3Nj^<"}, "image_id": 560, "id": 9811}, {"iscrowd": 0, "category_id": 1, "bbox": [238.0, 44.0, 21.0, 23.0], "area": 238, "segmentation": {"size": [512, 512], "counts": "jQg32m?2KMZ@5d?5N2N3M2N01O000000002N2N2N2N2N2N2N2N]^n3"}, "image_id": 560, "id": 9812}, {"iscrowd": 0, "category_id": 1, "bbox": [112.0, 45.0, 35.0, 53.0], "area": 1530, "segmentation": {"size": [512, 512], "counts": 
"`Qh11n?XO`Ah0`>VOcAj0]>SOeAm0h>0O00010O010O00010O010O001O0M3N3M2N3O01O0M4M20001O010L3N2Co@HU?5n@HT?5o@HU?5c0kA_OS>c0lA_OS>b0kA@U>`0hACW>=hADY>9bAI]>8`AJa>5^AMb>i0O01000O010M100O010O10O010O010O010O10O010O010O10O010O2O2M2O2M3N1N3N2N1N3N2M2O2M3N2M2O2M3N1N3N2N1Na\\U2"}, "image_id": 560, "id": 9817}, {"iscrowd": 0, "category_id": 1, "bbox": [77.0, 108.0, 53.0, 56.0], "area": 1873, "segmentation": {"size": [512, 512], "counts": "edV13j?3N3L3N2GDk@`0R?Bl@`0Q?:M2M3N3L3M4M21O010O01ON30O01O01M2M4N11O010O01O01O010O01ON3L2O0O3M4M2M4M2M3N3L3N3L3N2M4M2M4M^ln5"}, "image_id": 560, "id": 9818}, {"iscrowd": 0, "category_id": 1, "bbox": [250.0, 119.0, 31.0, 31.0], "area": 503, "segmentation": {"size": [512, 512], "counts": "VTm33l?2N2N2O1N2N2N2N2N2N2N2N2N2N000010O2N2N2N2N2N2N2N3M2N2N2N2N2Nl[c3"}, "image_id": 560, "id": 9819}, {"iscrowd": 0, "category_id": 1, "bbox": [363.0, 122.0, 58.0, 61.0], "area": 1709, "segmentation": {"size": [512, 512], "counts": "ode51o?2M2N3N1N3M2O2M2N3N1N3M2O2M2N3N0O1O0APOoAP1R>QOmAo0R>TOkAl0U>VOiAj0X>XOfAh0Y>ZOeAf0[>?1O01O01O010O00010O1O2O2M2N3N1N2N3N1N3M2O2M2N3N1N3M2O2M2N3N1N3M2OV[]1"}, "image_id": 560, "id": 9820}, {"iscrowd": 0, "category_id": 1, "bbox": [124.0, 133.0, 47.0, 57.0], "area": 1563, "segmentation": {"size": [512, 512], "counts": "oTn11m?3L3N3j@H`>:]AI`>;]AGa>;]AH`>:]AI`>;]AH`>:^AH`>n00O00010O010O01O0N2N3M2010O010M2M4M2N2M4M2N3L3N3M2M310O001L3N3M2M3N3M2M4Mb[Z5"}, "image_id": 560, "id": 9821}, {"iscrowd": 0, "category_id": 1, "bbox": [269.0, 135.0, 31.0, 31.0], "area": 489, "segmentation": {"size": [512, 512], "counts": "edV41o?2M2N2N2N2N2N2N2N2N2N2N2N2N2N20O1N2N2N2N2N2N3M2N2N2N2N2N2N2OYkY3"}, "image_id": 560, "id": 9822}, {"iscrowd": 0, "category_id": 1, "bbox": [490.0, 158.0, 22.0, 96.0], "area": 1403, "segmentation": {"size": [512, 512], "counts": "eWe71_?0m@3[=NlC2gN3Y=NnC1fN4Z=MmC3fN2Z=OmC1fN4Z=MmCc0P<@PCCc0P1Z<@oBFa0m0a<\\OlBK>l0fP=BmBb0S=U101O01O010O01O0mJ"}, "image_id": 560, "id": 9823}, {"iscrowd": 0, "category_id": 1, "bbox": [333.0, 166.0, 28.0, 29.0], "area": 430, "segmentation": {"size": [512, 512], "counts": "ieV51m?3N2M2O2M3N1N3M2O2M3N1N300O01O0O2M3M2O2M3N1N3N1N3M3N1NcZ[2"}, "image_id": 560, "id": 9824}, {"iscrowd": 0, "category_id": 1, "bbox": [157.0, 177.0, 73.0, 75.0], "area": 3569, "segmentation": {"size": [512, 512], "counts": "bf^2b0e>i00000001O00000000000000000000fAgNm=Y1QBiNo=Q1UBROj=i0YBYOg=g0VB]Oi=c0UB_Ok=a0SBAm=?QBDn=:mAJR>6lALT>Q1O001O00001O001O0N3O001O001OO1N2N2N2M3N2N2N2M3NK52N2N200M30000000001O00000000000000000D<0000Djj\\4"}, "image_id": 560, "id": 9825}, {"iscrowd": 0, "category_id": 1, "bbox": [51.0, 178.0, 86.0, 59.0], "area": 2548, "segmentation": {"size": [512, 512], "counts": "dfi02l?2M4M2N2M4e@AQ?a0l@BQ?i0M2N201O010O01O01ON3M2N30O00010OO2M2N3M21O0J@k@b0Q?8N2N3L3N3M200010O010O01O01O010O010O00010O010O0nN]Ah0b>VO`Aj0a>SObAl0i>10O01O01O010O01O01O010O010O01OQOQAm0Q?010O010OO1M4M2M4M2N3L3N2M4M_Yk5"}, "image_id": 560, "id": 9826}, {"iscrowd": 0, "category_id": 1, "bbox": [236.0, 184.0, 73.0, 82.0], "area": 2879, "segmentation": {"size": [512, 512], "counts": "\\Wf33k?2N3M2N3M2N3M2N3O0010O010O010O010Om@[Oi>h0TA[Oi>g0UA[Oi>P1M2N3M2N30O010O01M2N3M2N3M2N3M2N3M2N3M2N1O0003M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2NkYU3"}, "image_id": 560, "id": 9827}, {"iscrowd": 0, "category_id": 1, "bbox": [305.0, 208.0, 70.0, 98.0], "area": 3433, "segmentation": {"size": [512, 512], "counts": 
"\\hh41l?4M2M3N3L3N3L3N2M4M2M4M20001hAlNb=T1[BPOe=P1XBROh=n0VBUOj=j0SBYOl=i0QBYOl=j0QBZOl=^1N3O00010O010O0N2N3L3N3L3N2M4M2M4M2N02N3M2M4M2M3EeBoM^=n1dBoM_=n101ON3M]OgACW>=kADQ>010O010O0010O0O2L3N3M2M3N3M2M4M2Mffl4"}, "image_id": 560, "id": 9829}, {"iscrowd": 0, "category_id": 1, "bbox": [356.0, 243.0, 82.0, 100.0], "area": 3471, "segmentation": {"size": [512, 512], "counts": "^Yb51m?2M4M2M3N3L3N3L3N201O010O00010O01O01O010O01O01O010O01O01\\AXOn=h0oA[OR>e0kA^OR>d0kA_OR>d0lA_OQ>d0kA_OR>d0lA^OR>Z1L3O20O0010O0010OO1M4M2M4M2M3N0O010O010O0101GgBjM]=S2eBkM]=R2;L3N2M4M2M4M2M3N3L3N3L3N2M4L3N3L3N2M4M2M3N3LVhT1"}, "image_id": 560, "id": 9830}, {"iscrowd": 0, "category_id": 1, "bbox": [30.0, 249.0, 79.0, 58.0], "area": 2679, "segmentation": {"size": [512, 512], "counts": "dX?2g?7H8I7M30001O0000000001O0001O00000001OL401O0000000000010O000000000001O01OSORAh0S?00000001O01O0000000000010WA\\OT>d0dAD\\>P1000000001OL5M20000001O0001O00000001O000N2I7I7H8I8H7HXXY6"}, "image_id": 560, "id": 9831}, {"iscrowd": 0, "category_id": 1, "bbox": [435.0, 254.0, 69.0, 57.0], "area": 2212, "segmentation": {"size": [512, 512], "counts": "lhi63j?3N2M4M2N3L3N2M4M2M4M2M310O010O01O01O0\\AlN]>T1`AnN`>R1^AQOb>V100010O010eN^AV1g>O010O01O01O010O01M2N2M4M201OO1N3M2M4M2M3N3M2010O00010O010O0001M2N3L3N3L3N2M4M2M4Mcg3"}, "image_id": 560, "id": 9832}, {"iscrowd": 0, "category_id": 1, "bbox": [410.0, 307.0, 66.0, 58.0], "area": 2227, "segmentation": {"size": [512, 512], "counts": "R[]62l?3L3N3L3N2M4M2M4M2M3N3L3M4N10010O010O000N3M1N10O010O2O3M20010O0010O0010O0010O0010O0010O0010O00010O010O000N3L3N3L3N2Jm@\\OW?`06N3L3N2M4M2MQfa0"}, "image_id": 560, "id": 9833}, {"iscrowd": 0, "category_id": 1, "bbox": [58.0, 309.0, 27.0, 28.0], "area": 450, "segmentation": {"size": [512, 512], "counts": "XZm02k?3N2N3L3N3M2M4M20010O010O010O00010O01M2N3L3N2N3L3N3MUVe6"}, "image_id": 560, "id": 9834}, {"iscrowd": 0, "category_id": 1, "bbox": [7.0, 318.0, 42.0, 38.0], "area": 1147, "segmentation": {"size": [512, 512], "counts": "dj33h?5K5L5g@_Ol>f0o@_On>k00001O0001O0001O0001O0001O01O0001O0001O0001O0001O00000L5J5K5000001O0M3L4KPVW7"}, "image_id": 560, "id": 9835}, {"iscrowd": 0, "category_id": 1, "bbox": [90.0, 318.0, 97.0, 72.0], "area": 3276, "segmentation": {"size": [512, 512], "counts": "V[]12l?3L3N2M4M2N3O000N3L3N3M21O01O0HSO^Am0^>VO`Al0^>WO^Al0_>=0O0M4M10N30010O0010O0010O0010O0010O0010kN_Ai0a>UOaAk0`>QOdAo0f>010O00N3N1010O00010O010XATOZ>l0dAVO\\>j0aAZO_>f0^A\\Ob>d0[A_Of>n0O0010O0010O0010O0010O0010OO1N1N10O010O013L3N210O010M2M3N3L3M4M2M3N3L3N3L3N2M`UR5"}, "image_id": 560, "id": 9836}, {"iscrowd": 0, "category_id": 1, "bbox": [485.0, 326.0, 27.0, 44.0], "area": 768, "segmentation": {"size": [512, 512], "counts": "Xkb71l?3N3L3N3M2M3N3L3N3L3N2N3O001N100O2O001N101OO0N3N2M20kE"}, "image_id": 560, "id": 9837}, {"iscrowd": 0, "category_id": 1, "bbox": [295.0, 357.0, 94.0, 63.0], "area": 3123, "segmentation": {"size": [512, 512], "counts": "Slc43j?3N3M2M4M2M3N3M2M4M21O010O01O010O01O010O01O010O01O010O01O0M3N30O01O010O01O010O01O01O010O01O010O01O010O01O01O010O010O01O01OO2M2M4M2N3L3N2N3L3N30M2N3L3N2M4M2N3L3N2N3L3N3L3N3M2M3N3L3N3M2M4McTm1"}, "image_id": 560, "id": 9838}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 373.0, 48.0, 61.0], "area": 1433, "segmentation": {"size": [512, 512], "counts": "e;b1^>010O010O0001QOcA:\\>CgA=Z>@hAa0W>\\OmAc0S>[OoAe0R>XOQBh0n=UOUBk0`>10O010O0010O0010O0010O00O2M201O01O010O01O010O01O01O0O2M2N2M4M2M4M2MVcW7"}, "image_id": 560, "id": 9839}, {"iscrowd": 0, "category_id": 1, "bbox": [246.0, 382.0, 44.0, 54.0], "area": 1633, "segmentation": 
{"size": [512, 512], "counts": "X\\k32i01W>2fA0\\ONf>5eAIH<`>NeAIH=`>MdA<\\>DbA>_>A^Ac0a>>10O00010O00010OO2M200010O0010O0010O00010O0010O0010O00010O00WO\\A4g>I[A4h>I\\A3h>I[A5g>I\\A3g>J\\A3Z?NSc^3"}, "image_id": 560, "id": 9840}, {"iscrowd": 0, "category_id": 1, "bbox": [392.0, 386.0, 90.0, 73.0], "area": 3566, "segmentation": {"size": [512, 512], "counts": "W]T62l?2M4M2M3N3L3N3L3N2M4N110O00010O0010O0010O0010O0010O0010O0010O0010O0010O0GQOaAP1[>TOeAk0X>XOhAh0V>ZOjAg0MQOn=a0O01O01O010O01O01O010O00010O010O00010O010O00010O00N3L3N3L3N2M4M2M3N3L3N3L3N2M4M2M4L3N2M4M2M3N_c>"}, "image_id": 560, "id": 9841}, {"iscrowd": 0, "category_id": 1, "bbox": [79.0, 401.0, 72.0, 64.0], "area": 2497, "segmentation": {"size": [512, 512], "counts": "fmW12l?2M4M2M4M2M3N3L3N3L3N2M4M2M4M20010O0010O0010O0010O0010O00010O010O00010O010O00010O010O00010O010O00010O010OTO_A9a>DbA<_>@dAa0[>]OhAb0Y>ZOjAg0U>WOnAh0e>0010O010O0N2M4M2M4M2M3N3LYRd5"}, "image_id": 560, "id": 9842}, {"iscrowd": 0, "category_id": 1, "bbox": [297.0, 413.0, 51.0, 48.0], "area": 1567, "segmentation": {"size": [512, 512], "counts": "omd42k?3JL^@6_?8M2M4M2M3N3L3M4M20N2010O01OM4N1010O00010O010O00010O01O01O010O01O01O010O00001UOYA:h>C[A=f>@]A>d>_O_A>d>@^A=U?M2M4M2M3NZba2"}, "image_id": 560, "id": 9843}, {"iscrowd": 0, "category_id": 1, "bbox": [347.0, 444.0, 44.0, 68.0], "area": 2068, "segmentation": {"size": [512, 512], "counts": "lo]54j?2M3`NH]B04:]=J[BO4:_=KYBM5;`=LVBM79a=OSBJ9:d=;YBGf=:WBIi=7TBLk=U100O100001O001O001O001O00001O001O001O0IPB^NQ>_1RB`Nn=]1UBcNl=Z1VBfNj=W1ZBhNf=U1]BkNd=R1_BmNa=P1bBPO_=m0cBSO]=j0o0^On@1U?Mm@1V?Km@2c?L]Ql1"}, "image_id": 560, "id": 9844}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 457.0, 13.0, 31.0], "area": 226, "segmentation": {"size": [512, 512], "counts": "Y>n0R?UOQAe0V?O01O01O0M3M4M2M3M4L3M`Qi7"}, "image_id": 560, "id": 9845}, {"iscrowd": 0, "category_id": 1, "bbox": [56.0, 461.0, 71.0, 51.0], "area": 2060, "segmentation": {"size": [512, 512], "counts": "f_l02l?2[@MV?2d@44LV?;f@HW?b0N2N2M3N2M3N200001O0000N2N2M3N2M3N2001O001O001O00001O001O00001O001O001O00001O001CZA^Of>`0\\A@d>>_AAb>;aAE_>9cAG^>5fAJZ>4hALX>1lANU>OmA1S>MoA3R>IRB6l>01O001O00001O001O00001O001O0000QPP6"}, "image_id": 560, "id": 9846}, {"iscrowd": 0, "category_id": 1, "bbox": [206.0, 474.0, 103.0, 38.0], "area": 2693, "segmentation": {"size": [512, 512], "counts": "`_W33j?3M3M4L3M3N30O00k@]Om>d0o@_OQ?h010O00010O00010O0001O00001O000VOm@f0W?M301OM3M4L3M3M40O00010O00O2O0001O00001O00001O0000001O00001O00001O00001O00001O00001O00001O00001O00001O000\\Oj@>W?^Ol@b0Y?0O10000000000O10000000000O100000000O14L6J5K6JRPU3"}, "image_id": 560, "id": 9847}, {"iscrowd": 0, "category_id": 1, "bbox": [413.0, 489.0, 36.0, 23.0], "area": 532, "segmentation": {"size": [512, 512], "counts": "no^62l?2M3000000N2N2M3N2N2M3N21O00001O001O001O00001O001O001O00001O001O0J`@La?0e@M[?1hPo0"}, "image_id": 560, "id": 9848}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "01ooo7"}, "image_id": 563, "id": 9849}, {"iscrowd": 0, "category_id": 1, "bbox": [20.0, 0.0, 28.0, 18.0], "area": 362, "segmentation": {"size": [512, 512], "counts": "UP:3k?5J301O001O00001O001O00001O00001O001O0000O1N2M3N2M3N2MSPX7"}, "image_id": 563, "id": 9850}, {"iscrowd": 0, "category_id": 1, "bbox": [54.0, 0.0, 35.0, 41.0], "area": 955, "segmentation": {"size": [512, 512], "counts": "^Pk01a00i>3TA0j>2TA1h>3UAOi>3TA1h>3UAOj>d001O001O001O00001O001O00001OO1N2N2M3N2IRAYOQ?e06M3N2N200K`@J`?3d@M[?1g@L]?0Z`c6"}, "image_id": 563, "id": 9851}, {"iscrowd": 0, 
"category_id": 1, "bbox": [182.0, 0.0, 64.0, 59.0], "area": 2176, "segmentation": {"size": [512, 512], "counts": "PPk21o?2N1O2N2N1O2N2N1O2N2N1O2g@]OQ?e0l@^OR?i0O2N2N1O2N2N1O2N2N1O2N2N1O2N2N1O2N2N00O1O100O100O1O100O100O1O100O3N2M2N3N2oNYAd0i>ZOZAd0h>ZOYAd0j>YOYAd0T?N1N3N2M2N3N2M2O2MjnT4"}, "image_id": 563, "id": 9852}, {"iscrowd": 0, "category_id": 1, "bbox": [257.0, 0.0, 38.0, 17.0], "area": 348, "segmentation": {"size": [512, 512], "counts": "P`P42n?1O2N2N1O2N2N2N1O2N0000O1O100O100O1O100O100O1O100O100O1O100O100O1O100O100OQ`\\3"}, "image_id": 563, "id": 9853}, {"iscrowd": 0, "category_id": 1, "bbox": [440.0, 0.0, 49.0, 14.0], "area": 343, "segmentation": {"size": [512, 512], "counts": "PPl61o?001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00O1N2M3N2N200001O001O00001OM3NR`;"}, "image_id": 563, "id": 9854}, {"iscrowd": 0, "category_id": 1, "bbox": [380.0, 4.0, 64.0, 53.0], "area": 1636, "segmentation": {"size": [512, 512], "counts": "lPn53l?1N3M2O2M3M2O2M3N1N002O2M3N1N3O10O10O010RAPOk>S1O10O01000N1O2N20O10O010O10O10O10O10O010O10O10O10O10O010O1N1N3M3N1N3N1N3M3N1N3N2M2N3NonQ1"}, "image_id": 563, "id": 9855}, {"iscrowd": 0, "category_id": 1, "bbox": [494.0, 8.0, 18.0, 27.0], "area": 320, "segmentation": {"size": [512, 512], "counts": "iPg72k?4M2N3L3N3M2M310O010O0010O0010O010B"}, "image_id": 563, "id": 9856}, {"iscrowd": 0, "category_id": 1, "bbox": [113.0, 11.0, 72.0, 85.0], "area": 2957, "segmentation": {"size": [512, 512], "counts": "d`h11n?3N1N3M3[@G_??M3N1N3M3N1N3O1O01000O0O2N2M2O2M3M2O2M3N2M2N3N2M2O2M3M2O2N20O10O10O10OO2O10O01O1M2O2M3M2O2M3cNfAP1[>oNgAn0\\>oNfAo0]>nNfAP1[>oNdAQ1f>0O01O01O0101N3M3N1Ek@HX?6j@HX?5k@HW?7j@GY?6;N1Nm]S5"}, "image_id": 563, "id": 9857}, {"iscrowd": 0, "category_id": 1, "bbox": [442.0, 12.0, 16.0, 13.0], "area": 119, "segmentation": {"size": [512, 512], "counts": "aPm63k?2N3N1010O0010O010O0010O0N3L3Naoj0"}, "image_id": 563, "id": 9858}, {"iscrowd": 0, "category_id": 1, "bbox": [455.0, 16.0, 27.0, 27.0], "area": 456, "segmentation": {"size": [512, 512], "counts": "QaS73k?3L3N2M4M2M4M20010O0010O0010O0010O001O0M3N3L3N3L3N2M\\o>"}, "image_id": 563, "id": 9859}, {"iscrowd": 0, "category_id": 1, "bbox": [496.0, 33.0, 10.0, 14.0], "area": 113, "segmentation": {"size": [512, 512], "counts": "RQh76j?7I00O010000001O7Ign2"}, "image_id": 563, "id": 9860}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 38.0, 56.0, 51.0], "area": 1754, "segmentation": {"size": [512, 512], "counts": "f1b0^?1M2M4M2M3N3L3O20O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O010O000O2M2M4M2M4M2N2M4M2M4M2M3NYnS7"}, "image_id": 563, "id": 9861}, {"iscrowd": 0, "category_id": 1, "bbox": [361.0, 43.0, 65.0, 62.0], "area": 1903, "segmentation": {"size": [512, 512], "counts": "Zbd51n?2N2N2N1O2N2N2N2N2N2N2I\\Oo@f0o>\\Oo@f0o>7N2N2N2N1O0000000000000001O2N2NaAnNS>P1mAROS>n0kASOV>m0hAUOX>k0fAWOZ>i0dAYO\\>g0bA[O^>e0`A]O`>R1000000000N2N2N2N2N1O2N2N2N2N2N2N2N2N2N2N2N2N2N2N2M3N2NbmZ1"}, "image_id": 563, "id": 9862}, {"iscrowd": 0, "category_id": 1, "bbox": [470.0, 60.0, 42.0, 76.0], "area": 1990, "segmentation": {"size": [512, 512], "counts": "WR[74l?5J5L4LO1000L400O01003M5K4K6K5K5K5K4K2O00O1000O10O100000O101O5K5J5LO1000004L5J5L5KbM"}, "image_id": 563, "id": 9863}, {"iscrowd": 0, "category_id": 1, "bbox": [73.0, 65.0, 70.0, 55.0], "area": 2213, "segmentation": {"size": [512, 512], "counts": "UcT13k?2N3L3N2N3M2M4M2N3L3N201O0010O010O0010O0010O010O0010O0010O01N1IoN_AR1`>PO]AQ1b>7O2O0O2O0O1O10O0010O00010O0010O2N3N2M3M2O2M3M2O2M3M3N1N3M3N2M2N3N2M3M2Om\\h5"}, "image_id": 563, "id": 9864}, 
{"iscrowd": 0, "category_id": 1, "bbox": [286.0, 72.0, 77.0, 82.0], "area": 2884, "segmentation": {"size": [512, 512], "counts": "hS_43l?1O2N2N2N2M3N1O2j@@i>b0UA@i>b0UA@h>c0VA^Oi>d0UA^Oi>n0N2N2N2M3N1O200O100000OO2M2OO100000O10O10000000O10O100000O10O10000000O10O3N2N2N1O2M3N2N2N2N1O2M3N001O2N101M3N2N2N1@c@kNjAT1W>jNkAV1U>hNlAY1T>eNnA[1JeN^>[1`AgN`>]1000O01000O1M3N20O10O1000000O0WOZA7h>GZA7h>GZA7h>F[A8g>F[A7h>GYA8h>GZA7h>GZA7Y?N2M3N1OXj="}, "image_id": 563, "id": 9868}, {"iscrowd": 0, "category_id": 1, "bbox": [322.0, 130.0, 23.0, 24.0], "area": 272, "segmentation": {"size": [512, 512], "counts": "bTQ51m?3N2N2N2N1N3N2N2N1N1000O101O2N2N2M2O2N2N2M3Nh[c2"}, "image_id": 563, "id": 9869}, {"iscrowd": 0, "category_id": 1, "bbox": [52.0, 134.0, 61.0, 44.0], "area": 1861, "segmentation": {"size": [512, 512], "counts": "\\Tj01o?4L4K5L4L4L4K5L3M4L2M0100000O05L00O10O1000O01000O10O1000O10O1000O10O1000O10O10003M0O0100000O0100000O01002N4K5L4L4K5L4L4LlZW6"}, "image_id": 563, "id": 9870}, {"iscrowd": 0, "category_id": 1, "bbox": [325.0, 146.0, 73.0, 71.0], "area": 2684, "segmentation": {"size": [512, 512], "counts": "meR53k?2O2M2N3M2O2M3M2N3N1N3M2N3N1N3M3O0010O03NO010O10O10O010O010O10M2N3M3N1N3M2N3N1N3M3M2O0O02N2O2M3M2N3N1N3M2N3N2M2N5K3M2O2M2N3M3N1N3M2N3N1N3M2N3N2Mnjh1"}, "image_id": 563, "id": 9871}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 165.0, 7.0, 20.0], "area": 76, "segmentation": {"size": [512, 512], "counts": "U5d0\\?M4M2M4L3N2MlZl7"}, "image_id": 563, "id": 9872}, {"iscrowd": 0, "category_id": 1, "bbox": [49.0, 182.0, 78.0, 64.0], "area": 2742, "segmentation": {"size": [512, 512], "counts": "ifh01m?2N3L3N3M2N3M2N3M2N3M2N3M201O010O010O0010O010O010O010O010O010O010O010O010O0010O010O010N1N3M2N3M2N3M2N3M2N3L310O010O01N1N3M2N3M2N3M2N3M2N2N3_OTAJn>4UAIn>4TAJn>4UAIn>4TAJn>4a0M2NQZP6"}, "image_id": 563, "id": 9873}, {"iscrowd": 0, "category_id": 1, "bbox": [392.0, 192.0, 65.0, 39.0], "area": 1996, "segmentation": {"size": [512, 512], "counts": "lVT69\\?;Eo0PBnNS>Q1>01O0N3L3N2N3L3N3M2M4M2N2M4MWh\\7"}, "image_id": 563, "id": 9877}, {"iscrowd": 0, "category_id": 1, "bbox": [40.0, 239.0, 49.0, 38.0], "area": 950, "segmentation": {"size": [512, 512], "counts": "PXd02l?2N3L3N2N3M2M4N101O01Og@\\OV?g01Gi@GW?7i@LV??1_Oi@8V?Fj@2XBL\\O4[>2UB:j=FTBo=AnAb0P>AmAb0P>@nAb0o=AnAb0P>i0ZBUNS=m1jBVNT=m1iBVNV=j1gBYNY=h1dB[N\\=T20010O010O0010O0O2M2N3L3N2AXBdNk=Y1WBdNl=Y1WBeNl=X1WBeNk=Y1WBdNl=Y1`0M2DXA^Ok>>XA_Ok>?WA_Ol>>WA_Ok>>=01O010Cb@5e?N3LUWY3"}, "image_id": 563, "id": 9879}, {"iscrowd": 0, "category_id": 1, "bbox": [155.0, 275.0, 38.0, 31.0], "area": 675, "segmentation": {"size": [512, 512], "counts": "Ti]22k?3N3M2M4M2N2M4O0010O010O0010O0010O0010O0010O010O0010O0010O0O2M2N3L3N2M4M2NSWo4"}, "image_id": 563, "id": 9880}, {"iscrowd": 0, "category_id": 1, "bbox": [49.0, 283.0, 54.0, 70.0], "area": 2248, "segmentation": {"size": [512, 512], "counts": "djh02l?2M4M2N2\\OE`A?\\>DbA>\\>EaA>\\>DaA?\\>DbA?[>DbA>\\>d0M4M2N3N110O01O010O01L3N3L3O101O01N1N3O00010O0M4M2N3L3N2N3CaATOb>j0aASOb>i0aAUOa>i0^OTAb0l>=L3L5L4L3M4K4M000O10OcF"}, "image_id": 563, "id": 9882}, {"iscrowd": 0, "category_id": 1, "bbox": [82.0, 308.0, 73.0, 76.0], "area": 2722, "segmentation": {"size": [512, 512], "counts": "V[Y12l?2M4M2M3N3L3N3L30001O010O01O01O010O01On@XOk>j0QAYOo>m0O010O01O01O010N1N2M4M2M4O0GdNoA[1n=hNPB[1m=hNPBZ1m=?M2O1010O01O0M3N3N01M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2MUVb5"}, "image_id": 563, "id": 9883}, {"iscrowd": 0, "category_id": 1, "bbox": [315.0, 311.0, 51.0, 44.0], "area": 1348, "segmentation": {"size": [512, 512], "counts": 
"`jm43j?3N3L3M4M2N2M4M2M4M200010O01O010O01O010O01O01O010O010O01O01O01N1M4O01O010nNVAl0j>ROXAo0l>01M2N3L3N3M2M3N3L3N3M2MleX2"}, "image_id": 563, "id": 9884}, {"iscrowd": 0, "category_id": 1, "bbox": [374.0, 323.0, 51.0, 70.0], "area": 2821, "segmentation": {"size": [512, 512], "counts": "Y[k5P1P?000000000TOBVB>j=BVB=k=CTB>l=BTB>l=BTB>l=BTB>l=BTB>l=BTB>l=BTB>l=BTB>l=BTB>l=BTB>l=>=00O1000000009K4K2O000000O0100000O0100000O012N4L4K5L4L4L5J5L4L4L4K5L4L4L4K6Kkc;"}, "image_id": 563, "id": 9886}, {"iscrowd": 0, "category_id": 1, "bbox": [157.0, 334.0, 50.0, 82.0], "area": 2148, "segmentation": {"size": [512, 512], "counts": "el^23j?3N3L3N2FEm@>o>En@>P?;L3N2N3L3ZOiNaBZ1[=iNbBZ1\\=iNaBY1]=iN`B[1\\=hNbBZ1\\=iN`B[1]=e000010O001L3N3M2M3N3L3N3L3N2M4M2N3L3N2M4M2M4M2M3N3M2M4M2M4M2M3NXUh4"}, "image_id": 563, "id": 9887}, {"iscrowd": 0, "category_id": 1, "bbox": [192.0, 349.0, 64.0, 98.0], "area": 3034, "segmentation": {"size": [512, 512], "counts": "`]P31m?2N3M2N3L3N3M2N2M4M2N3M2N3L3N3^OkNUBW1i=kNWBV1f=mNVBV1h=lNVBV1g=nNVBU1g=c0N3M2N3L3N3M2N2M4M2N3O00N3L3N2N3nNiBWOY=g0iBVO[=f0hBXOZ=f0iBWOZ=g0gBWO[=i0fBTO]=l0bBQOa=o0`BnNb=S1]BkNf=T1[BiNg=X1XBeNl=X1?N3L3N3M2N3L3N2N3M2N3L3N3M2N3Lhdo3"}, "image_id": 563, "id": 9888}, {"iscrowd": 0, "category_id": 1, "bbox": [260.0, 353.0, 19.0, 22.0], "area": 239, "segmentation": {"size": [512, 512], "counts": "a[R42l?3M2M3N3M2M4M201O01O0O2M2M3N3M2M4M2NmTd3"}, "image_id": 563, "id": 9889}, {"iscrowd": 0, "category_id": 1, "bbox": [332.0, 365.0, 29.0, 27.0], "area": 476, "segmentation": {"size": [512, 512], "counts": "n[V52l?3M2M4M2M3N3M2O2O0010O0010O010O0010O001O0N3M2M3N3M2M4M2N]T[2"}, "image_id": 563, "id": 9890}, {"iscrowd": 0, "category_id": 1, "bbox": [494.0, 376.0, 18.0, 24.0], "area": 339, "segmentation": {"size": [512, 512], "counts": "l[g74l?5K4K6K2N00000O0100000O0100000O010XD"}, "image_id": 563, "id": 9891}, {"iscrowd": 0, "category_id": 1, "bbox": [259.0, 384.0, 61.0, 91.0], "area": 2717, "segmentation": {"size": [512, 512], "counts": "[nQ43k?2N3L3N2M4M2M4M2M4M2N2M4M2O20OBkNoAX1n=jNPBX1n=kNnAX1o=kNoAX1n=>N3L3N3M2M3N3M2M4M210OO2M2M3N3mNcB_O_=>dB_O`=>cB_O_=?cB_O`=`0aB\\Ob=e0]BYOe=g0[BWOh=h0YBTOj=m0UBQOn=n0SBoNo=R1PBkNT>T1<10M2N2N3M2M4M2N3L3N2N3L3N3Mdco2"}, "image_id": 563, "id": 9892}, {"iscrowd": 0, "category_id": 1, "bbox": [323.0, 385.0, 17.0, 23.0], "area": 225, "segmentation": {"size": [512, 512], "counts": "alQ53k?3M2M3N3M2M4M21O01OO2L3N3L3N2M4Mmce2"}, "image_id": 563, "id": 9893}, {"iscrowd": 0, "category_id": 1, "bbox": [322.0, 406.0, 66.0, 93.0], "area": 3357, "segmentation": {"size": [512, 512], "counts": "T_Q52k?4M2M3N3M2M4M2^N]OdCf0Z<]OcCf0Z<\\OdCf0[<\\OaCh0_g0aAYO`>d0cA[O]>c0eAZO^>c0eAZO_>c0`0M4M2N3L3N2M4Mnbm1"}, "image_id": 563, "id": 9894}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 421.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "U=2ibo7"}, "image_id": 563, "id": 9895}, {"iscrowd": 0, "category_id": 1, "bbox": [486.0, 429.0, 26.0, 27.0], "area": 375, "segmentation": {"size": [512, 512], "counts": "j]c71n?2N2N2M2O2N2N2N2N2N2M3O10O10000N1O2N2N2M3N2N2N2N2NXB"}, "image_id": 563, "id": 9896}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 432.0, 57.0, 80.0], "area": 2412, "segmentation": {"size": [512, 512], "counts": "Y?g0Z?O001O00001O001O_O^ObAd0\\>^OaAe0\\>^ObAd0\\>_O`Ad0]>_OaAc0]>a0M3N2N2M3N2M3N2M3N2N2M3N2M3001N1M3I\\BPNg=m1\\BPNf=m19M2M3N3O001L3N3M2M3N3L3N3L3N2M4M2N3L3N2M4M2M4MVRS7"}, "image_id": 563, "id": 9897}, {"iscrowd": 0, "category_id": 1, "bbox": [368.0, 444.0, 66.0, 68.0], "area": 2202, "segmentation": {"size": 
[512, 512], "counts": "g_h51m?3L3N3M2M4M2M3N3O010O010O0i@YOU?i01O001O00001O0000M3N2N2M2O0O10002M3N3M2M4M2M4M2N2M4M2N30O00010O010N1N2M4M2CiAnN[>n0iAoNY>o0iAoNZ>n0=M3N3L3N3M2M4M2N2M4M2MmaV1"}, "image_id": 563, "id": 9898}, {"iscrowd": 0, "category_id": 1, "bbox": [435.0, 449.0, 66.0, 57.0], "area": 2308, "segmentation": {"size": [512, 512], "counts": "_oi63k?2M4M2M3N3M2M4M2M4M2M3N3L3N3L3N2M4O001O01ON0001N3N3L3N3M200010O010O0010O00O2O0N3M2010O00010O0N3M2N6I3O20O010O00N30O010ON3L3N2Dg@O\\?Mh@O[?Og@Om`5"}, "image_id": 563, "id": 9899}, {"iscrowd": 0, "category_id": 1, "bbox": [47.0, 458.0, 57.0, 54.0], "area": 1753, "segmentation": {"size": [512, 512], "counts": "oog01m?2M3N2N2M3N20000001OZOCiA=U>FkA9R>JnA6P>MoA3o=OoA3n=0PB2n=1nA2P>0nA2P>1mA1T>NiA5W>LfA6Z>h01O001O001O00001O001O0O20O0001O001O001M2N2N3L3N3M2M4M2N3L3N2N3L3N3M2MYa[6"}, "image_id": 563, "id": 9900}, {"iscrowd": 0, "category_id": 1, "bbox": [495.0, 472.0, 17.0, 37.0], "area": 382, "segmentation": {"size": [512, 512], "counts": "eog71l?4M2N3L3N3M2M3N3M2M4M2N3N10010O0WA"}, "image_id": 563, "id": 9901}, {"iscrowd": 0, "category_id": 1, "bbox": [110.0, 495.0, 45.0, 17.0], "area": 387, "segmentation": {"size": [512, 512], "counts": "n_g12l?2O1O11O001O001O001ON2N2N2N2N2O1N2N2001O001O001O001O1O001O001O001O001O001O001O1O001O001MUPb5"}, "image_id": 563, "id": 9902}, {"iscrowd": 0, "category_id": 1, "bbox": [63.0, 0.0, 68.0, 17.0], "area": 615, "segmentation": {"size": [512, 512], "counts": "P`o01o?0000001O00001O0000001O0000001O00001O0000001O0000001O0000001O00001O0000001O0000001O0000001O00001O0000001O0000001O00001O00000000L4M3L4LT`n5"}, "image_id": 566, "id": 9903}, {"iscrowd": 0, "category_id": 1, "bbox": [126.0, 0.0, 76.0, 62.0], "area": 2463, "segmentation": {"size": [512, 512], "counts": "cQo12l?2M4M2M4M2M3N3M2O20O00010O010O010OM3N3L3N3M2M3N1N100O0100O02O2O101O001O00001O001O00001O001O00M3N2M3N2M3N2M3N2N2M3N2M3N2O11O001O001O00001ON2N2M3N2Da@4b?J`@4h?Lmoj4"}, "image_id": 566, "id": 9904}, {"iscrowd": 0, "category_id": 1, "bbox": [266.0, 0.0, 5.0, 2.0], "area": 7, "segmentation": {"size": [512, 512], "counts": "PPU41o?00001O0P`h3"}, "image_id": 566, "id": 9905}, {"iscrowd": 0, "category_id": 1, "bbox": [277.0, 0.0, 67.0, 32.0], "area": 1277, "segmentation": {"size": [512, 512], "counts": "V`Z43k?3L3O2O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O03NO00010O010O00010O01O0N2M4M2M4M2M3N3Lioc2"}, "image_id": 566, "id": 9906}, {"iscrowd": 0, "category_id": 1, "bbox": [410.0, 0.0, 9.0, 3.0], "area": 19, "segmentation": {"size": [512, 512], "counts": "PP]61o?001O001O000000NR`^1"}, "image_id": 566, "id": 9907}, {"iscrowd": 0, "category_id": 1, "bbox": [344.0, 4.0, 72.0, 56.0], "area": 2334, "segmentation": {"size": [512, 512], "counts": "PQ\\54j?2M3N3M2M4M2M3N3L3N3N110O01O01O010O01O01O010O010O00010O010SAPOh>T11O01O010O0N3L3N2M4M2O20O0010O0010O010O0N2M4M2M4M2N3N100010OO2L3N2M4M2N3L3N3L3N2N3Lbo_1"}, "image_id": 566, "id": 9908}, {"iscrowd": 0, "category_id": 1, "bbox": [103.0, 18.0, 25.0, 25.0], "area": 437, "segmentation": {"size": [512, 512], "counts": "Tac12j?5K4L4M3M4O0000010O0000010O000010O00001L3M3L4L5K]oo5"}, "image_id": 566, "id": 9909}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 22.0, 3.0, 18.0], "area": 48, "segmentation": {"size": [512, 512], "counts": "f0b0^?00J`_n7"}, "image_id": 566, "id": 9910}, {"iscrowd": 0, "category_id": 1, "bbox": [44.0, 25.0, 73.0, 58.0], "area": 2040, "segmentation": {"size": [512, 512], "counts": 
"gQf01l?4M2N3L3N3M2M3N3M2N3L3N3M21O010O010O01O01O010O010O010O01O01O010O010O01O01nNWAk0i>SOZAm0l>0010O010O0010O0010O010O0010O0010O010O010O0010OO1M4M2N3L3N3M2M3N3M2MZ^U6"}, "image_id": 566, "id": 9911}, {"iscrowd": 0, "category_id": 1, "bbox": [267.0, 47.0, 59.0, 51.0], "area": 1578, "segmentation": {"size": [512, 512], "counts": "YbU42l?3L3N3L3N2M4M2M4M2M3O20O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010OQAROl>R1O01O010O01O01TOUAa0k>]OWAd0i>XOZAh0P?0O0001M2M4M2M3N3L3N3Lgml2"}, "image_id": 566, "id": 9912}, {"iscrowd": 0, "category_id": 1, "bbox": [165.0, 66.0, 86.0, 57.0], "area": 2893, "segmentation": {"size": [512, 512], "counts": "dbb23j?3N3L3N2c@DS?`0j@BV?e0O01O010O01i@XOT?k00O010O0001L3N3L3O110O0010O0010O0010O00N30O010O00010O0010O001N11O01O010O01O01O010O01O01O010L3N201O010O01O01O010O01M2M3N3L3N3L3N3L3M3N3L3010ON3L3M3Ma]R4"}, "image_id": 566, "id": 9913}, {"iscrowd": 0, "category_id": 1, "bbox": [325.0, 72.0, 60.0, 50.0], "area": 1551, "segmentation": {"size": [512, 512], "counts": "RcR51m?2M4M2M3N3M201O01OO2L3N3L3N2N30O01O01O010O010O00010O010O01O01O010O01O01OSAQOi>S11O01O010O01O01OUOUAa0l>[OWAf0h>XO[Ag0o>0010O001M2N3L3N2M4M2M4Mn\\o1"}, "image_id": 566, "id": 9914}, {"iscrowd": 0, "category_id": 1, "bbox": [76.0, 76.0, 15.0, 12.0], "area": 104, "segmentation": {"size": [512, 512], "counts": "cRV11l?3N2N3O010O00010O010O00N3M2Mc]b6"}, "image_id": 566, "id": 9915}, {"iscrowd": 0, "category_id": 1, "bbox": [121.0, 77.0, 42.0, 31.0], "area": 733, "segmentation": {"size": [512, 512], "counts": "ibl12l?2N3L3N3M2O1010Ob@A[?b010O01O01N1O2O010O01O01O010O010O01O01O010O010O01O012M0N3M2N2M4M2NW]^5"}, "image_id": 566, "id": 9916}, {"iscrowd": 0, "category_id": 1, "bbox": [241.0, 87.0, 52.0, 50.0], "area": 1439, "segmentation": {"size": [512, 512], "counts": "och33j?3N3M2M3N3N110O01O01O001L3N2M4M2M4M2M3N2M010O02100010O010O00010O010O0M2O1N2O2M4O000O2M2M4M2M3N3L3N3L3NU]]3"}, "image_id": 566, "id": 9917}, {"iscrowd": 0, "category_id": 1, "bbox": [34.0, 91.0, 47.0, 61.0], "area": 1634, "segmentation": {"size": [512, 512], "counts": "ZTa03k?2N3L3N3M2\\OBdA`0Z>BgA>U>F[AK;?X>LhA5U>MiA5U>NgA6U>MiA5W>LfA7Y>h01ON3N1010O010O010O00010N1N3L3N3M2N3L3N2N3M2M4M2N3M2M4M2N2N3L3N3Mj\\g6"}, "image_id": 566, "id": 9918}, {"iscrowd": 0, "category_id": 1, "bbox": [285.0, 101.0, 52.0, 48.0], "area": 1423, "segmentation": {"size": [512, 512], "counts": "Xd^43j?3N3M2M3N3L3N3M2M3N3L3N3L3N3N10010O010O00010O010O01M2N2M4M2N3L3N2M4N11L3O2O01O01O010OO2M2N2M4M2M4M2M4Mc\\g2"}, "image_id": 566, "id": 9919}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 107.0, 2.0, 10.0], "area": 14, "segmentation": {"size": [512, 512], "counts": "[3:g?Jiln7"}, "image_id": 566, "id": 9920}, {"iscrowd": 0, "category_id": 1, "bbox": [324.0, 117.0, 52.0, 43.0], "area": 1404, "segmentation": {"size": [512, 512], "counts": "gTR52k?3N3L3N3L300N3M2M4M2M3N3M2M4M200010O000N3L3N3N10010O010O0010O0010O0010O0010O010O000M4M2M4M2M4M2N2M4M2MPlS2"}, "image_id": 566, "id": 9921}, {"iscrowd": 0, "category_id": 1, "bbox": [190.0, 132.0, 15.0, 21.0], "area": 155, "segmentation": {"size": [512, 512], "counts": "fTo21m?2M3N3L3N3L3N2OO3M2M4M2M3N3Ll[i4"}, "image_id": 566, "id": 9922}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 148.0, 74.0, 57.0], "area": 2554, "segmentation": {"size": [512, 512], "counts": "ce]13j?3N3L3N2M4M2M4M2M3N3L310O00010O010O0010O0010O0010O0010O001N1N210O010O01O010O010O0010O001M2M3N3M2N3M20010O0010O0010ON2M4M2M4M2M3N3L3N3L3N2Fi@H[?4i@IY?5;L3NU[]5"}, "image_id": 566, "id": 9923}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 161.0, 48.0, 52.0], 
"area": 1489, "segmentation": {"size": [512, 512], "counts": "S5[1e>10O0N2N0O210010O00010O010O010O00010O01POXAg0g>VO]Ai0d>TO^Am0j>01O010O010O01O01O010O010O01O01M2N3M2M4M2N2M4M2N3LWjW7"}, "image_id": 566, "id": 9924}, {"iscrowd": 0, "category_id": 1, "bbox": [247.0, 163.0, 66.0, 54.0], "area": 2100, "segmentation": {"size": [512, 512], "counts": "Sfk32l?2M4M2M3N3L3N3L3N2M4M2M4M20001O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O01N1N3L3N2M40OO2M2M3N3Hd@G_?77L3N[ZS3"}, "image_id": 566, "id": 9925}, {"iscrowd": 0, "category_id": 1, "bbox": [314.0, 165.0, 30.0, 26.0], "area": 463, "segmentation": {"size": [512, 512], "counts": "cUm41m?2M4M2M3N3M6L010O00010O010O00010O010OO1M4O010O01OM4M2M4M2Mejc2"}, "image_id": 566, "id": 9926}, {"iscrowd": 0, "category_id": 1, "bbox": [198.0, 169.0, 21.0, 17.0], "area": 229, "segmentation": {"size": [512, 512], "counts": "dUS31l?3N2M4L30001O01O01O010O01O01O01O0O1N3L3MeZb4"}, "image_id": 566, "id": 9927}, {"iscrowd": 0, "category_id": 1, "bbox": [189.0, 200.0, 26.0, 26.0], "area": 438, "segmentation": {"size": [512, 512], "counts": "jfn22k?3M3M4L3M3M4O00010O00010O0010O00010O000N3L3M3M4L3MeYd4"}, "image_id": 566, "id": 9928}, {"iscrowd": 0, "category_id": 1, "bbox": [138.0, 209.0, 35.0, 39.0], "area": 793, "segmentation": {"size": [512, 512], "counts": "\\WU23k?2N3L3N3M2M3N3M2M4M2N3M21O010O010O01O01O010O0N3M2M3N3M2M4M2N3L3N2N3LXYY5"}, "image_id": 566, "id": 9929}, {"iscrowd": 0, "category_id": 1, "bbox": [47.0, 214.0, 28.0, 36.0], "area": 597, "segmentation": {"size": [512, 512], "counts": "bgg02l?2HO^@5_?7M3N3L3N3L3N2O2O0010O0010OO1N3L3N3L3N2M4M2M4M2M3NVYj6"}, "image_id": 566, "id": 9930}, {"iscrowd": 0, "category_id": 1, "bbox": [102.0, 216.0, 30.0, 21.0], "area": 295, "segmentation": {"size": [512, 512], "counts": "QWc11l?3N3M2N30O0010O010O010O0010O010O0010O010O0010O010O001M2N3Lohm5"}, "image_id": 566, "id": 9931}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 226.0, 28.0, 73.0], "area": 1019, "segmentation": {"size": [512, 512], "counts": "`7k1S>0O10O10O10O3N3M2M4M2N3L3N2M4M2N3L3N3M2M3N3M2M4M2N2M4Mgha7"}, "image_id": 566, "id": 9932}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 228.0, 78.0, 52.0], "area": 2188, "segmentation": {"size": [512, 512], "counts": "Rho21m?2M4M2M4L3N2M4M210O00010O010O00010M2N3L3N2M4M03L310O01O01O010O01O01O010O01O01O010O01O01O010O0010O0010O0010O0010O0010O0010O0010O0010O0N3L3M3N3L3N3L3N2M4M2MYXi3"}, "image_id": 566, "id": 9933}, {"iscrowd": 0, "category_id": 1, "bbox": [106.0, 257.0, 73.0, 50.0], "area": 1892, "segmentation": {"size": [512, 512], "counts": "gXe12k?3N2M4M2M4O01O010O01O01O010O01O01O01M2M4M2M3N3M2010O00010O010O00010O010O00010O010O0001O0O2M2O110O010O00010OlNYAo0m>0O01O01O010OZOUA7l>FWA9i>DZA=e>A]A?S?0O01N1N2M4M2M4MoVV5"}, "image_id": 566, "id": 9934}, {"iscrowd": 0, "category_id": 1, "bbox": [16.0, 283.0, 75.0, 67.0], "area": 2705, "segmentation": {"size": [512, 512], "counts": "^Z83j?3IL`@8[?9M2M4M2M3N3N100N3M2M4L3N2M4M2M4M2M301O010O00010O010O00O2L3N210AjAROV>k0mATOU>j0mASOU>n0jAoNY>Q1gAlN]>R19N3L3N2N3O010Oo@WOk>j0QAYOo>l010O01TOPAg0o>VOUAi0Q?0010O010O00010O010O00010O010M2M3N3L3N3L3N2M^Vb6"}, "image_id": 566, "id": 9935}, {"iscrowd": 0, "category_id": 1, "bbox": [203.0, 293.0, 88.0, 60.0], "area": 2328, "segmentation": {"size": [512, 512], "counts": "kiU33k?2M3N3M2M4M2M4O01O010O01O01O010O01O0N2N3L3N30O010O00010O010O00010O010O010O00010O010O00010O010O00010ORAROj>R110O01O01OROYAc0h>ZOZAg0e>WO]Ai0n>O010O01O010O01O010O01O01O010O01O01O010O0N3M2M3N3M2M4M2M3NhU^3"}, "image_id": 566, "id": 9936}, 
{"iscrowd": 0, "category_id": 1, "bbox": [374.0, 309.0, 68.0, 51.0], "area": 2118, "segmentation": {"size": [512, 512], "counts": "cZk51m?2N3M2N3M2N3M2M3NO12N3M3M2N30O010O01O010O01O010O01O010O0O1M4M2O20O010O0010M2O20O01O01O010O010O01O01O010O010O0O1N3M2M4TOSAa0o>]OSAa0P?\\OSA`0X?N3M2M3N3M2MjeR1"}, "image_id": 566, "id": 9937}, {"iscrowd": 0, "category_id": 1, "bbox": [107.0, 325.0, 78.0, 54.0], "area": 2377, "segmentation": {"size": [512, 512], "counts": "oje12k?3N3L3N2M4N1010O0010O0010O0010O0010O0001L3N3M2M4M2M3N3O010O00010O010O01O01O010O01O01O010O010O00010N1N3L3N2010O010O00010O010O0010OhNcAl0^>ROdAl0_>POdAm0j>M2N3L3N3L3N2M4M2M4MWUS5"}, "image_id": 566, "id": 9938}, {"iscrowd": 0, "category_id": 1, "bbox": [82.0, 330.0, 27.0, 27.0], "area": 434, "segmentation": {"size": [512, 512], "counts": "kZY12l?3L3N3M2M3N3M2O20O010O00010O010O00010N1N3L3N3M2M3N3M`UY6"}, "image_id": 566, "id": 9939}, {"iscrowd": 0, "category_id": 1, "bbox": [206.0, 349.0, 57.0, 68.0], "area": 2213, "segmentation": {"size": [512, 512], "counts": "_\\W33\\?0PA3l>0QA3m>0PA2m>1PA3m>OQA3l>b0O2O010O00001M2M4M2M3O2O010O01ON3M2N3L3N12M20010O01O01O010O01OSOoAKR>1RBNn=_OlA88:l=ZOoA99O01O01O0N2N3L3N3L3M3N3LhSl3"}, "image_id": 566, "id": 9940}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 357.0, 24.0, 26.0], "area": 465, "segmentation": {"size": [512, 512], "counts": "U;e0[?00001O01O0001O01O01O01O0001O01O01O01M2L4L4M4Kidc7"}, "image_id": 566, "id": 9941}, {"iscrowd": 0, "category_id": 1, "bbox": [360.0, 371.0, 31.0, 27.0], "area": 511, "segmentation": {"size": [512, 512], "counts": "U\\d52k?4M2M3M4L3N2N30O01O01O01O01O010O0N201O01O01O0Be@8[?Dh@<_?0O00O2L3N2MQTl1"}, "image_id": 566, "id": 9942}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 382.0, 18.0, 14.0], "area": 146, "segmentation": {"size": [512, 512], "counts": "Slm23k?3L300010O010O01O01O010O01O01M2N3LoSi4"}, "image_id": 566, "id": 9943}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 385.0, 18.0, 31.0], "area": 398, "segmentation": {"size": [512, 512], "counts": "Qg0VA\\Oj>d0TA_Ol>j0010O00010O010O00010O010O00010O010O0001N1N3L3O1010O01O01O010O01O01O010O011N1O01O010O01O01O01N1M4M2M3N3L3N3L3N2M4M2M4M2M\\cW6"}, "image_id": 566, "id": 9945}, {"iscrowd": 0, "category_id": 1, "bbox": [136.0, 414.0, 99.0, 72.0], "area": 3344, "segmentation": {"size": [512, 512], "counts": "T^T22k?3N3ZOJ]A9a>I\\A:a>J\\A9`>J]A9a>J\\A9a>I\\A:d>a010O0001N1N3L300010O00010O01O01O010O01O01O01O01O010O01O01O010O00010O01OZAPO\\>Q1`ARO1OV>o0gAUO0NZ>^101O01O0N3L30001O010O0mN^Ai0a>UOaAk0`>QOcAo0]>nNgAR1c>01O01O010O01O01O01O01O001L3M301OO1M4M2M3M4M210O00010O01ON3M2M4L3N2M4M2M3MQRZ4"}, "image_id": 566, "id": 9946}, {"iscrowd": 0, "category_id": 1, "bbox": [4.0, 421.0, 27.0, 28.0], "area": 466, "segmentation": {"size": [512, 512], "counts": "Z]23k?2_@MS?6j@MT?4j@NW?1f@3Y?:0010O0O20O010O0010O0010O010O0M4M2N2N3L3N3MdR`7"}, "image_id": 566, "id": 9947}, {"iscrowd": 0, "category_id": 1, "bbox": [14.0, 454.0, 79.0, 58.0], "area": 2391, "segmentation": {"size": [512, 512], "counts": "Y_71m?2M4M2M4M2N3L3N2M4M2M4M2N2M4N110O00010O010O010O00010O01mNZAk0e>SO]Am0c>QO`An0i>10O01O010O01O010O01O01O010O01O01O010O010O01O01O001O00001OO1N2M3O1001O0N3M2M4M2N2M4M2M4M2N2M4M2MSQa6"}, "image_id": 566, "id": 9948}, {"iscrowd": 0, "category_id": 1, "bbox": [125.0, 505.0, 20.0, 7.0], "area": 77, "segmentation": {"size": [512, 512], "counts": "oon11l?3N2O1001O00001O001O001O00001O001O0000QPg5"}, "image_id": 566, "id": 9949}, {"iscrowd": 0, "category_id": 1, "bbox": [171.0, 509.0, 8.0, 3.0], "area": 15, "segmentation": {"size": [512, 512], 
"counts": "noe22m?1001O001O0000QPV5"}, "image_id": 566, "id": 9950}, {"iscrowd": 0, "category_id": 1, "bbox": [17.0, 511.0, 3.0, 1.0], "area": 3, "segmentation": {"size": [512, 512], "counts": "oo81o?000Q`e7"}, "image_id": 566, "id": 9951}, {"iscrowd": 0, "category_id": 1, "bbox": [77.0, 40.0, 49.0, 69.0], "area": 1743, "segmentation": {"size": [512, 512], "counts": "obV14j?2N3M2M4M2N2M4M2N3M2M4M2N3L3N2N3L3N3M2N3L3N2N3L30M3N3M2M4M2N3M2M4M2N2M4M2N3M2M4M2N2M4M2N3M2M4M2N`nP6"}, "image_id": 567, "id": 9952}, {"iscrowd": 0, "category_id": 1, "bbox": [93.0, 101.0, 48.0, 70.0], "area": 1727, "segmentation": {"size": [512, 512], "counts": "nd^13k?3M2M3N3L3N3L3N3L3N2N3L3N3L3N2M4M2N3L3N2M4M2M4N01M4M2N2M4M2M4M2M3N3L3N3M2M3N3L3N3L3N3M2M3N3L3Nd\\i5"}, "image_id": 567, "id": 9953}, {"iscrowd": 0, "category_id": 1, "bbox": [56.0, 106.0, 49.0, 71.0], "area": 1780, "segmentation": {"size": [512, 512], "counts": "UUl02k?3N3L3N3L3N2M4M2N3L3N3L3N2M4M2M4M2N2M4M2M4M2M3N30N1M4M2M4M2M3N3M2M4M2M3N3L3N3L3N2N3L3N3L3N2M4M2M_\\[6"}, "image_id": 567, "id": 9954}, {"iscrowd": 0, "category_id": 1, "bbox": [126.0, 157.0, 135.0, 355.0], "area": 14196, "segmentation": {"size": [512, 512], "counts": "dVo11n?3N3L3N2M3N3L3N2N2M4M2M2O1N100O2O001N100O2O0O101N10001N100O2WOiNgBW1Y=h001O00006JcMC_2;aMEb28]MHf26ZMJi23VMMm21SMOo2OPM1T3LlL4V3JiL6[3GeL9]3EcL:a3C^L>d3@\\L?g3_OXLb0k3[OULe0m3YORLg0R4VOnKj0T4TOnKi0V4TOlKj0V4TOlKi0W4UOkKi0X4TOjKi0Y4UOjKh0Y4UOiKh0Z4VOhKh0j9N2M4M2M3N2M3N2M4MnYm3"}, "image_id": 567, "id": 9955}, {"iscrowd": 0, "category_id": 1, "bbox": [12.0, 0.0, 159.0, 68.0], "area": 3442, "segmentation": {"size": [512, 512], "counts": "SP62k?4O001O001O00001O001O00001O001O00001O001O001O00001O001O00001O001O01O010O01O010O01O010O01O01O010O01O01O010O01O010O01O010O01O01O01O0M4M2M4M2N2O2O001O00001O001O00001O001O001O00001O001O00001O001O00[O_AKa>2cAN\\>0fA0Z>MiA4V>JmA5S>HPB9o=ETB:l=DVB10O0010O0010O0010O0010O010O00010O010O0010O0010O0010O0010O010O00010O010O0010O0010O0010O0010M2N3M2M3N3L3N_^Z5"}, "image_id": 568, "id": 9956}, {"iscrowd": 0, "category_id": 1, "bbox": [177.0, 0.0, 112.0, 102.0], "area": 2897, "segmentation": {"size": [512, 512], "counts": "Xbh21l?4M2M4M2M301O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O010O00010O010O00010O010O0010O0010O0010O0010O0O2L3N2M4M2M4M2M3N3L3N3L2O00O010O010O010O010O010O010O010O010O01000O3N2M3N2M3N2M3N2M3N2M3N2M3N2M3N2M3N2M3NR`_3"}, "image_id": 568, "id": 9957}, {"iscrowd": 0, "category_id": 1, "bbox": [505.0, 19.0, 7.0, 32.0], "area": 128, "segmentation": {"size": [512, 512], "counts": "Ral73k?2_@MR?7j@LT?6j@MR?7k@KS?b0M4\\O"}, "image_id": 568, "id": 9958}, {"iscrowd": 0, "category_id": 1, "bbox": [433.0, 28.0, 56.0, 49.0], "area": 1592, "segmentation": {"size": [512, 512], "counts": "hah63k?2N3L3N3L3N2M4M2M4M2N3M20010O010O00010O010O00010O010O01O01O010O010O00010O010O00010O010O00POUAk0Q?M2M4M2M3N3M2M4M2M4M_^;"}, "image_id": 568, "id": 9959}, {"iscrowd": 0, "category_id": 1, "bbox": [401.0, 60.0, 49.0, 39.0], "area": 1119, "segmentation": {"size": [512, 512], "counts": "dbX61e?2`@1\\?2b@0\\?2b@1[?;M3N3M201O010O01O01O010O010O01O01ON3O010O010O01O01O010O010O0O2N11O010O01O0Ch@3X?Jj@6V?Hm@8R?EQA;[?L3N2N3L[mn0"}, "image_id": 568, "id": 9960}, {"iscrowd": 0, "category_id": 1, "bbox": [458.0, 88.0, 54.0, 35.0], "area": 1133, "segmentation": {"size": [512, 512], "counts": "ZSU71l?3N3M2M4M2M3N3N1010O0010O0010O010O0010O0010O010O00010O010O0001L301O0010O0010ON3M200010N101O010O01O0M3N3M2MoL"}, "image_id": 568, "id": 9961}, {"iscrowd": 0, "category_id": 1, "bbox": [364.0, 106.0, 55.0, 67.0], 
"area": 1746, "segmentation": {"size": [512, 512], "counts": "oTf53k?2M4M2N2M4M2N3M2M4M2N2M4M2N3L3O2O010O01O010O01N1N3M1N10000O010001N4M2N3M2M3N3M2M4M2N3M2O2O01ON3M2M4M2N3M2M3N_\\^1"}, "image_id": 568, "id": 9962}, {"iscrowd": 0, "category_id": 1, "bbox": [406.0, 125.0, 56.0, 66.0], "area": 1772, "segmentation": {"size": [512, 512], "counts": "bU[62l?2N3L3N2N3L3N3L3N2N3L3N3L3N3M2N2010O01O01O010O01O0M3N3L10O010O10002M3N3L3N3L3N2N3L3N3L3O101O010O0N3M2M3N3L3N3Lmkh0"}, "image_id": 568, "id": 9963}, {"iscrowd": 0, "category_id": 1, "bbox": [452.0, 132.0, 58.0, 61.0], "area": 1764, "segmentation": {"size": [512, 512], "counts": "bUR72l?2M4M2N3L3N2N3L3N3L3N3M2M3N3M2010O0010O010O0010O0010O0010ON3L10O01000O013M2M3N3L3N3M2M4M2N210O010O0N2N3M2M4M2N3L3Nek0"}, "image_id": 568, "id": 9964}, {"iscrowd": 0, "category_id": 1, "bbox": [97.0, 134.0, 29.0, 38.0], "area": 502, "segmentation": {"size": [512, 512], "counts": "Ue`11m?3M2N3M2N2N3M2N3L3N3N1010O001M1O00O010001O2M4M2Kc@E`?85N2M4MikP6"}, "image_id": 568, "id": 9965}, {"iscrowd": 0, "category_id": 1, "bbox": [90.0, 152.0, 77.0, 114.0], "area": 4267, "segmentation": {"size": [512, 512], "counts": "jW]12k?3N2N3L3N3L3N2M4M2M4M2N2M4M2M4M2M3N3M2M4M2M3N3L3N3L3N2N3L3N3L3N2M4M2N3L3N1N100O04M2M4M2M4M2M3N3L3N3L3N2M4M2N3L3N2M4M2M4M2M3N3L3N3L3N2N3L3N3L3N2M4M2M4M2MlZ\\5"}, "image_id": 568, "id": 9966}, {"iscrowd": 0, "category_id": 1, "bbox": [83.0, 180.0, 23.0, 29.0], "area": 397, "segmentation": {"size": [512, 512], "counts": "ZfY12k?3N3L3M4M2M3M4M2O110O0010O001L3N2M4L3N2M4M2MZjZ6"}, "image_id": 568, "id": 9967}, {"iscrowd": 0, "category_id": 1, "bbox": [359.0, 208.0, 53.0, 47.0], "area": 1548, "segmentation": {"size": [512, 512], "counts": "[gc51m?2M4BKm@7P?Lm@7Q?Ll@7Q?=010O00O2M2010O010O0010O0010O0010O0M4O00012M0010O0010O0010N1010O00010O010O0001ZOWA4h>I[A7f>F]A7e>F^A8d>F^A7X?M2M4Mcha1"}, "image_id": 568, "id": 9968}, {"iscrowd": 0, "category_id": 1, "bbox": [483.0, 244.0, 29.0, 55.0], "area": 1042, "segmentation": {"size": [512, 512], "counts": "lha73k?3M2M4M2N3L3N2M4M2N3L3N3L3N2N3L3N3O0010O010O00010O010M2M[H"}, "image_id": 568, "id": 9969}, {"iscrowd": 0, "category_id": 1, "bbox": [412.0, 252.0, 22.0, 23.0], "area": 296, "segmentation": {"size": [512, 512], "counts": "[X^62l?2M4M2M4M2N2O20O01O01O010O010N1M3N3L3N3M2MQhV1"}, "image_id": 568, "id": 9970}, {"iscrowd": 0, "category_id": 1, "bbox": [454.0, 263.0, 23.0, 27.0], "area": 377, "segmentation": {"size": [512, 512], "counts": "jXS73k?2M4L3N2M4M2M3O2O010O00010O01M2N2M4M2M4L3N2MgWa0"}, "image_id": 568, "id": 9971}, {"iscrowd": 0, "category_id": 1, "bbox": [331.0, 280.0, 52.0, 52.0], "area": 1616, "segmentation": {"size": [512, 512], "counts": "hiU52l?2M4M2N2N3M2N3L3N2N3M2N3M2M3O20O01O01O010O010O01O01O010O010O00010O010O0010O010O00010O01@WAHh>6ZAJg>2]ALd>2^ALd>1`AKd>2^ALd>1_ALe>1^WP2"}, "image_id": 568, "id": 9972}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 282.0, 36.0, 92.0], "area": 1674, "segmentation": {"size": [512, 512], "counts": "V9`2^=0O100O0102N2M3N3M2N3L3N3M2M4M2N2N3L3N3M2M4M2N2N3L3N3M2M4M2N3M2M3N3M2Nlf]7"}, "image_id": 568, "id": 9973}, {"iscrowd": 0, "category_id": 1, "bbox": [406.0, 292.0, 26.0, 26.0], "area": 418, "segmentation": {"size": [512, 512], "counts": "fY[61l?4M2M3N3L3N3L30010O01O01O010O00010O01O0M3M4M2M3M4MgfW1"}, "image_id": 568, "id": 9974}, {"iscrowd": 0, "category_id": 1, "bbox": [448.0, 318.0, 50.0, 65.0], "area": 1797, "segmentation": {"size": [512, 512], "counts": "b[P72l?3L3N2M4M2M4M2M3N3L3N2M4M2M4M2M3N3L3N3L3N210O0010O0010O00O2L3N3L3N2M4M2M4L3N2M4M2M4M2M3N3L3N3L3N2Mje6"}, 
"image_id": 568, "id": 9975}, {"iscrowd": 0, "category_id": 1, "bbox": [69.0, 323.0, 16.0, 15.0], "area": 137, "segmentation": {"size": [512, 512], "counts": "\\jR11m?2N3M2M3010O010O010O01OO2M2N3LkUe6"}, "image_id": 568, "id": 9976}, {"iscrowd": 0, "category_id": 1, "bbox": [409.0, 341.0, 29.0, 27.0], "area": 465, "segmentation": {"size": [512, 512], "counts": "Wk\\61m?2N3M2M4M2N2M4M20010O0010O0010O0010O00010O0N3L3N2M4L3N3LVeT1"}, "image_id": 568, "id": 9977}, {"iscrowd": 0, "category_id": 1, "bbox": [499.0, 359.0, 13.0, 37.0], "area": 246, "segmentation": {"size": [512, 512], "counts": "Uli71l?4M2M4M2M3N3M2M4M2M4M2M4hD"}, "image_id": 568, "id": 9978}, {"iscrowd": 0, "category_id": 1, "bbox": [383.0, 367.0, 15.0, 13.0], "area": 114, "segmentation": {"size": [512, 512], "counts": "fko52l?2N3L31O01O010O01O010O0N2N3M^dh1"}, "image_id": 568, "id": 9979}, {"iscrowd": 0, "category_id": 1, "bbox": [319.0, 368.0, 55.0, 46.0], "area": 1428, "segmentation": {"size": [512, 512], "counts": "\\lo43j?3N2N3L3N3L3N3O00010O010O00010O010O00010O0M4M2M4M20010O010O0O1M4M2N3O01O010O0N2N3L3N3L3N3M2M301O001M2Ge@K]?3:M2M[dT2"}, "image_id": 568, "id": 9980}, {"iscrowd": 0, "category_id": 1, "bbox": [117.0, 372.0, 14.0, 14.0], "area": 120, "segmentation": {"size": [512, 512], "counts": "mkj11m?3L3N3M201O010O00O2M2N3M2N[Tn5"}, "image_id": 568, "id": 9981}, {"iscrowd": 0, "category_id": 1, "bbox": [387.0, 374.0, 31.0, 49.0], "area": 788, "segmentation": {"size": [512, 512], "counts": "alQ61m?2M4M2N3L3n@B`>`0^AC^>a0^AB`>`0^AB`>a0\\AC`>`0^AB`>Q1N11O001M2M4M2N2M4M2M4M2N2M4M2M4M2M3N3MTd^1"}, "image_id": 568, "id": 9982}, {"iscrowd": 0, "category_id": 1, "bbox": [478.0, 395.0, 31.0, 25.0], "area": 435, "segmentation": {"size": [512, 512], "counts": "k\\_73j?3M3N3L3M4M21O01O01O010O01O01O01O01O001L3N2M4L3N210O010O000N^S1"}, "image_id": 568, "id": 9983}, {"iscrowd": 0, "category_id": 1, "bbox": [415.0, 405.0, 56.0, 47.0], "area": 1761, "segmentation": {"size": [512, 512], "counts": "fm_63i?4K5K5L5J5L4K5O2O0001O01O0001O01O000001O01O0003M010O0000010O000001O01O0000010O0000010O0000010O000K5L4L5K4K5L4LRSd0"}, "image_id": 568, "id": 9984}, {"iscrowd": 0, "category_id": 1, "bbox": [273.0, 410.0, 58.0, 49.0], "area": 1752, "segmentation": {"size": [512, 512], "counts": "jmX42k?4M2M4M2M3N3L3N3L3N2M4M2N3O00010O0010O010O00010O010O0010O0010O0010O0010O001O0N2N3O010O01O01M2N3L3N2N3L3N3L3N2M4M2MhRj2"}, "image_id": 568, "id": 9985}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 447.0, 34.0, 65.0], "area": 1448, "segmentation": {"size": [512, 512], "counts": "^>b1[>3N2N2M3N2M300001O001O001M2M3N3M2M4M2M3N3L3N3L3N2M4M2M4M2M4M2M3N3Lia^7"}, "image_id": 568, "id": 9986}, {"iscrowd": 0, "category_id": 1, "bbox": [339.0, 462.0, 34.0, 34.0], "area": 698, "segmentation": {"size": [512, 512], "counts": "ToY52k?4M2M3N3M2M4M2M3N3O010O010O00010O010O010O00010O01M2M3N3M2M4M2N3L3NZQU2"}, "image_id": 568, "id": 9987}, {"iscrowd": 0, "category_id": 1, "bbox": [33.0, 467.0, 33.0, 45.0], "area": 733, "segmentation": {"size": [512, 512], "counts": "no`02k?3N2N2M3N2M3N2M3N2N2M3N2M3N2M3N2N0002M3N3M2M3N3L3N3M2M3N3L3N3L3NZan6"}, "image_id": 568, "id": 9988}, {"iscrowd": 0, "category_id": 1, "bbox": [418.0, 490.0, 50.0, 22.0], "area": 665, "segmentation": {"size": [512, 512], "counts": "n_a62k?3N2M3N2N2M3N2N2O11O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O001O2N3MQ`e0"}, "image_id": 568, "id": 9989}, {"iscrowd": 0, "category_id": 1, "bbox": [232.0, 492.0, 55.0, 20.0], "area": 748, "segmentation": {"size": [512, 512], "counts": 
"o_d31l?3N2M3N2M3N2O100001O001O00001O001O00001O001O00001OO1M3N2N2N200001O001O00001O001O00001O001O00001O001O0N2M4M2M^P`3"}, "image_id": 568, "id": 9990}, {"iscrowd": 0, "category_id": 1, "bbox": [478.0, 493.0, 30.0, 19.0], "area": 387, "segmentation": {"size": [512, 512], "counts": "o__71l?3M3N2M3M3N2N21O00001O00001O001O00001O001O00001O00001M2M4M\\`1"}, "image_id": 568, "id": 9991}, {"iscrowd": 0, "category_id": 1, "bbox": [509.0, 505.0, 3.0, 7.0], "area": 13, "segmentation": {"size": [512, 512], "counts": "non72l?2M3"}, "image_id": 568, "id": 9992}, {"iscrowd": 0, "category_id": 1, "bbox": [34.0, 0.0, 15.0, 2.0], "area": 22, "segmentation": {"size": [512, 512], "counts": "PPa01o?000000000000001O00000000000P`W7"}, "image_id": 570, "id": 9993}, {"iscrowd": 0, "category_id": 1, "bbox": [53.0, 0.0, 30.0, 27.0], "area": 764, "segmentation": {"size": [512, 512], "counts": "V`j0c0W?60001O0000000000000000000000000000000000001O000000000000A_`f6"}, "image_id": 570, "id": 9994}, {"iscrowd": 0, "category_id": 1, "bbox": [106.0, 0.0, 39.0, 13.0], "area": 461, "segmentation": {"size": [512, 512], "counts": "PPe1:f?1O000000000000000000000000001O0000000000000000000000001O000000000000000000NR`g5"}, "image_id": 570, "id": 9995}, {"iscrowd": 0, "category_id": 1, "bbox": [154.0, 0.0, 44.0, 32.0], "area": 779, "segmentation": {"size": [512, 512], "counts": "aP]24d?80019F00000IAm@?P?DPAEQA;o>EQA8R?Hn@Ia?74000000000000001O0000000000000000000000000PPm4"}, "image_id": 570, "id": 9996}, {"iscrowd": 0, "category_id": 1, "bbox": [230.0, 0.0, 51.0, 23.0], "area": 811, "segmentation": {"size": [512, 512], "counts": "UPc39b?50000000000000000000N200001O00000000000000000000000000001O00000000009G00000000000000000000000000M3A_`c3"}, "image_id": 570, "id": 9997}, {"iscrowd": 0, "category_id": 1, "bbox": [494.0, 0.0, 18.0, 54.0], "area": 909, "segmentation": {"size": [512, 512], "counts": "TQg7a0j>e0A?00000000000000000001O0000000000"}, "image_id": 570, "id": 9998}, {"iscrowd": 0, "category_id": 1, "bbox": [125.0, 17.0, 22.0, 23.0], "area": 445, "segmentation": {"size": [512, 512], "counts": "Uan12_??K500001O00000001O00000000000000000001O0He_f5"}, "image_id": 570, "id": 9999}, {"iscrowd": 0, "category_id": 1, "bbox": [236.0, 19.0, 25.0, 14.0], "area": 342, "segmentation": {"size": [512, 512], "counts": "cPf3=c?00000000001O0000000000000000000000000000000000N__m3"}, "image_id": 570, "id": 10000}, {"iscrowd": 0, "category_id": 1, "bbox": [316.0, 22.0, 74.0, 41.0], "area": 2456, "segmentation": {"size": [512, 512], "counts": "^Qn4=^?50000001O00G9E;000000000001O000001O000000000000002N0000000001O000001O0000002N000000000000001O01O00000000000L400000000000000000001O0001O0000000_Oa0_Ogol1"}, "image_id": 570, "id": 10001}, {"iscrowd": 0, "category_id": 1, "bbox": [55.0, 29.0, 16.0, 15.0], "area": 223, "segmentation": {"size": [512, 512], "counts": "m`k0>b?00000000001O00000000000001O0G[_l6"}, "image_id": 570, "id": 10002}, {"iscrowd": 0, "category_id": 1, "bbox": [446.0, 31.0, 36.0, 61.0], "area": 1890, "segmentation": {"size": [512, 512], "counts": "^Qo6e0l>?0001O000\\AWOQ>\\100000000000004L00000000000000000001O000001O0000000000J6VOj0WOio>"}, "image_id": 570, "id": 10003}, {"iscrowd": 0, "category_id": 1, "bbox": [153.0, 34.0, 20.0, 21.0], "area": 419, "segmentation": {"size": [512, 512], "counts": "Ra\\2d0\\?1O00000000000000000000000000000000000n^Y5"}, "image_id": 570, "id": 10004}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 37.0, 7.0, 54.0], "area": 305, "segmentation": {"size": [512, 512], "counts": "U1f1Z>000001I6_Oa0^O\\_l7"}, 
"image_id": 570, "id": 10005}, {"iscrowd": 0, "category_id": 1, "bbox": [303.0, 40.0, 8.0, 12.0], "area": 64, "segmentation": {"size": [512, 512], "counts": "bag41e?:1O00000001OFQ_T3"}, "image_id": 570, "id": 10006}, {"iscrowd": 0, "category_id": 1, "bbox": [104.0, 45.0, 38.0, 50.0], "area": 1483, "segmentation": {"size": [512, 512], "counts": "kQd17b?700000001UADP>k0N201O0000000000000001OM30000001O2OO0000000001O000000000C=A?Bmnh5"}, "image_id": 570, "id": 10007}, {"iscrowd": 0, "category_id": 1, "bbox": [395.0, 45.0, 26.0, 25.0], "area": 611, "segmentation": {"size": [512, 512], "counts": "laU69X??0000000000000000000001O000000000000000000000000Do^]1"}, "image_id": 570, "id": 10008}, {"iscrowd": 0, "category_id": 1, "bbox": [240.0, 60.0, 35.0, 47.0], "area": 1373, "segmentation": {"size": [512, 512], "counts": "mQh3a0_?00000000000XAEk=X100O1000000000000000000000000000000000000000000000M3]Oc0]Og^f3"}, "image_id": 570, "id": 10009}, {"iscrowd": 0, "category_id": 1, "bbox": [28.0, 65.0, 47.0, 31.0], "area": 1254, "segmentation": {"size": [512, 512], "counts": "hR>5]?>G90000000000001O0001O0000000000000000000001O0001O000000000000000000000001O01O0000000000K5A[^j6"}, "image_id": 570, "id": 10010}, {"iscrowd": 0, "category_id": 1, "bbox": [165.0, 67.0, 45.0, 35.0], "area": 1397, "segmentation": {"size": [512, 512], "counts": "\\bb2h0S?500010O000000000000000000000000K5000000000000000000000001O01O00000000005K0000001O00000Nhmf4"}, "image_id": 570, "id": 10011}, {"iscrowd": 0, "category_id": 1, "bbox": [300.0, 83.0, 44.0, 30.0], "area": 1176, "segmentation": {"size": [512, 512], "counts": "QSf4=U?>0000000000000001O00000001O0000000000000000000001O00000001O00000000000000000001O0000Dgmc2"}, "image_id": 570, "id": 10012}, {"iscrowd": 0, "category_id": 1, "bbox": [214.0, 85.0, 20.0, 21.0], "area": 392, "segmentation": {"size": [512, 512], "counts": "TS[35\\??00000000000001O000000000000000001O0M]mZ4"}, "image_id": 570, "id": 10013}, {"iscrowd": 0, "category_id": 1, "bbox": [284.0, 89.0, 13.0, 22.0], "area": 274, "segmentation": {"size": [512, 512], "counts": "lR^4b0[?30000000000000001O00000W][3"}, "image_id": 570, "id": 10014}, {"iscrowd": 0, "category_id": 1, "bbox": [121.0, 104.0, 8.0, 8.0], "area": 58, "segmentation": {"size": [512, 512], "counts": "Xcl17i?00000000001O0h\\o5"}, "image_id": 570, "id": 10015}, {"iscrowd": 0, "category_id": 1, "bbox": [137.0, 105.0, 34.0, 30.0], "area": 759, "segmentation": {"size": [512, 512], "counts": "UdT21\\?c0G90000000000000001O0000000001O0000000000000A?L41O000000000000000f\\Z5"}, "image_id": 570, "id": 10016}, {"iscrowd": 0, "category_id": 1, "bbox": [275.0, 106.0, 6.0, 7.0], "area": 38, "segmentation": {"size": [512, 512], "counts": "ZcY43m?4L0000000f\\c3"}, "image_id": 570, "id": 10017}, {"iscrowd": 0, "category_id": 1, "bbox": [189.0, 107.0, 41.0, 35.0], "area": 1126, "segmentation": {"size": [512, 512], "counts": "jcn2c0n>?00000000001O0000000000000000000000000000000000000001O0000DA?J600000001O0001O0000000N2B>Aelk5"}, "image_id": 570, "id": 10021}, {"iscrowd": 0, "category_id": 1, "bbox": [6.0, 132.0, 30.0, 37.0], "area": 659, "segmentation": {"size": [512, 512], "counts": "XT33m?3\\@KW?8f@KV?8g@LV?b0M2M4M3L1000O011N4M3M3L3N00O01000O0101O2M4M3M3L4M3MPk]7"}, "image_id": 570, "id": 10022}, {"iscrowd": 0, "category_id": 1, "bbox": [289.0, 143.0, 35.0, 26.0], "area": 645, "segmentation": {"size": [512, 512], "counts": "We`41]?b0J600000000001O000001O000000000000H810O000000000000000H9O0000000000^km2"}, "image_id": 570, "id": 10023}, {"iscrowd": 0, "category_id": 1, 
"bbox": [41.0, 147.0, 35.0, 68.0], "area": 1783, "segmentation": {"size": [512, 512], "counts": "Qfd01o?7cNMcAO??f=4RB3f=5SBJm=U1O10000N2000O100000O10000000O1000O10000002M9H7I7I7I8H7I7H8Iaii6"}, "image_id": 570, "id": 10024}, {"iscrowd": 0, "category_id": 1, "bbox": [349.0, 149.0, 22.0, 26.0], "area": 381, "segmentation": {"size": [512, 512], "counts": "Ye^52j?4L4L5K4L40010O0000010O0000010O00M3K6K4L4L[[V2"}, "image_id": 570, "id": 10025}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 156.0, 26.0, 72.0], "area": 1015, "segmentation": {"size": [512, 512], "counts": "l4P2P>21000M01O2M4M3M3L4M3M3L4M3L3N3M3L4M3M3L4M3M3L3N3MTib7"}, "image_id": 570, "id": 10026}, {"iscrowd": 0, "category_id": 1, "bbox": [193.0, 157.0, 39.0, 74.0], "area": 2089, "segmentation": {"size": [512, 512], "counts": "dfP37X?a0O11O00000001O00:F000^Ob0C=00I70000000000001O000001O00000000000000iM`Bn1i=0000@`0_Oa0^O[k[4"}, "image_id": 570, "id": 10027}, {"iscrowd": 0, "category_id": 1, "bbox": [413.0, 159.0, 47.0, 28.0], "area": 727, "segmentation": {"size": [512, 512], "counts": "ae^61m?3M2N3M2N3M2N3M2N3O0010O010O01O010O01O0N3M2M4N1010O010O010O010O010O0010O010O0010O01M2N3M2N3Mbji0"}, "image_id": 570, "id": 10028}, {"iscrowd": 0, "category_id": 1, "bbox": [93.0, 161.0, 34.0, 53.0], "area": 1182, "segmentation": {"size": [512, 512], "counts": "]f^13i?5[OI\\A:`>J\\A:`>J\\A:`>K[A:a>d0L4O101O01O01O0001O01O0001O01O00L5L3L4L4L5K4M3L5K4L4M3LmZP6"}, "image_id": 570, "id": 10029}, {"iscrowd": 0, "category_id": 1, "bbox": [302.0, 162.0, 41.0, 78.0], "area": 1924, "segmentation": {"size": [512, 512], "counts": "nVg446M[?7a@M[??K4E]OVAf0g>;M4K4M3M3eA]NU>i1M4L3L4M3M4N10000010O000N2L5L3L4L5K4M3L4L5K4M3L4L5K4M3L4L5K4MkZd2"}, "image_id": 570, "id": 10030}, {"iscrowd": 0, "category_id": 1, "bbox": [139.0, 166.0, 42.0, 55.0], "area": 1639, "segmentation": {"size": [512, 512], "counts": "dfU26_?;1O000001O0000000000000E;@`0G900000000000001O000000000000000001O000000000001OD<@`0_OZ[U5"}, "image_id": 570, "id": 10031}, {"iscrowd": 0, "category_id": 1, "bbox": [253.0, 173.0, 29.0, 52.0], "area": 1408, "segmentation": {"size": [512, 512], "counts": "ben3_1a>0000000K500000000000000005K0000000000000000000000000000^jb3"}, "image_id": 570, "id": 10032}, {"iscrowd": 0, "category_id": 1, "bbox": [347.0, 180.0, 52.0, 67.0], "area": 1579, "segmentation": {"size": [512, 512], "counts": "Zg]51m?3M2N3M2N3M2N3M2N3M2N3N110O0O2M2N3M2N3M2N3M2N3M1O000000000001O3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2NTZh1"}, "image_id": 570, "id": 10033}, {"iscrowd": 0, "category_id": 1, "bbox": [393.0, 217.0, 56.0, 57.0], "area": 1485, "segmentation": {"size": [512, 512], "counts": "lgT62m?2N2N2M2O2N2N2N2N2M2O2O10000O1N1O2M3N2N2N2N1N3N2N00O10O100000O1000O1002M3N2N2N2N2M2O2N2N2N2M3N2N2N2N2N2M2O2N2NfXo0"}, "image_id": 570, "id": 10034}, {"iscrowd": 0, "category_id": 1, "bbox": [427.0, 231.0, 85.0, 77.0], "area": 2440, "segmentation": {"size": [512, 512], "counts": "khe61o?2M2N2N2N2N2N2N2N2N2N2N3M2N2N2N2N2O1N2N2N2N2N1O000000001O0002N2N2N2N2N2N2N2N2N0010O00000000000000000000010O00000000000000000000010O000001O2N3M00000000000001O0000Lk@^OV?a06N2NhH"}, "image_id": 570, "id": 10035}, {"iscrowd": 0, "category_id": 1, "bbox": [174.0, 232.0, 6.0, 9.0], "area": 50, "segmentation": {"size": [512, 512], "counts": "XWg28h?0000001O0hhU5"}, "image_id": 570, "id": 10036}, {"iscrowd": 0, "category_id": 1, "bbox": [493.0, 260.0, 19.0, 32.0], "area": 357, "segmentation": {"size": [512, 512], "counts": "lhf72m?2N2N2FJg@8W?Jh@8U?Ji@7V?Kh@5X?91O01O0000002N2N2N2O0lG"}, "image_id": 570, "id": 10037}, 
{"iscrowd": 0, "category_id": 1, "bbox": [467.0, 292.0, 45.0, 54.0], "area": 1321, "segmentation": {"size": [512, 512], "counts": "QjY72m?2V@Me?4Y@Ne?8N2N2N2N2N2N2N2N2N2N2N2O10000O1N2N0000000000001O0001O000000002N2N0000001O0000002\\OQA4Q?JQA4j5"}, "image_id": 570, "id": 10038}, {"iscrowd": 0, "category_id": 1, "bbox": [271.0, 317.0, 51.0, 74.0], "area": 1817, "segmentation": {"size": [512, 512], "counts": "kkW43k?2N2M4M2M4M2N3L3N2N3L3N3M2M4M2N2M4M2M4M2N2M4M2N0O1001N4M2M3N3M2M4M2N3L3N2N3L3N3M2M3N3L3N3M2M4M2N2M4Mken2"}, "image_id": 570, "id": 10039}, {"iscrowd": 0, "category_id": 1, "bbox": [57.0, 318.0, 45.0, 65.0], "area": 1681, "segmentation": {"size": [512, 512], "counts": "Ykl03k?3N2M3N2M4M2M3M3N2M3N2M3N2M3N2M3G:E90N101N2O1N2O1N2O1N2O1N2O1O1N2O1N2O1N2O1N2O1N2O1N2O1Aaf\\6"}, "image_id": 570, "id": 10040}, {"iscrowd": 0, "category_id": 1, "bbox": [217.0, 323.0, 37.0, 47.0], "area": 1052, "segmentation": {"size": [512, 512], "counts": "Yk\\31l?4L3L4M4L3L4M4K4M3M3L5N10010O00gN\\AV1h>0O0001OO2L30010OM3M4K4M3M3L5L3L4M4L3L4MjeP4"}, "image_id": 570, "id": 10041}, {"iscrowd": 0, "category_id": 1, "bbox": [508.0, 344.0, 4.0, 7.0], "area": 16, "segmentation": {"size": [512, 512], "counts": "kZn71n?2N2N2WE"}, "image_id": 570, "id": 10042}, {"iscrowd": 0, "category_id": 1, "bbox": [500.0, 352.0, 12.0, 23.0], "area": 144, "segmentation": {"size": [512, 512], "counts": "Z[j71n?2N2N2N2N2N2N2N2N2O2M2oD"}, "image_id": 570, "id": 10043}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 355.0, 55.0, 66.0], "area": 1572, "segmentation": {"size": [512, 512], "counts": "[;d0[?2N2N2N2N2O2M2N2N2O10000001O000001O0000000000lNXAo0m>01O0001O0000000000000001N1O1N2N2N2N00002N2N2N2N2O2M2N2N2N2N2N]ST7"}, "image_id": 570, "id": 10044}, {"iscrowd": 0, "category_id": 1, "bbox": [314.0, 356.0, 57.0, 56.0], "area": 1455, "segmentation": {"size": [512, 512], "counts": "X\\m41n?2N2N2N2N2O1N2N2N2N2N2N2N2N2N2N3M2N2N1O00000000000000001O000000000000000001O2N2N2N2N2N2N2N2N2N2N3M2N2N2N2N2N2N2NZTV2"}, "image_id": 570, "id": 10045}, {"iscrowd": 0, "category_id": 1, "bbox": [203.0, 376.0, 15.0, 36.0], "area": 468, "segmentation": {"size": [512, 512], "counts": "hlU33[?b0B>000000000001O0000000000^Ojdb4"}, "image_id": 570, "id": 10046}, {"iscrowd": 0, "category_id": 1, "bbox": [257.0, 386.0, 14.0, 25.0], "area": 308, "segmentation": {"size": [512, 512], "counts": "ZlP4a0W?800001O000000000000000M3^O_Th3"}, "image_id": 570, "id": 10047}, {"iscrowd": 0, "category_id": 1, "bbox": [290.0, 395.0, 19.0, 24.0], "area": 270, "segmentation": {"size": [512, 512], "counts": "m\\a41l?4M2M4M2N2M4M2O2O010O00N3M2M4M2N3L3NcSU3"}, "image_id": 570, "id": 10048}, {"iscrowd": 0, "category_id": 1, "bbox": [354.0, 401.0, 54.0, 45.0], "area": 1317, "segmentation": {"size": [512, 512], "counts": "Q]a52m?2N2N2N2N2O1N2N2N2N2N2N3M2N2N2N2N2O100000000000000000N0000000000000000011N2N2N2N2N3M2N2N2N2N2N2N2N2N2O1N2Nnbc1"}, "image_id": 570, "id": 10049}, {"iscrowd": 0, "category_id": 1, "bbox": [63.0, 402.0, 28.0, 29.0], "area": 423, "segmentation": {"size": [512, 512], "counts": "mlo02m?2N2N2N2N2N2N2O1N3M2N2N20000000N2O1N2N2N2N2N2N2N3M2N2NoRb6"}, "image_id": 570, "id": 10050}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 421.0, 2.0, 3.0], "area": 4, "segmentation": {"size": [512, 512], "counts": "U=3n?Nkbn7"}, "image_id": 570, "id": 10051}, {"iscrowd": 0, "category_id": 1, "bbox": [314.0, 430.0, 26.0, 26.0], "area": 350, "segmentation": {"size": [512, 512], "counts": "k]m41n?2N2N2N2N3N1N2N2N2N2N2N00000101N2N2N3M2N2N2N2N2N3NVbe2"}, "image_id": 570, "id": 10052}, 
{"iscrowd": 0, "category_id": 1, "bbox": [25.0, 437.0, 32.0, 35.0], "area": 615, "segmentation": {"size": [512, 512], "counts": "Sn<3l?2N2O1N2N2N2N3M2N2N2O1N2N2N002N201O00O1N2O1N2N3M2N2N2Eb@1`?Mb@1`?Mb@2_?Lc@2g?NdQS7"}, "image_id": 570, "id": 10053}, {"iscrowd": 0, "category_id": 1, "bbox": [395.0, 439.0, 55.0, 51.0], "area": 1336, "segmentation": {"size": [512, 512], "counts": "bnU61n?2N3M2N2N2N2N2N2N2N2N2N2000001O0O1N2N2N2N2N000000000001O00000001O0000000001O2N2N3M2N2N2N2N2O1N2N2N2N2N2N2N2Nian0"}, "image_id": 570, "id": 10054}, {"iscrowd": 0, "category_id": 1, "bbox": [89.0, 452.0, 38.0, 60.0], "area": 1620, "segmentation": {"size": [512, 512], "counts": "eo\\12l?3M2N3M2N3SOKkA7Q>MmA6R>JlA8T>HjA;U>EiA=W>CgA`0X>d0I8O001O001O000000000000O1N2N2N2N2N2N2N2N2N2N2N2N2NnQP6"}, "image_id": 570, "id": 10055}, {"iscrowd": 0, "category_id": 1, "bbox": [293.0, 453.0, 22.0, 22.0], "area": 261, "segmentation": {"size": [512, 512], "counts": "]nb42m?2N2O1N2N2N2N3M2O1N2000O1N2N2N2N2N2O1N2N2NaQR3"}, "image_id": 570, "id": 10056}, {"iscrowd": 0, "category_id": 1, "bbox": [354.0, 457.0, 36.0, 35.0], "area": 615, "segmentation": {"size": [512, 512], "counts": "m^a51n?2N2N2N2N2O1N2N2N2N2N2N2N2N2N1O000000000002N2N2N2N2N2N2N2N2N2N2N3N1N2NYal1"}, "image_id": 570, "id": 10057}, {"iscrowd": 0, "category_id": 1, "bbox": [199.0, 458.0, 43.0, 38.0], "area": 812, "segmentation": {"size": [512, 512], "counts": "ToS32m?1N3M2N3M3N1N3M2N3M2O2M3M2N3N1010O010O0N3M3M2O2M2N3M2O2M3M210O0100O010O0100M2N3M2O2MYaV4"}, "image_id": 570, "id": 10058}, {"iscrowd": 0, "category_id": 1, "bbox": [46.0, 463.0, 30.0, 29.0], "area": 469, "segmentation": {"size": [512, 512], "counts": "l^g01n?2N2N3M2O1N2N3M2N2O1N3M2N100O0001O2N2O1N3M2N2N2O2M2N2N2N3NRai6"}, "image_id": 570, "id": 10059}, {"iscrowd": 0, "category_id": 1, "bbox": [436.0, 469.0, 31.0, 30.0], "area": 487, "segmentation": {"size": [512, 512], "counts": "S_j62n?1N2N2N2N2N2N2N3M2N2N2O1N2N1O0001O2N2N2N2N2N2N2N2O1N2N2N2N2NoPf0"}, "image_id": 570, "id": 10060}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 470.0, 2.0, 3.0], "area": 4, "segmentation": {"size": [512, 512], "counts": "f>3n?NZan7"}, "image_id": 570, "id": 10061}, {"iscrowd": 0, "category_id": 1, "bbox": [30.0, 479.0, 28.0, 29.0], "area": 413, "segmentation": {"size": [512, 512], "counts": "Z_?1o?1N3M2N2N2N2O2M2N2N2N2N3N1N20N2N2N3M2N2O1N3M2N2N2N2O2Mb`R7"}, "image_id": 570, "id": 10062}, {"iscrowd": 0, "category_id": 1, "bbox": [294.0, 480.0, 51.0, 32.0], "area": 898, "segmentation": {"size": [512, 512], "counts": "b_c42m?2N2N2N2N2N2O2M2N2N2N2N1O1O100O1O1O1O1O1001O1O1O2N1O1O1O1O1O1O1O1O1O1O1O1O1O2N1O1O1O1O1O1O1O1O1O1O1OQPc2"}, "image_id": 570, "id": 10063}, {"iscrowd": 0, "category_id": 1, "bbox": [458.0, 483.0, 44.0, 29.0], "area": 768, "segmentation": {"size": [512, 512], "counts": "g_U72m?2O1N2N2Z@Ha?BmAa0P>BnA?P>CeAF4j0U>AeAG4j0U>GiA:U>HiA9V>i00O0001O01O0002O2M2N3M2O101O_OUBlNl=R1UBlNm=m0RBiN1:P>j0QBjNO=Q>h0TBWOm=h0SBXOm=h0SBXOm=h0UBWOj=j0WBTOj=k0YBROg=n0[BQOd=o0^BoNb=R1`BkNa=T1e00010O0000010O0000010O1O1O100O1O3N1N3M2@l@1V?Nk@1W?Ll@1V?Ml@1W?Ll@1V?Nk@1h>"}, "image_id": 571, "id": 10070}, {"iscrowd": 0, "category_id": 1, "bbox": [98.0, 25.0, 118.0, 132.0], "area": 7538, "segmentation": {"size": [512, 512], "counts": 
"fQa13m?1N3M3N2M2O2M3N2M2N3N2M3N1N3N2M3M2O2M3N2M2O2M3M3NO010O0010O0010ORBXNe=i1[BYNc=f1]B]N`=c1`B_N_=a1aBaN\\=_1dBdNY=]1gBeNW=Z1iBhNU=Y1kBiNS=V1mBmNP=U1nBnNP=T1nBmNP=T2M3N2M2O2M3N2M1001N3N2M3N1N3M3N2YNmBX=_OiB`0Y=^OgBb0\\=\\OdBd0^=YObBg0a=WO_Bk0`=SO`BP1`=nN_BU1a=hN`BY1a=eN^B^1a=`N`Bb1`=[N`Bh1`=VN`Bk1a=RN_BQ2`=nM`BT2d=3M2O2M3N2M2O2M2N010O010O01O01O010O0101N3M3N2M2fN_B3c=J`B3c=K^B4d=I_B4c=K_B2d=K_B3c=K^B3e=J^B4c=J`B3c=K_B3c=J_B4d=J^B3d=K_B3c=K_B2d=K^B4i>M3Nfkc4"}, "image_id": 571, "id": 10071}, {"iscrowd": 0, "category_id": 1, "bbox": [247.0, 27.0, 71.0, 72.0], "area": 2814, "segmentation": {"size": [512, 512], "counts": "]bk32n?4L3M4K5L2ZO^OjAb0V>BeA?[>DbA;^>J^A6b>M[A3e>c010O100GjNhAU1X>oNeAQ1[>:000O01000O0102N4L2M100G\\NVBc1i=;O1O10O01000FXB[Ni=e1[BWNe=h1:0000O2O001N3N3M3M3L4M3M3L3NO10O102N3L10000O01000O2O3M3L4M3M3L5LfmP3"}, "image_id": 571, "id": 10072}, {"iscrowd": 0, "category_id": 1, "bbox": [474.0, 64.0, 38.0, 64.0], "area": 1516, "segmentation": {"size": [512, 512], "counts": "ZS]73l?2O2M2N2N3VODlA=S>DkA?R>DkA>S>DlA=S>DkA?R>DkA>S>DeAD1k0W>HhA9W>i0N1O0001O01O01O0001O01O010O3M2N3N1N2I_AlNb>S1`AlN`>S161O000mM"}, "image_id": 571, "id": 10073}, {"iscrowd": 0, "category_id": 1, "bbox": [282.0, 90.0, 94.0, 67.0], "area": 3467, "segmentation": {"size": [512, 512], "counts": "ZS]42m?4M4L3i@Dc>a0XADd>>ZAEc>;]AH_>9`AK]>8`AK]>8`AL[>P1M000O10O10O10O10O10O10O1000O10O10O10O10O10O10O10O1000O01000O01000O01000O012jNiA?Z>]OkA?X>\\OlAa0X>ZOiAg0Y>VOhAj0g>O01001N4M3M4NM4M3M0O1000O01000O01000O10O10O10O1000O01000O01000O01000O3N4L3M3L4M4LfkS2"}, "image_id": 571, "id": 10074}, {"iscrowd": 0, "category_id": 1, "bbox": [69.0, 126.0, 33.0, 38.0], "area": 554, "segmentation": {"size": [512, 512], "counts": "VdR12n?2M2N2O2M2N2N3N1N3M2N2O20O01O01O01O01O010M2N2O2M2N3M2O1N3M2O1N3MWk\\6"}, "image_id": 571, "id": 10075}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 149.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "edo71Z;"}, "image_id": 571, "id": 10076}, {"iscrowd": 0, "category_id": 1, "bbox": [331.0, 171.0, 78.0, 67.0], "area": 2808, "segmentation": {"size": [512, 512], "counts": "RfU52f0O`>3^A0_OOk>2eA0^O0k>3dA5Z>MdA6Z>LZAE6`0_>1_ANa>4]ALb>7\\AKb>l0M2O1N3M2O1N1O00010O0000010O0000010O00010O01O1O101N1O1O2O1N2N2O2lN[Ah0o>O1O101N1O1O100O0000010O01O3M2O1N1O0001O011N2N3M2O1N3M2N2N3N1N2N2N3N1N2NlYc1"}, "image_id": 571, "id": 10077}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 174.0, 24.0, 37.0], "area": 518, "segmentation": {"size": [512, 512], "counts": "d5o0P?0010O0000010O001O2O2M2N2N3N1N2N3M2O1N3M2N2O2MPjc7"}, "image_id": 571, "id": 10078}, {"iscrowd": 0, "category_id": 1, "bbox": [319.0, 184.0, 15.0, 15.0], "area": 115, "segmentation": {"size": [512, 512], "counts": "Pfo42m?2N2N2N2N2N01O00001O2N2N3M2NTjh2"}, "image_id": 571, "id": 10079}, {"iscrowd": 0, "category_id": 1, "bbox": [202.0, 199.0, 85.0, 73.0], "area": 3034, "segmentation": {"size": [512, 512], "counts": "SWU31n?2N3M2N2N2N2N2N2N2O1N2N2N2N2N2N2N3TAmNf>X1N2N2N2N2001O000000000000000000010O00N2N2N000000000000000000000000000002N2N2N1O01O00HaAPO_>P1dAmN\\>S1fAkNZ>U18000002N2N2N2N2N2N2N2N2N20000N2N2N2J_@Jc?46N2NXY`3"}, "image_id": 571, "id": 10080}, {"iscrowd": 0, "category_id": 1, "bbox": [298.0, 215.0, 28.0, 35.0], "area": 405, "segmentation": {"size": [512, 512], "counts": "lVe41n?3N2M3N2M3N2M3N1N10O3O1000O100000000N00O2N3N2M3N2M3N2M^hl2"}, "image_id": 571, "id": 10081}, {"iscrowd": 0, "category_id": 1, "bbox": [350.0, 221.0, 84.0, 64.0], "area": 2442, "segmentation": {"size": [512, 512], "counts": 
"iW_52m?3N1N2X@Jc?m0hAQOY>o0hAoNX>Q1kAmNT>S1nAkNR>U1=O010O2N3M2O1N3M2N2O2M2N2O2OO1N3M2N2O2M2N2N3M12M2N2O2M2N2N3N1Fe@L]?2e@L^?1e@L]?39N\\hV1"}, "image_id": 571, "id": 10082}, {"iscrowd": 0, "category_id": 1, "bbox": [98.0, 238.0, 84.0, 69.0], "area": 2850, "segmentation": {"size": [512, 512], "counts": "dXa11n?2N1O2N2M3N2N2N2M3N2N1O2N2M3N2N2O100000O1000N2N2N2N2N2N1N3N2N2N2N2O1NO2O1O001O1O001N2O001O1O001O0IaAnN`>Q1bAlN`>S1601O10O0100O010000O3NO10O10O10O0103J6M1N01000O10000Hm@BU?8O2N3M3KXhT5"}, "image_id": 571, "id": 10083}, {"iscrowd": 0, "category_id": 1, "bbox": [4.0, 243.0, 44.0, 42.0], "area": 880, "segmentation": {"size": [512, 512], "counts": "RX23l?2N1N3N2N2M3N1O2M3N2N2N11000O1000O1000O1000O10O10000000O01000O1M3N1O2N2M3N2N1O2M3N2N1NggW7"}, "image_id": 571, "id": 10084}, {"iscrowd": 0, "category_id": 1, "bbox": [189.0, 252.0, 8.0, 21.0], "area": 163, "segmentation": {"size": [512, 512], "counts": "Qhn2`0[?5000000000000TXm4"}, "image_id": 571, "id": 10085}, {"iscrowd": 0, "category_id": 1, "bbox": [78.0, 257.0, 14.0, 11.0], "area": 80, "segmentation": {"size": [512, 512], "counts": "VXW11o?1N2N2O2M1O010O0102M2O2M2Oiga6"}, "image_id": 571, "id": 10086}, {"iscrowd": 0, "category_id": 1, "bbox": [381.0, 263.0, 76.0, 78.0], "area": 2898, "segmentation": {"size": [512, 512], "counts": "oin51n?3M2O1L5M2O2M2N00010O002O1N3M2N2O2M2N2N3N1N2N0001O01O0001O01O0001O01O0001OEkNlAV1T>kNjAU1V>nNhAQ1X>QOfAo0Z>SOdAn0\\>:00010O0000010O03M2N2O1N3M2N01O00100O1O1O100O1O2N2TOoAFS>8PBFQ>8QBFR>DhA:80[>NgA0\\>MgA1Z>MhA1Z>MhA1^ek0"}, "image_id": 571, "id": 10087}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 264.0, 27.0, 37.0], "area": 572, "segmentation": {"size": [512, 512], "counts": "Y8j0U?1100000O0100000O0100000O0100N2N1O2M3N2N1N3N2N2M2O2NVWb7"}, "image_id": 571, "id": 10088}, {"iscrowd": 0, "category_id": 1, "bbox": [246.0, 265.0, 80.0, 69.0], "area": 3161, "segmentation": {"size": [512, 512], "counts": "jXk37g?4L4M2N2O0RABY>`0fAEU>;kAKo=5PB1k=OUB4h=LXB3i=MWB3h=2TBNl=8mAIS>R10OJ\\NRBd1n=700000O1000O100000O10000000O02O6@kAiNZ>R1lAhNX>T1:0000O10O10000000O10O100000O10O100000O10O10000000O10O1003M6I6K2N00000O01000000002M7J5K5K[fl2"}, "image_id": 571, "id": 10089}, {"iscrowd": 0, "category_id": 1, "bbox": [45.0, 299.0, 75.0, 57.0], "area": 2139, "segmentation": {"size": [512, 512], "counts": "Qjf02l?3M2O2M2N3N2M2N3M2O2M2N3N20O010O10O10O010O010O10O10O010O10O010O10O10O010O10O010O10O010O10O10O0N3N1N3M3N1N3M2O2000O001M2N3M2O2M3DTA@n>>TA@o>=TAAm>=UAAn>=[1kAgNU>Z1hAiNX>_1010O010O010O010O010ROiA1W>MkA4U>InA6R>KmA6S>HoA7Q>GQB:n=DUB;l=^OjAH10M2N3M2N3L3N3M2N3M2Nccd6"}, "image_id": 571, "id": 10094}, {"iscrowd": 0, "category_id": 1, "bbox": [245.0, 361.0, 93.0, 49.0], "area": 3532, "segmentation": {"size": [512, 512], "counts": "Plj3c0]?0O1000000B>00000000000000000000000000000000000000H80007I0000000000000000000000N2000000000000000000000000000000000002N0J60006J000000000000000000000000O100000000000000000000000000000000`df2"}, "image_id": 571, "id": 10095}, {"iscrowd": 0, "category_id": 1, "bbox": [359.0, 362.0, 22.0, 23.0], "area": 435, "segmentation": {"size": [512, 512], "counts": "\\kc57h?;F4LO100000000000O100000O1000000000002N:FZTQ2"}, "image_id": 571, "id": 10096}, {"iscrowd": 0, "category_id": 1, "bbox": [214.0, 394.0, 34.0, 39.0], "area": 700, "segmentation": {"size": [512, 512], "counts": "i\\[31n?2N2M2O2a@IR?9l@IR?9l@IR?9l@HS?d0N2M3N2N2N200O01O1M3N2N2N2N1O2N2M3N2N2N2N2N1N3N2N2NVcS4"}, "image_id": 571, "id": 10097}, {"iscrowd": 0, "category_id": 1, "bbox": [366.0, 399.0, 60.0, 75.0], "area": 3185, "segmentation": 
{"size": [512, 512], "counts": "b\\g57i?9G9G9G9G9G9F:G2N0000O10000000O100000O1000000000O14L8G100000000WOUB]Ok=c0^BTOb=Q1c02HiNeAW1[>61000000000O100000O1000000000O100008G9H9G9G9GTbZ1"}, "image_id": 571, "id": 10098}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 414.0, 25.0, 41.0], "area": 737, "segmentation": {"size": [512, 512], "counts": "U=Q1o>00000I700000001O0001K4I70000M3L400000000001OI8B]Sc7"}, "image_id": 571, "id": 10099}, {"iscrowd": 0, "category_id": 1, "bbox": [173.0, 422.0, 72.0, 87.0], "area": 3022, "segmentation": {"size": [512, 512], "counts": "anf21o?3L3DKi@7U?Kh@8U?Ki@7U?S1iAkNY>S1iAjNZ>U1:O1M3N2M4M0O10O0100O010O102M3N2M3N2M3N2M3N2M3NnPU4"}, "image_id": 571, "id": 10100}, {"iscrowd": 0, "category_id": 1, "bbox": [265.0, 424.0, 92.0, 49.0], "area": 2621, "segmentation": {"size": [512, 512], "counts": "YnT41o?8H00O1000GMe@3Z?:08H000000000000000O0K[OQAe0o>6O10000000000000O10O1000000000001O03M000000O10000000M30L4000000000000000O010000000000000000HSO]Al0c>N;0000O100000000000O1000O10000000000000O1000O4M9G:FRR]2"}, "image_id": 571, "id": 10101}, {"iscrowd": 0, "category_id": 1, "bbox": [225.0, 445.0, 10.0, 10.0], "area": 61, "segmentation": {"size": [512, 512], "counts": "Pn`32m?3N2M2O0O0102M3N2MnQZ4"}, "image_id": 571, "id": 10102}, {"iscrowd": 0, "category_id": 1, "bbox": [8.0, 450.0, 79.0, 62.0], "area": 2794, "segmentation": {"size": [512, 512], "counts": "d_41n?3N1N2N3N1N3M2O1N1O100O1O100O1O100O1O100O1O1O100AVOgAk0Y>WOdAj0[>XOdAh0[>[ObAf0^>[O`Af0_>]O_Ac0`>?00O1O100O1O100O1O100O1O1O10O00010O000101N3M2O0002N1O2N1O1O2N1O2M2N3lN_Ad0c>[O_Ac0c>ZO_Ad0c>ZO_Ad0d>ZO^Ac0d>[O^Ad0Q?M2N3M2O2M2N3M2O_Pd6"}, "image_id": 571, "id": 10103}, {"iscrowd": 0, "category_id": 1, "bbox": [3.0, 506.0, 10.0, 6.0], "area": 36, "segmentation": {"size": [512, 512], "counts": "no12m?100O1O100O12N1O2NQPi7"}, "image_id": 571, "id": 10104}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 12.0, 12.0, 19.0], "area": 122, "segmentation": {"size": [512, 512], "counts": "<>c?001O01O0O1N2N2N2N2N2OXoi7"}, "image_id": 573, "id": 10105}, {"iscrowd": 0, "category_id": 1, "bbox": [62.0, 96.0, 17.0, 19.0], "area": 186, "segmentation": {"size": [512, 512], "counts": "WSo02m?2N2O1N2N2N2N2N2O01N2O1001OH_@Ob?O`@Ob?O`@OZlh6"}, "image_id": 573, "id": 10106}, {"iscrowd": 0, "category_id": 1, "bbox": [485.0, 104.0, 26.0, 26.0], "area": 334, "segmentation": {"size": [512, 512], "counts": "bcb71n?2N2N3N1N2N2N2N2N2N3M200000O2M2N2O1N2N2N2N3M2N2N2OX<"}, "image_id": 573, "id": 10107}, {"iscrowd": 0, "category_id": 1, "bbox": [420.0, 149.0, 54.0, 40.0], "area": 1130, "segmentation": {"size": [512, 512], "counts": "ZUb61n?2N2N2O1N2N2N3M2N2N2N2O100000N2O0O1O000001O2N2N2N2N3N1N2N00001O00002000O1N2N2N3M2N2N2N2O1N2N2N3M2N2N2N2N2Ofjb0"}, "image_id": 573, "id": 10108}, {"iscrowd": 0, "category_id": 1, "bbox": [77.0, 153.0, 21.0, 23.0], "area": 215, "segmentation": {"size": [512, 512], "counts": "PeV11n?2N2N2N2N2N2N2010O00000000000N2N2N3M2N2Nij^6"}, "image_id": 573, "id": 10109}, {"iscrowd": 0, "category_id": 1, "bbox": [498.0, 164.0, 14.0, 28.0], "area": 209, "segmentation": {"size": [512, 512], "counts": "`Ui71n?3M2N2N2N2N2N2N2N2N2N2O2M2kJ"}, "image_id": 573, "id": 10110}, {"iscrowd": 0, "category_id": 1, "bbox": [21.0, 168.0, 55.0, 54.0], "area": 1416, "segmentation": {"size": [512, 512], "counts": "Xf:1n?2N2N2N2O2M2N2N2N2N2N2N2N3N1DTOaAn0]>TOaAn0^>SO`Ao0^>SO`Ao0^>SO`Ao0^>90000000000100O2N2N2N2N3M2N2N2O1N2N2N2N00001O002O1N2N3M2N2N2N2N2N2O1NSji6"}, "image_id": 573, "id": 10111}, {"iscrowd": 0, "category_id": 1, "bbox": [134.0, 175.0, 55.0, 
64.0], "area": 1806, "segmentation": {"size": [512, 512], "counts": "lVS22`?0k@3R?Ol@3R?Ol@3S?Nk@4S?Nk@4S?Nk@4S?>N2N2N2N2N2HkNbAW1\\>kNbAW1\\>8O1N00001O000000000000000002N2N2N2N2N2N2N2N2N2N3M2N2O1N2N2N2N2N2N2N2N2N2N2N2N2NgYQ5"}, "image_id": 573, "id": 10112}, {"iscrowd": 0, "category_id": 1, "bbox": [365.0, 179.0, 76.0, 73.0], "area": 2248, "segmentation": {"size": [512, 512], "counts": "Vgf52m?2N3M2N2N2N2N2N2N2N2N2EYO[Ah0c>[OZAh0c>ZO[Ah0c>ZO[Ah0c>>iA@Y>>iAAX>=jA\\OHM`>e0jA\\OHM`>e0jA]O\\>a0fA]O\\>a0a0001O002O1N2N2N2N2N3M2N00000000200000N2N2N3M2N2N2N2O1N2N2N2N2N2N2N3M2NiYS1"}, "image_id": 573, "id": 10113}, {"iscrowd": 0, "category_id": 1, "bbox": [99.0, 184.0, 17.0, 17.0], "area": 137, "segmentation": {"size": [512, 512], "counts": "Rfa11n?2N2N3M2N2N0001O0000001O2N2N2N2NUjU6"}, "image_id": 573, "id": 10114}, {"iscrowd": 0, "category_id": 1, "bbox": [69.0, 190.0, 30.0, 31.0], "area": 482, "segmentation": {"size": [512, 512], "counts": "\\fR13l?2O1N2N2N2N2N2N3M2O1N2N2N2N000002N2N2N2O1N3M2N2N2N2N2N2N3NcY^6"}, "image_id": 573, "id": 10115}, {"iscrowd": 0, "category_id": 1, "bbox": [419.0, 203.0, 75.0, 73.0], "area": 2176, "segmentation": {"size": [512, 512], "counts": "nga62m?2N2N3M2N2O1N2N2N2N2N2N2N2N2N3M2N2N2O0O000002N3M2N2N2N2N2N2N10O000^O[OjAe0W>\\OgAd0Y>^OeAb0[>@cA`0]>BbA?\\>DaA>]>DaA>]>DaA>]>DaA=^>b0000000001O0000000001O0001O2F_AROc>l0_AROc>l0_AROd>k0^ASOd>k0:N2N2O1N2N2N2N2N2N2N3M2N2NRi8"}, "image_id": 573, "id": 10116}, {"iscrowd": 0, "category_id": 1, "bbox": [105.0, 205.0, 24.0, 24.0], "area": 290, "segmentation": {"size": [512, 512], "counts": "jfd12m?2N2N2O1N2N2N2N2N2N0000000001O2N2N2N2O2M2N2N2N[Yo5"}, "image_id": 573, "id": 10117}, {"iscrowd": 0, "category_id": 1, "bbox": [88.0, 229.0, 10.0, 11.0], "area": 60, "segmentation": {"size": [512, 512], "counts": "YW\\12m?2N2N2N2000N2N2N2Ngh^6"}, "image_id": 573, "id": 10118}, {"iscrowd": 0, "category_id": 1, "bbox": [173.0, 230.0, 63.0, 62.0], "area": 1976, "segmentation": {"size": [512, 512], "counts": "bhf22T?OdA3Z>OdA4Y>NfA3X>OfA3Y>NeA4Y>NeA4Y>OdA3Z>OdA4Y>NeA4Y>NfA3Y>NeA4Y>OdA3Z>k0N2OO2N3M2N2N010O0000000001O01O000L`AiN`>W140010O2N2N3M2N2N2O0O1O00000001O0001O1O2N2N2O1N2N3M2N2N2N2O2M2N\\hY4"}, "image_id": 573, "id": 10119}, {"iscrowd": 0, "category_id": 1, "bbox": [298.0, 238.0, 76.0, 73.0], "area": 2201, "segmentation": {"size": [512, 512], "counts": "QYe42m?2N2N2N3N1N2N2N2N2N2C[O\\Ag0b>[O\\Ah0b>YO]Ah0a>ZO]Ah0a>ZO]Ah0a>ZO]Ah0a>=N2N1O01O0000000001O000001O000\\OdAE\\>;gABY>>iA@X>?jA\\OHK`>f0jA]OHK`>g0iA\\O]>b0eA\\O]>b0`0000000011N2N2N2N2N2N3M10O00000002O11N1N2N2O1N2N2N2N3M2N2N2N2O1N2N3M2N2NngT2"}, "image_id": 573, "id": 10120}, {"iscrowd": 0, "category_id": 1, "bbox": [74.0, 239.0, 52.0, 54.0], "area": 1397, "segmentation": {"size": [512, 512], "counts": "`XU11n?2N2N2N2N2N2N2O2F@o@b0o>@o@b0o>@o@b0o>9O1N2N2N3M2N2N000010O00000000000000101N2N2N2N3M2N2N2N2O1N2N2N3M2N2N2N2N2O1N2N3MjgP6"}, "image_id": 573, "id": 10121}, {"iscrowd": 0, "category_id": 1, "bbox": [477.0, 245.0, 35.0, 59.0], "area": 1153, "segmentation": {"size": [512, 512], "counts": "Xh^72m?3M2O1N2N2N2N3M2N2O1N2N2N3M2N2N2O1N2N2N3M2N2O10000010O000000N2N2000NTH"}, "image_id": 573, "id": 10122}, {"iscrowd": 0, "category_id": 1, "bbox": [331.0, 266.0, 107.0, 88.0], "area": 3245, "segmentation": {"size": [512, 512], "counts": 
"\\jU52m?2O1N2N2N2N2N3M2N2N2N2O1N2N2N2N3M2N2N1O1O01O002N2N2N3M2N2N2O0O000000@ZOhAe0X>]OfAc0Z>_OdAa0\\>AbAa0\\>AbAa0\\>AbAa0\\>AbA`0^>A`A?`>a0O0000000000000001O01O00000001H`AoNb>o0`AoNb>o0`AoNa>P171O0000000001O00000001O002N2N1O01O002N2N2N2O1N2N2N0000000000002O1N2O101M2N2N2O1N2N2N2N2N3MVgT1"}, "image_id": 573, "id": 10123}, {"iscrowd": 0, "category_id": 1, "bbox": [237.0, 273.0, 48.0, 46.0], "area": 984, "segmentation": {"size": [512, 512], "counts": "]if32n?1N3M2N2N2N2N2N2O2M2N2N2N2N2N0001O000001O0002N1O00000001O01O00000000002O1N2L4N3M2N2N2N2N2O1N3MRWa3"}, "image_id": 573, "id": 10124}, {"iscrowd": 0, "category_id": 1, "bbox": [119.0, 279.0, 52.0, 56.0], "area": 1438, "segmentation": {"size": [512, 512], "counts": "iik12m?2N3N1N2N2N2N2N2N3F]ORAe0l>]ORAe0l>]OSAd0k>:N2N2O1N3M0000000000010O000000000002N2O1N2N2N3M2N2N2N2N2O1N3M2N2N2N2N2N2O2M2NbVZ5"}, "image_id": 573, "id": 10125}, {"iscrowd": 0, "category_id": 1, "bbox": [403.0, 283.0, 90.0, 80.0], "area": 3228, "segmentation": {"size": [512, 512], "counts": "\\jY61n?3M2N2N2O1]@F]?TOUAj0R?N2N2N2N2N2N2O2M2N2N2N2N2NYV9"}, "image_id": 573, "id": 10126}, {"iscrowd": 0, "category_id": 1, "bbox": [279.0, 289.0, 16.0, 15.0], "area": 109, "segmentation": {"size": [512, 512], "counts": "Ui[42m?2O1N3M2N02O10000000001N1N2N2NeV\\3"}, "image_id": 573, "id": 10127}, {"iscrowd": 0, "category_id": 1, "bbox": [163.0, 307.0, 58.0, 61.0], "area": 2024, "segmentation": {"size": [512, 512], "counts": "cja21n?2N2N2N6K1N2^OC^A?`>C^A?`>C^A?`>C^A`0_>B_A`0`>A_A`0_>C^A?`>b0N2N3M2N2O1O100000N2N3M2N2N2O1N2N0000001O01O00000000010O0002N2N2TOXA=j>AXA=j>AXA>i>AXA=k>@WA>k>@WA>X?M2N2N2O1N2NbUa4"}, "image_id": 573, "id": 10128}, {"iscrowd": 0, "category_id": 1, "bbox": [224.0, 324.0, 44.0, 33.0], "area": 712, "segmentation": {"size": [512, 512], "counts": "mZ`31n?3M2N2N2N10O0001O0000000000011N3M2N2N2N2N2N2N10O02N2N1O02N2N2N3M2O1N2N2N2N2N2N3M2N2O1N\\ei3"}, "image_id": 573, "id": 10129}, {"iscrowd": 0, "category_id": 1, "bbox": [104.0, 325.0, 28.0, 28.0], "area": 406, "segmentation": {"size": [512, 512], "counts": "bZd11n?2N2N2O2M2N2N2N2N2N2N3N1N000002O1N2N2N2N2N2N2O1N2N2N2N`em5"}, "image_id": 573, "id": 10130}, {"iscrowd": 0, "category_id": 1, "bbox": [323.0, 334.0, 8.0, 8.0], "area": 32, "segmentation": {"size": [512, 512], "counts": "ajQ51n?2N2N2000N2N2N_Uj2"}, "image_id": 573, "id": 10131}, {"iscrowd": 0, "category_id": 1, "bbox": [252.0, 350.0, 45.0, 41.0], "area": 758, "segmentation": {"size": [512, 512], "counts": "i[n31n?2N2N2N2O2M2N2N2N2N2N2N2N0001O1O2OO0000000000000001O01O000000000001O2N3M2N2O1N2N2N2N2N2NhT[3"}, "image_id": 573, "id": 10132}, {"iscrowd": 0, "category_id": 1, "bbox": [370.0, 353.0, 53.0, 59.0], "area": 1609, "segmentation": {"size": [512, 512], "counts": "f[i51n?2N2N2N2N2O2M2N2N2N2N2N2N2N2N2N2N2N2N2N2N3N1N2O100000000000000001O00000N2N2N2N2N2N2N2N2N2N2O1Aj@1Y?Li@2Y?Li@2Y?Li@2Y?Li@2d?NgS\\1"}, "image_id": 573, "id": 10133}, {"iscrowd": 0, "category_id": 1, "bbox": [295.0, 356.0, 63.0, 69.0], "area": 1911, "segmentation": {"size": [512, 512], "counts": "dlc41o?1N2N2N2N2N2N2N2N2N2N2N3M2N2N2N2N2N2N2O0O00000BkNRBU1n=mNPBS1P>oNnAQ1R>QOlAo0T>SOjAm0V>VOgAj0Y>>01O0000000001O2N2N3M2N2N2N2N2N2N2N2N2N2N2O1N2N2N2N2N2N3M2N2N2N2NRd\\2"}, "image_id": 573, "id": 10134}, {"iscrowd": 0, "category_id": 1, "bbox": [485.0, 365.0, 27.0, 43.0], "area": 691, "segmentation": {"size": [512, 512], "counts": "kkb71n?2N2N3M2N2N2O1f@Bo>`0o@Bo>`0o@Bo>`0PAAn>a0PAAn>j000000N2O1N1O02N2N2N3M2N2N1O1cD"}, "image_id": 573, "id": 10135}, {"iscrowd": 0, "category_id": 1, "bbox": [199.0, 367.0, 
56.0, 55.0], "area": 1576, "segmentation": {"size": [512, 512], "counts": "WlS31n?2N2N3M2\\@G_?:_@H_?>N2N2N2N2N3M2O1000000N2N2N2N3N1N2N2N00000000000010O0000002N2N2N3M2N2O1N2N2N2N2N2N3M2N2N2O1N2N2N2N3MjSP4"}, "image_id": 573, "id": 10136}, {"iscrowd": 0, "category_id": 1, "bbox": [407.0, 389.0, 91.0, 90.0], "area": 3393, "segmentation": {"size": [512, 512], "counts": "Qn[61n?2N2N2N2N2N2O1N2N2h@_On>c0PA_On>c0PA_On>c0PA_On>k00010O00N2N0001O2N2N3M2N2N2JfNcA[1\\>400001O000001O0000000000000G`NSB`1m=bNQB^1o=dNoA\\1Q>gNlAY1T>900000001O0001O00000000000000001O2N2N2O1N2N2N3M2N2N2N2N2N2N2N2N2N2N2O1N2N2N3M2N2N2N2N2N2Nnb6"}, "image_id": 573, "id": 10137}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 390.0, 28.0, 61.0], "area": 1024, "segmentation": {"size": [512, 512], "counts": "_O0000000000000000010O2N2N2N3M2N2N2SOVAa0l>]OVAON;n>DVAONCWAONC^A;U?N2N3M2N2Nhba7"}, "image_id": 573, "id": 10138}, {"iscrowd": 0, "category_id": 1, "bbox": [29.0, 392.0, 29.0, 30.0], "area": 450, "segmentation": {"size": [512, 512], "counts": "el>1n?2N3M2N2N2N2N2O1N2N2N2N2N3M20N2N2N2O1N2N2N2N2N2N3M2N2N2NZcR7"}, "image_id": 573, "id": 10139}, {"iscrowd": 0, "category_id": 1, "bbox": [332.0, 402.0, 58.0, 59.0], "area": 1618, "segmentation": {"size": [512, 512], "counts": "f]V51n?2N2N2N2N2N2O1N2N3M2N2N2N2N2N2N2O1HoN^AT1_>nN_AT1`>mN^AU1`>50000000001O000001O000000001O3N1N2N2N2N2N2N2N2N3M2N2O1N2N2N2N2N2N3M2N2N2Oebl1"}, "image_id": 573, "id": 10140}, {"iscrowd": 0, "category_id": 1, "bbox": [249.0, 405.0, 52.0, 58.0], "area": 1506, "segmentation": {"size": [512, 512], "counts": "kml31n?2N2N2N2N2N3M2FBo@`0P?An@a0P?An@a0P?9N2N2HnN_AT1_>nN_AT1_>nN_AT1_>8O1N1O00000000000011N2N2N2N2N2N3M2N2N2N2N2N2N2N2N2N2O1N2N2N2N2N3M2N2NaRY3"}, "image_id": 573, "id": 10141}, {"iscrowd": 0, "category_id": 1, "bbox": [490.0, 423.0, 22.0, 41.0], "area": 471, "segmentation": {"size": [512, 512], "counts": "g]e71n?2N2N2N2N2N3M2N2N2N2N2N2O2O000N2N2N2N2N2N3gB"}, "image_id": 573, "id": 10142}, {"iscrowd": 0, "category_id": 1, "bbox": [383.0, 425.0, 22.0, 22.0], "area": 232, "segmentation": {"size": [512, 512], "counts": "fmo52m?2N2O1N2N2N2N2N1O000000000002N2N2N3M2N2N2O_Re1"}, "image_id": 573, "id": 10143}, {"iscrowd": 0, "category_id": 1, "bbox": [29.0, 447.0, 53.0, 52.0], "area": 1372, "segmentation": {"size": [512, 512], "counts": "cn>1n?3M2N2N2N2N2h@Cj>?TACk>>SADk>>SADk>>SADk>>SADk>j0N2N2N2N2N2N2N000002N2N2N2N2N2N2N2N2O2M2N2N1O0000000000002N2N2N2N2N2N2N2N2O1N2N2N_af6"}, "image_id": 573, "id": 10144}, {"iscrowd": 0, "category_id": 1, "bbox": [356.0, 449.0, 49.0, 44.0], "area": 1132, "segmentation": {"size": [512, 512], "counts": "a^b51n?2O1N2N2N2N2N2N3a@@Y?e0N2N2N2O1N2N2N2N2N2O2O000000O1O2M2N2N1O1O000000001O2O2M2N2N2N2N2N2N2N2N2N3N1N2N[Qe1"}, "image_id": 573, "id": 10145}, {"iscrowd": 0, "category_id": 1, "bbox": [229.0, 450.0, 29.0, 29.0], "area": 425, "segmentation": {"size": [512, 512], "counts": "`nb31n?2M3N2N2N2N1O2N2N2N2N2M3O100000N2N2N2N2N2N1O2M3N2N2N2N2Naan3"}, "image_id": 573, "id": 10146}, {"iscrowd": 0, "category_id": 1, "bbox": [269.0, 452.0, 56.0, 58.0], "area": 1553, "segmentation": {"size": [512, 512], "counts": "XoV42m?2N2N2N2N2N2N2N2N2N2N2N2N2O1N2N3M2ImN^AU1`>mN^AU1`>6O000000000000000000000001O2N2N2O1N2N2N3M2N2N2N2N2N2N2N2N2N2N2N2N2N2N2NVQm2"}, "image_id": 573, "id": 10147}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 462.0, 15.0, 30.0], "area": 245, "segmentation": {"size": [512, 512], "counts": "^>n0S?O1N2N2N2N3M2N2N2N2N2N2N2N2NTQh7"}, "image_id": 573, "id": 10148}, {"iscrowd": 0, "category_id": 1, "bbox": [447.0, 463.0, 65.0, 49.0], 
"area": 1960, "segmentation": {"size": [512, 512], "counts": "Zoo61n?2N3M2N2N2N2N2N2N2O1N3M2N2N2N2N2N2N2N2O0O1O1O1O1O11O1O2NO1O1O10O00000000011N2N2N2N2N3M2N2N2N2O100000001O000000O1N3M2N1O001O02N2NRA"}, "image_id": 573, "id": 10149}, {"iscrowd": 0, "category_id": 1, "bbox": [66.0, 485.0, 49.0, 27.0], "area": 793, "segmentation": {"size": [512, 512], "counts": "g_Q12m?2N2N2N2N2N2N2N1O1O1O1O100O1O1O1O1O1O11O1O1O1O1O2N1O1O1O1O1O1OO1O1O1O1O1001O1O1O1O1N2N2N2N2N2N3NWPV6"}, "image_id": 573, "id": 10150}, {"iscrowd": 0, "category_id": 1, "bbox": [27.0, 487.0, 12.0, 11.0], "area": 69, "segmentation": {"size": [512, 512], "counts": "[o=1n?2N2N2O1N3NO2N2O1N2N2NeP\\7"}, "image_id": 573, "id": 10151}, {"iscrowd": 0, "category_id": 1, "bbox": [347.0, 494.0, 44.0, 18.0], "area": 379, "segmentation": {"size": [512, 512], "counts": "oo]51n?1O1O1O1O1O11O1O1O1O00O1O1O1O1O100O1O1O1O1O1O1O1O1O1O1001O1O1O1O1O2N1O1O1O1O1O1O1O1O1NSPl1"}, "image_id": 573, "id": 10152}, {"iscrowd": 0, "category_id": 1, "bbox": [19.0, 498.0, 16.0, 14.0], "area": 139, "segmentation": {"size": [512, 512], "counts": "io92m?2N2N2N2N2N100O1001N2N2O2M2N2NXP^7"}, "image_id": 573, "id": 10153}, {"iscrowd": 0, "category_id": 1, "bbox": [419.0, 505.0, 14.0, 7.0], "area": 56, "segmentation": {"size": [512, 512], "counts": "ooa61n?1O1O1O1O1O1001O1O1O1O1O1OQPW1"}, "image_id": 573, "id": 10154}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 15.0, 15.0], "area": 144, "segmentation": {"size": [512, 512], "counts": "0=c?1O001O00N2O1O1N2O1O1N2O1O1NR`h7"}, "image_id": 574, "id": 10155}, {"iscrowd": 0, "category_id": 1, "bbox": [127.0, 0.0, 41.0, 26.0], "area": 613, "segmentation": {"size": [512, 512], "counts": "Y`o11n?2M3N2N1O2M3N2O001O1O1O001O1O1O001O1O1O1O00O1O1N2O1O1N2O1O1N2O1O1N2O1O1N2O1O1N2OQP\\5"}, "image_id": 574, "id": 10156}, {"iscrowd": 0, "category_id": 1, "bbox": [220.0, 0.0, 40.0, 22.0], "area": 486, "segmentation": {"size": [512, 512], "counts": "RP^32m?2N2O1O001O1O001O1O1O001O1O1O001O1O001O1O1O001O1ON2O1O1N2O1O1N2O1N2O1O1N2O1O1NRPn3"}, "image_id": 574, "id": 10157}, {"iscrowd": 0, "category_id": 1, "bbox": [446.0, 0.0, 6.0, 3.0], "area": 12, "segmentation": {"size": [512, 512], "counts": "PPo61o?1O1O00O1OQPn0"}, "image_id": 574, "id": 10158}, {"iscrowd": 0, "category_id": 1, "bbox": [454.0, 0.0, 58.0, 55.0], "area": 1582, "segmentation": {"size": [512, 512], "counts": "RPS71n?1O2O1O1O1O1O1O1O1O1O1O001O1O1O1O1O1O1O1O1O1O1O001O1O1O1O1O1O1O1O1O1O001O1O1O1O1O1O1O1O1O1000O100000O1000000000000D"}, "image_id": 574, "id": 10159}, {"iscrowd": 0, "category_id": 1, "bbox": [391.0, 12.0, 74.0, 64.0], "area": 2296, "segmentation": {"size": [512, 512], "counts": "jaS62l?3N2N2N2N2N2N2N1O2N2N2N2N2M3N2N2N2N1O00000002N2M3N000000O10002N1O2N2N2N2N2N2N2N2N1N10000000O10002N2N2N2N2N2N1O2N2N2M3N2N2N2N2N2N2N2N2N2N1O2N2M3N2Nn^g0"}, "image_id": 574, "id": 10160}, {"iscrowd": 0, "category_id": 1, "bbox": [147.0, 16.0, 25.0, 26.0], "area": 351, "segmentation": {"size": [512, 512], "counts": "o`Y22l?2O2M3N2M2O2N2M2O2M3O01000O01M3N2M2O2M3N1O2M3N1NYoY5"}, "image_id": 574, "id": 10161}, {"iscrowd": 0, "category_id": 1, "bbox": [163.0, 25.0, 32.0, 31.0], "area": 544, "segmentation": {"size": [512, 512], "counts": "Zaa21n?2N2M2O2N2M3N1N3N2N2M2O2N2O00100000O001N2M2O2N2M3N1N3N2N2M2O2Nl^n4"}, "image_id": 574, "id": 10162}, {"iscrowd": 0, "category_id": 1, "bbox": [194.0, 33.0, 62.0, 55.0], "area": 1715, "segmentation": {"size": [512, 512], "counts": 
"gQQ32l?3N1O2M3N2M2O2N2M3N1O2M3N2N1N3N2O010O1000O10O1UAnNg>V10O10O1000O0100000O01000O10O01N2N1N010O10O11N3N2N1N3N2N2M2O2M3N2N1N3N2N2M2OZno3"}, "image_id": 574, "id": 10163}, {"iscrowd": 0, "category_id": 1, "bbox": [437.0, 53.0, 73.0, 66.0], "area": 2408, "segmentation": {"size": [512, 512], "counts": "Ucj62m?2N2N2N2N2N2N2M2O2N2N2N2N2N2N2N2N2M2OO1000000000000000O1000002N2N2N2N1O2N2M3N2N2N2N000001O2N2N2N2N2N2N2M3N2N2N1O2N2N2N2N2TOn@g0W?M3N1O2N2N2N2N2N2N2N2Mam0"}, "image_id": 574, "id": 10164}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 65.0, 6.0, 10.0], "area": 36, "segmentation": {"size": [512, 512], "counts": "Q2:g?O0O2M3N1Nnml7"}, "image_id": 574, "id": 10165}, {"iscrowd": 0, "category_id": 1, "bbox": [140.0, 70.0, 86.0, 77.0], "area": 3133, "segmentation": {"size": [512, 512], "counts": "nRV22m?2N1N3N2N1N3N2M3N1O2M3N1N3N2N2M2O2N2O1O010000O010000O0O2M3N1O20000O01000O10O1000O01000O10O1000O0100000O01000O10O1000O01000O10O1000OhNaAo0`>oNaAR1_>lNcAT1]>iNfAW1a>0100N2N1N3N2M3N1O2M3N1Fg@L[?2g@K\\?2g@LZ?3;N\\l^4"}, "image_id": 574, "id": 10166}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 79.0, 8.0, 16.0], "area": 74, "segmentation": {"size": [512, 512], "counts": "_2`0a?N2N2N2O1N3M2NZmk7"}, "image_id": 574, "id": 10167}, {"iscrowd": 0, "category_id": 1, "bbox": [340.0, 90.0, 57.0, 79.0], "area": 2505, "segmentation": {"size": [512, 512], "counts": "dTZ52m?2M2O2M3N1O2M3N1N3N2N2j@YO`>0kAi0DXO_>2kAl0S>UOkAn0R>UOlAm0R>UOkAm0S>UOlAm0R>UOlAm0R>a0N3N2M2O1N01000O010O0101N[BoM`=o1`BSN_=m1_BUNb=k1\\BWNd=h1[BYNe=h1[BVNg=j1YBTNi=o13LTBQNm=P22N2nNoA1S>MoA2S>LoA1T>MnA1T>LnA2T>MnA1T>LoA2S>LnA2T>LoA2S>LoA1T>MnA1S>MoA1T>MnA1T>LoA2P?Ni[i1"}, "image_id": 574, "id": 10168}, {"iscrowd": 0, "category_id": 1, "bbox": [118.0, 93.0, 24.0, 22.0], "area": 267, "segmentation": {"size": [512, 512], "counts": "WSk11n?1N3N2N2M2O2M3O0100000O01000O0100O1M2O2M3N1O2Mjlh5"}, "image_id": 574, "id": 10169}, {"iscrowd": 0, "category_id": 1, "bbox": [472.0, 112.0, 40.0, 51.0], "area": 1214, "segmentation": {"size": [512, 512], "counts": "\\T\\72m?2N2N1O2M3N2N2N2N2N2M2O2N2O10000N2N1N3N2N2N2N2N2M2O1O000001N3N2N2N2N1O2M3N2OO2YL"}, "image_id": 574, "id": 10170}, {"iscrowd": 0, "category_id": 1, "bbox": [129.0, 117.0, 54.0, 51.0], "area": 1370, "segmentation": {"size": [512, 512], "counts": "ZdP22m?2N2M3N1O2M3N2N1N3N2N2M2O2N2M3N101O1000O10O100000O0100000O0100000O0100000N1O2M3N2N1N3N2N2M2O2N2M3N1O2M3N2Na[T5"}, "image_id": 574, "id": 10171}, {"iscrowd": 0, "category_id": 1, "bbox": [72.0, 121.0, 26.0, 31.0], "area": 380, "segmentation": {"size": [512, 512], "counts": "WTT12m?1O2M3N1100000O0Z@Hd?;N1N3JBg@`0W?5N3N2O10O1Cm@LR?3PAMP?1QAOP?NSA0o>NSAOo>0SANo>OSAOP?Oal^6"}, "image_id": 574, "id": 10172}, {"iscrowd": 0, "category_id": 1, "bbox": [387.0, 142.0, 64.0, 73.0], "area": 2254, "segmentation": {"size": [512, 512], "counts": "QfQ61m?3N2N2N2M2O2N2N2N4K3N2N1O2M3N2DoNfAS1X>oNfAR1X>QOfAQ1X>QOeAR1Y>WO`Ai0`>UOaAi0b>UO`Ai0m>N2N2N2M2O2N2N2N2M2O2N2N2M`Zn0"}, "image_id": 574, "id": 10173}, {"iscrowd": 0, "category_id": 1, "bbox": [60.0, 145.0, 29.0, 29.0], "area": 442, "segmentation": {"size": [512, 512], "counts": "oTn01n?2N2N2N2N2N2N3M2N2N2N2N2N2N10O2N2N2N2N2N2N2N2N2N2N2N2N2NS[c6"}, "image_id": 574, "id": 10174}, {"iscrowd": 0, "category_id": 1, "bbox": [305.0, 175.0, 74.0, 80.0], "area": 2904, "segmentation": {"size": [512, 512], "counts": 
"gfh41n?2N2N2N2N2O1N2_OD[A?b>C\\A?b>C\\A?c>B[A`0c>B[A`0c>CZA?d>`0N2N2N2N0000001O01O000002N2N3O0000000N2000000001O01O00000N2O1O100000001O00`NoAn0S>POPBm0R>QOPBn0R>oNPBo0R>oNPBo0R>oNPBo0R>oNPBo0b>N2000001O000Eo@DS?:o@DS?:o@ER?9QADQ?:4UANi>4TANj>4UANi>4TAOi>4UANi>3UAOj>3SA0m>b0O10O1000O10O10O1N2N1N3N2O10O010XO]A1d>M]A1e>M^A1d>M^A0e>M]A2e>L]A1e>M^A1d>M^A0e>M]A2d>M^A0hXd5"}, "image_id": 574, "id": 10176}, {"iscrowd": 0, "category_id": 1, "bbox": [429.0, 202.0, 71.0, 68.0], "area": 2596, "segmentation": {"size": [512, 512], "counts": "dgf62]?0QA3l>0QA2n>OPA3n>OPA3n>OPA3f>D\\A;L3f>E[A:N2e>F[Al0c>:N2N2N3M2N2N2N2N2O1N2N2O1000N2N2N2N2N2N2N2O2M2N2N2N2N2N2N2N1O000001O000001O00000000000000101N2N2N2N2N2N3M2N2N2N2N2O1N2N2N2NQi5"}, "image_id": 574, "id": 10177}, {"iscrowd": 0, "category_id": 1, "bbox": [57.0, 227.0, 79.0, 69.0], "area": 2044, "segmentation": {"size": [512, 512], "counts": "agl01n?1N3N2N2M2O2N2M3N1N3000O010000O0100000O0100000O01000O0100000O0i@\\O20j>d0RA^O10m>n0N2M3N1O20000O010O1N1100000O0100O1kN[Al0f>SO\\Am0d>PO^AQ1i>O10O10O1000O10O1000ON3N2N2M2O2M3N1O2M3N2N1N3N2N2M2O`gk5"}, "image_id": 574, "id": 10178}, {"iscrowd": 0, "category_id": 1, "bbox": [268.0, 232.0, 58.0, 48.0], "area": 1451, "segmentation": {"size": [512, 512], "counts": "RXV42m?2N2N2N2N2N2N2KAf@a0X?5N2N2N2N2N2O2M2N2N2N2N001O000001O3M2N2N2N2N2N2N2N21O0000000000O10001O00000O1N2N2N2N2N2N2N2N2N2N2Nggl2"}, "image_id": 574, "id": 10179}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 234.0, 31.0, 54.0], "area": 860, "segmentation": {"size": [512, 512], "counts": "Z7f1[>M2O2M3N1O2M3N2N1N3N2M10000O01001N3N1N3N2N1N3N2N2M2O2M3N2N1N]X`7"}, "image_id": 574, "id": 10180}, {"iscrowd": 0, "category_id": 1, "bbox": [377.0, 261.0, 35.0, 36.0], "area": 655, "segmentation": {"size": [512, 512], "counts": "ehl52m?2N2N2O1N2N2N2N3M2N2N2N2O1N2N2N3M1O03N1N2N2N2N2N2N2N3M2O1N2N2N2N2N3MYga1"}, "image_id": 574, "id": 10181}, {"iscrowd": 0, "category_id": 1, "bbox": [324.0, 280.0, 36.0, 33.0], "area": 586, "segmentation": {"size": [512, 512], "counts": "UYR51n?2N2N3N1N2N2N2N2N2N2N1O1O2N200000010O00O1N2N2N00001O3N1N2N2N2N2N2N2N2Ngf[2"}, "image_id": 574, "id": 10182}, {"iscrowd": 0, "category_id": 1, "bbox": [418.0, 286.0, 11.0, 10.0], "area": 55, "segmentation": {"size": [512, 512], "counts": "TYa62m?2N2N1O000001O2O1N2NPWY1"}, "image_id": 574, "id": 10183}, {"iscrowd": 0, "category_id": 1, "bbox": [33.0, 289.0, 53.0, 58.0], "area": 1213, "segmentation": {"size": [512, 512], "counts": "ci`01m?2O2N2M3N1KHa@:]?6N1N3N2N20O1000WAWOY>j0eAWO\\>h0cAZO]>6ZAHO97L`>1_A3ONa>NbA3K1d>IdA6F3f>EeAh0Z>WOhAh0i>000O10ON3N2N2M2O2N2M3OO2N1N3N2M3N1O2M3N2N1N3N2N1N3N2M3N1O2MSfd6"}, "image_id": 574, "id": 10184}, {"iscrowd": 0, "category_id": 1, "bbox": [424.0, 291.0, 75.0, 71.0], "area": 2551, "segmentation": {"size": [512, 512], "counts": "eZd62m?2N2N2N2O1N2N2ICh@?V?Ch@?V?7N2GUOZAm0d>UOZAm0d>UOZAn0Z>nNmAZ1Q>hNmAZ1R>hNkAZ1S>hNkAZ1S>:O00000000000000001O000001O2N2N2N2N3M2N2N1O00003M2O1N2N2N2N2N2N20000000N3M2N1O02O1N2N2N3M2N2N2N2N2N2N2N2N2N2N2NnU6"}, "image_id": 574, "id": 10185}, {"iscrowd": 0, "category_id": 1, "bbox": [293.0, 293.0, 31.0, 35.0], "area": 490, "segmentation": {"size": [512, 512], "counts": "^ib42m?2N2N2N2N2N2N3M2N2O11O0000000000000000000N2001O00O1N2Ga@Na?0a@Na?0a@Nb?O]fm2"}, "image_id": 574, "id": 10186}, {"iscrowd": 0, "category_id": 1, "bbox": [222.0, 298.0, 61.0, 54.0], "area": 1687, "segmentation": {"size": [512, 512], "counts": 
"mY_32m?2N2N2O1N3M2N2N2N2N2N2N2N2N2N2N2O2M2N2N2N2O1000000001O01O0000N2N0000000000101N3M2N2N2N2N2N2N2N0000012M2N2N2N2N2N2N2N2N2NoUb3"}, "image_id": 574, "id": 10187}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 312.0, 48.0, 96.0], "area": 2328, "segmentation": {"size": [512, 512], "counts": "n9V100000O0100000O10O100000O0100000O0O2N4L1O2M3N2N2M2O2N2M3N1O2N2M3N1ORdo3"}, "image_id": 574, "id": 10193}, {"iscrowd": 0, "category_id": 1, "bbox": [277.0, 348.0, 43.0, 39.0], "area": 883, "segmentation": {"size": [512, 512], "counts": "]kZ41o?2M3M2O2M3N2M2O2M3M3N1N3NO010O00010O010O0010O0010O010O0010O0102M3N1N3M3N1N3N2M3N1N3Mbdo2"}, "image_id": 574, "id": 10194}, {"iscrowd": 0, "category_id": 1, "bbox": [342.0, 366.0, 76.0, 63.0], "area": 2615, "segmentation": {"size": [512, 512], "counts": "a\\[52m?4M2N3L3N3L3N3M2M4M2M2O000O010O01000O010O01000O010O010@POPBP1o=TOmAl0T>VOjAj0V>YOfAg0Z>\\OdAd0\\>`0O0100O0100O0100O01000O010O01000O102M4M2N3L3N0O1000O012M3N3M2N02N3L301M2N3_On@MU?1m@MV?On@MU?1m@MV?On@NT?0kc^1"}, "image_id": 574, "id": 10195}, {"iscrowd": 0, "category_id": 1, "bbox": [457.0, 374.0, 55.0, 57.0], "area": 1757, "segmentation": {"size": [512, 512], "counts": "VlT72m?2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N10O2N2O100000001O000000O1O1O100O2N1O1O1O1O1O1O1N2N2N2N2N2N1O0000000002N2N2N2NVD"}, "image_id": 574, "id": 10196}, {"iscrowd": 0, "category_id": 1, "bbox": [257.0, 377.0, 28.0, 22.0], "area": 310, "segmentation": {"size": [512, 512], "counts": "WlP42n?1N2N2N2LH]@9b?20001O0000002N2O1000000000N2N2N2N2N2N2O1N2NkSa3"}, "image_id": 574, "id": 10197}, {"iscrowd": 0, "category_id": 1, "bbox": [310.0, 386.0, 26.0, 26.0], "area": 309, "segmentation": {"size": [512, 512], "counts": "Y\\k41n?2N3N1N3M2O1N2N2O1N2000O2O00001_Oa@>a?00O1O1N2N2N2N2N2N\\cg2"}, "image_id": 574, "id": 10198}, {"iscrowd": 0, "category_id": 1, "bbox": [205.0, 387.0, 10.0, 9.0], "area": 44, "segmentation": {"size": [512, 512], "counts": "WlV31m?3M210O10O10OO2M2OjSd4"}, "image_id": 574, "id": 10199}, {"iscrowd": 0, "category_id": 1, "bbox": [156.0, 389.0, 75.0, 63.0], "area": 2186, "segmentation": {"size": [512, 512], "counts": "R]^21m?3M2O2M3M2O2M3M2N3N2M2N3N2M2N3M2O2O1O0100O0N3M2O2M3M2O2M3M2O2O1O010O01M2[A\\Ol=g0QB\\Om=f0PB\\OP>d0nA_OR>a0lA@U>?iADV>=gAEZ>:dAI\\>7aAK_>l00O10O0100O010O10O0100eN`AT1`>kNbAT1e>N3M2O2M2N3N1N3M2N3N2M2N3N1N3M2N3NkR\\4"}, "image_id": 574, "id": 10200}, {"iscrowd": 0, "category_id": 1, "bbox": [239.0, 416.0, 74.0, 73.0], "area": 2731, "segmentation": {"size": [512, 512], "counts": "Wng31n?2M2N3N2M2O2M3c@@V?b0h@AV?e0N3N2O01000O0100O0100o@TOl>P101M3N1N3N2M2N3N1N3N2M2N3N101O10O01000OO0O01O01O010O02N3N1N3M3N1N3N1N3M3N1N3N2M2N3N1N3N2M2N3N2M2O2M2N3N2M2NbRS3"}, "image_id": 574, "id": 10201}, {"iscrowd": 0, "category_id": 1, "bbox": [420.0, 423.0, 77.0, 66.0], "area": 2621, "segmentation": {"size": [512, 512], "counts": "R^b62m?2N2N2N2f@Hi>:UAHi>:UAHi>:UAHi>:VAGi>:UAHf>GWAc01Hf>l0N2N2N2N3M2N2N2N2N2N20O1N0000000000002N2N2N2N2N2N2N2N3M2N2N2N2N2OO000000000000000000000001O02N2N12M2N2N2N2N2N2N2N2N2O1N2N2N2N2N2N2NXR7"}, "image_id": 574, "id": 10202}, {"iscrowd": 0, "category_id": 1, "bbox": [309.0, 439.0, 60.0, 73.0], "area": 2641, "segmentation": {"size": [512, 512], "counts": "Uoj41m?2N3EKg@7W?Kg@8V?Kg@7W?;M2N3M2N3N1N3XAkNa>W1]AkNa>\\1gAbNn=^1PBeNP>[1mAgNS>Y1kAjNR>c1N3M2N3M2N3O01O001O001O001O001O001O001O001O001M2N3M2[NlA\\1W>aNkA]1^>E`AROb>l0`AROc>k0:N3M2N3M2N3M2N3M2N3M2NcQW2"}, "image_id": 574, "id": 10203}, {"iscrowd": 0, "category_id": 1, "bbox": [121.0, 442.0, 75.0, 64.0], "area": 2179, "segmentation": {"size": [512, 512], 
"counts": "gnl12l?3M2O2M3M2O2M3M2O2M3M2O2M3M2O2M3M20100O01N2M2O2M3M3M2N3N2M2N3O10O10O10VA@P>c0nA^OP>d0nA_OP>c0mA_OS>a0lAAS>`0jABW>>gADX>9cAI^>7`AK_>k010O0100O010O010O010O1gN_AR1`>lNcAR1_>lNbAR1h>M2O2M2N3N1N3M2O2M2N3M2O2M2N3N1NWam4"}, "image_id": 574, "id": 10204}, {"iscrowd": 0, "category_id": 1, "bbox": [426.0, 489.0, 16.0, 13.0], "area": 116, "segmentation": {"size": [512, 512], "counts": "^_e62l?2O2M3O0010O10O10O010O1O0N3N1Nc`R1"}, "image_id": 574, "id": 10205}, {"iscrowd": 0, "category_id": 1, "bbox": [491.0, 494.0, 21.0, 18.0], "area": 208, "segmentation": {"size": [512, 512], "counts": "noe72m?1N2O1N2O1N2O11O1O001O1O00O1N2O1N2O1N2O1"}, "image_id": 574, "id": 10206}, {"iscrowd": 0, "category_id": 1, "bbox": [99.0, 496.0, 28.0, 16.0], "area": 286, "segmentation": {"size": [512, 512], "counts": "koa12m?2T@Mg?7N2N2O10000O1O1O1001O1O001O001O001O1O001O0LZ@Ng?O\\@0j?01O00QPP6"}, "image_id": 574, "id": 10207}, {"iscrowd": 0, "category_id": 1, "bbox": [375.0, 500.0, 31.0, 12.0], "area": 255, "segmentation": {"size": [512, 512], "counts": "ook51n?1N2N2N2O1O10000O10000O10000001O001O001O001O001O001O001O001MV`d1"}, "image_id": 574, "id": 10208}, {"iscrowd": 0, "category_id": 1, "bbox": [365.0, 509.0, 7.0, 3.0], "area": 13, "segmentation": {"size": [512, 512], "counts": "oof51m?2001O001O00Q`U2"}, "image_id": 574, "id": 10209}, {"iscrowd": 0, "category_id": 1, "bbox": [7.0, 82.0, 57.0, 62.0], "area": 1633, "segmentation": {"size": [512, 512], "counts": "fb35j?6K5K00O101O00O01000000O010000000O0100000O10O5L5M30000L3L5J6K5K003M0O3N000000O01000001O0O1003L010bNdAX1_>eNeAW1g>K5J6UOl@`0_?K5K5Keko6"}, "image_id": 577, "id": 10210}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 106.0, 20.0, 41.0], "area": 447, "segmentation": {"size": [512, 512], "counts": "\\31o?5\\@K10e>:UAK1Oe>n003M00oNYAi0f>WO`Aa0KYOh>3_Ab0Q?0O1000O2O004L5J6K5Kcke7"}, "image_id": 577, "id": 10211}, {"iscrowd": 0, "category_id": 1, "bbox": [204.0, 257.0, 17.0, 21.0], "area": 219, "segmentation": {"size": [512, 512], "counts": "^XV37e?400001O0001O0000000L4J60000000\\Oo@3Vga4"}, "image_id": 577, "id": 10212}, {"iscrowd": 0, "category_id": 1, "bbox": [478.0, 0.0, 23.0, 3.0], "area": 43, "segmentation": {"size": [512, 512], "counts": "PP_71o?000000000000001O000000000000001O0000000000NR`5"}, "image_id": 578, "id": 10213}, {"iscrowd": 0, "category_id": 1, "bbox": [212.0, 233.0, 72.0, 74.0], "area": 2837, "segmentation": {"size": [512, 512], "counts": "mXZ33l?3N1N3M2O2M2oNA^Bb0_=A_B`0`=A^Bb0_=A^Bb0`=_O_Bb0_=AoAF9l0e=@PBG9i0g=HWB9h=JUB6k=LTB3m=OPB2o=o0010O00010O0010O00010O00010O0210M2O2M2N3N1N3N0O0010O01O01O01O01OLaAhNb>U16N3N101O0O2N1N3M3N1N3N1N3M2O2M2N3N1N3M3N`ga3"}, "image_id": 578, "id": 10214}, {"iscrowd": 0, "category_id": 1, "bbox": [81.0, 375.0, 27.0, 27.0], "area": 372, "segmentation": {"size": [512, 512], "counts": "RlX11n?2N3M2N2N2N2N2N2N2N2O1N200000N2N2N2N2N2N2N2N2N2N2N3NjcY6"}, "image_id": 579, "id": 10215}, {"iscrowd": 0, "category_id": 1, "bbox": [104.0, 411.0, 82.0, 85.0], "area": 2683, "segmentation": {"size": [512, 512], "counts": "_]d13l?2O1N2N2N2N2N2N2N2N2N2N2N3M2N2N2N2N2N2N2N2O100000001OO1N20000000000000000001O0000000000N2N2N2N2N2N20000000VAPOc>P1[AROe>n0YATOg>S1000000000000000001O00O1N2N2TOUA`0m>^OUAa0l>]OVAa0l>]OVAa0l>^OUA`0W?N2N2N2N2N2N2NQaR5"}, "image_id": 579, "id": 10216}, {"iscrowd": 0, "category_id": 1, "bbox": [476.0, 492.0, 27.0, 20.0], "area": 477, "segmentation": {"size": [512, 512], "counts": "g_^79_?8M30000000000001O00000000000000001O0000000000000I7HjP4"}, "image_id": 579, "id": 10217}, {"iscrowd": 0, 
"category_id": 1, "bbox": [271.0, 24.0, 65.0, 42.0], "area": 2013, "segmentation": {"size": [512, 512], "counts": "daW42i?5K6J5J6K5N201O000001O01O000001O01O000000011N1O000001O01O000001N100000010O000000010O00000000010ON2K500010O00000001O00M4J5K5K5K5Dc@Oh^h2"}, "image_id": 580, "id": 10218}, {"iscrowd": 0, "category_id": 1, "bbox": [276.0, 274.0, 24.0, 23.0], "area": 317, "segmentation": {"size": [512, 512], "counts": "RYZ41m?2N3M2N3M2N3M2N30O010O010O001M210O0N3L3N3M2N3MXgY3"}, "image_id": 580, "id": 10219}, {"iscrowd": 0, "category_id": 1, "bbox": [168.0, 333.0, 87.0, 80.0], "area": 3760, "segmentation": {"size": [512, 512], "counts": "Z[d21m?3L3N3L3N2M4M2M4M2M310O0hAVO[=j0bBXO^=h0_B\\O`=d0^B^Oc=b0YBAg=?WBDi=L4L3L5L4L4L4K4M4Lhnj7"}, "image_id": 581, "id": 10229}, {"iscrowd": 0, "category_id": 1, "bbox": [288.0, 7.0, 56.0, 54.0], "area": 1572, "segmentation": {"size": [512, 512], "counts": "cP`42m?3M2N2[@I_?8_@J_?=N2N2N2N200001O0000O1N2N3M2N2N2N2O1O1N2O1N2N2N200000000000O1N2N2N3M2N2N2N2N2N2UOn@f0W?N2N2N3M2N2N2N2N2N2N2Ninc2"}, "image_id": 581, "id": 10230}, {"iscrowd": 0, "category_id": 1, "bbox": [463.0, 10.0, 45.0, 55.0], "area": 1272, "segmentation": {"size": [512, 512], "counts": "WaW71m?2O2M3N1O2M3N2N1N3N2M3N1O2M3N1O2M3N2M2O2N2O1O010000O010000O01\\O_AHa>6`AK`>2cAN\\>1fAN[>0gA0Y>MiA3X>KjA3W>LkA1X>LjA3X>KjA3W>KlA2W>LkA2W>LjA3S?M]n1"}, "image_id": 581, "id": 10231}, {"iscrowd": 0, "category_id": 1, "bbox": [73.0, 14.0, 26.0, 27.0], "area": 380, "segmentation": {"size": [512, 512], "counts": "j`T12m?2N3M3M3M2N2N2N2N2N2N1O0101N2N3M2N2N2N2N2N2N2N2N2NV_^6"}, "image_id": 581, "id": 10232}, {"iscrowd": 0, "category_id": 1, "bbox": [154.0, 20.0, 13.0, 12.0], "area": 87, "segmentation": {"size": [512, 512], "counts": "lP]21l?0U@2i?4N2N001O1001N2N2N2N2NX_\\5"}, "image_id": 581, "id": 10233}, {"iscrowd": 0, "category_id": 1, "bbox": [86.0, 30.0, 30.0, 30.0], "area": 451, "segmentation": {"size": [512, 512], "counts": "\\Q[11n?2N2N2N2O1N3M2N2N2N2N2N2N2N2OO003M2N2N2N2N2N2N2O1N3M2N2N2NdnU6"}, "image_id": 581, "id": 10234}, {"iscrowd": 0, "category_id": 1, "bbox": [134.0, 35.0, 25.0, 28.0], "area": 361, "segmentation": {"size": [512, 512], "counts": "]QS22m?2N2N2N3M2N2000000N2N2N2N2N2N2N02N2Dj@KX?3j@KX?3j@KX?4i@JY?4;N2Nf^`5"}, "image_id": 581, "id": 10235}, {"iscrowd": 0, "category_id": 1, "bbox": [245.0, 36.0, 52.0, 60.0], "area": 1473, "segmentation": {"size": [512, 512], "counts": "maj31n?2O1N2N2N2N2N2N2N2N2N2N2N2N3M2N2N2O1N2N2N2N2N2N2N2N2N2O2O000000gNhAi0X>UOkAi0V>UOlAi0V>UOlAi0V>UOlAi0V>UOlAi0W>TOkAj0g>N2N2N2N2O1N2N2N2N3M2N2N2Nd][3"}, "image_id": 581, "id": 10236}, {"iscrowd": 0, "category_id": 1, "bbox": [24.0, 42.0, 33.0, 32.0], "area": 484, "segmentation": {"size": [512, 512], "counts": "aQ<1o?2M2N3N1N2N3N1N3M2N3NO200010O010O010OO1N3N0O0010O01O3N1N3M2O2M2NR^S7"}, "image_id": 581, "id": 10237}, {"iscrowd": 0, "category_id": 1, "bbox": [201.0, 43.0, 57.0, 66.0], "area": 1803, "segmentation": {"size": [512, 512], "counts": "haT31n?2N2e@Lg>6WALg>6WALg>6WALg>6WALg>6XAKf>7XAKf>7XAKg>6WALg>h0N2N2N3N10000000000000000001O00000000000000000N2N2N2N2N2N2N2N2N2N2N2N2N2O2M2N2N2N2N2N2N2N2N[mn3"}, "image_id": 581, "id": 10238}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 48.0, 35.0, 37.0], "area": 670, "segmentation": {"size": [512, 512], "counts": "m14k?2N2N2N3N1N2N2N2N2N2N2N2N2N2O2O000000N2N2N2N3M2N2N2N2N2N2N2O1N2N3M2Nl]^7"}, "image_id": 581, "id": 10239}, {"iscrowd": 0, "category_id": 1, "bbox": [509.0, 67.0, 3.0, 7.0], "area": 15, "segmentation": {"size": [512, 512], "counts": "Vbn73k?2O2lM"}, "image_id": 
581, "id": 10240}, {"iscrowd": 0, "category_id": 1, "bbox": [75.0, 68.0, 58.0, 51.0], "area": 1515, "segmentation": {"size": [512, 512], "counts": "mbU11o?3L3N2M3N2N2M3N3L3N2M3N1N10O010O010O01000O010O010O010O010OKROYAo0f>TOXAk0i>6O10O010O010O010O010O3N3L101N3N2N2M3N2M4M2M3N2M3N2MP]m5"}, "image_id": 581, "id": 10241}, {"iscrowd": 0, "category_id": 1, "bbox": [404.0, 68.0, 54.0, 56.0], "area": 1423, "segmentation": {"size": [512, 512], "counts": "[SZ61o?2M2N2GKd@7Z?Lc@6[?9O1N3M2N2N2N2N2N2O0O00000LmN\\AS1d>oNZAQ1f>4010O00000000000010O000002N2N2N2O1N2N2N3M2N2N2N2O1N3M2N2N2N2N2O1NWmj0"}, "image_id": 581, "id": 10242}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 70.0, 31.0, 32.0], "area": 522, "segmentation": {"size": [512, 512], "counts": "dRW72m?3M2N2N2N2N2N2N2O1N2N2N2N3M2N01O2N2N2N2N2N2N2N2O2M2N2N2N2N2N\\]9"}, "image_id": 581, "id": 10243}, {"iscrowd": 0, "category_id": 1, "bbox": [497.0, 72.0, 15.0, 24.0], "area": 200, "segmentation": {"size": [512, 512], "counts": "_bh72n?1N2N2N2N3M2N2O1N2000001O01NbM"}, "image_id": 581, "id": 10244}, {"iscrowd": 0, "category_id": 1, "bbox": [14.0, 95.0, 52.0, 56.0], "area": 1428, "segmentation": {"size": [512, 512], "counts": "^S72n?1N3M2O1N3M2O2M2N3M2O1N3M2O2M2N3N1N2N3M2O2M201O01O010O000O2M00102M2N2N3N1N5K2O2M2N3N1N2N3M2O2M2N3N1N2Nkkn6"}, "image_id": 581, "id": 10245}, {"iscrowd": 0, "category_id": 1, "bbox": [443.0, 107.0, 10.0, 11.0], "area": 60, "segmentation": {"size": [512, 512], "counts": "_cm63l?2M2O200O10M3N1O2Mc\\m0"}, "image_id": 581, "id": 10246}, {"iscrowd": 0, "category_id": 1, "bbox": [449.0, 119.0, 57.0, 59.0], "area": 1686, "segmentation": {"size": [512, 512], "counts": "hdP72l?2N3M3N1N3M2010O01000O010OO2N2M2N3M2O2M2N3N2M2N3M2O2M3M2O20O10O10O010O010O10O10O010M3kNcAd0^>ZOdAd0_>ZOcAc0_>[OcAc0`>ZOcAd0^>[OcAc0o>M3N1N3M2O2M3M2N\\k2"}, "image_id": 581, "id": 10247}, {"iscrowd": 0, "category_id": 1, "bbox": [389.0, 124.0, 23.0, 22.0], "area": 262, "segmentation": {"size": [512, 512], "counts": "UdR62m?2N1O2N2N2N2N2N2N2O100000O1N2N2N2N2N2N2N2N2Nika1"}, "image_id": 581, "id": 10248}, {"iscrowd": 0, "category_id": 1, "bbox": [369.0, 142.0, 50.0, 54.0], "area": 1304, "segmentation": {"size": [512, 512], "counts": "Reh52m?3M2N2N2N2N2N2N2N2N2N2N2O1N2N2N2N3M2N2N2N20000000000000000000N3N1kNZAm0h>QOZAm0n>N2N2N2N2N2N2N2N2N2N2N3M2N2N2O^Z^1"}, "image_id": 581, "id": 10249}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 143.0, 27.0, 49.0], "area": 804, "segmentation": {"size": [512, 512], "counts": "_4Z1g>001O000001O0N2N2N2N2O1N2N1O00001O1O2N2N2N2O1N2Eb@1`?Mb@1`?Mc@0[jb7"}, "image_id": 581, "id": 10250}, {"iscrowd": 0, "category_id": 1, "bbox": [314.0, 183.0, 61.0, 60.0], "area": 1615, "segmentation": {"size": [512, 512], "counts": "XVm42n?1N2N2N2N2N2N3M2N2N2N2O1N2N2N2N2N3M2N2001O000000000001O000001O000000000001OO1N2N01O00DWABi>>ZA_Of>a0\\A]Od>d0]AZOc>f0_AXOa>h0=N2N2N2N2N2N3M2N2O1N2N2N^YT2"}, "image_id": 581, "id": 10251}, {"iscrowd": 0, "category_id": 1, "bbox": [418.0, 184.0, 68.0, 52.0], "area": 1749, "segmentation": {"size": [512, 512], "counts": "eVa61n?2N2N2N2N2N2N2N2N2N2N2N2N2N2N3M2N2N2N2N2N2N10O000000000000002N2N2N2N3M2N2N2N2O1N1O0000000000000002N2N2O1N02N2N2N2N2N2N2N2N2N2N2N2N2N2Nki<"}, "image_id": 581, "id": 10252}, {"iscrowd": 0, "category_id": 1, "bbox": [148.0, 189.0, 53.0, 55.0], "area": 1455, "segmentation": {"size": [512, 512], "counts": "WVZ22m?3N2M3N3L3N2N3L3N2M4M2M3N2N3L3N2M4M2M3NO10O0102M3N3M2M3N3L3N2M4M2N2M3N1N10O01000O010O010O02O2N2M4M2M3N2MnXk4"}, "image_id": 581, "id": 10253}, {"iscrowd": 0, "category_id": 1, "bbox": [203.0, 
197.0, 40.0, 40.0], "area": 785, "segmentation": {"size": [512, 512], "counts": "cfU31n?2N2N2O1N2N2N2N2N2N3M2N2N2N2O1N2O10001O000001O0O1N2N2N2N2N2N2N3M2N2N2O1N2N2N2NTYV4"}, "image_id": 581, "id": 10254}, {"iscrowd": 0, "category_id": 1, "bbox": [275.0, 215.0, 65.0, 65.0], "area": 1897, "segmentation": {"size": [512, 512], "counts": "VgY41n?2N2N2N2O1N3M2N2N2N2N2N2N20001l@VOo>n00000O1N001O2N2N2N2O1N2N200010O0000000000000001O01cNaAV1_>hNcAX1c>0000O1N2N2N2O1N3M2N2N2N2N2N2N2N2N2O2M2N2N2Nlge2"}, "image_id": 581, "id": 10255}, {"iscrowd": 0, "category_id": 1, "bbox": [476.0, 216.0, 36.0, 77.0], "area": 1608, "segmentation": {"size": [512, 512], "counts": "eW^73l?1N3DJk@8S?Jk@7S?Lj@7T?Kj@7T?Jk@8R?Y110000O010000O0100O1N1N3NhH"}, "image_id": 581, "id": 10256}, {"iscrowd": 0, "category_id": 1, "bbox": [92.0, 222.0, 38.0, 61.0], "area": 1578, "segmentation": {"size": [512, 512], "counts": "aX^14g?5K5L4EAPAe0k>:N200L4K6K4O10010O000O1K5O2O0000010O00000001WNPBa1W>N2K510K4L4K5L4K6J5Ef@LTXo5"}, "image_id": 581, "id": 10257}, {"iscrowd": 0, "category_id": 1, "bbox": [380.0, 224.0, 42.0, 43.0], "area": 875, "segmentation": {"size": [512, 512], "counts": "_Wn53l?2N2N2N2O1N2N2N2N2N2N2N2N2N2N2N20001O000000000000N2N2N2N2N3M2N2N2N2O1N2N2N2N2N2N2NWh\\1"}, "image_id": 581, "id": 10258}, {"iscrowd": 0, "category_id": 1, "bbox": [206.0, 232.0, 15.0, 14.0], "area": 117, "segmentation": {"size": [512, 512], "counts": "^WW32m?3N1N3N1N1O01O010O011N3M3N1NcXa4"}, "image_id": 581, "id": 10259}, {"iscrowd": 0, "category_id": 1, "bbox": [240.0, 233.0, 29.0, 30.0], "area": 421, "segmentation": {"size": [512, 512], "counts": "cWh32m?2M3N2N2N2N2N2N2N20000000000O10000000N2N2N2N2N2N2N2N2N2NVXi3"}, "image_id": 581, "id": 10260}, {"iscrowd": 0, "category_id": 1, "bbox": [204.0, 257.0, 60.0, 53.0], "area": 1517, "segmentation": {"size": [512, 512], "counts": "nXV32m?3N3L3N2M4M2M3N3M2M2OO010O01000O010O010O01000O010OO2O001N101N2O001N101O0O2O10O2O1N2OO01N3N1QORAk0R?N3N2N2M2O2M3N2N1N3N2N2M2Oagk3"}, "image_id": 581, "id": 10261}, {"iscrowd": 0, "category_id": 1, "bbox": [179.0, 261.0, 23.0, 18.0], "area": 195, "segmentation": {"size": [512, 512], "counts": "_hi21o?1N3M3N1N2O0O01O01O010O01O01O010O01O3N1N3N2Megj4"}, "image_id": 581, "id": 10262}, {"iscrowd": 0, "category_id": 1, "bbox": [349.0, 266.0, 38.0, 39.0], "area": 719, "segmentation": {"size": [512, 512], "counts": "fh^53l?2N2O1N2N3M2N2N2O1N2N3M2N2N4N0001O0O1000001M2N2N2N2N2N3NO0002N2N2N2O1N3M2NPWn1"}, "image_id": 581, "id": 10263}, {"iscrowd": 0, "category_id": 1, "bbox": [31.0, 270.0, 54.0, 60.0], "area": 1498, "segmentation": {"size": [512, 512], "counts": "fi?1n?3M2N2N2N2O1N2N3M2N2N2N2O1N3^OSOlAo0S>ROkAP1S>ROkAo0T>SOjAm0V>UOhAk0X>XOeAi0Z>YOeAf0[>?1O00000001O0003M2N2N2N2N2O1N3M2N2N2N2N2O2M2N2N2N2N2N2N3N1N2N2NgVe6"}, "image_id": 581, "id": 10264}, {"iscrowd": 0, "category_id": 1, "bbox": [257.0, 277.0, 21.0, 19.0], "area": 172, "segmentation": {"size": [512, 512], "counts": "QiP41n?2N2O1N3M2N0000010O0000000001O101N2N3M2NWgd3"}, "image_id": 581, "id": 10265}, {"iscrowd": 0, "category_id": 1, "bbox": [398.0, 278.0, 74.0, 87.0], "area": 2271, "segmentation": {"size": [512, 512], "counts": "nZW63l?2M3N1JIb@9\\?7N2M2O2N2M2O2M3N2N1N3N2M2OO10O010O1KgNdAX1]>jNaAV1_>401000O010O01000O010O2O2N2M3N1N3N2NO01000O0100O0100O0100O0100O02O2O100O0O2M3N2N1N3N2M2O2N2M3N1N3Nofc0"}, "image_id": 581, "id": 10266}, {"iscrowd": 0, "category_id": 1, "bbox": [102.0, 285.0, 38.0, 34.0], "area": 668, "segmentation": {"size": [512, 512], "counts": 
"ZYc11n?2N2N2N2N2N2N3M2O100000000O1N2N3M2N2N00001O01O002N2N2N3M2N2N2O1N2N2N2N2N3Mefi5"}, "image_id": 581, "id": 10267}, {"iscrowd": 0, "category_id": 1, "bbox": [496.0, 290.0, 9.0, 8.0], "area": 42, "segmentation": {"size": [512, 512], "counts": "UYh71m?3N101O10O10OO2MmV3"}, "image_id": 581, "id": 10268}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 304.0, 24.0, 24.0], "area": 283, "segmentation": {"size": [512, 512], "counts": "ii]11n?2N2N2N2N2O1N2N2N2N20001O00O1N2N2N2N3M2N2N2N2NSVV6"}, "image_id": 581, "id": 10269}, {"iscrowd": 0, "category_id": 1, "bbox": [167.0, 307.0, 32.0, 32.0], "area": 561, "segmentation": {"size": [512, 512], "counts": "nic23l?2O2M2N3N1N2N3M2O2M2N3N1N2N1O10O1O2O2M2N3N1N2N3M2O2M2N3N1N2N3NiUl4"}, "image_id": 581, "id": 10270}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 311.0, 29.0, 36.0], "area": 599, "segmentation": {"size": [512, 512], "counts": "S:e0Z?2N2N2N1O01O0000000001O1O11O1N2N2N2N2N2N2N3N1N2N2N2N2N2NjUa7"}, "image_id": 581, "id": 10271}, {"iscrowd": 0, "category_id": 1, "bbox": [201.0, 312.0, 35.0, 35.0], "area": 625, "segmentation": {"size": [512, 512], "counts": "UjT32m?2N2N2N3M2N2N2O1N2N2N2N2N3M2O100001O0O1O1N2N2N2N2N3M2N2N2O1N2N2N2N2NdeY4"}, "image_id": 581, "id": 10272}, {"iscrowd": 0, "category_id": 1, "bbox": [272.0, 312.0, 157.0, 182.0], "area": 7507, "segmentation": {"size": [512, 512], "counts": "eZX43j?3N2M4L3M3M4M2M4L3M3O20O00010O0001^AnNU>Q1iAQOW>P1eASO[>m0bAWO^>U101O01O010O00010O00mAcNc=9RBg09UOb=0XBk06XO_=J^Bn03[O\\=EdBP1N_Oi=a0WBAg=`0UBCl=W1PBfNP>[1RBaNP>^19001lAaNf=^1ZBeNc=4TBl07TOb=M[Bn03YO_=F`BQ11\\Oi=d0UB@h=`0WBAi=`0TBBm==QBDP>V1001N1EmAiNS>W1PBfNQ>Z1QBbNP>^1:0O0mAaNe=_1[BdNc=\\1[BhNb=1WBj07XO_=K]Bn03[O]=CdBQ1N@h=`0XBAg=`0WB@k=?SBCm=>PBDQ>T1100DnAjNS>V1oAfNR>Z1QBcNo=]1TB`Nm=_1:1O0mAcNd=9QBe0;UOa=3WBi07XO^=M]Bk05[O\\=FcBn00AY=^OjBT1J_Oi=a0VB@k=`0QBBP>W11O0O101CmAkNS>V1oAgNQ>Y1SBbNo=^19000mAbNe=^1ZBeNc=5TBi08WO`=M[Bm04YO_=G`Bo01]O\\=AfBR1MAg=`0XB@i=?UBBl=?PBDP>V11N101O0EnAhNR>X1QBdNQ>\\190010OlAdNd=\\1\\BhN`=Y1_BjN_=U1`BoN]=L[Bi07_O[=DaBm04_O[=AdBP1OA]=\\OhBS1HCm==PBEQ>U11O0O2DlAjNT>V1PBfNP>[1RBbNo=]1:0010kNaAg0_>VOdAj0\\>SOgAn0Y>oNjAP1c>1O01O010O00010L3M3M4M2M3M4L3M3N3L3MlQY1"}, "image_id": 581, "id": 10273}, {"iscrowd": 0, "category_id": 1, "bbox": [241.0, 314.0, 12.0, 12.0], "area": 75, "segmentation": {"size": [512, 512], "counts": "Pjh32m?2N2N3M0010O001O3M2N2NSVQ4"}, "image_id": 581, "id": 10274}, {"iscrowd": 0, "category_id": 1, "bbox": [429.0, 319.0, 52.0, 61.0], "area": 1106, "segmentation": {"size": [512, 512], "counts": "bkf62l?2O2N2M2O2M3N2N1N3N2M2O2M2O000O010O01000O010O01000O010O01000O010O0100O011O2M2O2M3N2N1N2O0O012N1N3N2J5OnU?"}, "image_id": 581, "id": 10275}, {"iscrowd": 0, "category_id": 1, "bbox": [501.0, 319.0, 11.0, 18.0], "area": 96, "segmentation": {"size": [512, 512], "counts": "]jj71n?1N3N2N2M2O1N10O10O0SF"}, "image_id": 581, "id": 10276}, {"iscrowd": 0, "category_id": 1, "bbox": [109.0, 325.0, 55.0, 57.0], "area": 1673, "segmentation": {"size": [512, 512], "counts": "Zkf12m?2N2N3M2N2N2N2N2N2N2N2N2N2O1DTOaAn0[>VOcAl0X>YOfAi0V>[OhAe0W>c00O1O1O1O1O1O1O2N1O1OO11O2N1O1O2N100O2N1O1O2N1O1O1O2N1O1O2N1M3M4K4M3M3Mge]5"}, "image_id": 581, "id": 10277}, {"iscrowd": 0, "category_id": 1, "bbox": [157.0, 344.0, 73.0, 61.0], "area": 1931, "segmentation": {"size": [512, 512], "counts": "Sl^21n?2O1N2N2N2N3M2N2N2N2O1N2N2N3M2OO2N2N1O000001O01OLXOQAh0o>ZOo@f0Q?4000003N1N2N2N2N1O000001O0001O000000000001O0001O02N20000N2N2N2N2N3M2O1N2N2N2N2N3M2N2O1N2N2Nbd\\4"}, "image_id": 581, "id": 10278}, {"iscrowd": 0, "category_id": 1, "bbox": 
[446.0, 346.0, 66.0, 95.0], "area": 3414, "segmentation": {"size": [512, 512], "counts": "h\\o62l?2O2N2M3N2N1N3N2M3N1O2M3N2N1N3N2M3N1O2M3N2N1N3N2M3N1O2M3N2N1N3N2N2M30O1000O10O10O10O1N2M2O2N2M01000O01000O010O01000O01000O01000O01WE"}, "image_id": 581, "id": 10279}, {"iscrowd": 0, "category_id": 1, "bbox": [21.0, 350.0, 56.0, 46.0], "area": 1415, "segmentation": {"size": [512, 512], "counts": "ak:1n?2N2N3M2N2O1N2N2N2N2N3M2N2N2O1N2N2N2N2N3M2O1000O1N2N2N3M2N2N2N2O10N2N2N2O2O00N2O1N2N2N2N3M2N2N2N2O1N2N2N2N3M2NVTi6"}, "image_id": 581, "id": 10280}, {"iscrowd": 0, "category_id": 1, "bbox": [260.0, 362.0, 26.0, 26.0], "area": 362, "segmentation": {"size": [512, 512], "counts": "d[R42m?2O1N2N2N3M2N2N2N2N2O1N3N01N2O1N3M2N2N2N2N2N2N3N1NYd`3"}, "image_id": 581, "id": 10281}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 379.0, 37.0, 47.0], "area": 1142, "segmentation": {"size": [512, 512], "counts": "R2N2N00010O02N2N00000002O1000000001ON01O2N2N2N2QOQAj0U?N1N2N2N2N2@b@;c?N2N2N2N2O1NZS]7"}, "image_id": 581, "id": 10282}, {"iscrowd": 0, "category_id": 1, "bbox": [119.0, 391.0, 59.0, 75.0], "area": 1951, "segmentation": {"size": [512, 512], "counts": "llk11n?2N2N2N2N3M2N2N2O1N2N2N2N2N2N3M2N1O1O101N2N2N20000000000dAfNQ>[1lAgNT>Y1jAjNV>U1hAmNX>^100000000000000010O0N2O1N2`NbA[1d>M2TO[A:g>D[A:g>D[A:g>E[A8g>F[A8g>F[A8g>F[A8g>F[A9W?N2N2N2NPbV5"}, "image_id": 581, "id": 10283}, {"iscrowd": 0, "category_id": 1, "bbox": [59.0, 393.0, 51.0, 41.0], "area": 1087, "segmentation": {"size": [512, 512], "counts": "klm02n?2M2N2N2N2N2N2N2N2N3N1000000O0O0000000000001O2O2M2N2N2N2O11O00000N2N2N2N2N2O2M2N2N2N2N2N2N2N3M2O1N2NPcX6"}, "image_id": 581, "id": 10284}, {"iscrowd": 0, "category_id": 1, "bbox": [175.0, 402.0, 102.0, 97.0], "area": 3118, "segmentation": {"size": [512, 512], "counts": "hng21n?2N2N2N2N2O1N3M2N2N2N2N2N2N2N2O2O0000000001O00O1N1O00000000000001O003N1N2N2N001O000000000001O00001O2O1N2N00000000000GROaAn0`>TO]Al0Q>SOZB3Di0R>VOXB3Dh0S>WOWBR1i=POUBP1l=ROQBn0o=TOoAl0Q>VOmAj0S>XOkAh0U>a000000001O0001O1O2HiAdNY>Z1jAcNX>[18N2N2N2N2O1N3M2N2N2N2N2N2N2N2N2O2M2N2N2N2NeRe3"}, "image_id": 581, "id": 10285}, {"iscrowd": 0, "category_id": 1, "bbox": [11.0, 417.0, 72.0, 69.0], "area": 2189, "segmentation": {"size": [512, 512], "counts": "^n51n?2N2N2N2N3M2_OD[A>c>D[A>c>D\\A=b>F[AEZA=d>EZA=d>EZA=d>a0N2^AdN]>a1N2N3M001O2N1O10O000ROnAOR>1PBMP>3RBKQ>3PBKR>3PBKR>3PBKR>3PBKR>3PBKR>3PBKR>3PBLP>3RBKn=5TBIm=6VBGj=:WBDi=>WB@i=b0i02N3M2N2N2N2N2N2000000000O1N2N2O2M2N2N2N2N2N2N2N2N2N2N2N3N1NXRf6"}, "image_id": 581, "id": 10286}, {"iscrowd": 0, "category_id": 1, "bbox": [283.0, 428.0, 43.0, 51.0], "area": 650, "segmentation": {"size": [512, 512], "counts": "am]41n?2N2N2N2N2O01O10c@HQ?8m@JS?7j@KV?5h@MX?3f@0Y?<00001O002N001O0000000000000001O0000000000N2N2N3M2N2N2N2N2N\\al2"}, "image_id": 581, "id": 10287}, {"iscrowd": 0, "category_id": 1, "bbox": [79.0, 450.0, 50.0, 51.0], "area": 1218, "segmentation": {"size": [512, 512], "counts": "cnW11n?3M2N2N2N2N2N2N2O1N3M2N2N2N2N2N2N2N2O2N1000000001O000001O00N2N2N2N3M2N2N2O1N2N2N2N2N3M2N2N2N2O1N2NoPo5"}, "image_id": 581, "id": 10288}, {"iscrowd": 0, "category_id": 1, "bbox": [132.0, 452.0, 27.0, 27.0], "area": 397, "segmentation": {"size": [512, 512], "counts": "`^R22m?2N2N2N2N2N3M2N2O1N2N2N2N10O2N2N2N2N2O1N2N3M2N2N2N2N`Q`5"}, "image_id": 581, "id": 10289}, {"iscrowd": 0, "category_id": 1, "bbox": [255.0, 452.0, 25.0, 22.0], "area": 241, "segmentation": {"size": [512, 512], "counts": "bno31n?3N1N2N2N2N2N10O000000010O00000000101N2N3M2N2N2Oeac3"}, "image_id": 581, "id": 10290}, {"iscrowd": 0, "category_id": 
1, "bbox": [0.0, 463.0, 31.0, 45.0], "area": 836, "segmentation": {"size": [512, 512], "counts": "e>P1o>2N2N2N001O02O1000000001ON3M2N2N2N2O1N2N2N3M2N2N2N2N2O1N2N2NhP`7"}, "image_id": 581, "id": 10291}, {"iscrowd": 0, "category_id": 1, "bbox": [32.0, 473.0, 50.0, 39.0], "area": 907, "segmentation": {"size": [512, 512], "counts": "o_`01n?100O1O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O1O100O1O1GXOZAh0e>[OVAM0g0i>^OVAL1e0h>EWA;h>GWA9h>`01\\OWAOj>OXA0i>0WAOj>1VANj>3VALk>4UAKl>5UAIl>6UAIl>5VAJk>4WAJk>4WAJk>5VAIl>5VAIl>5b0M2Nh`f6"}, "image_id": 581, "id": 10292}, {"iscrowd": 0, "category_id": 1, "bbox": [242.0, 475.0, 17.0, 18.0], "area": 144, "segmentation": {"size": [512, 512], "counts": "P_i32m?2N2N2N2N20000000000000N2N2N2N2NkPn3"}, "image_id": 581, "id": 10293}, {"iscrowd": 0, "category_id": 1, "bbox": [139.0, 476.0, 46.0, 36.0], "area": 970, "segmentation": {"size": [512, 512], "counts": "YoU22n?2M2N3N2M3N2M3N1N3N2M3N1N1O100O100O100O100O100O11O1O2N2N2N2N2N1O2N2N2N2N2N1O2N2N2N1O000NV@Nl?0UPS5"}, "image_id": 581, "id": 10294}, {"iscrowd": 0, "category_id": 1, "bbox": [311.0, 493.0, 37.0, 19.0], "area": 358, "segmentation": {"size": [512, 512], "counts": "ook41n?1O100O1O1O1O1O1O1O1O1O1O1O100O1O1O1O1O11O1O2N1O1O1O1O1O1O1O1O1O1O2N1O1OQ`a2"}, "image_id": 581, "id": 10295}, {"iscrowd": 0, "category_id": 1, "bbox": [202.0, 501.0, 16.0, 11.0], "area": 105, "segmentation": {"size": [512, 512], "counts": "j_U33l?2N2N2N1O1001O1O1O1O2N1O1O1O1OQ`b4"}, "image_id": 581, "id": 10296}, {"iscrowd": 0, "category_id": 1, "bbox": [115.0, 503.0, 18.0, 9.0], "area": 90, "segmentation": {"size": [512, 512], "counts": "ooi11n?1O100O1O1O1O1O1O1001O1O1O1O1O2N1OQPm5"}, "image_id": 581, "id": 10297}, {"iscrowd": 0, "category_id": 1, "bbox": [306.0, 510.0, 4.0, 2.0], "area": 6, "segmentation": {"size": [512, 512], "counts": "o_i41n?1001OQ`T3"}, "image_id": 581, "id": 10298}, {"iscrowd": 0, "category_id": 1, "bbox": [14.0, 0.0, 5.0, 3.0], "area": 9, "segmentation": {"size": [512, 512], "counts": "PP71o?1O1OO1OQ`f7"}, "image_id": 582, "id": 10299}, {"iscrowd": 0, "category_id": 1, "bbox": [135.0, 0.0, 64.0, 40.0], "area": 1577, "segmentation": {"size": [512, 512], "counts": "_`S28h?9G00O10O1000000000000BISA7m>=1000000000000000000O10000000000000000O100000000000005K9LK7I000000O0100000000000000000O010000004L9G:Fm^l4"}, "image_id": 582, "id": 10300}, {"iscrowd": 0, "category_id": 1, "bbox": [489.0, 0.0, 10.0, 5.0], "area": 27, "segmentation": {"size": [512, 512], "counts": "P`d71o?1O1O1O1OO1O1O100OQ`6"}, "image_id": 582, "id": 10301}, {"iscrowd": 0, "category_id": 1, "bbox": [496.0, 1.0, 16.0, 24.0], "area": 184, "segmentation": {"size": [512, 512], "counts": "WPh72m?2N2N2N2N2N20001O01O0000000000D"}, "image_id": 582, "id": 10302}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 2.0, 4.0, 7.0], "area": 17, "segmentation": {"size": [512, 512], "counts": "27j?N2N2Okom7"}, "image_id": 582, "id": 10303}, {"iscrowd": 0, "category_id": 1, "bbox": [19.0, 2.0, 71.0, 54.0], "area": 1989, "segmentation": {"size": [512, 512], "counts": "i`92m?3M2N2O1N2N2N2N2N2N3M2N2N2O1N2N2N2N3M2N2N2N2O1N200000N2O2M2N2N2N2N2N2N2N010O00000000000000010O01O3M00000001O2O1N2N2N2N2N2N3M2N2O1N2N2N2N2N2N^ob6"}, "image_id": 582, "id": 10304}, {"iscrowd": 0, "category_id": 1, "bbox": [131.0, 49.0, 45.0, 39.0], "area": 1326, "segmentation": {"size": [512, 512], "counts": "WbQ27i?:F0O1000000000O10^OF^A:a>c00000000000000O100000O100000000000O100000O10000000000000O103M:F:F;E]mW5"}, "image_id": 582, "id": 10305}, {"iscrowd": 0, "category_id": 1, "bbox": [72.0, 53.0, 54.0, 
73.0], "area": 2023, "segmentation": {"size": [512, 512], "counts": "\\RT1151b?2[@0c?8N2N2N2N2N2N2N2O2M2N2N2N2N2N2N2]AgN\\>[1bAgN\\>`1N2O1N2N3M2N2O11O000000000001O00N2N3lNkA8W>FkA8W>FkA8W>GjA7X>GjA8W>FkA8W>FkA8W>FkA8W>FkA8W>FlA7V>GlA7W>FkA8P?N2N2N2O1NelP6"}, "image_id": 582, "id": 10306}, {"iscrowd": 0, "category_id": 1, "bbox": [183.0, 78.0, 14.0, 14.0], "area": 112, "segmentation": {"size": [512, 512], "counts": "dbk22m?2N2N2N2N2N2O01N2N2N2N2N2N]]m4"}, "image_id": 582, "id": 10307}, {"iscrowd": 0, "category_id": 1, "bbox": [158.0, 91.0, 20.0, 20.0], "area": 203, "segmentation": {"size": [512, 512], "counts": "US_21n?2O1N2N2N2N2N2N3M1O0011N3M2N2N2N2N2N2NmlV5"}, "image_id": 582, "id": 10308}, {"iscrowd": 0, "category_id": 1, "bbox": [245.0, 128.0, 52.0, 52.0], "area": 1353, "segmentation": {"size": [512, 512], "counts": "fdj31n?2N2N2N2N2N2N3M2N2O1N2N2N2N2N2N2N3M2N2N2O1N2N2N20000000N2N2N2N2N2N3N1N2N2N2N2N2N2N2N2N3M2N2O1N2N2N2N2NT[[3"}, "image_id": 582, "id": 10309}, {"iscrowd": 0, "category_id": 1, "bbox": [298.0, 167.0, 50.0, 66.0], "area": 1577, "segmentation": {"size": [512, 512], "counts": "oVe41P?0mA2Q>0mA3P>0mA2Q>0mA2Q>0mA2Q>0mA2Q>0mA2Q>0mA2R>OmA1R>1lAOn=@kAd04LQ>BiAd04JS>>kABU>`0iA@W>T11O0001O00000001O2N2N2N2O2M2N2N2N2N2N2N2N3M2O1N2N2N2N2N2N3M2N2O1N2N2Nmia2"}, "image_id": 582, "id": 10310}, {"iscrowd": 0, "category_id": 1, "bbox": [239.0, 172.0, 14.0, 15.0], "area": 104, "segmentation": {"size": [512, 512], "counts": "deg32n?2M2N2N2N001O000003M2N2N2N`ZQ4"}, "image_id": 582, "id": 10311}, {"iscrowd": 0, "category_id": 1, "bbox": [240.0, 178.0, 50.0, 47.0], "area": 1187, "segmentation": {"size": [512, 512], "counts": "QVh32m?2N2O2M2N2N2N2N2N3N1N2N2N2N2N3M2O1N2000001O000001O000N2N2N2N2OO00000001O2N2O1N2N2N2N3M2N2O1N2N2N3Mfi^3"}, "image_id": 582, "id": 10312}, {"iscrowd": 0, "category_id": 1, "bbox": [316.0, 220.0, 73.0, 70.0], "area": 1941, "segmentation": {"size": [512, 512], "counts": "\\Xn42m?2N2N2N3M2N2N2N2N2O1N2N2N2N3O0001O000O1N2N2N1O0000^O[OjAe0V>]OhAd0X>]OfAc0Z>_OdAa0\\>AbA?^>C`A=`>E^A;b>H\\A7d>KZA5f>b00001O01O0000000000000000010O0000001O2N2N2N2N2N3N1N2N2N2N2N2N2N2N3M2O1N2N2N`Xm1"}, "image_id": 582, "id": 10313}, {"iscrowd": 0, "category_id": 1, "bbox": [289.0, 225.0, 15.0, 16.0], "area": 132, "segmentation": {"size": [512, 512], "counts": "Wg`42m?2N3M2N2N2O1N20N2N2N2N2N2N2NihW3"}, "image_id": 582, "id": 10314}, {"iscrowd": 0, "category_id": 1, "bbox": [205.0, 238.0, 10.0, 9.0], "area": 51, "segmentation": {"size": [512, 512], "counts": "agV32n?1N2N2N2O01N2N2N2N_Xd4"}, "image_id": 582, "id": 10315}, {"iscrowd": 0, "category_id": 1, "bbox": [286.0, 240.0, 34.0, 33.0], "area": 514, "segmentation": {"size": [512, 512], "counts": "TX_41o?1N2N3M2N2N2N2N2O1N3M1O00000001O01O000000000002N2O2M2N2N2N2N2N3N1NUho2"}, "image_id": 582, "id": 10316}, {"iscrowd": 0, "category_id": 1, "bbox": [418.0, 244.0, 19.0, 20.0], "area": 172, "segmentation": {"size": [512, 512], "counts": "hWa63l?2N2N2N2O1000001O00000001N1N2N2N2N2NoWU1"}, "image_id": 582, "id": 10317}, {"iscrowd": 0, "category_id": 1, "bbox": [162.0, 245.0, 49.0, 45.0], "area": 1122, "segmentation": {"size": [512, 512], "counts": "UXa22m?2N2N2N2N3M2N2O1N2N2N2N2N3M2N2O1N2N20001O00000001N1N2N2N2O1N2N2N000000001O01O01Ji@BY?d0YA^Of>c0YA^Oe>d0YA^Oe>d0YA^Oe>e0XA^Oe>P1N200000001O00000001O01O000000000010O000000000O1N2N3M2N2N2N2O1N2N2N3O00000000000000001O000000000001O01O000O1O1N2N2N2N3M2O1N2N2N2NQfl4"}, "image_id": 582, "id": 10319}, {"iscrowd": 0, "category_id": 1, "bbox": [271.0, 266.0, 20.0, 19.0], "area": 189, "segmentation": {"size": [512, 
512], "counts": "ehW42m?2N2N3M2N2N10O000001O0001O2N2N2N2N2N3N_W^3"}, "image_id": 582, "id": 10320}, {"iscrowd": 0, "category_id": 1, "bbox": [368.0, 266.0, 50.0, 49.0], "area": 1263, "segmentation": {"size": [512, 512], "counts": "UYh52m?2N3M2N2N2IEf@=X?Eg@>mADS>:iAHW>o02N2N2N2N2N2N2N2N2N2N2N2N2Jo@ZOR?9n@O2FR?9n@OX?Oj@OX?Oj@OX?Oj@OX?Oj@OSce3"}, "image_id": 582, "id": 10333}, {"iscrowd": 0, "category_id": 1, "bbox": [295.0, 366.0, 36.0, 39.0], "area": 702, "segmentation": {"size": [512, 512], "counts": "Tlc42n?1N2N2N2N2N2N2N2N2N2N2N2N2N2N2N1O0000000001O00002N2N2N2Gj@EX?9j@EX?9j@EX?99N2N3M2NWTj2"}, "image_id": 582, "id": 10334}, {"iscrowd": 0, "category_id": 1, "bbox": [190.0, 379.0, 24.0, 24.0], "area": 302, "segmentation": {"size": [512, 512], "counts": "T\\o21n?2N3M2N2N2N2N2N2N2O10000000O1N2N2N2N2N2N2N2N2Nicd4"}, "image_id": 582, "id": 10335}, {"iscrowd": 0, "category_id": 1, "bbox": [150.0, 390.0, 49.0, 50.0], "area": 1150, "segmentation": {"size": [512, 512], "counts": "U][22m?2N2M3N2N2N1O2N2N2N2N2N2N2N2N2N2N2N2N00000000000O1000000002N2M3N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N1OYSl4"}, "image_id": 582, "id": 10336}, {"iscrowd": 0, "category_id": 1, "bbox": [316.0, 399.0, 16.0, 17.0], "area": 139, "segmentation": {"size": [512, 512], "counts": "j\\n42m?2M3N2N2N1O1N100002M3N1O2N2N2M^ci2"}, "image_id": 582, "id": 10337}, {"iscrowd": 0, "category_id": 1, "bbox": [482.0, 400.0, 30.0, 55.0], "area": 892, "segmentation": {"size": [512, 512], "counts": "Z]a72m?2O1N2N2N2N2N3M2N2N2O1N2N2N2N2N3M2N2N2N2O1N2N2N3M1O0000000aC"}, "image_id": 582, "id": 10338}, {"iscrowd": 0, "category_id": 1, "bbox": [203.0, 403.0, 47.0, 60.0], "area": 1464, "segmentation": {"size": [512, 512], "counts": "`mU32m?2N2c@Kl>7RAKl>7RAKl>7RAKl>7RAKa>HcA>KL`>HcA>KL`>HcA>KL`>HcAn0[>TObAo0[>=N1O2N2N1O02N2N2N2N1O2N2N2N2N2N2M3N2N2N2N2N2N2N2N2N1O2N2N2N2N2M3N2N2NcbR4"}, "image_id": 582, "id": 10339}, {"iscrowd": 0, "category_id": 1, "bbox": [347.0, 407.0, 13.0, 15.0], "area": 99, "segmentation": {"size": [512, 512], "counts": "jl]52m?2N2W@Ke?9N2001O000O1N2N2N2NQc[2"}, "image_id": 582, "id": 10340}, {"iscrowd": 0, "category_id": 1, "bbox": [428.0, 407.0, 36.0, 36.0], "area": 639, "segmentation": {"size": [512, 512], "counts": "Z]f62n?1N2N2N2N3M2N2N2N2N2N2N2O1N2N00000000000002N2N2N2N2O1N2N2N2N2N3M2N2N2Nkbg0"}, "image_id": 582, "id": 10341}, {"iscrowd": 0, "category_id": 1, "bbox": [315.0, 409.0, 25.0, 26.0], "area": 210, "segmentation": {"size": [512, 512], "counts": "]mm41n?2N2N2N2N2N00O0100000000000000O10O101O2N2N2N2N1OTce2"}, "image_id": 582, "id": 10342}, {"iscrowd": 0, "category_id": 1, "bbox": [1.0, 414.0, 59.0, 56.0], "area": 1631, "segmentation": {"size": [512, 512], "counts": "dm01n?2N2N2N1O2M3N2N2N2N2N2N2N2N2N2M20100N2N2N2N2N20000000O0100000000000000O1OO2M2O2N2N2N2N2N2N2N2N2N2M2O2N2N2N2N2N2N2N2NUbQ7"}, "image_id": 582, "id": 10343}, {"iscrowd": 0, "category_id": 1, "bbox": [238.0, 417.0, 69.0, 76.0], "area": 2120, "segmentation": {"size": [512, 512], "counts": "h^g31n?2N3M2N2N2N2N2N3M2O1N2N2N2N3M2N2N2N01O00O1O1N2O1O2M2O100001O00010O0000001O0000010O00001N1N1O0O2O2N3M4M210O00010O001O01ON3L3N2M4M2M4L3N2MkRV3"}, "image_id": 582, "id": 10344}, {"iscrowd": 0, "category_id": 1, "bbox": [308.0, 425.0, 42.0, 39.0], "area": 733, "segmentation": {"size": [512, 512], "counts": "Q^j41o?2M2N2N2N2N3M2O1N2N2N2N2N01O00000001O0001O00000001O01O00001O2O1N2N3M2N2N2N2O2M2N2N[b`2"}, "image_id": 582, "id": 10345}, {"iscrowd": 0, "category_id": 1, "bbox": [56.0, 427.0, 22.0, 23.0], "area": 257, "segmentation": {"size": [512, 512], "counts": 
"d]l02m?2N2N2N2N2N2N2N2N20000000N2N2N2N2N2N3M2N2NYbh6"}, "image_id": 582, "id": 10346}, {"iscrowd": 0, "category_id": 1, "bbox": [362.0, 446.0, 38.0, 38.0], "area": 707, "segmentation": {"size": [512, 512], "counts": "d^e51n?2N2M3N2N2N2N1O2N2N2N2N2N2N2N2N2N0000000002N2N2N2M3N2N2N2N2N2N1O2N2N2N2N2Ndag1"}, "image_id": 582, "id": 10347}, {"iscrowd": 0, "category_id": 1, "bbox": [416.0, 460.0, 28.0, 28.0], "area": 415, "segmentation": {"size": [512, 512], "counts": "h^`61n?2N2N2N2O2M2N2N2N2N3M2O1N2N01O2O1N2N2N3M2N2O1N2N2N3M2NWaQ1"}, "image_id": 582, "id": 10348}, {"iscrowd": 0, "category_id": 1, "bbox": [435.0, 462.0, 49.0, 48.0], "area": 1135, "segmentation": {"size": [512, 512], "counts": "Zoi61n?2N2N2N2N3M2O1N2N2N2N2N2N2N2N3M2O1N00001O00000000000001O0000010O3M2N2N2N2N2N2N2N2O2M2N2N2N2N2N2NQa="}, "image_id": 582, "id": 10349}, {"iscrowd": 0, "category_id": 1, "bbox": [485.0, 463.0, 27.0, 41.0], "area": 589, "segmentation": {"size": [512, 512], "counts": "Wob71n?2N2N2O1N3M2N2N2N2N2N2N2N2N2N2N00000001O000000000000bA"}, "image_id": 582, "id": 10350}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 466.0, 10.0, 29.0], "area": 145, "segmentation": {"size": [512, 512], "counts": "b>m0T?N2Dk@JW?4k@JW?4k@JW?4k@JW?4k@JW?43l?2N2N2O1OO2N2N2N2NVQ\\7"}, "image_id": 582, "id": 10354}, {"iscrowd": 0, "category_id": 1, "bbox": [277.0, 472.0, 51.0, 40.0], "area": 1175, "segmentation": {"size": [512, 512], "counts": "eoZ41n?2O1N2N2\\@I\\?:a@G^?;a@E^??00O1K_Oj@b0U?@i@a0V?5O100O1O1O1O1O100O1O1O1O1O1O100O1O1O11O2N1O1N2N2N3N1N2N2N2N3M2O1N2N2N2N3M2N2O``k2"}, "image_id": 582, "id": 10355}, {"iscrowd": 0, "category_id": 1, "bbox": [398.0, 482.0, 41.0, 30.0], "area": 713, "segmentation": {"size": [512, 512], "counts": "f_W61n?2N2N2N2N2N2N2N1O2N2N1O1N2O1O1O1O1O1O11O1O1O1O1O1O1O1O1O1O1O1O1O1N2N2N2N1N3N2N2N[PT1"}, "image_id": 582, "id": 10356}, {"iscrowd": 0, "category_id": 1, "bbox": [335.0, 497.0, 40.0, 15.0], "area": 353, "segmentation": {"size": [512, 512], "counts": "ooW51n?1O1O1O1O1O1O1O1N2O1O1O1001O1O1O1O1O00O1O1O1O1O1O11O1O1O1O1O1O1O1O1O1O1O1O1O1OQPT2"}, "image_id": 582, "id": 10357}, {"iscrowd": 0, "category_id": 1, "bbox": [23.0, 0.0, 72.0, 64.0], "area": 2157, "segmentation": {"size": [512, 512], "counts": "P`;2n?1O1O2N1O1O2N1O2N1O1O2N1O1O2N1O2N1O1O2N1O1O2N1O1O2N1O2N1O1O2N1O1O2N1O2N1O1O2O01O010O01O01O000O2M2N3M2O1N3M2O1N3M2N3N1N2N3M2O1N3M2O1N3M2N3N1N2Nd^`6"}, "image_id": 584, "id": 10358}, {"iscrowd": 0, "category_id": 1, "bbox": [119.0, 0.0, 361.0, 63.0], "area": 16254, "segmentation": {"size": [512, 512], "counts": "P`k15k?:F9G:F:F7I001O9G2N0000000000000000O1000000000000000000O1000000000000000000O10000000000000000O1000000000000000000O1000000000000000000O10000000000000000O1000000001O1O00O1000000000000000000O10000000000000000O1000000000000000000O10000000000000000O1000000000000000000O10000000000000000O10000000000000000O1000000000000000000O10000000000000000O1000000000000000000O10000000000000000O1000000000000000000O10000000000000000O10000000000000000O1000000000000000000O10000000000000000O1000000000000000000O10000000000000000O1000000000000000000O10000000000000000O10000000000000000O1000000000000000000O10000000000000000O1000000000000000000O10000000000000000O1000000000000000000O100000000_ORA0n>0[AGe>9a000O1000000000000000000O10000000000000000O104Llo?"}, "image_id": 584, "id": 10359}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 54.0, 30.0, 51.0], "area": 960, "segmentation": {"size": [512, 512], "counts": "T2U1j>1O01O01O0001O01O01O01O0001O01O00010O03M2N2O2^OPAOR?0o@NS?0PAMS?0o@OR?OPAOS?Oo@NS?0o@Om\\a7"}, 
"image_id": 584, "id": 10360}, {"iscrowd": 0, "category_id": 1, "bbox": [44.0, 61.0, 16.0, 15.0], "area": 127, "segmentation": {"size": [512, 512], "counts": "SRf01o?2M2O1N3M2O1N00010O01O3M2O2M2NnmQ7"}, "image_id": 584, "id": 10361}, {"iscrowd": 0, "category_id": 1, "bbox": [124.0, 77.0, 103.0, 90.0], "area": 3963, "segmentation": {"size": [512, 512], "counts": "XTn11n?2N3M2O1N3M2O2M2N2N3N1N2N3M2O2M2N2O2M2N2N3N1N1O1O01O00010O0000010O0001O01O00010O0000010O0001O01O0001O01O00010O0000010O0001O01O00010O0000010O0001O01O00010O0000010O2N2OO01O002O1N2oNZAe0i>XOYAf0i>XOYAg0h>WO[Af0Q?N2O2M2N3M2O1N3M2N2Oe\\^4"}, "image_id": 584, "id": 10362}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 90.0, 84.0, 78.0], "area": 2485, "segmentation": {"size": [512, 512], "counts": "d43l?2N2N2N2N2N3M2N2N2N2N2O1N2N2N2N2N1O0000001O0001O00000000000000000001O0001O00FmNhAS1X>oNfAQ1[>POcAP1]>ROaAn0_>TO_Al0a>90001O000000000001O2N2N2N2N2N2O1N2N2N2N00000000000000101N2N3M2N2N2N2N2N2N2N2N2Ngle6"}, "image_id": 584, "id": 10363}, {"iscrowd": 0, "category_id": 1, "bbox": [92.0, 121.0, 43.0, 49.0], "area": 976, "segmentation": {"size": [512, 512], "counts": "UT^12m?3N1N3M2N2O2M2N2N3N1N2N3M2O1N3M200010O01O01O01O01O000N3M2N3N1N2N3N1N2N3M2O1N3M2N2O2MU[l5"}, "image_id": 584, "id": 10364}, {"iscrowd": 0, "category_id": 1, "bbox": [89.0, 155.0, 11.0, 9.0], "area": 56, "segmentation": {"size": [512, 512], "counts": "od\\11n?2O1N3M1O0100O2N2N2ORk]6"}, "image_id": 584, "id": 10365}, {"iscrowd": 0, "category_id": 1, "bbox": [8.0, 176.0, 85.0, 61.0], "area": 2760, "segmentation": {"size": [512, 512], "counts": "QV41o?1N3M2O2M2N2N3GBl@?S?Cj@`0S?8N2N3N1N2N3N101O000N3O010O01O01O010N1N2N3M2O0O0010O0000010O00012M2N3M2O1N3M2O1N000010O0000010O03M2O1N3M00010O0001O01O00010O002O1N3M2N2O2M2N3M2O1N3M2OkYa6"}, "image_id": 584, "id": 10366}, {"iscrowd": 0, "category_id": 1, "bbox": [35.0, 475.0, 52.0, 37.0], "area": 1454, "segmentation": {"size": [512, 512], "counts": "moa03h?5L4K5L4K5L4L4M300001O000000001O000000001O000000003M000000001O0000001O0000001O000000001O000N2L5K4K5L4KoPd6"}, "image_id": 584, "id": 10367}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 24.0, 54.0], "area": 912, "segmentation": {"size": [512, 512], "counts": "0f1Z>0000000000000000000000M3[Oe0G913L0000001O000000koc7"}, "image_id": 585, "id": 10368}, {"iscrowd": 0, "category_id": 1, "bbox": [51.0, 0.0, 42.0, 8.0], "area": 282, "segmentation": {"size": [512, 512], "counts": "Q`i05j?1000000000000000000000000000001O00000000000000000000000000000000000000001O0000000P`a6"}, "image_id": 585, "id": 10369}, {"iscrowd": 0, "category_id": 1, "bbox": [189.0, 0.0, 47.0, 26.0], "area": 927, "segmentation": {"size": [512, 512], "counts": "c`n24^?>0000000000010O00000000000000000000000000000001O01O00000000I700001O000000000001O0000000000B]PZ4"}, "image_id": 585, "id": 10370}, {"iscrowd": 0, "category_id": 1, "bbox": [125.0, 4.0, 32.0, 35.0], "area": 980, "segmentation": {"size": [512, 512], "counts": "San12`?>B>M30000000001O01O000000000000000000002OO000000000000000000@Y`a5"}, "image_id": 585, "id": 10371}, {"iscrowd": 0, "category_id": 1, "bbox": [260.0, 11.0, 39.0, 57.0], "area": 1685, "segmentation": {"size": [512, 512], "counts": "QQR41e?:000000i@O_>i0E;O10000000000000000000000000009G0001O01O000000000000000000G9G9^Ob0_OU`Z3"}, "image_id": 585, "id": 10372}, {"iscrowd": 0, "category_id": 1, "bbox": [310.0, 12.0, 49.0, 60.0], "area": 1919, "segmentation": {"size": [512, 512], "counts": 
"mQk4;e?000000000000000000000I7[Oe0[Oe0000H9O00000000000000000000000001O0000000000000001O0000000000000D<@`0AQ`\\2"}, "image_id": 585, "id": 10373}, {"iscrowd": 0, "category_id": 1, "bbox": [49.0, 17.0, 43.0, 31.0], "area": 1063, "segmentation": {"size": [512, 512], "counts": "o`h0?a?00000H8J6001O00000000000001O000000000001O00000000000001OVOo@e0W?O000000000000000000000I7Beoa6"}, "image_id": 585, "id": 10374}, {"iscrowd": 0, "category_id": 1, "bbox": [397.0, 21.0, 44.0, 38.0], "area": 1332, "segmentation": {"size": [512, 512], "counts": "aaV69S?d0K5000000000001O000M3000I70000000000000000000000001O03M001O0000000000000000000000000H__S1"}, "image_id": 585, "id": 10375}, {"iscrowd": 0, "category_id": 1, "bbox": [469.0, 26.0, 41.0, 44.0], "area": 1495, "segmentation": {"size": [512, 512], "counts": "TaZ7`0`?00000000000d@GQ?d000F:000000000000000007I000000000000000000000000000000000000000000YOmo0"}, "image_id": 585, "id": 10376}, {"iscrowd": 0, "category_id": 1, "bbox": [219.0, 27.0, 21.0, 18.0], "area": 378, "segmentation": {"size": [512, 512], "counts": "k`]3b0^?000000000000000000000000000000000000000UoW4"}, "image_id": 585, "id": 10377}, {"iscrowd": 0, "category_id": 1, "bbox": [46.0, 58.0, 43.0, 30.0], "area": 1076, "segmentation": {"size": [512, 512], "counts": "^Rg08a?7001OF:00L400000001O000000000000000000000000000000000001O5K001O0000000001O00000000GX^c6"}, "image_id": 585, "id": 10378}, {"iscrowd": 0, "category_id": 1, "bbox": [131.0, 60.0, 11.0, 9.0], "area": 86, "segmentation": {"size": [512, 512], "counts": "SbQ21h?70001O00000001O0000Snh5"}, "image_id": 585, "id": 10379}, {"iscrowd": 0, "category_id": 1, "bbox": [12.0, 62.0, 23.0, 24.0], "area": 441, "segmentation": {"size": [512, 512], "counts": "aR63b?;H81O0000000001O000000000001O0000000001OH8D\\^^7"}, "image_id": 585, "id": 10380}, {"iscrowd": 0, "category_id": 1, "bbox": [112.0, 71.0, 45.0, 57.0], "area": 1758, "segmentation": {"size": [512, 512], "counts": "[Sh1b0R?<000000000001O0001O000000000000000000[O\\OjAh0R>e00SOiA1W>OiA1W>OiA2V>NjA2V>NkA1V>NjA2V>NjA2V>NjA2V>NjA2V>NjA2V>NjA2V>NjA2V>NjA2V>NjA2V>NjA2V>NjA2V>NjA2V>JnA6R>YO_Bg0]>1O01O0DW]a5"}, "image_id": 585, "id": 10381}, {"iscrowd": 0, "category_id": 1, "bbox": [503.0, 72.0, 8.0, 7.0], "area": 53, "segmentation": {"size": [512, 512], "counts": "Ybk76j?000O100000000h="}, "image_id": 585, "id": 10382}, {"iscrowd": 0, "category_id": 1, "bbox": [200.0, 73.0, 33.0, 55.0], "area": 1605, "segmentation": {"size": [512, 512], "counts": "kST34Z?b0]Oc0C=0000001O000000000001O0000000000000000000001O000000000L4]Oc0^OW^[4"}, "image_id": 585, "id": 10383}, {"iscrowd": 0, "category_id": 1, "bbox": [345.0, 73.0, 24.0, 40.0], "area": 852, "segmentation": {"size": [512, 512], "counts": "]c\\53[?b0^Ob00000000000000000001O01O0000000000000000K5]OY^W2"}, "image_id": 585, "id": 10384}, {"iscrowd": 0, "category_id": 1, "bbox": [470.0, 73.0, 34.0, 21.0], "area": 557, "segmentation": {"size": [512, 512], "counts": "^R[7?]?400000000000000000010O0000000000000N2000000000G9000000I_@2f?000000000_m3"}, "image_id": 585, "id": 10385}, {"iscrowd": 0, "category_id": 1, "bbox": [417.0, 78.0, 21.0, 16.0], "area": 306, "segmentation": {"size": [512, 512], "counts": "^b`6?a?00000001O01O00000000000000000000000000HimT1"}, "image_id": 585, "id": 10386}, {"iscrowd": 0, "category_id": 1, "bbox": [334.0, 87.0, 11.0, 12.0], "area": 130, "segmentation": {"size": [512, 512], "counts": "gRW5;e?001O000000000000000Y]c2"}, "image_id": 585, "id": 10387}, {"iscrowd": 0, "category_id": 1, "bbox": [243.0, 88.0, 60.0, 56.0], "area": 2116, 
"segmentation": {"size": [512, 512], "counts": "fci3b0]?100WOIiA7S>MmA3S>MeA;[>e00000001O00000000014K00000000000000000000000000001O00000001O5K00gNnAd0h>O1O1O00001O01O00000000000000000000000000@h\\X3"}, "image_id": 585, "id": 10388}, {"iscrowd": 0, "category_id": 1, "bbox": [53.0, 98.0, 47.0, 32.0], "area": 1257, "segmentation": {"size": [512, 512], "counts": "Xcj0:a?50000i@Dj>i001O00000O100000000000000000001O00000001O00000000000000000UOQAe0U?1O00000000000000000000L4BUm]6"}, "image_id": 585, "id": 10389}, {"iscrowd": 0, "category_id": 1, "bbox": [12.0, 104.0, 23.0, 25.0], "area": 539, "segmentation": {"size": [512, 512], "counts": "dS6B>0000000000000001O000001O0000000000000000000001O0001O0000000000000000000001O0001O0000000000000000000001O000001O000000000K5BclV1"}, "image_id": 585, "id": 10394}, {"iscrowd": 0, "category_id": 1, "bbox": [103.0, 120.0, 7.0, 11.0], "area": 68, "segmentation": {"size": [512, 512], "counts": "ncc15e?600001O0000WlX6"}, "image_id": 585, "id": 10395}, {"iscrowd": 0, "category_id": 1, "bbox": [69.0, 134.0, 7.0, 6.0], "area": 42, "segmentation": {"size": [512, 512], "counts": "VdR16j?00000000000jki6"}, "image_id": 585, "id": 10396}, {"iscrowd": 0, "category_id": 1, "bbox": [19.0, 143.0, 74.0, 31.0], "area": 2108, "segmentation": {"size": [512, 512], "counts": "_d9k0U?0000000000000000000000000000004L000000000000000000000000000000000000L40000000000000000000000000000000000000000002N0000000000000000000000000000000a[a6"}, "image_id": 585, "id": 10397}, {"iscrowd": 0, "category_id": 1, "bbox": [106.0, 149.0, 29.0, 29.0], "area": 445, "segmentation": {"size": [512, 512], "counts": "QUe12m?3M2O1N2N2N2N2N2N2N3M2N2N2OO1O2N2N2N2O1N2N2N2N3M2N2N2N2NnZl5"}, "image_id": 585, "id": 10398}, {"iscrowd": 0, "category_id": 1, "bbox": [448.0, 154.0, 23.0, 23.0], "area": 280, "segmentation": {"size": [512, 512], "counts": "TUP72m?2N2N2N3M2N2O1N2N2N2N11N2N2N2N2N2N2N2N2O2M2NkZd0"}, "image_id": 585, "id": 10399}, {"iscrowd": 0, "category_id": 1, "bbox": [198.0, 159.0, 41.0, 35.0], "area": 654, "segmentation": {"size": [512, 512], "counts": "cUS32m?2N2N2N2N3M2N2N2N2O1N2N2N1O000101N2N2N2N1O00001O00000000000000002N2N2N2N2N2N2N2NjZX4"}, "image_id": 585, "id": 10400}, {"iscrowd": 0, "category_id": 1, "bbox": [269.0, 161.0, 10.0, 9.0], "area": 55, "segmentation": {"size": [512, 512], "counts": "SeV42m?3N1O2M1001N3N1N3NiZd3"}, "image_id": 585, "id": 10401}, {"iscrowd": 0, "category_id": 1, "bbox": [428.0, 167.0, 25.0, 25.0], "area": 319, "segmentation": {"size": [512, 512], "counts": "bUf61n?2N2N2N2N2N2O1N2N2N2N2N2000N2N2N2N2N2N2N2N2N2N2N^Zm0"}, "image_id": 585, "id": 10402}, {"iscrowd": 0, "category_id": 1, "bbox": [259.0, 170.0, 30.0, 31.0], "area": 505, "segmentation": {"size": [512, 512], "counts": "deQ41o?2M2O2M3N2M3N1N3M3N2M3N0O010O01O01O02O2M2O2M3N2M3M2O2M3N2MTZ_3"}, "image_id": 585, "id": 10403}, {"iscrowd": 0, "category_id": 1, "bbox": [334.0, 171.0, 39.0, 58.0], "area": 1616, "segmentation": {"size": [512, 512], "counts": "nVW52h?6K5J6J6J7I6J6J6L400010O00000000010O00000000010O00000000010L3J6K5J6J6J7I6J6JfZU2"}, "image_id": 585, "id": 10404}, {"iscrowd": 0, "category_id": 1, "bbox": [497.0, 171.0, 15.0, 29.0], "area": 236, "segmentation": {"size": [512, 512], "counts": "geh71n?3M2N2N2N2O1N2N3M2N2N2N2N2O1dJ"}, "image_id": 585, "id": 10405}, {"iscrowd": 0, "category_id": 1, "bbox": [290.0, 173.0, 32.0, 45.0], "area": 623, "segmentation": {"size": [512, 512], "counts": "cUa42m?3N2M3N2M3N2M3N2M2O2M3N2000000000000000N2N2M3N2M3N2M3N2M3N2M3N]in2"}, "image_id": 585, "id": 10406}, {"iscrowd": 0, 
"category_id": 1, "bbox": [470.0, 176.0, 13.0, 14.0], "area": 93, "segmentation": {"size": [512, 512], "counts": "eU[72m?2N2O2M2N2N21M2N2O2M2N2NYZ>"}, "image_id": 585, "id": 10407}, {"iscrowd": 0, "category_id": 1, "bbox": [136.0, 180.0, 53.0, 51.0], "area": 1280, "segmentation": {"size": [512, 512], "counts": "bVT21n?3M2N2N2N2N2N2O1N3M2N2N2N2N2N2O1N3M00000000000010O00000000000000010O1O2N2N2N2N2N2O1N3M2N2N2N2N2N2N2O2M2NjYQ5"}, "image_id": 585, "id": 10408}, {"iscrowd": 0, "category_id": 1, "bbox": [452.0, 182.0, 22.0, 23.0], "area": 248, "segmentation": {"size": [512, 512], "counts": "lUR72m?3M2N2N2O1N2N2O10001O000000N2N3M2N2N2N2N2Nmib0"}, "image_id": 585, "id": 10409}, {"iscrowd": 0, "category_id": 1, "bbox": [483.0, 190.0, 29.0, 29.0], "area": 423, "segmentation": {"size": [512, 512], "counts": "[fa71n?2N2N2N2N2N2N2N2N2N2N2N2N200000N2N2N2N2N2O1N2N2N3M2N2N2NcI"}, "image_id": 585, "id": 10410}, {"iscrowd": 0, "category_id": 1, "bbox": [9.0, 196.0, 37.0, 37.0], "area": 661, "segmentation": {"size": [512, 512], "counts": "jf41n?2N2N2N2N2N2N2N2N2N2N2N2N2N2N1O00000000000003M2N2N2N2N2N2N2N2N2N2N2N2N2N^iX7"}, "image_id": 585, "id": 10411}, {"iscrowd": 0, "category_id": 1, "bbox": [387.0, 202.0, 53.0, 52.0], "area": 1292, "segmentation": {"size": [512, 512], "counts": "ZgQ61n?3N1N2N2N2N2N2N2N2N2N2N2N2N2N2N2N3M10O000000000001O000000000000000001O2N2N2N2N2N2O2M2N2N2N2N2N2N2N2N2N2NUiS1"}, "image_id": 585, "id": 10412}, {"iscrowd": 0, "category_id": 1, "bbox": [195.0, 206.0, 19.0, 19.0], "area": 187, "segmentation": {"size": [512, 512], "counts": "hfQ32m?2N2N2N2N2N2N2N1O0002N2N2N2N2N2N2N2N[id4"}, "image_id": 585, "id": 10413}, {"iscrowd": 0, "category_id": 1, "bbox": [248.0, 210.0, 55.0, 50.0], "area": 1347, "segmentation": {"size": [512, 512], "counts": "TWl32l?3N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2001O0000000O100O1O100O1O100O100O1O100O0010O0010O0010O0001O0N3L3M4M2M3M4M2MfXX3"}, "image_id": 585, "id": 10414}, {"iscrowd": 0, "category_id": 1, "bbox": [50.0, 225.0, 52.0, 53.0], "area": 1317, "segmentation": {"size": [512, 512], "counts": "cWi01n?2N2N2N2N2N2N2N2N2N3M2N2m@XOk>j0SAXOk>P1N2N2N2N11N2N2O1000000000000000000000000N2N2N2N2N2N2N2N2N2N2N2N2N2N3M2N2N2Nng\\6"}, "image_id": 585, "id": 10415}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 234.0, 6.0, 13.0], "area": 46, "segmentation": {"size": [512, 512], "counts": "Z7=d?N2N2N3M2Nahl7"}, "image_id": 585, "id": 10416}, {"iscrowd": 0, "category_id": 1, "bbox": [211.0, 251.0, 59.0, 52.0], "area": 1468, "segmentation": {"size": [512, 512], "counts": "^hY31n?2O1N2N2N2N2N2N2N2N2N3M2O1N2N2N2N2N2N2N2N2O10000000000000010OO1N2N2N2N2N2N2N2N2N2N0000000000001O2O1N2N2N2N2N2N2N2N2N^gh3"}, "image_id": 585, "id": 10417}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 254.0, 28.0, 35.0], "area": 606, "segmentation": {"size": [512, 512], "counts": "S8n0Q?000003M11N3NO2N1O1O0001O0001O2N2N2N2N2N2N3N1N2N2N2N2Nfga7"}, "image_id": 585, "id": 10418}, {"iscrowd": 0, "category_id": 1, "bbox": [454.0, 258.0, 35.0, 54.0], "area": 1303, "segmentation": {"size": [512, 512], "counts": "^YS75f?5K5K5K5K6J5L4K5K510O00000010O000000010O00000001OO2K4K5K5K5K6J5K5L4KnW;"}, "image_id": 585, "id": 10419}, {"iscrowd": 0, "category_id": 1, "bbox": [504.0, 265.0, 8.0, 15.0], "area": 64, "segmentation": {"size": [512, 512], "counts": "`Xl71n?2N2N2N2N2N2N2fG"}, "image_id": 585, "id": 10420}, {"iscrowd": 0, "category_id": 1, "bbox": [156.0, 270.0, 29.0, 29.0], "area": 458, "segmentation": {"size": [512, 512], "counts": "iX^22m?2N3N2M3N1N3N2M2O2M2N010O01O010O01O02N3N1N3N2M2N3N2M3N1NSWS5"}, "image_id": 
585, "id": 10421}, {"iscrowd": 0, "category_id": 1, "bbox": [38.0, 276.0, 32.0, 30.0], "area": 485, "segmentation": {"size": [512, 512], "counts": "SYc01n?2N2N2N2N2O1N2N2N2N2N2N2N2N2O0O0002N2N2N2N2N2N2N2N2N2N2N2O1N2Nofl6"}, "image_id": 585, "id": 10422}, {"iscrowd": 0, "category_id": 1, "bbox": [112.0, 283.0, 30.0, 30.0], "area": 511, "segmentation": {"size": [512, 512], "counts": "TYh12n?2M3N2M3N2M2O2M3M3N2M100O010O010O0101N3N2M3M3N1N3N2M3N2M3Nafh5"}, "image_id": 585, "id": 10423}, {"iscrowd": 0, "category_id": 1, "bbox": [185.0, 284.0, 53.0, 49.0], "area": 1431, "segmentation": {"size": [512, 512], "counts": "Uil293I^?9`@I^?>N2N2N2N2N2000O10000000O1N2N2N2N2N0000002O010000000000000000000000N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2NXfX4"}, "image_id": 585, "id": 10424}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 295.0, 5.0, 8.0], "area": 24, "segmentation": {"size": [512, 512], "counts": "W98i?O1N2N2NfVm7"}, "image_id": 585, "id": 10425}, {"iscrowd": 0, "category_id": 1, "bbox": [487.0, 302.0, 20.0, 17.0], "area": 245, "segmentation": {"size": [512, 512], "counts": "hic73i?4L5O000001O01L3000010O0000010O00O1L5KcV2"}, "image_id": 585, "id": 10426}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 309.0, 6.0, 12.0], "area": 42, "segmentation": {"size": [512, 512], "counts": "e92N2N2N2N2N2N0001O000000000000000001O000000000002N2VO[A6g>H[A6g>H[A6g>H[A6g>H[A6g>H[A6g>HYA8i>FWA:k>DUA`0YABe>a0XAAf>a0YA@f>a0XA@g>b0WA^Oi>l000002O1N2N3M2N2O1N3M2N2N2N3N1N2N2N3M2Occo0"}, "image_id": 585, "id": 10437}, {"iscrowd": 0, "category_id": 1, "bbox": [299.0, 379.0, 51.0, 51.0], "area": 1312, "segmentation": {"size": [512, 512], "counts": "_le41n?2N2M3N2N2N1O2N2N2N2M3N2N2N2N2N1O2N200000000000000O010000000000000OOO100002N2M3N2Hn@_OT??n@_OT??n@_OS?`08N200F`@3`?Jc@4e?O2M[c`2"}, "image_id": 585, "id": 10438}, {"iscrowd": 0, "category_id": 1, "bbox": [60.0, 382.0, 14.0, 17.0], "area": 107, "segmentation": {"size": [512, 512], "counts": "S\\n01n?2N2N2N2O1N210O0000G\\@5d?I_@5g?N2Nfcj6"}, "image_id": 585, "id": 10439}, {"iscrowd": 0, "category_id": 1, "bbox": [78.0, 384.0, 58.0, 51.0], "area": 1416, "segmentation": {"size": [512, 512], "counts": "a\\W11n?2N2N2N2N2N2N2N2N2N2N3M2N2N2N2N2N2O100000000000000000000000000000N3M2N000000000000002N2N2O1N2N2N2N2N2N2N2N2N2N2N2NWck5"}, "image_id": 585, "id": 10440}, {"iscrowd": 0, "category_id": 1, "bbox": [354.0, 402.0, 37.0, 39.0], "area": 708, "segmentation": {"size": [512, 512], "counts": "o\\a53l?2N2N2N2N2N2N2N3M2N2N2N2O1N20001O0001O00000N2N2N2O1N3M2N2N2N2N2N2N2O2M2NfRl1"}, "image_id": 585, "id": 10441}, {"iscrowd": 0, "category_id": 1, "bbox": [53.0, 409.0, 49.0, 56.0], "area": 1307, "segmentation": {"size": [512, 512], "counts": "Ymj02m?2N2N3M2N2N2N2N2N2O1N2N2N2WAUOZ>m0dAUOZ>m0dAUOZ>m0dAUOZ>m0dAUO\\>k0bAWO^>V100000000000O1N2N2N2N2N2N2N2N3M2N2N2N2N2N2N2N2O1N2N2N2N2N2NXb\\6"}, "image_id": 585, "id": 10442}, {"iscrowd": 0, "category_id": 1, "bbox": [264.0, 417.0, 53.0, 49.0], "area": 1265, "segmentation": {"size": [512, 512], "counts": "f]T43l?2M2N3N2M2N3N2M2O2M3M2O2M30O10O10O10O10O10O10O10O10O10O10O10O10O10O10O10O10N2N1N3N2M2O2M3N1N3N2M2O2M3N1NWRQ3"}, "image_id": 585, "id": 10443}, {"iscrowd": 0, "category_id": 1, "bbox": [442.0, 422.0, 24.0, 27.0], "area": 310, "segmentation": {"size": [512, 512], "counts": "\\]m62n?2M2N3M2O1N3M2O2M2O10N3N110O010O0O2N1D_@6g?N3N1N2NUbf0"}, "image_id": 585, "id": 10444}, {"iscrowd": 0, "category_id": 1, "bbox": [461.0, 428.0, 9.0, 10.0], "area": 49, "segmentation": {"size": [512, 512], "counts": "_mV71o?1N3M2O2M12M2N3N_bd0"}, "image_id": 585, "id": 
10445}, {"iscrowd": 0, "category_id": 1, "bbox": [464.0, 436.0, 48.0, 48.0], "area": 1270, "segmentation": {"size": [512, 512], "counts": "Z^X71n?2O2M2N3N1N3M2O2M2N3N1N3N1N3M2O2M2N10O00010O00010O00010O00010O00010O2O2M2N3N1N3M2O2M2N3N1N3M2OiA"}, "image_id": 585, "id": 10446}, {"iscrowd": 0, "category_id": 1, "bbox": [325.0, 448.0, 34.0, 35.0], "area": 580, "segmentation": {"size": [512, 512], "counts": "\\nR52m?2N2N2N2O1N2N2N2N2N2N2N2N3O00000000000000N2N2N2N2N2N2N2N2N2N2N2N2N]Q\\2"}, "image_id": 585, "id": 10447}, {"iscrowd": 0, "category_id": 1, "bbox": [223.0, 449.0, 51.0, 59.0], "area": 1543, "segmentation": {"size": [512, 512], "counts": "en_32m?2N2N2N2N2N2N2N2N3M2O1N2N2N2N2N2N200O2M2O1N2^AfN[>\\1cAfN[>a1N201O000000O1N2N1O002N2O1N2^O\\AEg>8[AFg>8[AFg>8[AFg>8[AFg>8[AFg>8[AFg>8b0O1N3M2N2NXaf3"}, "image_id": 585, "id": 10448}, {"iscrowd": 0, "category_id": 1, "bbox": [414.0, 450.0, 22.0, 23.0], "area": 237, "segmentation": {"size": [512, 512], "counts": "`^_62m?2N2N2N2N2N2N2N00000000000002N2N2N2N3M2N2NgaU1"}, "image_id": 585, "id": 10449}, {"iscrowd": 0, "category_id": 1, "bbox": [403.0, 456.0, 12.0, 11.0], "area": 72, "segmentation": {"size": [512, 512], "counts": "]nY61n?2N2N2N2N2O01N2N2N2N2NdQ`1"}, "image_id": 585, "id": 10450}, {"iscrowd": 0, "category_id": 1, "bbox": [423.0, 461.0, 31.0, 30.0], "area": 482, "segmentation": {"size": [512, 512], "counts": "jnc61n?2N2N2N2N2N2N2O1N2N3M2N2N2N2O10O1N2N2N2N2N2N2N2N2O1N2N2N3M2NTal0"}, "image_id": 585, "id": 10451}, {"iscrowd": 0, "category_id": 1, "bbox": [276.0, 476.0, 30.0, 30.0], "area": 474, "segmentation": {"size": [512, 512], "counts": "Y_Z42m?2N3N1N2N2N2N2N2N2N2N2N2N2N20O1N2N2N2N2O1N2N2N2N2N2N2N3M2Ne`V3"}, "image_id": 585, "id": 10452}, {"iscrowd": 0, "category_id": 1, "bbox": [181.0, 482.0, 39.0, 30.0], "area": 680, "segmentation": {"size": [512, 512], "counts": "loj21n?2M3N2N1O1N2O1O1O1O1N2O1O1O1N2O1O1O1O1N2O11O1O1O001O1O1O1O001Fh@JY?4i@KX?3j@LW?1l@NT?1n@NS?0n@0S?No@1a?OQ`a4"}, "image_id": 585, "id": 10453}, {"iscrowd": 0, "category_id": 1, "bbox": [411.0, 498.0, 49.0, 14.0], "area": 507, "segmentation": {"size": [512, 512], "counts": "oo]61n?1O1O1O1O1O1O1O1O1O1O1O1O100000000000000000000000000000000000000000000001O1O2N1O1O1O1O1O1O1O1O1OQ`i0"}, "image_id": 585, "id": 10454}, {"iscrowd": 0, "category_id": 1, "bbox": [382.0, 500.0, 8.0, 8.0], "area": 60, "segmentation": {"size": [512, 512], "counts": "d_o58h?000000000000L``l1"}, "image_id": 585, "id": 10455}, {"iscrowd": 0, "category_id": 1, "bbox": [386.0, 505.0, 14.0, 7.0], "area": 50, "segmentation": {"size": [512, 512], "counts": "o_Q61o?0O1O1O1O1O1O11O1O1O1O1O1OQ`g1"}, "image_id": 585, "id": 10456}, {"iscrowd": 0, "category_id": 1, "bbox": [221.0, 0.0, 45.0, 46.0], "area": 1226, "segmentation": {"size": [512, 512], "counts": "P`^32n?2N2N2N3M2N2N3M2N2N3M2N2N2N3M2N2N3M2N2N1O00O100O100O10000O2O2M3N3L3N3M2M4M2M3N3L3N3M2M4Mmnj3"}, "image_id": 586, "id": 10457}, {"iscrowd": 0, "category_id": 1, "bbox": [173.0, 4.0, 18.0, 11.0], "area": 176, "segmentation": {"size": [512, 512], "counts": "U`f25k?5K000O10O100000000000000000000000l_P5"}, "image_id": 586, "id": 10458}, {"iscrowd": 0, "category_id": 1, "bbox": [151.0, 108.0, 19.0, 54.0], "area": 524, "segmentation": {"size": [512, 512], "counts": "ld[25c?8O11OJ6H9YO\\OgAm0U>a00000000N2@iAnNa>i0?G9J600N2HklZ5"}, "image_id": 586, "id": 10459}, {"iscrowd": 0, "category_id": 1, "bbox": [447.0, 64.0, 28.0, 28.0], "area": 400, "segmentation": {"size": [512, 512], "counts": "]bo61n?2N2N2N3M2N2O1N2N2N2N2N3M1O02N2O1N2N3M2N2N2N2N2N2N2O2Mb]b0"}, 
"image_id": 587, "id": 10460}, {"iscrowd": 0, "category_id": 1, "bbox": [369.0, 92.0, 78.0, 117.0], "area": 4217, "segmentation": {"size": [512, 512], "counts": "Qfh51m?3L3N3M2M4M2M4M2M3N3L3N3M2M4M2M4M2M3N3L3N3M2M4M2M3N3L3N3L3N3M2M4M2M100O010O10O10O11O2M4M2N2N3L3N3M2M4M2N2N3L3N3M2N3L3N3M2M3N3M2N3L3N3M2N3L3N2N3L3N3M2N3L3Ng\\P1"}, "image_id": 587, "id": 10461}, {"iscrowd": 0, "category_id": 1, "bbox": [1.0, 203.0, 13.0, 14.0], "area": 104, "segmentation": {"size": [512, 512], "counts": "cf02l?3M2M3O20O010O010M2N3M2Ncih7"}, "image_id": 587, "id": 10462}, {"iscrowd": 0, "category_id": 1, "bbox": [28.0, 215.0, 15.0, 13.0], "area": 117, "segmentation": {"size": [512, 512], "counts": "mV>3k?2M4N1010O0010O00010OO2M2M3NWYZ7"}, "image_id": 587, "id": 10463}, {"iscrowd": 0, "category_id": 1, "bbox": [107.0, 218.0, 140.0, 90.0], "area": 5735, "segmentation": {"size": [512, 512], "counts": "oge13k?2N3L3N2N3L3N3L3N2N3L3N3M2M3N3M201O010O01O01O010O01O01O010O010O01O01O010O01O010O01O01O010O01O010O01O010O01O01O010O010O01O01O010O010O00010O010O00010O010O010O00010O010O0010O0010O010O0010O0010O0010O0010O0010O010O0010O0010O010O00010O010O00010O010O01O0N2M4M2N3L3N3M2M3N3M2M4M2M4M2N2MfWT4"}, "image_id": 587, "id": 10464}, {"iscrowd": 0, "category_id": 1, "bbox": [374.0, 224.0, 17.0, 13.0], "area": 125, "segmentation": {"size": [512, 512], "counts": "UWk52l?3M2O2O01O010O01O01O010O010O0M3NmXl1"}, "image_id": 587, "id": 10465}, {"iscrowd": 0, "category_id": 1, "bbox": [55.0, 226.0, 15.0, 14.0], "area": 123, "segmentation": {"size": [512, 512], "counts": "Ygk02l?3M2N3N10010O010O010OO2M2M4Mkhl6"}, "image_id": 587, "id": 10466}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 243.0, 219.0, 105.0], "area": 3552, "segmentation": {"size": [512, 512], "counts": "c7a0`?O00010O010O00010O010O00010O010O010O00010O010O00010O010O010O00010O010O00010O010O00010O010O010O00010O010O00010O010O010O00010O010O00010O010O00010O010O010O00010O010O00010O010O010O00010O010O00010O010O00010O010O010O00010O010O00010O010O010O00010O010O00010O010O0010O0010O010O00010O010O00010O010O010O00010O010O00010O010O0010O0010O010O00010O010O00010O010O010O00010O010O00010O010O0010O0010O010O00010O010O00010O010O010O00010O010O00010O010M2M4M2N2MfUb4"}, "image_id": 587, "id": 10467}, {"iscrowd": 0, "category_id": 1, "bbox": [391.0, 247.0, 64.0, 99.0], "area": 2863, "segmentation": {"size": [512, 512], "counts": "]jS62k?3N3M2M4M2N2M4M2M4M2N2M4M2N3L3N2N3L3N3M2M4M2N2M4M2M4M2N0O10O10O10O10O10O2O3L3N3M2M4M2N2M4M2N3L3N3M2M3N3M2M4M2M4M2N2M4M2N3L3N3MoWl0"}, "image_id": 587, "id": 10468}, {"iscrowd": 0, "category_id": 1, "bbox": [436.0, 266.0, 64.0, 99.0], "area": 2938, "segmentation": {"size": [512, 512], "counts": "Q[j61m?2M3N3L3N3L3N2M4M2M4M2M3N3M2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M2OO010O0100O03N2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N2M_g5"}, "image_id": 587, "id": 10469}, {"iscrowd": 0, "category_id": 1, "bbox": [264.0, 267.0, 17.0, 13.0], "area": 129, "segmentation": {"size": [512, 512], "counts": "aXT42k?3N3N110O01O01O010O00010O01O0M3NbWc3"}, "image_id": 587, "id": 10470}, {"iscrowd": 0, "category_id": 1, "bbox": [282.0, 274.0, 17.0, 13.0], "area": 125, "segmentation": {"size": [512, 512], "counts": "hX]42k?3M40O01O01O010O01O01O010O000M4MZWZ3"}, "image_id": 587, "id": 10471}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 292.0, 203.0, 98.0], "area": 3323, "segmentation": {"size": [512, 512], "counts": 
"T9a0`?O010O01O01O010O010O01O01O010O01O01O010O01O01O010O010O01O01O010O01O01O010O01O01O010O010O01O01O010O01O01O010O01O01O010O010O01O01O010O01O01O010O01O01O010O010O01O01O010O01O01O010O01O01O010O010O01O01O010O01O01O010O01O01O010O010O01O01O010O01O01O010O01O01O010O010O01O01O010O01O01O010O01O01O010O010O01O01O010O01O01O010O01O01O010O010O01O01O010O01O01O010O01O01O010O010O01O01O010O01O01O010O01O01O010O010O0M3N3M2M4MZTj4"}, "image_id": 587, "id": 10472}, {"iscrowd": 0, "category_id": 1, "bbox": [481.0, 297.0, 31.0, 87.0], "area": 1398, "segmentation": {"size": [512, 512], "counts": "ak`73k?3M2M3N3L3N3L3N3M2M3N3L3N3L3N3M2M4M2M3N3L3N3M2M4M2M3N3L01000iF"}, "image_id": 587, "id": 10473}, {"iscrowd": 0, "category_id": 1, "bbox": [223.0, 318.0, 43.0, 26.0], "area": 390, "segmentation": {"size": [512, 512], "counts": "Vj_31l?3M4M210O010O00010O010O010O010O00010O010O010O00010O010O010O010O00010O010O010ON3M2N2Ncej3"}, "image_id": 587, "id": 10474}, {"iscrowd": 0, "category_id": 1, "bbox": [86.0, 359.0, 88.0, 67.0], "area": 3244, "segmentation": {"size": [512, 512], "counts": "Z\\[13k?3L3M4M2M3M4M2M4M2M3M4M2M3O2O010O00010O010O00010O010O00010O010O00010O0010O0010O0010O0010O0010O0010O00010O010O00010O010O00010O010O00010O010O00010ON2N3L3N3L3N2M4M2M4L3N2M4M2M4MncX5"}, "image_id": 587, "id": 10475}, {"iscrowd": 0, "category_id": 1, "bbox": [194.0, 365.0, 51.0, 59.0], "area": 1525, "segmentation": {"size": [512, 512], "counts": "d\\Q33j?3N2N3L3N3M2N3N11O01O001M2N3L3N2N3L3N3L3N2N30O01O010O01O010O01OWOeAM\\>OgA2X>LjA4W>IlA6T>GoA:P>DSB;n=BTB?k=^OYBa0h=\\OZBd0a>10O0M4M2M4M2N2M4M2N^SU4"}, "image_id": 587, "id": 10476}, {"iscrowd": 0, "category_id": 1, "bbox": [440.0, 431.0, 61.0, 81.0], "area": 2533, "segmentation": {"size": [512, 512], "counts": "m_l61m?2M4M2M3N3L3N2N2M3N2M3N2M3N2M3N2N2M3N2M3N2M3N2M3N2N2M3N2M3N2M3N20N2N3M2M3N3L3N3L3N2M4M2N3L3N2M4M2M4M2M3N3M2M4M2M3N3L3N3LYR5"}, "image_id": 587, "id": 10477}, {"iscrowd": 0, "category_id": 1, "bbox": [493.0, 464.0, 19.0, 48.0], "area": 480, "segmentation": {"size": [512, 512], "counts": "mof73k?2M3N2M3N2M3N2M3N2M3N2M3N2M3N2M3N2M3"}, "image_id": 587, "id": 10478}, {"iscrowd": 0, "category_id": 1, "bbox": [3.0, 0.0, 73.0, 29.0], "area": 1110, "segmentation": {"size": [512, 512], "counts": "S`11m?2O2O00001O001O001O00001O001O001O00001O001O00001O001O001O00001O001O001O00001e@^OV?g0O001O001O00001O001O0000M3N2N2M3N2N200001O001O00001OO1N2N2M3N2M3N2NRPj6"}, "image_id": 591, "id": 10479}, {"iscrowd": 0, "category_id": 1, "bbox": [129.0, 0.0, 66.0, 16.0], "area": 713, "segmentation": {"size": [512, 512], "counts": "P`P21o?001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O1O000000000000O100000000000000O1000000000000O1000000000001NP`n4"}, "image_id": 591, "id": 10480}, {"iscrowd": 0, "category_id": 1, "bbox": [401.0, 0.0, 10.0, 4.0], "area": 23, "segmentation": {"size": [512, 512], "counts": "P`X61o?001O00001O00001ONR`b1"}, "image_id": 591, "id": 10481}, {"iscrowd": 0, "category_id": 1, "bbox": [437.0, 0.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "P`j61o_U1"}, "image_id": 591, "id": 10482}, {"iscrowd": 0, "category_id": 1, "bbox": [487.0, 0.0, 25.0, 39.0], "area": 778, "segmentation": {"size": [512, 512], "counts": "l`c73Z?OTA4j>NTA5i>NTA4i>OUA3i>0SA4l>LRA6n>?O001O00001O001O001O00001O00O1N2N2N2"}, "image_id": 591, "id": 10483}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 13.0, 10.0, 23.0], "area": 137, "segmentation": {"size": [512, 512], "counts": "=f0Z?10N1N3L3N3M2M3N3Maoj7"}, "image_id": 591, "id": 10484}, {"iscrowd": 0, "category_id": 
1, "bbox": [298.0, 20.0, 73.0, 84.0], "area": 3296, "segmentation": {"size": [512, 512], "counts": "lQe45h?5K5M2O1N2N100N2M4K40000010O0001O0fAQOd=n0XBVOh=j0UBZOj=g0QB]OP>b0lABT>>hAGW>Q11O0001O0O100O2N100EXN]Bi1_=\\N_Bf1\\=^NdBb1X=cNfB^1Z=cNdB_1[=c000001N1O11O01L3A_B\\Nd=`1aB[Nc=a1aB[Nd=a1?L4M300O1O010O01O0100O101N1O101N1L4M3L6J6K5J5KW_V2"}, "image_id": 591, "id": 10485}, {"iscrowd": 0, "category_id": 1, "bbox": [137.0, 45.0, 67.0, 52.0], "area": 2146, "segmentation": {"size": [512, 512], "counts": "YbT24j?2M4M2N2M4M2M4M2M4M2M301O010O01O01O010O01O01O010O01O010O01O010O01O01O010O01O01M2N3L3N2M4O0010O010O0O1M4M2M4M2N2M4M2M4M2M4M2N2M4M2M4M\\ni4"}, "image_id": 591, "id": 10486}, {"iscrowd": 0, "category_id": 1, "bbox": [432.0, 53.0, 69.0, 93.0], "area": 3009, "segmentation": {"size": [512, 512], "counts": "cSh64j?2N3L3h@Gg>;VAHg>:VAHk>f001HRO\\An0b>TO_Al0^>VObAj0[>?N3L3N30O01D;1O010O10O01O010ON3N101O01L3N3M2M4M2M2OO10O10O010O10O10O02O3L3N3M2M4M2M3N3M2O2O01OO2M2M4M2N3L3N2M4M2NT^5"}, "image_id": 591, "id": 10487}, {"iscrowd": 0, "category_id": 1, "bbox": [354.0, 61.0, 71.0, 66.0], "area": 2394, "segmentation": {"size": [512, 512], "counts": "_Sa51k?5L3L4L4M4L30001M2m@XOo>m0O00010O0000L5N10001O01O00010O0N2L4O2O01O00010O0001O01O00M4K4M3L5L3L4N201O01O01O01O00N3LO2N4M4K4M3L4M4K4M3L5L3L4L4M4K4M3LS^[1"}, "image_id": 591, "id": 10488}, {"iscrowd": 0, "category_id": 1, "bbox": [22.0, 69.0, 53.0, 66.0], "area": 1898, "segmentation": {"size": [512, 512], "counts": "iR;1m?2N3\\@KY?7e@KY?8d@JZ?`0M2N3M2M4N1[AQO1Om=P1oASO20n=m0nAVO1OR>k0jAXO20S>T1jAnNW>^1O0010O010O010O010O010O0010OO2M2N3M2N3M2N2N3M2M101O2N3M2N3M2N3M2M4M2N3M2N3M]]j6"}, "image_id": 591, "id": 10489}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 83.0, 11.0, 51.0], "area": 298, "segmentation": {"size": [512, 512], "counts": "c2c1a>L5J5L5K5K4L5J6K4L5Ko[j7"}, "image_id": 591, "id": 10490}, {"iscrowd": 0, "category_id": 1, "bbox": [498.0, 88.0, 14.0, 17.0], "area": 172, "segmentation": {"size": [512, 512], "counts": "RSi73j?3M4L3O110O00010O00010O000UM"}, "image_id": 591, "id": 10491}, {"iscrowd": 0, "category_id": 1, "bbox": [64.0, 102.0, 67.0, 48.0], "area": 1764, "segmentation": {"size": [512, 512], "counts": "STP12l?2N3M2[@H`?=a@CY?c0010O0001M2N3M2O2O010OTOPAh0U?0O010O001K4KROVAP1h>6M1O02N3M1O000102O010O010OO2M2N30O010O010M2N3N11O0N2N3M2M4N110O010O0O2M2N3M2N3M2M3N3Mb\\n5"}, "image_id": 591, "id": 10492}, {"iscrowd": 0, "category_id": 1, "bbox": [327.0, 113.0, 66.0, 65.0], "area": 2405, "segmentation": {"size": [512, 512], "counts": "PdS51m?5K4L4K6K4L4L4O2M2O1N3M2O1N2ON10001N101N100O2O001N100O2O03N1N2N2O2M2O1N3M2O1N01N100O2O001N100O2O0O2O000O2O0O2O0O101N101O2FSA\\OP??UA_On>;?L4Ka[k1"}, "image_id": 591, "id": 10493}, {"iscrowd": 0, "category_id": 1, "bbox": [124.0, 141.0, 64.0, 71.0], "area": 2684, "segmentation": {"size": [512, 512], "counts": "oUn12l?2M4X@Ka?;M4M2M4M2M3N3N11O01N1M4M2M3M4M2M4M2M3N3L3O2O01O01O010O01O01O01O010O01O01O0O2M2M3N2M101N3N3L3M4M2M3N3L3N3L3N2M4L3N3L3N2M4MZkQ5"}, "image_id": 591, "id": 10494}, {"iscrowd": 0, "category_id": 1, "bbox": [390.0, 149.0, 65.0, 68.0], "area": 2281, "segmentation": {"size": [512, 512], "counts": "eUS61l?4M2M3N3M2M4M2M40O0010O0010O0010O0010O010O0010O0M4M2\\ATOQ>o0mASOQ>P1kASOR>P1lASOQ>P1kASOS>_1M210O01O01O010O01O01O010O010O01O0N2N3M2M4@eAWO]>g0eAVO^>g0fAVO]>g0eAWO]>f0a0M2N2M4M2M4M2N3LP[l0"}, "image_id": 591, "id": 10495}, {"iscrowd": 0, "category_id": 1, "bbox": [483.0, 154.0, 29.0, 38.0], "area": 802, "segmentation": {"size": [512, 512], "counts": 
"Tea74k?4M3L4M3M3L4M3M3L100O01000O01000O010O10O10O10O10000O10O2UK"}, "image_id": 591, "id": 10496}, {"iscrowd": 0, "category_id": 1, "bbox": [481.0, 196.0, 31.0, 48.0], "area": 1028, "segmentation": {"size": [512, 512], "counts": "Rg`74j?2M4M2M5L3L3N3L3N3L3N2M4O010O010O00010O010O01O01O010O010O01OdI"}, "image_id": 591, "id": 10497}, {"iscrowd": 0, "category_id": 1, "bbox": [83.0, 212.0, 55.0, 66.0], "area": 2057, "segmentation": {"size": [512, 512], "counts": "WhY11l?4M2M4M2N2M4M2M4M2M3N3M2M4M2M3N3L3N3L3N2N3O010O01O010O01O01O010O010N1N2M4M2M4M2M4M2M3N3L3N3L3N3L3N2M4M2M4M2MQij5"}, "image_id": 591, "id": 10498}, {"iscrowd": 0, "category_id": 1, "bbox": [357.0, 215.0, 63.0, 48.0], "area": 1753, "segmentation": {"size": [512, 512], "counts": "cgb51l?3N3L3N2M4M2M4N11O01O010O01OM4M2M4M2N210O010O00010O010O00010O010O00010O010O00010O010O00010O0M4M20N3M2M4M2M3N3L3N3L3N2M4M2M4Moh]1"}, "image_id": 591, "id": 10499}, {"iscrowd": 0, "category_id": 1, "bbox": [431.0, 222.0, 28.0, 37.0], "area": 572, "segmentation": {"size": [512, 512], "counts": "bgg61l?3N2N3b@In>:PAHn>;o@Hn>:o@In>g0M2O1010L3N2N3L3010O010O00N3L3N3M2M4M2N2MnXj0"}, "image_id": 591, "id": 10500}, {"iscrowd": 0, "category_id": 1, "bbox": [453.0, 238.0, 27.0, 30.0], "area": 507, "segmentation": {"size": [512, 512], "counts": "RhR73j?3N3L3M3N3L3N3M200010O01O01O010O00010M2M4M2M3M4M2M4M]h?"}, "image_id": 591, "id": 10501}, {"iscrowd": 0, "category_id": 1, "bbox": [129.0, 265.0, 15.0, 16.0], "area": 141, "segmentation": {"size": [512, 512], "counts": "chP21m?3L3N3M2O20O010O01OO2M2N3M2Negg5"}, "image_id": 591, "id": 10502}, {"iscrowd": 0, "category_id": 1, "bbox": [349.0, 270.0, 64.0, 57.0], "area": 2030, "segmentation": {"size": [512, 512], "counts": "ei^53k?2M3N3L3M3N3L3M4M200010O00010O010O00010O01O01O010OM3N3L3M4M2M3M4M2O1010O010O00010N1N3L3M3N3L3M011N310O000O2M2M4L3N2M4L3N2M4M2M_Wa1"}, "image_id": 591, "id": 10503}, {"iscrowd": 0, "category_id": 1, "bbox": [26.0, 271.0, 70.0, 84.0], "area": 2530, "segmentation": {"size": [512, 512], "counts": "oX=1n?2O2M2N2N3N1N2N3M2O1N3M2N2O2M2N2N3N1N2N3M20010O0010O0001N1N2O2M2N2N3O00010O00010O00010O00010O00\\NkA\\1V>bNlA^1S>`NoA`1Y>N3N1N2N3M2O1N3M2WOQA>Q?@QA?Q?_OPA?R?_OQA>Z?O1N3M2N2O2M2N^e_6"}, "image_id": 591, "id": 10504}, {"iscrowd": 0, "category_id": 1, "bbox": [508.0, 274.0, 4.0, 10.0], "area": 23, "segmentation": {"size": [512, 512], "counts": "jXn71l?4M2M3^G"}, "image_id": 591, "id": 10505}, {"iscrowd": 0, "category_id": 1, "bbox": [446.0, 286.0, 50.0, 67.0], "area": 2004, "segmentation": {"size": [512, 512], "counts": "bZo63k?3L3^OHZA:c>IZA;c>HZA:c>I[A:b>IZA:d>HZA;b>c0N2M4M2M4M2N201O010O010O00010O010O01O01OM4M2N3L3N2M4M2N3L3N2M4M2M4M2N3L3N2M4M2M4Mff7"}, "image_id": 591, "id": 10506}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 300.0, 24.0, 62.0], "area": 972, "segmentation": {"size": [512, 512], "counts": "^9l1S>10O1000O10O10004L3L5L3M4L4K4M4L4K4M4L4L3L5L4Lmdc7"}, "image_id": 591, "id": 10507}, {"iscrowd": 0, "category_id": 1, "bbox": [501.0, 309.0, 11.0, 56.0], "area": 466, "segmentation": {"size": [512, 512], "counts": "Xkj74a?;De0aA^O^>b0`A@a>?\\ADd>=YAFg>9VAJj>g00O00010O010OO2L30010O010O01O01O010O010O00010O01N1O110O0010O010O0010O0010O0010O0010O010O0010O0010O0010O0010O0010O00M4O010O01M2N3L3N2N3L3N3L3N2N3L3N3L3N2MaT`4"}, "image_id": 591, "id": 10511}, {"iscrowd": 0, "category_id": 1, "bbox": [104.0, 360.0, 28.0, 19.0], "area": 243, "segmentation": {"size": [512, 512], "counts": "^[d13k?2M3O20O01O010O01O010O01O010O01O010O01O01O010O01ON3L3N`dm5"}, "image_id": 591, "id": 10512}, {"iscrowd": 0, 
"category_id": 1, "bbox": [96.0, 370.0, 30.0, 30.0], "area": 544, "segmentation": {"size": [512, 512], "counts": "U\\`13k?2M4M2M4M2M3N3M210O01O01O010O010O00010O01O0N2M4M2M4M2M3N3LXdP6"}, "image_id": 591, "id": 10513}, {"iscrowd": 0, "category_id": 1, "bbox": [62.0, 371.0, 25.0, 26.0], "area": 366, "segmentation": {"size": [512, 512], "counts": "m[o02m?3N1N3M3N1N3M2O2M2N100O00010O03M2O2M2N3N2M2N3N1NPTd6"}, "image_id": 591, "id": 10514}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 376.0, 6.0, 21.0], "area": 89, "segmentation": {"size": [512, 512], "counts": "h;e0[?01O5J6K5Khcl7"}, "image_id": 591, "id": 10515}, {"iscrowd": 0, "category_id": 1, "bbox": [15.0, 389.0, 67.0, 85.0], "area": 2862, "segmentation": {"size": [512, 512], "counts": "Zn71l?3M4M2M3M4L3O20O0010O0010O00010O01N1nN[OfBh0W=\\OeBh0X=ZOfBh0W=\\OeBh0X=ZOeBi0X=ZOfBh0X=[OdBi0X=ZOeBi0Y=ZOdBi0[=WObBl0_=j0010O01O01O010O01O01O0N3M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L^cf6"}, "image_id": 591, "id": 10516}, {"iscrowd": 0, "category_id": 1, "bbox": [136.0, 403.0, 62.0, 58.0], "area": 2061, "segmentation": {"size": [512, 512], "counts": "g]T23k?2M3N3M2N3L3N2N3L3N3M2N3L3N2N3M2010O010O00010O010O00010O010O010O00010O010O0010O0010eN`AT1`>jNbAW1d>O00010O0O2M2M4M2N2M4M2N3L3N2M4M2N3Lgbl4"}, "image_id": 591, "id": 10517}, {"iscrowd": 0, "category_id": 1, "bbox": [308.0, 409.0, 80.0, 59.0], "area": 2547, "segmentation": {"size": [512, 512], "counts": "g]j42k?4L3N3L3M3N3L3M4M2M3M4O001O01O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010N1M3N3L3N3L3N2M4M2M4M2M3N_bm1"}, "image_id": 591, "id": 10518}, {"iscrowd": 0, "category_id": 1, "bbox": [74.0, 427.0, 29.0, 34.0], "area": 590, "segmentation": {"size": [512, 512], "counts": "S^U13k?3L3N2M4M2N3L3N2M4M201O01O01O010O01N1N2M4M2M4M2M4M2N2M4M`R\\6"}, "image_id": 591, "id": 10519}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 441.0, 2.0, 3.0], "area": 4, "segmentation": {"size": [512, 512], "counts": "i=3n?NWbn7"}, "image_id": 591, "id": 10520}, {"iscrowd": 0, "category_id": 1, "bbox": [378.0, 441.0, 26.0, 38.0], "area": 536, "segmentation": {"size": [512, 512], "counts": "b^m52k?3M3M4M2M4L3N2M4L310O0PATOm>P1O0010O001HRA]Om>`0WA]Ok>a0WA\\Om>`0;N3L3N2M4MRbe1"}, "image_id": 591, "id": 10521}, {"iscrowd": 0, "category_id": 1, "bbox": [401.0, 452.0, 59.0, 60.0], "area": 1939, "segmentation": {"size": [512, 512], "counts": "]oX63k?2M4M2N2M4M2N3N1010O0010O010O001N1N2N3L3N3M2M4M2N2M4M2N3N110O00010O010O010O0001M2N3L3N3M2M3N3M2M4M2N3L3N2N3L3N3M2M4Maai0"}, "image_id": 591, "id": 10522}, {"iscrowd": 0, "category_id": 1, "bbox": [107.0, 457.0, 64.0, 53.0], "area": 1853, "segmentation": {"size": [512, 512], "counts": "Uoe11m?2M3N3M2M4M2M4M2N2M4M2N30O00010O010O0010O0010OTAQOg>T11O01O010O01O01O010O01O010O01O01O010O01O01O010O01O01O0O2M2M4M2M3N3L3N3M2M3N3L3NTQZ5"}, "image_id": 591, "id": 10523}, {"iscrowd": 0, "category_id": 1, "bbox": [452.0, 471.0, 60.0, 41.0], "area": 1454, "segmentation": {"size": [512, 512], "counts": "n_R72l?2M3N2N2M3N2M3N2N2M31O001O00001O00001O001O00001O001O00O1M3N2M3N2M3N2M3M31O001O00001O001O00001O000M4M2M4L3N2M4M2M4M2M3MSA"}, "image_id": 591, "id": 10524}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 474.0, 41.0, 38.0], "area": 1053, "segmentation": {"size": [512, 512], "counts": "j>T1l>M301O010O010O000TOQAh0o>UOSAk0R?0O01O01O000N200001O001O00001O001O001O0XOk@e0X?01O001N1N2N3L3N3L3NdP[7"}, "image_id": 591, "id": 10525}, {"iscrowd": 0, "category_id": 1, "bbox": [54.0, 477.0, 29.0, 30.0], "area": 507, "segmentation": {"size": 
[512, 512], "counts": "a_k02l?2N3L3N3L3N2N3L3O2O0010O010O0010O010OO1N3M2M4M2N3L3N2N3MmPf6"}, "image_id": 591, "id": 10526}, {"iscrowd": 0, "category_id": 1, "bbox": [281.0, 482.0, 71.0, 30.0], "area": 1350, "segmentation": {"size": [512, 512], "counts": "mo\\43k?2M3N2M3N2M3N2M3O11O001O00001O001O00001O001O001O00001O001O001O00001O001O00O1N2M3M3N2M3N2N2001O00001O001O001O00001O001O001O001N1N2M4M2N3M2M4Me`_2"}, "image_id": 591, "id": 10527}, {"iscrowd": 0, "category_id": 1, "bbox": [357.0, 490.0, 26.0, 22.0], "area": 365, "segmentation": {"size": [512, 512], "counts": "hob53k?2N2N3M2N3M2N3O010O010O01O001O001O001O001L3N3M2N3M_PP2"}, "image_id": 591, "id": 10528}, {"iscrowd": 0, "category_id": 1, "bbox": [381.0, 501.0, 33.0, 11.0], "area": 194, "segmentation": {"size": [512, 512], "counts": "non52l?2M3N2001O00M31O00001O001O001O00001O001O00001O001O00001O001O0000Q``1"}, "image_id": 591, "id": 10529}, {"iscrowd": 0, "category_id": 1, "bbox": [105.0, 506.0, 15.0, 6.0], "area": 50, "segmentation": {"size": [512, 512], "counts": "mod13k?2O11O001O00001O001O00001O00Q`S6"}, "image_id": 591, "id": 10530}, {"iscrowd": 0, "category_id": 1, "bbox": [162.0, 289.0, 87.0, 80.0], "area": 3560, "segmentation": {"size": [512, 512], "counts": "UZa21m?3L3N3L3N2M4M2M4M2M3N3M2M3N3M2010O0010O010O00010O01O01O010O01O010O01O01O010OjAgNe=Y1XBkNg=V1VBlNk=S1RBQOm=P1PBROQ>m0lAVOT>\\1O010O01O010O00010O010O00010O010O00010O0010O01M2N2M4M2M4M2M3N3L3N3L3N3L3N2M4M2M4M2N2MUVS4"}, "image_id": 594, "id": 10531}, {"iscrowd": 0, "category_id": 1, "bbox": [353.0, 289.0, 68.0, 81.0], "area": 2824, "segmentation": {"size": [512, 512], "counts": "Qk`54j?2M4M2N2M4M2M4M2M3N3M2M4M2M3N3L3N3L3N2N3L3N3L3N3L3N2O2O010O01O01OO2M2N3L3N2M4M2M4M2N2M4M2M4M2M3N3L3N3M2M4M20010O010O01OO2M2M4M2N2M4M2MbV]1"}, "image_id": 594, "id": 10532}, {"iscrowd": 0, "category_id": 1, "bbox": [71.0, 299.0, 38.0, 59.0], "area": 1031, "segmentation": {"size": [512, 512], "counts": "SjS12l?3L3N3L3N2N3L3N3L3N2010O01`ASOk=m0RBVOo=j0nAYOQ>g0lA\\OU>c0iA_OW>b0eABZ>>dAD]>P1O01O0gNdAo0[>nNhA;M4\\>^OiA;O7W>\\OmA:N:`>DcA;^>DbA=]>CcA=^>_OfAa0Y>]OiAa0m>L3N2M4M2M4M^UY6"}, "image_id": 594, "id": 10533}, {"iscrowd": 0, "category_id": 1, "bbox": [114.0, 336.0, 103.0, 135.0], "area": 6067, "segmentation": {"size": [512, 512], "counts": "i]i12l?3L3N2M4M2M3M4O010O01O01O010O01O010O01O01O010O01O01O010OM4M2M3N3L3N3L3N2M4M2@^N_Bf1]=]NaBe1]=]N`Bf1]=^N`Bb1`=`N]Bd1_=9aBmM]=S2cBPNY=P2gBSNW=m1iBVNS=k1mBWNQ=h1oB\\Nmj;@VDc0i;ZOWDi0j;TOVDo0i;nNXDT1i;iNVD[1i;bNZD\\1h;bN[D[1h;bNZD\\1h;aN[D\\1i;aNZD\\1h;aN[D]1g;aN\\D[1h;aN[D]1g;aN\\D\\1g;`N\\D^1f;`N\\D]1g;`N]D]1f;`N\\D^1f;_N^D]1T=N3M2M4M2N2M4M2N3M2M4M2N2M4M2N3LcSc4"}, "image_id": 594, "id": 10534}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 339.0, 84.0, 101.0], "area": 3655, "segmentation": {"size": [512, 512], "counts": "V;a0]?2M4M2M4M2M3N3M210O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01[BjN_9RAJn>6PAMo>3n@0R?1k@2T?=0001O001O00001O001O00001O001O00001O001O00001O00M3N2M3N2M3N2M3N2M3N2M3N2M3NRPU3"}, "image_id": 595, "id": 10539}, {"iscrowd": 0, "category_id": 1, "bbox": [383.0, 0.0, 7.0, 3.0], "area": 13, "segmentation": {"size": [512, 512], "counts": "P`o51o?001O00001OOQPm1"}, "image_id": 595, "id": 10540}, {"iscrowd": 0, "category_id": 1, "bbox": [420.0, 0.0, 2.0, 1.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "PPb61o?0PP]1"}, "image_id": 595, "id": 10541}, {"iscrowd": 0, "category_id": 1, "bbox": [506.0, 1.0, 6.0, 15.0], "area": 49, "segmentation": {"size": [512, 512], "counts": "]Pm71l?3N3M2M4M2O"}, "image_id": 595, "id": 
10542}, {"iscrowd": 0, "category_id": 1, "bbox": [165.0, 15.0, 60.0, 48.0], "area": 1699, "segmentation": {"size": [512, 512], "counts": "`ab23k?3L3@ITA;i>GTAGUA;h>ITA;h>`0O2O01O01O010O00010O00O2L3O20O00010O0001POTAk0Q?O00010O01O01O01O01O010O01O01O01O01ON3L3N0003L3M4L3N2M4L3N2M4L3MV__4"}, "image_id": 595, "id": 10543}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 25.0, 70.0, 65.0], "area": 2861, "segmentation": {"size": [512, 512], "counts": "i0W1i>1O010O01O010O01O010O010O0M301O010O010O00010O010O010O0O1N3M2O2O010O00010O010O010O00010O010O010O00010O01SOfA3Y>KiA5X>GlA9S>EoA;Q>CRBM2N3L3N3M2M3N3M2Nnml6"}, "image_id": 595, "id": 10544}, {"iscrowd": 0, "category_id": 1, "bbox": [313.0, 30.0, 91.0, 62.0], "area": 3387, "segmentation": {"size": [512, 512], "counts": "Ubl41m?2M4M2M3N3L3N3L3N2M4M2M4M20010O010O00010O010O00010O010OO1M4M210O00010O01M2M3N3L3O2O01O01O010O01O01O010O01O01O010O01O01M2N3L3N2M4O010O00010O010O0001N1M4M2M3N3L3E\\AXOg>e0[AXOh>e0\\AXOf>f0SO\\AP1b>9L3N3L3O1010O010O00010O010O0O1N3L3N3L3N2M4M2M4M2O110N1N3Ei@HZ?6h@HZ?5;M2Ma^a3"}, "image_id": 595, "id": 10546}, {"iscrowd": 0, "category_id": 1, "bbox": [506.0, 44.0, 6.0, 14.0], "area": 46, "segmentation": {"size": [512, 512], "counts": "gQm71m?3M2M3N3M2dN"}, "image_id": 595, "id": 10547}, {"iscrowd": 0, "category_id": 1, "bbox": [472.0, 63.0, 40.0, 60.0], "area": 1405, "segmentation": {"size": [512, 512], "counts": "YS\\72k?3N2N3M2M4M2N3L3N2N30O010O010O0001M2M4O001O01ON3M2N3L3N2M4M2N30O010O0010O0010OmM"}, "image_id": 595, "id": 10548}, {"iscrowd": 0, "category_id": 1, "bbox": [157.0, 81.0, 101.0, 72.0], "area": 3515, "segmentation": {"size": [512, 512], "counts": "oc^22k?3N2M4@GUA;h>IUA:h>HUA;i>HTA:i>a0M2M4M2N3L3O110O010O010O00010O010O01O01O010O010O00010O010OPOfA:[>CgA>X>@kA?U>^OnAb0S>[OPBe0o=XOTBh0m=UOVBk0i=SOYBm0]>010O010ON3M21O01O010O010O01O01OO2M2N3M2M4M2N2N3L13L3N3M2M3N3M2M4M201O00010O010O0010M2N3O0010O001O0M3N3M2M4M2G^@1f?L]@1T\\o3"}, "image_id": 595, "id": 10549}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 82.0, 46.0, 64.0], "area": 1870, "segmentation": {"size": [512, 512], "counts": "k2T1l>1N1N3M2M3O2O010O0010O0010O0aAcN[>b10O0010O010O0010O0010O010O0001ROgA4X>JjA6W>FmA:R>DPB
    BSB>m=^OVBb0j=\\OXBd0i=XO[Bg0_>N3M2M3N3L3N3M2MWlX7"}, "image_id": 595, "id": 10550}, {"iscrowd": 0, "category_id": 1, "bbox": [259.0, 104.0, 67.0, 90.0], "area": 2733, "segmentation": {"size": [512, 512], "counts": "jdQ41l?3N3L3N2N3L3O2O01O010ON3M2`AZOc=i0[BYOc=j0YBZOf=f0XB\\Oi=d0SB@l=`0RBBo=>mAFR>:lAHT>8jAJW>P1O010O00010O0N3L3N2N3L3N2M10O02OL]BPN`=P2bBPN[=Q2hBoMU=P2nBnMS=Q2PClMR=Q2a0M2M4M2N2M4M2FbAoNb>n0`APOb>o09O01OM4M2M4M2N2M4M2M4M2M3N3M`ll2"}, "image_id": 595, "id": 10551}, {"iscrowd": 0, "category_id": 1, "bbox": [328.0, 125.0, 53.0, 87.0], "area": 2560, "segmentation": {"size": [512, 512], "counts": "dUT53b00d>3XA0e>3YA0d>3XA0e>3YA0d>2YA1d>g0N3L3CcNUB`1h=cNUB_1h=dNUB_1i=cNUB`1g=>N3M2O101O010O0O1M2OO102N2M4M2M3N3L3N3L3N3L3N2N3L3N3L3N2M4M2M4M2M3N3L3N3M2M3N3Lk[Q2"}, "image_id": 595, "id": 10552}, {"iscrowd": 0, "category_id": 1, "bbox": [457.0, 125.0, 55.0, 46.0], "area": 1677, "segmentation": {"size": [512, 512], "counts": "jdT71l?3N3M2M3N3L3N3N11O01O001L3N3L3N2N3O0010O0010O0010O010OO1N3O0010O0010O01N1N2N30O010O0010O0010O00TOTAe0l>XOVAh0j>VOYAi0o>10VOm@e0W?10O\\K"}, "image_id": 595, "id": 10553}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 160.0, 41.0, 94.0], "area": 1933, "segmentation": {"size": [512, 512], "counts": "T5j2V=NO02O2M3N3O0010O00N3L3N3L3M3N3]NPBQ1S>lNPBR1S>jNPBS1S>kNPBR1R>kNQBR1a>M2M3N3O010O00010O010O000M4M2M4M2M3N3L3MWZ[7"}, "image_id": 595, "id": 10554}, {"iscrowd": 0, "category_id": 1, "bbox": [43.0, 178.0, 57.0, 79.0], "area": 2604, "segmentation": {"size": [512, 512], "counts": "Yfe01Y10Z=2cB1Z=3cBO[=3bB1Z=2VBEM;k=3UBDM=j=2VBEM;k=3UBDM1iNd=X21O01O010O01O01O010O01O01O010O01O0M3N3L3N3L3N2M4M2M4M2M3N3L3N30O00010O00O2M2M4M2M3N3L3N3L3N2Moi]6"}, "image_id": 595, "id": 10555}, {"iscrowd": 0, "category_id": 1, "bbox": [83.0, 194.0, 74.0, 98.0], "area": 2936, "segmentation": {"size": [512, 512], "counts": "hhY13j?3N2M4M2M4M2M3N3L3N3L3^OPORBR1k=QORBS1k=PORBR1k=QORBR1l=PORBS1m=`0010O00010OO2L3N2M30N3L3N3L3N2M4M2M4M1N10O010O103L3N1N1O0101XOmABV>:mABV>>jA@X>`0hA]O\\>c0dAZO^>f0bAWOb>h0;1O010O01O01O010O01O000M4M2M3N3L3N3L3NfYa5"}, "image_id": 595, "id": 10556}, {"iscrowd": 0, "category_id": 1, "bbox": [415.0, 197.0, 81.0, 77.0], "area": 3355, "segmentation": {"size": [512, 512], "counts": "Uh_63j?3N2N3L3N3M2M4M2N2M4M2N3L3N3M2N2M4M2N3L3N3M2M4M2N2M3NO2O2N3M2M3N3M2M4M2N3L3N210O010O010O00010O010O010O01O01O010O01M2O20O00010O001M2N3L3N2lNWAo0n>M4M2N3M2M4M2N2M4M2N3LYi7"}, "image_id": 595, "id": 10557}, {"iscrowd": 0, "category_id": 1, "bbox": [137.0, 219.0, 60.0, 90.0], "area": 2933, "segmentation": {"size": [512, 512], "counts": "bhT23k?3L3l@I]>9aAJ[>:aAI]>9aAJ[>:aAI_>7_AKb>4[A0c>g0N2M4M2M4H\\NPBf1n=7M4M2M4N100O2L3N3L3N2O2OO1N3L3N3L3N2M4M2M4M2M3N3L3N3L3N2O2SO]A;c>BaA>_>_OcAa0]>\\OgAd0X>ZOjAf0W>VOlAj0f>0O01L3N2M4M2M4M2M3NYXm4"}, "image_id": 595, "id": 10558}, {"iscrowd": 0, "category_id": 1, "bbox": [493.0, 234.0, 19.0, 55.0], "area": 541, "segmentation": {"size": [512, 512], "counts": "hhf72k?3N3L3M3N3L3N3L3N2M4M2M3N3L3N3L3N2M4eH"}, "image_id": 595, "id": 10559}, {"iscrowd": 0, "category_id": 1, "bbox": [200.0, 250.0, 87.0, 48.0], "area": 2133, "segmentation": {"size": [512, 512], "counts": "eXT31l?4M2N3L3N30O0010N1N3L3N3M2N210O0010O010O0010N101N101N1O101N101N1O2ZAhNb>\\1N12O1L4M110M2M3N3N1010O01O01O010O01O010O0O1M4M2N3L3N3NO3L3N1O0O01003N100010O010O010O00010O0N3L3N3M2M3N3MQX`3"}, "image_id": 595, "id": 10560}, {"iscrowd": 0, "category_id": 1, "bbox": [240.0, 280.0, 56.0, 87.0], "area": 2061, "segmentation": {"size": [512, 512], "counts": 
"R[h33k?3L3N3L3N2M4M2010O0010A[OaAe0\\>^ObAe0[>]OcAe0Z>_ObAd0[>_ObAe0[>a0M4M21O01O01O01OOOO010O010O01O01O010O011N3N3L3N3L3M3N3L3N2M4M2M4M2M3N3L3M4M2MTg[3"}, "image_id": 595, "id": 10561}, {"iscrowd": 0, "category_id": 1, "bbox": [174.0, 283.0, 60.0, 66.0], "area": 2118, "segmentation": {"size": [512, 512], "counts": "WZg22l?3L3N3@HSA;j>HTA:j>IRA;j>HTA:j>`0L3N2^AfN[>b1M2N30N1N3O00010O0010O0010O0UOdA2\\>KgA6Y>GjA8V>EmAAoA?Q>^OSBa0m=]OUBd0k=XOYBg0g=WO[Bj0]>01O010O01L3N2M4M2M4M2O110O010O0001A^A^Ob>`0aA]Ob>:[A@63b>:[AA53b>:g0L3N3L3N^fZ4"}, "image_id": 595, "id": 10562}, {"iscrowd": 0, "category_id": 1, "bbox": [293.0, 292.0, 54.0, 89.0], "area": 2660, "segmentation": {"size": [512, 512], "counts": "Yjb42l?3[ALY=7eBKY=8cBKZ=8cBLY=7eBKY=8cBLoeNSBX1P>fNSBW1\\>M4M2M3N3L3O20O0001N1M3N3L3M4M2M3NQVb2"}, "image_id": 595, "id": 10563}, {"iscrowd": 0, "category_id": 1, "bbox": [363.0, 292.0, 49.0, 52.0], "area": 1427, "segmentation": {"size": [512, 512], "counts": "^je51l?3M4^OIWA9f>JWA9g>JUA:g>IWA9f>JWA:f>a0M3M40O00010O010O00010O00001hN`Al0b>QOaAm0j>01O010O00010O01O01O0WOQAb0o>[OSAe0U?O010O01O01N1M3N3L3M4M2M3MPfa1"}, "image_id": 595, "id": 10564}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 298.0, 51.0, 64.0], "area": 2047, "segmentation": {"size": [512, 512], "counts": "V:R1m>10O10O01000O010O10O10O010N2M3N2N3L3N3L3N2N3M210O00010O010O00010O01N1M4M2M3N3A_AZOd>d0^AZOe>b0^A[Oe>c0^AZOd>c0?M2M4M2N2M4M2M^VV7"}, "image_id": 595, "id": 10565}, {"iscrowd": 0, "category_id": 1, "bbox": [55.0, 307.0, 77.0, 45.0], "area": 1868, "segmentation": {"size": [512, 512], "counts": "bjk02l?2M4M2M4M200010O010OM4M2N2M4M2M4M2N2010O010O01O01O010OO2M2M31M2N2M4M2N3L3N3L31O01O010O01O010O01O010O01O010O01O01O010O01O010O01O010O01M2N1N10O12M4M2M4M2NSfm5"}, "image_id": 595, "id": 10566}, {"iscrowd": 0, "category_id": 1, "bbox": [117.0, 343.0, 61.0, 99.0], "area": 3414, "segmentation": {"size": [512, 512], "counts": "Qlj12?0k>3RA0l>3QA0b0I]=:nA0b0I`=8kA1c0Jb=e0ZB^Of=b0XBA\\=F]Bi03D^=F^Bf02F]=GaBc0OJ]=EaBe0OH]=GaBc0OJ]=f1M3N3L3M4N11O01O010O01O01O01M2N3L3M3N3L2OO0102M4M2M3@WBfNm=W1UBgNm=V1VBgNn=V1UBgNm=V1`0O2O0010O00N3M2M4M2M3M4M2M4M2M3N3L3NQeV5"}, "image_id": 595, "id": 10567}, {"iscrowd": 0, "category_id": 1, "bbox": [407.0, 346.0, 69.0, 89.0], "area": 2723, "segmentation": {"size": [512, 512], "counts": "fl[63k?2M3N3N100O2O0O101N100O2M2N2M4n@ZOe>i0XA^Ob>T1J4_O`N^Bb1_=dN[B`1b=`0M4M2N2M4M2010O0O2M2M3N3M2M4M4L7I5J3AgAUO\\>h0gAUO[>i0gATO\\>l0dARO\\>Q172N2M100000O4M2N2M4M2N3M2O2O0010O00O2L3N3M2M4M2N2MQea0"}, "image_id": 595, "id": 10568}, {"iscrowd": 0, "category_id": 1, "bbox": [467.0, 354.0, 45.0, 106.0], "area": 2973, "segmentation": {"size": [512, 512], "counts": "nmY73k?3L3N3L3N2M4_O^O`Ad0]>_O`Ae0]>^O_Ae0^>^O`Ad0^>a0L3N3]O\\NeBg1X=\\NfBf1X=\\NeBh1W=\\NfBf1R=PNnBhNhAX1a>M2N2M4M2N3L3N3M2M3N3L3N3M2M3NYdi1"}, "image_id": 595, "id": 10571}, {"iscrowd": 0, "category_id": 1, "bbox": [167.0, 386.0, 60.0, 75.0], "area": 2414, "segmentation": {"size": [512, 512], "counts": "Vmc23j?4M2M3N3L3N3L3M3N3L3N3M21O01O0lAkN^=T1`BnN`=S1\\BPOd=P1ZBSOf=m0VBVOj=j0TBXOm=g0PB]Oo=d0mA_OS>X10O0010O0N3M20010O0010O001L2OO010O01KoA]NS>a1oA\\NT>a18M2M3M4M2M4M2M3N3L3M4M2M3N3L3N2MkS^4"}, "image_id": 595, "id": 10572}, {"iscrowd": 0, "category_id": 1, "bbox": [218.0, 398.0, 63.0, 89.0], "area": 2901, "segmentation": {"size": [512, 512], "counts": "R^]31l?3M3N3L3N3L3M3N3L3N2M4M2M12N20010OO1oAmNR=W1jBlNW=S1gBoNY=Q1dBSO\\=m0`BVO`=j0^BXOb=i0ZB[Of=d0XB^Oh=b0UBBj=Z1O1M4M2M4L30001O010OO1N3L3N0O00013L3N2M4L3N2M4M2M4M2M3_O\\ACh>9[AEg>9\\ACh>9[AEg>8]ADf>:b0L3N2M^Sc3"}, 
"image_id": 595, "id": 10573}, {"iscrowd": 0, "category_id": 1, "bbox": [280.0, 405.0, 66.0, 107.0], "area": 3794, "segmentation": {"size": [512, 512], "counts": "X^\\42k?6K4L4N0O2O0O2O001N100001N10001O0OjAAfT=@nB=V=BkB;W=FhB7\\=HeB4^=MaB1b=N^BOe=2[BKh=4XBIk=DRB434n=FQB612Q>EQB8OOS>GPB;LKX>GoA;LKg>3[AJh>4[AIh>4c0N3L_bb2"}, "image_id": 595, "id": 10574}, {"iscrowd": 0, "category_id": 1, "bbox": [337.0, 442.0, 74.0, 70.0], "area": 2834, "segmentation": {"size": [512, 512], "counts": "moX53k?2M3N2M3N2N2M3N2M3N2001O001O00001O00O1N2M3BTOfAn0X>TOeAo0X>TOfAn0X>UOdAn0Y>>N2M3N2N2N21O001O00001O001O0M3N3NO3L3N2M12M4M2M4M210O01O01O0O2M2M3N3M2M4M2M3N3L3N3M2M3N3L3N3L3NmQb1"}, "image_id": 595, "id": 10575}, {"iscrowd": 0, "category_id": 1, "bbox": [400.0, 474.0, 62.0, 38.0], "area": 1474, "segmentation": {"size": [512, 512], "counts": "o_X61m?2N2N2N2N200001ON2N2M3N2N2N2M3N2N2001O001O001O001O00001OO1N2N2N2M3N2O11O00001O001O00001O001O001O00001N1M4M2M3N3L3N3L3N2N3Ln`h0"}, "image_id": 595, "id": 10576}, {"iscrowd": 0, "category_id": 1, "bbox": [495.0, 501.0, 17.0, 11.0], "area": 144, "segmentation": {"size": [512, 512], "counts": "oog71m?2N2M3O1O100O10000001O001O00002N"}, "image_id": 595, "id": 10577}, {"iscrowd": 0, "category_id": 1, "bbox": [99.0, 0.0, 377.0, 163.0], "area": 34282, "segmentation": {"size": [512, 512], "counts": "Uca13j?3N2N3L3N3L3N3M2M3N3L3N3M2M4M2M3N3M2M4M2M4M2N2M4M2M4M2N2M4M2M4M2N3L3N2M4M2N3L3N3L3N201O001O00001O001O001O00001O001O001O00001O001O00001O001O001O00001O001O001O00001O001O001O00001O001O00001O001O001O00001O001O001O00001O001O00001O001O001O00001O001O001O00001O001O00001O001O001O00001O001O00O1N2N2M3N2M3N2N2M3N2N2M3N2M3N2N2M3N2N2M3N2N2M3N2M3N2N2M3N2N2M3N2M3N2N2M3N2N2M3N2M3N2N2M3N2N2M3N2M3N2N2M3N2N2M3N2N2M3N2N21O001O00001O001O00001O001O001O00001O001O001O00001O001O00001O001O001O00001O001O001dBjNl;V1QDmNo;S1oCPOPZCDf<=VCGi<9UCIk<7RCMm<4PCNP=b1O00001O001O001O00001O001O001O00001O001O0000SMZCa2e<]M]Cc2c000M3N1N30K5M310O0010O0010O001TOUAb0j>[OYAe0g>YO\\Ag0n>010O00010O010O00010O010O00010O010O0O2M2M3N3O0WAnNe>V100010O010O00010O010O0010O0010O010O00010O0PO[Ab0i>ZOZAd0h>ZO[Ab0S?N2M4M2M4M2Mj^\\3"}, "image_id": 598, "id": 10584}, {"iscrowd": 0, "category_id": 1, "bbox": [406.0, 16.0, 55.0, 53.0], "area": 1877, "segmentation": {"size": [512, 512], "counts": "RQ[63k?3M2N3L3N3M2N3L3m@[Ol>e0QA@l>m0N2M2O0O1O01O010lN[Ak0e>SO]An0c>nNaAQ1g>010O0010OM2OO04M2M4M2O20O0010O0010O0010O0010O0O2L3N2M4CWA_Ok>>YA^Ok>8PAJ7Lk>8QAH\\?5:M2MZ_i0"}, "image_id": 598, "id": 10585}, {"iscrowd": 0, "category_id": 1, "bbox": [509.0, 93.0, 3.0, 8.0], "area": 15, "segmentation": {"size": [512, 512], "counts": "Rcn72l?3L3SM"}, "image_id": 598, "id": 10586}, {"iscrowd": 0, "category_id": 1, "bbox": [325.0, 136.0, 56.0, 74.0], "area": 2199, "segmentation": {"size": [512, 512], "counts": "WfR53k?2M3SOIoA:n=HoA;n=IoA:n=HoA;n=IoA9o=InA;o=GoA;Q>FkA>R>DlA>R>i0M4O0010O00N3M2M4M20N2M4M2M3N3L3N3L3N210O0aNbA\\1b>0O01L3N2M4M2POTAi0S?N2N30O010O0001O0M4M2M3N3L3N3LW[Q2"}, "image_id": 598, "id": 10587}, {"iscrowd": 0, "category_id": 1, "bbox": [362.0, 170.0, 79.0, 85.0], "area": 2944, "segmentation": {"size": [512, 512], "counts": "aWe53k?2M3N3L3N3M2M3N3M210O010O00010O010O00010ON3M2N3L22M2M4M2N201O010O01O_OXOjAi0S>YOnAf0o=]OQBd0l=_OSBa0k=AVB?f=EYB;e=G\\B8a=K\\B9a=J\\B8a=K\\B9a=J\\B8a=U1N3L310O00010OM4M2M3N3M2M4M2M3N3\\NjAY1Y>dNjAZ1`>L3N3L3N2N3L3N3L3N2M4M2M4M2M3NUZS1"}, "image_id": 598, "id": 10588}, {"iscrowd": 0, "category_id": 1, "bbox": [435.0, 183.0, 62.0, 98.0], "area": 3197, "segmentation": {"size": [512, 512], "counts": 
"[hi61l?4M2N3L3N3L3N2M4^OXOhAj0U>YOhAk0U>XOhAj0U>YOiAi0U>YOhAk0T>b0N3L3IRNZBP2d=RNZBQ2b=8N3L3N2M4M2M4M2010O0010O00O2M2N3L3N2M4M2M4M2M3N3QNWBc1k=ZN\\B_1W>K2M3N3L3N3L3O110M2N3L3N3M2M3N3M2M_Y7"}, "image_id": 598, "id": 10589}, {"iscrowd": 0, "category_id": 1, "bbox": [498.0, 207.0, 14.0, 40.0], "area": 288, "segmentation": {"size": [512, 512], "counts": "`Wi71l?4M2M4M2M3N3L3N3L3N2M4L3N3`I"}, "image_id": 598, "id": 10590}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 216.0, 39.0, 81.0], "area": 1781, "segmentation": {"size": [512, 512], "counts": "T7R11Ok=S1100N1N3N2M2O0O010O0010O0010O010JlA`NU>`1lA_NV>^18N1N3N2O001000ON3N2M2O2M3N1Ao@MT?0n@OT?On@NT?0o@MT?1m@NU?OYY\\7"}, "image_id": 598, "id": 10591}, {"iscrowd": 0, "category_id": 1, "bbox": [281.0, 220.0, 76.0, 86.0], "area": 3462, "segmentation": {"size": [512, 512], "counts": "bh\\43m?2M4M2M3N2N3L3N2M100O10OO2N1IWOWAk0f>8O2M3N1O2M2100011N3N1N2N3N1N2OO001NGXNZBj1c=YNZBi1e=:M2O2N2M2O21N101N100O2O001N100O2M2N3L3N2M4M2M4M2N4K4M2M4M2M3N3M2M4M2M4M2M3N3M2MgX]2"}, "image_id": 598, "id": 10592}, {"iscrowd": 0, "category_id": 1, "bbox": [121.0, 234.0, 87.0, 79.0], "area": 3155, "segmentation": {"size": [512, 512], "counts": "\\hl12l?2M3N3L3N3L3N2M4M2M4M2M3M4N110O00010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O00010O01O01O010cAiNo=W1oAkNR>T1kAPOT>Q1iAQOW>\\110O010O00010O010OTOgA1Y>MjA2W>JlA7S>GoA9R>CRB=m=AUB?k=^OYBa0h=[O[Bf0d=XO^Bh0^>M2M4M2M3N3L3N3L3NPgg4"}, "image_id": 598, "id": 10593}, {"iscrowd": 0, "category_id": 1, "bbox": [21.0, 248.0, 82.0, 57.0], "area": 2483, "segmentation": {"size": [512, 512], "counts": "Ri:1n?2M2N3N1N2N3N1N3M2O2O000010O01O010O000N3M2M4M2N201N101O0O01M3N2M3N210O01O01O010O010O01O01O010O01O0O1M4M2M4M2M4M200O2N101N1O2O0OO2N3M2N3L3N3M2N3L3N2N3M2]Ol@8W?El@7W?Gk@7`?N3M2MkW\\6"}, "image_id": 598, "id": 10594}, {"iscrowd": 0, "category_id": 1, "bbox": [500.0, 255.0, 12.0, 35.0], "area": 224, "segmentation": {"size": [512, 512], "counts": "kXj72k?4M2M3N3L3M4M2M3N3L3N3PH"}, "image_id": 598, "id": 10595}, {"iscrowd": 0, "category_id": 1, "bbox": [336.0, 269.0, 66.0, 95.0], "area": 2884, "segmentation": {"size": [512, 512], "counts": "UZX52l?3L3N2M4M2M4M2M3N3M2M4M2N2nAkNY=V1cBmN^=Q1aBQO_=n0_BVO`=k0]BWOd=h0YB\\Of=e0WB]Oj=b0SBAm=?QBDn=W110O01O01O001M2M3N3M0O010O010O010O0101N3N3L3N3L3N2N3L3N3L3N2M4M2M4M2M4M2M3N3L3N3L3N2M4M[gf1"}, "image_id": 598, "id": 10596}, {"iscrowd": 0, "category_id": 1, "bbox": [393.0, 289.0, 67.0, 102.0], "area": 3019, "segmentation": {"size": [512, 512], "counts": "njT61m?2M3N3L3N3L3N2M4M2M4M200kAQOY=P1dBRO\\=n0aBUO`=k0]BXOb=h0\\BZOe=e0XB_Og=b0VB@j=`0SBCn==oAFP>:mAIT>R1O00001L3N3L3N2M4M2N3L10O010O010O010O2GeBmM]=Q2eBmM^=o1;N2M4M2N3L3N2M4M2M4M2M3N3L3N3L3N2N3L3N3L3N2M4Mgfi0"}, "image_id": 598, "id": 10597}, {"iscrowd": 0, "category_id": 1, "bbox": [495.0, 293.0, 17.0, 16.0], "area": 147, "segmentation": {"size": [512, 512], "counts": "\\ig71m?3L3N30O010O01O010O01O010O010O01dF"}, "image_id": 598, "id": 10598}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 296.0, 91.0, 59.0], "area": 3436, "segmentation": {"size": [512, 512], "counts": "bj]17i?0O1I7H8O10010J5H8001L3M3001O00000QATOj>Q11O000001O000001O00000001O000O1H800J6O2O00000000000010O00000000009G01O0000000000010O000000000001O013L00001O000001O000001O2N000001O00L4H9H7I70000LWfT5"}, "image_id": 598, "id": 10599}, {"iscrowd": 0, "category_id": 1, "bbox": [440.0, 323.0, 71.0, 88.0], "area": 3036, "segmentation": {"size": [512, 512], "counts": 
"h[l61m?2M4M2M3N3L3N2M4L310O01O01O0N3L3M3N3gAkNe=U1XBoNg=R1VBPOk=o0RBTOn=l0PBWOP>i0lAZOT>Z1O01O010O00N3M2M4L3N2M4M210O0001L3N2M4M0O010O00102M3N3L3N3L3N2M4M2M4M2M3N3L3M4M2M3N3L3N3L3N2Mh5"}, "image_id": 598, "id": 10600}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 331.0, 56.0, 53.0], "area": 1776, "segmentation": {"size": [512, 512], "counts": "]:c1]>O10O10FeAoN[>Q1:000000000O18G5L000000000000O2O3MO1000000000O1000O1000000000O10O10004L1O00000J60000000O10O10005K8H8HcdS7"}, "image_id": 598, "id": 10601}, {"iscrowd": 0, "category_id": 1, "bbox": [95.0, 368.0, 93.0, 62.0], "area": 2758, "segmentation": {"size": [512, 512], "counts": "el_13m?2M3N2M3N2M3N2M3N2M3N2M100O010O010O010O010O010O0EROfAn0Z>TOcAl0]>WOaAi0_>YO^Ag0b>\\O\\Ad0d>;010O010O010O10O010O010O010O010O010O010O010O0100O100O2O0O101N101N4M2M3N0O101N100O100O12M2O0O010O010O010O02O2M3N2M3N2M3N2M3N2MhcQ5"}, "image_id": 598, "id": 10602}, {"iscrowd": 0, "category_id": 1, "bbox": [490.0, 383.0, 22.0, 35.0], "area": 540, "segmentation": {"size": [512, 512], "counts": "g\\e73k?3L3M3M4M2M4L3O101N10001N10001O0O2O000O2O0QD"}, "image_id": 598, "id": 10603}, {"iscrowd": 0, "category_id": 1, "bbox": [159.0, 390.0, 80.0, 89.0], "area": 3189, "segmentation": {"size": [512, 512], "counts": "]n_21l?3N2M4M2M4M2M3O2O010O00010O001L3N2M4M2M4M200010O01M2M3N3L3M3N3L3N3L3N2N30O010O00010O01ON3L3N3L3N2M4M1N010O0010O010O03N3L3N3L3N2M4L3O2O01O01O0N3M2M3N3L3N2M4M2MeSX4"}, "image_id": 598, "id": 10604}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 393.0, 65.0, 50.0], "area": 2234, "segmentation": {"size": [512, 512], "counts": "Zi0000O1000O100000O10004L0000000O10000000O1000O10000000003L8I4LO10000000O10O1000000000O10O10000005J1000O10000000O10O100001O7H8I6JaRo6"}, "image_id": 598, "id": 10605}, {"iscrowd": 0, "category_id": 1, "bbox": [228.0, 401.0, 69.0, 100.0], "area": 3310, "segmentation": {"size": [512, 512], "counts": "W_b33k?2POMPB7m=LPB6m=MPB7m=KQB7l=MQB5m=MPB7l=MQB5m=MPB7o=IoA9Q>HkA;U>i0L3N3M21O010O01N1N3M2N210O010O0001N1M4M1N010O0100O010O010O0101N3N3L3N3M2M3N3L3N3L3N2M4M2N3O01OO2L3N3L3N2M4M2N3L3NWS[3"}, "image_id": 598, "id": 10606}, {"iscrowd": 0, "category_id": 1, "bbox": [290.0, 423.0, 60.0, 56.0], "area": 1973, "segmentation": {"size": [512, 512], "counts": "X^a43k?2N3L3CGQAGPAFPA>M2O20O01O0O1M4O010O0010O0010O010O0010O0010O0010O]AgN_>^10O010O01O01O0N3M2iNZAU1h>0010O001M2N3M2M3N3M2M4M2N3L3N2M4M2NVb`2"}, "image_id": 598, "id": 10607}, {"iscrowd": 0, "category_id": 1, "bbox": [1.0, 452.0, 87.0, 53.0], "area": 2493, "segmentation": {"size": [512, 512], "counts": "mn04i?3N2M4M2M3M4M2O20O0010O00010O010O00010O0010L301OO2L3M4M2M3O20O0010O0N3O00010O01O01O010O00010O010O00010OnNXAl0h>QO[Ao0l>0O0010O00010O010O00010O00010O010O00010O001L3M3N3N11ON3L3M4M2M3M4MUac6"}, "image_id": 598, "id": 10608}, {"iscrowd": 0, "category_id": 1, "bbox": [349.0, 456.0, 72.0, 56.0], "area": 2419, "segmentation": {"size": [512, 512], "counts": "ho^52m?1O2K5N2N1O2M3_O_OUAn0i>SOVAn0i>5N2O1N2O1N2O1N2O1N2O1O1001O1O1O001O1O001O1O001O1O001O1O001O1NO10O10O100O01000O10O10002N1N3N2N2N2M2O2N2N2M3N1O2N2M3N2N1N3N2NZQ]1"}, "image_id": 598, "id": 10609}, {"iscrowd": 0, "category_id": 1, "bbox": [272.0, 478.0, 58.0, 34.0], "area": 1243, "segmentation": {"size": [512, 512], "counts": "n_X42k?3N2N2M3N2M3N2N2M3N2M3N2N2O11O001O001O00001O001O00001O001O001O00001O001O00001O001[Oh@`0\\?01O00001O001O00001O001O0N3M2M3N^`j2"}, "image_id": 598, "id": 10610}, {"iscrowd": 0, "category_id": 1, "bbox": [106.0, 489.0, 85.0, 23.0], "area": 1059, "segmentation": {"size": [512, 512], "counts": 
"l_e13k?3L3N2N2M3N2M3N21O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001ON2N2N2001O001O00O1N2O1001O00001O001O00001O001O001O00001O001O00001O001O001O000MWPP5"}, "image_id": 598, "id": 10611}, {"iscrowd": 0, "category_id": 1, "bbox": [423.0, 496.0, 36.0, 16.0], "area": 289, "segmentation": {"size": [512, 512], "counts": "ooc61o?000O10000O10000O10000O100O10000KM\\@4c?50000O100O10000O10000O1002N3M3M3M3MRPj0"}, "image_id": 598, "id": 10612}, {"iscrowd": 0, "category_id": 1, "bbox": [74.0, 505.0, 16.0, 7.0], "area": 78, "segmentation": {"size": [512, 512], "counts": "n_U12l?2M30000001O001O00001O001O0000S`b6"}, "image_id": 598, "id": 10613}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 55.0, 53.0], "area": 1777, "segmentation": {"size": [512, 512], "counts": "437m0^>UO`An0]>TOaAn0]>i0ZAUOi>h0ZAUOh>i09N2N2N2N2N2N2N2N2N2O2M2N2NP_o3"}, "image_id": 600, "id": 10624}, {"iscrowd": 0, "category_id": 1, "bbox": [62.0, 21.0, 23.0, 22.0], "area": 295, "segmentation": {"size": [512, 512], "counts": "oPo02m?2N2O1N2N2N3M2N2N2N01O010O2N20000O1N2N3M2JY@0i?NY@0ine6"}, "image_id": 600, "id": 10625}, {"iscrowd": 0, "category_id": 1, "bbox": [412.0, 22.0, 24.0, 23.0], "area": 278, "segmentation": {"size": [512, 512], "counts": "RQ^62m?3M2O1N2N2N2N2N2N000001O0000101N2N2N2N2N2N2N2NSoU1"}, "image_id": 600, "id": 10626}, {"iscrowd": 0, "category_id": 1, "bbox": [426.0, 29.0, 29.0, 30.0], "area": 453, "segmentation": {"size": [512, 512], "counts": "[Qe62m?2N2N2N2N2O1N2N3M2N2N2N2N2N0001O3M2N2N2O1N2N2N2N2N3M2N2Nf^l0"}, "image_id": 600, "id": 10627}, {"iscrowd": 0, "category_id": 1, "bbox": [469.0, 41.0, 31.0, 30.0], "area": 478, "segmentation": {"size": [512, 512], "counts": "gaZ71n?2N2N3M2N2N2O1N3M2N2N2N2O1N000010O2N2N2N3M2N2O1N2N3M2N2N2O1NYn5"}, "image_id": 600, "id": 10628}, {"iscrowd": 0, "category_id": 1, "bbox": [161.0, 44.0, 32.0, 31.0], "area": 512, "segmentation": {"size": [512, 512], "counts": "ka`21n?2N2N3M2O1N2N2N2N3M2N2N2O1N1O000010O3M2N2N2N2N2N3N1N2N2N2N2N2NV^o4"}, "image_id": 600, "id": 10629}, {"iscrowd": 0, "category_id": 1, "bbox": [162.0, 45.0, 9.0, 8.0], "area": 36, "segmentation": {"size": [512, 512], "counts": "aQa21n?2O1N2N0011N2N2Na^Z5"}, "image_id": 600, "id": 10630}, {"iscrowd": 0, "category_id": 1, "bbox": [237.0, 61.0, 59.0, 49.0], "area": 1352, "segmentation": {"size": [512, 512], "counts": "abf31n?2O1N2N3M2N2N2N2N2N2N2N20001O00000001O00O1N2N2O2M2N0000000000001O0001O0000000000002N2O1N2N2N2N2N3M2N2N2N2N2O1N2N2N2Nbm[3"}, "image_id": 600, "id": 10631}, {"iscrowd": 0, "category_id": 1, "bbox": [132.0, 62.0, 11.0, 12.0], "area": 70, "segmentation": {"size": [512, 512], "counts": "RRR22m?2N2N2N200000N2N2N2Nm]h5"}, "image_id": 600, "id": 10632}, {"iscrowd": 0, "category_id": 1, "bbox": [420.0, 62.0, 53.0, 43.0], "area": 1170, "segmentation": {"size": [512, 512], "counts": "`Rb61n?2N2N2N2N2N2N2N2N2N2N2O2M2N2N2N2N2N2N2000000000O1N2N2N3M1O000000102M2N0000000000002N2N3M2N2O1N2N2N2N2N2Nc]c0"}, "image_id": 600, "id": 10633}, {"iscrowd": 0, "category_id": 1, "bbox": [493.0, 67.0, 19.0, 55.0], "area": 547, "segmentation": {"size": [512, 512], "counts": "Tcf71n?2N2N2X@Kb?;N2N2^OA`Aa0^>AaA`0]>BaA`0]>BaA`0]>BaA`0]>BaA`0^>A`Aa0^>A`Ab0]>b0N2N1O0nM"}, "image_id": 600, "id": 10634}, {"iscrowd": 0, "category_id": 1, "bbox": [60.0, 76.0, 54.0, 51.0], "area": 1281, "segmentation": {"size": [512, 512], "counts": "\\Sn02m?3M2N2N2N2O1N2N2N3M2N2N2N2N2O1N1O1O000000000001O0001O0000000001O0001O0001O2N2N2N2N2N2O2M2N2N2N2N2N2N3M2O1NSmV6"}, "image_id": 600, "id": 10635}, {"iscrowd": 0, 
"category_id": 1, "bbox": [154.0, 81.0, 20.0, 21.0], "area": 186, "segmentation": {"size": [512, 512], "counts": "gR]21n?2N2O1N2N2N2N30O00000000000N2N2N3N1N2NRmX5"}, "image_id": 600, "id": 10636}, {"iscrowd": 0, "category_id": 1, "bbox": [196.0, 92.0, 43.0, 44.0], "area": 924, "segmentation": {"size": [512, 512], "counts": "\\SR31n?2N2N2N2N2N2O2M2N2N2N2N2N2N2N3M2O1N2000000001O01OO1N2N2N2N2N2N3M2O1N2N2N2N2N2N2N3M2OY\\X4"}, "image_id": 600, "id": 10637}, {"iscrowd": 0, "category_id": 1, "bbox": [6.0, 106.0, 45.0, 44.0], "area": 1052, "segmentation": {"size": [512, 512], "counts": "UT32m?2N2N2N2N2N2DEPA>m>DQA>m>DRA=l>ERA=l>=N2N2N2O0O01O2N2N2N2O0O0000000000001O2N2N3N1N2N2N2N2N2N2N2N2N2N3M2OU\\V7"}, "image_id": 600, "id": 10638}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 114.0, 2.0, 2.0], "area": 3, "segmentation": {"size": [512, 512], "counts": "b32o?O]ln7"}, "image_id": 600, "id": 10639}, {"iscrowd": 0, "category_id": 1, "bbox": [287.0, 118.0, 62.0, 44.0], "area": 1554, "segmentation": {"size": [512, 512], "counts": "Wd_44l?6J5J6K5K2N0O10O100000O10O100000O10O1000O1000O1000O1000O10O10000O001O1O1O1O001O100O1O11N2N2N3M2N2O1N2N2N2N2N2N2N2N3M2N2N2Of[a2"}, "image_id": 600, "id": 10640}, {"iscrowd": 0, "category_id": 1, "bbox": [140.0, 119.0, 56.0, 57.0], "area": 1597, "segmentation": {"size": [512, 512], "counts": "WTV21n?2N2N2N3M2O1N2N2N2N2N2N2N3O00000001O000000N3M2O1N2N2N2N2N2N2N2N3M2O1N2N2OO2N2N2N2N3[O`AEb>9`AEb>9`AEb>9`AEb>9aADa>:aAE`>9bAE`>:aADa>:e0M2N2N2N2Nfkm4"}, "image_id": 600, "id": 10641}, {"iscrowd": 0, "category_id": 1, "bbox": [224.0, 127.0, 20.0, 19.0], "area": 165, "segmentation": {"size": [512, 512], "counts": "[T`32m?2N2N2O1N2N0000000000000010O2N2N2N2N2NmkU4"}, "image_id": 600, "id": 10642}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 131.0, 31.0, 32.0], "area": 507, "segmentation": {"size": [512, 512], "counts": "aTW72m?2N2N2N2N2N2N2N2N2N2N2N2N2N200000N2N2N3M2N2N2O1N2N2N2N2N2N2N][9"}, "image_id": 600, "id": 10643}, {"iscrowd": 0, "category_id": 1, "bbox": [497.0, 135.0, 15.0, 26.0], "area": 225, "segmentation": {"size": [512, 512], "counts": "ddh72m?2N2N2N2N2N2N2N2N2N2N2OO0000jK"}, "image_id": 600, "id": 10644}, {"iscrowd": 0, "category_id": 1, "bbox": [248.0, 147.0, 21.0, 21.0], "area": 229, "segmentation": {"size": [512, 512], "counts": "oTl31n?2N2N2N2N2N2N2N2N1O000001O2N2N2N3M2N2N2NV[i3"}, "image_id": 600, "id": 10645}, {"iscrowd": 0, "category_id": 1, "bbox": [54.0, 153.0, 8.0, 8.0], "area": 36, "segmentation": {"size": [512, 512], "counts": "lTk02m?2N2N2OO2N2N2NUkP7"}, "image_id": 600, "id": 10646}, {"iscrowd": 0, "category_id": 1, "bbox": [328.0, 160.0, 57.0, 55.0], "area": 1473, "segmentation": {"size": [512, 512], "counts": "QVT51n?2N2O1N3M2N2N2N2N2N2N2N2O1N3M2N2N2N2N2N00000001O000001O000000000001O0000010O2N2N2N2N2N2N2N2N3M2O1N2N2N2N2N2N2N2N^Zo1"}, "image_id": 600, "id": 10647}, {"iscrowd": 0, "category_id": 1, "bbox": [29.0, 161.0, 36.0, 31.0], "area": 607, "segmentation": {"size": [512, 512], "counts": "ce>1n?2N2N2N2N2N2N2N2N3M2OO0000000002N2N2N2O11O0O1N2N2O1N2N2N2N2N3M2N2N2N2N_Zo6"}, "image_id": 600, "id": 10648}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 163.0, 25.0, 32.0], "area": 477, "segmentation": {"size": [512, 512], "counts": "[5?`?2N3M2O1N2N2N2N2N11N2N2N3M2N2O1N2N2N2N2N3M2N2O1N]Zc7"}, "image_id": 600, "id": 10649}, {"iscrowd": 0, "category_id": 1, "bbox": [111.0, 179.0, 46.0, 45.0], "area": 1050, "segmentation": {"size": [512, 512], "counts": "Yfg12n?1N2N2N2N2N3M2O1N2N2N2N2N3M2O1N2N2N2N2N1O01O000000002O1N2N2N3M2N2N2O1N2N3M2N2N2N2N2O1N3M2NhYa5"}, 
"image_id": 600, "id": 10650}, {"iscrowd": 0, "category_id": 1, "bbox": [170.0, 180.0, 21.0, 17.0], "area": 239, "segmentation": {"size": [512, 512], "counts": "iUe23m?4K5L1O000O01000O0100000O0100000O03N3M4KTZP5"}, "image_id": 600, "id": 10651}, {"iscrowd": 0, "category_id": 1, "bbox": [291.0, 182.0, 30.0, 30.0], "area": 454, "segmentation": {"size": [512, 512], "counts": "Sfa41n?2O1N2N2N2N2N2N2N2N2N2N3M2N2000O1N2N2N2N2N2N2N2N2N2N3N1N2NkYo2"}, "image_id": 600, "id": 10652}, {"iscrowd": 0, "category_id": 1, "bbox": [203.0, 186.0, 78.0, 49.0], "area": 2256, "segmentation": {"size": [512, 512], "counts": "bfU31o?3M4L3L5L3M3L5L000O10O1000O10O10O10O1000O10O10O10O10DXO`Ah0`>[O\\Af0c>_OYA`0h><000O01000O0100000O01000O01003M3L5L1O02N3M1N01000O01000O10O1000O01000O0100000O04M4L3L4M4L3M4K4MVYc3"}, "image_id": 600, "id": 10653}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 200.0, 49.0, 56.0], "area": 1467, "segmentation": {"size": [512, 512], "counts": "S74k?2N2O1N2HGf@;X?Gf@;X?8k@[Ol>g0RA[Ol>g0RA[Ol>n0N2N2N2N2N2N2N2N2N00001O01O2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N3N1N2N2N2N2N2N2N2N2NoXW7"}, "image_id": 600, "id": 10654}, {"iscrowd": 0, "category_id": 1, "bbox": [142.0, 211.0, 34.0, 34.0], "area": 592, "segmentation": {"size": [512, 512], "counts": "RWW22m?2N2N2N2N2O1N2N2N2N2N2N2N2N2N2N3N01N2N2N2N2N2N3M2N2N2N2N2N2N2N2N2NmhW5"}, "image_id": 600, "id": 10655}, {"iscrowd": 0, "category_id": 1, "bbox": [58.0, 213.0, 30.0, 27.0], "area": 417, "segmentation": {"size": [512, 512], "counts": "QWm02m?2N2N2N2N2N2N2N2N2N00000200000000000000O1N2N1O2N2N2N2N2N2Nlhc6"}, "image_id": 600, "id": 10656}, {"iscrowd": 0, "category_id": 1, "bbox": [118.0, 227.0, 32.0, 33.0], "area": 544, "segmentation": {"size": [512, 512], "counts": "cWk12m?2N2N2N2N2N2N2N2N3N1N2N2N2N2N000002N2N2O1N3M2N2N2N2N2N2N2N2O2M^hd5"}, "image_id": 600, "id": 10657}, {"iscrowd": 0, "category_id": 1, "bbox": [252.0, 236.0, 19.0, 21.0], "area": 258, "segmentation": {"size": [512, 512], "counts": "`Wn32n?4K4M4L3L2OO1000O10O10O10O3N3M4L3L5LTXh3"}, "image_id": 600, "id": 10658}, {"iscrowd": 0, "category_id": 1, "bbox": [32.0, 244.0, 51.0, 52.0], "area": 1345, "segmentation": {"size": [512, 512], "counts": "WX`01n?2N3M2N2O1N2N2N2N2l@^Og>d0WA^Og>e0VA]Oh>e0VA]Oh>e0VA]Oh>o0N2O2M2N2N2N1O02N2N2O1N2N2N2N3M2N2N2N2N1O01O0001O2N2N3M2N2N2N2O1N2N2N2N2NfWf6"}, "image_id": 600, "id": 10659}, {"iscrowd": 0, "category_id": 1, "bbox": [228.0, 247.0, 25.0, 25.0], "area": 331, "segmentation": {"size": [512, 512], "counts": "SXb33l?2N2N2N2N2N2O1N3M2N1O001O02N2N2N2N2N2N2O2M2N2N2NnWQ4"}, "image_id": 600, "id": 10660}, {"iscrowd": 0, "category_id": 1, "bbox": [70.0, 281.0, 51.0, 56.0], "area": 1550, "segmentation": {"size": [512, 512], "counts": "hYS133Nc?6[@La?g0RA[On>f0PA[OQ?j0000001O0000000000N3N1N2N2N2N2O100000001O0001O000000000N2N2N1O02N2O1N2N2N2N2N2N3M2N2N`e\\3"}, "image_id": 600, "id": 10663}, {"iscrowd": 0, "category_id": 1, "bbox": [383.0, 308.0, 39.0, 38.0], "area": 667, "segmentation": {"size": [512, 512], "counts": "\\jo51n?2O1N2N2N2N2N2N3M2N2N2O1N000000000000001O01O00000000001O2N3N1N2N2N2N2N2N2N2NRf\\1"}, "image_id": 600, "id": 10664}, {"iscrowd": 0, "category_id": 1, "bbox": [155.0, 309.0, 31.0, 30.0], "area": 479, "segmentation": {"size": [512, 512], "counts": "Uj]22m?2N2N2N2N3M2O1N2N2N2N2N1O00000010O002N2N2N2N2N2N2N2O2M2N2N2NoeR5"}, "image_id": 600, "id": 10665}, {"iscrowd": 0, "category_id": 1, "bbox": [102.0, 329.0, 54.0, 49.0], "area": 1340, "segmentation": {"size": [512, 512], "counts": 
"nZc11n?2N2N3M2N2N2O1N2N2f@^OS?d0k@^OS?i0N3M2N2N2N2N2N2O1N2N000002N2O1N2N2N2N3M2N2N00001O2N2N2O1N00000000002N2N2N2N2N2O1N2N2NXea5"}, "image_id": 600, "id": 10666}, {"iscrowd": 0, "category_id": 1, "bbox": [185.0, 333.0, 54.0, 48.0], "area": 1322, "segmentation": {"size": [512, 512], "counts": "[kl22m?2O1N2N2N2N2N3M2N2N2N2N10O00000001O1O2N2N2N2N2O1N2N2N1O000000000002N2N2N2N2N2O1N2N2N2N2N2N2N3M2N2N2N2N2N2NoTX4"}, "image_id": 600, "id": 10667}, {"iscrowd": 0, "category_id": 1, "bbox": [433.0, 338.0, 56.0, 49.0], "area": 1967, "segmentation": {"size": [512, 512], "counts": "ekh62i?6I6J6K5J6K6M20000001O01O000000010O00000001O01O00000O1O2O0\\AiN`>[100000000101N0000000K6O2N0001O0001O00000L4J6K6I6K5JZU;"}, "image_id": 600, "id": 10668}, {"iscrowd": 0, "category_id": 1, "bbox": [496.0, 340.0, 16.0, 29.0], "area": 248, "segmentation": {"size": [512, 512], "counts": "R[h71n?2N2N2N2N2N2O1N2N3M2N2N2N2N000]E"}, "image_id": 600, "id": 10669}, {"iscrowd": 0, "category_id": 1, "bbox": [243.0, 358.0, 79.0, 61.0], "area": 2542, "segmentation": {"size": [512, 512], "counts": "Yli31n?2N2N2N2O1N2N3M2N2N2N2N2O1N3M2N00000000010O000000000000012M2N2N2N2N20001O000O1O1N2N2N3M2N2N1O010O0001O2N2N2N2N2O2M2N2N2N2N2N2N3N1N2N2N2N2N2N3M2O1N2N2N2N2N3Mlcn2"}, "image_id": 600, "id": 10670}, {"iscrowd": 0, "category_id": 1, "bbox": [336.0, 358.0, 63.0, 72.0], "area": 1970, "segmentation": {"size": [512, 512], "counts": "n[X51n?2O1N3M2N2N2N2N2N2N3N1N2N2N2N2N2N2N3M2O1N2N2N2N2N3M2N2000000001O0eNgAo0X>oNjAQ1V>mNlAS1T>kNoAT1Q>kNPBU1P>iNRBV1o=hNSBV1]>N20001O000001ON2N2O1N3M2N2N2N2N2N2N3B_@9e?N2N2N2NSSh1"}, "image_id": 600, "id": 10671}, {"iscrowd": 0, "category_id": 1, "bbox": [195.0, 378.0, 24.0, 24.0], "area": 273, "segmentation": {"size": [512, 512], "counts": "RlQ31o?1N2N2N2N2N2N2N2N3O0000000000O1N2N3M2N2N2N2O1NhSb4"}, "image_id": 600, "id": 10672}, {"iscrowd": 0, "category_id": 1, "bbox": [141.0, 385.0, 66.0, 85.0], "area": 2525, "segmentation": {"size": [512, 512], "counts": "llV22m?2N3M2N2N2N2O1N2N3M2N2N2N2N2O1O2O0000000N2O1N3bAjNm=X1QBjNm=X1QBkNl=V1SBlNk=T1UBnNi=R1WBPOg=R1WBPOh=Q1WBPOg=R1WBPOg=g1M2N2O11O0O0O001O000[O\\BoNd=?]BnN0=04c=a0bBYOM4a=c0dBWOM5^=d0gBUON4[=h0hBROO4Z=i0iBQOO4X=k0lBnNN5X=k0\\CSOgXOkAh0U>ZOiAf0X>[OfAe0Z>]OdAc0\\>@aA`0_>B`A?^>C`A>_>a000000001O01O0000000000000001O01O1O2K_AhNc>V15N2N2N2N3N1N2N2N2N2N2N2N2N2N3N1N2N2N2NfQk1"}, "image_id": 600, "id": 10681}, {"iscrowd": 0, "category_id": 1, "bbox": [318.0, 437.0, 10.0, 11.0], "area": 63, "segmentation": {"size": [512, 512], "counts": "j]o42m?2N2N2N2N11N2N2M3NXbk2"}, "image_id": 600, "id": 10682}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 441.0, 13.0, 25.0], "area": 176, "segmentation": {"size": [512, 512], "counts": "i=i0X?O1N2N2N2N2N2N3M2N2N2N2NkQi7"}, "image_id": 600, "id": 10683}, {"iscrowd": 0, "category_id": 1, "bbox": [439.0, 444.0, 58.0, 66.0], "area": 1756, "segmentation": {"size": [512, 512], "counts": "Yok61n?2N2N2N2N2N2N2O1N2N2N2N2N3M2N2\\OROSBP1k=ROSBP1k=ROSBP1k=ROSBP1k=ROSBn0m=TOQBl0o=VOoAj0R>XOkAh0U>a000000000001O2N2O1N2N2N2N2N2N2N2N2N2N2N3M2N2N2N2N2O1N2N2N2N2N2N2N2NYQ7"}, "image_id": 600, "id": 10684}, {"iscrowd": 0, "category_id": 1, "bbox": [241.0, 458.0, 54.0, 54.0], "area": 1491, "segmentation": {"size": [512, 512], "counts": "Xoh32m?2N2N2N2N2N2`@DY?>e@DY?c0O1N3M2N2N2N2N2N2N1O1O1O1O01O0000000001O0000000001O2N2N2N3M2N2O1N2N2N2N2N2N2N2N2N3M2N2O1N2NQQ\\3"}, "image_id": 600, "id": 10685}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 466.0, 60.0, 46.0], "area": 1526, "segmentation": {"size": [512, 512], "counts": 
"Z?4l?3M2M3N2M3N3L3N2M10000O100O100O100O100O1000O010OJTOYAm0g>UOWAj0i>70O010O0100O010O0100O010O010O3N2N2M4M2M3N2M10O010000O3N2M4M2M3N2Md`Q7"}, "image_id": 600, "id": 10686}, {"iscrowd": 0, "category_id": 1, "bbox": [397.0, 493.0, 37.0, 19.0], "area": 356, "segmentation": {"size": [512, 512], "counts": "ooV61n?1O1O1O1O100O1O1O1O1O1O1O1O100O1O1O1O1O11O2N1O1O1O1O1O1O1O2N1O1O1O1O1O1OQ`V1"}, "image_id": 600, "id": 10687}, {"iscrowd": 0, "category_id": 1, "bbox": [371.0, 504.0, 16.0, 8.0], "area": 72, "segmentation": {"size": [512, 512], "counts": "ooi51n?1O1O1O1O1O1O1001O1O1O1O1O1O1OQPn1"}, "image_id": 600, "id": 10688}, {"iscrowd": 0, "category_id": 1, "bbox": [295.0, 509.0, 7.0, 3.0], "area": 13, "segmentation": {"size": [512, 512], "counts": "ooc41o?0O1O1001O1OQ`X3"}, "image_id": 600, "id": 10689}, {"iscrowd": 0, "category_id": 1, "bbox": [9.0, 0.0, 10.0, 4.0], "area": 26, "segmentation": {"size": [512, 512], "counts": "P`42n?2N0000O100O100O10P`f7"}, "image_id": 601, "id": 10690}, {"iscrowd": 0, "category_id": 1, "bbox": [258.0, 0.0, 3.0, 2.0], "area": 4, "segmentation": {"size": [512, 512], "counts": "PPQ41o?1OOQ`m3"}, "image_id": 601, "id": 10691}, {"iscrowd": 0, "category_id": 1, "bbox": [271.0, 0.0, 46.0, 42.0], "area": 1012, "segmentation": {"size": [512, 512], "counts": "e`W41n?2N2N2N2O1N3M2N2N2N2N2N2N2N2O2M2N2N2N0000000001O1O1O1O100O1O1O2N2N3M2N2N2N2O1N2N2N2N3M2N2Na_Q3"}, "image_id": 601, "id": 10692}, {"iscrowd": 0, "category_id": 1, "bbox": [324.0, 0.0, 11.0, 5.0], "area": 33, "segmentation": {"size": [512, 512], "counts": "PPR51o?1O1O1O1O00O1O100O1OQ`h2"}, "image_id": 601, "id": 10693}, {"iscrowd": 0, "category_id": 1, "bbox": [351.0, 0.0, 38.0, 29.0], "area": 498, "segmentation": {"size": [512, 512], "counts": "b`_51n?2N2N2N2O1N2N2N2N2N2N000000000000000001O1O1O100O1O1O1O1O1O1O1O1O1O1O1O1O2NP`m1"}, "image_id": 601, "id": 10694}, {"iscrowd": 0, "category_id": 1, "bbox": [390.0, 0.0, 46.0, 26.0], "area": 683, "segmentation": {"size": [512, 512], "counts": "UPS61n?2N2N2O1N2N3N1O1O1O1O1O1O1O2N1O1O1O1O00O100O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O100O1O1O1O1O3MooU1"}, "image_id": 601, "id": 10695}, {"iscrowd": 0, "category_id": 1, "bbox": [465.0, 0.0, 31.0, 31.0], "area": 484, "segmentation": {"size": [512, 512], "counts": "^`X72m?2N2N2N2N2O2M2N2N2N2N2N2N2N2N02N2N2N2N2N2O1N2N3M2N2N2N2N2N2O`o7"}, "image_id": 601, "id": 10696}, {"iscrowd": 0, "category_id": 1, "bbox": [7.0, 1.0, 23.0, 19.0], "area": 249, "segmentation": {"size": [512, 512], "counts": "X`33m?2N2M4M2M10O10O010O010O10O10O010O0102N2M4M2Mgo`7"}, "image_id": 601, "id": 10697}, {"iscrowd": 0, "category_id": 1, "bbox": [3.0, 4.0, 98.0, 98.0], "area": 3114, "segmentation": {"size": [512, 512], "counts": "gb12m?2FMd@5Z?Md@5Z?Md@5Z?;M2N2N2O1N2N2N2N0000001O00000001O000000000000JoN^AR1a>PO]AP1c>60000000000001O00011N2N2N200001O0N1O0001O00000000000JVAWOj>i0XAVOg>j0[ATOe>m060CROiAn0W>TOhAk0X>WOfAi0Z>YOdAg0]>ZOaAf0_>]O^Ac0b>=00000001O00000001O1O2N2N2N2N2N2O2M2N2N2N2N2N2N2N2N2N2O2M2N2NW_]6"}, "image_id": 601, "id": 10698}, {"iscrowd": 0, "category_id": 1, "bbox": [482.0, 20.0, 23.0, 22.0], "area": 244, "segmentation": {"size": [512, 512], "counts": "PQa71n?2N2N2N3N1N2N2N2N01O01O000003M2O1N2N3M2N2O1NS_3"}, "image_id": 601, "id": 10699}, {"iscrowd": 0, "category_id": 1, "bbox": [387.0, 28.0, 16.0, 16.0], "area": 140, "segmentation": {"size": [512, 512], "counts": "TaQ62m?2N2N2N2N2N2N1O02N2N2O1N2N2N2Nn^f1"}, "image_id": 601, "id": 10700}, {"iscrowd": 0, "category_id": 1, "bbox": [392.0, 33.0, 29.0, 29.0], "area": 450, "segmentation": {"size": [512, 
512], "counts": "aQT63l?2N2N2N2N2N2N2N2N2N2N1O1O00000001O2N2N2N2N2N2N2O1N3M2N2Nd^]1"}, "image_id": 601, "id": 10701}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 36.0, 19.0, 28.0], "area": 283, "segmentation": {"size": [512, 512], "counts": "\\1d0[?0001O000001O000002N2N3M2N2N2N2O1N2Nc^f7"}, "image_id": 601, "id": 10702}, {"iscrowd": 0, "category_id": 1, "bbox": [257.0, 36.0, 23.0, 24.0], "area": 269, "segmentation": {"size": [512, 512], "counts": "\\aP41n?3M2N2N2N2N2N2N3N1001O00000N2N2N2N2O2M2N2N2N^nc3"}, "image_id": 601, "id": 10703}, {"iscrowd": 0, "category_id": 1, "bbox": [464.0, 36.0, 34.0, 26.0], "area": 430, "segmentation": {"size": [512, 512], "counts": "dQX71n?2N2N2N2N2N2N2N2N2OO000000000000011N2N2N21O000000000001N1N2N2N2N2N\\n6"}, "image_id": 601, "id": 10704}, {"iscrowd": 0, "category_id": 1, "bbox": [375.0, 39.0, 17.0, 16.0], "area": 136, "segmentation": {"size": [512, 512], "counts": "_ak51n?2N2N2O2M2N2N1O0011N3M2N2N2N2N2Oank1"}, "image_id": 601, "id": 10705}, {"iscrowd": 0, "category_id": 1, "bbox": [317.0, 41.0, 51.0, 50.0], "area": 1222, "segmentation": {"size": [512, 512], "counts": "Vbn41n?2N2N2N2N2N2N2N2O1N2N2N3M2N2N2N2N2N2N2N00000000000001O000000001O2O2M2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N3NSnW2"}, "image_id": 601, "id": 10706}, {"iscrowd": 0, "category_id": 1, "bbox": [84.0, 51.0, 56.0, 65.0], "area": 1490, "segmentation": {"size": [512, 512], "counts": "XSZ12m?2N2N2N3M2N2O1N2N2N1\\O]OjAc0V>_OhAa0X>BeA>[>DcA<]>FaA:`>G^A9b>I\\A7d>KZA5f>b000000001O0000000001O00000000000001O01O2N2N2N2N2N2N3M2N2N2_Od@;^?Cd@A\\Aa0c>@[Ac0b>@[Ab0c>@\\Aa0b>B[A`0c>>0101N1O2001OO1M3M1O1O100O1O2N11O1O2N1O2N2N1O2O0O2N2N1O2N1O2N1O2N2KRAVOn>i06M2N3N2M2N3N1N3M2N3N2M\\lm0"}, "image_id": 601, "id": 10714}, {"iscrowd": 0, "category_id": 1, "bbox": [17.0, 103.0, 14.0, 15.0], "area": 112, "segmentation": {"size": [512, 512], "counts": "^c81n?2N3M2N2N2N2N02N2N2N2O1N3Mc\\`7"}, "image_id": 601, "id": 10715}, {"iscrowd": 0, "category_id": 1, "bbox": [26.0, 107.0, 57.0, 51.0], "area": 1527, "segmentation": {"size": [512, 512], "counts": "VT=1n?2N2N2N3M2N2N2N2O1N2N2N3HWOTAk0j>WOTAk0j>7N2N2N2N2O1N2N0001O2N2O1N2N2N3M1O00000001O00000002N2N2N2N2N2O1N3M2N2N2N2N2N2N2O1N3MS\\f6"}, "image_id": 601, "id": 10716}, {"iscrowd": 0, "category_id": 1, "bbox": [466.0, 108.0, 28.0, 29.0], "area": 407, "segmentation": {"size": [512, 512], "counts": "gSY72n?1N2N2N2N2N2N2N2N2N2N3M2O1001O0N2N2N2N2O2M2N2N2N2N3M2NTl8"}, "image_id": 601, "id": 10717}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 125.0, 4.0, 8.0], "area": 21, "segmentation": {"size": [512, 512], "counts": "m38i?N2O2MPlm7"}, "image_id": 601, "id": 10718}, {"iscrowd": 0, "category_id": 1, "bbox": [357.0, 137.0, 22.0, 22.0], "area": 233, "segmentation": {"size": [512, 512], "counts": "fdb53l?2N2N2N2N2O1N1O00000000000002N2N2O1N3M2N2N`[R2"}, "image_id": 601, "id": 10719}, {"iscrowd": 0, "category_id": 1, "bbox": [166.0, 138.0, 54.0, 60.0], "area": 1533, "segmentation": {"size": [512, 512], "counts": "hUc21n?2N2@LQA6m>LQA6m>LQA6m>LQA6m>LQA6m>LQA6m>LRA5l>a0N2N2N2N000000001O0001O0000000000000000000000002N2N2N2O1N2N2N2N2N2N2N2N2N3M2N2N2N2N2N2NRka4"}, "image_id": 601, "id": 10720}, {"iscrowd": 0, "category_id": 1, "bbox": [423.0, 145.0, 53.0, 55.0], "area": 1587, "segmentation": {"size": [512, 512], "counts": "^ec62m?2N3M2N2N2N2N2N2N2N2N2N2N2O1N2N2N2N3M2N2N2N2N0000000000000002N2N2N2O1N2N0000000000002N2N2@PAMR?1PAMR?1PAMR?1PAMR?1PAMS?0PAMR?1PANQ?0dka0"}, "image_id": 601, "id": 10721}, {"iscrowd": 0, "category_id": 1, "bbox": [44.0, 156.0, 40.0, 41.0], "area": 717, "segmentation": 
{"size": [512, 512], "counts": "XUf01n?2N2N2O1N2N2N2N2N2N2N2N2N20001O00000000000000000001O000N2N2N2N2N2N2N2N2N2N2N2NZje6"}, "image_id": 601, "id": 10722}, {"iscrowd": 0, "category_id": 1, "bbox": [221.0, 162.0, 29.0, 29.0], "area": 430, "segmentation": {"size": [512, 512], "counts": "^e^31n?2N2N2N2N2N2N2N2N2N2O1N3M2O1000N2N2N2N3M2N2N2O1N2N2N2N2N`jR4"}, "image_id": 601, "id": 10723}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 167.0, 21.0, 28.0], "area": 355, "segmentation": {"size": [512, 512], "counts": "^5`0_?2N2N2N2O1N000002O1N2N2N2N2N3M2O1N2N2N2N]Ze7"}, "image_id": 601, "id": 10724}, {"iscrowd": 0, "category_id": 1, "bbox": [470.0, 177.0, 36.0, 33.0], "area": 482, "segmentation": {"size": [512, 512], "counts": "ZV[71n?3N1N2N2N2N2N0MDd@;\\?Gb@9^?50O00000001O2N010O00000000010O0002N2N2N2O1N2N3M2N2NVj2"}, "image_id": 601, "id": 10725}, {"iscrowd": 0, "category_id": 1, "bbox": [39.0, 192.0, 32.0, 31.0], "area": 501, "segmentation": {"size": [512, 512], "counts": "`fc02m?2N2N2O1N2N3M2N2N2N2N2O1N00001O000001O2N2N2N3N1N2N2N2N2N2N3M2ObYl6"}, "image_id": 601, "id": 10726}, {"iscrowd": 0, "category_id": 1, "bbox": [105.0, 194.0, 75.0, 55.0], "area": 1839, "segmentation": {"size": [512, 512], "counts": "Sgd12m?2N2N2N2N2N2N2O1N2N3M2N2N2N2N2N2N2O1N2N00000000001O000002N2N2N2N3M0000010O0000000000002N2O10O1N3M001O00000001O2N2N2N2N2O1Ke@B^?;5N1O0001O000001O2N2N2N2NgiU5"}, "image_id": 601, "id": 10727}, {"iscrowd": 0, "category_id": 1, "bbox": [192.0, 196.0, 56.0, 58.0], "area": 1585, "segmentation": {"size": [512, 512], "counts": "mVP31n?2N2N2N3M2N2O1N2f@@Q?b0m@@Q?b0m@@Q?i0o@TOk>R1N2N21O00O1N2N2N2N0001O000000000000000000001O2N3N1N2HVAWOl>g0VAWOl>g0VAWOl>g08N1O00002N2N2N2N2N2N2N2O1N2N\\iS4"}, "image_id": 601, "id": 10728}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 203.0, 17.0, 35.0], "area": 319, "segmentation": {"size": [512, 512], "counts": "[6S1n>N2N2N2N2N2N3M2N2N2O1N2N2N2N2N3MTYg7"}, "image_id": 601, "id": 10729}, {"iscrowd": 0, "category_id": 1, "bbox": [466.0, 206.0, 46.0, 56.0], "area": 1420, "segmentation": {"size": [512, 512], "counts": "]WY72m?2N2N2N2O1N2N2N3M2N2N2N2N2N2N2O1N3M2N2N2N2N00000001O0000101N2N2N2N000000001O01O0000000001OcI"}, "image_id": 601, "id": 10730}, {"iscrowd": 0, "category_id": 1, "bbox": [138.0, 225.0, 55.0, 63.0], "area": 1827, "segmentation": {"size": [512, 512], "counts": "oWU22m?2_@NQ?4m@NQ?5l@MS?4k@NS?4k@OR?3l@OR?a0N2N2N2N2N2N3N1N2N1O000000000010O0000000000000001O01O001O2N2N2N2N2N2O1XOSACTA=m>@VA>k>@WA>k>@WA>k>@WA>W?N2N2O1N3M2N2NoWo4"}, "image_id": 601, "id": 10731}, {"iscrowd": 0, "category_id": 1, "bbox": [90.0, 235.0, 19.0, 19.0], "area": 178, "segmentation": {"size": [512, 512], "counts": "bW]11o?1N2N2N2N2N2N2N2O2O00N2N3M2N2N2O1N2N\\XY6"}, "image_id": 601, "id": 10732}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 240.0, 49.0, 54.0], "area": 1369, "segmentation": {"size": [512, 512], "counts": "Z8b0]?2O1N2N2N2N2N2N3M2N1O01O000001O0000000001O000001O0000000001O1O2N2O1N2N2N2N2N3M2N2N2O1N2N2N2N3M2NnWW7"}, "image_id": 601, "id": 10733}, {"iscrowd": 0, "category_id": 1, "bbox": [111.0, 249.0, 12.0, 12.0], "area": 78, "segmentation": {"size": [512, 512], "counts": "mgg12m?2N2N2N2O1000N2N2N2N2OQXR6"}, "image_id": 601, "id": 10734}, {"iscrowd": 0, "category_id": 1, "bbox": [229.0, 255.0, 128.0, 122.0], "area": 4391, "segmentation": {"size": [512, 512], "counts": 
"Rkb31n?2N2N2N2N2N3M2N2N2N2N2N2N2N2N3M2N2N2N2N2OO2N2N2N2N2N1O000000000000FROcAn0]>TOaAl0_>WO^Ai0b>YO\\Ag0d>:01O2N1O001O000000000000000001O0000000N2000000001O000000000000000001O2N2N2N2N2N2N1O000001O00000ESOdAm0\\>UObAk0^>WO`Ai0`>YO^Ag0b>;000002O1N1O000000000000001O000001O2I_AlNc>R1_AlNc>R17N2N2N2N2N2N2N2N2N2N3M2N2O1N2N2N2N]W]2"}, "image_id": 601, "id": 10735}, {"iscrowd": 0, "category_id": 1, "bbox": [109.0, 258.0, 36.0, 36.0], "area": 646, "segmentation": {"size": [512, 512], "counts": "`hf11n?2N2N2N2N2N2N2N2N2N2O1N2N2N3M2000000001O0O1O1N2N2N2N2N2N2N2N2N2N2N2N2NZWg5"}, "image_id": 601, "id": 10736}, {"iscrowd": 0, "category_id": 1, "bbox": [60.0, 268.0, 63.0, 57.0], "area": 1658, "segmentation": {"size": [512, 512], "counts": "fXn01n?3M2N2N2O1N2O1000O10000001O01ON2N2O10000O1N3M2N2N2O1N2N2N2N3M2N2N2N2O1N2N20001O000000N2N2N2N3M2N2N2N2O1N2XOQAm0cAUO]>l0`AWO`>h0^AZOb>S10O010O010O010O010O010O010O0N3M2N3M2N3M2N3N2M2N3M2N3M2N3M2N3MoUa6"}, "image_id": 601, "id": 10740}, {"iscrowd": 0, "category_id": 1, "bbox": [322.0, 323.0, 64.0, 57.0], "area": 1818, "segmentation": {"size": [512, 512], "counts": "V[Q51_?0o@2P?On@3P?On@3P?0m@2Q?0m@2Q?0n@1P?1n@1P?a0N2N2N201O0001O000000O1N2N0000000000000003N1N0000001O0000000000000001O01O2N2N2N2N2N2N2N001O011N2N2N3M2N2N2N2N2N_en1"}, "image_id": 601, "id": 10741}, {"iscrowd": 0, "category_id": 1, "bbox": [275.0, 358.0, 31.0, 32.0], "area": 521, "segmentation": {"size": [512, 512], "counts": "akY41n?2N2U@Lg?8N2O2M2N2N2N2N2N2N2O2O01OO1N02N2N3M2N2N2N2N2O1N2N3M2N2NZdV3"}, "image_id": 601, "id": 10742}, {"iscrowd": 0, "category_id": 1, "bbox": [315.0, 367.0, 116.0, 68.0], "area": 3195, "segmentation": {"size": [512, 512], "counts": "Rmm42m?2N2O1N2N2N2N2N2N2N2N2N3M2N2N0001O0001O000000000000000000000001O01O000000000000000002N2Jm@\\OU?b06N2O1N2N2N2N2N01O2N2N2N2N2N2N2m@[Oh>g0VA[Oh>h0UA[Oh>g0VA[Oh>P1O1N2N2N2N2N2N2N2N1O0002N2N2N001O2O1N2N3M2N2N2N2N2N1O0000001O0001O00000001O2N3M2N2N2N2N2N2N2O1N2N2N2N2NPTX1"}, "image_id": 601, "id": 10743}, {"iscrowd": 0, "category_id": 1, "bbox": [440.0, 374.0, 28.0, 27.0], "area": 340, "segmentation": {"size": [512, 512], "counts": "n[l61n?2N2N2N2N2N2N2N2O10000000000000000000N2N2O1N2N2N2N2N2Nice0"}, "image_id": 601, "id": 10744}, {"iscrowd": 0, "category_id": 1, "bbox": [182.0, 384.0, 60.0, 54.0], "area": 1618, "segmentation": {"size": [512, 512], "counts": "b\\k22m?2O2M2N2N2N2N2N2N2N3M2O1N2N2N2N2N2N2N3M200001O00000001O000000N1O0001O0001O2N2N3M2N2N2O1N2N2N1O00001O2N2N2O1N3M2N2N2N2NWcV4"}, "image_id": 601, "id": 10745}, {"iscrowd": 0, "category_id": 1, "bbox": [420.0, 396.0, 45.0, 44.0], "area": 998, "segmentation": {"size": [512, 512], "counts": "S]b62m?2N2N2N2O1N2N2N2N3M2N2N2N2N2N2N2N2N2N000010O0000001O2N2N2N2N2N2N3M2N2N2N2N2N2O1N2N2N2N2NRSg0"}, "image_id": 601, "id": 10746}, {"iscrowd": 0, "category_id": 1, "bbox": [274.0, 400.0, 11.0, 11.0], "area": 61, "segmentation": {"size": [512, 512], "counts": "e\\Y41n?2N2N2N2N20N2N2N2N2N\\Sa3"}, "image_id": 601, "id": 10747}, {"iscrowd": 0, "category_id": 1, "bbox": [155.0, 412.0, 51.0, 58.0], "area": 1558, "segmentation": {"size": [512, 512], "counts": "`m]21n?2N2N2N3M2N2f@Dm>>n@GP?f0N2N2N2TAnNg>W1N2N2O1N2N2N2N2N2N2O10N2N2N3N1N2N2N2N2N2N2N2N2N2N2N2N2N2N3M2O1N2N2N2N2N2N2N2NWbh4"}, "image_id": 601, "id": 10748}, {"iscrowd": 0, "category_id": 1, "bbox": [249.0, 415.0, 22.0, 22.0], "area": 238, "segmentation": {"size": [512, 512], "counts": "Vml32m?2N2N2N2N2N2O1N2O11O0000000N2N2N2N2N2N2N2NeRh3"}, "image_id": 601, "id": 10749}, {"iscrowd": 0, "category_id": 1, "bbox": [357.0, 425.0, 58.0, 84.0], "area": 
1977, "segmentation": {"size": [512, 512], "counts": "mnb52m?2N2N2N2N2N2M3N2N2N2N2PAXOe>j0YAXOe>j0YAXOe>j0YAXOe>S1N2N3M2N2N2N01O000000C^N]Bb1d=_NZBa1f=aNXB_1h=cNVB]1j=eNTB\\1k=GZA:f>GQAL4=j>IPAL4ORA2m>a0LQOVAP1i>4O1O1O1O100O1O1O1O1O11O1O1O1O1O1O1O2N1O1N2O1N2N3M2N2N2N2N01O0000000003M2N2D_@40Kb?O`@4i?N2OfPf3"}, "image_id": 601, "id": 10756}, {"iscrowd": 0, "category_id": 1, "bbox": [151.0, 468.0, 17.0, 18.0], "area": 161, "segmentation": {"size": [512, 512], "counts": "kn[22m?2N2N3M2O1N2N2N2OO2N3M2M3N2N2N2NUa[5"}, "image_id": 601, "id": 10757}, {"iscrowd": 0, "category_id": 1, "bbox": [426.0, 479.0, 31.0, 29.0], "area": 464, "segmentation": {"size": [512, 512], "counts": "]_e61n?2N2N2N2N2N2O1N2N2N2N2N2N2N2N11N2N2N2N2N2N2N2N2N20000O1N2IX@3l?NaPk0"}, "image_id": 601, "id": 10758}, {"iscrowd": 0, "category_id": 1, "bbox": [42.0, 493.0, 52.0, 19.0], "area": 695, "segmentation": {"size": [512, 512], "counts": "o_e01o?0O1O1O1O1O1O1O1O1O1O100O1O1O1O100O100O100O1000000O1000000000000000000001O00001O00001O001O1O0J_@Mb?1`@Na?0a@O`?Ob@0h?OQ``6"}, "image_id": 601, "id": 10759}, {"iscrowd": 0, "category_id": 1, "bbox": [506.0, 495.0, 6.0, 13.0], "area": 43, "segmentation": {"size": [512, 512], "counts": "d_m72m?2N2N2N2N3_@"}, "image_id": 601, "id": 10760}, {"iscrowd": 0, "category_id": 1, "bbox": [338.0, 501.0, 21.0, 11.0], "area": 115, "segmentation": {"size": [512, 512], "counts": "o_Y51n?1O100O1O1O1O1O1O1O1O11O2N1O1O1O1O1O1O1OQP\\2"}, "image_id": 601, "id": 10761}, {"iscrowd": 0, "category_id": 1, "bbox": [410.0, 507.0, 9.0, 5.0], "area": 25, "segmentation": {"size": [512, 512], "counts": "o_]61n?1O1O1O11O1O1O1OQP^1"}, "image_id": 601, "id": 10762}, {"iscrowd": 0, "category_id": 1, "bbox": [288.0, 508.0, 9.0, 4.0], "area": 21, "segmentation": {"size": [512, 512], "counts": "o_`41n?1O1O1001O1O1O00QP[3"}, "image_id": 601, "id": 10763}, {"iscrowd": 0, "category_id": 1, "bbox": [376.0, 511.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "o_l51P`S2"}, "image_id": 601, "id": 10764}, {"iscrowd": 0, "category_id": 1, "bbox": [126.0, 154.0, 22.0, 22.0], "area": 384, "segmentation": {"size": [512, 512], "counts": "QUo17g?4M3M2N2N101N101O0000000000O2O0O2O1N2N2M4LQke5"}, "image_id": 603, "id": 10765}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 106.0, 116.0], "area": 7140, "segmentation": {"size": [512, 512], "counts": "U1]2c=0OF_BXN_=h1cBWN\\=i1gBUNX=l1iBTNU=n1iBSNV=o1iBQNV=Q2hBPNW=]2O1O10ZOkBaNT=_1nB`NQ=`1RC^NmJcA4^>KdA4\\>KfA3\\>LeA2]>LeA2]>LeA2^>KdA3^>KdA3^>KdA3\\>MgA0Y>0iANW>2kALX>1jANW>1eP[6"}, "image_id": 604, "id": 10766}, {"iscrowd": 0, "category_id": 1, "bbox": [227.0, 0.0, 26.0, 13.0], "area": 187, "segmentation": {"size": [512, 512], "counts": "P`a31o?2N1O1O1O1O1O1O2N1O1O00O100O1O1O1O1O1O1O100O1O1O1OQ`Q4"}, "image_id": 604, "id": 10767}, {"iscrowd": 0, "category_id": 1, "bbox": [253.0, 0.0, 58.0, 52.0], "area": 1474, "segmentation": {"size": [512, 512], "counts": "n`n32m?2N2O1N2N2N2N2N2N3M2N2N2O1N2N2N2N2N3M00000001O01O0000000000000001O010O1O1O1O1O1O1O1O1O101N2VOk@f0Y?N2N2N2N2N2N2N2O2M2N2Na_T3"}, "image_id": 604, "id": 10768}, {"iscrowd": 0, "category_id": 1, "bbox": [334.0, 0.0, 36.0, 23.0], "area": 501, "segmentation": {"size": [512, 512], "counts": "SPW52m?2N2N2O1O1O1O1O1O1O2N1O1O1O1O00O1O1O1001O1O2N1O1OO2N2N3M2N2N2N2O1N2N2NfoV2"}, "image_id": 604, "id": 10769}, {"iscrowd": 0, "category_id": 1, "bbox": [369.0, 0.0, 13.0, 12.0], "area": 82, "segmentation": {"size": [512, 512], "counts": "V`h51n?2N2N2N2N2N01O1O2N2N2N2NmoP2"}, "image_id": 604, "id": 10770}, {"iscrowd": 0, 
"category_id": 1, "bbox": [388.0, 0.0, 69.0, 49.0], "area": 1889, "segmentation": {"size": [512, 512], "counts": "cPR62m?2N2N2N2O1N2N2N2N2N2N2N2N2N3M2N2N2N2N2N2O1O1O1O1O1O1O1O00O1O1O2N2N2N2N2N2N2N3M2N2N2N2N01O002N3M2O0O000000001O02N2N2N2N2N2N2N2N2N2N2N4L2N`_k0"}, "image_id": 604, "id": 10771}, {"iscrowd": 0, "category_id": 1, "bbox": [494.0, 6.0, 18.0, 19.0], "area": 180, "segmentation": {"size": [512, 512], "counts": "`Pg72m?2N2N2N3M2N2N1O000002N2O2M2N2N2N2NC"}, "image_id": 604, "id": 10772}, {"iscrowd": 0, "category_id": 1, "bbox": [350.0, 27.0, 44.0, 45.0], "area": 842, "segmentation": {"size": [512, 512], "counts": "fQ_51o?1N2N2N2N2N2N3M2N2O1N2N2N2N002N2N2O1N00ITA[Ol>e0VAYOj>g0XAWOh>i0700010O0000000002N3M2O1N2N2N2N2N2N3M2N2Ognj1"}, "image_id": 604, "id": 10773}, {"iscrowd": 0, "category_id": 1, "bbox": [287.0, 31.0, 54.0, 61.0], "area": 1648, "segmentation": {"size": [512, 512], "counts": "Vb_42m?2N2N2N2N2N2N2N2H@m@b0Q?@m@b0Q?8N2N2N2N2N3M2N2N2N1O00000000000000000101N1O00000000002N3M2N2TOVA?l>_OVA?l>_OVA?l>_OVA?l>_OVA?W?N2N2N2N2N2N2N2NV^e2"}, "image_id": 604, "id": 10774}, {"iscrowd": 0, "category_id": 1, "bbox": [483.0, 35.0, 29.0, 29.0], "area": 410, "segmentation": {"size": [512, 512], "counts": "caa71o?1N3M2N2N2N2N2N2N3M2N10O0000000000012M2N2N2N2N2N2N3N1N2NbN"}, "image_id": 604, "id": 10775}, {"iscrowd": 0, "category_id": 1, "bbox": [486.0, 58.0, 26.0, 29.0], "area": 402, "segmentation": {"size": [512, 512], "counts": "[Rc72m?2N2N2N2N2N3N1N2N2N1O0000000001O01O0002N2N2N2N3M2NPN"}, "image_id": 604, "id": 10776}, {"iscrowd": 0, "category_id": 1, "bbox": [96.0, 61.0, 58.0, 69.0], "area": 1613, "segmentation": {"size": [512, 512], "counts": "PS`11n?2N3M2N2N2N2N2O1N2N3SA\\OY>f0eA\\OY>f0eA\\OY>f0eA\\OY>f0eA\\OY>f0fAYOZ>i0dAWO\\>k0bAUO^>V1000IaAnN_>R1cAlN]>T1eAkNZ>U1hAiNX>W18001O000001O000000000000002N2N2O1N3M2N2N2N2N2N2N2N2N2N2N2N2O1N2N2N_mb5"}, "image_id": 604, "id": 10777}, {"iscrowd": 0, "category_id": 1, "bbox": [324.0, 67.0, 53.0, 62.0], "area": 1534, "segmentation": {"size": [512, 512], "counts": "]SR52m?2N2N2N2N2N2N2N2N2N2N2N2N2]OVOnAk0P>WOnAk0P>WOnAk0P>WOnAk0P>XOmAj0Q>XOmAh0S>ZOkAf0U>\\OiAd0W>b00000001O2N2N2N2N2N3M2N2N2N2O1N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2NS]S2"}, "image_id": 604, "id": 10778}, {"iscrowd": 0, "category_id": 1, "bbox": [392.0, 78.0, 30.0, 30.0], "area": 459, "segmentation": {"size": [512, 512], "counts": "mRT62m?2N2O1N2N2N3M2N2N2N2N2O1N001O000002N2N3M2N2O1N2N2N2N2N3M2NUm\\1"}, "image_id": 604, "id": 10779}, {"iscrowd": 0, "category_id": 1, "bbox": [486.0, 91.0, 26.0, 44.0], "area": 638, "segmentation": {"size": [512, 512], "counts": "dSc72m?2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N1O0000000000000VM"}, "image_id": 604, "id": 10780}, {"iscrowd": 0, "category_id": 1, "bbox": [51.0, 98.0, 31.0, 31.0], "area": 504, "segmentation": {"size": [512, 512], "counts": "aci02m?2N2N2N2O1N3M2N2N2N2N2N2N2N1O0001O2N2N2N2N2N2N2N2N3M2N2O1N2Nalf6"}, "image_id": 604, "id": 10781}, {"iscrowd": 0, "category_id": 1, "bbox": [130.0, 100.0, 60.0, 50.0], "area": 1522, "segmentation": {"size": [512, 512], "counts": "iSQ21n?2N2N2N2N2N2N2N2N3M200000000N2N2N2N2N3M2N2N2O1N200000O1N1O001O0000000001O00000001O2N2N2N2N2N2O2M2N2N2N2N2N2N2N2N2N3N1NYlP5"}, "image_id": 604, "id": 10782}, {"iscrowd": 0, "category_id": 1, "bbox": [364.0, 110.0, 47.0, 58.0], "area": 1356, "segmentation": {"size": [512, 512], "counts": "kTf51n?2UOOdA3[>NcA4[>NcA4[>NdA3Z>OdA3Z>OdA3Z>OdA3Z>OdA3Z>OdA3Z>0cA2[>0cA2[>0cA0]>i0001O000001O000001O2N2N3M2N2N2N2N2N2N2O1N2N2N2N2N3M2N2N2N2N2N2N2Nj[b1"}, "image_id": 604, "id": 10783}, {"iscrowd": 0, 
"category_id": 1, "bbox": [430.0, 110.0, 29.0, 29.0], "area": 428, "segmentation": {"size": [512, 512], "counts": "mSg61n?2N3M2N2N2N2N2N2O1N2N2N2N1O000002N2N3M2N2N2N2N2N2N2N2O1NV\\j0"}, "image_id": 604, "id": 10784}, {"iscrowd": 0, "category_id": 1, "bbox": [166.0, 136.0, 50.0, 58.0], "area": 1567, "segmentation": {"size": [512, 512], "counts": "aUc22m?2\\OMXA5f>NXA4f>Mm@L89i>1TA1j>1TA2i>0VA1i>1TA1j>e0M2N2O1N1L31O0001O01O0001O101N1O0010KeAdN]>Z1eAdN]>Z16N3N1N2N3M2O1N3M2N2O2M2N2N3N1N2N3M2O1N3Mljc4"}, "image_id": 604, "id": 10785}, {"iscrowd": 0, "category_id": 1, "bbox": [84.0, 138.0, 32.0, 33.0], "area": 517, "segmentation": {"size": [512, 512], "counts": "mTZ11m?3N2N2N2N2N2N2N2N2N2N2N2N2N00000000O3N2N2N2N2N2N2N2N2N2N2N2N2NZkU6"}, "image_id": 604, "id": 10786}, {"iscrowd": 0, "category_id": 1, "bbox": [396.0, 139.0, 59.0, 65.0], "area": 1636, "segmentation": {"size": [512, 512], "counts": "kUV62m?2N2O2M2N2N3M2O1N2N3M2N2O0O00000010OIVOYAj0g>YOVAg0^>WOiA4Ge0`>YOgAo0X>SOfAm0[>TOdAk0\\>XOaAh0_><01O01O000001O01O0000102M2N2N3M2O1N2N3M2N2O1N3M2N2N2O2M2N2N2N3N1NkZl0"}, "image_id": 604, "id": 10787}, {"iscrowd": 0, "category_id": 1, "bbox": [201.0, 166.0, 60.0, 58.0], "area": 1666, "segmentation": {"size": [512, 512], "counts": "VfT31o?1N2N2N2N3M2N2O1N2N2N3M2N2N2N200N2N3M2O1N2N2N0N20000000010O0000000000000101N2N2N2N2N2N2O2M2N2N2N2N2N3M2O1N2N2N2N2N3M2OQZm3"}, "image_id": 604, "id": 10788}, {"iscrowd": 0, "category_id": 1, "bbox": [125.0, 168.0, 29.0, 28.0], "area": 379, "segmentation": {"size": [512, 512], "counts": "ien11n?2N2N2N2O2M2N2N2N2N001O01O0000000000011N2N2N3M2N2N2N2O1N_jb5"}, "image_id": 604, "id": 10789}, {"iscrowd": 0, "category_id": 1, "bbox": [420.0, 186.0, 63.0, 58.0], "area": 1730, "segmentation": {"size": [512, 512], "counts": "fVb61n?2N3N1N3M2N3N1N2N3N1N3M2O2M200010O01O0O2M1O00010O0001O01O01OHQO`AP1_>RO_An0a>810O00010O0001O01O01O2O2M2N3N1N3M2N2O2M2N3N1N3M2O2M2N3M2O2M[Y>"}, "image_id": 604, "id": 10790}, {"iscrowd": 0, "category_id": 1, "bbox": [485.0, 187.0, 27.0, 24.0], "area": 379, "segmentation": {"size": [512, 512], "counts": "Tfb71n?3N1N3M2O2M00010O00011N2N3N110O010OO0O010O00010O0001TJ"}, "image_id": 604, "id": 10791}, {"iscrowd": 0, "category_id": 1, "bbox": [133.0, 206.0, 24.0, 27.0], "area": 325, "segmentation": {"size": [512, 512], "counts": "gfR21n?2N2N3M2O1N2N2N2N2N3N11O0000000N2N2N3M2J]@Le?3\\@Kf?35NUYa5"}, "image_id": 604, "id": 10792}, {"iscrowd": 0, "category_id": 1, "bbox": [157.0, 207.0, 34.0, 33.0], "area": 545, "segmentation": {"size": [512, 512], "counts": "Rg^23l?2N2O1N2N2N2N3M2N2N2N10O00000001O000001O00002N2N2O1N2N3M2N2N2N2N2OUYP5"}, "image_id": 604, "id": 10793}, {"iscrowd": 0, "category_id": 1, "bbox": [239.0, 207.0, 63.0, 56.0], "area": 1711, "segmentation": {"size": [512, 512], "counts": "^gg31n?3M2N2N2O1N2N2N2N2N3M2N2O1N2N2N2N2N3M2O1N2N1O0000000001O0000001O2N2N2O1N2N000000000000001O00002N2N2N2N3M2N2N2N2N2N2O2M2N2N2NQiX3"}, "image_id": 604, "id": 10794}, {"iscrowd": 0, "category_id": 1, "bbox": [466.0, 234.0, 46.0, 55.0], "area": 1584, "segmentation": {"size": [512, 512], "counts": "TXY72n?3L4M3M3L4M3M3L4M3M0O01000O0KROZAn0f>UOWAk0i>501000O01000O01ImNaAS1^>QO_An0b>800O01000O01000O01000O0101O3L4M3M3L4MVH"}, "image_id": 604, "id": 10795}, {"iscrowd": 0, "category_id": 1, "bbox": [277.0, 245.0, 57.0, 51.0], "area": 1516, "segmentation": {"size": [512, 512], "counts": "[hZ43l?2N2N2O1N2N2N2N3i@^Ok>d0SA^Ok>d0SA^Ok>d0TA]Ol>c0RA_On>j00010OO00O0000001O2N2N1O010O00000000000000010O0001O2N2N2N2N2N2O1N3M2N2N2N2N2N2N2O2M2N2Nhgh2"}, "image_id": 604, "id": 10796}, 
{"iscrowd": 0, "category_id": 1, "bbox": [193.0, 253.0, 32.0, 34.0], "area": 495, "segmentation": {"size": [512, 512], "counts": "WhP31n?2N2N2N3N1N2N2N2N2N2N201O00000000000010O0N2N2N2N2N2O2M2N2N2N2N^W_4"}, "image_id": 604, "id": 10797}, {"iscrowd": 0, "category_id": 1, "bbox": [232.0, 255.0, 21.0, 22.0], "area": 249, "segmentation": {"size": [512, 512], "counts": "VXd33l?2N2N2N2N3N1N2N2O100001N1N2N2N2N3M2O1N2NeWQ4"}, "image_id": 604, "id": 10798}, {"iscrowd": 0, "category_id": 1, "bbox": [439.0, 266.0, 16.0, 15.0], "area": 122, "segmentation": {"size": [512, 512], "counts": "bhk62m?3M3N1N00010O0010O01O0102M3M3N`Wl0"}, "image_id": 604, "id": 10799}, {"iscrowd": 0, "category_id": 1, "bbox": [241.0, 272.0, 32.0, 31.0], "area": 521, "segmentation": {"size": [512, 512], "counts": "Rih31n?2N2N2L4N2N2N2N2N3M2N2N1O00000010O2O1O1N2N2N2N2N2N2N2N2N2N2N2NSWg3"}, "image_id": 604, "id": 10800}, {"iscrowd": 0, "category_id": 1, "bbox": [304.0, 288.0, 31.0, 30.0], "area": 476, "segmentation": {"size": [512, 512], "counts": "`Yh41n?2N2N2N2N2N2N2N2N2N2N2N2N2N1O0001O2N2N2N2N2N2N2N2N2N2N2N2N2NdVh2"}, "image_id": 604, "id": 10801}, {"iscrowd": 0, "category_id": 1, "bbox": [322.0, 303.0, 56.0, 53.0], "area": 1552, "segmentation": {"size": [512, 512], "counts": "`ZQ52l?3M2N3M2N3M2010O010ON3N2M2N3M2N3M210O010O0100OO2M2N3M3M2N3M2O20O010O0N3M2N3M2O2M3M2N3M2N3M2N3N1N3M2N3M2N3M2O2MVfR2"}, "image_id": 604, "id": 10802}, {"iscrowd": 0, "category_id": 1, "bbox": [482.0, 308.0, 30.0, 33.0], "area": 669, "segmentation": {"size": [512, 512], "counts": "\\Za76^?n0O01O0001O01K41O01OM3M4K4O101O0001O01O00M4L3L4M4K4L4M3L5L3Li4"}, "image_id": 604, "id": 10806}, {"iscrowd": 0, "category_id": 1, "bbox": [215.0, 367.0, 33.0, 37.0], "area": 642, "segmentation": {"size": [512, 512], "counts": "Xl[32m?2M2N3M2N3N1N3M3M2O2M2N3M2N3O01000OO2M2O2M2N3M3N1N3M2N3M2O2M2N3MYdS4"}, "image_id": 604, "id": 10807}, {"iscrowd": 0, "category_id": 1, "bbox": [507.0, 393.0, 5.0, 15.0], "area": 42, "segmentation": {"size": [512, 512], "counts": "flm71k?4M4L3M3gC"}, "image_id": 604, "id": 10808}, {"iscrowd": 0, "category_id": 1, "bbox": [240.0, 402.0, 64.0, 51.0], "area": 1604, "segmentation": {"size": [512, 512], "counts": "X]h31m?3M2N3M2O2M3O010O0100OO2M3N1N3M2N3M2O2N10100O010O0100O010O0100O010O0100O0100O01nNSAP1o>O010O010O0100O0100O0N3M2O2M2N3M3N1N3M2N3M2O2MfbW3"}, "image_id": 604, "id": 10809}, {"iscrowd": 0, "category_id": 1, "bbox": [188.0, 409.0, 33.0, 33.0], "area": 588, "segmentation": {"size": [512, 512], "counts": "\\]n22l?3N1N3M2N3M2O2M2N3M3N110O010O0100O0100O010ON3M2N3N2M2N3M2N3N1N3MmRa4"}, "image_id": 604, "id": 10810}, {"iscrowd": 0, "category_id": 1, "bbox": [419.0, 409.0, 68.0, 49.0], "area": 1880, "segmentation": {"size": [512, 512], "counts": "]ma63k?2N3M2N3N1N3M2N3M2N3N110O10O010O010O01O1N101O010O0100O0100O010O0100O0100N1N3N101O0N3M3M210O0100N1O2M2N3M2N102M3M2N3N1N3M2N3M2O2M3M2N3NkR<"}, "image_id": 604, "id": 10811}, {"iscrowd": 0, "category_id": 1, "bbox": [219.0, 439.0, 56.0, 58.0], "area": 1630, "segmentation": {"size": [512, 512], "counts": "Sn]32m?2c@Og>3XAOe>4XANf>5WANf>4XANf>5WANg>3WAOi>e00O010O0100O010O0100O010O010O0100O0100O010O010O0100O010O0100O0WOXA;h>BZA>f>@]A`0c>]O`Ab0`>\\ObAe0m>0O01O0N3M2O2M2N3M3M2N3NQQf3"}, "image_id": 604, "id": 10812}, {"iscrowd": 0, "category_id": 1, "bbox": [504.0, 442.0, 8.0, 17.0], "area": 73, "segmentation": {"size": [512, 512], "counts": "W^l71m?2N3M2N3N1N3M2VB"}, "image_id": 604, "id": 10813}, {"iscrowd": 0, "category_id": 1, "bbox": [163.0, 453.0, 35.0, 33.0], "area": 631, "segmentation": 
{"size": [512, 512], "counts": "ina22l?3M3N1O2M2O0O001O2d@AU?b0h@AV?e0N3O0100O0100O010O0100O001N1N3M2N3M2O2M3M2N3M2O`al4"}, "image_id": 604, "id": 10814}, {"iscrowd": 0, "category_id": 1, "bbox": [470.0, 453.0, 17.0, 15.0], "area": 140, "segmentation": {"size": [512, 512], "counts": "]^[71m?2N3M2N3O010O010O010O010OO2M3M2NgQ<"}, "image_id": 604, "id": 10815}, {"iscrowd": 0, "category_id": 1, "bbox": [200.0, 457.0, 20.0, 17.0], "area": 171, "segmentation": {"size": [512, 512], "counts": "`^T31n?1N3M3M201O010O010O011N10O010D^@8e?O2N1N3M_aa4"}, "image_id": 604, "id": 10816}, {"iscrowd": 0, "category_id": 1, "bbox": [401.0, 462.0, 60.0, 43.0], "area": 1413, "segmentation": {"size": [512, 512], "counts": "nnX62l?3M3M2N3M2N3M2N30O010O0100O010O010O0100O010O010O0100O0N3M2O2M2N3O010O010O10O010O010O010O10OO2M2N3M2O2M2N3M2N3M2N3N2M2NUQi0"}, "image_id": 604, "id": 10817}, {"iscrowd": 0, "category_id": 1, "bbox": [469.0, 470.0, 32.0, 31.0], "area": 537, "segmentation": {"size": [512, 512], "counts": "WoZ72l?3N1N3M3M2N3N1N3M2O20O10O10O010O010O010O010M3M2O2M2N3M2N3N1N3MPQ5"}, "image_id": 604, "id": 10818}, {"iscrowd": 0, "category_id": 1, "bbox": [193.0, 493.0, 60.0, 19.0], "area": 631, "segmentation": {"size": [512, 512], "counts": "noP32m?1N2N2N2N21O001O001OO1N2O1N2N2N2O1001O001O1O001O001O001O1O001O001O1O001O001O001OL40000000000000000001O000000002N3M0000TPQ4"}, "image_id": 604, "id": 10819}, {"iscrowd": 0, "category_id": 1, "bbox": [166.0, 496.0, 25.0, 16.0], "area": 269, "segmentation": {"size": [512, 512], "counts": "n_c22h?0Z@2e?5N2N2N2O100001O1O001O001O001O1O001O001O001M3MXPP5"}, "image_id": 604, "id": 10820}, {"iscrowd": 0, "category_id": 1, "bbox": [398.0, 506.0, 13.0, 6.0], "area": 46, "segmentation": {"size": [512, 512], "counts": "n_W62m?1N2O1001O1O001O001O001OQPb1"}, "image_id": 604, "id": 10821}, {"iscrowd": 0, "category_id": 1, "bbox": [384.0, 507.0, 10.0, 5.0], "area": 26, "segmentation": {"size": [512, 512], "counts": "o_P61m?2N21O001O1O001O00Q`j1"}, "image_id": 604, "id": 10822}, {"iscrowd": 0, "category_id": 1, "bbox": [419.0, 509.0, 7.0, 3.0], "area": 13, "segmentation": {"size": [512, 512], "counts": "ooa61m?2001O001O00Q`Z1"}, "image_id": 604, "id": 10823}, {"iscrowd": 0, "category_id": 1, "bbox": [378.0, 439.0, 46.0, 53.0], "area": 1412, "segmentation": {"size": [512, 512], "counts": "V^m52n?2M3N2GJc@9[?Ic@9Z?9N2M3N2M300N2M3L4M3N2M3N2M1002M3NO10O010O010O2KaAgNa>V17N2M3N2M3N2M3N2M3N2M3B^@:f?M3N2M3NUa[1"}, "image_id": 608, "id": 10824}, {"iscrowd": 0, "category_id": 1, "bbox": [1.0, 11.0, 28.0, 31.0], "area": 427, "segmentation": {"size": [512, 512], "counts": "d`01n?2N3M2O1N3M2N2O2M2N2N3N10010O0001M2O2M2N2N3N1N2N3M2O1NR_a7"}, "image_id": 609, "id": 10825}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 54.0, 2.0, 2.0], "area": 3, "segmentation": {"size": [512, 512], "counts": "f12o?OYnn7"}, "image_id": 609, "id": 10826}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 57.0, 42.0, 81.0], "area": 1791, "segmentation": {"size": [512, 512], "counts": "i1V2k=010O00010O00010OO2M2O1N3M2O2M2N2N3N1N3M2O1N3M2N2O2M2N3M2O1N3M2O2M2N2N3N1N2N3N1N3M_lZ7"}, "image_id": 609, "id": 10827}, {"iscrowd": 0, "category_id": 1, "bbox": [302.0, 431.0, 67.0, 77.0], "area": 1706, "segmentation": {"size": [512, 512], "counts": "^_g42m?2N2N2M3N1O2N2M3N2N1O2M3N2N2N1N1000000O0100000O0100000O01000O0100000O0100000O0100000O0100000O01000000O3N1O2N2M3N2N1O2M3N2N2M2O2N2N2MWRW2"}, "image_id": 609, "id": 10828}, {"iscrowd": 0, "category_id": 1, "bbox": [33.0, 0.0, 159.0, 110.0], "area": 11669, "segmentation": {"size": [512, 
512], "counts": "Pb`01m?2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3M20001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O00O1M3N2M3N2N2M3N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2N2M4M2M3N3M2M4M2M3N3L3Nnoo4"}, "image_id": 611, "id": 10829}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 13.0, 6.0, 15.0], "area": 50, "segmentation": {"size": [512, 512], "counts": "=?a?N2M4M2M4Mcol7"}, "image_id": 611, "id": 10830}, {"iscrowd": 0, "category_id": 1, "bbox": [150.0, 28.0, 345.0, 442.0], "area": 75772, "segmentation": {"size": [512, 512], "counts": "]U[21m?2M4M2M4M2N3L3N2M4M2N3L3N2M4M2M4M2N2M4M2M4M2N3L3N2M4M2M4M2N2M4THXM\\2j2aMYM\\2k2aMWM]2k2aMXM[2l2aMWM]2k2aMXM\\2j2aMYM\\2k2aMWM]2k2`MYM\\2k2aMWM]2k2aMXM\\2j2aMYM\\2k2aMWM]2k2`MYM\\2k2aMWM]2k2aMXM[2k2bMXM\\2k2aMXM\\2j2aMYM\\2k2aMWM]2k2`MYM\\2k2aMWM]2k2aMXM\\2j2aMYM\\2k2aMWM]2k2`MYM\\2k2aMWM]2k2bMWMZ2l2gMSMW2P3hMPMX2P3iMPM_MgMT4Z5\\NoL^MjMV4V5\\NQMZMlMZ4T5\\NoLXMoM]4Q5[NQMTMRN`4m4]NPMQMUNb4l4\\NoLPMXNd4h4\\NdMd1]2\\NcMZNRMn1Z5HdMXNUMo1W5JcMUNXMR2U5HdMRNZMV2R5IcMoM^MW2P5IcMlM`M\\2l4HdMjMcM]2i4JcMfMgMa2f4HdMdMiMc2c4JcMaMlMf2a4HdM^MnMj2^4HdM\\MQNk2\\4IcMXMTNP3X4HdMVMWNQ3U4JcMSMZNT3S4HdMPM\\NX3P4HQO8o0HPO8P1HQO7o0JPO7P1HPO8P1IPO7P1HPO8P1HQO8n0GSO9n0DUO;k0CWO>i0^OZOb0f0\\O]Od0c0XO@h0`0VOCj0TLiLT3Z2k0m0nKmLW3S2m0P1jKoLY3o1P1R1cKSM]3g1S1V1^KUM_3c1V1X1WKXMc3]1Y1[1QK\\Mf3V1\\1^1kJ^Mi3R1^1`1fJbMl3j0a1d1aJdMn3f0d1f1ZJgMS4?f1j1TJkMU49j1P3VNmLm1T3SNiLo1W3QNgLR2Y3nMcLS2_3mM_LR2d3nMYLS2j3mMSLR2P4nM[KkK>W6Z4RNcKn1`4SN]Km1e4SNXKn1k4RNRKm1Q5SNlJn1W5Z43N2M4M2N3L3N3L3N2N3L3N3L3N2M4M2N3L3NRJPGP5m8PKWGP5f8oJ^GP5`8PKcGP5Y8PKkGo4S8PKQHP5k7PKYHo4e7kJeG]Oi0g5_7lJkHT5R7lJWIm4g6SK]Il4_6TKeIk4Y6UKjIj4S6VKPJk4m5UKVJj4g5VK\\Jk4a5UKaJk4]5UKeJk4X5UKkJl4R5TKPKl4m4TKVKm4g4SKZKn4d4QK^KP5_4QKbKQ5[4oJhKP5U4PKoKP5n3PKTLP5i3PK[LP5b3PKaLo4]3QKeLo4X3QKlLo4Q3QKQMo4l2QKXMo4e2QK^Mn4`2QKcMo4Z2RKiMn4T2RKnMn4o1RKUNn4h1RK[Nm4b1SKaNm4]1SKfNm4W1SKkNm4R1SKROm4k0SKWOm4f0SK^Om4?SKCm4;RKIm44TKNm4OSK4l4ITK:m4CSK`0l4]OTKf0l4XOTKk0l4ROTKP1l4mNTKW1l4fNTK\\1l4cNRK`1n4`NPKc1P5]NlJf1T5ZNjJi1V5VNhJl1X5UNdJn1\\5RNbJQ2^5oM^JT2b5V40O0010O0001]I_Jf2`5XMbJh2_5TMeJk2[5SMgJn2X5oLlJP3U5mLmJS3S5jLPKW3P5fLSKY3m4eLUK[3k4bLYK]3h4`LZKa3e4\\L_Kc3b4ZL`Kf3`4WLcKi3^4TLeKl3Z4RLhKn3X4oKlKP4U4mKmKS4S4jKPLW4P4fKSLY4m3dKVL\\4j3bKYL]4h3`KZL`4f3]K]Ld4c3YK`Lf4`3WKcLi4]3UKfLj4[3RKhLn4X3PKjLP5W3mJlLS5S3jJPMV5Q3gJRMX5n2eJUM[5k2cJWM]5j2_JZM`5f2^J\\Mb5e2ZJ_Mf5`2XJbMh5^2VJeMi5\\2SJgMm5Y2QJiMo5X2mIlMR6T2lInMT6P6O00001O0010O0001O001O00001O001O001O0M3M4M2M4M2M3N3N101O01O01O0M4M2M3N3L3N3L3N2M4M2N3L3N3L3N2M4M2M4M2M3N3L3N3M2M3N3L3N3L3N2M4M2M4M2N2M4M2M4M2M4M2M3N3L3N3M2M3N3L3N3L3N2M4M2M4M2M3N`V8"}, "image_id": 611, "id": 10831}, {"iscrowd": 0, "category_id": 1, "bbox": [466.0, 92.0, 46.0, 217.0], "area": 5058, "segmentation": {"size": [512, 512], "counts": "VSY73m?4K6K4L5K4K6K5K4L5K4K6K4L5K4K6K4L5K4L5J5L5K4L5J6K4L5K4L5J5L5K4L5J5L5K4L5K4K6K4L5K5J5L5K4L5oL"}, "image_id": 611, "id": 10832}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 178.0, 22.0, 51.0], "area": 514, "segmentation": {"size": [512, 512], "counts": "b5c1]>N3L3N3M2M4M2N2M4M2M4M2N3L3N2N3O0J[@1j?010O00ORjd7"}, "image_id": 611, "id": 10833}, {"iscrowd": 0, "category_id": 1, "bbox": [9.0, 230.0, 80.0, 100.0], "area": 4451, "segmentation": {"size": [512, 512], "counts": "fi41m?3L3N3L3M3N3L3N3L3N2M4M2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N201O010O010O00010O010O00010O01M2M3N3L3N3M2M4M2M3N3L3N3L3N2N3L3N3L3N2M4M2M4M2N2M4M2M4M2M3N3L3N3MYXc6"}, "image_id": 
611, "id": 10834}, {"iscrowd": 0, "category_id": 1, "bbox": [494.0, 348.0, 18.0, 22.0], "area": 314, "segmentation": {"size": [512, 512], "counts": "oZg73m?5K5J6K1OO1000O1000O1000O1000O1000TE"}, "image_id": 611, "id": 10835}, {"iscrowd": 0, "category_id": 1, "bbox": [421.0, 267.0, 13.0, 12.0], "area": 125, "segmentation": {"size": [512, 512], "counts": "_hb64j?4M2O1N1000000001O0O2N4KbgV1"}, "image_id": 613, "id": 10836}, {"iscrowd": 0, "category_id": 1, "bbox": [280.0, 330.0, 105.0, 84.0], "area": 7629, "segmentation": {"size": [512, 512], "counts": "^[\\4`1`>00000000000000000000000mNgN_CY1aBYTo1"}, "image_id": 613, "id": 10837}, {"iscrowd": 0, "category_id": 1, "bbox": [497.0, 331.0, 15.0, 62.0], "area": 580, "segmentation": {"size": [512, 512], "counts": "\\jh78g?2O000jAIc<7QC5P=JdBb0\\=^OdBb0\\=^O`Bf0`=ZOTBR1l=a0003M0001O0aE"}, "image_id": 613, "id": 10838}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 379.0, 5.0, 31.0], "area": 131, "segmentation": {"size": [512, 512], "counts": "k;o0Q?0000M3^OgTm7"}, "image_id": 613, "id": 10839}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 446.0, 30.0, 46.0], "area": 996, "segmentation": {"size": [512, 512], "counts": "S>Y1g>0000000O10000M30A_A^Ob>b0=10000000000000000000000000000000O:Gd0\\OUa`7"}, "image_id": 613, "id": 10840}, {"iscrowd": 0, "category_id": 1, "bbox": [466.0, 0.0, 46.0, 14.0], "area": 417, "segmentation": {"size": [512, 512], "counts": "RPY72l?20001O000000001O0000001O0000001O0000001O000000001O0000001O0000001O0000001O000000001O000000"}, "image_id": 614, "id": 10841}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 506.0, 12.0, 6.0], "area": 72, "segmentation": {"size": [512, 512], "counts": "j?6j?000000000000000000000V`i7"}, "image_id": 614, "id": 10842}, {"iscrowd": 0, "category_id": 1, "bbox": [170.0, 0.0, 106.0, 30.0], "area": 2537, "segmentation": {"size": [512, 512], "counts": "bPe21m?2U@Nf?8L3N3JBg@?V?Dk@;T?:O001O001O00001O001O00001OO1M3N21O3M1O00001O00O1O1001O001OO1N2N2M3N2M3001O000d@_OY?d00001O001O00001O01O010O01O01O010OO1N3N101O00001OM3N2001O001O001O00001O0000M3O100001O001O001O00001O00O1M4M2M4M2N3L3N2N3Looe3"}, "image_id": 615, "id": 10843}, {"iscrowd": 0, "category_id": 1, "bbox": [298.0, 0.0, 55.0, 48.0], "area": 1482, "segmentation": {"size": [512, 512], "counts": "]Pe42m?2N2N2N2N2N2N2N2N2N2N2N2N2O001O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O001O1O1O00N2SO`A9a>FaA8`>GbA7_>HcA6^>IdA5]>JeA4\\>KfA3[>LgA2Z>MhA1Y>NiA0X>OjAOW>0kANV>1lAMU>2m0O1O1OQ`_2"}, "image_id": 615, "id": 10844}, {"iscrowd": 0, "category_id": 1, "bbox": [398.0, 0.0, 64.0, 49.0], "area": 2286, "segmentation": {"size": [512, 512], "counts": "lPW62e?Oa@5[?Mc@8X?Ke@8W?:N3M20001M2N3M2N3O00001O001O00001O0010O00O2O001O00O1N2O1001O001O001O00001O001O00001O001O00M3N2N2M4M2CVAAm>=UAAn>>SA^OP?b08Ee@0Z?Nh@2X?Kl@5T?Hn@6`?L3Nboh0"}, "image_id": 615, "id": 10845}, {"iscrowd": 0, "category_id": 1, "bbox": [460.0, 0.0, 52.0, 39.0], "area": 1340, "segmentation": {"size": [512, 512], "counts": "nPV72l?2M4M2N2M4M2M4M2M3N3L3N3N10001O001O0000O1M3N2N2M3N2M3O11O00001O001O00001O001O00001O001O0000N2N2M3N2M3O1"}, "image_id": 615, "id": 10846}, {"iscrowd": 0, "category_id": 1, "bbox": [4.0, 26.0, 50.0, 58.0], "area": 1714, "segmentation": {"size": [512, 512], "counts": "`Q21m?3L3N3L3N2M4TA@T>b0jA@S>d0jA_OS>c0kA_OU>b0gABY>=eAE[>m0010O0010O0010O010O01N1N2010O010O010N1N3M2M3N3M2N3L3N3M2N3L3N2N3M2M4M2N3MjnT7"}, "image_id": 615, "id": 10847}, {"iscrowd": 0, "category_id": 1, "bbox": [180.0, 46.0, 65.0, 72.0], "area": 2118, "segmentation": {"size": [512, 512], "counts": 
"QRj21m?3N2M2O2N2M2O2M3N1N3N2M3N101000O01000OfATOa=l0]BVOb=k0[BXOe=g0ZB[Of=e0WB]Oi=d0UB^Ok=b0SB@m=?QBCo=>oADQ>8kAIU>8hAKX>o0000O0100O0N3N0O10O3N2M2O2M3N1N3N2G]AROd>m0^APOe>m0]AROe>l09M2O2N2M2O2M3N1N3N2M2O2N2Mj]U4"}, "image_id": 615, "id": 10848}, {"iscrowd": 0, "category_id": 1, "bbox": [260.0, 49.0, 59.0, 79.0], "area": 2518, "segmentation": {"size": [512, 512], "counts": "YSR43k?2M3N3L3M4N11O01O010M2N3L3BWOcAk0Z>XOcAl0Z>WOcAk0Z>XOdAk0Y>?N2N3O0010O0010O0010O01M2M3N3M2M40O0010O0010SO[B]Oe=a0]B_Od==_BD`=:cBE]=9eBH[=4iBKW=3kBMV=OmB2R=LQC3P=ISC8lM4M2Mi\\P3"}, "image_id": 615, "id": 10849}, {"iscrowd": 0, "category_id": 1, "bbox": [443.0, 56.0, 58.0, 72.0], "area": 2158, "segmentation": {"size": [512, 512], "counts": "nbm61l?3N2M4M2N3L3N201O010O00010O010O0O1M4ZAXOP>j0nAXOo=l0mAXOP>j0nAXOo=k0nAXOP>k0mAXOo=^1N3L30001O010O01O01O010O01O000N3L3N3L3BkAoNW>o0kAnNY>n0jAPOX>n0?L3N2M4M2M4M2M3N3L3N3Lo]5"}, "image_id": 615, "id": 10850}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 87.0, 2.0, 7.0], "area": 11, "segmentation": {"size": [512, 512], "counts": "g27i?M\\mn7"}, "image_id": 615, "id": 10851}, {"iscrowd": 0, "category_id": 1, "bbox": [38.0, 88.0, 79.0, 54.0], "area": 2095, "segmentation": {"size": [512, 512], "counts": "]Sc02k?3N3M2M4M2N2O2O010O01O01O010O000M4M2M4N11O010O01O010O01O01O010O01O01O010M2N2N30O01O01O010O01O01O0102M0010O010O0XOYA7g>F\\A;d>B^A>b>_ObAa0^>\\OdAd0\\>YOhAf0i>1O010O01O01O010O01M2M3N3L3N3L3N2MZ\\U6"}, "image_id": 615, "id": 10852}, {"iscrowd": 0, "category_id": 1, "bbox": [130.0, 107.0, 66.0, 67.0], "area": 2729, "segmentation": {"size": [512, 512], "counts": "hSQ2>b?b0^O0000000O100000000E;0000000000000000000000000000000000000000000000O10000000000000f0ZO03Mh0XO00000000000000000000000000000000000000c0]OWkm4"}, "image_id": 615, "id": 10853}, {"iscrowd": 0, "category_id": 1, "bbox": [205.0, 116.0, 19.0, 19.0], "area": 174, "segmentation": {"size": [512, 512], "counts": "jcV33l?2M3N1N30000O0100000O010000N1N3N2N2MSl_4"}, "image_id": 615, "id": 10854}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 138.0, 8.0, 18.0], "area": 73, "segmentation": {"size": [512, 512], "counts": "Z4b0_?M2N3M2N3M2N3Nckk7"}, "image_id": 615, "id": 10855}, {"iscrowd": 0, "category_id": 1, "bbox": [506.0, 138.0, 6.0, 18.0], "area": 70, "segmentation": {"size": [512, 512], "counts": "gTm74i?3M3M4L3O1fK"}, "image_id": 615, "id": 10856}, {"iscrowd": 0, "category_id": 1, "bbox": [214.0, 148.0, 143.0, 95.0], "area": 4511, "segmentation": {"size": [512, 512], "counts": "aU[33k?3L3N2M4M2M4M2M3N3L310O01O01O010O01O01O010O01O01O010O0N2N3L30001O010O01O01O010O01O]AiN[>Z1aAiN\\>_1M4O0010O0010O0PBZNf=g1VB\\Nk=c1SB_Nm=f0SB6m=GVB8j=EYBe=^O_Ba0a=]OaBd0^=YOfBf0[=WOgBj0X=SOkBk0X=ROkBk0V>Io@\\O0OR?a0SABl>7TAMl>0TA2l>KTA9l>DTA>l>_OTAd0R?30O01O01O010O010O01O01O010O01O01O010O01O010O000N3L301O00010O010O0010O0010O0010O0010O010N1N2O20O010O01O01O010O010O00010O0N3L3N2M4M2M4M2N2M4M2M4M]Y]2"}, "image_id": 615, "id": 10857}, {"iscrowd": 0, "category_id": 1, "bbox": [317.0, 149.0, 69.0, 53.0], "area": 1878, "segmentation": {"size": [512, 512], "counts": "`en42k?3N3L3N3L3O101O010O01O01O010N1M4M2M3N3M2M4O01O01O010O01O01O010O01O010O01O010O01O01O01lNVAQ1l>01O01O010O01O010O0VOSAb0l>\\OWAc0j>YOYAh0P?O0010O00O2M2N3L3N2M4M2M4M]jn1"}, "image_id": 615, "id": 10858}, {"iscrowd": 0, "category_id": 1, "bbox": [96.0, 152.0, 20.0, 23.0], "area": 258, "segmentation": {"size": [512, 512], "counts": "YU`11m?2M4M2N3L3N3M2O110O0010O0M4M2N3L3N2M4MUkU6"}, "image_id": 615, "id": 10859}, {"iscrowd": 0, "category_id": 1, "bbox": [8.0, 154.0, 78.0, 62.0], 
"area": 2439, "segmentation": {"size": [512, 512], "counts": "jU43k?2N3L3N2N3M2M4M2N2N3N1010O01O01O010O01O01OVATO`>m0]AVOc>i0[AYOe>R10O00010O010O00010O010O00010O010O01O01L2O00O010O010O01002N210O00010O010O0010O001N1N3L3N2N3L3N3L3N2M4M2M4M2Nnjd6"}, "image_id": 615, "id": 10860}, {"iscrowd": 0, "category_id": 1, "bbox": [90.0, 186.0, 60.0, 76.0], "area": 2387, "segmentation": {"size": [512, 512], "counts": "^V]12k?3N2M4L3N2O2O00010O00010O00010O000UA_OW>b0eAA\\>>aAE\\>>`AG\\>=`AF]>=`AF`0BZ=]1bBfN^=Z1_BiNb=V1[BnNd=S1XBPOh=d110O00010O00010O00010O00010O00010O00M4L3\\OoAVOT>g0PBUOT>g0oAVOT>g0oAVOT>g0PBUOT>g0d0M3M4L3M3M4L3Mmid5"}, "image_id": 615, "id": 10861}, {"iscrowd": 0, "category_id": 1, "bbox": [373.0, 192.0, 77.0, 67.0], "area": 2715, "segmentation": {"size": [512, 512], "counts": "Zgj51m?3N1N3M3N1N3N2M2O200O010O010N2N1O2M3N1N3N2M2O2N2M2O2M2O2M3N1N3N2N1N3NO0200010O10O10O100O0N3N2M2N3N2M2O2M3N1N3N2N20O01000O0100QOUAh0j>WOWAi0j>TOYAj0P?M2N3N2M2O2M3N1N3M3N1NQin0"}, "image_id": 615, "id": 10862}, {"iscrowd": 0, "category_id": 1, "bbox": [165.0, 193.0, 54.0, 61.0], "area": 1623, "segmentation": {"size": [512, 512], "counts": "ifb21m?3L3N2M4M2M4M2M3N3M210O01O01O010O010O00010O010O0_ATOn=l0oAWOR>i0kAZOT>f0iA]OX>c0eA_O[>a0bAC]>P110O01O010O01O01O010O01OO2L3N3L3@XAFj>8YADk>8XAFj>8XAEl>7`0N2M4M_Yb4"}, "image_id": 615, "id": 10863}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 216.0, 29.0, 68.0], "area": 1068, "segmentation": {"size": [512, 512], "counts": "h6Q2P>0O0010O010L3N3M2M3cNgAQ1\\>lNfAR1\\>lNgAP1f>N2N3L3N3O0010ON2N3M2M4M2N3L3N2NeXa7"}, "image_id": 615, "id": 10864}, {"iscrowd": 0, "category_id": 1, "bbox": [220.0, 219.0, 82.0, 64.0], "area": 2433, "segmentation": {"size": [512, 512], "counts": "gW^33k?2M3N3M2M4M2M3N3L3N3M2N201O010O01O01O010O01O010O01O010OQOUAi0l>SOXAl0n>10O01O01O010O010O0001TATOc>k0[AWOe>j0WAZOi>n001O01O010O01O010O01O010O01O01OM4M2O10N3L3N3L3N2M4M2N3L301O01O010OO2L3N2M4M2M]hX3"}, "image_id": 615, "id": 10865}, {"iscrowd": 0, "category_id": 1, "bbox": [23.0, 231.0, 45.0, 73.0], "area": 1840, "segmentation": {"size": [512, 512], "counts": "Zi;1l?3M3L5GFg@=V?8M4K4M3O110N1M3M4K4M3M4L3L4M3M21M3L2OO13N3O01O01O00010O000^NiA\\1V>aNmA^1\\>L3M3L4nNWAj0Q?M4K4M3Bh@1\\?Kg@2cWn6"}, "image_id": 615, "id": 10866}, {"iscrowd": 0, "category_id": 1, "bbox": [439.0, 235.0, 68.0, 57.0], "area": 2089, "segmentation": {"size": [512, 512], "counts": "Xhk61m?2O2M3N2M2O2N2M2O2M3N2M2O2N2O10O10O10O10O1N2M2O2N2M3N1O2O1000O01000O01N2N2M100O10O3N2M2O2N2M3N1O20000N1N3N2N1N3N2M3N1O2M3N2N1N3N2N2M2OSX2"}, "image_id": 615, "id": 10867}, {"iscrowd": 0, "category_id": 1, "bbox": [148.0, 251.0, 57.0, 69.0], "area": 1960, "segmentation": {"size": [512, 512], "counts": "nXZ21m?2M3M4M2M4M2N201O010O00010O010O0VA^OV>b0hA@Y>`0cAC]>=aAF^>:_AI^>;_AH^>n0M3N3L3M310O0010O0010O001N1M3N0O03N3L3N2CkAlNY>Q1iAmNY>P1jAmNZ>P1b0nAAQ>`0lABU>U1O00010O010O01O01O010O01O0N2N3L3N3L3N2F\\AVOg>g0[AVOh>h0:L3N3L3N2M4M2N3LXgg1"}, "image_id": 615, "id": 10869}, {"iscrowd": 0, "category_id": 1, "bbox": [196.0, 273.0, 46.0, 79.0], "area": 1863, "segmentation": {"size": [512, 512], "counts": "TZR31m?3L3i@Jb>8\\AJa>:[AJb>8\\AJa>9\\AJb>9ZAKb>8\\AJb>k0N3L3N3L3N2M4M2M4M20010L2O0O04L3N3ETB]No=a1SB]No=`1;L2O2M3N3L3N2M4M2M3M4M2M4M2M3N3L3M[gV4"}, "image_id": 615, "id": 10870}, {"iscrowd": 0, "category_id": 1, "bbox": [403.0, 274.0, 14.0, 14.0], "area": 131, "segmentation": {"size": [512, 512], "counts": "ihY63k?3M2M4O001O01O010O0O1M4M2N]W_1"}, "image_id": 615, "id": 10871}, {"iscrowd": 0, "category_id": 1, "bbox": [407.0, 288.0, 72.0, 56.0], "area": 2248, 
"segmentation": {"size": [512, 512], "counts": "gi[63l?2M2O2M3N1N3N2M3N1N3N2M2O2O100O010000O010000O001QAPOk>T1M3N1N3N2O10O10O10O10O10OO2N2M2O1N10O2O2M2O2M3N1N3N2O10O01O1M2O2N2M3N1N3N2M2O2N2M3N1N3N2N1N3N^V`0"}, "image_id": 615, "id": 10872}, {"iscrowd": 0, "category_id": 1, "bbox": [237.0, 291.0, 46.0, 78.0], "area": 1762, "segmentation": {"size": [512, 512], "counts": "djf32l?2M3i@Jb>9[AIc>9ZAKc>8ZAJc>9[AJb>9ZAJd>8ZAKb>k0N2N3L3N3M2M4M2N2M4O0010M2M4M2N2CnAiNV>T1lAjNU>U1nAhNR>W1:1000O4M2N3L3N3M2M3N3M2M4M2N3L3N2NiVb3"}, "image_id": 615, "id": 10873}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 307.0, 38.0, 42.0], "area": 1063, "segmentation": {"size": [512, 512], "counts": "e9P1Q?O010O01O01OO2L3O2O01O01O010O01O01O010O01O0N2SOTAc0P?YOTAd0U?N210O0010O00O2M2M4M2M3N3Lne\\7"}, "image_id": 615, "id": 10874}, {"iscrowd": 0, "category_id": 1, "bbox": [293.0, 322.0, 65.0, 68.0], "area": 2171, "segmentation": {"size": [512, 512], "counts": "]kb41m?2M4M2M4M2M310O010O00010O010O00010O010O0O1i@_Om>d0PA^Om>l0N3L3N3L3N2M4M2M4M2M3N3N110O00010O010O00N3L3N3L3N2M4M2M3N3L3N3L3N2M4M2N3L3N2M4M2M4Mde\\2"}, "image_id": 615, "id": 10875}, {"iscrowd": 0, "category_id": 1, "bbox": [357.0, 340.0, 91.0, 64.0], "area": 2455, "segmentation": {"size": [512, 512], "counts": "Rlb51n?2N2N2O2M2N2N2N2N3N1N2N2N2N3N0O0000001O01O00000000001O0001O0001O0000101O2N1O1O100O00O2N2N3M2O1N2N3M0001O01O000000010O000000010O00000000010O000002N3N1N2N2N3M2O1N2N3M2N2O1N3M2N2N2O1Ngdo0"}, "image_id": 615, "id": 10876}, {"iscrowd": 0, "category_id": 1, "bbox": [4.0, 382.0, 68.0, 73.0], "area": 2435, "segmentation": {"size": [512, 512], "counts": "f]21m?2M4M2N3M2M4N110O01O010O010O010OO2SO^OWBe0g=^OVBe0g=]OWBe0g=^OVBe0f=^OXBd0f=_OWBc0g=_OVBe0g=^OVBd0h=^OVBe0i=i010O010O010O010O01O0O1N3M2N2N003L3N3M2N3M2N3L3N2N6J2O2O010O010N1N3M2M3N3M2N3M2M4Mack6"}, "image_id": 615, "id": 10877}, {"iscrowd": 0, "category_id": 1, "bbox": [70.0, 395.0, 65.0, 82.0], "area": 2609, "segmentation": {"size": [512, 512], "counts": "o]S11m?2M3N3L3N3L3n@Bb>?[ACf>9UAJj>7SAKn>d0M2N2M4M2M4M2M3N3L301O01O01O010O01O01O010O010NOO010O10O0100O0101O3L3N3M2M3N3M2M4M2N2M4M2N3L3N2N3L3N3M2N2M4M^Sl5"}, "image_id": 615, "id": 10878}, {"iscrowd": 0, "category_id": 1, "bbox": [120.0, 414.0, 67.0, 83.0], "area": 2489, "segmentation": {"size": [512, 512], "counts": "Q_l13j?3N3M2M4M2N2O20O010O0010O0O2L3N3L3N2N3L3^OjNXBY1e=jNXBX1e=kNXBX1f=jNXBY1e=jNWBY1f=jNXBY1e=b0N3L3N210O01M2M3N3M2M4M2N3L3N2M4M2bNeAU1^>hNdAU1f>M2N2M4M2M4O0010O0010O001O0M3N3M2M4M2N3L3N^RR5"}, "image_id": 615, "id": 10879}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 444.0, 67.0, 68.0], "area": 2329, "segmentation": {"size": [512, 512], "counts": "gnm22l?2N2M4M2M4M2dA_OV=d0\\B]OF1n=c0YB_OE1S>?UBCF1T>=SBDF2X>9PB5o=LmA7S>IkA9V>h0001O00001O001O001O00001OO1N2001O00001O0M4M2M3N0O100O010O10O10O4M2M4M2M301O010O01O01O010M2N3L3N2N3L3N3L3NmaP4"}, "image_id": 615, "id": 10880}, {"iscrowd": 0, "category_id": 1, "bbox": [253.0, 467.0, 49.0, 45.0], "area": 1524, "segmentation": {"size": [512, 512], "counts": "oon31b?1g@1W?2f@0X?2f@0X?3e@OY?=M3N2N2N2N2N2N2M3N2N2O11O001O001O00N2001O001O00001O001O001O001O0M3N3L3N3L3N3L3N3L3N2M4MSaX3"}, "image_id": 615, "id": 10881}, {"iscrowd": 0, "category_id": 1, "bbox": [421.0, 477.0, 51.0, 35.0], "area": 1012, "segmentation": {"size": [512, 512], "counts": "eob6270\\?3b@N]?4a@M^?4a@M]?_?M1O001O1O001O1O1O001O1O1O001OR`c0"}, "image_id": 615, "id": 10882}, {"iscrowd": 0, "category_id": 1, "bbox": [376.0, 482.0, 36.0, 30.0], "area": 703, "segmentation": {"size": [512, 512], "counts": 
"m_l53m?00000000000001O00G9H8L41O00H800000000001O000000000000001O000000000YOo@:^?Gm`a1"}, "image_id": 615, "id": 10883}, {"iscrowd": 0, "category_id": 1, "bbox": [9.0, 486.0, 32.0, 26.0], "area": 534, "segmentation": {"size": [512, 512], "counts": "jo41m?3L3N3L3M3N3L3N30O010O001O00001O001O001O00001O001N1N3M2N2M4M2NbP[7"}, "image_id": 615, "id": 10884}, {"iscrowd": 0, "category_id": 1, "bbox": [67.0, 487.0, 51.0, 25.0], "area": 670, "segmentation": {"size": [512, 512], "counts": "doQ13k?2N3L3N2N3M201O01O010O01O01O010O01O01N1M3O2O010O00010O010O00010O010O00010O010O00001O00001O001O000N3L[`T6"}, "image_id": 615, "id": 10885}, {"iscrowd": 0, "category_id": 1, "bbox": [311.0, 496.0, 44.0, 16.0], "area": 520, "segmentation": {"size": [512, 512], "counts": "hok47h?2O0O1O100O10000001O0000001O00001O0000M3L400001O000000001O00000000001O00000000001G_@1h?00NWP^2"}, "image_id": 615, "id": 10886}, {"iscrowd": 0, "category_id": 1, "bbox": [119.0, 508.0, 12.0, 4.0], "area": 31, "segmentation": {"size": [512, 512], "counts": "nok12l?20000001O001O001O0000QPn5"}, "image_id": 615, "id": 10887}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 140.0, 80.0, 80.0], "area": 2974, "segmentation": {"size": [512, 512], "counts": "[5\\1_>5L4K5K6K4K5M310O000000010O000000010O00000010O000UO_BWOa=e0dBZO]=`0hB]O[=>jB]O[=?iB]O[=>kB\\OZ=?kB]OZ=`0T100000001O0000000010O00000001O04L0001O0001O0001O01O0001O0001O01O0001O0001Of@@T?g0O0001O000N2L5J5K5JZkg6"}, "image_id": 616, "id": 10888}, {"iscrowd": 0, "category_id": 1, "bbox": [341.0, 243.0, 118.0, 119.0], "area": 7135, "segmentation": {"size": [512, 512], "counts": "ciZ51n?2N2M3N1O2N2M3N2N2N1O2M3N2N2N2M2O2N2N2N2M2O2N2N2N2M3N1O2N2M3N2N1O2N2M3N2N2N1O2M3N2N2N2M2O2N2N2N2M2O2N2N2N2000O10O10000000O010000000N2M2O2N2N2M3N2N1O2N2M3N2N1O2M3N2N2N2N1N3N2N2N2N1N3N2N2N2M3N1O2N2N2M3N2N1O2N2M3N2N1O2M3N2N2N2N1N3N2N2N2MjVj0"}, "image_id": 616, "id": 10889}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 283.0, 86.0, 53.0], "area": 3005, "segmentation": {"size": [512, 512], "counts": "k8T1l>1O01O0001O0001O0001O01O0001O0001O0001O0001O0001O01O0001O0001O0001O01O0001O0001O0001O01O0001O0001O0001O0001O0001O01O0001O0001O0001O01O0001O0001O0001O01O00000K5L5J5L4K5K5Lgfd6"}, "image_id": 616, "id": 10890}, {"iscrowd": 0, "category_id": 1, "bbox": [431.0, 302.0, 34.0, 39.0], "area": 435, "segmentation": {"size": [512, 512], "counts": "^jg62l?2O2N2N2M3N1O2N0O10O1000O10O1000O10O10O1000O10O1001N3N1O2N2M3N2N1O^Vg0"}, "image_id": 616, "id": 10891}, {"iscrowd": 0, "category_id": 1, "bbox": [9.0, 332.0, 70.0, 162.0], "area": 6299, "segmentation": {"size": [512, 512], "counts": "Qo45f?5K5K5K5K6K4K5K5010O[OdNaBa1Z=eNaB_1[=eN`B`1\\=e0J5L4L4K5L5K4K5L4K6K4L4K5L4L5J5L4K5N3O000000010OM3L4K5L5J5K5L4K5L5J5L4K5L4K6K4K5K5L4K6K4K5L4K5L5J5L4K5L4K6ZOm@8`?L4KZUh6"}, "image_id": 616, "id": 10892}, {"iscrowd": 0, "category_id": 1, "bbox": [455.0, 335.0, 56.0, 54.0], "area": 1473, "segmentation": {"size": [512, 512], "counts": "UkS72m?2N2N2M3N1O2N2N2M3N2N1O2M3N2N2N1N3N2O1000O1000O1000O1000O100000O10O100000N2N1N3N2N2N2M3N1O2N2N2M3N1O2N2M3N2N1Oe4"}, "image_id": 616, "id": 10893}, {"iscrowd": 0, "category_id": 1, "bbox": [278.0, 436.0, 94.0, 76.0], "area": 3979, "segmentation": {"size": [512, 512], "counts": "b_[41n?2N2M3N1O2N2N2M3N2N2N1O2M3N2N2N1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1O1N2O100001O1O1O1N2N2N1O2N2N2M3N2N002N2\\NeA_1`>N2N2O000N2O1O2N2N2KTASOn>j06N2N1O2N2N2M3N2N1O2N2M3N2N]aU2"}, "image_id": 616, "id": 10894}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 453.0, 13.0, 42.0], "area": 341, 
"segmentation": {"size": [512, 512], "counts": "U>Y1g>000010L3K5L4K5L5K4K5L4KnQi7"}, "image_id": 616, "id": 10895}, {"iscrowd": 0, "category_id": 1, "bbox": [282.0, 0.0, 33.0, 21.0], "area": 347, "segmentation": {"size": [512, 512], "counts": "RP]42m?2N2O001O1O001O1O001O1O001O1O001000O01000O01000O010O1N1N3N2M2O2Mg_R3"}, "image_id": 618, "id": 10896}, {"iscrowd": 0, "category_id": 1, "bbox": [112.0, 30.0, 99.0, 92.0], "area": 3193, "segmentation": {"size": [512, 512], "counts": "VRh12m?2N2M3N2]OH[A:c>H[A:c>H[A:c>H[A:c>H[A:c>H[A:c>H[A9d>c0N2N2N2N000000O1000002N2N2N2N2N2N2000000000O1000000000O100000000000000000000000000000000000000000O1000000000000000O10000000000000000000000N2N2N2N2N2N2N2N2N2N2N2N1O2N2N2N2N2Nj\\f4"}, "image_id": 618, "id": 10897}, {"iscrowd": 0, "category_id": 1, "bbox": [463.0, 36.0, 26.0, 27.0], "area": 329, "segmentation": {"size": [512, 512], "counts": "eaW71n?2N2N2M3N2N1O2N2M3N2N000O0101O2N2N2M3N1O2N2M3N2N1Oe^;"}, "image_id": 618, "id": 10898}, {"iscrowd": 0, "category_id": 1, "bbox": [290.0, 83.0, 27.0, 30.0], "area": 410, "segmentation": {"size": [512, 512], "counts": "XSa42l?3N1N3N1N3M3N1N3N1N3N2M2N11N2O2M3M2O2M2O2M3M2O2M2N3NW]Q3"}, "image_id": 618, "id": 10899}, {"iscrowd": 0, "category_id": 1, "bbox": [360.0, 125.0, 42.0, 9.0], "area": 378, "segmentation": {"size": [512, 512], "counts": "mSd59g?000000000000000000000000000000000000000000000000000000000000000000000000000000000Slf1"}, "image_id": 618, "id": 10900}, {"iscrowd": 0, "category_id": 1, "bbox": [477.0, 128.0, 21.0, 25.0], "area": 276, "segmentation": {"size": [512, 512], "counts": "Ud^72m?3N2M3N2M2O2M3N2M3N11N2M3N1N3N2M3N2M3M3N]k6"}, "image_id": 618, "id": 10901}, {"iscrowd": 0, "category_id": 1, "bbox": [494.0, 164.0, 18.0, 43.0], "area": 394, "segmentation": {"size": [512, 512], "counts": "YUg72m?3N2N2M3N2M3N2M4M2N2M3N2N2000001O0bJ"}, "image_id": 618, "id": 10902}, {"iscrowd": 0, "category_id": 1, "bbox": [315.0, 243.0, 55.0, 32.0], "area": 681, "segmentation": {"size": [512, 512], "counts": "mgm42l?2N2N3M2N3O01O010O01O01O010O010O01O01O010O01O01O010O010O01O01O010O01O01O010O010O00010O010O01O01O010O0N3L3N2MlgV2"}, "image_id": 618, "id": 10903}, {"iscrowd": 0, "category_id": 1, "bbox": [197.0, 330.0, 42.0, 32.0], "area": 726, "segmentation": {"size": [512, 512], "counts": "jjR32k?3N3M2M3N3L3O20O00010O010O00010O010O00010O010O00010O010O00010O010O00010L3N3L3N2M4MZUX4"}, "image_id": 618, "id": 10904}, {"iscrowd": 0, "category_id": 1, "bbox": [145.0, 340.0, 49.0, 41.0], "area": 1149, "segmentation": {"size": [512, 512], "counts": "[kX22k?3N2N3L3N3M2N3L3N2N3O0010O010O010O00010O010O01O01O010O010O01O01O010O01O010O01M2N3L3N2N3L3N3M2N2MPen4"}, "image_id": 618, "id": 10905}, {"iscrowd": 0, "category_id": 1, "bbox": [228.0, 364.0, 43.0, 50.0], "area": 1168, "segmentation": {"size": [512, 512], "counts": "a\\b32l?3L3N2N3L3N3M2M4M21O010ON3L3N2N3L3N3L3O2O01O010O010M2N2N3L3N3L3N2N3L3N3L3N3M2M3N3L3N]Th3"}, "image_id": 618, "id": 10906}, {"iscrowd": 0, "category_id": 1, "bbox": [123.0, 382.0, 67.0, 60.0], "area": 2057, "segmentation": {"size": [512, 512], "counts": "[mm12l?2M3N3L3N3L5L2N3L31O010O010O00N3BUOcAm0[>UOcAn0Z>UOcAm0Z>>N3M201O00010O010O01O0M3N3L3N3L3N2M4M2N3L3N2O20O010O010O0010O0010O0010O010O0O1M4M2M4M2M3N3L3NccP5"}, "image_id": 618, "id": 10907}, {"iscrowd": 0, "category_id": 1, "bbox": [290.0, 385.0, 47.0, 43.0], "area": 1213, "segmentation": {"size": [512, 512], "counts": "j\\a43k?3M2N3L3N3M2N2M4M2N3M2O2O01O010O01O010O01O010O01O010O01O010O01O010O01O0N3M2N2M4M2N3L3N3M2M3NcSg2"}, "image_id": 618, "id": 10908}, 
{"iscrowd": 0, "category_id": 1, "bbox": [337.0, 403.0, 53.0, 42.0], "area": 1235, "segmentation": {"size": [512, 512], "counts": "XmX53k?2M4M2N3M2M4M2N3M2O1010O01O010O010O01O010O01O010O010O01O010O01O010O010O01O010O01O01O0N3M2M4M2N3M2M3N3M2Nmbl1"}, "image_id": 618, "id": 10909}, {"iscrowd": 0, "category_id": 1, "bbox": [259.0, 406.0, 28.0, 29.0], "area": 423, "segmentation": {"size": [512, 512], "counts": "WmQ41n?2M2N3N2M2O2M3N1N3M3N1100O10O10O10ON3N2M2O2M3M2O2M3N1NQS`3"}, "image_id": 618, "id": 10910}, {"iscrowd": 0, "category_id": 1, "bbox": [394.0, 414.0, 45.0, 35.0], "area": 816, "segmentation": {"size": [512, 512], "counts": "^]U63k?2N3L3N3M2M3N30O010O010O0010O0010O010O010O0010O0010O010O010O00010O010O010O0N3M2N2M4M2N3McRT1"}, "image_id": 618, "id": 10911}, {"iscrowd": 0, "category_id": 1, "bbox": [324.0, 432.0, 26.0, 25.0], "area": 401, "segmentation": {"size": [512, 512], "counts": "o]R53k?3M2M3N3M2M4N101O01O010O010O00010O001L3N2N3L3N3M2M\\b`2"}, "image_id": 618, "id": 10912}, {"iscrowd": 0, "category_id": 1, "bbox": [205.0, 441.0, 82.0, 54.0], "area": 2247, "segmentation": {"size": [512, 512], "counts": "`nV32k?3N2N3L3N3M2M4M2O1010O010O0010O0010O010O0010O0010O010O0010O0010O0010O010O010O0N2N3M2M4M2N2N3N110O0010O0010O010O0010nN^Ad0b>YOaAg0`>VObAk0]>SOfAl0g>10O0010O010O0010O001L3N3M2M3N3M2M4M2N3M]Q`3"}, "image_id": 618, "id": 10913}, {"iscrowd": 0, "category_id": 1, "bbox": [105.0, 443.0, 53.0, 49.0], "area": 1608, "segmentation": {"size": [512, 512], "counts": "nnd11\\?1SA1k>1SA2j>1SA1k>1SA2j>1SA1l>0RA3m>`010O010O010N1N3M2N3M2OO3M210O010O0100O01O0O20O010O010O010O010O010O01M2N3M2N3M2N3Gh@FZ?8h@F[?78N3M2Nfa`5"}, "image_id": 618, "id": 10914}, {"iscrowd": 0, "category_id": 1, "bbox": [416.0, 454.0, 62.0, 38.0], "area": 1605, "segmentation": {"size": [512, 512], "counts": "o^`62i?5K5K5K6L300001O01O000001O01K41O000001O01O000001O0001O0001O0001O0001O0001O0001O0001O0001O0001O0001O00TOPAh0T?000010O000M3K5K6J5Kba`0"}, "image_id": 618, "id": 10915}, {"iscrowd": 0, "category_id": 1, "bbox": [310.0, 487.0, 47.0, 25.0], "area": 748, "segmentation": {"size": [512, 512], "counts": "f_k41n?2N2Z@L]?6a@K^?7`@J_?1n?2N2O1O1O1O1O1[@H`?9_@H`?=O1O1O1O1O1O1O2N1OO1O1O100O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O1O100OQPk6"}, "image_id": 620, "id": 10923}, {"iscrowd": 0, "category_id": 1, "bbox": [113.0, 0.0, 25.0, 17.0], "area": 256, "segmentation": {"size": [512, 512], "counts": "Q`h11n?3N1O1O1O1O2N1O1O1O1O1O2N00O1O1O1O1O100O2N2N2N3Mloj5"}, "image_id": 620, "id": 10924}, {"iscrowd": 0, "category_id": 1, "bbox": [151.0, 0.0, 35.0, 18.0], "area": 379, "segmentation": {"size": [512, 512], "counts": "P`[21o?2N1O2N2N2N2N2N1O2N1O00O100O100O1O100O100O100O100O100O1O100O100O2O2MnoR5"}, "image_id": 620, "id": 10925}, {"iscrowd": 0, "category_id": 1, "bbox": [206.0, 0.0, 10.0, 6.0], "area": 33, "segmentation": {"size": [512, 512], "counts": "PPW31o?1O2N1O1OO1O1O1O1OQPd4"}, "image_id": 620, "id": 10926}, {"iscrowd": 0, "category_id": 1, "bbox": [216.0, 0.0, 44.0, 35.0], "area": 814, "segmentation": {"size": [512, 512], "counts": "]P\\32m?2KNY@4f?4N2N1O0`@D]?BSADSA:n>ETA9m>FUA8l>GVA7k>HWA6k>HWA6[?N3M2N\\__2"}, "image_id": 620, "id": 10928}, {"iscrowd": 0, "category_id": 1, "bbox": [256.0, 10.0, 64.0, 49.0], "area": 1542, "segmentation": {"size": [512, 512], "counts": "YQP41n?2O1N2N2N3M2N2N2N2N2O1N2N3M1O1O000000000JUOYAj0h>WOVAi0j>YOTAg0l>7O00000002O2OO1N2O1000N2N2O101O01O00000000000001N1N2O1N2N2N2N2N3M2N2N2N2O1N2N2Ndno2"}, "image_id": 620, "id": 10929}, {"iscrowd": 0, "category_id": 1, "bbox": [195.0, 16.0, 31.0, 30.0], 
"area": 472, "segmentation": {"size": [512, 512], "counts": "n`Q31n?2N2O1N2N2N2N2N3M2N2N2N2N2N2N0101N2N2N2N2N2N2N2N2N2N3M2N2O1NRo^4"}, "image_id": 620, "id": 10930}, {"iscrowd": 0, "category_id": 1, "bbox": [66.0, 17.0, 52.0, 51.0], "area": 1287, "segmentation": {"size": [512, 512], "counts": "^QQ11n?2N2N2N2N2O1N2N2N3M2N2N2N2N2O1N2N3M2N2N1O000001O0000000001O000002N2N2N2N3M2O1N2N2N2N2N2N3M2N2O1N2N2N2NknT6"}, "image_id": 620, "id": 10931}, {"iscrowd": 0, "category_id": 1, "bbox": [121.0, 27.0, 19.0, 19.0], "area": 179, "segmentation": {"size": [512, 512], "counts": "Ual11n?2N2N2O1N2N2N3M00000001O2N2N2N2N2O1NPoi5"}, "image_id": 620, "id": 10932}, {"iscrowd": 0, "category_id": 1, "bbox": [346.0, 41.0, 21.0, 17.0], "area": 173, "segmentation": {"size": [512, 512], "counts": "cQ]52m?2N3N2M100O00010O010O01O01O010O02N2O2M2Ob^X2"}, "image_id": 620, "id": 10933}, {"iscrowd": 0, "category_id": 1, "bbox": [128.0, 47.0, 33.0, 33.0], "area": 579, "segmentation": {"size": [512, 512], "counts": "nQP23l?2O1N2N2N2N2N3M2N2O1N2N2N3M001O01O001O2N2N2O1N2N2N3M2N2N2N2O1N3MS^_5"}, "image_id": 620, "id": 10934}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 53.0, 51.0, 52.0], "area": 1353, "segmentation": {"size": [512, 512], "counts": "]bo23l?2N2N2N2N2N2N2O1N2N3M2N2N2N2N2N2N2O1N3M2N2N2N2N1O0003M2N2N2N2N2N2O1N2N3M2N2N2N2N2N2N2O1N2N3M2N2N2N2NamV4"}, "image_id": 620, "id": 10935}, {"iscrowd": 0, "category_id": 1, "bbox": [481.0, 66.0, 31.0, 49.0], "area": 855, "segmentation": {"size": [512, 512], "counts": "Qc`72m?2O2M2N3M2O2M2N3N1N2N3DWO]Ah0c>ZO[Af0e>]OXAd0g>^OXAa0i>:000100O3M01O01O01O01O01O01O2mM"}, "image_id": 620, "id": 10936}, {"iscrowd": 0, "category_id": 1, "bbox": [101.0, 77.0, 54.0, 55.0], "area": 1394, "segmentation": {"size": [512, 512], "counts": "`cb12m?2N2N2O1N2N2N2N3M2N2N2N2N2O1N2N2N2N1O001IlNaAT1_>nN_AR1b>oN\\AQ1d>5000000000001O0001O2N2N3M2N2N2N2N2O1N2N2N3M2N2N2N2N2N2O1N2Nn\\b5"}, "image_id": 620, "id": 10937}, {"iscrowd": 0, "category_id": 1, "bbox": [148.0, 99.0, 52.0, 49.0], "area": 1272, "segmentation": {"size": [512, 512], "counts": "PTZ21n?2N2N2N3M2O1N2N2N2N2N2N1O002N2O1N2N3M2N2N1O00001O01O0000000000001O2O1N2N2N3M2N2N2N2N2N2O1N3M2N2N2N2N2NZlk4"}, "image_id": 620, "id": 10938}, {"iscrowd": 0, "category_id": 1, "bbox": [443.0, 100.0, 69.0, 85.0], "area": 2687, "segmentation": {"size": [512, 512], "counts": "hcm62m?3N1N3M2O2M3c@AT?a0j@Bm>NUAb0LBl>NVAi0h>:N1N3N1N3_AbN[>d1N1N3M2O2M2010O10O10O001M210O10OO2N1N3O010O010O10O01M2O2M2N3N1N3M3N1N1O01O01O0LSAWOl>i0WATOj>k0512M2O2M3M2O2M2N3N1N3M2OSK"}, "image_id": 620, "id": 10939}, {"iscrowd": 0, "category_id": 1, "bbox": [494.0, 123.0, 10.0, 11.0], "area": 56, "segmentation": {"size": [512, 512], "counts": "mSg72n?1N3M3N2ON3N1N3N1Nok3"}, "image_id": 620, "id": 10940}, {"iscrowd": 0, "category_id": 1, "bbox": [505.0, 133.0, 7.0, 15.0], "area": 59, "segmentation": {"size": [512, 512], "counts": "Ydl72n?2M2N3N1N3M2jK"}, "image_id": 620, "id": 10941}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 169.0, 49.0, 49.0], "area": 1345, "segmentation": {"size": [512, 512], "counts": "]5j0U?3N2M2UAQOc>Q1[APOc>Y1M3N2NO2N3N2O1M2O2M3M3N1N3N1N01O01O010O010O01O01O010O010O01O01O011N2O2M3N2M2N3N2M3N1N3NnYW7"}, "image_id": 620, "id": 10942}, {"iscrowd": 0, "category_id": 1, "bbox": [304.0, 192.0, 49.0, 54.0], "area": 1531, "segmentation": {"size": [512, 512], "counts": "oVh41o?1^OORA4k>OSA3k>NSA5j>NSA5j>NTA3k>ORA4k>OSA3k>c0M3N1N10O01O010O010O01O03M3N1N3N1N12M3M3N1N3N2M3N2M2O2M3M3N2M2O2M3N2M2O2M3M3N2MiX_2"}, "image_id": 620, "id": 10943}, {"iscrowd": 0, "category_id": 1, 
"bbox": [0.0, 223.0, 7.0, 17.0], "area": 68, "segmentation": {"size": [512, 512], "counts": "o6a0a?M2O2M3M3N2MgXl7"}, "image_id": 620, "id": 10944}, {"iscrowd": 0, "category_id": 1, "bbox": [258.0, 232.0, 54.0, 47.0], "area": 1379, "segmentation": {"size": [512, 512], "counts": "WXQ41n?2O2M3N2M2FEl@>Q?Em@=Q?El@=R?Em@=P?910O01O01O010O010O003N2M100O010O00010O010O1O2OO010O01OMZAmNh>R15M3M2O2M3N1N3N2M2N102M2O2M3M2OPhS3"}, "image_id": 620, "id": 10945}, {"iscrowd": 0, "category_id": 1, "bbox": [299.0, 247.0, 95.0, 68.0], "area": 2330, "segmentation": {"size": [512, 512], "counts": "]ie42m?2O2M3N1N3M3N1N2O0I_On@a0S?Ak@>U?70O01O01O010O01O01O010O01O010O01O01O010O01O01O0KYOSAg0l>\\OQAd0P?5010O01O01O010O01O01O010O010O00010O010O01O01O010O3N0O0010O003N1N100O010O00010O03N2M010O00102M3N1N3M3N1N3N2M3M2O2Megj1"}, "image_id": 620, "id": 10946}, {"iscrowd": 0, "category_id": 1, "bbox": [208.0, 261.0, 55.0, 49.0], "area": 1359, "segmentation": {"size": [512, 512], "counts": "lXX32n?1N3N1N3M3N1N3N2M2N3N1N3M3N1N3NO00010O010O00010O01O01O010O00010O01O01O010O010O2N3N2M2N3N1N3N2M2N3N2M2O2M2N3NSWl3"}, "image_id": 620, "id": 10947}, {"iscrowd": 0, "category_id": 1, "bbox": [144.0, 282.0, 56.0, 48.0], "area": 1314, "segmentation": {"size": [512, 512], "counts": "dYX21n?2O2M3M2O2M3N2M2N3N2M2O2M2N10O010O0010O0010OKTOWAm0h>UOWAj0j>501O010O01O01O010O01O102M3N2M2N3N2M2O1N1O01O0100O3N1N3M3N1N3Nafk4"}, "image_id": 620, "id": 10948}, {"iscrowd": 0, "category_id": 1, "bbox": [274.0, 284.0, 14.0, 14.0], "area": 105, "segmentation": {"size": [512, 512], "counts": "RYY42m?3N1N3M2OO0001O01O2O2M2N3Mof_3"}, "image_id": 620, "id": 10949}, {"iscrowd": 0, "category_id": 1, "bbox": [122.0, 287.0, 8.0, 10.0], "area": 48, "segmentation": {"size": [512, 512], "counts": "QYm12n?2M3N1N2OO4M3Llfn5"}, "image_id": 620, "id": 10950}, {"iscrowd": 0, "category_id": 1, "bbox": [344.0, 297.0, 83.0, 62.0], "area": 2791, "segmentation": {"size": [512, 512], "counts": "eY\\51n?3M3N2]@HY?;e@GY?;d@HY?b0N2M3N2M3N2M3N2M3N0O10O010O010O011N3N2M3N2N20000N2N2M30M3N1N3N2M3N2M3N1N1O010O010O010O010O010O010O010O010OD`AYOa>f0bAXO]>i0dAUO]>j0fATOY>m0iAPOX>o0<10O010O012M3N2M3M3N2M2O2M3N2M3N2M3NlUZ1"}, "image_id": 620, "id": 10951}, {"iscrowd": 0, "category_id": 1, "bbox": [200.0, 312.0, 22.0, 25.0], "area": 275, "segmentation": {"size": [512, 512], "counts": "nYT31o?2M3N1N3N2M3M3N1N3N2M03N1N3M3N2M3N1N3N2M3Nfe`4"}, "image_id": 620, "id": 10952}, {"iscrowd": 0, "category_id": 1, "bbox": [220.0, 312.0, 22.0, 25.0], "area": 269, "segmentation": {"size": [512, 512], "counts": "nY^32n?2M2O2M3M2O2M3N1N3M21O1M2O2M3M2O2M3N1N3M2OfeV4"}, "image_id": 620, "id": 10953}, {"iscrowd": 0, "category_id": 1, "bbox": [106.0, 315.0, 49.0, 53.0], "area": 1348, "segmentation": {"size": [512, 512], "counts": "ZZe11o?2M2O2M3N2M2N3N2M2O2M3M2O2M3N2M2N3N2M2O2M3N1N1O10O012M3M2O2M3N1N3M3N2M2O2M3N1N3M3N1N3N2M3M2O2M3NoTb5"}, "image_id": 620, "id": 10954}, {"iscrowd": 0, "category_id": 1, "bbox": [182.0, 328.0, 11.0, 11.0], "area": 69, "segmentation": {"size": [512, 512], "counts": "[Zk23m?2M3M2OO011O1N3N2M2OaUo4"}, "image_id": 620, "id": 10955}, {"iscrowd": 0, "category_id": 1, "bbox": [251.0, 345.0, 82.0, 63.0], "area": 2733, "segmentation": {"size": [512, 512], "counts": "^km31o?2M3N2M3N2M3N2M3M3N1N3N2M3O1N2O1N2O1N2N2O10N2N2M3N2M010O010O010O010O0010O0010O010O010O010O010O010O01I6O2O001O003N2M3N2N1N0100O101N100O2O0O2O1QO`A;b>CaA;a>AcA^OcAa0P?M3N2M3N2M3M3NnSi2"}, "image_id": 620, "id": 10956}, {"iscrowd": 0, "category_id": 1, "bbox": [206.0, 353.0, 13.0, 13.0], "area": 104, "segmentation": {"size": 
[512, 512], "counts": "T[W32m?5L3M1N01000O0102N2M4M2NfTb4"}, "image_id": 620, "id": 10957}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 360.0, 75.0, 70.0], "area": 2772, "segmentation": {"size": [512, 512], "counts": "b[R63m?1N3N2\\@JY?9e@IY?a0M5L2M3N2M3N0O1O3N2M3N1N10O01011000O10O1M3NO2O2M3N2M20100000O1N2M3N1N100O010O010O010O01O010O01O010FaARO_>o0cAoN]>P1fAmNZ>T19O011N3N2M2O2M3M3N2M3N2M3N2M3N1N3NaSh0"}, "image_id": 620, "id": 10958}, {"iscrowd": 0, "category_id": 1, "bbox": [137.0, 365.0, 29.0, 27.0], "area": 347, "segmentation": {"size": [512, 512], "counts": "lkT23m?1N3N1N3M3N1N00010O01O01O010O00010O00010O00La@H`?7b@H]?8602M3N1N_d\\5"}, "image_id": 620, "id": 10959}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 366.0, 36.0, 57.0], "area": 1123, "segmentation": {"size": [512, 512], "counts": "c;R1m>2O2M3M3N1N3N2M21000O01M3N1N3M3N2M2O2M3M2O2Gm@@U??m@_OR?a07O010O2O1N3M3N1N3N2M2N`c]7"}, "image_id": 620, "id": 10960}, {"iscrowd": 0, "category_id": 1, "bbox": [204.0, 391.0, 23.0, 26.0], "area": 314, "segmentation": {"size": [512, 512], "counts": "]\\V31n?3N2M3N1N3N2M3N2M2N3N1OO3N2M3N2M2N3N2M3N1N3NWS^4"}, "image_id": 620, "id": 10961}, {"iscrowd": 0, "category_id": 1, "bbox": [243.0, 401.0, 61.0, 88.0], "area": 2334, "segmentation": {"size": [512, 512], "counts": "hli32>1o>2n@0o>3n@0P?2n@0o>3n@0P?2n@0o>c0N2M3N2M3N2hAcNh=`1UBcNi=_1UBcNh=`1UBcNi=_1UBcNi=l100000M3N2M3N2M3N2M3N2O10000000000000000M3N2M3N2M3N2M2O0O012M3N2M3N2000000M3N2M3N2M3N2M3Nm`W3"}, "image_id": 620, "id": 10962}, {"iscrowd": 0, "category_id": 1, "bbox": [156.0, 405.0, 56.0, 44.0], "area": 1367, "segmentation": {"size": [512, 512], "counts": "Z]^21n?3N3J5N2M3N2M01003L3N2M4M0O01000O010O010O01000O010O010O010O10O10O010O010O01000O010O0100O3N2N3L3N2M4M2M3N2N3L3Nbbe4"}, "image_id": 620, "id": 10963}, {"iscrowd": 0, "category_id": 1, "bbox": [456.0, 405.0, 10.0, 9.0], "area": 48, "segmentation": {"size": [512, 512], "counts": "i\\T71n?3N2M10O01O02O2M3NVcf0"}, "image_id": 620, "id": 10964}, {"iscrowd": 0, "category_id": 1, "bbox": [288.0, 407.0, 73.0, 65.0], "area": 2608, "segmentation": {"size": [512, 512], "counts": "f]`41n?3N2M3N2M3N1N3N2M3N2M3N2M3N2M300N2M10O010O010O0102M1O010O010O010O010O01L]AjNc>V140O2O2M3N2M100O010O010O0010O010O010O010[OfACZ>>gAAY>>jA_OV>b0lA[OU>d0nAZOQ>g0QBVOP>i0SBUOl=l0f0M3N2M3Cf@0\\?Nf@O]?Nf@0[?Og@N\\?OUS[2"}, "image_id": 620, "id": 10965}, {"iscrowd": 0, "category_id": 1, "bbox": [271.0, 411.0, 7.0, 8.0], "area": 32, "segmentation": {"size": [512, 512], "counts": "nlW41n?3M3NO011N4MQcd3"}, "image_id": 620, "id": 10966}, {"iscrowd": 0, "category_id": 1, "bbox": [447.0, 411.0, 9.0, 8.0], "area": 41, "segmentation": {"size": [512, 512], "counts": "nlo62m?3N1N010O0102M3NQck0"}, "image_id": 620, "id": 10967}, {"iscrowd": 0, "category_id": 1, "bbox": [312.0, 416.0, 8.0, 7.0], "area": 36, "segmentation": {"size": [512, 512], "counts": "S]l42m?3N0O010O10O3Nnbo2"}, "image_id": 620, "id": 10968}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 426.0, 71.0, 70.0], "area": 2396, "segmentation": {"size": [512, 512], "counts": "d^i62m?2O1N3M2N2O1N3M2N2N2OO0001O002O1^OXOiAj0U>YOhAj0U>XOiAj0V>WOiAj0U>XOiAk0T>XOiAj0U>XOiAj0V>a0N3M2N2N2OO0001O0002N3M2O1N2N3M2N2O1N3M2N2H]AQOe>m0\\AQOe>n0600000010O0000011N2N2N2N3N1N2N2N3M2O1N3M2N2O1NkQ3"}, "image_id": 620, "id": 10969}, {"iscrowd": 0, "category_id": 1, "bbox": [216.0, 431.0, 25.0, 20.0], "area": 293, "segmentation": {"size": [512, 512], "counts": "g]\\32m?4M2N3L3N0O1000O010O01000O010O01000O012M4M2N2M4MURW4"}, "image_id": 620, "id": 10970}, {"iscrowd": 0, "category_id": 1, 
"bbox": [0.0, 432.0, 22.0, 25.0], "area": 286, "segmentation": {"size": [512, 512], "counts": "f=1o?2M2O2M3N2M2N3N2M3N1N20N3N1N3M3N2M2O2M3N2MPbd7"}, "image_id": 620, "id": 10971}, {"iscrowd": 0, "category_id": 1, "bbox": [22.0, 456.0, 56.0, 53.0], "area": 1530, "segmentation": {"size": [512, 512], "counts": "k^;2m?2O2M3M2O2M3N1N3M3N1N2O0O00010O010O00102M3N1N3M3N1N3N2M2N3N2M1001N3M3N1N3N1N3M3N1N3N2M2N3N2M2O2M3M2O2M3N1N3M3Nb`h6"}, "image_id": 620, "id": 10972}, {"iscrowd": 0, "category_id": 1, "bbox": [195.0, 456.0, 38.0, 43.0], "area": 1154, "segmentation": {"size": [512, 512], "counts": "_nQ34k?7J5K6J5K6J4K01000000000O01000NlNXAT1h>20O100000O1000O100000O10O7J5K6J3MOEd@4\\?Lj@MW?3a0JoP[4"}, "image_id": 620, "id": 10973}, {"iscrowd": 0, "category_id": 1, "bbox": [325.0, 464.0, 73.0, 48.0], "area": 2072, "segmentation": {"size": [512, 512], "counts": "ioR53m?2M3N0O100O100O100O100O100O100O1CBXA>g>EVAFTA:k>IRA8n>JPA6o>>00O100O100O100O100O100O100O100O100O100O100O100O100O1O100O100O100O100002M3N2M3N2M3N2M3ZOn@:T?Co@:T?Dn@:]?N3N2M3N2MZ`h1"}, "image_id": 620, "id": 10974}, {"iscrowd": 0, "category_id": 1, "bbox": [490.0, 480.0, 22.0, 28.0], "area": 322, "segmentation": {"size": [512, 512], "counts": "]_e71o?1N2N2N3M2N2N00000000001`@C\\?b0N2O10000000000N2k@"}, "image_id": 620, "id": 10975}, {"iscrowd": 0, "category_id": 1, "bbox": [283.0, 493.0, 28.0, 19.0], "area": 416, "segmentation": {"size": [512, 512], "counts": "bo]43m?4L4K4M00000O1000000O1000000O1000000O1000000004L4L4L4LSPT3"}, "image_id": 620, "id": 10976}, {"iscrowd": 0, "category_id": 1, "bbox": [477.0, 505.0, 14.0, 7.0], "area": 56, "segmentation": {"size": [512, 512], "counts": "oo^71n?1O100O1O1O1O1001O1O1O2N1OQP:"}, "image_id": 620, "id": 10977}, {"iscrowd": 0, "category_id": 1, "bbox": [224.0, 508.0, 21.0, 4.0], "area": 52, "segmentation": {"size": [512, 512], "counts": "o_`31o?0000000O10000000000O100000000O100000000TPU4"}, "image_id": 620, "id": 10978}, {"iscrowd": 0, "category_id": 1, "bbox": [33.0, 509.0, 6.0, 3.0], "area": 10, "segmentation": {"size": [512, 512], "counts": "oo`01o?0O100O12NQP\\7"}, "image_id": 620, "id": 10979}, {"iscrowd": 0, "category_id": 1, "bbox": [302.0, 0.0, 210.0, 60.0], "area": 12327, "segmentation": {"size": [512, 512], "counts": "PPg4c1]>000000000000O100001O00000000000000001O0000001O00001O00001O001O001O001O1O001O000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"}, "image_id": 621, "id": 10980}, {"iscrowd": 0, "category_id": 1, "bbox": [126.0, 116.0, 242.0, 211.0], "area": 32578, "segmentation": {"size": [512, 512], "counts": "]Uo13l?4M3M3L3N3jNAeBa0Y=BdB`0Z=CbBa0[=BbB`0[=DbB>\\=E`B>^=E_B=_=F^B;`=I]B9a=J[B9c=T1N1N3N1O2N1N3_O\\MbCe2]FXA=e>EZA=c>FZAEYA>f>>01O001O001O001O001O00N2O1001O001O001O001O001O1O001O001O001O0010O010aNeAW1[>gNgAZ1`>0O010O010OlN^Aj0c>SO`Am0`>PObAP1h>0O010O01000O010O01M2N3@o@NR?0PANS?OPANR?0PANS?0o@MS?1_oh3"}, "image_id": 623, "id": 10985}, {"iscrowd": 0, "category_id": 1, "bbox": [277.0, 0.0, 25.0, 10.0], "area": 136, "segmentation": {"size": [512, 512], "counts": "P`Z41o?001O001O001O001O001O1O001O001O001O0000N2N2N2O1NRPY3"}, "image_id": 623, "id": 10986}, {"iscrowd": 0, "category_id": 1, "bbox": [327.0, 0.0, 9.0, 9.0], "area": 49, "segmentation": {"size": [512, 512], "counts": 
"T`S52k?3O2O010O010ON3Moog2"}, "image_id": 623, "id": 10987}, {"iscrowd": 0, "category_id": 1, "bbox": [344.0, 0.0, 73.0, 21.0], "area": 824, "segmentation": {"size": [512, 512], "counts": "PP\\52n?001O00001O0000001O0000001O0000001O0000001O00001O0000001O0000001O0000001O0000001O00002N00001O0000001O0000001O0000001O00001O0000001O00O1L4M3L4L4LT`_1"}, "image_id": 623, "id": 10988}, {"iscrowd": 0, "category_id": 1, "bbox": [415.0, 0.0, 55.0, 57.0], "area": 1945, "segmentation": {"size": [512, 512], "counts": "Ya_62l?3M2M4M2N2N3L3N3M2N3L3N3M2N1N3N2N3M2O2O001O001O00001O001O001O001O00001OO1N2N2M3N2N2C^AZOd>c0_AZOd>d0_AYOc>e0_AYOc>d0>N2N2M4M2N3L3N3Mood0"}, "image_id": 623, "id": 10989}, {"iscrowd": 0, "category_id": 1, "bbox": [463.0, 0.0, 49.0, 68.0], "area": 2222, "segmentation": {"size": [512, 512], "counts": "haW74j?2N2M4M2M4M2N2M4M2N3L3N3L3N2O20O010M2000N3M2M2OO11N4M2M3N3M2O2O001O00010O010O00010O01M2M4M2N2M4ML"}, "image_id": 623, "id": 10990}, {"iscrowd": 0, "category_id": 1, "bbox": [12.0, 1.0, 76.0, 72.0], "area": 2846, "segmentation": {"size": [512, 512], "counts": "eP61n?1N3n@MS>6]AJ12a>5\\AK22_>6]AJ13`>4]AL02a>5]AJ13_>R1N1N3M2O2N2lAYNm=g1RBZNo=l1O01000O010O01000O010O01000O01M3N1N3M2O2M3N1N3N101N2M2N3N1N3M30O01000O010O01000O010O01000O010O01N2N1N3M3N1N3M2O2M3M2O2M2Oknc6"}, "image_id": 623, "id": 10991}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 2.0, 8.0, 17.0], "area": 80, "segmentation": {"size": [512, 512], "counts": "2a0`?N2N2N2N2N2N2Nhok7"}, "image_id": 623, "id": 10992}, {"iscrowd": 0, "category_id": 1, "bbox": [287.0, 9.0, 9.0, 9.0], "area": 45, "segmentation": {"size": [512, 512], "counts": "^`_41m?2N3N110O010ON3Mfo[3"}, "image_id": 623, "id": 10993}, {"iscrowd": 0, "category_id": 1, "bbox": [312.0, 32.0, 76.0, 67.0], "area": 2520, "segmentation": {"size": [512, 512], "counts": "mQl42l?3M3M2O2M2N3M2N3N2N110O010O0100O0100O010O010O0100O0100IYOSAg0k>\\OUAd0h>^OXAb0g>@YA`0d>C\\AF^A;_>HaA8]>IcA7[>LeA4X>NhA2Y>NgA2Y>MgA3Y>NgA2Y>NgA1Y>OgA2Y>NgA2X>NgA3Z>j0O0100O010O0100O0100O010O0100O010OTOgA1Z>LhA5X>HkA7U>HlA8U>EnA8T>FoA7T>FnA9S>FoA7T>FnA8T>FoA7n>N3N2M2Nbmm1"}, "image_id": 623, "id": 10994}, {"iscrowd": 0, "category_id": 1, "bbox": [170.0, 40.0, 86.0, 78.0], "area": 3190, "segmentation": {"size": [512, 512], "counts": "dRe21m?3M2N3N1N3^OD\\A>c>C[A`0b>CXAMLb0i>DYAa0e>AYAb0e>=N3M2N3N2M2O20O010O0100O0100O010M2N3O010O10O010O10O010O10O010O10O010O10OfNcAQ1^>lNeAT1Z>kNgAV1b>O10O01jNXAS1j>010O10O10O010O010O10O10O010O0N21M2N3M2O2M2N3M3N1N3M2N3N1N3M3N1NZmo3"}, "image_id": 623, "id": 10995}, {"iscrowd": 0, "category_id": 1, "bbox": [283.0, 77.0, 82.0, 77.0], "area": 3200, "segmentation": {"size": [512, 512], "counts": "cc]41m?2O2M2N3M2N3N2M2N3M2N3N1N3M20100N1N3M2N3N1N3M2N3O10O010O010O10O010O10O01hA]NR>b1mA_NS>h1000O010YNlA`1T>_NnAa1X>010O010O10O10O010O010O010O10M3M2N3N1N3M1O00021O010O010N1N3M2N3N2Al@NV?0m@MV?1k@MW?1l@LW?1l@MU?1R]Y2"}, "image_id": 623, "id": 10996}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 87.0, 20.0, 48.0], "area": 541, "segmentation": {"size": [512, 512], "counts": "g2`1`>O2O1N3M2N3N1N2N3N1Ko@XOS?g05M2\\Oh@=_?N3N1N2N3N1N]le7"}, "image_id": 623, "id": 10997}, {"iscrowd": 0, "category_id": 1, "bbox": [196.0, 99.0, 9.0, 9.0], "area": 44, "segmentation": {"size": [512, 512], "counts": "VSR32l?2O2O10O10O10M3Nj\\i4"}, "image_id": 623, "id": 10998}, {"iscrowd": 0, "category_id": 1, "bbox": [133.0, 105.0, 99.0, 76.0], "area": 3340, "segmentation": {"size": [512, 512], "counts": 
"PdR21n?2N1N3N2N2N1N3N2N2N2M2O2N2M3N20O01000O1M3N2N101000000O010000000O10O1N2M2O2ISOXAo0f>SOXAo0f>7N1N3N2N2N2N10100000O010000000O10O1000O1000O1000O1000O1000O1000O10O100bNdAW1\\>gNfAY1Y>fNiAY1`>N1O2N2M3N2N1N3000000M2O2YOk@`0W?^Ok@`0\\?M2O2N2N2M3N1OSk[4"}, "image_id": 623, "id": 10999}, {"iscrowd": 0, "category_id": 1, "bbox": [426.0, 140.0, 69.0, 65.0], "area": 2314, "segmentation": {"size": [512, 512], "counts": "\\Ue63k?2O2M2N3M3M2O2M2N3M2O2O010O10O01M2N3M2N3N2M2N3M210O010O010O10O10O010O001O1O010O0100O010O0100O010OlNcAe0^>XOdAi0\\>TOgAk0Y>TOhAm0X>POkAP1T>nNoAP1a>N3M2N3N1N3M2N3N2M2N3M2N3N1NaZ8"}, "image_id": 623, "id": 11000}, {"iscrowd": 0, "category_id": 1, "bbox": [254.0, 145.0, 84.0, 60.0], "area": 2663, "segmentation": {"size": [512, 512], "counts": "VUo31m?3N1N3M2N3N1N300O001M2N3N1N3M3N110O0100O0100O010O010N1N3O1O0100O010O0100VAnNd>R1ZAPOg>U10O010OO2N2M2N3M2O2O01000O010O010O01000O01M2O2M2N3M3N1N3M2N3N1N3UOm@e0X?100O010ON3N2M2N3M2O2M2N3Mijf2"}, "image_id": 623, "id": 11001}, {"iscrowd": 0, "category_id": 1, "bbox": [116.0, 149.0, 73.0, 88.0], "area": 3174, "segmentation": {"size": [512, 512], "counts": "dUj12m?3M2N2N2N2N2N2N2N2O1N2N2N2N2N2JROWAP1g>7M1O002N2N2N2N2N2N2N2N2000000000001O0001O00000000000000000000N2N2N201O000000O1O1N2N2N2eNlAg0V>WOlAg0V>WOlAg0V>WOlAg0V>WOlA;GH_>KlA;GH_>LkA:HH`>KjA;HHR?5PAIR?5PAIR?5>N2NbYQ5"}, "image_id": 623, "id": 11002}, {"iscrowd": 0, "category_id": 1, "bbox": [497.0, 173.0, 15.0, 42.0], "area": 335, "segmentation": {"size": [512, 512], "counts": "_fh72l?3L3N3L3N3L3N2N3L3N3L3N2M4M2cJ"}, "image_id": 623, "id": 11003}, {"iscrowd": 0, "category_id": 1, "bbox": [64.0, 186.0, 78.0, 79.0], "area": 2756, "segmentation": {"size": [512, 512], "counts": "WVP12m?3N1N2N3M2O2M2N2N3N1N2N3M01O01O1O2O2O0010O00010O00010O00O2O0O1O2N1O1O2N1O2N1O1O2M2N2O2M2N2O2O01O01O01O010M2N2N10O01O00011N3M2O1]OaABb>;`ACb><_ABc><`ABb>;`ACb>;aABa>=`AAc><_ACb>;d0M2O1N3M2Nmhh5"}, "image_id": 623, "id": 11004}, {"iscrowd": 0, "category_id": 1, "bbox": [212.0, 188.0, 8.0, 8.0], "area": 36, "segmentation": {"size": [512, 512], "counts": "nUZ33l?2N11000O01M3NQja4"}, "image_id": 623, "id": 11005}, {"iscrowd": 0, "category_id": 1, "bbox": [395.0, 188.0, 72.0, 68.0], "area": 2411, "segmentation": {"size": [512, 512], "counts": "PgU61m?2N3N2M2N3M2N3N1N3M2N3O1O010O010M2N3M2N3N2M2N3M2N3N1010O0100O010O0100O0100O010O01bNeAV1[>gNgAY1Y>eNjA[1^>O0100O010O010O0100O0jN^Ao0b>nN`AS1_>lNcAS1e>O2M3M2O2M2N3M2N3N1N3M3M2N3N1NnXf0"}, "image_id": 623, "id": 11006}, {"iscrowd": 0, "category_id": 1, "bbox": [232.0, 193.0, 78.0, 71.0], "area": 2790, "segmentation": {"size": [512, 512], "counts": "oVd32l?2N3M2N3M2O2M2N3M3M2N3N1N3M2N3M201O010O10O010O010O010O10O010cAhNP>Y1mAiNU>U1iAnNW>R1fAPO[>o0cATO\\>X110O10O010O010O010O10O010O10O010O0N3M2N010O00000000102M2N3M2N3KRATOQ?i04N3N1N3M2N3M2N3N1N3M_iT3"}, "image_id": 623, "id": 11007}, {"iscrowd": 0, "category_id": 1, "bbox": [16.0, 211.0, 77.0, 86.0], "area": 3059, "segmentation": {"size": [512, 512], "counts": "\\W82m?2O2M2N2N3N1N3M2O1N3M2N2O2M1O1O1KQOYAQ1d>QOZAQ1e>6N3M2N3N1N2N3M2O1N30O00010O00O2M2N3N1N2N30O0010O00010O00N3lMVBm1l=QNWBm1o=N2N3`NiAS1X>lNiAR1Y>lNiAR1Z>kNiAR1Y>lNiAS1d>M2N2N3N1N2N00010O3M2N2O2M2N2O2M2N2N3N1N_Wa6"}, "image_id": 623, "id": 11008}, {"iscrowd": 0, "category_id": 1, "bbox": [466.0, 221.0, 46.0, 52.0], "area": 1607, "segmentation": {"size": [512, 512], "counts": "kWY73k?3M2M4M2M3N3M2M4M2N2M4O001O010O01O010O01O010O01ON3M2N3L300010O010O010O00010O010O01O01O010OlH"}, "image_id": 623, "id": 11009}, {"iscrowd": 0, "category_id": 1, "bbox": [188.0, 238.0, 
75.0, 74.0], "area": 2795, "segmentation": {"size": [512, 512], "counts": "mWn21o?2M2N2N3N1N2N3M2O1N3M2N2O2M2N2N3N1N210O01O01O0[AkN\\>W1bAkN\\>X1aAkN\\>^1O1N3M2N2O2M2N2N3N10010O00O2M2N2N3N0O1O002O2M2N2N3N1N3M2O101M2O1N3M2N3N1N2N3M2O2M2N2N3N1N2N3M2O2MRWl3"}, "image_id": 623, "id": 11010}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 242.0, 42.0, 99.0], "area": 2151, "segmentation": {"size": [512, 512], "counts": "d7V2i=2N2O2O010O01O01O01jM\\Bm1d=RN]Bo1b=oMaBP2h=1O01O01O01O0XNSB]1m=`NUB`1k=_NVB`1k=^NXB_1k=^NWB`1W>N1N2N3M2O1N3M2N3N1N2]Om@6V?Gm@7T?Gn@7T?Hn@5U?Hm@6`?O2M2NleZ7"}, "image_id": 623, "id": 11011}, {"iscrowd": 0, "category_id": 1, "bbox": [367.0, 242.0, 71.0, 66.0], "area": 2395, "segmentation": {"size": [512, 512], "counts": "chg53k?2N3N1N3M2N3N2M2N3M2O2O010O010O1M2O2M2N3M2N3N2M2N3O0010O010O10O10O010O010O010O10OcNcAX1]>eNeA[1a>0O0100O010O0100O0100O0jN_An0a>oNaAQ1_>mNdAR1e>N3M3N1N3M2N3M2O2M2N3M3M2O2MYgT1"}, "image_id": 623, "id": 11012}, {"iscrowd": 0, "category_id": 1, "bbox": [444.0, 265.0, 66.0, 44.0], "area": 1656, "segmentation": {"size": [512, 512], "counts": "lXn63k?2N3M2M3N3M2M4M2O20O00010O010O010O00010O010O010O0001M2M4N101O01O010O010O01O01O010O010O01O010O01O010O01OO2M2M4M2N3L30010M2N3L3N3M2MZg0"}, "image_id": 623, "id": 11013}, {"iscrowd": 0, "category_id": 1, "bbox": [270.0, 267.0, 9.0, 10.0], "area": 47, "segmentation": {"size": [512, 512], "counts": "^XW42m?2N3M2000N2N2N2NaWd3"}, "image_id": 623, "id": 11014}, {"iscrowd": 0, "category_id": 1, "bbox": [136.0, 270.0, 69.0, 79.0], "area": 2659, "segmentation": {"size": [512, 512], "counts": "mXT23m?1N2N3EHi@:U?Ii@9T?Ij@9U?:N3M200010O01N1N2O2M2N2O20O00010O01O01O01N1O1N3M2bA`NY>f1N1N2N3M2O2M2N2O2O01O000O2M2N3M2O1N3M2UOiAJY>5hAJZ>3hAKZ>3iAJX>5jAJU>7lAGU>8nAER>;PBCQ>=RBAQ>VOgAg0\\>VOfAi0\\>UOfAi0[>VOgAg0\\>VOgAh0[>VOfAi0j>M3N2N1N3N2M2O2N2M3N1OdUc1"}, "image_id": 623, "id": 11016}, {"iscrowd": 0, "category_id": 1, "bbox": [83.0, 294.0, 88.0, 86.0], "area": 3242, "segmentation": {"size": [512, 512], "counts": "giY12n?1N2N3N1Y@Hc?=M2N2O2M2N3M2O1N3M2O1N3M2N3N1N2N3M2O2M20001O01\\NgA^1Y>aNhA`12_NU>`1jAbNU>_1hAcNX>d10O00O2M2N3N10010O0001O0N3M2O1N3M2N2OO00000100O3M2O1N3M2N3N1N210O00010N1N3M2O0O001O01O01O2N3N1N2N3N1N2N3M2O2M2NmTZ5"}, "image_id": 623, "id": 11017}, {"iscrowd": 0, "category_id": 1, "bbox": [425.0, 301.0, 71.0, 43.0], "area": 1670, "segmentation": {"size": [512, 512], "counts": "nid62l?3L3N3L3N2N3N1010O01O01O010O01O01O010O010O01O01O010O01O000M4M2N3L301O01O010O010O01O01O010O01O01O010O010O01O01O010M2N2N3L3N3M210N1N2N3L3N3L3NVf7"}, "image_id": 623, "id": 11018}, {"iscrowd": 0, "category_id": 1, "bbox": [409.0, 333.0, 73.0, 43.0], "area": 1661, "segmentation": {"size": [512, 512], "counts": "oj\\61m?2N3L3N2N3L3N30O01O01O010O010O01O01O010O010O010O00010O010O01L3N2N3L3N30O010O00010O010O010O00010O010O010O00010O010O001L3N2N3L3N3O001O0N2M4M2N3M2MVe>"}, "image_id": 623, "id": 11019}, {"iscrowd": 0, "category_id": 1, "bbox": [297.0, 336.0, 65.0, 83.0], "area": 2635, "segmentation": {"size": [512, 512], "counts": "[kd41n?2N2N3N1N2N3M2O2M2N2O2M2LWOn@k0Q?4M2N3M2O1N3M2N2O2M2N3N1N2N3M2O1N3M2N3N1N201O01O01O010OO1N3M2O2M2eNRBa0P>]ORBa0Q>]OQBa0P>]ORBa0P>]ORBa0Q>\\ORBa0P>^OQBa0Q>\\OQBb0Q>\\ORBa0P>]ORBb0P>\\ORBa0h>N2N3N1N2N3M2O2M`cZ2"}, "image_id": 623, "id": 11020}, {"iscrowd": 0, "category_id": 1, "bbox": [35.0, 339.0, 84.0, 78.0], "area": 3082, "segmentation": {"size": [512, 512], "counts": 
"Pka01n?2O1N3M2O1N3M2N2O2M2N2O2M2N002O1N2N3N110O010O000]AmNY>S1eAoNZ>Q1dAQO[>Q1bARO[>P1dAQOZ>Z1010O03M2O1N3M210O01O01O010O010O0N00010O01O2N3N1N3M2O2M2N2O2M2N00010O002O1N2N3M2O2M2N2O2M2N3M2O2M2N2O2M2N3N1NlST6"}, "image_id": 623, "id": 11021}, {"iscrowd": 0, "category_id": 1, "bbox": [385.0, 359.0, 9.0, 9.0], "area": 45, "segmentation": {"size": [512, 512], "counts": "ZkP62m?1O2N2O1000N2N1Nhdj1"}, "image_id": 623, "id": 11022}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 374.0, 59.0, 83.0], "area": 2744, "segmentation": {"size": [512, 512], "counts": "PmNfAU1X>mNgAT1W>oNfAT1W>nNgAT1X>;N3M2N3N1N2N3M2O1N3O00010O0001O0N2N3N1N2nNSBNP>0QBNQ>0RBNo=0SBNm=2UBLl=4VBIl=5VBJl=3VBKl=3WBJk=3dA@b0^O[Aa0f>\\O\\Ae0c>XOaAg0l>1O01O010O01O01O010O0O2L3N2N3L3N3L3N2N^Sg0"}, "image_id": 623, "id": 11024}, {"iscrowd": 0, "category_id": 1, "bbox": [249.0, 376.0, 74.0, 94.0], "area": 3313, "segmentation": {"size": [512, 512], "counts": "dll32n?1N3M2N3N1N3M2N2O2M2N3M2O1N3M0010OLQOXAo0h>410WAoNb>Q1^AQO`>Q1_APO_>[1N1N2N3M2O1N3M2N2O2M2O110O00010O0010O00N3N11O01O01O01O01O000O2M2N2N3N1^NoAR1S>lNoAR1T>kNoAR1S>mNnAR1S>lNoAR1T>kNoAg0EAl>=VAAl>JSA?3Fo>8TAEn>:SADP?9=N2N3N1N2NWRn2"}, "image_id": 623, "id": 11025}, {"iscrowd": 0, "category_id": 1, "bbox": [204.0, 412.0, 71.0, 72.0], "area": 2580, "segmentation": {"size": [512, 512], "counts": "i]V32n?2M2N2N3N1N2N3M2O2M2N2GXOXAj0e>YOXAi0g>XOWAj0g>:M2N3N1N2N3M2O1N3M2O2M2N2N3N1N2OO2O2M2N2N3N1N2N3N1N3M2N2O2M2N2N3N1N2N3N1N3M2N010O0000010O1O3N1N3M2N2O2M2N2N3N1NmQf3"}, "image_id": 623, "id": 11026}, {"iscrowd": 0, "category_id": 1, "bbox": [358.0, 418.0, 83.0, 61.0], "area": 2803, "segmentation": {"size": [512, 512], "counts": "k]c51n?2M3M2N3M2N3M2O2M2N3M2N3M3O0010O010O010O010O010O01000O010O010O010O010OSAROg>T1N3N2M2N3M2N30O010O010O010O0100O010O0100N1N3M2O2M2N3M2N3M201ON3M2000N3M3M2N3M2N3N1N3M2N3H_@Mc?27M`RS1"}, "image_id": 623, "id": 11027}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 440.0, 24.0, 40.0], "area": 496, "segmentation": {"size": [512, 512], "counts": "h=X1i>N3N1N2N3Ko@VOS?h04N010O0000100O3M2O1N3M2N3N1N2N3N1Neac7"}, "image_id": 623, "id": 11028}, {"iscrowd": 0, "category_id": 1, "bbox": [153.0, 444.0, 75.0, 68.0], "area": 2588, "segmentation": {"size": [512, 512], "counts": "jn\\22m?3N1N2N3M2O1N3M2N2O2M2N2N3NO01O0JTOYAl0g>VOXAi0i>7O1O2N2O2M2N2N3N1N2N3M2O1N3O00010O0001N1O1O2N1O1O2N1O1O2N1O1O2N1O1O2N1O1O2N1O1O2N1O1O2\\Om@7T?Hn@6S?Ho@7S?Fo@9R?EQA9P?FQA9\\?O2N1O1O2N1O1OQ`]4"}, "image_id": 623, "id": 11029}, {"iscrowd": 0, "category_id": 1, "bbox": [349.0, 470.0, 70.0, 42.0], "area": 1988, "segmentation": {"size": [512, 512], "counts": "do^51n?1N3M2N3M2O2M3M2i@^On>e0o@]Oo>e0o@]OP?j01O001O001O00O1N2N2N2O1N2N2N21O001O001O001O001O1O001O001O001O001O1O00N2N2N2001O001O001O1O001O001O001Fo@CQ?;RADo>9SAEP?8SAEo>9>N1N3M2NlP^1"}, "image_id": 623, "id": 11030}, {"iscrowd": 0, "category_id": 1, "bbox": [105.0, 482.0, 58.0, 30.0], "area": 858, "segmentation": {"size": [512, 512], "counts": "ood11n?100O1O1O100O1O1O100O1O1O100O1O10000KEf@:Y?Hg@7X?Ki@3V?0i@OW?2i@MV?5j@JU?9h@HX?`0O1O1O100O1O1O100O11O1O1O2N1O2N1O1O2N1O1O2N1O1O2N1O1O2N1O2N1O1OQP^5"}, "image_id": 623, "id": 11031}, {"iscrowd": 0, "category_id": 1, "bbox": [281.0, 504.0, 17.0, 8.0], "area": 74, "segmentation": {"size": [512, 512], "counts": "oo\\41o?0O1O100O1O1O100O1O1001O2N1O1O2NQ`Z3"}, "image_id": 623, "id": 11032}, {"iscrowd": 0, "category_id": 1, "bbox": [243.0, 51.0, 42.0, 63.0], "area": 1311, "segmentation": {"size": [512, 512], "counts": 
"Wci33k?2N3M2M4M2N2N3M2M4M2N3M2M4M2N3M2M3N3M2N11N2N000O10002N3L3N2N3M2M4M2N3M2M4M2N2Gd@L_?0d@N^?0a^a3"}, "image_id": 624, "id": 11033}, {"iscrowd": 0, "category_id": 1, "bbox": [349.0, 68.0, 71.0, 66.0], "area": 2710, "segmentation": {"size": [512, 512], "counts": "bc^51m?3L3N2M4M2M4M2M3N3L3N3L3N2M4M2M4M21O01O01O010O01O01O010O010O00010O010O00010O010O0010O001O0N2M4M0O10O012M4M2M3N3L3N2M4M2M4M2M3M4M2M4M2M3N3L3Ngm]1"}, "image_id": 624, "id": 11034}, {"iscrowd": 0, "category_id": 1, "bbox": [272.0, 102.0, 55.0, 45.0], "area": 1356, "segmentation": {"size": [512, 512], "counts": "dSX44j?2M4M2M3N3M2l@^Oj>c0RA@o>?o@CQ?g0O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01ON3L3N3L3N2M4M2M4M2MZ\\l2"}, "image_id": 624, "id": 11035}, {"iscrowd": 0, "category_id": 1, "bbox": [245.0, 153.0, 24.0, 25.0], "area": 344, "segmentation": {"size": [512, 512], "counts": "Yej32l?3M2N3M2O2M2N3M2O2O010O010O01M2O2M2N3M2N3M2N3MR[i3"}, "image_id": 624, "id": 11036}, {"iscrowd": 0, "category_id": 1, "bbox": [244.0, 179.0, 60.0, 61.0], "area": 2126, "segmentation": {"size": [512, 512], "counts": "SWj32l?2N3L3N2N3L3N3M2N3L3N2N3M2M4M2N000O2O2N3L3010O01O01O010O010O01O010O01O010O010O01ON3M2N3L3N3M2N3L3N2N3L3N3M2N3L3N2N3M2MPjW3"}, "image_id": 624, "id": 11037}, {"iscrowd": 0, "category_id": 1, "bbox": [316.0, 205.0, 13.0, 15.0], "area": 115, "segmentation": {"size": [512, 512], "counts": "fVn43k?2N3M2M310O010O01M2M3N3MaYk2"}, "image_id": 624, "id": 11038}, {"iscrowd": 0, "category_id": 1, "bbox": [107.0, 389.0, 85.0, 58.0], "area": 2353, "segmentation": {"size": [512, 512], "counts": "nle12l?2M4M2N3M2M3N3M2N3L3N3O00010O010O010O010O00010O010O010O00010O010O010O010O00010O010O010O00010O010O010O010O00010O010O010O00010O010O010O010O00010O01O0N3M2N2M4M2N3L3N3M2N3Lnbo4"}, "image_id": 624, "id": 11039}, {"iscrowd": 0, "category_id": 1, "bbox": [268.0, 461.0, 99.0, 51.0], "area": 2383, "segmentation": {"size": [512, 512], "counts": "o_V41o?0O100O100O1O100O100O100O1O100O100O100O1O100O100O100O100O1O100O100O100O1O100O100O1AYOeAg0[>ZOcAg0\\>\\ObAd0]>_O`Ab0`>@]Aa0b>A]A?c>?O100O100O1O100O100O100O1O100O1001N2O2M3N2M2O2M3M100O01O010O01O012M3N1N3N2M3N1N3M010O010O012M3M3N1N3NfPX2"}, "image_id": 624, "id": 11040}, {"iscrowd": 0, "category_id": 1, "bbox": [66.0, 502.0, 27.0, 10.0], "area": 146, "segmentation": {"size": [512, 512], "counts": "n_Q12l?2N2M3O1001O001O00001O001O001O00001O001O001O00001O00QPa6"}, "image_id": 624, "id": 11041}, {"iscrowd": 0, "category_id": 1, "bbox": [82.0, 259.0, 16.0, 15.0], "area": 162, "segmentation": {"size": [512, 512], "counts": "\\XY12k?3N3L3O101O0001O01O01O01ON2M4Kng^6"}, "image_id": 626, "id": 11042}, {"iscrowd": 0, "category_id": 1, "bbox": [353.0, 94.0, 21.0, 17.0], "area": 248, "segmentation": {"size": [512, 512], "counts": "Uc`56d?6O20O00000010O0000010O0001O01O0001O0L4JSmT2"}, "image_id": 627, "id": 11043}, {"iscrowd": 0, "category_id": 1, "bbox": [308.0, 353.0, 29.0, 22.0], "area": 373, "segmentation": {"size": [512, 512], "counts": "\\[j42k?3M4M2M301O00010O010O00010O00010O010O00010O00010ON3L3M3NiTg2"}, "image_id": 627, "id": 11044}, {"iscrowd": 0, "category_id": 1, "bbox": [498.0, 149.0, 14.0, 30.0], "area": 249, "segmentation": {"size": [512, 512], "counts": "ZUi73k?2N3M2N3M2N3M2M4N1010ON2N3ZK"}, "image_id": 629, "id": 11045}, {"iscrowd": 0, "category_id": 1, "bbox": [474.0, 191.0, 38.0, 47.0], "area": 1121, "segmentation": {"size": [512, 512], "counts": "mV]71m?2N2M4M2N3M2N3L3N3O010O00001M2N3M2M4M210O010O00010O010O010O010O010O00010O0iI"}, "image_id": 629, "id": 11046}, {"iscrowd": 0, 
"category_id": 1, "bbox": [458.0, 240.0, 54.0, 54.0], "area": 1705, "segmentation": {"size": [512, 512], "counts": "]XU72l?3L3N3L3N3M2M310O0M4M2M3N3L3010O010O00010OO20O00010O010O01O01O010O010O00010mNXAl0h>ROZAo0k>010O01O010O01O01O010O01O010jG"}, "image_id": 629, "id": 11047}, {"iscrowd": 0, "category_id": 1, "bbox": [436.0, 291.0, 70.0, 51.0], "area": 2002, "segmentation": {"size": [512, 512], "counts": "mYj64j?2M3M4M2M4L3N2M4N10010O010O0O1N3L310O00010O00010O010O00010O01O01O010O00SOVAd0k>XOXAi0g>UO\\Aj0m>1O01O010O0001M2N3L3M301O01O010O01O01L3N3L3M3N3L3N2M4L3N3L3M3N_f2"}, "image_id": 629, "id": 11048}, {"iscrowd": 0, "category_id": 1, "bbox": [509.0, 317.0, 3.0, 7.0], "area": 13, "segmentation": {"size": [512, 512], "counts": "Rjn71l?4M2SF"}, "image_id": 629, "id": 11049}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 335.0, 24.0, 41.0], "area": 642, "segmentation": {"size": [512, 512], "counts": "_:T1m>O010O010O00010O010O010O0N2N3M2Im@^OW?>7N3M2M3N3M2MZec7"}, "image_id": 629, "id": 11050}, {"iscrowd": 0, "category_id": 1, "bbox": [418.0, 343.0, 74.0, 54.0], "area": 1965, "segmentation": {"size": [512, 512], "counts": "a[a61l?4L3N2M4M2M4f@]OR?j0N2N3O0010O001N1N21N1N3M21O010O01O010O01O01O010O01O01O010OROTAh0m>UOVAj0P?10O01O01O010O010O01O01O010O01O01O010O01OVOo@e0V?010O0010O0010OO2L3N2M4M2M4M2NZd9"}, "image_id": 629, "id": 11051}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 345.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "ijo72U5"}, "image_id": 629, "id": 11052}, {"iscrowd": 0, "category_id": 1, "bbox": [44.0, 358.0, 74.0, 72.0], "area": 3276, "segmentation": {"size": [512, 512], "counts": "R\\f03k?2M3N3M2M4M2M3N3M2M4M4N1kAmN[=S1cBoN^=Q1_BQOa=o0\\BUOd=k0YBWOg=i0VB[Oi=e0UB]Ol=c0QB@n=`0oACR>U1010O010O00O2M2N3L310O00010OO2M2N2M4O010O010O00010O010O00010O010O001YNlA^1V>_NmA_1\\>M2M4M2M3N3L3N3M2M3N3L3N3M2M4M2M3NTdT6"}, "image_id": 629, "id": 11053}, {"iscrowd": 0, "category_id": 1, "bbox": [392.0, 394.0, 72.0, 50.0], "area": 1864, "segmentation": {"size": [512, 512], "counts": "P]T61l?4M2N2M4M2N3N110O01O01O010O0O2L3N2010O01O01O010O010O01L3N2N3N110O0010O0010O010O0010O00O2N1010O01O01O010O0nNZAk0f>RO\\An0l>UOQAd0n>ZOTAf0T?O01O010O010ON3M2N2M4M2N3L3N3Mkbg0"}, "image_id": 629, "id": 11054}, {"iscrowd": 0, "category_id": 1, "bbox": [37.0, 427.0, 43.0, 76.0], "area": 1780, "segmentation": {"size": [512, 512], "counts": "dnb01m?2M3M4M2M4M2WAAn=b0PB@n=c0nAAn=b0PB@n=c0nA@o=c0oA@n=c0nA@o=[1M2M3N3N11O01O010O00010O010OO1kMZBX1NZOP>c0SB[Oo=c0TBYOP>c0SB[Oo=c0SBZOP>c0TBZOo=b0TB[Oo=c0i0L3N2M4L3N3L3NYbg6"}, "image_id": 629, "id": 11055}, {"iscrowd": 0, "category_id": 1, "bbox": [368.0, 433.0, 110.0, 75.0], "area": 3184, "segmentation": {"size": [512, 512], "counts": "d^h51m?2M4M2M3N3L3N3L3N3L3N2M4M2M4O001O01O010O010O00010O010O010kN]Ak0c>SO_Am0a>POcAP1f>010O01N1O110O0010O0010O0ZOo@>Q?^ORAb0n>\\OUAc0T?10O01O010O01O010O01O010f@[OW?g010O0010O0010O0O2M2N3M21O010O01O010O01O010O01O010O01O010O01O010M2N2N3L31N1N2M4M2N3L3N3M2M3N3M2M4M2N3L[a`0"}, "image_id": 629, "id": 11056}, {"iscrowd": 0, "category_id": 1, "bbox": [85.0, 460.0, 58.0, 52.0], "area": 1776, "segmentation": {"size": [512, 512], "counts": "joZ12k?4M2M3N3L3N3L3N2M4M2M3N2M3N2M3N2M3N2O1001O001O00001O001O00001O001JaAiN_>U1cAkN^>Q1eAoN[>o0hAPOX>n0jAROW>j0a0N3N1N201ON201O010O0001O0M4M2N2M4M2M4MYQh5"}, "image_id": 629, "id": 11057}, {"iscrowd": 0, "category_id": 1, "bbox": [498.0, 486.0, 14.0, 26.0], "area": 283, "segmentation": {"size": [512, 512], "counts": "e_i71l?4[@MY?5d@NZ?5c@MZ?>N2M30000001O001O00"}, "image_id": 629, "id": 
11058}, {"iscrowd": 0, "category_id": 1, "bbox": [158.0, 490.0, 82.0, 22.0], "area": 1247, "segmentation": {"size": [512, 512], "counts": "o__21l?3N2M3N2N2M3N2O100001O001O00001O001O001O00001O001O00001OO1N2N2M3N2001O00001O001O00001O00M3N200001O001O00001O001O00001O001O00001O001O00001O001O001O00001O001O0N2N3L[`W4"}, "image_id": 629, "id": 11059}, {"iscrowd": 0, "category_id": 1, "bbox": [471.0, 502.0, 13.0, 10.0], "area": 95, "segmentation": {"size": [512, 512], "counts": "mo[73j?3M3O100001O001O00001L3MZ`="}, "image_id": 629, "id": 11060}, {"iscrowd": 0, "category_id": 1, "bbox": [471.0, 0.0, 41.0, 34.0], "area": 819, "segmentation": {"size": [512, 512], "counts": "Y`[73k?2N3N2M2N3O001O001O001O1O001O001O001O001O1O010O010O01000O010O010O010O01000O010O0F"}, "image_id": 631, "id": 11061}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 76.0, 29.0, 58.0], "area": 873, "segmentation": {"size": [512, 512], "counts": "\\2j1W>N2O1N2N3M2N2N2O1N3M2N2N2N3N1N2N2N2N3M2O1N2N2N3M2O1N2N2Nd\\a7"}, "image_id": 632, "id": 11062}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 260.0, 3.0, 6.0], "area": 11, "segmentation": {"size": [512, 512], "counts": "T86j?N2MoWn7"}, "image_id": 632, "id": 11063}, {"iscrowd": 0, "category_id": 1, "bbox": [179.0, 0.0, 62.0, 72.0], "area": 2543, "segmentation": {"size": [512, 512], "counts": "iai22l?2M4M2M4M2M3N3L3N3L3N3M2M3N3L3N3L3N2M4M2M4M2M301O001O00001O001O00O1N2M3N2N2M3N2M3N2M3N2M3N2M3N2M3N2N2M3N2M3N2M3N2M3N2M3N2MS`W4"}, "image_id": 633, "id": 11064}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 10.0, 9.0, 25.0], "area": 117, "segmentation": {"size": [512, 512], "counts": ":i0W?M4M2M3M4M2M4M2Mf_k7"}, "image_id": 633, "id": 11065}, {"iscrowd": 0, "category_id": 1, "bbox": [236.0, 17.0, 54.0, 72.0], "area": 2315, "segmentation": {"size": [512, 512], "counts": "_Rf31l?3N2M4M2M4XOBjA`0S>CkA?S>DiA`0S>CkA?S>DiA`0S>CkA?S>DiA?U>h0N1010O01ON3O01O0M3N3M2M4O01O01O01O0M4M2M4M2N2M4M2M4M2M3N3M2M4M200O2L3N3M2M4M2F`@1c?M`@0f^_3"}, "image_id": 633, "id": 11066}, {"iscrowd": 0, "category_id": 1, "bbox": [1.0, 28.0, 52.0, 64.0], "area": 1947, "segmentation": {"size": [512, 512], "counts": "^b03k?2M3M4M2M4M2M3M4M2M4L3N2M4L3N2M4L3N3L30001O01O010O01O01O01O01O0O2L3N2M4L3N2M4L3N3L3M3N3L3M3N3L3M4M2M3Ml^U7"}, "image_id": 633, "id": 11067}, {"iscrowd": 0, "category_id": 1, "bbox": [280.0, 34.0, 57.0, 65.0], "area": 2073, "segmentation": {"size": [512, 512], "counts": "cR\\41l?3N3M2M4M2M4M2N2M4M2M4N10010O001L3N3M2M3N3L3N3M2M4O000010O010O00010O010L3N3M2M3N3M2M4M2M4M2N2M4M2N3L3N2M4M2N3L3Nd^g2"}, "image_id": 633, "id": 11068}, {"iscrowd": 0, "category_id": 1, "bbox": [94.0, 62.0, 93.0, 97.0], "area": 3208, "segmentation": {"size": [512, 512], "counts": "gR_12k?3M3RAKo=8nAKn=8jACF7]>9jACG7^>7gA3Y>MdA6]>IaA:^>d0010O01O000M4L3N3L3O2O01O01O010O00010N1M4O00O1M4N100010O@oAQOQ>l0RBUOn=DQBl04@k=AUBk04Co=OiAJ;8k=KmAJ;;i=HoAI1OIb@L_?0d@1[?Lh@4c?0O000e@Oh>1SA3m>Mn@8S?:00O2O00000010O00000010O0000010O00000010O00000010O00000010K4O11O@i@7X?Dl@<\\?00M4K4Le[R5"}, "image_id": 633, "id": 11069}, {"iscrowd": 0, "category_id": 1, "bbox": [329.0, 62.0, 54.0, 60.0], "area": 1788, "segmentation": {"size": [512, 512], "counts": "bbT51m?3L3N2M4N110O0UADR>9hAJX>6fAMZ>3bA0^>0`A2`>N]A6c>JZA8f>b00O00010O01N1N3M2N2M4M2N3N10010O0010O0O2L3N3M2M3N3L3N3L3N3M2M3N3L3N3L3N3M2M4Mi]P2"}, "image_id": 633, "id": 11070}, {"iscrowd": 0, "category_id": 1, "bbox": [383.0, 66.0, 49.0, 65.0], "area": 1849, "segmentation": {"size": [512, 512], "counts": 
"eco53j?3N2M4M2M4M2M3N3_OZOcAi0[>YOcAj0Y>YOdAj0Z>?M4L3N2010O010O00010O010O00010O010N1M3N3L3N3L3N2M4M2M3N3L3N3L3M3N3L3N3L3NcmW1"}, "image_id": 633, "id": 11071}, {"iscrowd": 0, "category_id": 1, "bbox": [205.0, 87.0, 27.0, 27.0], "area": 439, "segmentation": {"size": [512, 512], "counts": "YcV31m?3L3N3L3N2M4M2O20O010O00010O010O01O01O0N3M2M3N3L3N3LTm[4"}, "image_id": 633, "id": 11072}, {"iscrowd": 0, "category_id": 1, "bbox": [423.0, 93.0, 63.0, 73.0], "area": 2323, "segmentation": {"size": [512, 512], "counts": "fdc61m?2M3N3L3N3M2M4M20010O010O00010M2M4M2M3N3L3N3M2M4M2M3N3M2M4M2M3N30O010O01O01N1N3L3N3L3N2N3L3N3L3N2N3L3N3L3N3L3N2N3L3N3L3N2N3Lil<"}, "image_id": 633, "id": 11073}, {"iscrowd": 0, "category_id": 1, "bbox": [258.0, 100.0, 29.0, 28.0], "area": 492, "segmentation": {"size": [512, 512], "counts": "eSQ43k?2N2M4M2M4M2M4O000010O010O00010O010O00010M2N3L3N2M4M2M4Mf\\`3"}, "image_id": 633, "id": 11074}, {"iscrowd": 0, "category_id": 1, "bbox": [310.0, 112.0, 19.0, 24.0], "area": 273, "segmentation": {"size": [512, 512], "counts": "RTk42k?3N3L3N3L3N2M40O01O01O0O2L3N2M4M2M3N^\\k2"}, "image_id": 633, "id": 11075}, {"iscrowd": 0, "category_id": 1, "bbox": [481.0, 118.0, 31.0, 61.0], "area": 1003, "segmentation": {"size": [512, 512], "counts": "ld`721Oi?5O2N2M2O4L2g@@Q?`0m@BQ?`0l@CQ?g0O2M3N2M10O10O10O02O2N1N100O010O011O0O1001N\\L"}, "image_id": 633, "id": 11076}, {"iscrowd": 0, "category_id": 1, "bbox": [252.0, 122.0, 16.0, 13.0], "area": 121, "segmentation": {"size": [512, 512], "counts": "QTn31m?3L3N2010O01O01O010O01O01M2N3LTli3"}, "image_id": 633, "id": 11077}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 137.0, 26.0, 27.0], "area": 421, "segmentation": {"size": [512, 512], "counts": "lTR61l?3N3L3N2M4M2M4N10010O0010O0010O0010O0M4L3N2M4M2M4Mbk`1"}, "image_id": 633, "id": 11078}, {"iscrowd": 0, "category_id": 1, "bbox": [167.0, 140.0, 92.0, 77.0], "area": 3163, "segmentation": {"size": [512, 512], "counts": "Yfc22l?3L3N3M21O01O010O01O01O010O01O01O010L3N2N3L3N3M2M4M2M3N3M2M4M2N2M4MO010O12M3N3M2M4M2O101O01O0M4M2M3N3L3N3L3N3L3N2M4M201O01O01O010O01O01O010O01O01O010O01O01O010N1M3N3L3N3L3N2M4M2M4M2MT[n3"}, "image_id": 633, "id": 11079}, {"iscrowd": 0, "category_id": 1, "bbox": [285.0, 147.0, 14.0, 15.0], "area": 130, "segmentation": {"size": [512, 512], "counts": "md^42k?3N2M4M20001O010O01L3M3N3L][Z3"}, "image_id": 633, "id": 11080}, {"iscrowd": 0, "category_id": 1, "bbox": [397.0, 162.0, 32.0, 40.0], "area": 741, "segmentation": {"size": [512, 512], "counts": "PfV62k?4M2N2M4M2M4M2M3N3L3N3L30010O010O00010ON3M2M3N3L3N3L3N2M4M2M4MhZY1"}, "image_id": 633, "id": 11081}, {"iscrowd": 0, "category_id": 1, "bbox": [39.0, 176.0, 110.0, 87.0], "area": 5180, "segmentation": {"size": [512, 512], "counts": "_fc04j?2N3M2N2M4M2N3M2O20O010O010O0010M2N3M2N3L3N3M2010O0010O010O010O010O0010O0010O010O010O010O0010O0010fAiNi=W1UBkNl=U1QBnNn=R1PBPOQ>P1lASOS>]110O00010O010O010O010O010O00010O001M2N3M2M4M2N3M2N2M4M2OO3M2010O0010O0010OYOdBjN]=R1fBoNY=o0jBPOW=m0kBSOU=k0mBSOV=j0mBSOU=j0nBSOU=k0nBROU=a0XB]Oe00U=a0XB]Of0OU=a0XB]Oe00U=a0\\1L3N3M2N3M2M3NVZe5"}, "image_id": 633, "id": 11082}, {"iscrowd": 0, "category_id": 1, "bbox": [446.0, 191.0, 29.0, 30.0], "area": 522, "segmentation": {"size": [512, 512], "counts": "bVo63k?2M4M2N2M4M2M4M210O01O01O010O01O01O010O01M2N3L3N2M4M2N3LkYb0"}, "image_id": 633, "id": 11083}, {"iscrowd": 0, "category_id": 1, "bbox": [497.0, 209.0, 15.0, 26.0], "area": 260, "segmentation": {"size": [512, 512], "counts": "Ugh71l?3M4M2M4M2M3M4N11O010O01O01O\\I"}, "image_id": 633, "id": 11084}, {"iscrowd": 
0, "category_id": 1, "bbox": [242.0, 213.0, 48.0, 64.0], "area": 1820, "segmentation": {"size": [512, 512], "counts": "`Wi33k?2M4M2N3L3N3L3N2M4M2dAROd=R1XBPOg=Q1WBQOj=o0RBUOm=k0QBWOo=j0mAZOS>e0kA]OU>X10O0010O0010O0010O0010O0010M2M3NO3N2N3L3AhATO[>i0gATO\\>i0gAUO[>i0hATO[>4\\A=S?Ao@<]?M2M4M2N2MPi^3"}, "image_id": 633, "id": 11085}, {"iscrowd": 0, "category_id": 1, "bbox": [28.0, 218.0, 22.0, 21.0], "area": 290, "segmentation": {"size": [512, 512], "counts": "VW>4j?2M3N3L3N3O01O01O010O01O01O01O0O1M4L3N3L3NRiV7"}, "image_id": 633, "id": 11086}, {"iscrowd": 0, "category_id": 1, "bbox": [413.0, 235.0, 99.0, 58.0], "area": 2778, "segmentation": {"size": [512, 512], "counts": "Th^63j?4K4M30001O0M3M3L5L3N21O01O00010O0000010O0000010O0001O01O0001YOo@=Q?_OSAb0U?01O01O0001O01O0001O01O0i@^OP?i010O0000010O0000010O0000010O0000010O000010O0000010O000001O01ON2L5K4L4O110O000010O00001N1L4L4L5L3L4L4L5K4M3LYH"}, "image_id": 633, "id": 11087}, {"iscrowd": 0, "category_id": 1, "bbox": [146.0, 245.0, 51.0, 67.0], "area": 2023, "segmentation": {"size": [512, 512], "counts": "YYY23k?2M4QOIQB9m=JoA:m=IQB9m=JPB8n=JoA:m=JPB8n=JoA:n=HPB:Q>FlAj110O010O01O01O010O01OON100O102N2M4N1010O010ON2M4M2M4M2N2M4M2M4M2N3L3N2M4M2N3LTXm4"}, "image_id": 633, "id": 11088}, {"iscrowd": 0, "category_id": 1, "bbox": [272.0, 245.0, 76.0, 83.0], "area": 3691, "segmentation": {"size": [512, 512], "counts": "`YX41l?4M2M3N3L3N3M2M3N3L3N3L3N3L3N2N3L5L3L3N3L3N3L3N210O010O00010O010O010O00010O010O00010O010O01O01O010O01N1M3N3M2M4M2M4M2N2M4M2M4M2N2M4M2M4M2M4M2N2M4M2M4Mgga2"}, "image_id": 633, "id": 11089}, {"iscrowd": 0, "category_id": 1, "bbox": [361.0, 253.0, 25.0, 28.0], "area": 407, "segmentation": {"size": [512, 512], "counts": "ahd51m?3M2M4M2M3N3M2M4N1010O00010O010N1N3L3N2N3L3N3M2MPhn1"}, "image_id": 633, "id": 11090}, {"iscrowd": 0, "category_id": 1, "bbox": [215.0, 280.0, 43.0, 55.0], "area": 1559, "segmentation": {"size": [512, 512], "counts": "_i[31m?3L3n@JX>9eAIY>9dAKX>9eAIY>9eAJX>8eAKX>9eAIY>9eAJX>P101O00010O010O01O00001N101O0O101N101N101N100O2M2M4M2M4Cm@IU?5n@HT?5o@HU?5^OgAa0Z>[OiAb0Z>\\OhAb0Z>\\OiAa0Z>[OiA;BH[?5g@I[?4;M2Nge5"}, "image_id": 633, "id": 11092}, {"iscrowd": 0, "category_id": 1, "bbox": [395.0, 319.0, 21.0, 28.0], "area": 331, "segmentation": {"size": [512, 512], "counts": "djU62k?4M2M4M2M3N3L3N2O2O0010M2M3N3L3N3L3N2M4Moe_1"}, "image_id": 633, "id": 11093}, {"iscrowd": 0, "category_id": 1, "bbox": [344.0, 325.0, 36.0, 55.0], "area": 1526, "segmentation": {"size": [512, 512], "counts": "b[\\56c?7I7I7I7I8H7M31O00000001O0001O00000001O0001O00000001O0001N1I7I7I7I7I7IneQ2"}, "image_id": 633, "id": 11094}, {"iscrowd": 0, "category_id": 1, "bbox": [394.0, 344.0, 43.0, 65.0], "area": 1595, "segmentation": {"size": [512, 512], "counts": "^\\U61l?4ROLlA6R>LlA6Q>MlA7Q>LlA6R>LkA8Q>LlA6R>LlA7Q>KlA8T>IiA9X>FeA>Z>d0010O0010O0O2L3N2M4O0010N1N3L3N3M2M3gNeAk0^>ROdAk0_>SOdAj0j>N3L3N2M4M2N3L3N2M4MfTU1"}, "image_id": 633, "id": 11095}, {"iscrowd": 0, "category_id": 1, "bbox": [464.0, 362.0, 16.0, 12.0], "area": 119, "segmentation": {"size": [512, 512], "counts": "`[X72k?3N3N10010O01O01O010O0001O0M4Mcd?"}, "image_id": 633, "id": 11096}, {"iscrowd": 0, "category_id": 1, "bbox": [421.0, 382.0, 62.0, 54.0], "area": 1747, "segmentation": {"size": [512, 512], "counts": "olb62l?3M2M4M2N3L3N2N3L3O20O0010O010O0010O0010O01[Om@n>@TAa0V?O00M4M2N3L3N3M2M3N3M2M4M2N3O01O01O010OM4M2N3L3N2N3L3N3M2M4M2N2M4M2N3L3N3LkS>"}, "image_id": 633, "id": 11097}, {"iscrowd": 0, "category_id": 1, "bbox": [240.0, 78.0, 64.0, 112.0], "area": 2543, "segmentation": {"size": [512, 
512], "counts": "cUh33k?2N2M4M2M4M2N2M4M2M4M2M4M2N2M4M2M4M2N2M4M0O01000O010O01000O010O010O10O10O010O01000O03N3L3N2N3L3N2M4M2N3L3N2M4M2M3N3M2M3N3L3N3M\\mW3"}, "image_id": 634, "id": 11098}, {"iscrowd": 0, "category_id": 1, "bbox": [52.0, 207.0, 25.0, 24.0], "area": 325, "segmentation": {"size": [512, 512], "counts": "kVj02l?2O2M2N3M3M2O2O0100O0100O010O010O10N1N3M3M2N3M2NZYi6"}, "image_id": 634, "id": 11099}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 216.0, 57.0, 103.0], "area": 1465, "segmentation": {"size": [512, 512], "counts": "hYR61m?2N3L3N3M2N3M2N3L3N3M2N3M0O0100000000O0100000000O0100000000O01000000000O0100000000O010002N3M2M4M2N3M2N2N3L3N3M2NUYQ1"}, "image_id": 634, "id": 11100}, {"iscrowd": 0, "category_id": 1, "bbox": [231.0, 273.0, 150.0, 142.0], "area": 7768, "segmentation": {"size": [512, 512], "counts": "[kc32l?2N3M2N3L3N3M2N2N3M2N3L3N3M2N3M2N3L3N3M2N3M2N2N3L3N3M2N3M2N3M2M4M2N3M2N3M2N]OWCTNeBTA?l>^OWAa0T?10O010O010O01O010O010O010M2N2N3L3N3M2N3M2N3M2M4M2N3M2N3M2M3N3M2N3M2N3M2M3N00000000O10O10000002N3L3N3M2N3M2N3L3N3M2N3M2N2N3L3N3M2N3M2N3M2M4M2N3M2N3M2N2M4MkUQ2"}, "image_id": 634, "id": 11101}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 337.0, 33.0, 35.0], "area": 626, "segmentation": {"size": [512, 512], "counts": "Xko22l?2N3N2M2N3M2O2M2N3M3N1N3M2010O0100O01N1N3M3N1N3M2N3N1N3M3M2O2M2NWe_4"}, "image_id": 634, "id": 11102}, {"iscrowd": 0, "category_id": 1, "bbox": [228.0, 369.0, 13.0, 13.0], "area": 125, "segmentation": {"size": [512, 512], "counts": "e[b35j?3M2N101N100001N101N2N4K[TW4"}, "image_id": 634, "id": 11103}, {"iscrowd": 0, "category_id": 1, "bbox": [419.0, 372.0, 62.0, 41.0], "area": 988, "segmentation": {"size": [512, 512], "counts": "Pla62l?3M2N3M2N3M201O010O010O010O010O010O010O010O010O01O010O01O010O010O010O010O010O010O010O010O010O010O010O010O010ON3M2N3M2N3M2NdS?"}, "image_id": 634, "id": 11104}, {"iscrowd": 0, "category_id": 1, "bbox": [372.0, 384.0, 140.0, 103.0], "area": 5513, "segmentation": {"size": [512, 512], "counts": "P]j52l?2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N30O010O010O010O010O010O010O010O010O010O010O01M2O2O010O010O010OeN`AV1e>10O010O010O010O010O010O01O010O01O010O010O010O010O010O010O010O010O010O010O010O010O010O010O010O010O010O010O010O010O010O010O010O010O010O010O010O010O010O010O010O010O010O010O010O010O010O01RB"}, "image_id": 634, "id": 11105}, {"iscrowd": 0, "category_id": 1, "bbox": [477.0, 505.0, 15.0, 7.0], "area": 58, "segmentation": {"size": [512, 512], "counts": "no^72l?2O1N21O001O001O001O001O1O00Q`9"}, "image_id": 634, "id": 11106}, {"iscrowd": 0, "category_id": 1, "bbox": [47.0, 0.0, 60.0, 31.0], "area": 1041, "segmentation": {"size": [512, 512], "counts": "S`g03l?1N3O001O1O001O001O1O001O1O001O001O1O001O001O1O001O001O1O001O001O1O001O1O001O001O1OO1N2O1N2N2O1N2N2O1N2N2O1N2N2O1N2O1NR`Z6"}, "image_id": 635, "id": 11107}, {"iscrowd": 0, "category_id": 1, "bbox": [108.0, 0.0, 12.0, 5.0], "area": 34, "segmentation": {"size": [512, 512], "counts": "PPf11o?001O001O1O001O00O1N2OQPT6"}, "image_id": 635, "id": 11108}, {"iscrowd": 0, "category_id": 1, "bbox": [130.0, 0.0, 41.0, 23.0], "area": 376, "segmentation": {"size": [512, 512], "counts": "^PQ23l?2N1O2N2N2LE`@=^?4N0000000001O1N2O1O1O1O1O1N2O1O1O1O1O1O1001O1O1O1O1O1O1O00O1O1O1O1LV@1koZ5"}, "image_id": 635, "id": 11109}, {"iscrowd": 0, "category_id": 1, "bbox": [176.0, 0.0, 92.0, 86.0], "area": 3401, "segmentation": {"size": [512, 512], "counts": 
"VRh21n?2XON_A4_>N_A4_OJm>4bA6\\>MbA5\\>MbA5\\>MaA6]>LaA6]>KbA6]>LaA6]>LaA6\\>MbA5\\>h0O2N2N002NO0100000000000O0100000000000O0100000000000O0100001O1O1O1N2O1O1O1O1O1O1N2O1O1O1O1O1O1N2O1O1O1O1O1O1N2O1O1O1O1O1O1N2O1O1O1O1O1O1N2O1O1O1O1O1O1N2O1O1OQPj3"}, "image_id": 635, "id": 11110}, {"iscrowd": 0, "category_id": 1, "bbox": [393.0, 0.0, 119.0, 73.0], "area": 5073, "segmentation": {"size": [512, 512], "counts": "P`T62n?1O1O1O1O1O1O1O001O1O1O1O1O1O1O001O1O1O1O1O1O1O1O001O1O1O1O1O1O1O001O1O1O1O1O1O1O1O001O1O1O1O1O1O1O1O001O1O1O1O1O1O1O001O1O1O1O1O1O1O1O001O1O1O1O1O1O1O001O1O1O1O00O1O1O1O1O1O1O1O1O1O1N2O1O1O1O1O1O1O1O1O1O1N2O1O1O1O1O1O1O1O1O1N2O1O1O1O1O1"}, "image_id": 635, "id": 11111}, {"iscrowd": 0, "category_id": 1, "bbox": [129.0, 42.0, 43.0, 42.0], "area": 931, "segmentation": {"size": [512, 512], "counts": "VbP22m?2N2N2M3N1O2N2N2N0000O010002N2N2N2N2N1O2M3N2N2N2N2OO1O2N2N2VOTA=l>CVA;j>EWA:i>FYA9g>F[A8g>F[A8g>E\\A9f>E\\A9W?N2N2N1N3NomY5"}, "image_id": 635, "id": 11112}, {"iscrowd": 0, "category_id": 1, "bbox": [257.0, 49.0, 157.0, 150.0], "area": 11724, "segmentation": {"size": [512, 512], "counts": "lcP41m?3N2N2N3M2N2N2M3N2N2N2N2N3M2M3N2N2N2N2N2N2M4M2N2N2N2N2N2M3N2N3M2N2N1O0O1000001O00000O100000002N2N2N2M2O2N2N2N2N2N2M3N2N1O2N2N2N2M3O100000O100000000000O1000O10000000000000000O1000000000O100000O100000000O1M3N2N2N2N2N2N2M2O2N2N2N2N2N2M3N2N2N2N2N2N2M3N2N2N1O2N2N2M3N2N2N2N2N2N2N2M3N2N2N2N2N1O2M3N2N2N2N2N2N2M3N2N2N2Nkk`1"}, "image_id": 635, "id": 11113}, {"iscrowd": 0, "category_id": 1, "bbox": [133.0, 81.0, 24.0, 23.0], "area": 237, "segmentation": {"size": [512, 512], "counts": "gbR22m?2N2N1O2M3O10000000000O10O100000000O1N2M3N1O2NQ]a5"}, "image_id": 635, "id": 11114}, {"iscrowd": 0, "category_id": 1, "bbox": [480.0, 212.0, 32.0, 64.0], "area": 1044, "segmentation": {"size": [512, 512], "counts": "fW`71n?2M3N2N2N2N2N2N1O2N2N2N2M3N2N2N2N2N2N2N1O2N2N2N2M3N2N2N2N2N2N2[I"}, "image_id": 635, "id": 11115}, {"iscrowd": 0, "category_id": 1, "bbox": [241.0, 342.0, 25.0, 21.0], "area": 344, "segmentation": {"size": [512, 512], "counts": "Rkh33j?3M4L3M301O01O01O01O01O01O01O01O01O01O000M4L3M4LVej3"}, "image_id": 635, "id": 11116}, {"iscrowd": 0, "category_id": 1, "bbox": [40.0, 352.0, 376.0, 160.0], "area": 27719, "segmentation": {"size": [512, 512], "counts": "j]d02l?3L3M4L3M3M4M2M3M4L3M3N3L3M3M4L3M3N3M210O00010O01OjBWNU^OcAa0]>\\OfAd0[>XOhAh0X>VOkAi0f>0001O00001O00001O001O00001O00001O00"}, "image_id": 635, "id": 11118}, {"iscrowd": 0, "category_id": 1, "bbox": [412.0, 473.0, 17.0, 22.0], "area": 209, "segmentation": {"size": [512, 512], "counts": "[_^61m?2M4L3M4L3N2N3O01O0O1M4L3M3N3L3MWQY1"}, "image_id": 635, "id": 11119}, {"iscrowd": 0, "category_id": 1, "bbox": [425.0, 505.0, 22.0, 7.0], "area": 84, "segmentation": {"size": [512, 512], "counts": "ood61l?3M300001O00001O001O00001O00001O00001O0000QPP1"}, "image_id": 635, "id": 11120}, {"iscrowd": 0, "category_id": 1, "bbox": [26.0, 0.0, 43.0, 36.0], "area": 1438, "segmentation": {"size": [512, 512], "counts": "PP=i0W?9G0000000000000000O1000000000000003M00000000000000000000M300000000000000000000000?Aa_m6"}, "image_id": 638, "id": 11121}, {"iscrowd": 0, "category_id": 1, "bbox": [441.0, 0.0, 58.0, 13.0], "area": 436, "segmentation": {"size": [512, 512], "counts": "P`l63m?5K3MO10000000000O100000000O100002N3M00O10000000000O1000000K5000000O100000000O10000004L0000000000O1KZ@0f?0500O10000000P`6"}, "image_id": 638, "id": 11122}, {"iscrowd": 0, "category_id": 1, "bbox": [241.0, 3.0, 64.0, 43.0], "area": 2186, "segmentation": {"size": [512, 512], "counts": 
"Y`h36j?;E;E6I1000000000000000O1000O10002N2N00000000000O100000O100LZAnNf>Q150000000O1000000000O10000000O5L00000000L4000000000O1000000;E;D;Fm^W3"}, "image_id": 638, "id": 11123}, {"iscrowd": 0, "category_id": 1, "bbox": [349.0, 0.0, 47.0, 79.0], "area": 2878, "segmentation": {"size": [512, 512], "counts": "P`^58h?:F9G:F:F:F:F9G3M0000000000000000O1000000000000000000O10000000000000000O1oMeB]1[=cNnBT1Z=dNPCR1Y>F:F0000001O:Ekmi1"}, "image_id": 639, "id": 11124}, {"iscrowd": 0, "category_id": 1, "bbox": [435.0, 0.0, 43.0, 28.0], "area": 1056, "segmentation": {"size": [512, 512], "counts": "P`i63m?9G:F6J0000000000O1000000000000000000O10000000000000000O10000000000000000O1000006J:F`o`0"}, "image_id": 639, "id": 11125}, {"iscrowd": 0, "category_id": 1, "bbox": [506.0, 0.0, 6.0, 35.0], "area": 88, "segmentation": {"size": [512, 512], "counts": "PPm73m?003M:F9G:F"}, "image_id": 639, "id": 11126}, {"iscrowd": 0, "category_id": 1, "bbox": [284.0, 20.0, 49.0, 59.0], "area": 1980, "segmentation": {"size": [512, 512], "counts": "hP^49g?9G9G9RAROX>P1fAVOT>i0mAWOS>i0mAWOS>[10000O010000000000^OkAYOU>g0lAXOT>h0lAXOS>h0nAXOR>h0nAWOS>i0mAWOS>i0lAXOT>h0PBTOP>l0b000000O010000000000000000O010000000000005K9G:E:G[^i2"}, "image_id": 639, "id": 11127}, {"iscrowd": 0, "category_id": 1, "bbox": [231.0, 37.0, 36.0, 62.0], "area": 1602, "segmentation": {"size": [512, 512], "counts": "_ac33m?:F9G9G9G9F4M00000000000I7O10L40000O1000000GnAdNQ>]19000000O1000007I9G3M0@TALP?0d^j3"}, "image_id": 639, "id": 11128}, {"iscrowd": 0, "category_id": 1, "bbox": [150.0, 40.0, 41.0, 49.0], "area": 1539, "segmentation": {"size": [512, 512], "counts": "ZQ[2;e?:F:F;E5K000O1000000000O10000000O10000000F:00O103M1O00O1000000000O100000009G:F;Eg]P5"}, "image_id": 639, "id": 11129}, {"iscrowd": 0, "category_id": 1, "bbox": [504.0, 40.0, 8.0, 21.0], "area": 119, "segmentation": {"size": [512, 512], "counts": "[Ql79f?5L00000N201O4dN"}, "image_id": 639, "id": 11130}, {"iscrowd": 0, "category_id": 1, "bbox": [442.0, 43.0, 45.0, 86.0], "area": 2873, "segmentation": {"size": [512, 512], "counts": "cQm69g?8H0O1000O15jAAaE:E40000000000ODiAPOX>P1<0000000000O1000O10000000000000O1000O100004L:F:Fbmj5"}, "image_id": 639, "id": 11133}, {"iscrowd": 0, "category_id": 1, "bbox": [289.0, 88.0, 21.0, 18.0], "area": 325, "segmentation": {"size": [512, 512], "counts": "jb`49g?7I0O10O1000000000000000O10O10000000006JRmT3"}, "image_id": 639, "id": 11134}, {"iscrowd": 0, "category_id": 1, "bbox": [262.0, 89.0, 23.0, 23.0], "area": 437, "segmentation": {"size": [512, 512], "counts": "kRS41o?:F:F0000O10O1000000000000000O10O10000001O:Fl\\a3"}, "image_id": 639, "id": 11135}, {"iscrowd": 0, "category_id": 1, "bbox": [363.0, 96.0, 16.0, 10.0], "area": 131, "segmentation": {"size": [512, 512], "counts": "Rce58h?0O1000000000O1000O1000000007Hj\\R2"}, "image_id": 639, "id": 11136}, {"iscrowd": 0, "category_id": 1, "bbox": [503.0, 103.0, 9.0, 27.0], "area": 194, "segmentation": {"size": [512, 512], "counts": "Xck73m?9G8H6J000O10000iL"}, "image_id": 639, "id": 11137}, {"iscrowd": 0, "category_id": 1, "bbox": [368.0, 106.0, 58.0, 40.0], "area": 1739, "segmentation": {"size": [512, 512], "counts": "\\Sh52n?:F:F:F6J0000O1000O10000000000000O1000O100004L0000O10O1000000000000000O18G3N000000000000O1000O1000000000000000:F:EhkZ1"}, "image_id": 639, "id": 11138}, {"iscrowd": 0, "category_id": 1, "bbox": [303.0, 111.0, 47.0, 36.0], "area": 1000, "segmentation": {"size": [512, 512], "counts": 
"Udg46d?60000001O0001L3H800000000000000L5M20000001O00000TORAf0T?0h@]OS?h00010OO1L400001O00000001O00000G9N20000L\\l`2"}, "image_id": 639, "id": 11139}, {"iscrowd": 0, "category_id": 1, "bbox": [141.0, 113.0, 14.0, 18.0], "area": 217, "segmentation": {"size": [512, 512], "counts": "bcV28g?:G00000000O102N000000005KX\\b5"}, "image_id": 639, "id": 11140}, {"iscrowd": 0, "category_id": 1, "bbox": [224.0, 120.0, 63.0, 42.0], "area": 2090, "segmentation": {"size": [512, 512], "counts": "kS`35k?:F;E1O000O;F2N00000000O100000O100000000000O100O1000O10O10000000007IO10000000O1000000000O10000000O100000000000O10000003L6TAJk>f000IQO]Ao0c>70000001N5L000000000000O1000O1000000000000000O1000O1000002N:F1O0000000O100000O10000000000000O10004L;EV[_4"}, "image_id": 639, "id": 11142}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 130.0, 6.0, 48.0], "area": 154, "segmentation": {"size": [512, 512], "counts": "R4`1h>H9G:F9G:F`jl7"}, "image_id": 639, "id": 11143}, {"iscrowd": 0, "category_id": 1, "bbox": [473.0, 144.0, 39.0, 57.0], "area": 1563, "segmentation": {"size": [512, 512], "counts": "ge\\78h?9G1N1000000000000RO@\\B?e=KQB5o=5fALY>n000000000000O1000O10000000000000O1000O100K500000000`K"}, "image_id": 639, "id": 11144}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 148.0, 5.0, 7.0], "area": 32, "segmentation": {"size": [512, 512], "counts": "dTR67i?000002M[[k1"}, "image_id": 639, "id": 11145}, {"iscrowd": 0, "category_id": 1, "bbox": [327.0, 149.0, 8.0, 5.0], "area": 38, "segmentation": {"size": [512, 512], "counts": "edS53m?2N00000000000[[h2"}, "image_id": 639, "id": 11146}, {"iscrowd": 0, "category_id": 1, "bbox": [289.0, 156.0, 52.0, 64.0], "area": 1852, "segmentation": {"size": [512, 512], "counts": "fe`42m?5L3M2IGc@9\\?60O1000O4M3M4K4M4L3M4K4M1O0O10O1000GbNoA^1Q>fNlAZ1T>900O010000O010000O0100POkA5U>KnA1R>OSBMm=3VBJj=6ZBFf=:]BBc=>bB^O^=b0eB[O[=e0P1O5L3M4L3L5L4L^Ze2"}, "image_id": 639, "id": 11147}, {"iscrowd": 0, "category_id": 1, "bbox": [230.0, 165.0, 22.0, 18.0], "area": 330, "segmentation": {"size": [512, 512], "counts": "WUc36j?9G1O0O10000000O10000000O10000000O10005K9G]jQ4"}, "image_id": 639, "id": 11148}, {"iscrowd": 0, "category_id": 1, "bbox": [352.0, 165.0, 48.0, 51.0], "area": 1724, "segmentation": {"size": [512, 512], "counts": "oU`51o?:F:F4L00O1000O10000ZOZORBf0n=EgA;X>g0000O10000000000000O1000O10000000000000O1000O1000007I:F6J0000002M;F:F^ig1"}, "image_id": 639, "id": 11149}, {"iscrowd": 0, "category_id": 1, "bbox": [407.0, 165.0, 55.0, 51.0], "area": 2126, "segmentation": {"size": [512, 512], "counts": "he[63m?:F6J001O:E3N000AXOfAh0Z>?000O1000000000O100000O100000000000O100000O100000000000OK6000000O10000000002N:F8H0O108H:Feih0"}, "image_id": 639, "id": 11150}, {"iscrowd": 0, "category_id": 1, "bbox": [241.0, 196.0, 54.0, 53.0], "area": 2019, "segmentation": {"size": [512, 512], "counts": "Tfh3a0_?0000000QA0n=0RB0n=0RB0n=Q100000000001O0000000000000000000000000000000003M0000000l0TO000000000000000000000000000000000000oX\\3"}, "image_id": 639, "id": 11151}, {"iscrowd": 0, "category_id": 1, "bbox": [41.0, 222.0, 170.0, 176.0], "area": 11490, "segmentation": {"size": [512, 512], "counts": "Uhd02m?2N2N2N2N3M2O1N2N2N2N2N2N2N3M2O1N2N2N2N2N2N2N3M2O1N2N2N2N2N2N2N3M2O1N2N2N2N2N2N2N2O2M2N200000000000010O00000000000001O01O000000000001O0001O0000000001O000001O000001O0000000001O0001O000000000001N1N2N2O1N200000001O00000001O00000001O00oLTCm2P=000000000000O2M2O1N2N2N2N2N2N3M2N2O1N2N2N2N2N3M2N2N2O1N2N2N3M2N2N2N2N2N2O1N3M2N2N2N2N2N2N2O2M2N2N2N2N2N2NmTf4"}, "image_id": 639, "id": 11152}, {"iscrowd": 0, "category_id": 
1, "bbox": [0.0, 247.0, 21.0, 36.0], "area": 410, "segmentation": {"size": [512, 512], "counts": "j7Q1n>01O00002N2N2N2N2N2N2N2N2O1N2N2N2N2N2N2NjWe7"}, "image_id": 639, "id": 11153}, {"iscrowd": 0, "category_id": 1, "bbox": [482.0, 269.0, 30.0, 48.0], "area": 1228, "segmentation": {"size": [512, 512], "counts": "ZYa73m?9mNMgA:R>OeA0\\>j00000000000O1000O10000000000000O1000O100000000000O1cG"}, "image_id": 639, "id": 11154}, {"iscrowd": 0, "category_id": 1, "bbox": [235.0, 272.0, 69.0, 51.0], "area": 1773, "segmentation": {"size": [512, 512], "counts": "Xie32n?1N3M2O2M2N3N1N3M2O2M2N3N1N3M2O2M2N01O01O01O01O01O01O01O01O01O01O0001O011N3M2O2M2N3N0O00010O000101N0010O00010O00102M2N3N1N3M2O2M2N3N1N3MPgW3"}, "image_id": 639, "id": 11155}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 276.0, 69.0, 67.0], "area": 2364, "segmentation": {"size": [512, 512], "counts": "j96i?2N2GGh@B^A5F;j>B^Ag0a>ZO]Ah0a>ZO]Ah0a>ZO^Ag0`>[O^Ag0`>>N2N2N1O000000000002N2N2O1001O01O0000000000000000000001OO2M2N2N2N2O1N2N2N2N2N00000000001O1O2N2O1N2N2N2N2N2N3M2N2N2NVVm6"}, "image_id": 639, "id": 11156}, {"iscrowd": 0, "category_id": 1, "bbox": [430.0, 283.0, 47.0, 42.0], "area": 1677, "segmentation": {"size": [512, 512], "counts": "mXg6:f?;D;F9G0000000000O100000O100000000001O3L1000000000O10000000O1000000000O1000000000O100D\\A\\Od>d0>N;EfVa0"}, "image_id": 639, "id": 11157}, {"iscrowd": 0, "category_id": 1, "bbox": [362.0, 300.0, 59.0, 60.0], "area": 1901, "segmentation": {"size": [512, 512], "counts": "eYe51o?3L3N3M3L3N3L4M2N3TAVO[>n0bATO\\>o0`AUO]>m0aAVO[>Z1N3L10000O01000O01000O0103L3N3M3L3N3M3L4M2M4M000O10O10O10O10O010O10O10O10O11N3N3L4M2N3L4M3MZU]1"}, "image_id": 639, "id": 11158}, {"iscrowd": 0, "category_id": 1, "bbox": [275.0, 302.0, 57.0, 61.0], "area": 1725, "segmentation": {"size": [512, 512], "counts": "jjY43k?2N3M2N3M2N3H@m@a0Q?Bl@a0Q?8010O01M2N3M3M2O2M2N30O10O010O010O010OO2M2N2N000101N3MIaAPO^>P1dAnN\\>R1802O2M2N3N20OO2M2N3M2N3N1N3M2N3M2N3M[fi2"}, "image_id": 639, "id": 11159}, {"iscrowd": 0, "category_id": 1, "bbox": [17.0, 348.0, 30.0, 35.0], "area": 540, "segmentation": {"size": [512, 512], "counts": "`k81n?2N2N3M2N2N2O1N2N2N3M2N2N1O01O01O000000000001_Om@4T?Kn@4S?Jo@4T?In@5T?In@5T?Jm@4a?NcTX7"}, "image_id": 639, "id": 11160}, {"iscrowd": 0, "category_id": 1, "bbox": [420.0, 349.0, 39.0, 66.0], "area": 1660, "segmentation": {"size": [512, 512], "counts": "T[b65k?9G00O10O10000M3O105K9G9G8H9G6J000O10000000O10000000O100000O106J8H9G9G8H9G9GYSj0"}, "image_id": 639, "id": 11161}, {"iscrowd": 0, "category_id": 1, "bbox": [466.0, 353.0, 46.0, 38.0], "area": 1374, "segmentation": {"size": [512, 512], "counts": "T[Y79g?N2N2N2N3M2N2N2N2N2O1N2N2N3M2N200000O1N3N1N2N2N2N2N2N2N3M10O01O00000000000000010O00002N2N2N2N3M2N2O1N2N2N2N2N3M2NYdP2"}, "image_id": 639, "id": 11163}, {"iscrowd": 0, "category_id": 1, "bbox": [51.0, 381.0, 26.0, 27.0], "area": 374, "segmentation": {"size": [512, 512], "counts": "Zli02m?2N2N2N2N2N2N2N2N2N2N2N2N01O2N2N2N2N2N2N2N2N2N3M2NhSi6"}, "image_id": 639, "id": 11164}, {"iscrowd": 0, "category_id": 1, "bbox": [391.0, 390.0, 11.0, 12.0], "area": 80, "segmentation": {"size": [512, 512], "counts": "]lS61m?3M2N3N1010O010N1M4Micf1"}, "image_id": 639, "id": 11165}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 392.0, 54.0, 70.0], "area": 1971, "segmentation": {"size": [512, 512], "counts": "S=Z1e>2BfNTBZ1l=gNRBY1n=iNPBW1P>kNnAU1R>mNlAS1T><0010O0001O2N2N2N2N3M0001O0001O0000000000001O01OL_AjNc>T16N2O1N2N3M2N2N2O1N20000O1N2N2N2N3M2N2N2O1NQcT7"}, "image_id": 639, "id": 11166}, {"iscrowd": 0, "category_id": 1, "bbox": 
[255.0, 393.0, 75.0, 52.0], "area": 1959, "segmentation": {"size": [512, 512], "counts": "llo31o?2M2N2N2N2N2N2N2N2N2N2N3M2O1N2N2N2N2N2N2N2O10001OO1N2N2N2N2N3M2N00000001O3M200000N2N2N2O100000001O0001N1N2O1N2O10O1N2O1N2N2N2N2N3M2N2N2N2N2N2N2O1N2Ndbj2"}, "image_id": 639, "id": 11167}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 394.0, 35.0, 29.0], "area": 512, "segmentation": {"size": [512, 512], "counts": "m\\W71n?2N2O1N2N2N2N1O0000001O002N2O1N2N2N2N2N1O02N2N2N2N2N3N1N2N2N2N2N2N2NYS7"}, "image_id": 639, "id": 11168}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 428.0, 69.0, 60.0], "area": 1864, "segmentation": {"size": [512, 512], "counts": "^^R62m?2N2N2M3N2N2N1O2N2N2M3N2O100O0N3PASOj>S1N2N2N1O2N20N2N1100000O1N2N2N2N1N100000O100000O100000O100000O10000000O11O2N2M3N2N2N2N2N1O2M3N2N2N2N2N2NWRk0"}, "image_id": 639, "id": 11169}, {"iscrowd": 0, "category_id": 1, "bbox": [294.0, 431.0, 73.0, 62.0], "area": 2468, "segmentation": {"size": [512, 512], "counts": "h^c42m?2JN[@4d?M[@4c?6N2N2N3M2N2O1N2N2N3M2N1O0001O02N2N1O1O0001O00000001O0001O01O2N3M2N2N2O1N2N3M2N2000N2N3M2O1N2N1O1O2N2N2O1N2N3M2N2N2IQAZOQ?d0QA[OP?c08N3M2O1N2N2N2N3M2NcQX2"}, "image_id": 639, "id": 11170}, {"iscrowd": 0, "category_id": 1, "bbox": [203.0, 437.0, 69.0, 52.0], "area": 1947, "segmentation": {"size": [512, 512], "counts": "enU32m?2N2N2N2N3M2N2O1N2N2N2N2N2N3M2N1O0001OMQOVAo0j>31O01O0000000000001O2O1N2000001O00000001ON2OO000000001O3N1N2GWAXOk>f0WAXOk>g0VAWOl>g08N2N1O1O00003M2N2N2O1N2N2N2Nhag3"}, "image_id": 639, "id": 11171}, {"iscrowd": 0, "category_id": 1, "bbox": [105.0, 438.0, 85.0, 74.0], "area": 2555, "segmentation": {"size": [512, 512], "counts": "ood11n?100O1O1O1O1O1O1O100O1_OGZA:e>HYA9f>IXA8g>JWA7h>KVA6i>MTA4l>MSA3l>ORA2m>a0O1O1O1O1O100O1O1O1O1O1O1OK`AlN`>S1bAlN]>T1eAkNZ>U181O0000000EiNoAV1Q>lNmAT1T>mNjAS1V>POgAQ1X>QOfAo0Z>;000000002N2O1N2N3M1O00000001O000002N2N2N2N2O1N3M2N2N2N2N2N2O2M2N2N2N2N2N2NeaP5"}, "image_id": 639, "id": 11172}, {"iscrowd": 0, "category_id": 1, "bbox": [16.0, 439.0, 83.0, 70.0], "area": 2933, "segmentation": {"size": [512, 512], "counts": "f^82m?2N2N2O1N2N2N2N3M2N2N2N2N2O1N2N2N3M2N2N2N2N2O1N2N3M2N2N2N2N2N2N2O1N2N01O2N2N200O2M2O1N2N2N2N2N2N3M2N2N2O1N2N2N2N00000000010O000000001O2N2N2N2O1N2N2N2N3M2N2N2N2N2O1N]Q^6"}, "image_id": 639, "id": 11173}, {"iscrowd": 0, "category_id": 1, "bbox": [373.0, 470.0, 26.0, 26.0], "area": 338, "segmentation": {"size": [512, 512], "counts": "Qoj51n?1O2N2N2N2N2M3N2N2N10100000000000N2N2M2O2N2N2N2N2NnPh1"}, "image_id": 639, "id": 11174}, {"iscrowd": 0, "category_id": 1, "bbox": [283.0, 471.0, 15.0, 15.0], "area": 120, "segmentation": {"size": [512, 512], "counts": "ln]42n?2M2N2N2N3N1N12N1N2N2N3M2O1NQaZ3"}, "image_id": 639, "id": 11175}, {"iscrowd": 0, "category_id": 1, "bbox": [336.0, 478.0, 47.0, 34.0], "area": 862, "segmentation": {"size": [512, 512], "counts": "a_X51n?3M2N2N2N2N2O1N2N3M2N2N2N1O1O100O1O1O1O1O11O1O1O1O1O1O1O2N1O1\\Oi@=X?Aj@>W?Aj@>]?O1O1O1O1O2N1O1O1O1O1O1O1OQPP2"}, "image_id": 639, "id": 11176}, {"iscrowd": 0, "category_id": 1, "bbox": [289.0, 483.0, 25.0, 26.0], "area": 344, "segmentation": {"size": [512, 512], "counts": "]o`42m?2N2N2N3N1N2N2N2N3M2O1N20N2O1N3M2N2N2O2M2N2N2N2N``R3"}, "image_id": 639, "id": 11177}, {"iscrowd": 0, "category_id": 1, "bbox": [392.0, 488.0, 14.0, 14.0], "area": 103, "segmentation": {"size": [512, 512], "counts": "__T61n?2M3N2N2N2N20O1N2N2N2N2N2Nb`d1"}, "image_id": 639, "id": 11178}, {"iscrowd": 0, "category_id": 1, "bbox": [177.0, 489.0, 48.0, 23.0], "area": 626, "segmentation": {"size": [512, 512], "counts": 
"loh22m?2N2N1O1O100O1O1O1O1O1O1O1O1O1O1O1O1O1O1001O1O1O1O1O1O1O1O1O1O1O1O00O1O1O11O1O1O1O1O1O1O1N2N2NUP_4"}, "image_id": 639, "id": 11179}, {"iscrowd": 0, "category_id": 1, "bbox": [372.0, 489.0, 16.0, 18.0], "area": 131, "segmentation": {"size": [512, 512], "counts": "]_j51n?2N3N1N2N3N110O0001O0N3N1N2N3M[`m1"}, "image_id": 639, "id": 11180}, {"iscrowd": 0, "category_id": 1, "bbox": [302.0, 511.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "o_g41P`X3"}, "image_id": 639, "id": 11181}, {"iscrowd": 0, "category_id": 1, "bbox": [74.0, 0.0, 71.0, 50.0], "area": 1753, "segmentation": {"size": [512, 512], "counts": "lPU11n?2N2N2N3DGl@;S?Fk@P?BQA<[?N2N2N2O1N3M`_m1"}, "image_id": 640, "id": 11186}, {"iscrowd": 0, "category_id": 1, "bbox": [207.0, 28.0, 36.0, 34.0], "area": 530, "segmentation": {"size": [512, 512], "counts": "baW31o?1N2N3M2N2N2N2N2N2O1N001O00000000000001O01O00000002N2N2N2N3M2O1N2N2N2Nj^V4"}, "image_id": 640, "id": 11187}, {"iscrowd": 0, "category_id": 1, "bbox": [113.0, 30.0, 63.0, 63.0], "area": 1941, "segmentation": {"size": [512, 512], "counts": "eah12m?3M2N2N2N2N2O1k@Af>a0YA@e>b0YAAd>a0ZAAd>a0ZAAd>b0YA@f>n0000000000001M2N2N2N01O00000000000001O000001O000IaAnN_>R1cAlN]>T1eAjN]>T19O1N2N2N1O001O002N2N2N2O1N2N2N2N2N2N2N3M2N2NanW5"}, "image_id": 640, "id": 11188}, {"iscrowd": 0, "category_id": 1, "bbox": [488.0, 30.0, 24.0, 44.0], "area": 582, "segmentation": {"size": [512, 512], "counts": "^Qd71n?3M2N2O1N2N2N2N2N2N3M2N2N2N2QASOh>o0WAROg>P1WAROg>U100000N2O1000kN"}, "image_id": 640, "id": 11189}, {"iscrowd": 0, "category_id": 1, "bbox": [225.0, 55.0, 28.0, 29.0], "area": 455, "segmentation": {"size": [512, 512], "counts": "la`3290Y?2e@0Y?2e@0Y?2e@0Y?2e@0Y?=0000000000O1N00000002N2N2N2N2N2N2N2O1N2N2N2Nm]Q4"}, "image_id": 640, "id": 11190}, {"iscrowd": 0, "category_id": 1, "bbox": [447.0, 66.0, 56.0, 50.0], "area": 1526, "segmentation": {"size": [512, 512], "counts": "bbo62m?2O1N2N2N2N2N2N3M2N2N2N2O1N2N2N2N2N3O0000000000000000001M1O0000001O011N2O101OO1N2C\\A\\Of>b0[A\\Og>b0[A\\Og>b0[A\\Og>b0[A\\Og>b0=N2N2N2N2N3M2O1N2NX]4"}, "image_id": 640, "id": 11191}, {"iscrowd": 0, "category_id": 1, "bbox": [244.0, 68.0, 34.0, 33.0], "area": 581, "segmentation": {"size": [512, 512], "counts": "aRj31o?1N3M2N2N2N2N2N2N2N2N3N1N2N2N2O1000O1N2N2N2O1N2N3M2N2N2N2N2N2N2O1N[md3"}, "image_id": 640, "id": 11192}, {"iscrowd": 0, "category_id": 1, "bbox": [164.0, 74.0, 52.0, 55.0], "area": 1407, "segmentation": {"size": [512, 512], "counts": "ZSb21n?2O1N2N2N2N2N2N3M2N2O1N2N2N2N2N3M2N2N2O1N00000000000000010O0002N1O0000000000011N2N2N2N2An@MU?0m@NU?1l@MV?1l@MV?1l@NU?0m@NU?0m@NU?0gmc4"}, "image_id": 640, "id": 11193}, {"iscrowd": 0, "category_id": 1, "bbox": [400.0, 77.0, 32.0, 34.0], "area": 437, "segmentation": {"size": [512, 512], "counts": "dRX61n?2N2N2N2N2N2N2O1a@B[?b00000001O0000000000000000000N2N2N2N2O1N2N2N3MklW1"}, "image_id": 640, "id": 11194}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 93.0, 22.0, 42.0], "area": 506, "segmentation": {"size": [512, 512], "counts": "n2Y1f>01O2N2N2O1N3M2N2N2N2N2N2N2N2N2O1N3M2N2N2N_ld7"}, "image_id": 640, "id": 11195}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 101.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "Uco72i<"}, "image_id": 640, "id": 11196}, {"iscrowd": 0, "category_id": 1, "bbox": [250.0, 104.0, 13.0, 11.0], "area": 89, "segmentation": {"size": [512, 512], "counts": "_Sm31l?3M3O20O01O01O01O01O0M3Ng\\l3"}, "image_id": 640, "id": 11197}, {"iscrowd": 0, "category_id": 1, "bbox": [266.0, 105.0, 23.0, 22.0], 
"area": 250, "segmentation": {"size": [512, 512], "counts": "fSU43m?1N2N2N2N2N2N1O1O000000000001O2N2N2N2N2N2O1Na\\_3"}, "image_id": 640, "id": 11198}, {"iscrowd": 0, "category_id": 1, "bbox": [204.0, 106.0, 60.0, 54.0], "area": 1629, "segmentation": {"size": [512, 512], "counts": "TTV31o?1N2N2N2N3M2N2N2N2N2N2O1N001O2N2N2N2N2N3M2O1N2N2N2N2N2N2N01O2N2N2N2N2N2N2O2M2N2N2N2N2N2N2N2N0001O000001O2N2N3I`@Ib?56O1N2NSlk3"}, "image_id": 640, "id": 11199}, {"iscrowd": 0, "category_id": 1, "bbox": [389.0, 108.0, 61.0, 59.0], "area": 1742, "segmentation": {"size": [512, 512], "counts": "ncR61n?2N2N2N2N2N2O1N3M2N2N20000001O0001N1N2N2N2N2O1N2N2N3M2N2N2N20000N2N2N3M2N0003M2N2N2N2O1N2B^A[Od>c0^A[Od>c0_AZOd>c0^A[Od>c0^A[Od>d0=N2N2N2N2N2N3M2N2N2Okkn0"}, "image_id": 640, "id": 11200}, {"iscrowd": 0, "category_id": 1, "bbox": [336.0, 127.0, 60.0, 72.0], "area": 1709, "segmentation": {"size": [512, 512], "counts": "iUX51n?2N2ZOL]A6a>L^A5`>M^A5a>L]A7`>K^A7`>K^A7`>K^A7`>K^A7`>L]A6a>L]A6a>e0O000000010O0000EaAVO_>j0cATO]>l0eARO[>n0gAPOY>P1iAnNW>R1;000001O0001O0000000000000001O001O3N1N2N2N2N2N2N2N2N2N3M2O1N2N2N2N`ki1"}, "image_id": 640, "id": 11201}, {"iscrowd": 0, "category_id": 1, "bbox": [37.0, 136.0, 31.0, 30.0], "area": 475, "segmentation": {"size": [512, 512], "counts": "fdb01n?2N2N2N2N2N2N2N2N2N2N2N3M2O1N01O2N2N2N2N2N2O1N2N2N2N2N2N2N2N[km6"}, "image_id": 640, "id": 11202}, {"iscrowd": 0, "category_id": 1, "bbox": [441.0, 139.0, 27.0, 19.0], "area": 205, "segmentation": {"size": [512, 512], "counts": "hdl61n?2N2N2N2N2N01O01O0000000002N2N2N2OO001O2N2N11O1N3M2Nake0"}, "image_id": 640, "id": 11203}, {"iscrowd": 0, "category_id": 1, "bbox": [452.0, 147.0, 60.0, 82.0], "area": 2235, "segmentation": {"size": [512, 512], "counts": "eUR72m?2N2]OKXA7f>KXA7f>KXA7f>KXA8e>JZA78Ak=9kA88Ak=9kA88Ak=9kA88Ak=R1SBPOk=R1SBPOk=R1SBQOj=Q1UBPOi=S1TBoNj=e1N2O1N2O10SN\\B^1d=_N^Ba1b=]NaBa1`=]NbBa1`=]NbBa1`=]NbB5FQ1j=hNbB5FR1i=hNbB4GT1W>jNkAV1U>hNmAX1^>000000N3M2O1N2N2N2N1O00000001O0001O0000001O2N3M2O1N2N2N2NnI"}, "image_id": 640, "id": 11204}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 158.0, 44.0, 51.0], "area": 1288, "segmentation": {"size": [512, 512], "counts": "a5`02EV?d0N2N2N2N2N3N1N2N00001O002N03M1O001O000001O00000001O3M2N2N2O1N2N2N2N3M2N2N2N2N2O1N2N2N`jY7"}, "image_id": 640, "id": 11205}, {"iscrowd": 0, "category_id": 1, "bbox": [494.0, 175.0, 10.0, 10.0], "area": 53, "segmentation": {"size": [512, 512], "counts": "bUg71n?2N3M2O1O11M2N2N2N]j3"}, "image_id": 640, "id": 11206}, {"iscrowd": 0, "category_id": 1, "bbox": [359.0, 178.0, 9.0, 9.0], "area": 39, "segmentation": {"size": [512, 512], "counts": "gec51n?2N2N2N0002N2N2N\\jW2"}, "image_id": 640, "id": 11207}, {"iscrowd": 0, "category_id": 1, "bbox": [419.0, 178.0, 19.0, 19.0], "area": 179, "segmentation": {"size": [512, 512], "counts": "jea61n?2N2N2N2N2N2N2N200000N2N2N2N2N2N2N2NUjT1"}, "image_id": 640, "id": 11208}, {"iscrowd": 0, "category_id": 1, "bbox": [22.0, 182.0, 72.0, 74.0], "area": 2291, "segmentation": {"size": [512, 512], "counts": "RW;2n?1N3M3N1N3M2O2M2N3N1N3N2M2N3N1N3N1010N2M2O2M2N2OO01OQOXOgBi0X=YOfBg0[=[OcBd0]=^OaBc0_=_O^Ba0b=A]B?b=D[B1nAOS>3kALU>P1O01O010O101N3M2O2M3M2O2M2N3N1N3M2O2M3N1N3M2O2M2N3N1N3M3N1N3MTi`6"}, "image_id": 640, "id": 11209}, {"iscrowd": 0, "category_id": 1, "bbox": [274.0, 183.0, 53.0, 52.0], "area": 1349, "segmentation": {"size": [512, 512], "counts": "cVY42n?1N2N2N2N2N2N3M2N2N2N2N2N2N2O1N3M2N2N2N2N00000000000001O0000010O2N2N2N2N2N2N2N2N2N3M2N2N2N2N2N2O1N2N2N2NeYl2"}, "image_id": 640, "id": 11210}, {"iscrowd": 0, "category_id": 1, 
"bbox": [357.0, 183.0, 30.0, 30.0], "area": 467, "segmentation": {"size": [512, 512], "counts": "Ufb51n?2N2N2N2N2N2N2N2N2O1N2N2N2N3M01O2N2O1N2N2N2N2N2N3M2N2N2N2NlYn1"}, "image_id": 640, "id": 11211}, {"iscrowd": 0, "category_id": 1, "bbox": [449.0, 189.0, 15.0, 14.0], "area": 108, "segmentation": {"size": [512, 512], "counts": "RfP71n?2N2N2N2N2O100000O1N2N2N2N2Nlig0"}, "image_id": 640, "id": 11212}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 210.0, 3.0, 4.0], "area": 7, "segmentation": {"size": [512, 512], "counts": "b64m?N2O\\Yn7"}, "image_id": 640, "id": 11213}, {"iscrowd": 0, "category_id": 1, "bbox": [411.0, 212.0, 63.0, 48.0], "area": 1426, "segmentation": {"size": [512, 512], "counts": "ag]61n?2N2N2N2N2N2N2N200000001O000N2N2N1O000000000000010O2N2N3M2N2N2N2N00001O0001O00000000002N2N2N2N2O1N2N2N2N3M2N2N2N2N2N2N2N2N2Ohhb0"}, "image_id": 640, "id": 11214}, {"iscrowd": 0, "category_id": 1, "bbox": [315.0, 216.0, 70.0, 64.0], "area": 2193, "segmentation": {"size": [512, 512], "counts": "Phm42n?1N2N2N3M2N2N2N2F_ORAc0l>_ORAc0l>_ORAc0l>_ORAc0l>:N3M2N2N2O1N2N2N1O0000000000000001O1O2O2M2N2N2N2N2N000000000001O0001O00000000001O2N2O1N2N3M2N2N2N2N2N2N2N2N2O1N3MgXo1"}, "image_id": 640, "id": 11215}, {"iscrowd": 0, "category_id": 1, "bbox": [487.0, 219.0, 25.0, 37.0], "area": 517, "segmentation": {"size": [512, 512], "counts": "\\gc72m?2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2O10\\OVAOl>OVAOl>OVAOl>OVAOl>OVAOl>OVAOj7"}, "image_id": 640, "id": 11216}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 221.0, 24.0, 44.0], "area": 648, "segmentation": {"size": [512, 512], "counts": "R7P1o>2N2O1N3M2N21M2N2O1N2N2N3M2N2N2^Oi@9X?Ej@9X?Ek@8`?N2N2N2N2OWhc7"}, "image_id": 640, "id": 11217}, {"iscrowd": 0, "category_id": 1, "bbox": [73.0, 241.0, 58.0, 58.0], "area": 1695, "segmentation": {"size": [512, 512], "counts": "fhT11n?2N2N2KJ]@8a?5N2N3N1N2N2N2N2N2N2N2N2N3M2JiNaAV1_>lN_AT1a>501O01O02N11N2N0000000000001O00010O2N2N3M2N2N2N2N2N2N2O1ZOh@a0]?N2N3M2N2N2N2N2NhWn5"}, "image_id": 640, "id": 11218}, {"iscrowd": 0, "category_id": 1, "bbox": [129.0, 246.0, 24.0, 24.0], "area": 265, "segmentation": {"size": [512, 512], "counts": "UhP21n?2O1N2N2N2N2N2N2N00001O0000000002N3M2N2N2N2N2NSXc5"}, "image_id": 640, "id": 11219}, {"iscrowd": 0, "category_id": 1, "bbox": [338.0, 249.0, 95.0, 90.0], "area": 2902, "segmentation": {"size": [512, 512], "counts": "mYY52m?2N3M2N2O1N2N2N2N2N2N2N3M2N2N2N2O1N2N1O0000000GkNhAU1X>mNfAS1Z>oNdAR1[>POcAP1]>9001O00000000000001O000001O0@cA^O]>c0dA[O^>c0dA[O^>c0dA[O^>c0eAZO^>c0dA[O]>d0eA[OZ>e0`000000000001O00011N3M2N2N2N2N2N00000010O0000000000001O2N3M2O1N2N2N2N2N2N2N2N3M2N2O1N2N2N2NdWW1"}, "image_id": 640, "id": 11220}, {"iscrowd": 0, "category_id": 1, "bbox": [455.0, 254.0, 57.0, 56.0], "area": 1485, "segmentation": {"size": [512, 512], "counts": "PiS72m?2N2N2N3N1N2N2N2N2N2N2N2N2N2N2N2N2N2N2N01O00000000000000000000000001O0001O2N2N2N2N2N2N2N2N2N3M2N2N2O1N2N2N2N2N2N_G"}, "image_id": 640, "id": 11221}, {"iscrowd": 0, "category_id": 1, "bbox": [19.0, 265.0, 19.0, 19.0], "area": 178, "segmentation": {"size": [512, 512], "counts": "bh91n?2N2N2N2N2N2N2N2N20N2N2N3M2N2N2O1N2N^g\\7"}, "image_id": 640, "id": 11222}, {"iscrowd": 0, "category_id": 1, "bbox": [137.0, 282.0, 64.0, 67.0], "area": 2032, "segmentation": {"size": [512, 512], "counts": "SjT22m?2N2N3M2N2N2O1N2N2N2N2N2N2N2N2N3M2N2N2N2O1N2N1O0GbNQB^1o=dNoA\\1Q>fNmAZ1S>hNkAX1U>900000001O02N2N2N2N2N002N2N2N2N2N2O1N2N3M2N2N2N2N2N2N2N2N2N2N2O2M2N2NZVk4"}, "image_id": 640, "id": 11223}, {"iscrowd": 0, "category_id": 1, "bbox": [98.0, 289.0, 16.0, 15.0], "area": 
82, "segmentation": {"size": [512, 512], "counts": "]Ya12m?2N1O1O0001O00NLY@4g?20000101N2N2NmfV6"}, "image_id": 640, "id": 11224}, {"iscrowd": 0, "category_id": 1, "bbox": [17.0, 292.0, 65.0, 57.0], "area": 1792, "segmentation": {"size": [512, 512], "counts": "Sj81o?2M2N3N1N3M2O2M2N3N1N3M2O2M2N3N1N2N10O00010O00010O00010O00010O00010O00010O00010O00010O00010O00102M2N3N1N3M2O2M2N3N1N3M2O2M2N3N1NUff6"}, "image_id": 640, "id": 11225}, {"iscrowd": 0, "category_id": 1, "bbox": [460.0, 312.0, 10.0, 10.0], "area": 54, "segmentation": {"size": [512, 512], "counts": "mYV72m?2N2N2N1O02N2N2N2NUfd0"}, "image_id": 640, "id": 11226}, {"iscrowd": 0, "category_id": 1, "bbox": [393.0, 314.0, 48.0, 47.0], "area": 1167, "segmentation": {"size": [512, 512], "counts": "ajT62m?2N2N2N2N2N2N2N2N3N1N2N2N2N2N2N2N3M2O1N2N1O001O0001O2N2N2N2N2N2N2O2M2N2N2N2N2N2N2N3M2O1N2N2N2N`US1"}, "image_id": 640, "id": 11227}, {"iscrowd": 0, "category_id": 1, "bbox": [208.0, 316.0, 20.0, 20.0], "area": 199, "segmentation": {"size": [512, 512], "counts": "WZX31n?2N2N2N2N2N2O2M0000000001O2N2O1N2N2N3Mne]4"}, "image_id": 640, "id": 11228}, {"iscrowd": 0, "category_id": 1, "bbox": [455.0, 316.0, 33.0, 33.0], "area": 572, "segmentation": {"size": [512, 512], "counts": "\\jS73l?2N2O1N2N2N2N2N2N2N002N2N2N2N2N2N11N2N2N2N2N2N2N2N2N2B`@:d?N2N2N2N2Nce;"}, "image_id": 640, "id": 11229}, {"iscrowd": 0, "category_id": 1, "bbox": [185.0, 329.0, 10.0, 11.0], "area": 58, "segmentation": {"size": [512, 512], "counts": "]jl22m?2O1N3M2N03M2O1N2NcUn4"}, "image_id": 640, "id": 11230}, {"iscrowd": 0, "category_id": 1, "bbox": [172.0, 342.0, 63.0, 61.0], "area": 1731, "segmentation": {"size": [512, 512], "counts": "m[f22m?2N3M2N2N2O1N2N2N2N2N2N3M2N2N2N2O1N2N1O00000000000000010O00000000000000000010O000000002N2N2N2N2N2O1N2N3M2N2N2N2N2N2N2N2O1N3MfTZ4"}, "image_id": 640, "id": 11231}, {"iscrowd": 0, "category_id": 1, "bbox": [496.0, 342.0, 15.0, 14.0], "area": 111, "segmentation": {"size": [512, 512], "counts": "nZh71n?2N2N2N2N2N1O01O011N2N2N3M2NU5"}, "image_id": 640, "id": 11232}, {"iscrowd": 0, "category_id": 1, "bbox": [67.0, 344.0, 87.0, 85.0], "area": 3023, "segmentation": {"size": [512, 512], "counts": "blQ11n?2N2N2N2N2N2N2N2N2N2N3N1N2N2N2N2N2N2N2N2N2N3M2O1N0000000000000000001O01O0NcAbN]>^120000000000001O0001O000000000000000001O0001O0000002N2N2N2N2N2O2M2N2N2N2N2N2N2N2N2N2N3M2O1N2N2N2N`db5"}, "image_id": 640, "id": 11233}, {"iscrowd": 0, "category_id": 1, "bbox": [36.0, 349.0, 7.0, 7.0], "area": 27, "segmentation": {"size": [512, 512], "counts": "oZb01o?2M2N2OO2N3MQUZ7"}, "image_id": 640, "id": 11234}, {"iscrowd": 0, "category_id": 1, "bbox": [419.0, 353.0, 53.0, 54.0], "area": 1488, "segmentation": {"size": [512, 512], "counts": "Pla62m?2N3M2JI`@9^?6O1N2N2N2N2N2N2N3M2N2N2N2O1N2N1O00001O000000000001O2N2N2N2N2O1N2N2N2N2N3M2N2N2N2N2N2O1N2N2N2N3MXdc0"}, "image_id": 640, "id": 11235}, {"iscrowd": 0, "category_id": 1, "bbox": [499.0, 353.0, 13.0, 24.0], "area": 165, "segmentation": {"size": [512, 512], "counts": "[ki71n?2O1N2N2N3M2N2N2N2N2N2O1nD"}, "image_id": 640, "id": 11236}, {"iscrowd": 0, "category_id": 1, "bbox": [377.0, 359.0, 24.0, 25.0], "area": 293, "segmentation": {"size": [512, 512], "counts": "`kl51n?2N2N2O1N3M2N2N2N2N20001O00N2N2N2N3M2N2O1N2N2N\\Tg1"}, "image_id": 640, "id": 11237}, {"iscrowd": 0, "category_id": 1, "bbox": [33.0, 375.0, 31.0, 31.0], "area": 488, "segmentation": {"size": [512, 512], "counts": "Sl`01o?1N2N2N2N3M2N2N2O1N2N2N3M2N2O1000N2N2N2N2O2M2N2N2N2N2N3M2O1Nico6"}, "image_id": 640, "id": 11238}, {"iscrowd": 0, "category_id": 1, 
"bbox": [458.0, 388.0, 54.0, 53.0], "area": 1499, "segmentation": {"size": [512, 512], "counts": "R]U72m?2N2O1N2N3M2N2N2N2N2N2N2N2O1N3M2N2N2N2N1O000001O000001O000000000001O2N2N2N2O1N3M2N2N2N0000000000101N2N2N3McC"}, "image_id": 640, "id": 11239}, {"iscrowd": 0, "category_id": 1, "bbox": [131.0, 392.0, 9.0, 9.0], "area": 45, "segmentation": {"size": [512, 512], "counts": "\\lQ21n?2N2N2N2OO2N2N2Nfci5"}, "image_id": 640, "id": 11240}, {"iscrowd": 0, "category_id": 1, "bbox": [219.0, 396.0, 58.0, 49.0], "area": 1567, "segmentation": {"size": [512, 512], "counts": "Rm]31n?2N2N2N2N2O2M2N2N2N2N2l@[Oj>g0TA[Oj>g0TA\\Oi>o0N2O1N1O00000011000001N1O1N2N0000000000000001O0001O002N2N2N2N2N2N2O1N3M2N2N2N2N2N2N2N2ORSe3"}, "image_id": 640, "id": 11241}, {"iscrowd": 0, "category_id": 1, "bbox": [149.0, 410.0, 34.0, 33.0], "area": 559, "segmentation": {"size": [512, 512], "counts": "ZmZ22n?1N2N2N2N3M2N2O1N2N2N3M2N2N01O000001O01O2N2N3M2N2O1N2N2N3M2N2N2O1NhRT5"}, "image_id": 640, "id": 11242}, {"iscrowd": 0, "category_id": 1, "bbox": [262.0, 413.0, 32.0, 31.0], "area": 333, "segmentation": {"size": [512, 512], "counts": "e]S41n?2N2N2N2N3N0O00000000001O00000000000000000000000001O2N2N2N2N2Nob\\3"}, "image_id": 640, "id": 11243}, {"iscrowd": 0, "category_id": 1, "bbox": [188.0, 421.0, 25.0, 24.0], "area": 303, "segmentation": {"size": [512, 512], "counts": "c]n21n?3M2N2N2N2N2N2N2N1O0010O0000002N2N2N2N2N2N3M2O1NbRe4"}, "image_id": 640, "id": 11244}, {"iscrowd": 0, "category_id": 1, "bbox": [483.0, 428.0, 29.0, 59.0], "area": 999, "segmentation": {"size": [512, 512], "counts": "Una71n?3M2N2N2O1N2N2d@CR??l@CR?`0k@CR?g0n@TOm>Q1N2O1N2N2N2N2N3M2N1O01O01O000000000eB"}, "image_id": 640, "id": 11245}, {"iscrowd": 0, "category_id": 1, "bbox": [263.0, 431.0, 54.0, 56.0], "area": 1595, "segmentation": {"size": [512, 512], "counts": "]nS42m?2N2N2b@Kn>7PAKn>7PAKn>7PAKn>7QAJm>8QAJm>f0O1N001O000000002N2N2N2N1O000001O000001O00000002N2N2N2N3M2N2N2N2N2O1N2N2N2N2N2N2N3M2N2N2NkQQ3"}, "image_id": 640, "id": 11246}, {"iscrowd": 0, "category_id": 1, "bbox": [222.0, 455.0, 39.0, 36.0], "area": 698, "segmentation": {"size": [512, 512], "counts": "l^_31n?2N2N2N2O1N3M2N2N2N2N2N2N2N01O01O0000001O2N0011N2N2N2N2N2N2N3M2N2O1N2N2N2N2NZQm3"}, "image_id": 640, "id": 11247}, {"iscrowd": 0, "category_id": 1, "bbox": [302.0, 477.0, 51.0, 35.0], "area": 1009, "segmentation": {"size": [512, 512], "counts": "j_g41n?2N2N2N2N2N100O1O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O1O1O100O11O1O2N1O1N2N2N2N2N2N2N2O1N3M2N2N2N2N`P_2"}, "image_id": 640, "id": 11248}, {"iscrowd": 0, "category_id": 1, "bbox": [253.0, 482.0, 23.0, 22.0], "area": 260, "segmentation": {"size": [512, 512], "counts": "^on31n?2N2N2N2N2N2N2N2N2N1O0001O2N2N2N2N2N2N2N2N2Nf`e3"}, "image_id": 640, "id": 11249}, {"iscrowd": 0, "category_id": 1, "bbox": [265.0, 492.0, 32.0, 20.0], "area": 338, "segmentation": {"size": [512, 512], "counts": "ooT41n?1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O1O11O1O1O1N3N1N2N2N2N2N2NYP[3"}, "image_id": 640, "id": 11250}, {"iscrowd": 0, "category_id": 1, "bbox": [242.0, 500.0, 20.0, 12.0], "area": 144, "segmentation": {"size": [512, 512], "counts": "l_i32m?2N2N1O1O1O1O1O1001O1O1O1O1O1O1O1O2N1OQ`l3"}, "image_id": 640, "id": 11251}, {"iscrowd": 0, "category_id": 1, "bbox": [103.0, 265.0, 62.0, 68.0], "area": 2015, "segmentation": {"size": [512, 512], "counts": "Qjc11m?3N2N2\\OI\\A9a>I^A8a>J\\A9b>I\\A9b>I\\A9b>I\\A9a>J]A6c>LZA5f>LYA4g>c0O2N2O1O001M3N2NO1000O1000O1000O100000O10O11O1O2M3N2N2N1O2N2M2O1O1O2N2M3N2N1O2N2M3N2N1O2N2N2M3N1OVW]5"}, "image_id": 642, "id": 11252}, {"iscrowd": 
0, "category_id": 1, "bbox": [232.0, 303.0, 110.0, 121.0], "area": 5754, "segmentation": {"size": [512, 512], "counts": "bZd32m?2N1N3N2N2N2M2WBAPRDEn;;PDGP<8oCJQ<6nCJR<7nCIR<7nCIR<7mCJS<5nCKQ<6nCJS<6kCLU<4iCNW<1gC2Y]1PB`NP>`1SB^Nm=b180O102N2N1N3N2N2M3N1O2N2M3N1O2N2M1000O01002N2M2O2O1000O01VO\\BWOd=f0^B[Ob=c0`BoNL1d=n0bBnNM3a=n0dBmNM3a=m0dBoNL3b=l0dBoNL2c=m0PCQOR=l0QCROP=m0S1N2N2M2O2N2M3N2N1O2M3N2N2N1Nked2"}, "image_id": 642, "id": 11253}, {"iscrowd": 0, "category_id": 1, "bbox": [330.0, 339.0, 33.0, 34.0], "area": 569, "segmentation": {"size": [512, 512], "counts": "U[U52m?2N2N2M2O2N2M3N2N1O2M3N2N1N30000O10N2N2M2O2N2M3N2N1O2M3N2N1N3N2NPUZ2"}, "image_id": 642, "id": 11254}, {"iscrowd": 0, "category_id": 1, "bbox": [220.0, 388.0, 14.0, 12.0], "area": 86, "segmentation": {"size": [512, 512], "counts": "Y\\^31n?1O2N2N1O2O1000O010N2M3N1OhcZ4"}, "image_id": 642, "id": 11255}, {"iscrowd": 0, "category_id": 1, "bbox": [203.0, 400.0, 17.0, 15.0], "area": 127, "segmentation": {"size": [512, 512], "counts": "flU32m?1N3M3N110O01000O010O0100M2O2M2N[ca4"}, "image_id": 642, "id": 11256}, {"iscrowd": 0, "category_id": 1, "bbox": [229.0, 425.0, 59.0, 57.0], "area": 1817, "segmentation": {"size": [512, 512], "counts": "jmb32m?2\\@OV?2i@OV?3h@OU?4i@NU?4h@OV??M2O200000O010000N2N1O2M3N2N1N3N2N2N20O100000O10O100000O01O1N2N1N3N2N2M3N1O2N2M3N1O2M3N2N2N1N3N2N2N2M2O2Nma_3"}, "image_id": 642, "id": 11257}, {"iscrowd": 0, "category_id": 1, "bbox": [280.0, 474.0, 63.0, 38.0], "area": 1067, "segmentation": {"size": [512, 512], "counts": "S_\\43k?2O2M3N1N3^@D]?a0O10O10O10O01000O010O10O10O10O10O01000]Od@`0_?0O01000O010O10O01M30O1O001O1O001O001ON2O1N2N2O1N100O02N3N2M2O2M2O2M3M2O2MhPd2"}, "image_id": 642, "id": 11258}, {"iscrowd": 0, "category_id": 1, "bbox": [3.0, 0.0, 56.0, 42.0], "area": 1283, "segmentation": {"size": [512, 512], "counts": "P`11o?1O2N1O1O1O1O1O1O1O1O2N1O1O1O1O1O1O1O1O1O2N1O1O1O1O1O1O1O1O2N1O1O1O1O1O1O1OO1O1O1O100SOWAb0j>]OXAa0i>^OYA`0h>AXA=i>CYA:h>EZA9g>F[A8f>G\\A7e>H]A6d>I^A6b>I`A5a>JaA4W?NZ_R7"}, "image_id": 643, "id": 11259}, {"iscrowd": 0, "category_id": 1, "bbox": [200.0, 27.0, 62.0, 71.0], "area": 2798, "segmentation": {"size": [512, 512], "counts": "YQT31k?8K3L3N2O1N1i@_Oo>a0m@Cl0KW=l0dBXO\\=h0`B\\Oa=c0[BAe=?WBFf==UBGh=F\\A5i>F\\A6Y?KTnS3"}, "image_id": 643, "id": 11261}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 54.0, 2.0, 3.0], "area": 4, "segmentation": {"size": [512, 512], "counts": "f13n?NZnn7"}, "image_id": 643, "id": 11262}, {"iscrowd": 0, "category_id": 1, "bbox": [113.0, 54.0, 78.0, 77.0], "area": 3097, "segmentation": {"size": [512, 512], "counts": "dbh12n?1N2N3M2N2O1N3M2N2N2O1N3M2N2N2OO000000LSOVAn0i>41O01O3M2N2O1N2N3M2N2O1N3N1000010O0000010O0000010O0O1N2N2N2N01O0001O0001O00011hNRB;P>CRB;P>DQB:R>CPB;R>CQB:Q>DQB;P>CRB;Q>CPB;R>CQB:Q>DQB:Q>DQB;Q>BQBCQB:l>N2N2O1N3Me\\P5"}, "image_id": 643, "id": 11263}, {"iscrowd": 0, "category_id": 1, "bbox": [300.0, 86.0, 212.0, 426.0], "area": 47449, "segmentation": {"size": [512, 512], "counts": 
"oZf47i?7I8H7I7I7I2N0O1000O10000000O1000O10000000O1000O105K0000O104L5K0000O10O100000000000O0GgAmNY>S1:000000000O0100000000000O10O1000000000O1^BmNW5k9oNgEl0>5k9oNgEl0=6l9nNgEl0=5m9nNgEm0c0Ne9\\ObEf0P1G^9J[E?X1F]92SE8a1F\\9^1dFbN\\9^1fF`NZ9`1kFZNV9f1iF[NW9e1iFRNYNGe6M]M[2eMQNS5HXM]2_MkMU5L\\M`2XMdMX50`Md2PM[M]56bMf2jLTM`5:fMi2bLnLd5=jMl2[LgLg5a0UJUOX3k3eL_Lj5f0TJTO]3o3]LWLn5j0TJTOa3Z4W2UMiMj2T2[MkMl2j1XMVNo2_1UMaNR3X1nLgN[3Q1fLnNa3l0^LTOi3e0WL[OP4>PLBW47jKHV49iKGW49iKeLjMa2^6j0hKaLnMe2Z6i0jK]LPNj2V6e0nK]LPNn2R6a0bL^O^3b0bL^O^3c0aL]O_3c0aL\\O`3d0aLWOc3i0]LQMoL_1h6a1XLlLTM_1h6e1TLhLYM^1g6j1QLcL\\M^1h6o1lK_L`M^1h6T2ZLhMj3X2WLcMm3]2SL_MQ4a2oK[MU4f2jKUM[4k2fKPM^4n2dKnL`4n2dKnL`4n2dKnL`4n2eKmL_4Q3Y41O0000001O0000001O0000001O000000N2L4L4L4L4001O0000001O0000001O0000001O0000001O00000VIVM>k2\\OZMd0f2WO_Mi0a2ROdMn0\\2mNhMT1Y2eNmM[1S2`NRN`1n1[NWNe1i1VN\\Nj1e1PN`NP2`1oMaNQ2_1oM`NR2`1nM`NR2a1nM^NR2b1nM^NR2b1nM^NR2b1nM^NR2c1nM\\NR2d1nM[NS2e1mM[NS2e1mM[NS2f1lMZNT2f1hM^NX2b1dMbN\\2^1`MfN`2[1[MhNf2X1XMjNh2V1XMjNh2V1YMiNg2X1XMhNh2X1XMhNh2X1XMhNh2X1YMfNh2"}, "image_id": 643, "id": 11264}, {"iscrowd": 0, "category_id": 1, "bbox": [133.0, 106.0, 12.0, 11.0], "area": 71, "segmentation": {"size": [512, 512], "counts": "_cR21n?2N2N2N3M1001N3M2N2O1Na\\g5"}, "image_id": 643, "id": 11265}, {"iscrowd": 0, "category_id": 1, "bbox": [195.0, 124.0, 92.0, 76.0], "area": 3471, "segmentation": {"size": [512, 512], "counts": "hdQ33k?2N3L3N3M2N3M2N3M2N3L3N2N30O010O010O010O010O010O01O010O010O010O01O010O01M201O0010O010O010O010O01M2N3bAdNT>^1iAeNU>e1M2N3N1010O010O010O010O010O010O010hNTB:l=DYB:g=C^B:b=DaB;`=BcB>\\=@fB`0]=[OfBe0\\=VOfBj0X>10O010O0N3M2N3M2N3L3N3M2N3M2NbZ`3"}, "image_id": 643, "id": 11266}, {"iscrowd": 0, "category_id": 1, "bbox": [75.0, 132.0, 74.0, 72.0], "area": 2604, "segmentation": {"size": [512, 512], "counts": "fdU11n?2N2N3M2N2O1N2N2N2N2N2N2N2N2N2N2N1O0002000000000001O00000001O00000O1N2N2N2N2N2O1N2N2N2N3M2N2N2O1001M200000000001ON2eNjAi0X>UOjAk0V>SOlAm0T>QOnAm0T>QOnAm0T>QOnAn0T>oNnAo0c>N2N2]On@5T?In@5T?In@5T?In@5T?Io@4`?N2NSZe5"}, "image_id": 643, "id": 11267}, {"iscrowd": 0, "category_id": 1, "bbox": [177.0, 148.0, 10.0, 10.0], "area": 64, "segmentation": {"size": [512, 512], "counts": "idh23j?3N3O00010O01M2M3N\\[R5"}, "image_id": 643, "id": 11268}, {"iscrowd": 0, "category_id": 1, "bbox": [4.0, 172.0, 71.0, 72.0], "area": 2690, "segmentation": {"size": [512, 512], "counts": "PV23k?6J4K4RA@Z>`0dAC[>>bAE]>;aAG^>;`AG_>9_AIa>8\\AKa>7]AL3_OV>\\1hAgNV>[1hAfNV>d1M210O10O010O10O010O010O010O10O010O0N3M2N3N2M2N3M2N3M2O0O00001O01O000001O003N1O20O01000OO2M2N3N1N3M2N3N1N3M3M2O2M2N[Zj6"}, "image_id": 643, "id": 11269}, {"iscrowd": 0, "category_id": 1, "bbox": [166.0, 186.0, 95.0, 74.0], "area": 3402, "segmentation": {"size": [512, 512], "counts": "dVc22l?3M2N3M2O2M2N3M3M2N3N1N3M210O010O01000O010O010O001M2N3O010O10O010O010O010O10O010O10O010O01O0N3N1N3M2N3M3M210O0jAXNU>i1010O10O010O010O010O10O0POmA4S>IoA7R>FQB:n=DTBj=@YB?h=^OZBc0e=[O^Bd0c=YO_Bh0`=VOcBj0]=TOdBl0Y>0O010O01N2M2N3M2N3M2O2M2N3M2N3MeXm3"}, "image_id": 643, "id": 11270}, {"iscrowd": 0, "category_id": 1, "bbox": [147.0, 199.0, 10.0, 10.0], "area": 67, "segmentation": {"size": [512, 512], "counts": "[fY23l?1N3N2O0010O001L3MjYa5"}, "image_id": 643, "id": 11271}, {"iscrowd": 0, "category_id": 1, "bbox": [113.0, 200.0, 10.0, 8.0], "area": 46, "segmentation": {"size": [512, 512], "counts": "[fh11n?2N1O2O1O10O1N2N1OfYR6"}, "image_id": 643, "id": 11272}, {"iscrowd": 0, "category_id": 1, "bbox": [50.0, 214.0, 81.0, 92.0], "area": 3271, "segmentation": {"size": [512, 512], 
"counts": "_Xi03l?1N3b@Ll>6RALl>7RAKk>8RAJl>8RAKl>7RAKk>f0N3M2N3O001000O010ON3M2N3N2M2N3O0010OO2N2M2N3M1O11N3M2N3N1N3M2N3N2M2N3N0O1O0001O01O01O0001O01O01O01O01O2N3N2M2O20O010ON3N1N3M3M2N3M2O2M2N3M3M2N3MPYn5"}, "image_id": 643, "id": 11273}, {"iscrowd": 0, "category_id": 1, "bbox": [143.0, 242.0, 92.0, 61.0], "area": 3104, "segmentation": {"size": [512, 512], "counts": "[hW22l?2N3M3N1N3M2N3N1N3M2O2M3M2010O010O010O0100O010O010O001N10100O0100OVAoNe>R1YAoNh>U1O010O10O010O010O010O10O0N3M2N3N2M2O2O01M2N2OO000010O1O3M201O001N2O001O001N110O01000ROVAd0j>ZOXAg0h>WOXAj0h>TOZAm0l>N2N2O2M2N3M2N3M3M4L4MfWZ4"}, "image_id": 643, "id": 11274}, {"iscrowd": 0, "category_id": 1, "bbox": [124.0, 257.0, 13.0, 13.0], "area": 91, "segmentation": {"size": [512, 512], "counts": "YXn11m?2N2N3M210O010O010ON3L3NmWk5"}, "image_id": 643, "id": 11275}, {"iscrowd": 0, "category_id": 1, "bbox": [18.0, 263.0, 11.0, 11.0], "area": 67, "segmentation": {"size": [512, 512], "counts": "_X91m?2N3N1N3N11O0N3N1N3MhWa7"}, "image_id": 643, "id": 11276}, {"iscrowd": 0, "category_id": 1, "bbox": [101.0, 290.0, 85.0, 74.0], "area": 2858, "segmentation": {"size": [512, 512], "counts": "kib11m?2O2M2N3M2N3N1N3M2N3M2O2M2N3O1O010O010O010O010O010O010O010O010O10O010O010O010O010O010O010O0100O0100OO2ZAoNY>S1eAPOX>S1eAoNY>^1M2010O10O010O010O010O0100O0QOhA7X>FkA9U>EmAAPB>P>@RBa0n=\\OUBb0l=\\OVBb0m=[OVBb0m=[OVBb0l=]OUBa0g>M2N3M2N3M2N3M^eR5"}, "image_id": 643, "id": 11277}, {"iscrowd": 0, "category_id": 1, "bbox": [267.0, 224.0, 202.0, 222.0], "area": 16033, "segmentation": {"size": [512, 512], "counts": "alU41n?2N2N1N3N2N2N2N2M2O2N2N2N2N2M3N1O2N2N2M3N2N1O2N2N2M3N2N2N1O2M3N2N2N2N1O2M3N2N2N2N2M2O2N2N2N2N2M2O2N2N2N2M2O0000O1000O100000O10O100000O1000O100000O10O100000O1000O100000O10O100000O1000O100000O10O100000O1000O100000O11N3N2N1O2N1OO010000000BgCUMY]OTB6I?T>YOUB7Fa0V>VOUBW1l=gNVBX1Z>O1O1O1O00O1O1O01O2N2N2N1O2M3N2N2N2N2N2N1N3N2N2N2N2N2NiPo4"}, "image_id": 644, "id": 11280}, {"iscrowd": 0, "category_id": 1, "bbox": [407.0, 0.0, 105.0, 71.0], "area": 5965, "segmentation": {"size": [512, 512], "counts": "ka[65k?7I0O1VNKiC5W<2bCN^<9[CFffNiAY1X>dNkA\\1T>cNnA]1Z>01000OhN`AP1a>nNaAP1a>mNaAQ1i>N2N2M2O2N2N1N3N2N2M2O2N2M2O2N2Md\\P5"}, "image_id": 645, "id": 11282}, {"iscrowd": 0, "category_id": 1, "bbox": [387.0, 98.0, 35.0, 27.0], "area": 765, "segmentation": {"size": [512, 512], "counts": "UcQ67i?9G8H000000O10O100000000000O1000O1000000000O1000O100000000000O5L9G8HYl\\1"}, "image_id": 645, "id": 11283}, {"iscrowd": 0, "category_id": 1, "bbox": [84.0, 123.0, 76.0, 81.0], "area": 3310, "segmentation": {"size": [512, 512], "counts": "bUZ13l?2M2GKf@7W?Kh@7V?Kg@8W?Ih@8V?;N2M2O2N2M3N1O2M2O0O102N1N3N2M2O2N2M3N1N3N2N1N3N2M3N1O2O10O01000O1M2O2N2M3N1N3N2N1N3N2M3N1O2M3N1O2M3N2M2O2N2M2O2M3N2N1N3N2M3N1O2M3N1O2M3NYk_5"}, "image_id": 645, "id": 11284}, {"iscrowd": 0, "category_id": 1, "bbox": [171.0, 125.0, 77.0, 66.0], "area": 2850, "segmentation": {"size": [512, 512], "counts": "gde21m?3N1N3N2N1N3N2M3N1O2M3N1N3N2N2M2O2M30O10O10O1000O10O10N2M2O2N2000O01000O10O1000OO2M3N1O2M3O10O10O10O1000O10O100M2O2M3NO10O100O1EbAUO_>b0aAZO23^>a0bA[O21^>c0bAZO21]>c0mA[OU>d0d0O1N3N2M3N2N3LikS4"}, "image_id": 645, "id": 11285}, {"iscrowd": 0, "category_id": 1, "bbox": [154.0, 140.0, 11.0, 10.0], "area": 52, "segmentation": {"size": [512, 512], "counts": "_T]21m?3N2O10O10O1000M2O2N`[]5"}, "image_id": 645, "id": 11286}, {"iscrowd": 0, "category_id": 1, "bbox": [269.0, 155.0, 105.0, 88.0], "area": 3782, "segmentation": {"size": [512, 512], "counts": 
"heV42m?1N3N2M2O2M3N1N3N2M2O2N2M2O2M3N2M2O2M3N10100O01000O01000O0100000O01000O01000O01000O01000O0100000O01000O01000O01000O01000O0100000O01000O01000O01000O01000O0100000O01000OO2M3N1N3N2N1N3N2M2O2M3N2M2O2M3N1O2M3N1N3N]iT2"}, "image_id": 645, "id": 11287}, {"iscrowd": 0, "category_id": 1, "bbox": [151.0, 179.0, 74.0, 74.0], "area": 2860, "segmentation": {"size": [512, 512], "counts": "Sg[23l?2CMi@5U?Mj@5T?Mi@6T?Mj@4U?=M3N1N3N2M201000O0100O0O2M3N1O2M3N00010100O10N2M3N1O2M3N1N3N2N1N3N2N1N20N3N2M2O2M3N1N3^OlAUOV>i0lATOV>j0mATOU>j0lAUOV>h0mAUOU>j0mATOU>i0c0N1N3N2O001000O0100N1O2M3N1N3N2M2O2MjY_4"}, "image_id": 645, "id": 11288}, {"iscrowd": 0, "category_id": 1, "bbox": [468.0, 195.0, 44.0, 24.0], "area": 818, "segmentation": {"size": [512, 512], "counts": "YVZ76i?:G3M00000000000O010000000000000O010000000000000O01000000000000O010000000000000O010000mI"}, "image_id": 645, "id": 11289}, {"iscrowd": 0, "category_id": 1, "bbox": [225.0, 221.0, 77.0, 74.0], "area": 2906, "segmentation": {"size": [512, 512], "counts": "bh`32n?1N3M2O2mNHYB:f=GXB;f=HXB:e=HYB:f=HWB;f=GYB:e=IXB:f=HWB:g=HXB:e=IXB8h=IWB7h=LUB4l=MRB3n=0PB0o=P11O011N3M010O0010O00010O0010O00010O00010O003N1N3M2O2M2N3N1N3M2O2M2O0O0010O00010O00010O01O3N1N3M3N1N3M2O2M2O2M2N3N1NThX3"}, "image_id": 645, "id": 11290}, {"iscrowd": 0, "category_id": 1, "bbox": [307.0, 233.0, 103.0, 158.0], "area": 9053, "segmentation": {"size": [512, 512], "counts": "]gi41o?8H8G9H8H8H9G1OO10O100000O1004L7H8I8H7I7I7I8H7H8I7M4O000000I7I7I6J0O1000000000O10O1000000000O10O1000000000O10O1000000000O10O1000000000O10O1000000000O10O1000000000O10O100000003M7I7H8I7I7I7I7I7I7H8I7I7I7I7IUdb1"}, "image_id": 645, "id": 11291}, {"iscrowd": 0, "category_id": 1, "bbox": [468.0, 239.0, 44.0, 209.0], "area": 5398, "segmentation": {"size": [512, 512], "counts": "[YZ73m?7I8H8H8H8H8G8I3VNZNTEf1l:aNmD_1S;iNeDW1Z;QO_Do0a;XOXDh0h;_OQDa0o;FjC9Wn0WATOh>m0VAUOg>S1O2N2M2O2M3N2B^N[Bc1d=_NYBd1d=_NZBc1d=^N[Bc1c=?N2N1N010O10O102M3N1N3N2N1N3N2M2O2M3N1O2M3N1N3N2N1N3N20O01000O10O10O10nNXAj0i>TOXAm0n>M3N1N3N2M2O2M3N1N3N2M2O2M3NZWf6"}, "image_id": 645, "id": 11293}, {"iscrowd": 0, "category_id": 1, "bbox": [449.0, 254.0, 16.0, 26.0], "area": 342, "segmentation": {"size": [512, 512], "counts": "PhP77h?9H8H1O000000O10O10000001O8H8GbWg0"}, "image_id": 645, "id": 11294}, {"iscrowd": 0, "category_id": 1, "bbox": [441.0, 288.0, 20.0, 15.0], "area": 224, "segmentation": {"size": [512, 512], "counts": "Sil66j?6I10O1000000000O10O100000000000O13L8IfVi0"}, "image_id": 645, "id": 11295}, {"iscrowd": 0, "category_id": 1, "bbox": [259.0, 290.0, 50.0, 91.0], "area": 3208, "segmentation": {"size": [512, 512], "counts": "XiQ47i?7I8H7I0O1000003M7I8H7H9H5K0000O100000O103M8H7I1O000O100000O10000000O100000O100000OZNlBb0T=^OSC;m0]A2a>0]A2a>O^A3`>O^A3_>0^A3`>O^A3`>O^A2a>0]A2a>0]A2a>0]A2a>h0M3N2N2N1O2RBXNa=j1]BXN`=k1^BWN`=k1^BWN`=k1^BWN`=U2O2NFcBTN[=l1gBSNW=n1kBPNU=o1nBoMR=Q2oBmMT=Q2nBmMT=Q2>O2N200O1O1N1O2N2N2N2M2O2N2N2N2N1O0O2IWAVOj>i0XAUOi>j0YATOh>k05100000O100000O101O2N20000000N2N1O2M3N2N2N2N2N2N1N3N2N2N2N2NQVe5"}, "image_id": 645, "id": 11297}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 330.0, 21.0, 40.0], "area": 445, "segmentation": {"size": [512, 512], "counts": "Z:X1h>O2N2N2N2N2N2N2N1O2N2M3N2N2N2N2N2N1O2N2NVUe7"}, "image_id": 645, "id": 11298}, {"iscrowd": 0, "category_id": 1, "bbox": [37.0, 331.0, 9.0, 11.0], "area": 56, "segmentation": {"size": [512, 512], "counts": "ajb02l?3N1N3N2OO2M3M2NeeX7"}, "image_id": 645, "id": 11299}, {"iscrowd": 0, "category_id": 1, "bbox": [7.0, 368.0, 65.0, 80.0], "area": 2572, "segmentation": 
{"size": [512, 512], "counts": "jl32m?2N2N2M2O2N2N2N2G_OPAc0n>_OPAc08[OY>3^Ad07[OY>n0dAUOZ>m0dAUOY>l0gAUOX>k0hAWOV>i0iAZOU>g0jA[OS>h0kAZOS>[1N10100O1000O01M3N000O1000O10001O2N2M3@nAoNT>o0nAoNR>Q1PBmNP>R1SBlNm=T1UBiNl=W1VBgNj=Y1=1000O3N2N2N1O2N2N2M3N2N1Hi@FY?8i@FY?7j@GX?7:N2N1N3NSdk6"}, "image_id": 645, "id": 11300}, {"iscrowd": 0, "category_id": 1, "bbox": [98.0, 370.0, 69.0, 71.0], "area": 2607, "segmentation": {"size": [512, 512], "counts": "V\\a12m?2N2N1O2M3N2N2N2N2N2N2N2N2N1N30000000000N2N2N2N2N1O2N2N2000000O3N00O100000000000O1N2N2N2N1O2N2N2N2O1O1O1OPOVBIk=6WBHj=7XBGj=7XBGi=7ZBGh=7ZBFh=9YBFi=8YBFj=7XBGk=6WBHP>1RBMP>1RBMP>1RBMP>1RBMP>0SBNo=0jT\\5"}, "image_id": 645, "id": 11301}, {"iscrowd": 0, "category_id": 1, "bbox": [465.0, 373.0, 18.0, 33.0], "area": 428, "segmentation": {"size": [512, 512], "counts": "fkX74l?7I8H7I6I01000000000001N8I4L005K7IcS>"}, "image_id": 645, "id": 11302}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 382.0, 1.0, 3.0], "area": 3, "segmentation": {"size": [512, 512], "counts": "n;3oco7"}, "image_id": 645, "id": 11303}, {"iscrowd": 0, "category_id": 1, "bbox": [270.0, 386.0, 61.0, 81.0], "area": 3069, "segmentation": {"size": [512, 512], "counts": "S]W41o?7I8H7I6J0O1000O1000000000I70O1000O5L7I0000O100000\\OfNaB[1_=lNZBT1f=SOSBm0l=d00O10000003M7H3N00000000000O0100000000000O08I7I8H7I7I8H7I7H8I8HeQj2"}, "image_id": 645, "id": 11304}, {"iscrowd": 0, "category_id": 1, "bbox": [190.0, 389.0, 62.0, 107.0], "area": 3516, "segmentation": {"size": [512, 512], "counts": "R^o23l?2N1N3N2N2N1N3N2N2M2O2cNZO]Ch0b<_OUCd0iP=HiB:V=MaB6^=0[B2d=V1aBfMV=Z2hBiMW=X2gBiMX=a2O1O1O001O1O1N101O1O11N10O1O1O1O0OQOSCnNlM2O2N2N2M2O2N2MWaQ4"}, "image_id": 645, "id": 11305}, {"iscrowd": 0, "category_id": 1, "bbox": [338.0, 398.0, 90.0, 114.0], "area": 7126, "segmentation": {"size": [512, 512], "counts": "d\\Y56i?7J7I7I6J7I7H7J7I7I6J7ITNfBY1S=gNRCZ1gZ1N2N2N2N2N1O2M3N2N2N2N2N2N2N2N1O1O000O100001O2N2N2N1O2M3N2N2N2N2N2N2N1O2N2M3N2N2N2N2N1O2N2N2N2N2M3N2N1O2N2N2N2N2N2N2NTRo5"}, "image_id": 645, "id": 11307}, {"iscrowd": 0, "category_id": 1, "bbox": [131.0, 432.0, 11.0, 11.0], "area": 62, "segmentation": {"size": [512, 512], "counts": "dmQ21n?2N2N2N2000O100N2N2N[bh5"}, "image_id": 645, "id": 11308}, {"iscrowd": 0, "category_id": 1, "bbox": [308.0, 466.0, 26.0, 18.0], "area": 359, "segmentation": {"size": [512, 512], "counts": "e^j41o?8H6J0O01000000000000O01000000000000O0100000001O8HUah2"}, "image_id": 645, "id": 11309}, {"iscrowd": 0, "category_id": 1, "bbox": [115.0, 477.0, 85.0, 35.0], "area": 1899, "segmentation": {"size": [512, 512], "counts": "ooi11n?1O1O1O1O1O1N2O1O1O1O1O1O1O1N2O1O1O1O1O1O1O1N2O1O1O1O1O1O1N21O1O1O1O1O1O001O1O1O1O1O1O1O1OO1O1O1O1O1O1O1N2O1O1O1O1O100001O1O1O1O1O1O1O1O00O11O1O1N2M3N2N1O2N2N2M3N2N2N2N``k4"}, "image_id": 645, "id": 11310}, {"iscrowd": 0, "category_id": 1, "bbox": [272.0, 479.0, 5.0, 6.0], "area": 27, "segmentation": {"size": [512, 512], "counts": "o^X46j?000003MnPe3"}, "image_id": 645, "id": 11311}, {"iscrowd": 0, "category_id": 1, "bbox": [510.0, 487.0, 2.0, 8.0], "area": 9, "segmentation": {"size": [512, 512], "counts": "W_o71o?7b@"}, "image_id": 645, "id": 11312}, {"iscrowd": 0, "category_id": 1, "bbox": [253.0, 506.0, 86.0, 6.0], "area": 365, "segmentation": {"size": [512, 512], "counts": "mon33m?0000000O100000000000000000000000000000000O1000000000000000000000000000000O10000000000003M00000000000000000000O100000000000000000000000000000000000000O100000000000000003MRPf2"}, "image_id": 645, "id": 11313}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 63.0, 
92.0], "area": 3746, "segmentation": {"size": [512, 512], "counts": "0e1[>00O1000000000000O1000000000000O1000000000000O1000000000000O1005K7I7I7I7I8H3M00000000O1000000000000O1^NoB5Q=KVCNj<2]CGc<9dC@\\<`0kCYOUQ1kAmNU>S1mAkNS>U1oAiNQ>W1QBgNo=Y1<0O10O100000000000O1006I:G8H8H9GPbZ6"}, "image_id": 646, "id": 11317}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 476.0, 100.0, 36.0], "area": 3020, "segmentation": {"size": [512, 512], "counts": "W??a?8H2M100000000000000O10000000000000000O100000000000000O100000000000000O10000000000000000O100000000000000O10000000000000000O100000000000000O10000000000000000O100000000000000O100000000000000008H7I7I7IW`]6"}, "image_id": 646, "id": 11318}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 267.0, 146.0, 245.0], "area": 19721, "segmentation": {"size": [512, 512], "counts": "R9n6P92O1O1O1O1N2O0000O1000O100000O1000O1001O1N3N2N2N2N2N2M2O2N2N2N2N2M3N1O2N2N2N2M3N2N1O2N2N2M3N2N1O2N2N2M3N2N2N1O2N2M3N2N2N2N1O2M3N2N2N2N2N1N3N2N2N2N2N2N10100000000000O0100000000000O0100000000000ON3N2N2N2N2N2M2O2N2N2N2N2M3N1O2N2N2N2M3N2N1O2N2N2M3]NSBP1o=mNTBQ1n=mNTBQ1m=nNTBQ1n=mNTBQ1n=mNTBP1o=nNSBP1o=mNTBQ1_>N2N1O2M3N2N2N2N2N1N3N2N2N2N2N2Mkcf5"}, "image_id": 647, "id": 11319}, {"iscrowd": 0, "category_id": 1, "bbox": [216.0, 267.0, 42.0, 41.0], "area": 777, "segmentation": {"size": [512, 512], "counts": "gX\\32m?2N2M3N2N2N1O2M3N2N2O100O100000000O1000O1000O10000000000O10M3N2N2N1N3N2N2N2N1N3N2Nnfn3"}, "image_id": 647, "id": 11320}, {"iscrowd": 0, "category_id": 1, "bbox": [467.0, 280.0, 29.0, 30.0], "area": 418, "segmentation": {"size": [512, 512], "counts": "UiY72m?2N2O1N3M2N2N2N2N2N3N1N2N2N0002N4M1\\Og@>[?@g@>_?N2N2N1O10O02N3M2Nif7"}, "image_id": 647, "id": 11321}, {"iscrowd": 0, "category_id": 1, "bbox": [172.0, 321.0, 44.0, 43.0], "area": 867, "segmentation": {"size": [512, 512], "counts": "`Zf22m?2N2N1N3N2N2N2N2N2M2O2N2N2000000O0100000000000O01000000000O10M3N2N2N2N2N1N3N2N2N2N2N2MWec4"}, "image_id": 647, "id": 11322}, {"iscrowd": 0, "category_id": 1, "bbox": [251.0, 482.0, 25.0, 30.0], "area": 405, "segmentation": {"size": [512, 512], "counts": "Wom33l?3N2M3N2M3N2M4M2M3N2N11N2N2N2N2N3M2N2N2N2N2N3M2NR`e3"}, "image_id": 647, "id": 11323}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 100.0, 314.0, 412.0], "area": 86324, "segmentation": {"size": [512, 512], "counts": "T3U48lKX1m6d1WMQMoK[1j6d1WMQMoKNNX1l6i1XMPMTLV1d6k1WMoLULV1d6k1eMUN[2k1eMTN\\2l1eMSN[2n1dMRN\\2n1dMRN\\2n1dMRN\\2n1eMQN[2P2dMPN\\2P2dMoM]2Q2cMTNX2l1iMSNW2n1hMRNX2n1hMRNX2n1hMRNX2n1iMQNW2P2cIjLd3V1i2P2_InLh3Q1j2Q2ZIRMl3m0j2Q2UIXMQ4f0j2d2VM\\Mj2d2VM\\Mj2d2VM\\Mj2e2VMaMc2_2]MhM\\2X2dMoMU2Q2kMWNm1j1SN\\Nf1d1ZNcN_1]1aNjNX1V1hNQOQ1P1oNWOi0i0WO^Ob0b0^OmLVLe1U4^1EVLPLa06R2n3X18_LjK`2g3Q1g0WLbKo2`3j0^LULd4Y3fNb0bLZLg4`0^Kk1R3f1g1VLYK[2i2_1U2oKRKi2b2X1d2gKjJX3[2R1T3dL[J_1Z2m1_3YL]JS2l1d1n3RLVJa2e1]1\\4kKoIo2^1W1Q5jNnJV1R5^L`IZ1^1X2Y5UL[Ic19nM>Z4U6nKTIi1:VN6T4b6fKmHQ2;\\NOm3U7gMlHcNHf3_7ZL]H;5N1O01O01O000001O1O1O1O1O1O1O1O1O1O1O1O1O1O1O100O1O1O1O2N2N2N2N2N2N2N2N2N2N2N2N2Nd_X1"}, "image_id": 651, "id": 11333}, {"iscrowd": 0, "category_id": 1, "bbox": [263.0, 18.0, 49.0, 45.0], "area": 1138, "segmentation": {"size": [512, 512], "counts": "SaS41n?2N2N2N3M2^@E\\?=b@E]?a0M2N2N2N2N2N2N2N3M2O1N2N20O1N2N3M2N2N2O1N2O10001O0001O00N2N2N2O1N3M2N2N2N2N2N2N2N2O^nS3"}, "image_id": 651, "id": 11334}, {"iscrowd": 0, "category_id": 1, "bbox": [347.0, 21.0, 23.0, 22.0], "area": 255, "segmentation": {"size": [512, 512], "counts": "Pa]52n?2M2N2N2N2N2O2M1O0000000010O2N2N2N3M2O1N2N2NSoV2"}, "image_id": 651, "id": 11335}, {"iscrowd": 0, "category_id": 1, 
"bbox": [213.0, 26.0, 51.0, 54.0], "area": 1464, "segmentation": {"size": [512, 512], "counts": "\\aZ31n?2N2b@Ml>5RAMl>5RAMl>5RAMl>6QALm>6RAKl>7RAKm>e0N2N2N3M2N2N2N2N2N2N01O2O1N2N2N3M2N2N2N2N2N2N2N1O00001O001O2N2N2O1N2N2N2N2N2N2N2N2Nank3"}, "image_id": 651, "id": 11336}, {"iscrowd": 0, "category_id": 1, "bbox": [73.0, 40.0, 97.0, 93.0], "area": 3219, "segmentation": {"size": [512, 512], "counts": "]cT12m?2N2O2M2N2N2N2N2N2N2N2N2N2N2N3N1N2N2N2N2N0000000000000010O0000000000000000000000010O0000000000000000000000010O0000000000000000000000010O000000000000001O2N2N2N2O2M2N2N2N2N2N2N2N2N2N2N2N3N1N2N2NSnZ5"}, "image_id": 651, "id": 11337}, {"iscrowd": 0, "category_id": 1, "bbox": [366.0, 43.0, 29.0, 29.0], "area": 420, "segmentation": {"size": [512, 512], "counts": "eQg52n?1N2N2N2N2N3M2N2N2N2N2O1000000000N2N2N2N2N2N2N2N2N2N2N2NV^j1"}, "image_id": 651, "id": 11338}, {"iscrowd": 0, "category_id": 1, "bbox": [473.0, 46.0, 27.0, 27.0], "area": 353, "segmentation": {"size": [512, 512], "counts": "ia\\71o?1N2N2N2N2N2N2N2N2N2N2N3O0000N2N2N2N2N2N2N2N2N2O2M2NTn5"}, "image_id": 651, "id": 11339}, {"iscrowd": 0, "category_id": 1, "bbox": [349.0, 49.0, 24.0, 25.0], "area": 295, "segmentation": {"size": [512, 512], "counts": "ia^52m?2N2N3N1N2N2N2N3M20000001O0O1N2N2N3M2O1N2N2N2NP^U2"}, "image_id": 651, "id": 11340}, {"iscrowd": 0, "category_id": 1, "bbox": [243.0, 53.0, 63.0, 51.0], "area": 1517, "segmentation": {"size": [512, 512], "counts": "[bi31n?2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N1O0001O2N2O10001O01O00000000O1N2N2N2N20000000000000000N2N2N00001O2N2N2N2N2O1N2N2N2N2NYmV3"}, "image_id": 651, "id": 11341}, {"iscrowd": 0, "category_id": 1, "bbox": [412.0, 56.0, 56.0, 56.0], "area": 1697, "segmentation": {"size": [512, 512], "counts": "hR^61n?2N3M2N2N2N2N2N2N2N2J\\On@e0P?]On@e0P?7N2N2N2N3M2N2O1000N00000001O00000000000010100000O1N3M2N2N2N2N2O1N2N2Fl@EV?9l@EV?9l@EV?:9N2N2N3M2Nbme0"}, "image_id": 651, "id": 11342}, {"iscrowd": 0, "category_id": 1, "bbox": [129.0, 66.0, 98.0, 87.0], "area": 2984, "segmentation": {"size": [512, 512], "counts": "SdP23l?2O1N2N2N2N2JCf@`0W?Bg@`0W?6N2N2O1N2N3M2N2N2N1O01O000000000001O01O00002N2N2N2O1N2N2N3M2N20000O1NRO]OaBb0_=@_B`0a=CRBKJb0U>DoALJ:JI\\>3nALK7MHZ>7lALK52EW>3aA9^>IaA6_>M^A3b>e01O00101001O00OO00000000010O00M]AjNc>V1300000001O2N2O2M2N2N2N2N0000100O3M2N2N2N2N2N2O2M2N2N2N\\]^4"}, "image_id": 651, "id": 11343}, {"iscrowd": 0, "category_id": 1, "bbox": [320.0, 73.0, 41.0, 41.0], "area": 853, "segmentation": {"size": [512, 512], "counts": "mRP51n?2N2N3N1N2N2N2N2N2N2N2N2N2N2N3M2O1N2N00000001O2N2N2N2N2N3M2O1N2N2N2N2N2N2N2N2N2NV][2"}, "image_id": 651, "id": 11344}, {"iscrowd": 0, "category_id": 1, "bbox": [196.0, 86.0, 74.0, 71.0], "area": 2337, "segmentation": {"size": [512, 512], "counts": "UTR32m?2N2N2N2N2N2N2@B[Aa0b>B[A`0c>B\\A?b>C\\A?b>C\\A?b>C\\A?b>a0N2O1N2N2N3M2N00000000000001O000XOkAGU>9lAEV>9lAEV>9lAEV>9mADU>:mADU>:mADU>:mADU>:mADU>:mADS>?g02N2N2N2N2N2N2N2O2M2N2N2O01N2N2N2N2N2N3M2N2O1N2N2N2N2N2N2N2N3M2Nflh3"}, "image_id": 651, "id": 11345}, {"iscrowd": 0, "category_id": 1, "bbox": [386.0, 95.0, 126.0, 120.0], "area": 4440, "segmentation": {"size": [512, 512], "counts": "PVQ61n?2O1N2N2N2N2N2N2N2N3M2N2O1N2N2N2N2N2N2N3M000001O0001O0000000000000001O0001O0000000000000001O0001O0000000000000001O0001O0002N2N2N2N2N2N2N2N1O01O0000000001O0001O2N2N2N2N2N2N3M2OO00000000000000000001O01O000000000000000001O01O000000000000000001O01O000000RM"}, "image_id": 651, "id": 11346}, {"iscrowd": 0, "category_id": 1, "bbox": [278.0, 104.0, 64.0, 50.0], "area": 1592, "segmentation": {"size": [512, 512], 
"counts": "TT[42m?3M2N2N2N2N2O1N2N2N2N2N2N2N3M2N2N2N2N0001O0000000000000000002O1N2N2N2N2N2N2001O01O0000O1N2N2N1O02N2N2N2N2O1N2N3M2N2N2N2N2N2N2Nokd2"}, "image_id": 651, "id": 11347}, {"iscrowd": 0, "category_id": 1, "bbox": [345.0, 107.0, 66.0, 61.0], "area": 1927, "segmentation": {"size": [512, 512], "counts": "jc\\51n?2N2N2N3M2O1N2N2N2N2N2N3M2N2O1N2N2O10001O000WAlNe>T1YAnNg>V11O000001O00000001ON2O1N3M1O0000000001O2O1N3M2N2N2N2N2N2N2O2M2N2N2N2N2N2N2N3N1N2N2Nd[b1"}, "image_id": 651, "id": 11348}, {"iscrowd": 0, "category_id": 1, "bbox": [138.0, 137.0, 76.0, 68.0], "area": 2427, "segmentation": {"size": [512, 512], "counts": "eUU22m?2N2N2N2N2HFh@;W?Fg@UOVAm0h>7N2N2O1N2N2N000KaNjA_1V>cNhA^1W>7N2N20001ON1O00002UOkAHW>6kAIV>5lAIV>5lAIV>5lAIV>5lAIV>6kAHW>6kAHU>8mAFT>9nAER>;PBCP>=SB@m=b0g03M2N2N2N2N2N2N2N2O1N2O01N2N2N2N2N2N3M2N2O1N2N2N2N2N2N2N2N2N2NTkd4"}, "image_id": 651, "id": 11349}, {"iscrowd": 0, "category_id": 1, "bbox": [312.0, 145.0, 55.0, 39.0], "area": 1006, "segmentation": {"size": [512, 512], "counts": "nTl41o?1N2N2N3M2N2N2O1N3O00000000O2M2O1N1O1O000002N2N2N2O1N3M2N2N2N2O2M2N2N2N02N2N2N2O1N3M2N2N2N2O1N3M2N2N2N2O2M2NkZX2"}, "image_id": 651, "id": 11350}, {"iscrowd": 0, "category_id": 1, "bbox": [212.0, 146.0, 70.0, 52.0], "area": 1728, "segmentation": {"size": [512, 512], "counts": "`UZ32m?3M2N2N2O1N2N2N2N2N2O101N1N2N2O1N2N20N2N3M2N2N1O00000000000001O101N2N2N2N2N2N2N2N00000010O00000000001O2N2N2N2N2N2O1N3M2N2N2N2N2N2N2N2N2N2Okjb3"}, "image_id": 651, "id": 11351}, {"iscrowd": 0, "category_id": 1, "bbox": [279.0, 174.0, 57.0, 40.0], "area": 1159, "segmentation": {"size": [512, 512], "counts": "ke[41n?2N2N3M2O1N2N2N3M2O10000010OOO1O01O000000010O01101O01OO1N2N3M2N2N2O1N3M2N01O3M2O1N2N2N3M2N2O1N3M2N2N2N2O2M2N2N2Nmig2"}, "image_id": 651, "id": 11352}, {"iscrowd": 0, "category_id": 1, "bbox": [507.0, 178.0, 5.0, 13.0], "area": 34, "segmentation": {"size": [512, 512], "counts": "iem72m?2N2KK\\@7b?5]J"}, "image_id": 651, "id": 11353}, {"iscrowd": 0, "category_id": 1, "bbox": [164.0, 192.0, 67.0, 50.0], "area": 1639, "segmentation": {"size": [512, 512], "counts": "mVb22m?2N2N2N2N2O1N3M2N2N2N2N2N2N2N2O2M21N1N2N2N2N2N2N00000010O00001O2N2N2N3M2N2N2OO00000001O0000000002N2N2N2N2O1N2N2N3M2N2N2N2N2N2O1N2N2N^Y\\4"}, "image_id": 651, "id": 11354}, {"iscrowd": 0, "category_id": 1, "bbox": [445.0, 195.0, 67.0, 63.0], "area": 1871, "segmentation": {"size": [512, 512], "counts": "\\gn61n?3M2O1N2N2N2N2N2N2N2N2N3M2N2O1N2N2N2N2N001O0000000001O000000000001O01O3M2N1O000000000000010O0000000000001O2N2N2N2O1N3M2N2N2N2N2N2N2N`I"}, "image_id": 651, "id": 11355}, {"iscrowd": 0, "category_id": 1, "bbox": [510.0, 195.0, 2.0, 4.0], "area": 6, "segmentation": {"size": [512, 512], "counts": "TVo72m?2lI"}, "image_id": 651, "id": 11356}, {"iscrowd": 0, "category_id": 1, "bbox": [241.0, 202.0, 62.0, 46.0], "area": 1326, "segmentation": {"size": [512, 512], "counts": "Pgh32m?2N2N2N2N2N2N2N2O2O000000O1O1N2N2N2N000002N2N2N2N2N2N2N2N3M2N2N2O1N2N1O00000000000011N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N3N1N2NSYX3"}, "image_id": 651, "id": 11357}, {"iscrowd": 0, "category_id": 1, "bbox": [329.0, 202.0, 53.0, 56.0], "area": 1316, "segmentation": {"size": [512, 512], "counts": "hfT52m?2N2N2N2N2O1N2N2N2N3M2N2N2SAVOa>l0]AVOa>l0]AVOb>k0]AVOd>i0ZAYOf>Q101O00O1N2N2O1N2N3O0001O00000000000001O0N2N2N2N2N2N2N2O1N3M2N2N2N2N2N_hP2"}, "image_id": 651, "id": 11358}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 205.0, 28.0, 52.0], "area": 771, "segmentation": {"size": [512, 512], "counts": "_6b1]>01O002N2N2N2N3N1N2N2N2N2N2N2N3M2O1N2N2N2N2N3M2N2O1N2Niha7"}, 
"image_id": 651, "id": 11359}, {"iscrowd": 0, "category_id": 1, "bbox": [315.0, 222.0, 29.0, 28.0], "area": 421, "segmentation": {"size": [512, 512], "counts": "[gm41n?2N2N2N2N3M2O1N2N2N2N2N2N2N0101N2N2N2N2N2N2N2N2N2N2N2N2Nfhc2"}, "image_id": 651, "id": 11360}, {"iscrowd": 0, "category_id": 1, "bbox": [497.0, 227.0, 15.0, 30.0], "area": 232, "segmentation": {"size": [512, 512], "counts": "_gh71n?3M2O1N2N2N2N3M2N2O1N2N2N3M2lH"}, "image_id": 651, "id": 11361}, {"iscrowd": 0, "category_id": 1, "bbox": [9.0, 228.0, 63.0, 47.0], "area": 1553, "segmentation": {"size": [512, 512], "counts": "Ph42m?2O1N2N2N2N2N2N2N2N2N3M2N2N2O1N2N0000000002N2N2O100000O1N2N2N2N1O00000000010O000000000001O2N2N2N2N2N2O2M2N2N2N2N2N2N2N2N2N2N[hk6"}, "image_id": 651, "id": 11362}, {"iscrowd": 0, "category_id": 1, "bbox": [384.0, 232.0, 60.0, 60.0], "area": 1596, "segmentation": {"size": [512, 512], "counts": "`XP61o?1N2N2N2N3M2N2N2O1N2N2N3M2N2N2N2O1N1O000000001O0KmN_AR1a>PO]AP1c>RO[An0e>60000010O000000000000010O2N3M2N2N2N2N2O1N3M2N2N2N2N2N2O2M2N2N2NShQ1"}, "image_id": 651, "id": 11363}, {"iscrowd": 0, "category_id": 1, "bbox": [203.0, 238.0, 62.0, 45.0], "area": 1316, "segmentation": {"size": [512, 512], "counts": "QhU31n?2N2N2N2N2N2N2N2N20000000000000O1N2N2N2N01O2N2N2N2N2N2N2N2N2N2N2N2N2N2N1O000001O00001O2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2NQXk3"}, "image_id": 651, "id": 11364}, {"iscrowd": 0, "category_id": 1, "bbox": [468.0, 259.0, 17.0, 18.0], "area": 163, "segmentation": {"size": [512, 512], "counts": "YXZ72m?2N2N2N2N3M2O1001O0O1N2N2O1N3M2NcW="}, "image_id": 651, "id": 11365}, {"iscrowd": 0, "category_id": 1, "bbox": [93.0, 271.0, 26.0, 29.0], "area": 378, "segmentation": {"size": [512, 512], "counts": "hh^11n?2O1N2N2N3`@GT?;j@GT?;j@GT?;j@GT?d0N10O1O2N3N1N2N2N2N2N2N2N3M2O1N2NSWT6"}, "image_id": 651, "id": 11366}, {"iscrowd": 0, "category_id": 1, "bbox": [268.0, 271.0, 53.0, 51.0], "area": 1406, "segmentation": {"size": [512, 512], "counts": "[YV42m?2N3M2N2N2O1N2N2N2N2N3M2N2N2N2O1N2N2N2N1O0000000001O0000010O2N2N1O000000002N2O1N2N2N3M2N2N2N2N2N2K^@Jc?55N3MQWo2"}, "image_id": 651, "id": 11367}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 273.0, 23.0, 40.0], "area": 500, "segmentation": {"size": [512, 512], "counts": "e8T1k>00000001O2N2N2N2N3M2N2N2N2O1N2N2N2N2N2N2N2NnVd7"}, "image_id": 651, "id": 11368}, {"iscrowd": 0, "category_id": 1, "bbox": [484.0, 280.0, 11.0, 12.0], "area": 70, "segmentation": {"size": [512, 512], "counts": "kXb72m?3M2N20000000O1N2N2NRW8"}, "image_id": 651, "id": 11369}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 281.0, 94.0, 96.0], "area": 3031, "segmentation": {"size": [512, 512], "counts": "l:;d?2N2N2N3M2N2N2N2O1N2N2N2N3M2N2N2N02N2N2N2N2O1N0000000000001O0001O00000001O2N3M2N0001UOXO]Bh0c=ZO[Bf0e=]OXBc0h=_OWB`0i=BUB>l=CRB=n=EPB;P>GnA9R>IlA4I\\O[>b0jA2M[OX>e0iA0[>3bAM^>j001O01O002N2N2N0000010O0000000000001O2N2O1N2N2N2N2N2N3M2N2N2O1N2N2N2N2N3M2N2Nbf`6"}, "image_id": 651, "id": 11370}, {"iscrowd": 0, "category_id": 1, "bbox": [310.0, 285.0, 87.0, 69.0], "area": 2150, "segmentation": {"size": [512, 512], "counts": "[Zk41n?2N2N2N2N2O1N3M2N2N2N2N2N2N2N2N2N2N2O2M2N1O000000000000000010O1O2N2N2N2N2N2N2N3M2N2O1N2N1O00000000JAl@?T?Cj@=V?Eh@;X?60001O0001O000001O2N3M2N000000000001O001O2N2N2N2O1N3M2N2N2N2N2N2N2NfVi1"}, "image_id": 651, "id": 11371}, {"iscrowd": 0, "category_id": 1, "bbox": [84.0, 305.0, 58.0, 55.0], "area": 1529, "segmentation": {"size": [512, 512], "counts": "aZZ12n?1N2N2N2N2N2N3M2O1N2N2N2N2N3M2N2N2O1N2N1O00001O0001O000000000001O0001O00001O2N3N1N2N2N2N2N2N3M2N2O1N2N2N2N2N3M2N2Oieh5"}, 
"image_id": 651, "id": 11372}, {"iscrowd": 0, "category_id": 1, "bbox": [33.0, 338.0, 46.0, 44.0], "area": 878, "segmentation": {"size": [512, 512], "counts": "_k`01o?1N2N2N2N2N2N2N2N3M2N2N2O1N1O000000000000001O0001O000000000000002N2O1N2N2N2N2N3M2N2N2N2N2NQUh6"}, "image_id": 651, "id": 11373}, {"iscrowd": 0, "category_id": 1, "bbox": [155.0, 347.0, 30.0, 29.0], "area": 448, "segmentation": {"size": [512, 512], "counts": "Wk]21n?3M2N2O1N2N2N2N2N2N2N2N2N2O1000N2N2N2O1N2N2N2N2N2N2N2N2N2NgTS5"}, "image_id": 651, "id": 11374}, {"iscrowd": 0, "category_id": 1, "bbox": [107.0, 362.0, 52.0, 55.0], "area": 1426, "segmentation": {"size": [512, 512], "counts": "_le14j?4HJ`@9^?6N2N2N2O1N2N3IVOSAl0l>5N2N1O000001O000001O01O1O00000000010O000000001O2N2N3N1N2N2N2N2N2N3M2N2O1N2N2N2N2NTT`5"}, "image_id": 651, "id": 11375}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 369.0, 12.0, 24.0], "area": 153, "segmentation": {"size": [512, 512], "counts": "a;h0Y?N2N2N2N2N3M2N2N2O1N2NTdi7"}, "image_id": 651, "id": 11376}, {"iscrowd": 0, "category_id": 1, "bbox": [135.0, 378.0, 72.0, 65.0], "area": 1920, "segmentation": {"size": [512, 512], "counts": "ZmS22m?2N2N2O1N2N2N3M2N2N2N2N2N2O1N1O00001O0000000001O00000001O0000002N2O1N2N0000000000000001O01O0000000000000003M2O1N2N2N2N2N2N2N3M2N2O1N2N2N2N2N2NcSh4"}, "image_id": 651, "id": 11377}, {"iscrowd": 0, "category_id": 1, "bbox": [338.0, 387.0, 60.0, 61.0], "area": 1850, "segmentation": {"size": [512, 512], "counts": "m\\Y51n?2N2O1N2N3M2N2N2N2N2N2O1N3M2N2N2N2N2N2N2O2M2N2N2N2N2N2N2N3O00O1N2N2N3M2O1N2N2N2N2N2N3M2N2O1N2N2N2N2N3M2N2N2O1N2N2N2N3Mlbh1"}, "image_id": 651, "id": 11378}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 388.0, 49.0, 57.0], "area": 1469, "segmentation": {"size": [512, 512], "counts": "oQ1fAQOX>Q1fAQOY>P1eAROY>P1eAROY>\\1N000000KhAcNX>^1iAaNV>_150001O2N2N3M2N010O2N2N2N2N2N2N3M2O1N2N2N2N2N3M2N2O1N2N2Nfah6"}, "image_id": 651, "id": 11382}, {"iscrowd": 0, "category_id": 1, "bbox": [94.0, 436.0, 56.0, 52.0], "area": 1539, "segmentation": {"size": [512, 512], "counts": "d^_11;0W?2g@0W?2g@0W?2g@0W?3f@OX?>N2N000001O02N3M2N2N000000000001O0002N2N2N3M1O00000001O0000010O2N2N2N2N2N2N2N3M2N2O1N2N2N2N2N2N2N3M2Nhad5"}, "image_id": 651, "id": 11383}, {"iscrowd": 0, "category_id": 1, "bbox": [282.0, 451.0, 34.0, 29.0], "area": 464, "segmentation": {"size": [512, 512], "counts": "`^]41n?2O1N2N2N2N3M2N2N2N2N2N2N2N2OO2N3M2N2N2N2N2N2N2O1N20000000001N1N2NZaQ3"}, "image_id": 651, "id": 11384}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 463.0, 24.0, 48.0], "area": 616, "segmentation": {"size": [512, 512], "counts": "b>]1b>000002N2N2N2N2N2N2N2N3M2Kk@\\OW?b05N2N2N2N2N2N2N2N2Nm`c7"}, "image_id": 651, "id": 11385}, {"iscrowd": 0, "category_id": 1, "bbox": [65.0, 470.0, 17.0, 15.0], "area": 125, "segmentation": {"size": [512, 512], "counts": "onP11n?2N2O2M2N1O001O01O0000101N2N3M2NVaf6"}, "image_id": 651, "id": 11386}, {"iscrowd": 0, "category_id": 1, "bbox": [268.0, 478.0, 53.0, 34.0], "area": 1000, "segmentation": {"size": [512, 512], "counts": "`_V41n?2O1N2N2N2N2N2N2N2N2N2N3O0O1N1O100O1O1O1O1O1O11O1O1O1O1O1O1O1O2N1O1O1O1O1O1O1O1O1O1O1O1O2N1O1O1O1O1N2O1NTPo2"}, "image_id": 651, "id": 11387}, {"iscrowd": 0, "category_id": 1, "bbox": [55.0, 497.0, 24.0, 15.0], "area": 211, "segmentation": {"size": [512, 512], "counts": "jok01n?2N3M2N2N1O100O1O1O1001O1O2N1O1O1O1O1O1O1O1O2NQPh6"}, "image_id": 651, "id": 11388}, {"iscrowd": 0, "category_id": 1, "bbox": [42.0, 510.0, 4.0, 2.0], "area": 6, "segmentation": {"size": [512, 512], "counts": "o_e01n?1001OQ`X7"}, "image_id": 651, "id": 
11389}, {"iscrowd": 0, "category_id": 1, "bbox": [339.0, 510.0, 3.0, 2.0], "area": 4, "segmentation": {"size": [512, 512], "counts": "ooY51n?11OQ`d2"}, "image_id": 651, "id": 11390}, {"iscrowd": 0, "category_id": 1, "bbox": [333.0, 42.0, 10.0, 22.0], "area": 185, "segmentation": {"size": [512, 512], "counts": "ZaV58h?>BO100000000000=CY^d2"}, "image_id": 652, "id": 11391}, {"iscrowd": 0, "category_id": 1, "bbox": [321.0, 43.0, 10.0, 21.0], "area": 176, "segmentation": {"size": [512, 512], "counts": "[aP55k??A1O0000000001O?AU^j2"}, "image_id": 652, "id": 11392}, {"iscrowd": 0, "category_id": 1, "bbox": [213.0, 57.0, 88.0, 58.0], "area": 4078, "segmentation": {"size": [512, 512], "counts": "gbZ32m?:G2N0g@Gk>9UAGe>LWA<5HU>2iAb0L[O\\>\\1000000000001N1000000000O100000O1000000000O100000O10000000O11O3M00O01000000000000000O01000000000000000O01000000000L400000O01000000000000000O010000000006J9G9G9F:Gm\\Y3"}, "image_id": 652, "id": 11393}, {"iscrowd": 0, "category_id": 1, "bbox": [335.0, 64.0, 10.0, 21.0], "area": 174, "segmentation": {"size": [512, 512], "counts": "QbW58h?B_]c2"}, "image_id": 652, "id": 11394}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 105.0, 25.0, 59.0], "area": 918, "segmentation": {"size": [512, 512], "counts": "Y3k1U>01O001O00001N100HjAfNW>W1:M4M2M3N3L3N3L3M3N3L3N3L3N2Ma\\c7"}, "image_id": 652, "id": 11395}, {"iscrowd": 0, "category_id": 1, "bbox": [274.0, 109.0, 238.0, 93.0], "area": 15540, "segmentation": {"size": [512, 512], "counts": "UUY48h?:E10000FBRA>n>:0O14L:F6I10000000000000O10O10000000000000O1000O1[OaNiB_1V=kNaBU1_=f00000000O100000O1000000000O10000000O10000000O10000000O1000000000O10000000O10000000O10000000O1000000000O100000O1000000000O10000000O1000000000O100000O1000000000O10000000O10000000O10000000O1000000000O10000000O10000000O10000000O1000000000O100000O1000000000O10000000O1000000000O100000O1000000000O10000000O10000000O10000000O1000000000O10000000O10000000O10000000O1000000000O100000O10000006J9F8I000000O10000000O100000nK"}, "image_id": 652, "id": 11396}, {"iscrowd": 0, "category_id": 1, "bbox": [59.0, 171.0, 65.0, 83.0], "area": 3010, "segmentation": {"size": [512, 512], "counts": "Pgm02k?3M3M4M2M3M4L3N3L3M3N3L3M3cAgNP>]1mAeNQ>]1lAfNQ>g1M2O2M2M3O20O0010OO1M\\BoM`=m1`BVN`=j1^BXNb=h1[B\\Ne=o101O01O010jM]Bm1c=PNaBo1h=1O01O010O01O01L3N2M4L3M4M2M3M4L3N2M4L3M3N3L3M4M2M3M4L3NniQ6"}, "image_id": 652, "id": 11397}, {"iscrowd": 0, "category_id": 1, "bbox": [130.0, 202.0, 63.0, 76.0], "area": 2449, "segmentation": {"size": [512, 512], "counts": "UXQ21V?0aA2]>0aA3\\>ObA3\\>ObA3]>NaA4]>NaA4]>NaA4]>O`A3^>O`A3^>O`A3^>O`A3^>i0N2N3M00000001O0000000001O0000002N2N000001O00000000002O1N2N2N2N2N2N2N3M2N2QOUAf0m>XOUAf0m>XOUAf0T?O1N2N2N2N2N3M2N2N2N2NfXo4"}, "image_id": 652, "id": 11398}, {"iscrowd": 0, "category_id": 1, "bbox": [341.0, 218.0, 55.0, 53.0], "area": 1459, "segmentation": {"size": [512, 512], "counts": "ggZ51n?3N1N3M2O2IGb@;]?6M2N3N1N3M2O2M2N3NO00010O00010O00010O00010O0001O01O01O01O01O02N3N1N3M2O2M2N3N1N3M2O2M2N3N1N3M2O]hi1"}, "image_id": 652, "id": 11399}, {"iscrowd": 0, "category_id": 1, "bbox": [382.0, 228.0, 105.0, 79.0], "area": 3253, "segmentation": {"size": [512, 512], "counts": "lXo51n?2N2N2EJi@9T?Ij@9T?Ij@9T?Ij@9T?Ik@8T?;N2N2N2N2N2N3M2N2N2N2N2OO000000000001O0000000001O000000000001O0000000001O00002N2N2N010E_AVOc>h0`AUOb>j0_ATOc>j0_ATOc>j0;N2N2N2N3M2N2N21O000O1N2N2O1N2N2N0000000000001O2N2O1N0000000000000012M2N2N2N2N2N2N2N2N2N2N3N1N2N]X<"}, "image_id": 652, "id": 11400}, {"iscrowd": 0, "category_id": 1, "bbox": [482.0, 247.0, 30.0, 53.0], "area": 880, "segmentation": 
{"size": [512, 512], "counts": "nXa72m?2N2N2N2N2N2N2_OA^Aa0`>A^Aa0`>A^Aa0`>A^Aa0`>A^Aa0a>@]Aa0b>A]A>c>D[A?0000000000000000001O2N2N2NWH"}, "image_id": 652, "id": 11401}, {"iscrowd": 0, "category_id": 1, "bbox": [105.0, 264.0, 21.0, 18.0], "area": 135, "segmentation": {"size": [512, 512], "counts": "fhd12m?2N2N1O1O01O00000000000002N0001O1O2N2N2NfgP6"}, "image_id": 652, "id": 11402}, {"iscrowd": 0, "category_id": 1, "bbox": [74.0, 290.0, 48.0, 36.0], "area": 961, "segmentation": {"size": [512, 512], "counts": "bYU14j?2M4M2N2M4M2N3O01O01O010O01O01O010O01O01O010O01O01O010O0O1O2O0010O0010O0010WOk@e0Y?0OO1M4M2M4M2M3N3LafR6"}, "image_id": 652, "id": 11403}, {"iscrowd": 0, "category_id": 1, "bbox": [18.0, 292.0, 62.0, 62.0], "area": 1935, "segmentation": {"size": [512, 512], "counts": "]Z91l?4L3M3N3L3M3N3j@ZOm>i0PA[Ol>n0M4M2M3M4L3N210O00M4N10010O0010O00oNcA?^>^OdAc0[>ZOhAf0X>WOlAi0T>SOoAm0b>010O01O01O01O01O010O00010O01O01O010O00010ON3M2M3M4M2M3M4L3N2Mmeg6"}, "image_id": 652, "id": 11404}, {"iscrowd": 0, "category_id": 1, "bbox": [421.0, 294.0, 60.0, 58.0], "area": 1763, "segmentation": {"size": [512, 512], "counts": "[jb61n?2N2DLi@6U?Li@6U?Li@6U?Li@6U?Li@6U?NnA1R>n000001O2N2N2N2N2N2N2O1N2N2N2N3M2N2N2N2N2N2N2N2N2N2O1N2N2N3M2N2N2NZUd3"}, "image_id": 652, "id": 11408}, {"iscrowd": 0, "category_id": 1, "bbox": [510.0, 315.0, 2.0, 4.0], "area": 6, "segmentation": {"size": [512, 512], "counts": "lYo72m?2TF"}, "image_id": 652, "id": 11409}, {"iscrowd": 0, "category_id": 1, "bbox": [447.0, 340.0, 55.0, 53.0], "area": 1549, "segmentation": {"size": [512, 512], "counts": "`ko62m?2N2N2N2O1N2N2N2N3M2N2N2N2N2N2O1N2N3M2N2N2N0000000010O0000002N2N2N2N2N2N2OO000000000001O00002O1N2N2@j@L00X?2j@L00X?2j@L_?29NUe4"}, "image_id": 652, "id": 11410}, {"iscrowd": 0, "category_id": 1, "bbox": [130.0, 354.0, 31.0, 31.0], "area": 504, "segmentation": {"size": [512, 512], "counts": "b[Q23l?2N2N2N2N2N2O1N2N3M2N2N1O00001O01O002N2N2N2N3M2N2O1N2N2N2N2NbT_5"}, "image_id": 652, "id": 11411}, {"iscrowd": 0, "category_id": 1, "bbox": [139.0, 363.0, 80.0, 79.0], "area": 2617, "segmentation": {"size": [512, 512], "counts": "flU22m?2O1N2N2N2N2N2N2N2N3M2N2N2N2O100000000000010O0N2N2O100001O000000POoNPCo0P=SOPCk0P=XOnBg0R=[OlBf0S=\\OkBd0U=^OXBD;n0]=@VBD;l0_=BTBDQ?El@>R?:M20001M2N3L3M3N3L301O01O01O010O01O01O01O01O010O01O01O010O01O01O010O01O01O01O01O010O01O01O010O01O01O010O00010ObNcAY1c>O01O010O01O01O01nN\\Ag0c>WO_Ai0a>TOcAk0^>ROdAo0f>010O01O01O01O010O01O01O010O0ZORA;m>BWAAWAAWA=X?L3N3L3Mgbl5"}, "image_id": 652, "id": 11413}, {"iscrowd": 0, "category_id": 1, "bbox": [203.0, 379.0, 98.0, 77.0], "area": 2841, "segmentation": {"size": [512, 512], "counts": "kmU31n?3GN`@3^?O`@3^?O`@3^?9N2N2O2M2N1O000000002O1N2N2N2N1O000001O01O000000000001O01O03M2N2N2N2N2N3N1NH^OUA`0k>BUAFUA8k>JUA4k>NUA0l>1SANm>5PALo>6o@JQ?c0O2N2N2N2O2M0KjNaAV1_>lN_AT1a>5000001O01O2N2N1O000010O0000000001O2N2O1N2N3M2N2N2N00011N2N2N3M2N2N2N2O1N2NdSY3"}, "image_id": 652, "id": 11414}, {"iscrowd": 0, "category_id": 1, "bbox": [296.0, 408.0, 45.0, 37.0], "area": 664, "segmentation": {"size": [512, 512], "counts": "e]d42m?2N2N2N2N2N2N0000000000001O2N2N2N2000N2N0000000000000000000000000000002N2N2N2N2N2N2N2N2NPSe2"}, "image_id": 652, "id": 11415}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 415.0, 119.0, 97.0], "area": 7338, "segmentation": {"size": [512, 512], "counts": 
"Z>1R1<^=E`B=^=F_B<_=F`B;^=G`B;^=G`B<]=FaB;^=GPBC:g0e=HoAC:f0Z=WOeBh0O2[=XOeBg0N2]=XOcBh0N1^=YObBh0OO^=\\ObBe0N0_=]OcBc1\\=_NdB`1[=bNeB]1Z=eNgBY1X=iNhBV1W=lNiBS1W=nNiBQ1V=ROiBm0V=UOiBk0V=WOhBj0W=P1O1O1O1O100O1O1O1O1O1O1001O1O2N1O1O1O1O1O1O1O1OO1O1O1O1O1O100001O1O1O1O1O1O2N1O1O1O1O1O1N2N3M2N2N2N2N2N2O1N1O001O00000000011N2N2N2N2N3M2N2N2O1N2N2N0000000000101N2N3M2N2N2N2N2N2O1N3M2N2N2N2NaQT6"}, "image_id": 652, "id": 11416}, {"iscrowd": 0, "category_id": 1, "bbox": [496.0, 416.0, 16.0, 43.0], "area": 401, "segmentation": {"size": [512, 512], "counts": "c]h73l?2N2Z@J`?7^@K`?e0\\A]Od>c0ZA_Of>a0XAAh>?VACi>>UACj>i0000000000000000000000000000000000000000000000000001O2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2Nkab2"}, "image_id": 652, "id": 11418}, {"iscrowd": 0, "category_id": 1, "bbox": [259.0, 442.0, 24.0, 23.0], "area": 260, "segmentation": {"size": [512, 512], "counts": "XnQ42m?2N2N2N2N2N3M1O01O0000000001O01O2N2N2N2N2N2N2NPRb3"}, "image_id": 652, "id": 11419}, {"iscrowd": 0, "category_id": 1, "bbox": [313.0, 448.0, 87.0, 64.0], "area": 2631, "segmentation": {"size": [512, 512], "counts": "ool41n?1O1O1O1O100_OLUA5j>MTA4k>NSA3m>NQA3n>OPA2o>0o@1P?1n@0Q?2n@NQ?a0O1O100O1O1O1O1O1O1O10HZAVOg>h0[AWOg>g0ZAXOg>f0[AYOf>e0\\AZOe>d0=N2N00000001O010C_OZAd0c>^O[Ad0c>^O[Ad0c>^O[Ad0c>_OZAc0e>=N2N2N3M2N2N20000O0O0000000000000101N3M2N2N2N2N2N2O2M2N2N2N2N2N2N3N1N2@d@9^?Ed@9c?N2N2N2N2NQag1"}, "image_id": 652, "id": 11420}, {"iscrowd": 0, "category_id": 1, "bbox": [176.0, 467.0, 19.0, 20.0], "area": 165, "segmentation": {"size": [512, 512], "counts": "h^h22m?2N2N2N2N2000000000001O00N2N2N2O2M2NPQn4"}, "image_id": 652, "id": 11421}, {"iscrowd": 0, "category_id": 1, "bbox": [239.0, 469.0, 49.0, 43.0], "area": 1232, "segmentation": {"size": [512, 512], "counts": "oog31n?1O1O1XOM]A1B3P?N\\A1B2Q?O[A7d>K[A5d>MZA4e>NYA3f>OXA2h>OVA2i>0UA1j>c0O1O1O1O1O1O1O1001O1O1O2N1O1O1O1O1O1O1O1O1O1O1N3N1N2N2N2N2N2N2N2N2N2N2N_`_3"}, "image_id": 652, "id": 11422}, {"iscrowd": 0, "category_id": 1, "bbox": [179.0, 483.0, 26.0, 25.0], "area": 328, "segmentation": {"size": [512, 512], "counts": "aoi22m?2N2N2N2O2M2N2N2N1O1O00000001O002N2N2N3N1N2N2N2N2NdPi4"}, "image_id": 652, "id": 11423}, {"iscrowd": 0, "category_id": 1, "bbox": [197.0, 495.0, 21.0, 17.0], "area": 140, "segmentation": {"size": [512, 512], "counts": "ooR31o?0O1O1O1O1O1O1HKd@6[?Lc@5\\?81O1O1Da@5`?Ib@6e?O1O1O1O1O1OQ`b4"}, "image_id": 652, "id": 11424}, {"iscrowd": 0, "category_id": 1, "bbox": [398.0, 502.0, 19.0, 10.0], "area": 105, "segmentation": {"size": [512, 512], "counts": "o_W61n?1O1O1O1O1O1O1O1O1001O1O1O1O2N1O1O1OQP_1"}, "image_id": 652, "id": 11425}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 6.0, 1.0, 9.0], "area": 9, "segmentation": {"size": [512, 512], "counts": "69aoo7"}, "image_id": 653, "id": 11426}, {"iscrowd": 0, "category_id": 1, "bbox": [387.0, 115.0, 25.0, 22.0], "area": 310, "segmentation": {"size": [512, 512], "counts": "ocQ61m?3M2N3M2N3M2010O010O010O010O010O010O0O2M2N3M2N3MVla1"}, "image_id": 656, "id": 11427}, {"iscrowd": 0, "category_id": 1, "bbox": [157.0, 162.0, 90.0, 49.0], "area": 900, "segmentation": {"size": [512, 512], "counts": "Ze^21m?2N3M2N30O010O010O0010O0010O010O010O010O010O010O010O00010O010O010O010O010O010O010O0010O0010O010O010O010O010O010O010O00010O010O010O010O010O010O010O0010O0010O010O010O010O010O0N3M2NhYT4"}, "image_id": 656, "id": 11428}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 163.0, 174.0], "area": 15764, "segmentation": {"size": [512, 512], "counts": 
"0g0n0m2i9TMUFl2l9UMRFj2P:WMnEi2S:XMkEh2V:YMhEg2Y:ZMdEg2]:ZMaEf2`:[M^Ee2c:\\M[Ed2f:]MXEj0HMQ;ZOUEi0LKP;]OREh00Io:@oDg04Gn:ClDf08Fl:_1VEaNi:^1YEbNf:]1\\EcNc:[1`EeN_:Z1cEfN\\:Y1fEgNY:X1iEhNV:W1lEiNS:V1oEjNP:U1RFkNm9T1UFlNj9S1WFnNh9Q1ZFoNe9P1]FPOb9o0`FQO_9n0cFRO\\9m0fFSOY9l0iFTOV9k0lFUOS9j0oFUOQ9l0oFhM`NT1b:U1nFeMbNT1a:X1mFbMdNT1`:[1RGcNo8^1QG`NP9a1nF_NS9b1jF_NW9b1gF^NZ9c1dF]N]9d1aF\\N`9e1_FYNc9h1]FcMgN6m:X2\\F`MiN6l:[2[F]MkN6k:^2ZF\\MkN4l:a2YF[MkN2m:d2XFYMlN1m:g2VFYMmNNn:i2VFYMlNLo:l2UFXMlNJP;o2SFWMU:j2kETMV:m2jDoLe02b:P3gDPMg0Nc:S3dDQM[N2N2N2N2N2N2N2N2N2N2N2N2N2N2N1N3N2N2N2N2N2N2N2N2N2N]]^5"}, "image_id": 657, "id": 11429}, {"iscrowd": 0, "category_id": 1, "bbox": [288.0, 13.0, 172.0, 127.0], "area": 9454, "segmentation": {"size": [512, 512], "counts": "VR`41m?3N1N3M2O2M3M2f@@R?c0k@@S?g0N3M2O2M2N3N2M2N3M2O2M30O010O01000O0O2M21N2M2kA\\Nl=g1QB\\Nm=e1QB]Nm=m1M3N1N3M2N3N1N3M3N1N3M2O200O0100O0100O0100O0100O0100O010O01000O010O0100O2OO10O10O010O10O010O10O10O010M30O10O010O10O10O010O10O010O10O01M2O2M3M2O2M2N3jMaBh1a=UNbBh1`=VNbBh1a=VNaBg1a=WNaBh1m=M3M2O2M2N3N2M2N3M2O2M2N3N2M2N3N1N3M3N1N3O00100O010O1000O010O01000O010O010O10O10O010O10O10O010O01000O010O010O10M3M2O2MPmi0"}, "image_id": 657, "id": 11430}, {"iscrowd": 0, "category_id": 1, "bbox": [219.0, 89.0, 23.0, 22.0], "area": 262, "segmentation": {"size": [512, 512], "counts": "Qc]32m?2N2N2N2N2000O1000N2N2N2N2N1N12N2N2Ib@I`?5b@I`?57N2NQmV4"}, "image_id": 657, "id": 11431}, {"iscrowd": 0, "category_id": 1, "bbox": [193.0, 100.0, 33.0, 29.0], "area": 490, "segmentation": {"size": [512, 512], "counts": "bcP32m?2N2N2N2N200O1N2N2N2N2N2O1O0OO10000001100N2N2N2N2N2N2O1N2N2N1O2Nal^4"}, "image_id": 657, "id": 11432}, {"iscrowd": 0, "category_id": 1, "bbox": [162.0, 113.0, 20.0, 19.0], "area": 167, "segmentation": {"size": [512, 512], "counts": "fSa21n?2N2N2N2N200O10000000000000O1N2N2N2N2NSlT5"}, "image_id": 657, "id": 11433}, {"iscrowd": 0, "category_id": 1, "bbox": [85.0, 162.0, 61.0, 61.0], "area": 1936, "segmentation": {"size": [512, 512], "counts": "UfZ12m?2N2N3M2N2N2N2N2N2N2N2O1N2N2N2N2N2N3M2N2N2N2N1O000001O02N3M1O00JhAeNX>[1jAcNV>]16000010O200000000N2N2N2N2N2N3DSA@o>>SA@o>>SA@o>>SA@o>>;N2N2N2N2N2N2NXjf5"}, "image_id": 657, "id": 11434}, {"iscrowd": 0, "category_id": 1, "bbox": [79.0, 220.0, 16.0, 21.0], "area": 160, "segmentation": {"size": [512, 512], "counts": "nfW12n?1Y@O\\?3b@O\\?3b@O^?1`@1`?81O00000O1N2N2N3M2O1NgX`6"}, "image_id": 657, "id": 11435}, {"iscrowd": 0, "category_id": 1, "bbox": [55.0, 225.0, 14.0, 15.0], "area": 97, "segmentation": {"size": [512, 512], "counts": "Ugk01n?2N2N2N2O101O01O000N2N2N2NgXm6"}, "image_id": 657, "id": 11436}, {"iscrowd": 0, "category_id": 1, "bbox": [384.0, 364.0, 39.0, 42.0], "area": 903, "segmentation": {"size": [512, 512], "counts": "R\\P62l?3N2N2N1O2M3N2N2N2N1N3N2N2N2M30O10N2N2M3N1100O1N2M2O2N2N2N2M3N1O2_Oh@7Z?Gh@7Z?Gg@7c?N2N2NoS\\1"}, "image_id": 657, "id": 11437}, {"iscrowd": 0, "category_id": 1, "bbox": [448.0, 72.0, 64.0, 87.0], "area": 3138, "segmentation": {"size": [512, 512], "counts": "iSP71l?0U@2i?4N2N2N3M2O1N2N2N3M2O100O1N3N1N2N2N3M2N2O1N2N3M2N2O1N2N3M2N2OO01O000001O01O00001O2O2M2N2N0001O000J[BRNe=n1]BQNb=o18O0001O0001O0000011N2NgM"}, "image_id": 658, "id": 11438}, {"iscrowd": 0, "category_id": 1, "bbox": [349.0, 154.0, 129.0, 236.0], "area": 13941, "segmentation": {"size": [512, 512], "counts": 
"^k^54j?2M4M2M3N3L3N3L3N3M2M3N3L3N3L3N2M4M2N3L3N2M4M2M4M2M3N3L3N3M2M3N3L3N3L3N3L3N2N3L3N3L3N2M4M2M4M2M3N3M2M3N0O010O010O10O102M4M2M4M01N3N3L3M4M2M2O0O0010O02O2BRKgER5U:QKiEo4W:SKfEn4Y:VKcEj4^:;O1O0100O3N3L3N2M4M2N3L3N1N10O03N3L3N2M4M2N3L2O1N4M2M4M2M3N3M2M4M2M3cNWCAl<EJHS=:gB6`0EHIT=8hB7>E[=2YB6?FZ=1[B6e>G]A6X?N3L\\j`0"}, "image_id": 658, "id": 11439}, {"iscrowd": 0, "category_id": 1, "bbox": [469.0, 254.0, 43.0, 80.0], "area": 1329, "segmentation": {"size": [512, 512], "counts": "WjZ71l?4M2M3N3L3N3M2M3N3L3N3L3N3M2M3N0O010O10O10O010O010O10O10O0103L3N3M2M3N3L3N3L3N2N3L3NoG"}, "image_id": 658, "id": 11440}, {"iscrowd": 0, "category_id": 1, "bbox": [200.0, 383.0, 12.0, 12.0], "area": 73, "segmentation": {"size": [512, 512], "counts": "V\\T31n?1O2M3N2N1O20N2M2O2M2Ooce4"}, "image_id": 658, "id": 11441}, {"iscrowd": 0, "category_id": 1, "bbox": [203.0, 388.0, 36.0, 34.0], "area": 644, "segmentation": {"size": [512, 512], "counts": "clU32m?2N2M3N1N3N2N1N3N2M3N10100O0100000O01000O01000O01N2M3N1N3N2N1N3N2M3N1O]SX4"}, "image_id": 658, "id": 11442}, {"iscrowd": 0, "category_id": 1, "bbox": [243.0, 405.0, 77.0, 66.0], "area": 2190, "segmentation": {"size": [512, 512], "counts": "]mi31m?3N1N3N2N1N3N2M2O2M3N2M2O2M3N1N300O01000O01000O01000O01000O10O10O10O1000O01000O01000O01000O10O10O10O1000O01000O01000O0100N1N3N2N1N3N2M3N1N3N2M2O2M3N1O2MTbo2"}, "image_id": 658, "id": 11443}, {"iscrowd": 0, "category_id": 1, "bbox": [353.0, 420.0, 31.0, 17.0], "area": 322, "segmentation": {"size": [512, 512], "counts": "]m`51l?3L5M20001O01O0001O01O0001O01O0001O01O0001O01O00010O00000M4Kibo1"}, "image_id": 658, "id": 11444}, {"iscrowd": 0, "category_id": 1, "bbox": [336.0, 422.0, 14.0, 32.0], "area": 211, "segmentation": {"size": [512, 512], "counts": "P^X52l?3DMf@6W?Nh@3S?1m@OP?4n@On>c00@QA0o>MSA3m>JVAJL6n>M_A2b>KbA2W?N2NTb`2"}, "image_id": 658, "id": 11445}, {"iscrowd": 0, "category_id": 1, "bbox": [384.0, 440.0, 34.0, 31.0], "area": 648, "segmentation": {"size": [512, 512], "counts": "\\^P61l?4M2M4M2M3N3L3N3O00010O010O00010O010O00010O010O0001N1M4M2M3N3M2M4MPb^1"}, "image_id": 658, "id": 11446}, {"iscrowd": 0, "category_id": 1, "bbox": [414.0, 453.0, 17.0, 23.0], "area": 211, "segmentation": {"size": [512, 512], "counts": "f^_62k?4M2N2M4M2M4M2010N1N3L3N2M4M2M4MiQX1"}, "image_id": 658, "id": 11447}, {"iscrowd": 0, "category_id": 1, "bbox": [306.0, 462.0, 29.0, 29.0], "area": 488, "segmentation": {"size": [512, 512], "counts": "Q_i42k?3N3L3N2N3L3N3M210O00010O010O00010O010OO2M2M3N3L3N3L3N2N\\Qh2"}, "image_id": 658, "id": 11448}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 10.0, 26.0], "area": 135, "segmentation": {"size": [512, 512], "counts": "0j0V?M3M3N2M3M3M3N2M3MSPk7"}, "image_id": 659, "id": 11449}, {"iscrowd": 0, "category_id": 1, "bbox": [108.0, 0.0, 37.0, 13.0], "area": 261, "segmentation": {"size": [512, 512], "counts": "PPf11o?001O00001O001O00001O00001O001O00001O001O00001O001O00001O00001O00N2N2M3MS`g5"}, "image_id": 659, "id": 11450}, {"iscrowd": 0, "category_id": 1, "bbox": [279.0, 0.0, 35.0, 17.0], "area": 310, "segmentation": {"size": [512, 512], "counts": "P`[41o?2N1O1O2N1O2N1O1O2N1O2N00O1O1O100O1O100O1O100O1O1O100O1O100O1O1O100OQPS3"}, "image_id": 659, "id": 11451}, {"iscrowd": 0, "category_id": 1, "bbox": [331.0, 0.0, 62.0, 63.0], "area": 2532, "segmentation": {"size": [512, 512], "counts": "o`U53l?5L5K4L4L4K5L4LO1000O1000O1000O1000O1000O10O10O1]OSOmAS1m=SOmAS1S><100000000O100000000O100000000O1000000O100000000O10000J6J6J6J6J6J6J6JV`k1"}, "image_id": 659, "id": 11452}, {"iscrowd": 0, "category_id": 1, 
"bbox": [405.0, 0.0, 58.0, 74.0], "area": 2343, "segmentation": {"size": [512, 512], "counts": "maZ63j?3M3M4L3N2010O0001L3M3M4K4M3M4L3M3M4L3M3M4L3M3M3N3O00001O00001O0000M3M3M3M3L4M3M3M3M3M3M3M3M3M3M3M3M3M3M3M3M3M3M3MS`h0"}, "image_id": 659, "id": 11453}, {"iscrowd": 0, "category_id": 1, "bbox": [467.0, 3.0, 45.0, 89.0], "area": 2257, "segmentation": {"size": [512, 512], "counts": "dbY71m?2_ONQA6l>LQA7l>MQA6k>MRA6l>MQA5l>b0M2M3M4M2M4M2M3M4M2M4M2JnM[BU2c=201O01O010O3N21O010ON3L3N2M4L3N2M4M2M4L3N2M4M2M4LI"}, "image_id": 659, "id": 11454}, {"iscrowd": 0, "category_id": 1, "bbox": [153.0, 18.0, 33.0, 30.0], "area": 592, "segmentation": {"size": [512, 512], "counts": "Ua\\22l?2M3N3L3N3L3N2N3O0010O010O00010O010O00010O010O000M4M2M4M2M4M2N2MXoR5"}, "image_id": 659, "id": 11455}, {"iscrowd": 0, "category_id": 1, "bbox": [305.0, 19.0, 17.0, 23.0], "area": 189, "segmentation": {"size": [512, 512], "counts": "Tah42g?O^@3`?O^@3_?9N2N1O1N10O1Kg@DZ?;7N1N3N2N2N1N[on2"}, "image_id": 659, "id": 11456}, {"iscrowd": 0, "category_id": 1, "bbox": [241.0, 27.0, 15.0, 11.0], "area": 108, "segmentation": {"size": [512, 512], "counts": "o`h33i?401O000010O000010O00010ON2MUoo3"}, "image_id": 659, "id": 11457}, {"iscrowd": 0, "category_id": 1, "bbox": [80.0, 32.0, 83.0, 54.0], "area": 2561, "segmentation": {"size": [512, 512], "counts": "mQX13j?3M4L3L4M3M4L3L4M4O00010O0000010O00010O00010O00010O0000010O00010O00010O00010O0000010O00010O00010O000010O000010O00010O00010O0000010O00010O00010O00N3L3M3L4M4L3M3L5L3M]^^5"}, "image_id": 659, "id": 11458}, {"iscrowd": 0, "category_id": 1, "bbox": [230.0, 34.0, 13.0, 23.0], "area": 183, "segmentation": {"size": [512, 512], "counts": "[Qc3280Z?3b@0[?3b@1Z?`1O]NQBU1P>gNTBX1l=eNWB\\1h=aN[B_1T>0O01O01O01O01O0M4M2M0102M3N3L3N3L3N2M4L3N3L3N2M4M2M4L3N2M4M]]`3"}, "image_id": 659, "id": 11462}, {"iscrowd": 0, "category_id": 1, "bbox": [295.0, 65.0, 24.0, 25.0], "area": 345, "segmentation": {"size": [512, 512], "counts": "bbc41l?3N3M2M4M2N3L31O010O01O010O01O010M2N2M4M2N3L3Nj]P3"}, "image_id": 659, "id": 11463}, {"iscrowd": 0, "category_id": 1, "bbox": [337.0, 72.0, 15.0, 15.0], "area": 134, "segmentation": {"size": [512, 512], "counts": "`bX53j?3N2M4O010O00010O010OM3N3M2Mgm_2"}, "image_id": 659, "id": 11464}, {"iscrowd": 0, "category_id": 1, "bbox": [354.0, 75.0, 24.0, 26.0], "area": 384, "segmentation": {"size": [512, 512], "counts": "kRa53k?3M2N3L3N2N3M2O2O010O0010O010O0O2M2N3L3N2N3M2MbmR2"}, "image_id": 659, "id": 11465}, {"iscrowd": 0, "category_id": 1, "bbox": [489.0, 79.0, 22.0, 26.0], "area": 333, "segmentation": {"size": [512, 512], "counts": "Rcd72l?2M3N3L3N3L3N2N3O010O01O01OO2L3N3L3N2M4M2M_="}, "image_id": 659, "id": 11466}, {"iscrowd": 0, "category_id": 1, "bbox": [294.0, 96.0, 48.0, 63.0], "area": 1718, "segmentation": {"size": [512, 512], "counts": "aTc43k?3L3N3L3FDn@>P?El@?P?:N3L3N2O2O001O0N2M4M2M4M2M3O2O010O00010OO2L3N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3Nhld2"}, "image_id": 659, "id": 11467}, {"iscrowd": 0, "category_id": 1, "bbox": [447.0, 97.0, 12.0, 15.0], "area": 117, "segmentation": {"size": [512, 512], "counts": "\\co62k?3M3N3L30001O01N1M3M4LP]j0"}, "image_id": 659, "id": 11468}, {"iscrowd": 0, "category_id": 1, "bbox": [407.0, 104.0, 26.0, 23.0], "area": 328, "segmentation": {"size": [512, 512], "counts": "dc[62l?2N3M2N3M2N30O010O00010O010O010O010O010N1M4M2N3M2Na\\W1"}, "image_id": 659, "id": 11469}, {"iscrowd": 0, "category_id": 1, "bbox": [351.0, 108.0, 50.0, 73.0], "area": 2070, "segmentation": {"size": [512, 512], "counts": 
"Xe_53j?3M4M2M3BDVA?g>CVA`0g>DUA?i>>L3M4M2M3M4M2M4M2M3M4N11O01O010O01O01ON3L3N3L3N2M4L3N2M4M2M4L3N2M4L3N2M4M2M4L3N2M4MZ\\g1"}, "image_id": 659, "id": 11470}, {"iscrowd": 0, "category_id": 1, "bbox": [469.0, 129.0, 20.0, 19.0], "area": 215, "segmentation": {"size": [512, 512], "counts": "[dZ73k?2N2N3M2N3O010O010O010O0010M2N3M2N3M2Nk[;"}, "image_id": 659, "id": 11471}, {"iscrowd": 0, "category_id": 1, "bbox": [398.0, 144.0, 48.0, 60.0], "area": 1627, "segmentation": {"size": [512, 512], "counts": "oUW61m?3M2M3N3M2M4D_OUAc0i>@TAb0i>ATAc0h>:O00001O001O00001O001O001O00001O001O00001O2N001O00M4O0mNZAl0g>QO[AP1d>nN^AR1i>0O01OUAoNg>U1010O0010O010OM3O20O010O00010O010O01O01O001M2M4O01O01O010O01O0O1N3L3N3M2M4M2M3N3M2M4M2M3N]_i5"}, "image_id": 662, "id": 11476}, {"iscrowd": 0, "category_id": 1, "bbox": [218.0, 0.0, 41.0, 38.0], "area": 996, "segmentation": {"size": [512, 512], "counts": "RP]32l?2k@M]>3aA0^>1_A1a>O\\A4d>MYA6f>JXA8h>HUAESA=m>CQA`0n>90001O001O00001O001O001O00001OO1N2N2M3N2N2M3N2M3N2N2M3N2N2M3NR`n3"}, "image_id": 662, "id": 11477}, {"iscrowd": 0, "category_id": 1, "bbox": [355.0, 0.0, 29.0, 17.0], "area": 297, "segmentation": {"size": [512, 512], "counts": "S`a53l?1N3O1O001O1O1O001O1O001O1O001O1O00O1N2O1N2O1O1N2O1N2O1NRPP2"}, "image_id": 662, "id": 11478}, {"iscrowd": 0, "category_id": 1, "bbox": [253.0, 10.0, 119.0, 63.0], "area": 3861, "segmentation": {"size": [512, 512], "counts": "Uan32l?3M2M3N3M2N3L3N3M2O1010O01O01O010O01O01O010O010O00010O010O01O01O010O01O01O010O01O01M2N3N110OM4M2M4M2N2M4M2M4N10010O010O00010O010O00010O010O0010OM3O2O010O01O010O01O01O010O01O010OdNgAQ1Z>lNhAT1X>iNkAX1_>O0M4O00010O0010O01WOTA=k>@XA`0h>^O[Aa0f>[O]Af0o>O01ON3N1010O0001M2M4M2M4M2M]nU2"}, "image_id": 662, "id": 11479}, {"iscrowd": 0, "category_id": 1, "bbox": [163.0, 41.0, 76.0, 82.0], "area": 2845, "segmentation": {"size": [512, 512], "counts": "Zca22k?3N3L3N3L3O1010O01N1M4M2M3N3O010O00010^OROPBn0n=UOoAn0m=UOQBm0m=UOQBm0m=VOoAn0m=UOQBm0m=e0O01O010O01O01O010O01O01M2N3L3N2N3L3N3L10O010O010O001002M3N2O2O010O01O01O0N3M2M3N3L3N3M2M3N3L3N3L3Na^X4"}, "image_id": 662, "id": 11480}, {"iscrowd": 0, "category_id": 1, "bbox": [25.0, 49.0, 77.0, 53.0], "area": 2051, "segmentation": {"size": [512, 512], "counts": "Xb<2l?2N2M4M2M4M2N3L3N2N3O010O010O010O00010O010O010O01O01O010O001M2O1010O010O01O010O01O010O01O010O01O010O010O01O01O010O010O0SOTAg0k>WOXAh0P?10O00010O001M2N3M2M3N3M2M4M2Nbm\\6"}, "image_id": 662, "id": 11481}, {"iscrowd": 0, "category_id": 1, "bbox": [1.0, 89.0, 81.0, 65.0], "area": 2428, "segmentation": {"size": [512, 512], "counts": "jc01m?2N3M2M3N3M2N3L3N3M2N2N3L3N3M201O01O010O010O01ZAhNb>\\10O0010OO2N11O010O01mN[Ai0e>UO]Al0c>POaAo0h>1O010O01O010O01O010O01O010O01O010O01O010O01O010O01O010O0VOQAd0n>ZOTAf0T?0O01O01O01O0N3M2M4M2N2M4M2Nnkf6"}, "image_id": 662, "id": 11482}, {"iscrowd": 0, "category_id": 1, "bbox": [98.0, 131.0, 59.0, 51.0], "area": 1681, "segmentation": {"size": [512, 512], "counts": "YUa13l?2M2BKPA7n>KPA7m>LPA6o>KPA7n>Ko@8n>>O2M3N2M21000O01O1N00O0100O010O012O1O0N3N2O001000O1N1N3N2M2O2N02N2M2O2N2M2O2M3N1N3N2M2O2N2M3N1N3N2M2Oa[a5"}, "image_id": 662, "id": 11483}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 144.0, 51.0, 53.0], "area": 1571, "segmentation": {"size": [512, 512], "counts": "c4P1n>3N110O010O010O001O001O010O01O010O010O010O010O010O010oNVAj0j>SOYAn0m>O010O0010O010O0010O001M2N3L3N3M2N3M2N3M2N3LgZV7"}, "image_id": 662, "id": 11484}, {"iscrowd": 0, "category_id": 1, "bbox": [143.0, 144.0, 63.0, 61.0], "area": 1914, "segmentation": {"size": [512, 512], "counts": 
"PfW21m?3N2M2O2M3N1N3N2M2O2M3N1N3N2M2O2N2M2O1N10O010O010O010O010O010020O010O1M2O2N2O01000O0100N1N3N2kN[Ak0g>SO\\Ak0f>SO[Ak0o>N2M3N1O2M3N1N3N2M2O2M3N1Nkjh4"}, "image_id": 662, "id": 11485}, {"iscrowd": 0, "category_id": 1, "bbox": [53.0, 157.0, 30.0, 30.0], "area": 517, "segmentation": {"size": [512, 512], "counts": "eej01m?2N3M2M4M2N3L3N3M2N2N3O01M2N3M2010O0010ON20M4M2N3M2M4M2N2MQ[f6"}, "image_id": 662, "id": 11486}, {"iscrowd": 0, "category_id": 1, "bbox": [212.0, 178.0, 67.0, 106.0], "area": 3197, "segmentation": {"size": [512, 512], "counts": "]XZ32m?2M3^OKUA7i>LUA6h>LVA6i>LTA7^=AfC;iN7_=_OfC_]14N3N2M2N3N1O2M3N1N3M2O2M2N3N2M2N3M2O2M3MeYd3"}, "image_id": 662, "id": 11487}, {"iscrowd": 0, "category_id": 1, "bbox": [79.0, 191.0, 90.0, 95.0], "area": 2861, "segmentation": {"size": [512, 512], "counts": "jfW11n?2N2M3N1O2N2N2N2N2N2N2N2N1O2M3N2N2N2N2N2N2N2N1O2N200000000000jNcAh0]>VOeAi0\\>UOfAk0Y>TOiAl0W>ROjAo0V>nNmAR1a>00000000O100TOTAc0k>[OXAe0h>YOZAg0f>WO\\Ai0n>0O1000O10000000O1M3N2N2N2N1O2000000000000O10O01N200O100000O1N2N2N2N2N1O2]OPA1ODU?7n@3U?Lm@1V?Ml@1V?Mk@2W?Lk@2c?NdW[5"}, "image_id": 662, "id": 11488}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 200.0, 43.0, 54.0], "area": 1141, "segmentation": {"size": [512, 512], "counts": "X6W1i>mN[Ak0f>RO\\Ao0k>O010O0010O010O0010O010O010O0010O0010O010O010VOPAb0P?\\ORAe0n>XOUAg0R?010O010O010O00M4M2N3M2N3L3N3MhXZ7"}, "image_id": 662, "id": 11489}, {"iscrowd": 0, "category_id": 1, "bbox": [256.0, 234.0, 66.0, 88.0], "area": 2568, "segmentation": {"size": [512, 512], "counts": "dYP42l?3M2O2AHRA;k>GSA;k>HRA;l>FSA;k>HRA;k>>N3N2M2DhNnA[1P>gNmA[1Q>hNmAZ1P>hNnAZ1P>=N2M2N3M2O11M2N3M2O2M2N3M3N1N3M2N3N1N2N00010O0000010O0000010O003M2N3N1N3M2N3N2M2N3M2O2M2N3M3N1N3M^hn2"}, "image_id": 662, "id": 11490}, {"iscrowd": 0, "category_id": 1, "bbox": [68.0, 236.0, 33.0, 34.0], "area": 615, "segmentation": {"size": [512, 512], "counts": "mWR11n?2N2N2N2N1O2M3N2N2N2N2N2N1O2O10000000O0O000000O1002]Om@6U?Hm@6U?Hm@6U?Hm@6U?Hm@6`?M3NSX]6"}, "image_id": 662, "id": 11491}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 261.0, 23.0, 38.0], "area": 537, "segmentation": {"size": [512, 512], "counts": "U8Q1P?O010O01O01SORAf0n>XOTAi0R?O01O010O01O0O1M4M2N3L3N3L3N2N^Wd7"}, "image_id": 662, "id": 11492}, {"iscrowd": 0, "category_id": 1, "bbox": [330.0, 277.0, 95.0, 54.0], "area": 2750, "segmentation": {"size": [512, 512], "counts": "YYU52k?3N3M2N2M4M2N3M2N3O01O010O010O00010O010O010O00010O01N1M4O01O010O01O01O010O010O010O00010O010O010O0010O0010O0010O010O010O00010O010O0N3M2N2M4M2N3M2M4O000010O0N3M2M4M2M4M2M3N3M2M4M2M4M2M4M2M3NTW[1"}, "image_id": 662, "id": 11493}, {"iscrowd": 0, "category_id": 1, "bbox": [70.0, 278.0, 67.0, 52.0], "area": 2313, "segmentation": {"size": [512, 512], "counts": "PZS14d?8H8H8000010O000000000000010O00000K5I7H81O01O000000000001O01O000000000001O01O000000000001O01O00M300WObA0^>HjA9U>_OSBa0f>0001O00000001O000001O0N2H8HcVk5"}, "image_id": 662, "id": 11494}, {"iscrowd": 0, "category_id": 1, "bbox": [429.0, 301.0, 60.0, 63.0], "area": 1963, "segmentation": {"size": [512, 512], "counts": "ejf62l?3L3N3L3N201O010O0O1M4M2A[O`Ai0]>ZO`Ah0]>[O`Ah0^>?N1010O00010O0010O0010O0010O0010O00010O010O00010YObAJ^>3eAN[>OgA1Y>LkA3U>KmA6S>FQB9o=ESBO01OO2M2M4M2M3N3L3NYU;"}, "image_id": 662, "id": 11495}, {"iscrowd": 0, "category_id": 1, "bbox": [42.0, 303.0, 14.0, 16.0], "area": 132, "segmentation": {"size": [512, 512], "counts": "kYe01m?3L3N2M4M201O0001M2M3M4M2MbfS7"}, "image_id": 662, "id": 11496}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 304.0, 24.0, 27.0], "area": 449, 
"segmentation": {"size": [512, 512], "counts": "j9:d?3L3N3M2O1010OO2O00010O010O0010O0M4M2M3N3M2M6KZfc7"}, "image_id": 662, "id": 11497}, {"iscrowd": 0, "category_id": 1, "bbox": [25.0, 317.0, 26.0, 26.0], "area": 407, "segmentation": {"size": [512, 512], "counts": "^j<2l?2N3L3N3M2M3N3N1010O01O01O010O010O0O1N3L3N3M2N3L3NnUV7"}, "image_id": 662, "id": 11498}, {"iscrowd": 0, "category_id": 1, "bbox": [299.0, 317.0, 70.0, 55.0], "area": 2245, "segmentation": {"size": [512, 512], "counts": "Pke42l?3M2M3N3L3N3M2M4M2N2M4M2M4M2O20O00010O010O0010M00O22O0010O010O0010OiN\\AR1d>lN_AT1f>0010O0010O010O00010O010O0010O0010O010O0001L3N3M2M4M2N2M4M2M4M2N2M4M^UW2"}, "image_id": 662, "id": 11499}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 330.0, 14.0, 20.0], "area": 135, "segmentation": {"size": [512, 512], "counts": "g:4i?4M2N3M2N3M02Jc@G_?7c@G`?66O2O0O2Meeh7"}, "image_id": 662, "id": 11500}, {"iscrowd": 0, "category_id": 1, "bbox": [376.0, 351.0, 76.0, 52.0], "area": 1951, "segmentation": {"size": [512, 512], "counts": "k[l52l?2M3M4M2M4M2O110O010O00010O0M4M2M3N3L3N3O00010O010O00010O010OO1M4O001QOSAj0Q?000m@UOR?l0010O00010O010O00010O3NO01O01O01O010O01O01O010O01O01O010O000M4M2M4M2M3N3L3NVdm0"}, "image_id": 662, "id": 11501}, {"iscrowd": 0, "category_id": 1, "bbox": [16.0, 353.0, 55.0, 77.0], "area": 1930, "segmentation": {"size": [512, 512], "counts": "R]81m?3EMf@4X?Oe@4X?Nf@4X?YOnAg0R>c0000000000000002M3N2N2N2N2N2N2N2N2N2N2N000000000000000000000000001O2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2NRTf5"}, "image_id": 662, "id": 11503}, {"iscrowd": 0, "category_id": 1, "bbox": [127.0, 397.0, 61.0, 81.0], "area": 2235, "segmentation": {"size": [512, 512], "counts": "ano11m?2N3M2N3VOIgA9W>KeA8X>LdA6[>LbA7]>J`A8`>H^A;`>G^A:b>c0O0O20O01O010O0O2N2M2N3M2N01O2N2N3M2N000000010O000000001O002N2N3M3M2O2M2N3M2N3M2N3M2N3M2N3N1N3M3M[cQ5"}, "image_id": 662, "id": 11504}, {"iscrowd": 0, "category_id": 1, "bbox": [185.0, 419.0, 53.0, 79.0], "area": 2097, "segmentation": {"size": [512, 512], "counts": "Vol22l?3M2@LRA7k>KSA7k>LRA7k>KSA7k>LRA7k>`0N3BjNnAX1P>kNlAY1Q>iNmAY1Q>jNlAY1Q>k=DUB;j=GUB9j=IUB7j=KVB4h=OWB1h=1VB0i=2UBOj=3TBNk=3TBNk=4SBMl=5RBLm=6QBKn=7PBJo=8oAIP>T1O1O1O1O1001O1O1N2N2N2N2N2N2N2N2N2N2N2HZASOh>k0ZASOh>k0ZASOh>k08N2N2N2M3N2N2N2N1O2N2N2N2NVQe2"}, "image_id": 662, "id": 11507}, {"iscrowd": 0, "category_id": 1, "bbox": [62.0, 448.0, 16.0, 13.0], "area": 105, "segmentation": {"size": [512, 512], "counts": "T^o02l?3M201O010O010O0100O10O010ON3Mkah6"}, "image_id": 662, "id": 11508}, {"iscrowd": 0, "category_id": 1, "bbox": [490.0, 462.0, 22.0, 33.0], "area": 478, "segmentation": {"size": [512, 512], "counts": "n^e73k?2N3M2N3M2O2M2d@@V?f0O2O010O010O0100O010OO2M3M2bA"}, "image_id": 662, "id": 11509}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 465.0, 33.0, 47.0], "area": 898, "segmentation": {"size": [512, 512], "counts": "X?h0Y?O001O0000N2M3N2M3N2M3N2M3N2M31O0N3L3N2N3L3N3L3N2N3L3N3L3N2M4M2NZQ_7"}, "image_id": 662, "id": 11510}, {"iscrowd": 0, "category_id": 1, "bbox": [92.0, 472.0, 27.0, 27.0], "area": 431, "segmentation": {"size": [512, 512], "counts": "Z_^12l?2M3M4M2M4L3N210O0010O0010O0010O010O0O1N3L3N3L3N2N3LSQT6"}, "image_id": 662, "id": 11511}, {"iscrowd": 0, "category_id": 1, "bbox": [45.0, 484.0, 42.0, 28.0], "area": 771, "segmentation": {"size": [512, 512], "counts": "oof01m?2M3N2N2M3N2M3N2M300001O001O00001O001O000000N2N2M3N200001O001O00001L3N3L3N3M2M3N3LiPd6"}, "image_id": 662, "id": 11512}, {"iscrowd": 0, "category_id": 1, "bbox": [430.0, 487.0, 70.0, 25.0], "area": 938, "segmentation": {"size": [512, 512], "counts": 
"o_g61m?2N2N2N2N2001O001O001OO1000000O10000O1N2M3N2N2N2N2O11O001O00001O001O001O001O001OO100001O001O001O001O00001O001O001E^@6f?01O00001O001O001O001O0NU`5"}, "image_id": 662, "id": 11513}, {"iscrowd": 0, "category_id": 1, "bbox": [147.0, 508.0, 9.0, 4.0], "area": 19, "segmentation": {"size": [512, 512], "counts": "noY22l?21O001O001O0000Q`a5"}, "image_id": 662, "id": 11514}, {"iscrowd": 0, "category_id": 1, "bbox": [381.0, 117.0, 51.0, 65.0], "area": 1770, "segmentation": {"size": [512, 512], "counts": "Yen51m?2M4M2N3L3N3M2M3N3M2M4M2N3L3N2N3L3N3M2M4M2N2O2O010O0010N1N3L3N3M2M3N3M2M4M2N3L3N2N3L3N3M2N3L3N2N3L3NRlW1"}, "image_id": 663, "id": 11515}, {"iscrowd": 0, "category_id": 1, "bbox": [417.0, 195.0, 72.0, 52.0], "area": 1834, "segmentation": {"size": [512, 512], "counts": "jf`62l?2N3L3N3M2N3M2N2M4M2O2O010O0010O010O0010O010O010O0010O010O010O0010O010O0010O010O010O0010O010O010O0010O010O0010O010O010O00N3M2N3M2M4M2N3M2N3L3NTY;"}, "image_id": 663, "id": 11516}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 510.0, 4.0, 2.0], "area": 5, "segmentation": {"size": [512, 512], "counts": "n?2o?O0000Q`m7"}, "image_id": 663, "id": 11517}, {"iscrowd": 0, "category_id": 1, "bbox": [30.0, 150.0, 23.0, 16.0], "area": 198, "segmentation": {"size": [512, 512], "counts": "mT?1l?3N3M201O00010O010O0010O0010O010O00010ON3M2NU[U7"}, "image_id": 665, "id": 11518}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 28.0, 29.0], "area": 520, "segmentation": {"size": [512, 512], "counts": "0c0]?001O000010O010O00010O010O01O010O10O00010O015H2O1M4L3Meoa7"}, "image_id": 666, "id": 11519}, {"iscrowd": 0, "category_id": 1, "bbox": [189.0, 204.0, 58.0, 57.0], "area": 1747, "segmentation": {"size": [512, 512], "counts": "\\gn22l?3N1N3N2M2O2M3N1O2M3N1N3N2M3N1N3N2N1N3N2M2O2O1O010000O010000O01000O01N2M2O2M3N2N1N3N2M2O2M3N1N3N2M2O2N2M3N1N3N2M2OnXT4"}, "image_id": 667, "id": 11520}, {"iscrowd": 0, "category_id": 1, "bbox": [240.0, 228.0, 67.0, 112.0], "area": 3467, "segmentation": {"size": [512, 512], "counts": "SXh32k?3N3M2M4M2M3N3M2M4M2M4M2N201O010O01O01O010O010O0nBlNX;T1bClN`02o;R1^CnN`03RM2MdVV3"}, "image_id": 667, "id": 11521}, {"iscrowd": 0, "category_id": 1, "bbox": [26.0, 231.0, 40.0, 40.0], "area": 1116, "segmentation": {"size": [512, 512], "counts": "TX=1j?6K4L4K5f@Ao>j0L5L31O0001O01K4N2000001O01O0001O01O0001O01O000001O01O0001K4L4K5L4K6Kehn6"}, "image_id": 667, "id": 11522}, {"iscrowd": 0, "category_id": 1, "bbox": [26.0, 277.0, 16.0, 15.0], "area": 168, "segmentation": {"size": [512, 512], "counts": "iX=5k?00001X@Kc?9L40000001O01O000000K5IagZ7"}, "image_id": 667, "id": 11523}, {"iscrowd": 0, "category_id": 1, "bbox": [106.0, 309.0, 46.0, 42.0], "area": 1110, "segmentation": {"size": [512, 512], "counts": "`Ze13k?2M3N3L3N3M2M3N3M2M4M2O101O010O01O01O010O01O01O010O01O01O010ON3L3N2M4M2M4M2N2O2O0H]@4h?O00001Mkec5"}, "image_id": 667, "id": 11524}, {"iscrowd": 0, "category_id": 1, "bbox": [509.0, 19.0, 3.0, 28.0], "area": 48, "segmentation": {"size": [512, 512], "counts": "f`n74k?7VAGj>9WAEj>;XACh><[ABd>?^A_Ob>a0>00O0100000O01001O1O0O2O1O1000000000000000000000O1000O10000000000000000000O10000000O1000000000000000O1000000WM"}, "image_id": 668, "id": 11526}, {"iscrowd": 0, "category_id": 1, "bbox": [153.0, 226.0, 359.0, 286.0], "area": 24307, "segmentation": {"size": [512, 512], "counts": 
"Ro\\22l?5L2N2N10000000000O01O0O2O10O010O01000O010O010O01000O010O0100O010O0100O0100O010O01000O010O010O010O10O10O010O10O010O10O010O10O010O010O10O10O0n@I]>7bAK\\>5cAN\\>2cAO]>1bA1\\>OdA3[>MdA5Z>KeA8Z>HeA:Y>FgA;Y>EfA=Y>BfAa0X>@gAb0X>^OgAc0X>]OhAb0Xo;CPDgoR=BmB=T=DkBX=BgB>Y=CfB=`e>gh>j_;\\O]E>iNH:>`;^O\\Ea;_O\\E;iNH9>c;AYE:kNG8>c;DYE7lNG7>d;FXE5lNG8>d;HWE3mNG7=f;JVE2mNG5>g;LVEOmNG6>g;NUEMnNG5>h;NVEMmNG4>i;JmCDZ1=lNF4>i;K]E1eNG3>^2]Od6=WH1dNH1=`2_Ob6;ZH1cNIOc20g5A^IQ1k0Ae5^OaIP1j0Cd5]OcIo0i0Fa5\\OgIm0h0I_5YOkIm0f0L]5WOnIk0e01[5TOQJj0d0H[MYOn76SJi0c0H^MXOk79SJg0c0HaMXOh79TJg0b0HdMXOd7;UJe0a0HiMXO`7cJ471oM]O@Hc7g0fJ39NQNBP7gJ3gJ3:HXNEh6`0fJ39HZNDh6a0eJ38H]NBf6d0dJ37HaN_Oe6f0cJ36H]65]I35G`66[I34Gb66ZI33Gd66YI22If64YI30Ih64XI3OHk65VI3NHm65UI3LIP74TI3KIR74SI3JIT74QI4JGV76PI3IGX76oH3GH[75nH3FH]75mH3EH_75lH2EHa76jH2DHc7LSITOGo7KmH>ROHR8JlH>QOHS8KlH=POGV8LjH=POFW8MiHPOC[8OeH>POC[8OeH>POC[8OeH>POC[8OeH=QOCZ81eHPOC[8OeH>POC[8OeH=QODZ8OeH=QOC[80dH=QOCZ81eHM2M4M2N2M4M2M4M2NY^`4"}, "image_id": 669, "id": 11530}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 6.0, 9.0, 46.0], "area": 193, "segmentation": {"size": [512, 512], "counts": "6^1c>D_ATOd>4\\A:3_Od>3\\A;o>CSA;o>CTA:Z?M4M2M^_k7"}, "image_id": 669, "id": 11531}, {"iscrowd": 0, "category_id": 1, "bbox": [10.0, 25.0, 53.0, 60.0], "area": 1899, "segmentation": {"size": [512, 512], "counts": "VR53k?2M4M2N3M2M4M2N2M4M2N3M2M3N002M4M2N3N11O010O010O010O01O01OO2M2M4M210O0001N1M4M2N3@cAYO_>d0dAYO`>d0bAZO`>d0cAYO_>e0`0L3N3M2M4M2N2NQ_P7"}, "image_id": 669, "id": 11532}, {"iscrowd": 0, "category_id": 1, "bbox": [235.0, 34.0, 62.0, 76.0], "area": 2570, "segmentation": {"size": [512, 512], "counts": "Pce31m?2M4M2N2M4]OD^A>_>E^A?_>D^A>_>E_A>^>D_A?_>b0M4M2M4M2M3N3M2M4O0010O0010O0010O0010O010OM3N3L3N3M2M3N3]NfA[1a>N2N3L3N3L3N3N10010O010O000O2M2M4M2M4M2N2M4M[^[3"}, "image_id": 669, "id": 11533}, {"iscrowd": 0, "category_id": 1, "bbox": [57.0, 46.0, 56.0, 62.0], "area": 2086, "segmentation": {"size": [512, 512], "counts": "nbl02l?2N3L3N2M4M2N3L3N3M2M3N3L3N3M0O2O2M4M2N3O01O010O010O01O01O010O01N1N3L30001ON3M2N3L3N2M4C_AWOc>g0`AVOc>f0`AWOc>g0=M2M3N3L3N3M2M3N]^W6"}, "image_id": 669, "id": 11534}, {"iscrowd": 0, "category_id": 1, "bbox": [99.0, 61.0, 52.0, 64.0], "area": 1639, "segmentation": {"size": [512, 512], "counts": "aca13k?2M4M2M3N3L3N3L3N2M4M2M4M2M31N110O0010O0010O0010O0N3L3N2M10O10O010O13L3N3L3N2N3L3N3L3N2N3L3N3L3N3M2M3Nn]d5"}, "image_id": 669, "id": 11535}, {"iscrowd": 0, "category_id": 1, "bbox": [133.0, 75.0, 52.0, 64.0], "area": 1611, "segmentation": {"size": [512, 512], "counts": "PdR22k?3N3L3N2M4M2N3L3N3L3N2M4M2M30O20O01O01O010O01O01N1N3M2M2O00O010O01003L3N2N3L3N3L3N2N3L3N3L3N3M2M3N3L3N`]S5"}, "image_id": 669, "id": 11536}, {"iscrowd": 0, "category_id": 1, "bbox": [205.0, 83.0, 8.0, 8.0], "area": 44, "segmentation": {"size": [512, 512], "counts": "ebV34j?20010O01O01OO\\]e4"}, "image_id": 669, "id": 11537}, {"iscrowd": 0, "category_id": 1, "bbox": [166.0, 90.0, 53.0, 64.0], "area": 1639, "segmentation": {"size": [512, 512], "counts": "_Tc22l?2M4M2N3L3N2M4M2M4M2N2M4M2M4N01O2O010O00010O010O00N3L3N3M0O01000O0101O2M4M2N2M4M2N3L3N3M2M3N3L3N3M2M4M2NQ]b4"}, "image_id": 669, "id": 11538}, {"iscrowd": 0, "category_id": 1, "bbox": [201.0, 105.0, 51.0, 63.0], "area": 1581, "segmentation": {"size": [512, 512], "counts": "ldT33k?2M4M2M4M2M3N3L3N3L3N2M4M2N111O01O010O010O00010O01L3N2N0O010O010O11N3N2M4M2M4M2N2M4M2M4M2M3N3M2M4M2MdlQ4"}, "image_id": 669, "id": 11539}, {"iscrowd": 0, "category_id": 1, "bbox": [268.0, 107.0, 9.0, 10.0], "area": 53, "segmentation": {"size": [512, 
512], "counts": "bSV41l?3N3M21O01OO2M2Mf\\e3"}, "image_id": 669, "id": 11540}, {"iscrowd": 0, "category_id": 1, "bbox": [429.0, 115.0, 83.0, 73.0], "area": 3469, "segmentation": {"size": [512, 512], "counts": "Ref63j?3N2N3L3N3M2M4M2M3N3M2M4M2N2O2O01O0N1O0O02O2N2M4N101O010O01O010O01O010O01O010`NcA[1b>0O010dAeNQ>[1mAgNS>Y1jAkNV>_1O1O2O0010O010O0M3N3M2N3mNaAd0^>ZOdAf0]>WOfAh0Z>UOiAl0V>ROmAm0c>10O01O01O01N1N3M2M3N3M210O010O00010O010O0cK"}, "image_id": 669, "id": 11541}, {"iscrowd": 0, "category_id": 1, "bbox": [246.0, 124.0, 50.0, 70.0], "area": 1947, "segmentation": {"size": [512, 512], "counts": "nTk31m?2M3o@MR>7jALT>6jALS>7jAMS>6jALT>6iANV>3gAOZ>0cA3]>MaA6^>f01O01O0N3L3N2N3L3N3M2M3001M2N3L3N3M2M101O2M4M2N3L3N2N3L3N3M2M4M2N2M4M2N3L3N3Mnk[3"}, "image_id": 669, "id": 11542}, {"iscrowd": 0, "category_id": 1, "bbox": [279.0, 150.0, 61.0, 59.0], "area": 2083, "segmentation": {"size": [512, 512], "counts": "le[43j?3N3L3N2M4M2N3L3N2M4M2M4M2N3L3O110O010O0001M2N3N10`AiNW>X1eAkN\\>T1bAoN]>Z110O01cNaAX1^>eNeA[1`>010O010O00010O0O1N0O0103L3N2N3L3N3N11ON3M2M4M2N2M4H`@Lb?28Mnje2"}, "image_id": 669, "id": 11543}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 161.0, 36.0, 63.0], "area": 1562, "segmentation": {"size": [512, 512], "counts": "[5]1`>3N3M21O01O010O01O010O01O010M2N2N3M2010O0O2M2YOlA_OX>>jA@X>=lA@V>>lA_OX>>jA@X>=lA_OX>>jA@X>=g0N3M2M4M2Mkj]7"}, "image_id": 669, "id": 11544}, {"iscrowd": 0, "category_id": 1, "bbox": [397.0, 193.0, 115.0, 93.0], "area": 4966, "segmentation": {"size": [512, 512], "counts": "UgV62l?3L3N3M2M4M2M3N3M2M4TASOc>m0[AUOe>k0YAWOh>Q1M2M4M2N3L3O1010O010O00010O0N3O010O0`NeA\\1_>01O`AcN]>`10010O010O01O01ObNcAY1b>10O01O01O01N00N310O01O01O010O010O01]AeN_>_10O010cN`AX1e>O01O010O010O01O01O010L2011O01O010O2OO010O00010O010O0bNbAZ1b>1O010O010O0N2000M40O01O01O010O010O0\\AhNa>[101O01eN_AU1a>hNbAY1n6"}, "image_id": 669, "id": 11545}, {"iscrowd": 0, "category_id": 1, "bbox": [109.0, 213.0, 73.0, 52.0], "area": 2014, "segmentation": {"size": [512, 512], "counts": "]gf11m?2N3L3N3M2N3L3N201O001M2N3M200010O010O01O01O010O010O01O01O010O010OO2L3N2O20O01O010O01O010O01O010O01SOYA?g>_O[Ab0e>ZO^Af0b>XOaAh0^>VOdAj0j>0O01O01O010O010O01N1M3N3M2M4M2N3L3N`hT5"}, "image_id": 669, "id": 11546}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 228.0, 11.0, 46.0], "area": 299, "segmentation": {"size": [512, 512], "counts": "T7^1c>N1M3N3\\OWAMk>0XAMl>0VANl>0WAMk>0XAMl>0WAMk>0\\Yj7"}, "image_id": 669, "id": 11547}, {"iscrowd": 0, "category_id": 1, "bbox": [175.0, 243.0, 52.0, 76.0], "area": 2308, "segmentation": {"size": [512, 512], "counts": "`ig23S?NdA5X>OeA3Y>OdA5Y>NdA4Y>OdA4Z>NdA5X>OeA3Y>OdA5X>k0N3M2M3N3L3N3N100010O010O010O00010O010L3N2M4M5J4M2N3L3N2M4M2N3L3N2M4M2M4M2N2M4M2M4M2NnW^4"}, "image_id": 669, "id": 11548}, {"iscrowd": 0, "category_id": 1, "bbox": [82.0, 254.0, 71.0, 50.0], "area": 2015, "segmentation": {"size": [512, 512], "counts": "bXY13k?3M2M4M2M4M2N2M4N110O01O010O01O010O01O01O010O01O010O01O010O01O01N1M4M2M3O2O0010O010O00010O010O0010O0010oNXAh0h>UO[Ak0f>RO\\Ao0k>O010O010O00010O010L3N2M4M2N3Dc@0`?Mc@1lfc5"}, "image_id": 669, "id": 11549}, {"iscrowd": 0, "category_id": 1, "bbox": [222.0, 257.0, 62.0, 57.0], "area": 2141, "segmentation": {"size": [512, 512], "counts": "RY_32l?3M2M3N3M2M4M2N3M2M4M20001O010O010O01O01O010O010O01OO2L3N3M2N3L3O110O010O010O0010O0010O010O0010O0bNbAZ1b>10O001M2M4M2]OUAOm>OVAMn>OUAOm>OVANm>OUANn>OUAO\\Vb3"}, "image_id": 669, "id": 11550}, {"iscrowd": 0, "category_id": 1, "bbox": [59.0, 299.0, 60.0, 62.0], "area": 2259, "segmentation": {"size": [512, 512], "counts": 
"]jm03k?3L3HKc@7[?9M2M4M2M301O0N3L3N2O2O0cAhNS>X1jAjNV>V1gAmNZ>R1dAQO[>Z1010O0010O010O00010O010O00010O010O0001NO0O10O100O3N2M4M2M4M2M4M2M3N3M2M4M2M4M2M3NaVT6"}, "image_id": 669, "id": 11551}, {"iscrowd": 0, "category_id": 1, "bbox": [204.0, 306.0, 66.0, 44.0], "area": 1644, "segmentation": {"size": [512, 512], "counts": "WZV31m?2M4M2N3L3N3M2M3N3O010O00010O010O0010O0010O010O001O0M3N3O010O00010O010O010O00010O010O010O00010O010O00010OM4M2N3L3N201O0N3L3N3M2M3NPfh3"}, "image_id": 669, "id": 11552}, {"iscrowd": 0, "category_id": 1, "bbox": [101.0, 309.0, 52.0, 64.0], "area": 1603, "segmentation": {"size": [512, 512], "counts": "Zkb12l?3M2M4M2M3N3L3N3L3N2M4M2N2M21000010O010O00010O010OO2L3N2NO0100O010O0102M3N2M4M2M4M2N2M4M2M4M2M3N3L3N3LXVc5"}, "image_id": 669, "id": 11553}, {"iscrowd": 0, "category_id": 1, "bbox": [354.0, 319.0, 55.0, 59.0], "area": 2011, "segmentation": {"size": [512, 512], "counts": "Y[a53k?3M2N3L3N3D@TAb0j>ASAb0j>@SAc0j>;N3M2N3M2010O01O01O010O00N1O3M2N201O0010O010O010O0010O00N3L3N3M2N3L3lNXAn0n>N3M2M4M2N3M2N2M4M2N3McUc1"}, "image_id": 669, "id": 11554}, {"iscrowd": 0, "category_id": 1, "bbox": [133.0, 321.0, 52.0, 65.0], "area": 1601, "segmentation": {"size": [512, 512], "counts": "gkR23k?3M2M4M2N2M4M2N3M2M3N3M2M102N3O000010O010O0010O001M2N2M2O00O10O0100O013M2M4M2M4M2M3N3L3N3M2M4M2M3N3L3NjUS5"}, "image_id": 669, "id": 11555}, {"iscrowd": 0, "category_id": 1, "bbox": [165.0, 335.0, 51.0, 65.0], "area": 1582, "segmentation": {"size": [512, 512], "counts": "Tlb23k?2N3L3N2M4M2M4M2N3L3N2M4M1O2O1010O0010O010O00010N1M4M1N10O10O010O0102N2M4M2M3N3L3N3M2M4M2M3N3L3N3M2M^ec4"}, "image_id": 669, "id": 11556}, {"iscrowd": 0, "category_id": 1, "bbox": [401.0, 335.0, 60.0, 60.0], "area": 2041, "segmentation": {"size": [512, 512], "counts": "mkX61m?2M4M2M4M2M301O0N2M4M2M6K2M4M2M3N3L3N3L3O1010O0O2O01O01O010O01O01O000M4M2M4M2fN]AT1j>M2M3O20O01O01O010O01N1M3N3L3N3L3N2M4M2MRUi0"}, "image_id": 669, "id": 11557}, {"iscrowd": 0, "category_id": 1, "bbox": [195.0, 349.0, 62.0, 66.0], "area": 2023, "segmentation": {"size": [512, 512], "counts": "clQ31m?2N3L3N3L3N3M2M4M200010O01O010N1N3L3N2M4M2N3L3N12M2010O0010O0010O010O0M3N000O01000O10O2O3O001O0N3L3HYAUOi>h0[AUOh>h09N3L3N3M2M3N3L3N3MmTo3"}, "image_id": 669, "id": 11558}, {"iscrowd": 0, "category_id": 1, "bbox": [455.0, 355.0, 55.0, 62.0], "area": 2015, "segmentation": {"size": [512, 512], "counts": "UlS71l?3N2M4M2N3L3N3RA^OY>e0eA]OY>e0eA^OX>e0dA^OY>e0eA^OX>V1M4M21O010O01O01O010O01OO2M2N30O01O01O0101N1O01ON3L3N3L3N3L3N2O20ON3M2M3N3L3N3M2M4M2M]d0"}, "image_id": 669, "id": 11559}, {"iscrowd": 0, "category_id": 1, "bbox": [338.0, 371.0, 69.0, 60.0], "area": 2445, "segmentation": {"size": [512, 512], "counts": "l\\Y51\\?1SA1g>0k@2;1g>5VANh>5UAMh>6VAMg>g0N3L3N30O0010O0010O010O0010O010O0010O0O2L310O01O01O010O010O01O010O01O010O01O010O01O010O01O010O01O0N3M2N2M4M2N3L3N3M2M4M2N2M4MeSd1"}, "image_id": 669, "id": 11560}, {"iscrowd": 0, "category_id": 1, "bbox": [389.0, 404.0, 49.0, 72.0], "area": 1739, "segmentation": {"size": [512, 512], "counts": "SnR63k?2M3N3f@Fj>>SAFj>=TAGi>i0N2M4N110O00010O010M2N3L3N2N3L10O01000O103L3N2N3L3N3L3N3L3N2N3L3N3L3N2N3L3N3L3N3MUcT1"}, "image_id": 669, "id": 11561}, {"iscrowd": 0, "category_id": 1, "bbox": [501.0, 404.0, 11.0, 24.0], "area": 158, "segmentation": {"size": [512, 512], "counts": "Vmj72l?2M4M2M4M2M3010ON3M2\\C"}, "image_id": 669, "id": 11562}, {"iscrowd": 0, "category_id": 1, "bbox": [419.0, 416.0, 52.0, 68.0], "area": 1804, "segmentation": {"size": [512, 512], "counts": 
"kna62k?4M2M3N3M2M4M2M3N3M2M4M2M4M2N2M4ON3L3001M2M2O0O011O3L3N3M201O01O001M2N3L3N2M4M2N3EYAXOj>f0XAXOj>e0YAYOj>d0:M4M2N3L3N2M4MiRd0"}, "image_id": 669, "id": 11563}, {"iscrowd": 0, "category_id": 1, "bbox": [325.0, 424.0, 62.0, 52.0], "area": 2347, "segmentation": {"size": [512, 512], "counts": "gmR53g00X>4dA0Y>3[AG19`>4\\AF0:a>;\\AIb>k00001O01O00010O0001O01O00010O0001O01O00001L30000010O00010O0001O01OO101OM301O01O01O0001O01O01M2M3L4M4K4M3L5L3M3L4M_Rn1"}, "image_id": 669, "id": 11564}, {"iscrowd": 0, "category_id": 1, "bbox": [453.0, 430.0, 51.0, 64.0], "area": 1818, "segmentation": {"size": [512, 512], "counts": "UoR72k?4M2M4M2M4M2M3N3L3N3M2M3N3L3N3N10N3O01O0N3M2M100O2O2M4N101O01O010M2M3N3L3N3L3N3EYAXOj>f0YAWOi>f0;M2M4M2M3N3L3N3L\\b3"}, "image_id": 669, "id": 11565}, {"iscrowd": 0, "category_id": 1, "bbox": [487.0, 451.0, 25.0, 58.0], "area": 786, "segmentation": {"size": [512, 512], "counts": "aoc72l?3L3N3M2M3N3L3N3M2M4M2M3N30O010O00N3L3N3L3N3M2M1oA"}, "image_id": 669, "id": 11566}, {"iscrowd": 0, "category_id": 1, "bbox": [320.0, 502.0, 57.0, 10.0], "area": 490, "segmentation": {"size": [512, 512], "counts": "g_P59g?00000001O000000000000000000000000000000000000000000000000000000000000N2000000000000001O000000000000000000000000YPS2"}, "image_id": 669, "id": 11567}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 12.0, 60.0], "area": 720, "segmentation": {"size": [512, 512], "counts": "0l1T>000000000000000000000PPj7"}, "image_id": 670, "id": 11568}, {"iscrowd": 0, "category_id": 1, "bbox": [18.0, 27.0, 41.0, 29.0], "area": 1189, "segmentation": {"size": [512, 512], "counts": "kP9m0S?0000000000000000000000000000000000000000000000000000000000000000000000000000000U_R7"}, "image_id": 670, "id": 11569}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 27.0, 192.0, 343.0], "area": 31810, "segmentation": {"size": [512, 512], "counts": "e5m5R:0O010000000O10O100000]Nc11N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O0O2O1O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N@a01O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1NmMnK\\HQ4e7oK[HP4f7PLZHo3g7QLYHn3h7RLXHl3j7TLVHk3k7ULUHj3l7VLTHi3m7WLSHh3n7XLRHg3o7YLQHe3Q8[LoGd3R8\\LnGc3S8]LmGb3T8^LlGa3U8_LkG_3W8aLiG^3X8bLhG]3Y8cLgG\\3Z8dLfG[3[8_MkF_2W9T21O1O1O1N2O1O1O1O1N2O1O1gNY100000O1000000000000000000000000000000000000000000000000j0VO1O001O1O001O1O1O1O001O1O1O1O2N1O1O1oNkCUNWHYA:e>HYA:d>IYA9f>HYA8h>IVA6m>JQA6Q?;2N1O2M3N2N2N2N1N3N2N2N2N`e`5"}, "image_id": 670, "id": 11571}, {"iscrowd": 0, "category_id": 1, "bbox": [73.0, 394.0, 27.0, 31.0], "area": 411, "segmentation": {"size": [512, 512], "counts": "mlT13l?2N2M2O2N2N2M3N1O2N2M3NO1000O12M3N2N2M2O2N2N2M3N1O2N^c]6"}, "image_id": 670, "id": 11572}, {"iscrowd": 0, "category_id": 1, "bbox": [40.0, 437.0, 25.0, 26.0], "area": 338, "segmentation": {"size": [512, 512], "counts": "T^d01n?2N2M3N1O2M3N2N1N3N2N2N20O0O2N2N2M2O4L2M3N1O2M3NQRo6"}, "image_id": 670, "id": 11573}, {"iscrowd": 0, "category_id": 1, "bbox": [83.0, 0.0, 74.0, 55.0], "area": 2325, "segmentation": {"size": [512, 512], "counts": "d`Y12l?3N1N3M2N3N1N3M3M2O2M2O20O0100N1O2O001O001O1O001O001O001O1O001O001O001O1O001O001O001O1O0010O10O010O10O010O10OhNbAn0_>PObAQ1]>mNfAS1c>N3N1N3M3M2O2M2N3M2O2M3M2N3N1N3MX_a5"}, "image_id": 671, "id": 11574}, {"iscrowd": 0, "category_id": 1, "bbox": [334.0, 0.0, 15.0, 6.0], "area": 54, "segmentation": {"size": [512, 512], "counts": "PPW51o?001O001O001O001O001O0000N2NR`a2"}, "image_id": 671, "id": 11575}, {"iscrowd": 0, "category_id": 1, "bbox": [356.0, 0.0, 62.0, 67.0], "area": 2337, "segmentation": 
{"size": [512, 512], "counts": "aPb51m?3M2N3M2N3M2N3M2O2O001O0fAZOZ=g0cB[O]=e0aB^O^=c0_B_Oa=a0]BBb=?[BCf=Q1O001O001O001O1O001OO1N2N2N2N2N2O1N2N2N2N2N2N2gN\\AT1j>M2N3M2N3M2N3M2N3M2N3M2N3M2Nho^1"}, "image_id": 671, "id": 11576}, {"iscrowd": 0, "category_id": 1, "bbox": [486.0, 0.0, 26.0, 24.0], "area": 385, "segmentation": {"size": [512, 512], "counts": "VPc71m?3N1N3N2O001O1O001O1O001O1O001O1O001O1O001O1O001O1O"}, "image_id": 671, "id": 11577}, {"iscrowd": 0, "category_id": 1, "bbox": [182.0, 4.0, 71.0, 60.0], "area": 2256, "segmentation": {"size": [512, 512], "counts": "oPk22l?3M2N3M2N3M2N3M2N3M30O010O010O010O010O010ON3M2N3O010O010O010O001M2N3M2N3N101N101O001O001O010O10O100O2O2M10OQObA<_>AcA`0\\>^OgAa0Z>]OgAd0X>ZOkAe0V>XOlAi0S>UOPBi0d>N3M3M2N3M2N3M2N3M2Ni^Q4"}, "image_id": 671, "id": 11578}, {"iscrowd": 0, "category_id": 1, "bbox": [417.0, 10.0, 63.0, 79.0], "area": 2631, "segmentation": {"size": [512, 512], "counts": "Wa`61m?2N3M2N3a@HS?9l@HR?;k@HR?d0N3cATOd=l0lATO62n=k0iAVO62Q>o0mASOS>n0kATOS>m0kAUOS>^1M3M2N30O010O010O010O010O010O010O0O2M2N3M2N1O01O0000HkAgNU>Y1nAdNR>]1oAaNQ>_1:M2N3M3M2N3M2N3N1N3M2N3M2N3M2N3M2O2M2N^o?"}, "image_id": 671, "id": 11579}, {"iscrowd": 0, "category_id": 1, "bbox": [479.0, 11.0, 8.0, 10.0], "area": 45, "segmentation": {"size": [512, 512], "counts": "``_73k?2N3N1100M2N3Md_<"}, "image_id": 671, "id": 11580}, {"iscrowd": 0, "category_id": 1, "bbox": [484.0, 15.0, 8.0, 9.0], "area": 41, "segmentation": {"size": [512, 512], "counts": "ePb71m?3M2N30O0O2M2Nao9"}, "image_id": 671, "id": 11581}, {"iscrowd": 0, "category_id": 1, "bbox": [467.0, 28.0, 45.0, 71.0], "area": 1821, "segmentation": {"size": [512, 512], "counts": "hbY71n?2M2O2M3M2O2M2O2M3N1N3N2M2O2ZOSOUBP1h=ROWBn0i=TOTBl0l=WORBi0n=XOPBh0P>[OnAe0R>]OkAc0U>_OiAc0V>d0N3N1N3N2O00O2M2O2M3N1O2O10O10O10ON3N0O010ORO"}, "image_id": 671, "id": 11582}, {"iscrowd": 0, "category_id": 1, "bbox": [53.0, 40.0, 69.0, 63.0], "area": 2613, "segmentation": {"size": [512, 512], "counts": "naj02m?3N1N3M3N1N3N2M2g@]OR?f0l@\\Ok>0WAk0g>WOVAk0h>9N2M2N3N2M3N1N3M3NO010O01O01O102M3N1N3M2OO0010O02O2M00010O010O3O1ON3N2M2O2M3M2O2M3N1N3M3N1N3N2M2N3N2M2O2M3M3N1N3N^mR6"}, "image_id": 671, "id": 11583}, {"iscrowd": 0, "category_id": 1, "bbox": [144.0, 44.0, 15.0, 20.0], "area": 121, "segmentation": {"size": [512, 512], "counts": "lQX21m?3M2O2M2N3M000001O2N3N1N3M2Nd^`5"}, "image_id": 671, "id": 11584}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 55.0, 53.0, 54.0], "area": 1548, "segmentation": {"size": [512, 512], "counts": "Q2\\1d>O00010O00010O000010O000010O002O2M3M2N3N2M2N100O1O101N1O0001O010O3M100O00010O0002N3N1N2N3N1N3M2O2M2N2N3Ne]U7"}, "image_id": 671, "id": 11585}, {"iscrowd": 0, "category_id": 1, "bbox": [150.0, 67.0, 69.0, 71.0], "area": 2659, "segmentation": {"size": [512, 512], "counts": "SS[23k?3N1N3M2O2M3M2N3N1N3M201O10O010O10O010O10O10O010N1N3M3N1N3M2O2M3M2N3N1O2O0jAZNS>i110O10O010O01000O01M2O2M2N3O10O10OROVBFk=9VBGi=7YBIh=HUBG5>h=IVBF4?i=HVBF4`0h=HUBF6?g=IVBF4?i=HgB6Z=IgB5\\=HgB5\\=HfB6d>N2M]\\b4"}, "image_id": 671, "id": 11586}, {"iscrowd": 0, "category_id": 1, "bbox": [242.0, 105.0, 62.0, 69.0], "area": 2376, "segmentation": {"size": [512, 512], "counts": "[Ti31m?3M2k@K]>8`AJ^>8`AK]>8`AJ^>8`AK]>8`AJJE`>c0dAN\\>3aA0^>0`A2a>g0N1N3M2N001O3M2N3M2N3O0010O10O01O0N3M2N3M2N3M2N3M2eN_AU1f>O0000003M2N3M1O1O3N110O010O010N1N3M2N3M2N3C^@8g?M2N3MZlW3"}, "image_id": 671, "id": 11587}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 106.0, 71.0, 52.0], "area": 2045, "segmentation": {"size": [512, 512], "counts": 
"U4a0^?2N2N3N1N3M2OO00010O00010O00010O00000100O3O01OO2M2N2O0O0001O01O01O01O00010O00010O0001O01O02N2O2M2N3NO01O01O01O00010O2N2O2M2N3N1N3M2O2M2N2N3NS\\l6"}, "image_id": 671, "id": 11588}, {"iscrowd": 0, "category_id": 1, "bbox": [291.0, 139.0, 67.0, 75.0], "area": 2644, "segmentation": {"size": [512, 512], "counts": "Zea43k?2N3M2N3M2N3M2N3M2TAYO]>j0`AXO`>h0^A[Oa>f0\\A\\O8HQ>k0eA@7HS>U1kAmNV>S1gAPOX>P1fARO[>Z1O010O010O010M2N3N1010O010O010O010O001M2N00000000000000001O3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N]k\\2"}, "image_id": 671, "id": 11589}, {"iscrowd": 0, "category_id": 1, "bbox": [130.0, 141.0, 71.0, 71.0], "area": 2694, "segmentation": {"size": [512, 512], "counts": "nTQ21o?1SA0g=2WB0g=2XBOf=3XBOf=3XBOf=3XBOf=4fAC:;n=4fAC:3_A0^>3_AO_>3_A0^>3^A0_>3_A0^>3_AO_>3^A0_>k0M210O010O01O01O010O010O010O00001O001N1O2M2N2N3L3N3M0O1000O1002001O0M4M2N2FoAbNU>[1mAcNU>[1nAbNU>Z1:N3M2O1010OZOXA3j>IYA5i>IZA3j>JXA4j>JYA3[?N2M\\Zg1"}, "image_id": 671, "id": 11592}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 159.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "o42ojo7"}, "image_id": 671, "id": 11593}, {"iscrowd": 0, "category_id": 1, "bbox": [415.0, 165.0, 72.0, 47.0], "area": 2311, "segmentation": {"size": [512, 512], "counts": "Uf_61j?5J6K5N20001O0001OM3J7J5O1000010O00000001O01O000000010O00000001O01O000UAnNh>V1O01O0000M4O000001O0001O0001O0001O000001O0001O0001L3J6K5M300010OM3J6K5JjZ<"}, "image_id": 671, "id": 11594}, {"iscrowd": 0, "category_id": 1, "bbox": [193.0, 168.0, 50.0, 43.0], "area": 1072, "segmentation": {"size": [512, 512], "counts": "jeP31n?2N2O1N2N2N2N3M2N2N2N2N2N2N2O1N2N3M2N2N20O1N2N2N2N2N3M2N21O00000000000001N1O1N2N2N2N2N2N2N2N2N3M2OhYV4"}, "image_id": 671, "id": 11595}, {"iscrowd": 0, "category_id": 1, "bbox": [36.0, 182.0, 61.0, 76.0], "area": 2575, "segmentation": {"size": [512, 512], "counts": "_Vb02m?3M2\\@J\\?7c@J[?9b@J[??N10O01O1O2O2M2N2N3N1N2N3M2O1O20OLfNaAZ1]>hNcAY1Z>jNdAW1[>8N3M2N3N1N2N3M20010O000N10O0000010O0000011N2eNVB=l=AVB=m=@UBFJ>R>KVBDJ?R>KVBEJ=S>KUBFJ=S>KVBEI?R>KaB2b=K`B3b=K`B3b=KaB3a=K`B3h>N3N`X_6"}, "image_id": 671, "id": 11596}, {"iscrowd": 0, "category_id": 1, "bbox": [144.0, 193.0, 53.0, 51.0], "area": 1107, "segmentation": {"size": [512, 512], "counts": "UWX21n?3M2N2N2N2N2N2N2N2O1N3M2N1O001O0000000001O0000000001O0000000001O0000000001O0002N2N2N2N2N2N2N2N2O2M2N2N2NbYm4"}, "image_id": 671, "id": 11597}, {"iscrowd": 0, "category_id": 1, "bbox": [199.0, 207.0, 15.0, 16.0], "area": 123, "segmentation": {"size": [512, 512], "counts": "dfS32m?2N2N2N2N3N100000N2N3M2N2N2NYid4"}, "image_id": 671, "id": 11598}, {"iscrowd": 0, "category_id": 1, "bbox": [405.0, 209.0, 74.0, 51.0], "area": 2306, "segmentation": {"size": [512, 512], "counts": "_gZ62j?4M3L5K4N20010O00001N1L4L4M4M21O0001O01O00010O0001O01O0001O01O00010OTAPOh>T1000010O0000010O0O1N3O01O00010O0000010O0000010O0M3M4K4L4M3010O0000O2K4L4M3L5KVY`0"}, "image_id": 671, "id": 11599}, {"iscrowd": 0, "category_id": 1, "bbox": [278.0, 218.0, 78.0, 127.0], "area": 4535, "segmentation": {"size": [512, 512], "counts": "YZ[41m?2N3M2N3M2N3_MB\\Ea0a:A]Ea0a:B\\Ea0a:A]Ea0a:B\\Ea0a:B\\E`0b:B\\Ea0a:B\\E`0b:B\\Ea0a:B\\E`0c:A[Eb0d:_OYEd0g:[OWEg0i:ZOTEi0l:VOREl0n:UOoDn0Q;QOmDQ1S;POjDS1V;lNhDV1X;kNeDX1[;gNcD[1];fN`D]1`;bN^D`1b;aN[D_1h;`NVD`1l;aNQD_1R<`NlC`1VO1NYgR7"}, "image_id": 671, "id": 11601}, {"iscrowd": 0, "category_id": 1, "bbox": [233.0, 225.0, 58.0, 57.0], "area": 1810, "segmentation": {"size": [512, 512], "counts": 
"Whd31h?1[@2b?1\\@1b?8N2N1N3N2N2M3N1O2M3N1OO10O102N2M3N1O2M3N2N2N101000000O0100N2N2N2M2O2N2O1O1N2N1O2M3N2N2N2M2O2N2N2M3N1O2N2M3N2NVX^3"}, "image_id": 671, "id": 11602}, {"iscrowd": 0, "category_id": 1, "bbox": [204.0, 230.0, 33.0, 34.0], "area": 583, "segmentation": {"size": [512, 512], "counts": "hWV32l?3N2N2N2N1N3N2N2N2N2M2O2N2N2N2O10N2M3N1O2N2N2M3N2N1O2N2M3N2N2N1O^XY4"}, "image_id": 671, "id": 11603}, {"iscrowd": 0, "category_id": 1, "bbox": [354.0, 246.0, 34.0, 29.0], "area": 571, "segmentation": {"size": [512, 512], "counts": "VXa51l?3M4L3N2M4a@@Z?e00O00010O00010O0010O0010O00010O00010O0010ON2M4M2M3M4L3NRhm1"}, "image_id": 671, "id": 11604}, {"iscrowd": 0, "category_id": 1, "bbox": [84.0, 248.0, 53.0, 51.0], "area": 1090, "segmentation": {"size": [512, 512], "counts": "kXZ12n?1N2N2N2N3M2N2N2N2N2N2N2O1N000000000000001O0001O0000000000000001O0001O000000002N2N2N2O1N2N2N2N2N2N2N3M2NkWk5"}, "image_id": 671, "id": 11605}, {"iscrowd": 0, "category_id": 1, "bbox": [373.0, 252.0, 61.0, 58.0], "area": 1962, "segmentation": {"size": [512, 512], "counts": "[ij53k?3L3M4M2M3M4L30001O000M4M2M4L3N2M4N100010M2M00000100111O01O01O01O010O00010O01O01ON3L3NO013O00010OO2L3M3N3L3M3N3L3M4L3N2MQhV1"}, "image_id": 671, "id": 11606}, {"iscrowd": 0, "category_id": 1, "bbox": [425.0, 264.0, 52.0, 58.0], "area": 1962, "segmentation": {"size": [512, 512], "counts": "aid62k?3N3L3M3N3L3M4M2M3M4M6I3M3N3L3O1010O01O01O01O01O010O01O0O1M4O01O000M4L3N3L3M3N3M21O01O0M4M2M3M4Ch@NZ?0i@LZ?1i@L[?0lWa0"}, "image_id": 671, "id": 11607}, {"iscrowd": 0, "category_id": 1, "bbox": [81.0, 265.0, 172.0, 193.0], "area": 10077, "segmentation": {"size": [512, 512], "counts": "gkX11m?3N1O2N2M3N2N1O2M3N2N2N2M2O2N2N2M3N1O2N2M3N2O0100000O10O100000O10O100000O10O100000O10O10000000O01UO_A6a>HaA8_>FcA:]>CfA=Y>BiA=X>AiA`0W>^OkAb0U>[OnAe0R>YO`A0;g0T>XOaA3;d0W>^OhAc0V>_OiAb0T>@kAb0S>@kAb0S>@jAb0U>@iAb0T>AjAa0T>g0N2N2M2O2N2N2MXO[BZOc=e0`BZO_=f0bB[O\\=e0fB[OX=e0jB[OS=e0PC[Onl8W1aEYNe1>i8Z1jGeNV8[1lGcNT8\\1oGaNR8_1PH_No7b1SH\\Nm7d1TH[Nl7d1WHYNj7g1V31000O10002N1N3N2N2N2N1N3N2N2N2M2O2N2N2M3N1O2N2M3N2N1O2M3N2N2N1NTWQ4"}, "image_id": 671, "id": 11608}, {"iscrowd": 0, "category_id": 1, "bbox": [41.0, 284.0, 26.0, 23.0], "area": 241, "segmentation": {"size": [512, 512], "counts": "\\id01n?2N2N2O2M2N1O00010O00000001O01O000001O011N2N3M2N2NoVn6"}, "image_id": 671, "id": 11609}, {"iscrowd": 0, "category_id": 1, "bbox": [23.0, 303.0, 54.0, 51.0], "area": 1113, "segmentation": {"size": [512, 512], "counts": "cj;1n?2N2N2N2N2N2N2N2O1N2N2N3M2N1O0000000000000001O01O0000000000000000000001O01O0001O2N2N2N2N2N2N2N3M2N2O1N2N2NTVi6"}, "image_id": 671, "id": 11610}, {"iscrowd": 0, "category_id": 1, "bbox": [368.0, 320.0, 95.0, 77.0], "area": 3857, "segmentation": {"size": [512, 512], "counts": "`[h53j?3]OMWA5g>NUA6g>MVA6g>MWA5g>NUA6g>c0N3L3M3N3L301O01O01O01O01O010O01O01O01O010O01O01O011M1eNfAm0^>oNfAn0\\>POfAm0]>POgAm0g>M]AVOV>j0gAYOZ>f0dA]O[>c0bA@_>`0]ACc>=[AFd>k01O01O010O01O01O010O00010O01O01O010O01O01M2N2M4M201O01O0N3M1N103L3M4M2N210O00001M2M4M2M3M4M2M4L3N2M4MTUh0"}, "image_id": 671, "id": 11611}, {"iscrowd": 0, "category_id": 1, "bbox": [212.0, 334.0, 94.0, 149.0], "area": 5422, "segmentation": {"size": [512, 512], "counts": 
"b^Z31m?3N1N3M2N3M2N3M2N3M2N3M2N3M2O2M2YOkNaBX1\\=kNaBX1\\=jNbBX1\\=kNaBX1\\=jNbBX1\\=kNaBX1\\=jNbBX1]=jN`BY1`=c010O001M2N3M2N2N0000000001O0000000001O000000000001O000000000001O002N@nBVNP=j1SCVNjJSA:i>ITA:j>ISA9j>a0L3N3L3M3N3L3N2010O01O01O01O01O010O01O01O01O01O010O0YNjAc1Z>M4gNcAk0_>ROdAk0_>SOcAj0l>M20YAXOV>h0gA[OZ>d0cA_O]>b0`AA`>>]AEc>l0010O01O01O010O00010O01O01O010O00010O01O01OO2L3N2M4O0010O0M3M102M4L3N2M4O001O01O01M2M3M4M2M4L3N2M4L3N2MWcS1"}, "image_id": 671, "id": 11615}, {"iscrowd": 0, "category_id": 1, "bbox": [263.0, 391.0, 51.0, 79.0], "area": 1578, "segmentation": {"size": [512, 512], "counts": "YnS43k?2N3M2O2O010O010O0MG`@9\\?7NFf@3V?=MGm@LP?6PAJm>:SAFi>=WACg>?ZAAb>c0]A]O`>f0aAZO\\>i0cAWOZ>l0gASOV>P1jAnNV>U1jAhNU>[1kAcNT>`1mA]NS>f102N01O010O3M3N3L3M3N3L3M4M2M3M4M2M4L3N2M4L3N2MfcR3"}, "image_id": 671, "id": 11616}, {"iscrowd": 0, "category_id": 1, "bbox": [120.0, 437.0, 58.0, 62.0], "area": 1310, "segmentation": {"size": [512, 512], "counts": "R^l12m?2N2N2N2N2N2O1N2N2N3M2N2N2N20000001O0001O00000000000001O000001O00000000000001O0001O0000N2N2N2N2N3N1N2N2N2N2N2N2N2Nm`V5"}, "image_id": 671, "id": 11617}, {"iscrowd": 0, "category_id": 1, "bbox": [321.0, 445.0, 95.0, 67.0], "area": 3683, "segmentation": {"size": [512, 512], "counts": "^oP53j?3N2_OKTA9h>KUA7i>KTA8i>LSA8j>b0K3M4M2M3M4M2O02O01O01O01O01O010O01O01O01O010O01O011L3L3gNeAj0^>TOdAj0_>ROdAk0k>M21XAWOW>i0gAYOZ>f0cA^O\\>c0aA_O`>`0]ACc>m0001O00001O001O00001O001O00001O00001O001O00M3N2M3N2001O00001ON20N2N3L3O2O01O01O0O1M4M2M4L3N2M4M2M3M4MWa_1"}, "image_id": 671, "id": 11618}, {"iscrowd": 0, "category_id": 1, "bbox": [243.0, 467.0, 38.0, 36.0], "area": 750, "segmentation": {"size": [512, 512], "counts": "Yoi31m?3M2N3N2M2N3M2010O01M2N3N1N3M2O20O0100O010O010O01N2M2N3M2N3N1N3M2N3M2N3M2OSQc3"}, "image_id": 671, "id": 11619}, {"iscrowd": 0, "category_id": 1, "bbox": [492.0, 468.0, 20.0, 44.0], "area": 513, "segmentation": {"size": [512, 512], "counts": "e_f71l?3M3M4M2M3M4M2M3M4M2O2O01OTAROf>o0WASOj>Q100O1M3M3"}, "image_id": 671, "id": 11620}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 478.0, 40.0, 34.0], "area": 900, "segmentation": {"size": [512, 512], "counts": "S?j02WOP?m0O1O1O1O11O1O1O1O1O1O1O1O00O1O1O11O1O1O1O1O1O1O1O1O1O1O1O1O2N1O1O1O1O1O1KY@Oh?OZ@0l?OQ`[7"}, "image_id": 671, "id": 11621}, {"iscrowd": 0, "category_id": 1, "bbox": [172.0, 488.0, 33.0, 24.0], "area": 470, "segmentation": {"size": [512, 512], "counts": "f_f21n?3M2N2N2O1N2N2N2N1O1O1O1O1O1O1001O1O1O1O1O2N1O1O1O1O1O1H]@1d?M^@2c?M^@2i?O1OQPi4"}, "image_id": 671, "id": 11622}, {"iscrowd": 0, "category_id": 1, "bbox": [303.0, 505.0, 29.0, 7.0], "area": 98, "segmentation": {"size": [512, 512], "counts": "nog42k?3N2001O00001O001O00001O00001O00001O00N20000001O00001O00Q`i2"}, "image_id": 671, "id": 11623}, {"iscrowd": 0, "category_id": 1, "bbox": [481.0, 426.0, 31.0, 36.0], "area": 628, "segmentation": {"size": [512, 512], "counts": "hm`72m?2O1N3M2N2O1N3M2N2O1N3M2N2O2M2N2N1O12M2N2N3M2N2O2M2N2N3M2N2OVB"}, "image_id": 672, "id": 11624}, {"iscrowd": 0, "category_id": 1, "bbox": [453.0, 446.0, 44.0, 47.0], "area": 1014, "segmentation": {"size": [512, 512], "counts": "]nR71n?2N3M2O1N3M2N2O1N3M2N2N3N1N2N2N3N1N2N3O0000010O00N2N3N1N2N3M2O1N2N3M2N2O2M2N2N2O2M2N2NUQ7"}, "image_id": 672, "id": 11625}, {"iscrowd": 0, "category_id": 1, "bbox": [243.0, 459.0, 93.0, 53.0], "area": 2507, "segmentation": {"size": [512, 512], "counts": 
"ooi31n?1O100O1O1O100001O1O2N0000O1O1O100O1O1O100O1O1EHm@9S?Ik@7T?Kj@6U?Li@5V?;00O1O1O100O1O1O100O1001O1O1OO100O1O1O100O1GQO`AP1`>QO^AP1a>SO]Am0b>9O100O1O1O100O1O1O100001O1O2M2N10O0000010O0000012M11N3M2N2O2M2N2_ORAMQ?0RANo>0SANo>1RAMQ?0RAMP?1RANo>0SANP?0bag2"}, "image_id": 672, "id": 11626}, {"iscrowd": 0, "category_id": 1, "bbox": [361.0, 511.0, 3.0, 1.0], "area": 3, "segmentation": {"size": [512, 512], "counts": "ood51o?000Q`Y2"}, "image_id": 672, "id": 11627}, {"iscrowd": 0, "category_id": 1, "bbox": [4.0, 36.0, 41.0, 53.0], "area": 1315, "segmentation": {"size": [512, 512], "counts": "\\R23k?3L3N3]OE\\A>b>E[A=b>F[A>b>E[A=c>E[A>b>a0M3010O010O00N3N1010O0010O010OO2L3N2M4M2M4M2M4M2M4M2M4M2N2M4M2Me^Y7"}, "image_id": 674, "id": 11628}, {"iscrowd": 0, "category_id": 1, "bbox": [53.0, 48.0, 41.0, 50.0], "area": 1143, "segmentation": {"size": [512, 512], "counts": "gbj02l?2N3L3N3M2M3N3M2M4M2M3N3M2M4M2N3M21O010O010O00001M2M4M2N3L3N2M4M2M4M2N3L3N2M4M2MYn`6"}, "image_id": 674, "id": 11629}, {"iscrowd": 0, "category_id": 1, "bbox": [95.0, 69.0, 26.0, 25.0], "area": 395, "segmentation": {"size": [512, 512], "counts": "fb_11m?2N3L3N2N3M2M4N100010O010O0010O0010O0O2M2M3N3M2M4Mf]S6"}, "image_id": 674, "id": 11630}, {"iscrowd": 0, "category_id": 1, "bbox": [119.0, 80.0, 48.0, 61.0], "area": 1589, "segmentation": {"size": [512, 512], "counts": "Pdk12l?3M2N2M4M2N3L3N3M2M3N3M2M4M2N3L3N2N3M2M4M200010O01O01N1N3L3N3L3N3M2M4M2M3N3M2M4M2M4M2N3L3N2M4MW]\\5"}, "image_id": 674, "id": 11631}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 82.0, 14.0, 23.0], "area": 224, "segmentation": {"size": [512, 512], "counts": "b2e0[?01O010O010O000M4M2N3L3N2M\\mh7"}, "image_id": 674, "id": 11632}, {"iscrowd": 0, "category_id": 1, "bbox": [29.0, 91.0, 22.0, 19.0], "area": 253, "segmentation": {"size": [512, 512], "counts": "Uc>2l?3M2M4M2O1010O010O010O010O01O01O001M2M4M2NP]V7"}, "image_id": 674, "id": 11633}, {"iscrowd": 0, "category_id": 1, "bbox": [197.0, 108.0, 40.0, 49.0], "area": 1106, "segmentation": {"size": [512, 512], "counts": "bdR33k?2M3N3M2M4M2M3N3M2M4M2N2M4M2M4M21O010O01O01O000M4M2N2M4M2N3L3N2M4M2N2M4M2N3L3N]\\Y4"}, "image_id": 674, "id": 11634}, {"iscrowd": 0, "category_id": 1, "bbox": [1.0, 127.0, 45.0, 54.0], "area": 1368, "segmentation": {"size": [512, 512], "counts": "Ye01m?3M2M3N3M2M4M2N2M4M2N3M2M4M2N2M4M2N3N100010O010O0010OO1N3M2M4M2M3N3L3N3M2M3N3L3N3M2M4M2MjkX7"}, "image_id": 674, "id": 11635}, {"iscrowd": 0, "category_id": 1, "bbox": [245.0, 136.0, 58.0, 66.0], "area": 1986, "segmentation": {"size": [512, 512], "counts": "nej32l?3L3N3L3N2M4M2M4M2N2M4M2M4M2M4M2M3N3L3N3L3N201O00O2M2N3L3N2N3L3N3M2M3N3M2M4M21O010O010O00010O01M2N2M4M2M4M2M3N3L3N][X3"}, "image_id": 674, "id": 11636}, {"iscrowd": 0, "category_id": 1, "bbox": [178.0, 150.0, 18.0, 20.0], "area": 227, "segmentation": {"size": [512, 512], "counts": "TUi22k?4M2M4L3M3010O00010O00001L3M3M4L3MYkm4"}, "image_id": 674, "id": 11637}, {"iscrowd": 0, "category_id": 1, "bbox": [75.0, 152.0, 63.0, 55.0], "area": 1787, "segmentation": {"size": [512, 512], "counts": "feU11o?4L3L4M3M4K4M3M1N10O10O10O1000O01000O01000O10O10O10O1000O01000@WOhAj0X>YOeAf0[>^ObAb0^>A^A`0b>>10O10O10O10O10O1000O01000O101O3L5L3M3L4M4L3M3L4M4L3LTjj5"}, "image_id": 674, "id": 11638}, {"iscrowd": 0, "category_id": 1, "bbox": [148.0, 155.0, 26.0, 26.0], "area": 416, "segmentation": {"size": [512, 512], "counts": "\\UZ22k?3N2M4M2M4L3O2O01O010O01O010O010O0001N1N3L3M4M2M3NPkX5"}, "image_id": 674, "id": 11639}, {"iscrowd": 0, "category_id": 1, "bbox": [309.0, 159.0, 70.0, 48.0], "area": 1919, 
"segmentation": {"size": [512, 512], "counts": "oej42l?2N2M4M2N3L3N3M2N2M4M2N3L3N3O000010O010O010O00010N1N3M2M4N11O010O01O010O01O01O010O010O01O01O010O010O01O01O010O01O010O01M2N3L3N2N3L3N3M2M4M\\ZR2"}, "image_id": 674, "id": 11640}, {"iscrowd": 0, "category_id": 1, "bbox": [288.0, 176.0, 19.0, 22.0], "area": 229, "segmentation": {"size": [512, 512], "counts": "PV`41m?2N3M2M3N3M2M4O0010O010L3N2N3M2M4M2N^ZV3"}, "image_id": 674, "id": 11641}, {"iscrowd": 0, "category_id": 1, "bbox": [185.0, 177.0, 41.0, 53.0], "area": 1209, "segmentation": {"size": [512, 512], "counts": "ifl23j?3N3M2M4M2N2M4M2M4M2N2M4M2N3L3N3M21O010O010O01ON3M2M4M2N3L3N2M4M2N3L3N3M2M3N3M2MXj^4"}, "image_id": 674, "id": 11642}, {"iscrowd": 0, "category_id": 1, "bbox": [229.0, 194.0, 22.0, 17.0], "area": 231, "segmentation": {"size": [512, 512], "counts": "[fb32k?3M4M2O2O01O01O01O01O01O01O01O010O0O1N3L3MkYR4"}, "image_id": 674, "id": 11643}, {"iscrowd": 0, "category_id": 1, "bbox": [373.0, 194.0, 69.0, 74.0], "area": 2275, "segmentation": {"size": [512, 512], "counts": "fgj51n?2M3N1O2M3N2N1N3N2N2O10O10N2N2N1N3N2N2M3N1O2N2M3N2N1N3N1O00OK^NnAc1R>_NlAa1T>`NkA`1T>51000O0100000O02O2N2N2M2O2N2M3N1O2N2M3N2N1N3N2N2N1N3N2N2M3N1O2N2M3N2NZiR1"}, "image_id": 674, "id": 11644}, {"iscrowd": 0, "category_id": 1, "bbox": [296.0, 199.0, 24.0, 25.0], "area": 366, "segmentation": {"size": [512, 512], "counts": "hVd42l?2M4M2M4M2M3N30O010O00010O010O000O2M2M3N3L3N3Leio2"}, "image_id": 674, "id": 11645}, {"iscrowd": 0, "category_id": 1, "bbox": [162.0, 202.0, 19.0, 17.0], "area": 204, "segmentation": {"size": [512, 512], "counts": "dVa22k?3N3L3N2010O010O00010O00010O0O2M2M3NcYU5"}, "image_id": 674, "id": 11646}, {"iscrowd": 0, "category_id": 1, "bbox": [107.0, 219.0, 60.0, 51.0], "area": 1697, "segmentation": {"size": [512, 512], "counts": "gge12e?0`@3]?Oa@3]?:L3N3L3N2O20O010O0010O0010O0010O0N3L3N2N3O010O010O00010O010O00010OQOZAd0g>YO[Ah0d>UO`Aj0k>0010O010O00010O010O010N1N2M4M2M4M2N2M4M2M_X\\5"}, "image_id": 674, "id": 11647}, {"iscrowd": 0, "category_id": 1, "bbox": [183.0, 232.0, 23.0, 20.0], "area": 270, "segmentation": {"size": [512, 512], "counts": "egk21m?2N3M2M3N3M2010O00010O010O00010O0O2M2M3N3L3Ndhh4"}, "image_id": 674, "id": 11648}, {"iscrowd": 0, "category_id": 1, "bbox": [224.0, 236.0, 56.0, 41.0], "area": 1717, "segmentation": {"size": [512, 512], "counts": "TX`35k?RO\\An0d>oN`AQ1g>00010O010O010O00010O010O0010O0010O010O00010O010O0N3L3N210O010O00010O01M2M4M2M3N3M2M4M2N3L3N2N3L3NbVb5"}, "image_id": 674, "id": 11653}, {"iscrowd": 0, "category_id": 1, "bbox": [193.0, 275.0, 25.0, 25.0], "area": 380, "segmentation": {"size": [512, 512], "counts": "SiP32l?3L3N3M2M4M2N2010O010O00010O010O000N3M2M4M2M4M2MYgb4"}, "image_id": 674, "id": 11654}, {"iscrowd": 0, "category_id": 1, "bbox": [245.0, 280.0, 20.0, 10.0], "area": 164, "segmentation": {"size": [512, 512], "counts": "jhj38h?000O10000000000000O10000000000000O15KSWk3"}, "image_id": 674, "id": 11655}, {"iscrowd": 0, "category_id": 1, "bbox": [159.0, 290.0, 23.0, 31.0], "area": 384, "segmentation": {"size": [512, 512], "counts": "ki_22l?2N3HK`@8]?8M2N2M4M2N3ON2N3L3N3N1010OM3N3M2M4M2NkfT5"}, "image_id": 674, "id": 11656}, {"iscrowd": 0, "category_id": 1, "bbox": [375.0, 299.0, 77.0, 55.0], "area": 2383, "segmentation": {"size": [512, 512], "counts": "mik51n?5L3M4L4K5L4L3L5L4L2N0O010000O5L3MO10O10O10O1000O10O10O10O1000O10O10O10O1000O10O10O10O1000O10O10O1IQO]An0d>VOXAj0g>8000O01000O10O1000O01000O10003M4K4M4L3L5L3M4L3L5Lbem0"}, "image_id": 674, "id": 11657}, {"iscrowd": 0, "category_id": 1, "bbox": [9.0, 
306.0, 41.0, 59.0], "area": 1870, "segmentation": {"size": [512, 512], "counts": "Uj4`0A?B>1O01O00000000000000000001O0000000001O000000000001O0C=B>AjfV7"}, "image_id": 674, "id": 11658}, {"iscrowd": 0, "category_id": 1, "bbox": [270.0, 307.0, 74.0, 53.0], "area": 2327, "segmentation": {"size": [512, 512], "counts": "[ZW43l?8I6J7I6J00000O100000O1000O100000O_OTOPBl0o=\\OjAd0V>CcA=]>c000000O10O1000000000O01000000001N8I6J4LO100000O10006J0000000O01000000000O0100000000000O0100000000003L8I7IXec2"}, "image_id": 674, "id": 11659}, {"iscrowd": 0, "category_id": 1, "bbox": [201.0, 312.0, 40.0, 77.0], "area": 1956, "segmentation": {"size": [512, 512], "counts": "bkT31j?5b@Km>9n@Lm>9o@Lk>g0L4K5L5J5L4K50001O000K5L5L300000010O000000L5J5L4K5L5J5L4K5L4K6J5L4K5L5JXVW4"}, "image_id": 674, "id": 11660}, {"iscrowd": 0, "category_id": 1, "bbox": [157.0, 337.0, 12.0, 19.0], "area": 115, "segmentation": {"size": [512, 512], "counts": "oj^23j?3N2N3L3N3M03L3N2M4M2M`U[5"}, "image_id": 674, "id": 11661}, {"iscrowd": 0, "category_id": 1, "bbox": [81.0, 349.0, 61.0, 52.0], "area": 1794, "segmentation": {"size": [512, 512], "counts": "ikX12l?3L3N3L3N3L3N2M4M2M4M2O101O010O01O010O010O00010O010O0010O001VAPOc>o0[ASOe>T1O2O0010O001N1M4N100010O010O01N1N2N3M2M4M2N3L3N3M2M3N3M2Ncdh5"}, "image_id": 674, "id": 11662}, {"iscrowd": 0, "category_id": 1, "bbox": [404.0, 365.0, 51.0, 81.0], "area": 3106, "segmentation": {"size": [512, 512], "counts": "_[Z6b0^?0000000000000000000O5LQ1n@gN[=Y1eBiNY=W1gBiNY=W1gBiNY=W1gBiNY=W1gBiNY=o10000000000000O100000000000O10000000000000000000000000000=Bf0[Of0ZOe0[OVRl0"}, "image_id": 674, "id": 11663}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 375.0, 13.0, 21.0], "area": 273, "segmentation": {"size": [512, 512], "counts": "g;e0[?00000000000000000000000YTi7"}, "image_id": 674, "id": 11664}, {"iscrowd": 0, "category_id": 1, "bbox": [36.0, 378.0, 16.0, 22.0], "area": 314, "segmentation": {"size": [512, 512], "counts": "Q\\b0>[?7000000000000010O00000000000@edU7"}, "image_id": 674, "id": 11665}, {"iscrowd": 0, "category_id": 1, "bbox": [250.0, 379.0, 96.0, 48.0], "area": 2956, "segmentation": {"size": [512, 512], "counts": "R\\m34l?4L4L4K6K4L4L5K4K4MO100000O01000001O5J2O00000O10O1000O10O100000O0100000O0100000O10O100000O0100000O0100000O10O1000O10O100000O0100000O10O1000O102N4L1OO0100000O01000000O01000000O010004L5J5L4L5K\\cb2"}, "image_id": 674, "id": 11666}, {"iscrowd": 0, "category_id": 1, "bbox": [54.0, 383.0, 65.0, 47.0], "area": 970, "segmentation": {"size": [512, 512], "counts": "b\\k01m?2N2N3M2N3L3N2N3M210O00010O010O00010O010O0001O001O00010Ag@5Y?Hk@8U?Em@;\\?010O010O00010O010O00010O0010O0010O001^@E^??O010OCc@7]?Gf@8a?1O010O01O01OO2M2M4MmRT6"}, "image_id": 674, "id": 11667}, {"iscrowd": 0, "category_id": 1, "bbox": [35.0, 405.0, 36.0, 27.0], "area": 572, "segmentation": {"size": [512, 512], "counts": "Qma03k?2N3L3N2N3O001O0010O01O001O000N30O010O010O010O01O01O010M2N3M2M4M2N3M2NTSl6"}, "image_id": 674, "id": 11668}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 422.0, 36.0, 72.0], "area": 2251, "segmentation": {"size": [512, 512], "counts": "\\==P17iC=B>CUc]7"}, "image_id": 674, "id": 11669}, {"iscrowd": 0, "category_id": 1, "bbox": [103.0, 430.0, 78.0, 82.0], "area": 3014, "segmentation": {"size": [512, 512], "counts": "joc11l?3N3L3N2N3L3N3L3N2M4N110O0M3N2M3N2M300001O00001O0N3L3N2M4M201O01O0M4M2M3N3L3N3L3N2M4M2M4L3N2M4M2M3N3O01M2N3L3N2M4M2M4M2M3N3L3N3L3M3N3L3N3L3N2M4M2M4M2M3N3LZRU5"}, "image_id": 674, "id": 11670}, {"iscrowd": 0, "category_id": 1, "bbox": [270.0, 430.0, 16.0, 17.0], "area": 133, 
"segmentation": {"size": [512, 512], "counts": "c]W42n?1N2N2N2N3M20000000N2N3M2N2N2OXb`3"}, "image_id": 674, "id": 11671}, {"iscrowd": 0, "category_id": 1, "bbox": [316.0, 433.0, 64.0, 55.0], "area": 2092, "segmentation": {"size": [512, 512], "counts": "T^n41o?4L4L4K10O1000O1LEc@:T?GQA=o>Gl@:S?:01O4L4K5L4L4L4K100004K5L1O0O0100000O01000O10O1000O10O10O100D_AYO`>h0cATO^>k0gAQOY>o0<0O10O10O10O1000O10O2O4L4L4K5L4L4L4KbaQ2"}, "image_id": 674, "id": 11672}, {"iscrowd": 0, "category_id": 1, "bbox": [196.0, 438.0, 51.0, 74.0], "area": 2074, "segmentation": {"size": [512, 512], "counts": "h_R33a?Nh@4V?Og@4V?Ng@5V?=M2@XOfAk0W>WOgAk0W>XOfAk0W>WOgAk0W>WOgAl0V>`0N2N2N2M3N2N2N2N2N2001M2N2N3M2N3L3N3M003M2N3M2N3M2N3L3N3M2N3M2N3M2N3M2M4M2N3M2NPRT4"}, "image_id": 674, "id": 11673}, {"iscrowd": 0, "category_id": 1, "bbox": [75.0, 439.0, 38.0, 47.0], "area": 1042, "segmentation": {"size": [512, 512], "counts": "knU12k?3N3L3N2M4IAi@a0T?8M2N3L3N2M4M2010O00010O010O010O0N2M4M2N3L3N3L30001M2If@E^?86M3N3LRRW6"}, "image_id": 674, "id": 11674}, {"iscrowd": 0, "category_id": 1, "bbox": [251.0, 468.0, 65.0, 44.0], "area": 1720, "segmentation": {"size": [512, 512], "counts": "`om32m?2N1O2M3N2N2N2N2N1N3N2N2N2N2N1N3N20O1O1O1O1O1O001O1O1OGVO]Ai0b>YO]Ag0b>[O[Ag0d>ZO[Ag0d>[OZAf0e>;N2O11O1O1O1O1ON21O1O1O1O1O001O1O1O1JRAXOo>f0RAZOo>d0SA[Om>d0UA[Ol>c0:N2M3N2N2N1O2M3N2Nf`Q3"}, "image_id": 674, "id": 11675}, {"iscrowd": 0, "category_id": 1, "bbox": [60.0, 486.0, 33.0, 26.0], "area": 576, "segmentation": {"size": [512, 512], "counts": "n_n02k?3N2N2N2M3N2N2N2M3N2O1001O001O00001O001O00001O001O001L3N2N3L3N3LdPa6"}, "image_id": 674, "id": 11676}, {"iscrowd": 0, "category_id": 1, "bbox": [165.0, 502.0, 21.0, 10.0], "area": 116, "segmentation": {"size": [512, 512], "counts": "nob22m?1O1O1N2O1O1O1001O1O1O001O1O1O001O1O001OQ`R5"}, "image_id": 674, "id": 11677}, {"iscrowd": 0, "category_id": 1, "bbox": [55.0, 0.0, 33.0, 19.0], "area": 413, "segmentation": {"size": [512, 512], "counts": "Y`k02l?3L3N3M20001O00O100001O00001O001O001O00001O001ON2M3N2N2N21ON2N3MQPd6"}, "image_id": 675, "id": 11678}, {"iscrowd": 0, "category_id": 1, "bbox": [92.0, 0.0, 26.0, 26.0], "area": 391, "segmentation": {"size": [512, 512], "counts": "bP^11m?2M3N3M2N3L3N2N30O010O00010O010O01O01M2N3L3N3L3N2MloT6"}, "image_id": 675, "id": 11679}, {"iscrowd": 0, "category_id": 1, "bbox": [118.0, 0.0, 9.0, 4.0], "area": 21, "segmentation": {"size": [512, 512], "counts": "PPk11o?001O00001O001OOQ`P6"}, "image_id": 675, "id": 11680}, {"iscrowd": 0, "category_id": 1, "bbox": [160.0, 0.0, 27.0, 17.0], "area": 272, "segmentation": {"size": [512, 512], "counts": "UP`21m?2O2N2N2O001O1O1O001O1O001O1O1OO1O1O1N2O1N2O1O1N2O1OQ`R5"}, "image_id": 675, "id": 11681}, {"iscrowd": 0, "category_id": 1, "bbox": [208.0, 0.0, 9.0, 4.0], "area": 21, "segmentation": {"size": [512, 512], "counts": "PPX31o?001O001O00001ONR`c4"}, "image_id": 675, "id": 11682}, {"iscrowd": 0, "category_id": 1, "bbox": [269.0, 0.0, 47.0, 27.0], "area": 684, "segmentation": {"size": [512, 512], "counts": "P`V41o?1O1O1O001O1O1O1O1O1O001O1O1O1O1O001O1O1O1O1O1O001O1O1O1O1O001OO1O1O1O1N2O2N2N1O2M3N2N2N2N2NioQ3"}, "image_id": 675, "id": 11683}, {"iscrowd": 0, "category_id": 1, "bbox": [220.0, 9.0, 53.0, 67.0], "area": 1852, "segmentation": {"size": [512, 512], "counts": "^Q^31m?3N2N2M2O2N2M3N1O2M3N2N1N3N2N1N3N2N2M2O2N2M3N1O2M3N2N1O200O0100000O010000ROlAOS>0oAOR>OPB1P>LSB3n=KSB4n=KTB3n=JUB3n=KTB3m=KUB4m=JUB3n=KTB3n=JUB3m=LTB3n=KTB3n>M3NV^g3"}, "image_id": 675, "id": 11684}, {"iscrowd": 0, "category_id": 1, "bbox": [461.0, 14.0, 
34.0, 63.0], "area": 1738, "segmentation": {"size": [512, 512], "counts": "SbV77_?;F9F:F:G9K5001O0001O0000000000000001O01O00000000000000N2F:G:E:G9Fi_8"}, "image_id": 675, "id": 11685}, {"iscrowd": 0, "category_id": 1, "bbox": [420.0, 19.0, 28.0, 29.0], "area": 438, "segmentation": {"size": [512, 512], "counts": "nPb63m?1N2N3N1N2N3N1N3M2N2O2M1O010O0003M2O1N3M2N3N1N2N3N1N2Nono0"}, "image_id": 675, "id": 11686}, {"iscrowd": 0, "category_id": 1, "bbox": [60.0, 26.0, 21.0, 17.0], "area": 205, "segmentation": {"size": [512, 512], "counts": "QQn04i?3N2N3O010O01O010O01O01O010O010O0O1M4M2MR_g6"}, "image_id": 675, "id": 11687}, {"iscrowd": 0, "category_id": 1, "bbox": [44.0, 37.0, 33.0, 28.0], "area": 559, "segmentation": {"size": [512, 512], "counts": "fQf02l?3L3N2N3L3N3L3010O00010O010O00010O010O01O01M201O00001M2M4M2M4M2Nc^i6"}, "image_id": 675, "id": 11688}, {"iscrowd": 0, "category_id": 1, "bbox": [88.0, 40.0, 46.0, 61.0], "area": 1689, "segmentation": {"size": [512, 512], "counts": "ZR\\12k?4M2e@Ji>9UAIi>:SAIk>9SAJi>:TAHj>i0M2M3N3M2M4M2M40O00010O010O01O01O010O001M2M3N3L3N3M2M4M2N2M4M2M4M2N2M4M2N3L3N]nl5"}, "image_id": 675, "id": 11689}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 45.0, 38.0, 59.0], "area": 1483, "segmentation": {"size": [512, 512], "counts": "j1V1h>3M2N2M4M2N3O001O01O010O010O00010O010L3N3M2M4M2N2M4M2N3L3N3M2N2M4M2N3L3N3MXn\\7"}, "image_id": 675, "id": 11690}, {"iscrowd": 0, "category_id": 1, "bbox": [137.0, 55.0, 97.0, 82.0], "area": 3278, "segmentation": {"size": [512, 512], "counts": "gbT21m?2O2M3N1N3N2M2O2M3N1N3N1N3M3N1N3N2M2O2M3N11000O0100O0100O01000O01000O0100eN^AW1a>hNaAW1d>10O10O10O01000O01000O01000O010O01000O01000O0jNZAS1i>10O10O10O10O01000O010O10O10O10ON2O02M3N1N3M3N1N3N1N3N2M2O2M3M2O2M3N1NglZ4"}, "image_id": 675, "id": 11691}, {"iscrowd": 0, "category_id": 1, "bbox": [370.0, 62.0, 51.0, 50.0], "area": 1314, "segmentation": {"size": [512, 512], "counts": "bRi51n?2M3N2N1O2M3N2N2N2M2O2N2N2M3N20O10O1N2N2N2O01000O100000O10O100000O10O10O1M3N2TORAc0P?[ORAc0o>[OTAc0V?M3N1O2N2M3N2N1O2MV]]1"}, "image_id": 675, "id": 11692}, {"iscrowd": 0, "category_id": 1, "bbox": [431.0, 81.0, 27.0, 27.0], "area": 371, "segmentation": {"size": [512, 512], "counts": "obg61n?2N2N2N2N2M2O2N2N2N2N2M300O01N2N2N2M3N2N2N2N1O2N2M3NTmj0"}, "image_id": 675, "id": 11693}, {"iscrowd": 0, "category_id": 1, "bbox": [489.0, 81.0, 23.0, 25.0], "area": 397, "segmentation": {"size": [512, 512], "counts": "Scd71l?4K4M3M4K4O110O0001O01O01O01O01O01O0001M2L4M]M"}, "image_id": 675, "id": 11694}, {"iscrowd": 0, "category_id": 1, "bbox": [125.0, 90.0, 16.0, 19.0], "area": 166, "segmentation": {"size": [512, 512], "counts": "Wcn12m?2M3M2N3N1N3M2O11M3N1N3M2N3N1NT]i5"}, "image_id": 675, "id": 11695}, {"iscrowd": 0, "category_id": 1, "bbox": [124.0, 112.0, 72.0, 67.0], "area": 2182, "segmentation": {"size": [512, 512], "counts": "_Tn12m?2M3M2O2M2N3M2O2M3M2O2M2N3M2O2M3M2O2N1010O10O10O010O010O10O010O10O1lN\\Ak0c>TO^Al0c>QO`Ao0_>oNcAQ1g>000O010O0100O0100O010O0100O0100O0100SOSAe0m>ZOTAg0l>VOWAi0Q?0O01N1N3M2O2M3M2O2M2N3M3NRkm4"}, "image_id": 675, "id": 11696}, {"iscrowd": 0, "category_id": 1, "bbox": [316.0, 115.0, 95.0, 80.0], "area": 2960, "segmentation": {"size": [512, 512], "counts": "`Tn43k?2O2M3N1N3b@CV??g@DV?>i@DU?d0N3N1N100O1O2O2M3N1N3N2O01000O010O10O10O10O10OlN_Aj0a>TOaAk0`>RObAo0]>POeAP1NoN`>P1_ARO`>o0]ASOd>U10O0O2M3N1N3RORAi0n>TOUAl0P?O01000O01000O01000O0100O0O2M3UAoNe>Q1ZAQOf>T110O10O0O2N2N101000O01000N1O1N010O02N3N1N3N2M2O2M3N1N3N2M2O2M3N1N3NmZb1"}, "image_id": 675, "id": 11697}, {"iscrowd": 0, "category_id": 1, 
"bbox": [458.0, 116.0, 54.0, 67.0], "area": 1955, "segmentation": {"size": [512, 512], "counts": "WTU72l?2N3M2N3M2N3M2N3M2O2O010O010dAWO_=i0_BZOa=f0\\B\\Od=d0ZB_Of=a0WBAi=`0TBCl=7nAKQ>6lALU>3iA0V>m010O010O0100O010O010M2N3M2N30O010O010ON3M2N3M2N3M2N3M2N3MhK"}, "image_id": 675, "id": 11698}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 125.0, 28.0, 56.0], "area": 987, "segmentation": {"size": [512, 512], "counts": "o3c1[>20010O010O00010O01M2M4M2M3N3L3N3L3N2M4M2N3L3N2M4M2M4Mjka7"}, "image_id": 675, "id": 11699}, {"iscrowd": 0, "category_id": 1, "bbox": [54.0, 134.0, 42.0, 51.0], "area": 1347, "segmentation": {"size": [512, 512], "counts": "jTk04j?2N2h@Id>;XAIe>9YAIe>:XAIe>9XAJe>:XAHf>k0M201O010O0010O0010O0010O010O00010O001L3N2N3L3N3M2M3N3M2M4M2N3L3N2N`k_6"}, "image_id": 675, "id": 11700}, {"iscrowd": 0, "category_id": 1, "bbox": [26.0, 158.0, 25.0, 26.0], "area": 387, "segmentation": {"size": [512, 512], "counts": "_U=3k?2M3N3M2M4M2M40O00010O010O0010O00O2M2M4M2N2M4M2MnZV7"}, "image_id": 675, "id": 11701}, {"iscrowd": 0, "category_id": 1, "bbox": [106.0, 170.0, 66.0, 59.0], "area": 1937, "segmentation": {"size": [512, 512], "counts": "YVe11l?4M2N3M2M4M2N3O0010O01O010O0N2N3M2N3M2N3M2N3N1010O010O01O010O010O010O01O010O010O010O010UO\\A8d>F_A:a>CaA=_>AdA?\\>^OfAb0Z>\\OiAd0V>ZOlAf0h>0O010O01O01OO2M2N3M2N3M2M4M2NbiY5"}, "image_id": 675, "id": 11702}, {"iscrowd": 0, "category_id": 1, "bbox": [286.0, 170.0, 60.0, 56.0], "area": 1736, "segmentation": {"size": [512, 512], "counts": "WV_41m?2N3IK_@7_?L_@6_?6N3M3M2O2M2N3O00100M2N3M20100O0100O010O0100M210O010O01000O010O010OoN\\Af0e>WO^Ai0b>TO`Am0_>ROcAm0h>10O10O010O01M3N1N3M2N3M3M2O2M2N3M3Mgib2"}, "image_id": 675, "id": 11703}, {"iscrowd": 0, "category_id": 1, "bbox": [19.0, 182.0, 22.0, 20.0], "area": 266, "segmentation": {"size": [512, 512], "counts": "Qf92k?4M2M4M2O1010O01O010O01O010O01O01M2M4M2N3LVZ[7"}, "image_id": 675, "id": 11704}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 186.0, 11.0, 16.0], "area": 121, "segmentation": {"size": [512, 512], "counts": "j5=c?10O01O01O010L3N2N3LUZj7"}, "image_id": 675, "id": 11705}, {"iscrowd": 0, "category_id": 1, "bbox": [351.0, 188.0, 33.0, 32.0], "area": 574, "segmentation": {"size": [512, 512], "counts": "^f_52m?1N3M3N1N3M3N1N3N1N3M3O001000O0100O0100O0O2M3M2O2M3N1N3N1N3M3N1Njio1"}, "image_id": 675, "id": 11706}, {"iscrowd": 0, "category_id": 1, "bbox": [400.0, 190.0, 27.0, 26.0], "area": 381, "segmentation": {"size": [512, 512], "counts": "\\VX62m?1N3N2N1N3N2M2O2N2M20100O010O0100N1N3N2M2O2M3N1N3N2MjYZ1"}, "image_id": 675, "id": 11707}, {"iscrowd": 0, "category_id": 1, "bbox": [424.0, 194.0, 56.0, 50.0], "area": 1398, "segmentation": {"size": [512, 512], "counts": "hVd62m?2M2O2M3N1N3M3N1N3N2M2O2M3M20100O01000O01000O01000O01000O0100O01000O01000O01000O001M3N1N3N2M2O2M3N1N3N2M2O2M3NTi?"}, "image_id": 675, "id": 11708}, {"iscrowd": 0, "category_id": 1, "bbox": [23.0, 202.0, 45.0, 54.0], "area": 1328, "segmentation": {"size": [512, 512], "counts": "cg;1l?4M2M4M2M4M2M3N3L3010O01L3N2N3M2M4M2N3L310O00010O010ON3M2M4M2N2M4M2N3M2M4M2N2M4M2N3M2M4M]im6"}, "image_id": 675, "id": 11709}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 210.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "b62\\io7"}, "image_id": 675, "id": 11710}, {"iscrowd": 0, "category_id": 1, "bbox": [255.0, 212.0, 52.0, 57.0], "area": 1611, "segmentation": {"size": [512, 512], "counts": 
"igo31m?2N3M2N3M2N3L3N2N3M2O2O0N3L3N3M2N3M2N3O0010O010O010O010O010O010O00010O0TO`A9`>DbA<^>AfA?Z>^OhAb0X>\\OkAd0U>YOmAg0S>WOoAf0h>M2N3M2N3M2N3L3N3M\\XV3"}, "image_id": 675, "id": 11711}, {"iscrowd": 0, "category_id": 1, "bbox": [492.0, 215.0, 20.0, 66.0], "area": 703, "segmentation": {"size": [512, 512], "counts": "]Xf74j?2N3L3N3M2M3N3M2@XOeAl0X>VOfAl0W>XOfAk0W>WOfAl0X>WOeAk0X>a0M2N3L3O20OXI"}, "image_id": 675, "id": 11712}, {"iscrowd": 0, "category_id": 1, "bbox": [366.0, 229.0, 53.0, 79.0], "area": 2167, "segmentation": {"size": [512, 512], "counts": "YYg51m?3]ONTA4j>NTA5i>NTA4k>MSA6j>MSA5k>MSA6X>_OVBR1h=POVBS1h=oNUBT1h=nNWBS1g=POVBS1g=oNWBS1g=POVBS1g=d0O2M2N3N11N1N3N2M2N3M2N3N1N3M2N3M2N3N2M2N3M2N3M2O2M2N3M2N3M3N1N3M2N3M2N3N1N[X^1"}, "image_id": 675, "id": 11713}, {"iscrowd": 0, "category_id": 1, "bbox": [326.0, 235.0, 27.0, 27.0], "area": 392, "segmentation": {"size": [512, 512], "counts": "jWS52l?3M2O2M2N3M3M2O2N1100O0100O0100O010M2N3N1N3M3M2O2M2N]X_2"}, "image_id": 675, "id": 11714}, {"iscrowd": 0, "category_id": 1, "bbox": [53.0, 237.0, 27.0, 35.0], "area": 518, "segmentation": {"size": [512, 512], "counts": "Whj03j?3N201L3N3L3N2M4M2M4M2O110O010ON2N3L3N3L3N3L3N2M4M2M`hg6"}, "image_id": 675, "id": 11715}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 239.0, 53.0, 52.0], "area": 1454, "segmentation": {"size": [512, 512], "counts": "_h]12k?3N3L3N3L3N3L3N2N3L3N3L3N201O010O010O01O01O010O010O01O0POZAg0e>WO^Ah0c>TO`Am0j>O0010O0010O010O0010O0010M2N3M2M4M2N2M4M2M4Mhgg5"}, "image_id": 675, "id": 11716}, {"iscrowd": 0, "category_id": 1, "bbox": [424.0, 264.0, 44.0, 58.0], "area": 1476, "segmentation": {"size": [512, 512], "counts": "hYd61l?3N2M4_OHVA:h>HUAFVAHUAFVA`0N3L301O01ON3L3N3N11O010O01O01O0N3L3N3M2M3N3L3N3M2M3N3L3N3M2M4M2M3N3L`ge0"}, "image_id": 675, "id": 11717}, {"iscrowd": 0, "category_id": 1, "bbox": [35.0, 269.0, 30.0, 33.0], "area": 589, "segmentation": {"size": [512, 512], "counts": "Tia02l?3M2M4M2M4M2N2M4M2N30O0010O0010O0010O00O2M2M4M2M3N3M2M4M2M^Wo6"}, "image_id": 675, "id": 11718}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 270.0, 11.0, 23.0], "area": 164, "segmentation": {"size": [512, 512], "counts": "^8f0Z?10O000M4M2N3L3N3M2N`Wj7"}, "image_id": 675, "id": 11719}, {"iscrowd": 0, "category_id": 1, "bbox": [213.0, 284.0, 121.0, 82.0], "area": 4191, "segmentation": {"size": [512, 512], "counts": "QjZ32k?3N3L3N3L3N2N3L3N3L3N3L3N2M4M201O010O00010O010O010O00O2M2nN[Ag0h>VOZAg0i>VO[Ai0n>010O010O00010O010O010SAVOb>j0[AYOe>g0YA\\Og>d0VA^Oj>m00O010O00010O010O010O00010O010O0010O0010ON3M2N2N3ON3M2N3L3000^AmNX>S1dAPO\\>P1bASO^>l0`AVO`>U110O0010O0hN`AQ1`>lNcAS1e>010O0010O010O0010O0010O010ON2N3M2N21M2M4M2N2M4M2N3L3N3L3N2N3L3N3M2Mmeh2"}, "image_id": 675, "id": 11720}, {"iscrowd": 0, "category_id": 1, "bbox": [84.0, 285.0, 39.0, 32.0], "area": 729, "segmentation": {"size": [512, 512], "counts": "bYZ11m?2M4M2N3L3N3M2M4M20010O010O010O010O00N3N110O010O0010O010O0010N1M4M2N3M2N3L3NhVR6"}, "image_id": 675, "id": 11721}, {"iscrowd": 0, "category_id": 1, "bbox": [479.0, 303.0, 33.0, 54.0], "area": 1216, "segmentation": {"size": [512, 512], "counts": "_j_71m?2M4M2M4M2g@Dk>?SACk>`0QACl>k0M2O20O00O2M2M4M2N2010O010O00010O010O0N2N3L3N3L3N]F"}, "image_id": 675, "id": 11722}, {"iscrowd": 0, "category_id": 1, "bbox": [18.0, 310.0, 53.0, 72.0], "area": 2001, "segmentation": {"size": [512, 512], "counts": "b[92k?3N2M4M2M4M2M3N3L3N3L3N2N3L3N3L3N2M4M2M4M2M3N3L301O01O0N2N3L3N3L3N2N3L3N2M4M2M4M2M3N3L3N3L3N2M4M2M3N3L3NRVl6"}, "image_id": 675, "id": 11723}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 316.0, 
14.0, 32.0], "area": 228, "segmentation": {"size": [512, 512], "counts": "l9P1Q?M2N3M2N3N1N3M2N3M2N3N2M2Noeh7"}, "image_id": 675, "id": 11724}, {"iscrowd": 0, "category_id": 1, "bbox": [421.0, 324.0, 26.0, 27.0], "area": 428, "segmentation": {"size": [512, 512], "counts": "gjb61m?3L3N3L3N2M4M2N3O010O01O01O010O01O0O2M2N2M4M2N3L3NgUP1"}, "image_id": 675, "id": 11725}, {"iscrowd": 0, "category_id": 1, "bbox": [190.0, 335.0, 27.0, 28.0], "area": 446, "segmentation": {"size": [512, 512], "counts": "P[o23k?2N3L3N3M2M4M2O110O0010O0010O010O0010M2N3L3N2N3L3N3L\\Uc4"}, "image_id": 675, "id": 11726}, {"iscrowd": 0, "category_id": 1, "bbox": [55.0, 336.0, 67.0, 73.0], "area": 2058, "segmentation": {"size": [512, 512], "counts": "Zlk01m?2M4M2N3L3N3O01O01O010O01O01O001N1O1O2KZOm@g0R?[Om@f0Q?5O101N1O2N1O2O0O1M4L3N3L3N1N100O010O01N12002N100O2N1O2O0O1O2L3N3L3N3M2M3N3L3N3L3N2N3L3N3LYeR6"}, "image_id": 675, "id": 11727}, {"iscrowd": 0, "category_id": 1, "bbox": [413.0, 348.0, 26.0, 25.0], "area": 398, "segmentation": {"size": [512, 512], "counts": "\\k^62k?3N3L3N3L3N3N100010O010O01O01O010O0O1N3M2M4M2M3N3MoTT1"}, "image_id": 675, "id": 11728}, {"iscrowd": 0, "category_id": 1, "bbox": [353.0, 353.0, 54.0, 43.0], "area": 1332, "segmentation": {"size": [512, 512], "counts": "gk`54j?2N3L3N2M4M2N3L3N2O20O01O010O01O010O01O01O010O01O010O01O01N110O00010O010O0010O0010O00O2M2N3L3N3L3N2N3L3N3LaTd1"}, "image_id": 675, "id": 11729}, {"iscrowd": 0, "category_id": 1, "bbox": [259.0, 359.0, 31.0, 28.0], "area": 538, "segmentation": {"size": [512, 512], "counts": "^kQ42l?2\\@NX?4f@OV?5f@NZ?=O00010O010O00010O00010O010O00010O01O01O01L3N2M4M2M4Lbd^3"}, "image_id": 675, "id": 11730}, {"iscrowd": 0, "category_id": 1, "bbox": [445.0, 359.0, 59.0, 80.0], "area": 2618, "segmentation": {"size": [512, 512], "counts": "nln64j?2N3L3N3L3O110O010O0N3L3@YOeAi0Y>YOdAk0X>YOeAi0Y>YOeAj0W>a0N3M2O20O0010O010O0010OO1N3L3M4M2O2O010O01O0VOZB[Oe=c0]B]Od=?_BB`=N3L3NRc3"}, "image_id": 675, "id": 11731}, {"iscrowd": 0, "category_id": 1, "bbox": [286.0, 367.0, 28.0, 32.0], "area": 545, "segmentation": {"size": [512, 512], "counts": "U\\_42l?3L3N2N3L3N3L3N3L301O00010O010O010O01M2N2M4M2M4M2N3L3N\\dR3"}, "image_id": 675, "id": 11732}, {"iscrowd": 0, "category_id": 1, "bbox": [175.0, 371.0, 50.0, 78.0], "area": 1962, "segmentation": {"size": [512, 512], "counts": "[mg21l?4M2M4M2M3N3L3N3L3N2YASOX>Q1eAQOY>Q1dASOX>Q1eAQOY>\\1M4M2M4L3N2M4M2M4O01M2M3M4M2M4M2M3N3L3N3L3M3N3L3N3L3N2M4L3N3L3N2M4M2M4MTT_4"}, "image_id": 675, "id": 11733}, {"iscrowd": 0, "category_id": 1, "bbox": [232.0, 372.0, 22.0, 25.0], "area": 316, "segmentation": {"size": [512, 512], "counts": "V\\d32l?2M3N3M2M4M2N3M21O01O010O01N1M3N3L3N3M2M4MXdP4"}, "image_id": 675, "id": 11734}, {"iscrowd": 0, "category_id": 1, "bbox": [258.0, 388.0, 22.0, 19.0], "area": 256, "segmentation": {"size": [512, 512], "counts": "^\\Q42k?3N3M2M4O000010O010O010O00010O010O0N3L3N2Nhcc3"}, "image_id": 675, "id": 11735}, {"iscrowd": 0, "category_id": 1, "bbox": [337.0, 388.0, 33.0, 26.0], "area": 525, "segmentation": {"size": [512, 512], "counts": "elX51l?3N3L3N3M2M4M201O01O01O010O010O010O01M2N201O010O01N10N2N3M2M4M2NfcV2"}, "image_id": 675, "id": 11736}, {"iscrowd": 0, "category_id": 1, "bbox": [385.0, 408.0, 47.0, 34.0], "area": 713, "segmentation": {"size": [512, 512], "counts": "YmP61m?2M4M2M4M2N2M4O0010O01O01O010O01O01O010O010O0]Og@`0\\?N30O01O0C`@;c?O01O010O01O01O010O01O010O01OO2L3N3LdbW1"}, "image_id": 675, "id": 11737}, {"iscrowd": 0, "category_id": 1, "bbox": [224.0, 409.0, 44.0, 57.0], "area": 1404, 
"segmentation": {"size": [512, 512], "counts": "V^`32k?4M2M3N3L3N3L3N2M4M2N3L3N2M4M2M4M2M310O0010O0010O01M2N3M2M3N3L3N3L3N3L3N2M4M2M4M2M4M2Mobi3"}, "image_id": 675, "id": 11738}, {"iscrowd": 0, "category_id": 1, "bbox": [264.0, 414.0, 63.0, 66.0], "area": 1974, "segmentation": {"size": [512, 512], "counts": "`^T41l?3N3L3N2M4M2O20O010O00010O010OO1N3M2M4M2M4M2N2M4M2M4M2O1010ON3L3N3L3O1010M2M3N3M2M4M2N3L3N2M4M2N3L3N2M4M2N3L3N3O00001M2N3L3NiRl2"}, "image_id": 675, "id": 11739}, {"iscrowd": 0, "category_id": 1, "bbox": [321.0, 436.0, 48.0, 63.0], "area": 1560, "segmentation": {"size": [512, 512], "counts": "onP52m?2O2M2N2N2N2N3N1ZO_OjAc0S>_OlAc0Q>@mAb0o=AoAc0m=@PBc0l=@QBd0l=^ORBe0k=^OQBe0P>d01O1O2O0O2N1O101N1O2N101N1O1O2N1M4M2N2M4M2M4M2N3L3N2M4M2N3L3NQRW2"}, "image_id": 675, "id": 11740}, {"iscrowd": 0, "category_id": 1, "bbox": [428.0, 442.0, 24.0, 27.0], "area": 372, "segmentation": {"size": [512, 512], "counts": "^^f61m?2M4M2N3M2M3N3M2N30O010O0010O01M2N2N3L3N3M2N3LRbm0"}, "image_id": 675, "id": 11741}, {"iscrowd": 0, "category_id": 1, "bbox": [480.0, 457.0, 14.0, 15.0], "area": 138, "segmentation": {"size": [512, 512], "counts": "b^`73k?2M4M2N2010O010O000O2M2M4Mea8"}, "image_id": 675, "id": 11742}, {"iscrowd": 0, "category_id": 1, "bbox": [373.0, 470.0, 38.0, 42.0], "area": 1056, "segmentation": {"size": [512, 512], "counts": "moj53j?3ZON\\A4b>N[A5b>O[A3c>OZA4c>O[A3c>0YA3e>e0001O001O001O00001O001O001O00001O001N1M4M2M3N3L3N3L3N2M4M2MQQb1"}, "image_id": 675, "id": 11743}, {"iscrowd": 0, "category_id": 1, "bbox": [472.0, 473.0, 20.0, 22.0], "area": 257, "segmentation": {"size": [512, 512], "counts": "Y_\\71l?3N3L3M4L3N3O0010O0010O00N3L3N3M2M4M2NTa9"}, "image_id": 675, "id": 11744}, {"iscrowd": 0, "category_id": 1, "bbox": [424.0, 476.0, 37.0, 36.0], "area": 839, "segmentation": {"size": [512, 512], "counts": "n_d62l?2M3N2M3N2M3N2N2M3N2M3N2M3N2001O00001O001O001O00001O001XOPA>P?_OTA`0l>ASAZ?M2M4M2M3NdPi0"}, "image_id": 675, "id": 11745}, {"iscrowd": 0, "category_id": 1, "bbox": [477.0, 505.0, 27.0, 7.0], "area": 95, "segmentation": {"size": [512, 512], "counts": "no^72l?2N2O1001O001O00001O001O001O00001OO1N21O00001O001O00Q`3"}, "image_id": 675, "id": 11746}, {"iscrowd": 0, "category_id": 1, "bbox": [394.0, 6.0, 37.0, 36.0], "area": 861, "segmentation": {"size": [512, 512], "counts": "RQU61j?6K4N2IFh@:T?Jm@5n>0RA1l>0TA0l>1SAOn>0o@3Q??O000001O01O0001O01O000001O01O0001O0@TAMk>1WAOi>1WAOi>1WAKn>4RAGS?:9O1K6Kg_X1"}, "image_id": 676, "id": 11747}, {"iscrowd": 0, "category_id": 1, "bbox": [312.0, 25.0, 48.0, 88.0], "area": 2624, "segmentation": {"size": [512, 512], "counts": "WSl44h?4K5K6K4K5K5L5J5L4K5K5L5J5L4K5K6K4000000010O00000010O000000O2J5L4K5K6K4K5L4K5K6K4K5L4K5K6K4K5LUo[2"}, "image_id": 676, "id": 11748}, {"iscrowd": 0, "category_id": 1, "bbox": [163.0, 111.0, 52.0, 52.0], "area": 2068, "segmentation": {"size": [512, 512], "counts": "Vda26e?:I5K4L3N3M2M3N2O1N2N1O2O1N101N10001N100000001O0O10000000O2O00000O2O001N101N101N2N101N2N2M4M2N3L4L5J8Fn[d4"}, "image_id": 676, "id": 11749}, {"iscrowd": 0, "category_id": 1, "bbox": [266.0, 116.0, 35.0, 36.0], "area": 910, "segmentation": {"size": [512, 512], "counts": "`TU42i?5K5K6J5L4L401O01O000001O01O000001O01O000001O01O000001O01M2K5K5K6J5LZ\\Y3"}, "image_id": 676, "id": 11750}, {"iscrowd": 0, "category_id": 1, "bbox": [482.0, 149.0, 29.0, 17.0], "area": 298, "segmentation": {"size": [512, 512], "counts": "oTa71k?4K6N11O0001O01O000001O01O0001O01O000001O01O0001O01N1L4KZ;"}, "image_id": 676, "id": 11751}, {"iscrowd": 0, "category_id": 1, "bbox": [310.0, 152.0, 
17.0, 23.0], "area": 267, "segmentation": {"size": [512, 512], "counts": "ZUk42i?5L5J5L4000001O01O0001OO1L5J5L4K[[l2"}, "image_id": 676, "id": 11752}, {"iscrowd": 0, "category_id": 1, "bbox": [331.0, 157.0, 101.0, 41.0], "area": 1955, "segmentation": {"size": [512, 512], "counts": "^eU52i?6K4K5M3010O0000010O000000010O0000010O00000010O00000010O00000010O00000010O00000010O00000010O00000010O00000010O00000010O00000010O00000010O00000010O00000010O00000010O0000010O000000010O0000010O000N2K6K4KbjW1"}, "image_id": 676, "id": 11753}, {"iscrowd": 0, "category_id": 1, "bbox": [3.0, 160.0, 26.0, 27.0], "area": 542, "segmentation": {"size": [512, 512], "counts": "Ye19e?4M3L3N2O1N1O2O001N1000000001N10001N101N2N2N2N3L6IgZa7"}, "image_id": 676, "id": 11754}, {"iscrowd": 0, "category_id": 1, "bbox": [257.0, 165.0, 33.0, 26.0], "area": 613, "segmentation": {"size": [512, 512], "counts": "feP43h?5K5K6M2001O0001O0001O0001O0001O01O000001O01O000001O01O0000K6J5Kjj^3"}, "image_id": 676, "id": 11755}, {"iscrowd": 0, "category_id": 1, "bbox": [479.0, 167.0, 28.0, 19.0], "area": 394, "segmentation": {"size": [512, 512], "counts": "]e_76d?7O000001O000001O3M01O0001O00000001O0001O00000001O0N2JkZ2"}, "image_id": 676, "id": 11756}, {"iscrowd": 0, "category_id": 1, "bbox": [41.0, 169.0, 27.0, 27.0], "area": 559, "segmentation": {"size": [512, 512], "counts": "ded05h?6K4M2N2N2N2O1N10001O0O1001O00000O101O0O2N2O1N2M4M3K`jm6"}, "image_id": 676, "id": 11757}, {"iscrowd": 0, "category_id": 1, "bbox": [504.0, 176.0, 8.0, 39.0], "area": 177, "segmentation": {"size": [512, 512], "counts": "`Vl75f?5L5J5K5L4K5L5_J"}, "image_id": 676, "id": 11758}, {"iscrowd": 0, "category_id": 1, "bbox": [87.0, 178.0, 33.0, 33.0], "area": 831, "segmentation": {"size": [512, 512], "counts": "oe[17f?6K4L3N3M101N2N2O0O2O000O10001O0000O2O00000O2O0O2O1N2N2N2N2N3K7ISjS6"}, "image_id": 676, "id": 11759}, {"iscrowd": 0, "category_id": 1, "bbox": [140.0, 190.0, 31.0, 32.0], "area": 775, "segmentation": {"size": [512, 512], "counts": "YVV2;b?5M3M2N2N2N2N101N10001O000O1001O0000000O2O0O2O1N2N2N2N2N3L8FfYZ5"}, "image_id": 676, "id": 11760}, {"iscrowd": 0, "category_id": 1, "bbox": [373.0, 205.0, 20.0, 45.0], "area": 472, "segmentation": {"size": [512, 512], "counts": "fgj52i?5K5L4K5K6K4K5L4L50O0L4L4K5L5J5K5L4K6JeYk1"}, "image_id": 676, "id": 11761}, {"iscrowd": 0, "category_id": 1, "bbox": [47.0, 232.0, 21.0, 20.0], "area": 313, "segmentation": {"size": [512, 512], "counts": "fgg03g?6K5M4O01O000001O01O000000010O00000M3K5Kjhm6"}, "image_id": 676, "id": 11762}, {"iscrowd": 0, "category_id": 1, "bbox": [68.0, 234.0, 34.0, 27.0], "area": 616, "segmentation": {"size": [512, 512], "counts": "kWR13i?4L5K4L4O11O01O0001O01O0001O01O0001O01O0000010O0000010O0000K6K4L4Lch\\6"}, "image_id": 676, "id": 11763}, {"iscrowd": 0, "category_id": 1, "bbox": [402.0, 239.0, 34.0, 27.0], "area": 650, "segmentation": {"size": [512, 512], "counts": "QXY63h?6K4K5L40010O000001O01O000001O01O0000010O000001O01O000001O0L4K5K5L_hU1"}, "image_id": 676, "id": 11764}, {"iscrowd": 0, "category_id": 1, "bbox": [365.0, 255.0, 27.0, 33.0], "area": 601, "segmentation": {"size": [512, 512], "counts": "jhf51j?6K4K5L4K6K400001O01O0000010O000001O01ON2L4L5J5L4K5LQhk1"}, "image_id": 676, "id": 11765}, {"iscrowd": 0, "category_id": 1, "bbox": [409.0, 265.0, 16.0, 22.0], "area": 245, "segmentation": {"size": [512, 512], "counts": "hh\\65g?4K5K5O1010O000000010O0L4K5L4KjW[1"}, "image_id": 676, "id": 11766}, {"iscrowd": 0, "category_id": 1, "bbox": [440.0, 269.0, 72.0, 50.0], "area": 2485, "segmentation": {"size": 
[512, 512], "counts": "^Yl62j?4K5L4K6K4K5K5O101O01O0001O0001O0001O01O0001O0001O0001O0001O0001O01O0001O0001O0001O01O0001O0001O0001O01O0001O0001O0001O0001O0001O01O0001O0001OUG"}, "image_id": 676, "id": 11767}, {"iscrowd": 0, "category_id": 1, "bbox": [271.0, 292.0, 103.0, 150.0], "area": 8426, "segmentation": {"size": [512, 512], "counts": "\\mW42j?4K6K4K5M301O0N2K5L4K6J5L4K5L5J5L4K5L4K6J5L4K5L5J5L4K5L4K6K4K5K5M301O0001O01O000001O01O0000010O000000010ON2L4K5L5J5K5L4K6K4K5L4K5L5J5L4K5K5L5J5L4O10010O000001O01O000001O01O0001OO1L5J5M30001L3L4K5L4K6K4K5LbfT2"}, "image_id": 676, "id": 11768}, {"iscrowd": 0, "category_id": 1, "bbox": [3.0, 362.0, 38.0, 30.0], "area": 821, "segmentation": {"size": [512, 512], "counts": "nk13h?5L5J5L4N20001O0001O0001O01O0001O0001O0001O01O0001O0001O0001O01O0O1L4K5L5JdT[7"}, "image_id": 676, "id": 11769}, {"iscrowd": 0, "category_id": 1, "bbox": [52.0, 365.0, 97.0, 51.0], "area": 2980, "segmentation": {"size": [512, 512], "counts": "Y\\j03h?5L4L5J5L4K5O2O000000010O0000010O000000010O0000010O00000010O00000010O0000010O000000010O0000010O000000010O0000010O000000010O0000010O000000010O0000010O000000010O0000010O000000010O00N2L5J5L4L4K5LTTe5"}, "image_id": 676, "id": 11770}, {"iscrowd": 0, "category_id": 1, "bbox": [208.0, 384.0, 31.0, 43.0], "area": 838, "segmentation": {"size": [512, 512], "counts": "T]X31j?5J6K5K6J5K5000000O2K4001O0001O00100O000000iN]A71_O^Aa0P?000000O2I6K5K]SX4"}, "image_id": 676, "id": 11771}, {"iscrowd": 0, "category_id": 1, "bbox": [372.0, 423.0, 140.0, 71.0], "area": 5622, "segmentation": {"size": [512, 512], "counts": "Z^j54h?5K4K5L4K6K4K5L400010O000001O01O0001O01O0000010O000001O01O0000010O000001O01O0000010O000001O01O0001O01O0000010O000001O01O0000010O000001O01O0000010O000001O01O0000010O000001O01O0001O01O0000010O000001O01O0000010O000001O01O0000010O000001O01O0001O01O0000010O000001O01O0000010O000001O0lA"}, "image_id": 676, "id": 11772}, {"iscrowd": 0, "category_id": 1, "bbox": [72.0, 0.0, 298.0, 225.0], "area": 30940, "segmentation": {"size": [512, 512], "counts": "^PT12m?2N2N2M3N1O2N2N2M3N2N1O2O1O1O1O1O001O1O1O1O1O001O1O1O1O1O1O001O1O1O1O1O001O1O1O1O1O001O1O1O1O1O001O1O1O1O1O001O1O1O1O1O1O001O1O1O1O1O001O1O1O1O1O001O1O1O1O1O001O1O1O1O1O001O1O1O1O1O001O1O1O1O1O1O001O1O1O1O1O001O1O1O1O1O001O1O1O1O1O001O1O1O1O1O001O1O1O1O1O1O001O1O1O1O1O001O1O1O1O1O001^DkKW;V4gDlKX;U4fDmKY;T4eDnKZ;]4O1O001O1O1O1O1O1O001O1O1O1O1O001O1O1O1O1O001O1O1O1O1O001O1O1O1O1O001O1O1O1O1TF\\JZ9d5eF^JZ9c5dF^J\\9c5bF_J]9b5aF`J^9a5_FbJa9^5]FdJc9\\5[FfJe9Y5ZFiJf9h51000O1000O1M3N2N1O2N2N2M3N20O100000O1000O10AiETKW:j4kEVKU:h4mEXKS:f4nEZKS:c4PF]Ko9b4SF^Km9`4UF`Kk9^4WFbKi9\\4YFdKg9Y4[FgKf9W4\\FiKd9U4^FkKa9T4aFlK_9R4cFnK]9o3fFQLZ9m3gFSLZ9k3hFULX9i3jFWLU9h3mFXLS9e3PG[LP9c3RG]Ln8a3TG^Lm8`3TGaLl8]3VGaLk8^3WG`Lk8]3XGaLj8]3XG`Lk8^3WG`Lk8^3VGaLl8]3l1N3N2N2N2N2M2O2N2N2N2M3N2N1O2N2M3N2N2N1O2N2M3N2000O10O1EQBdNo=Y1TBgNl=W1UBjNk=T1WBlNh=S1ZBkNh=S1ZBkNh=S1ZBjNi=S1ZBkNh=S1d0N1O2N2M3N2N2N2N1N3N2N2N2N2M2O2N2NVmV2"}, "image_id": 677, "id": 11773}, {"iscrowd": 0, "category_id": 1, "bbox": [448.0, 0.0, 21.0, 12.0], "area": 134, "segmentation": {"size": [512, 512], "counts": "PPP71o?001O1O1O1O1O1O1O1O001O1O1ON2O1O1O1O2N2No_e0"}, "image_id": 677, "id": 11774}, {"iscrowd": 0, "category_id": 1, "bbox": [401.0, 16.0, 34.0, 36.0], "area": 622, "segmentation": {"size": [512, 512], "counts": "UaX62m?2M3N2N1O2N2N2M3N2N2N1O2M3N2N2N000002M3N2N1O2N2M3N2N2N2N1O2M3N2N2NT_V1"}, "image_id": 677, "id": 11775}, {"iscrowd": 0, "category_id": 1, "bbox": [43.0, 39.0, 34.0, 33.0], "area": 538, "segmentation": {"size": [512, 512], 
"counts": "dae01m?3N2N2N1O2N2M3N2N2N1O20000000000O010000000000O0O2N2M3N2N2N1O2M3N2NW^i6"}, "image_id": 677, "id": 11776}, {"iscrowd": 0, "category_id": 1, "bbox": [62.0, 60.0, 56.0, 53.0], "area": 1475, "segmentation": {"size": [512, 512], "counts": "`Ro02m?2M3N1O2N2N2M3N1O2N2M3N2N1PAVOi>l0UAVOh>S1N1010000O10O10000N1O2N200000O0100000000O0100000O1M2O2N2N2N2M3N1O2N2M3N2N1O2M3N2NVmT6"}, "image_id": 677, "id": 11777}, {"iscrowd": 0, "category_id": 1, "bbox": [110.0, 98.0, 16.0, 16.0], "area": 133, "segmentation": {"size": [512, 512], "counts": "ZSg11n?2N2N1N3N2N2N20O10OO2M3N2N2N2MilP6"}, "image_id": 677, "id": 11778}, {"iscrowd": 0, "category_id": 1, "bbox": [88.0, 118.0, 31.0, 27.0], "area": 466, "segmentation": {"size": [512, 512], "counts": "TT\\11n?1N3N2N2N2N2M2O2N2N2M3O01000O10O1N2M0100000O3N2N2N1O2M3N2N2NT\\T6"}, "image_id": 677, "id": 11779}, {"iscrowd": 0, "category_id": 1, "bbox": [28.0, 143.0, 190.0, 151.0], "area": 7461, "segmentation": {"size": [512, 512], "counts": "TU>1n?2N2N2N2M2O2N2N2M3N2N1O2N200O10O10000000O10O10O1M3N2N1O2N2M3N2N2N1N3N2O100000O10O100000O1000O100000O10O10000000O10O100000O1000O100000O10O10000000O10O100000O10O10000000O10O100000O1000O100000O10O100000UOdA1\\>MfA2[>KgA6X>IjA7V>GlA9T>EnA;R>CPBAQB`0o=^OSBb0l=]OVBc0j=[OXBe0h=XO[Bh0_>O01000000000O010000000O01000000000O0100000000O0O2N2N2M3N1O2N2N2M3N2N1O2N2M3N2N2N1N3N200O10O1O1N2N1N3N2N2N2N2M2O2N2N2N2M2O2N2N2M3N2N1O2N2M3N2N2N1N3N2N2N2N1NShb4"}, "image_id": 677, "id": 11780}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 162.0, 194.0, 247.0], "area": 27010, "segmentation": {"size": [512, 512], "counts": "f5i4U;3N2N1O2N2M3N2N2N1N3N2N2N1O1N3N20O1000O100000O1000O100000O1000O100000O10O100000O1000O100000O1000O100000O1000O100000O1000O100000O1000O100000O1000O1000O100000O1000O1000O100000O1000O1000N2N2N2M2O2N2N2N2M3N10100000000O01000000000O001N2N2N2M3N1O2N2N2M3N1O2N2N2M3N2N1O2M3N2N2N2N1N3N2N2N2N2M2O2N2N2N2M3N1O2N2N2M3N2N1O2M3N2N2N2N1N3N2N2N2N2M2O2N2N2N2M2O2N2N2N2M3N1O2N2N2M3N2N1O2M3N2N2N2N1N3N2N2N\\fn4"}, "image_id": 677, "id": 11781}, {"iscrowd": 0, "category_id": 1, "bbox": [201.0, 180.0, 67.0, 64.0], "area": 2082, "segmentation": {"size": [512, 512], "counts": "^fT32m?2N2N2N2M2O2N2N2N2N2M3N1O2N2N2N2M3N1O2N2N2N2N2O10O1000000000O10O1000000000O10O1000000N2N1O2N2N2N2M3N1O2N2N2N2M3N2N1O2N2N2M3N2N2N1O2NYii3"}, "image_id": 677, "id": 11782}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 372.0, 93.0, 115.0], "area": 5911, "segmentation": {"size": [512, 512], "counts": "T2N2M2O2N2N2M3N2N1O2N2M3N2O01000000000O010000000O01000000000O010000000O01000000000O01000000000O010000O1M2O2N2N2N2M3N1O2N2M3N2N1O2N2M3N2N2N1O2M3N2N2N1N3N2N2N2N2M2O2N2N2M3N1O2N2N2M3N2N1O]Ra6"}, "image_id": 677, "id": 11783}, {"iscrowd": 0, "category_id": 1, "bbox": [301.0, 506.0, 211.0, 6.0], "area": 1188, "segmentation": {"size": [512, 512], "counts": "kof41o?4L00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000O100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"}, "image_id": 677, "id": 11784}, {"iscrowd": 0, "category_id": 1, "bbox": [478.0, 377.0, 34.0, 33.0], "area": 1003, "segmentation": {"size": [512, 512], "counts": "b\\_76Y?a0H8000000001O000000000000000001O00000000000001O000000000000000001TD"}, "image_id": 678, "id": 11785}, {"iscrowd": 0, 
"category_id": 1, "bbox": [488.0, 451.0, 24.0, 42.0], "area": 952, "segmentation": {"size": [512, 512], "counts": "T^d7=c?d0\\O8H00000000000O10000000O10000000000000000000mA"}, "image_id": 678, "id": 11786}, {"iscrowd": 0, "category_id": 1, "bbox": [430.0, 76.0, 39.0, 43.0], "area": 894, "segmentation": {"size": [512, 512], "counts": "YSg62l?2N3M3N1N3M2N3N1N3M3M2N3N1N3M2N3O00100O010ON3M2N3M3N1N3M2N3N1N3M3M2N3N1N3M2N[]e0"}, "image_id": 680, "id": 11787}, {"iscrowd": 0, "category_id": 1, "bbox": [487.0, 91.0, 25.0, 34.0], "area": 674, "segmentation": {"size": [512, 512], "counts": "kbc7d0\\?001O00000001O000000000=C00000000000000000000000TM"}, "image_id": 680, "id": 11788}, {"iscrowd": 0, "category_id": 1, "bbox": [460.0, 204.0, 35.0, 19.0], "area": 541, "segmentation": {"size": [512, 512], "counts": "^VV7;e?6J00000000000O10O100000000000000000O10O1000000000000000004G50000O4M]Y8"}, "image_id": 680, "id": 11789}, {"iscrowd": 0, "category_id": 1, "bbox": [507.0, 250.0, 5.0, 11.0], "area": 34, "segmentation": {"size": [512, 512], "counts": "mgm73l?2O1N3M2UH"}, "image_id": 682, "id": 11790}, {"iscrowd": 0, "category_id": 1, "bbox": [338.0, 0.0, 27.0, 11.0], "area": 148, "segmentation": {"size": [512, 512], "counts": "PPY51o?2N2N2N2N2NO100O100O100O100O100O100O100O100O10000O10P`Y2"}, "image_id": 683, "id": 11791}, {"iscrowd": 0, "category_id": 1, "bbox": [400.0, 0.0, 65.0, 57.0], "area": 1943, "segmentation": {"size": [512, 512], "counts": "nPX63l?2V@Le?9O2M2N2N2N2N3N1N2N2N2N3M2O1N2N2N2N1O01O00000001O01O00000001O01O000000100O1O1O1O1O100O1O1O1O1O100O2N2N2N3M2O1N2N2N3M2N2O1N2N3M`_g0"}, "image_id": 683, "id": 11792}, {"iscrowd": 0, "category_id": 1, "bbox": [110.0, 1.0, 50.0, 65.0], "area": 1873, "segmentation": {"size": [512, 512], "counts": "iQg12j?4M3L5jNG_B=a=HVBMeA8Z>g000O2O00001O0O10001O000O2O00001O0O10M3K5L4K4M4K5L4K5M210O1000O11O1N100Fi@KW?5n@FR?:911N5L5Kdo_5"}, "image_id": 683, "id": 11793}, {"iscrowd": 0, "category_id": 1, "bbox": [165.0, 3.0, 43.0, 43.0], "area": 873, "segmentation": {"size": [512, 512], "counts": "b`b22l?3N2N2N1N3N2N2N2M2O2N2N2000Oo@WOk>j0SAXOm>m0100000O10O1000O1000N2N1O2N2N2N1N3N2N2N2N1O2M3N2N1O2NWog4"}, "image_id": 683, "id": 11794}, {"iscrowd": 0, "category_id": 1, "bbox": [368.0, 10.0, 16.0, 21.0], "area": 188, "segmentation": {"size": [512, 512], "counts": "iPh52l?3L3N3L3N2M4O010OO2M2M3N3L3N3Leoo1"}, "image_id": 683, "id": 11795}, {"iscrowd": 0, "category_id": 1, "bbox": [501.0, 11.0, 11.0, 28.0], "area": 308, "segmentation": {"size": [512, 512], "counts": "[`j7l0T?0000000000000000000E"}, "image_id": 683, "id": 11796}, {"iscrowd": 0, "category_id": 1, "bbox": [315.0, 13.0, 24.0, 24.0], "area": 439, "segmentation": {"size": [512, 512], "counts": "m`m45f?5K5K5O2O000000010O000000010O0000000001K4K5K5Jf_f2"}, "image_id": 683, "id": 11797}, {"iscrowd": 0, "category_id": 1, "bbox": [226.0, 19.0, 25.0, 27.0], "area": 504, "segmentation": {"size": [512, 512], "counts": "UQa35f?6I6K5N2000010O000000010O000000010O000000K6J5J6K^_R4"}, "image_id": 683, "id": 11798}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 34.0, 6.0, 24.0], "area": 74, "segmentation": {"size": [512, 512], "counts": "R1h0\\?K6K4L4L4KZnl7"}, "image_id": 683, "id": 11799}, {"iscrowd": 0, "category_id": 1, "bbox": [464.0, 44.0, 18.0, 14.0], "area": 154, "segmentation": {"size": [512, 512], "counts": "dQX71m?2N3L3O2O00010O010O00010O01O0N3M2Mbn>"}, "image_id": 683, "id": 11800}, {"iscrowd": 0, "category_id": 1, "bbox": [279.0, 46.0, 27.0, 26.0], "area": 462, "segmentation": {"size": [512, 512], "counts": 
"Pb[42j?4M4L3L4M3O2O00010O000010O000010O00010O00N2L5L3M3L5L_nV3"}, "image_id": 683, "id": 11801}, {"iscrowd": 0, "category_id": 1, "bbox": [178.0, 53.0, 36.0, 57.0], "area": 1479, "segmentation": {"size": [512, 512], "counts": "USi25f?5J6K5J6K5J7J5K5K5000010O0000000010O00000000010O00000O1J6K6I6J6J6J6J6J]nd4"}, "image_id": 683, "id": 11802}, {"iscrowd": 0, "category_id": 1, "bbox": [460.0, 54.0, 18.0, 14.0], "area": 141, "segmentation": {"size": [512, 512], "counts": "mQV71m?3L3N2010O010O00010O010O01O01N1M4MVn`0"}, "image_id": 683, "id": 11803}, {"iscrowd": 0, "category_id": 1, "bbox": [424.0, 58.0, 49.0, 36.0], "area": 976, "segmentation": {"size": [512, 512], "counts": "cRd61m?2N3M2N3L3NO0101N3N2N3O010O01O01O010O01O01O010N1N3M20010O010O00010OVOQAc0M[OW?g00010O010O00O2M2M4M2N3L3N2Mj]c0"}, "image_id": 683, "id": 11804}, {"iscrowd": 0, "category_id": 1, "bbox": [218.0, 65.0, 50.0, 63.0], "area": 1593, "segmentation": {"size": [512, 512], "counts": "`S]32l?3M3M2N3N1N3M2N3M3M2N3M2N3M2O2M3M2N3M2N3M2N3M3M2O02M2N3M3M2N3M2N3M2N3M2N3M2N3M2N3M2O2M2N3M2N3M2N3Mdmi3"}, "image_id": 683, "id": 11805}, {"iscrowd": 0, "category_id": 1, "bbox": [384.0, 67.0, 15.0, 13.0], "area": 104, "segmentation": {"size": [512, 512], "counts": "ZRP62m?2N3N1N1O010O01O01O01O2O2M2Nj]h1"}, "image_id": 683, "id": 11806}, {"iscrowd": 0, "category_id": 1, "bbox": [481.0, 81.0, 31.0, 45.0], "area": 805, "segmentation": {"size": [512, 512], "counts": "Tc`71n?3N1N3N2M3N2M2O2M3M3N2M2O2M3NO010O01O010O010O01O010O01N101O0`M"}, "image_id": 683, "id": 11807}, {"iscrowd": 0, "category_id": 1, "bbox": [280.0, 85.0, 36.0, 59.0], "area": 1522, "segmentation": {"size": [512, 512], "counts": "ST\\44g>OYB3g=6mAMS>9eAJY>7dAL\\>4bAO]>1`A2Z>\\OgAa0M7[>j001O00001O0O2O001O00001O001OO0O2N2M3N2N2M3N2N2M8I9G:F9FclQ3"}, "image_id": 683, "id": 11808}, {"iscrowd": 0, "category_id": 1, "bbox": [347.0, 98.0, 93.0, 96.0], "area": 3599, "segmentation": {"size": [512, 512], "counts": "Ue]53l?2M2O2N2M3`NDYC=fhikkoQ=EkB>T=CjB?T=CjB?T=CiB?V=DgB>X=CeB`0Y=T101001O0O10000O2`NeB8[=FfB;Z=ChB=X=@kB?W=^OkBb0U=oNgBJ6X1S=mNZCR1gVO[Aj0e>SO^Am0j>01000O01MMUOVAi0j>XOVAg0j>[OVAb0l>_OTA?l>BTAFSA8m>JSA4m>MTA0n>1QANo>4QAIP?8QAGn>;QAGm>:QAHo>8o@JR?b001O00010O0001O01O01O0O1N2N3N1N2N3M2N2O2M2N2N2O2M[jS1"}, "image_id": 683, "id": 11809}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 103.0, 15.0, 36.0], "area": 421, "segmentation": {"size": [512, 512], "counts": "Y3R1n>000O1000O1000O13M5J6K5K5K5Km[h7"}, "image_id": 683, "id": 11810}, {"iscrowd": 0, "category_id": 1, "bbox": [397.0, 109.0, 81.0, 62.0], "area": 2255, "segmentation": {"size": [512, 512], "counts": "icV61n?2N2N3M2O1N2N3M2N2O1N3M2N2N200010O0000010O00N2N3M2N2O1O2O000001O01O0001O01O000N2N010O000001O3N1N2N2N2N3N1N2N1O1O01O000001O0001O000001O011N2N2N2N3N1N2N2N3M2O1N2Nhk`0"}, "image_id": 683, "id": 11811}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 148.0, 30.0, 80.0], "area": 1283, "segmentation": {"size": [512, 512], "counts": "d4`2`=01M2M4M2N2M4M2N3M2M4M2N2M4M2N3L3N3M2N2M4M2N3L3N3M2N3L3N2NRk`7"}, "image_id": 683, "id": 11812}, {"iscrowd": 0, "category_id": 1, "bbox": [87.0, 185.0, 49.0, 52.0], "area": 1643, "segmentation": {"size": [512, 512], "counts": "mf[15g?5K4L4K5L5K4L4K5N210O0000010O00000010O00000010O0\\AhN`>\\1010O0000001L3VO\\A8d>D`A=_>^OfAb0[>YOiAg0h>010O000001O01O00M4J5L4L4L`ik5"}, "image_id": 683, "id": 11813}, {"iscrowd": 0, "category_id": 1, "bbox": [48.0, 192.0, 28.0, 28.0], "area": 469, "segmentation": {"size": [512, 512], "counts": "bVh02l?2M4M2N3L3N2N3M210O010O00010O010O010O0001L3N3M2M4M2N2Mkii6"}, 
"image_id": 683, "id": 11814}, {"iscrowd": 0, "category_id": 1, "bbox": [502.0, 197.0, 10.0, 19.0], "area": 115, "segmentation": {"size": [512, 512], "counts": "[Vk72m?3M2O1N3M2N2N2O2OOjI"}, "image_id": 683, "id": 11815}, {"iscrowd": 0, "category_id": 1, "bbox": [161.0, 201.0, 43.0, 65.0], "area": 2078, "segmentation": {"size": [512, 512], "counts": "Ph`24n>0gA5T>0gA5S>1hA4S>1hA5T>NgA7Y>h000N2L50O000000010O000000010O000000010O00O11O01O0001N1K5K6I6K5K5K6J5K5K5Keii4"}, "image_id": 683, "id": 11816}, {"iscrowd": 0, "category_id": 1, "bbox": [213.0, 218.0, 50.0, 55.0], "area": 1710, "segmentation": {"size": [512, 512], "counts": "ThZ31k?4K5L5J5K5L4K5K6K4O100010O00000010O000000010O00000010O`AdN\\>`101O01O000lNfAc0Y>XOlAh0T>TOPBj0e>L31O0001O01O000001L3K5L4K6J`Xl3"}, "image_id": 683, "id": 11817}, {"iscrowd": 0, "category_id": 1, "bbox": [510.0, 236.0, 2.0, 5.0], "area": 8, "segmentation": {"size": [512, 512], "counts": "]Wo73l?2cH"}, "image_id": 683, "id": 11818}, {"iscrowd": 0, "category_id": 1, "bbox": [273.0, 238.0, 61.0, 59.0], "area": 1821, "segmentation": {"size": [512, 512], "counts": "VhX41m?2O2M2N3M3N1N3M2N3N1N3M3M2O2WAQO_>o0_ATOa>l0\\AVOd>j0[AXOe>R10O010O0100O0100O010O010fN[AX1f>010O0100O0100O010O0100O010O0N3M3M2O2M2N3M2O2M3M2N3N1N3M2N3Nggh2"}, "image_id": 683, "id": 11819}, {"iscrowd": 0, "category_id": 1, "bbox": [133.0, 254.0, 34.0, 33.0], "area": 668, "segmentation": {"size": [512, 512], "counts": "dhR21n?2DNh@4U?Ni@4V?Ng@5V?Nh@4U?Ni@3W?;O010O00010O010O3N1N3M2O00O3N1N3M3N1N3M2O2M3N1N3M2O2M^W\\5"}, "image_id": 683, "id": 11820}, {"iscrowd": 0, "category_id": 1, "bbox": [465.0, 259.0, 47.0, 47.0], "area": 1296, "segmentation": {"size": [512, 512], "counts": "ahX73m?2M3N1N3N2M3M2O2M3N2M2O2M3M3N1N3N2M100O0010O010O00102M2O2M2O2M201O0O1N3N0M2010O01Di@OV?2l@KU?4n@JR?6PAGS?6>M^G"}, "image_id": 683, "id": 11821}, {"iscrowd": 0, "category_id": 1, "bbox": [77.0, 270.0, 60.0, 42.0], "area": 1644, "segmentation": {"size": [512, 512], "counts": "diV13[?b0K500JYOSAg0m>YOSAg0m>6M3000000000000001O01O0000000000nNVAn0n>00O100O10O010O010O010O010O0010O010O010O010O01O0100O3N2M3N2M3N2M3N2M3N2M3NlVk5"}, "image_id": 683, "id": 11822}, {"iscrowd": 0, "category_id": 1, "bbox": [192.0, 276.0, 25.0, 31.0], "area": 445, "segmentation": {"size": [512, 512], "counts": "aYP31j?5L4K6J5L4K40L6K4001O0000010O0000000010O0L4K5L4K^Wc4"}, "image_id": 683, "id": 11823}, {"iscrowd": 0, "category_id": 1, "bbox": [310.0, 300.0, 23.0, 20.0], "area": 237, "segmentation": {"size": [512, 512], "counts": "fYk42m?2O2M2N3N1N2OO01O010O00010O000101N2N3N1N3N2M\\Vi2"}, "image_id": 683, "id": 11824}, {"iscrowd": 0, "category_id": 1, "bbox": [489.0, 305.0, 23.0, 37.0], "area": 535, "segmentation": {"size": [512, 512], "counts": "lid71n?3N2M2O2M3M3N2M3N2M2O2M3N2M2O0O010O01O010O3N]F"}, "image_id": 683, "id": 11825}, {"iscrowd": 0, "category_id": 1, "bbox": [305.0, 311.0, 77.0, 62.0], "area": 1975, "segmentation": {"size": [512, 512], "counts": "Rkh42n?1N3M2O2M2O2DDo@?o>Cn@?P?Dn@>o>;O2M0010O0010O0010O00010O00010O010O00010O00010O00010O010O00010O00010O003N2M1FPADo>=RAAo>>TA_Ol>b0UA]Ok>b090010O02N3N1N10O01O010O01O2O2M2N3N2M2N3N1N3N1N3MieP2"}, "image_id": 683, "id": 11826}, {"iscrowd": 0, "category_id": 1, "bbox": [102.0, 314.0, 34.0, 35.0], "area": 591, "segmentation": {"size": [512, 512], "counts": "aZc11n?1N3N2M3N1O2M3N2N1N3N2N2M2O2N2M2O02N2M2O2N2M3N1O2M3N2N1N3N2N2M2O2Nkek5"}, "image_id": 683, "id": 11827}, {"iscrowd": 0, "category_id": 1, "bbox": [158.0, 325.0, 85.0, 57.0], "area": 2629, "segmentation": {"size": [512, 512], "counts": 
"nZ_21m?2M3N3L3N3M2M3N3M2M4N110O00010O010O00010O010O0010O0010O010O00010O010O00010QATOh>l0UAWOl>o0O010O00010N1M4M2O2O00010O010O00010O010N1N2M4MO4M2M4M2N3L3N201O0010O00N3M2M4M2M4M2N2M4M2M4M`UV4"}, "image_id": 683, "id": 11828}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 328.0, 49.0, 65.0], "area": 1666, "segmentation": {"size": [512, 512], "counts": "Z;e0[?010O010O00010O001M2N2M4M2M4M2N3L3N2M4M2N3L3N2M4N10N3M2N3L3N2M4M2M4M2N3L3N2M4M2N3L3N2M4M2N3L3N2MbUW7"}, "image_id": 683, "id": 11829}, {"iscrowd": 0, "category_id": 1, "bbox": [85.0, 340.0, 31.0, 30.0], "area": 469, "segmentation": {"size": [512, 512], "counts": "TkZ11n?2N1O2N2N2N2N2M2O2N2N2N2N2N2N1100N2N2N2N1O2N2M3N2N2N1O2M3N2NPeU6"}, "image_id": 683, "id": 11830}, {"iscrowd": 0, "category_id": 1, "bbox": [324.0, 359.0, 43.0, 35.0], "area": 723, "segmentation": {"size": [512, 512], "counts": "Q\\R51n?2N3N1N3M2O1IEf@Q1bAkNa>V14010O01O01O01O0N3L3O10103L7J7H8Hibl3"}, "image_id": 683, "id": 11837}, {"iscrowd": 0, "category_id": 1, "bbox": [345.0, 394.0, 24.0, 23.0], "area": 358, "segmentation": {"size": [512, 512], "counts": "`l\\53l?4M4L4L3L0100000O01000O0100000O01000O05L3M4L4KXSW2"}, "image_id": 683, "id": 11838}, {"iscrowd": 0, "category_id": 1, "bbox": [68.0, 430.0, 19.0, 20.0], "area": 245, "segmentation": {"size": [512, 512], "counts": "m]R11l?4L3M3M4L3000010O00010O0001M2M3M4L3MaRd6"}, "image_id": 683, "id": 11839}, {"iscrowd": 0, "category_id": 1, "bbox": [366.0, 432.0, 69.0, 51.0], "area": 1970, "segmentation": {"size": [512, 512], "counts": "^^g53i?4M3O20O0001O01O0002O0O0L4M3L5L3M3L4M4N1001O01O00010O0000010O0000010O00010O0000010O0000010O000010TOYA=g>@]A?c>^O`Ab0`>[OcAf0k>01O0001O01O01O01O000O1L5L3L4L4MhQV1"}, "image_id": 683, "id": 11840}, {"iscrowd": 0, "category_id": 1, "bbox": [254.0, 433.0, 41.0, 30.0], "area": 755, "segmentation": {"size": [512, 512], "counts": "T^o31k?4M4K4M3L5N11O00010O0001O01O00010O0001O01O0001O01O01O01O0001O01O00010N1M3L4M4K4MXR\\3"}, "image_id": 683, "id": 11841}, {"iscrowd": 0, "category_id": 1, "bbox": [110.0, 439.0, 80.0, 58.0], "area": 2342, "segmentation": {"size": [512, 512], "counts": "f^g13j?3N3L3N3L300010O01O010O0N2M4M2N3L3O110O010O0010O0010O0010O010O0010O0010O0010O010O0010O0010O0010O0010O010O001L3N2N3L3NO10O010O12M4M2N2M4M2M4M2N3L3N2M4M2N3L3N2NUbP5"}, "image_id": 683, "id": 11842}, {"iscrowd": 0, "category_id": 1, "bbox": [11.0, 449.0, 65.0, 48.0], "area": 1768, "segmentation": {"size": [512, 512], "counts": "nn52l?2N3M2N3L3N3M2N2N3L3N3M2N210O0N3O010O00010O010O01O01O010O010O0001kNWAR1k>010O00010O010OM3N3L3N3M2010M2N3N110O01O01O01N1N3L3N2M4M2N3L3N`ai6"}, "image_id": 683, "id": 11843}, {"iscrowd": 0, "category_id": 1, "bbox": [78.0, 454.0, 26.0, 25.0], "area": 409, "segmentation": {"size": [512, 512], "counts": "e^W12k?4M2M3N3L3N3O001O01O010O010O00010O010O0N2M4M2N3L3Nea[6"}, "image_id": 683, "id": 11844}, {"iscrowd": 0, "category_id": 1, "bbox": [241.0, 474.0, 61.0, 38.0], "area": 1625, "segmentation": {"size": [512, 512], "counts": "Xoh3f0Z?000000002N000000000000000000000003M000000000000000000000000000000000000000000000000_Oa0000000000000000000000000000000006JPaX3"}, "image_id": 683, "id": 11845}, {"iscrowd": 0, "category_id": 1, "bbox": [437.0, 484.0, 11.0, 10.0], "area": 105, "segmentation": {"size": [512, 512], "counts": "Toj6:f?0000000001O00000000k`o0"}, "image_id": 683, "id": 11846}, {"iscrowd": 0, "category_id": 1, "bbox": [96.0, 494.0, 70.0, 18.0], "area": 634, "segmentation": {"size": [512, 512], "counts": 
"o_`11m?2N2M3N2M3N2N2O100001O001O00001O001O001O00001O001O001OO1N200001O001O00001O001O001O00001O001O001O00001O001O000000N20000001O001O00001O001O00Q`\\5"}, "image_id": 683, "id": 11847}, {"iscrowd": 0, "category_id": 1, "bbox": [308.0, 510.0, 3.0, 2.0], "area": 4, "segmentation": {"size": [512, 512], "counts": "n_j42o?O00QPT3"}, "image_id": 683, "id": 11848}, {"iscrowd": 0, "category_id": 1, "bbox": [31.0, 0.0, 45.0, 42.0], "area": 1022, "segmentation": {"size": [512, 512], "counts": "h`?3l?2N1O2N2M3N2N1O2M3N2N2N2N1N3N2N2N2M2O2N10O1O1N2O1O1O1O1N2O2N2N1N3N2N2N2N2M2O2N2N2M3N1O2Neoi6"}, "image_id": 686, "id": 11849}, {"iscrowd": 0, "category_id": 1, "bbox": [351.0, 134.0, 82.0, 69.0], "area": 3677, "segmentation": {"size": [512, 512], "counts": "Rf_54h?5J5K5L4K5K6J5L4K5K6K4K5L40001O0001O000GUB^Nj=]1[BcNf=W1_BiNa=U1aBkN_=U1bBjN^=W1aBeNc=[1b0O0001O0001O000eN_AX1d>000000010O0`AgNY>Y1cAkN]>]10O000000010O0000O2J500001O01OL4M301O0001O01O00000eNcAT1d>000001O01O00N2K6K4K5K5L5J5K\\[W1"}, "image_id": 686, "id": 11850}, {"iscrowd": 0, "category_id": 1, "bbox": [276.0, 213.0, 57.0, 48.0], "area": 1941, "segmentation": {"size": [512, 512], "counts": "gWZ44h?5J5K5K5K5K6J51O0001O0001O0001O0001O0001O0001O0001O0001O0001O0001O0001O0001O0001O01O000001O01O000M3K5K6J5K5L4K5KVYi2"}, "image_id": 686, "id": 11851}, {"iscrowd": 0, "category_id": 1, "bbox": [398.0, 217.0, 22.0, 17.0], "area": 269, "segmentation": {"size": [512, 512], "counts": "RWW64g?5L401O000001O01O0001O01O0001O01O0000N3K4LWi]1"}, "image_id": 686, "id": 11852}, {"iscrowd": 0, "category_id": 1, "bbox": [210.0, 444.0, 82.0, 68.0], "area": 4099, "segmentation": {"size": [512, 512], "counts": "n_Y32j?4K5K5L4K5K5K5L4K5K5K5L4K5K500001O000000001O000000001O000000001O0000001O000000001O000000001O000000001O0000001O000000001O000K5L4K6J5M3000001O01O0001O0N2K5K5K6K4K5Kka]3"}, "image_id": 686, "id": 11853}, {"iscrowd": 0, "category_id": 1, "bbox": [96.0, 0.0, 76.0, 22.0], "area": 1037, "segmentation": {"size": [512, 512], "counts": "PP`13m?001O00001O001O001O00001O001O00001O001O001O00001O001O001O00001O001O00001O001O001O00001O001O00O1N2M3N2001O00001O001O00001O001O001O00001OO1M3N2M3N3M2M4MPPZ5"}, "image_id": 687, "id": 11854}, {"iscrowd": 0, "category_id": 1, "bbox": [246.0, 0.0, 56.0, 6.0], "area": 258, "segmentation": {"size": [512, 512], "counts": "PPk33m?000000000000000000003M0000000000000000000000000000000000000000000000000000000000M3000000000000000000000000000PPY3"}, "image_id": 687, "id": 11855}, {"iscrowd": 0, "category_id": 1, "bbox": [292.0, 0.0, 30.0, 35.0], "area": 573, "segmentation": {"size": [512, 512], "counts": "ePb42l?3M20003NO01O0M4M2N3M2N3L3N3M20001O001O0010M2ISAXOQ?e06N3Ae@5]?If@3e?N3Mfon2"}, "image_id": 687, "id": 11856}, {"iscrowd": 0, "category_id": 1, "bbox": [194.0, 2.0, 69.0, 61.0], "area": 1884, "segmentation": {"size": [512, 512], "counts": "ePQ31n?2O1N2N2N2N2N2N2N2N3e@\\OU?i0N2N2N2N2O1N2N2N3M2N2O10O1N3O000001O0000O1N2N2N2N2N2O2N1000000001O00000000000O1N1O02N2N2N2N2N3M2N2O1N2N2N2N2N2N2N2Nc^l3"}, "image_id": 687, "id": 11857}, {"iscrowd": 0, "category_id": 1, "bbox": [361.0, 5.0, 50.0, 34.0], "area": 1219, "segmentation": {"size": [512, 512], "counts": "m`d52i?5J6J6J6O2O000001O0001O000001O0001O000001O01O00000001O01O000001O0001O000001O0001O000001O01OL4J6J6Ki_b1"}, "image_id": 687, "id": 11858}, {"iscrowd": 0, "category_id": 1, "bbox": [507.0, 13.0, 5.0, 21.0], "area": 53, "segmentation": {"size": [512, 512], "counts": "Pam71j?5L4K5K6B"}, "image_id": 687, "id": 11859}, {"iscrowd": 0, "category_id": 1, "bbox": [320.0, 14.0, 28.0, 27.0], "area": 
442, "segmentation": {"size": [512, 512], "counts": "PQP51m?3L3N2N3M2M4M2N3O00010O0010O010O0010OO1N3M2M4M2N3L3N2N]oa2"}, "image_id": 687, "id": 11860}, {"iscrowd": 0, "category_id": 1, "bbox": [286.0, 41.0, 44.0, 54.0], "area": 1369, "segmentation": {"size": [512, 512], "counts": "cR_41m?2GOa@4\\?O`@4]?9N3M2M4M2N201O0O2L3N2N3L3N3M21O01O010O010O01O01OM4L3N3L3N2M4M2M4L3N3L3N2M4M2M4L_nj2"}, "image_id": 687, "id": 11861}, {"iscrowd": 0, "category_id": 1, "bbox": [258.0, 46.0, 28.0, 30.0], "area": 514, "segmentation": {"size": [512, 512], "counts": "QRQ43j?3M3N3L3M4L3N2010O01O010O01O010O010O00010L3M4M2M4M2M4M[n`3"}, "image_id": 687, "id": 11862}, {"iscrowd": 0, "category_id": 1, "bbox": [508.0, 53.0, 4.0, 11.0], "area": 27, "segmentation": {"size": [512, 512], "counts": "mQn72k?4M2M3[N"}, "image_id": 687, "id": 11863}, {"iscrowd": 0, "category_id": 1, "bbox": [336.0, 74.0, 60.0, 49.0], "area": 1823, "segmentation": {"size": [512, 512], "counts": "iRX51m?4L5K4L4M101N100j@^On>c0n@@Q?i0O0O110O010O00010O010O00010O010O00010O010O01ON3M2M4M2M3010O01O01O010O01O01N1N3L3N2M4M2M4M2Ei@JZ?3i@KZ?2i@JZ?3jNcAY1[>8M2M3N3M2N30O010O0O1N3L3N3M2M01000O011O3L3N3M2M4M`M"}, "image_id": 687, "id": 11865}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 87.0, 60.0, 258.0], "area": 10526, "segmentation": {"size": [512, 512], "counts": "l2m6R96K4L5K4K6K4L5K000O010000000O0100000O10O1000003L6K4L5K4ZIZGj5k8QJZGi5k8RJZGj5k8QJZGj5k8QJYGk5c9J5L5K5K4L5J5L5K4L5QNbDQOc;k0bDcNKVOh;R2aDdNLUOg;S2bDcNP7XAKf>8WAJg>8WAJg>8XAIf>9XAJe>8YAJf>8WAJh>h0O1O1O2N1O00O100O1O1O1O1O1O100O1O1O1O1O1O1O100O1O1O1O1O1O100O1O1O1O1O1O1001O1OO1O1O1O1O1O100O1O1O1O1O1O100O1O1O1O1O1O2Oo_`6"}, "image_id": 688, "id": 11872}, {"iscrowd": 0, "category_id": 1, "bbox": [259.0, 0.0, 52.0, 45.0], "area": 1507, "segmentation": {"size": [512, 512], "counts": "P`Q44l?000000:F000000000000000000002N00000000000000m0SO000000000000000000000000000000000000000000000000XOh000000P`T3"}, "image_id": 688, "id": 11873}, {"iscrowd": 0, "category_id": 1, "bbox": [370.0, 0.0, 12.0, 7.0], "area": 46, "segmentation": {"size": [512, 512], "counts": "PPi51o?1O1O1O1O1O1OO1O1O1N2OQPQ2"}, "image_id": 688, "id": 11874}, {"iscrowd": 0, "category_id": 1, "bbox": [406.0, 0.0, 66.0, 48.0], "area": 1691, "segmentation": {"size": [512, 512], "counts": "bP[61m?3N1O2FKf@7X?Kf@79G`>4UA79Fa>d0]A^O`>e0^A]Oa>d0]A^Ob>b0]A@b>a0[ABd>n0O1O001O1O1O00N2O1O1O1O1O1O1N2O1O1O1O1O1O1N2O1O1O1O1O1O1N2O1O1O1O1O1O11O1O1O1O00O1O1O1O1O1O1N2O1O1O1O2N1O2MPPd0"}, "image_id": 688, "id": 11875}, {"iscrowd": 0, "category_id": 1, "bbox": [61.0, 16.0, 91.0, 87.0], "area": 4098, "segmentation": {"size": [512, 512], "counts": "Wbn02n?1N3N2M3N2M3M3N1N3N2M3N2M3N1N3N2M2N010O0^OjN\\BV1c=mNZBS1g=nNXBQ1h=ROUBo0k=SOSBl0m=WOPBj0P>XOnAg0R>d00O010O01O0LUNUBk1j=XNSBh1n=401O010O010O010O010O01O01O010O010O010O010O011O11000000N2M2O2M3N2M3N2M3N1N3M2OO010O02O2M3N2UOQA`0R?]OQAa0Q?]OQA`0Z?N2M3M2O2M3N2Mhmc5"}, "image_id": 688, "id": 11876}, {"iscrowd": 0, "category_id": 1, "bbox": [429.0, 17.0, 83.0, 90.0], "area": 3695, "segmentation": {"size": [512, 512], "counts": "Vbf61n?2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2TAnNg>W1N2N2O100000000000O10O1N2N2N2N2N2N1O0000000000000000000JYNTBg1l=[NRBd1o=^NoAb1Q>60002N1O00000000000000000O12N2N2N2N2N2N2N2N2N2N2N2N2O100O1N2N1O2CSADo>:SADj="}, "image_id": 688, "id": 11877}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 18.0, 7.0, 8.0], "area": 41, "segmentation": {"size": [512, 512], "counts": "c05k?2M1001O2M3NY_l7"}, "image_id": 688, "id": 11878}, {"iscrowd": 0, "category_id": 1, "bbox": [307.0, 42.0, 68.0, 72.0], "area": 
2429, "segmentation": {"size": [512, 512], "counts": "oai43l?1N3N2N1N3N2N2M2O2N2M3j@WOT?k0O0bAVOe=l0XBWOf=k0XBWOf=k0XBVOf=l0XBWOf=k0XBWOf=k0XBWOg=i0WBYOj=f0UB\\Oj=d0UB^Ok=b0RBAn=>QBCo=X1000O10O1000O1O0N3N2N0O101N3N1O2M3N2F`AQOa>n0aAoNb>o0_APOa>o080O010O10O101O2M2O2N2M3N1O2M3N2M2O2N2M3N1OX^T2"}, "image_id": 688, "id": 11879}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 51.0, 33.0, 72.0], "area": 1263, "segmentation": {"size": [512, 512], "counts": "g1T2k=00001O02N2N2N2N2N2O2M2N2dNfAo0\\>oNfAo0\\>POeAn0^>oNeAn0]>POeAo0g>N2N2N2N3M2N2O1N2N2N3M2N2N2O1NS]_7"}, "image_id": 688, "id": 11880}, {"iscrowd": 0, "category_id": 1, "bbox": [233.0, 90.0, 71.0, 75.0], "area": 2395, "segmentation": {"size": [512, 512], "counts": "hcd32m?2N2M3N1O2N2M3N2N2N2N1N3N2N200O10OO2N2N2N2N2N2M2O2N2N2N2M3N101000000O1000O1000000gNkAf0T>YOnAg0R>WOoAi0R>UOPBk0P>SORBm0n=POUBP1k=nNWBR1h=mNZBS1f=kN[BV1X>O100O1M3N2N1O2N2N2M3N2N1O2N2M3N2N2N2N1O2MdkW3"}, "image_id": 688, "id": 11881}, {"iscrowd": 0, "category_id": 1, "bbox": [376.0, 91.0, 69.0, 66.0], "area": 2585, "segmentation": {"size": [512, 512], "counts": "gSl52m?1N3N2M2O2M3N1O2M3N1N3N2M2O2N2M2O2M30O01000O01000O0100O001N2M2O2M3N1O200O01000O01000O010O1M2O2M3N11000O01POoA0Q>NQB2n=LTB4m=JUB4m=IVB5k=JVB4m=IVB5l=IVB4l=JVB5l=IVB4m=IVB5k=JVB4m=IVB5l=IVB4l>O2Mj[Q1"}, "image_id": 688, "id": 11882}, {"iscrowd": 0, "category_id": 1, "bbox": [122.0, 100.0, 68.0, 70.0], "area": 2800, "segmentation": {"size": [512, 512], "counts": "mSm16h?4L3N2O1N2O0c@CT?i0J5L3_AkNT>W1hAlNW>V1eAmN[>]1N101N11O01O01O01O01O01O01O01O01O01O010O01O01O01O01O0O0N1O0001O003M3N2M4L3M4M20010O00010O0M3N3L3M3M4L3M3N3L3M4LglP5"}, "image_id": 688, "id": 11883}, {"iscrowd": 0, "category_id": 1, "bbox": [5.0, 127.0, 79.0, 66.0], "area": 2835, "segmentation": {"size": [512, 512], "counts": "Xd28g?2N2N2N3M2N2O1O1O1O2N100[AVOR>k0lAWOS>j0aATO64Y>h0_AVO64[>j0cAXO]>i0`AYO`>U1O000N2O1N2N3M2N2N2N01O010O2N3M2N2N1O010O0000000000010O1O2N2N3M2N2O1N2N2N3M2N1O01O000001O0000011N2N3M2N2N2N2O1N3M2N2N2N2N^ke6"}, "image_id": 688, "id": 11884}, {"iscrowd": 0, "category_id": 1, "bbox": [181.0, 139.0, 75.0, 75.0], "area": 2832, "segmentation": {"size": [512, 512], "counts": "kej24i?3N2M4L3N3L3M3N30O00010O0010O0010O00010OO1M4L3N3L3M3N3L3M3M4N1N201N1O2O00001N11O01O0010O00100O1O2O6I1QOiA4V>JmA5S>IoA7R>ERB;m=CUB=l=@VB`0j=]OZBc0f=ZO\\Bf0d=XO_Bg0a=VObBk0Z>O01O01O001L3N3M2M3N3M2M4M2NSjo3"}, "image_id": 688, "id": 11885}, {"iscrowd": 0, "category_id": 1, "bbox": [334.0, 141.0, 77.0, 67.0], "area": 2617, "segmentation": {"size": [512, 512], "counts": "mTW52l?3M2N2f@Hj>;SAGk>;SAHj>;SAGk>;SAHj>h01O010O010O01O010O010O010O010O010O010O00010O010O010O010M2N3L3N3N110O0cAcNX>]1eAeN[>a10O010O00100O010O010O010POdA:\\>CgA>Y>_OjA`0V>^OlAc0T>ZOoAe0Q>YOQBh0o=UOTBj0m=SOUBn0_>O001M2N3L3N3M2N3M2N2N3M2N[Zb1"}, "image_id": 688, "id": 11886}, {"iscrowd": 0, "category_id": 1, "bbox": [305.0, 165.0, 11.0, 10.0], "area": 64, "segmentation": {"size": [512, 512], "counts": "[eh41m?3M2N30O010O000O2L3NjjQ3"}, "image_id": 688, "id": 11887}, {"iscrowd": 0, "category_id": 1, "bbox": [51.0, 183.0, 66.0, 73.0], "area": 2370, "segmentation": {"size": [512, 512], "counts": "Xgi02m?2N2N2M3N1O2N2N2N2XO]OPBe0m=^OQBd0m=^OQBd0m=^OPBe0n=]OPBd0o=]OPBe0n=]OPBe0n=h0N2N2M3N2N2N2N1100N2M3N1O2N2N2N2N2N2M10000HeAlNZ>U1hAiNX>W1jAgNX>V1kAhNV>W1;N2N2N2N2N2NO010001O2N2N2N2N2N2M3N1O2N2N2N2N2NgYU6"}, "image_id": 688, "id": 11888}, {"iscrowd": 0, "category_id": 1, "bbox": [293.0, 200.0, 87.0, 64.0], "area": 2862, "segmentation": {"size": [512, 512], "counts": 
"Qgb43j?3N3L3N3L3N2M4L30010O010O00010O01O01O010O0N2N3L3N2010O010O00010O0010O0010O0010O00010O010O00010O010O00010M2N2O2_AfNZ>Z1cAjN[>^100O2O00001O0010O00010O1nNfA<[>@jA?V>]OoAa0R>[OSBd0o=WOVBf0b>0010O0010ON2M4M2M3M4M2M4LahQ2"}, "image_id": 688, "id": 11889}, {"iscrowd": 0, "category_id": 1, "bbox": [117.0, 216.0, 57.0, 73.0], "area": 2528, "segmentation": {"size": [512, 512], "counts": "mgj14b?Md@6Y?Me@6W?Mf@6W?;M4M2M3M4L3M3O20O00oAeN]=\\1_BgNb=X1\\BkNc=U1ZBnNg=R1UBQOk=o0SBTOl=l0QBWOP>\\101O010O00010O0N2N3O00010O01O010N1M3M4L3M4L3M3N3L3M4L3M4L3M3N3L3M4L3M3M4LnhX5"}, "image_id": 688, "id": 11890}, {"iscrowd": 0, "category_id": 1, "bbox": [175.0, 242.0, 63.0, 78.0], "area": 2773, "segmentation": {"size": [512, 512], "counts": "\\hg24i?3N2M4L3M3M4M2N2010O01O01O01mAUOo\\1M2M3M4O00010O0010O00010O00010O000N3M1N001O4L3N2M4L3M3M4M2M3M4L3N3L3M3M4L3N2M4L3M3MZhX4"}, "image_id": 688, "id": 11891}, {"iscrowd": 0, "category_id": 1, "bbox": [461.0, 254.0, 51.0, 76.0], "area": 2908, "segmentation": {"size": [512, 512], "counts": "diV76d?6J7L3001Oh@_OQ?h000000010I6K5J6K5J7J5O1000000O2I60000000010O000000010O00000000010O00000001O01O000000010O0mG"}, "image_id": 688, "id": 11892}, {"iscrowd": 0, "category_id": 1, "bbox": [232.0, 258.0, 62.0, 73.0], "area": 2433, "segmentation": {"size": [512, 512], "counts": "aYd34i?3M3N3L3M3N3L3M3O2O010O0PATOl>Q10O0103L0010O00010ON2M4L3N3L3M3N3L3M3N3L30010O0010O0N2M4M2M3M4M2M3M4M2M3M4L3N2M4L3N2M4L3N2M4L3N2Mhg\\3"}, "image_id": 688, "id": 11893}, {"iscrowd": 0, "category_id": 1, "bbox": [300.0, 270.0, 74.0, 79.0], "area": 2984, "segmentation": {"size": [512, 512], "counts": "_Yf44i?3M3N3L3M3M4M2O101O010O01O01O01O01O01ON3L3N3L300oAkNW=V1eBmN[=S1bBQO^=n0_BUOa=l0\\BVOd=j0YBZOg=e0VB^Oj=c0RB@n=Z10OO1N3O0010O0010O00010O000M4M2N1OO2N3M4M2M4BQBfNR>W1RBeNQ>X1RBfNQ>W1=M3M4L3N2M4L3O2O00010M2M3M4M2M3M]gT2"}, "image_id": 688, "id": 11894}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 346.0, 5.0, 10.0], "area": 30, "segmentation": {"size": [512, 512], "counts": "j::g?N2N2N2NSUm7"}, "image_id": 688, "id": 11895}, {"iscrowd": 0, "category_id": 1, "bbox": [3.0, 353.0, 57.0, 55.0], "area": 1558, "segmentation": {"size": [512, 512], "counts": "gk11n?2N2N2N2N2N2N2N2N2O1N3M2N2N2N2N2N2N2N2N2N2N2N2O101O0000000001M2N2N2N2N2N2N2N2000000N2N2N3M2N2N2N2N2N2N2O1N2N2N2NncQ7"}, "image_id": 688, "id": 11896}, {"iscrowd": 0, "category_id": 1, "bbox": [70.0, 379.0, 57.0, 55.0], "area": 1557, "segmentation": {"size": [512, 512], "counts": "`\\S11n?2N3M2N2N2N2O1N2N2N2N3M2N2N2N2N2O1N2N2N3M2N2N2O10000000001ON2N3M2N2N2N2N2N2001OO2M2N2N2O1N2N2N2N2N3M2N2N2N2O1N2NSSP6"}, "image_id": 688, "id": 11897}, {"iscrowd": 0, "category_id": 1, "bbox": [3.0, 408.0, 30.0, 31.0], "area": 469, "segmentation": {"size": [512, 512], "counts": "Wm11n?2N2N2N2N2N2N2O2M2N2N2N2N2N2N0003M2N2N2N2N2N2O1N2N2N3M2N2NjR_7"}, "image_id": 688, "id": 11898}, {"iscrowd": 0, "category_id": 1, "bbox": [135.0, 409.0, 68.0, 85.0], "area": 3230, "segmentation": {"size": [512, 512], "counts": "omS21l?3L4N3O0n@F^>:_AIa>7[ANe>2WA1i>OTA4l>LPA9n>=M3M3L5L3L4O2O01O01O0001O01O000oA^Nd=c1WBaNj=^1SBeNm=h1O00010O0000010O000010O0000M4L3M3L5LO3N3M3L4M4K4M3L5L3M3L4M4K4M3M4K4M3L5L3M3L4MUSj4"}, "image_id": 688, "id": 11899}, {"iscrowd": 0, "category_id": 1, "bbox": [210.0, 418.0, 49.0, 84.0], "area": 2747, "segmentation": {"size": [512, 512], "counts": "k]Y3g0Y?00000A?1O000000000000N2G9bABZ=b100000000000000000000000000b0^O00000000005K0000000000000J6ZOf0H8000000000WRn3"}, "image_id": 688, "id": 11900}, {"iscrowd": 0, "category_id": 1, "bbox": [78.0, 425.0, 15.0, 14.0], 
"area": 119, "segmentation": {"size": [512, 512], "counts": "`]W11n?2N2N2N2N1O0010100000O1N4L3M_Ra6"}, "image_id": 688, "id": 11901}, {"iscrowd": 0, "category_id": 1, "bbox": [266.0, 436.0, 76.0, 57.0], "area": 3129, "segmentation": {"size": [512, 512], "counts": "X^U4n0Q?10001O0000000000000000000000000000000000000000000000000014K0000000000000000_Oa000000000000000000004L0000000I700000000000000000000000001O00000000000000@lbd2"}, "image_id": 688, "id": 11902}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 446.0, 50.0, 59.0], "area": 1571, "segmentation": {"size": [512, 512], "counts": "n=c1^>N2N2N2O1N2N2N3M0000000001O200001O01O0000000000000001O01O0000O1N2N2N2N2N2N3N1N2N2N2N2N2N2N2N2N3M2Oh`V7"}, "image_id": 688, "id": 11903}, {"iscrowd": 0, "category_id": 1, "bbox": [46.0, 452.0, 31.0, 31.0], "area": 477, "segmentation": {"size": [512, 512], "counts": "a^g01n?2N2N2N2N2O1N3M2N2N2N2N2N2N200000N2N2N2N2N3M2O1N2N2N2N2N2N2N\\Qi6"}, "image_id": 688, "id": 11904}, {"iscrowd": 0, "category_id": 1, "bbox": [57.0, 480.0, 67.0, 32.0], "area": 1057, "segmentation": {"size": [512, 512], "counts": "mol03j?3M3001O00001O00001O00001O00001O00001O00001O00001ON2N2M3M3M3M3M3M3M3M3M300001O00001O00001O00001O00001O00001O00001O00001L3N2M4L3M4L3Mi`Q6"}, "image_id": 688, "id": 11905}, {"iscrowd": 0, "category_id": 1, "bbox": [156.0, 502.0, 31.0, 10.0], "area": 175, "segmentation": {"size": [512, 512], "counts": "n_^22j?4M3O100001O00001O00001O00001O001O00001O00001O00001O00001O00QPR5"}, "image_id": 688, "id": 11906}, {"iscrowd": 0, "category_id": 1, "bbox": [306.0, 502.0, 44.0, 10.0], "area": 275, "segmentation": {"size": [512, 512], "counts": "o_i41o?000000000000000000000J6M30000000000000000001O00000000000000000000003M0000000000001O0LY``2"}, "image_id": 688, "id": 11907}, {"iscrowd": 0, "category_id": 1, "bbox": [47.0, 503.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "gog01XPX7"}, "image_id": 688, "id": 11908}, {"iscrowd": 0, "category_id": 1, "bbox": [37.0, 504.0, 10.0, 8.0], "area": 35, "segmentation": {"size": [512, 512], "counts": "oob01o?0O1O1O1O100O1OO1N[PX7"}, "image_id": 688, "id": 11909}, {"iscrowd": 0, "category_id": 1, "bbox": [21.0, 0.0, 29.0, 19.0], "area": 332, "segmentation": {"size": [512, 512], "counts": "U`:2m?2N2N2N2N2O1O1O1O1O1O1O1O00O1O1O1O1O1O1O1O1O1O1O1O2N2N2NnoV7"}, "image_id": 689, "id": 11910}, {"iscrowd": 0, "category_id": 1, "bbox": [69.0, 0.0, 27.0, 14.0], "area": 198, "segmentation": {"size": [512, 512], "counts": "P`R11o?2N1O1O1O1O1O1O2N1O1O1OO1O1O1O100O1O1O1O1O1O100O1O1OQP`6"}, "image_id": 689, "id": 11911}, {"iscrowd": 0, "category_id": 1, "bbox": [266.0, 0.0, 47.0, 27.0], "area": 693, "segmentation": {"size": [512, 512], "counts": "UPU42m?2N2N3M2N2O1O1O1O1O1O1O1O1O1O1O1O1O1O1OO1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1OQ`S3"}, "image_id": 689, "id": 11912}, {"iscrowd": 0, "category_id": 1, "bbox": [384.0, 0.0, 7.0, 4.0], "area": 16, "segmentation": {"size": [512, 512], "counts": "PPP61o?1O1O1OO1O1OQ`l1"}, "image_id": 689, "id": 11913}, {"iscrowd": 0, "category_id": 1, "bbox": [365.0, 0.0, 59.0, 41.0], "area": 1059, "segmentation": {"size": [512, 512], "counts": "k`f52m?2N2N2N2N2N3M2O1N2N2N2N0000001O000000000001O0001O0000000001O1O1O100O100101O0001O0O1N2N2N2N2O100001O000000N2O2M2N2N2NYo[1"}, "image_id": 689, "id": 11914}, {"iscrowd": 0, "category_id": 1, "bbox": [443.0, 0.0, 26.0, 13.0], "area": 182, "segmentation": {"size": [512, 512], "counts": "P`m61o?1O1O1O1O1O1O1O1O1O1O1O1O00O1O1O1O1O1O1O1O1O1O1O1OQ`e0"}, "image_id": 689, "id": 11915}, {"iscrowd": 
0, "category_id": 1, "bbox": [466.0, 0.0, 26.0, 17.0], "area": 255, "segmentation": {"size": [512, 512], "counts": "VPY72m?2N2N2N2N2N2O1O1O1O00O1O1O1O1O1O1O1O1O1O1O1O1O1O2NPP:"}, "image_id": 689, "id": 11916}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 2.0, 55.0, 51.0], "area": 1393, "segmentation": {"size": [512, 512], "counts": "l`]11n?2N2N2O1N2N3M2N2N2N2O1N2N3M2N2N2N2N2O1N3M20N2N2N1O01O0000000000010O00000002N2N3M2O1N2N2N2N3M2N2N2O1N2N2N3M2N[of5"}, "image_id": 689, "id": 11917}, {"iscrowd": 0, "category_id": 1, "bbox": [298.0, 7.0, 53.0, 58.0], "area": 1479, "segmentation": {"size": [512, 512], "counts": "[Qe41n?2N2N2O1N2N2N3M2N2N2N2N2N2N2N2DSOcAn0[>TOcAn0[>TOcAo0Z>SOdAo0Z>SOdAn0[>;0000001O0001O02N2N3M2N2N2N2N2N2N2N2O1N2N3M2N2N2N2N2N2N2N2O1N2NP_`2"}, "image_id": 689, "id": 11918}, {"iscrowd": 0, "category_id": 1, "bbox": [456.0, 20.0, 52.0, 51.0], "area": 1324, "segmentation": {"size": [512, 512], "counts": "aQT73l?2N2N2N2N2O1N2N2N2N2N3M2N2N2N2N2N2N2O0O001O000000000000000001O002O1N2N2N2N2N2N3M2N2N2N2N2O1N2N2N2N2N3Mhn1"}, "image_id": 689, "id": 11919}, {"iscrowd": 0, "category_id": 1, "bbox": [55.0, 27.0, 32.0, 32.0], "area": 528, "segmentation": {"size": [512, 512], "counts": "Zak01n?2N2N2N2N2N2N2N2N2N2N2N2N2N2N2O10N2N2N2N3N1N2N2N2N2N2N2N2N2N2Nf^d6"}, "image_id": 689, "id": 11920}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 30.0, 21.0, 29.0], "area": 377, "segmentation": {"size": [512, 512], "counts": "T1b0]?2N2N3N1N1O0001O2N2O1N3M2N2N2N2N2O2M2N2Ne^e7"}, "image_id": 689, "id": 11921}, {"iscrowd": 0, "category_id": 1, "bbox": [403.0, 36.0, 42.0, 40.0], "area": 766, "segmentation": {"size": [512, 512], "counts": "maY61n?2N3M2N2N2N2O1N3M2N2N2N2N0001O00000001O0001O0000000001O01O1O2N2N2N3N1N2N2N2N2N3M2O_^Q1"}, "image_id": 689, "id": 11922}, {"iscrowd": 0, "category_id": 1, "bbox": [119.0, 42.0, 62.0, 52.0], "area": 1648, "segmentation": {"size": [512, 512], "counts": "Pbk12m?2N2N2O1N2N3M2N2N2N2O10000O2M2N2N2N2N2N2O1N3M2N2O10O1N3M2N0000001O01O0000000000000100O2N2N2N2N2N2N2N3N1N2N2N2N2N2N2N3M2O1NS^U5"}, "image_id": 689, "id": 11923}, {"iscrowd": 0, "category_id": 1, "bbox": [337.0, 44.0, 54.0, 58.0], "area": 1617, "segmentation": {"size": [512, 512], "counts": "`bX52m?2O2M2N2N2N2N2N2N2N3M2O1N2N2N2N2N2N2N1O0O1N3M2N2N2N2001O1O2N100O2N1O1O2N1O1O2N100O2N1O1O2N1N2L5L3M3M3L5L3M_^l1"}, "image_id": 689, "id": 11924}, {"iscrowd": 0, "category_id": 1, "bbox": [488.0, 45.0, 24.0, 33.0], "area": 390, "segmentation": {"size": [512, 512], "counts": "RRd72m?2N3M2N2O1N2N2N2N2N000000000001O000001O0000000dN"}, "image_id": 689, "id": 11925}, {"iscrowd": 0, "category_id": 1, "bbox": [83.0, 70.0, 29.0, 29.0], "area": 394, "segmentation": {"size": [512, 512], "counts": "gbY12m?2N2N2O1N3M2N2N2N2N0001O0000000001O0002N2N2N2N2N3N1N2N2NamW6"}, "image_id": 689, "id": 11926}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 74.0, 65.0, 65.0], "area": 1981, "segmentation": {"size": [512, 512], "counts": "`3`0_?2\\O_OfAc0X>_OfAc0X>@eAb0V>YO`A78b0V>ChA?V>ChA`0U>BiA`0U>BjA?T>CjA?T>f00001O00000001O2N2N2N2N3M2N2N2O1N2N2N2N2N2N2N2N3M2N2N2N2O1N1O00000001O2N2N2N2N000001O0Lg@BY?>i@@W?a05N2N2N2N2N2N2N2NU]o6"}, "image_id": 689, "id": 11927}, {"iscrowd": 0, "category_id": 1, "bbox": [451.0, 74.0, 39.0, 32.0], "area": 599, "segmentation": {"size": [512, 512], "counts": "ibQ72m?2N2N2N2N2O1N2N2N2N2N2N2N2N2N2N11N3M2N2N2N2N2O1N2N2N2N2O11O00000N10O2N2N2N2NSm:"}, "image_id": 689, "id": 11928}, {"iscrowd": 0, "category_id": 1, "bbox": [379.0, 78.0, 46.0, 60.0], "area": 1421, "segmentation": {"size": [512, 512], "counts": 
"Zcm52m?2b@Nk>4SANk>4SANk>4SANk>4SANk>4SANk>4SANk>4TAM`>DeAR1Y>POeAS1X>oNfAS1X>oNfAS1X>;O0000000001O3M2N2N2N2N2N2N2O1N2N2N2N2N2N2N2N2N2N2N2N3M2N2N2N2N2Ng\\[1"}, "image_id": 689, "id": 11929}, {"iscrowd": 0, "category_id": 1, "bbox": [160.0, 79.0, 55.0, 50.0], "area": 1422, "segmentation": {"size": [512, 512], "counts": "XS`23l?2N2N2N2N2N2N2N2O1N2N2N3M2N2N2N2N2N2N2O1000O1N2N1O000000000000000001O01O001O2N3M2N2N2N2N2N2N2O1N2N2N2N2N3M2NP]d4"}, "image_id": 689, "id": 11930}, {"iscrowd": 0, "category_id": 1, "bbox": [53.0, 99.0, 28.0, 28.0], "area": 409, "segmentation": {"size": [512, 512], "counts": "_cj02m?2N2N2N2N3M2N2N2O1N2N2N2N2OO2N2N2N2N3M2O1N2N2N2N2N2N2N`\\g6"}, "image_id": 689, "id": 11931}, {"iscrowd": 0, "category_id": 1, "bbox": [126.0, 105.0, 29.0, 29.0], "area": 442, "segmentation": {"size": [512, 512], "counts": "gSo11n?2N2N2N2N3N1N2N2N2N2N2N2N2N1O01O2N3M2N2N2N2O1N2N2N2N3M2NZ\\b5"}, "image_id": 689, "id": 11932}, {"iscrowd": 0, "category_id": 1, "bbox": [479.0, 114.0, 33.0, 39.0], "area": 709, "segmentation": {"size": [512, 512], "counts": "Vd_72m?2N2N2N2N2N3M2O1N2N2N2N2N2N3M2N0001O000000001O2N2O1N2Kj@]OY?`05N2N2N2NUL"}, "image_id": 689, "id": 11933}, {"iscrowd": 0, "category_id": 1, "bbox": [403.0, 119.0, 60.0, 58.0], "area": 1661, "segmentation": {"size": [512, 512], "counts": "fdY61n?2U@Od?3Z@Od?8O1N2N2N2N2N2N3M2N2N2000000N2N2N2N2N0001O000001O000000000000000001O000001O02N2N2N2N2N3M2N2N2N2N2N2O1N2N2N2N2N3M2Ne[h0"}, "image_id": 689, "id": 11934}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 123.0, 58.0, 45.0], "area": 1354, "segmentation": {"size": [512, 512], "counts": "^do22m?2N2N2N2N2N2N2O2M2N2N2N2N2N2N2N2N3N1N2N2O1000O1N2N3M2N2N2N2O1N000000001O00000001O00000001O2N2N2N2N3N1N2N2N2N2N2N2Ni[S4"}, "image_id": 689, "id": 11935}, {"iscrowd": 0, "category_id": 1, "bbox": [31.0, 131.0, 56.0, 51.0], "area": 1881, "segmentation": {"size": [512, 512], "counts": "kd?1o?4K6K4L5K4K5LOGXO\\Ah0d>\\OXAd0h>:O010LkN]AU1c>4O10O1000O10O100000O0100000O10O1000O10O100000O0100000O14K6K1O000O01000GUA]Ok>c0:O4L4L5J5LR[d6"}, "image_id": 689, "id": 11936}, {"iscrowd": 0, "category_id": 1, "bbox": [139.0, 137.0, 8.0, 8.0], "area": 35, "segmentation": {"size": [512, 512], "counts": "\\dU22m?2N2N20N2O1N2Nd[f5"}, "image_id": 689, "id": 11937}, {"iscrowd": 0, "category_id": 1, "bbox": [436.0, 154.0, 60.0, 61.0], "area": 1944, "segmentation": {"size": [512, 512], "counts": "jUj61n?2N2N2N2N3M2^@C^?b0M2N2O1N2N2N2N2N2N201N1IkNaAV1^>kN`AW1^>kN`AW1^>7N2N0000001O000001O00001O2N2N2O1N3M2N2N2N001O00001O2O1N2N2N2N2N2N3M2Fb@O`?0b@M`?1b@N_?0lj7"}, "image_id": 689, "id": 11938}, {"iscrowd": 0, "category_id": 1, "bbox": [219.0, 155.0, 62.0, 53.0], "area": 1555, "segmentation": {"size": [512, 512], "counts": "be]32m?2N2N2N2O1N2N2N2N2N2N3M2O1001O000000000000N3M2N2N2N2OO000001O0000000000000001O000001O0002N2N2N2N2N3M2N2N2N2O1N2N2N2N2N2N3McZc3"}, "image_id": 689, "id": 11939}, {"iscrowd": 0, "category_id": 1, "bbox": [145.0, 158.0, 35.0, 36.0], "area": 631, "segmentation": {"size": [512, 512], "counts": "\\eX21n?3M2N2N2N2N2N2N2N2N2N3M2N2N21O000000000O1N2O1N2N2N2N2N2N2N2N2N2N2N2N^jU5"}, "image_id": 689, "id": 11940}, {"iscrowd": 0, "category_id": 1, "bbox": [87.0, 164.0, 48.0, 29.0], "area": 779, "segmentation": {"size": [512, 512], "counts": "_e[11o?4L4K5L4L1O0O0100000O0100000O01000000O0100000O0100000O0100000O0100000O0Ld@F\\?:5O10O100000O012N5K4KbZl5"}, "image_id": 689, "id": 11941}, {"iscrowd": 0, "category_id": 1, "bbox": [505.0, 175.0, 7.0, 14.0], "area": 55, "segmentation": {"size": [512, 512], "counts": 
"del72m?2N2N2N2O1N3_J"}, "image_id": 689, "id": 11942}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 184.0, 24.0, 25.0], "area": 303, "segmentation": {"size": [512, 512], "counts": "Ufo22m?2N3M2N2N2N2N2N2N2OO000000002N2N2N3M2N2N2N2N2OnYd4"}, "image_id": 689, "id": 11943}, {"iscrowd": 0, "category_id": 1, "bbox": [29.0, 188.0, 83.0, 46.0], "area": 2079, "segmentation": {"size": [512, 512], "counts": "^f>4l?5J6K4L5K5K0O10O1000O10O100000O10O100000O10O1000O1000OHWO[Ai0e>9000O010000000O01000000O010004L5K5J2O00OMi@_OW?a040000O010000000O0100000O10O100000O010000000O010000000O012N4L5J6K\\iW6"}, "image_id": 689, "id": 11944}, {"iscrowd": 0, "category_id": 1, "bbox": [479.0, 194.0, 33.0, 61.0], "area": 1204, "segmentation": {"size": [512, 512], "counts": "^g_71o?1N2@MPA6m>LQA6m>LQA6m>LQA6m>LQA6m>LQA6m>`0N2N2O2EkNfAW1X>kNfAW1X>lNeAT1[>800001O00000001O0002N2N2N2N2N3M2NgI"}, "image_id": 689, "id": 11945}, {"iscrowd": 0, "category_id": 1, "bbox": [257.0, 195.0, 59.0, 48.0], "area": 1525, "segmentation": {"size": [512, 512], "counts": "ffP41n?2N2N2N2N2N2O1N3M2N2N2N2N2N2N2TAROc>P1\\AQOb>Q1\\AQOc>U101O001010000O1N2N0101N2N2jN\\Am0f>QO\\Am0m>N1O0000001O0001O00000002N2N2N2N2N2N2O1N2N2N3M2N2N2N\\iQ3"}, "image_id": 689, "id": 11946}, {"iscrowd": 0, "category_id": 1, "bbox": [294.0, 229.0, 56.0, 53.0], "area": 1500, "segmentation": {"size": [512, 512], "counts": "oWc42n?1N2N2N2N2N3M2N2N2N2N2O1N2N3M2N2N2N2N2N2N2O2ON2N10O01O00000000000000001O2N2N2N2N2N3N1N2N2N2N2N2N2N2N2N3M2N2N2NWh`2"}, "image_id": 689, "id": 11947}, {"iscrowd": 0, "category_id": 1, "bbox": [212.0, 234.0, 28.0, 27.0], "area": 408, "segmentation": {"size": [512, 512], "counts": "gWZ32m?2N2N2N2N2N2N3N1N2N2N2N1O000100O1O2N2N2N2@b@;c?N2N2O1N2N2NZhW4"}, "image_id": 689, "id": 11948}, {"iscrowd": 0, "category_id": 1, "bbox": [87.0, 236.0, 20.0, 15.0], "area": 206, "segmentation": {"size": [512, 512], "counts": "`g[11n?5L4L2N0O0100000O0100000O01000000O5L4L\\XZ6"}, "image_id": 689, "id": 11949}, {"iscrowd": 0, "category_id": 1, "bbox": [9.0, 255.0, 20.0, 14.0], "area": 192, "segmentation": {"size": [512, 512], "counts": "Sh42n?4L4L0O100000O0100000O10O100000O012N4LkWa7"}, "image_id": 689, "id": 11950}, {"iscrowd": 0, "category_id": 1, "bbox": [227.0, 258.0, 16.0, 17.0], "area": 151, "segmentation": {"size": [512, 512], "counts": "Xha32m?3M2N2N2N2O1N2000N2N2O1N3M2N2NfWV4"}, "image_id": 689, "id": 11951}, {"iscrowd": 0, "category_id": 1, "bbox": [204.0, 262.0, 32.0, 32.0], "area": 542, "segmentation": {"size": [512, 512], "counts": "cXV32m?2N2N3M2N2N2N2N2N2N2O1N2N2N2O1000O1N2N2N2N2N3M2N2N2N2N2N2N2N2OYgY4"}, "image_id": 689, "id": 11952}, {"iscrowd": 0, "category_id": 1, "bbox": [255.0, 265.0, 27.0, 26.0], "area": 354, "segmentation": {"size": [512, 512], "counts": "fho31n?2N2N2N2N3M2N2N2N2O1N2N1O0002N2N2N2N2N2N2N2N2N2N2N2O[gb3"}, "image_id": 689, "id": 11953}, {"iscrowd": 0, "category_id": 1, "bbox": [504.0, 267.0, 8.0, 15.0], "area": 65, "segmentation": {"size": [512, 512], "counts": "aXl72n?1N2N2N2N2N2N2dG"}, "image_id": 689, "id": 11954}, {"iscrowd": 0, "category_id": 1, "bbox": [13.0, 269.0, 55.0, 35.0], "area": 1328, "segmentation": {"size": [512, 512], "counts": "eh65k?4K6K5K5K3M0O10O100000O010000000O01000001O1N10O100000O10O100000O10O1000O1000O1000O1000O1000O10O1003M5J5L5K5Kmfm6"}, "image_id": 689, "id": 11955}, {"iscrowd": 0, "category_id": 1, "bbox": [142.0, 270.0, 53.0, 48.0], "area": 1688, "segmentation": {"size": [512, 512], "counts": 
"hXW23m?4K4M4L2N0O4M4L4L3L5L4LO0100000O2O0000O01000O0100000O0100000O01000O0100001N2OO1000O13L5L4L3M4K5L3M4K5L3M^Vn4"}, "image_id": 689, "id": 11956}, {"iscrowd": 0, "category_id": 1, "bbox": [340.0, 281.0, 58.0, 51.0], "area": 1455, "segmentation": {"size": [512, 512], "counts": "`YZ51n?2N2N2N2N2N2N2O1N2N3M2N2000000001OO1N2N2N3M2N2N2N2N1O00000001O0000000000010O2N2N2N2N2N2N2N2N3M2N2N2O1N2N2N2N2N2N2Ndfh1"}, "image_id": 689, "id": 11957}, {"iscrowd": 0, "category_id": 1, "bbox": [104.0, 285.0, 28.0, 37.0], "area": 731, "segmentation": {"size": [512, 512], "counts": "RYd13l?5L4L5K4L5J5L3M00O0100000O10O100000O0100002M6K4L5K4@a@7i?JVfm5"}, "image_id": 689, "id": 11958}, {"iscrowd": 0, "category_id": 1, "bbox": [250.0, 290.0, 15.0, 14.0], "area": 108, "segmentation": {"size": [512, 512], "counts": "VYm31n?2N2N2N2O2N100000O1N2N2N2N2NgVk3"}, "image_id": 689, "id": 11959}, {"iscrowd": 0, "category_id": 1, "bbox": [192.0, 310.0, 54.0, 51.0], "area": 1503, "segmentation": {"size": [512, 512], "counts": "^ZP32n?2M3N2M3N2M3N2M3N2M3N2M3HROZAo0e>SOXAm0h>60O010O010O010O010O010O010O010O010O010O010O010O2O2M3N2M3N2M3N2M3N2M3N2M3N2M3N]eT4"}, "image_id": 689, "id": 11960}, {"iscrowd": 0, "category_id": 1, "bbox": [176.0, 314.0, 22.0, 19.0], "area": 193, "segmentation": {"size": [512, 512], "counts": "TZh22m?2N2N2N2N2N2N20N2N2N0000000000001O2N2N2N2NSfl4"}, "image_id": 689, "id": 11961}, {"iscrowd": 0, "category_id": 1, "bbox": [21.0, 317.0, 60.0, 41.0], "area": 1567, "segmentation": {"size": [512, 512], "counts": "Wj:5k?4L5J5L5K4L1N10O11O3M0O0100000O01000000KSAWOl>j07NO01000000O01000000O01000000O01000000O01000000O01000000O01000000O3N5K4L5K4K`Ug6"}, "image_id": 689, "id": 11962}, {"iscrowd": 0, "category_id": 1, "bbox": [377.0, 317.0, 61.0, 54.0], "area": 1734, "segmentation": {"size": [512, 512], "counts": "Rkl51g?1^@1a?0]@2a?0^@1`?9N2N3M2N2N2O1N2N2N2N0000001O2JmN]AU1a>lN]AV1a>6N1O0000001O01O3M00001O00002N2N2N2N2N2N3N0O1O000000002N2N2N2N2N2N2N3N1N2N2N2N2NbeT1"}, "image_id": 689, "id": 11963}, {"iscrowd": 0, "category_id": 1, "bbox": [294.0, 324.0, 29.0, 29.0], "area": 436, "segmentation": {"size": [512, 512], "counts": "bZc41n?2N2N2N2N2N2N2N3M2O1N2N2N2N1O02N2N2N2N2N3N1N2N2N2N2N2N2N_Un2"}, "image_id": 689, "id": 11964}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 336.0, 32.0, 24.0], "area": 530, "segmentation": {"size": [512, 512], "counts": "jj]13m?5J6K1O0ICk@=U?61000O1000O1000O1000O3N3M00000O10O100000O10O1005K5JTUR6"}, "image_id": 689, "id": 11965}, {"iscrowd": 0, "category_id": 1, "bbox": [422.0, 346.0, 27.0, 27.0], "area": 362, "segmentation": {"size": [512, 512], "counts": "Y[c62m?2N2N2N2O1N2N2N2N2N2N000000000002O1N2N2N2N2N2N2N2N2NmTo0"}, "image_id": 689, "id": 11966}, {"iscrowd": 0, "category_id": 1, "bbox": [331.0, 353.0, 35.0, 34.0], "area": 598, "segmentation": {"size": [512, 512], "counts": "bkU51n?2N2N2N2N2O1N3M2N2N2N2N2N2N2N2N01O00011N2N2N2N2N2N3M2N2N2N2O1N2N2N2N`dX2"}, "image_id": 689, "id": 11967}, {"iscrowd": 0, "category_id": 1, "bbox": [31.0, 365.0, 55.0, 42.0], "area": 1464, "segmentation": {"size": [512, 512], "counts": "ck?1n?3N3M3L4M3M3L3N3M3L4M3M2M3N0O0103M2M4M1O0O10O10O01000O01000O010O10O10O10O10O10O10O01000O012M4M2N3L4M3M3L3N3Mkcd6"}, "image_id": 689, "id": 11968}, {"iscrowd": 0, "category_id": 1, "bbox": [400.0, 367.0, 36.0, 36.0], "area": 630, "segmentation": {"size": [512, 512], "counts": "l[X62m?2O1N2N2N2N2N2N2N2N3M2N2N2N20000000000000O1N3M2N2N2N2N2N2N2O1N2N2N2N2NlcU1"}, "image_id": 689, "id": 11969}, {"iscrowd": 0, "category_id": 1, "bbox": [213.0, 371.0, 14.0, 15.0], "area": 112, 
"segmentation": {"size": [512, 512], "counts": "jkZ32m?2N2N2N2N2N2N02N2N2N2N2N2NXT^4"}, "image_id": 689, "id": 11970}, {"iscrowd": 0, "category_id": 1, "bbox": [250.0, 371.0, 61.0, 50.0], "area": 1713, "segmentation": {"size": [512, 512], "counts": "Z\\m31n?2N2M2O2M3N2N1N3i@@k>b0SA@k>b0SA@j>b0TA@k>l0N2M2O2M3N2O0100000O01O1M2O2M3N2N11000O10O1000O10O10O1N1N10O0100O010O2O1O2M3N2M2O2M3N1O2M3N2M2O2MnST3"}, "image_id": 689, "id": 11971}, {"iscrowd": 0, "category_id": 1, "bbox": [184.0, 372.0, 30.0, 31.0], "area": 494, "segmentation": {"size": [512, 512], "counts": "Q\\l22m?2N2N2O2M2N2N2N2N2N2N2N3M2O1N02N2N2N2N2N2N201M2O1N2N2N2IX@3l?Nlcd4"}, "image_id": 689, "id": 11972}, {"iscrowd": 0, "category_id": 1, "bbox": [122.0, 376.0, 34.0, 24.0], "area": 452, "segmentation": {"size": [512, 512], "counts": "R\\m11n?3N3M3L4M2NO0100O01000O0100O01000O0100O0100O01000O01000O2O3M3L3N3Lmca5"}, "image_id": 689, "id": 11973}, {"iscrowd": 0, "category_id": 1, "bbox": [450.0, 376.0, 59.0, 55.0], "area": 1504, "segmentation": {"size": [512, 512], "counts": "Z\\Q72m?2N2N2N2N2N2N2N2M3N2N2N2N2N2N2N2N1WAnNe>R1YAPOg>U1000N2N2000O1000000000000000000000N2N2N2N2N1O00000O101O2N2N2N2N2N2N2N2N2N2N2N2N[S1"}, "image_id": 689, "id": 11974}, {"iscrowd": 0, "category_id": 1, "bbox": [329.0, 395.0, 29.0, 29.0], "area": 425, "segmentation": {"size": [512, 512], "counts": "elT52m?2N2N2N2N2N3M2N2N2N2O100000000000O1N2N2N2N2N2N2N2N2N2N2NUc\\2"}, "image_id": 689, "id": 11975}, {"iscrowd": 0, "category_id": 1, "bbox": [146.0, 397.0, 21.0, 23.0], "area": 306, "segmentation": {"size": [512, 512], "counts": "i\\Y22g?O^@5_?M_@6^?8M2O000O010O01000O100O04M3M2M4M3L3NSS\\5"}, "image_id": 689, "id": 11976}, {"iscrowd": 0, "category_id": 1, "bbox": [52.0, 401.0, 57.0, 58.0], "area": 1825, "segmentation": {"size": [512, 512], "counts": "^]j03m?2M3N2M3N2M3H@l@b0Q?Al@b0R?8M3N2M3N2NO010O010O010O10O0KlN`AT1`>510O10O010O010O10O010O010O010O0103L3N2M3N3L3N2M4M2M3N2M100O4D[@7j?L3N\\RY6"}, "image_id": 689, "id": 11977}, {"iscrowd": 0, "category_id": 1, "bbox": [200.0, 411.0, 56.0, 54.0], "area": 1522, "segmentation": {"size": [512, 512], "counts": "h]T32m?2N2N2N2N2N2N2N2N2N2O1N3M2N2N2N2N2N2N2O100000N1O00000000001O00000000001O2N00000000002N2N2N2N2N2N2N2O1N2Ha@La?2a@La?2a@La?2obo3"}, "image_id": 689, "id": 11978}, {"iscrowd": 0, "category_id": 1, "bbox": [371.0, 413.0, 44.0, 38.0], "area": 638, "segmentation": {"size": [512, 512], "counts": "Zmi51n?2N3M2N2O1N2N2N2N2N2N3M2N2N2OO2N2N2N2N2N2N2N2O10001O0001O02N00000000000001OO2M2N2N2N2NTR`1"}, "image_id": 689, "id": 11979}, {"iscrowd": 0, "category_id": 1, "bbox": [395.0, 413.0, 17.0, 18.0], "area": 153, "segmentation": {"size": [512, 512], "counts": "RmU62m?2N2N2N2N2O101O000000N2N2N2N2N2Njba1"}, "image_id": 689, "id": 11980}, {"iscrowd": 0, "category_id": 1, "bbox": [177.0, 414.0, 24.0, 25.0], "area": 325, "segmentation": {"size": [512, 512], "counts": "]mh21n?2IO\\@3b?O\\@3b?7N2N2N2N2N2N11N2N1O1O2N2N2N2N2N2N2N2N2NhRk4"}, "image_id": 689, "id": 11981}, {"iscrowd": 0, "category_id": 1, "bbox": [408.0, 414.0, 57.0, 56.0], "area": 1418, "segmentation": {"size": [512, 512], "counts": "`]\\62m?2N3M0000002O1N2N3M2N2N2N2N2N2N3M2O1N2N2O10001O000001O000001O000000000001O0N2N2N1O002N2N2N2N2N2N2N2N3M2N2N2N2N2NPRg0"}, "image_id": 689, "id": 11982}, {"iscrowd": 0, "category_id": 1, "bbox": [124.0, 423.0, 34.0, 38.0], "area": 680, "segmentation": {"size": [512, 512], "counts": 
"f]n11m?4M3N2M3N2M4M2M10O10O100O3N2M4M1N010O010O010OTOSAf0n>ZOTAd0k>\\OXAa0i>_OYA?f>A]A=e>A]ATO]Am0b>VO\\Ai0e>9O01O01O01O010O010O2N2O2O0N3N1L5M2N3N2M2N3N1N3N1N3M2O2MSQl5"}, "image_id": 689, "id": 11987}, {"iscrowd": 0, "category_id": 1, "bbox": [334.0, 451.0, 30.0, 39.0], "area": 578, "segmentation": {"size": [512, 512], "counts": "b^W51n?2N2N2N2N2N3N1N2N2N2N2N2j@XOQ?n0M2N2N200000N2N2@QALR?2o@LS?2o@LS?2o@LS?2o@LS?2o@LS?2?N2N`aY2"}, "image_id": 689, "id": 11988}, {"iscrowd": 0, "category_id": 1, "bbox": [282.0, 452.0, 17.0, 19.0], "area": 166, "segmentation": {"size": [512, 512], "counts": "Y^]41n?3M2O1N3M2N3N10010OO2N1N2N3N1N2NaQZ3"}, "image_id": 689, "id": 11989}, {"iscrowd": 0, "category_id": 1, "bbox": [161.0, 454.0, 35.0, 28.0], "area": 461, "segmentation": {"size": [512, 512], "counts": "fn`21n?3N2M2N3N2M2O1N1O01O010O01O01O010O01O01O010O01O01O010O2O2M2N3N2M2O2M`am4"}, "image_id": 689, "id": 11990}, {"iscrowd": 0, "category_id": 1, "bbox": [271.0, 467.0, 21.0, 20.0], "area": 218, "segmentation": {"size": [512, 512], "counts": "lnW41n?2N3N1N2N3M2O1N2N0001O010O3M2N2O2M2N2O2MTa]3"}, "image_id": 689, "id": 11991}, {"iscrowd": 0, "category_id": 1, "bbox": [298.0, 487.0, 28.0, 25.0], "area": 399, "segmentation": {"size": [512, 512], "counts": "d_e41n?2N2N2N2N2O1N3M2N2N2N1O1O1O11O1O1O1N2N2N2O1N2N2N2N2N3M\\`l2"}, "image_id": 689, "id": 11992}, {"iscrowd": 0, "category_id": 1, "bbox": [339.0, 488.0, 48.0, 24.0], "area": 628, "segmentation": {"size": [512, 512], "counts": "joY51n?3M2N2N2N1O1O1O1O1O1O1O1O1O1O1O1O1O11O1O1O1O1O1O1O1O1O1O1O1O00O1O1001O1O1O1O1O1O1O1O1O1O1O1O1OQPn1"}, "image_id": 689, "id": 11993}, {"iscrowd": 0, "category_id": 1, "bbox": [126.0, 489.0, 37.0, 23.0], "area": 507, "segmentation": {"size": [512, 512], "counts": "c_o11;0W?3g@MX?5f@LZ?6c@K\\?7c@I]?=O1O100O1O100O100O1O11O1O2N2N1O2N1O1O00O1O100002N2N1O2N1O2N1O2NQP^5"}, "image_id": 689, "id": 11994}, {"iscrowd": 0, "category_id": 1, "bbox": [190.0, 492.0, 9.0, 8.0], "area": 41, "segmentation": {"size": [512, 512], "counts": "__o21o?1N2N3N0O0102M2NbPl4"}, "image_id": 689, "id": 11995}, {"iscrowd": 0, "category_id": 1, "bbox": [195.0, 493.0, 29.0, 19.0], "area": 327, "segmentation": {"size": [512, 512], "counts": "ioQ31n?2N3N1N2N2N1O1O100O1O1O1O1O1001O1O1O2N1O1O1O1O2N1N2N2O1NU`_4"}, "image_id": 689, "id": 11996}, {"iscrowd": 0, "category_id": 1, "bbox": [337.0, 0.0, 111.0, 55.0], "area": 3072, "segmentation": {"size": [512, 512], "counts": "P`X51o?001O1O1O1O1O1O001O1O1O1O1O001O1O1O1O1O1O001O1O1O1O1O001O1O1O1O1O1O001O1O1O1O1O001O1O1O1O1O1O001O1O1O1O1O001O1O1O1O1O1O001O1O1O1OO1O1O1O1O1N2O1O1O1O1N2O1O1O1O1O1N2O1O1O1O1N2O1O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1O1N2O1OQPP1"}, "image_id": 691, "id": 11997}, {"iscrowd": 0, "category_id": 1, "bbox": [325.0, 0.0, 184.0, 109.0], "area": 7406, "segmentation": {"size": [512, 512], "counts": "o`R51m?3N2N1O2N2M3N2N2N1O2M3N2N2N2N1O2N2000000O01000000000O01000000000O10O10000000O10O10000000O10O10000000O10O1000000000O01000000000O01000000000O01000000000O010N2N2N2M3N1O2N2N2N2M3N2N1O2N2M3N0000000O10O100000O10O100000O10O100000O10O100000O10O100000O10O100000O1001N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1O1N2O1O1OQ`1"}, "image_id": 691, "id": 11998}, {"iscrowd": 0, "category_id": 1, "bbox": [431.0, 305.0, 81.0, 118.0], "area": 4979, "segmentation": {"size": [512, 512], "counts": 
"njg61n?2M3N2N1O2N2N2N2N2N2M3N2N2N2N1O2N2N2N2N2M3N2N2N2N1O2N2N2N2M3N2N2N2N2N2N1O2N2N2M30000000000O010000000000000000000O0100000000000000000O0100000000000000000O10O1000ZE"}, "image_id": 691, "id": 11999}, {"iscrowd": 0, "category_id": 1, "bbox": [165.0, 314.0, 35.0, 38.0], "area": 659, "segmentation": {"size": [512, 512], "counts": "bjb23l?2N1O2M3N2N2N1N3N2N2M3N1O2N2M2O00O10O12N2M3N1O2N2M3N2N1O2M3N2N2M2O2Nkek4"}, "image_id": 691, "id": 12000}, {"iscrowd": 0, "category_id": 1, "bbox": [223.0, 346.0, 39.0, 36.0], "area": 508, "segmentation": {"size": [512, 512], "counts": "Sk_31n?2N2N1O2M3N2N2N20O10000000O10O10000000O010000000O10O10000000O10N2M3N2N2N1O2M_dl3"}, "image_id": 691, "id": 12001}, {"iscrowd": 0, "category_id": 1, "bbox": [363.0, 429.0, 149.0, 83.0], "area": 6775, "segmentation": {"size": [512, 512], "counts": "ooe51n?1O1O1O1O1O1O1N2O1O1O1O1O1O1O1O1O1O1N2O1O1O1O1O1O1O1O1O1N2O1O1O1O1O1O1O1O100001O1O1O1O1O1OO1O1O1O1O1O1N2O1O1O1O1O1O1O1O1O1O1N2O1O1O1O1O1O1O1O1O1N2O1O1O1O1O1O1O1O1O1N2O1O1O1O1O1O1001O1O1O1O001O1O1O1O1O1O1O1O1O1^NVBl0k=QOXBn0h=QOZBn0g=PO[Bo0f=oN\\BP1e=nN]BQ1d=mN^BR1c=lN_BS1b=kN`BT1a=jN`BV1V>O1O1O1O1OO1O1N2O1O1O11O001O1O1O00O1N2O1O1O1O1001O1O1O1O1O1O001O1O1O"}, "image_id": 691, "id": 12002}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 394.0, 55.0, 66.0], "area": 1969, "segmentation": {"size": [512, 512], "counts": "ZO01O01O0aNPBn0P>nNUBQ1k=lNXBU1h=fN\\BZ1V>O0001O01O00001M1M4K5M300O100000000000000000O10000000O100000000000000000O10000000O100000;E>BYRT7"}, "image_id": 692, "id": 12003}, {"iscrowd": 0, "category_id": 1, "bbox": [18.0, 15.0, 43.0, 43.0], "area": 1454, "segmentation": {"size": [512, 512], "counts": "oP9:c?6K4L4M3M2N2N2N1O2O1N101N101O0O10001O000000000000O101O000O2O0O2O0O2N2N2O1M3N3M2M5K5IT_Q7"}, "image_id": 693, "id": 12004}, {"iscrowd": 0, "category_id": 1, "bbox": [6.0, 52.0, 21.0, 24.0], "area": 300, "segmentation": {"size": [512, 512], "counts": "TR33j?3N3M2N3L3N2N30O010O01O01O0M4M2N3M2M3N3MX^b7"}, "image_id": 693, "id": 12005}, {"iscrowd": 0, "category_id": 1, "bbox": [354.0, 301.0, 14.0, 13.0], "area": 112, "segmentation": {"size": [512, 512], "counts": "eYa51l?3N3L301O01O01O010O00M4L3NbfW2"}, "image_id": 696, "id": 12006}, {"iscrowd": 0, "category_id": 1, "bbox": [401.0, 45.0, 22.0, 16.0], "area": 173, "segmentation": {"size": [512, 512], "counts": "daX61m?2M4M210O00010O010O010O010O00010O010O01M2M^^\\1"}, "image_id": 697, "id": 12007}, {"iscrowd": 0, "category_id": 1, "bbox": [220.0, 255.0, 72.0, 73.0], "area": 1928, "segmentation": {"size": [512, 512], "counts": "bY^33j?3M4M2M3M4M2M4L3N3L3M3N3M201O010O01O01O010O0FgNnAY1o=iNQBW1l=mNQBU1o=kNoAg0O_OQ>KmAf04_OP>h0SBXOl=e0WB[Oi=c0YB^Og=>]BAa=?aBA\\=`0gB@V=?mBAT=TOmAl0S>ROoAn0Q>POQB0Je0T>YOUB0Ig0R>WOVB1Jf0R>WOVB1Jf0R>WOVB1I:KMi>G^A:KLU?2m@LU?2l@MV?1l@MV?1>NRnl6"}, "image_id": 699, "id": 12013}, {"iscrowd": 0, "category_id": 1, "bbox": [74.0, 0.0, 18.0, 9.0], "area": 89, "segmentation": {"size": [512, 512], "counts": "PPU11o?001O1O1O1O1O001O1O1O00O1N2O1O1O1OQPb6"}, "image_id": 699, "id": 12014}, {"iscrowd": 0, "category_id": 1, "bbox": [396.0, 0.0, 102.0, 44.0], "area": 1879, "segmentation": {"size": [512, 512], "counts": "PPV61o?1O1O1O1O1O1O001O1O1O1O1O1O001O1O1O1O1O1O001O1O1O1O1O1O1O001O1O1O1O1O1O001O1O1O1O1O1O1O001O1O1O1O1O00D^AYOc>f0_AXOb>g0`AWOa>g0bAWO_>h0=O1O1O1O1N2O1O1O1O1O1N2O1O1O1O1O1O1N2O1O11O1O1O1O1O001O1O1OO1N2001O001O1O1O1OO1O1O2N2N2N2M2Omo6"}, "image_id": 699, "id": 12015}, {"iscrowd": 0, "category_id": 1, "bbox": [363.0, 297.0, 149.0, 179.0], "area": 13411, "segmentation": {"size": [512, 512], 
"counts": "ike53l?2M2O2M3M2O2M3N1N3N1N3M3N1N3N2M2N3N1oNgNYC\\1e1n?2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N1O2N2N2M3N2N2N2N2N2O1000000O100000000000000000000000000000O1000000000000000000000000000O1000000000N2N2N2N2N2N2N2N2N2N2N2N2N2N2N00000000O100000000000Km@^OU?`0m@^OU?`07N2N2N2N2N2N2N2Nnan5"}, "image_id": 699, "id": 12019}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 440.0, 75.0, 72.0], "area": 2720, "segmentation": {"size": [512, 512], "counts": "h=X2i=O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O001O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O001O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O001O1O1O1O1O1OQPj6"}, "image_id": 699, "id": 12020}, {"iscrowd": 0, "category_id": 1, "bbox": [160.0, 0.0, 14.0, 5.0], "area": 45, "segmentation": {"size": [512, 512], "counts": "PP`21o?001O00001O001O001O000000NRPY5"}, "image_id": 701, "id": 12021}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 0.0, 14.0, 10.0], "area": 104, "segmentation": {"size": [512, 512], "counts": "T`m23k?2N3O001O00001O000000M3M3NR`k4"}, "image_id": 701, "id": 12022}, {"iscrowd": 0, "category_id": 1, "bbox": [204.0, 0.0, 40.0, 25.0], "area": 682, "segmentation": {"size": [512, 512], "counts": "ZPV33k?3L3N3M2O2O00001O001O001O00001O001O00001O001O001O00001O0000N2N2M3N2M3N2N2M3N2NRPV4"}, "image_id": 701, "id": 12023}, {"iscrowd": 0, "category_id": 1, "bbox": [257.0, 0.0, 57.0, 43.0], "area": 1486, "segmentation": {"size": [512, 512], "counts": "h`P42l?2N3M2O2M2N3M3N1N3M2O2M2N3N2O001O001O001O1O001O001O001O1O001O0000N2O1N2N2N2O1N2N2O1N2N2N2O1N2N2N2O1N2N2N2O1N2N2NRPS3"}, "image_id": 701, "id": 12024}, {"iscrowd": 0, "category_id": 1, "bbox": [314.0, 0.0, 64.0, 75.0], "area": 2577, "segmentation": {"size": [512, 512], "counts": "jQm42l?2N3L3N2N3M2N3M2M4M2N2N3M2N3O010O01O01M2N3L3N2M4M2N3L3N3M2M3N3M201O001O00N2N2N2M3N2N2M4M2M4M2N3L3N2N3L3N3L3N3M2M3N3M2M4M2M3N3MioR2"}, "image_id": 701, "id": 12025}, {"iscrowd": 0, "category_id": 1, "bbox": [389.0, 0.0, 24.0, 9.0], "area": 116, "segmentation": {"size": [512, 512], "counts": "P`R61o?001O00001O001O00001O001O001O00001O001O00N2M3NR`a1"}, "image_id": 701, "id": 12026}, {"iscrowd": 0, "category_id": 1, "bbox": [476.0, 0.0, 21.0, 7.0], "area": 77, "segmentation": {"size": [512, 512], "counts": "PP^71o?00001O00001O001O00001O00001O00001OO1M3NR`7"}, "image_id": 701, "id": 12027}, {"iscrowd": 0, "category_id": 1, "bbox": [470.0, 15.0, 23.0, 26.0], "area": 352, "segmentation": {"size": [512, 512], "counts": "SQ[71m?2N2M4M2M4M2N2M4N101O010O01O0O1M4M2M4M2M3N3L__9"}, "image_id": 701, "id": 12028}, {"iscrowd": 0, "category_id": 1, "bbox": [400.0, 18.0, 74.0, 85.0], "area": 3463, "segmentation": {"size": [512, 512], "counts": "fQX62l?3L3N2M4M2N3o@@]>c0aA_O]>d0`A^O^>d0_A@]>c0aA_O]>T1M2M3N3L3010O01O3NO0010O010O0010O0010O0010O0010O01M2N2MTB[Nd=b1[BaNe=`1XBcNh=\\1VBfNj=Z1SBjNm=d100010O010O010bNUBe0k=YOWBg0i=VO[Bj0e=SO]Bm0c=PO`BQ1`=lNcBS1]=kNeBV1Z=gNjBX1o=010O00N3M2N3L3N3L3N2N3Gg@F\\?88L3N3Momb0"}, "image_id": 701, "id": 12029}, {"iscrowd": 0, "category_id": 1, "bbox": [264.0, 41.0, 29.0, 30.0], "area": 525, "segmentation": {"size": [512, 512], "counts": "mQT42k?3N3L3N3L3N3L3N210O0010O0010O0010O0010O00N3L3N3L3N2M4M2Mb^]3"}, "image_id": 701, "id": 12030}, {"iscrowd": 0, "category_id": 1, "bbox": [488.0, 68.0, 24.0, 91.0], "area": 1219, "segmentation": {"size": [512, 512], "counts": "eSd71m?2M4M2N2M4YABh=`0UBC\\OMW>d0\\BDb=>[BEb=>\\BEa=>[BEb=>[BFb=<\\BFb==ZBFc==]BD`=?_BA^=b0bB^O\\=d0cB^OY=f1N3M2O110O0kM"}, "image_id": 701, "id": 12031}, {"iscrowd": 0, "category_id": 1, "bbox": [19.0, 234.0, 84.0, 156.0], "area": 3662, 
"segmentation": {"size": [512, 512], "counts": "lk92k?4M2N2M4M2N3L3N2N3L3N3M2N2M4M2N3L3N3M2M3N1O0O01000O01000O01000O01000O01000O01000O01000O010000O01000O01000O01000O01000O01000O103M2M4M2N3L3N2N3L3N3M2M4M2N3L3N2N3L3N3M2M`X\\6"}, "image_id": 701, "id": 12032}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 446.0, 53.0, 57.0], "area": 1833, "segmentation": {"size": [512, 512], "counts": "n=Y1g>10O01O010O01O010O010O010O010O010O010O010O010O010O010O010O010O010M2N3M000000002N2N3M2N3M2M4M2N3M2N3M3M2OdQU7"}, "image_id": 701, "id": 12033}, {"iscrowd": 0, "category_id": 1, "bbox": [306.0, 460.0, 16.0, 15.0], "area": 132, "segmentation": {"size": [512, 512], "counts": "b^i42l?3M2N3O010O010O10O010O10ON3M2N`an2"}, "image_id": 701, "id": 12034}, {"iscrowd": 0, "category_id": 1, "bbox": [4.0, 0.0, 49.0, 35.0], "area": 1148, "segmentation": {"size": [512, 512], "counts": "aP24j?2M4M2M3N3L3N3O00001O001O00001O001O00001O001O001O00001O001O00001O001OO1M3N2M3N2M3N2M3N2M3N2N2M3NR`U7"}, "image_id": 702, "id": 12035}, {"iscrowd": 0, "category_id": 1, "bbox": [44.0, 0.0, 71.0, 54.0], "area": 2163, "segmentation": {"size": [512, 512], "counts": "\\Qf03k?2M4L3N2M4L3N2M4L3M4N10001O000M4L301O01L3M3M4M2M3M21N3L3M3N3N10001O00001O00N2N2M3001O00001O00001O001O00001O00001ON2M3N2M3M3M3M3N2M3M3M3N2M3MS`V6"}, "image_id": 702, "id": 12036}, {"iscrowd": 0, "category_id": 1, "bbox": [114.0, 0.0, 52.0, 82.0], "area": 2297, "segmentation": {"size": [512, 512], "counts": "YRi12k?3oNMSB6i=NSB5k=MSB5j=ORB5k=MRB6k=MSB6j=MRB6k=MSB5j=NSB6j=MSB5j=S1M2M3N3L3O101O00O1M3M3N2M3N2M3N2M3M3N2M3N2M3N2M3M3N2M3N2M3N2M3N2M3M3N2M3N2M3N2MSP]5"}, "image_id": 702, "id": 12037}, {"iscrowd": 0, "category_id": 1, "bbox": [180.0, 0.0, 16.0, 6.0], "area": 53, "segmentation": {"size": [512, 512], "counts": "PPj21o?00001O001O001O00001O001O00N2NRPn4"}, "image_id": 702, "id": 12038}, {"iscrowd": 0, "category_id": 1, "bbox": [223.0, 0.0, 34.0, 28.0], "area": 602, "segmentation": {"size": [512, 512], "counts": "_`_32l?2N3L3N3M2M3O2O001O001O00001O001O010O00010O010O010ON2N3L3N3M2M4M2Nk_o3"}, "image_id": 702, "id": 12039}, {"iscrowd": 0, "category_id": 1, "bbox": [260.0, 0.0, 17.0, 13.0], "area": 131, "segmentation": {"size": [512, 512], "counts": "VPR41m?3M2N3O001O010O01O010O01O01M2N3Mm_e3"}, "image_id": 702, "id": 12040}, {"iscrowd": 0, "category_id": 1, "bbox": [280.0, 0.0, 91.0, 39.0], "area": 2057, "segmentation": {"size": [512, 512], "counts": "VP\\42l?2N3M201O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O00001ON2N2N2M3N2M3N2N2M3N2M3N2N2M3N2MS`V2"}, "image_id": 702, "id": 12041}, {"iscrowd": 0, "category_id": 1, "bbox": [443.0, 0.0, 69.0, 30.0], "area": 1151, "segmentation": {"size": [512, 512], "counts": "S`m61m?2O2O001O001O00001O001O001O00001O001O00001O001O001O00001O001O001O00001O001O00001O001O001O00001O001O001O00001O010O00010O010ON3M2N2M4M2N3LM"}, "image_id": 702, "id": 12042}, {"iscrowd": 0, "category_id": 1, "bbox": [510.0, 0.0, 2.0, 1.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "PPo71o?00"}, "image_id": 702, "id": 12043}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 7.0, 113.0, 75.0], "area": 3722, "segmentation": {"size": [512, 512], "counts": "^QR62l?3L3N2M4M2N3L3N2M4M2M3N3L3N3M2M310O010O00010O01M2N3L3N210O0010O010O00010O01O01O010O01O010O01O01M2M40O010O00010O010O00010O010O01O01O010O01O01PAUOj>j0TAYOk>n00010O010O01O01O010O01O01O010O010O01O01O010O01O010O01O01O010O010O00010O010UO\\A8d>FhA0X>MP1Mb^5"}, "image_id": 702, "id": 12044}, {"iscrowd": 0, 
"category_id": 1, "bbox": [158.0, 20.0, 47.0, 67.0], "area": 1959, "segmentation": {"size": [512, 512], "counts": "XR_21l?4M2M4M2N2M4M2N3N1010O00UO[O\\Be0b=AYB?g=GRB;m=JlA8T>k0O0O101O001O001N10001O001N101O0000N2M2O2M3N2M3N1N9H7H9H7H9Hb^i4"}, "image_id": 702, "id": 12045}, {"iscrowd": 0, "category_id": 1, "bbox": [211.0, 28.0, 67.0, 83.0], "area": 2450, "segmentation": {"size": [512, 512], "counts": "obY32m?2M2N3N2M2ZOEfA=X>EeA=Y>EeA>X>EfA=X>EeA=Y>EeA>X>EfAEeA>X>g0N1N3M2O2O10O10O10O0N3N2M2N3N2M2N100O00010O01O01O010O00010O01O01O2O2M2N3N2M2N3N1N3N2M2N3N2M2N3N1N3N2M2N3Nind3"}, "image_id": 702, "id": 12046}, {"iscrowd": 0, "category_id": 1, "bbox": [257.0, 55.0, 63.0, 88.0], "area": 2495, "segmentation": {"size": [512, 512], "counts": "UdP41m?2\\ONXA5e>NYA4d>NZA4e>NXA5e>NYA3e>OYA4d>OYA4e>MZA4d>f0M2O2M3M2O2M2N2OO00010O00010O000101N2N2OO01O01O01O03M2O2M3M2O2M3M2O2M2N3N2M2N3N2M2N3N2M2N3N2M2N3N2M2N3N1Njmo2"}, "image_id": 702, "id": 12047}, {"iscrowd": 0, "category_id": 1, "bbox": [326.0, 65.0, 86.0, 64.0], "area": 2690, "segmentation": {"size": [512, 512], "counts": "eSS52l?2N3M2N3N1N3M2N3M2N3M2N3N2N1010O010O10OM4M2N2N1O110O0O1O2N1OFFRA8n>JQA5Q?Kn@6R?<00K5I8O00000001O01O001O1O01O000001O0001O00000gN`AS1f>000001O01O000001O00L5M200000O10001O01O0000L5I6K5J6K5J6Klma1"}, "image_id": 702, "id": 12048}, {"iscrowd": 0, "category_id": 1, "bbox": [322.0, 70.0, 19.0, 19.0], "area": 208, "segmentation": {"size": [512, 512], "counts": "_RQ53l?2M2N3O10O010OO2N2M2N30O001M3N1J_@Kd?2^@Mc?1m]e2"}, "image_id": 702, "id": 12049}, {"iscrowd": 0, "category_id": 1, "bbox": [427.0, 93.0, 68.0, 61.0], "area": 2163, "segmentation": {"size": [512, 512], "counts": "Pde62l?2M3N3L3N3M2O20O00010O010O00010ON3M2M4M2N2O2O010M2XAmNa>Z1M3N3L3O2O01O010O010O01O01O010O01O010O01O010M2M3N3M5@YA^Ok>?WA_Ok>?XA]Ok>`0XA^Ok>a0:010O010O00O2M2N3L3N3Mb\\8"}, "image_id": 702, "id": 12050}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 119.0, 20.0, 41.0], "area": 433, "segmentation": {"size": [512, 512], "counts": "g3Y1h>M2O2LUAPOm>m04O2N2N20O01N2M2O2M3N2M2O2K[@Lg?15Omke7"}, "image_id": 702, "id": 12051}, {"iscrowd": 0, "category_id": 1, "bbox": [278.0, 131.0, 68.0, 69.0], "area": 2677, "segmentation": {"size": [512, 512], "counts": "UU[41m?3L3N2M4M2M4N10001M2M4M2M3M4M2O20O00010O0010dAkNk=U1SBmNm=T1oAPOQ>o0mASOS>m0jAWOV>Y100010ON3L3N2O2O01O01O010O01O01O010O01O0O1N3L3M4[NgA_1_>L3N2M4M2M3N3L3M4M2M3N3L3N3L3N2M`kb2"}, "image_id": 702, "id": 12052}, {"iscrowd": 0, "category_id": 1, "bbox": [363.0, 134.0, 101.0, 56.0], "area": 3406, "segmentation": {"size": [512, 512], "counts": "ode51l?3N2M4M2M4M2M3RA^O\\>e0aA]O\\>f0aA^O\\>e0aA]O_>c0^AAa>o01O010O01O01O010O01O010O01O01O01gN_AR1`>kNcAU1d>010O010O00010MO100O010O1200010O01O01O010O01O01O010O01O01M2N3N110O01O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01OO2M2M4M2M3N3L3N3L3N2M4MQkg0"}, "image_id": 702, "id": 12053}, {"iscrowd": 0, "category_id": 1, "bbox": [16.0, 140.0, 57.0, 74.0], "area": 1966, "segmentation": {"size": [512, 512], "counts": "aU81m?2M3M4M2M4M2M301O010O0]A[Ol=d0QB@n=a0oAAQ>?lADU>8fAJ[>5bAO]>2`A0`>h010O010O00010O001M2M3N3L010O010O010O02O2M4L3N3O00001N1N3L3M3ESAAP?\\1iAgNW>a1010O010O00010O001O0M4L3N2M4M2M4M2M4L3N2M4M2M4M2M4L3NiZS6"}, "image_id": 702, "id": 12055}, {"iscrowd": 0, "category_id": 1, "bbox": [106.0, 178.0, 65.0, 89.0], "area": 2749, "segmentation": {"size": [512, 512], "counts": "hWe12l?2N3N2M2N3N2M2O2M2N3N2M2N3N2M2N3M3L3N3M3L3N3M2M4M3L3N3M0O2O001N100010O2N2O2M2N3N1N2O2M2N3N1N2M4M2M4M2M4M2M3N3L3N3L3N2N3L3N3L3N2MVZZ5"}, "image_id": 702, "id": 12056}, {"iscrowd": 0, "category_id": 
1, "bbox": [0.0, 212.0, 25.0, 31.0], "area": 494, "segmentation": {"size": [512, 512], "counts": "d6h0Y?O01O01O01O01O010O01O01O010O01O01Ah@4X?Jk@6U?Fn@:\\?01L3N2M4MjXc7"}, "image_id": 702, "id": 12057}, {"iscrowd": 0, "category_id": 1, "bbox": [163.0, 214.0, 59.0, 49.0], "area": 1745, "segmentation": {"size": [512, 512], "counts": "^ga23m?;E;E0O100000000000000O100000O100000O100000000EXO^Ah0_>[OaAe0]>^ObAa0]>AaAa0\\>BbA`0_>`0001O001O00N21O0O2O001O001O000O2M2N3M2M4M2N3M2M4M2N3L3N3M2NPi`4"}, "image_id": 702, "id": 12058}, {"iscrowd": 0, "category_id": 1, "bbox": [39.0, 223.0, 28.0, 26.0], "area": 464, "segmentation": {"size": [512, 512], "counts": "_gc02l?3L3N3M2M3N3M210O01O01O010O00010O010O0O1O10M3N3L3M4M5JlXn6"}, "image_id": 702, "id": 12059}, {"iscrowd": 0, "category_id": 1, "bbox": [205.0, 241.0, 66.0, 84.0], "area": 2588, "segmentation": {"size": [512, 512], "counts": "]iV33k?3M2M3N3L3N7H3N3L3N2N3L3N3L3N2M4M2N3L3N2M4M2MWOYB@d=`0_B_O_=`0dBA[==hBBX=[<^OhCb0X<\\OgCh0f=2M3N3M2M4M2OO3L3N3M2M3N3L3N3L3N2M4M2N3L3NSWh3"}, "image_id": 702, "id": 12060}, {"iscrowd": 0, "category_id": 1, "bbox": [10.0, 251.0, 32.0, 32.0], "area": 620, "segmentation": {"size": [512, 512], "counts": "^X54j?2N3L3N2M4M2M4M20010O010O010O00010O010O00010OO2M2M3N3M2M4M2M4MmgZ7"}, "image_id": 702, "id": 12061}, {"iscrowd": 0, "category_id": 1, "bbox": [139.0, 260.0, 28.0, 29.0], "area": 486, "segmentation": {"size": [512, 512], "counts": "ghU23k?2M3N3L3N3L3N2N30O010O00010O010O00010O0N2M4M2M4M2M3N3LgW\\5"}, "image_id": 702, "id": 12062}, {"iscrowd": 0, "category_id": 1, "bbox": [9.0, 290.0, 74.0, 72.0], "area": 2759, "segmentation": {"size": [512, 512], "counts": "Qk41m?2M3N3L3N3M2M3N3M201N1N3M2M3N3M2M4M2M4M2N2M30N2M3N3M2M4M2N2M4O010OO2L3N2N3L3N3M20010O01O00001O010O010O01OM2OO10O101N4M2N2M4M201O0O2M2N2M4M2Di@MZ?0i@LZ?2h@LZ?1TWf6"}, "image_id": 702, "id": 12063}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 299.0, 1.0, 3.0], "area": 3, "segmentation": {"size": [512, 512], "counts": "[io73b6"}, "image_id": 702, "id": 12064}, {"iscrowd": 0, "category_id": 1, "bbox": [75.0, 303.0, 94.0, 59.0], "area": 3199, "segmentation": {"size": [512, 512], "counts": "djU1175[?9N2N3N1N2N2N100O1O100O1O1O2O0O1O0001O01O01O01O000O1O2M2O1N2O2M200101N1O1O2O010O00010O0001M2M4O00010O010O0001M2N30O00010O01O01O010O01O01OhN]AS1h>10O000N3O010O00010O010O0UOUAa0k>]OXAc0h>YO[Ag0o>01N1M4M2M3N3L3M4MbU[5"}, "image_id": 702, "id": 12065}, {"iscrowd": 0, "category_id": 1, "bbox": [188.0, 311.0, 20.0, 15.0], "area": 171, "segmentation": {"size": [512, 512], "counts": "PZn21m?2M3M4N110O00010O01O01O010O0001N1N3L3NUfg4"}, "image_id": 702, "id": 12066}, {"iscrowd": 0, "category_id": 1, "bbox": [185.0, 336.0, 40.0, 70.0], "area": 1828, "segmentation": {"size": [512, 512], "counts": "ikl22`?1i@1U?1h@39J[>6YA2:KY>d0cA_O[>c0cA@Z>b0cAAZ>c0cA_O[>U1M2N3L3N3M21O01O010O01O01O010O01O0O1N3L3N3L3N3EcAnN`>P1bAnN`>a0^AHm>5UAIm>4VAIn>4UAIm>4VAIm>5a0LXU_4"}, "image_id": 702, "id": 12067}, {"iscrowd": 0, "category_id": 1, "bbox": [266.0, 336.0, 46.0, 61.0], "area": 1580, "segmentation": {"size": [512, 512], "counts": "o[U44j?2M4L3N2M4M2M4M2M3N3L3N3L3N2M4L3N2M4N110O01O01O010O01M2M3N3L3M4M2M3N3L3N2M4M2M4M2M3M4M2M4MWeS3"}, "image_id": 702, "id": 12068}, {"iscrowd": 0, "category_id": 1, "bbox": [311.0, 344.0, 61.0, 72.0], "area": 2009, "segmentation": {"size": [512, 512], "counts": "[lk41n?2M3N2N2N2N2M3N2N2N2N2N1N3N2N2N2N2M3N2M3N2M1O100O100O1O100O010O1O100O1O12N1O2N1O2M2N3N1N2N3M2N3N1N3M2N3XOi@c0\\?N1N3M2N2N3M2O2MldU2"}, "image_id": 702, "id": 12069}, {"iscrowd": 0, "category_id": 1, 
"bbox": [368.0, 350.0, 60.0, 97.0], "area": 2698, "segmentation": {"size": [512, 512], "counts": "g\\h52k?4L3M3M4ZAAg=a0VBCf=`0WBCf=a0WBAh=`0UBDj==RBFo=9nAJR>6lAMT>3hA0X>l0010O010O00010O0001M2M3M4M2M3M1O01O01O0000010O0003M3N2M4L3M3N3L3M3M4M2M4L3M3N3L3M3N3L3M3M4M2MndY1"}, "image_id": 702, "id": 12070}, {"iscrowd": 0, "category_id": 1, "bbox": [11.0, 386.0, 67.0, 74.0], "area": 2496, "segmentation": {"size": [512, 512], "counts": "cl53j?3N2M4M210O000_@D_??O01O01O0N3O00010O010O01ON3M2M4M2kAVOR=l0lBVOU=j0gBYOY=g0eB\\OZ=e0bB^O_=a0_BB`=>]BEd=;YBGg=9VBKi=V110O01O01O010O01O01O010O01O01ON3L3N3L3N2M4M2M4M2M301O010UOTA70Hl>OVA70Jj>LZA60Jj>MXA70JT?3?MUch6"}, "image_id": 702, "id": 12071}, {"iscrowd": 0, "category_id": 1, "bbox": [223.0, 387.0, 31.0, 28.0], "area": 513, "segmentation": {"size": [512, 512], "counts": "el_31m?2M3N3L3N3M2M3O2O010O00010O010O00010O01O01O010ON3M2M3N3L3M3NgcP4"}, "image_id": 702, "id": 12072}, {"iscrowd": 0, "category_id": 1, "bbox": [401.0, 391.0, 83.0, 66.0], "area": 2871, "segmentation": {"size": [512, 512], "counts": "imX62k?3N2M4M201O01O01OO2NO3M2M3N3M201O00010O010O00010O010O00010O010M2N2M4M2M4M2M3N3L3N3L3N0O102M40O0010O0010O0M4M200010O01N1M301O010O00010O010kNcAe0]>YOeAg0[>WOhAi0X>SOkAj0X>TOkAi0g>N2M4M2M4M2M3N3L3Nmb="}, "image_id": 702, "id": 12073}, {"iscrowd": 0, "category_id": 1, "bbox": [84.0, 395.0, 67.0, 107.0], "area": 3392, "segmentation": {"size": [512, 512], "counts": "U_Z12k?4V@Ke?9N3L3N2M4M2N3M201O0O1M4M2M4M2N210O0XOeNkB\\1Q=hNoBW1hU1gAjNZ>S1jAjNY>S1;N2QOTAg0P?VORAh0T?M4M2N3L3N3I[@Nh?0ZSd5"}, "image_id": 702, "id": 12074}, {"iscrowd": 0, "category_id": 1, "bbox": [208.0, 410.0, 65.0, 78.0], "area": 2610, "segmentation": {"size": [512, 512], "counts": "P]X31m?2YAN^=5lANOOS>5lANO0T>3jAOO1X>OfA4O0Z>MdA500\\>KbA8N1`>m0001bNaAZ1c>ON3O010O01O010O01O010O01O010O01O010O01O01dAgNP>X1nAjNR>W1kAlNT>`110O01O010O01O010O01O010O0DiAPOV>o0lAPOU>o0kAoNW>Q1jAlNY>Q1iAmNY>Q1iAlN[>P1a0kAAU>`0gACZ>Q1001O00001O001O0000M3N2N2M3N2M1003L3N3L3N2M4M2M4M2N2M4M2M4M2M4M2M3N3M2M4M2Mlal4"}, "image_id": 702, "id": 12078}, {"iscrowd": 0, "category_id": 1, "bbox": [265.0, 448.0, 68.0, 46.0], "area": 1837, "segmentation": {"size": [512, 512], "counts": "enT41m?2M4M2M3N3L3N3M2N210O010O00010O3NO01O01O010O01O01OM4O0010O01O01O010O01O01O010O01O01OO2L3N3M21O01O010O01O01O01L3N3M2M3N3L3N3L3N2M4M2M4MgQi2"}, "image_id": 702, "id": 12079}, {"iscrowd": 0, "category_id": 1, "bbox": [331.0, 458.0, 39.0, 33.0], "area": 682, "segmentation": {"size": [512, 512], "counts": "PoU53k?2M4M2M4M2M3N3L3N3O000010O010O00010O010N1N2N3L3N3L3N3L300010O010O00010O01M2N]aV2"}, "image_id": 702, "id": 12080}, {"iscrowd": 0, "category_id": 1, "bbox": [374.0, 459.0, 22.0, 20.0], "area": 265, "segmentation": {"size": [512, 512], "counts": "g^k52k?3M4L3M301O01O01O01O01O01O01O01O0Fb@3^?If@6b?00N3LZai1"}, "image_id": 702, "id": 12081}, {"iscrowd": 0, "category_id": 1, "bbox": [451.0, 459.0, 29.0, 29.0], "area": 519, "segmentation": {"size": [512, 512], "counts": "onQ71l?4L3N2M4M2M4L3O1010O00010O01O01O010O01O01N1N3L3M3N3L3M3N`a?"}, "image_id": 702, "id": 12082}, {"iscrowd": 0, "category_id": 1, "bbox": [209.0, 484.0, 41.0, 28.0], "area": 772, "segmentation": {"size": [512, 512], "counts": "noX32l?2M3N2M3N2N2M3N2M3N2N2001O001O00001O001O001O00001O001O001O00001O001O0O2M2N2M4M2Mc`R4"}, "image_id": 702, "id": 12083}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 484.0, 33.0, 28.0], "area": 605, "segmentation": {"size": [512, 512], "counts": "m_R63k?2N2M3N2M3N2N2M3N2N2N21O00001O001O001O00001O001O001L3N2N3L3N3L3NeP]1"}, "image_id": 702, 
"id": 12084}, {"iscrowd": 0, "category_id": 1, "bbox": [446.0, 485.0, 15.0, 13.0], "area": 119, "segmentation": {"size": [512, 512], "counts": "]_o61m?2N3L3O20O00010O00010O0O2L3NiPi0"}, "image_id": 702, "id": 12085}, {"iscrowd": 0, "category_id": 1, "bbox": [467.0, 485.0, 35.0, 27.0], "area": 679, "segmentation": {"size": [512, 512], "counts": "joY74j?2M3M4L3M3M4L3O1001O001O00001O00001O00001O00001O00001O00001M2M4L3M3Me`4"}, "image_id": 702, "id": 12086}, {"iscrowd": 0, "category_id": 1, "bbox": [271.0, 491.0, 44.0, 21.0], "area": 501, "segmentation": {"size": [512, 512], "counts": "koW4130f?3X@Ng?6N2N2O1N2N2N2O1O11O1O001O001O001O1O001O001O001O1O001O001O001O1O001O001O001O1O001O00RPR3"}, "image_id": 702, "id": 12087}, {"iscrowd": 0, "category_id": 1, "bbox": [190.0, 500.0, 14.0, 12.0], "area": 118, "segmentation": {"size": [512, 512], "counts": "m_o23k?2N2M3N2001O001O00001N1N2M\\`i4"}, "image_id": 702, "id": 12088}, {"iscrowd": 0, "category_id": 1, "bbox": [346.0, 509.0, 9.0, 3.0], "area": 18, "segmentation": {"size": [512, 512], "counts": "o_]51m?200001O00001O00QP^2"}, "image_id": 702, "id": 12089}, {"iscrowd": 0, "category_id": 1, "bbox": [17.0, 358.0, 40.0, 40.0], "area": 680, "segmentation": {"size": [512, 512], "counts": "_k83l?1O2M3N1O2M3N20O0100000Og@AQ?`0l@CT?=j@EV?c00O010000O0100000O0100000O01M3N2N1N3N2M3N1O2M3N2NSTS7"}, "image_id": 704, "id": 12090}, {"iscrowd": 0, "category_id": 1, "bbox": [49.0, 387.0, 68.0, 80.0], "area": 1887, "segmentation": {"size": [512, 512], "counts": "Xmh02m?2N2M3^OHYA9e>JYA8e>JXA9f>HYA:d>IZA8e>JXA9f>IXA9e>b0O2M3N2N1O200O10O100TOdA3[>LgA4Y>IiA7X>GjA9U>FmA:S>CPBBPB?P>_ORBa0m=]OVBc0j=[OXBd0i=YOYBh0`>000O0100000O010000O0100000O010000O010000O0100000O0N3N2N2M2O2N2M2O2N2M3NoQU6"}, "image_id": 704, "id": 12091}, {"iscrowd": 0, "category_id": 1, "bbox": [4.0, 423.0, 34.0, 31.0], "area": 511, "segmentation": {"size": [512, 512], "counts": "c]22m?1N3N2M3N1O2M3N2N101000O0100000O010000O010000O010M3N2M2O2N2M2O2N2MZb\\7"}, "image_id": 704, "id": 12092}, {"iscrowd": 0, "category_id": 1, "bbox": [114.0, 447.0, 80.0, 53.0], "area": 1920, "segmentation": {"size": [512, 512], "counts": "f^i13l?1O2M3N2M2O2O1000O0100000O0100000O0100FBQA>l>ETA:k>GUA:i>HVA9g>JWA8g>JVA8i>IVA9g>b0N2N1N3N2OO2N2N1N3N2N1N3N2N2M2O2O1O10O1000O10O1000O10O1000O10O1000O10O1000O10O1000N2N1N3N2N2M2O2N2M3N1O2MPan4"}, "image_id": 704, "id": 12093}, {"iscrowd": 0, "category_id": 1, "bbox": [5.0, 453.0, 44.0, 38.0], "area": 818, "segmentation": {"size": [512, 512], "counts": "dn21n?2M3N1N3N2N1N3N2N2M2O2O1000O01000O10O1000O0100000O01O1N101000O1N1N3N2M3N1O2M3N1O2M3N2MYQW7"}, "image_id": 704, "id": 12094}, {"iscrowd": 0, "category_id": 1, "bbox": [239.0, 453.0, 40.0, 35.0], "area": 764, "segmentation": {"size": [512, 512], "counts": "hng31m?3M2N3M2N3M2N3N1N3M201O0100O010O010O010O010O010O010O010O010N1N3M2N3M2N3M2N3M2N_Qd3"}, "image_id": 704, "id": 12095}, {"iscrowd": 0, "category_id": 1, "bbox": [46.0, 465.0, 56.0, 47.0], "area": 1557, "segmentation": {"size": [512, 512], "counts": "^_g02l?3N2N1O2M3N2N2N1N3N2N2N2M2O2N2N2M3N1O2N2M2O1O100001O1O1O1O001O1O1O1O001O1O1O1N1O1N102KPAWOR?g05N1O2M3N2N2N1N3N2N2N2Mk`\\6"}, "image_id": 704, "id": 12096}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 490.0, 18.0, 22.0], "area": 266, "segmentation": {"size": [512, 512], "counts": "\\?d0Z?2001O001O1O001O1O1N1O2N2M2O2M3N1N^`f7"}, "image_id": 704, "id": 12097}, {"iscrowd": 0, "category_id": 1, "bbox": [204.0, 492.0, 27.0, 20.0], "area": 325, "segmentation": {"size": [512, 512], "counts": 
"l_V32m?2M2O2M2O1N2O1N2O1N2O11O1O001O1O001O1O001N2M2O2M3N1N\\P\\4"}, "image_id": 704, "id": 12098}, {"iscrowd": 0, "category_id": 1, "bbox": [166.0, 0.0, 48.0, 11.0], "area": 280, "segmentation": {"size": [512, 512], "counts": "PPc25k?4L2N00O1000000O100000000O1000000O100000000O1000000O100000000O1000000O100000000O1000000O100000PPe4"}, "image_id": 707, "id": 12099}, {"iscrowd": 0, "category_id": 1, "bbox": [493.0, 0.0, 19.0, 19.0], "area": 332, "segmentation": {"size": [512, 512], "counts": "P`f76j?9G4L000000O10000001OO100000000000000"}, "image_id": 708, "id": 12100}, {"iscrowd": 0, "category_id": 1, "bbox": [499.0, 47.0, 13.0, 36.0], "area": 407, "segmentation": {"size": [512, 512], "counts": "`ai78h?9G9G9F100000000000O1000aN"}, "image_id": 708, "id": 12101}, {"iscrowd": 0, "category_id": 1, "bbox": [485.0, 88.0, 27.0, 28.0], "area": 619, "segmentation": {"size": [512, 512], "counts": "lbb74l?8G9H4LO1000000000O1000O1000000000O1000O1000000000O1XM"}, "image_id": 708, "id": 12102}, {"iscrowd": 0, "category_id": 1, "bbox": [508.0, 132.0, 4.0, 12.0], "area": 40, "segmentation": {"size": [512, 512], "counts": "TTn76j?6JO10lK"}, "image_id": 708, "id": 12103}, {"iscrowd": 0, "category_id": 1, "bbox": [492.0, 191.0, 13.0, 15.0], "area": 103, "segmentation": {"size": [512, 512], "counts": "SVf72m?2O2M2N3N1N30N1N3M2O2M2NiY3"}, "image_id": 708, "id": 12104}, {"iscrowd": 0, "category_id": 1, "bbox": [54.0, 0.0, 44.0, 34.0], "area": 1144, "segmentation": {"size": [512, 512], "counts": "^Pk05k?:F1AISA7m>>0O1000000000000000000O10000000000000000004L2N00000000000008H1O0O1Ho@AQ??800000009G^o^6"}, "image_id": 710, "id": 12105}, {"iscrowd": 0, "category_id": 1, "bbox": [110.0, 0.0, 42.0, 25.0], "area": 802, "segmentation": {"size": [512, 512], "counts": "PPg17i?9G9G0000O1000000000000000000O10000000000000000O10000000000Eg@OY?1;0000000000O1O1O1O1MSPd5"}, "image_id": 710, "id": 12106}, {"iscrowd": 0, "category_id": 1, "bbox": [198.0, 0.0, 37.0, 32.0], "area": 1062, "segmentation": {"size": [512, 512], "counts": "QPS3;e?>B6J0000000000O1000O1000000000000000000000O100000000000000000000000=C>AV_Z4"}, "image_id": 710, "id": 12107}, {"iscrowd": 0, "category_id": 1, "bbox": [256.0, 0.0, 69.0, 16.0], "area": 843, "segmentation": {"size": [512, 512], "counts": "PPP46j?2N0000O13M6J0000000000O10000000000000000O10000000000000000O10000000000000000O10000000000000000O10000000000000000O1000000000000000000O4Mm_m2"}, "image_id": 710, "id": 12108}, {"iscrowd": 0, "category_id": 1, "bbox": [504.0, 0.0, 8.0, 16.0], "area": 128, "segmentation": {"size": [512, 512], "counts": "PPl7`0`?00000000000000"}, "image_id": 710, "id": 12109}, {"iscrowd": 0, "category_id": 1, "bbox": [149.0, 14.0, 20.0, 23.0], "area": 386, "segmentation": {"size": [512, 512], "counts": "``Z29g?9F4M00O100000000000O1000O100000007I9GR_[5"}, "image_id": 710, "id": 12110}, {"iscrowd": 0, "category_id": 1, "bbox": [23.0, 17.0, 22.0, 23.0], "area": 430, "segmentation": {"size": [512, 512], "counts": "c`;6j?:F5K00000O100000O1000000000O100000O104L:FQ_Y7"}, "image_id": 710, "id": 12111}, {"iscrowd": 0, "category_id": 1, "bbox": [133.0, 28.0, 14.0, 10.0], "area": 112, "segmentation": {"size": [512, 512], "counts": "n`R28h?000O10O1000000000O100005Ko^f5"}, "image_id": 710, "id": 12112}, {"iscrowd": 0, "category_id": 1, "bbox": [213.0, 31.0, 50.0, 23.0], "area": 810, "segmentation": {"size": [512, 512], "counts": "RaZ36j?6J000000O0100000000000000000O01000O100010O:F000000000O0100000000000000000O01000000000000000009F;F^^l3"}, "image_id": 710, "id": 12113}, {"iscrowd": 0, 
"category_id": 1, "bbox": [428.0, 42.0, 51.0, 32.0], "area": 1271, "segmentation": {"size": [512, 512], "counts": "^Qf67i?7H10O1008H5K0000000000O1000O10000000000000002N1O00L40O1000000000000000000000O10O1000000000000009G=CP^`0"}, "image_id": 710, "id": 12114}, {"iscrowd": 0, "category_id": 1, "bbox": [267.0, 44.0, 54.0, 43.0], "area": 1684, "segmentation": {"size": [512, 512], "counts": "YbU41n?;F2N00000UOFPB:P>0fA0Z>;[AEe>j011O1O0000000O1000000000O10002N0000O10000000O1000000000006J;E2N0O10000000O100000000000O;Fg]o2"}, "image_id": 710, "id": 12115}, {"iscrowd": 0, "category_id": 1, "bbox": [481.0, 51.0, 22.0, 21.0], "area": 404, "segmentation": {"size": [512, 512], "counts": "ea`79g?:F0O1000O10000000000000000000O1000O10008HU^4"}, "image_id": 710, "id": 12116}, {"iscrowd": 0, "category_id": 1, "bbox": [62.0, 59.0, 51.0, 43.0], "area": 1660, "segmentation": {"size": [512, 512], "counts": "nQo04l?:F9G9G8G10O10000000000000O10O10000000000000O15J5L000000000000000O01000000000000000O01000000009G9G:FR]W6"}, "image_id": 710, "id": 12117}, {"iscrowd": 0, "category_id": 1, "bbox": [465.0, 76.0, 16.0, 16.0], "area": 215, "segmentation": {"size": [512, 512], "counts": "]bX71o?;SAEm>g00000000000000O1000O10000000FTA@l>`0:000000000O01000007F_@Gelb5"}, "image_id": 710, "id": 12119}, {"iscrowd": 0, "category_id": 1, "bbox": [448.0, 96.0, 49.0, 34.0], "area": 1290, "segmentation": {"size": [512, 512], "counts": "TSP73m?9G9G9GO10000000O10000000O10000000O10000000O10000000O10000000O10000000O1003M000O10008H0000000Eh@NW?3Q]7"}, "image_id": 710, "id": 12120}, {"iscrowd": 0, "category_id": 1, "bbox": [173.0, 103.0, 30.0, 21.0], "area": 484, "segmentation": {"size": [512, 512], "counts": "Zcf27i?5K0000000O01000000006J1O000O10000000O10000000O10000001N:G_\\j4"}, "image_id": 710, "id": 12121}, {"iscrowd": 0, "category_id": 1, "bbox": [256.0, 110.0, 22.0, 21.0], "area": 368, "segmentation": {"size": [512, 512], "counts": "`SP44l?:F5K00O100000O100000000000O100000O1006J:FRld3"}, "image_id": 710, "id": 12122}, {"iscrowd": 0, "category_id": 1, "bbox": [278.0, 111.0, 57.0, 39.0], "area": 1714, "segmentation": {"size": [512, 512], "counts": "TT[49g?9C40000000O10GCo@=Q?90001O0000000O1000000000O100000O1L400O1000O10000000000000O10O101O4L0000000000O10O100000006J:F9Gd[h2"}, "image_id": 710, "id": 12123}, {"iscrowd": 0, "category_id": 1, "bbox": [100.0, 112.0, 26.0, 21.0], "area": 476, "segmentation": {"size": [512, 512], "counts": "bSb1:f?9G0000000000000O0100000000000000000O010000000009GWlP6"}, "image_id": 710, "id": 12124}, {"iscrowd": 0, "category_id": 1, "bbox": [201.0, 121.0, 48.0, 45.0], "area": 1683, "segmentation": {"size": [512, 512], "counts": "ocT33m?:Z@HP?o0E2N0M2100000000000O100000002N00000000000O1000K]AmNc>S1500000000000O100000O10003M;E8HO10000005J;FS[S4"}, "image_id": 710, "id": 12125}, {"iscrowd": 0, "category_id": 1, "bbox": [58.0, 135.0, 42.0, 42.0], "area": 1428, "segmentation": {"size": [512, 512], "counts": "bTm04l?003Dc0k@XO^>V1O00000000000000O010000000000000O100000O1000000004K5L000000000000000O10004^OSAIZZ^6"}, "image_id": 710, "id": 12126}, {"iscrowd": 0, "category_id": 1, "bbox": [497.0, 145.0, 15.0, 68.0], "area": 603, "segmentation": {"size": [512, 512], "counts": "bdh75k?:F9G9G8H0000000O10O16J:F9G2]K"}, "image_id": 710, "id": 12127}, {"iscrowd": 0, "category_id": 1, "bbox": [436.0, 153.0, 38.0, 69.0], "area": 1892, "segmentation": {"size": [512, 512], "counts": "kTj66j?9G9G:F8H00000O10O1008H9G9G00000O10000001N6K00000000O100000O2O9G:F9G9G9G9GZib0"}, "image_id": 710, "id": 12128}, {"iscrowd": 0, 
"category_id": 1, "bbox": [225.0, 173.0, 26.0, 23.0], "area": 503, "segmentation": {"size": [512, 512], "counts": "_e`35k?;E5J1000O1000000000000000O1000O100000000000008H:FQZR4"}, "image_id": 710, "id": 12129}, {"iscrowd": 0, "category_id": 1, "bbox": [268.0, 189.0, 72.0, 36.0], "area": 1853, "segmentation": {"size": [512, 512], "counts": "VVV44l?:F:F1O00000000000O1000O107L0M0O01000000000000000ODBXA>h>=000000000000O010000000000000O10O10000000000000O01000000000000000O010000000000000008G:G9GYie2"}, "image_id": 710, "id": 12130}, {"iscrowd": 0, "category_id": 1, "bbox": [42.0, 200.0, 27.0, 41.0], "area": 835, "segmentation": {"size": [512, 512], "counts": "ZVe04l?:F:F:F5K000O10000000O1000000000O100005K0WOVA;k>E_A1a>OiAGW>9h0000cYm6"}, "image_id": 710, "id": 12131}, {"iscrowd": 0, "category_id": 1, "bbox": [121.0, 202.0, 65.0, 35.0], "area": 1700, "segmentation": {"size": [512, 512], "counts": "efl15k?9F:G1OO10000000000000O10O10000000K\\On@d0R?500000O100000O100000000000O100000O1000000000O100000O1000000000O100000O1000000000O10006J9G9GnhR5"}, "image_id": 710, "id": 12132}, {"iscrowd": 0, "category_id": 1, "bbox": [67.0, 204.0, 49.0, 36.0], "area": 984, "segmentation": {"size": [512, 512], "counts": "SgQ17i?6J0O0100000000000000000O010000000000000BJRA6n>>0O10000O100000O100KXORAh0n>50000000O0L500000000O1000006J:FTiU6"}, "image_id": 710, "id": 12133}, {"iscrowd": 0, "category_id": 1, "bbox": [112.0, 235.0, 10.0, 9.0], "area": 76, "segmentation": {"size": [512, 512], "counts": "\\Wh11o?7H10000000000005K`hR6"}, "image_id": 710, "id": 12134}, {"iscrowd": 0, "category_id": 1, "bbox": [465.0, 235.0, 47.0, 39.0], "area": 1416, "segmentation": {"size": [512, 512], "counts": "]gX73m?8Hb0^O8H00O1000O100000000000O1000O1000000000O10007I0000000000O010000000000000O10O:G4L00O10000TH"}, "image_id": 710, "id": 12135}, {"iscrowd": 0, "category_id": 1, "bbox": [282.0, 238.0, 59.0, 42.0], "area": 1724, "segmentation": {"size": [512, 512], "counts": "cW]46j?9G000000000000O0100NBb@>^?2k@Ee>?WAAh>l007I0000000000000O1000O5L0000000000O1000O1000001O8H0000O10O10000000000000O1000O100000:F9GeWe2"}, "image_id": 710, "id": 12136}, {"iscrowd": 0, "category_id": 1, "bbox": [50.0, 253.0, 121.0, 55.0], "area": 3740, "segmentation": {"size": [512, 512], "counts": "cXi06j?:F:F7I00000O100000O100000000000O1000O10000000000000O1000O10000000000000O1000O100000000000O100000O101O00000BSAIm>7]A_Oc>a0=1000000000O100000DBVA>j><0000O4M0000O100000000000O100000O100000000000O100000O1000000000O10000000O1000000000O100000O10000000005K:F9G[WZ5"}, "image_id": 710, "id": 12137}, {"iscrowd": 0, "category_id": 1, "bbox": [441.0, 254.0, 17.0, 9.0], "area": 134, "segmentation": {"size": [512, 512], "counts": "Phl67h?1000000000000000000O10O1000002NPhj0"}, "image_id": 710, "id": 12138}, {"iscrowd": 0, "category_id": 1, "bbox": [222.0, 262.0, 37.0, 36.0], "area": 1116, "segmentation": {"size": [512, 512], "counts": "YX_36j?:F:F7I000O10O10000000000000O10O1000000000000000O10O100000000000009G:F9FoVn3"}, "image_id": 710, "id": 12139}, {"iscrowd": 0, "category_id": 1, "bbox": [455.0, 265.0, 9.0, 6.0], "area": 43, "segmentation": {"size": [512, 512], "counts": "ZhS71n?5L000000O10000Nigg0"}, "image_id": 710, "id": 12140}, {"iscrowd": 0, "category_id": 1, "bbox": [472.0, 291.0, 40.0, 37.0], "area": 1273, "segmentation": {"size": [512, 512], "counts": "fY\\75^?5e@3S?b0K0000000000000O10O10L4000000O10000000O10000000003M00000O100000000000O10000kF"}, "image_id": 710, "id": 12141}, {"iscrowd": 0, "category_id": 1, "bbox": [208.0, 299.0, 35.0, 23.0], "area": 636, 
"segmentation": {"size": [512, 512], "counts": "_YX35k?8G7J00O100000000000O1000O100000000000O10O100000000000O1000O10004L8HYVV4"}, "image_id": 710, "id": 12142}, {"iscrowd": 0, "category_id": 1, "bbox": [182.0, 306.0, 25.0, 23.0], "area": 493, "segmentation": {"size": [512, 512], "counts": "dYk29g?;E1O00000000000O10000000O100000000000O100008H;DlUh4"}, "image_id": 710, "id": 12143}, {"iscrowd": 0, "category_id": 1, "bbox": [277.0, 310.0, 73.0, 51.0], "area": 2191, "segmentation": {"size": [512, 512], "counts": "_jZ46j?P1O0100000000000000DeASO[>l0=000O100000000000000000O1000O10000000000000000000O106J;E000000004Lee`2"}, "image_id": 710, "id": 12144}, {"iscrowd": 0, "category_id": 1, "bbox": [220.0, 320.0, 47.0, 37.0], "area": 1231, "segmentation": {"size": [512, 512], "counts": "cZ^32n?;E5K0O10O100000000000000000O10OBB\\A>d>?000000O1000000000O1000000000O1000000000O10000000004L;E;EVUj3"}, "image_id": 710, "id": 12145}, {"iscrowd": 0, "category_id": 1, "bbox": [138.0, 332.0, 7.0, 9.0], "area": 50, "segmentation": {"size": [512, 512], "counts": "]ZU26j?2N0O100007H^Ug5"}, "image_id": 710, "id": 12146}, {"iscrowd": 0, "category_id": 1, "bbox": [72.0, 334.0, 55.0, 40.0], "area": 1647, "segmentation": {"size": [512, 512], "counts": "hZT12`04h>7l@0n>a0000000000000O010000000HYOWAg0i>80000000O100000O1000000002N5K0O100000000000O10000000O100000005K00O100000@m@2T?NVAHj>8e0ISUP6"}, "image_id": 710, "id": 12147}, {"iscrowd": 0, "category_id": 1, "bbox": [152.0, 339.0, 46.0, 32.0], "area": 1206, "segmentation": {"size": [512, 512], "counts": "hZ\\28h?9F:G1O0000000000O10O10000000000000O10O1000000000000000O10O10000000000000O10O10000005K9G9Gfdl4"}, "image_id": 710, "id": 12148}, {"iscrowd": 0, "category_id": 1, "bbox": [46.0, 345.0, 22.0, 27.0], "area": 481, "segmentation": {"size": [512, 512], "counts": "kZg03m?9G9G4L000O100000000000O1000O10000005K9G9Fadm6"}, "image_id": 710, "id": 12149}, {"iscrowd": 0, "category_id": 1, "bbox": [486.0, 348.0, 26.0, 40.0], "area": 918, "segmentation": {"size": [512, 512], "counts": "V[c74d?Oa@;U?c0E5K0000000000000O1000O1000000000000000O1007H10mD"}, "image_id": 710, "id": 12150}, {"iscrowd": 0, "category_id": 1, "bbox": [225.0, 358.0, 14.0, 9.0], "area": 109, "segmentation": {"size": [512, 512], "counts": "Wk`37i?1O00000O1000000000O10007IcTX4"}, "image_id": 710, "id": 12151}, {"iscrowd": 0, "category_id": 1, "bbox": [212.0, 359.0, 11.0, 24.0], "area": 236, "segmentation": {"size": [512, 512], "counts": "W[Z3b0^?6J00000000000000Bo@LR?4VU`4"}, "image_id": 710, "id": 12152}, {"iscrowd": 0, "category_id": 1, "bbox": [294.0, 359.0, 18.0, 13.0], "area": 157, "segmentation": {"size": [512, 512], "counts": "Z[c4:f?0000000O100000000OK]@Nd?250000000O12NgdS3"}, "image_id": 710, "id": 12153}, {"iscrowd": 0, "category_id": 1, "bbox": [130.0, 361.0, 18.0, 18.0], "area": 241, "segmentation": {"size": [512, 512], "counts": "Y[Q2:f?8H000000O100000004L3M00000000009GWde5"}, "image_id": 710, "id": 12154}, {"iscrowd": 0, "category_id": 1, "bbox": [291.0, 367.0, 55.0, 45.0], "area": 1821, "segmentation": {"size": [512, 512], "counts": "lka48h?1mAE[>3\\Tg3"}, "image_id": 710, "id": 12156}, {"iscrowd": 0, "category_id": 1, "bbox": [207.0, 384.0, 7.0, 9.0], "area": 51, "segmentation": {"size": [512, 512], "counts": "QlW33m?5K0000000O0Qdd4"}, "image_id": 710, "id": 12157}, {"iscrowd": 0, "category_id": 1, "bbox": [355.0, 386.0, 22.0, 23.0], "area": 436, "segmentation": {"size": [512, 512], "counts": "Tla5:f?;E000000O10O100000000000000000O10O10000;EcSS2"}, "image_id": 710, "id": 12158}, {"iscrowd": 0, 
"category_id": 1, "bbox": [181.0, 401.0, 25.0, 38.0], "area": 754, "segmentation": {"size": [512, 512], "counts": "clj23m?9G9G9G6J0000O01000000000000000O010000003M9G9G9Gabh4"}, "image_id": 710, "id": 12159}, {"iscrowd": 0, "category_id": 1, "bbox": [156.0, 403.0, 20.0, 10.0], "area": 177, "segmentation": {"size": [512, 512], "counts": "d\\^26j?3M00000000000000000O01000000000000000]cW5"}, "image_id": 710, "id": 12160}, {"iscrowd": 0, "category_id": 1, "bbox": [77.0, 407.0, 55.0, 44.0], "area": 1588, "segmentation": {"size": [512, 512], "counts": "RmV11n?4M4L:F9f@WOk>o00000000O10O1000000000000I70O01000000000000000O10O1000000001O9G:F1N100000000000000000O0100000008H:FSbm5"}, "image_id": 710, "id": 12161}, {"iscrowd": 0, "category_id": 1, "bbox": [481.0, 413.0, 31.0, 34.0], "area": 877, "segmentation": {"size": [512, 512], "counts": "Qm`79g?2N0O10:F9G0O1000O10000000000000O1000O100000000000O1000O1000SC"}, "image_id": 710, "id": 12162}, {"iscrowd": 0, "category_id": 1, "bbox": [346.0, 422.0, 28.0, 36.0], "area": 764, "segmentation": {"size": [512, 512], "counts": "\\]]56j?1O4L;E8H0000000O01000L4000000O1000O10002N2N00O16J9G9FPbT2"}, "image_id": 710, "id": 12163}, {"iscrowd": 0, "category_id": 1, "bbox": [303.0, 428.0, 6.0, 6.0], "area": 30, "segmentation": {"size": [512, 512], "counts": "]mg45k?00000O12NbRU3"}, "image_id": 710, "id": 12164}, {"iscrowd": 0, "category_id": 1, "bbox": [139.0, 433.0, 17.0, 11.0], "area": 157, "segmentation": {"size": [512, 512], "counts": "amU21o?:F000000000000O10001O00O10N203M\\ba5"}, "image_id": 710, "id": 12165}, {"iscrowd": 0, "category_id": 1, "bbox": [221.0, 437.0, 20.0, 20.0], "area": 294, "segmentation": {"size": [512, 512], "counts": "fm^37i?;E1N100000000000000002M1Fb@2^?N:00000O16IURW4"}, "image_id": 710, "id": 12166}, {"iscrowd": 0, "category_id": 1, "bbox": [245.0, 437.0, 108.0, 44.0], "area": 3138, "segmentation": {"size": [512, 512], "counts": "hmj35k?9G:F9F1000O100000000000O100000O100000000000O1007I3L13L9HO1000000000000000O10O10000000000000O1000O3N00000H701000000000000000O10O100000NYOk@g0U?200O107I000000000000000O10O10000000000000O1000O5L9G:FOFf@0Z?>M4K10000000000004L>BRQ_2"}, "image_id": 710, "id": 12167}, {"iscrowd": 0, "category_id": 1, "bbox": [89.0, 457.0, 21.0, 16.0], "area": 291, "segmentation": {"size": [512, 512], "counts": "[n\\14l?:F000O1000000000O100000000000O10000003MdaX6"}, "image_id": 710, "id": 12168}, {"iscrowd": 0, "category_id": 1, "bbox": [163.0, 457.0, 52.0, 39.0], "area": 1431, "segmentation": {"size": [512, 512], "counts": "fna27h?710O100000000000O1000O100000000003L9H9G9GjPd4"}, "image_id": 710, "id": 12169}, {"iscrowd": 0, "category_id": 1, "bbox": [86.0, 478.0, 53.0, 32.0], "area": 1216, "segmentation": {"size": [512, 512], "counts": "R_[19g?:F9G0O1000000000O10000000O100000000000O10000000O1000000000O1000000In@@Q?a07O10000000002N4LOI800000000000004LiPj5"}, "image_id": 710, "id": 12170}, {"iscrowd": 0, "category_id": 1, "bbox": [241.0, 478.0, 13.0, 16.0], "area": 151, "segmentation": {"size": [512, 512], "counts": "Voh35k?O10HNb@2^?804L000000000007Hl`P4"}, "image_id": 710, "id": 12171}, {"iscrowd": 0, "category_id": 1, "bbox": [489.0, 479.0, 23.0, 33.0], "area": 655, "segmentation": {"size": [512, 512], "counts": "Rod72n?8H9G9G2N000O1000000000000O100000000000000O1"}, "image_id": 710, "id": 12172}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 6.0, 7.0], "area": 25, "segmentation": {"size": [512, 512], "counts": "07i?O1O1O1N2OQPm7"}, "image_id": 711, "id": 12173}, {"iscrowd": 0, "category_id": 1, "bbox": [259.0, 0.0, 24.0, 
28.0], "area": 334, "segmentation": {"size": [512, 512], "counts": "b`Q42n?2M3]OJXA9e>IXA9g>ISANJ9S?Lo@8R?Jl@K14S?7o@FR?:QACo>>92N00O100O100O3N2M3N2N3L3Nc_b3"}, "image_id": 711, "id": 12174}, {"iscrowd": 0, "category_id": 1, "bbox": [163.0, 43.0, 23.0, 21.0], "area": 232, "segmentation": {"size": [512, 512], "counts": "baa21n?2N2N2N2M3N2O010O1000000000O10O01M3N2N2N2N2NYnR5"}, "image_id": 711, "id": 12175}, {"iscrowd": 0, "category_id": 1, "bbox": [150.0, 88.0, 64.0, 58.0], "area": 1604, "segmentation": {"size": [512, 512], "counts": "US[22m?2N2N1O2N4K3N2N2N2N2N2N110O1000000O1N2N2N101O10000000000000O10O1000O1N2000000001N10000000O100UORAc0n>[OTAe0l>YOUAf0l>YOVAe0T?M3N00001O2N2N1O2M3N2N2N2NQld4"}, "image_id": 711, "id": 12176}, {"iscrowd": 0, "category_id": 1, "bbox": [362.0, 150.0, 58.0, 48.0], "area": 1455, "segmentation": {"size": [512, 512], "counts": "^Ue51o?2M2O2M3M3N1N3N2M3N1N3N2M3M100O010O01O01O010O0NROTAn0k>31O010O010O01O010O12O00N0010KVASOj>n04O010O012M2O2M3M3N1N3N2M3N1N3N2M3Mdj]1"}, "image_id": 711, "id": 12177}, {"iscrowd": 0, "category_id": 1, "bbox": [276.0, 188.0, 26.0, 22.0], "area": 235, "segmentation": {"size": [512, 512], "counts": "SVZ41n?2N2N2N1O2M3000000000O10O100000O1N2O1O01000N2N2N2NdiX3"}, "image_id": 711, "id": 12178}, {"iscrowd": 0, "category_id": 1, "bbox": [440.0, 209.0, 34.0, 31.0], "area": 554, "segmentation": {"size": [512, 512], "counts": "oVl62m?3N2M2O2M3N1N3M3N1N010O00010O010O01O01O010O010O02N3N1N3N2M2N3N2M3NPib0"}, "image_id": 711, "id": 12179}, {"iscrowd": 0, "category_id": 1, "bbox": [394.0, 220.0, 18.0, 17.0], "area": 160, "segmentation": {"size": [512, 512], "counts": "SWU62m?2N3N2M3N1N00010O010O02N3N1N3N2M2Okha1"}, "image_id": 711, "id": 12180}, {"iscrowd": 0, "category_id": 1, "bbox": [366.0, 245.0, 33.0, 34.0], "area": 609, "segmentation": {"size": [512, 512], "counts": "PXg52m?3N1N2N3N1N3M2O2M2N3N1N3M2O2M1O01O101N2N3N1N3M2O2M2N3M2O2M2N3N1NhWh1"}, "image_id": 711, "id": 12181}, {"iscrowd": 0, "category_id": 1, "bbox": [121.0, 253.0, 46.0, 48.0], "area": 897, "segmentation": {"size": [512, 512], "counts": "^hl11a?4g@NW?4g@MX?4g@NX?3f@OX?2g@0X?0g@2Y?;O1003MN2N110000000000000O100000O10000000000000O100000O1N2N2N2N2N2M2O2N2N2N2NRW\\5"}, "image_id": 711, "id": 12182}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 325.0, 42.0, 90.0], "area": 2195, "segmentation": {"size": [512, 512], "counts": "U:f2[=0O010O010O0010O0N3M2N3M2N3M2N3M2N3M2N3M2N3L3N3M2N3M2N3M2N3M2N3M2N3M2M3N3M2N3M2N3MYeZ7"}, "image_id": 711, "id": 12183}, {"iscrowd": 0, "category_id": 1, "bbox": [94.0, 352.0, 42.0, 12.0], "area": 398, "segmentation": {"size": [512, 512], "counts": "Q[_18g?100000000000001O0000000000000001O000000000001O0000000000000001O00000000000001O000ndk5"}, "image_id": 711, "id": 12184}, {"iscrowd": 0, "category_id": 1, "bbox": [451.0, 372.0, 17.0, 28.0], "area": 237, "segmentation": {"size": [512, 512], "counts": "fkQ74l?3L3N3L3N3M2N3O10N2L3N3M2M4M3L3Nece0"}, "image_id": 711, "id": 12185}, {"iscrowd": 0, "category_id": 1, "bbox": [40.0, 414.0, 19.0, 40.0], "area": 466, "segmentation": {"size": [512, 512], "counts": "Q]d03m?3_@Nm>7n@Nn>6n@Nn>f0L2M10O10O14M3M2M4L4K5L3M4L4KQRR7"}, "image_id": 711, "id": 12186}, {"iscrowd": 0, "category_id": 1, "bbox": [4.0, 448.0, 39.0, 43.0], "area": 837, "segmentation": {"size": [512, 512], "counts": "i^22m?1O2N2N2N2N2M3N2N2N2N1O2N2IWOTAk0j>WOTAk0j>7N2N2N02N2N2N2N2M3N2N2N1O2N2N2N2N2M3N2N2N2N1NaQZ7"}, "image_id": 711, "id": 12187}, {"iscrowd": 0, "category_id": 1, "bbox": [467.0, 462.0, 31.0, 22.0], "area": 257, "segmentation": {"size": [512, 
512], "counts": "mnY72m?2O2M3N0O1O01O010O010O010O01O01O010O010O010O01O01O012M3N1N3N[a6"}, "image_id": 711, "id": 12188}, {"iscrowd": 0, "category_id": 1, "bbox": [452.0, 481.0, 33.0, 23.0], "area": 310, "segmentation": {"size": [512, 512], "counts": "__R73m?2M3N1N2OO010O010O010O010O01O010O010O01O010O010O010O010O2O2M3N2MiP="}, "image_id": 711, "id": 12189}, {"iscrowd": 0, "category_id": 1, "bbox": [285.0, 433.0, 131.0, 79.0], "area": 5864, "segmentation": {"size": [512, 512], "counts": "en^43m?4K5L4L4L4K5L4L4K4M00O10O10O1000O10O1000O10O1000O10O10O1000O10O14L4K5L000O1000O10O1000O10O1000O10O100000O0100000O0AXOhAh0W>]OeAc0[>AaA>:XOe=Y1QBhN21k=W1SBlNMNo=W1TBROl=m0UBSOk=m0TBTOl=l0TBTOk=m0UBSOk=l0UBUOk=k0UBUOj=l0VBTOj=l0VBTOj=k0VBVOj=j0VBVOi=k0WBUOi=k0WBUOi=j0WBWOi=i0WBWOh=j0XBVOh=j0XBVOh=i0XBXOh=h0XBXOg=i0YBWOg=j0XBVOh=a100O1000000O10000O1000000O10TOYB_Og=`0^B\\Ob=d0bBXO]=i0eBUO[=k0eBUO[=j0fBVOZ=j0fBVOY=k0fBVOZ=j0fBVOZ=i0gBWOY=i0gBWOX=j0gBWOY=i0kBSOU=l0PCPOP=P1SCmNl2\\AIi>2\\AJh>2\\AJh>2\\AJh>1^b_1"}, "image_id": 717, "id": 12190}, {"iscrowd": 0, "category_id": 1, "bbox": [427.0, 506.0, 22.0, 6.0], "area": 73, "segmentation": {"size": [512, 512], "counts": "ooe61o?00000O1000000O10000O1000000O1000000O1002NTPo0"}, "image_id": 717, "id": 12191}, {"iscrowd": 0, "category_id": 1, "bbox": [145.0, 0.0, 39.0, 26.0], "area": 410, "segmentation": {"size": [512, 512], "counts": "P`X21o?1O1O001O1O1O1O1O1O1O001O1O1O1O00O11O1O1O1O1O1OAe@:Z?Eh@:X?Ej@;^?000O100000O1N2N2N2N1N3N_oS5"}, "image_id": 718, "id": 12192}, {"iscrowd": 0, "category_id": 1, "bbox": [379.0, 0.0, 133.0, 61.0], "area": 4523, "segmentation": {"size": [512, 512], "counts": "U`m51n?2N2N2N2N101O1O1O1O1O1O001O1O1O1O1O1O001O1O1O1O1O1O001O1O1O1O1O1O001O1O1O1O1O1O001O1O1O1O1O1O001O1O1O1O1O1O001O1O1O1O1O1O001O1O1OO1O1O1N2O1O1O1O1O1N2O1O1O1O1O1N2O1O1O1O1O1N2O1O1O1O100001O1O1O1O1O1O001O1O1O1O00O1O1N2O1O1O1O1O1N2O1O1O1O1O1N2O1O1O1O1O1N2O1O1O1O1O1N2O1"}, "image_id": 718, "id": 12193}, {"iscrowd": 0, "category_id": 1, "bbox": [119.0, 14.0, 28.0, 26.0], "area": 334, "segmentation": {"size": [512, 512], "counts": "g`k11n?2N2N2M3N1O2N2N20000O01000000000O010000N2N2N1O2M3N2N2NS_f5"}, "image_id": 718, "id": 12194}, {"iscrowd": 0, "category_id": 1, "bbox": [252.0, 28.0, 34.0, 38.0], "area": 405, "segmentation": {"size": [512, 512], "counts": "kQn31n?2N1O2M3N2N2N2N0O10O100000O10O100000O10O1000O1000O101O2N2N2M3N2N1OPo`3"}, "image_id": 718, "id": 12195}, {"iscrowd": 0, "category_id": 1, "bbox": [45.0, 31.0, 45.0, 53.0], "area": 1217, "segmentation": {"size": [512, 512], "counts": "Sbf02m?2N2N1O2M3N2ADWA>g>DWA>g>DWA=g>FVA=h>EVA=h>DWA>g>?N2M2O2N2N2N1OO2O2N2N2N2N2M3N2N2N2N2M2O2N2N2N2M3N2N2N2N2M3N2N2N\\nb6"}, "image_id": 718, "id": 12196}, {"iscrowd": 0, "category_id": 1, "bbox": [158.0, 51.0, 293.0, 330.0], "area": 44099, "segmentation": {"size": [512, 512], "counts": 
"YY_22m?2M3mNJYB8e=JYB8e=JYB8e=JYB7f=JYB8d=KYB8e=JYB8e=JYB8e=JYB8e=JYB8e=JYB7e=KYB8e=JYB8e=JYB8e=JYB8e=JYB8e=S1M2O2N2N2N2N2M3N1O2N2N2N2M3N2N1O2N2N2M3N2N2N1O2N2M3N2N2N2N1O2M3N2N2N2N2N1N3N2N2N2N2N2M2O2N2N2N2aNhJSHZ5k7gJSH\\5k7fJSH[5l7gJRH[5l7gJRH[5l7gJRH[5k7hJSHZ5k7hJRH[5l7fJSH\\5k7fJSH[5l7gJRH[5m7fJQH\\5o7dJoG^5Q8bJlGa5T8_JjGc5U8]JjGe5V8[JhGe5Z8[JdGe5^8[J`Gd5c8\\JZGe5h8[JVGe5l8ZJSGg5m8g000000O100000O100000O100000O100000O1000O01M3N2NO100000O1000O100000O1000O10000YOWIXHh6i7YIVHg6i7\\IUHd6k7^IRHc6n7_IPHa6P8aInG_6R8cIlG\\6U8fIiGZ6V8hIiGX6W8jIfGW6Z8kIdGf40RL\\8e4fGYKZ8g4hGVKY8i4jGUKU8l4mGRKS8n4nGQKR8o4PHoJP8Q5RHmJn7S5THjJm7U5VHiJi7X5YHfJg7Z5[HdJf7[5[HdJg7Z5[HdJg7Z5[HcJh7Z5[HdJg7Z5[HdJg7Z5[HdJg7Z5[HdJf7[5[HdJg7Z5[HcJh7Z5[HdJg7Z5[HdJg7Z5_1000000O1000O100000O1000O100000O1000O100000O1000O100000O1000O100000O1000O100000O1000O1002N2M2O2N2N2N2N2M3N1O2N2N2N2M3N2N1O1OO10O100002N2N2000O100000O100000O100000O100000O1000O1000M3N2N2N2N1\\LUDR3m;kLVDR3m;lLTDS3n;kLTDS3n;kLTDS3n;kLTDS3n;kLTDS3^O2N2N2N2MnZn0"}, "image_id": 718, "id": 12197}, {"iscrowd": 0, "category_id": 1, "bbox": [217.0, 55.0, 41.0, 44.0], "area": 829, "segmentation": {"size": [512, 512], "counts": "eb\\32m?2M3N1O2N2N2M3N2N1O2N2N2M3N2N00000O10O10000000O10O2O2N2N1O2M3N2N2N2N1N3N2N2N2N2Nnmn3"}, "image_id": 718, "id": 12198}, {"iscrowd": 0, "category_id": 1, "bbox": [10.0, 59.0, 100.0, 110.0], "area": 3165, "segmentation": {"size": [512, 512], "counts": "iT51n?1O2M3N2N2N2N1O2M3N2N2N2N1O2M3N2N2N2N1N2O000000O10O100000O1000O100000O10O100000O1000O1000O1000O100000O1000O1000O100000O1000O1000O1000O100000O1000O100000O10O10001O2N2N2M3N2N1O2N2M3N2N2N2N1N3N2N2N2N2NfmX6"}, "image_id": 718, "id": 12199}, {"iscrowd": 0, "category_id": 1, "bbox": [43.0, 89.0, 154.0, 161.0], "area": 11778, "segmentation": {"size": [512, 512], "counts": "Ufe01n?2N2M3N1O2N2N2M3N2N1O2N2M3N2N1O2M3N2N2N2N1N3N2N2N2N2M2O2N2N2N2M3N1O2N2N2M3N2N1O2N2M3N2N2N1O2M3N2N2N2N1N3N2N2N2N2M2O2N2N2M3N2N1O1O0O100000O10O100000O10O100000O10O100000O10O1000O10002N2N2M2O2N2N2N2M3N1O2N2N2M3N2N1O2N2M3N2N2N1O2M3N2N2N2N1N3N2N2N2N2M2O2N2N2N2M3N1O2N2M3N2N2N1O2M3N2N2N1O2M3N2N2N2N1N3N2N2N2N2M2Od[m4"}, "image_id": 718, "id": 12200}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 199.0, 67.0, 131.0], "area": 5142, "segmentation": {"size": [512, 512], "counts": "W6S4n;N2N2N2N2N2M2O2N2N2N2M3N2N1O2N2N2M3N2N2O010000000O1000O10000000O1M2O2N2N2N2N2M3N1O2N2N2M3N2N2N1O2N2M3N2N2N2N1B`A[Ob>b0aA[Ob>c0`A[Ob>c0`A[Ob>c0`A[Ob>3UA9:Cb>3VA8Y?Fi@7a?O2N2N2NjWn6"}, "image_id": 718, "id": 12201}, {"iscrowd": 0, "category_id": 1, "bbox": [456.0, 314.0, 56.0, 97.0], "area": 3197, "segmentation": {"size": [512, 512], "counts": "o[T71l?4M2N3M2N3M2N3M2N3M2N3M2N3M2M3N3M2N3M2N3M2N3M2N3M2N3M2N3L3N3M2N3M2010hNiBEW=9kBHU=5nBJR=4PCMP=0RC0n<0SC0m1O1O1N2O1O1O1O1N2O1O1O1N2O1O1O1O1N2O1O1O1O1YOWNUCk1jn0YAVOf>R11O01O01O01O00010OTO[A=e>@^A`0c>\\OaAc0_>ZOdAg0j>01O04L01O01O01O0001O0M301O00010O00010O00010O00010O0001O01O01O01O00010O00010O00010O0001O01OM4L3M3M3M4K4M3M4Lon`0"}, "image_id": 719, "id": 12208}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 0.0, 6.0, 5.0], "area": 27, "segmentation": {"size": [512, 512], "counts": "PPi65k?00000000MSPT1"}, "image_id": 719, "id": 12209}, {"iscrowd": 0, "category_id": 1, "bbox": [441.0, 0.0, 16.0, 5.0], "area": 44, "segmentation": {"size": [512, 512], "counts": "P`l61o?00001O0000001O00001O0000001ONR`k0"}, "image_id": 719, "id": 12210}, {"iscrowd": 0, "category_id": 1, "bbox": [460.0, 0.0, 28.0, 8.0], "area": 122, "segmentation": {"size": [512, 512], "counts": 
"PPV71o?00001O0000001O00001O0000001O00001O00001O0000001O00N2MSP<"}, "image_id": 719, "id": 12211}, {"iscrowd": 0, "category_id": 1, "bbox": [63.0, 10.0, 5.0, 6.0], "area": 25, "segmentation": {"size": [512, 512], "counts": "[`o05k?00000O0gom6"}, "image_id": 719, "id": 12212}, {"iscrowd": 0, "category_id": 1, "bbox": [70.0, 19.0, 25.0, 12.0], "area": 289, "segmentation": {"size": [512, 512], "counts": "cPS1ZOTAg0R?01O01O01O01O4MO00010O00010OL4M4O01O01L3O110O0001O01O01O0lNXAm0o>01O01O01O01O01TORAd0n>YOUAg0S?0O000010O00k@YOQ?k01O01O01O01O01ON2M4L31O01O01O01O00010N1M31O0M3M4L3L4M4L3M4L3G]@2h]o2"}, "image_id": 719, "id": 12215}, {"iscrowd": 0, "category_id": 1, "bbox": [86.0, 42.0, 22.0, 19.0], "area": 342, "segmentation": {"size": [512, 512], "counts": "[Q[1b0^?000000000000000000000000000O19F100000O13M[nY6"}, "image_id": 719, "id": 12216}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 58.0, 9.0, 7.0], "area": 47, "segmentation": {"size": [512, 512], "counts": "nQi61l?4N100010O00000NW^R1"}, "image_id": 719, "id": 12217}, {"iscrowd": 0, "category_id": 1, "bbox": [339.0, 71.0, 108.0, 67.0], "area": 3261, "segmentation": {"size": [512, 512], "counts": "YcY53j?3M3M4K4M3M3M4L3L4O2O0N2N30O00010O00010O000O1N3lNWAo0n>0O0000010OWOQAa0o>[OUAe0T?0O00010O00014K000010O00010ON2M3N3O000N3N10010O00010O0001L3nNVAn0o>O00010O00010O0UORAd0n>YOVAg0Q?00010O0001Oh@\\OT?h0010O0001O01OO2L3M310O0001O01O01O01K400010M2M3L5L3M3M3M4K4G^@1j?Mi\\P1"}, "image_id": 719, "id": 12218}, {"iscrowd": 0, "category_id": 1, "bbox": [126.0, 79.0, 36.0, 28.0], "area": 864, "segmentation": {"size": [512, 512], "counts": "aRo1SO[Am0m>0O00010O0001O01OO2L3M3L4Gf@I^?2f@K]?2e\\W4"}, "image_id": 719, "id": 12221}, {"iscrowd": 0, "category_id": 1, "bbox": [154.0, 116.0, 22.0, 46.0], "area": 1000, "segmentation": {"size": [512, 512], "counts": "PT]2R1b><0000000000000000000000000000000000000000\\lW5"}, "image_id": 719, "id": 12222}, {"iscrowd": 0, "category_id": 1, "bbox": [337.0, 120.0, 26.0, 26.0], "area": 427, "segmentation": {"size": [512, 512], "counts": "[dX51l?3M3L4M4L3M310O00010O0000010O00010O000010L3M3L5L3MU\\Z2"}, "image_id": 719, "id": 12223}, {"iscrowd": 0, "category_id": 1, "bbox": [287.0, 125.0, 50.0, 43.0], "area": 1425, "segmentation": {"size": [512, 512], "counts": "kd_41l?4K4M3M4L3M3M4K4N200010O00010O0001O01O01O01O01O01OM4L3O10001O01O01O01O01O01O0M3M4K4M3M4L3M3L4M4L3MP\\g2"}, "image_id": 719, "id": 12224}, {"iscrowd": 0, "category_id": 1, "bbox": [396.0, 127.0, 8.0, 6.0], "area": 29, "segmentation": {"size": [512, 512], "counts": "STV61k?41O00010O000NQle1"}, "image_id": 719, "id": 12225}, {"iscrowd": 0, "category_id": 1, "bbox": [355.0, 138.0, 63.0, 57.0], "area": 2075, "segmentation": {"size": [512, 512], "counts": "_ea53j?4K4M3M4N1001O01O01O02N010O00010M2M3M3M4L3M3M4K4M301O00010O00010O000010O00010O00010O000bNcAZ1a>01O01O01O0N2M4L3M3M4L3L4M4L3M3M4L3MYk^1"}, "image_id": 719, "id": 12226}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 144.0, 23.0, 69.0], "area": 1246, "segmentation": {"size": [512, 512], "counts": "a4T2l=00O010000005K0000000O100000004L9G:F9G9G9F;F_Yd7"}, "image_id": 719, "id": 12227}, {"iscrowd": 0, "category_id": 1, "bbox": [180.0, 161.0, 118.0, 72.0], "area": 3187, "segmentation": {"size": [512, 512], "counts": 
"mUj23j?4L3M3M4L3L4M3M4N11O01O00001L31O01O01O01O01O0TOVAb0j>[OZAe0e>XO^Ah0m>010O00010O000QAXOg>h0UA[Ok>n0O01O01O00010OSOVAd0j>YOYAg0P?10O0001O01O00010O00010O00010O0001L3L4N30O00010O0000010O000nNUAP1m>01O0VOQAd0n>YOUAg0S?O01O01O01O01O01O01O0001O01O01O01OO2L3M3N30O00010O0O1M4L3M3M3\\Og@?_?0O0001Ac@:a?01O01OO2K4M`iZ3"}, "image_id": 719, "id": 12228}, {"iscrowd": 0, "category_id": 1, "bbox": [414.0, 175.0, 27.0, 27.0], "area": 460, "segmentation": {"size": [512, 512], "counts": "PV_63j?4K4M3M4K40010O00010O00010O00010O00010OO1M4L3M3M4L3M]ZS1"}, "image_id": 719, "id": 12229}, {"iscrowd": 0, "category_id": 1, "bbox": [71.0, 181.0, 84.0, 34.0], "area": 2127, "segmentation": {"size": [512, 512], "counts": "meS15k?9G9G3M000000000O10O1000000000000000O01000000000000000O0100000000000000000O01000000000000000O01000000000000000O0100000000000000000O01000000000000000O01000000000006J:FkYb5"}, "image_id": 719, "id": 12230}, {"iscrowd": 0, "category_id": 1, "bbox": [34.0, 184.0, 7.0, 8.0], "area": 52, "segmentation": {"size": [512, 512], "counts": "iUa07h?100000O1000XZ[7"}, "image_id": 719, "id": 12231}, {"iscrowd": 0, "category_id": 1, "bbox": [312.0, 195.0, 18.0, 14.0], "area": 155, "segmentation": {"size": [512, 512], "counts": "YVl43j?4L3010O00010O00010O00010O000M4L3Njij2"}, "image_id": 719, "id": 12232}, {"iscrowd": 0, "category_id": 1, "bbox": [340.0, 213.0, 53.0, 44.0], "area": 1471, "segmentation": {"size": [512, 512], "counts": "mVZ53:OT?6g@NU?`0N201i@[OR?i00001O01O01O01O01O01O03M01O01O01O01O00001O01O00010O00010O00010O0001O01O01O01O0N2M4L3M3M4L3M3M4KPYk1"}, "image_id": 719, "id": 12233}, {"iscrowd": 0, "category_id": 1, "bbox": [405.0, 221.0, 12.0, 14.0], "area": 113, "segmentation": {"size": [512, 512], "counts": "UgZ64i?3M3N3O00010O00O2L3M3MTY_1"}, "image_id": 719, "id": 12234}, {"iscrowd": 0, "category_id": 1, "bbox": [299.0, 222.0, 34.0, 30.0], "area": 682, "segmentation": {"size": [512, 512], "counts": "`ge43j?3M4K4M3M4M200010O00010O00010O000010O000010O00010O00010O000M4L3Ea@2b?Ka@1]hi2"}, "image_id": 719, "id": 12235}, {"iscrowd": 0, "category_id": 1, "bbox": [78.0, 223.0, 10.0, 12.0], "area": 69, "segmentation": {"size": [512, 512], "counts": "WWW11m?3M2N3M2010O0M4M2MRic6"}, "image_id": 719, "id": 12236}, {"iscrowd": 0, "category_id": 1, "bbox": [23.0, 224.0, 49.0, 25.0], "area": 968, "segmentation": {"size": [512, 512], "counts": "Tg;8h?;E2N0O10O10000000000000000O10O10000000000000O10000000O10000000O10000000000000O10O100O1O1O1O105Jlhk6"}, "image_id": 719, "id": 12237}, {"iscrowd": 0, "category_id": 1, "bbox": [145.0, 224.0, 18.0, 33.0], "area": 464, "segmentation": {"size": [512, 512], "counts": "RgX23m?:F:E9H0000O100000000000O1005K:EYO]Ag0c>VO`Aj0l>0O00010O00010O00010O0000O2L3M3M4L3L4M4LPXX3"}, "image_id": 719, "id": 12240}, {"iscrowd": 0, "category_id": 1, "bbox": [88.0, 238.0, 30.0, 12.0], "area": 308, "segmentation": {"size": [512, 512], "counts": "_W\\1;e?00000000000000000000O1000000000000000000O1000O10000000000bhT6"}, "image_id": 719, "id": 12241}, {"iscrowd": 0, "category_id": 1, "bbox": [170.0, 251.0, 24.0, 25.0], "area": 409, "segmentation": {"size": [512, 512], "counts": "\\Xe23j?3EKh@8U?Kg@:X?7000001O01O01O01O01O01O01O01N1M3M4L3L4MShn4"}, "image_id": 719, "id": 12242}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 252.0, 7.0, 17.0], "area": 102, "segmentation": {"size": [512, 512], "counts": "l7a0_?O1000001O9GjWl7"}, "image_id": 719, "id": 12243}, {"iscrowd": 0, "category_id": 1, "bbox": [293.0, 262.0, 62.0, 46.0], "area": 1630, "segmentation": {"size": [512, 512], "counts": 
"\\ib42k?3M4L3L4M4L30010O00010N1M3M3L5L3M3M4N10M3M4K4M3M4O01O0001O01O01O01O01O01O01O01O00010O0001O01O01O01O01O01O0001L3M3L5L3M3M4KaW^2"}, "image_id": 719, "id": 12244}, {"iscrowd": 0, "category_id": 1, "bbox": [102.0, 270.0, 9.0, 7.0], "area": 63, "segmentation": {"size": [512, 512], "counts": "^Xc17i?000000000000000bWX6"}, "image_id": 719, "id": 12245}, {"iscrowd": 0, "category_id": 1, "bbox": [76.0, 271.0, 23.0, 22.0], "area": 424, "segmentation": {"size": [512, 512], "counts": "aXV11o?;D9H000000000000O100000O10000000000000O7J;EPW^6"}, "image_id": 719, "id": 12246}, {"iscrowd": 0, "category_id": 1, "bbox": [350.0, 277.0, 48.0, 50.0], "area": 1475, "segmentation": {"size": [512, 512], "counts": "jY_53j?3M4L3M3M4L3M3M4L3M3M4N1001O01O01O01O000M4M200010O00010O0fNaAS1_>jNdAW1b>01O0N2M4L3M3M3N3O00010K4M3M4L3M3MPgh1"}, "image_id": 719, "id": 12247}, {"iscrowd": 0, "category_id": 1, "bbox": [168.0, 278.0, 25.0, 26.0], "area": 419, "segmentation": {"size": [512, 512], "counts": "WYd23j?3L5L3M3M4N11O01O01O01O01O00010O0001ON3L3M3M4L3LXWo4"}, "image_id": 719, "id": 12248}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 293.0, 15.0, 31.0], "area": 309, "segmentation": {"size": [512, 512], "counts": "U9o0Q?05K7IO100000000000O10002N9GUVh7"}, "image_id": 719, "id": 12249}, {"iscrowd": 0, "category_id": 1, "bbox": [27.0, 297.0, 35.0, 22.0], "area": 577, "segmentation": {"size": [512, 512], "counts": "bi=;e?1O000O100000000000000001N12GEi@;V?800000000000O100000000000O10000002N>BWfP7"}, "image_id": 719, "id": 12250}, {"iscrowd": 0, "category_id": 1, "bbox": [66.0, 303.0, 9.0, 14.0], "area": 126, "segmentation": {"size": [512, 512], "counts": "_YQ1>b?000000000000000aVj6"}, "image_id": 719, "id": 12251}, {"iscrowd": 0, "category_id": 1, "bbox": [175.0, 314.0, 53.0, 44.0], "area": 1332, "segmentation": {"size": [512, 512], "counts": "_jg24i?4L3M3M4L3L4N3O01O01O01O01O01O01O00010O0001O01O01O01O01O01O01O01O00010O00012M1O01O01O01O000Do@JQ?2RANn>OUA1k>LYA4f>I]A5f>G]A6X?MYe]4"}, "image_id": 719, "id": 12252}, {"iscrowd": 0, "category_id": 1, "bbox": [270.0, 329.0, 51.0, 49.0], "area": 1218, "segmentation": {"size": [512, 512], "counts": "iZW42m?2N2N2N2N2N2N2N2N2N2N2N3M2N2N2N200000000000000000000000000O1N1O01O2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2NlTo2"}, "image_id": 719, "id": 12253}, {"iscrowd": 0, "category_id": 1, "bbox": [36.0, 334.0, 13.0, 22.0], "area": 203, "segmentation": {"size": [512, 512], "counts": "_Zb03m?;E7I0000O0100000Cj@0V?0=000bUW7"}, "image_id": 719, "id": 12254}, {"iscrowd": 0, "category_id": 1, "bbox": [121.0, 335.0, 40.0, 39.0], "area": 1274, "segmentation": {"size": [512, 512], "counts": "`jl12n?2N2N2N3M2N2Ne0[O2NO100000000000000000000000O100000000000000000000000000000000h0XOiT_5"}, "image_id": 719, "id": 12255}, {"iscrowd": 0, "category_id": 1, "bbox": [229.0, 341.0, 26.0, 24.0], "area": 410, "segmentation": {"size": [512, 512], "counts": "Wkb31l?3L4M3M4K4O101O0001O01O01O01O00010O0001N1M3L5L3M3LZUP4"}, "image_id": 719, "id": 12256}, {"iscrowd": 0, "category_id": 1, "bbox": [465.0, 342.0, 47.0, 170.0], "area": 4017, "segmentation": {"size": [512, 512], "counts": "moX73j?3M3]OKZA6W>JcA363T>MbA59NQ>1cA49NQ>a0kACQ>a0lABQ>X1M3001O00L4M3L4M300001O00001O0000N2M3L4M3L4M3L4TOPM[D3HP3i;RM^DOER3j;RMbDT3Z;PMfDP3W;TMdDP3X;TMeDo2X;TMdDP3Y;m0D_KUEe4j:\\KSEg4m:6O1M3L4"}, "image_id": 719, "id": 12257}, {"iscrowd": 0, "category_id": 1, "bbox": [166.0, 348.0, 41.0, 32.0], "area": 786, "segmentation": {"size": [512, 512], "counts": 
"][c24i?3M3M4L3L4O20O00010O00010O000010O000010O00010O00010O00010O00010O00010L3M3M4L3M3MlTh4"}, "image_id": 719, "id": 12258}, {"iscrowd": 0, "category_id": 1, "bbox": [323.0, 348.0, 50.0, 53.0], "area": 1621, "segmentation": {"size": [512, 512], "counts": "okQ53j?4L3M3L5L3M3k@[Ol>n0M3M4K3N31O01O01O01O01O01O01O01O01O00010O0001O01O01OM4L3M3M4L3M3L5N100010ON2M4L3M3M3MkTU2"}, "image_id": 719, "id": 12259}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 353.0, 36.0, 33.0], "area": 892, "segmentation": {"size": [512, 512], "counts": "S;o0Q?00000000000O10O10000000000000000N20O1000000\\OVAe0i>\\OUAf0i>\\OUAg0h>:N000NjN\\AU1d>4O01O0000010O000001O01O0000101N001O0001O01O0001O0001O00011N2N2N2O2M2N2N2N3N1N2N3M2N2O1N3M2N2O2MZSd5"}, "image_id": 719, "id": 12262}, {"iscrowd": 0, "category_id": 1, "bbox": [165.0, 396.0, 51.0, 47.0], "area": 1196, "segmentation": {"size": [512, 512], "counts": "Umb22n?1N3M2N2O2M2N2N3N1N2N3M2O1N3M2N10O0000010O0000010O0000010O000002O1N0101N3M2Ko@XOS?f06N1N2N3M2O1N2N3M2N2O2MQcc4"}, "image_id": 719, "id": 12263}, {"iscrowd": 0, "category_id": 1, "bbox": [236.0, 397.0, 60.0, 50.0], "area": 1838, "segmentation": {"size": [512, 512], "counts": "[]f33i?4M4GHe@;X?8M4L30001O01O01O01O01O0O1M4L3O101O01O01O01O01O01O01O01O01O01O0001O01O01O01O01O01O0ROZAc0e>ZO^Af0c>VOaAi0k>1O01O01ON3L3M3M4L3M3M4Knb[3"}, "image_id": 719, "id": 12264}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 411.0, 21.0, 33.0], "area": 589, "segmentation": {"size": [512, 512], "counts": "mf0?00000000000000000O100000000?A?@f`Z5"}, "image_id": 719, "id": 12273}, {"iscrowd": 0, "category_id": 1, "bbox": [79.0, 462.0, 33.0, 50.0], "area": 1338, "segmentation": {"size": [512, 512], "counts": "anW15k?9G9G9G9G6I10000000000000000O10000000000000000O100002N9G9G9G9G9GS`W6"}, "image_id": 719, "id": 12274}, {"iscrowd": 0, "category_id": 1, "bbox": [278.0, 467.0, 50.0, 45.0], "area": 1157, "segmentation": {"size": [512, 512], "counts": "[_[41o?1N3M2N2O2M2N3N1N2N3M2O1N3M2N2O0O1O010O000010O0000010O000010O002N10O1O2N3Ko@WOR?g05O2M2N3N1N2N3M2O1N3M2Nk`k2"}, "image_id": 719, "id": 12275}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 477.0, 28.0, 35.0], "area": 605, "segmentation": {"size": [512, 512], "counts": "o>Q1o>000000000O1000000000000O16J8H7I7I1OO1000000000000006JQ`a7"}, "image_id": 719, "id": 12276}, {"iscrowd": 0, "category_id": 1, "bbox": [202.0, 478.0, 47.0, 34.0], "area": 975, "segmentation": {"size": [512, 512], "counts": "a_U31n?2O2M2N3N1N3M2O2M2N2O0O1O100O1O100O1O100O1O100O1O100O1O1002N1O2N1O2N1O2N1O2N1O2N1Eb@2`?Kb@4_?Kc@3f?O2N1OQPS4"}, "image_id": 719, "id": 12277}, {"iscrowd": 0, "category_id": 1, "bbox": [32.0, 498.0, 42.0, 14.0], "area": 490, "segmentation": {"size": [512, 512], "counts": "f_`09g?1O00000000000O10000000000000000O1000000000000000000O10000000000000000O1000000009GU`j6"}, "image_id": 719, "id": 12278}, {"iscrowd": 0, "category_id": 1, "bbox": [154.0, 408.0, 26.0, 21.0], "area": 249, "segmentation": {"size": [512, 512], "counts": "l\\]21n?1O2O1U@Lh?70O10O1N2N2M30O10O1000000000O1O0O2N2N2M3N2NlbU5"}, "image_id": 720, "id": 12279}, {"iscrowd": 0, "category_id": 1, "bbox": [165.0, 430.0, 50.0, 67.0], "area": 1584, "segmentation": {"size": [512, 512], "counts": "anb22m?2N2N2M2j@Ib>9\\AIb>9\\AIb>9\\AIb>9[AId>9ZAI^>DbAd0OJ]>CbAf0OI]>CbAY1\\>7M1000O1000O1000O100000O12N2M3N1O2N2N2kNYAm0j>QOXAm0o>N1O2N20000M3N2N2N2M3N2N1O2M3N2NiQd4"}, "image_id": 720, "id": 12280}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 29.0, 17.0], "area": 421, "segmentation": {"size": [512, 512], "counts": 
"0a0_?000000000000O1000000000000O10000000000M3000000000000O7Jj_a7"}, "image_id": 721, "id": 12281}, {"iscrowd": 0, "category_id": 1, "bbox": [33.0, 0.0, 42.0, 11.0], "area": 366, "segmentation": {"size": [512, 512], "counts": "P``08h?3M00000000O10000000000000000O1000000000000000000O10000000000000000O1000000000005Kk_j6"}, "image_id": 721, "id": 12282}, {"iscrowd": 0, "category_id": 1, "bbox": [84.0, 0.0, 26.0, 3.0], "area": 50, "segmentation": {"size": [512, 512], "counts": "PPZ13m?000000000000O1000000000000000000O1000000000000000PPY6"}, "image_id": 721, "id": 12283}, {"iscrowd": 0, "category_id": 1, "bbox": [133.0, 0.0, 21.0, 11.0], "area": 205, "segmentation": {"size": [512, 512], "counts": "P`R2;e?O1000000000000000000000000000000O10002Nnob5"}, "image_id": 721, "id": 12284}, {"iscrowd": 0, "category_id": 1, "bbox": [212.0, 0.0, 40.0, 21.0], "area": 482, "segmentation": {"size": [512, 512], "counts": "PPZ31o?1O2N1O2N1O2N1O1O2N1O2N1O2N1O00O100O1O100O1O100O1O100O1O100O1O100O1O1O101N2N2OmoQ4"}, "image_id": 721, "id": 12285}, {"iscrowd": 0, "category_id": 1, "bbox": [293.0, 0.0, 2.0, 1.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "P`b41o?0P`\\3"}, "image_id": 721, "id": 12286}, {"iscrowd": 0, "category_id": 1, "bbox": [459.0, 0.0, 53.0, 52.0], "area": 2095, "segmentation": {"size": [512, 512], "counts": "j`U72k?3DNf@7V?Lg@7U?n0]AUOc>T101O00001O0000001O00001O0000O1001OM3M300001\\AjN\\>V1`AoN_>Y10001O0000001O00001O00001O00"}, "image_id": 721, "id": 12287}, {"iscrowd": 0, "category_id": 1, "bbox": [75.0, 23.0, 34.0, 46.0], "area": 1418, "segmentation": {"size": [512, 512], "counts": "h`U1b0^?b0^O9G00000000000000000000O10000000O10000000000000000000000005Jc0^Oa0_OQ^Y6"}, "image_id": 721, "id": 12288}, {"iscrowd": 0, "category_id": 1, "bbox": [16.0, 25.0, 22.0, 18.0], "area": 318, "segmentation": {"size": [512, 512], "counts": "kP83m?:F3MO10000000O1000000000O10000000O100008Hon\\7"}, "image_id": 721, "id": 12289}, {"iscrowd": 0, "category_id": 1, "bbox": [157.0, 25.0, 50.0, 40.0], "area": 1387, "segmentation": {"size": [512, 512], "counts": "Sa^23l?6K5K5K4L5J4M0000O0100000O010000000O0100000O0100000O010000000O0100000O010000000O01001O5J5L4L5K4K6K[^h4"}, "image_id": 721, "id": 12290}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 43.0, 40.0, 39.0], "area": 1344, "segmentation": {"size": [512, 512], "counts": "^1T1l>0000000000O100000O1000000000O100000O1000000000O100000O100000000000O106J9G9G9Gdm[7"}, "image_id": 721, "id": 12291}, {"iscrowd": 0, "category_id": 1, "bbox": [51.0, 54.0, 20.0, 21.0], "area": 385, "segmentation": {"size": [512, 512], "counts": "gai07i?=C000000000000000000000000000O100005JV^l6"}, "image_id": 721, "id": 12292}, {"iscrowd": 0, "category_id": 1, "bbox": [170.0, 61.0, 29.0, 25.0], "area": 352, "segmentation": {"size": [512, 512], "counts": "[Re22m?2O2M2N2O2M2N2O0O00010O00010O0001O01O01O010O3M2O2M2N3N1Nk]l4"}, "image_id": 721, "id": 12293}, {"iscrowd": 0, "category_id": 1, "bbox": [127.0, 72.0, 27.0, 28.0], "area": 412, "segmentation": {"size": [512, 512], "counts": "bbo1240d?1[@0c?8N1O010O3M2O1N3M2O2M1O0011N3M2O1N3M2N3N1N3M2O1N3MXmb5"}, "image_id": 721, "id": 12294}, {"iscrowd": 0, "category_id": 1, "bbox": [54.0, 84.0, 72.0, 66.0], "area": 2580, "segmentation": {"size": [512, 512], "counts": "\\Sk03i?6L4M2i@Dg>?VACh>?WACf>?XACf>`0WACg>>XACf>n0N1N3N1O2N1O2N1O1OO2N100O3M2O1N2N100O1O100O1O100O1O01O01O01O01O00010O0001O01O01O01O01O01O01O00010O02N2O2M2N3N1N3M2N3Bd@4]?Jf@3\\?Le@2^?Ke@3e?NblP6"}, "image_id": 721, "id": 12295}, {"iscrowd": 0, "category_id": 1, "bbox": 
[0.0, 87.0, 16.0, 26.0], "area": 358, "segmentation": {"size": [512, 512], "counts": "h2i0W?00O10000000O100000O10002N8H9Gflg7"}, "image_id": 721, "id": 12296}, {"iscrowd": 0, "category_id": 1, "bbox": [329.0, 99.0, 147.0, 147.0], "area": 12619, "segmentation": {"size": [512, 512], "counts": "PeT51n?2O2M2\\OJTA0L9n>HTA1M8m>JSA1M8m>IUA`0j>ATAb0i>l0iASOY>j0jASOX>l0iAROZ>k0iASOX>k0jASOY>k0?N2N3N1N3M2O2M2F^@3i?O2MTja0"}, "image_id": 721, "id": 12297}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 114.0, 46.0, 56.0], "area": 1886, "segmentation": {"size": [512, 512], "counts": "T4;e?000AMPA4P?>0007I8G107I000000005K0O10000000000000000O10000000000003M8H2N000H800M30000G816J9G8H[kX7"}, "image_id": 721, "id": 12298}, {"iscrowd": 0, "category_id": 1, "bbox": [506.0, 214.0, 6.0, 16.0], "area": 63, "segmentation": {"size": [512, 512], "counts": "oVm71h?1\\@2b?0[@2c?8M2O0ZI"}, "image_id": 721, "id": 12299}, {"iscrowd": 0, "category_id": 1, "bbox": [174.0, 221.0, 155.0, 142.0], "area": 10411, "segmentation": {"size": [512, 512], "counts": "nWg21n?2N2N3N1N2N2N2N2N2N3M2O1N2N2N2N2N3M2N2O1N2N2N2N3M2N2N2O1N2N2N3M2N2N2N2O1N2N3O0000000001O01O0000000001O01O0000000001O0001O00000001O0001O0000000001O01O00000O1N2N2N3M2N2N2N2O0O001O002N2N2N3N1N2N2N2N2N2N3M10O00000001O0003M2N2N2N2N2O1N2N3M2N2N2N2N2O1N3TOiBjNY=T1jBiNX=U1jBiNX=V1iBhNY=V1iBhNY=V1iBhNY=V1iBiNY=T1iBjNY=T1jBiNX=U1jBiNX=U1jBiNX=V1j0N2N3M2N2N2N2N2O1N3M2N2N2N2N2N2O2M2N2NnVk2"}, "image_id": 721, "id": 12300}, {"iscrowd": 0, "category_id": 1, "bbox": [110.0, 237.0, 76.0, 81.0], "area": 2782, "segmentation": {"size": [512, 512], "counts": "UXg12m?2N2N2N3N1N2N2N2N2O1N2N3M2N2N2O1N2N2N2N2O2M2N2N2N2N2O1N2N3O00000001O000001O0000000001O000001O000000O2M2GgAiNZ>V1hAgNZ>W1hAgNZ>W19N2N2N3M2N2N2O1N2N2N2N2N3M2N2N2O1N2N2N2NQgR5"}, "image_id": 721, "id": 12301}, {"iscrowd": 0, "category_id": 1, "bbox": [446.0, 277.0, 66.0, 85.0], "area": 3118, "segmentation": {"size": [512, 512], "counts": "QZo61o?1N3M2O2M2N3M2O2M2N3N1N3M2O2M2N3N1N3M2O2M2N2O2M2N3N1N3M100O00010O00010O00010O00010O00010O000010O00010O00010O000010O00010O00010O000\\G"}, "image_id": 721, "id": 12302}, {"iscrowd": 0, "category_id": 1, "bbox": [306.0, 282.0, 177.0, 140.0], "area": 11301, "segmentation": {"size": [512, 512], "counts": "\\[i41o?2M2N2N2N2N2N2N2O0B@[A`0e>BYA>g>DWAFUA:k>IRA7o>Jo@7P?=O2N2N3M000WBkNh\\NmAd1X>O1N2N3M2N2N2N2N2O1N2N3M2N2N2N2N2O1N3M2N2N2N2N2N2N2OeS>"}, "image_id": 721, "id": 12303}, {"iscrowd": 0, "category_id": 1, "bbox": [145.0, 353.0, 50.0, 52.0], "area": 1187, "segmentation": {"size": [512, 512], "counts": "akX21n?2N2O1N2N2N3M2N2N2O1N2N3M2N2N2N2N2O1N30O0000000001O01O000000000N3M2N2O1N2N2N2N3M2N2O1N2N2N2N3M2N2NmSn4"}, "image_id": 721, "id": 12304}, {"iscrowd": 0, "category_id": 1, "bbox": [505.0, 387.0, 7.0, 16.0], "area": 69, "segmentation": {"size": [512, 512], "counts": "[ll71k?1V@2g?5N2O1N2N2lC"}, "image_id": 721, "id": 12305}, {"iscrowd": 0, "category_id": 1, "bbox": [320.0, 447.0, 127.0, 65.0], "area": 4085, "segmentation": {"size": [512, 512], "counts": "o_P51n?1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1001O1O1OO1O1O1O1O1O1O1O1HSO\\An0c>TO[Am0e>TOYAm0f>7O1O1O1O1O1O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O11O1O1O1O1O1O2N1O1O1O1O1O1O1O1O1O2N1O1O1O1O1O1O1O1O1O2N1O1O1O1O1O1O1O1O1O2N1O1O1O1O1O1O1O1O1O2N1O1O1O1O1O1O1O1O1O2N1OQPP1"}, "image_id": 721, "id": 12306}, {"iscrowd": 0, "category_id": 1, "bbox": [485.0, 458.0, 27.0, 53.0], "area": 738, "segmentation": {"size": [512, 512], "counts": "Rob71n?2N2N2N2N3M2N2N2N2O1N2N2N2N2N2N2N3M2N2N2O1N2N2N2N2N2eA"}, "image_id": 721, "id": 12307}, 
{"iscrowd": 0, "category_id": 1, "bbox": [470.0, 487.0, 17.0, 18.0], "area": 155, "segmentation": {"size": [512, 512], "counts": "\\_[72m?2N2N3M2N2000001O0O01N2N2N2N2N2N`P<"}, "image_id": 721, "id": 12308}, {"iscrowd": 0, "category_id": 1, "bbox": [295.0, 0.0, 30.0, 36.0], "area": 1026, "segmentation": {"size": [512, 512], "counts": "k`c48m>k000000000000001O0000000000000000000000000000000000000000]Oc`m2"}, "image_id": 722, "id": 12309}, {"iscrowd": 0, "category_id": 1, "bbox": [414.0, 0.0, 28.0, 31.0], "area": 857, "segmentation": {"size": [512, 512], "counts": "PP_6n0R?00001O0000000000000000000000000000000000000000000000HXPS1"}, "image_id": 722, "id": 12310}, {"iscrowd": 0, "category_id": 1, "bbox": [338.0, 2.0, 12.0, 9.0], "area": 108, "segmentation": {"size": [512, 512], "counts": "RPY59g?000000000000000000000no`2"}, "image_id": 722, "id": 12311}, {"iscrowd": 0, "category_id": 1, "bbox": [331.0, 24.0, 20.0, 8.0], "area": 159, "segmentation": {"size": [512, 512], "counts": "h`U57i?1O00000000000000000000000000000000000X_`2"}, "image_id": 722, "id": 12312}, {"iscrowd": 0, "category_id": 1, "bbox": [272.0, 46.0, 179.0, 275.0], "area": 12981, "segmentation": {"size": [512, 512], "counts": "kVX48Y?`0O2N1O1Of0ZO0000000000000000004L0@`0PNRNTEh2Q:SNTEh2Q:Q2XOh00000000000000000000000000000000000000;E00000001_KYG=H\\No8f0jG50UOV8f0jG[Oj0O\\7=SHjN[1i0b6DjK=U4iNeLW1_JjN\\>]1000000001O000iN`An0i>000000000000001O000000000001O000000000000000000000000001O00000000000000000000000M30001O000000000001O00000000000VA_OU>a0kA_O>DQ=l0aB@O3`==aB@O3`==aB7_=U10001O000000000000000001O00000000F:0000000fNiBKW=5iBKX=4hBLX=4hBLX=4hBLX=4hBLX=4hBXOl=h0TBXOl=h0TBXOl=h0TBXOl=h0TBXOl=h0f000000000000;E0000000O1000008H0000000000001O0B>0000000PXn0"}, "image_id": 722, "id": 12313}, {"iscrowd": 0, "category_id": 1, "bbox": [364.0, 56.0, 83.0, 139.0], "area": 8944, "segmentation": {"size": [512, 512], "counts": "]Uf5?d>m0SOm0ROn0I700000000000000000000000000000000000001O000001O00000000000000000000000000HcLmC]3d;RM\\De2m;[MSDe2m;[MSDe2m;[MSDe2n;f0000000000000000000000001O00000000000000>B00000000000000ZNiDcNX;?eEA[:AcF?_<00000001O0UOn[P1"}, "image_id": 722, "id": 12314}, {"iscrowd": 0, "category_id": 1, "bbox": [251.0, 171.0, 19.0, 31.0], "area": 369, "segmentation": {"size": [512, 512], "counts": "iem34l?5K4K5L0E_OWAa0h>ESA;m>;1000O10O103M4L5K4K5L5Klih3"}, "image_id": 722, "id": 12315}, {"iscrowd": 0, "category_id": 1, "bbox": [207.0, 179.0, 27.0, 21.0], "area": 338, "segmentation": {"size": [512, 512], "counts": "geW34l?6J1O000O10004L2NO10O100000Id@I]?751000O100000O10O10005KXjZ4"}, "image_id": 722, "id": 12316}, {"iscrowd": 0, "category_id": 1, "bbox": [447.0, 285.0, 16.0, 41.0], "area": 598, "segmentation": {"size": [512, 512], "counts": "jio6m0000000000000000000000000000SOPXh0"}, "image_id": 722, "id": 12317}, {"iscrowd": 0, "category_id": 1, "bbox": [450.0, 329.0, 11.0, 29.0], "area": 295, "segmentation": {"size": [512, 512], "counts": "Q[Q75S?h0000000000000000000gUi0"}, "image_id": 722, "id": 12318}, {"iscrowd": 0, "category_id": 1, "bbox": [92.0, 477.0, 47.0, 35.0], "area": 1191, "segmentation": {"size": [512, 512], "counts": "Y_^12m?5L4L4L5J5L00000MXOm@i0R?3000000O11O2NO1000000O1000000O1000000O100000ON300000O01002N2M5L4L4L4K5L5KVPj5"}, "image_id": 722, "id": 12319}, {"iscrowd": 0, "category_id": 1, "bbox": [314.0, 0.0, 133.0, 68.0], "area": 4622, "segmentation": {"size": [512, 512], "counts": 
"TPm41n?2N2O1N2N2O1O1O1O1O1O2N1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O2N1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O2N1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O2NO1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O1O1O1O10P`P1"}, "image_id": 723, "id": 12320}, {"iscrowd": 0, "category_id": 1, "bbox": [454.0, 91.0, 58.0, 186.0], "area": 4762, "segmentation": {"size": [512, 512], "counts": "`TS71n?2N2N3M2N2N2N2N2N2N2N2O1N2N2N3M2N2N2N2N2N2N2N2N2ZC`Nj:a1TEaNj:a1TEaNj:b1SE`Nk:b1SEaNj:a1TEaNj:a1TEaNj:a1TEaNj:a1TEaNj:a1TEaNj:a1TEaNj:a1UE`Ni:b1UE`Ni:b1UE`Nj:a1TEaNj:b1SE`Nk:b1SEaNj:a1TEaNCmNl:d2_EaNCmNl:d2_EaNCmNl:d2_EaNCmNl:d2_EaNCmNl:d2_EmN^:U1`EmN^:U1aElN]:V1aElN]:V1aElN^:V1_ElN_:\\3N2N2N2N2N2TM"}, "image_id": 723, "id": 12321}, {"iscrowd": 0, "category_id": 1, "bbox": [73.0, 93.0, 132.0, 139.0], "area": 6702, "segmentation": {"size": [512, 512], "counts": "\\fT11n?2N2N2N2N3M2N2N2N2O1N2N2N2N2N2N3M2N2N2N2N2O1N2N2N2N1O1O000000000001O0000000001O000000000001O00F^NWBb1i=aNTB_1l=cNRB]1n=eNPB\\1o=:0000001O000000_OTNoBl1Q=VNmBj1S=YNjBg1V=[NhBf1W=\\NgBd1Z=]NdBc1\\=_NbBa1^=`00000000000000001O01O00000000000000000001O01O0000001O2N2N2N2N2N2O1N2N2N2N3M2N2N2N2N2N2N2O1N2N3M2N2N2N2N2N2N2N2N2O1N3M2N2N2N2N2N2Nk[i4"}, "image_id": 723, "id": 12322}, {"iscrowd": 0, "category_id": 1, "bbox": [503.0, 493.0, 9.0, 19.0], "area": 97, "segmentation": {"size": [512, 512], "counts": "hok73l?1O2M3N1N3N2M3N1"}, "image_id": 725, "id": 12323}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 15.0, 6.0], "area": 56, "segmentation": {"size": [512, 512], "counts": "02n?00001O001O00001O001O0000M3NR`h7"}, "image_id": 726, "id": 12324}, {"iscrowd": 0, "category_id": 1, "bbox": [57.0, 0.0, 71.0, 22.0], "area": 912, "segmentation": {"size": [512, 512], "counts": "S`l02k?4O001O00001O001O00001O001O00001O001OO1N2M31O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O0000N2M3N21O00O1N2M3N2M3NRPP6"}, "image_id": 726, "id": 12325}, {"iscrowd": 0, "category_id": 1, "bbox": [151.0, 0.0, 72.0, 60.0], "area": 2095, "segmentation": {"size": [512, 512], "counts": "X`[21l?3N2M4]@I[?7b@L^?`0\\BGc=9ZBJf=7WBLh=4UBOk=2RB0n=0PB3o=NmA5S>KkA8T>j001O00001OO1N2M3O10000N2N2M3N2M3lN]Ai0e>TO^Aj0m>M4N101O01ON3L3N3M2M4M2M3Nd_`4"}, "image_id": 726, "id": 12326}, {"iscrowd": 0, "category_id": 1, "bbox": [256.0, 0.0, 82.0, 32.0], "area": 1462, "segmentation": {"size": [512, 512], "counts": "RPP41m?201O00001O001O00001O001O00001O001O001O00001O001O00001O001O00001O001O001O00001O001O00001O001O00001O001O001O00001O001O00001O001O00001O001O001OM3N2001O0000VOm@h0U?Ak@1X?Kl@1W?Mk@1X?Kk@2S_g2"}, "image_id": 726, "id": 12327}, {"iscrowd": 0, "category_id": 1, "bbox": [433.0, 0.0, 20.0, 21.0], "area": 262, "segmentation": {"size": [512, 512], "counts": "_`h62k?3N2M4M2N3L30001O001O00O1M3N2M3N2M3N2MS`m0"}, "image_id": 726, "id": 12328}, {"iscrowd": 0, "category_id": 1, "bbox": [510.0, 0.0, 2.0, 1.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "PPo71o?00"}, "image_id": 726, "id": 12329}, {"iscrowd": 0, "category_id": 1, "bbox": [236.0, 1.0, 22.0, 18.0], "area": 219, "segmentation": {"size": [512, 512], "counts": "ZPf31m?3L3N3M2010O0010O0010O010O0010O0010M2N3M2Mkon3"}, "image_id": 726, "id": 12330}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 6.0, 2.0, 6.0], "area": 8, "segmentation": {"size": [512, 512], "counts": "66k?Llon7"}, "image_id": 726, "id": 12331}, {"iscrowd": 0, "category_id": 1, "bbox": [450.0, 7.0, 52.0, 53.0], "area": 1471, "segmentation": {"size": [512, 512], "counts": 
"^QQ72k?4L3N2M4M2O2O00010M2M4M2M3M4M2M3N3L3M40O00010O010O00010O01O01N1N3L3N2M4M2M3M4M2M3N3L3M4N11O01F[@7g?1OO2M2M^o4"}, "image_id": 726, "id": 12332}, {"iscrowd": 0, "category_id": 1, "bbox": [226.0, 34.0, 93.0, 66.0], "area": 3016, "segmentation": {"size": [512, 512], "counts": "SRa31m?2M3N3L3N3L3N2N3L3N3L3N2M4N110O00010O010O00010O010O00010L3010O0010O0010O0N30O0010O0010O00lNYAP1k>00ROUAg0l>UOWAl0o>O04MO01O01O010O01O01O01O0M4M21O010O01O01O010O01O01O010O01O01O0O2L3N2M4M2M4M2M3N3L3N3L3No]P3"}, "image_id": 726, "id": 12333}, {"iscrowd": 0, "category_id": 1, "bbox": [488.0, 47.0, 24.0, 28.0], "area": 463, "segmentation": {"size": [512, 512], "counts": "PRd73k?3L3N2N3L3N3L300010O010O00010O010O00010N1M40O0[N"}, "image_id": 726, "id": 12334}, {"iscrowd": 0, "category_id": 1, "bbox": [103.0, 58.0, 81.0, 43.0], "area": 1887, "segmentation": {"size": [512, 512], "counts": "`bc11l?3N2N3L3N3L3N2M4O0010O01O01O01N1N3N11O010N101WOl@e0S?YOPAf0U?10O00101N10O00010O010M2N3O3M0000000001O01O00000000010O000000000010L300001O000001O01O0000000001_On@2R?FVA:X?000000000000H`mS5"}, "image_id": 726, "id": 12335}, {"iscrowd": 0, "category_id": 1, "bbox": [437.0, 66.0, 68.0, 53.0], "area": 2172, "segmentation": {"size": [512, 512], "counts": "lbj6460o>NTA4K0o>NSA?j>ESA>j>DTA>i>?M2O2O01O01O010O01O010O01O01N1N3L3O1010O01O010O01O01O010O01O010O01O01O010O01O010O01O01O010O01O010O01O001M2M3N3L3N3M2M3N3L3N3MW]3"}, "image_id": 726, "id": 12336}, {"iscrowd": 0, "category_id": 1, "bbox": [500.0, 93.0, 12.0, 27.0], "area": 187, "segmentation": {"size": [512, 512], "counts": "bSj71l?3N3M2M4M2N2M4M2N30O01QM"}, "image_id": 726, "id": 12337}, {"iscrowd": 0, "category_id": 1, "bbox": [73.0, 110.0, 91.0, 59.0], "area": 2887, "segmentation": {"size": [512, 512], "counts": "VdT14j?2M3N3M2M4M2M4M20010O0010O0010O0010O010O0010O001O0M3N3M2M4N110O00010O010O01O01O010O010O00010O010O01O01O010O01O01O010O010O01O01O010O01O01O010O010O01O01O01O0N3L3N2N3L3N3L3N2N3L3N3L3Nkk]5"}, "image_id": 726, "id": 12338}, {"iscrowd": 0, "category_id": 1, "bbox": [395.0, 119.0, 58.0, 50.0], "area": 1595, "segmentation": {"size": [512, 512], "counts": "idU61m?2M4M2M3N3L3N3M2N21O001O01O010O01O01O0M4M2M40O0010O010O0N2N3L3N3L30010O010O0010O0O2M2N3L3N2M4M2M4M2N2M4M2M4M2M3N3MQ\\m0"}, "image_id": 726, "id": 12339}, {"iscrowd": 0, "category_id": 1, "bbox": [195.0, 147.0, 58.0, 58.0], "area": 2008, "segmentation": {"size": [512, 512], "counts": "ieQ33k?3M2M3N3L3N3B^OYAe0e>]OYAe0d>_OYAd0f>;10O001L3N2N3O010O00010O010O01O01O010O010O00010O010O01O01O010ON3M2M3N3L3N3M2M4M2M3N3M2M4M2M3N3MnZQ4"}, "image_id": 726, "id": 12340}, {"iscrowd": 0, "category_id": 1, "bbox": [47.0, 149.0, 52.0, 66.0], "area": 1913, "segmentation": {"size": [512, 512], "counts": "Wfg04j?2N2M4M2M4M2N2M4M2N3L3N3L3N2N3L3N3L3N2N3L3010O010O0010O0010N1N3L3N2M4M2N3L3N3L3N2N3L3N3M2M4M2M3N3M2M4MP[^6"}, "image_id": 726, "id": 12341}, {"iscrowd": 0, "category_id": 1, "bbox": [463.0, 149.0, 49.0, 43.0], "area": 1337, "segmentation": {"size": [512, 512], "counts": "aeW73j?4M2M4L3N2M4L3O2O01O01O01O010O01O01L3N3O01O010O000M4M2M3M4M2O2O01O01O01O01O010O01O01O01QOWAe0i>XO[Ah0d>UO_Ak0k>N3MmJ"}, "image_id": 726, "id": 12342}, {"iscrowd": 0, "category_id": 1, "bbox": [444.0, 153.0, 20.0, 24.0], "area": 282, "segmentation": {"size": [512, 512], "counts": "\\Un61l?3N3L3N2M4L3N3N10010O01O0N2M4M2M3M4M2MVkg0"}, "image_id": 726, "id": 12343}, {"iscrowd": 0, "category_id": 1, "bbox": [382.0, 166.0, 47.0, 56.0], "area": 1495, "segmentation": {"size": [512, 512], "counts": 
"`Vo53k?3L3N2M4M2M4O00010N1G[OUAh0g>\\OUAg0i>9M4M2M4M2M301O0010O0010O0010O001M2N2M4M2M4M2M4M2M3N3L3N3M2M3N3L3N3LbZY1"}, "image_id": 726, "id": 12344}, {"iscrowd": 0, "category_id": 1, "bbox": [239.0, 186.0, 26.0, 26.0], "area": 449, "segmentation": {"size": [512, 512], "counts": "]fg32k?3M3L4M4K4N201O01O00010O0001O01O01O01O0O1L4M4L3L4MTZk3"}, "image_id": 726, "id": 12345}, {"iscrowd": 0, "category_id": 1, "bbox": [432.0, 194.0, 76.0, 61.0], "area": 2393, "segmentation": {"size": [512, 512], "counts": "UWh61m?2M3HLb@7Z?Lc@7[?9M2M3N3L3N3M2M40O00010O010O01O01O010O010O00010O0ROYAd0g>YO\\Af0d>WO_Aj0a>SObAl0i>001n@TOo>o0O01O010O01O01O010M2O110O010O0001L3N3L3N2N3O010O010OO1M2O2N3L3N3M2M4M2M3N5K2M4M2MZi1"}, "image_id": 726, "id": 12346}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 195.0, 71.0, 88.0], "area": 3230, "segmentation": {"size": [512, 512], "counts": "bg]1280Z?3b@1Z?2d@0Z?=M2M4M2N2O20O01O010O010O01O01O010O010O01O01O010O0O2L3N2N3@fNUB]1i=fNTB]1i=eNUB]1h=fNUB^1h=>N2M4N101^BdM_=^20010O010O0010O0010O010OoN`BB`=L3N2N3LTh^5"}, "image_id": 726, "id": 12347}, {"iscrowd": 0, "category_id": 1, "bbox": [416.0, 204.0, 20.0, 26.0], "area": 295, "segmentation": {"size": [512, 512], "counts": "PW`62k?3N3L3M3N3L3N3M21O01O01M2M3N3L3N3L3N2MciU1"}, "image_id": 726, "id": 12348}, {"iscrowd": 0, "category_id": 1, "bbox": [186.0, 214.0, 67.0, 55.0], "area": 1820, "segmentation": {"size": [512, 512], "counts": "aWm21m?2M4M2M3N3M2O2O010O0M3N3M2M4M2O110O0010O010O0010O0010O010O0010O0010O0010O001TAnNi>T1010O010O00010O010O001TOWA`0h>^O[Aa0e>\\O^Ae0b>XOaAg0l>10O00010O0O2L3N2M4M2N3L3NZXQ4"}, "image_id": 726, "id": 12349}, {"iscrowd": 0, "category_id": 1, "bbox": [59.0, 216.0, 28.0, 28.0], "area": 463, "segmentation": {"size": [512, 512], "counts": "[gm01m?2M4M2N2M4M2M4M20010O0010O010O00010O010N1N3L3N2M4M2N3LSYd6"}, "image_id": 726, "id": 12350}, {"iscrowd": 0, "category_id": 1, "bbox": [358.0, 226.0, 59.0, 46.0], "area": 1708, "segmentation": {"size": [512, 512], "counts": "lWc53k?2N2M4M2M4M2N2M4M2M4N101O01O01O010O01O01O010O010O000N3L3O2O0010O0010O010O00010O001M2OO2M4M2N2M4M2M4M2N3L3N2M4M2N3L3NiX_1"}, "image_id": 726, "id": 12351}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 234.0, 60.0, 85.0], "area": 2935, "segmentation": {"size": [512, 512], "counts": "X96h?3[@I\\?`0L3N3L3M3N3L3N3L3M3N3L3N2M4L3N3L3N2M4L3N3L3N201O01O01O01M2O20O01L3M3N3L3N3L3M3N3L3N3L3M3N3L3M4M2M3N3L3M4M2M3N3L3M4MZhQ7"}, "image_id": 726, "id": 12352}, {"iscrowd": 0, "category_id": 1, "bbox": [408.0, 248.0, 19.0, 23.0], "area": 270, "segmentation": {"size": [512, 512], "counts": "XX\\63j?3N3L3N2N3L3O2O00010O01M2N2N3L3N3L3NVXZ1"}, "image_id": 726, "id": 12353}, {"iscrowd": 0, "category_id": 1, "bbox": [425.0, 251.0, 65.0, 48.0], "area": 1863, "segmentation": {"size": [512, 512], "counts": "ihd61m?2M3N3L3N3N100010O010O00010O01M2M3N3L3N2N3O010O00010O0O2L3N2N3O0010O001M2O110O010O00010O010O00010O010O00010O000M4M2M4M2Dm@HV?5n@GV?6l@HV?5=Lkg:"}, "image_id": 726, "id": 12354}, {"iscrowd": 0, "category_id": 1, "bbox": [492.0, 256.0, 20.0, 62.0], "area": 579, "segmentation": {"size": [512, 512], "counts": "dYf72l?2N3L3N3M2M3N3M2M4M2M4M200DnNiAT1S>POjAR1T>POiAT1T>oNiAS1U>=M4oG"}, "image_id": 726, "id": 12355}, {"iscrowd": 0, "category_id": 1, "bbox": [168.0, 257.0, 65.0, 51.0], "area": 1883, "segmentation": {"size": [512, 512], "counts": "mXd21m?3L3M4M2M3N3L3M4M2M3N3O00010O0010O0010O010O00010M2M301O010O01O01O010O00010O010O00010O01O01O01ZOXA3g>J]A5c>H`A8a>EaA<^>AfA>[>_OgAb0l>O00010O001N1N2M4L3N2M4MoV[4"}, "image_id": 726, "id": 12356}, {"iscrowd": 0, 
"category_id": 1, "bbox": [57.0, 264.0, 79.0, 84.0], "area": 3032, "segmentation": {"size": [512, 512], "counts": "lil02l?2N3L3N3L3N2M4M2M4M2M4M2N2M4M2M4M2M3NO010O2O3L31O010O01O010O01O01O010O01O01VObA0^>NdA3<\\O\\==\\B65@_=8^B90Ab=3bB;IFe=LdB?DGh=HgB`0_OKj=AjBX1o=010O010O00010M2N3L3N3O01O010O01O01N1N3L3N3L3N2N3oNVAg0S?N2M4M2M4M2N3L3N^fk5"}, "image_id": 726, "id": 12357}, {"iscrowd": 0, "category_id": 1, "bbox": [132.0, 310.0, 78.0, 63.0], "area": 2630, "segmentation": {"size": [512, 512], "counts": "mZR22l?3L3N3M2^@F[?a0N2M4M2N2M4M2O20O00010O010O00010O010O000N3N100010O0SAROi>R100010O010O00001L310O01O01O01O010N1N2M4M2M3M4M2M4M2M2OO3N3L3M3N3L3N3L3N2M4L3N2M4M2M4M2M3M4M2MVff4"}, "image_id": 726, "id": 12358}, {"iscrowd": 0, "category_id": 1, "bbox": [313.0, 333.0, 52.0, 57.0], "area": 1857, "segmentation": {"size": [512, 512], "counts": "[kl44l?4L3L4M4L3L1G]OUAc0j>ASA?m>910DROeAo0[>UOaAj0`>YO]Ag0b><0O10O1000O10O10O10O10O10O3N3M3M2M30M100000O02O3M3L5L3M3L4M3M4L3L4M3M3L5L3MTTY2"}, "image_id": 726, "id": 12359}, {"iscrowd": 0, "category_id": 1, "bbox": [372.0, 354.0, 35.0, 29.0], "area": 672, "segmentation": {"size": [512, 512], "counts": "Y[j53m?3M3L5L3M3L4M000O01000O01000O10O10O10O10O10O101O2M10O1000O0Kh@D[?99L5L3M`Td1"}, "image_id": 726, "id": 12360}, {"iscrowd": 0, "category_id": 1, "bbox": [383.0, 394.0, 69.0, 83.0], "area": 2887, "segmentation": {"size": [512, 512], "counts": "[no52k?3M4M2M3N3L3M4M2M3N3N110N1M3N3N1010O01O01O010O01O01ON3L3N3L3N2N3L3N3M2M3N3M2M4M2N01N3N3M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N^cm0"}, "image_id": 726, "id": 12361}, {"iscrowd": 0, "category_id": 1, "bbox": [299.0, 405.0, 43.0, 58.0], "area": 1468, "segmentation": {"size": [512, 512], "counts": "Tne41o>0nA2P>0mA4o=0nA2P>0mA4o=0nA2P>0mA3P>0nA3Q>NkA5U>KiA8V>IfA:[>e0010O01O01O010O01O01O010O01ON3M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3NPcd2"}, "image_id": 726, "id": 12362}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 408.0, 49.0, 78.0], "area": 2070, "segmentation": {"size": [512, 512], "counts": "j=\\1b>1N2O1O1N2O1N2O1O1N2O1O1N2001O1O2N1O2O0O2N1O1O2N1O2N1O2NN2O1N21N2M4M2M4M2N3L3N2N3L3N3M2M4M2M3N3MSSW7"}, "image_id": 726, "id": 12363}, {"iscrowd": 0, "category_id": 1, "bbox": [444.0, 409.0, 68.0, 84.0], "area": 3078, "segmentation": {"size": [512, 512], "counts": "i^n61l?3N3M2M3N3L3N3L3N3M2M3N3L3N3M2M3_AeN[>a1M3O2O010O00010O010O001O0M3N3L3N3M2M3N3M2M4OO1N3M2M3N3L3N3M2M4M2M3N3M2M4M2N2M4M2M4M2N3L3N2N3L3N3L3N2NnB"}, "image_id": 726, "id": 12364}, {"iscrowd": 0, "category_id": 1, "bbox": [43.0, 420.0, 45.0, 48.0], "area": 1349, "segmentation": {"size": [512, 512], "counts": "nme02k?4`@LP?6n@LP?7m@Lo>7n@LP?d0M2M4N110O01O01O001L3N1OO012O2O010O01O010O01O010O01OWO[A7f>F\\A;c>C`A@bA`0^>^OdAc0\\>ZOgAd0k>N3M2M4M2N2M4M2NRbc6"}, "image_id": 726, "id": 12365}, {"iscrowd": 0, "category_id": 1, "bbox": [81.0, 439.0, 81.0, 58.0], "area": 2526, "segmentation": {"size": [512, 512], "counts": "gnX13k?3M2M3N3M2M4M2N2M4N110O0001O001O010O01O0RAUOh>k0UAWOk>P10O00010O010O0010O0O2L3N3M2M3N3L3N3N11O010N1N2M4M2M4M2N3L3N2O2O010O010O00010O010O010O00010O010O001M2N2M4M2N3L3N3L3N2Mka^5"}, "image_id": 726, "id": 12366}, {"iscrowd": 0, "category_id": 1, "bbox": [339.0, 439.0, 32.0, 41.0], "area": 733, "segmentation": {"size": [512, 512], "counts": "gnY51m?3L3N2M4M2M4M2M3N3M2M4M2M3010O010O0010ON2M4M2M4L3N3L3M3N3L3M4MSRV2"}, "image_id": 726, "id": 12367}, {"iscrowd": 0, "category_id": 1, "bbox": [244.0, 455.0, 93.0, 57.0], "area": 3075, "segmentation": {"size": [512, 512], "counts": 
"f_j32l?3L3N3M2M3N3L3N3M2M4M2M3N3M2M4M2M4M2M3O01M4M2N3L3N3M2M310O010O0010O0010O010O0010WAmNc>S1[AoNe>W10O010O0010O0010O010O0010O0010iNZAR1f>lN]AS1h>01O001O00001O001O001O00001O001O001O00O1M3001O001O0M3N3M2M4@c@9d?M2M3N3MhPg2"}, "image_id": 726, "id": 12368}, {"iscrowd": 0, "category_id": 1, "bbox": [28.0, 470.0, 46.0, 39.0], "area": 1069, "segmentation": {"size": [512, 512], "counts": "P_>2l?2M4M2N3N1m@Cc>=[AFe>:XAHh>8UALk>4RANn>2PA1P??010O00N3O0010O01O010O01O010O001L3N2O2O010O0010O010OO1N3L3N3M2N3L3N2Nn`j6"}, "image_id": 726, "id": 12369}, {"iscrowd": 0, "category_id": 1, "bbox": [389.0, 472.0, 26.0, 27.0], "area": 411, "segmentation": {"size": [512, 512], "counts": "onR62k?3N3M20001d@GP?8m@KS?5j@NV??0O010O00010O00010O00001L3M4M2M3M4LSQ`1"}, "image_id": 726, "id": 12370}, {"iscrowd": 0, "category_id": 1, "bbox": [501.0, 489.0, 11.0, 23.0], "area": 154, "segmentation": {"size": [512, 512], "counts": "noj72k?3N2M3N2N2M3N2M3O100"}, "image_id": 726, "id": 12371}, {"iscrowd": 0, "category_id": 1, "bbox": [479.0, 507.0, 12.0, 5.0], "area": 31, "segmentation": {"size": [512, 512], "counts": "oo_71m?2N21O001O001O00001O00QP:"}, "image_id": 726, "id": 12372}, {"iscrowd": 0, "category_id": 1, "bbox": [352.0, 508.0, 11.0, 4.0], "area": 28, "segmentation": {"size": [512, 512], "counts": "n_`52l?200001O001O00001O00QPZ2"}, "image_id": 726, "id": 12373}, {"iscrowd": 0, "category_id": 1, "bbox": [358.0, 0.0, 154.0, 178.0], "area": 12654, "segmentation": {"size": [512, 512], "counts": "VQc52m?2N2N2N2N2N2N2N2N2N2M2O2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N1O2N2N2O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O010000000000000000000000000O10000000000000000000O1000000000000000O1000000000000000000000000000O10000000O1000000000000000000000000000000000000O0100000000000N2M3N2N2N2N2N2N2N2N1O000000000000000[N"}, "image_id": 728, "id": 12374}, {"iscrowd": 0, "category_id": 1, "bbox": [431.0, 0.0, 18.0, 10.0], "area": 93, "segmentation": {"size": [512, 512], "counts": "P`g61o?1O1O1O1O1O1O1O1O1OO1O1N2O1O1O1O1OQ`o0"}, "image_id": 728, "id": 12375}, {"iscrowd": 0, "category_id": 1, "bbox": [471.0, 0.0, 41.0, 41.0], "area": 836, "segmentation": {"size": [512, 512], "counts": "P`[71o?1O1O1O1O1O1O1O1O1O1O1O1O1O1n@B`>?^ACa>>]ADb>=\\AEc><[AFd>;ZAGe>:YAHf>9XAIg>7XAKg>h0OO1O1O1BXACi>=ZAAg>>[A@f>?\\A_Oe>`0]A^Od>a0>O11O1O1O1O1O00"}, "image_id": 728, "id": 12376}, {"iscrowd": 0, "category_id": 1, "bbox": [295.0, 0.0, 32.0, 29.0], "area": 601, "segmentation": {"size": [512, 512], "counts": "b`c41m?2M4M2N3L3N3c@@T?Ok@g0U?201O001O001O0000N20000001OM3N2N2M3N2N2M3N2N2M3NR`l2"}, "image_id": 729, "id": 12377}, {"iscrowd": 0, "category_id": 1, "bbox": [341.0, 0.0, 113.0, 61.0], "area": 3346, "segmentation": {"size": [512, 512], "counts": "i`Z51l?3N3L4M2M4M3M2M4M3L301O001O00001O001O00001O000010O001O00010O001O00010M2N3L3N2O20O010O00010O03NO00010O0N3L3O110O010O00010O010O00010O01OPAVOj>k0SAXOl>o00O0ROUAh0k>TOXAm0m>010O10ON3M2M4O01O01O01O010O00010O0010O01O00010O001O01O01O0N3L3N3L3M3N3L3M4M2M3MVol0"}, "image_id": 729, "id": 12378}, {"iscrowd": 0, "category_id": 1, "bbox": [469.0, 0.0, 25.0, 28.0], "area": 373, "segmentation": {"size": [512, 512], "counts": "h`Z72l?3M2K6M2N2M10O12N3M20001O00O1N2N2M3N2N2M3N2N2N2MSP9"}, "image_id": 729, "id": 12379}, {"iscrowd": 0, "category_id": 1, "bbox": [497.0, 0.0, 15.0, 22.0], "area": 243, "segmentation": {"size": [512, 512], "counts": "\\`h72g?O]@3a?O\\@5`?701O00003M001O00001O001O"}, "image_id": 729, "id": 12380}, {"iscrowd": 0, "category_id": 1, "bbox": [52.0, 3.0, 73.0, 51.0], "area": 1818, "segmentation": 
{"size": [512, 512], "counts": "mPj01l?3N3L3N2M4M2N3L3N2M4N1010O00010O010O010O00010O010O00010O010O00010O010O0010O0010O0010O0010O0010O0010O001]OPA5o>ITA6m>FVA;i>CZAB\\A?Q?1N201ON3M201O01N101O0001L3N3H\\@0f?M]@0bnQ6"}, "image_id": 729, "id": 12381}, {"iscrowd": 0, "category_id": 1, "bbox": [209.0, 26.0, 49.0, 67.0], "area": 1923, "segmentation": {"size": [512, 512], "counts": "naX33k?2N3L3k@Ga>;]AH_><^AF`><]AGa><\\AG`><]AGa>n0M2M4M2M3N3M2O20O00010O010O010O0O1N3M2M4M2M4M2NO02O3M2M3N3L3N3M2M3N3M2M4M2M4M2Nonn3"}, "image_id": 729, "id": 12382}, {"iscrowd": 0, "category_id": 1, "bbox": [470.0, 31.0, 42.0, 69.0], "area": 1414, "segmentation": {"size": [512, 512], "counts": "TR[72l?2N3M2N2EFn@=o>Fn@YO[Ah0e>UO^Aj0l>010O0010O01[AVOS>i0kAZOT>g0iA[OX>d0fA_OY>a0dAB]>>`AE_>n0010O0010O010O0[N"}, "image_id": 729, "id": 12383}, {"iscrowd": 0, "category_id": 1, "bbox": [258.0, 48.0, 52.0, 67.0], "area": 2004, "segmentation": {"size": [512, 512], "counts": "XRQ43k?6I5L3c@@Y?c001O010TA]OY>c0dAA[>?cAC^>=^AG`>:^AH`>;\\AHa>n0M2M3N3L3M4N100010O01O01O010O01OO2L3N3L3N2M4M0O2O2M4M2M4M2M3M4M2M4M2M4M2M4M2MZnT3"}, "image_id": 729, "id": 12384}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 69.0, 19.0, 89.0], "area": 836, "segmentation": {"size": [512, 512], "counts": "U2i2X=M2M4L1O3dN_B7d=F^B7e=@ZBA1l0h=@ZBB1k0h=@YBB2k0h=@eBH\\O=S>_OmA5g0L\\O>U>DiB8a>M4M2O10101NZ\\f7"}, "image_id": 729, "id": 12385}, {"iscrowd": 0, "category_id": 1, "bbox": [294.0, 73.0, 54.0, 65.0], "area": 1767, "segmentation": {"size": [512, 512], "counts": "mSc42l?3L3N3M2M3N3M2M4M2M3N3M2M4M2N3L3N2N3L3N3L3N210O010O00M4M2M4M2N2M4M2M4M2N3POTAg0o>WOSAg0T?M4N110O00010O010O0N3M2M3N3L3NWma2"}, "image_id": 729, "id": 12386}, {"iscrowd": 0, "category_id": 1, "bbox": [45.0, 75.0, 55.0, 72.0], "area": 2216, "segmentation": {"size": [512, 512], "counts": "Udf04j?2M4M2N2CCTAa0h>BVA`0h>BUAb0g>BVA`0h>>M2M3N3L3N3M2M3N3M2M4O0010O00010O0N3M2N2M4M2N3L3N2M10O4M20M3N3M2M4M2M3N3M2M4M2M4M2N2M4M2N3Lam]6"}, "image_id": 729, "id": 12387}, {"iscrowd": 0, "category_id": 1, "bbox": [324.0, 90.0, 61.0, 62.0], "area": 1606, "segmentation": {"size": [512, 512], "counts": "`TR52k?3M4L3M3N3M21O01O01O01O010O00010O00O2M2M4L3N210OM4L3M3N3L3M3DWO^Am0^>VO_Am0^>VO`Al0]>>L3N2N30O010O00001L3M3M4L2O0O2N4L3M3N3L3M3M4L3N3L3M3MU]o1"}, "image_id": 729, "id": 12388}, {"iscrowd": 0, "category_id": 1, "bbox": [177.0, 104.0, 95.0, 73.0], "area": 3368, "segmentation": {"size": [512, 512], "counts": "_dh22k?4M2M4L3N2M4M2M4M2M4M2M3M4M2M4O01O01O010O010O00010O010O00010OmNaAe0`>XObAi0]>TOgAk0Y>SOiAn0W>nNmAQ1a>0010O010O00010O010O010O00010O010O00010O010O010O00010O010O00010O01O0N3M2M3N3M2010O001M2N0O010O012N2M4M2M4M2M3^Om@5V?Hm@5U?Im@4b?M2Mlkg3"}, "image_id": 729, "id": 12389}, {"iscrowd": 0, "category_id": 1, "bbox": [472.0, 115.0, 40.0, 64.0], "area": 1642, "segmentation": {"size": [512, 512], "counts": "bT\\73=Oo>4m@OP?4m@0o>3o@Oo>c0L3O101WAnNc>Q1[AQOe>U110O000M4L3N3L3M3N3L31O010O01O01L3M3N3L3M4M2M3M4TOUA`0l>^OVAb0j>[OYAe0R?0O010O0kK"}, "image_id": 729, "id": 12390}, {"iscrowd": 0, "category_id": 1, "bbox": [380.0, 143.0, 72.0, 68.0], "area": 2364, "segmentation": {"size": [512, 512], "counts": "[Un51o?2N2M4M2M3N2M01001N3N3L100O010LZOn@f0Q?410O0100O0100O010O01001N3N0O100O0100O010O0100O012N2M4M2M3N3L3N2N3L3N2MTORBKk=5VBJj=6VBIj=7WBIi=7VBIj=7WBIk=5TBKo=2RBNP>0oA1T>LlA3W>JjA6X>HjA5Z>GiA7Y>GjA5Y>HjA6Y>GiA7Y>GjA5Z>GiA7R?M4Mgim0"}, "image_id": 729, "id": 12391}, {"iscrowd": 0, "category_id": 1, "bbox": [320.0, 171.0, 15.0, 12.0], "area": 100, "segmentation": {"size": [512, 512], "counts": 
"`UP53j?3N210O0010O010O0010O01M2N2NbZh2"}, "image_id": 729, "id": 12392}, {"iscrowd": 0, "category_id": 1, "bbox": [257.0, 173.0, 69.0, 101.0], "area": 3167, "segmentation": {"size": [512, 512], "counts": "RhP44j?2M3M4M2M4O00010O0O2L3N3L3N2M4M2N3L3N2M4M2M4M2N2M4M2M4M2M4DeMoB]2nT1\\AnNd>X10O00010O01O01O010O0M3N30O01O01O010O01O01ON3N11O01O010O01O010N1M3N3M2M4M2N210O010O00010O010O00010O010OTOeA3[>KhA4Y>HjA9U>EmA;T>BoA>P>_OSBa0m=]OVBb0k=ZOXBg0g=WO[Bi0_>0O010O00M4M2M4M2M4M2M3NViV4"}, "image_id": 729, "id": 12395}, {"iscrowd": 0, "category_id": 1, "bbox": [314.0, 196.0, 70.0, 106.0], "area": 3359, "segmentation": {"size": [512, 512], "counts": "mXm41m?3L3N2M4M2M4N11O01O0M4M2M3N3L3N3L3N2M4M2M6K3L3N2M4M2M4M2M3FcMoB`2n4TA9NFl>m0M4M2N3N110O01O01O010O010O01O01O010O010O0N2N3O0010O00010O010O010O00010O0O2M2N3O0fN^AW1f>O0N2M4M2N3L3N3M2M3N3M2M4M2NYG"}, "image_id": 729, "id": 12398}, {"iscrowd": 0, "category_id": 1, "bbox": [421.0, 291.0, 27.0, 27.0], "area": 447, "segmentation": {"size": [512, 512], "counts": "dib63k?2N3L3N3L3N3L301O01O01O010O010O0001N1N3M2N2M4M2N3M2Mifo0"}, "image_id": 729, "id": 12399}, {"iscrowd": 0, "category_id": 1, "bbox": [293.0, 0.0, 131.0, 103.0], "area": 5136, "segmentation": {"size": [512, 512], "counts": "Tab41n?2M3N2N2N2N2N2N1N3N2N2N2N2BUOdAm0Z>UOcAm0\\>UObAm0\\>UObAm0\\>UObAm0\\>TOcAn0Z>>N2O1O00O1O1N2O1O1O1O1O1O1O11O1O00O1N3N2O1000000O0100000000000O010000000000000O0100000000`AhNW>X1gAjNW>W1hAkNV>W1hAkNV>`1100000000000O01N2N2M3O1000O1000O1000000000O10O100cN`AY1`>eNbA[1b>00000000O0100000000000OO2N01O2M3N2N1O2N2N2M3N2N2N1O2N2N2M3N2N2N2N1O2M3Nfm[1"}, "image_id": 730, "id": 12400}, {"iscrowd": 0, "category_id": 1, "bbox": [387.0, 0.0, 12.0, 18.0], "area": 216, "segmentation": {"size": [512, 512], "counts": "P`Q6b0^?000000000000000000000P`h1"}, "image_id": 730, "id": 12401}, {"iscrowd": 0, "category_id": 1, "bbox": [116.0, 174.0, 143.0, 178.0], "area": 10456, "segmentation": {"size": [512, 512], "counts": "UZj13i?4L4L4L5K4o@ZOe>i0WA[Oe>R1O2O0001O01O0001O01O000N2L5K4L4L4L5K4L4L2N004L4L4L4L5K4L1O000000000000002NN]CoL_TOjAQ1o=UOmAo0R>`00010N1L4K5L5K4L4L4L5N1001O0001OM4K4L4L4L5K4L4L4L5K4L4K5L5K4L4L4L5K4L4L4LSYS2"}, "image_id": 730, "id": 12404}, {"iscrowd": 0, "category_id": 1, "bbox": [310.0, 337.0, 38.0, 56.0], "area": 1462, "segmentation": {"size": [512, 512], "counts": "o[k44h?4K6K4K5K5L5J5L4K5K501O01O0000010O000001O01O0000010O000L4L4K6J5L4K5K5L5J5L]ea2"}, "image_id": 730, "id": 12405}, {"iscrowd": 0, "category_id": 1, "bbox": [361.0, 354.0, 151.0, 158.0], "area": 16260, "segmentation": {"size": [512, 512], "counts": "Qnd52l?2O2N2N2N2N2N2N2N2N2N2N2N2M3N1O2N2N2N2N2N2N2N2N2N2N2M3N2N1O2N2N2N2QCmMn;U2QDkMn;W2PDjMo;X2oCiMPG_A;`>G]A;a>H]A9a>I]A9b>I\\A8b>J\\A8b>d0O1N2N2O1N2N2O1N2O1O1001O001O1O001O001O1O001O001O1O001O001O1O001O1OO1N2O1N2N2O100001O001O1O00N2O1N2O1N2N2O1N2JTNVBn1i=4OSOYBCg==[BBf==\\B@f=>\\B@g==\\BAe=`0[B]Oh=b0XB\\Ok=a0XB]Oi=b0XB\\Ok=a0XB]Oj=`0YB]Oi=b0XB\\Ok=a0XB]Oi=a0m0N2M2L5M2N3NQRi5"}, "image_id": 730, "id": 12410}, {"iscrowd": 0, "category_id": 1, "bbox": [182.0, 451.0, 34.0, 34.0], "area": 585, "segmentation": {"size": [512, 512], "counts": "f^k21n?2N2N2N2N2M3N1O2N2N2N2M3N2N1O2N2N01O2N2M3N2N2N2N1O2M3N2N2N2N2N1O2Mbac4"}, "image_id": 730, "id": 12411}, {"iscrowd": 0, "category_id": 1, "bbox": [141.0, 468.0, 29.0, 33.0], "area": 338, "segmentation": {"size": [512, 512], "counts": "]oV22m?2M3N2N2000O10FHi@8W?Jg@5Y?Ne@2[?90O100O01000000O11N2O2N2N2N2N2M2O2NWaZ5"}, "image_id": 730, "id": 12412}, {"iscrowd": 0, "category_id": 1, "bbox": [175.0, 473.0, 21.0, 21.0], "area": 221, 
"segmentation": {"size": [512, 512], "counts": "Pog22l?3N2N2N1O2N2O1000000000O01000N2M3N2N2N2Nl`m4"}, "image_id": 730, "id": 12413}, {"iscrowd": 0, "category_id": 1, "bbox": [219.0, 487.0, 50.0, 25.0], "area": 664, "segmentation": {"size": [512, 512], "counts": "oo]31n?1O1O1N2O1O1O1O1O1O1N2O1O1O1O1O1O1N2O1O1O1001O001O1O1O1O1O1O1O001O1O1O1O1O1O1O001O1O1O1O1O1O1O001ORPi3"}, "image_id": 730, "id": 12414}, {"iscrowd": 0, "category_id": 1, "bbox": [203.0, 492.0, 18.0, 19.0], "area": 172, "segmentation": {"size": [512, 512], "counts": "goU32m?2N2N2N2N2N2N1O000000O3N2N2N2N2N2N_Pa4"}, "image_id": 730, "id": 12415}, {"iscrowd": 0, "category_id": 1, "bbox": [142.0, 501.0, 31.0, 11.0], "area": 200, "segmentation": {"size": [512, 512], "counts": "o_W21n?1O1O1O1O1O1N2001O1O1O1O1O00O1N2O1O1O1O1001O1O1O001O1O1O1O1NTPY5"}, "image_id": 730, "id": 12416}, {"iscrowd": 0, "category_id": 1, "bbox": [102.0, 66.0, 41.0, 28.0], "area": 602, "segmentation": {"size": [512, 512], "counts": "]Rc13k?2N3L3N2N30O010O01O01OO2N110O010O01O01O010O010O01O01O010O010O010O00_Oe@=_?00N3M2M4M2N`]h5"}, "image_id": 731, "id": 12417}, {"iscrowd": 0, "category_id": 1, "bbox": [171.0, 498.0, 19.0, 14.0], "area": 181, "segmentation": {"size": [512, 512], "counts": "loe21m?2M4M2M4O00010O01O00001O001O000O2M2M\\`P5"}, "image_id": 733, "id": 12418}, {"iscrowd": 0, "category_id": 1, "bbox": [333.0, 105.0, 158.0, 111.0], "area": 6711, "segmentation": {"size": [512, 512], "counts": "ocV58a?8G8000000001O01O000000000001O01O000000000001O01O000000007JO0000000000PAXOh>P1001O0000L5G80000001O0000hA`NP>h1000000010O0004L1O0000WNPBb1W>000000001O0001O0000000001OM3M40O0000000000M3M13K4001O00000001O000001O0O1J60000000010kNbAf0l>00O1L400010O000000000000010O000000000001O`B[Oe;f0SDAm;?kCJT<6dC2\\R10000001O0000001nNYAi0o>000001O0000001O0000001O0000001O000000M3N200001O0000001O0N2L4L5J5O100L5Kn`^4"}, "image_id": 735, "id": 12421}, {"iscrowd": 0, "category_id": 1, "bbox": [32.0, 495.0, 44.0, 17.0], "area": 393, "segmentation": {"size": [512, 512], "counts": "n_`02l?2N2M3N2N2N2N2001O001O001O001O00001O001O001O001O00001O001O001O001O00001O001O001O001O00Q`i6"}, "image_id": 735, "id": 12422}, {"iscrowd": 0, "category_id": 1, "bbox": [60.0, 0.0, 115.0, 59.0], "area": 2394, "segmentation": {"size": [512, 512], "counts": "\\Pn02l?3M2M3N3M2O2O001O00001O001O001O00001O001O001O00001O001O001O00001O0010O0[Oo@^OTAc0U?O0010O010O010O00010O010O010O00010O010O010O00010O010M2N3O00010O010O010O00010O010O010O01O01O010O010O01O01O010O010O010O00010O010O010O0001O0N3M2M4M2N2M4M2Nk^X5"}, "image_id": 736, "id": 12423}, {"iscrowd": 0, "category_id": 1, "bbox": [268.0, 0.0, 63.0, 18.0], "area": 688, "segmentation": {"size": [512, 512], "counts": "PPV41o?2N1O2N2N2N2N1O2N1O00O100O1O100O100O1001O2N2N2N00O100O1O100O100O1001O1O1OO100O1O100O100O100O100O1O100O100O100O1O100O100O100OQ`j2"}, "image_id": 736, "id": 12424}, {"iscrowd": 0, "category_id": 1, "bbox": [17.0, 409.0, 87.0, 45.0], "area": 1842, "segmentation": {"size": [512, 512], "counts": "km81j?5K6K4M30000010O000001O01O0000010O000001O01O0000010O000001O01O0000010O000001O0D<000001O00K5L4K5L400000000000000000000000D<0000000000000O100000000000000000000000000000000000Wc[6"}, "image_id": 738, "id": 12425}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 448.0, 25.0, 24.0], "area": 476, "segmentation": {"size": [512, 512], "counts": "Zn]14i?6K4M2N2N2N2O0O101O00000000000001N101N1O2O1M4M3KkaU6"}, "image_id": 738, "id": 12426}, {"iscrowd": 0, "category_id": 1, "bbox": [137.0, 458.0, 16.0, 16.0], "area": 241, "segmentation": {"size": [512, 512], "counts": 
"ZnT2?a?00000000000000000000000000001eQc5"}, "image_id": 738, "id": 12427}, {"iscrowd": 0, "category_id": 1, "bbox": [39.0, 0.0, 29.0, 29.0], "area": 543, "segmentation": {"size": [512, 512], "counts": "c`c03j?3N3L3M4L3N2M4O01O01O010O00010O00010O01O000N3L3M3M4M2M3Mlom6"}, "image_id": 740, "id": 12428}, {"iscrowd": 0, "category_id": 1, "bbox": [87.0, 0.0, 36.0, 18.0], "area": 374, "segmentation": {"size": [512, 512], "counts": "R`[11m?3O1O001O001O001O1O001O001O001O1O001O001O001O001O1O00N2O1N2N2N2O1N2N2NR`R6"}, "image_id": 740, "id": 12429}, {"iscrowd": 0, "category_id": 1, "bbox": [136.0, 0.0, 53.0, 23.0], "area": 635, "segmentation": {"size": [512, 512], "counts": "PPT21o?001O001O1O001O001O001O001O1O001O001O001O1O001O001O001O1O001O001O001O001O1O001O00O1N2N2N2O1N2N2N2N2O1N2NR`Q5"}, "image_id": 740, "id": 12430}, {"iscrowd": 0, "category_id": 1, "bbox": [356.0, 0.0, 45.0, 22.0], "area": 508, "segmentation": {"size": [512, 512], "counts": "PPb51o?1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O00O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O100OQ`g1"}, "image_id": 740, "id": 12431}, {"iscrowd": 0, "category_id": 1, "bbox": [456.0, 0.0, 34.0, 21.0], "area": 373, "segmentation": {"size": [512, 512], "counts": "YPT72m?2N2N3M2O1N2N2N3M1O0100O1O1O1O1O100O1O1O1O1O100O1O1O1O1O100O1O1O1OQP;"}, "image_id": 740, "id": 12432}, {"iscrowd": 0, "category_id": 1, "bbox": [495.0, 0.0, 16.0, 9.0], "area": 84, "segmentation": {"size": [512, 512], "counts": "P`g71o?1O1O1O1O1O1O1O1O00O1O1O1O2N2No?"}, "image_id": 740, "id": 12433}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 3.0, 17.0, 35.0], "area": 443, "segmentation": {"size": [512, 512], "counts": "3Q1o>0001O01O0001O01O00N2K6K4K5L4K5Ko_g7"}, "image_id": 740, "id": 12434}, {"iscrowd": 0, "category_id": 1, "bbox": [90.0, 15.0, 73.0, 54.0], "area": 1948, "segmentation": {"size": [512, 512], "counts": "YQ]12l?2N3L3N2M4M201O010O01O01O010O010O00010O010O01O01O010O0O2M2M3N3M2M4M2M3N3O010O010O00010O010O01O01O01gN\\AU1g>0010O0010O010OoNYAi0g>UO\\Ak0d>RO^An0k>O01ON3L3N3M2M3N3M2M4M2Mi^^5"}, "image_id": 740, "id": 12435}, {"iscrowd": 0, "category_id": 1, "bbox": [57.0, 36.0, 34.0, 30.0], "area": 597, "segmentation": {"size": [512, 512], "counts": "Zal02l?2M4N1c@JP?7l@LU?3i@OW?1f@3Y?:10O01O01O01O01O010O01O01O010O01O01O01O000M4L3N3L3N2Md^b6"}, "image_id": 740, "id": 12436}, {"iscrowd": 0, "category_id": 1, "bbox": [504.0, 37.0, 8.0, 16.0], "area": 72, "segmentation": {"size": [512, 512], "counts": "[Ql72m?2N2N2O2M2N2N2jN"}, "image_id": 740, "id": 12437}, {"iscrowd": 0, "category_id": 1, "bbox": [355.0, 38.0, 48.0, 48.0], "area": 1162, "segmentation": {"size": [512, 512], "counts": "jaa52m?2N2N2N2M2O2N2N2N2N2N2M3N2N1O2N2N2N2O100000O10O100000000000N2M3N1O2N2N2N2N2M3N2N1O2N2N2N2N2M3NR^f1"}, "image_id": 740, "id": 12438}, {"iscrowd": 0, "category_id": 1, "bbox": [444.0, 38.0, 30.0, 29.0], "area": 507, "segmentation": {"size": [512, 512], "counts": "_Qn63m?2M3N2N3L3N2M3N2M10O010O010O010O010O01001N3N2M3N2M3N3L3N2MYnb0"}, "image_id": 740, "id": 12439}, {"iscrowd": 0, "category_id": 1, "bbox": [21.0, 46.0, 33.0, 38.0], "area": 902, "segmentation": {"size": [512, 512], "counts": "\\b:2i?6K4K5L4K5L5O000M301O01O000001O01O0000010O000001O01O0M3L4VOm@b0\\?J5L4K_nT7"}, "image_id": 740, "id": 12440}, {"iscrowd": 0, "category_id": 1, "bbox": [275.0, 48.0, 60.0, 48.0], "area": 1629, "segmentation": {"size": [512, 512], "counts": "WbY43k?2O2M2N3N1N3M3M2O2M2N3N2M2O2O0100O0100O0100O0100O010O010M3M2O2O010O01000O010O01N2M2O2M00002O1N3M2O2M3M2O2M2N3M3N1N3M2OT^h2"}, "image_id": 740, "id": 12441}, {"iscrowd": 
0, "category_id": 1, "bbox": [0.0, 49.0, 1.0, 5.0], "area": 5, "segmentation": {"size": [512, 512], "counts": "a15Zno7"}, "image_id": 740, "id": 12442}, {"iscrowd": 0, "category_id": 1, "bbox": [341.0, 65.0, 30.0, 30.0], "area": 457, "segmentation": {"size": [512, 512], "counts": "`bZ52m?2N2N2N1N3N2N2N2N2N2N2M2O200000N1O2N2N2N2N2M3N2N1O2N2N2N2Nb]V2"}, "image_id": 740, "id": 12443}, {"iscrowd": 0, "category_id": 1, "bbox": [422.0, 67.0, 50.0, 66.0], "area": 1860, "segmentation": {"size": [512, 512], "counts": "cRc62n?2M3N2M3N2M3JBf@`0X?6M4M2M3N2M3N2M3N3L3N2M3N2M3N2M3NO010O2O2N2M3N2M2O0O012M3N2M3N2M4M2M3N2[Oo@7S?Go@6T?Go@7T?Fo@6`?N2M3NXlc0"}, "image_id": 740, "id": 12444}, {"iscrowd": 0, "category_id": 1, "bbox": [75.0, 73.0, 70.0, 43.0], "area": 1792, "segmentation": {"size": [512, 512], "counts": "PcU13b?0c@4Z?:1O0001O01O01O01O0001O0N2L4M4K40001O01O00010O0000010O0001O01O00010O0000010O0000010O002O0O000010O0000010O000010O0000010O000O2K4L4M3L5K4MZ]g5"}, "image_id": 740, "id": 12445}, {"iscrowd": 0, "category_id": 1, "bbox": [391.0, 74.0, 31.0, 40.0], "area": 651, "segmentation": {"size": [512, 512], "counts": "ScS61n?3N1^OMTA6j>KUA7h>LUA6j>LSA7j>KUA7h>LUA6j>LTA5j>`01O01O010O03M2O2M3N1N3M3N1N3N2M2N3N2M2O2Mnl\\1"}, "image_id": 740, "id": 12446}, {"iscrowd": 0, "category_id": 1, "bbox": [365.0, 94.0, 20.0, 20.0], "area": 198, "segmentation": {"size": [512, 512], "counts": "Xcf52m?2O1N2N3M2N2N2N01O0000002N2O1N2N3M2N2Nk\\o1"}, "image_id": 740, "id": 12447}, {"iscrowd": 0, "category_id": 1, "bbox": [7.0, 98.0, 67.0, 38.0], "area": 1574, "segmentation": {"size": [512, 512], "counts": "Pd32i?5K5K5K5L5J5O1000010O000000010O00000010O000000010O000000010O0N2K5M4O0Fi@KW?5j@KU?5k@KU?6j@IX?6h@E]?;4000]@G_?=1O00N2N3O000000010O000000010O00000010O0O1K5K5Kllj6"}, "image_id": 740, "id": 12448}, {"iscrowd": 0, "category_id": 1, "bbox": [251.0, 99.0, 56.0, 51.0], "area": 1653, "segmentation": {"size": [512, 512], "counts": "Sdm34i?3M3M4K4M3M4L3M3M4L3O110O00010O00010O00010O0001mNYAl0f>QO]Ao0j>010O00010O00010O00010O00010L3O1010O00010L3M3M4L3M3M4L3L4M3M_\\V3"}, "image_id": 740, "id": 12449}, {"iscrowd": 0, "category_id": 1, "bbox": [299.0, 99.0, 44.0, 34.0], "area": 674, "segmentation": {"size": [512, 512], "counts": "\\ce42l?3N1N3M3N1O2O010O10O10O010O3NO10O10O010OO2N20O010O01000O010O01000O010O0N3M3N1N3M2O2M3M\\\\d2"}, "image_id": 740, "id": 12450}, {"iscrowd": 0, "category_id": 1, "bbox": [505.0, 113.0, 7.0, 14.0], "area": 57, "segmentation": {"size": [512, 512], "counts": "fcl72m?2N3N1N2N2N2^L"}, "image_id": 740, "id": 12451}, {"iscrowd": 0, "category_id": 1, "bbox": [368.0, 117.0, 55.0, 51.0], "area": 1573, "segmentation": {"size": [512, 512], "counts": "gTh53i?4M4L3M3M4L3L4M4L3M3N3O00010O00010O0001O01O01O01OROZAb0g>ZO\\Af0d>WO_Aj0`>SOdAl0h>0010O00010O00010O000010O000010O00001L3M3M4L3L4M4L3Me[\\1"}, "image_id": 740, "id": 12452}, {"iscrowd": 0, "category_id": 1, "bbox": [76.0, 122.0, 56.0, 45.0], "area": 1589, "segmentation": {"size": [512, 512], "counts": "jTV12i?5L4K6K4K5L4L4O2O0000010O000001O01O0000010OQOUAi0k>SOYAm0m>000010O0000010O0000010O000000010O0000010O000000010O0L4K5L5K4K5Ljkm5"}, "image_id": 740, "id": 12453}, {"iscrowd": 0, "category_id": 1, "bbox": [53.0, 147.0, 15.0, 13.0], "area": 152, "segmentation": {"size": [512, 512], "counts": "idj05e?60000000010O000000010O00O1K`km6"}, "image_id": 740, "id": 12454}, {"iscrowd": 0, "category_id": 1, "bbox": [336.0, 152.0, 29.0, 28.0], "area": 507, "segmentation": {"size": [512, 512], "counts": "ZUX52k?3M5K3M301L3M310O010O00010O00010O00001N10O1M4L3M3M4L3M3MV[Y2"}, 
"image_id": 740, "id": 12455}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 154.0, 28.0, 26.0], "area": 556, "segmentation": {"size": [512, 512], "counts": "j4e0[?1O000001O01O0001O01O000001O01O0001O01O000001N1L4K5L5JUka7"}, "image_id": 740, "id": 12456}, {"iscrowd": 0, "category_id": 1, "bbox": [288.0, 155.0, 47.0, 26.0], "area": 631, "segmentation": {"size": [512, 512], "counts": "SU`41l?3M4M200010O00010O0001O0_@H[?>0001O000010O000010O000010O00010O00010N1M3N30O00010O00010O0000M4L3MlZh2"}, "image_id": 740, "id": 12457}, {"iscrowd": 0, "category_id": 1, "bbox": [233.0, 156.0, 52.0, 43.0], "area": 1558, "segmentation": {"size": [512, 512], "counts": "jed34g?5L5J5K5L401O01O000001O01O000001O01O000001N1L41O00M4M2001O0001O0001O01L31O000001O01O000L4K5L5J5K5K5L4KV[a3"}, "image_id": 740, "id": 12458}, {"iscrowd": 0, "category_id": 1, "bbox": [44.0, 158.0, 25.0, 24.0], "area": 443, "segmentation": {"size": [512, 512], "counts": "`Uf02i?6J5K5M31O01O000001O01O000001O01O000001O0O1L4K5KS[m6"}, "image_id": 740, "id": 12459}, {"iscrowd": 0, "category_id": 1, "bbox": [67.0, 174.0, 52.0, 46.0], "area": 1452, "segmentation": {"size": [512, 512], "counts": "^fQ15g?4K5K5K5K6K4L41O01O000001O01O000001O01O00POZAf0f>UO`Aj0k>001O01O0001O0001O0001O0001O0001O0001O0001O0001O0L4K5K6J5LRZT6"}, "image_id": 740, "id": 12460}, {"iscrowd": 0, "category_id": 1, "bbox": [359.0, 181.0, 51.0, 46.0], "area": 1737, "segmentation": {"size": [512, 512], "counts": "nec52n?7DI`@`0W?50001O0001O0001O0001O0001O0001O0001O01O0001OTOUAc0k>XOZAh0o>01O01O000001O01O000001O01O000001O01O0000N3J5K5L4KPib3"}, "image_id": 740, "id": 12464}, {"iscrowd": 0, "category_id": 1, "bbox": [334.0, 213.0, 26.0, 18.0], "area": 374, "segmentation": {"size": [512, 512], "counts": "mVW58h?0000000J60002N00000O10000000O10000000O10000000O7JUi[2"}, "image_id": 740, "id": 12465}, {"iscrowd": 0, "category_id": 1, "bbox": [61.0, 218.0, 22.0, 16.0], "area": 309, "segmentation": {"size": [512, 512], "counts": "Sgn06a?900000000000000001O01O0000000000000001O0G]Yf6"}, "image_id": 740, "id": 12466}, {"iscrowd": 0, "category_id": 1, "bbox": [50.0, 236.0, 56.0, 35.0], "area": 1512, "segmentation": {"size": [512, 512], "counts": "TXi05d?7I7I7000000000001O000001O0000000001O000001O00000001O00000001O000000N3K4000000000000010O000000000000000_ORA1n>E[A;U?L4G[hZ6"}, "image_id": 740, "id": 12467}, {"iscrowd": 0, "category_id": 1, "bbox": [322.0, 236.0, 32.0, 36.0], "area": 650, "segmentation": {"size": [512, 512], "counts": "lWQ57i?8H5^ODVA;k>=00000HSA]Om>c08000O1000O100000000000OGo@EQ?;:00O10000000O1002N8G[h^2"}, "image_id": 740, "id": 12468}, {"iscrowd": 0, "category_id": 1, "bbox": [362.0, 243.0, 57.0, 38.0], "area": 1668, "segmentation": {"size": [512, 512], "counts": "kWe56i?8I7I7I3M00000O1000O1000000000O10O1000000000O10O1000000000O10O1000000000O1000O10000000O1000O10000000O1003L8I7I8HdW^1"}, "image_id": 740, "id": 12469}, {"iscrowd": 0, "category_id": 1, "bbox": [297.0, 253.0, 20.0, 13.0], "area": 172, "segmentation": {"size": [512, 512], "counts": "Phd46g?40O000000010O000000010O00000000010M2KTXQ3"}, "image_id": 740, "id": 12470}, {"iscrowd": 0, "category_id": 1, "bbox": [211.0, 260.0, 58.0, 43.0], "area": 1657, "segmentation": {"size": [512, 512], "counts": "TiY31k?4K5L5J5K5L4010O000000010O000001O01O0000010O000000010O000001O01O000001K4L4M31O01O0000010O000001O0M3L4K5K6K4O1Ef@0Z?Lj@5`?M`Wi3"}, "image_id": 740, "id": 12471}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 262.0, 9.0, 31.0], "area": 171, "segmentation": {"size": [512, 512], "counts": "V8n0R?1O00K6J5K5L4K5KnWk7"}, 
"image_id": 740, "id": 12472}, {"iscrowd": 0, "category_id": 1, "bbox": [16.0, 266.0, 28.0, 25.0], "area": 550, "segmentation": {"size": [512, 512], "counts": "kX85c?8H8O11O000001O00000001O000001O00000001O000001O00Ei@MW?KQA5]?NWgY7"}, "image_id": 740, "id": 12473}, {"iscrowd": 0, "category_id": 1, "bbox": [279.0, 266.0, 32.0, 39.0], "area": 863, "segmentation": {"size": [512, 512], "counts": "Zi[41k?4L5K4L4K5L5K4M3000001O01O0001O0001O01O000001O01OL4L4K6K4K5L4KgWT3"}, "image_id": 740, "id": 12474}, {"iscrowd": 0, "category_id": 1, "bbox": [52.0, 288.0, 50.0, 34.0], "area": 1518, "segmentation": {"size": [512, 512], "counts": "TYj09g?0k@J`>k00000000000000000000000000000000000000000000000000000000000L400000000000000000000000000005K0000kf\\6"}, "image_id": 740, "id": 12475}, {"iscrowd": 0, "category_id": 1, "bbox": [328.0, 291.0, 22.0, 14.0], "area": 235, "segmentation": {"size": [512, 512], "counts": "VYT53m?7I1O0O100000O100000O1000O100000O1000O105Khf`2"}, "image_id": 740, "id": 12476}, {"iscrowd": 0, "category_id": 1, "bbox": [320.0, 293.0, 6.0, 5.0], "area": 29, "segmentation": {"size": [512, 512], "counts": "UYP55k?00000000Olfl2"}, "image_id": 740, "id": 12477}, {"iscrowd": 0, "category_id": 1, "bbox": [377.0, 294.0, 50.0, 37.0], "area": 1414, "segmentation": {"size": [512, 512], "counts": "Zil51o?7I7I8H7I3M0O100000O100000O100000O100000O100000O100000O13M2N0O1000000000O1000O10000000O1004K9H7I7ImUZ1"}, "image_id": 740, "id": 12478}, {"iscrowd": 0, "category_id": 1, "bbox": [204.0, 308.0, 93.0, 47.0], "area": 2441, "segmentation": {"size": [512, 512], "counts": "SZV3o0Q?00000000000000000000O10000000O100000000000000000000000000000O13KPOTAP1l>2000M300000000000000000000000O100000000000O100000H80VOj00O1N200000000000000031K10000000O1000000000000000003M00O100O107IRV[3"}, "image_id": 740, "id": 12479}, {"iscrowd": 0, "category_id": 1, "bbox": [301.0, 318.0, 13.0, 11.0], "area": 131, "segmentation": {"size": [512, 512], "counts": "nif47i?4L000000O10000000000000RfR3"}, "image_id": 740, "id": 12480}, {"iscrowd": 0, "category_id": 1, "bbox": [3.0, 327.0, 32.0, 21.0], "area": 664, "segmentation": {"size": [512, 512], "counts": "_j1=[?8000000000000000000000000000000000000000000000000000000000000iU^7"}, "image_id": 740, "id": 12481}, {"iscrowd": 0, "category_id": 1, "bbox": [367.0, 328.0, 7.0, 8.0], "area": 50, "segmentation": {"size": [512, 512], "counts": "Yjg54l?3M0O1000000heT2"}, "image_id": 740, "id": 12482}, {"iscrowd": 0, "category_id": 1, "bbox": [295.0, 332.0, 21.0, 21.0], "area": 259, "segmentation": {"size": [512, 512], "counts": "jjc46j?000000000000KG>003M06J0000000000000000JgeQ3"}, "image_id": 740, "id": 12483}, {"iscrowd": 0, "category_id": 1, "bbox": [50.0, 339.0, 51.0, 40.0], "area": 1567, "segmentation": {"size": [512, 512], "counts": "R[i0=c?1O00000B>09G0000000O1000000000O100000004L03M000000L4000003M00000000000000000000O1000000000O1000009GnT]6"}, "image_id": 740, "id": 12484}, {"iscrowd": 0, "category_id": 1, "bbox": [332.0, 345.0, 32.0, 27.0], "area": 582, "segmentation": {"size": [512, 512], "counts": "P[V55k?5K1N1000O10N200O101O5K4L0O1000O1000O1000O1000O1000O105K5K5K5JddY2"}, "image_id": 740, "id": 12485}, {"iscrowd": 0, "category_id": 1, "bbox": [375.0, 345.0, 63.0, 41.0], "area": 1672, "segmentation": {"size": [512, 512], "counts": "mjk53m?5K5K5J6K5K5K4K10O1000O1000O10O13M4K5L1O00O0100000O10O1000O3N0000O01000M30O1000O10O1000O10O1000O10O1000O10O1000O4M4L4L4K5L4L]dT1"}, "image_id": 740, "id": 12486}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 354.0, 7.0, 7.0], "area": 49, 
"segmentation": {"size": [512, 512], "counts": "R;7i?00000000000nTl7"}, "image_id": 740, "id": 12487}, {"iscrowd": 0, "category_id": 1, "bbox": [268.0, 354.0, 31.0, 16.0], "area": 351, "segmentation": {"size": [512, 512], "counts": "T[V4>b?00000000000000000000000000O100000000000N20G900000000000005KiTZ3"}, "image_id": 740, "id": 12488}, {"iscrowd": 0, "category_id": 1, "bbox": [1.0, 365.0, 19.0, 19.0], "area": 302, "segmentation": {"size": [512, 512], "counts": "^k06j?0070O100000O010000000O01000000O010000000O0100000O10O100000O2O5K5K2M010000000O01000000O05L5K5K5Kdcf3"}, "image_id": 740, "id": 12490}, {"iscrowd": 0, "category_id": 1, "bbox": [3.0, 386.0, 32.0, 24.0], "area": 581, "segmentation": {"size": [512, 512], "counts": "Sl11o?5J100000002N>B02N0O11N2O00000000000000O10000000O1000000006J>BZS^7"}, "image_id": 740, "id": 12491}, {"iscrowd": 0, "category_id": 1, "bbox": [56.0, 388.0, 54.0, 45.0], "area": 1675, "segmentation": {"size": [512, 512], "counts": "X\\l01n?7J5K6J5J6K6J5K2N0O1000O10O1005K2M100000O10O100000O10O1000O14L2M1000O1000O10O100000O10O100000O5L4L5K4L5J5LkbX6"}, "image_id": 740, "id": 12492}, {"iscrowd": 0, "category_id": 1, "bbox": [359.0, 393.0, 25.0, 24.0], "area": 428, "segmentation": {"size": [512, 512], "counts": "^lc54k?5L5K4L2M10000000O0100000O010000000O011O5K4K6K4LTco1"}, "image_id": 740, "id": 12493}, {"iscrowd": 0, "category_id": 1, "bbox": [312.0, 394.0, 9.0, 7.0], "area": 47, "segmentation": {"size": [512, 512], "counts": "[\\l45k?1OO01000000004KcSo2"}, "image_id": 740, "id": 12494}, {"iscrowd": 0, "category_id": 1, "bbox": [278.0, 397.0, 31.0, 21.0], "area": 397, "segmentation": {"size": [512, 512], "counts": "c\\[41o?4K5L5K1O00O0100000O010000000O0100000O01000000O05L5K2N0O102NVSU3"}, "image_id": 740, "id": 12495}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 398.0, 63.0, 44.0], "area": 1705, "segmentation": {"size": [512, 512], "counts": "V]R64k?4M3M3M3L5ZO]OgAb0Z>AcA?]>E^AH\\A7e>LXA4h>b0O10O10O10O1000O010001N4M3M3L0100000O01000O01000O01000O01000O01000O0100000O01000O01000O2O3M3L5L3M3L4M3MjRn0"}, "image_id": 740, "id": 12496}, {"iscrowd": 0, "category_id": 1, "bbox": [345.0, 399.0, 7.0, 10.0], "area": 53, "segmentation": {"size": [512, 512], "counts": "`l\\55k?4K01000005J]c_2"}, "image_id": 740, "id": 12497}, {"iscrowd": 0, "category_id": 1, "bbox": [224.0, 418.0, 50.0, 44.0], "area": 1314, "segmentation": {"size": [512, 512], "counts": "e]`32n?2N3L4M3M3L3N3M3L2O00O0100JVOUAj0k>600O0100O01000O01000O0100O0100O01000O01000O01000O2O3M3L3N3L4M3M2M4M3MTbf3"}, "image_id": 740, "id": 12498}, {"iscrowd": 0, "category_id": 1, "bbox": [68.0, 436.0, 53.0, 50.0], "area": 1510, "segmentation": {"size": [512, 512], "counts": "\\^R15k?3M3L5L3M4L3L2O000O10O10O10O1000O01000O10O10O1DUOcAj0^>YO_Ag0a>\\O\\Ad0c>=0O10O10O1000O01000O10O10O10O3N3M4K4M3M4L3L4M4L3L4MYQS6"}, "image_id": 740, "id": 12499}, {"iscrowd": 0, "category_id": 1, "bbox": [294.0, 436.0, 38.0, 32.0], "area": 769, "segmentation": {"size": [512, 512], "counts": "m]c42n?3L4M3L3N3M3L3N2N0O0100O0100O01000O02O1O0O010O01000O0Nj@\\OV?d030O010001N3N3L4I[@Mh?0Tbi2"}, "image_id": 740, "id": 12500}, {"iscrowd": 0, "category_id": 1, "bbox": [35.0, 442.0, 26.0, 23.0], "area": 452, "segmentation": {"size": [512, 512], "counts": "nma01o?6I7J6J00000O1000O1000O1000O1000O100000O10006J6J6IeQQ7"}, "image_id": 740, "id": 12501}, {"iscrowd": 0, "category_id": 1, "bbox": [411.0, 444.0, 56.0, 43.0], "area": 1448, "segmentation": {"size": [512, 512], "counts": 
"Un]61o?2M4M2N3L3N3L3N3M2M4M2M4M2N1N11N4M2N1N010O10O10O010O10O10O010O10O10O010O10O10O010O10O10O103L3N3L3N3M2M4M2M4M2N[Qf0"}, "image_id": 740, "id": 12502}, {"iscrowd": 0, "category_id": 1, "bbox": [365.0, 452.0, 28.0, 28.0], "area": 458, "segmentation": {"size": [512, 512], "counts": "\\nf53m?2M4M2M3N3M2M4MO0100O0100O0100O0100O012M3N3M2M4M2M4M2NYQk1"}, "image_id": 740, "id": 12503}, {"iscrowd": 0, "category_id": 1, "bbox": [350.0, 458.0, 12.0, 9.0], "area": 66, "segmentation": {"size": [512, 512], "counts": "]^_51n?4M2N000O01000O11N4M2N`aZ2"}, "image_id": 740, "id": 12504}, {"iscrowd": 0, "category_id": 1, "bbox": [238.0, 465.0, 59.0, 47.0], "area": 1518, "segmentation": {"size": [512, 512], "counts": "Q_g31n?3N2M3N2M3N2M3N2M3N2M4M2M3N2M2O00O3N2M2O0O010O010O010O010O010O010O010O0100O010O010O010O010O03N2M3N2M3N2M3N2M3N2M3N2MhP[3"}, "image_id": 740, "id": 12505}, {"iscrowd": 0, "category_id": 1, "bbox": [322.0, 476.0, 25.0, 25.0], "area": 340, "segmentation": {"size": [512, 512], "counts": "U_Q51n?2O2M3N2M3N2M3N2M10O01O010O010O03N2M3N2M3N2M3N2MePb2"}, "image_id": 740, "id": 12506}, {"iscrowd": 0, "category_id": 1, "bbox": [399.0, 483.0, 18.0, 16.0], "area": 168, "segmentation": {"size": [512, 512], "counts": "XoW63m?2N3L3N1N01000O010O01000O03N3L3N3McP_1"}, "image_id": 740, "id": 12507}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 490.0, 5.0, 13.0], "area": 41, "segmentation": {"size": [512, 512], "counts": "Z?=d?O3M3L4M\\Pm7"}, "image_id": 740, "id": 12508}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 490.0, 31.0, 22.0], "area": 390, "segmentation": {"size": [512, 512], "counts": "oo]11o?0O10000O100C1h@0X?2f@NY?6c@K]?<00O100O10000O100O10000O12N3M2N2N3M2N3M2NS`R6"}, "image_id": 740, "id": 12509}, {"iscrowd": 0, "category_id": 1, "bbox": [433.0, 491.0, 57.0, 21.0], "area": 807, "segmentation": {"size": [512, 512], "counts": "boh62n?3L3N3M3L2O0O10000O10000O1001O3M2N3M000000O10000O100O10000O100O10000O100O10000O10000O100O10000O12N2N3M3M2N3M2N3MQ`:"}, "image_id": 740, "id": 12510}, {"iscrowd": 0, "category_id": 1, "bbox": [31.0, 496.0, 38.0, 16.0], "area": 386, "segmentation": {"size": [512, 512], "counts": "ko?1n?4M1O000O10000O1O10000O10000O10000O10000O1000000O10000O10000O1002N4L3M3M3MQPm6"}, "image_id": 740, "id": 12511}, {"iscrowd": 0, "category_id": 1, "bbox": [289.0, 502.0, 22.0, 10.0], "area": 119, "segmentation": {"size": [512, 512], "counts": "oo`41n?100O1O100O100O100O100O100O100O12N2N1O2N2NQPT3"}, "image_id": 740, "id": 12512}, {"iscrowd": 0, "category_id": 1, "bbox": [383.0, 503.0, 24.0, 9.0], "area": 113, "segmentation": {"size": [512, 512], "counts": "ooo51o?000O100O10000O100O10000O100O10000O100O11O3M2NSPd1"}, "image_id": 740, "id": 12513}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 46.0, 46.0], "area": 1343, "segmentation": {"size": [512, 512], "counts": "0h0X?1O1O001O1O001O1O001O1O001O1O001O1O001O1O001O1O001O1O001O1OoN_Ac0a>\\OaAd0^>ZOdAg0[>SOaA06j0[>UOlAi0f>N3N2M2O2M3N1N3N2M2O2MZoX7"}, "image_id": 741, "id": 12514}, {"iscrowd": 0, "category_id": 1, "bbox": [236.0, 0.0, 63.0, 48.0], "area": 1497, "segmentation": {"size": [512, 512], "counts": "PPf31o?1O1O1O1O1O2N1O1O1O1O1O1O1O1O1O1O1O1O1O2N1O1O1O1O1O1O1O1O1O1O1O1O1O1O2N1O1O1O1O1O1O1O1O1OdNaAW1_>hNcAV1^>jNcAT1^>kNdAS1]>lNeAR1f>SOUAb0m>\\OUAc0l>[OVAc0U?N2N2N2N2N2N2N2N2N2NR_Z3"}, "image_id": 741, "id": 12515}, {"iscrowd": 0, "category_id": 1, "bbox": [409.0, 0.0, 23.0, 8.0], "area": 98, "segmentation": {"size": [512, 512], "counts": "P`\\61o?2N3M2N00O10000O100O10000O100O10000O100O1000PPX1"}, "image_id": 741, "id": 12516}, 
{"iscrowd": 0, "category_id": 1, "bbox": [171.0, 8.0, 83.0, 90.0], "area": 3336, "segmentation": {"size": [512, 512], "counts": "\\ae22m?2O1N2N2N2N2N2BBWA`0g>BWA`0g>BWAa0f>AXAa0f>BWA`0g>>N2N2N2N1O0001O01O2N2N2N2N2O100000000000001O01O000000000000000000000001O0001O000000000YNkAb1U>\\NmAd1X>N2N2N2N2N2N2N2N2N3M2N2N2N2N2O1N2N2N2N2N2N2N2N3M2NimP4"}, "image_id": 741, "id": 12517}, {"iscrowd": 0, "category_id": 1, "bbox": [429.0, 16.0, 67.0, 62.0], "area": 2456, "segmentation": {"size": [512, 512], "counts": "Qaf62n?2M2`@LQ?6n@LP?6m@LQ?6m@MP?6m@LQ?6n@KQ?d0M2N3N1N3M2O2M2N3N1N3M2O2N01O2M2N2O2M2N3N1N1O01O01O01O01O01O01O00010O00010O00010O0001O01O101N3M2O2M2N3N1N3M2Bh@1[?Mf@2[?Lh@1Z?Mh@1[?Mg@1V^8"}, "image_id": 741, "id": 12518}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 20.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "d`o72Z?"}, "image_id": 741, "id": 12519}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 32.0, 18.0, 47.0], "area": 423, "segmentation": {"size": [512, 512], "counts": "P1_1b>M3M2O2M3GUAZOl>e0VAXOm>e0VAYOl>e08N3N1N3N2M2O2M3M2Ofnf7"}, "image_id": 741, "id": 12520}, {"iscrowd": 0, "category_id": 1, "bbox": [276.0, 51.0, 57.0, 54.0], "area": 1557, "segmentation": {"size": [512, 512], "counts": "YRZ41n?2N2N2N2N2N2N2N2N2N2N2N2N2O1N3M2N2N2N2N2N2N2N2O10000000000000O2M2N2N2N2N2N2N21O00O1N2N2N2N2N2N2N2N3M2N2N2N2N2O1N\\]i2"}, "image_id": 741, "id": 12521}, {"iscrowd": 0, "category_id": 1, "bbox": [93.0, 55.0, 81.0, 80.0], "area": 3126, "segmentation": {"size": [512, 512], "counts": "mb^11n?2N1N3N2N2M2O2N2M3N1O2M3N2N1N3^AQOP>Q1nAPOQ>R1mAPOQ>R1lAQOQ>Q1nAQOP>Q1nAPOS>P1jASOV>\\100O0100000O10O1000M3N1O2M2O0000O010000O010002M3N1O2M3IhAcNZ>[1hAcNY>[17O000O01000O01001N3N2N2N1N3N2N2M2O2N2M3N1O2M3N2N1N3N2N2N1NlmX5"}, "image_id": 741, "id": 12522}, {"iscrowd": 0, "category_id": 1, "bbox": [475.0, 55.0, 37.0, 82.0], "area": 1611, "segmentation": {"size": [512, 512], "counts": "dc]72m?2N2O2M2N2N2N2N3M2O1N2VOZOWBh0g=ZOWBh0g=ZOWBi0f=YOXBi0a=POSB9:i0a=POSB90E8R1d=ROSB;7c0f=FWB:i=HUB8k=JSB6m=n01O01O00000000010O00000000002XN"}, "image_id": 741, "id": 12523}, {"iscrowd": 0, "category_id": 1, "bbox": [160.0, 81.0, 69.0, 87.0], "area": 3161, "segmentation": {"size": [512, 512], "counts": "[S`22m?2M2O2M3N2l@Da>=^AD`>?^AC`>?]AD`>>_AD_>>_AC`>?]AD`>?^AC`>P1N1N3N2SB\\NZ=f1eB[NZ=g1cB\\N[=f1cB\\N\\=e1bB\\N^=d1`B_N`=a1^BaNb=_1\\BcNd=l101000O10O1000O010000N1N30N1N3N2M3N1O2M3N1N3N2N2M2IhAeNZ>Y1hAeNZ>X18O2N2mNXAj0k>SOXAk0P?M2O2N2M3N1N3N2N1N3N2N2M2O\\\\]4"}, "image_id": 741, "id": 12524}, {"iscrowd": 0, "category_id": 1, "bbox": [269.0, 107.0, 16.0, 15.0], "area": 133, "segmentation": {"size": [512, 512], "counts": "ccV41n?2N2N2N2N2N1O0002000000O1G[@4j?N2N\\\\a3"}, "image_id": 741, "id": 12525}, {"iscrowd": 0, "category_id": 1, "bbox": [297.0, 117.0, 58.0, 55.0], "area": 1635, "segmentation": {"size": [512, 512], "counts": "[dd41o?1N2N2N2N2_@GX?;g@FW?PO\\Am0f>QO\\Am0m>N2N2O1N2N2N2N2N3M2N2N2N2O1N2N\\YY3"}, "image_id": 741, "id": 12528}, {"iscrowd": 0, "category_id": 1, "bbox": [300.0, 174.0, 28.0, 28.0], "area": 411, "segmentation": {"size": [512, 512], "counts": "jUf42n?1N3M2N2N2N2N2N2N2N2N2O2M1O02N2N2O1N2N2N2N2N3M2N2N2N2OTjk2"}, "image_id": 741, "id": 12529}, {"iscrowd": 0, "category_id": 1, "bbox": [330.0, 174.0, 57.0, 56.0], "area": 1598, "segmentation": {"size": [512, 512], "counts": "TVU52m?2N2N2N2O1N2N2N2N3M2N2N2N2N2N2O1N2N2N3M2N2N2N2N200001O0001ON2N2N2O1N2N2N3M2000000N2N2N2N2O2M2N2N2N2N2N2N2N2N2N3N^Yn1"}, "image_id": 741, "id": 12530}, {"iscrowd": 0, "category_id": 1, "bbox": [287.0, 
180.0, 12.0, 12.0], "area": 83, "segmentation": {"size": [512, 512], "counts": "he_42m?2N2N2O2M20O1O1N2N2N2NWZZ3"}, "image_id": 741, "id": 12531}, {"iscrowd": 0, "category_id": 1, "bbox": [55.0, 211.0, 56.0, 76.0], "area": 2650, "segmentation": {"size": [512, 512], "counts": "chk04h?4L5jNDQB01`0j=DQB01`0j=EPBO2`0j=EPBO3`0h=V1M30001O01O0001O01O00010O0000010O0000010O0000010oMYBg1g=UN]Bg1Q>K4L4L4eN^AT1i>L4M301O01O0001O01O0001O01OL4L5K4L4L4LjXX6"}, "image_id": 741, "id": 12532}, {"iscrowd": 0, "category_id": 1, "bbox": [360.0, 237.0, 55.0, 53.0], "area": 1561, "segmentation": {"size": [512, 512], "counts": "RXd52m?2N2N2_@IV?9h@IV?9h@IW?8g@JW?a0N2N2N2N1O0000002N2N2N2N2N3M2O1N2000000000O1N2N3M2O1N2N2N2N2N2O1000000N2N2N2N3M2N2N2N2N2N2N2N2NcW`1"}, "image_id": 741, "id": 12533}, {"iscrowd": 0, "category_id": 1, "bbox": [115.0, 240.0, 69.0, 67.0], "area": 2662, "segmentation": {"size": [512, 512], "counts": "nhi11m?3M2M4M2N2]OD`A?]>D_A?^>D`A?]>CaA?]>D`A?]>C`A`0]>d0M200010O01fA\\NW>g1O010O010O0010O0010O010O010O00010O010O01O0N2N3L2O00000O2O3M2N3L3N3M2N3O00010O010O0N3L3N3M2N2M4M2N3M2MUhS5"}, "image_id": 741, "id": 12534}, {"iscrowd": 0, "category_id": 1, "bbox": [274.0, 246.0, 16.0, 14.0], "area": 114, "segmentation": {"size": [512, 512], "counts": "lWY41n?2O1N2N2N2N2N20O1O1N2N2N2N2N2ORh^3"}, "image_id": 741, "id": 12535}, {"iscrowd": 0, "category_id": 1, "bbox": [316.0, 258.0, 41.0, 43.0], "area": 809, "segmentation": {"size": [512, 512], "counts": "_Xn42m?2N2N2N2N2N2N2N2O1N2N3M2N2N2000000000000001O01O000000N2N2N2N2N2O1N3M2N2N2N2N2N2NSW]2"}, "image_id": 741, "id": 12536}, {"iscrowd": 0, "category_id": 1, "bbox": [174.0, 277.0, 70.0, 82.0], "area": 2874, "segmentation": {"size": [512, 512], "counts": "fZg21m?3N1N3M2O2M3N1N3M3N1N3M2O2ESO_AP1^>RO`AP1h=ROdB1Bo0g=WOaBKFP1h=WO_BT1^=oN`BS1^=oN_BS1`=nN^BU1a=f0O001O0O2O00O1N2N3N2M2O2M2N3N2M2N3N1N3M3N1N3N1N3M3oNZAf0f>YO[Ah05ROW>3gAk0OTO[>OhAl0LWOe>j0XAYOh>n01IWAVOk>j0UASOn>l0RASOo>m03N1N3M3N1N3M2O2M3N1N3M3N1N[fU4"}, "image_id": 741, "id": 12537}, {"iscrowd": 0, "category_id": 1, "bbox": [403.0, 284.0, 54.0, 53.0], "area": 1532, "segmentation": {"size": [512, 512], "counts": "biY61n?2N2N2O1N2N2N2N2N2N2N3M2N2N2N2N2N2N2O1N2N2N2N2N201O000000O1N2N2N2N2O2M1O0110000O1N2N2N2N2N2N2O2M2^Oc@>a?N3I^@Kd?36NVVk0"}, "image_id": 741, "id": 12538}, {"iscrowd": 0, "category_id": 1, "bbox": [152.0, 308.0, 8.0, 11.0], "area": 54, "segmentation": {"size": [512, 512], "counts": "kY\\22k?3M4N110OO1M4M\\f_5"}, "image_id": 741, "id": 12539}, {"iscrowd": 0, "category_id": 1, "bbox": [363.0, 310.0, 19.0, 18.0], "area": 142, "segmentation": {"size": [512, 512], "counts": "Rje51n?3M2N2N2N00000001O0001O0001O2N2N2N2NWfP2"}, "image_id": 741, "id": 12540}, {"iscrowd": 0, "category_id": 1, "bbox": [461.0, 316.0, 51.0, 54.0], "area": 1519, "segmentation": {"size": [512, 512], "counts": "ajV72m?2N3M2N2N2N2N2N2N2O1N2N2N2N2N2N2N3M2N2N2N2N2O10000000000001M2N2N2O1N2N2N2O10000O1N2N2N2N3M2O1N2N2N2NXE"}, "image_id": 741, "id": 12541}, {"iscrowd": 0, "category_id": 1, "bbox": [31.0, 318.0, 76.0, 73.0], "area": 3005, "segmentation": {"size": [512, 512], "counts": "ck?1l?3M3N3L3M3N3L3N3L30010O010O00010O00010O010O00010O01O01NN@YOjAk0R>XOkAk0R>XOlAj0R>YOmAh0o=[OoAg0o=[OnAh0o=g00O010O00010O01O01O010O00010O01O01O010O00010O010O000VOPBDQ>8RBEQ>8SBDP>:RBDQ>8RBEQ>9RBCR>9QBEQ>:PBCS>>mA_OV>`0e0O2L3M3N3LeUZ6"}, "image_id": 741, "id": 12542}, {"iscrowd": 0, "category_id": 1, "bbox": [349.0, 329.0, 8.0, 8.0], "area": 38, "segmentation": {"size": [512, 512], "counts": "[j^52m?2N2O2OO1N2O1NdU]2"}, "image_id": 741, "id": 
12543}, {"iscrowd": 0, "category_id": 1, "bbox": [356.0, 333.0, 15.0, 15.0], "area": 117, "segmentation": {"size": [512, 512], "counts": "dZb51n?2M3N2N2N2N2000O1N2N2N2N2N2N\\UV2"}, "image_id": 741, "id": 12544}, {"iscrowd": 0, "category_id": 1, "bbox": [116.0, 350.0, 81.0, 73.0], "area": 2737, "segmentation": {"size": [512, 512], "counts": "i[j13k?2N3N2M2N3M2O2M2N3M3N1010O10O010O10O010O10M2N3M3M2O2M201O010O010O10OO2M2N3N101O10O010O010O10gA]NS>d1kA^NU>f1010O010O10O10ZNkA?1:T>UOnA?O=Y>AjA>W>_OkAb0T>\\OoAc0R>[OoAf0Q>WORBi0m=UOVBj0`>10O010O010O1M2N3N1N3M2N3N2M2N3M2O2MaSm4"}, "image_id": 741, "id": 12545}, {"iscrowd": 0, "category_id": 1, "bbox": [405.0, 356.0, 37.0, 39.0], "area": 706, "segmentation": {"size": [512, 512], "counts": "akZ61n?2O1N2N2N3M2N2N2N2N2N2N2O1N2N3N10000001O0000000N2N2N2N2N2O2^Oc@00010O00UOYA=g>@\\Aa0d>[O`Ac0a>YOcAd0n>M4L3M3L5LcSQ3"}, "image_id": 741, "id": 12547}, {"iscrowd": 0, "category_id": 1, "bbox": [13.0, 381.0, 53.0, 65.0], "area": 1975, "segmentation": {"size": [512, 512], "counts": "ll61l?3M4M2M3JDe@`0W?6M3N3L3M4O00010O0gAmNe=S1XBPOi=P1SBSOm=m0QBVOo=j0mAYOS>i0hA[OW>W11O01O01O01O01O01O01O010ON3M20010OM3M4M2M3CaAVOc>f0`AWOc>g0`AUOd>g0l0hAQO[>l0hAROZ>k0jAQOZ>l0>M7J2M4M2M3N3L3NRcX6"}, "image_id": 741, "id": 12551}, {"iscrowd": 0, "category_id": 1, "bbox": [231.0, 423.0, 86.0, 58.0], "area": 3073, "segmentation": {"size": [512, 512], "counts": "anc34i?3M3M3M4L3G]ORAf0j>_ORAe0j>:M3M4L3N201O00010O00010O00010O00N3K4O1010O00010O00010O00010O0001O01O00010O00010O00010O00010O00010O00010O00010OjN^Al0d>QO_Al0l>O20O00010O00010O00010O0000M4L3M3M4L3M3M4LmQQ3"}, "image_id": 741, "id": 12552}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 424.0, 50.0, 75.0], "area": 1957, "segmentation": {"size": [512, 512], "counts": "l]W72m?2N2N2N2N2N2O1N2N3M2N3M3M2N2O1000000N101N2N2N2N2N2N3M2N2N2O1000000000001O01O0000000000000001O01O0NTB"}, "image_id": 741, "id": 12553}, {"iscrowd": 0, "category_id": 1, "bbox": [118.0, 431.0, 63.0, 79.0], "area": 2854, "segmentation": {"size": [512, 512], "counts": "Z_k11m?3L3M4M2M3M4M2M3N3O0010OM3N3N10001M2M4L3N2M4L3N2M4L3N3L3N210O01O01O010O00010O01O01O010O00010O01TOXB^Oh=`0ZBAe=<^BDc=8aBEa=8bBF`=8cBDa=8bBEa=8bBFa=7bBEa=8bBEa=8bBFa=7U1M3M4MVRU5"}, "image_id": 741, "id": 12554}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 458.0, 31.0, 54.0], "area": 1191, "segmentation": {"size": [512, 512], "counts": "b>^1_>3M3N200001O00001O00001O001O000LcAeN^>W1eAiN[>T1:N3L3M4M2M3M4L3N2M4L3N3L_Q`7"}, "image_id": 741, "id": 12555}, {"iscrowd": 0, "category_id": 1, "bbox": [340.0, 476.0, 69.0, 36.0], "area": 1611, "segmentation": {"size": [512, 512], "counts": "n_Z52k?3L4M3M3M300001O00N2M3M3M3M31O00M3M3001O00001O00001O00001O0000001O00001O00001O00001O00001VOm@e0X?O00001O0000001O00001O004L00001O00001O0O1M3M4L_Pc1"}, "image_id": 741, "id": 12556}, {"iscrowd": 0, "category_id": 1, "bbox": [295.0, 478.0, 13.0, 13.0], "area": 119, "segmentation": {"size": [512, 512], "counts": "Woc42k?3M3M4O00010O0001O000L5LRaU3"}, "image_id": 741, "id": 12557}, {"iscrowd": 0, "category_id": 1, "bbox": [34.0, 486.0, 44.0, 26.0], "area": 716, "segmentation": {"size": [512, 512], "counts": "o_a01l?3M3N2O100001O001OM3M3N2M3M3N2N21O001O00001O00001O001O00001O00001O001O00001O0M4M2M3M4Ma`h6"}, "image_id": 741, "id": 12558}, {"iscrowd": 0, "category_id": 1, "bbox": [223.0, 487.0, 87.0, 25.0], "area": 1081, "segmentation": {"size": [512, 512], "counts": 
"oo_31l?3M3O11O00001O00001O00M3M3M3M3M3M3N21O00001O00001O00001O00001O00001O00001O00001O00001O00001O00001O00001O00001O00001O00001O00001O00001O00001O00001O00001O00001O00001O00001O0OS`T3"}, "image_id": 741, "id": 12559}, {"iscrowd": 0, "category_id": 1, "bbox": [312.0, 488.0, 7.0, 9.0], "area": 45, "segmentation": {"size": [512, 512], "counts": "]_l43j?3N210O0000LkPP3"}, "image_id": 741, "id": 12560}, {"iscrowd": 0, "category_id": 1, "bbox": [84.0, 499.0, 9.0, 9.0], "area": 58, "segmentation": {"size": [512, 512], "counts": "g_Z13i?40001O01O01OO2L^Pa6"}, "image_id": 741, "id": 12561}, {"iscrowd": 0, "category_id": 1, "bbox": [314.0, 508.0, 6.0, 4.0], "area": 21, "segmentation": {"size": [512, 512], "counts": "m_m43l?100001O00S`o2"}, "image_id": 741, "id": 12562}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 53.0, 39.0], "area": 1251, "segmentation": {"size": [512, 512], "counts": "8j0R?4L401O000000001O000000001O000000001O0000001O00O1K5K5L4K5K5K500000000001O0000001O000000001O000000001OL4KU`U7"}, "image_id": 744, "id": 12563}, {"iscrowd": 0, "category_id": 1, "bbox": [120.0, 0.0, 20.0, 9.0], "area": 104, "segmentation": {"size": [512, 512], "counts": "PPl11o?1O001O1O001O001O1O001O1O0000O1N2O1N2NRPj5"}, "image_id": 744, "id": 12564}, {"iscrowd": 0, "category_id": 1, "bbox": [185.0, 0.0, 35.0, 24.0], "area": 636, "segmentation": {"size": [512, 512], "counts": "a`l21j?5L4L4L5O000000001O0000001O000000001O0000001O0000001O0000O1L4K5L4L4KUPb4"}, "image_id": 744, "id": 12565}, {"iscrowd": 0, "category_id": 1, "bbox": [230.0, 0.0, 73.0, 23.0], "area": 1109, "segmentation": {"size": [512, 512], "counts": "UPc33i?4O10001O0000001O0000001O000000001O0000001O000000001O0000001O0000001O000000001O0000001O000000001O0000001O0000001O000000001O0000001O0000O1L4K5L4LT`X3"}, "image_id": 744, "id": 12566}, {"iscrowd": 0, "category_id": 1, "bbox": [429.0, 0.0, 14.0, 3.0], "area": 24, "segmentation": {"size": [512, 512], "counts": "P`f61o?00000000001O00000000001O0P`R1"}, "image_id": 744, "id": 12567}, {"iscrowd": 0, "category_id": 1, "bbox": [124.0, 13.0, 16.0, 19.0], "area": 182, "segmentation": {"size": [512, 512], "counts": "jPn12l?3M2N3L3N3M2010O000N3M2N3M2N3Lboi5"}, "image_id": 744, "id": 12568}, {"iscrowd": 0, "category_id": 1, "bbox": [448.0, 35.0, 34.0, 25.0], "area": 608, "segmentation": {"size": [512, 512], "counts": "cQP73h?6J5L4N20001O01O000001O01O0000010O000000010O000001O01O00000M3K6J5Kln>"}, "image_id": 744, "id": 12569}, {"iscrowd": 0, "category_id": 1, "bbox": [220.0, 36.0, 73.0, 45.0], "area": 1833, "segmentation": {"size": [512, 512], "counts": "jQ^31l?3M4L3M3M4L3L401O01O01O01O01O010O00010O00010O00010O00010M2M3010O00010O0010O00010O00010O00010O00010O0010O00010O0SOQAi0T?0O00010O00010O00N3L3M3M4L3M3M4LZ^]3"}, "image_id": 744, "id": 12570}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 41.0, 46.0, 48.0], "area": 1057, "segmentation": {"size": [512, 512], "counts": "b1?_?2M3N3M2010O00010O010O00010O0010O010O00010O010O01O01O010O0o@YOj>f0TA]Ok>c0RA@n>>RAEn>7SAKm>3SAOn>MTA5k>HYA7h>FZA;e>B_A=a>AaA`0P?O0N2M4M2N3LhmX7"}, "image_id": 744, "id": 12571}, {"iscrowd": 0, "category_id": 1, "bbox": [89.0, 44.0, 86.0, 72.0], "area": 2695, "segmentation": {"size": [512, 512], "counts": "`b\\12i?5L4K5L5J5L4O10010O0000010O000000010O0000010O000000010O000RATOh>R11O0001O000L4K23N20O2M2O100O101O00000100O1O2eN]AU1g>00YO]A2b>IcA73^Om=4VB>JAf>>XAEg>9SAJl>e000010O013L0010O010O01O01O010O01L3N3L3N2M4M2M4M2M4MX]X5"}, "image_id": 744, "id": 12572}, {"iscrowd": 0, "category_id": 1, "bbox": [383.0, 45.0, 57.0, 40.0], "area": 1422, 
"segmentation": {"size": [512, 512], "counts": "Vbo51k?5L3M3M4K4M3N3O01O0001O01O01O01O01O01O01O0001O01O01O01O01O01O01O0001O01OO2K4M3M4N100010O00000N3K4M3M4L3L4M4L3L4MbnS1"}, "image_id": 744, "id": 12573}, {"iscrowd": 0, "category_id": 1, "bbox": [499.0, 61.0, 13.0, 35.0], "area": 240, "segmentation": {"size": [512, 512], "counts": "ibi72k?3N2N3L3N3M2M4M2N2M4M2N3RN"}, "image_id": 744, "id": 12574}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 71.0, 28.0, 28.0], "area": 487, "segmentation": {"size": [512, 512], "counts": "jbm22k?3N3L3M3M4M2M301O01O01O010O00010O00010O00M4L3M3M4L3M3Mf]d4"}, "image_id": 744, "id": 12575}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 82.0, 12.0, 32.0], "area": 196, "segmentation": {"size": [512, 512], "counts": "b2P1P?M4M2N3L3N3L3N2N3L3N3M[mi7"}, "image_id": 744, "id": 12576}, {"iscrowd": 0, "category_id": 1, "bbox": [457.0, 84.0, 19.0, 20.0], "area": 246, "segmentation": {"size": [512, 512], "counts": "PcT74i?3M3M4L300010O00010O00010O0N2L5L3M3M[ma0"}, "image_id": 744, "id": 12577}, {"iscrowd": 0, "category_id": 1, "bbox": [430.0, 96.0, 24.0, 22.0], "area": 353, "segmentation": {"size": [512, 512], "counts": "]Sg63j?3M4L3M3O20OO101O00010O000101N001O01O00M4L3M3Mmll0"}, "image_id": 744, "id": 12578}, {"iscrowd": 0, "category_id": 1, "bbox": [15.0, 97.0, 16.0, 14.0], "area": 140, "segmentation": {"size": [512, 512], "counts": "Yc72k?3N2N3N1010O010O00010O001L3N3Ml\\`7"}, "image_id": 744, "id": 12579}, {"iscrowd": 0, "category_id": 1, "bbox": [370.0, 97.0, 50.0, 39.0], "area": 1176, "segmentation": {"size": [512, 512], "counts": "fSi52l?3L3M4L3N2M4L3N201O01O01O01O01O010n@VOl>j0QAYOo>m00O0010O0001M2O1010O00010O01O01O01O01O010O0001L3M3M4M2M4L3M3Mdl]1"}, "image_id": 744, "id": 12580}, {"iscrowd": 0, "category_id": 1, "bbox": [205.0, 98.0, 61.0, 48.0], "area": 1646, "segmentation": {"size": [512, 512], "counts": "icV34j?2M4M2M4M2M310O010M2N3L31O010O01O010L3N2O2O0010O010O010O00010O010O010O000nNWAn0n>O01O010O01O010O01O010O01N1N3M2M4M2N2M4M2N3L3N[lj3"}, "image_id": 744, "id": 12581}, {"iscrowd": 0, "category_id": 1, "bbox": [93.0, 100.0, 27.0, 33.0], "area": 542, "segmentation": {"size": [512, 512], "counts": "lc^12l?2M4M2N3L3N2M4M2M4M20010O0010O0010O0001M2\\Ol@:W?Bl@;W?Cl@:^?M3N3LalS6"}, "image_id": 744, "id": 12582}, {"iscrowd": 0, "category_id": 1, "bbox": [122.0, 103.0, 14.0, 14.0], "area": 120, "segmentation": {"size": [512, 512], "counts": "YSm13k?200X@Md?8O001O00010O010ON2N3Lhlk5"}, "image_id": 744, "id": 12583}, {"iscrowd": 0, "category_id": 1, "bbox": [472.0, 104.0, 18.0, 21.0], "area": 240, "segmentation": {"size": [512, 512], "counts": "fS\\73j?3N3M2M4M2N201O010O01O0N2N3L3N3M2Mgl:"}, "image_id": 744, "id": 12584}, {"iscrowd": 0, "category_id": 1, "bbox": [151.0, 120.0, 25.0, 25.0], "area": 385, "segmentation": {"size": [512, 512], "counts": "Wd[23k?2M4M2M3M4M201O01O01O010O00010O010OO1M4L3N3L3N2MUlW5"}, "image_id": 744, "id": 12585}, {"iscrowd": 0, "category_id": 1, "bbox": [36.0, 122.0, 44.0, 59.0], "area": 1445, "segmentation": {"size": [512, 512], "counts": "YUb03k?2M4M2M4M2M3M4M2M4M2M3N3L3M4M2M3N3L3O1010O01O01O0N3M2M3N3L3N3L3M3N3L3N3L3N2M4M2M3M4M2MPlg6"}, "image_id": 744, "id": 12586}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 125.0, 21.0, 32.0], "area": 440, "segmentation": {"size": [512, 512], "counts": "n3k0T?20O0010O010O00010O01N1M3N3L3N3L3N2M4M2Nm[e7"}, "image_id": 744, "id": 12587}, {"iscrowd": 0, "category_id": 1, "bbox": [495.0, 130.0, 17.0, 47.0], "area": 412, "segmentation": {"size": [512, 512], "counts": 
"Zeg71l?3N3L3N2M4M2M4M2M3N3L3N3L3N2M4M2nK"}, "image_id": 744, "id": 12588}, {"iscrowd": 0, "category_id": 1, "bbox": [13.0, 136.0, 26.0, 30.0], "area": 458, "segmentation": {"size": [512, 512], "counts": "md62l?3M2M3N3M2M4M2M4M20010O0010O010O0N3M2M3N3L3N3M2M3Nd[\\7"}, "image_id": 744, "id": 12589}, {"iscrowd": 0, "category_id": 1, "bbox": [87.0, 142.0, 52.0, 55.0], "area": 1641, "segmentation": {"size": [512, 512], "counts": "]e[12k?3N3L3N4M11O01O001M2M3N3L3N2UATO_>P1^ASO_>o0^ATO_>X1O2O0010O0010O0010O00010O010O000M4M2M4M2M3N3L3O2O01O01O0O2M2M3N3Bd@3_?Je@3e?MQ[j5"}, "image_id": 744, "id": 12590}, {"iscrowd": 0, "category_id": 1, "bbox": [219.0, 146.0, 31.0, 29.0], "area": 549, "segmentation": {"size": [512, 512], "counts": "Te]32k?4M2M4L3N2M4M210O01O01O010O01O01O010O01O01O01O0N2N3L3N3L3N2MXkR4"}, "image_id": 744, "id": 12591}, {"iscrowd": 0, "category_id": 1, "bbox": [349.0, 146.0, 49.0, 50.0], "area": 1475, "segmentation": {"size": [512, 512], "counts": "ee^51l?4M2M4M2M3M4M2N30O00O2M2M4M2M3N3L300010O010O00010O010O00010O010O00010M2N3L3N2M4M2M3N3M210Ge@K[?3g@J\\?3g@K\\?2\\kh1"}, "image_id": 744, "id": 12592}, {"iscrowd": 0, "category_id": 1, "bbox": [401.0, 148.0, 48.0, 33.0], "area": 871, "segmentation": {"size": [512, 512], "counts": "UeX61m?3L3M3N3L3N3M20010O00010O010O00010O0010O0010N10010O010O00010O01O01O010O00010O01O0N2M4M2M3M4M2MQ[o0"}, "image_id": 744, "id": 12593}, {"iscrowd": 0, "category_id": 1, "bbox": [137.0, 165.0, 53.0, 49.0], "area": 1390, "segmentation": {"size": [512, 512], "counts": "oeT22n?2M2N2N3N1N2N3M2O2M2N2JYOQAi0l>7O1N3M10O0001O01O0001O01O0001O01O012M2N2N20N2N3NO000KWATOk>j0WAUOj>j08M2N3M2O1N3M2K`@Ha?67N1N2NZjP5"}, "image_id": 744, "id": 12594}, {"iscrowd": 0, "category_id": 1, "bbox": [190.0, 178.0, 54.0, 62.0], "area": 1903, "segmentation": {"size": [512, 512], "counts": "RWo21m?3L3N3M2N3L3N2N3L3N3M2M4M2N3L3N2N3M2M4M2N3O000010O010O010N1N3L3N2N3M2M2O001N3N2O20O01O0M4M2N2M4\\Oh@CcA=]>AfA>[>^OhAc0W>ZOmAe0S>YOoAh0d>N3L3M4M2M3N3L3M3NnhR2"}, "image_id": 744, "id": 12598}, {"iscrowd": 0, "category_id": 1, "bbox": [461.0, 197.0, 51.0, 41.0], "area": 1214, "segmentation": {"size": [512, 512], "counts": "lfV71m?2M4M2N2N3M2M4M2N2N3N1010O010O00010O010O010O00010O010OO2M21O01O010O01O010O01O010OM4M2N2M4M2N3L3N3L3N`I"}, "image_id": 744, "id": 12599}, {"iscrowd": 0, "category_id": 1, "bbox": [380.0, 203.0, 24.0, 23.0], "area": 314, "segmentation": {"size": [512, 512], "counts": "jVn51m?2M4M2M4M2M310O0010O0010O0010O0010OM3N3L3N3L3N`ie1"}, "image_id": 744, "id": 12600}, {"iscrowd": 0, "category_id": 1, "bbox": [438.0, 224.0, 28.0, 21.0], "area": 292, "segmentation": {"size": [512, 512], "counts": "ZWk61m?2N2N3M2N30O01O010O01O010O010O01O010O010O01O010M2M4M2Nhhf0"}, "image_id": 744, "id": 12601}, {"iscrowd": 0, "category_id": 1, "bbox": [479.0, 237.0, 21.0, 19.0], "area": 232, "segmentation": {"size": [512, 512], "counts": "fg_73k?2M4M2N3O01O010O01O010O01O010O010L3N2N3L_h5"}, "image_id": 744, "id": 12602}, {"iscrowd": 0, "category_id": 1, "bbox": [438.0, 247.0, 50.0, 40.0], "area": 1143, "segmentation": {"size": [512, 512], "counts": "]Xk62l?2M3N3L3N3L3N3L3N201O0010O0010O0010O0010O0010O0010O010O00010O010O00010O010O0010O0N3L3N2M4M2M4M2M3Nlg;"}, "image_id": 744, "id": 12603}, {"iscrowd": 0, "category_id": 1, "bbox": [305.0, 261.0, 58.0, 45.0], "area": 1477, "segmentation": {"size": [512, 512], "counts": "ohh41l?3N2M4M2M4M2M3N3L3N3N10010O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O00010O010O00010O01M2N2M4M2M4M2M3N3L3N[WZ2"}, "image_id": 744, "id": 12604}, {"iscrowd": 0, 
"category_id": 1, "bbox": [496.0, 266.0, 16.0, 26.0], "area": 274, "segmentation": {"size": [512, 512], "counts": "kXh72l?3L3N3M2N2N3M2N30O01O010O01O01bG"}, "image_id": 744, "id": 12605}, {"iscrowd": 0, "category_id": 1, "bbox": [367.0, 276.0, 33.0, 37.0], "area": 706, "segmentation": {"size": [512, 512], "counts": "^ig52k?4M2M3N3M2M4M2M3N3M2N30O00010O010O010O00010L3N3L3N2N3L3N3L3N3L3NUgg1"}, "image_id": 744, "id": 12606}, {"iscrowd": 0, "category_id": 1, "bbox": [431.0, 298.0, 41.0, 48.0], "area": 1142, "segmentation": {"size": [512, 512], "counts": "_jg61m?2N3L3N2N3L3N3M2M4M2M3N3M2M4M2N201O010O0010O0010ON3M2M3N3O010N1N3L3N2Gl@CX?:8N3L3N3M^fc0"}, "image_id": 744, "id": 12607}, {"iscrowd": 0, "category_id": 1, "bbox": [402.0, 301.0, 18.0, 22.0], "area": 232, "segmentation": {"size": [512, 512], "counts": "lYY62l?3L3N3M2N2M4N1010O010N1N3L3N2N3L3Naf]1"}, "image_id": 744, "id": 12608}, {"iscrowd": 0, "category_id": 1, "bbox": [481.0, 310.0, 31.0, 49.0], "area": 937, "segmentation": {"size": [512, 512], "counts": "ij`71m?3N1N3M2N3N2M2N3M2N3N1N3M2N3N1N3M3M2O2M210O010O01O0N3M2N3M2NWF"}, "image_id": 744, "id": 12609}, {"iscrowd": 0, "category_id": 1, "bbox": [52.0, 353.0, 239.0, 159.0], "area": 21809, "segmentation": {"size": [512, 512], "counts": "n_j02k?3N2M3N2N2M3N2M3N2M3N2N2M3N2M3N2M3N2M3N2N2M3N2M3N2M3N2N2M3N2M3N2M3N2M3N2N2M3N2M3N2M3N2N2M3N2M3N2M3N2M3N2N2M3N2M3N2M3N2N2M3N2M3N2O1001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O00001O001O001O00001O001TMVD`1j;^NXDb1h;\\NZDd1g;XN]Dg1c;WN_Di1b;SNbDl1^;RNdDn1];oMfDP2Z;mMiDS2W;kMkDU2V;gMnDX2R;fMPEZ2Q;bMSE]2m:aMUE_2k:_MWEa2U0O101N101O0O2O0O2O000O2O0O2O0O1000O2O2M2O2M3N2M2O2M3N1N3N2M2O2M3N2M2O2M3N1N3N2M2OlmZ7"}, "image_id": 745, "id": 12622}, {"iscrowd": 0, "category_id": 1, "bbox": [266.0, 55.0, 24.0, 26.0], "area": 370, "segmentation": {"size": [512, 512], "counts": "XRU43k?2M4M2N3M2M4M2O110O010O010O0001M2N3M2M4M2N3M2MUn^3"}, "image_id": 745, "id": 12623}, {"iscrowd": 0, "category_id": 1, "bbox": [471.0, 66.0, 29.0, 28.0], "area": 460, "segmentation": {"size": [512, 512], "counts": "db[71l?4M2M4M2M4M2M310O010O010O00010O010O01O01ON3M2M4M2N3L3N2Ngm5"}, "image_id": 745, "id": 12624}, {"iscrowd": 0, "category_id": 1, "bbox": [30.0, 72.0, 31.0, 33.0], "area": 550, "segmentation": {"size": [512, 512], "counts": "lR?2m?2M2O2M3N1N3N2M2N3N2M2O2M21000O0100N1O2M2O2M3N1N3N2M2O2M3N1N_]Q7"}, "image_id": 745, "id": 12625}, {"iscrowd": 0, "category_id": 1, "bbox": [496.0, 79.0, 16.0, 23.0], "area": 246, "segmentation": {"size": [512, 512], "counts": "oRh71m?2N3L3N3M2M3N3O0010O01O01O010O^M"}, "image_id": 745, "id": 12626}, {"iscrowd": 0, "category_id": 1, "bbox": [202.0, 84.0, 44.0, 51.0], "area": 1187, "segmentation": {"size": [512, 512], "counts": "nSU31m?2M3N3L3N3L3N2M4L3N3M200010OM4M2M3N3L3N3N100001L3N3L3M3N3L3N3L3N210O001M2N2M4M2M4M2M3NVmT4"}, "image_id": 745, "id": 12627}, {"iscrowd": 0, "category_id": 1, "bbox": [249.0, 86.0, 60.0, 78.0], "area": 2426, "segmentation": {"size": [512, 512], "counts": "cdl32k?4M2M4M2N3L3N2O2O010O001M2N2M4M2N3@mNnAV1P>lNnAW1n=mNoAU1o=mNnAV1P>?L3N3M2N3O01O010O010O01OM4M2M4M2N3L3N2N3L3N3M2M4M2N2M4M2N3L3N3M2M3N3M2M4M2Nn\\U3"}, "image_id": 745, "id": 12628}, {"iscrowd": 0, "category_id": 1, "bbox": [137.0, 90.0, 42.0, 33.0], "area": 750, "segmentation": {"size": [512, 512], "counts": "\\cT21m?2N3M2M3N3M2N3L301O010O01O010O010O01O010O01O010O010O01O010O01O010O0N3M2M3N3M2N3L3Nj\\V5"}, "image_id": 745, "id": 12629}, {"iscrowd": 0, "category_id": 1, "bbox": [295.0, 92.0, 71.0, 98.0], 
"area": 3293, "segmentation": {"size": [512, 512], "counts": "^ec42k?3DLj@7R?Lk@7S?Kk@8R?b0nA[OV>b0lA\\OV>b0mA[OU>b0nA[OV>b0mA[OU>b0g0M2M4M2N2M4M2MilX2"}, "image_id": 745, "id": 12630}, {"iscrowd": 0, "category_id": 1, "bbox": [490.0, 98.0, 16.0, 15.0], "area": 154, "segmentation": {"size": [512, 512], "counts": "ZSe73k?2N2N3M201O010O00010O01M2N2N3Lml2"}, "image_id": 745, "id": 12631}, {"iscrowd": 0, "category_id": 1, "bbox": [405.0, 104.0, 58.0, 40.0], "area": 1428, "segmentation": {"size": [512, 512], "counts": "RdZ61k?4M4K4L4M3L5K40000010O0000010O00010O0000010O0000010O0000010O00010O00000TOo@j0T?O000010O000010O0000010O00001O0M3L4L5K4M3L\\\\h0"}, "image_id": 745, "id": 12632}, {"iscrowd": 0, "category_id": 1, "bbox": [394.0, 139.0, 47.0, 30.0], "area": 789, "segmentation": {"size": [512, 512], "counts": "mTU61k?4O2O01O00010O000000M3L5L3L4O101O01O01O00010O0001O01O01O01O0001O01O01O01O00010O00O2K4M3L4M4L_[S1"}, "image_id": 745, "id": 12633}, {"iscrowd": 0, "category_id": 1, "bbox": [477.0, 157.0, 27.0, 25.0], "area": 431, "segmentation": {"size": [512, 512], "counts": "Re^72n?3M2N3L4M2N3L3N00O01000O10O10O10O010000O3N3M3L3N3L4Mcj3"}, "image_id": 745, "id": 12634}, {"iscrowd": 0, "category_id": 1, "bbox": [380.0, 162.0, 63.0, 54.0], "area": 1709, "segmentation": {"size": [512, 512], "counts": "mUn51m?3DNf@5X?Mf@4X?Nf@5X?;N1N3N2O010O1N1O2M2O2M3O01000O01N2N1O200O0100O0100O01000O01000O0PORAo0P?0O01000O0100O0100O0100N1O2M3N1N3M3N1N3N2M2O2M2O2MQZR1"}, "image_id": 745, "id": 12635}, {"iscrowd": 0, "category_id": 1, "bbox": [90.0, 191.0, 42.0, 52.0], "area": 1219, "segmentation": {"size": [512, 512], "counts": "WW]12l?3L3N2N3L3N3L3N3M2M3N3L3N3M2M3N3M201O0010O0010OO2M2M3N3M2M4M2M4M2N2M4M2M4M2N2M4M2Njim5"}, "image_id": 745, "id": 12636}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 199.0, 8.0, 16.0], "area": 81, "segmentation": {"size": [512, 512], "counts": "W6`0a?N2N100O0H^@3d?K^@3i?Nbik7"}, "image_id": 745, "id": 12637}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 199.0, 53.0, 58.0], "area": 1493, "segmentation": {"size": [512, 512], "counts": "[77h?2N2N1N3N2N2N2N2N2N1N3N2N2N2N2N2M3N1O000000O10O10000000O10O1001O1O2M3N2N2N2N2N1O2M3N2N2N2N2N1N3N2N2N2N2NYYU7"}, "image_id": 745, "id": 12638}, {"iscrowd": 0, "category_id": 1, "bbox": [138.0, 219.0, 54.0, 57.0], "area": 1589, "segmentation": {"size": [512, 512], "counts": "nWU21o?1N3M2N3M2O1N3M2N2N3N1N3M2@UOhAm0U>VOiAm0S>VOlAi0S>[OjAf0T>]OjAc0U>@iA`0X>c02N101N1O2N101N1O1OO1101N1O2N1O101N1O2N1O2O0O2N1O101L3N3M2M4M2M4M2Nkho4"}, "image_id": 745, "id": 12639}, {"iscrowd": 0, "category_id": 1, "bbox": [90.0, 257.0, 27.0, 30.0], "area": 467, "segmentation": {"size": [512, 512], "counts": "bX]13k?2N3L3N2M4M2M4O00010i@ZOR?k00O00010O0N3O000M4M2M4M2N2M4M2MkWU6"}, "image_id": 745, "id": 12640}, {"iscrowd": 0, "category_id": 1, "bbox": [20.0, 260.0, 38.0, 37.0], "area": 701, "segmentation": {"size": [512, 512], "counts": "hX:2m?2N2N3M2N2O1N2N3M2N2N2O1N2N00000010O0000000000101N2N3M2N2N2O1N3M2N2N2N2O1N^gR7"}, "image_id": 745, "id": 12641}, {"iscrowd": 0, "category_id": 1, "bbox": [103.0, 269.0, 41.0, 41.0], "area": 998, "segmentation": {"size": [512, 512], "counts": "\\ic11m?2N2M4M2N3L3N3M2N3L3N2N3M2N30O010O010O0010O00N3O010FSA_Om>?VA@k>`0TAAk>?VA@k>`0TA@l>`0TA^On>c0RAYOR?f04^Ok@9V?Dl@=\\?N1M3N3M2NQgg5"}, "image_id": 745, "id": 12642}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 277.0, 2.0, 3.0], "area": 4, "segmentation": {"size": [512, 512], "counts": "e83n?N[gn7"}, "image_id": 745, "id": 12643}, {"iscrowd": 0, "category_id": 1, "bbox": [428.0, 299.0, 43.0, 
41.0], "area": 1505, "segmentation": {"size": [512, 512], "counts": "kYf6h0S?5000000000000001O000000000000K5I700001O0000000000000000000000000000000001O00000000MgVd0"}, "image_id": 745, "id": 12644}, {"iscrowd": 0, "category_id": 1, "bbox": [485.0, 303.0, 21.0, 21.0], "area": 439, "segmentation": {"size": [512, 512], "counts": "aib7c0[?200000000000000000000000000000000000000af2"}, "image_id": 745, "id": 12645}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 309.0, 9.0, 28.0], "area": 154, "segmentation": {"size": [512, 512], "counts": "e9l0V?N2M2O2M3Eb@0`?Mc@0_?Ob@0`?Mc@0[ek7"}, "image_id": 745, "id": 12646}, {"iscrowd": 0, "category_id": 1, "bbox": [47.0, 309.0, 34.0, 29.0], "area": 681, "segmentation": {"size": [512, 512], "counts": "lig04l?4L4K4M4L3M0O10O1000O10O10O1000O10O1000O10O1000O10O10O103M4L3L5L4LiUg6"}, "image_id": 745, "id": 12647}, {"iscrowd": 0, "category_id": 1, "bbox": [208.0, 337.0, 34.0, 41.0], "area": 871, "segmentation": {"size": [512, 512], "counts": "iZX34l?3M3L3N3M3L4MO10O10O12M3N3M3L4M1N01000O01000O010003L4M3M3L4_Of@6]?Fg@7d?L4M_dV4"}, "image_id": 745, "id": 12648}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 344.0, 50.0, 56.0], "area": 1516, "segmentation": {"size": [512, 512], "counts": "b;`0_?3M2N2O1N3M2N2N2N2O2M2N000010O00000000010O000000010O00000000011N2N2N3M2O0O00002N3M2O1^Og@:[?Dh@9Z?Eh@9a?N2O1N3MbdV7"}, "image_id": 745, "id": 12649}, {"iscrowd": 0, "category_id": 1, "bbox": [256.0, 344.0, 60.0, 37.0], "area": 1771, "segmentation": {"size": [512, 512], "counts": "\\[P4>b?00000000000D?M000000000000000000000000000000000000000000000000000000000000000000000H800000000000000000000000000000000XeQ3"}, "image_id": 745, "id": 12650}, {"iscrowd": 0, "category_id": 1, "bbox": [480.0, 344.0, 21.0, 24.0], "area": 504, "segmentation": {"size": [512, 512], "counts": "hZ`7h0X?000000000000000000000000000000000000000XU5"}, "image_id": 745, "id": 12651}, {"iscrowd": 0, "category_id": 1, "bbox": [72.0, 351.0, 55.0, 55.0], "area": 1485, "segmentation": {"size": [512, 512], "counts": "o[T12m?3M2O2M2N2N3N1N2N3M2N2O2M2N2N3M10O01O000JnN`AQ1`>QO^AP1b>QO\\Ao0d>60000010O000000010O1O2N3N0O1O2N2N3N1N2N2N3M2O1N3M2N2O1N3M2N2N[TP6"}, "image_id": 745, "id": 12652}, {"iscrowd": 0, "category_id": 1, "bbox": [407.0, 353.0, 54.0, 39.0], "area": 1542, "segmentation": {"size": [512, 512], "counts": "_k[6i0W?00000000000000000000000000000000000000000000000000000000000B>000000000000000000000000000000008XOQAOo>1`00000000gTi0"}, "image_id": 745, "id": 12653}, {"iscrowd": 0, "category_id": 1, "bbox": [154.0, 371.0, 54.0, 41.0], "area": 1574, "segmentation": {"size": [512, 512], "counts": "d\\]21h?7I7I8I61O0000000001O01O0000000001O01O0000000001O01O0000000001O0I7M3001O0001O00000001O0001O000000I8H7J6I7Iadg4"}, "image_id": 745, "id": 12654}, {"iscrowd": 0, "category_id": 1, "bbox": [504.0, 374.0, 8.0, 21.0], "area": 158, "segmentation": {"size": [512, 512], "counts": "k[l7`0[?5001O00000000YD"}, "image_id": 745, "id": 12655}, {"iscrowd": 0, "category_id": 1, "bbox": [263.0, 395.0, 50.0, 46.0], "area": 2127, "segmentation": {"size": [512, 512], "counts": "[lS4^1b>0000000000000000000000000000000000000000000000J60000000000000000O10000000000000000000000000000000eSS3"}, "image_id": 745, "id": 12656}, {"iscrowd": 0, "category_id": 1, "bbox": [476.0, 398.0, 20.0, 26.0], "area": 520, "segmentation": {"size": [512, 512], "counts": "^\\^7j0V?0000000000000000000000000000000000000bc7"}, "image_id": 745, "id": 12657}, {"iscrowd": 0, "category_id": 1, "bbox": [223.0, 399.0, 24.0, 28.0], "area": 648, "segmentation": 
{"size": [512, 512], "counts": "dl_3g0T?50000000000000000000000000000000000000000000]OTTT4"}, "image_id": 745, "id": 12658}, {"iscrowd": 0, "category_id": 1, "bbox": [104.0, 405.0, 43.0, 50.0], "area": 1154, "segmentation": {"size": [512, 512], "counts": "R^d12k?4M2M310O0010O00M2M3N2N2N2M4M20001M3M3M4K4O100010O0001O01O01O01O0O1M4K4M3M4Bn@HV?5m@HV?540O10O1000O1000O1000O1000O3N2N000O0100LTATOl>l040O010001O5K5J6K4L5KWbj0"}, "image_id": 745, "id": 12662}, {"iscrowd": 0, "category_id": 1, "bbox": [5.0, 441.0, 53.0, 34.0], "area": 1295, "segmentation": {"size": [512, 512], "counts": "`n22j?4K5K6J5M3001O01O000001O01O000001O01O000000010O000000010O000000010L3L40000010O000000010O00000N2K6J5K5K5KYbR7"}, "image_id": 745, "id": 12663}, {"iscrowd": 0, "category_id": 1, "bbox": [176.0, 450.0, 24.0, 25.0], "area": 443, "segmentation": {"size": [512, 512], "counts": "c^h24g?5L5J5M31O0001O0001O01O0001O0001O0001O0O1K5L5Jnak4"}, "image_id": 745, "id": 12664}, {"iscrowd": 0, "category_id": 1, "bbox": [104.0, 460.0, 57.0, 52.0], "area": 1882, "segmentation": {"size": [512, 512], "counts": "W_d11n?2M2O2M3K4N3m@Ab>a0\\A@b>b0\\AAb>a0[ABb>`0]AAa>b0\\AAb>o0N3N2M2O100001O1O001O001O1O001O1O001OO1N2O1001O001M3N1N3M2O2M3N1N3N2M2N3N1N3N2M2O2M3M2O2MTQ_5"}, "image_id": 745, "id": 12665}, {"iscrowd": 0, "category_id": 1, "bbox": [66.0, 463.0, 22.0, 21.0], "area": 439, "segmentation": {"size": [512, 512], "counts": "c^Q1`0\\?40000000000000000000001O0001O000000000000`ac6"}, "image_id": 745, "id": 12666}, {"iscrowd": 0, "category_id": 1, "bbox": [464.0, 470.0, 30.0, 25.0], "area": 576, "segmentation": {"size": [512, 512], "counts": "X_X73g?6J6J7O000001O01O00000001O01O000001O0001O000001O00000J6J6K[a8"}, "image_id": 745, "id": 12667}, {"iscrowd": 0, "category_id": 1, "bbox": [508.0, 472.0, 4.0, 21.0], "area": 83, "segmentation": {"size": [512, 512], "counts": "h^n7e0[?0001OWA"}, "image_id": 745, "id": 12668}, {"iscrowd": 0, "category_id": 1, "bbox": [400.0, 475.0, 49.0, 37.0], "area": 1428, "segmentation": {"size": [512, 512], "counts": "h_X66d?7I6J6K50001O0001O0M3001O0001O0001O000L4L40001O01O00000000001O00000000001O00000000001M2J6J6K5J7IVQo0"}, "image_id": 745, "id": 12669}, {"iscrowd": 0, "category_id": 1, "bbox": [232.0, 480.0, 75.0, 32.0], "area": 1585, "segmentation": {"size": [512, 512], "counts": "l_d34g?5L4K5K5K5L4001O0000001O000000001O0000001O0000001O0WOm@e0W?01O0000001O000000001O0000001O000000001O0000001O0000001ON2N21O0000001O000000001O0000001O000L4K6KcPV3"}, "image_id": 745, "id": 12670}, {"iscrowd": 0, "category_id": 1, "bbox": [189.0, 481.0, 36.0, 31.0], "area": 842, "segmentation": {"size": [512, 512], "counts": "oon21k?4K5L4K5L4K5M3001O0000001O000000001O0000001O000000001O0000001L3K5L4K5LmP_4"}, "image_id": 745, "id": 12671}, {"iscrowd": 0, "category_id": 1, "bbox": [2.0, 489.0, 54.0, 23.0], "area": 907, "segmentation": {"size": [512, 512], "counts": "o_11j?5K5L4K5M3001O000000001O0000001O000000001O000000001O000000001O0000001O000000001O000000001O000000001O0O1L4Kb`S7"}, "image_id": 745, "id": 12672}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 10.0, 5.0], "area": 28, "segmentation": {"size": [512, 512], "counts": "01o?1O001O1O001OO1N2OQPk7"}, "image_id": 748, "id": 12673}, {"iscrowd": 0, "category_id": 1, "bbox": [67.0, 0.0, 17.0, 9.0], "area": 80, "segmentation": {"size": [512, 512], "counts": "P`Q11o?1O001O1O1O1O001O1O1OO1N2O1O1O1NRPf6"}, "image_id": 748, "id": 12674}, {"iscrowd": 0, "category_id": 1, "bbox": [208.0, 0.0, 25.0, 15.0], "area": 188, "segmentation": {"size": [512, 512], "counts": 
"PPX31o?001O1O001O1O001O1O0X@Ke?9O1O001O1O010O01N2M2O2M3N1Nm_[4"}, "image_id": 748, "id": 12675}, {"iscrowd": 0, "category_id": 1, "bbox": [38.0, 14.0, 27.0, 28.0], "area": 408, "segmentation": {"size": [512, 512], "counts": "lPc02m?2N2M3N1O2N2M3N1O2M3N2O10O01000M3N2N1O2M3N2N1O2M3N2NX_o6"}, "image_id": 748, "id": 12676}, {"iscrowd": 0, "category_id": 1, "bbox": [196.0, 39.0, 20.0, 21.0], "area": 207, "segmentation": {"size": [512, 512], "counts": "eQR32m?1O2M3N2N1N3N2N1O0O10O2O2N2N2M2O2N2M3Ndnc4"}, "image_id": 748, "id": 12677}, {"iscrowd": 0, "category_id": 1, "bbox": [50.0, 54.0, 21.0, 20.0], "area": 217, "segmentation": {"size": [512, 512], "counts": "mQi03l?2M2O2M3N1010000O01000O01000OO2M3N1N3N2MR^l6"}, "image_id": 748, "id": 12678}, {"iscrowd": 0, "category_id": 1, "bbox": [94.0, 56.0, 28.0, 28.0], "area": 392, "segmentation": {"size": [512, 512], "counts": "WR_11n?2N2M3N1O2N2M3N1O2M3N2N110O1000M3N1O2M3N2N2M2O2N2M3N1OnmR6"}, "image_id": 748, "id": 12679}, {"iscrowd": 0, "category_id": 1, "bbox": [262.0, 100.0, 50.0, 42.0], "area": 1074, "segmentation": {"size": [512, 512], "counts": "eSS41n?2M2O2M3N1N3N2M2O2N2M2O200O010l@WOP?i0n@YOQ?k01000O010000O01000O0100O0O2M0100O012M2O2M3O001M3N2M2O2M3N1N3N2M2O_lS3"}, "image_id": 748, "id": 12680}, {"iscrowd": 0, "category_id": 1, "bbox": [154.0, 136.0, 28.0, 32.0], "area": 404, "segmentation": {"size": [512, 512], "counts": "PU]21n?2M2O2N2M2O2M3N1N3N2M2OO10O10O010O3N1N3N2M2O2N2M2O2M3NbkT5"}, "image_id": 748, "id": 12681}, {"iscrowd": 0, "category_id": 1, "bbox": [288.0, 136.0, 55.0, 59.0], "area": 1611, "segmentation": {"size": [512, 512], "counts": "gU`42m?2N2M2O2M3N1N3N2N2M2O2M3N1N3N1O0O010O010O10O10O010O101N3N2N1N3N2M20100O10O10O10TObA4_>JcA6\\>HfA9Z>EhA:Y>DiA9Z>DiA:X>EiA:Y>CjA:Y>DiA:X>DjA:Y>DiA:P?M2O2N2MeZd2"}, "image_id": 748, "id": 12682}, {"iscrowd": 0, "category_id": 1, "bbox": [37.0, 178.0, 59.0, 67.0], "area": 2004, "segmentation": {"size": [512, 512], "counts": "dfb01n?2N1N3N2N1N3N2M3N1O2o@]Oa>f0]A\\Oa>f0\\A\\Oc>e0\\A]Oa>f0]A\\Oa>S1N1N3N2M3N1O2M3N2N1N300O01N2N2M2O2N2M2O2M3N2N1N3N2N2M2O2N2M3N1N3N2N1N3N2N2M2O2N2M3N1N3Ngi_6"}, "image_id": 748, "id": 12683}, {"iscrowd": 0, "category_id": 1, "bbox": [106.0, 191.0, 36.0, 42.0], "area": 658, "segmentation": {"size": [512, 512], "counts": "mVe13l?2N2M2O2N2M3N2N2M2O2N2M2OO01000O01000O01000O102M3N2N1N3N2N2M2O2N2M3N1Oiih5"}, "image_id": 748, "id": 12684}, {"iscrowd": 0, "category_id": 1, "bbox": [226.0, 207.0, 54.0, 78.0], "area": 1820, "segmentation": {"size": [512, 512], "counts": "VXa31n?2M3N2N1N3N2M2O2M3N2N1N3000O010000EVO^Aj0`>YO`Ag0^>[ObAe0\\>\\OeAc0Y>@fAa0X>AhA?U>ClAFlA;R>GnA9o=JQB6m=KTB4j=OUB2i=0WB0f=2XB0g=2WBOh=3VBMi=T110O01000O01QOTBKl=DVBK1>l=EUBK0?m=DUBJ1`0k=DVBK1>l=EUBK0?m=CVBK0?l=EUBK1>l=EfB8]=EeB:b>N2M2O2MUhc3"}, "image_id": 748, "id": 12685}, {"iscrowd": 0, "category_id": 1, "bbox": [370.0, 216.0, 26.0, 27.0], "area": 376, "segmentation": {"size": [512, 512], "counts": "UWi53l?2N2N2M2O2N2N2M3N1O2N2000O10OO2N2M3N2N1O2M3N2N2M2Onhi1"}, "image_id": 748, "id": 12686}, {"iscrowd": 0, "category_id": 1, "bbox": [318.0, 218.0, 33.0, 35.0], "area": 589, "segmentation": {"size": [512, 512], "counts": "aWo41n?2M3N1N3N2N2M2O2N2M3N1N3N2N2M2O0002N1N3N2N2N1N3N2N2M3N1O2M3N2N1NmX`2"}, "image_id": 748, "id": 12687}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 249.0, 38.0, 51.0], "area": 510, "segmentation": {"size": [512, 512], "counts": "i7=c?101O0001O01O00010O0001O01O0001O01O00010O0001O01O0001O01O0001N1O1N2N3M2N2Ojf\\7"}, "image_id": 748, "id": 12688}, {"iscrowd": 0, "category_id": 1, "bbox": 
[47.0, 269.0, 36.0, 42.0], "area": 506, "segmentation": {"size": [512, 512], "counts": "bhg03m?1N2N2N3N1N2N3N1000010O0000010O0000010O0000010O00010O00N2N3N1N2N2N3M2OaVf6"}, "image_id": 748, "id": 12689}, {"iscrowd": 0, "category_id": 1, "bbox": [196.0, 287.0, 50.0, 32.0], "area": 818, "segmentation": {"size": [512, 512], "counts": "_YR31n?2M2O2M3N1O2M3N2M2O2N2N11000O10O1O0N3N2M3N1O2000O10O1000N1N3N2N2M20100O0100O0N3N2M3N1N3N2N1N3N2M2OcfT4"}, "image_id": 748, "id": 12690}, {"iscrowd": 0, "category_id": 1, "bbox": [264.0, 289.0, 35.0, 36.0], "area": 652, "segmentation": {"size": [512, 512], "counts": "hYT41m?3N1N3N2N1N3N2M2O2N2M2O2M3N2N1N3000O001M3N1N3N2M2O2M3N1N3N2M2O2N2M2OdVZ3"}, "image_id": 748, "id": 12691}, {"iscrowd": 0, "category_id": 1, "bbox": [190.0, 324.0, 31.0, 42.0], "area": 544, "segmentation": {"size": [512, 512], "counts": "X[o21n?1O2M3N2]OIZA8e>JXA9f>HYA:e>HYA8f>KWA6i>LUA4k>MTA3k>0SAOn>a000O02O2N2M3N2N1N3N2N2M2O2N2N2M2O2NbUa4"}, "image_id": 748, "id": 12692}, {"iscrowd": 0, "category_id": 1, "bbox": [227.0, 335.0, 32.0, 36.0], "area": 512, "segmentation": {"size": [512, 512], "counts": "Zka32l?3N2N1N3N2N2M2O2N2M2O1O0O10O10O10O10O0101O2M3N2N1N3N2M3N1O2M3NZUn3"}, "image_id": 748, "id": 12693}, {"iscrowd": 0, "category_id": 1, "bbox": [45.0, 360.0, 51.0, 79.0], "area": 1852, "segmentation": {"size": [512, 512], "counts": "`kf01n?4M2N3L3N3M2M4M2N3L3N3L3N3M2M4M2N3L3N3M2M4M2N2010O10O010OM4M2M4M3M2M4M2M4M2N3L4M2M4M2N3L3N3L4M2N3L3Ncb_6"}, "image_id": 748, "id": 12694}, {"iscrowd": 0, "category_id": 1, "bbox": [164.0, 363.0, 27.0, 27.0], "area": 375, "segmentation": {"size": [512, 512], "counts": "i[b22m?1O2N2N2M2O2N2N2N2N1N3N2000O100N1O2M3N2N1O2M3N2N2M2O[TP5"}, "image_id": 748, "id": 12695}, {"iscrowd": 0, "category_id": 1, "bbox": [200.0, 401.0, 38.0, 41.0], "area": 708, "segmentation": {"size": [512, 512], "counts": "^]T31n?2N2N1N3N2N2M2O2N2M3N2N1N3N2N0O10000O010000O102N1N3N2N2M2O2N2M3N1O2M3N2N1OVcX4"}, "image_id": 748, "id": 12696}, {"iscrowd": 0, "category_id": 1, "bbox": [231.0, 468.0, 27.0, 25.0], "area": 381, "segmentation": {"size": [512, 512], "counts": "Soc31m?2N3N2M2N3N1N3M3N101O010O10O010O1O0N3M2O2M2N3N1N3M2OUan3"}, "image_id": 748, "id": 12697}, {"iscrowd": 0, "category_id": 1, "bbox": [284.0, 506.0, 13.0, 6.0], "area": 46, "segmentation": {"size": [512, 512], "counts": "o_^41n?1N2N200001O1O001O1O001OQP[3"}, "image_id": 748, "id": 12698}, {"iscrowd": 0, "category_id": 1, "bbox": [116.0, 0.0, 16.0, 6.0], "area": 80, "segmentation": {"size": [512, 512], "counts": "PPj14l?2N00000000O100000000000000O3Nnom5"}, "image_id": 749, "id": 12699}, {"iscrowd": 0, "category_id": 1, "bbox": [164.0, 11.0, 25.0, 36.0], "area": 768, "segmentation": {"size": [512, 512], "counts": "\\Pb2e0[?000000i04O10000O4M4L3M3L4M4Llnm5"}, "image_id": 749, "id": 12701}, {"iscrowd": 0, "category_id": 1, "bbox": [182.0, 47.0, 10.0, 15.0], "area": 119, "segmentation": {"size": [512, 512], "counts": "`Qk2:f?4L00O10000000O1h0ZAUOi>i09M3N1N3M3N2M2O2M3N2M3Mmlh6"}, "image_id": 749, "id": 12704}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 92.0, 16.0, 30.0], "area": 342, "segmentation": {"size": [512, 512], "counts": "l2n0R?H800000000000000000000000000^Ofmg7"}, "image_id": 749, "id": 12705}, {"iscrowd": 0, "category_id": 1, "bbox": [169.0, 98.0, 19.0, 18.0], "area": 314, "segmentation": {"size": [512, 512], "counts": "[cd23h?5L:J000000000000000000000000000003MklQ5"}, "image_id": 749, "id": 12706}, {"iscrowd": 0, "category_id": 1, "bbox": [255.0, 102.0, 32.0, 56.0], "area": 1315, "segmentation": {"size": [512, 
512], "counts": "]do3;e?00000QOLRB4n=n0100000000007A8000000000000000000008H0000000E;00000R1nN`[`3"}, "image_id": 749, "id": 12707}, {"iscrowd": 0, "category_id": 1, "bbox": [284.0, 343.0, 56.0, 50.0], "area": 1937, "segmentation": {"size": [512, 512], "counts": "kZ^47i?;E;E000000000000O0100000000000037F8H0000000O100000O10000000000000L400000O1000000^OhA\\OW>e0b0000000000000000000O7J;E;E]de2"}, "image_id": 749, "id": 12708}, {"iscrowd": 0, "category_id": 1, "bbox": [199.0, 375.0, 24.0, 33.0], "area": 753, "segmentation": {"size": [512, 512], "counts": "gkS3P1P?000000000000001O0000000000000000000000000000QOXU`4"}, "image_id": 749, "id": 12709}, {"iscrowd": 0, "category_id": 1, "bbox": [238.0, 455.0, 13.0, 15.0], "area": 101, "segmentation": {"size": [512, 512], "counts": "`^g33l?2M2O2M3N1N12N2M2O2M3N1NgQR4"}, "image_id": 749, "id": 12710}, {"iscrowd": 0, "category_id": 1, "bbox": [114.0, 0.0, 245.0, 90.0], "area": 9728, "segmentation": {"size": [512, 512], "counts": "PPi11o?001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O1O001O00001O001O001O00001O001O00001O001O00001O001O00001O001O00002N1O00001O001O00001O001O00iNYAU1j>0O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O010O0001N110O01O01O010O01O01O010O01M2N2M4M2M4M2N3L3N201O001O00001O001O00001O001O00001O010O00010O010O00010O010O00010O010O00010O010O00010O010O00010O010O01O01O010O01O01O010O010O00010O010O00010O010O0001O0N3L3N2M4M2M4M2M3N3L3N3L3N2N3L3N3L3N2M4M2M4M2MS_\\2"}, "image_id": 750, "id": 12711}, {"iscrowd": 0, "category_id": 1, "bbox": [400.0, 0.0, 92.0, 34.0], "area": 1516, "segmentation": {"size": [512, 512], "counts": "QPX61n?10001O001O00001O001O001O00001O001O0000O1M31O00001O001O00001O001O000`@GY?:c@I4NQ?`0l@CS?e001O001O00001O001O00001O001O00001O001O00N2M3N2N2M3N2M3N21O001O001O00001O001O00001O001O00001OO1M3N2N2M3N2M3N2MSP:"}, "image_id": 750, "id": 12712}, {"iscrowd": 0, "category_id": 1, "bbox": [6.0, 36.0, 319.0, 314.0], "area": 62136, "segmentation": {"size": [512, 512], "counts": "YY33V>N]C5a`;AoE1ZOl0e:VOmEO@l0`:WOnENDj0[:\\OmEMHg0Y:^OmENIe0V:AnELMb0S:DmENN`0R:DnENN`0Q:FmEMOa0KcN_9Q1eFNN`0LcN^9S1eFLO`0LdN]9R1eFNN`0LcN_9Q1eFNN`0LcN^9S1dFMOa0KcN_9Q1eFNN`0LcN_9R1dFMO`0LdN]9R1eFNNW1[9g3L3N2N3L3N3N100N3M2FjHgGZ7V8hHhGZ7U8;M2M3N3M2M4M2M3N3M2M4M2M4M2N2M4M2M4M2O11N1N2M4M2N3L3N3L3N2N3L3N3L3N2N3L3N3L3N3M2M3N3L3N3M2M3N3M2M4M2M4M2N2M4N110ON2DbISGb6K^Ik82XGb6K_Ii83YGe6e8QOaAo0c0mNT=2[BR1=POa=o0]BSOc=n0YBUOh=j0VBYOi=g0UB[Ok=f0QB^Oo=a0oAAQ>W110N1N3L3O20O0010O0010O0010O01N1N2010O010O00010O010O010O00010O010O00010O010O010O00010O010O00010O010O01O01SBSNe=m1YBUNh=R2O010O00010O010O01M2N2M4M2M4M2N2M4M2M4M2N3L3N2M4M2N3L3N2M4M2N3L3Nbe2"}, "image_id": 750, "id": 12716}, {"iscrowd": 0, "category_id": 1, "bbox": [53.0, 314.0, 339.0, 198.0], "area": 32743, "segmentation": {"size": [512, 512], "counts": "gnj01m?2M4M2N2M4M1N2O3M2M4M2M4M2N2M4M2O20O0001M2N3L3N3L3N2N3L3N3L3N2N3L3N3L3N3M2M3N3L3N3M2M3N3L3N3M2M4M2M3N3M2M4M2M3N3M2M4M2M4M2N2M4M2M4M2N2M4M2N3O001O01O01O010O00010O0010O010O00010O010O0010O0010O010O00010O010O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O001O000`NeD[N\\;a1hD^NX;`1jD`NW;\\1lDdNT;Z1oDeNQ;Y1QEgNP;U1TEjNl:T1VElNk:P1XEPOh:n0[EQOf:l0\\ETOd:i0`EVO`:h0bEXO_:d0eE[O[:c0gE]OZ:`0hE@X:=lEBT:R1bAkNb>Q18N3L3N210O001O0M3N3Bf@1]?Mf@2[?Jh@7`?000010O01O01O010O01O01O01a@JS?5k@NT?3h@0X?0f@3Z?90010N1010O0010O0001M2M4M2M3N3L3Ncdg1"}, "image_id": 750, "id": 12718}, {"iscrowd": 0, 
"category_id": 1, "bbox": [408.0, 370.0, 40.0, 24.0], "area": 351, "segmentation": {"size": [512, 512], "counts": "i[\\61m?2N3L301O010O01O010O010O010O01O010O01O010O010O010O01O010O01O010O010O010O0N2N3MPdo0"}, "image_id": 750, "id": 12719}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 371.0, 62.0, 91.0], "area": 3651, "segmentation": {"size": [512, 512], "counts": "c;`2a=O01O01O010O010O00010O010O0010O0010O0010O010O00010O010O0001O0M4M2M4M2M3N3L3N3L3N3L3N2M4N110O00010N1N3L3N3L3N2M4M2M4M2M4M2MfcP7"}, "image_id": 750, "id": 12720}, {"iscrowd": 0, "category_id": 1, "bbox": [289.0, 391.0, 46.0, 48.0], "area": 1213, "segmentation": {"size": [512, 512], "counts": "jl`41l?3M4M2M4L3N2M40O01RA[O_>e0^A_Ob>`0\\ABd>?XAEh>i000010O00010O010O00010O010O000mNWAP1l>00010O010O00M4M2M3M4M2M4M2M3M4M2MZSh2"}, "image_id": 750, "id": 12721}, {"iscrowd": 0, "category_id": 1, "bbox": [368.0, 407.0, 54.0, 38.0], "area": 1067, "segmentation": {"size": [512, 512], "counts": "Z]h51l?3N3M2M4M2M3N3M210O00010O010O00010O010O00010O010O00010O010O00010O010O00010O010O00010O010O0001M2N3L3N2M4M2Mjb\\1"}, "image_id": 750, "id": 12722}, {"iscrowd": 0, "category_id": 1, "bbox": [423.0, 415.0, 57.0, 45.0], "area": 1092, "segmentation": {"size": [512, 512], "counts": "_mc62k?4M2M4M2M4M2O1010O010O010O00010\\Og@?^?0O010O01O01O010O01O01O010O010O0k@Aj>>TADl>=PAGo>9o@IR?`00O10O102N3L3N2ON4M2N3L3N2N3L3N3L3N3M2M3Nnb?"}, "image_id": 750, "id": 12723}, {"iscrowd": 0, "category_id": 1, "bbox": [264.0, 436.0, 21.0, 30.0], "area": 341, "segmentation": {"size": [512, 512], "counts": "Z^T44j?2M3N3L3N3L3N2N3L310OO1M4M2M4M2M3N3M2M4MYRa3"}, "image_id": 750, "id": 12724}, {"iscrowd": 0, "category_id": 1, "bbox": [309.0, 462.0, 37.0, 30.0], "area": 635, "segmentation": {"size": [512, 512], "counts": "onj42l?2M4M2M4M2N3L300010O01O01O010O01O01O010O010O01O01O010N1N2M4M2M10002M4M2M_ab2"}, "image_id": 750, "id": 12725}, {"iscrowd": 0, "category_id": 1, "bbox": [384.0, 462.0, 60.0, 50.0], "area": 2016, "segmentation": {"size": [512, 512], "counts": "X_P61m?2N3L3b@HR?:k@JR?9k@IR?:l@IQ?d0N3L3N2N3O010O010O0O1N3O0010O010O00010O010O010O00001O001O001O00001O001O00001O001L3N3M2M3N3M2M4M2N3L3N2N3LTaQ1"}, "image_id": 750, "id": 12726}, {"iscrowd": 0, "category_id": 1, "bbox": [509.0, 501.0, 3.0, 6.0], "area": 12, "segmentation": {"size": [512, 512], "counts": "gon72m?2N2Z@"}, "image_id": 752, "id": 12727}, {"iscrowd": 0, "category_id": 1, "bbox": [493.0, 505.0, 13.0, 7.0], "area": 53, "segmentation": {"size": [512, 512], "counts": "nof72m?1O1O1N2001O1O1O001O1O1OQ`2"}, "image_id": 752, "id": 12728}, {"iscrowd": 0, "category_id": 1, "bbox": [248.0, 0.0, 90.0, 91.0], "area": 3296, "segmentation": {"size": [512, 512], "counts": "ZQl31n?2M3N1N3M3N1N3N1N3M3N1N3N2M2N3N2M2N3N1N3N2M2N3N0O1oA[Nh=g1UB\\Ni=f1UB[Nk=e1SB^Nl=l1O001O001OoNWBIl=4WBJj=4XBJk=4WBLh=2ZBNg=O\\B1d=M^B3a=KaB5`=HcB8\\=FfB;Z=ChBN2M2N3N1\\Oj@<_?N1N3N1N3M2Ommf2"}, "image_id": 753, "id": 12729}, {"iscrowd": 0, "category_id": 1, "bbox": [370.0, 0.0, 42.0, 18.0], "area": 388, "segmentation": {"size": [512, 512], "counts": "PPi51o?1O1O1O001O1O1O1O001O1O00O1O1N21O1O1O1O001O1O1O1O001O1O1O1OO1N2O1O1O1N2O1O1O1N2LV@1m?Oooa1"}, "image_id": 753, "id": 12730}, {"iscrowd": 0, "category_id": 1, "bbox": [402.0, 12.0, 60.0, 60.0], "area": 2516, "segmentation": {"size": [512, 512], "counts": "ZQY65k?`0@9F10000000000000000000000000O10O100000000000000UO\\OZBd0f=k00000000O100000000000000000000KkA_NU>a1500O1000O10000000000000000>B?A`0@Wnh0"}, "image_id": 753, "id": 12731}, {"iscrowd": 0, "category_id": 1, "bbox": [483.0, 29.0, 29.0, 
26.0], "area": 750, "segmentation": {"size": [512, 512], "counts": "n`a7i0W?00000O1000000000000000000000000000000000000000000000000SO"}, "image_id": 753, "id": 12732}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 33.0, 3.0, 6.0], "area": 12, "segmentation": {"size": [512, 512], "counts": "Q16k?N2Nn^n7"}, "image_id": 753, "id": 12733}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 41.0, 5.0, 9.0], "area": 29, "segmentation": {"size": [512, 512], "counts": "Y18i?00N2N1Oe^m7"}, "image_id": 753, "id": 12734}, {"iscrowd": 0, "category_id": 1, "bbox": [110.0, 50.0, 51.0, 72.0], "area": 2130, "segmentation": {"size": [512, 512], "counts": "jRg11m?3L3M3N3L3N3O010QAAZ>`0dABZ>`0dABT>IeAi03BU>GfAi03BU>f0gA^OU>X1N3L3N2O2O010O01O01O010O01O01O01L3N3L3N0O102M4M2N2M4M2M4CXA^Oj>`0XA]Ol>?WA_Ok>?1YA1e>4UA0g>4VANh>4VAOg>f0N3M2M4M2N3M2N3M2M4M2N3M2N1O0O1002N3M1O0HVB\\Nj=c1ZBZNg=e1[BYNg=e1mNcAT1]>iNfAV1b>010O0010O010O0010O010O0010O0010O010O0010O010O0010O0010O010O0010O0010O010O00N3N1010N1N3M2M301O010O010_OSAMm>1VAMk>1WAMl>0WALl>1WAMl>0WAMk>1TmS3"}, "image_id": 753, "id": 12737}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 88.0, 58.0, 72.0], "area": 2735, "segmentation": {"size": [512, 512], "counts": "k2>g03\\OFl=8hBYNRBd1V>N2M4M2N3M2N3L3N3M2N201O0010ON3M2N3L3N3J5N2MalR7"}, "image_id": 753, "id": 12738}, {"iscrowd": 0, "category_id": 1, "bbox": [200.0, 129.0, 45.0, 44.0], "area": 1161, "segmentation": {"size": [512, 512], "counts": "jTT34j?2N3L3N2M4M2N3L3N3L3O2O010O01O010O010O010O01O010O01O010O010O010O010O0O1N3TOPAc0X?N3M2N3M2M4M2N][U4"}, "image_id": 753, "id": 12739}, {"iscrowd": 0, "category_id": 1, "bbox": [447.0, 151.0, 63.0, 74.0], "area": 2437, "segmentation": {"size": [512, 512], "counts": "_eo62k?3N2M4L3M4M20010O00010dA]OY=c0dBA\\=>aBE_=<]BGc=9[BJe=5XBNh=3TB0l=0QB4o=LmA7S>IkA9U>GhA=X>e000010O010O00010O00010O00M4M2M4L3M3M11N4L3M4M2M3M4L3M3N3L3M3M4L3N3L3M3M4L3N2M4L3MTk0"}, "image_id": 753, "id": 12740}, {"iscrowd": 0, "category_id": 1, "bbox": [82.0, 155.0, 80.0, 46.0], "area": 2138, "segmentation": {"size": [512, 512], "counts": "]UY14j?2M3N3L3N3L3O2O0h@\\OU?g0010O00010O010O010N1N2N3N101O01O001M2M4M2N3O01O01O010O010O0010O0010O010O0010O010O0001L3N3O0010O010O00010O010O01O01O0M4M2N2N3L3N3M2N3L3N2N3M2Mkj^5"}, "image_id": 753, "id": 12741}, {"iscrowd": 0, "category_id": 1, "bbox": [246.0, 155.0, 26.0, 26.0], "area": 368, "segmentation": {"size": [512, 512], "counts": "TUk32m?3N1N3N2M2O2M3M3N1N10O0010O0011N2N3N2M2O2M3N1N3M3Nejg3"}, "image_id": 753, "id": 12742}, {"iscrowd": 0, "category_id": 1, "bbox": [180.0, 168.0, 51.0, 48.0], "area": 1328, "segmentation": {"size": [512, 512], "counts": "VVj21l?4M2N3L3N2M4M2N3L3N3O000O2M2M40O00010O010O01O01O010O010O00010O0ROVAg0j>VOXAONg0j>WO[A0Mi0T?0O0010O010O000N3M2M4M2M4M2N2M4MPZ\\4"}, "image_id": 753, "id": 12743}, {"iscrowd": 0, "category_id": 1, "bbox": [304.0, 171.0, 31.0, 29.0], "area": 467, "segmentation": {"size": [512, 512], "counts": "iUh41n?2N2N2M2O2N2N2N2M2O2N2N2O0100000O10O1O1M3N1O2M3N2N1O2M3N2N2NXZh2"}, "image_id": 753, "id": 12744}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 173.0, 33.0, 100.0], "area": 2190, "segmentation": {"size": [512, 512], "counts": "f5b0j0i0]dBR1l=kNWBS1k=kNXBQ1l=kNWBS1k=kNXB4Dd0W>UOWB5Eb0i>[OYAc0S?Cg@1\\?Kg@2\\?Lg@1e?MUY_7"}, "image_id": 753, "id": 12745}, {"iscrowd": 0, "category_id": 1, "bbox": [257.0, 192.0, 52.0, 49.0], "area": 1337, "segmentation": {"size": [512, 512], "counts": "jfP42l?2O2N2N2M3N1O2M3N2N2N2M2O2N2N2M3N101000000O01000M3N2N00O0100002M3N2N2N1N3N2N2N2N1N3N2N2N1N3N2N2N2M2O2NaYU3"}, "image_id": 753, "id": 
12746}, {"iscrowd": 0, "category_id": 1, "bbox": [340.0, 203.0, 37.0, 54.0], "area": 1652, "segmentation": {"size": [512, 512], "counts": "RWZ55\\??M300001O\\A\\Ol=d0fAJT>T1000000000010O0000000O100000000000000000001O01O0000000B>B>ASZS2"}, "image_id": 753, "id": 12747}, {"iscrowd": 0, "category_id": 1, "bbox": [226.0, 204.0, 26.0, 25.0], "area": 387, "segmentation": {"size": [512, 512], "counts": "kVa32k?4M2M3N3M2M4O001O01O010O010O01O010OO2M2N3L3N2N3L3N_iQ4"}, "image_id": 753, "id": 12748}, {"iscrowd": 0, "category_id": 1, "bbox": [381.0, 205.0, 52.0, 67.0], "area": 2249, "segmentation": {"size": [512, 512], "counts": "\\hn51^?a0J7O0000000001O0000000000N2N2000001O00000000000J6YOg0H8000000000000010O0000000000000M3001O00000000`NZBf0`>I7YO_YW1"}, "image_id": 753, "id": 12749}, {"iscrowd": 0, "category_id": 1, "bbox": [149.0, 216.0, 28.0, 32.0], "area": 507, "segmentation": {"size": [512, 512], "counts": "^gZ22l?3M2N2N3L3N3M2N3M2N3N110O010O010O01M2M4M2N3M2N2N3L3N3MRYW5"}, "image_id": 753, "id": 12750}, {"iscrowd": 0, "category_id": 1, "bbox": [286.0, 226.0, 32.0, 28.0], "area": 469, "segmentation": {"size": [512, 512], "counts": "eW_42m?1O2M3N1O2N2M2O2N2N1N10O1002M3O10O10000M2O2N2N2M3N1O2N2M3N2N1OchP3"}, "image_id": 753, "id": 12751}, {"iscrowd": 0, "category_id": 1, "bbox": [189.0, 243.0, 42.0, 53.0], "area": 1135, "segmentation": {"size": [512, 512], "counts": "ohn22l?2M4M2N3L3N2N3L3N3M2M4M2N3L3N2N3L3N1O02N2N3L3N3M2N2M4M2010O010OO2L3N2N3M2M4M2N3L3NVX\\4"}, "image_id": 753, "id": 12752}, {"iscrowd": 0, "category_id": 1, "bbox": [454.0, 244.0, 25.0, 30.0], "area": 463, "segmentation": {"size": [512, 512], "counts": "SXS7250`?3]@0`?9N3M2N3M2N3N110O010O01O0N2N3N101M2M4M2N3M2N3MVX`0"}, "image_id": 753, "id": 12753}, {"iscrowd": 0, "category_id": 1, "bbox": [219.0, 273.0, 82.0, 110.0], "area": 2775, "segmentation": {"size": [512, 512], "counts": "ck]31l?3N2M4M2FEm@>P?El@>Q?Em@=Q?;L3N3M2O2O01O010O01O010O01O010O01O0N3MOJgNiAY1W>jNeAW1[>50100O03N3L3N3M1N10O10O010LYAQOg>o030100O010000O3N2N3O001O01N1N3L3NLQO\\Al0c>XO\\Ae0e>]OZAd0f>[OXAg0f>[OYAf0e>]OXAf0e>R1aAlNb>Q18N2O2M2N2N2O2M2N2N3M2O1N2N3M2O1Nad5"}, "image_id": 753, "id": 12766}, {"iscrowd": 0, "category_id": 1, "bbox": [318.0, 369.0, 48.0, 48.0], "area": 1293, "segmentation": {"size": [512, 512], "counts": "b\\o43k?2N3M2N3L3N3M2N2N3L3N3M2N3M2M310O010O010OO2M200010O010O010OPOXAh0h>VOZAj0g>SO[An0l>O010O010M2M4M2N2M4M2N3M2M4M2NkcX2"}, "image_id": 753, "id": 12767}, {"iscrowd": 0, "category_id": 1, "bbox": [352.0, 383.0, 71.0, 60.0], "area": 1791, "segmentation": {"size": [512, 512], "counts": "e]`51m?3M2N3L3N2N3M210O010O01MO10O0100O10O03N3L3N3L3N3L3N30O010L3N3M2N2N3L3N3M210O010O00M4M2n@YOi>j0TAXOj>Q1L3N3O01O010O0010O0010N1N3L3N2N3L3N3L3N2M4M2N3L3N2MkS\\1"}, "image_id": 753, "id": 12768}, {"iscrowd": 0, "category_id": 1, "bbox": [482.0, 383.0, 26.0, 27.0], "area": 366, "segmentation": {"size": [512, 512], "counts": "Y\\a71n?3M2O1N3M2N2O2M2N2O2M2N2OO003N1N2N3M2O2M2N2N3N1N2Ncc1"}, "image_id": 753, "id": 12769}, {"iscrowd": 0, "category_id": 1, "bbox": [433.0, 414.0, 31.0, 27.0], "area": 380, "segmentation": {"size": [512, 512], "counts": "^mh61o?1N3M2O2M2N3N1N1O01O01O01O01O01O01O01O01O01O01O03M2O2M2N3N1Njbg0"}, "image_id": 753, "id": 12770}, {"iscrowd": 0, "category_id": 1, "bbox": [445.0, 426.0, 57.0, 54.0], "area": 1634, "segmentation": {"size": [512, 512], "counts": "Qnn61<0U?2i@1U?1i@0U?2i@0U?3h@0V?1h@1V??M2N2O2M2N2N3N0O001O01O00010O0000012M00010O0000010O000010O01O2N2O2M2N2O2M2N2N010O002N3N1N2N3M2O1N3M2ORb4"}, "image_id": 753, "id": 12771}, 
{"iscrowd": 0, "category_id": 1, "bbox": [0.0, 441.0, 84.0, 71.0], "area": 4452, "segmentation": {"size": [512, 512], "counts": "o=Q2P>O00001O00001O001O00001O00001O00001O00001O00001O001O00001O00001O00001O00001O00001O001O00N2N2M3M3M3M3M3N2O100001O001O00001O000N3L3M3N3L3M3M4L3M3N3L3M4L3M3M4M2M3M4L3M3MPbe6"}, "image_id": 753, "id": 12772}, {"iscrowd": 0, "category_id": 1, "bbox": [505.0, 469.0, 7.0, 15.0], "area": 61, "segmentation": {"size": [512, 512], "counts": "jnl72m?3M2O1N3M2N2ZA"}, "image_id": 753, "id": 12773}, {"iscrowd": 0, "category_id": 1, "bbox": [111.0, 472.0, 101.0, 40.0], "area": 2152, "segmentation": {"size": [512, 512], "counts": "nog12k?3M3N2M3M3M3N2M3M3M3N2M3M3N200001O00001O00001O00001O00001O00001O00001O00001O00001O00001O00001O00001O00001O00001O0]Ol@:T?Dn@d0kA\\OW>U12N2N2N1O0O10000001O2N2N2O10000000N2N2N00000AbA]O^>c0dA[O\\>d0gAZOY>f0iAXOX>g0jAVOY>h0iAVOY>h0iAVOY>h0a0N2N2N2N2N2N1N3N2N2N2N2Nb^_3"}, "image_id": 754, "id": 12776}, {"iscrowd": 0, "category_id": 1, "bbox": [202.0, 85.0, 33.0, 33.0], "area": 512, "segmentation": {"size": [512, 512], "counts": "PSU32l?3N2N2N2N2N2N2N2N2N2000000000000000O100000OO2N2N2N2N2N2N2N2N2N2Ng\\Z4"}, "image_id": 754, "id": 12777}, {"iscrowd": 0, "category_id": 1, "bbox": [365.0, 387.0, 147.0, 125.0], "area": 7653, "segmentation": {"size": [512, 512], "counts": "oof51n?1N2O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O11O1O1O1O1O1O001O1O1O1O1O1O001O1O1O1O1O1O1O001O1O1O1O1O1O001O1O1O1O1O1O1O001O1O1O1O1O1O001O1O00001O000000O1L4L4L4K5L4L4L4K5L4L4L4K5L4L4L4L4K5L4L4L4K5L4L4L4O1001OM3M3001O0000001O0000001O0000001O000000"}, "image_id": 756, "id": 12778}, {"iscrowd": 0, "category_id": 1, "bbox": [160.0, 0.0, 53.0, 57.0], "area": 1638, "segmentation": {"size": [512, 512], "counts": "ZP`22m?2N3N1N3M2O2M2VA@Q>b0mAAQ>a0mA@Q>b0mAAP>b0mA@R>a0lABS>?kABV>=hAEX>;fAHX>:fAEZ>=dAC\\>Q10O0000010O00100O1N2O1O100O1O101N2N3M2O2M2N2O2M2N3M2O2M2N2O2M2N3N1N3MV_e4"}, "image_id": 757, "id": 12779}, {"iscrowd": 0, "category_id": 1, "bbox": [217.0, 0.0, 82.0, 77.0], "area": 2556, "segmentation": {"size": [512, 512], "counts": "P`\\31o?1O2N1O1O2N1O2N1O2N00O1O1O100O1O100O1001O2N1O1O2N1O2N1O1O2N1O2N1O1O2N1O2N1O2N1dAmNi=T1UBnNj=T1SBnNl=S1SBoNk=S1RBoNm=R1QBQOm=P1QBROn=P1PBQOo=b1N1O2N1O110O0010O001M2N2N3N1N3M2O0O1O2lNfA?\\>@eA?_>\\OdAa0^>]OdAa0^>]OeAa0]>]OdAa0^>]OdAa0P?M010O3M2O2D\\@7i?M2O2MW^Z3"}, "image_id": 757, "id": 12780}, {"iscrowd": 0, "category_id": 1, "bbox": [314.0, 0.0, 26.0, 13.0], "area": 181, "segmentation": {"size": [512, 512], "counts": "PPm42n?1O2N1O2N1O1O2N1OO1O100O1O100O1O100O1O100O1O100O1OQPf2"}, "image_id": 757, "id": 12781}, {"iscrowd": 0, "category_id": 1, "bbox": [344.0, 0.0, 27.0, 13.0], "area": 179, "segmentation": {"size": [512, 512], "counts": "PP\\51o?1O2N1O2N1O2N1O2NO100O1O100O1O100O1O100O1O1O100O1O10P`V2"}, "image_id": 757, "id": 12782}, {"iscrowd": 0, "category_id": 1, "bbox": [409.0, 0.0, 36.0, 18.0], "area": 341, "segmentation": {"size": [512, 512], "counts": "P`\\62n?1O2N1O1O2N1O2N1O1O2N1O1OO1O100O1O1O100O1O100O1O1O100O1O1O100O1O100O1OQ`Q1"}, "image_id": 757, "id": 12783}, {"iscrowd": 0, "category_id": 1, "bbox": [494.0, 0.0, 18.0, 25.0], "area": 241, "segmentation": {"size": [512, 512], "counts": "PPg72n?1O2N1O1O2N1\\@G_?:_@H`?=OO1002N1O1O2O010O0K"}, "image_id": 757, "id": 12784}, {"iscrowd": 0, "category_id": 1, "bbox": [436.0, 9.0, 33.0, 33.0], "area": 568, "segmentation": {"size": [512, 512], "counts": "fPj61n?3M2N2O2M2N2N3M2O1N3M2N2O2M2N1O01O2O2M2N2O2M2N2N3N1N2N3N1N2N3N1NT_e0"}, "image_id": 757, 
"id": 12785}, {"iscrowd": 0, "category_id": 1, "bbox": [335.0, 12.0, 59.0, 58.0], "area": 1616, "segmentation": {"size": [512, 512], "counts": "faW51n?2N2N3N1N2N3M2O1BAXAb0f>@WAb0g>@WAb0g>@XAa0f>BWA>j>BeA<^>VO`A74a0^>WO`A55b0a>]O`Aa0b>]OaA`0Q?N3N1N2N3N1N3M2Ng[P5"}, "image_id": 757, "id": 12787}, {"iscrowd": 0, "category_id": 1, "bbox": [491.0, 36.0, 21.0, 43.0], "area": 451, "segmentation": {"size": [512, 512], "counts": "cae71o?1N2N3M2O1N3M2N3N1N2N3M2O1N3M2N3N1N2N3M2kN"}, "image_id": 757, "id": 12788}, {"iscrowd": 0, "category_id": 1, "bbox": [374.0, 38.0, 81.0, 109.0], "area": 3351, "segmentation": {"size": [512, 512], "counts": "PRk51n?2N2O2M2N2N3N1N2N3M2O1N3M2O1N3M2N2WAoNa>S1]AnNa>Z1N3M2O1N3M2N2O0O002N2O1N3MWOVB_Oh=a0ZB_Oe=a0\\B@a=`0bB_O]=b0cB^O]=b0cB_O\\=`0fB_O[=>gBBY=m<@TCa0m<]ORCf0m]OhAa0Z>^OgA`0[>^OhA`0Z>]OhAa0Z>]OhAa0Z>^OhA?[>^OgAa0Z>]OhAa0n>M2N2O2M2N2N3N1Nn[l0"}, "image_id": 757, "id": 12789}, {"iscrowd": 0, "category_id": 1, "bbox": [222.0, 43.0, 34.0, 40.0], "area": 708, "segmentation": {"size": [512, 512], "counts": "fQ_32n?2M2N3M2O1N3M2N3N1N2N3N1N2N3M2O2O00010OO2M2N2N3N1N2N3N1N3M2F_@2c?L_@3h?N3Mimo3"}, "image_id": 757, "id": 12790}, {"iscrowd": 0, "category_id": 1, "bbox": [292.0, 61.0, 52.0, 45.0], "area": 961, "segmentation": {"size": [512, 512], "counts": "kRb43l?2O1N3M2N2O2M2N2N3N0O1O0001O01O0001O01O0001O01O0001O01O0001O01O0001O01O00010O0001O2O1N3M2N2O2M2N2N3N1Ngmc2"}, "image_id": 757, "id": 12791}, {"iscrowd": 0, "category_id": 1, "bbox": [449.0, 61.0, 47.0, 58.0], "area": 1473, "segmentation": {"size": [512, 512], "counts": "bbP71o?1N2N3N1N2N3M2O2M2N2N3N1N2N3N1N2N3M2O1N3M2O2M2N2N3N1N2N10O002O1N3M2N2O2M2jN\\Am0f>QO\\An0f>PO[A0Nj0S?N2N3Ai@1X?Mj@1Y?Li@2Y?Mi@0Y?Ni@1[\\8"}, "image_id": 757, "id": 12792}, {"iscrowd": 0, "category_id": 1, "bbox": [358.0, 65.0, 30.0, 40.0], "area": 545, "segmentation": {"size": [512, 512], "counts": "hRc51n?2O1N3]OJWA8g>KWA6g>LWA7g>JWA8g>KWA6g>LWA7h>IVA9j>?010M2N2O2M2N2O2M2N3M2O1N3M2N2O2M2N2NVmm1"}, "image_id": 757, "id": 12793}, {"iscrowd": 0, "category_id": 1, "bbox": [47.0, 72.0, 53.0, 57.0], "area": 1575, "segmentation": {"size": [512, 512], "counts": "nbg02n?1N2N3N1N2N3M2O2M2N2N3N1N2N3JROUAP1i>5N3N1N2N3M2O2M2N2N2OO2N3N1N3M2N2O2O000O2N1N2N3M2O1N3Im@\\OU?b0m@\\OV?a06O1N3M2N2O2M2N2Nfl]6"}, "image_id": 757, "id": 12794}, {"iscrowd": 0, "category_id": 1, "bbox": [319.0, 107.0, 34.0, 36.0], "area": 617, "segmentation": {"size": [512, 512], "counts": "gco41n?2N2O2M2N2N3N1N3M2N2O2M2N2O2M2N3O00O2M2N2N3N1N3M2O1N3M2N2O2M2N2N3Nn[_2"}, "image_id": 757, "id": 12795}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 115.0, 41.0, 56.0], "area": 1329, "segmentation": {"size": [512, 512], "counts": "X4l0h>VOaAm0\\>UObAm0\\>UOcAl0\\>VOaAm0\\>UObAm0\\><0O0001O01O00010O2N3N1N3M2N2O2M2N3N1N201O001N1N2N3N1N3M2N2O2M2N3M2O1NZ[[7"}, "image_id": 757, "id": 12796}, {"iscrowd": 0, "category_id": 1, "bbox": [362.0, 117.0, 55.0, 59.0], "area": 1477, "segmentation": {"size": [512, 512], "counts": "TTe51n?3M2O1N3M2N2O2M2N3M2O1N3M2N2O2M2N2N3N1N210O01O01O01O01O01O01O01O0O1N3N1N2N1O01O01FRAAn>?TA_Ol>a0WA]Oj>a0XA]Oj>b0j@CT?g0N1N3M2O0O00011O1010O0N3N1N3M2O2M2N2N3N1N3M2O2M2N3N1NRkY6"}, "image_id": 757, "id": 12800}, {"iscrowd": 0, "category_id": 1, "bbox": [47.0, 135.0, 27.0, 28.0], "area": 397, "segmentation": {"size": [512, 512], "counts": "_dg03l?2O2M2N2N3N1N2N3M2O2M11O1N3N10010O0O2N1N2N3N1N2N3M2OUkj6"}, "image_id": 757, "id": 12801}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 138.0, 52.0, 46.0], "area": 1050, "segmentation": {"size": [512, 512], "counts": 
"Vem23l?2O1N3M2N3N1N3M2O1N3M2N01O01O01O01O0001O01O01O01O0001O01O01O01O0001O01O01O01O01O3M2O2M2N2O2M2N3N1N2N3MW[X4"}, "image_id": 757, "id": 12802}, {"iscrowd": 0, "category_id": 1, "bbox": [324.0, 147.0, 52.0, 60.0], "area": 1685, "segmentation": {"size": [512, 512], "counts": "PUR52m?2Y@M_?4_@N_?5^@M`?YOdAf0]>XOfAe0]>XOeAf0]>XOeAf0]>YOeAe0l>N2N3N1N3M2N2O2M2N3M2ORjS2"}, "image_id": 757, "id": 12803}, {"iscrowd": 0, "category_id": 1, "bbox": [291.0, 148.0, 24.0, 25.0], "area": 284, "segmentation": {"size": [512, 512], "counts": "hda43l?2O2M2N2N3N1O2O00010O00010OO2NO0002N2O2M2N2N3NkZR3"}, "image_id": 757, "id": 12804}, {"iscrowd": 0, "category_id": 1, "bbox": [40.0, 156.0, 19.0, 18.0], "area": 176, "segmentation": {"size": [512, 512], "counts": "UUd02m?3N1N2N3N1N1O01O0001O01O012M2N2O2M2NnZR7"}, "image_id": 757, "id": 12805}, {"iscrowd": 0, "category_id": 1, "bbox": [61.0, 160.0, 21.0, 20.0], "area": 207, "segmentation": {"size": [512, 512], "counts": "[en02m?3N1N3M2N2O0O0001O01O01O01O0003N1N3M2N2Oijf6"}, "image_id": 757, "id": 12806}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 163.0, 41.0, 41.0], "area": 833, "segmentation": {"size": [512, 512], "counts": "ae]12m?3N1N2N3M2O1N3M2N2O2M2N1O10O0003M2O1N210O00010O00O2M2N2N2O2M2N2N3M2O1N3M2N2N3N1NRjm5"}, "image_id": 757, "id": 12807}, {"iscrowd": 0, "category_id": 1, "bbox": [16.0, 165.0, 28.0, 26.0], "area": 339, "segmentation": {"size": [512, 512], "counts": "dU81n?3M2O2M2N3M2O1N00010O000010O00010O0000012M2N3N1N2N3M2OajY7"}, "image_id": 757, "id": 12808}, {"iscrowd": 0, "category_id": 1, "bbox": [259.0, 165.0, 28.0, 32.0], "area": 375, "segmentation": {"size": [512, 512], "counts": "]eQ41n?2N2N3N1N2N3M2N2O2M21O0O2N10010O0010O0001M2N3N1N2N3M2OSZ`3"}, "image_id": 757, "id": 12809}, {"iscrowd": 0, "category_id": 1, "bbox": [129.0, 170.0, 51.0, 48.0], "area": 1299, "segmentation": {"size": [512, 512], "counts": "SfP22m?3N1N2N3N1N2N3M2O1N3FZOUAh0j>YOTAi0j>ZOTAh0i>9N3N0O0001O01O00010O0002N2O1N2N0010O000002O2M2N2N3N1N2N3N1N2N3M2O2M2N2N3NPjU5"}, "image_id": 757, "id": 12810}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 182.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "f51Yjo7"}, "image_id": 757, "id": 12811}, {"iscrowd": 0, "category_id": 1, "bbox": [288.0, 183.0, 49.0, 60.0], "area": 1520, "segmentation": {"size": [512, 512], "counts": "[V`43h?N\\@3b?0[@2c?7N3N1N2N3M2O1N3M2N2O2M2N201O00001M2N2O2M2N2N3N1N2N3M0102M2iNiAb0Y>\\OjAa0Y>\\OiAc0X>\\OiAb0Y>\\OjAa0Y>\\OiAb0Y>\\OiAc0X>\\OjAa0l>N2N3N1N2N3M2O1N3MnXg2"}, "image_id": 757, "id": 12812}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 186.0, 13.0, 25.0], "area": 185, "segmentation": {"size": [512, 512], "counts": "k5h0W?11N2N3N1N3M2O2M2N3N1N3MgYi7"}, "image_id": 757, "id": 12813}, {"iscrowd": 0, "category_id": 1, "bbox": [256.0, 186.0, 25.0, 28.0], "area": 362, "segmentation": {"size": [512, 512], "counts": "RVP41n?3M2O1N3M2O2M2N2N3N1N30O010N1O2M200O2M2O2M2N2KZ@Nh?OhYc3"}, "image_id": 757, "id": 12814}, {"iscrowd": 0, "category_id": 1, "bbox": [41.0, 188.0, 66.0, 63.0], "area": 2273, "segmentation": {"size": [512, 512], "counts": "afd01n?2N2GNb@4[?Nc@4[?Nc@4\\?:M2N2N3N1N2N3M2O1N3M2N3N100010O00N3M2O1N3M2N2O2ON3M2N2O2M2N210O00010O0O2M2N2O2M2N2N3N0O0000101N2N2O2O000O2]Oh@9Z?Ei@8Z?Fg@8b?O2M2N2NeXZ6"}, "image_id": 757, "id": 12815}, {"iscrowd": 0, "category_id": 1, "bbox": [178.0, 215.0, 88.0, 107.0], "area": 3075, "segmentation": {"size": [512, 512], "counts": 
"VWi21n?2O2M2N3N1N2N3M2O2f@]OR?e0l@^OQ?j0O1N3M01O01O3N1N3M2N210cAdNS>\\1kAfNV>Z1gAiNX>W1gAjNY>`10O0010O00010O0010O0O2M2N2N3O0010O0001hN]AQ1b>nN`AQ1`>mNbAT1e>01OWAmNd>T1ZAmNg>W1O0001M2O20N1N2N3N1O20O00010O0O1N3M2O2M2N2O2M2N3M2O1N3M2N2O2M2N3N1N2N3M2O^fj3"}, "image_id": 757, "id": 12816}, {"iscrowd": 0, "category_id": 1, "bbox": [509.0, 218.0, 3.0, 5.0], "area": 10, "segmentation": {"size": [512, 512], "counts": "kfn71n?3N1UI"}, "image_id": 757, "id": 12817}, {"iscrowd": 0, "category_id": 1, "bbox": [123.0, 220.0, 38.0, 39.0], "area": 741, "segmentation": {"size": [512, 512], "counts": "bgm11n?2N3N1N2N3M2O2M2N2O2M2N2N10O0000010O000010O0000010O0001O01O01O3Ch@MZ?1h@NY?0i@NZ?0h@MZ?1h@MZ?1WY_5"}, "image_id": 757, "id": 12818}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 225.0, 50.0, 63.0], "area": 1961, "segmentation": {"size": [512, 512], "counts": "X7j0U?3M2O1N3M2N2O2M2N201O00010M2O1N3M2N2O2M20O1N2N3M2O1N30O01O01O01M2O1N3M2O1N3M2N2O2M2N2Dk@JX?4j@JW?4k@JW?4k@JX?3TO`Aj0a>TOaAj0b>TO`Ai0b>UO`Ai0m>O2_Oi@4Y?Kh@3[?Jh@4Y?Ji@4c?N3McgW3"}, "image_id": 757, "id": 12821}, {"iscrowd": 0, "category_id": 1, "bbox": [468.0, 235.0, 44.0, 51.0], "area": 1344, "segmentation": {"size": [512, 512], "counts": "hWZ72m?3N2M2N3N1N4L3N0O2N2O1N3M2N2O2M2N2O2M200010O0010O0N2N010O000101N3M2O2M2N3M3N1N1O10O000XH"}, "image_id": 757, "id": 12822}, {"iscrowd": 0, "category_id": 1, "bbox": [426.0, 264.0, 58.0, 53.0], "area": 1434, "segmentation": {"size": [512, 512], "counts": "gXe61n?3M2N2O2M2N2N3N1N3M2O1N3M2N2O2M2N3M2O1N3N11O010O00010O0N2N3N1N3M2N2O2M2N2N100O0000010O0001O02N2N2O2M2N3M2O1N3M2O1Nkf="}, "image_id": 757, "id": 12823}, {"iscrowd": 0, "category_id": 1, "bbox": [149.0, 268.0, 51.0, 42.0], "area": 1097, "segmentation": {"size": [512, 512], "counts": "mhZ22m?2N2O2M2N2N3N1N3M2N201O00N3M2N01O01O0001O101N2N2N3N1N2N3N1N2N3M010O0002N2N3N1N2N2N3N1]Oi@:Y?Eh@9[?Dh@9a?O1N2N3M2Nkfk4"}, "image_id": 757, "id": 12824}, {"iscrowd": 0, "category_id": 1, "bbox": [58.0, 279.0, 54.0, 51.0], "area": 1356, "segmentation": {"size": [512, 512], "counts": "bYm01n?3M2O1N3M2N3N1N2N3M2O1N3M2O1N3M2N2OO01O0001O01O0001O01O01O01O0001O01O0002N3N1N2N3M2O2M2N2O2M2N2N3N1N3M2N2ObfW6"}, "image_id": 757, "id": 12825}, {"iscrowd": 0, "category_id": 1, "bbox": [107.0, 298.0, 51.0, 42.0], "area": 1140, "segmentation": {"size": [512, 512], "counts": "kie11o?2M2N3M2O1N3M2N2O2M2O110ON2N100O00010O000010O3M2N2O2M2N3N1N2N3N1N00010O03M2N3NO00000101]OPA3R?Fo@M2:R?Fo@N0;R?FVA7[?N3M2Ole`5"}, "image_id": 757, "id": 12826}, {"iscrowd": 0, "category_id": 1, "bbox": [481.0, 300.0, 27.0, 27.0], "area": 375, "segmentation": {"size": [512, 512], "counts": "gi`71n?2N3M2O1N3M2O1N3M2N3N1N1O0012M2N2N2O2M2N2O2M2N2N3N1NUf1"}, "image_id": 757, "id": 12827}, {"iscrowd": 0, "category_id": 1, "bbox": [390.0, 301.0, 57.0, 54.0], "area": 1527, "segmentation": {"size": [512, 512], "counts": "mYS62m?2N2O2Z@J]?8b@J[??N3N1N1O01O02N3M2O2M2N2N3N1N3M2O2M201O0001O0O2M2N3N1N2N3M2O2M2N3N1N2N2OO01O00010O00011N2N3N1N3M2O2M2N2NeUP1"}, "image_id": 757, "id": 12828}, {"iscrowd": 0, "category_id": 1, "bbox": [157.0, 318.0, 77.0, 56.0], "area": 2107, "segmentation": {"size": [512, 512], "counts": "mj^21n?2JOZ@4c?N\\@3b?600O00003N1N3M2O2M2N3M2O1N0010O0000010O00101010O00010SAQOd>Q1[APOc>R1[APOd>V1001N1O0010O000001O0BXAFg>:\\ACd>=^AAc>>_A@a>b0`A\\O_>f0aAXO`>i0`AUO`>m083M2O1N3M10O001O2N2O2M2N2N3N1N2N2N3N1N2N3M2N2O2M2N2N\\eZ4"}, "image_id": 757, "id": 12829}, {"iscrowd": 0, "category_id": 1, "bbox": [228.0, 321.0, 19.0, 21.0], "area": 190, "segmentation": {"size": [512, 512], "counts": 
"UZb32m?3M2O2M000^@H_?7`@K_?4a@O\\?1d@1[?Oe@2\\?:OO1O2@`@;e?M2N2O2M2NaUT4"}, "image_id": 757, "id": 12830}, {"iscrowd": 0, "category_id": 1, "bbox": [65.0, 328.0, 52.0, 42.0], "area": 1146, "segmentation": {"size": [512, 512], "counts": "ijP11o?1N3M2O2M2N3M2O1N3M2O2O01M2O1N00010O00010O0003N1N2N3M2O2M2N3N1N2N010O0002O2M2N2N100O000_OPA4Q?FPAK2GVA7[?N3M2OmTU6"}, "image_id": 757, "id": 12831}, {"iscrowd": 0, "category_id": 1, "bbox": [447.0, 334.0, 27.0, 30.0], "area": 400, "segmentation": {"size": [512, 512], "counts": "gjo62m?2O2M2N2N3M2O1N3M2N2N3O010O00O2M2O1N3M2N2O2M2N3M2O1Nodb0"}, "image_id": 757, "id": 12832}, {"iscrowd": 0, "category_id": 1, "bbox": [352.0, 335.0, 55.0, 58.0], "area": 1461, "segmentation": {"size": [512, 512], "counts": "c[`51o?1N3M2O1N3M2N2O2M2N3N1N2N3M2O1N2N01OBROkAo0U>SOhAm0X>UOfAk0Z>WOeAi0[>YObAg0^>[O`Ae0`>=10O0000010O0002N2O2M2N2O2M2N2N3N1N2N3M2O2M2N2N3N1N2N3N1N2NeTd1"}, "image_id": 757, "id": 12833}, {"iscrowd": 0, "category_id": 1, "bbox": [125.0, 354.0, 48.0, 46.0], "area": 1179, "segmentation": {"size": [512, 512], "counts": "hkn11n?3M2O1N3M2O1N3M2N3N1N2N3N1N2N3N1N2N10O01O100O1O01O01O0001O01O01O3N1JTAUOo>i05N3M2O2M2N3N1N2N3M2O2M2NYTY5"}, "image_id": 757, "id": 12834}, {"iscrowd": 0, "category_id": 1, "bbox": [492.0, 358.0, 20.0, 19.0], "area": 201, "segmentation": {"size": [512, 512], "counts": "c[f71n?100O2N1O100O2N100O2N100O1O2O0O1O1O2O0jD"}, "image_id": 757, "id": 12835}, {"iscrowd": 0, "category_id": 1, "bbox": [300.0, 380.0, 59.0, 52.0], "area": 1547, "segmentation": {"size": [512, 512], "counts": "a\\f43m?1N2N3N1N3M2N2O2M2N3N1N3N11O010O01N1N2O2M0010O000010O000010O00010O0000010O00010O000010O000101N2N3N1N3M2N2O2M2Db@3a?Jb@4_?Kb@3g?N^S\\2"}, "image_id": 757, "id": 12836}, {"iscrowd": 0, "category_id": 1, "bbox": [410.0, 381.0, 29.0, 29.0], "area": 445, "segmentation": {"size": [512, 512], "counts": "Y\\]61o?2M2N2N3N1N2N3M2O1N3M2N2OO01O0001O2O2M2N2N3N1N2N3M2O1N3MeST1"}, "image_id": 757, "id": 12837}, {"iscrowd": 0, "category_id": 1, "bbox": [457.0, 394.0, 18.0, 13.0], "area": 161, "segmentation": {"size": [512, 512], "counts": "_lT75h?3N200010O000010O0000010O000001K4MeSb0"}, "image_id": 757, "id": 12838}, {"iscrowd": 0, "category_id": 1, "bbox": [502.0, 396.0, 10.0, 22.0], "area": 151, "segmentation": {"size": [512, 512], "counts": "n\\k71k?4M4K4L4M4O0000001cC"}, "image_id": 757, "id": 12839}, {"iscrowd": 0, "category_id": 1, "bbox": [248.0, 406.0, 66.0, 52.0], "area": 1670, "segmentation": {"size": [512, 512], "counts": "a]l32m?2N2N3N1N2N3N1N3M2I]On@e0P?]Oo@e0n>8N2O2M2N2N3NO01O0001O2O2M2N002O2M00010O0000010O0000010O0000010KUATOl>k06N3N1N3N100010OO1O2M2N0001O02N3M200O2M2O1NfbR3"}, "image_id": 757, "id": 12840}, {"iscrowd": 0, "category_id": 1, "bbox": [355.0, 406.0, 27.0, 27.0], "area": 392, "segmentation": {"size": [512, 512], "counts": "Qma52m?2O1N3M2N2O2M2N2N3N1N2N1O10O003M2O1N3M2N2O2M2N2N3N1NlbP2"}, "image_id": 757, "id": 12841}, {"iscrowd": 0, "category_id": 1, "bbox": [456.0, 411.0, 38.0, 30.0], "area": 770, "segmentation": {"size": [512, 512], "counts": "^]T71k?4M3L5K4M3O101O01Of@\\OX?f000010O00N30O000010O0000010O0000010O000010O0N2L5K4L4M3LSc8"}, "image_id": 757, "id": 12842}, {"iscrowd": 0, "category_id": 1, "bbox": [220.0, 423.0, 23.0, 22.0], "area": 273, "segmentation": {"size": [512, 512], "counts": "a]^31n?3M2N2O2M2N3M2O1N1O01O01O01O2N3N1N2N3M2O1N3M_RV4"}, "image_id": 757, "id": 12843}, {"iscrowd": 0, "category_id": 1, "bbox": [379.0, 428.0, 15.0, 15.0], "area": 115, "segmentation": {"size": [512, 512], "counts": 
"cmm51n?3N1N2N3M2OO01O0002N3N1N2N3M^bj1"}, "image_id": 757, "id": 12844}, {"iscrowd": 0, "category_id": 1, "bbox": [398.0, 432.0, 42.0, 65.0], "area": 1792, "segmentation": {"size": [512, 512], "counts": "W_W61k?5L3L4L4M4K4WOHgA;Y>f001O00001O3M1O0I70001O000OO2O11O00N2M3L4L4M3L4M3L4N2000000000006Jc0]OgaS1"}, "image_id": 757, "id": 12845}, {"iscrowd": 0, "category_id": 1, "bbox": [493.0, 439.0, 19.0, 40.0], "area": 378, "segmentation": {"size": [512, 512], "counts": "dnf7;_?60000000000001O00000000000000XOh00000YB"}, "image_id": 757, "id": 12846}, {"iscrowd": 0, "category_id": 1, "bbox": [194.0, 443.0, 49.0, 52.0], "area": 1290, "segmentation": {"size": [512, 512], "counts": "h^Q31n?2N3N1N2N3M2O1N3M2N2M[Oi@g0T?4O1FTO]Ao0`>SO^Ao0`>TO]An0b>SO]An0a>9O0001O01O0001O011N3M2N2O2M2N2N3N10001O0UOm@f0Y?M2O1N3M2N2O2M2N2N3NTQV4"}, "image_id": 757, "id": 12847}, {"iscrowd": 0, "category_id": 1, "bbox": [355.0, 445.0, 14.0, 14.0], "area": 101, "segmentation": {"size": [512, 512], "counts": "Rna51o?1N2N3M2O1N3M11N3N1N2N3M2OkQW2"}, "image_id": 757, "id": 12848}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 463.0, 29.0, 49.0], "area": 828, "segmentation": {"size": [512, 512], "counts": "d>\\1d>0O1O10O0001O03M2N3N1N2N3N1N3M2N2O2M2N3N1N2N3M2O1N3M2O2MdPa7"}, "image_id": 757, "id": 12849}, {"iscrowd": 0, "category_id": 1, "bbox": [204.0, 489.0, 48.0, 23.0], "area": 572, "segmentation": {"size": [512, 512], "counts": "o_V31n?100O1O1O100O1O1O100O1O1O100O1O100O1O1O100O1O1O100O1O1O100O1002N1O1O2N1O2N1O1O2N1O1O2N1O1O2N1OQ`Q4"}, "image_id": 757, "id": 12850}, {"iscrowd": 0, "category_id": 1, "bbox": [261.0, 495.0, 20.0, 16.0], "area": 153, "segmentation": {"size": [512, 512], "counts": "coR41o?2M2N2N3N1N201O01M2O1N00001O00002N2N3N[Pc3"}, "image_id": 757, "id": 12851}, {"iscrowd": 0, "category_id": 1, "bbox": [321.0, 497.0, 49.0, 15.0], "area": 410, "segmentation": {"size": [512, 512], "counts": "moP52m?2N1O100O1O100O1O1O100O1002N1OO100O1O1O100O1O1001O1O2N1O2N1O1O2N1OO1O100O1O1O100O1O12N1O1O2N1O2NQ`V2"}, "image_id": 757, "id": 12852}, {"iscrowd": 0, "category_id": 1, "bbox": [408.0, 504.0, 31.0, 8.0], "area": 174, "segmentation": {"size": [512, 512], "counts": "m_\\63i?4O100001O000000001O0000001O000000O10000001O0000001O0000001MUPT1"}, "image_id": 757, "id": 12853}, {"iscrowd": 0, "category_id": 1, "bbox": [497.0, 510.0, 15.0, 2.0], "area": 30, "segmentation": {"size": [512, 512], "counts": "noh72n?000000000000000000000000000"}, "image_id": 757, "id": 12854}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 85.0, 45.0], "area": 2270, "segmentation": {"size": [512, 512], "counts": "01o?1O1O1O1O1O1O1O1O1]@G]?:a@H^?>O1O1O1OO1O10j@_Om>b0QA@n>a0PAAo>`0o@BP?h0O1O1O1O1O1O1O1O1O1O1O1O1O1O1OO1O1O1O1O1O1O1O1O1O1O1O1O1O1001O1O1O1O1O1O1O1OO1O1O1O1O1O1O1O1O1O1O1O1O1O2N2N2N2N2N2N2N2N2N2N2N2Ne_e6"}, "image_id": 758, "id": 12855}, {"iscrowd": 0, "category_id": 1, "bbox": [160.0, 0.0, 72.0, 73.0], "area": 2900, "segmentation": {"size": [512, 512], "counts": "cQ`21m?2M4M2M3FGj@=R?Fl@@TAj0i>9L3N3M200N3L3M4M2O101O00001O001ON30O00010O0010O0010O\\NmA[1S>bNPB^1Q>_NQBb1W>O01O01O010O0hNeAl0Z>QOiAo0X>mNkAS1U>kNnAU1^>00010O010O00010O01O01O010O00mNWAn0o>L3N2M4L3N3L3N2M4L3Ngn[4"}, "image_id": 758, "id": 12856}, {"iscrowd": 0, "category_id": 1, "bbox": [228.0, 0.0, 25.0, 5.0], "area": 124, "segmentation": {"size": [512, 512], "counts": "PPb35k?0000000000000000000000000000000000000000000000OQ`Q4"}, "image_id": 758, "id": 12857}, {"iscrowd": 0, "category_id": 1, "bbox": [394.0, 0.0, 81.0, 53.0], "area": 2578, "segmentation": {"size": [512, 512], 
"counts": "ePU62l?3L3N2M4M2N3L3N3M20001O001O001O00001O001O001O00001O001O00001O001O001O00010XAlNb>T1\\AnNd>X10O0010O010O0010O0010O010O0010O0010O0010O0OO0001N3N2N3L3O20O010OO1N3L3N3Ig@B\\?<5M4M2N3Lg_b0"}, "image_id": 758, "id": 12858}, {"iscrowd": 0, "category_id": 1, "bbox": [266.0, 15.0, 76.0, 56.0], "area": 2519, "segmentation": {"size": [512, 512], "counts": "`QU41m?2M4M2N2M4M2N3L3N2N3L3N3O010O01O010O010O00010O010O0010O0N3L3O2N10001N101N100O2O001N100O2O0010O01O01O010O010O0N2N3L3N3L3N2N3FSA\\OP?b0RA\\OQ?a081O01O010O0M4M2M3N3L3NWod2"}, "image_id": 758, "id": 12859}, {"iscrowd": 0, "category_id": 1, "bbox": [241.0, 28.0, 16.0, 18.0], "area": 168, "segmentation": {"size": [512, 512], "counts": "Yah31m?2M4M2M3N3N1010O0010ON2M4M2M4MR_o3"}, "image_id": 758, "id": 12860}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 35.0, 25.0, 44.0], "area": 634, "segmentation": {"size": [512, 512], "counts": "S1\\1e>N2N001O2N2O100O1N2N2N2N2N2N2N2N2N2N2N2N2N3M2N2NX^c7"}, "image_id": 758, "id": 12861}, {"iscrowd": 0, "category_id": 1, "bbox": [373.0, 61.0, 76.0, 54.0], "area": 2307, "segmentation": {"size": [512, 512], "counts": "gbj51m?3L3N2N3M2M4M2N3L3N2O2O0010O010O0010O0010O010O0010O0010O010OQATOi>m0UAUOi>S1L3N3M20010O010O010O00010O010O010O010O0001N1N3M2M10003L3N2N3L3N3M2M4O01O01O0O2M2M4M2N2Mj]o0"}, "image_id": 758, "id": 12862}, {"iscrowd": 0, "category_id": 1, "bbox": [111.0, 62.0, 93.0, 68.0], "area": 3042, "segmentation": {"size": [512, 512], "counts": "Rcg12k?3N3L3N2O2O00010O001L3N2M4M210O01O01O010O01O010O00010O0N3O0001N1N3L3N3L30010O0N3M2N2M4M2N3O00010O010O010O00010O010OdNgAQ1Y>mNiAS1X>iNlAW1S>gNoAY1]>0O0010O0010O0010O0010O010O00010O010ORO\\A`0d>^O_Ab0a>ZObAf0^>XOdAg0l>M2M4M2M3N3L3N3M2Mili4"}, "image_id": 758, "id": 12863}, {"iscrowd": 0, "category_id": 1, "bbox": [229.0, 71.0, 76.0, 63.0], "area": 2666, "segmentation": {"size": [512, 512], "counts": "Ycb32k?4M2M3N3L3N3L3N2M4O0010O0010O010O010O00010M2L5O0O101N101O0O1N3M2M4O01O010O01O010O01O01O0N3N10010O010O01O01O010O01TOfA0Z>NiA2V>KmA5T>HoA8P>BhAE;i0n=^OYBb0f=\\O\\Bd0d=ZO^Bf0_>0O01M2N2M4M2N3L3N2M4Mb\\W3"}, "image_id": 758, "id": 12864}, {"iscrowd": 0, "category_id": 1, "bbox": [61.0, 108.0, 114.0, 72.0], "area": 3861, "segmentation": {"size": [512, 512], "counts": "^dn01m?2M4M2N3L3N3L3N2M4O010O01O01O010O01O01O010O010O00010O01O0N3M12L3N3M2O2O01OM4M2N3L3N2O2O010O01O010O01O010O01O01O010O01O01O010O01O010O01O010O01O01O01jN`Ak0_>SOdAl0]>POfAP1Z>nNhAS1c>O010O00010O010O01O01O010O010O00010O010O00010O010N1M3N3L3N3L3N3M2M3N3L3N^[X5"}, "image_id": 758, "id": 12865}, {"iscrowd": 0, "category_id": 1, "bbox": [339.0, 121.0, 89.0, 59.0], "area": 3118, "segmentation": {"size": [512, 512], "counts": "jdY53j?3N3L3N3M2M3N3M2M4O010O01O010O01O0O0N10O11N3N2O2O010O01O01O010O010O01O01O010O010VAoNc>W110O00010O010O001L3N2N3M2010O010O00010O010O001M2N2N3L3N1O0O1002010O000M4M2N3L3N3M2M3N3Jd@D^?97M2N2MTlY1"}, "image_id": 758, "id": 12866}, {"iscrowd": 0, "category_id": 1, "bbox": [212.0, 136.0, 68.0, 63.0], "area": 2707, "segmentation": {"size": [512, 512], "counts": "[UZ33i?4M3M4K4M3M4K4N21O01O01O01O00010O0001O01O000O2L3M3M3010O00010O0001O01O0001]AiN\\>]100010O0001O0M3M3N3O01O01O01O01O01O0001OTOmAKT>2oAMQ>3oANP>3oAMR>2nAJV>6kAFX>JeAO64X>IfAO64Y>IeAO64Y?Mdjc3"}, "image_id": 758, "id": 12867}, {"iscrowd": 0, "category_id": 1, "bbox": [6.0, 156.0, 84.0, 82.0], "area": 3396, "segmentation": {"size": [512, 512], "counts": 
"kV31m?3L3N3L3N2M4M2N3O01O01j@YOR?f0l@]OS?h00010O001L3N2M4M2N30L301O01O01O010O01O01O01O0M4M0O102M3N2M4M2M4M2M3N3L3N3O00010O010O0001O0M4M2M3N3L0HoAeNQ>Z1RBcNn=^1TB`Nl=_1T1jAPOV>]11O0001O01O0L4L4LhAgNR>X1jAlNV>`1O0001O0001O01O0001O0001O01O0001O01O0001O01O0001K4L4K5L5L31O000O1L5K4000001O0001O01O0001O0001O01lN]Ai0c>RObAn0h>10N1L4L4L5K4K5L4LPZg1"}, "image_id": 758, "id": 12870}, {"iscrowd": 0, "category_id": 1, "bbox": [93.0, 184.0, 70.0, 77.0], "area": 2961, "segmentation": {"size": [512, 512], "counts": "^g^13k?2M3N3L3N3L300010O010O0003N0O01O01O010O0N2N3L3N3L3N2M4M2GdNkA`1R>bNlA`1Q>9N3L3N3L111001O01O010O01O01O010O01O01O010O01O000M4M2M4M2TOmAHV>6lAHW>4mAHV>6lAHW>4mAHV>6lAHV>5mAHW>5lAHV>5m0M2MnY^5"}, "image_id": 758, "id": 12871}, {"iscrowd": 0, "category_id": 1, "bbox": [227.0, 188.0, 12.0, 16.0], "area": 111, "segmentation": {"size": [512, 512], "counts": "Vfa33k?2M3N3L310O010OM3N3L3NSZX4"}, "image_id": 758, "id": 12872}, {"iscrowd": 0, "category_id": 1, "bbox": [163.0, 191.0, 14.0, 14.0], "area": 115, "segmentation": {"size": [512, 512], "counts": "Xfa21l?3N3M2N201O010O010ON3M2M3NPZW5"}, "image_id": 758, "id": 12873}, {"iscrowd": 0, "category_id": 1, "bbox": [167.0, 196.0, 61.0, 77.0], "area": 2486, "segmentation": {"size": [512, 512], "counts": "Shc23b?0c@3[?Oc@4Y?;N2M4M2N3O001ON02N2M3N3O010O010O0O1N3L3N3M2M3N3L3N3M2M4M2M3O20O0M3N3L3N3M2M4M2M3N3M2M4M2N3L3N2M4M2N3L3N2M4M2N3L3N3Mbi]4"}, "image_id": 758, "id": 12874}, {"iscrowd": 0, "category_id": 1, "bbox": [160.0, 207.0, 9.0, 9.0], "area": 56, "segmentation": {"size": [512, 512], "counts": "dV`22l?2M4O01O01O01O0MbY[5"}, "image_id": 758, "id": 12875}, {"iscrowd": 0, "category_id": 1, "bbox": [227.0, 207.0, 53.0, 79.0], "area": 2737, "segmentation": {"size": [512, 512], "counts": "fha31j?5L5K4K5L4K5L5K4K5L4K6K4L4N201O01O00L4K5L2210O0000010O0000010O000000010O0000010J5L4_ORBkNS>Q1RBjNR>R1RBjNS>P1a0L4K5L5K4K5L4K_ic3"}, "image_id": 758, "id": 12876}, {"iscrowd": 0, "category_id": 1, "bbox": [293.0, 238.0, 73.0, 66.0], "area": 2635, "segmentation": {"size": [512, 512], "counts": "nhb42j?4K5L4K5L5J5L4K5L5J5M3001O01O0001O0O1M300010O00000aAeNZ>`100000010O00000010O00000010O00000010O00000010OAhAVOX>f0lAZOT>a0QB_Oo==VBCj=7[BIe=3_BMa=NdB2\\=JiB5a>1O0000010O0000010O000000010OK5LofX2"}, "image_id": 758, "id": 12877}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 325.0, 89.0, 67.0], "area": 3498, "segmentation": {"size": [512, 512], "counts": "j:?_?2M4L3M3N3L3M3N30O00XAkNf>W100010O01O01O010OO101O00010O0010O2N010O00010O010O00010O2N10O01O01O010O0M3N3L3M3eA`NV>e100010O00010O010O00010O0010O00010O01M2M4L3N3L3M4L3N3O0010O01M2M4L3M4L3M4L3MXUc6"}, "image_id": 758, "id": 12878}, {"iscrowd": 0, "category_id": 1, "bbox": [162.0, 365.0, 76.0, 59.0], "area": 2979, "segmentation": {"size": [512, 512], "counts": "f\\a21l?4M2@LQA7l>LQA8k>KRA8l>?M4L301O01O01O000M4N10010O01O01O010O000O2L3N201O01O01O01O01O01L3M3OdAfNU>Y1hAkNW>_10010O0001O0M3M4M20010O00010O00010O00010O0N2N3L3M3M4L3N2M4L3M4L3N2M4LVdX4"}, "image_id": 758, "id": 12879}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 390.0, 71.0, 74.0], "area": 2951, "segmentation": {"size": [512, 512], "counts": "V00000fA`NS>`1kAbNU>^1iAdNW>c10000000000000000000000000N2N2N2N2N2N2N2N2N2N2NAoNQBo0P>SOnAm0R>UOlAk0T>WOjAk0T>WOjAk0T>WOjAk0T>WOjAk0T>WOjAj0U>`0000000001O2N2L4N2N2N2N00002N2N2N2N2N2KPAWOR?g05N2N2N2N2N2N2N2N2N2N2N2NSSl6"}, "image_id": 758, "id": 12880}, {"iscrowd": 0, "category_id": 1, "bbox": [261.0, 390.0, 84.0, 64.0], "area": 3122, "segmentation": {"size": [512, 512], "counts": 
"PmR41m?3M2IM_@6^?7N3M2M4M2N3N110O0PASOn>o0010O010O010O010ON2N30O010O[AmN]>T1`AnNa>Q1]AROb>V110O010O010O0010O010O01M2N3M2N3M2O2O010O010O010O010OO2M2N3M2N3M1O00000021ON3M2N3M2JWASOl>j0601O0O2M2N3L3N2N3M2N3M2NdSc2"}, "image_id": 758, "id": 12881}, {"iscrowd": 0, "category_id": 1, "bbox": [135.0, 418.0, 79.0, 66.0], "area": 2852, "segmentation": {"size": [512, 512], "counts": "ZnS23k?2O2M2N3N2M2N3N1N3G[OSAh0k>ZOSAg0k>9M2O2M3M2O2M3M2O2N11000O010O01000O010O0100M2O2M2O2O10O10O10O010O10O10O010O10O10O01N1N3M30O010O01oNYAh0g>VO[Aj0d>TO^Al0l>0O0100N1N3N2M2N3N1N3M3N1N3M2O2Mdad4"}, "image_id": 758, "id": 12882}, {"iscrowd": 0, "category_id": 1, "bbox": [227.0, 446.0, 79.0, 66.0], "area": 2889, "segmentation": {"size": [512, 512], "counts": "hna32l?2N3M3M2O2M3M2N3M2O200O010O10O010O10O1M2N3N1N3M3M201000O010O0100O010O10O10O010O10O01N1aAfNV>]1gAfNV>c1N3M3N1O2O010O01O001O1O001O001VOmAES>9PBFQ>8PBHP>6SBIn=^OkAa091m=[OmAa093o=KTB4m=IUB7k=HWB7j=FXB:h=D[B;f=B]BM3M2O2M2N3N^`V3"}, "image_id": 758, "id": 12883}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 464.0, 5.0, 10.0], "area": 27, "segmentation": {"size": [512, 512], "counts": "`>:g?N3M2N2N\\Qm7"}, "image_id": 758, "id": 12884}, {"iscrowd": 0, "category_id": 1, "bbox": [144.0, 473.0, 19.0, 18.0], "area": 180, "segmentation": {"size": [512, 512], "counts": "P_X23k?2O2M3M2010O10O10O010O10O10M2O2M2N3NPQ^5"}, "image_id": 758, "id": 12885}, {"iscrowd": 0, "category_id": 1, "bbox": [101.0, 475.0, 81.0, 37.0], "area": 1651, "segmentation": {"size": [512, 512], "counts": "nob12l?2O1N2N2O1N2N2O1N2O1001O00N2N2O1N2N2O1N2N2O1N2N2O100001O1O001O001O1O001O001O1O001O001O1O001O001O1O001O001O1O001O001O1O001O001O1O001O001O1O001O001O1O001O001N2N1NW`T5"}, "image_id": 758, "id": 12886}, {"iscrowd": 0, "category_id": 1, "bbox": [203.0, 500.0, 26.0, 12.0], "area": 159, "segmentation": {"size": [512, 512], "counts": "ooU31m?2N2O1N2N2N21O001O1O001O001O001O1O001O001O001O1O00QP]4"}, "image_id": 758, "id": 12887}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 501.0, 2.0, 3.0], "area": 4, "segmentation": {"size": [512, 512], "counts": "e?3n?N[`n7"}, "image_id": 758, "id": 12888}, {"iscrowd": 0, "category_id": 1, "bbox": [239.0, 505.0, 17.0, 7.0], "area": 68, "segmentation": {"size": [512, 512], "counts": "oog31m?2N2N200001O001O001O1O001O001O00Q`o3"}, "image_id": 758, "id": 12889}, {"iscrowd": 0, "category_id": 1, "bbox": [432.0, 89.0, 69.0, 171.0], "area": 7948, "segmentation": {"size": [512, 512], "counts": "SVh64l?7H8I7I7fLQOUGV1d8QOUGV1d8QOUGU1e8ROTGU1e8QOTGV1e8ROTGn0l8YOmFg0S9@fF`0Z9G_F8b9OWF1i96PFJo9>iECW:d0bE\\O^:k0[EUOe:S30O1000O10000000O1000O10000000O1000O10000000O1000O1007I7I7H8I7I7I7I7I7I7H8I7I7I7I7I7I6I8I7I7I7I7I7IWX5"}, "image_id": 759, "id": 12890}, {"iscrowd": 0, "category_id": 1, "bbox": [489.0, 108.0, 23.0, 33.0], "area": 627, "segmentation": {"size": [512, 512], "counts": "`cd73l?8I7I7I5K00000O0100000000000O0100000000000O0eL"}, "image_id": 759, "id": 12891}, {"iscrowd": 0, "category_id": 1, "bbox": [118.0, 125.0, 56.0, 101.0], "area": 3864, "segmentation": {"size": [512, 512], "counts": "TTk14l?5J7J6J6J5K6J6I7J5K6J6J6I6K6J6J4L00O01000000000O010000000O10O10000000O010000004L6I7J5K6J6J6J5J7J6J6J5K6I7J6J5KYiX5"}, "image_id": 759, "id": 12892}, {"iscrowd": 0, "category_id": 1, "bbox": [496.0, 147.0, 16.0, 102.0], "area": 893, "segmentation": {"size": [512, 512], "counts": "eTh72n?7H8I7I7I7I8H7I7I7H8I7I7I7I6J0]K"}, "image_id": 759, "id": 12893}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 173.0, 17.0, 82.0], "area": 966, "segmentation": {"size": [512, 512], "counts": 
"]5b2^=000000O102N7I7I7I7H8I7I7I7I7I7H\\Xg7"}, "image_id": 759, "id": 12894}, {"iscrowd": 0, "category_id": 1, "bbox": [35.0, 190.0, 73.0, 55.0], "area": 2904, "segmentation": {"size": [512, 512], "counts": "Zfa06j?5J7J6J5K6J6I4MO10000000O10O10000000O10O100000O10O10000000O10O10000000O10O10000000O010000000O10O10000000O10O10000000O010000000O11N7J6J6J5K6J6I6KihY6"}, "image_id": 759, "id": 12895}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 318.0, 275.0, 194.0], "area": 47624, "segmentation": {"size": [512, 512], "counts": "[a0iABV>>iADV>=hADX>=eAFZ>;dAG[>9dAI[>8bAJ^>7`AK_>6_AL`>k0O001O1O1O001O1OO1O1O1N2O1O1N2O1O1N3N1O2M2O000O2O1N2O1O1N2O1O1N2O1O1N2O1O2M3N1O2M3N2Nloi4"}, "image_id": 760, "id": 12899}, {"iscrowd": 0, "category_id": 1, "bbox": [225.0, 0.0, 80.0, 58.0], "area": 2337, "segmentation": {"size": [512, 512], "counts": "Y``31n?2N2N2N2N2N2N3M2O1N2O1O1O1O1O1O1O1O2N1O1O1O100000000TAoNf>S1XAoNf>W1N201O0001O000000000000N2N2N0000000001O001O3M2N2N2O1N2N2O1000000O1N3M2N2N2N0001O01O2N2N3M2N2N2N2N2N2N2OR_W3"}, "image_id": 760, "id": 12900}, {"iscrowd": 0, "category_id": 1, "bbox": [303.0, 0.0, 12.0, 7.0], "area": 47, "segmentation": {"size": [512, 512], "counts": "P`g41o?2N1O1O1O1OO1O1O1O1O1OQ`R3"}, "image_id": 760, "id": 12901}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 12.0, 32.0, 52.0], "area": 989, "segmentation": {"size": [512, 512], "counts": "k0U1j>0000001O0000010O2N0000001O000002N3M2N2N2N2O1N2N2@i@4Z?Jg@4[?Jg@4[?Jg@4[?Jg@4d?N2Ojn_7"}, "image_id": 760, "id": 12902}, {"iscrowd": 0, "category_id": 1, "bbox": [177.0, 35.0, 76.0, 59.0], "area": 2423, "segmentation": {"size": [512, 512], "counts": "fah21n?2N2O1N2N2N2N2N2N2m@_Od>d0YA^Oe>d0YA^Oe>d0YA^Oe>d0ZA]Od>e0ZA]Od>Q1N2O1N2N2N2N201O000000O1N2N2N2N2N2O1N3M2N2N2N2N20N2N2N2N2N2N2N2N02N2N2N3M1O00000001O00000001O2N2N2N2N2Ij@AX?=j@AX?=7N2N2N2O1N2N3MW^Q4"}, "image_id": 760, "id": 12903}, {"iscrowd": 0, "category_id": 1, "bbox": [89.0, 39.0, 91.0, 70.0], "area": 2805, "segmentation": {"size": [512, 512], "counts": "ga\\12m?2N2N2N2O2M2N2N2N2N3M2j@YOP?m0N2O1N2N3M2O1000000OO01O002N2O11O0001O0000000010O04L00O100O100O2fN_AR1a>lNaAS1`>lNaAT1_>jNcAU1e>O100O2O0O1O1M3N2O1O00000000000001O01O00000000000000003I401O000001O000000000000000000IbmU5"}, "image_id": 760, "id": 12904}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 185.0, 3.0, 5.0], "area": 10, "segmentation": {"size": [512, 512], "counts": "i55l?N2OUZn7"}, "image_id": 760, "id": 12905}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 193.0, 57.0, 85.0], "area": 2237, "segmentation": {"size": [512, 512], "counts": "T7S1l>2N3N1N2N2N2N2N2N3M2N2O1N2N000000001O01VOPBCP>=RBAn=`0SB^Om=b0UB\\Ok=d0XBYOh=g0ZBWOf=i0\\BUOd=k0^BTOa=l0aBRO`=m0bBQO^=o0dBoN\\=R1eBlN[=T1j0N2N1O0000001O01O0000000002N2Go@@S?>o@AS?\\OfAd0Z>a0000000000O0100000000000O0100000000000O10O10000000RObA:^>FiA3W>MQBJo=7XBBh=>m000O100000O100000O13M00OJ^OPAb0P?700O100000O100000O100000O100000O1006J8H7HbnZ3"}, "image_id": 765, "id": 12916}, {"iscrowd": 0, "category_id": 1, "bbox": [316.0, 31.0, 80.0, 57.0], "area": 2535, "segmentation": {"size": [512, 512], "counts": "jQn41m?3N2M2O2N2M210`@JT?b0O1M2O2N1N3N1N3N0101O1O1OImNaAR1_>POaAn0^>UO`Ak0a>UO^Am0b>810O100O001N2O001O1O1O001O1O10O1O1M2O2N2M2O2jNYAo0j>oNWAP1n>N0O10O10O010O10O2O2N2M3O01M3N1N3N2N2M2O2N2M2O2M3N2N1N3N2N1N3Neni1"}, "image_id": 765, "id": 12917}, {"iscrowd": 0, "category_id": 1, "bbox": [42.0, 37.0, 64.0, 52.0], "area": 2245, "segmentation": {"size": [512, 512], "counts": 
"kQe04l?7I7I7I5J0100000000000O01000000000JVOVAj0j>5100000O100000O1000FnNfAR1Z>UO^Al0b>9000O0100000000000O0100000000000O0100004L7I2NO0100000000000O01\\OVA2o>IXA0S][6"}, "image_id": 765, "id": 12918}, {"iscrowd": 0, "category_id": 1, "bbox": [427.0, 59.0, 27.0, 25.0], "area": 533, "segmentation": {"size": [512, 512], "counts": "mae65k?:F1O002N1O1O1O1O1N10O1000000000000000O10O10001O;E:F_ml0"}, "image_id": 765, "id": 12919}, {"iscrowd": 0, "category_id": 1, "bbox": [482.0, 73.0, 30.0, 42.0], "area": 952, "segmentation": {"size": [512, 512], "counts": "oRa71`?1l@3Q?1k@3Q?0l@4o>1m@0R?>1000O10O10O4M2NO10O2O4L00O0100000O0100000O010000gM"}, "image_id": 765, "id": 12920}, {"iscrowd": 0, "category_id": 1, "bbox": [396.0, 83.0, 51.0, 57.0], "area": 1611, "segmentation": {"size": [512, 512], "counts": "YSV61n?3M2O1N3M2N3N1N2N3N1N3M2N2O2M2N3N1N2N3M2O2M2N2O2M1O0001O01O01O2N3N1N2N3N1N3M2N2O2M2N3N1N2N3Cf@O\\?0e@O]?Ne@0]?Nf@O\\?0e@OnkP1"}, "image_id": 765, "id": 12921}, {"iscrowd": 0, "category_id": 1, "bbox": [209.0, 98.0, 86.0, 56.0], "area": 3124, "segmentation": {"size": [512, 512], "counts": "mcX33l?7TOGeA`0T>GeA`0S>HfA>T>g00O10000000O1000O10000000O1000O10000000O1000O1000004L7I7I3L10O1000000000O10O1000000000O10O1000000000O0100000000000O0100000000000O0100000000000O01000000006J7H8I7Ie[\\3"}, "image_id": 765, "id": 12922}, {"iscrowd": 0, "category_id": 1, "bbox": [12.0, 99.0, 19.0, 23.0], "area": 346, "segmentation": {"size": [512, 512], "counts": "US62n?7I8H4K1000O1000000000O10O1000007I7I_\\`7"}, "image_id": 765, "id": 12923}, {"iscrowd": 0, "category_id": 1, "bbox": [46.0, 99.0, 67.0, 47.0], "area": 2141, "segmentation": {"size": [512, 512], "counts": "[Sg06j?8H8H8G9H1OO1000000000O1000O1000000000O1000O102N0JVAVOj>j060LTOTAl0l>4000O10O10000000002N1O0O100000O100L4000O10000005K8G10O10000000I700000O13M8HW\\W6"}, "image_id": 765, "id": 12924}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 103.0, 3.0, 18.0], "area": 33, "segmentation": {"size": [512, 512], "counts": "W3b0e?I7I[\\n7"}, "image_id": 765, "id": 12925}, {"iscrowd": 0, "category_id": 1, "bbox": [326.0, 109.0, 59.0, 87.0], "area": 3153, "segmentation": {"size": [512, 512], "counts": "iSS51m?3N2N2jAK^<7dBG64S=8dBF83S=9bBG83U=9^BG;2U=l0iBVOV=j0hBXOX=i0fBYOY=k0bBWO]=g1O001O1N2O001O1O1O001O1N2O1O0000O1O1N101N2O1O1N8I1N2O1O1N2oNSBNm=3[BCg==aBZO`=e0n0000000000O1000O10003M8H8H8Hd[o1"}, "image_id": 765, "id": 12926}, {"iscrowd": 0, "category_id": 1, "bbox": [477.0, 135.0, 15.0, 12.0], "area": 154, "segmentation": {"size": [512, 512], "counts": "Xd^77i?4L000000000O100000M30000000ik9"}, "image_id": 765, "id": 12927}, {"iscrowd": 0, "category_id": 1, "bbox": [400.0, 156.0, 66.0, 91.0], "area": 2966, "segmentation": {"size": [512, 512], "counts": "kUX62m?2N2N3N1N3M2N2N3N1I]On@e0P?]On@f0o>7O2M2N2N3M2N2O2M2N3N1lA[Nm=d1QB_Nn=b1oA`No=b1PB`Nm=k1O1N3M2N2N3N1N3M2N2N3NO01O02N2O2M2N2TOcBRO_=l0cBSO_=j0dBSO^=L[B?9C^=MZB?9B_=MZB?:B^=L[B?9C^=L[B?9C^=L[B?:C\\=L[Ba0:A^=LXBb0=_Oh=a0k002N2O1N2N3M2N2O1N3Mlif0"}, "image_id": 765, "id": 12928}, {"iscrowd": 0, "category_id": 1, "bbox": [213.0, 158.0, 81.0, 75.0], "area": 2878, "segmentation": {"size": [512, 512], "counts": "ffZ32n?2M3N2M2UOFnAFnAFnAHlA7T>LiA5c=ZOhBc0C2e=^OeBc0C0h=_OcBZ1]=iN`BX1`=jN^BU1b=nN[BS1e=oNYBP1g=d0101N3N2M3N2CmAhNV>V1lAhNT>W1oAfNQ>\\192M3N2M3N0O010O01DjAnNW>Q1kAlNX>Q1kAmNW>Q1kAlNX>Q1=OO010O010O010O010O010O03M3N2M2O0O010O010O010O010O010O010O2O2M3N2M3N2M3N2M3NXj\\3"}, "image_id": 765, "id": 12929}, {"iscrowd": 0, "category_id": 1, "bbox": [487.0, 162.0, 25.0, 32.0], "area": 700, "segmentation": {"size": [512, 
512], "counts": "jec72V?h001O1O2N1O1OO1N2O1O1O1000000005K000000000000000nJ"}, "image_id": 765, "id": 12930}, {"iscrowd": 0, "category_id": 1, "bbox": [341.0, 205.0, 66.0, 92.0], "area": 3167, "segmentation": {"size": [512, 512], "counts": "^gZ52m?2N3M2N2N3N1N2EARAb0j>DQA>l>=N3M2N2O1O2N1N2O2N1O1O2O0O101N1oAXNi=h1UBZNi=h1UBZNi=Q2N1N2N2N3N1N2N3M2O1N2N01O1O010O1O1O0^NhBSO0S1X=JjBPO1S1W=KjBQO0R1X=KRC3o<\\ObB1b0a0m<\\OcB1b0a0m<[OdB3`0`0Q=XOaB6a0?]=^OeBa0\\=]OfBa0\\=^OfB?]=^OeB`0_>N2O2M2N2N3N1N2N^Wd1"}, "image_id": 765, "id": 12931}, {"iscrowd": 0, "category_id": 1, "bbox": [89.0, 214.0, 47.0, 90.0], "area": 2714, "segmentation": {"size": [512, 512], "counts": "of\\13m?7I7I7I6I8I7I1O006J6J7H8I7I3M00O1000O10000000O1000O10000O0O2O1OjNhBFX=:nB@R=`0UCYOkQORBh0Q>TOWBe0h>J5J2O0O2MPhk5"}, "image_id": 765, "id": 12932}, {"iscrowd": 0, "category_id": 1, "bbox": [250.0, 217.0, 71.0, 81.0], "area": 2631, "segmentation": {"size": [512, 512], "counts": "iXm31o?1N3dNLgB6W=MfB5Y=LeB7X=KgB6W=MfB5X=MfB6W=LgB6X=KgB6W=MfB5X=MfB6W=JiB7W=HjB8U=IjB7V=JiB6W=MfB3[=NcB3\\=OcB0]=3\\BSOLj0h=5[BROKi0j=7YBMg=5VBKj=U1000010O00000101N2N3JoAXNS>f15N3M2O1N3M2N2N2O2M2N2N3N1N2N1O01O01O000001O01O1O3N1N2N3M2O1N2N3M2N2O2MXXo2"}, "image_id": 765, "id": 12933}, {"iscrowd": 0, "category_id": 1, "bbox": [16.0, 221.0, 62.0, 92.0], "area": 3664, "segmentation": {"size": [512, 512], "counts": "XW85k?6J6J3M00000O10O10000000O1PBB^^NmAb1T>_NkA`1U>50001O01O0JlAaNT>_1nA`NQ>`1QB^NP>a171O0000010O000001O01O0000010O000002N2O2M2N2N2O2M2N2N3M2O1OO2N2O10O1N3M2N2O1N3M2N2N3N1N2N2N3N1N2NkfU2"}, "image_id": 765, "id": 12935}, {"iscrowd": 0, "category_id": 1, "bbox": [69.0, 329.0, 89.0, 41.0], "area": 2652, "segmentation": {"size": [512, 512], "counts": "ojR14l?7YOFZAa0_>FZA;e>`00O10O1000000000O10O1000000000O10O10000000O10001O1O00000O100000O100000O100000O100000O1000O10000000O1000O10000000O1000O100002M6K0000000000O015KO100000O100000O10002N6F;H8Ild`5"}, "image_id": 765, "id": 12936}, {"iscrowd": 0, "category_id": 1, "bbox": [432.0, 340.0, 80.0, 79.0], "area": 3107, "segmentation": {"size": [512, 512], "counts": "W\\h61l?4M2N2N3L3N3M2M3N3M2O20O00010O010O0010O0010O010O00010O010O00VOUO_Bl0`=WO^Bh0a=[O]Bf0b=]O\\Bb0c=A[B?e=CZB=e=FXB:g=IUB:j=IPB:o=l001O0O2O0O2O0O101O0O2O0O10N210O3N1N2N3N1YOnA\\OR>b0PB_Oo=>TBBl=1O01O0M4M2M4M2M3NaC"}, "image_id": 765, "id": 12937}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 368.0, 47.0, 67.0], "area": 2094, "segmentation": {"size": [512, 512], "counts": "k;R1n>O010000O0100000O0100000N2O5L4L4K5L4L4L1N0100000O0100000O0100000O0104L4K4M4L4L4oNZA?k>]OYA?j>]O[A?W?L3L5L4LeRX7"}, "image_id": 765, "id": 12938}, {"iscrowd": 0, "category_id": 1, "bbox": [92.0, 379.0, 67.0, 41.0], "area": 1542, "segmentation": {"size": [512, 512], "counts": "^\\^11o?1N3M2O2M2N3N1KCd@`0Z?4N3M3N1N3M2O0O000101N2N10O00002K6OO0O100O1O100O1O1O2O5J3M2O1N0010O002O1N3M2O1N00010O000100O2N3N1N3M2O2M2N2O2M2N3N1NcS`5"}, "image_id": 765, "id": 12939}, {"iscrowd": 0, "category_id": 1, "bbox": [414.0, 400.0, 53.0, 54.0], "area": 1798, "segmentation": {"size": [512, 512], "counts": "l]_61l?3N3M2^OHYAFZAHYA;e>GYAGXAb0M2M3N30O010O01O01O010O01ON3M2N30O0010O0010O010O00010ON3N10010O001FYAZOf>d0\\AYOh>c0\\AZOf>d05N2N2N1O0000000000000000000000000000000000000000000N2O1OL^AmN`>U1aAjN^>W1dAgN[>Z141O11O2N1J\\AoNf>f0YA_O3Ie>f0[A_Ol>>WA@j>`0WA^Oj>b0;0O010O01N1M3N3M2M4M2NdRS4"}, "image_id": 765, "id": 12942}, {"iscrowd": 0, "category_id": 1, "bbox": [468.0, 426.0, 44.0, 63.0], "area": 1644, "segmentation": {"size": [512, 512], "counts": 
"X^Z72l?3L3M3N3M21O01O010O0N2M4M2M4M20001^ASOP>l0mAWOS>i0jA[OV>e0gA]OV>f0gA^OV>e0gA]OV>X1N3O01O01O010O01O01O01O000N3L3N3BcATO`>i0cATO`>j0cASO`>i0=MbB"}, "image_id": 765, "id": 12943}, {"iscrowd": 0, "category_id": 1, "bbox": [246.0, 429.0, 57.0, 48.0], "area": 1499, "segmentation": {"size": [512, 512], "counts": "b^k32k?4M2M3N3L3N3M200N3M2M4M2M3N3L3N3N11O01O010ON3L3N2M4M2N3L300010O01O010O01O01O010O01O010O01O01O010O01XOm@a0Y?O2L3N2M4M2NoQX3"}, "image_id": 765, "id": 12944}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 458.0, 50.0, 54.0], "area": 1973, "segmentation": {"size": [512, 512], "counts": "b>^1b>01O001O00001O001O001O00N2M3N2N2N2O11O00001O001O001O00001O00N2NKfAhNX>X1jAhNV>U1nAjNS>S1oAmNQ>Q1RBnNn=o0UBQOl=l0VBTOj=j0YBUOh=g0j0N3M2M3N3M2N3L3N3MaaV7"}, "image_id": 765, "id": 12945}, {"iscrowd": 0, "category_id": 1, "bbox": [394.0, 470.0, 94.0, 42.0], "area": 2129, "segmentation": {"size": [512, 512], "counts": "Z_U61m?2M4M2M3N3M2M4M2O110O010O01O01O010O010O00010O010O00010O010O00010O0O2L3O110O010O00010O010O0010O0010O010O0001O001O00001O001O00001O001O00001O001O00001O001O001O00001O001O00001O001O000M4M2M4M_`;"}, "image_id": 765, "id": 12946}, {"iscrowd": 0, "category_id": 1, "bbox": [229.0, 481.0, 54.0, 31.0], "area": 1048, "segmentation": {"size": [512, 512], "counts": "mob33k?2M3N2O1O1M3N2M3N2N2M3N2N21O001O00001O001O001O00001O001O00001O001O00001O001O001O00001O0Bd@8]?Ef@:`?01O001O000N3L3NZPb3"}, "image_id": 765, "id": 12947}, {"iscrowd": 0, "category_id": 1, "bbox": [507.0, 501.0, 5.0, 11.0], "area": 31, "segmentation": {"size": [512, 512], "counts": "oom71l?3N2M3N2"}, "image_id": 765, "id": 12948}, {"iscrowd": 0, "category_id": 1, "bbox": [90.0, 502.0, 36.0, 10.0], "area": 196, "segmentation": {"size": [512, 512], "counts": "n_]12l?2M3N2O11O001O00001O001O001O00001O001O00O100O10000O100001O001O00001O2NQ`P6"}, "image_id": 765, "id": 12949}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 139.0, 68.0, 67.0], "area": 1936, "segmentation": {"size": [512, 512], "counts": "o5?a?00000000000O100000000000000000O1000000000000000O1O1O1O1O1N2O1O1O1O000O1mNH_B8b=R11N2O1O1O1O1O1N2O1O1O1O1CbAWO_>i0<000000000O1000000000000000000f0ZOojm6"}, "image_id": 767, "id": 12950}, {"iscrowd": 0, "category_id": 1, "bbox": [103.0, 141.0, 30.0, 38.0], "area": 647, "segmentation": {"size": [512, 512], "counts": "Yec13j?3N3M2M4M2N2M4M2N3L3N3M200010O01OO2L3N3M2M4M2N2M4M2N3M2M4M^[m5"}, "image_id": 767, "id": 12951}, {"iscrowd": 0, "category_id": 1, "bbox": [31.0, 0.0, 41.0, 44.0], "area": 1137, "segmentation": {"size": [512, 512], "counts": "Pa?2k?4ROKmA7S>IjA:V>GgADeA?[>AcAb0\\>_OaAc0_>]O^Af0b>[O[Ah0d>XOYAk0g>UOWAn0h>50001O001O00001O001O00001O0000N2M3EXA\\Oj>a0YA\\Oj>b0YA[Oi>b01YA1d>2YA1e>2XA1e>1XA2e>1YA2d>1YA1d>g0N3M2M4F_NQBc1m=_NPBe1m=8M3N3L3N3M2fBcMn<`2oBcMo`NlA^1\\>N3L3N3L3N2N3L3N3M2M4M2M3N3M2M4M2N]_T6"}, "image_id": 769, "id": 12953}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 49.0, 46.0, 86.0], "area": 2115, "segmentation": {"size": [512, 512], "counts": "b2e0e0_Om=a0PBCo==oAER>;jAHV>8hAKX>5dANZ>4dAOX>4eAOY>m0M4M2M4M2M3N3L3N3L3NO012M4M2M3N3L3ITBXNo=d18N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3NWnX7"}, "image_id": 769, "id": 12954}, {"iscrowd": 0, "category_id": 1, "bbox": [155.0, 64.0, 40.0, 67.0], "area": 1595, "segmentation": {"size": [512, 512], "counts": "Tc]21l?3CMj@6S?Mj@7R?Mk@5`0En=i0nAZOo=i0oAZOm=i0PBZOn=i0nA[On=h0oA[OP>f0nA\\OR>Z10O00010O00010O010O00010O000bNnAo0R>nNQBn0R>oNQBn0R>oNQBo0R>mNRBo0a>M3M4M2M4L3N2M4L3M3NX]n4"}, "image_id": 769, "id": 12955}, {"iscrowd": 0, "category_id": 1, "bbox": [195.0, 64.0, 17.0, 
17.0], "area": 189, "segmentation": {"size": [512, 512], "counts": "\\bQ31l?4L3N2M4N100010O01O01O001L3N2M4Lome4"}, "image_id": 769, "id": 12956}, {"iscrowd": 0, "category_id": 1, "bbox": [92.0, 72.0, 23.0, 31.0], "area": 403, "segmentation": {"size": [512, 512], "counts": "PS^13j?3N3L3N2M4M2M4M2M3O2O01O0N3M2M3N3L3N3L3N2M4Me]V6"}, "image_id": 769, "id": 12957}, {"iscrowd": 0, "category_id": 1, "bbox": [37.0, 96.0, 44.0, 76.0], "area": 1982, "segmentation": {"size": [512, 512], "counts": "^cb01;25O\\>2]A23O^>2[A25N]>3\\A140=GZ=i0VBB=IY=h0WBC=G[=Y1bBkN]=U1aBmN_=T1]BoNd=P1ZBSOe=d110O00010O010O0001N1N3L3N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2N2M4M2M4M2M3Nd\\g6"}, "image_id": 769, "id": 12958}, {"iscrowd": 0, "category_id": 1, "bbox": [78.0, 108.0, 56.0, 96.0], "area": 2592, "segmentation": {"size": [512, 512], "counts": "YUW13f0O]>3aAO\\>5`AO]>3aAO\\>4bAN\\>5`AO]>3aAO\\>5`AO]>k0M3N3M2M4M2M4M2N2M4M2M4OO1M4M2N2M4M2M4M000O010O01000O102M4M2M3N3M2M4M2M4M2N2M4M2M4M2N2M4M2M4M2N^ll5"}, "image_id": 769, "id": 12959}, {"iscrowd": 0, "category_id": 1, "bbox": [163.0, 140.0, 66.0, 114.0], "area": 3470, "segmentation": {"size": [512, 512], "counts": "cga22l?3L3hNKcB8Y=KRBL2h0gA[OW>g0gA\\OU>h0gA[OW>g0gA[OV>h0gA\\OV>X1N3L3N2M4M200010O010O00010O0O2L3NO010O0103L3N3L3N2M4L3N3L3N2M4M2M4M2M3N3L3N3L3N2M4MV[Z5"}, "image_id": 769, "id": 12961}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 260.0, 30.0, 63.0], "area": 1015, "segmentation": {"size": [512, 512], "counts": "T8n1S>0O01O1M2N3N1N3M3M2O2M2N3M2O2M3M2O2M2N3M2O2M3M2N3N1N3M2N3N\\g`7"}, "image_id": 769, "id": 12962}, {"iscrowd": 0, "category_id": 1, "bbox": [12.0, 298.0, 61.0, 70.0], "area": 2104, "segmentation": {"size": [512, 512], "counts": "nZ63j?3N3M2N3M2N2M4O010O010O010O010M2N3M2N2N3M2M4M2N3M2N3L3N3M2N3M2N2M4N11N1N3M2N2N3L3N3M2N3M2N3L3N3M2N3M2N2M4M2N3M2N3L3N3M2N\\Vk6"}, "image_id": 769, "id": 12963}, {"iscrowd": 0, "category_id": 1, "bbox": [63.0, 332.0, 50.0, 63.0], "area": 1775, "segmentation": {"size": [512, 512], "counts": "mko02k?4M2N2M4M2M4M2M3N3M2M4M2M3N3M2M4M2M4M2M301O010O01O010O01O01L3N3M2M3N3L3N3L3N2N3L3N3L3N3M2M3N3L3N3L[UW6"}, "image_id": 769, "id": 12964}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 343.0, 11.0, 25.0], "area": 180, "segmentation": {"size": [512, 512], "counts": "g:g0Y?100O01O0N3@f@6]?Ge@7d?N1N3MoTj7"}, "image_id": 769, "id": 12965}, {"iscrowd": 0, "category_id": 1, "bbox": [176.0, 379.0, 69.0, 56.0], "area": 2072, "segmentation": {"size": [512, 512], "counts": "i\\h21m?2N3L3N3M2N3M2M4M2N2N3M2O20O010O010O01O01O010ON3M2M4N110O0010O0010O010O010O010O0010kN^Aj0b>SOaAm0`>POcAP1f>0010O010O010O010O0010O0001L3N3M2N3M2M4M2N3M2N2M\\SU4"}, "image_id": 769, "id": 12966}, {"iscrowd": 0, "category_id": 1, "bbox": [44.0, 384.0, 26.0, 25.0], "area": 372, "segmentation": {"size": [512, 512], "counts": "^\\f03k?2N3L3N3M2N3N110O010O010O010O010O01N1N3M2N3M2N3M2Nicl6"}, "image_id": 769, "id": 12967}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 414.0, 75.0, 65.0], "area": 2757, "segmentation": {"size": [512, 512], "counts": "[=e0Y?2M4M2M3O20O010O0010O0010O0010O0010O0010O0010OTAQOg>T11O010O01O010O01O01O010O01O01O010O01O01L3N3L3N2N3L3N3N11O0WOkAHU>5nAJR>3QBNn=3RBLo=3QBNn=2RBNo=2QBMQ>1oA0P>0QBOP>MSB4l=JVB6k=FYB9g=E[Ba=_OaB>b=_OaB?b=^OaB>b>N3L3N2N3LiQj6"}, "image_id": 769, "id": 12968}, {"iscrowd": 0, "category_id": 1, "bbox": [235.0, 423.0, 28.0, 27.0], "area": 457, "segmentation": {"size": [512, 512], "counts": "jme31l?3N3L3M3N3L3N3N11O01O01O010O01O01O010O000N3L3M3N3L3M4McRl3"}, "image_id": 769, "id": 12969}, {"iscrowd": 0, "category_id": 1, "bbox": 
[296.0, 427.0, 72.0, 49.0], "area": 1995, "segmentation": {"size": [512, 512], "counts": "S^d43k?2M4L3N2M4L3N3L3M301O01O01O010O01O01O01O01O010O01O01O01O01O010O01O01O01O01O010O01O01O01O01O010O01O01L3M3N3O010O00010ON2N3L3M4M2M3M4M2M3M4M2M4LZbW2"}, "image_id": 769, "id": 12970}, {"iscrowd": 0, "category_id": 1, "bbox": [260.0, 431.0, 29.0, 29.0], "area": 522, "segmentation": {"size": [512, 512], "counts": "R^R42k?4L3M3M4L3M3O20O01O01O01O01O010O01O01O01O01M2M4L3N2M4L3M\\R_3"}, "image_id": 769, "id": 12971}, {"iscrowd": 0, "category_id": 1, "bbox": [157.0, 437.0, 55.0, 57.0], "area": 2008, "segmentation": {"size": [512, 512], "counts": "on^23j?3M3M4L3M3M4L3M3M4M2M3M4L3M301O00010O0010O00010O00010ON2M40O00010O0010O0001L3M3N3L3M3N3L3M4M2M3M4M2M3M4M2M3MUbe4"}, "image_id": 769, "id": 12972}, {"iscrowd": 0, "category_id": 1, "bbox": [226.0, 446.0, 28.0, 28.0], "area": 497, "segmentation": {"size": [512, 512], "counts": "`^a33k?2M3M4M2M4L3N201O0010O0010O0010O0010O00O2M2M3N3L3N3L3NmaP4"}, "image_id": 769, "id": 12973}, {"iscrowd": 0, "category_id": 1, "bbox": [327.0, 453.0, 78.0, 59.0], "area": 2043, "segmentation": {"size": [512, 512], "counts": "noS52k?3N2M3N2M3N2N2001O001O00001O00001O001O00001O001O00001O001O00001O00N2IIc@9[?7M3N2M3N2M3M3N2M3N2M3N2M3N2M3N2O100001N1N3N10001O0N3L3N2M4L3N3L3N2M4M2M4M2M3N3L3N3LdQe1"}, "image_id": 769, "id": 12974}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 473.0, 31.0, 39.0], "area": 926, "segmentation": {"size": [512, 512], "counts": "U?k0U?0001OO1N2M3N2M3N21O00001O001O00001O00001O001O0DTADl>:WAEj>7YAIg>4\\AJg>3\\AIg>4d0NQQ`7"}, "image_id": 769, "id": 12975}, {"iscrowd": 0, "category_id": 1, "bbox": [241.0, 475.0, 33.0, 32.0], "area": 537, "segmentation": {"size": [512, 512], "counts": "Yoh31n?2N2N2N2N2N2N2N2N2N1O2N2N2N2O10000000N2N2N2N2N2N2N1O2N2N2N2N2M3Nf`f3"}, "image_id": 769, "id": 12976}, {"iscrowd": 0, "category_id": 1, "bbox": [285.0, 484.0, 30.0, 28.0], "area": 460, "segmentation": {"size": [512, 512], "counts": "co^42m?2N2N2N2N2N2N2N2N1O2N2N2M2O11O1O1O1N2N2N2N1O2N2N2N2N2M3N2N_PR3"}, "image_id": 769, "id": 12977}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 489.0, 23.0, 23.0], "area": 323, "segmentation": {"size": [512, 512], "counts": "o_W71m?2M3N2M3N2M3M3N2N2001O001O00001N1M4M2M3N3L3NdP="}, "image_id": 769, "id": 12978}, {"iscrowd": 0, "category_id": 1, "bbox": [197.0, 506.0, 17.0, 6.0], "area": 60, "segmentation": {"size": [512, 512], "counts": "moR33j?300001O00001O001O00001O001O0000Q`d4"}, "image_id": 769, "id": 12979}, {"iscrowd": 0, "category_id": 1, "bbox": [143.0, 508.0, 10.0, 4.0], "area": 24, "segmentation": {"size": [512, 512], "counts": "moW23l?1001O001O001O0000QPc5"}, "image_id": 769, "id": 12980}, {"iscrowd": 0, "category_id": 1, "bbox": [505.0, 508.0, 7.0, 4.0], "area": 21, "segmentation": {"size": [512, 512], "counts": "mol73l?1001O001O00"}, "image_id": 769, "id": 12981}, {"iscrowd": 0, "category_id": 1, "bbox": [313.0, 314.0, 44.0, 37.0], "area": 856, "segmentation": {"size": [512, 512], "counts": "_jl42k?3M3N3L3M4M2M4M21O01O010O01O01OXOl@d0X?1O01O010O01O01O010O01O01O010O01O010O01N1N3M2M3N3L3N3LgU]2"}, "image_id": 770, "id": 12982}, {"iscrowd": 0, "category_id": 1, "bbox": [305.0, 318.0, 11.0, 17.0], "area": 101, "segmentation": {"size": [512, 512], "counts": "Zjh43k?2N3L3N2M40N1M3M4L3NRfQ3"}, "image_id": 770, "id": 12983}, {"iscrowd": 0, "category_id": 1, "bbox": [270.0, 357.0, 16.0, 16.0], "area": 138, "segmentation": {"size": [512, 512], "counts": "][W42m?2N1N3N2M3N101O10N2M3N1O2M3N2Nfd`3"}, "image_id": 770, "id": 12984}, 
{"iscrowd": 0, "category_id": 1, "bbox": [154.0, 359.0, 44.0, 38.0], "area": 857, "segmentation": {"size": [512, 512], "counts": "n[]23k?3M2M3N3O001N100O2O001N101O0O101N101O0O2O000O2O001N100O2ON2N1N3N2N1N3N2M2O2N1N3N2N1N3Nkdl4"}, "image_id": 770, "id": 12985}, {"iscrowd": 0, "category_id": 1, "bbox": [367.0, 366.0, 70.0, 41.0], "area": 1739, "segmentation": {"size": [512, 512], "counts": "Tlg51m?2N2M4M2N3M2N3L3N2N3O001O01O010O01O01O010O010O01O01O0N3M20010O010O0010M2N3M2N3O0O1O20O01O010O01O01O010O010O00010O01M2N2N3L3N3M2N3L3N2N5J3NWTU1"}, "image_id": 770, "id": 12986}, {"iscrowd": 0, "category_id": 1, "bbox": [326.0, 384.0, 39.0, 34.0], "area": 799, "segmentation": {"size": [512, 512], "counts": "g\\S51l?3M3N3L3N3L3N2M4M201O01O010O010O0010O00010O010O01O01OM4L3010O010O00M4L3M4L3NfSY2"}, "image_id": 770, "id": 12987}, {"iscrowd": 0, "category_id": 1, "bbox": [190.0, 391.0, 75.0, 45.0], "area": 2665, "segmentation": {"size": [512, 512], "counts": "Y]o23h?5K6J5K5K5K5L5O0001O0001O0001O0001O0001O0001O01O000001O01O000001O01O000001O00000000O1000O100000O1000O10000000O1000O10000000O1000O10000000O5L6J7I7I6JiRk3"}, "image_id": 770, "id": 12988}, {"iscrowd": 0, "category_id": 1, "bbox": [341.0, 411.0, 75.0, 53.0], "area": 2108, "segmentation": {"size": [512, 512], "counts": "dmZ53k?3L3M4L3N2N3O0010O0010O0010O010O0010O0010O0010O0010O010O00N3M2N3L3N210N1N3L3N3N10010O010O01O01O010O010O01O01OgN_AS1b>jN`AW1d>01O010O0O2M2M3N3M2M4M2N2M4M2N3M2M4Mcb_1"}, "image_id": 770, "id": 12989}, {"iscrowd": 0, "category_id": 1, "bbox": [264.0, 451.0, 21.0, 21.0], "area": 254, "segmentation": {"size": [512, 512], "counts": "`^T41m?2N3L3N3M2N30O01O010O01O010O010L3N3M2N2NiQa3"}, "image_id": 770, "id": 12990}, {"iscrowd": 0, "category_id": 1, "bbox": [301.0, 459.0, 91.0, 53.0], "area": 2797, "segmentation": {"size": [512, 512], "counts": "Qof42l?2N3L3N3L3N3M2M3N3N1010O0010O0010O010O00010O010O010O0001M2N3M201O00010O010O0010O0010O0010O010O0010O0010O0010O0010O010O0010O0010ON3L31O01O01O001O00001O001O0M4M2M3N3M2M4M2M3N3M2M4M2MUak1"}, "image_id": 770, "id": 12991}, {"iscrowd": 0, "category_id": 1, "bbox": [115.0, 464.0, 26.0, 31.0], "area": 460, "segmentation": {"size": [512, 512], "counts": "Xoi11m?2M4M2M3N3L3N3M2M4M200010O010O0N2N3M2M4M2N3L3N2M4M\\Qi5"}, "image_id": 770, "id": 12992}, {"iscrowd": 0, "category_id": 1, "bbox": [202.0, 479.0, 29.0, 31.0], "area": 544, "segmentation": {"size": [512, 512], "counts": "e_U32k?3N3L3N3M2M3N3L3N3O01O010O01O01O010O001M2N2M4M2M4M2N2M4MkP\\4"}, "image_id": 770, "id": 12993}, {"iscrowd": 0, "category_id": 1, "bbox": [119.0, 480.0, 51.0, 32.0], "area": 722, "segmentation": {"size": [512, 512], "counts": "ook11l?3N2N2M3001O001O00001O001O00001O001O00001O0000M3N2M3N2M3N2M3N2M3N2M3001O00001O001O0M3N3L3N3L3N2M4M2Mm`Z5"}, "image_id": 770, "id": 12994}, {"iscrowd": 0, "category_id": 1, "bbox": [226.0, 492.0, 30.0, 20.0], "area": 403, "segmentation": {"size": [512, 512], "counts": "n_a31l?3N3L3N2N2M3N2O1001O001O001O00001O001O001O001O00001M2N3M2M^`o3"}, "image_id": 770, "id": 12995}, {"iscrowd": 0, "category_id": 1, "bbox": [169.0, 505.0, 14.0, 7.0], "area": 70, "segmentation": {"size": [512, 512], "counts": "ood21l?3M30000001O00001O001O000MWPT5"}, "image_id": 770, "id": 12996}, {"iscrowd": 0, "category_id": 1, "bbox": [287.0, 508.0, 26.0, 4.0], "area": 67, "segmentation": {"size": [512, 512], "counts": "lo_44l?000000000001O0000000000001O00000000001O0000000000QPS3"}, "image_id": 770, "id": 12997}, {"iscrowd": 0, "category_id": 1, "bbox": [317.0, 510.0, 12.0, 2.0], "area": 17, "segmentation": {"size": 
[512, 512], "counts": "non42n?00000001O000000000000QPk2"}, "image_id": 770, "id": 12998}, {"iscrowd": 0, "category_id": 1, "bbox": [454.0, 0.0, 36.0, 7.0], "area": 130, "segmentation": {"size": [512, 512], "counts": "PPS71o?00000000001O000000001O00000000001O00000000001O00000000001O000000001ONRP;"}, "image_id": 771, "id": 12999}, {"iscrowd": 0, "category_id": 1, "bbox": [495.0, 0.0, 17.0, 38.0], "area": 506, "segmentation": {"size": [512, 512], "counts": "Rag71i?6J7I6K5J6K50001O00000000001O0000"}, "image_id": 771, "id": 13000}, {"iscrowd": 0, "category_id": 1, "bbox": [286.0, 20.0, 198.0, 74.0], "area": 8206, "segmentation": {"size": [512, 512], "counts": "cQ_42g?7J7I6M30000010O000001O01O000001O01O0001RA[O_>e0YACg>k01O0001O000L4K501O000001O01O000001O01O000001O01O000001O01O000000010O00000001M2K5K5J6K6KO6J5O100001O01O000001O0001O0001O06J3M1O2O0O0M4N2O001O0001O000000001O0O100O100O2N1O1N2M301O000000001O0oNVAm0n>0iARO\\=n0^BXOb=h0XB^Oi=a0RBEm=;mAKS>Q100001O01O000001O0001O0001O000000010YNmA_1Z>01O0M3K5K5010O000000010O000000010O000000010O000XAmNc>X1010O00000001K4O100001O01O000001O01O000000010O00L4K5K6J5K5K5J6K6Jen="}, "image_id": 771, "id": 13001}, {"iscrowd": 0, "category_id": 1, "bbox": [204.0, 0.0, 42.0, 34.0], "area": 889, "segmentation": {"size": [512, 512], "counts": "WPV32l?2M4M20001O001O0k@Ee>;YAGg>:UAJj>6TALl>5PAOo>1o@1Q??10O010O0O1O2O001OO1M3O1001O00O2N02O00M3Jm@]OU?a06M3N2M3N2M3NRPU4"}, "image_id": 773, "id": 13002}, {"iscrowd": 0, "category_id": 1, "bbox": [289.0, 0.0, 22.0, 8.0], "area": 93, "segmentation": {"size": [512, 512], "counts": "P``41o?00001O001O00001O001O001O00001O001O00N2N2MS`T3"}, "image_id": 773, "id": 13003}, {"iscrowd": 0, "category_id": 1, "bbox": [323.0, 0.0, 55.0, 62.0], "area": 2167, "segmentation": {"size": [512, 512], "counts": "XaQ53k?3FKd@8Z?Kb@83GQ?i0M2M4L3N2M4M2M3N3O010O00010O010O00001L3N2N3O001O00001ON2N2M3N2M3M3N2M30000001OD]AXOf>e0^AWOe>f0^AXOd>f05lANU>o00O0010O01O010O1O010O0010O01O010O1ON30O000M4L3M4M2M3M4L3M3M4L3M4L3N2M4L3Ma_4"}, "image_id": 773, "id": 13006}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 3.0, 32.0, 40.0], "area": 937, "segmentation": {"size": [512, 512], "counts": "4Q1o>00010O00010O01O01O01O01O01OM4L3010O00001L3M3M4M2M3M4L3M3M4L3Mko_7"}, "image_id": 773, "id": 13007}, {"iscrowd": 0, "category_id": 1, "bbox": [120.0, 75.0, 72.0, 49.0], "area": 2005, "segmentation": {"size": [512, 512], "counts": "TSl14j?2M3N3M2M4M2N3L300010O01O01O01O0N3M2N3M21O01O010O010O01O01O010O01O01O010O010O04L010O010O00010O010O000O2O0010ON3N11O010O01O001L3N2M4M2N3L3N2N3Lolo4"}, "image_id": 773, "id": 13008}, {"iscrowd": 0, "category_id": 1, "bbox": [225.0, 102.0, 17.0, 21.0], "area": 198, "segmentation": {"size": [512, 512], "counts": "ec`32l?3L3N2M4M2M40O0010OO1N3L3N3L3M3NhlV4"}, "image_id": 773, "id": 13009}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 119.0, 43.0, 33.0], "area": 863, "segmentation": {"size": [512, 512], "counts": "\\dm22l?2M4M2M4M2N2M4M2N3O01O010O01O01O010O010O01O01N1N30O00010O010O00010O01N1N2N3L3N3L3N3Lnk\\4"}, "image_id": 773, "id": 13010}, {"iscrowd": 0, "category_id": 1, "bbox": [102.0, 136.0, 89.0, 56.0], "area": 2535, "segmentation": {"size": [512, 512], "counts": "mTc12k?4M2N3L3N3L3N2N30O010O00010O01O010O01O01O010O000M4M2M4O01O01O010O010O01O01O010O01O01O010N100010O010O01O01O010O01O01O010nNVAl0o>10O0010O0010O010O00010O010O00010O010O0N3L3N2M4M2N3L3G\\@4j?LkZP5"}, "image_id": 773, "id": 13011}, {"iscrowd": 0, "category_id": 1, "bbox": [262.0, 146.0, 56.0, 62.0], "area": 1852, "segmentation": {"size": [512, 512], "counts": 
"SVS41m?3L3N2M4M2M4M2M3N3L3N3M21O010O01M2N2M4M2M4M2M3N3L310O01O01ON3L3N3L3N2M4M2M4O000010O010OM3N3L3N3L3N2M4M2M4M2M3NTkP3"}, "image_id": 773, "id": 13012}, {"iscrowd": 0, "category_id": 1, "bbox": [332.0, 158.0, 85.0, 55.0], "area": 3023, "segmentation": {"size": [512, 512], "counts": "oUV53h?6K4K5L4K6K4K5O10001O01O0001O0001O0001O01O0001O0001O0001O01O0001O0001O0001O01O0001O0001O0001O01O0001O1O10O0000010O000000010O0000010O000000010O0000010O000L4L4K6K4K5L4K6KeZ_1"}, "image_id": 773, "id": 13013}, {"iscrowd": 0, "category_id": 1, "bbox": [500.0, 159.0, 12.0, 36.0], "area": 361, "segmentation": {"size": [512, 512], "counts": "iUj79`?7H8I7M31ON20001O00000QK"}, "image_id": 773, "id": 13014}, {"iscrowd": 0, "category_id": 1, "bbox": [84.0, 174.0, 62.0, 56.0], "area": 1837, "segmentation": {"size": [512, 512], "counts": "^VZ11m?2M4M2M3N3L3N3O000010O010ON2M4M2M4M2M3N3O0010O0010O0010O0010O0010O0010O0010O0010O001ZAgNd>Z1010PO]Aa0c>]O`Ac0_>ZOdAf0]>WOeAi0[>TOiAl0e>010O00O2M2M4M2M3Ba@9e?L3N3Lcif5"}, "image_id": 773, "id": 13015}, {"iscrowd": 0, "category_id": 1, "bbox": [148.0, 184.0, 34.0, 30.0], "area": 614, "segmentation": {"size": [512, 512], "counts": "\\VZ23k?3GM`@6]?9M2N3M2O101O010O01O01O010O01N1N3L3N2O2O0010O0010O001M2M3N3M2MQjT5"}, "image_id": 773, "id": 13016}, {"iscrowd": 0, "category_id": 1, "bbox": [316.0, 196.0, 31.0, 30.0], "area": 578, "segmentation": {"size": [512, 512], "counts": "gVn43k?2N2M4M2M4M2M3N3O0010O00010O010O01O01O010O001M2N2M4M2M4M2M3NfYb2"}, "image_id": 773, "id": 13017}, {"iscrowd": 0, "category_id": 1, "bbox": [232.0, 207.0, 31.0, 31.0], "area": 566, "segmentation": {"size": [512, 512], "counts": "YWd32k?4M2M3N3M2M4M2M100O021000010O010O00010O010O0001M2M4M2M3N3L3N\\Yl3"}, "image_id": 773, "id": 13018}, {"iscrowd": 0, "category_id": 1, "bbox": [503.0, 211.0, 9.0, 19.0], "area": 132, "segmentation": {"size": [512, 512], "counts": "lfk74c?91O05K000000001[I"}, "image_id": 773, "id": 13019}, {"iscrowd": 0, "category_id": 1, "bbox": [39.0, 224.0, 28.0, 26.0], "area": 432, "segmentation": {"size": [512, 512], "counts": "bgc01m?2N2M4M2M4M2N2N3O0010O0010O0010O010O001N1N2M4M2M4M2N2MlXn6"}, "image_id": 773, "id": 13020}, {"iscrowd": 0, "category_id": 1, "bbox": [331.0, 230.0, 68.0, 46.0], "area": 2108, "segmentation": {"size": [512, 512], "counts": "ThU51l?4M2M4M2M3N3L3N2M4M2M4M21O01O010O00010O010O00010O0010O0010O0010O0010O0010O00001O001O0000000000000O010000000000000000000O1000O1009GOQA1l>3RAOk>3SAOk>e0M201O01O010ON3M2010O0010O0010O010O0010O0010O010O0010O0010O010O00010O010O0010O0010O010O0PO]Ac0d>YO`Ag0_>WOcAg0_>WOcAg0m>L3N3M2M4M2N2M4M2NjeV6"}, "image_id": 773, "id": 13029}, {"iscrowd": 0, "category_id": 1, "bbox": [478.0, 305.0, 34.0, 40.0], "area": 939, "segmentation": {"size": [512, 512], "counts": "\\Z_72k?3M4M2M3M4L3M3M4L3O1010O00010O00010O00010O00010O00010O00010O00010OWF"}, "image_id": 773, "id": 13030}, {"iscrowd": 0, "category_id": 1, "bbox": [204.0, 322.0, 60.0, 53.0], "area": 2444, "segmentation": {"size": [512, 512], "counts": "lZV32a?=O100000000000000000001O0C=00000000000000000001O000000000000000000M3WOi0XOffk3"}, "image_id": 773, "id": 13031}, {"iscrowd": 0, "category_id": 1, "bbox": [299.0, 338.0, 50.0, 68.0], "area": 1978, "segmentation": {"size": [512, 512], "counts": "Tle42k?4M2M3N3M2M4M2M4M2M3N3M2M4M2M3N3L3N3M2M3O20O01O010O01O01O010O01O01O010O[OnA\\OS>a0PB^OP>?SBBm=;UBBn=;VBBl=CSA?j>=N3M2O2O0M4M2N3N10010O010ON2N3O010O010O010O01O01O010O010O010O01O01O010O010O010O01OoNbA`0_>]OcAc0]>XOiAh0W>TOlAl0T>ROoAn0a>0001L3N2N00020000010O0O2L3N3M2M4H]@Oe?O]@OoSo6"}, "image_id": 
773, "id": 13033}, {"iscrowd": 0, "category_id": 1, "bbox": [441.0, 347.0, 71.0, 65.0], "area": 2615, "segmentation": {"size": [512, 512], "counts": "mkl62k?4M2M3N3M2M4M2M3N3M2M4M2N3L30010O010O0010O0010O010O00010O0101N10O00010O010O00010O010O0010O0010O0010O010O00010O010O0010O0010O010O00010O010O00\\D"}, "image_id": 773, "id": 13034}, {"iscrowd": 0, "category_id": 1, "bbox": [175.0, 349.0, 17.0, 19.0], "area": 181, "segmentation": {"size": [512, 512], "counts": "[kg21m?2M4M2N3M2M301O010O01L3N3M2N3L3NQeo4"}, "image_id": 773, "id": 13035}, {"iscrowd": 0, "category_id": 1, "bbox": [70.0, 380.0, 61.0, 61.0], "area": 2256, "segmentation": {"size": [512, 512], "counts": "Y]S13k?2M3N3L3M4M2M301O0010ON2N3L3N3L3M3N3M210O00O2NO2M4M2N2N3O00010O00010O00010O00001O000010M2M3M4L3M3M4L3M3M4L3M3M4L3M3M4L3MnSn5"}, "image_id": 773, "id": 13036}, {"iscrowd": 0, "category_id": 1, "bbox": [265.0, 406.0, 57.0, 47.0], "area": 1943, "segmentation": {"size": [512, 512], "counts": "amT47S?f0K6O00000000000000000000000001O000000000001O00000006J00000000001O0000000:F000000001O00000000000000000001O00000]Odcn2"}, "image_id": 773, "id": 13037}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 413.0, 37.0, 59.0], "area": 1340, "segmentation": {"size": [512, 512], "counts": "X=S1k>3M2M4M2N2010O010O00010O010O01O01OaAaN]>a11O010DbATO^>IcAk02]O_>a0dA^O]>?eAA[>8kAGU>6nAKR>2QBMo=1SBOm=NVB0m=MVB2j=KYB5i>10O0N3MaQ]7"}, "image_id": 773, "id": 13038}, {"iscrowd": 0, "category_id": 1, "bbox": [426.0, 417.0, 67.0, 51.0], "area": 1988, "segmentation": {"size": [512, 512], "counts": "l]e61l?3N3L3N2N3L3N3O010O0N2N3M2M4M21O01O010O01O01O010O010O00010O010O01O01O010O010O00010O010N1M3N3O001O01O010O010O01O01O0O2\\OXAKk>3WAKk>2XAKl>2WAKk>2XAKl>2WAKk>2c0NdR9"}, "image_id": 773, "id": 13039}, {"iscrowd": 0, "category_id": 1, "bbox": [44.0, 422.0, 39.0, 29.0], "area": 649, "segmentation": {"size": [512, 512], "counts": "`]f01m?3W@M`?6^@M^?:jAIV>P10010O010O00010O01lNgA`0X>]OlAb0U>[OmAe0S>XOQBg0o=WOSBi0n=SOUBm0k=QOXBn0]>01O001O00001O001O00001O001O00001O0000N2M3O1001O00001O001O00001O001O00001O001O0Gi@IW?5k@KV?1m@OS?OPA0Q?LRA4n>JUA5[?O1NXP:"}, "image_id": 773, "id": 13044}, {"iscrowd": 0, "category_id": 1, "bbox": [118.0, 471.0, 37.0, 34.0], "area": 772, "segmentation": {"size": [512, 512], "counts": "Z_k14j?2M4M2N3L3N2M4M201O00010O010O00010O010O0010O0010O0010O0N3M2M3N3L3N3L3N2NQQb5"}, "image_id": 773, "id": 13045}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 474.0, 55.0, 38.0], "area": 1303, "segmentation": {"size": [512, 512], "counts": "j>V1k>O001N1N2N3M2N3M2010O000O2O010O0010O0010O0010Ol@YOo>l01O00001O00001O001O00001O0XOk@e0X?01O00001O001O00001O001L3N2M4M2M3NbPT7"}, "image_id": 773, "id": 13046}, {"iscrowd": 0, "category_id": 1, "bbox": [181.0, 489.0, 47.0, 23.0], "area": 575, "segmentation": {"size": [512, 512], "counts": "moj22n?1N1O100O100O1O100O100O1O100O100O1O100O100O1O100O100O1O100O1O100O12N2N1O2N2N1O2N2N1O2N2N1O2NQ`]4"}, "image_id": 773, "id": 13047}, {"iscrowd": 0, "category_id": 1, "bbox": [60.0, 501.0, 26.0, 11.0], "area": 160, "segmentation": {"size": [512, 512], "counts": "o_n01m?2M3N2M3001O00001O001O00001O001O00001O2N1O00001O00Q`d6"}, "image_id": 773, "id": 13048}, {"iscrowd": 0, "category_id": 1, "bbox": [22.0, 0.0, 7.0, 3.0], "area": 12, "segmentation": {"size": [512, 512], "counts": "PP;1o?00001O001OOQ`a7"}, "image_id": 774, "id": 13049}, {"iscrowd": 0, "category_id": 1, "bbox": [28.0, 0.0, 60.0, 29.0], "area": 1186, "segmentation": {"size": [512, 512], "counts": 
"YP>4i?3M3M4O00001O00001O00001O00001O00001O00001O00001O00001O00001O00001O00001O00001O00001O00001O00001O0000N2M3M3M3M3M3M4L3MRPd6"}, "image_id": 774, "id": 13050}, {"iscrowd": 0, "category_id": 1, "bbox": [157.0, 0.0, 63.0, 36.0], "area": 1388, "segmentation": {"size": [512, 512], "counts": "g`^21m?2M4M2M4M2M3M4O0010O00010O00010O010M2M3O2O0010O001M2M3N3L3O2O00001O001O00001O00001O001O00001O001ON2N2M3N2M3N2M3M3N2M3N2M3M3NRPb4"}, "image_id": 774, "id": 13051}, {"iscrowd": 0, "category_id": 1, "bbox": [228.0, 0.0, 34.0, 34.0], "area": 810, "segmentation": {"size": [512, 512], "counts": "iPb32l?3L3BJPA9m>IPA:m>JPA8o>In@;Q?:01O00001O001O00001O00O1M3N2M30000001O0000M3N2M3Cd@4^?Jd@3g?Mjol3"}, "image_id": 774, "id": 13052}, {"iscrowd": 0, "category_id": 1, "bbox": [296.0, 0.0, 38.0, 33.0], "area": 818, "segmentation": {"size": [512, 512], "counts": "`Pd41m?3L3[@K]?7a@K]?=M4M2N3O00001O001O001O00001O001O00001O001OM3N2M3N2N2M3N2M3N2N2M3N2MSPi2"}, "image_id": 774, "id": 13053}, {"iscrowd": 0, "category_id": 1, "bbox": [360.0, 0.0, 65.0, 89.0], "area": 2451, "segmentation": {"size": [512, 512], "counts": "RRd51m?3M2M3c@Km>8PAJm>9PAKm>8PAJn>f0O000010O010O00M4M201O0UOiNmBX1P=kNmBW1Q=kNlBV1S=nNjBR1V=POgBQ1Y=RO_BAO\\1b=VO\\BBOX1f=i04M2N210O010O00001L3N00KWNTBi1o=22M3N2N2M3N2M3N2N2M3^OZAHh>2VAC58h>2^ALd>2_AKc>4^AIe>7b01O00001O00O1N2M3N2NR`[1"}, "image_id": 774, "id": 13054}, {"iscrowd": 0, "category_id": 1, "bbox": [449.0, 9.0, 58.0, 91.0], "area": 2707, "segmentation": {"size": [512, 512], "counts": "fbP74i?3N3L3M3M4N10001L3N2M4L3M3M4M2M4VObNmBa1P=cNlB`1Q=cNlBa1P=bNnB`1o6k?Ldon7"}, "image_id": 774, "id": 13056}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 36.0, 55.0, 90.0], "area": 2423, "segmentation": {"size": [512, 512], "counts": "Xco22k?3M4^OKUA7h>LUA73DY>7aA92CZ>8aA73DX>o0dATOZ>[1L3M3N3N100010O01O0O1N3L3M3N0O000111QO^BAa==aBB`=;cBC`=9dBC_=;cBB`=;cBC`=;bBAa=`0^BXOCKS>l0ZBWOj=m0a03N2010O00010M2M4L300010OM3N3L3M4Gb@L`?1c@L`?1onT4"}, "image_id": 774, "id": 13057}, {"iscrowd": 0, "category_id": 1, "bbox": [32.0, 50.0, 24.0, 25.0], "area": 374, "segmentation": {"size": [512, 512], "counts": "TR`01l?3N2M4L3M3N3M201O01O01O01O010O01O01L3N2M4L3N2M\\nS7"}, "image_id": 774, "id": 13058}, {"iscrowd": 0, "category_id": 1, "bbox": [248.0, 51.0, 38.0, 53.0], "area": 1126, "segmentation": {"size": [512, 512], "counts": "oRl31l?4ENd@4Y?Od@5Y?:M3N3L3M3N3L3N3M200010O010O0001OON10O013L3M3N3L3N2M4L3N3L3N2M4L3N3LZn`3"}, "image_id": 774, "id": 13059}, {"iscrowd": 0, "category_id": 1, "bbox": [236.0, 54.0, 89.0, 172.0], "area": 4695, "segmentation": {"size": [512, 512], "counts": "]Vf31m?3YON\\A4b>N\\A5`>O\\A4ME[>9bAGM>3E[>9bA81A[>R1bAQO[>[1N3L3N3M2O2O010O010M2N2N3O0O0N0100nN\\BJd=6_BH`=8bBH]=7fBF\\=7gBG[=7hBF[=7gBF\\=;dBYO@0]=JRCl01XOB0e_BXOAOR>i0]BVODNo=n0^BQOi=R1?3N3O0010N1N3M2N3N100O2M2N3M2Fh@J[?3h@I[?4;M2NU^m2"}, "image_id": 774, "id": 13060}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 59.0, 21.0, 57.0], "area": 938, "segmentation": {"size": [512, 512], "counts": "k1f1Z>10O000000010O0000010O00000ROjA2V>IPB6Q>ESB7Q>DTB7Q>ESB7n>J5LX]e7"}, "image_id": 774, "id": 13061}, {"iscrowd": 0, "category_id": 1, "bbox": [493.0, 69.0, 19.0, 15.0], "area": 182, "segmentation": {"size": [512, 512], "counts": "]bf71l?4L3N2010O00010O01O01O01O01O01O01O00fM"}, "image_id": 774, "id": 13062}, {"iscrowd": 0, "category_id": 1, "bbox": [295.0, 85.0, 89.0, 165.0], "area": 4548, "segmentation": {"size": [512, 512], "counts": 
"Sgc41l?4L3e@Ik>9SAJi>:SAIj>:TAHj>;RAIn>d000010O010O00N3L3O101UOiNkBW1R=mNkBV1R=lNkBW1R=lNkBT1V=oN`BD0\\1`=SO^BCO[1d=h03M4M2M310O00010O01SORNgCl1WSDe0EjN[e1010O0010O0010O0010O0010O0010O0010O001^NhA[1W>cNlA\\1\\>1O010O01O01O010O01O010G_AQOa>l0bARO`>l0cAPOa>l0;N3L3N2O20O01Dg@NZ?0h@NZ?Oi@N_[a5"}, "image_id": 774, "id": 13064}, {"iscrowd": 0, "category_id": 1, "bbox": [422.0, 110.0, 63.0, 60.0], "area": 1896, "segmentation": {"size": [512, 512], "counts": "oSc64i?3N3L3N2O2O010O01O01O010O01O01O01O01ONO3M3N3L301O0bAWOe=i0XBZOi=f0TB]Ok=c0RB@o=`0nACQ>=lAFT>:jAHW>8eALZ>m010O00010O01O01O0N30O001N1M3N3L3N3L3M3N3L3N2M4M2M4L3N2M4M2MY\\="}, "image_id": 774, "id": 13065}, {"iscrowd": 0, "category_id": 1, "bbox": [223.0, 113.0, 38.0, 52.0], "area": 1126, "segmentation": {"size": [512, 512], "counts": "jd_34c?Mc@5[?Mb@6[?Mc@6Z?9M4M2M4M2N2M4M2N3O01O010O01O01OO0N010O03N2M4M2M3N3L3N3L3N2N3L3N3L3N\\\\m3"}, "image_id": 774, "id": 13066}, {"iscrowd": 0, "category_id": 1, "bbox": [167.0, 130.0, 35.0, 56.0], "area": 1193, "segmentation": {"size": [512, 512], "counts": "Uec21l?3N3L3e@G^>2cA9MG^>3bA9LH_>1bA:MG^>3bAd0\\>b0N110O00010O010O01O01O010OM4M2N2M4O001POYAc0i>ZOZAc0i>[OYAc0T?L3N3H`@Kc?27N\\kj4"}, "image_id": 774, "id": 13067}, {"iscrowd": 0, "category_id": 1, "bbox": [374.0, 159.0, 83.0, 68.0], "area": 2940, "segmentation": {"size": [512, 512], "counts": "SVk53k?3L3N2M4M2M4M2M3O2O010O00010O010O00010O010O00010O010O0O1M4M2M4M2M3N3L3N3N10010O010O00010O010O00010O010O00010OeNhAn0Y>nNjAS1U>kNmAU1S>hNQBW1\\>10O01O01O010O00010O010O0PO[Ae0e>YO]Ah0c>TOaAj0`>TObAj0l>L3N2M4M2M4M2M3NjYk0"}, "image_id": 774, "id": 13068}, {"iscrowd": 0, "category_id": 1, "bbox": [477.0, 162.0, 35.0, 58.0], "area": 1375, "segmentation": {"size": [512, 512], "counts": "me^71l?3M3N3m@HY>;dAIY>9dAJY>:dAHY>;dAIY>:cAIZ>:dAHY>R10O010O00010O01O01O010O000M4M2M4M2M3N3L3O10O2M2O2iJ"}, "image_id": 774, "id": 13069}, {"iscrowd": 0, "category_id": 1, "bbox": [188.0, 172.0, 49.0, 37.0], "area": 1172, "segmentation": {"size": [512, 512], "counts": "SVn22l?3L3M3N3L3N3L3M301O01O010O01O01OO2O0010O00010ON2M1102O01O01O010O01O01O010O00010ZOo@;Q?CQA>n>_OUAa0l>\\OWAa0U?M3M4M2M4MSZY4"}, "image_id": 774, "id": 13070}, {"iscrowd": 0, "category_id": 1, "bbox": [350.0, 226.0, 80.0, 62.0], "area": 2749, "segmentation": {"size": [512, 512], "counts": "RX_54j?2M4M2M3N3M2M4M2M3N3L3N3L30010O0010O0010O0010O0010O0010O0010O0010O010O00010O010O00010O010O00010O010O00010O010O00010O010O00010O010O0O2L3N2M4M2M4M2M3N3L3N3L3N2MWhX1"}, "image_id": 774, "id": 13071}, {"iscrowd": 0, "category_id": 1, "bbox": [452.0, 233.0, 60.0, 88.0], "area": 2776, "segmentation": {"size": [512, 512], "counts": "RXR71l?3N3M2WAIg=:VBIi=7VBJj=6UBKl=5RBMm=3RBNo=1PB3m=NQB4n=LQB5P>JoA7Q>GPB;P>BQB?o=^OTBc0k=]OTBd0F[Og=4`Bd0FZOh=4`Bd0FZOg=6_Bc0G[Og=4`BP1]=TObBm0[=UOfBj0X=YOgBh0Y=WOgBi0Y=WOdBl0\\=UOaBn0_=QO_BQ1a=h00O010O010O0001ONOGaBTN`=l1bBRN^=n1eBoM[=Q2gBmM[=P2=M2N3L3N3M2N2M4O001mN]Ag0c>WO`Af0c>VO`Ah0b>VOaAf0n>N3M2N2M4MUH"}, "image_id": 774, "id": 13072}, {"iscrowd": 0, "category_id": 1, "bbox": [503.0, 303.0, 9.0, 29.0], "area": 138, "segmentation": {"size": [512, 512], "counts": "Wjk72k?3M4L3M3M4L3M3M4`F"}, "image_id": 774, "id": 13073}, {"iscrowd": 0, "category_id": 1, "bbox": [483.0, 3.0, 29.0, 42.0], "area": 990, "segmentation": {"size": [512, 512], "counts": "[`a71o?8H7I8B[OQAm0g>8N000O010000000000000O0100000005K1O000O10000NRAROm>n0K"}, "image_id": 776, "id": 13074}, {"iscrowd": 0, "category_id": 1, "bbox": [482.0, 375.0, 30.0, 78.0], "area": 1526, "segmentation": {"size": [512, 512], "counts": 
"e]a7:[?;0000000000RO\\O`Bd0S=\\O\\B2?b0U=^OYB3`0?V=BWB2`0=X=CUB3b09X=GTB2b07Z=8dBH\\=;aBE^=>`BB`=`0^B@a=d0\\B\\Od=f0WB]Oi=b1J100O10000O100O10000YD"}, "image_id": 776, "id": 13075}, {"iscrowd": 0, "category_id": 1, "bbox": [481.0, 463.0, 31.0, 49.0], "area": 1408, "segmentation": {"size": [512, 512], "counts": "[o`77^?;h@F_>S1L4000000000000001O000000M30000000000000000001O0N21O00000`A"}, "image_id": 776, "id": 13076}, {"iscrowd": 0, "category_id": 1, "bbox": [417.0, 64.0, 95.0, 328.0], "area": 18604, "segmentation": {"size": [512, 512], "counts": "ii`64j?2M3N3L3N3M2M3N3L3N3L3N2XBmN`BSA`0k>BTA?k>BSA`0k>=M2N2N000001O01O0DjNoAV1Q>lNmAT1T>mNjAS1V>oNiAP1W>ROgAo0X><00001O000001O000001O0001O0002N2N2N2N2O1N3M2N2N2N2N3N1N1O000000101N3M2N2N2N2N3N1N2N2N2N2NT^j4"}, "image_id": 778, "id": 13082}, {"iscrowd": 0, "category_id": 1, "bbox": [13.0, 43.0, 38.0, 29.0], "area": 613, "segmentation": {"size": [512, 512], "counts": "ha63j?3N3L3N2M4O001O010O01O01O010O01O010O01O010O01O01O010O01O010O01O01_Oc@2l?3L3N3M2M4M2N2M4M2N3L3O20O0010O010O0010O0010O0010O010O0010O0010O010O0010O0010O010O0010O0010O0010O010O0010O0010O010O0010O0010O010O0010O0010O0010O010O0010O0010O010O001O0N2M4M2N3L3N3M2M3N3M2M\\lQ6"}, "image_id": 778, "id": 13084}, {"iscrowd": 0, "category_id": 1, "bbox": [353.0, 115.0, 45.0, 45.0], "area": 1593, "segmentation": {"size": [512, 512], "counts": "Vd`57e?8J5K3M4M2N2N2N2N2O1N101N101N10000O2O0000000000000001O0O10001N101N101N2N2O1N2N2M3N3L4L5Jokh1"}, "image_id": 778, "id": 13085}, {"iscrowd": 0, "category_id": 1, "bbox": [318.0, 123.0, 28.0, 28.0], "area": 459, "segmentation": {"size": [512, 512], "counts": "\\To43k?2M3N3L3N3L3N210O010O00010O010O00010O010OM3N3L3N3L3N3Lokb2"}, "image_id": 778, "id": 13086}, {"iscrowd": 0, "category_id": 1, "bbox": [286.0, 233.0, 204.0, 156.0], "area": 19498, "segmentation": {"size": [512, 512], "counts": "[Y_42l?3L3N3L3N2M4M2M4M2UAUO_>m0^AVO_>n0^AUO_>W1M3N3L3N3L3N3L3N2N3bBiMnO00000000000001O00000000000001O00000000000001O00000000000001O00000000000001O00000F:H800000000001O00000001O0G81A?I70000000000000001O01O0000000000bNfAV1b>000001O000000000I7000000000000000001O0000000001O0dA\\O\\=d0UBKk=U10000000000001O000001H700000000007I00000000000000010O00000000000000000000000I7A?A`0@Wih5"}, "image_id": 779, "id": 13095}, {"iscrowd": 0, "category_id": 1, "bbox": [381.0, 249.0, 58.0, 45.0], "area": 2207, "segmentation": {"size": [512, 512], "counts": "hhn5:Y?=C=K50000010O000000000000000000000010O0000000000000000000000010O000000000000000000000010O6J0000000000001O000O1C=CYXT1"}, "image_id": 779, "id": 13096}, {"iscrowd": 0, "category_id": 1, "bbox": [490.0, 281.0, 20.0, 11.0], "area": 189, "segmentation": {"size": [512, 512], "counts": "iXe7:f?01O0000000000000000000001O00000000000Vg0"}, "image_id": 779, "id": 13097}, {"iscrowd": 0, "category_id": 1, "bbox": [158.0, 326.0, 9.0, 12.0], "area": 105, "segmentation": {"size": [512, 512], "counts": "WZ_2;e?000O10000000000jU\\5"}, "image_id": 779, "id": 13098}, {"iscrowd": 0, "category_id": 1, "bbox": [174.0, 328.0, 9.0, 12.0], "area": 90, "segmentation": {"size": [512, 512], "counts": "aZg22e?9000000000010O0gUT5"}, "image_id": 779, "id": 13099}, {"iscrowd": 0, "category_id": 1, "bbox": [129.0, 461.0, 25.0, 19.0], "area": 412, "segmentation": {"size": [512, 512], "counts": "inP25_?<000001O00000000000001O00000000000001O00000000AQbb5"}, "image_id": 779, "id": 13100}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 463.0, 17.0, 41.0], "area": 555, "segmentation": {"size": [512, 512], "counts": "_>X1h>00000000jN_Am0j>00000000000010O000M3BeQg7"}, 
"image_id": 779, "id": 13101}, {"iscrowd": 0, "category_id": 1, "bbox": [162.0, 480.0, 38.0, 32.0], "area": 970, "segmentation": {"size": [512, 512], "counts": "c_a2=V?=0000001O00000000000000000000I700000000001O000WOPAb0W?0000000000000000000000O1@Xak4"}, "image_id": 779, "id": 13102}, {"iscrowd": 0, "category_id": 1, "bbox": [34.0, 491.0, 16.0, 21.0], "area": 306, "segmentation": {"size": [512, 512], "counts": "m_a03[?b00000000000000000001O0000000Ik`V7"}, "image_id": 779, "id": 13103}, {"iscrowd": 0, "category_id": 1, "bbox": [231.0, 491.0, 51.0, 21.0], "area": 775, "segmentation": {"size": [512, 512], "counts": "foc3:f?000000000000000000000O10000000000001O000000H8M31O00000000000000000000000000001O0000000000000000000Fm`b3"}, "image_id": 779, "id": 13104}, {"iscrowd": 0, "category_id": 1, "bbox": [54.0, 493.0, 30.0, 19.0], "area": 547, "segmentation": {"size": [512, 512], "counts": "c_k0=]?60000000000000000000000000000000000001O00000000000000000Ii`e6"}, "image_id": 779, "id": 13105}, {"iscrowd": 0, "category_id": 1, "bbox": [106.0, 494.0, 40.0, 18.0], "area": 660, "segmentation": {"size": [512, 512], "counts": "n_e12`?>N200000000000000000000001O0000000000000000000000001O00000000000000000000000Mc`f5"}, "image_id": 779, "id": 13106}, {"iscrowd": 0, "category_id": 1, "bbox": [496.0, 505.0, 16.0, 7.0], "area": 105, "segmentation": {"size": [512, 512], "counts": "k_h75i?20000000000000000001O00000000"}, "image_id": 779, "id": 13107}, {"iscrowd": 0, "category_id": 1, "bbox": [98.0, 0.0, 51.0, 28.0], "area": 730, "segmentation": {"size": [512, 512], "counts": "XPa11n?3N1N3M2N2O2M2N2O2M2N2O2N1O1OO1O1O100O1O1O100O1O1O100O1O1O100O1O1O100O1O1O100O1O1O100O1O1O100O1O1O10P`e5"}, "image_id": 780, "id": 13108}, {"iscrowd": 0, "category_id": 1, "bbox": [363.0, 8.0, 149.0, 134.0], "area": 9136, "segmentation": {"size": [512, 512], "counts": "Tce5Z1f>0BoNmAQ1S>>O1O1O100O1O100O1O100O1O100O1O1O100000O1000000000000000000YNbBl0^=TObBl0^=TObBl0^=TObBl0Y>0000000000000000000000000000000000000000000000000000000000000000000000000006J00000000000000000000000000000000000000000000000O100000M3N2M3M3M3N2M3M3M3M3N2M3M3M3M3N2M3M3M3N2M3M3M3M3N2M3M3M3M3N2M3M3M3M3N2N200001O001O00001O00001OD"}, "image_id": 780, "id": 13109}, {"iscrowd": 0, "category_id": 1, "bbox": [240.0, 272.0, 63.0, 44.0], "area": 1613, "segmentation": {"size": [512, 512], "counts": "`Xh3c0]?00000000000000000000000=C000OiA3U>0gA4U>0hA3U>OiA3U>0gA4U>0hA3U>0hA2V>0gA4U>o0M3L3N3M3L3N3M3L3N3L2OO10O10O10O10O10O10O010O4M2VNeBk0^=QOfBk0^=ROdBl0^=ROeBk0^=QOfBk0^=ROdBl0_=QOdBk0_=ROeBk0^=ROdBl0[>L4M3M2M4M3L3N3M3LbdU4"}, "image_id": 780, "id": 13112}, {"iscrowd": 0, "category_id": 1, "bbox": [467.0, 302.0, 31.0, 56.0], "area": 1593, "segmentation": {"size": [512, 512], "counts": "`iY7f1Z>000000000000000000000N2000000002N0000000GkAgNU>Y1900000000000000Kef6"}, "image_id": 780, "id": 13113}, {"iscrowd": 0, "category_id": 1, "bbox": [399.0, 306.0, 37.0, 53.0], "area": 1752, "segmentation": {"size": [512, 512], "counts": "fiW62n?i0WO000L40>B008H000000000000000000000000000000000000000000000000000000GQ1XOfeU1"}, "image_id": 780, "id": 13114}, {"iscrowd": 0, "category_id": 1, "bbox": [111.0, 316.0, 77.0, 88.0], "area": 3124, "segmentation": {"size": [512, 512], "counts": "Wjg12n?2N3L4M3M3L4M2M4M3M3L2OO10O10O10O10O10O2O3M3L4M3M2M4M3L4M3M3L40O010000000O1M3L3NO10O10O10O01000O010O4M3M3L3N3M3L4M3M3L3N3M3L2O00O01000O0102M4M3M3L4M3M2MlcQ5"}, "image_id": 780, "id": 13115}, {"iscrowd": 0, "category_id": 1, "bbox": [247.0, 324.0, 20.0, 25.0], "area": 463, "segmentation": {"size": [512, 
512], "counts": "Tjk3>b?00;E0000000000000000000000000000000?A]Uj3"}, "image_id": 780, "id": 13116}, {"iscrowd": 0, "category_id": 1, "bbox": [61.0, 362.0, 52.0, 44.0], "area": 1432, "segmentation": {"size": [512, 512], "counts": "gkn01o?4L3L4M3M2M10002M4M3M3L3NO10O10O10O10O10O10O10O10O10O10O10O2O3M2M1000O02O2N0O04M3M3L4M3M3L4M3M4K4M3M3LbSW6"}, "image_id": 780, "id": 13117}, {"iscrowd": 0, "category_id": 1, "bbox": [29.0, 369.0, 19.0, 23.0], "area": 286, "segmentation": {"size": [512, 512], "counts": "ek>3m?3L4M3M3L5L000O01000O010003L5L3M3L4MocW7"}, "image_id": 780, "id": 13118}, {"iscrowd": 0, "category_id": 1, "bbox": [414.0, 370.0, 32.0, 64.0], "area": 1784, "segmentation": {"size": [512, 512], "counts": "c[_6l0T?000000a0_O00000000b0^O00000O10000001O00000000000000000O1000000000]dP1"}, "image_id": 780, "id": 13119}, {"iscrowd": 0, "category_id": 1, "bbox": [300.0, 371.0, 18.0, 10.0], "area": 151, "segmentation": {"size": [512, 512], "counts": "d[f49g?0000O10000000000000000000O1000000]dP3"}, "image_id": 780, "id": 13120}, {"iscrowd": 0, "category_id": 1, "bbox": [494.0, 375.0, 18.0, 61.0], "area": 765, "segmentation": {"size": [512, 512], "counts": "g[g7?a?4L001O1O1O001O1m@7m=n00000000000000000YD"}, "image_id": 780, "id": 13121}, {"iscrowd": 0, "category_id": 1, "bbox": [236.0, 388.0, 66.0, 71.0], "area": 2526, "segmentation": {"size": [512, 512], "counts": "_]f33m?;E1O000000000SOFTBc0c=BWBc0e=]O[Bc0e=]O[Bc0d=^O\\Ba0e=_O[Ba0e=_O[Ba0a=C_B=]=HbB8]=4XBLh=V1000TOWBAi=?WBAi=?VBBj=>VBAk=?UBAi=a0WB_Oi=a0WB_Oi=a0WB_Oh=b0XB^Oh=a0YBPO4Nc=R1YBPOV>P1jAPOV>P1jAoNW>Q1hAPOX>P1mAkNS>U1<00000O11N2O000000000000000000O01000000000007I;E3M0O1000008HnbX3"}, "image_id": 780, "id": 13122}, {"iscrowd": 0, "category_id": 1, "bbox": [183.0, 393.0, 19.0, 16.0], "area": 192, "segmentation": {"size": [512, 512], "counts": "^lk23l?4M3M2M1000O01000O010O01000O3N3M3L3N]cj4"}, "image_id": 780, "id": 13123}, {"iscrowd": 0, "category_id": 1, "bbox": [6.0, 394.0, 44.0, 51.0], "area": 1349, "segmentation": {"size": [512, 512], "counts": "U]31o?3M4K4M3M4L3YO\\OkAh0R>[OkAe0T>_OiAa0W>CdA>\\>EaA:_>d0000O01000O0100000O01000O01000O2O4L3M3L5L3M3L4M000O10O12Kb@Cb?:5M4KdbV7"}, "image_id": 780, "id": 13124}, {"iscrowd": 0, "category_id": 1, "bbox": [111.0, 404.0, 24.0, 21.0], "area": 315, "segmentation": {"size": [512, 512], "counts": "jlg14l?3L4M3M2M0100O01000O01000O01000O01000O4M3M3L4MPSl5"}, "image_id": 780, "id": 13125}, {"iscrowd": 0, "category_id": 1, "bbox": [62.0, 408.0, 39.0, 55.0], "area": 1115, "segmentation": {"size": [512, 512], "counts": "n\\o03m?3L4M3M3L4M3M3M3L3N003L4M3M3L4M3M2M10O4M3M3L4M3M4K4M3M3L10000O2O3M3L4M3M3L4MiQ]6"}, "image_id": 780, "id": 13126}, {"iscrowd": 0, "category_id": 1, "bbox": [202.0, 417.0, 31.0, 45.0], "area": 1170, "segmentation": {"size": [512, 512], "counts": "T]U32m??0000000O10O100000J600000000000000000O1006J0O100000000000O100000000000O101O8H00000000000000001O=BkPl6"}, "image_id": 780, "id": 13131}, {"iscrowd": 0, "category_id": 1, "bbox": [246.0, 476.0, 30.0, 15.0], "area": 391, "segmentation": {"size": [512, 512], "counts": "n^k3b?000000000000000"}, "image_id": 780, "id": 13136}, {"iscrowd": 0, "category_id": 1, "bbox": [222.0, 511.0, 11.0, 1.0], "area": 11, "segmentation": {"size": [512, 512], "counts": "o__31o?0000000000000000000QP[4"}, "image_id": 780, "id": 13137}, {"iscrowd": 0, "category_id": 1, "bbox": [318.0, 0.0, 50.0, 48.0], "area": 1195, "segmentation": {"size": [512, 512], "counts": 
"jPo42m?2N2M2O2N2N2M3N1O2N2N2M3N1PAUOj>m0SAUOl>Q1M3N1O000O3N101O1O1O1OBaA[O_>d0cA[O]>d0eAZO\\>e0eAZO\\>d0gAZOZ>e0hAXOZ>g0hAWOY>h0a0M3N2N1O2M3N2N1O0O0100002L3O2N2NmoW2"}, "image_id": 782, "id": 13138}, {"iscrowd": 0, "category_id": 1, "bbox": [270.0, 48.0, 49.0, 56.0], "area": 1339, "segmentation": {"size": [512, 512], "counts": "hRW41n?2M3M2O2M3N1N3N2M2O2M3N1N3N2M2N3N1N3N2M2O2M3N0O010O02N2O2M3N1N3N2M2O2M3N1N3N2M3M3N2M2O2M3N1N3N2MS^P3"}, "image_id": 782, "id": 13139}, {"iscrowd": 0, "category_id": 1, "bbox": [349.0, 52.0, 22.0, 20.0], "area": 202, "segmentation": {"size": [512, 512], "counts": "ka^51m?3N2N2M2O2000O0100000O01000O01000O1M2O2M3NQ^V2"}, "image_id": 782, "id": 13140}, {"iscrowd": 0, "category_id": 1, "bbox": [129.0, 53.0, 19.0, 21.0], "area": 182, "segmentation": {"size": [512, 512], "counts": "SbP22m?2N2M3N1O2N2N0O01000000O2O2N2N1N3N2NXne5"}, "image_id": 782, "id": 13141}, {"iscrowd": 0, "category_id": 1, "bbox": [296.0, 95.0, 67.0, 55.0], "area": 1893, "segmentation": {"size": [512, 512], "counts": "UTd42m?2M3N1N3N2M2O2M3N1N3N2M3M2O2M3N1N3N2M2O2M3NO010O011N2O2M2O2M3N1N3N1N3N20O10O01000O0100O01000O1O0N300O010ON3M3N1N3M2O2M3M2O2M2N3N1N3M_\\Z2"}, "image_id": 782, "id": 13142}, {"iscrowd": 0, "category_id": 1, "bbox": [110.0, 99.0, 53.0, 47.0], "area": 1373, "segmentation": {"size": [512, 512], "counts": "hSg11n?2M3N1N3N2M2O2N2M2O200N1N3N2M2O2N2000O01000O10O10O10O10O01M3N2M2O2000O010O1N1N3N2M3N1N3[Om@:U?Dl@;U?Dm@9V?Dm@:]?N3N2M2O2MW\\^5"}, "image_id": 782, "id": 13143}, {"iscrowd": 0, "category_id": 1, "bbox": [190.0, 117.0, 81.0, 110.0], "area": 4305, "segmentation": {"size": [512, 512], "counts": "_To22l?3N1N3N2M2N3N2M2OO1110O10O10O010O10O10M3N1N3\\BVOo;l0oCUOo;m0oCVOn;m0PDUOn;m0oCUOo;m0PDUOm;n0PDTOo;m0PDUOn;m0oCVOPaCC`<<^CGa<:]CHc<7[CKf<9TCIk^NXB[1W>N2O1N6UOZA3g>M[AOg>1YANh>JUAN45h>NaA0`>0`ANd>0]ANcih3"}, "image_id": 782, "id": 13144}, {"iscrowd": 0, "category_id": 1, "bbox": [344.0, 124.0, 66.0, 85.0], "area": 2318, "segmentation": {"size": [512, 512], "counts": "PV\\53l?2M2O2M3N1MDb@=\\?De@:3N2M2O2M3N1N3N2M2O2M3N1Jb@Ha?66M2O2Mhkb1"}, "image_id": 782, "id": 13145}, {"iscrowd": 0, "category_id": 1, "bbox": [103.0, 135.0, 44.0, 40.0], "area": 828, "segmentation": {"size": [512, 512], "counts": "gdc11m?3N1O2M3N1N3N2N2M2O2M3O00100000O01000O10O1000O10O10O1000O10O1000O01M3N2M2N3N2M3N1N3N2MT[f5"}, "image_id": 782, "id": 13146}, {"iscrowd": 0, "category_id": 1, "bbox": [289.0, 191.0, 26.0, 23.0], "area": 310, "segmentation": {"size": [512, 512], "counts": "Zf`41m?3N1N3N1N3N2M2010O10O10O010O10O10O010O0N3N3L3M2O2MgYR3"}, "image_id": 782, "id": 13147}, {"iscrowd": 0, "category_id": 1, "bbox": [311.0, 198.0, 46.0, 51.0], "area": 1012, "segmentation": {"size": [512, 512], "counts": "\\gk42m?2M3N1O2M3N2N1N3N2N2M2O2N2M3N1O1N10O10O10O012M2O00O01000O010002M3N2N1N3N2N1N3N2N2M2O2N2M3N`Y]2"}, "image_id": 782, "id": 13148}, {"iscrowd": 0, "category_id": 1, "bbox": [268.0, 228.0, 40.0, 36.0], "area": 751, "segmentation": {"size": [512, 512], "counts": "gWV42l?3N1N3M2O2M3M2O2M3N1N3N20O0100O01N2M2100O01000O2O1NJm@_OT??m@AT?9VACl>:?N1N3N2M2OahU3"}, "image_id": 782, "id": 13149}, {"iscrowd": 0, "category_id": 1, "bbox": [313.0, 250.0, 15.0, 14.0], "area": 112, "segmentation": {"size": [512, 512], "counts": "Phl41n?2M3N1N3O1O01000O10N2N1N3M2ORhk2"}, "image_id": 782, "id": 13150}, {"iscrowd": 0, "category_id": 1, "bbox": [397.0, 253.0, 65.0, 56.0], "area": 1902, "segmentation": {"size": [512, 512], "counts": 
"fhV61n?1N3U@Lg?8GHg@9X?Ie@:X?If@9X?9N1N3N2N20O1000O10O10N2M3N1O2M3N2N10100O0100000O0100000O0100000O01N2N2M2O2N2M3N1N3N2N2M2O2N0O100O3N1O0O101N3M3N2M2Oagh0"}, "image_id": 782, "id": 13151}, {"iscrowd": 0, "category_id": 1, "bbox": [311.0, 266.0, 51.0, 47.0], "area": 1031, "segmentation": {"size": [512, 512], "counts": "hhk43l?2N2M2O2M3N2M2O2M3N110O1000O010O1Oj@ZOR?g0l@ZOU?i000O0100_Ol@5T?Il@8T?Fm@GPA;n>Go@Fo@GPA9P?;0O12M2O2N2M3N002N2M2O2N000001N3N2N2N20M11O2N2N2N2M3N1O2N2N2M3N2N1O2N2M3N2N2N1O2MmT>"}, "image_id": 782, "id": 13155}, {"iscrowd": 0, "category_id": 1, "bbox": [412.0, 373.0, 23.0, 22.0], "area": 252, "segmentation": {"size": [512, 512], "counts": "o[^63l?2M2N3N1N3M20100O01Ba@:^?Dd@3l?2N1O2M3N2N1N3N2N2000O010000000O0100000O0O2N2M3N2N1O2M3N2N1OkjQ7"}, "image_id": 786, "id": 13164}, {"iscrowd": 0, "category_id": 1, "bbox": [65.0, 160.0, 69.0, 66.0], "area": 2002, "segmentation": {"size": [512, 512], "counts": "ieP11n?2M3N1O2N2M3N1O2M3N2N2N1N3N2N2M2O2N2N2N20O1000O10O1000O1000O10O100000O0100000O10O100000kNZAn0f>PO]AP1c>nN_AR1h>0O0100000O01M3N2N1N3N2N2N1N3N2N2M2O2N2M3N1Ofil5"}, "image_id": 786, "id": 13165}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 199.0, 207.0, 263.0], "area": 29719, "segmentation": {"size": [512, 512], "counts": "j6d49aKd:a4YEbKd:a4ZE`Ke:b4YE`Ke:o4N1N01000O0100002M3N1O200000O0100000O10O1000O10O1N2N2SFZJ]9g5bFZJ\\9i5bFYJ\\9i5aFZJ_95]FR52kJa91_FS5OmJb9O`FU5LnJd9KbFe5^9XJeFe5^9YJdFe5m9N1N3N2N2000O10O1000O10O100000O10O1000N2N1O2M3N2SFXJ_9i5`FYJ]9:ZFR57eJ_98\\FS52hJb93]FV5OiJd9N`FX5KlJd9KcFe5^9YJdFf5]9XJdFf5m9N2N2N2M21000O1000O10O1000O10O100000N2N1O2N2M3N1WFWJZ9k5cFWJ\\9k5bFWJ^96]FU53gJ_93_FV50jJa9NaFX5LlJc9IdFh5]9VJeFh5]9VJeFh5j9O2M3N2N1O2000000O0100000O10O100000O01N1O000O010000O0QKcE^4_:`KcE^4_:_KdE_4^:_KdE^4^:aKdE]4^:`KdE_4^:_KdE^4_:`KcE^4Q;N2N1N3N2N2N1N3N2N2M3N1O2N2M3N2N1O1N1000O010000PM^Cc2a<^MaC_2a<_MbC_2`<_MaC`2`<_MbC^2a<`MaC^2a<_MbC_2`<_MbC_2P=N3N2N2N2M2O2N2M3N1O2N2M3N2N1N3N2N2N0O0100000O0QO^Ac0b>]O_Ab0a>]ObA`0a>^OaA`0a>^OaA`0a>^OaA`0`>^ObA`0Q?N2N2N1N3N2N2M3NnTh4"}, "image_id": 786, "id": 13166}, {"iscrowd": 0, "category_id": 1, "bbox": [172.0, 249.0, 48.0, 49.0], "area": 1222, "segmentation": {"size": [512, 512], "counts": "fXf22m?2M3N1O2M3N2N1N3N2N2N1N3N2N2M2O2N2M3N1O2M3N20O010O1N2M2O2N2M3N1O2M3N2N1O2M3N2N1N3N2N2M2O2N2M3Nega4"}, "image_id": 786, "id": 13167}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 405.0, 85.0, 107.0], "area": 5640, "segmentation": {"size": [512, 512], "counts": "e<[3fO2N1O2N1O1O2N1O1O2N1O2N1O1O2N1O1O0000O1O100O1O1O100O1O1O100O1O1O100O1O100O1O1O100O1O1O100O1O100O1O1O100O2N2N3N1N3M2O1Nho`4"}, "image_id": 787, "id": 13170}, {"iscrowd": 0, "category_id": 1, "bbox": [264.0, 0.0, 37.0, 19.0], "area": 389, "segmentation": {"size": [512, 512], "counts": "PPT41o?2N1O2N1O1O2N1O2N1O1O2N1O1OO100O1O100O1O1O100O1O100O1O1O100O1O100O1O1O10P`Y3"}, "image_id": 787, "id": 13171}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 12.0, 138.0, 128.0], "area": 11001, "segmentation": {"size": [512, 512], "counts": "0000000I7000000PNIWD7i;IWD7i;P200000000000000000000000000000000000000000iM^D0b;0^D0b;0^D0b;0^DcNo<]1j0000000000000000Yge0"}, "image_id": 787, "id": 13178}, {"iscrowd": 0, "category_id": 1, "bbox": [97.0, 282.0, 42.0, 127.0], "area": 4116, "segmentation": {"size": [512, 512], "counts": "ei`1`1`>=C000000000000000UOR2iN00000000000000000000000000000000gMbD0^;0bD0^;0bD0^;0bD0^;0bD0^;VO\\Ej0d:VO\\Ej0d:VO\\Ej0d:VO\\Ej0d:VO\\Ej0d:VO\\Ej0d:VO\\Ej0d:VO\\Ej0d:VO\\Ej0d:VO\\Ej0jaj5"}, "image_id": 787, "id": 13179}, {"iscrowd": 0, "category_id": 1, "bbox": 
[28.0, 291.0, 33.0, 36.0], "area": 1088, "segmentation": {"size": [512, 512], "counts": "[Y>b0W?706J000O1000000000000000000000003M1O00000000000000000000000007IfVQ7"}, "image_id": 787, "id": 13180}, {"iscrowd": 0, "category_id": 1, "bbox": [153.0, 304.0, 40.0, 64.0], "area": 1906, "segmentation": {"size": [512, 512], "counts": "di\\2U1k>=C000fAbNR>^1nAdNo=g100O1000000001OM3000F:00000000C=000000000004L000000000000000000000000\\Vo4"}, "image_id": 787, "id": 13181}, {"iscrowd": 0, "category_id": 1, "bbox": [238.0, 305.0, 30.0, 46.0], "area": 1243, "segmentation": {"size": [512, 512], "counts": "oYg3j0h>>0000000000001O0001O0000004L00001O0000000000000000000000^OPgi3"}, "image_id": 787, "id": 13182}, {"iscrowd": 0, "category_id": 1, "bbox": [357.0, 312.0, 44.0, 59.0], "area": 1792, "segmentation": {"size": [512, 512], "counts": "hib5W1i>0000d0\\O000000000000000000B>000000000000J6000002N000000000000000000001B=000000000000000UVg1"}, "image_id": 787, "id": 13183}, {"iscrowd": 0, "category_id": 1, "bbox": [289.0, 314.0, 31.0, 42.0], "area": 1212, "segmentation": {"size": [512, 512], "counts": "ki`4j0V?6J2N0000000000007H1000000000000000000000000000000000000007Ioeo2"}, "image_id": 787, "id": 13184}, {"iscrowd": 0, "category_id": 1, "bbox": [11.0, 315.0, 10.0, 22.0], "area": 220, "segmentation": {"size": [512, 512], "counts": "ki5f0Z?00000000000000000UVe7"}, "image_id": 787, "id": 13185}, {"iscrowd": 0, "category_id": 1, "bbox": [57.0, 329.0, 7.0, 8.0], "area": 50, "segmentation": {"size": [512, 512], "counts": "]jl04h?40000001O00feo6"}, "image_id": 787, "id": 13186}, {"iscrowd": 0, "category_id": 1, "bbox": [35.0, 337.0, 20.0, 22.0], "area": 370, "segmentation": {"size": [512, 512], "counts": "aja0?a?00000000007I000000000000000000I700000_UT7"}, "image_id": 787, "id": 13187}, {"iscrowd": 0, "category_id": 1, "bbox": [234.0, 358.0, 42.0, 61.0], "area": 1944, "segmentation": {"size": [512, 512], "counts": "i[e3Z1f>000000000000000000000000000000000]OTORBl0n=c00000000000000000000000000000000jNV100O10000000jde3"}, "image_id": 787, "id": 13188}, {"iscrowd": 0, "category_id": 1, "bbox": [14.0, 361.0, 43.0, 31.0], "area": 1159, "segmentation": {"size": [512, 512], "counts": "Y[7i0W?006J00000000000000000000000000J6000000000000000000000000000000000000000000000000000gTS7"}, "image_id": 787, "id": 13189}, {"iscrowd": 0, "category_id": 1, "bbox": [299.0, 362.0, 20.0, 8.0], "area": 160, "segmentation": {"size": [512, 512], "counts": "Zke48h?0000000000000000000000000000000000000fTP3"}, "image_id": 787, "id": 13190}, {"iscrowd": 0, "category_id": 1, "bbox": [189.0, 367.0, 23.0, 21.0], "area": 472, "segmentation": {"size": [512, 512], "counts": "`kn20000000000000Ydk7"}, "image_id": 787, "id": 13193}, {"iscrowd": 0, "category_id": 1, "bbox": [286.0, 398.0, 54.0, 59.0], "area": 2064, "segmentation": {"size": [512, 512], "counts": "\\]_42P?n0000000000000001O00000000000000003M00000000\\A\\Ol=d0TB\\Ol=d0TB\\Ol=d0TB\\Ol=d0TB\\Ol=g0gAEW>;iAEW>;iAEW>;iAEW>;iAEW>;iAEW>R100000000000000004L0001O0000E;D<00000000^Olce2"}, "image_id": 787, "id": 13194}, {"iscrowd": 0, "category_id": 1, "bbox": [15.0, 406.0, 43.0, 26.0], "area": 1069, "segmentation": {"size": [512, 512], "counts": "fl7j0V?000000000000O10000000000000000000000000000000000000000000000000000000000000000000=CmbR7"}, "image_id": 787, "id": 13195}, {"iscrowd": 0, "category_id": 1, "bbox": [174.0, 410.0, 31.0, 21.0], "area": 632, "segmentation": {"size": [512, 512], "counts": "j\\g22n?c0]O000000000000000000000000000000000000000000000000000000000VSi4"}, "image_id": 787, "id": 
13196}, {"iscrowd": 0, "category_id": 1, "bbox": [346.0, 411.0, 25.0, 24.0], "area": 527, "segmentation": {"size": [512, 512], "counts": "Z]]58h?00000E;L400000001O00000000000000000000000000000USV2"}, "image_id": 787, "id": 13197}, {"iscrowd": 0, "category_id": 1, "bbox": [450.0, 413.0, 29.0, 19.0], "area": 495, "segmentation": {"size": [512, 512], "counts": "T]Q7a101O01OnAWNm=n101RBoMl=R20nNTB0l=MWB3j=IYB6h=F\\B;c=BaB=`=_OcBa0]=\\OfBd0Z=YOjBf0W=UOmBk0S=ROPCn0R>O00N2O1001O00001O00001O00001TOo@g0U?0001O0000001O00001O00001O00001O00001O0000001O00001O00001O00001O00001O0000001O00001O00001O00001O00001O0000001O00001O000KZ@0k?0001O0000Q`a2"}, "image_id": 787, "id": 13202}, {"iscrowd": 0, "category_id": 1, "bbox": [75.0, 441.0, 31.0, 22.0], "area": 659, "segmentation": {"size": [512, 512], "counts": "imU1f0Z?00000000000000O10000000000000000000000000000000000000000000WbZ6"}, "image_id": 787, "id": 13203}, {"iscrowd": 0, "category_id": 1, "bbox": [27.0, 442.0, 20.0, 14.0], "area": 280, "segmentation": {"size": [512, 512], "counts": "jm=>b?0000000000000000000000000000000000000VRX7"}, "image_id": 787, "id": 13204}, {"iscrowd": 0, "category_id": 1, "bbox": [340.0, 446.0, 163.0, 66.0], "area": 5473, "segmentation": {"size": [512, 512], "counts": "o^Z53i?5L3L4M3M4K4N201O00010O0001O01O01O01O0001O01O01O01n@VOl>o000M4N100000N3N10010O0000010O00010O000010O000010O00010O000010O000010O000O2K4M3O10N3K4N2010O0000010O00010O00010O000001YAoN^>P1_ATO`>V10010O0000010O00WOaA1`>KcA7[>FhA:X>BmA=T>_OoAa0Q>\\ORBd0e>01O00001O00001O0000001O00001O00001O00M3M3L4M3M3L4M3L4M3M3L4M3N21O0000N2L4M3O100001O00001O0YNSB[1m=aNXB^1h=_N[Ba1R>01L3M3L4M4L3M3M4L3L4M3M4L3M3MbQ4"}, "image_id": 787, "id": 13205}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 459.0, 6.0, 10.0], "area": 60, "segmentation": {"size": [512, 512], "counts": "[>:f?000000000eal7"}, "image_id": 787, "id": 13206}, {"iscrowd": 0, "category_id": 1, "bbox": [199.0, 492.0, 25.0, 20.0], "area": 358, "segmentation": {"size": [512, 512], "counts": "noS32k?3L4M3M3L4O11O0000001O00001O00001O0000001M2M3L5La`_4"}, "image_id": 787, "id": 13207}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 497.0, 50.0, 15.0], "area": 644, "segmentation": {"size": [512, 512], "counts": "b?>b?00000000000O10000000000000000000000000000000000000000004L0000000000000000O100000000000000009G0000S`V7"}, "image_id": 787, "id": 13208}, {"iscrowd": 0, "category_id": 1, "bbox": [357.0, 499.0, 39.0, 13.0], "area": 279, "segmentation": {"size": [512, 512], "counts": "mob53j?3L4N200001O0000N2000000001O00001E^@6f?0001O00001O0000001O00001O00001O00001O0000Q`i1"}, "image_id": 787, "id": 13209}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 69.0, 126.0], "area": 8513, "segmentation": {"size": [512, 512], "counts": "0n3R<00000000000000000000000000000000O10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000j0VO]2cMi\\m6"}, "image_id": 788, "id": 13210}, {"iscrowd": 0, "category_id": 1, "bbox": [128.0, 0.0, 40.0, 6.0], "area": 240, "segmentation": {"size": [512, 512], "counts": "PPP26j?00000000000000000000000000000000000000000000000000000000000000000000000000000PP\\5"}, "image_id": 788, "id": 13211}, {"iscrowd": 0, "category_id": 1, "bbox": [372.0, 0.0, 74.0, 47.0], "area": 3424, "segmentation": {"size": [512, 512], "counts": "PPj57i?X1hN000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000>BboP1"}, "image_id": 788, "id": 13212}, {"iscrowd": 0, "category_id": 1, "bbox": [112.0, 10.0, 
92.0, 67.0], "area": 5431, "segmentation": {"size": [512, 512], "counts": "\\Ph1a0_?U1kN9G00000O100000000000O10000000000000000000000000000000000000000000000000000000000000O100000000000O14L00O3N9G000000000000000000000000000000000000000000000000000000000000000000000OU1lNWni4"}, "image_id": 788, "id": 13213}, {"iscrowd": 0, "category_id": 1, "bbox": [275.0, 42.0, 18.0, 33.0], "area": 594, "segmentation": {"size": [512, 512], "counts": "ZaY4Q1o>000000000000000000000000000000000f^]3"}, "image_id": 788, "id": 13214}, {"iscrowd": 0, "category_id": 1, "bbox": [363.0, 90.0, 55.0, 55.0], "area": 2249, "segmentation": {"size": [512, 512], "counts": "ice5g0Y?0000000000000000000000000000000000000000000QOo0000000000000000000000000000001O000000000000000000000000000000WOom^1"}, "image_id": 788, "id": 13215}, {"iscrowd": 0, "category_id": 1, "bbox": [198.0, 141.0, 23.0, 16.0], "area": 312, "segmentation": {"size": [512, 512], "counts": "]TS3`0`?000000000000000000000000000000N2N2N2N2N2N2Ne[a4"}, "image_id": 788, "id": 13216}, {"iscrowd": 0, "category_id": 1, "bbox": [357.0, 193.0, 18.0, 12.0], "area": 216, "segmentation": {"size": [512, 512], "counts": "Qfb5000000000000000000000000001O000000000000000000006J0000000000001O000000000Lc``6"}, "image_id": 788, "id": 13218}, {"iscrowd": 0, "category_id": 1, "bbox": [114.0, 494.0, 21.0, 13.0], "area": 265, "segmentation": {"size": [512, 512], "counts": "^_i1=c?000000000000000000000001O00000000000000aPl5"}, "image_id": 788, "id": 13219}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 506.0, 16.0, 6.0], "area": 96, "segmentation": {"size": [512, 512], "counts": "j?6j?00000000000000000000000000000V`g7"}, "image_id": 788, "id": 13220}, {"iscrowd": 0, "category_id": 1, "bbox": [383.0, 75.0, 22.0, 16.0], "area": 180, "segmentation": {"size": [512, 512], "counts": "bbo51m?3M2N3N11O010O010O01O010O01O010O010O0O1N3L`]e1"}, "image_id": 790, "id": 13221}, {"iscrowd": 0, "category_id": 1, "bbox": [126.0, 98.0, 50.0, 63.0], "area": 944, "segmentation": {"size": [512, 512], "counts": "\\To11n?2V@Nb?5[@Nc?9N1O2M10002M2O2N2N2M3N1O2N2TAnNf>h0ZAEd>:_AF`>9bAG^>7dACHKc>c0fAAHKb>c0iA@[>`0gA]OZ>c0`010O1000O01000O10O1000002M3N2N1O2M3N2N2N11000O1N1OelW5"}, "image_id": 790, "id": 13222}, {"iscrowd": 0, "category_id": 1, "bbox": [480.0, 410.0, 32.0, 41.0], "area": 868, "segmentation": {"size": [512, 512], "counts": "d]`72l?3L3N3L3N2M4M2M4M2M3O20O01O01O010O01O01O010O01O01O010O01O01O01mB"}, "image_id": 790, "id": 13223}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 35.0, 39.0], "area": 975, "segmentation": {"size": [512, 512], "counts": "8o0Q?00000000000O010000000I700000O1000000B[AAe>?>00000000O100000000000008G8Ia_^7"}, "image_id": 791, "id": 13224}, {"iscrowd": 0, "category_id": 1, "bbox": [40.0, 0.0, 46.0, 38.0], "area": 1341, "segmentation": {"size": [512, 512], "counts": "ZPd03m?7I7I00000O7J5K000GYOYAg0g>900000O1000000000000O1000000000000O10000K50000000000O1O102N1O003M7H8I\\od6"}, "image_id": 791, "id": 13225}, {"iscrowd": 0, "category_id": 1, "bbox": [87.0, 0.0, 41.0, 52.0], "area": 1495, "segmentation": {"size": [512, 512], "counts": "Z`[16j?6@Hl@?m>`0J000000000000O10000000000O10000007I6J6J0000O10000000000O10005K7I6J7I6I7J7Idno5"}, "image_id": 791, "id": 13226}, {"iscrowd": 0, "category_id": 1, "bbox": [128.0, 0.0, 35.0, 64.0], "area": 1553, "segmentation": {"size": [512, 512], "counts": "gPP27X?0RA7g>0QA7i>d0L00000JiNbAX1^>50O100000000005K7I6J00000000O1000005K7I6J7H7J7I7I6J7IV^^5"}, "image_id": 791, "id": 13227}, {"iscrowd": 0, "category_id": 1, "bbox": [163.0, 0.0, 36.0, 77.0], 
"area": 1824, "segmentation": {"size": [512, 512], "counts": "Taa25S?2^A4\\>2^A4\\>2^A4\\>j0NOEdNTB\\1l=iNnAX1R>;00000O10000006J6J6J00000000O106J6J6J6J6I7J6J6J6J6J6I8Ig]l4"}, "image_id": 791, "id": 13228}, {"iscrowd": 0, "category_id": 1, "bbox": [198.0, 0.0, 35.0, 82.0], "area": 1879, "segmentation": {"size": [512, 512], "counts": "YQS32W?2\\A5]>1\\A6^>0\\A6^>j0K0BfNXBZ1g=lNTBT1l=`0000000O0100004L7I6J1O00000003L8I6J6J7I6J6J7H7J7I6J6Jg][4"}, "image_id": 791, "id": 13229}, {"iscrowd": 0, "category_id": 1, "bbox": [378.0, 4.0, 65.0, 54.0], "area": 1803, "segmentation": {"size": [512, 512], "counts": "dPm56j?:E8I0000O10000000000000O1000O10N201O001m@VOn>o0N2O001O1O1O001N2O1O1O001O1N01N2O3M1O0O01O10O102N1N3N0000O0100LQAXOQ?e07N2N2M2O2N2M3N1O2N2M3Nc_R1"}, "image_id": 791, "id": 13230}, {"iscrowd": 0, "category_id": 1, "bbox": [232.0, 9.0, 39.0, 73.0], "area": 1856, "segmentation": {"size": [512, 512], "counts": "ePd31d03c>4VA3c>4VA3c>k0J0GjNhAV1X>80100000000000O0100000000000OiA^NQ>c1nA`NP>g116J6J06J7I2M10005K7I7I7I7H8I7I8Ha]h3"}, "image_id": 791, "id": 13231}, {"iscrowd": 0, "category_id": 1, "bbox": [269.0, 18.0, 47.0, 58.0], "area": 2130, "segmentation": {"size": [512, 512], "counts": "d`V45?Ll>j0J7I6J6J4L0O10O1000000000O14K5L000000000O02O0000000000000O10O1000000000O10O1N4N8G8I8H8H7IQnQ3"}, "image_id": 791, "id": 13232}, {"iscrowd": 0, "category_id": 1, "bbox": [319.0, 21.0, 58.0, 43.0], "area": 1805, "segmentation": {"size": [512, 512], "counts": "n`o49g?8H6J00000O107I4L0000000O1000O100000L400000O0102N00000000O10O1000000000O3N00O1000E\\AZOd>f0;J600O10O1000000000O10O1007I7Im^S2"}, "image_id": 791, "id": 13233}, {"iscrowd": 0, "category_id": 1, "bbox": [501.0, 54.0, 11.0, 51.0], "area": 305, "segmentation": {"size": [512, 512], "counts": "Pcj73i?4K6K4K6K5J5L5J6K4N3YN"}, "image_id": 791, "id": 13234}, {"iscrowd": 0, "category_id": 1, "bbox": [481.0, 125.0, 31.0, 58.0], "area": 883, "segmentation": {"size": [512, 512], "counts": "[d`71n?2N2N2N3M2N2N2O2M2e@]OV?h0N3M2N2N2N200000001O0001O0000WAmNd>S1ZAoNf>W1M2N20000fK"}, "image_id": 791, "id": 13235}, {"iscrowd": 0, "category_id": 1, "bbox": [510.0, 192.0, 2.0, 5.0], "area": 8, "segmentation": {"size": [512, 512], "counts": "QVo73l?2oI"}, "image_id": 791, "id": 13236}, {"iscrowd": 0, "category_id": 1, "bbox": [103.0, 313.0, 41.0, 67.0], "area": 2120, "segmentation": {"size": [512, 512], "counts": "nic1c0]?000000k0UO000000000000000000000000006E?F00000000000000000000000000000000000Z1fN0000mdg5"}, "image_id": 791, "id": 13237}, {"iscrowd": 0, "category_id": 1, "bbox": [150.0, 316.0, 39.0, 64.0], "area": 1848, "segmentation": {"size": [512, 512], "counts": "c[[26a?9G:G8J600001O0000J6G:K400000000000010O000KN610000000001O0001O0000000UOjALV>KTBKU>LTBLT>LWWQ5"}, "image_id": 791, "id": 13238}, {"iscrowd": 0, "category_id": 1, "bbox": [189.0, 317.0, 43.0, 71.0], "area": 2212, "segmentation": {"size": [512, 512], "counts": "kkn23h?5K6K4K5K5K6J5K500010OO1J6K6O00000M4J5L4000N201O01O000001O01O00000001O01O00M3\\NRB20?T>XORB4O?T>XORB303H3X?Gm@4YT\\4"}, "image_id": 791, "id": 13239}, {"iscrowd": 0, "category_id": 1, "bbox": [227.0, 321.0, 55.0, 87.0], "area": 2472, "segmentation": {"size": [512, 512], "counts": "_la31k?4M4L3M3M4L3M3M4FWOWAm0f>8M4M2M3M4L3N30O0010N1M3M4L3M01O002N3N3L3001M2M3M4NO2N3L3CSBdNP>Y1TBcNP>Z1<1O01O001L3M3N3L3M3M4Bl@KW?2l@KW?2l@KX?1Ufb3"}, "image_id": 791, "id": 13240}, {"iscrowd": 0, "category_id": 1, "bbox": [57.0, 328.0, 48.0, 70.0], "area": 1965, "segmentation": {"size": [512, 512], "counts": 
"hjl01o?4L4K6K4L4L3L1000O10O3N4L5K4L4CeNlA`1P>dNlAa1n=;N00000O0100000O010000000O04M5K4L4K5L5K4L4FUAVOl>j0501N5L5K4L4K5LQT[6"}, "image_id": 791, "id": 13241}, {"iscrowd": 0, "category_id": 1, "bbox": [15.0, 349.0, 49.0, 64.0], "area": 1675, "segmentation": {"size": [512, 512], "counts": "]k71o?2N3L3N3L3N3M2M4E[OUAg0h>e0UAXOn>e0:M2M4M2N3L3N3M2Mgco6"}, "image_id": 791, "id": 13242}, {"iscrowd": 0, "category_id": 1, "bbox": [321.0, 366.0, 66.0, 81.0], "area": 2880, "segmentation": {"size": [512, 512], "counts": "cmP52l?2M4_OKSA7k>KSA8j>KSA7j>LTA7i>KUA7i>b0M2O20OCiNQBX1l=kNTBT1j=nNTBT1j=oNSBT1j=nNSBU1j=c0M201O0010O0N3M1N010000001O3O01O010O010O010OO2M2N3M2M4M2N3M2N3M1O001O2N2N3L3N3M2N3M2N3M2N3M2N3M2MZTn1"}, "image_id": 791, "id": 13243}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 377.0, 20.0, 77.0], "area": 981, "segmentation": {"size": [512, 512], "counts": "i;\\2f=N3N1N3M2N3M3M2N3M2N3M2N3M1O0G9C>J6O7Hmbe7"}, "image_id": 791, "id": 13244}, {"iscrowd": 0, "category_id": 1, "bbox": [379.0, 388.0, 95.0, 74.0], "area": 3383, "segmentation": {"size": [512, 512], "counts": "Pmm51m?3M2N3M2N3M2N3M2N3M2O2M3M2N3N110O010O010O010O010O010OTOXA`0i>]OYAd0f>[O\\Ad0e>YO]Ah0n>O010O10O10O010O010OCZO`Ag0^>ZOaAg0]>\\O`Ag0_>YO`Ah0`>YO]Aj0c>9010O01M2N3M2N3O010O010O10O02OO010O010O0100O010O010O010O0100ROjA0V>NlA3T>KnA4R>JPB7P>FSB:l=DVBN1N3M3M2N3N1NYbb0"}, "image_id": 791, "id": 13245}, {"iscrowd": 0, "category_id": 1, "bbox": [27.0, 414.0, 13.0, 15.0], "area": 114, "segmentation": {"size": [512, 512], "counts": "Qm=1n?4M2M4M2N1N0101O2M4M2M4Mgb[7"}, "image_id": 791, "id": 13246}, {"iscrowd": 0, "category_id": 1, "bbox": [147.0, 428.0, 45.0, 73.0], "area": 2160, "segmentation": {"size": [512, 512], "counts": "ZnY24f?6J6J6K5J7N1\\AQOX>n0aAYO_>T10000PBfNZ=Z1^BnNb=R1VBVOj=`11O0001O00000001O0001O00000001UNZBZ1f=_NaBa1o=1O00000001N1I7I7I7L40001I6I7IUbo4"}, "image_id": 791, "id": 13247}, {"iscrowd": 0, "category_id": 1, "bbox": [81.0, 435.0, 56.0, 64.0], "area": 2217, "segmentation": {"size": [512, 512], "counts": "XoX15e?6O1O1O100O2N100M3J6K5J7I6K5K5O1O1O2O0O1O1O1000001O01O000000010O1O1O1O2N1aNhAU1a>00000001L3J6I7N20001O01O0000K5J6I8InQk5"}, "image_id": 791, "id": 13248}, {"iscrowd": 0, "category_id": 1, "bbox": [385.0, 436.0, 81.0, 74.0], "area": 2636, "segmentation": {"size": [512, 512], "counts": "SnP61m?3M2\\@MY?6d@LZ?6d@MY?>N3M2O20O01\\ATOS>k0kAXOT>i0iAZOW>e0gA]OY>d0dA_O\\>`0bAB^>?_ADa>m0010O010O010O010O010O010O010O001M3N110O010O010O010O010O010O010O010PO[Ac0e>[O]Af0c>WO`Ah0`>VObAk0j>O010O010O010O010O010O010WOo@a0Q?]ORAc0U?010O010O01M2N3M2N3M2N3M2Nf`f0"}, "image_id": 791, "id": 13249}, {"iscrowd": 0, "category_id": 1, "bbox": [190.0, 437.0, 49.0, 75.0], "area": 2405, "segmentation": {"size": [512, 512], "counts": "`_o23h?5L5J5K5L401PAXOf>h0WA[Oj>m00001O01OL4L5J5K5L4L40000001OO1L4L4L400000000001K4K5K5O2O000001O01O000O1]OnASOW>h0nATOW>5bA5Q?FTA5Q?GSA4YaX4"}, "image_id": 791, "id": 13250}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 464.0, 63.0, 48.0], "area": 1921, "segmentation": {"size": [512, 512], "counts": "`>_1a>gNdAn0g>00PO]Ac0P?0000001O00000000000001O000000000000000000001O000000000000000000001O0000L4E;F:N21O000000000000000000001O0000000J6E;E;EiQP7"}, "image_id": 791, "id": 13251}, {"iscrowd": 0, "category_id": 1, "bbox": [293.0, 481.0, 66.0, 31.0], "area": 1445, "segmentation": {"size": [512, 512], "counts": "`ob44k?8H2N2N1N3N2O1O0O1000000000000O11O1O1O1O1O1O1O1O00000000O100000000000000000000000000000000J6O1N2O1O1O1000000000000O100000000005Kb0^OXP\\2"}, "image_id": 791, "id": 13252}, {"iscrowd": 0, 
"category_id": 1, "bbox": [1.0, 0.0, 4.0, 1.0], "area": 4, "segmentation": {"size": [512, 512], "counts": "P`01o?00000P`m7"}, "image_id": 792, "id": 13253}, {"iscrowd": 0, "category_id": 1, "bbox": [42.0, 0.0, 63.0, 25.0], "area": 1327, "segmentation": {"size": [512, 512], "counts": "PPe02n?8H9G6J00000000000000O10000000000000000O100000000000000O10000000000000000O100000000000000O10000000000000000O1000000000006J8Gc_[6"}, "image_id": 792, "id": 13254}, {"iscrowd": 0, "category_id": 1, "bbox": [248.0, 0.0, 18.0, 9.0], "area": 147, "segmentation": {"size": [512, 512], "counts": "PPl33m?6J00000000000000O1000000000000000PPk3"}, "image_id": 792, "id": 13255}, {"iscrowd": 0, "category_id": 1, "bbox": [455.0, 0.0, 57.0, 56.0], "area": 2107, "segmentation": {"size": [512, 512], "counts": "n`S7a0_?9G000000000000000000000000000000000N2N2N2N20000001OM3N2N2M3N2M3N2M3N2O100000O1000000N2N2N2N2N2N2N2O1N2N2N2N2N2N2"}, "image_id": 792, "id": 13256}, {"iscrowd": 0, "category_id": 1, "bbox": [301.0, 6.0, 67.0, 47.0], "area": 2562, "segmentation": {"size": [512, 512], "counts": "W`f4f0Z?h0XO0000000000000000000000O100000000000000000000000000000000000000000000000000000O10UOaA6^>JbA6^>JbA6i>_OWAa0T?0000000000000000000000000000000000000008HlnW2"}, "image_id": 792, "id": 13257}, {"iscrowd": 0, "category_id": 1, "bbox": [198.0, 23.0, 56.0, 39.0], "area": 1799, "segmentation": {"size": [512, 512], "counts": "nPS3i0W?7I0000000000000000000000000O10000000000000000000ON3O10N200000000000000O10O100000000000000000O01000000008H;E:F\\nP4"}, "image_id": 792, "id": 13258}, {"iscrowd": 0, "category_id": 1, "bbox": [4.0, 25.0, 14.0, 15.0], "area": 117, "segmentation": {"size": [512, 512], "counts": "TQ21m?2M4M2N2N30O010O0O2M2M3N3MUof7"}, "image_id": 792, "id": 13259}, {"iscrowd": 0, "category_id": 1, "bbox": [22.0, 30.0, 25.0, 26.0], "area": 496, "segmentation": {"size": [512, 512], "counts": "QQ;4l?7I6J6J00O10O1000000000O0100000000000O01006J7I6J_^X7"}, "image_id": 792, "id": 13260}, {"iscrowd": 0, "category_id": 1, "bbox": [26.0, 44.0, 87.0, 47.0], "area": 2732, "segmentation": {"size": [512, 512], "counts": "UR=5j?7J6J000000O01000000000O0100000005K2N0O01000000000_OYOhAg0Y>_OaAa0_>E[A;e>`000O10O100000O1000O100000O1000O100000O1003M0O100000O1000O10000000000O10000000000O10000000000O1000000000006I7J7I6J6Jc]W6"}, "image_id": 792, "id": 13261}, {"iscrowd": 0, "category_id": 1, "bbox": [217.0, 70.0, 49.0, 87.0], "area": 2430, "segmentation": {"size": [512, 512], "counts": "Wb\\3i0W?h0XO4L00000000000000000000000000000000000000003Mi0VO6K0000O=Dh0XO>B00000000000000000000000000000000=Ckkj3"}, "image_id": 792, "id": 13262}, {"iscrowd": 0, "category_id": 1, "bbox": [452.0, 71.0, 59.0, 79.0], "area": 2353, "segmentation": {"size": [512, 512], "counts": "VTR72l?2N3M2N3M2N3M2M4M2N3M2N2N3M2N3M2M4M2N3M2N3M2N3M2N3L3N3M2N3M2001M2N3L3N3M2N3M2N3ZNkA]1W>aNkA]1^>M2M4M2N3M2N3M2N3M2N3L3N3M2N3M2N3MW="}, "image_id": 792, "id": 13263}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 74.0, 3.0, 8.0], "area": 13, "segmentation": {"size": [512, 512], "counts": "Z28i?L3Mh]n7"}, "image_id": 792, "id": 13264}, {"iscrowd": 0, "category_id": 1, "bbox": [323.0, 75.0, 48.0, 78.0], "area": 2929, "segmentation": {"size": [512, 512], "counts": "]cQ55k?000X@8W?Ii@6Z>E`B5VO6Z>7nACH6Z>h0fAXOZ>Z1Ma0^O5L00000000O1000000000000000000000O1000000000O1000000000000000000WNhBj0X=VOZC8f07Ie[V2"}, "image_id": 792, "id": 13265}, {"iscrowd": 0, "category_id": 1, "bbox": [282.0, 83.0, 41.0, 74.0], "area": 2747, "segmentation": {"size": [512, 512], "counts": 
"dR]48h?e0[Oe0[Of0ZO1O000000000000000000000000000O1000O10000000000000000000000000000000a0_Oe0[Of0ZOa[n2"}, "image_id": 792, "id": 13266}, {"iscrowd": 0, "category_id": 1, "bbox": [5.0, 88.0, 15.0, 12.0], "area": 169, "segmentation": {"size": [512, 512], "counts": "ib2;e?0000000O100000000000O10002NVme7"}, "image_id": 792, "id": 13267}, {"iscrowd": 0, "category_id": 1, "bbox": [50.0, 96.0, 77.0, 60.0], "area": 2367, "segmentation": {"size": [512, 512], "counts": "mSi01n?3N2N2M3N2M4M2M3N2M3N2M4M2M3N000O010O010O010O010O010O01000O010O010O010O010O010O010O10O10O010O010O010O010O010O10O10O010O010O04M2M3N2M3N2N3L3N2M3N2M3N3L3NQ\\P6"}, "image_id": 792, "id": 13268}, {"iscrowd": 0, "category_id": 1, "bbox": [18.0, 98.0, 26.0, 27.0], "area": 530, "segmentation": {"size": [512, 512], "counts": "VS94l?6I6K6J2N0000O0100000000O01000000000O010003M6J6J6IZlY7"}, "image_id": 792, "id": 13269}, {"iscrowd": 0, "category_id": 1, "bbox": [498.0, 125.0, 14.0, 37.0], "area": 358, "segmentation": {"size": [512, 512], "counts": "`Ti74j?2N3`@HS?;k@GS?;j@HS?e0M2M4M20001O010O0RL"}, "image_id": 792, "id": 13270}, {"iscrowd": 0, "category_id": 1, "bbox": [79.0, 140.0, 70.0, 67.0], "area": 2324, "segmentation": {"size": [512, 512], "counts": "_eW11n?2O1N2N3M2N2O1N2N3M2N2N2O2M2N2N2N2O2M2N2N2N3N1N2N2N2N0010O000000010O00000000010O000000102M2N2N2N3N1N2N2N2N3M2O1N2N3M2N2O1N2N3M2N2N2O2M2N2NfZe5"}, "image_id": 792, "id": 13271}, {"iscrowd": 0, "category_id": 1, "bbox": [46.0, 162.0, 28.0, 29.0], "area": 487, "segmentation": {"size": [512, 512], "counts": "ZUg03m?2M4M2M3N3M2M4M1N10O10O010O10O010O10O03N3M2M4M2M3N3M2M\\jj6"}, "image_id": 792, "id": 13272}, {"iscrowd": 0, "category_id": 1, "bbox": [332.0, 163.0, 14.0, 15.0], "area": 191, "segmentation": {"size": [512, 512], "counts": "SUV5?a?000000O1000000O10000O1000mjb2"}, "image_id": 792, "id": 13273}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 167.0, 47.0, 65.0], "area": 1842, "segmentation": {"size": [512, 512], "counts": "W5T1l>0010O00bAoNm=R1PBQOP>n0mAUOS>l0jAVOW>i0fA[OY>V1010O00010O010O00010O0010O00fNfAo0Z>mNiAS1W>jNmAU1S>iNoAX1]>O00010O010O0O1M4M2M3M4M2M4L3N2M4M2M3M4MnYX7"}, "image_id": 792, "id": 13274}, {"iscrowd": 0, "category_id": 1, "bbox": [118.0, 185.0, 65.0, 70.0], "area": 2089, "segmentation": {"size": [512, 512], "counts": "XWk11n?2M3N2N2N1O2M3N2N2N1O2M3N2N2N2N1N3N2N2N2N1N3N2N2N2N2M10O10000O01000O1000O010002M2O2N2M3N1O2M3N2N1N3N2N2M2O2N2M3N1O2M3N1O2M3N2N1OfYT5"}, "image_id": 792, "id": 13275}, {"iscrowd": 0, "category_id": 1, "bbox": [167.0, 222.0, 53.0, 60.0], "area": 1727, "segmentation": {"size": [512, 512], "counts": "ahc22k?3N2N3M2M4JBf@`0X?6M2N3L3N2N3M2M4M2N3L3N2N1O0O12M3N3M2N210O010O0010O010O0010N1N3M2M4M2N3M2M3N3M2M4M2N3M2M3N3Mgha4"}, "image_id": 792, "id": 13276}, {"iscrowd": 0, "category_id": 1, "bbox": [395.0, 232.0, 53.0, 77.0], "area": 2464, "segmentation": {"size": [512, 512], "counts": "YiU63i?4M4K4M3M3L5L3M3L5L3L4M4L3L4M3M4K4M3L5O01O0001O01O01O01O01O01O000O1M4K4M3M4K4M3M4K4M3L4M4L3L4M4L3L4M4K4Maho0"}, "image_id": 792, "id": 13277}, {"iscrowd": 0, "category_id": 1, "bbox": [226.0, 242.0, 45.0, 74.0], "area": 2255, "segmentation": {"size": [512, 512], "counts": "dYa32h?6J7J5J6J6J6K6I6J6J6K5M4O000000010O000000010O00000010O0000000M4J5K5K5L4K6J5K5K5K5K6J5L4K]Xh3"}, "image_id": 792, "id": 13278}, {"iscrowd": 0, "category_id": 1, "bbox": [351.0, 244.0, 31.0, 61.0], "area": 1773, "segmentation": {"size": [512, 512], "counts": "jg_5g1Y>0000000000000000000J60000000000000000000000000000000000j0VO00bgP2"}, "image_id": 792, "id": 13279}, {"iscrowd": 0, 
"category_id": 1, "bbox": [440.0, 244.0, 57.0, 84.0], "area": 2387, "segmentation": {"size": [512, 512], "counts": "jYl63j?3N3M2N3L3N2N3L3N3M2M4M2N2M4M2N3^OcN]B_1a=cN\\Ba1a=bN\\B`1a=cN]B_1a=dN[B`1b=`0N3L3010O0N3M2N2M4M2N3L3N3M2N2M4M2N3L3N3M2M3N3M2M4M2N3M2M3N3M2M4M2N3LQX7"}, "image_id": 792, "id": 13280}, {"iscrowd": 0, "category_id": 1, "bbox": [270.0, 248.0, 47.0, 90.0], "area": 2936, "segmentation": {"size": [512, 512], "counts": "SZW4>[?71O000000000000M4ZOe0lNSOkBW1l3EnNeAV1W>mNhAT1U>oNkAQ1R>SOmAn0o=UORBj0k=YOUBg0h=]OWBd0e=_O\\B`0a=C]B?`=S1L3N210O00010O00010O000010O00010O0N2M4kN]BHf=5^BGe=8\\BEh=;WBBl=>UB^On=b0RB[OR>a0QB\\OR>a0RB[OQ>b0RB[OR>`0RB]OQ>`0i0M4N100001M200001Fa@2^?Jf@6c?O0N2McgT7"}, "image_id": 792, "id": 13283}, {"iscrowd": 0, "category_id": 1, "bbox": [66.0, 260.0, 27.0, 25.0], "area": 411, "segmentation": {"size": [512, 512], "counts": "dXQ11m?2N3L3N3M2M3N3O001O010O01O010O01O010O01N1N3M2M3N3M2NgWa6"}, "image_id": 792, "id": 13284}, {"iscrowd": 0, "category_id": 1, "bbox": [120.0, 287.0, 31.0, 29.0], "area": 531, "segmentation": {"size": [512, 512], "counts": "aYl12l?2N3L3N2M4M2N3M200010O010O01O01O010O010O00010M2M4M2N2M4M2M4MjVd5"}, "image_id": 792, "id": 13285}, {"iscrowd": 0, "category_id": 1, "bbox": [61.0, 289.0, 57.0, 65.0], "area": 2175, "segmentation": {"size": [512, 512], "counts": "ajn04i?3M3N3L3M4M2M3M4L3N201O00010M2N3L3M3M4M2M3M4N11O01O010O01O01O01O010O00010N1N3L3N2M4M2M4L3N2M4M2M4M2M4L3N2M4M2M4McfT6"}, "image_id": 792, "id": 13286}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 292.0, 32.0, 29.0], "area": 526, "segmentation": {"size": [512, 512], "counts": "eio22l?2N3M2M4M2N3M2N2010O010O010O01O010O01O010O010N1N3M2N2M4M2N3M2NdV`4"}, "image_id": 792, "id": 13287}, {"iscrowd": 0, "category_id": 1, "bbox": [324.0, 304.0, 22.0, 22.0], "area": 484, "segmentation": {"size": [512, 512], "counts": "`YR5f0Z?00000000000000000000000000000000000000000`fb2"}, "image_id": 792, "id": 13288}, {"iscrowd": 0, "category_id": 1, "bbox": [479.0, 307.0, 27.0, 28.0], "area": 466, "segmentation": {"size": [512, 512], "counts": "Tj_74j?2M4M2M3N3L3N3O01O010O01O01O010O01O01N1M3N3L3N3L3N2MYf2"}, "image_id": 792, "id": 13289}, {"iscrowd": 0, "category_id": 1, "bbox": [423.0, 315.0, 16.0, 21.0], "area": 206, "segmentation": {"size": [512, 512], "counts": "[jc62k?3M3M4K4M310O0001O01ON2M4L3L4MUVT1"}, "image_id": 792, "id": 13290}, {"iscrowd": 0, "category_id": 1, "bbox": [121.0, 319.0, 55.0, 74.0], "area": 2304, "segmentation": {"size": [512, 512], "counts": "jkl13k?2M4L3N3L3N2M4L3N2M4L3N3L3N2M4L3N3L3M3N3L3N2N30O010O00010O00010O0M4L3N2M4M2M3M4M2M4L3N2M4M2M4L3N2M4L3N2M4M2MheW5"}, "image_id": 792, "id": 13291}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 327.0, 23.0, 22.0], "area": 447, "segmentation": {"size": [512, 512], "counts": "XZR65k?`0@000000000000000000000000000000000000O01c0]OVUb1"}, "image_id": 792, "id": 13292}, {"iscrowd": 0, "category_id": 1, "bbox": [168.0, 337.0, 73.0, 79.0], "area": 3061, "segmentation": {"size": [512, 512], "counts": "\\\\d21m?2N3L3N3L3N2N3L3N3L3N3L3N2N3M2010O00010O010O0010O0010OM4M2N2M4M2M4M2N2M4M2M4M201O01O010O01ON3M2M4M2M4M2M3N3M2M4M2M3N3M2M4M2M4M2M3N3M2M4M2M3N3M2MUUW4"}, "image_id": 792, "id": 13293}, {"iscrowd": 0, "category_id": 1, "bbox": [319.0, 338.0, 69.0, 55.0], "area": 1992, "segmentation": {"size": [512, 512], "counts": "Uko41l?4L3N2M4L3N2M4O010O000j@]OP?i00010M2010O0010O00010O0010O00010O0010O001UAQOd>n0ZATOf>l0WAXOh>P11O010O01O01O01O01O010O01O01O01O01O010O01O01L3N2M4L3N3L3M3N3L3M3N3Lldm1"}, "image_id": 792, "id": 13294}, {"iscrowd": 0, 
"category_id": 1, "bbox": [495.0, 347.0, 17.0, 22.0], "area": 234, "segmentation": {"size": [512, 512], "counts": "Vkg71l?4M2N3L301O00010O3NO01O010O0N2N3TE"}, "image_id": 792, "id": 13295}, {"iscrowd": 0, "category_id": 1, "bbox": [461.0, 355.0, 30.0, 26.0], "area": 442, "segmentation": {"size": [512, 512], "counts": "bkV72l?2M3N3L3N3L310O00010O010O01O01O010O01O01O010O01L3N2M4M2M4MeT:"}, "image_id": 792, "id": 13296}, {"iscrowd": 0, "category_id": 1, "bbox": [240.0, 374.0, 52.0, 69.0], "area": 1967, "segmentation": {"size": [512, 512], "counts": "^]h32l?3L3N2M4M2M4M2M3M4M2M4M2M3N3L3N3L3N2M4M2M4M2O110O01O01O01O0M4M2M3N3L3N2M4M2M4L3N2M4M2M4M2M3N3L3N3L3N2MRd]3"}, "image_id": 792, "id": 13297}, {"iscrowd": 0, "category_id": 1, "bbox": [407.0, 377.0, 20.0, 18.0], "area": 205, "segmentation": {"size": [512, 512], "counts": "Sl[61m?3M2N3M2N3O0010O0100O010O01N1N3M2N3M2NSTZ1"}, "image_id": 792, "id": 13298}, {"iscrowd": 0, "category_id": 1, "bbox": [455.0, 379.0, 57.0, 52.0], "area": 1825, "segmentation": {"size": [512, 512], "counts": "clS74j?2N3L3N2N3L3N3M2M4M2O1010O01O010O01O010O01O010O01O010O01O010O01O010O01O010O01ON3M2M4O010O00010O010O010O00010O010iC"}, "image_id": 792, "id": 13299}, {"iscrowd": 0, "category_id": 1, "bbox": [287.0, 394.0, 85.0, 66.0], "area": 3191, "segmentation": {"size": [512, 512], "counts": "_m_41m?2M4M2N3L3N2N3L3N3M2M4M2N2M4M2N3O0cAfNU>Z1hAhNX>`110O0010O010O0010O0010O010O0010O0010O010^NgA[1Y>cNjA\\1]>10O010O01OO2M2M4M201O01O010O010O01ON3M2N3O010O01O01O010O01M2N3M2M3N3M2M4M2N3L3N2N3L3N3LlbU2"}, "image_id": 792, "id": 13300}, {"iscrowd": 0, "category_id": 1, "bbox": [380.0, 406.0, 49.0, 41.0], "area": 1064, "segmentation": {"size": [512, 512], "counts": "e]n52l?3L3N2M4N101O01O010O00010O010OO1N3L3N3L3N1N010O102O2O01O01O010O00010O01O01ON3M2M4M2M3N3L3N3L3M3NTSY1"}, "image_id": 792, "id": 13301}, {"iscrowd": 0, "category_id": 1, "bbox": [425.0, 422.0, 60.0, 36.0], "area": 1179, "segmentation": {"size": [512, 512], "counts": "kmd61m?3M20010O00010O0O2M2M3N3M2M3N3L3O2O00010O010O01O01O010O01O01O010O01O01O01N1N3L3N2M4M11N3L3N3M21O01O010ON3L3N2M4M2M4M2MhR="}, "image_id": 792, "id": 13302}, {"iscrowd": 0, "category_id": 1, "bbox": [478.0, 441.0, 34.0, 36.0], "area": 825, "segmentation": {"size": [512, 512], "counts": "a^_71m?3L3M3N3L3M3N3L3M4O00010O00010O010O00010O00010O010O00010O0001M2N3LQB"}, "image_id": 792, "id": 13303}, {"iscrowd": 0, "category_id": 1, "bbox": [363.0, 443.0, 36.0, 50.0], "area": 821, "segmentation": {"size": [512, 512], "counts": "Woe52k?3M4M2IFe@=Y?7L3M4M2M3M4M2M10O00013L3M3N3L3N3L3M3010O00011b@@X?Nh@d0Z?1O01O0Bg@4Z?Ii@4c?M4MeQh1"}, "image_id": 792, "id": 13304}, {"iscrowd": 0, "category_id": 1, "bbox": [392.0, 468.0, 120.0, 44.0], "area": 2966, "segmentation": {"size": [512, 512], "counts": "a_T62k?4M2M3N3L3N3L3M3N3L3N3N11O01O010O01O01O010O01O01O010O00010O010O0001O001O00001O001O000QORAl0R?O001O00001O001O001O001O00001O001O001O001O0M3N3L3N3L3N2Oa@0m>4PANn>4o@On>5n@NP?4m@OP?b0N2O11O001O00001O00001O00001O001O00001O00001YOk@a0Z?01O001O00001O00001O001O00001O00001O001O00"}, "image_id": 792, "id": 13305}, {"iscrowd": 0, "category_id": 1, "bbox": [9.0, 0.0, 71.0, 53.0], "area": 2051, "segmentation": {"size": [512, 512], "counts": "Za41l?4L3M3M4L3M3M4O00010O00010O00010O00010O0001\\Ok@7jALV>4gAOY>2dA1[>ObA4^>L_A8`>I\\A:d>FZAEVA?i><0001O00001O001O00001O00001O00001O001O00001O0000N2M3M3M3N2M3M3M3M3N2M3M3M3N2M3MSPi5"}, "image_id": 795, "id": 13307}, {"iscrowd": 0, "category_id": 1, "bbox": [140.0, 0.0, 64.0, 54.0], "area": 1867, "segmentation": {"size": [512, 512], "counts": 
"UQV21m?3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2O2M2N3O001O001O001O001O001O001OO1N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2O100O1000000O3Nnoi4"}, "image_id": 795, "id": 13308}, {"iscrowd": 0, "category_id": 1, "bbox": [226.0, 0.0, 55.0, 62.0], "area": 2201, "segmentation": {"size": [512, 512], "counts": "`Qa31l?3N2VONcA6Z>LdA6Y>NdA5Y>MdA6Z>LdA7X>MeA5Y>MdA6Y>k0O001O00001O0010O01O0O101O001O00001O001O001ON2M3N2N2M3N2N2M4M2N2M4M2N3L3N3L3N2N3L3N3M2M3Nl_c3"}, "image_id": 795, "id": 13309}, {"iscrowd": 0, "category_id": 1, "bbox": [435.0, 0.0, 4.0, 2.0], "area": 6, "segmentation": {"size": [512, 512], "counts": "P`i61o?1O00OQ`T1"}, "image_id": 795, "id": 13310}, {"iscrowd": 0, "category_id": 1, "bbox": [281.0, 15.0, 55.0, 65.0], "area": 2161, "segmentation": {"size": [512, 512], "counts": "Pb\\42k?3M3M4L3M3M4L3M3M4L3M3M4L3M3N30O00O2M2O101N100O2O00001O000010O00100O1O101N6]NdAV1]>fNfA[1`>O01OO2L3M3M4L3M3M4L3M3M4L3M3Mkng2"}, "image_id": 795, "id": 13311}, {"iscrowd": 0, "category_id": 1, "bbox": [495.0, 18.0, 17.0, 18.0], "area": 215, "segmentation": {"size": [512, 512], "counts": "m`g72k?3M3M4M200010O01O01O01O01O010O00ZO"}, "image_id": 795, "id": 13312}, {"iscrowd": 0, "category_id": 1, "bbox": [349.0, 24.0, 50.0, 77.0], "area": 2192, "segmentation": {"size": [512, 512], "counts": "jb^52k?3SOMjA7R>MjA6S>MjA6S>MjA7S>LiA7W>IfA:[>EbA?]>b00010O00010L3M3M4L3M3M4L3M3N3O00010OO1M4L3M3M4L3M3N3L3M3M4L3M3M4L3M3M4M2M4L3M3M4LR_h1"}, "image_id": 795, "id": 13313}, {"iscrowd": 0, "category_id": 1, "bbox": [396.0, 58.0, 53.0, 70.0], "area": 2123, "segmentation": {"size": [512, 512], "counts": "eSV61l?3N2M4L3M3M4L3M3M4L3M3N3L3M3M4L3M4L3M3O2O00010O01O01O01O01N1M3ON3M2N3M3M4L3M3M4L3M3M4L3N3L3M3M4L3M3M4L3MS^o0"}, "image_id": 795, "id": 13314}, {"iscrowd": 0, "category_id": 1, "bbox": [469.0, 100.0, 43.0, 56.0], "area": 1749, "segmentation": {"size": [512, 512], "counts": "mcZ72k?3M4L3M3n@D]>`0_AD\\>`0aAC\\>`0aAC\\>a0`AC_>o000010O00010O00010O00010O00010O00010O00010OO1M4L22L3M3M4L3M3L5L3M3MlL"}, "image_id": 795, "id": 13315}, {"iscrowd": 0, "category_id": 1, "bbox": [70.0, 150.0, 64.0, 53.0], "area": 2197, "segmentation": {"size": [512, 512], "counts": "jUS14d?9G8H8N2001O000001O00000001O000001O000SATOf>S1001O00O10001N100O1O100O1N2O2M21O00M3O1001O010O00001L3L5O000N3L3N2N3L3N3M2M3N3L3N3M2M3NSkl5"}, "image_id": 795, "id": 13316}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 159.0, 60.0, 45.0], "area": 2117, "segmentation": {"size": [512, 512], "counts": "o4T1m>O0001O0000000001O01O0000004L01O000000000M312M0001O01O01O0M300001O0001O0000000001O01O0000000001O0001O00000001I6I7H8I7HQkQ7"}, "image_id": 795, "id": 13317}, {"iscrowd": 0, "category_id": 1, "bbox": [145.0, 160.0, 51.0, 55.0], "area": 1619, "segmentation": {"size": [512, 512], "counts": "XfX22k?4M2M4M2N3L3O10O1010O010M2M4M2M3N3M2M4M2M4M2N30O0010O010O0010O0010ON3M2M3N1O0O3N1N3N1O2M4M2N2M4M3Fa@Ne?L^@2]Zn4"}, "image_id": 795, "id": 13318}, {"iscrowd": 0, "category_id": 1, "bbox": [221.0, 173.0, 85.0, 54.0], "area": 2917, "segmentation": {"size": [512, 512], "counts": "]f^31m?2M4M2N3L3N3M2M3N3M2N3L3N2N3O010O0010O0010O010O0010O0010O0010O010O00010O010O001O00010O00001O0000000001O0001O000000000001O0001O000000000001O01O00000000000000010O000K5E;F\\jV3"}, "image_id": 795, "id": 13319}, {"iscrowd": 0, "category_id": 1, "bbox": [115.0, 198.0, 30.0, 28.0], "area": 500, "segmentation": {"size": [512, 512], "counts": "hfi11m?3L3N2N3L3N3M2O20OO1O2O010O0010O0010O010OO1N3L3N3M2M4M2N2MfYg5"}, "image_id": 795, "id": 13320}, {"iscrowd": 0, "category_id": 1, "bbox": [183.0, 199.0, 27.0, 26.0], 
"area": 422, "segmentation": {"size": [512, 512], "counts": "ifk21l?3M3N3L3M3N3M210O00010O0010O0010O00010OO2M2M3M4M2M3Ndif4"}, "image_id": 795, "id": 13321}, {"iscrowd": 0, "category_id": 1, "bbox": [55.0, 207.0, 24.0, 23.0], "area": 479, "segmentation": {"size": [512, 512], "counts": "kfk09]?:N20001O01O00000000000001O0001O00000000000M3FiYh6"}, "image_id": 795, "id": 13322}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 212.0, 18.0, 19.0], "area": 284, "segmentation": {"size": [512, 512], "counts": "d6b0^?0001O000000000001O0000N3M2000000Iaif7"}, "image_id": 795, "id": 13323}, {"iscrowd": 0, "category_id": 1, "bbox": [44.0, 223.0, 10.0, 13.0], "area": 105, "segmentation": {"size": [512, 512], "counts": "VWf05d?71O0001O00000001JTiT7"}, "image_id": 795, "id": 13324}, {"iscrowd": 0, "category_id": 1, "bbox": [200.0, 227.0, 90.0, 67.0], "area": 2901, "segmentation": {"size": [512, 512], "counts": "UXT31m?3M2M3N3L3N3L3N2N3L3N3L30001O0N3L3010O0010O0010O0010O0010O010O0010O0010O0010O0010O0kN]Am0d>PO^AQ1i>O01O010O01O01O010O01O010O01O010O01O01O010O01O01O010O010O01O01O010OO2M2N2N3M2N3L3N3M2N2N3M2Njg^3"}, "image_id": 795, "id": 13325}, {"iscrowd": 0, "category_id": 1, "bbox": [397.0, 233.0, 72.0, 52.0], "area": 2390, "segmentation": {"size": [512, 512], "counts": "\\hV64i?3M3M4M2M3M4L3M3M4M2M3M4N110O01O01O01O01O01O01O01O01O01O01O01O01O01O0O2N10010O01O01O01O01O01ON3L30001O01O010O00010O01O01O01M2M3M4L3M3M4M2M3M4LUXe0"}, "image_id": 795, "id": 13326}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 234.0, 68.0, 55.0], "area": 2519, "segmentation": {"size": [512, 512], "counts": "Z7^1b>00000001O01O00000001O01O00000001O01O00fNdAP1f>01O000001O0001O000001O000001O0001O000001O000001O0001O000001O000001O0001O000001O000N2J6J6J7H7JWhm6"}, "image_id": 795, "id": 13327}, {"iscrowd": 0, "category_id": 1, "bbox": [164.0, 253.0, 30.0, 25.0], "area": 447, "segmentation": {"size": [512, 512], "counts": "\\Xb22k?3N3M2M4M2N3N10010O010O0010O0010O010O00OO0003L3N3M2M4M200NQhn4"}, "image_id": 795, "id": 13328}, {"iscrowd": 0, "category_id": 1, "bbox": [476.0, 259.0, 36.0, 72.0], "area": 1504, "segmentation": {"size": [512, 512], "counts": "dY^71l?3M4L3M3M4L3M301O01O01O01O01N1M3M4L3]AnNU>U1hAnNU>V1gAmNV>a1L3M3M4M200010O00010O00010O00jG"}, "image_id": 795, "id": 13329}, {"iscrowd": 0, "category_id": 1, "bbox": [197.0, 285.0, 63.0, 57.0], "area": 1867, "segmentation": {"size": [512, 512], "counts": "QjR31m?2M3N3L3N3L3N2M4M2M4M2M3N3M2N3O01O010O01O010O01O010O01O01mN[Ai0e>TO^Am0b>POaAo0h>1O01O010O01O01O010O01O01O010O01O01O010O0M3N3L3N3L3N2M4M2M4M2MZfm3"}, "image_id": 795, "id": 13330}, {"iscrowd": 0, "category_id": 1, "bbox": [76.0, 288.0, 60.0, 66.0], "area": 2296, "segmentation": {"size": [512, 512], "counts": "mYV11l?3N2n@MT>7iALT>6jALT>7hALU>7iALT>7hALV>6hAMT>6jALT>7hAMU>P10010O010O00010O010O010OO1M4M2M01000O102N3L310O0010O0010O010O001oNZAg0e>WO]Ai0c>UO`Ak0`>QOcAo0g>10O0N3M2N2M4M2M4M2N3L3N2N\\fk5"}, "image_id": 795, "id": 13331}, {"iscrowd": 0, "category_id": 1, "bbox": [351.0, 293.0, 84.0, 69.0], "area": 3204, "segmentation": {"size": [512, 512], "counts": "mj_52l?2M4M2M3N3M210N1M3N3L3N3L3N2M4M2O101O010O01O01N1M4M2N210OO2L3N2M101N201000O2O0O2O0O1N3M2M3N3N10010O01O01ON3L3M3N3O0010O00010O00010O01O01ON3M2M3N3L3M4M2M3N3L3M3N3L3N2MaVV1"}, "image_id": 795, "id": 13332}, {"iscrowd": 0, "category_id": 1, "bbox": [45.0, 306.0, 28.0, 29.0], "area": 484, "segmentation": {"size": [512, 512], "counts": "Vjf02k?3N3L3N2M4M2M4M21O01O010O01O01O010O01O0M3N3L3N3L3N2M4MXVk6"}, "image_id": 795, "id": 13333}, {"iscrowd": 0, "category_id": 1, "bbox": 
[138.0, 308.0, 31.0, 33.0], "area": 604, "segmentation": {"size": [512, 512], "counts": "\\ZU21m?2M4L3N3L3N2M4M2M4N100010O010O00010O0010ON2N3L3N3L3M3N3L3N2MXV[5"}, "image_id": 795, "id": 13334}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 311.0, 28.0, 40.0], "area": 802, "segmentation": {"size": [512, 512], "counts": "i9P1P?10O0001M2O1010O00010O00010O00010N1M3M4L3M3M4L3M3M4L3MUfa7"}, "image_id": 795, "id": 13335}, {"iscrowd": 0, "category_id": 1, "bbox": [178.0, 335.0, 56.0, 60.0], "area": 1777, "segmentation": {"size": [512, 512], "counts": "f[i22k?4M2M4M2M3N3L3N3L3M3N3L3N3L3N2O2O010O01O01O01O01O010O01O01O010O01O0VOaA4^>JeA5\\>GgA:X>CkA=V>@mA?S>^OPBc0o=[OTBd0d>1O01O010O01O01M2M4M2M3N3L3M4M\\dZ4"}, "image_id": 795, "id": 13336}, {"iscrowd": 0, "category_id": 1, "bbox": [74.0, 348.0, 39.0, 26.0], "area": 489, "segmentation": {"size": [512, 512], "counts": "X[U11l?3N3L3N2N30O010O00010O010O01O01O010O010O00010O010O01O01O010O010O000O2M2M4M2MiTW6"}, "image_id": 795, "id": 13337}, {"iscrowd": 0, "category_id": 1, "bbox": [334.0, 364.0, 77.0, 55.0], "area": 2523, "segmentation": {"size": [512, 512], "counts": "a\\W53j?3M4_OHUA;h>HVA:g>IVA;f>IVA:h>`0O2O0O101N10001N110O00010O0001O0M4L31O01O01O01O01OmNWAo0m>10O0010O00010O00010O00010O0010O0010O00010O00010O000O2O0O2N100O11L3M3M4L3N2M4L3M4L3N2M4LSTb1"}, "image_id": 795, "id": 13338}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 371.0, 20.0, 44.0], "area": 545, "segmentation": {"size": [512, 512], "counts": "d;Y1h>OO2N1010N1M4M2N3L3N2N3L3N3L3N2N3L3N3LYde7"}, "image_id": 795, "id": 13339}, {"iscrowd": 0, "category_id": 1, "bbox": [63.0, 376.0, 54.0, 70.0], "area": 2069, "segmentation": {"size": [512, 512], "counts": "amo01m?2M3N3M2M4M2N2M4M2M4M2N3L3N2M4M2N3L3N2N3L3N3L3N3O00010O010OO1M4M2M4M2N2M4M2M4M2N2M100O4M2N3L3N2M4M2N3L3N2I\\@OicU6"}, "image_id": 795, "id": 13340}, {"iscrowd": 0, "category_id": 1, "bbox": [418.0, 388.0, 79.0, 58.0], "area": 2551, "segmentation": {"size": [512, 512], "counts": "l\\a63k?3L3N2N3L3N3L3O101M2M4N10010O0010O010O00N30O01O0^AnNW>Q1gAROX>o0dATO]>k0aAXO^>U10010O010O01O01O010O010O00010O010O0001M2N3M2M3N3L3N3N110OO2L3N0O2020O00010O010O0N2N3L3N3M2M3N3L3N3L3NaS7"}, "image_id": 795, "id": 13341}, {"iscrowd": 0, "category_id": 1, "bbox": [134.0, 389.0, 27.0, 27.0], "area": 443, "segmentation": {"size": [512, 512], "counts": "g\\S21m?3L3N2M4M2M4M2O2O01O01O010O01O01O010O01M2M3N3L3N3L3NfS_5"}, "image_id": 795, "id": 13342}, {"iscrowd": 0, "category_id": 1, "bbox": [119.0, 391.0, 13.0, 15.0], "area": 131, "segmentation": {"size": [512, 512], "counts": "_lk14j?2M4M2O1010O010O0O1N3L3Nhcm5"}, "image_id": 795, "id": 13343}, {"iscrowd": 0, "category_id": 1, "bbox": [304.0, 413.0, 48.0, 79.0], "area": 2129, "segmentation": {"size": [512, 512], "counts": "_^h42l?2c@Oh>4VAOf>5VANa>I[A;10`>H\\A<1Na>m0N3O0O2M2M3N3L3N2M4L3N30O00N3M2N30L3N2M4M2M4M2M3M4M2M6K2M4M2M3M4M2M3N3L3N3L3M3N3L3Nib_2"}, "image_id": 795, "id": 13344}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 419.0, 23.0, 27.0], "area": 434, "segmentation": {"size": [512, 512], "counts": "Y=`0^?2M4N100010O010O00010O010O00001L3N3L3N2M4M2MiRd7"}, "image_id": 795, "id": 13345}, {"iscrowd": 0, "category_id": 1, "bbox": [151.0, 419.0, 64.0, 54.0], "area": 1911, "segmentation": {"size": [512, 512], "counts": "om[22k?4M2N3L3N3L3N2N3L3N3L3O110O010O0010O0010O0010OVAnNf>R1XAPOi>S101O010O010O01O01O010O01O01O010O010O01O01O010O01O0O1N3L3N3L3N3M2M3N3L3N3M2M3NZRd4"}, "image_id": 795, "id": 13346}, {"iscrowd": 0, "category_id": 1, "bbox": [506.0, 425.0, 6.0, 14.0], "area": 44, "segmentation": {"size": 
[512, 512], "counts": "d]m71m?2N3L3N2N3fB"}, "image_id": 795, "id": 13347}, {"iscrowd": 0, "category_id": 1, "bbox": [351.0, 430.0, 47.0, 78.0], "area": 2191, "segmentation": {"size": [512, 512], "counts": "_n_51?2k>1RA24LU>6cA24LV>4cA34LY>1aA53N[>`0bAB_>o001O0N2N3L3M3M4L3M3N3N101O000M4N10M3N3O010O0FZBYNg=d1]BXNg=d1j0mASOS>l0QBPOQ>o0QBoNo=o0VBmNk=Q1d0L3M3N3L3M4L3N2M4L3MiQi0"}, "image_id": 795, "id": 13350}, {"iscrowd": 0, "category_id": 1, "bbox": [81.0, 454.0, 13.0, 13.0], "area": 104, "segmentation": {"size": [512, 512], "counts": "^nX11l?4M2M4O01O01O010O01M2M4Mha`6"}, "image_id": 795, "id": 13351}, {"iscrowd": 0, "category_id": 1, "bbox": [5.0, 479.0, 71.0, 33.0], "area": 1333, "segmentation": {"size": [512, 512], "counts": "no22l?2M3N2N2M3N2N2M3N2N2M3N2M31O00001O001O001O00001O2N001O001O00001O001O001O00001O001O001O00001O001O0000O100001O001O002N001O001O001O0K[@Oe?O^@0i?01O001OQ`i6"}, "image_id": 795, "id": 13352}, {"iscrowd": 0, "category_id": 1, "bbox": [115.0, 480.0, 79.0, 32.0], "area": 1559, "segmentation": {"size": [512, 512], "counts": "goi12k?3N3L3N3M2N210N1M4M2M4N10002OO001O001O00001O001O00001O001O001O00001O001O001O00001O001O00001O001O001O00001O001O0000O10000000000000000000000O1000000000000005K[`n4"}, "image_id": 795, "id": 13353}, {"iscrowd": 0, "category_id": 1, "bbox": [493.0, 490.0, 19.0, 22.0], "area": 309, "segmentation": {"size": [512, 512], "counts": "nof72k?3N2N2M3N2N2N2M3O100001O001O001O0000"}, "image_id": 795, "id": 13354}, {"iscrowd": 0, "category_id": 1, "bbox": [247.0, 70.0, 102.0, 90.0], "area": 4214, "segmentation": {"size": [512, 512], "counts": "Vdk31o?1N2N3M2N2O1N2N3M2N2N2O1N3M2N2N2N2O2M200O1OO01O00000001O01O000000010O0_OjNYBW1f=kNXBW1f=kNXBW1f=kNXBW1f=kNYBV1f=lNWBV1g=lNWBW1f=kNXBW1f=c0N2N01O00000001O01O0000000]O]BjNe=U1\\BiNe=V1]BiNd=U1_BhNb=W1`BgN`=Y1bBeN^=[1dBcN]=\\1eBbN[=^1gB`NZ=`1gB_NY=b1b01O00O1O2N1O2N2N10O0001O000000101N2N2N2K5N3M2N2N2N2O1N3M2N2N2N2N2O1NW]a2"}, "image_id": 796, "id": 13355}, {"iscrowd": 0, "category_id": 1, "bbox": [208.0, 168.0, 25.0, 30.0], "area": 250, "segmentation": {"size": [512, 512], "counts": "]UX32m?2N2N3N1N2N20010O000003M01O01O0001O01O0O1N2N2O2MPZ[4"}, "image_id": 796, "id": 13356}, {"iscrowd": 0, "category_id": 1, "bbox": [131.0, 204.0, 53.0, 56.0], "area": 1411, "segmentation": {"size": [512, 512], "counts": "ofQ21n?2O1N2N2N3M2N2N2O1N3M2N2N2N2N3N1N2N2N2N3M2N2001O0001O000001O000N2N2N3M2N2N2O1N2N3M2N2N2N2O1N3M2N2N2N2O1NbhS5"}, "image_id": 796, "id": 13357}, {"iscrowd": 0, "category_id": 1, "bbox": [21.0, 312.0, 53.0, 62.0], "area": 1497, "segmentation": {"size": [512, 512], "counts": "dj:3l?2N2N2N2O1o@FY>=dAEZ>=dAEZ>=dAEZ>=dAEZ>=eACZ>?dAA]>a0`A_O`>c0^A]Ob>e0\\A[Od>P100010O0N\\AiNd>W1200001O01O0000000001O01O002N2N2O1N2N2N3M2N2N2O1N2N3M2N2N2O1N3M2Ncej6"}, "image_id": 796, "id": 13358}, {"iscrowd": 0, "category_id": 1, "bbox": [262.0, 328.0, 70.0, 77.0], "area": 2719, "segmentation": {"size": [512, 512], "counts": "R\\S41n?2N2M3N1AITA9j>ISA:k>HSA:j>HUA9j>ISA:k>HSA:b>[OdAU1Z>lNeAU1Y>nNeAT1Y>nNdAU1Z>:M2O200000O0100N2N1O1N10LSBVNm=j1310O1000O01000O03N1O2M3N1O0O011O2M3N2N1O2O10M3NO3N2N1O2GVAXOm>f0UAXOm>f07N3N2N2M2O2N2M3N1O2M3NZei2"}, "image_id": 796, "id": 13359}, {"iscrowd": 0, "category_id": 1, "bbox": [225.0, 361.0, 27.0, 26.0], "area": 363, "segmentation": {"size": [512, 512], "counts": "ek`32m?2N1O2N2M3N2N1O2N2N2N20O0100000O1M3N1O2N2M3N2N2N1N3N\\dQ4"}, "image_id": 796, "id": 13360}, {"iscrowd": 0, "category_id": 1, "bbox": [334.0, 371.0, 14.0, 15.0], "area": 117, "segmentation": {"size": [512, 512], "counts": 
"k[W52l?2O2M3N1N30000O01N2N1N3N2MYda2"}, "image_id": 796, "id": 13361}, {"iscrowd": 0, "category_id": 1, "bbox": [9.0, 392.0, 24.0, 24.0], "area": 289, "segmentation": {"size": [512, 512], "counts": "el41n?2N3M2N2O1N2N2N2N2N0001O0001O03M2N2N2N2N2O1N3M_S_7"}, "image_id": 796, "id": 13362}, {"iscrowd": 0, "category_id": 1, "bbox": [14.0, 491.0, 16.0, 15.0], "area": 150, "segmentation": {"size": [512, 512], "counts": "c_73k?3M2M4N11O010O01O01O001M2N2M4Mb``7"}, "image_id": 797, "id": 13363}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 12.0, 30.0], "area": 274, "segmentation": {"size": [512, 512], "counts": "0n0R?02NO10O10002N6J5J6K5KXoi7"}, "image_id": 798, "id": 13364}, {"iscrowd": 0, "category_id": 1, "bbox": [15.0, 0.0, 32.0, 19.0], "area": 406, "segmentation": {"size": [512, 512], "counts": "Y`71l?3M4M2O101O001O00001O001O00001O00001O001O00001O00N2M3M3N2M3N2MS`X7"}, "image_id": 798, "id": 13365}, {"iscrowd": 0, "category_id": 1, "bbox": [68.0, 0.0, 31.0, 31.0], "area": 636, "segmentation": {"size": [512, 512], "counts": "PPR12n?00001c@0h>0VA3i>NSA5m>KQA8n>Io@9Q?Gl@3TB0XOOb>4SBOYO0a>3TB6k=LRB7k=KSB7k=LRB7k=LSB5k=MSB6j=MSB3m=OQB1o=2nAOS>o001O001O001OO1O1N2N2N2N2N2N2N2N2N2N2N2O1N2N2N2N2N2N2N2N2N2N2N2O1N2N2O1001OG[@7h?M2No_Z5"}, "image_id": 798, "id": 13367}, {"iscrowd": 0, "category_id": 1, "bbox": [159.0, 0.0, 60.0, 57.0], "area": 1759, "segmentation": {"size": [512, 512], "counts": "Pa_23k?2N3M2N3M2N3N1N3M2N3M2N3M2N3M2N3N110O10O010O010O010O010O0mN\\Ak0d>RO^An0b>POaAP1g>010O010O010O010O10O010OO0O02N2N3M2N3N1N3M2N3M2N3M2N3M2NY_b4"}, "image_id": 798, "id": 13368}, {"iscrowd": 0, "category_id": 1, "bbox": [396.0, 0.0, 6.0, 2.0], "area": 9, "segmentation": {"size": [512, 512], "counts": "PPV61o?00001O000PPg1"}, "image_id": 798, "id": 13369}, {"iscrowd": 0, "category_id": 1, "bbox": [409.0, 0.0, 49.0, 39.0], "area": 1175, "segmentation": {"size": [512, 512], "counts": "f`\\64j?2M4M2M3N3O0010O0010O001O0M3N3L3N3M20001O001O00001O001O00001O001O00M3N2M3N2M3N2M3N2M3N2M3N2M3N2MSPk0"}, "image_id": 798, "id": 13370}, {"iscrowd": 0, "category_id": 1, "bbox": [472.0, 0.0, 25.0, 23.0], "area": 373, "segmentation": {"size": [512, 512], "counts": "`P\\71m?3L3M4M2M3N3N101O00001O001O0000O1M3N2M3N2M3N2M3NR`7"}, "image_id": 798, "id": 13371}, {"iscrowd": 0, "category_id": 1, "bbox": [33.0, 24.0, 56.0, 63.0], "area": 1897, "segmentation": {"size": [512, 512], "counts": "aa`03`0Oi>4SA0j>2TA0i>4TAOi>3TA0j>2TA0j>e0O010O0010O00XAnNc>W1O2O010O00010O010O001O0O101O000ON2O00O010000O101O3L3N3M2M4O00010O0O2M2N3L3N2N3L3N3M2MS_c6"}, "image_id": 798, "id": 13372}, {"iscrowd": 0, "category_id": 1, "bbox": [465.0, 32.0, 45.0, 58.0], "area": 1562, "segmentation": {"size": [512, 512], "counts": "gaX74j?2`@LQ?7l@MQ?6l@LQ?7l@MQ?b0N2M4M2M4N100bAgNV>Y1fAjNZ>V1dAlN]>\\1O0010O0010O0010O0010O0010N1M4M2M4M2M3N3M2M4M2M3N3L3N3L3N2M4Mdn0"}, "image_id": 798, "id": 13373}, {"iscrowd": 0, "category_id": 1, "bbox": [415.0, 37.0, 28.0, 30.0], "area": 513, "segmentation": {"size": [512, 512], "counts": "ia_63j?3N3L3N2M4M2M4M21O010O01O01O010O01O01O0N3L3N2M4M2M3N3Lf^R1"}, "image_id": 798, "id": 13374}, {"iscrowd": 0, "category_id": 1, "bbox": [212.0, 52.0, 57.0, 51.0], "area": 1542, "segmentation": {"size": [512, 512], "counts": "]RZ32m?2N2M3N1N3N2N1N3N2N2M2O2M3N2N11000O010UAQOd>o0ZAROf>U10O1N2M210OO2N2M2O2M3NO10O10O101N3N2N1N3N2N1N3N2M3N1O2M3N2M2O2N2M2O2N2MP^i3"}, "image_id": 798, "id": 13375}, {"iscrowd": 0, "category_id": 1, "bbox": [168.0, 70.0, 29.0, 29.0], "area": 480, "segmentation": {"size": [512, 512], "counts": 
"fRd23k?2O2M3N1N3M2O2M3N1N3O10O010O10O10O1O0O2M2O2M3M2O2M3N1N3Ma]m4"}, "image_id": 798, "id": 13376}, {"iscrowd": 0, "category_id": 1, "bbox": [502.0, 81.0, 10.0, 21.0], "area": 126, "segmentation": {"size": [512, 512], "counts": "RSk71m?2M3N3L3N3L3N201O0_M"}, "image_id": 798, "id": 13377}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 83.0, 33.0, 30.0], "area": 631, "segmentation": {"size": [512, 512], "counts": "h2`0^?2M4O010O00010O010O00010O00010O010O00010O0010O001O0N2M4M2M4L3N2MU]_7"}, "image_id": 798, "id": 13378}, {"iscrowd": 0, "category_id": 1, "bbox": [174.0, 103.0, 32.0, 31.0], "area": 521, "segmentation": {"size": [512, 512], "counts": "PTg21m?2O2M3KJ]@7b?5M2N3N2M2O0O010O00210O10O10O0100N1N3M2O2M3N1N3N2M2N3N_lh4"}, "image_id": 798, "id": 13379}, {"iscrowd": 0, "category_id": 1, "bbox": [159.0, 106.0, 17.0, 15.0], "area": 130, "segmentation": {"size": [512, 512], "counts": "ac_21n?2M2O2M3N11000O01000O01O1M2O2M3N_lW5"}, "image_id": 798, "id": 13380}, {"iscrowd": 0, "category_id": 1, "bbox": [98.0, 115.0, 38.0, 35.0], "area": 770, "segmentation": {"size": [512, 512], "counts": "VTa13k?2N3K4N3M2N2N3L3010O010O010O010O01O01O010O010O010O010O01O01M2N3L3N3M2N3M2JY@0j[l5"}, "image_id": 798, "id": 13381}, {"iscrowd": 0, "category_id": 1, "bbox": [146.0, 118.0, 21.0, 19.0], "area": 209, "segmentation": {"size": [512, 512], "counts": "PTY21m?3M2N3M2N30O010O010O010O010O010M2M3N3M2NU\\\\5"}, "image_id": 798, "id": 13382}, {"iscrowd": 0, "category_id": 1, "bbox": [318.0, 120.0, 69.0, 68.0], "area": 2521, "segmentation": {"size": [512, 512], "counts": "hTo43l?1O2N2M3N2N1N3N2N2M3N1O2M3N2N2N1N3N2O1N1O2N2M3N2N1O2N200000O10O100000O10O10000OO00101O100000O10O1N2M3N1O2N2M3N2N1CXAAj>=XA@k>>WA@k>=XAAj>=WABj>=>N2M3N2N1O2M\\[n1"}, "image_id": 798, "id": 13383}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 126.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "nco72P<"}, "image_id": 798, "id": 13384}, {"iscrowd": 0, "category_id": 1, "bbox": [115.0, 153.0, 63.0, 44.0], "area": 1486, "segmentation": {"size": [512, 512], "counts": "]ei12l?2M3N3L3O2O001O0O2O001O00010O0N3M2N3M2O2O0010O010O0010O010O010O0010O010O0010O010O010O0010O010O0010O01O0N3L3N2N3M2N3M2M4M2N3MfjV5"}, "image_id": 798, "id": 13385}, {"iscrowd": 0, "category_id": 1, "bbox": [15.0, 156.0, 69.0, 63.0], "area": 2220, "segmentation": {"size": [512, 512], "counts": "le73j?3N2N3L3N3M2N3N100010O010O0010O0010OO2L3N2N3L3O2O01O010O010OM4M2N2M4O010O0010O0010aAaN[>b110O0010O010O0010OTOgA1Y>MiA4W>HlA8T>FoA:Q>CQB=o=@UB?l=^OVBc0i=[OZBd0f=YO]Bh0]>M4M2N3L3N3M2M3N3Mmie6"}, "image_id": 798, "id": 13386}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 164.0, 9.0, 25.0], "area": 124, "segmentation": {"size": [512, 512], "counts": "T5i0W?N3L3N2M4M2M4M2MlZk7"}, "image_id": 798, "id": 13387}, {"iscrowd": 0, "category_id": 1, "bbox": [375.0, 175.0, 66.0, 101.0], "area": 2899, "segmentation": {"size": [512, 512], "counts": "mfk51n?2N2N2YAK_=8_BI`=8_BJ_=8_BJ_=8^BK_=8_BJ_=8_BI`=9^BI`=8^BK_=8_BJ_=8_BJ_=8_BI`=9]BJ`=8_BJ_=8_BJ_=8_BJ_=8_BI`=]1M3NO1000OQOdB_OZ=`0hBAV=?lBAR=?PCAm<`0UC^Ok^1O01O0dNaAX1^>fNeAY1a>O2O000010O010O0010oNYAg0g>WO[Ah0f>XO[Ad0h>]OWAa0k>_OUA>o>@SA=P?AQA=[?L3N2M4M2NZYf5"}, "image_id": 798, "id": 13389}, {"iscrowd": 0, "category_id": 1, "bbox": [353.0, 189.0, 31.0, 30.0], "area": 478, "segmentation": {"size": [512, 512], "counts": "]f`51n?1N3N2N2N2M2O2N2M3N1O2M3O10O0100000O1M2O2N2M3N1O2M3N2N1O2M3Ngio1"}, "image_id": 798, "id": 13390}, {"iscrowd": 0, "category_id": 1, "bbox": [267.0, 193.0, 61.0, 60.0], "area": 1814, "segmentation": 
{"size": [512, 512], "counts": "ZgU41n?2M3N2N1O2M3N2N1N3N2N2M2O2JUOTAm0j>TOTAo0i>5O2N2N20O10N2M3N000O10O1000O010000O2O2N2M3N1O2M3O1O0100000O010N2N2N1N3N2N2M2O2Ab@9d?M2O2N2M3NXik2"}, "image_id": 798, "id": 13391}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 205.0, 62.0, 62.0], "area": 2196, "segmentation": {"size": [512, 512], "counts": "m6f0Y?2O010O00010O010O01O01O0N3M2M4M2N201O0XAlNe>X1O010N1N2M4M001101O010O01O01O03NO010O01O01O010O0WOdAO\\>MgA3Y>KiA6V>HmA7T>EoA;Q>CRB=n=@TB`0l=]OXBc0h=ZOZBf0f=XO\\Bh0_>L3N3M2M4M2M3N3M\\hP7"}, "image_id": 798, "id": 13392}, {"iscrowd": 0, "category_id": 1, "bbox": [333.0, 205.0, 37.0, 31.0], "area": 555, "segmentation": {"size": [512, 512], "counts": "nfV52m?1N3N2N2M2O2N2M3N1O2M3N2N11000O10M3N2N1N3N2N2M2O200000O010000O0N3N2N2M2OSiV2"}, "image_id": 798, "id": 13393}, {"iscrowd": 0, "category_id": 1, "bbox": [139.0, 224.0, 26.0, 27.0], "area": 436, "segmentation": {"size": [512, 512], "counts": "bgU22k?4L3N2M4L3M4N11O01O01O010O00010O01O01N1M3M4M2M4L3MlX]5"}, "image_id": 798, "id": 13394}, {"iscrowd": 0, "category_id": 1, "bbox": [431.0, 231.0, 61.0, 61.0], "area": 1598, "segmentation": {"size": [512, 512], "counts": "`hg61n?1N3N2M2O2M3N1N3N2M2O2000O01000O01000O01N2M2O2M3N1N3N2M3N1O0O010O010O010O010O010O03N2M2O2M3N1N3N2N1N3N2M2O2M3N1N3N2M3N1N^h9"}, "image_id": 798, "id": 13395}, {"iscrowd": 0, "category_id": 1, "bbox": [75.0, 239.0, 52.0, 50.0], "area": 1312, "segmentation": {"size": [512, 512], "counts": "VhU12m?2M3N1O2M3N2N1N3N2N2M2O2N2M2O2M3N2O01000O10O1000O10O1000O010000mNVAo0i>POYAo0m>00O0O1O0O3N1O2M3N1O2M3N2M2O2N2M3N1OhWP6"}, "image_id": 798, "id": 13396}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 250.0, 28.0, 29.0], "area": 528, "segmentation": {"size": [512, 512], "counts": "P8?^?3N3N1010O01O01O010O010O00010O010O00010N1N3M2M4M2M3N3Loga7"}, "image_id": 798, "id": 13397}, {"iscrowd": 0, "category_id": 1, "bbox": [317.0, 253.0, 21.0, 24.0], "area": 203, "segmentation": {"size": [512, 512], "counts": "_hn42m?2N1N3N2N2M2O00O010O10O10O101O1N3N2N2M2OQhf2"}, "image_id": 798, "id": 13398}, {"iscrowd": 0, "category_id": 1, "bbox": [480.0, 259.0, 32.0, 62.0], "area": 979, "segmentation": {"size": [512, 512], "counts": "^Y`72l?2N3N1N3M2N3N2M2O2O010O010O10OO2M2N3N2M2GPO`AS1]>PO`AR1^>POaAR1\\>:O2M3M2O2M2010OlG"}, "image_id": 798, "id": 13399}, {"iscrowd": 0, "category_id": 1, "bbox": [228.0, 267.0, 58.0, 45.0], "area": 1605, "segmentation": {"size": [512, 512], "counts": "SYb32l?3M2N2N3M2N3M2N3M2M4M2O2O010O0010O010O010O0010O010O0O2M2N3O0010O010O010O010O010O0OO0001O3M2N3M2M4M2N3M2N3M2N3M2N3L`g`3"}, "image_id": 798, "id": 13400}, {"iscrowd": 0, "category_id": 1, "bbox": [116.0, 273.0, 30.0, 29.0], "area": 526, "segmentation": {"size": [512, 512], "counts": "UYj11m?2M4L3N2M4M2M4M20010O010O00010O010O00010O01N1N2M4M2M3N3L3NYgf5"}, "image_id": 798, "id": 13401}, {"iscrowd": 0, "category_id": 1, "bbox": [28.0, 278.0, 73.0, 55.0], "area": 1886, "segmentation": {"size": [512, 512], "counts": "[Y>2m?1N3M2N3M3N1N3M2N3M2N3N110O010O10O10O010O010O010O10O010O10O010N1N3N1O200O010O0100O010O0100O010O01POUAj0k>SOWAm0o>0O010O01000O010O01O0N3M2O2M3M2N3M2O2M2N3M2N\\V]6"}, "image_id": 798, "id": 13402}, {"iscrowd": 0, "category_id": 1, "bbox": [384.0, 284.0, 44.0, 49.0], "area": 1054, "segmentation": {"size": [512, 512], "counts": "cYP62m?2N2M3N1O2N2M3N2N2N1N3N2N2N2M2O2N2N2N20O100000O10O100000OSOZAa0f>]O\\Ab0e>[O^Ac0d>[O]Ad0e>ZO]Ad0d>[O^Ab0R?N2N1O2M3N2N2N1N3NUfY1"}, "image_id": 798, "id": 13403}, {"iscrowd": 0, "category_id": 1, "bbox": [453.0, 299.0, 25.0, 28.0], "area": 350, 
"segmentation": {"size": [512, 512], "counts": "oiR71m?2O2M3N1N3M3N1N3N2M2O2M1001N3N2M2O2M3M2O2M3N1N3N_f`0"}, "image_id": 798, "id": 13404}, {"iscrowd": 0, "category_id": 1, "bbox": [286.0, 303.0, 40.0, 29.0], "area": 695, "segmentation": {"size": [512, 512], "counts": "QZ_41m?2N3M2M4M2N3M2M4O0010O0010O010O0010O010M2N30O010O0010OO2O01O010O0O1N3M2M4M2N3MWfl2"}, "image_id": 798, "id": 13405}, {"iscrowd": 0, "category_id": 1, "bbox": [220.0, 312.0, 97.0, 77.0], "area": 2820, "segmentation": {"size": [512, 512], "counts": "jZ^31m?3M2M4M2N3L3N2N3L3N3M2N30O00M4M2M4O0010O0010O010O00010O010O010O00010O010O010O00010O010O00nN]Ah0c>UO`Aj0`>TObAl0i>10O010O0ZORA;n>BTA>l>@WA?i>^OZAc0R?O010O01O0g@]OV?g0O010O01O01O010O01O01O010O010O01O01O010O010O01O01O010OO2M2N3L3N2M4M2N3LcTQ3"}, "image_id": 798, "id": 13406}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 313.0, 36.0, 63.0], "area": 1449, "segmentation": {"size": [512, 512], "counts": "V:\\1e>O0010O0010OM3N3L3N3L3N2O20O010O000M4M2M4POeA7]>FgA9Y>EiA;X>AlA>T>@nAa0R>[OQBe0o=YOTBe0d>N3L3N2N3L3N3L3NYe]7"}, "image_id": 798, "id": 13407}, {"iscrowd": 0, "category_id": 1, "bbox": [499.0, 341.0, 13.0, 25.0], "area": 185, "segmentation": {"size": [512, 512], "counts": "Vki71n?2M2N3M2N3N2M2N3M2O20O01YE"}, "image_id": 798, "id": 13408}, {"iscrowd": 0, "category_id": 1, "bbox": [37.0, 343.0, 65.0, 53.0], "area": 1722, "segmentation": {"size": [512, 512], "counts": "Wkb02m?3M2O1N2N3M2N2N2O1N3M2N2N2O2M2N2N2N2N3N1000001O01O0000010M2O1N2N2N3M2N2O1N2N1O0001O003M2O1N2O100010O0N2N2N2O2M2N2]Oe@>a?M2N2N2N2N3M2ORd\\6"}, "image_id": 798, "id": 13409}, {"iscrowd": 0, "category_id": 1, "bbox": [320.0, 344.0, 37.0, 38.0], "area": 837, "segmentation": {"size": [512, 512], "counts": "W[P53k?3M2M3c@FR?=j@GS?;k@GS?e0M21O01O010O010O01O01O010O010O010O01O01O01L3N3M2M4M2N2M4M2N3MmT]2"}, "image_id": 798, "id": 13410}, {"iscrowd": 0, "category_id": 1, "bbox": [362.0, 365.0, 53.0, 70.0], "area": 2105, "segmentation": {"size": [512, 512], "counts": "S]e53k?3L3N3L3N2N3L3N3L3N2GTO[AP1b>RO\\AP1a>:M2M3N3L3N3L3O110O010O00010O010O0010M2N3L3N2M4M2N3L3N2M4M2M4M2M4M2M3N3L3N3L3N2MYT`1"}, "image_id": 798, "id": 13411}, {"iscrowd": 0, "category_id": 1, "bbox": [190.0, 369.0, 137.0, 76.0], "area": 3851, "segmentation": {"size": [512, 512], "counts": "[\\o22l?2M4M2M3N3M2M4M2M4M2N2010O01O01O010O010O00O2L3010O010O00010O010O00010O010O0004MO0010O010O0010O001YOZA3e>K]A5c>HaA8_>EcA;]>CeA=\\>_OhAa0W>]OkAc0j>0O0001M2O2O01N1N3L3N3O00010O0010O0010O0010O0010O0010O010O0010O0010O0010O0010O0010O010O0010O0010O0010O0010O0O2M210O000O2L3N3M2M3010O0N2N3L3N3M2M3N3L3N3L3N2N3L3N3L[Sl2"}, "image_id": 798, "id": 13412}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 379.0, 5.0, 13.0], "area": 34, "segmentation": {"size": [512, 512], "counts": "k;=d?L3N2M4MUTm7"}, "image_id": 798, "id": 13413}, {"iscrowd": 0, "category_id": 1, "bbox": [411.0, 404.0, 72.0, 61.0], "area": 2504, "segmentation": {"size": [512, 512], "counts": "Pn]61k?4M4K4M3L5L3L4M3N3O00010O0001O01O00010O0001O01O00010O0001O01O00010O0O1O101O00010O0001O01L3L4M4K4M3L4010O0001O01O0N2L5L3L4M3L5L3L4M4K4M3L4M4K4MYS>"}, "image_id": 798, "id": 13414}, {"iscrowd": 0, "category_id": 1, "bbox": [9.0, 406.0, 54.0, 54.0], "area": 1485, "segmentation": {"size": [512, 512], "counts": "gm42l?3L3N3M2N3L3N3M2M3N3M2N3L3N3N11O010O01O010O010O01O01OSO[A?f>^O\\Ac0c>[O`Ad0a>YOaAh0^>UOeAk0i>O010O010O010O01O01O010O0N3M2N2M4M2N3L3N3M2N^RP7"}, "image_id": 798, "id": 13415}, {"iscrowd": 0, "category_id": 1, "bbox": [505.0, 429.0, 7.0, 18.0], "area": 66, "segmentation": {"size": [512, 512], 
"counts": "lml71m?2M4M2M4M2M3cB"}, "image_id": 798, "id": 13416}, {"iscrowd": 0, "category_id": 1, "bbox": [161.0, 435.0, 51.0, 45.0], "area": 1380, "segmentation": {"size": [512, 512], "counts": "\\n`23k?2N3L3N3M2M4M2N2M4M2N3O010O00010O010O010O00010O010O001N11O010O01O010O01O010O01O01M2M4M2N3L3N2N3L3H^@0k?Mjae4"}, "image_id": 798, "id": 13417}, {"iscrowd": 0, "category_id": 1, "bbox": [363.0, 438.0, 28.0, 23.0], "area": 532, "segmentation": {"size": [512, 512], "counts": "ime56j?:E5L00000000O1000O10000000O100000O100000O100000007I8HkQl1"}, "image_id": 798, "id": 13418}, {"iscrowd": 0, "category_id": 1, "bbox": [480.0, 445.0, 27.0, 27.0], "area": 459, "segmentation": {"size": [512, 512], "counts": "^^`73k?2M4M2M4M2M3N3O001O01O010O01O01O010O0O1N3L3N3L3N2M4MnQ2"}, "image_id": 798, "id": 13419}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 459.0, 49.0, 53.0], "area": 1508, "segmentation": {"size": [512, 512], "counts": "d>n0P?3M2M4M2010O0010O010O010O00010OoN[Ag0f>VO]Aj0b>TO`Al0k>0O0010O0010O010O0010O0010O000N2N2OO2N3M2M4M2N2M4M2N3M2M4M2NQQW7"}, "image_id": 798, "id": 13420}, {"iscrowd": 0, "category_id": 1, "bbox": [330.0, 461.0, 22.0, 18.0], "area": 328, "segmentation": {"size": [512, 512], "counts": "_^U56j?8H2N000000O10O100000000000O10O1000000008H[a_2"}, "image_id": 798, "id": 13421}, {"iscrowd": 0, "category_id": 1, "bbox": [396.0, 464.0, 23.0, 25.0], "area": 349, "segmentation": {"size": [512, 512], "counts": "P_V63k?3L3M3N3L3N3N100010O010O00010N1M3N3L3M4M2M3N]Q^1"}, "image_id": 798, "id": 13422}, {"iscrowd": 0, "category_id": 1, "bbox": [210.0, 465.0, 24.0, 26.0], "area": 369, "segmentation": {"size": [512, 512], "counts": "T_Y31m?2M3N3L3N3M2M3N3O010O01O01O010O01M2M4M2M3N3M2M\\aZ4"}, "image_id": 798, "id": 13423}, {"iscrowd": 0, "category_id": 1, "bbox": [231.0, 471.0, 66.0, 41.0], "area": 1838, "segmentation": {"size": [512, 512], "counts": "[oc33k?2M4M2N3L3N2M4M2O2O0010O0010O010O00010O010O0010O0010L3N3N1010O0010O010O001N1M3010O010O001O00001O001O001O00001O001O0N3BPAGS?7o@GS?7o@GT?5=N3M2MQQ[3"}, "image_id": 798, "id": 13424}, {"iscrowd": 0, "category_id": 1, "bbox": [483.0, 477.0, 29.0, 35.0], "area": 745, "segmentation": {"size": [512, 512], "counts": "eoa74j?2M3N3L3M3N3L3N3L3O1010O010O00010O010O00001O001O00001O00"}, "image_id": 798, "id": 13425}, {"iscrowd": 0, "category_id": 1, "bbox": [334.0, 479.0, 49.0, 33.0], "area": 1127, "segmentation": {"size": [512, 512], "counts": "o_W51e?0b@2\\?0b@2[?2a@1]?:N2M3N2N2O1001O001O00001O001O001O001O00M3N2N2M3O100001O001O001O00001O001ZOn@Q?_OQAa0X?M2M4M2N3L3NbPP2"}, "image_id": 798, "id": 13426}, {"iscrowd": 0, "category_id": 1, "bbox": [143.0, 485.0, 57.0, 27.0], "area": 1137, "segmentation": {"size": [512, 512], "counts": "moW23i?4M3L4M3L4M3M30000001O00001O0000001O0000001O0000001O0000001O000000O1M3001O00001O0000001O00001O000Kg@CY?9l@FU?6n@JR?OUA1]?01O0000Q`k4"}, "image_id": 798, "id": 13427}, {"iscrowd": 0, "category_id": 1, "bbox": [386.0, 485.0, 32.0, 27.0], "area": 571, "segmentation": {"size": [512, 512], "counts": "f_Q62k?3M4M2^@F[?=c@E[?b0L3O2O01L301O01O01O010O01O01O010O00001O000O2L3N3L3M3Nd`^1"}, "image_id": 798, "id": 13428}, {"iscrowd": 0, "category_id": 1, "bbox": [212.0, 494.0, 25.0, 18.0], "area": 284, "segmentation": {"size": [512, 512], "counts": "j_Z31m?2N3M2N3L3O20O0010O0010O001O001O001O00001O0N3M2M]PY4"}, "image_id": 798, "id": 13429}, {"iscrowd": 0, "category_id": 1, "bbox": [49.0, 1.0, 111.0, 61.0], "area": 2239, "segmentation": {"size": [512, 512], "counts": 
"``h03j?4M2M3M4M2N3O01O010O01O010O01O01O010O010O00010O010O00010O010O0010O0010O0010O0010O0010O010O00010O010O00010O010O01O01O010O01O01O010O01O010O01O010O01O01O010O01O010O01O01O010O010O00010O010O00010O010O0010O0010O00M4M2M4M2M3N3Lhn_5"}, "image_id": 799, "id": 13430}, {"iscrowd": 0, "category_id": 1, "bbox": [159.0, 44.0, 14.0, 22.0], "area": 173, "segmentation": {"size": [512, 512], "counts": "ma_23k?2M4L3M3M4M2O11M2M3M4L3M3Nd^Y5"}, "image_id": 799, "id": 13431}, {"iscrowd": 0, "category_id": 1, "bbox": [220.0, 90.0, 90.0, 105.0], "area": 5502, "segmentation": {"size": [512, 512], "counts": "_U^33k?3[NLYC7ei@EW?;g@HX?`00001O001O010O00010O010O00010O010O010O00010OM4M2N2M4M2M4M2Nh_e4"}, "image_id": 803, "id": 13435}, {"iscrowd": 0, "category_id": 1, "bbox": [271.0, 0.0, 54.0, 21.0], "area": 623, "segmentation": {"size": [512, 512], "counts": "P`W42n?1O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O001O00O1M3N2M3N2N2M3NR`m2"}, "image_id": 803, "id": 13436}, {"iscrowd": 0, "category_id": 1, "bbox": [358.0, 0.0, 30.0, 15.0], "area": 234, "segmentation": {"size": [512, 512], "counts": "PPc51o?001O1O1O1O001O1O1O1O001O1O1O1O001O1OO1O1O1N2O1O1O1N2O1O1NRPn1"}, "image_id": 803, "id": 13437}, {"iscrowd": 0, "category_id": 1, "bbox": [414.0, 0.0, 75.0, 46.0], "area": 2300, "segmentation": {"size": [512, 512], "counts": "gP_64l?2N3L4M3M3L3YO\\OnAg0o=[OnAf0R>]OkAb0V>AgA?Y>DcA=]>F`A9a>I]A7c>c00O10000O100O10000O10000O100001O2NO10000O10000O10000O100O10000O4M3M2M10O10O01000O010O10O10O10O10O10O10O103M3L4M2M4M3M3L4MW_;"}, "image_id": 803, "id": 13438}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 12.0, 63.0, 77.0], "area": 2448, "segmentation": {"size": [512, 512], "counts": "f0`0_?2N3M2O1N2N3M2N20N2N21O0001O0001O0001ON2N2N3M2O1N2N2N3M2N2O1N2N3M2N2N2N2O1N3O01O000001O01aNUBf0k=YOVBg0j=WOXBg0j=WOXBg0j=WOXBg0j=WOYBf0j=WOXBg0j=XOWBg0j=WOXB>_OD[>LXB>_OD[>LYB=^OET?9o@DS?:;N2O1N3M2NP^P7"}, "image_id": 803, "id": 13439}, {"iscrowd": 0, "category_id": 1, "bbox": [200.0, 35.0, 66.0, 94.0], "area": 3045, "segmentation": {"size": [512, 512], "counts": "`ST32l?2M4M2M3@FXA=d>FYA=e>FXA=d>FYA=3YO[>W1cAlNZ>^1M3N3L3N3L3N3M2M3N3L3N2M010O0103L3N3M2M4M2M3N3L3N3L3N2N3L310ON2M4M2N30O010M2N2N3L3N3L3N2M4M2M4M2N2M4M2M4M2M3N3Lgnj3"}, "image_id": 803, "id": 13440}, {"iscrowd": 0, "category_id": 1, "bbox": [260.0, 39.0, 82.0, 92.0], "area": 3257, "segmentation": {"size": [512, 512], "counts": "^SR42m?2N2N1N3N2N2N2N2M2O2N2N2N2N2M2O2BoNjAS1T>oNjAS1T>oNjAS1T>oNiAT1T>nNkAS1T>oNjAS1T>>N000O1HTN]Bl1c=VN[Bj1e=XNYBg1g=\\NWBd1i=9000000O010000000O01000002N2M3N2N2N1O2M3N2N2N2N1N3N2N2N2N2N101000000O1N1N3N2N2N2N2M3N1O2N2N2M3N2N1O2Njmd2"}, "image_id": 803, "id": 13441}, {"iscrowd": 0, "category_id": 1, "bbox": [433.0, 47.0, 68.0, 56.0], "area": 2657, "segmentation": {"size": [512, 512], "counts": "Zbh66a?9M30000e@@V?e000000001O01O00000000000O1N3OI7K6O0000000ZAUOW>k0`A^O`>Q10010O00000000000000010O000000000I7000001O00000001O00000001O00000001O00000SOgA3Y>DQB6T>AUB6o>Gm]5"}, "image_id": 803, "id": 13442}, {"iscrowd": 0, "category_id": 1, "bbox": [72.0, 60.0, 98.0, 61.0], "area": 3309, "segmentation": {"size": [512, 512], "counts": "_ST19f?2O0000000000O1000BKQA5o>>0000000000O100000O1000H]OSAc0m>8000000000000O10O100000000000]O]OiAc0W>c00000000O10000000K5000000O100000O1000000000000000O100000O10c0XABh>k001O01O00N2J6O2O01O00000001O01O000001O0001O000001N1J6K5J601O00000000010OM3J6J6K5J7I6K5O10001O0000000J6J7J5JPne1"}, "image_id": 803, "id": 13444}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 73.0, 30.0, 42.0], "area": 681, 
"segmentation": {"size": [512, 512], "counts": "Y2V1n>L4L4L4K3N000O010004L2M10O10O1000O10O1000O10O11O4K5L4L4K4Mfl`7"}, "image_id": 803, "id": 13445}, {"iscrowd": 0, "category_id": 1, "bbox": [415.0, 116.0, 90.0, 51.0], "area": 3177, "segmentation": {"size": [512, 512], "counts": "hd_65c?8G9M30010O000000000000001L3I700000000000001O01O0000000000L4L5O000000000000010O00000000000001O01O000000000000010O00000000000001O01O0000000000ROcA9]>_OkAa0j>1O0000000001O000001O00000001K4H8Gj[3"}, "image_id": 803, "id": 13446}, {"iscrowd": 0, "category_id": 1, "bbox": [157.0, 120.0, 12.0, 9.0], "area": 104, "segmentation": {"size": [512, 512], "counts": "hc^28h?0000001O0000000000000X\\[5"}, "image_id": 803, "id": 13447}, {"iscrowd": 0, "category_id": 1, "bbox": [113.0, 134.0, 50.0, 85.0], "area": 2940, "segmentation": {"size": [512, 512], "counts": "Zfh1a0]?20000000000000000000000\\Od0hNX1000000000000000000000J6000000000J6000000000001O00000000000000000g1YN0000SZ^5"}, "image_id": 803, "id": 13448}, {"iscrowd": 0, "category_id": 1, "bbox": [176.0, 136.0, 54.0, 84.0], "area": 2712, "segmentation": {"size": [512, 512], "counts": "QVh25d?7I7H9K40000000000010O0000000000010O0000YAWOW>i0bA^O^>S1O00000L4H8I7I7I8O000001O000001O000001O00000N2CfBTNZ=e1mB[NS=]1UCcNkm0000000000000000003M000000000000000000M3000000000F:I7PBnNR=P2000000000000000000000000000000000000000\\Od0_NUm\\6"}, "image_id": 803, "id": 13451}, {"iscrowd": 0, "category_id": 1, "bbox": [239.0, 141.0, 55.0, 89.0], "area": 2978, "segmentation": {"size": [512, 512], "counts": "heg3791f>8RA0l>c000000001O0001O000000N2G901O000001G81O00000000000jAoN]=Q1[BWOW=ClBU1DAW=CUCe1l00000000001O0001O0000000001^MhBR2g=G9G9H8G9G9G:Fak\\3"}, "image_id": 803, "id": 13452}, {"iscrowd": 0, "category_id": 1, "bbox": [305.0, 155.0, 55.0, 83.0], "area": 2657, "segmentation": {"size": [512, 512], "counts": "ofh42g?7J6I7J60001O01O00000001O0001O00000001O0QAUOj>P1M3J7H7I7J6I7I7N3O0001O00000001O01O0000000001O01K4I7I7J6I7kNcA`0d>YOdA`0Q?I7I7Jij[2"}, "image_id": 803, "id": 13453}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 172.0, 66.0, 39.0], "area": 1782, "segmentation": {"size": [512, 512], "counts": "[Vi66b?8I8H7O10000000001O01O000000000N02K5000000010O0000000000010O00000000000M3M4O00000001O000MN3K7N200001O0001O00000001O0001O00000K5I7Iij5"}, "image_id": 803, "id": 13454}, {"iscrowd": 0, "category_id": 1, "bbox": [370.0, 178.0, 58.0, 80.0], "area": 2822, "segmentation": {"size": [512, 512], "counts": "gVi57a?8H8H8I701O000001O00000001O000001O0M3000000000001O01O0nASOQ=m0gB[OY=e0_BCa==WBKi=W1O01OL4J61O000001O00000001O000001ON2H8H8H9G8H8H8H8H8HcjY1"}, "image_id": 803, "id": 13455}, {"iscrowd": 0, "category_id": 1, "bbox": [427.0, 219.0, 60.0, 40.0], "area": 1678, "segmentation": {"size": [512, 512], "counts": "dge62e?9H80000000010O000000000000010O0g@@R?g00000M3M4O000000000001O01O000000000O1L40001O00000001O000000015J00001O000K5M3000000J6GVY<"}, "image_id": 803, "id": 13456}, {"iscrowd": 0, "category_id": 1, "bbox": [82.0, 309.0, 50.0, 82.0], "area": 2594, "segmentation": {"size": [512, 512], "counts": "VZY12e?9L40000000000dADT=S100jAnN^=R1YBWOh=`101O000000000004L0010O000L4G9H8L400010O00000000000000010O0L4G9H8G9H8G9H9F9G9HWVl2"}, "image_id": 803, "id": 13461}, {"iscrowd": 0, "category_id": 1, "bbox": [335.0, 325.0, 56.0, 89.0], "area": 3124, "segmentation": {"size": [512, 512], "counts": "dkW56b?8d@Dl>l0H800000001O0000000hAPOa=o0WBYOi=g0oAAQ>W1000001O0001O00000000K6G8H8L40000001O01O000000000001O000H8H8H8H8H8H9H7H8H8K8M010O00dUl1"}, "image_id": 803, "id": 13462}, {"iscrowd": 0, 
"category_id": 1, "bbox": [452.0, 346.0, 51.0, 98.0], "area": 3073, "segmentation": {"size": [512, 512], "counts": "a\\R74d?8H8N2000000fA]OW=c0aBE_=o00O000000000000I8G8I7H8H800000001O0001O00000001O000001K4H8H8H8I7H8lNhA;]>]OkAc0V>UOQB9@6S?AUA?V?001OL4H\\T4"}, "image_id": 803, "id": 13463}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 349.0, 51.0, 79.0], "area": 2380, "segmentation": {"size": [512, 512], "counts": "Z[R63f?701O000]@G`?jAJ9HS==\\B39H[=o0]BYOc=b101O00000O100000001O00jM_Bm1j=0001O000001O00000001M2H8H8H8H8H8H8HoTT1"}, "image_id": 803, "id": 13464}, {"iscrowd": 0, "category_id": 1, "bbox": [27.0, 422.0, 96.0, 61.0], "area": 3253, "segmentation": {"size": [512, 512], "counts": "km=2l?3M2M4M2M3N3M2M4N110O01O01O010O01O01O010O0N3M2N210O010O01O01O010O010O00010O010O01O01O010M2N3O01O01O010XAlNb>T1\\AnNe>W1O0010OO1M4M201O00010O010O0010O0010O010O00010O010O0001M2M4M2M4M2N2M4M2M4Bk@MW?1k@OV?Nm@1S?LPA4P?JSA6[?O1NmQR6"}, "image_id": 803, "id": 13465}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 434.0, 8.0, 22.0], "area": 94, "segmentation": {"size": [512, 512], "counts": "b=f0Z?M3N3L3M3N3L3M_bk7"}, "image_id": 803, "id": 13466}, {"iscrowd": 0, "category_id": 1, "bbox": [152.0, 434.0, 60.0, 75.0], "area": 2348, "segmentation": {"size": [512, 512], "counts": "Q^\\21m?2N3M2N3M2N3M2010O010OnA@dSBDi01S=bBE[==cBE[=>bBE\\=B`A>Q?0001O01O000000L4H[Qb2"}, "image_id": 803, "id": 13468}, {"iscrowd": 0, "category_id": 1, "bbox": [357.0, 452.0, 61.0, 60.0], "area": 2664, "segmentation": {"size": [512, 512], "counts": "lob54d?8G9H800000000001O00000000000000O1H8G9H8O1001O00000000000000001O000000000000001O00I7O1001O000000000000001O0O1H8H8H8H8H8GSb^1"}, "image_id": 803, "id": 13469}, {"iscrowd": 0, "category_id": 1, "bbox": [429.0, 455.0, 65.0, 51.0], "area": 1766, "segmentation": {"size": [512, 512], "counts": "onf65d?7J600001O01O00000001O01O00000000010O0000000001OO2I6J6N2001O01O00000001O01O00000001O01XASO[>m0_AYOa>S10000010O00000000010kN^Aj0b>POdAP1f>00[OUA5k>E[A;e>^ObAb0n>1OJ7I6Jo`8"}, "image_id": 803, "id": 13470}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 470.0, 62.0, 42.0], "area": 1715, "segmentation": {"size": [512, 512], "counts": "b?>`?2M3N2M3O1001O00001O001O0000M3N2M3N2M30000001O001O00001O001O00001O00M3N2N2M3N2000N3M2M40O00010O010OO1M4M2M4M2M3N3M2M4M2M3NTaP7"}, "image_id": 803, "id": 13471}, {"iscrowd": 0, "category_id": 1, "bbox": [66.0, 487.0, 51.0, 25.0], "area": 738, "segmentation": {"size": [512, 512], "counts": "o_Q11m?2M3N2O100001O00001O001O00001ON2M3M3N2M3N2M3N200001O00001O001O00001O00001O001O00001O00001N1N3L3M3N3LbPU6"}, "image_id": 803, "id": 13472}, {"iscrowd": 0, "category_id": 1, "bbox": [226.0, 498.0, 42.0, 14.0], "area": 289, "segmentation": {"size": [512, 512], "counts": "o_a31m?2N2O1N2001O1O001O001O00N2N2N2O1N2O11O001O1O001O001O001O1O001O001O001O001O1O001O00Q`i3"}, "image_id": 803, "id": 13473}, {"iscrowd": 0, "category_id": 1, "bbox": [430.0, 500.0, 35.0, 12.0], "area": 276, "segmentation": {"size": [512, 512], "counts": "k_g65i?20000001O000000000000001O00000000L4M31O000000000000001O000000000O1IaPg0"}, "image_id": 803, "id": 13474}, {"iscrowd": 0, "category_id": 1, "bbox": [291.0, 506.0, 13.0, 6.0], "area": 44, "segmentation": {"size": [512, 512], "counts": "ooa41m?2N2O11O001O001O001O001OQ`W3"}, "image_id": 803, "id": 13475}, {"iscrowd": 0, "category_id": 1, "bbox": [272.0, 509.0, 6.0, 3.0], "area": 12, "segmentation": {"size": [512, 512], "counts": "o_X41m?2001O001OQ`d3"}, "image_id": 803, "id": 13476}, {"iscrowd": 0, "category_id": 1, "bbox": [297.0, 0.0, 13.0, 7.0], 
"area": 55, "segmentation": {"size": [512, 512], "counts": "P`d42n?1O1O1O1O1O00O1O1O1O1O1OQPU3"}, "image_id": 805, "id": 13477}, {"iscrowd": 0, "category_id": 1, "bbox": [176.0, 247.0, 75.0, 78.0], "area": 2654, "segmentation": {"size": [512, 512], "counts": "\\Yh21o?1N2N2N2Z@I`?9^@I`?=N2N2N2N2N3M2N2O1N2N1O00110]OmNXBS1f=oNZBQ1d=QO\\Bo0b=SO^Bm0`=UO^Bl0b=UO\\Bk0d=WOZBi0f=YOXBg0h=[OVBe0j=]OTBc0l=@QB`0o=BPB=P>i001O0000000001O000MPBWNP>i15N2N2N2N3M2O1N2O1000000O1N2N2N2N3M2N2N2N2N2N2O1N2N2N2N2N2N3M2N2NWWR4"}, "image_id": 805, "id": 13478}, {"iscrowd": 0, "category_id": 1, "bbox": [231.0, 288.0, 74.0, 79.0], "area": 2631, "segmentation": {"size": [512, 512], "counts": "ejc31n?2N2N2N2[@H_?:_@H_?>N2N3M2O1N2N2N2N2N1O0020001YOmN^BS1`=oNaBP1]=ROaBP1^=QO`Bo0`=SO^Bm0b=UO\\Bk0d=WOZBi0f=YOXBg0h=\\OUBd0k=^OSBc0l=_OSB`0m=BQB>o=i00000001O000KPB[NP>f1QBXNo=h15O2N2N2N2N2N2N2N200001O00N2N3M2N2N2N2N2N2N2N2N2O1N2N2N2N2N2N2N2N2NPVW3"}, "image_id": 805, "id": 13479}, {"iscrowd": 0, "category_id": 1, "bbox": [278.0, 334.0, 75.0, 80.0], "area": 2710, "segmentation": {"size": [512, 512], "counts": "S\\[41n?2N2N3M2\\@G^?;`@G^??N2N2O1N2N2N2N3M2N1O0020000\\OlN[BT1d=nN\\BQ1b=QO^Bo0`=SO^Bo0`=SO^Bm0b=UO\\Bk0d=WOZBi0f=YOXBg0h=[OVBe0j=]OTBc0l=_ORBa0o=AoA>Q>h001O00000000000000LRBWNo=h1402N2N2O2M2N2N2O1000000N2N2N3M2N2N2O1N2N2N2N2N3M2N2N2N2O1N2N2N2NaT_2"}, "image_id": 805, "id": 13480}, {"iscrowd": 0, "category_id": 1, "bbox": [30.0, 376.0, 66.0, 77.0], "area": 2400, "segmentation": {"size": [512, 512], "counts": "f]?1m?4M2M3N1O1N3N2M2O2N1N3N1O2M2O2M3N1O2M2O2CgNPBZ1o=hNnA[1Q>fNmA[1R>gNlAZ1S>:O1O01O0000000O2O000O1000001N10000O101O0O10001O0O2BaAZO`>e0bAYO`>f0`AXOb>g0`AWOb>g0`AWOb>g0`AWOc>f0>N1N2N3M2O1N3M2N3Lhc_6"}, "image_id": 805, "id": 13481}, {"iscrowd": 0, "category_id": 1, "bbox": [145.0, 0.0, 54.0, 18.0], "area": 567, "segmentation": {"size": [512, 512], "counts": "S`X22n?1LNW@4h?3O1O2N1O1O1O1O1OO1O100O1O1O1O1O1O1O1O1O100001O1O2N1O1O1O1O1O1O1O1O1O00O1O1O1O1O1O1O1O1O100O1O1O1O1O2NP`l4"}, "image_id": 806, "id": 13482}, {"iscrowd": 0, "category_id": 1, "bbox": [199.0, 0.0, 2.0, 1.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "P`S31o?0P`k4"}, "image_id": 806, "id": 13483}, {"iscrowd": 0, "category_id": 1, "bbox": [216.0, 0.0, 75.0, 43.0], "area": 1875, "segmentation": {"size": [512, 512], "counts": "ZP\\32m?2N2N3M2O1N2N2N2N2N2N3N1O1O11O00N2N2N3N1O1O1O1O1O1O1O2NO1O1O1O1O1O100O1O1O1O1O1O1O1O2O1N2N3M20000001O00000O1N2N2N2N3NO2N2O1N3M2N2N2N2N2N2N2N3N1N2N2NX_^3"}, "image_id": 806, "id": 13484}, {"iscrowd": 0, "category_id": 1, "bbox": [311.0, 0.0, 33.0, 18.0], "area": 316, "segmentation": {"size": [512, 512], "counts": "P`k41o?1O1O1O1O1O2N1O1O1O1O1O1O1O2N1OO1O1O100O1O1O1O1O1O1O1O1O1O100O3Mooc2"}, "image_id": 806, "id": 13485}, {"iscrowd": 0, "category_id": 1, "bbox": [433.0, 0.0, 14.0, 7.0], "area": 50, "segmentation": {"size": [512, 512], "counts": "P`h61o?1O1O1O1O1O1OO1O1O1O1O1O10P`P1"}, "image_id": 806, "id": 13486}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 2.0, 37.0, 51.0], "area": 1049, "segmentation": {"size": [512, 512], "counts": "2Q1P?00000000001O0N2N2N2O1001O000001OO1N2O1N2N3M2N2N2N2N2N2N2O1N2N3M2N2N2N2No^]7"}, "image_id": 806, "id": 13487}, {"iscrowd": 0, "category_id": 1, "bbox": [468.0, 5.0, 44.0, 64.0], "area": 1391, "segmentation": {"size": [512, 512], "counts": "ePZ71n?3M2N2N2N2N2N2N2O2M2N2N2N2N2N2N2N3N1O11O000000WAlNe>X10000001O0001O0000000001O0001ON2O0O001OUO"}, "image_id": 806, "id": 13488}, {"iscrowd": 0, "category_id": 1, "bbox": [426.0, 8.0, 9.0, 9.0], "area": 47, "segmentation": {"size": 
[512, 512], "counts": "\\Pe63l?2N2N1O01O2N2N2Nf_V1"}, "image_id": 806, "id": 13489}, {"iscrowd": 0, "category_id": 1, "bbox": [339.0, 11.0, 71.0, 74.0], "area": 2483, "segmentation": {"size": [512, 512], "counts": "UbY52m?2N2jNL]B6^=0_B3]=0aB2]=1\\BC^O=T>2\\BC^O=U>1[B6c=L[B6d=KZB7d=KZB7e=JYB9e=IZB6f=KZB3g=NXB2h=OWB0j=1TBOn=1PBOQ>o00000001O01O0000000002N2DjAkNX>T1iAjNZ>S1hAkNZ>S1hAlNY>R1iAlNY>R1:000000010O0000000002N2N2N3M2O1N2N02N1O00000101N2N2N2N2N2N2N3M2O1N2NWob1"}, "image_id": 806, "id": 13490}, {"iscrowd": 0, "category_id": 1, "bbox": [181.0, 26.0, 68.0, 57.0], "area": 1898, "segmentation": {"size": [512, 512], "counts": "[aj22m?3M2N2N2N2O1N2N2N2N3g@[OR?k0N2N2N2O2M00002N200000000001O0001O000000N2N000000010O1O2N2O100001O0001O00000000000N2O1N3M2N2N2N2N2N2N2N2O2M2N2N2Nm]S4"}, "image_id": 806, "id": 13491}, {"iscrowd": 0, "category_id": 1, "bbox": [228.0, 32.0, 8.0, 8.0], "area": 37, "segmentation": {"size": [512, 512], "counts": "TQb31n?3M2N1O02O1N2NnnY4"}, "image_id": 806, "id": 13492}, {"iscrowd": 0, "category_id": 1, "bbox": [414.0, 42.0, 72.0, 73.0], "area": 2611, "segmentation": {"size": [512, 512], "counts": "hQ_62m?2N3M2N2N2N2N3N1N2N2N2N3M2O100000O0O01O003M2N2O1O101O0000000010O00000001O00001N2M4L3M3M3M4L3M001O10O01O001O0011QOVBFk=8XBEj=9YBEh=9\\BCf=;]BCd=;_BBc==`B@a=8kABg04_=8lABh03^=9lBDV=9Z1N2N2N2N2NUn<"}, "image_id": 806, "id": 13493}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 50.0, 73.0, 94.0], "area": 2495, "segmentation": {"size": [512, 512], "counts": "T3\\1c>0002N2N2N1O000000000000001O00000001O0HlNeAT1[>nNcAR1]>POaAh0OYO`>1_Af03WO^>Q1dAmN\\>S1801O0000000000000000000001O2N2N2N1O000000000000000002N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2Nj]k6"}, "image_id": 806, "id": 13494}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 56.0, 5.0, 9.0], "area": 25, "segmentation": {"size": [512, 512], "counts": "h19h?N2N2N2NU^m7"}, "image_id": 806, "id": 13495}, {"iscrowd": 0, "category_id": 1, "bbox": [247.0, 78.0, 74.0, 71.0], "area": 2294, "segmentation": {"size": [512, 512], "counts": "cck32m?3M2N2N2N2O1N2N3M2N2N2N200000010O0000N0010O2N3M2N2N2N2N2N2O2M2N2N000000000001O01O0000000000FcARO^>n0cAPO]>P1eAoNZ>Q1hAmNX>S1:00000000010O0001O2N2N2N2N2O2M2N2N2Bf@3\\?Kf@3\\?Kf@3\\?Kf@3e?N2Oj\\o2"}, "image_id": 806, "id": 13496}, {"iscrowd": 0, "category_id": 1, "bbox": [369.0, 93.0, 71.0, 75.0], "area": 2630, "segmentation": {"size": [512, 512], "counts": "^ch52m?2N2N2N2N3M2O1N2N2N2N2N2N2N2N3M2O1N2N2O100001O01O00000000000001O01O000O1O1N2N2N2N2N2N3N1000000N2N2N2N2OlNRB6m=JUB4k=LWB2i=NYB0g=1ZBMf=3\\BKd=5_BHc=6_BHc=6_BHc=2nA^Oa0?b=1dBM^=1dBM^=1dBM^=1dBM_=1bBM`=1cBL_=2cBL_=2Y1NclS1"}, "image_id": 806, "id": 13497}, {"iscrowd": 0, "category_id": 1, "bbox": [442.0, 93.0, 10.0, 10.0], "area": 55, "segmentation": {"size": [512, 512], "counts": "QSm62m?2N2N2N20O1N2N2N2Nolm0"}, "image_id": 806, "id": 13498}, {"iscrowd": 0, "category_id": 1, "bbox": [55.0, 97.0, 56.0, 59.0], "area": 1532, "segmentation": {"size": [512, 512], "counts": "Ydk01n?2O1N2N2N2N3M2N2N2N2N2N2N2N2\\OUOQBl0m=VOQBl0m=VOhAK4Q1R>VOhAK4P1S>\\OkAd0U>^OiAc0V>c000002N2N2N2N2N2N1O001O2O1N3M2N2N2N2N2N2N2N1O0000011N2N3M2N2N2N2N2N2NY\\X6"}, "image_id": 806, "id": 13499}, {"iscrowd": 0, "category_id": 1, "bbox": [9.0, 117.0, 40.0, 39.0], "area": 752, "segmentation": {"size": [512, 512], "counts": "]d41n?2N2N2N2N2N2N2N2N2N2N2N2O1N2N1O0000000000002N000002Lo@VOS?h04N2N2N2N2N2N2N2O1N2N2N2Nn[W7"}, "image_id": 806, "id": 13500}, {"iscrowd": 0, "category_id": 1, "bbox": [301.0, 120.0, 75.0, 77.0], "area": 2801, "segmentation": {"size": [512, 512], 
"counts": "bef41n?2N2N2N2O1Z@Ha?=N2nNA`Ba0^=A`Ba0^=A`Ba0^=A`Ba0^=A`Ba0^=AoAE=m0a=@PBE=m0a=E\\B2eAM^>OdAO`>NaA0b>M`A1d>K^A3f>I\\A5h>GZA7a9"}, "image_id": 806, "id": 13502}, {"iscrowd": 0, "category_id": 1, "bbox": [378.0, 134.0, 9.0, 10.0], "area": 50, "segmentation": {"size": [512, 512], "counts": "ZTm52m?2N2N2N20N2N2N2Ng[n1"}, "image_id": 806, "id": 13503}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 148.0, 10.0, 12.0], "area": 70, "segmentation": {"size": [512, 512], "counts": "f46j?1N2N2O10M3N2N2N2NXkj7"}, "image_id": 806, "id": 13504}, {"iscrowd": 0, "category_id": 1, "bbox": [52.0, 156.0, 15.0, 16.0], "area": 126, "segmentation": {"size": [512, 512], "counts": "RUj02m?2N2N2N2N2N200000N2N2N2N2N2NmZn6"}, "image_id": 806, "id": 13505}, {"iscrowd": 0, "category_id": 1, "bbox": [97.0, 163.0, 58.0, 52.0], "area": 1481, "segmentation": {"size": [512, 512], "counts": "oe`12n?2M3N2M3N2M3N2M3N3L3N1N100O010O010O010O010O010O010N1O2O0O2O0O2N101N2O0101N2O0O2O1N101N1O2M3M2O2M3N1N3M3N1N3N1N3M3N`Zb5"}, "image_id": 806, "id": 13506}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 166.0, 26.0, 38.0], "area": 485, "segmentation": {"size": [512, 512], "counts": "]5=b?2N2N2N2N2N2N3O000000O1N2O100[Oh@a0X?]Oj@c0Z?00Ee@0[?Ng@2Y?Li@4W?Jk@6`?N2N2N2Njib7"}, "image_id": 806, "id": 13507}, {"iscrowd": 0, "category_id": 1, "bbox": [333.0, 171.0, 80.0, 62.0], "area": 2451, "segmentation": {"size": [512, 512], "counts": "bfV53l?2N2O1N2N2N2N2N2N3I\\Om@f0Q?\\Om@d0S?6N2N2N2N2N2N2OO0001O01O3M2N2N2N2N2N2O11O0N2N2O1N2N2N2N2N3M2N000010O0000000000000000010O00000K`AkN`>U1bAiN^>W16O3M2N2O1N2N2N2N2N2N3M2N2N2O1N2N2N2N2NRZa1"}, "image_id": 806, "id": 13508}, {"iscrowd": 0, "category_id": 1, "bbox": [57.0, 180.0, 30.0, 31.0], "area": 453, "segmentation": {"size": [512, 512], "counts": "Ufl02n?1N2N2N2N2N3M2N2N2N2N2N00000000000011N3M2N2N2N2N2N2N2N2N2NQZd6"}, "image_id": 806, "id": 13509}, {"iscrowd": 0, "category_id": 1, "bbox": [151.0, 190.0, 56.0, 64.0], "area": 1821, "segmentation": {"size": [512, 512], "counts": "Wg[22m?2N2N2O1N2N3M2^OB_A`0_>B_A`0_>B_A`0_>C^A?`>C^A?`>C^A?`>C^A?`>b0O1N2N3M2N1O00000000000002N2N2N2N3M2O1N2N2N2N200O1N2N2QOPAk0T?N2N2N2N3N1N2N2N2N2N2N2N2NSYh4"}, "image_id": 806, "id": 13510}, {"iscrowd": 0, "category_id": 1, "bbox": [8.0, 210.0, 70.0, 80.0], "area": 2733, "segmentation": {"size": [512, 512], "counts": "VW42m?2N2N2N2N2g@Fi>:UAHi>:UAHi>:UAHi>i0N2N2N2N2N2N2N2N2N3M2O1O100001O0000000O1N2N2O1N2N2N2N201O000000000001O0000000001O000000000O1N2N2]OYAJi>4YAJi>4YAJi>4YAJi>4ZAIi>5XAIj>5XAIj>5b0N2N2Nmgh6"}, "image_id": 806, "id": 13511}, {"iscrowd": 0, "category_id": 1, "bbox": [100.0, 217.0, 16.0, 17.0], "area": 152, "segmentation": {"size": [512, 512], "counts": "PWb13m?1N2N2N3M2O1N1O0002N2N3N1N2N2NQiU6"}, "image_id": 806, "id": 13512}, {"iscrowd": 0, "category_id": 1, "bbox": [375.0, 217.0, 84.0, 69.0], "area": 2666, "segmentation": {"size": [512, 512], "counts": "Phk52m?3M2O1N2N2N2N2N2N3M2N2O1N2000000N1O0000002O1HSOZAP1c>RO[AP1c>8O1N2N2N2N2N3M01O2O1N2N2N2N2N2N000001O01O0000000L^AkNb>U14000010O0000000000001O2O1N2N2N2N2N2N1O00010O2N2N2N2N2N2N3M2O1N2N2NgXj0"}, "image_id": 806, "id": 13513}, {"iscrowd": 0, "category_id": 1, "bbox": [488.0, 224.0, 24.0, 38.0], "area": 488, "segmentation": {"size": [512, 512], "counts": "ZWd72m?2N2N3M2N2O1N2N2N2N3M2O1000000001O01O0O1N2N2N2jH"}, "image_id": 806, "id": 13514}, {"iscrowd": 0, "category_id": 1, "bbox": [90.0, 226.0, 15.0, 15.0], "area": 118, "segmentation": {"size": [512, 512], "counts": "XW]11n?2N2N3M2N2N2O10O2M2N2N2N2N2NgX[6"}, "image_id": 806, "id": 13515}, 
{"iscrowd": 0, "category_id": 1, "bbox": [167.0, 237.0, 71.0, 55.0], "area": 1783, "segmentation": {"size": [512, 512], "counts": "chc21n?2N2N2N2N2N2N3N1N2N2N2N2N2N2N2N10O000000000000010O1O2N2N2N02N2N3M2N2N2OO00000000000001O01O00000000001O2O1N2N3M2N2N2N2N010O1O2N2N2N2N2N3M2O1NShX4"}, "image_id": 806, "id": 13516}, {"iscrowd": 0, "category_id": 1, "bbox": [97.0, 243.0, 54.0, 48.0], "area": 1765, "segmentation": {"size": [512, 512], "counts": "jg`1h0X?0000000000000000000000a0_O00000000000000000I700000000000O1000000000000_Oa00005K0000000000000000000000:F000000nWd5"}, "image_id": 806, "id": 13517}, {"iscrowd": 0, "category_id": 1, "bbox": [416.0, 267.0, 83.0, 70.0], "area": 2923, "segmentation": {"size": [512, 512], "counts": "oX`62m?2N2N2N2N2N2N2N2N2N2N2N2m@WOl>k0RAWOl>P1N2O1N2N2N3M2N2O10000000N2N2N2001O000000000001O0000000000000000N2N2N00000000000000000001O0000000FbASO^>m0dAQO\\>P1eAnN[>R1hAlNW>T1;O2N2N2N2N3M2N2N2Jf@C\\?;f@C\\?;6N2N2N2N2NRW6"}, "image_id": 806, "id": 13518}, {"iscrowd": 0, "category_id": 1, "bbox": [59.0, 300.0, 28.0, 29.0], "area": 409, "segmentation": {"size": [512, 512], "counts": "kim01n?2N2N2N2N1O2N2N2N2N2N2N2N2N2N02N2N2N3M2N2N2N2M3N2N2N2NXVd6"}, "image_id": 806, "id": 13519}, {"iscrowd": 0, "category_id": 1, "bbox": [86.0, 307.0, 83.0, 51.0], "area": 2351, "segmentation": {"size": [512, 512], "counts": "XZ[11n?2N2N2N2N2O1N2N2N2N2N2N2N2N2O11O00N2N2N2N2N3M2N2N2N2N2O1N10O2N2O1N2N2N2N2N2N2N2O10000000000000N2N2N1O000001O001O2N2N1O00000000000000002N2N2N2N2N2O1N2N2N2N2N2N2N2N2NkU[5"}, "image_id": 806, "id": 13520}, {"iscrowd": 0, "category_id": 1, "bbox": [216.0, 312.0, 14.0, 15.0], "area": 114, "segmentation": {"size": [512, 512], "counts": "oY\\32m?2N2N2N2N2N2N02O1N3M2N2N2NRf\\4"}, "image_id": 806, "id": 13521}, {"iscrowd": 0, "category_id": 1, "bbox": [196.0, 320.0, 25.0, 20.0], "area": 254, "segmentation": {"size": [512, 512], "counts": "TZR32m?2N2N2N2O100000000000000N2N2N1O000002N2L4N2N2N2NjUa4"}, "image_id": 806, "id": 13522}, {"iscrowd": 0, "category_id": 1, "bbox": [248.0, 322.0, 116.0, 122.0], "area": 5754, "segmentation": {"size": [512, 512], "counts": "\\[l31n?2N2N2N2N2M3N2N2G@o@b0o>@o@b0o>@o@b0o>9N2N2N2N2N2O1N2N2eAaNR>a1lAaNR>a1lAaNR>a1mA`NQ>i1N2N2N2N2N2N2N2N00000002N2O1000000000000000000oM[Bf1e=XN]Bh1c=VN_Bj1a=TNaBl1_=RNcBn1h=00000000000000000000]NoAV1Q>hNQBX16_N_=7]BZ12aNa=3_B\\1NcNc=OaB^1JeNe=KcB`1FgNg=HdBk1i=0000000000N2N2N0000000000000010O00000000000000000001O2N2N2N2B`AYOb>e0`AYOb>f0_AXOc>f0_AXOc>f0_AXOc>f0=N2N2N2N2N2N2N2N2N2N2NkcY2"}, "image_id": 806, "id": 13523}, {"iscrowd": 0, "category_id": 1, "bbox": [22.0, 329.0, 70.0, 63.0], "area": 2133, "segmentation": {"size": [512, 512], "counts": "jZ;3l?2N2N2[@I^?9`@I^?>O1N2N2N00002N2N2N2N2N2N2WAlNc>V1[AlNd>Y10000000000cAfNS>Z1kAhNU>X1iAjNW>V1hAkNX>U1fAmNZ>[12N2N2N2N3O0N2N1O01O000000000001O1O2N2N2N2O100000000YOn@?R?_OPAb0P?\\OQAd0o>ZOSAf0T?O1N2N2N2N2N3N1N2N2N2N]da6"}, "image_id": 806, "id": 13524}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 373.0, 2.0, 7.0], "area": 8, "segmentation": {"size": [512, 512], "counts": "e;7i?Jadn7"}, "image_id": 806, "id": 13525}, {"iscrowd": 0, "category_id": 1, "bbox": [12.0, 425.0, 48.0, 47.0], "area": 1125, "segmentation": {"size": [512, 512], "counts": "T^61n?2N2N2N3M2N2N2N2N2N2N2O1N2N2N2N2N2N2N1O0000000000000010O002N2N2N2N2N2N2N2N3M2N2N2N2N2O1N2N2N2NUbQ7"}, "image_id": 806, "id": 13526}, {"iscrowd": 0, "category_id": 1, "bbox": [277.0, 477.0, 22.0, 22.0], "area": 252, "segmentation": {"size": [512, 512], "counts": "XoZ43l?2N2N2N2N2O1N2N2N00000001O2N2N2N2N2O1N2N3MkPZ3"}, 
"image_id": 806, "id": 13527}, {"iscrowd": 0, "category_id": 1, "bbox": [287.0, 493.0, 26.0, 19.0], "area": 274, "segmentation": {"size": [512, 512], "counts": "eo_42m?2N2N2N2N2N2N2N2O10O1O1O1O1O1O1O1O1O1O1O1O1O1O1N2NTPS3"}, "image_id": 806, "id": 13528}, {"iscrowd": 0, "category_id": 1, "bbox": [204.0, 0.0, 72.0, 79.0], "area": 3894, "segmentation": {"size": [512, 512], "counts": "`PV32n?7H7J7I6J6J000O010000002N8G:G8H2NO10GUN]Bk1c=900000O01000000000O1000O100000O15K00O0100000000000O10O100004G:K6J6J1O000000000AhAUOX>l0oAmNQ>S1>000000O104L7I7I6J7I\\ne3"}, "image_id": 808, "id": 13529}, {"iscrowd": 0, "category_id": 1, "bbox": [328.0, 0.0, 183.0, 88.0], "area": 13250, "segmentation": {"size": [512, 512], "counts": "PPT52n?8H7I7I8H7I7I7I8H7I7I8H5K000000O1000000000000O100000000000000O1000000000000O1000000000000O100000000000000O1000000000000O1000000000000O1000000000000O100000000000000O1000000000000O1000000000000O100000000000000O1000000000000O1000000000000O100000000000000O1000000000000O1000000000000O1000000000000O100000000000000O1000000000000O1000000000000O100000006J7I7I8G8I7I8H7I7IP>"}, "image_id": 808, "id": 13530}, {"iscrowd": 0, "category_id": 1, "bbox": [83.0, 9.0, 83.0, 46.0], "area": 3008, "segmentation": {"size": [512, 512], "counts": "``Y13m?8H7I7I7I7H1000000000000O10O100000000000O0100000000000O0100000000000O0100000000000O01000000000001N3N0000O10000000O1000O10000000O100000O100000O100000O100002M8I7I7I8Hhn\\5"}, "image_id": 808, "id": 13531}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 24.0, 14.0, 102.0], "area": 757, "segmentation": {"size": [512, 512], "counts": "h0V3m\\2eM5K4L000000O0100000000000O10O100000000000O0100000000000O0100000000000O10O100000000000O01000000000005J7LN8H7I7I7I4K010000000000000O0100000000000O0100000000000O01000000000000O01000000000000O0105K8H1OO1000O10000000O1000O1000000000O1000O10XNnCBR<>UD[Ok;e0]DSOc;m0dDkN];U1jDdNV;\\1RE\\Ni:h1_EQNa:o1fEjMY:W2h10000000000O0100000000000O0100000000000O10O100000000000O0100000000000O0100000000000O10O1001O4L00O10O1000000000O10O100000000000O0100000000000O10O1000000000O10O100000000000O07J8H7I7I8H7I7I3L10000000O1007Hjfo3"}, "image_id": 808, "id": 13538}, {"iscrowd": 0, "category_id": 1, "bbox": [430.0, 283.0, 17.0, 15.0], "area": 125, "segmentation": {"size": [512, 512], "counts": "SYg61n?2O2M2N3N0O0001O01O000101N2N3N1NQWP1"}, "image_id": 808, "id": 13539}, {"iscrowd": 0, "category_id": 1, "bbox": [419.0, 291.0, 14.0, 14.0], "area": 107, "segmentation": {"size": [512, 512], "counts": "Yia61n?3M2O2M3M0010O00102M2N2O2MhVW1"}, "image_id": 808, "id": 13540}, {"iscrowd": 0, "category_id": 1, "bbox": [178.0, 346.0, 59.0, 63.0], "area": 3012, "segmentation": {"size": [512, 512], "counts": "P[i26j?7I6I8I7I7I6J7I4L000O10O100000000000O10O100000000000O10O10000000000000O10O100000000000O10O1000000000003M8G9H8H7I8H8HdSY4"}, "image_id": 808, "id": 13541}, {"iscrowd": 0, "category_id": 1, "bbox": [433.0, 395.0, 79.0, 117.0], "area": 6147, "segmentation": {"size": [512, 512], "counts": "knh6U1j>1N2O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1N2O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1N2O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O100"}, "image_id": 808, "id": 13542}, {"iscrowd": 0, "category_id": 1, "bbox": [51.0, 428.0, 172.0, 84.0], "area": 11982, "segmentation": {"size": [512, 512], "counts": 
"[oi0e0[?00000000000000000000000000000000000000000_N8jBHV=8jBHV=8jBHV=8jBHV=8jBHV=8jBHV=8jBHV=8jBHV=8jBHV=8jBHV=8jBHV=8jBHV=8jBHV=8jBHV=8jBHV=8jBHV=8jBHV=8jBHh]OnAc0S>_OjAa0W>@gA`0Z>AdA?]>BbA=_>a01O1O1O1O100O1O1O1O1O1O1O1O1O1M3O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1OQ`Z2"}, "image_id": 809, "id": 13546}, {"iscrowd": 0, "category_id": 1, "bbox": [404.0, 0.0, 37.0, 9.0], "area": 219, "segmentation": {"size": [512, 512], "counts": "PPZ61o?1O1O1O1O0000000000004L000000000000000000000000M300O1000000000000O1O1O1OQ`S1"}, "image_id": 809, "id": 13547}, {"iscrowd": 0, "category_id": 1, "bbox": [444.0, 0.0, 24.0, 12.0], "area": 156, "segmentation": {"size": [512, 512], "counts": "PPn61o?1O1O1O1O1O1O1O1O1O1O1O00O1O1O1O1O1O1O1O1O1O1OQPf0"}, "image_id": 809, "id": 13548}, {"iscrowd": 0, "category_id": 1, "bbox": [74.0, 1.0, 80.0, 80.0], "area": 2874, "segmentation": {"size": [512, 512], "counts": "\\QU11n?2N2N2N2N2N2N2N2N2N2N2N2N200000000000000000000000000N2N2N2N2N2N2N2N2N2N2N1O0000000000000GaNRB_1n=cNPB]1P>eNnA[1R>gNlAY1T>90000000000000000000002O1000O1N2N2N2SOeA2]>LeA2]>LeA2]>LeA2]>LeA2]>_O[A;:4_>JcA4_>JcA4_>JcA4_>JcA4_>JcA4V?N2Nenb5"}, "image_id": 809, "id": 13549}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 4.0, 9.0, 8.0], "area": 72, "segmentation": {"size": [512, 512], "counts": "48h?000000000000000l_k7"}, "image_id": 809, "id": 13550}, {"iscrowd": 0, "category_id": 1, "bbox": [454.0, 12.0, 58.0, 75.0], "area": 2389, "segmentation": {"size": [512, 512], "counts": "nPS71o?1N2N2N2N2N2N2N2N2N2N2N2N2N2N2O20O00TASOd>m0ZAUOf>k0XAWOh>Q100N2N2N2N2N2N2N0000000hA^NS>d1kA^NU>g10000000000001O0000N2N2000000001O0N2N2N2N2N2N2NoN"}, "image_id": 809, "id": 13551}, {"iscrowd": 0, "category_id": 1, "bbox": [329.0, 17.0, 81.0, 77.0], "area": 2997, "segmentation": {"size": [512, 512], "counts": "aaT51o?a0^O1000:F0000000000000000000O2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N01O2N2N2N2N2N2N1O00000O1000000LcAfN]>Z14000000000000000000O102N2N2N2N2N2N1O2N2N2N2N2N2N2N2N2N2N2N2N2N2Nlnb1"}, "image_id": 809, "id": 13552}, {"iscrowd": 0, "category_id": 1, "bbox": [410.0, 25.0, 9.0, 10.0], "area": 52, "segmentation": {"size": [512, 512], "counts": "mP]62l?3N2N2000O1N2N2NS_^1"}, "image_id": 809, "id": 13553}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 26.0, 29.0, 49.0], "area": 1262, "segmentation": {"size": [512, 512], "counts": "j0a1_>000000H8000000000002N07I000000000000000000J600000000000T_a7"}, "image_id": 809, "id": 13554}, {"iscrowd": 0, "category_id": 1, "bbox": [124.0, 51.0, 68.0, 78.0], "area": 2578, "segmentation": {"size": [512, 512], "counts": "jRn11n?2N2N2N2N2N2N1O2N2N2N2N200M3N2NYOo@b0P?]ORAc0n>[OSAf05XOc>j0[AWOd>k0ZAWOd>k0ZAWOd>T1M3N2N1O000000002N2N2I\\NoAf1o=\\NoAf1o=7000000N2N1O00000000XOVB[Oj=e0XBYOh=g0ZBWOf=i0\\BUOd=k0^BSOb=m0`BQOa=n0aBPOa=n0aBPOa=n0aBPOa=n0aBPOa=n0aBPOa=n0aBPO`=o0j0N2N2N2@j@3X?Kj@3X?Kj@3X?Kj@3X?Kj@3c?N2Ndmo4"}, "image_id": 809, "id": 13555}, {"iscrowd": 0, "category_id": 1, "bbox": [417.0, 54.0, 76.0, 66.0], "area": 2569, "segmentation": {"size": [512, 512], "counts": "Yb`61n?2N2N2N2^@IX?9f@IX?9f@IX?8g@JW?a0N2N2N2N2N2N2N2N2N2N1O01O2N_AhN]>W1bAkN^>T1aAnN_>R1_APOa>X10000000000000000000000N2N2N2N1O000002N2N2N2N2N2N2M3N2N2000000000000O1N2N2N2N2N2N2N2N2N2N2N2N2N2N2M3NR]9"}, "image_id": 809, "id": 13556}, {"iscrowd": 0, "category_id": 1, "bbox": [32.0, 60.0, 79.0, 77.0], "area": 3027, "segmentation": {"size": [512, 512], "counts": 
"RS`0h0X?000000000000000000000000006J000010O00000000O1O1O1O1O1ON21O1O1^OmNVBT1i=nNUBS1i=POUBQ1j=QOTBo0l=SORBm0n=UOPBl0o=VOoAl0o=VOoAl0o=d0M3N2O1000000000000000N2N2N2N2N2N2N2N2N2N2N2A`A[Ob>c0`A[Ob>c0`A[Ob>c0`A[Ob>c0`A[Ob>c0`A[Ob>c0?N2N2N2N2N2N2N2N2N2NR]X6"}, "image_id": 809, "id": 13557}, {"iscrowd": 0, "category_id": 1, "bbox": [109.0, 83.0, 7.0, 8.0], "area": 34, "segmentation": {"size": [512, 512], "counts": "ebf12m?3M2O10N2N2N[mU6"}, "image_id": 809, "id": 13558}, {"iscrowd": 0, "category_id": 1, "bbox": [186.0, 91.0, 67.0, 76.0], "area": 2438, "segmentation": {"size": [512, 512], "counts": "cTm22m?2N2N2N2N2O1N2\\OBcA`0[>BcA`0[>BcA`0[>BcA`0[>BcA`0[>BcA`0[>BcA`0[>BcA`0[>BcA`0[>d0N2N1O0000000000000000001O0000000O1000000002N1O2N2N2N2N2N2N2N2N2N200nNVAl0k>ROWAl0P?N2N2N2N2N2N2N1O2N2N2N2N2N2MX\\Q4"}, "image_id": 809, "id": 13559}, {"iscrowd": 0, "category_id": 1, "bbox": [110.0, 95.0, 8.0, 9.0], "area": 40, "segmentation": {"size": [512, 512], "counts": "SSg12m?2N2N2N02N2N2NolT6"}, "image_id": 809, "id": 13560}, {"iscrowd": 0, "category_id": 1, "bbox": [391.0, 96.0, 59.0, 59.0], "area": 1945, "segmentation": {"size": [512, 512], "counts": "\\cS62m?2N2N2N2N2m@F]>a0^AA`>a0^AA`>a0^AA`>a0^AAa>P10000000000000000000OOO10000000002N2000000000000000000000000O1N2N2N2N2N2N2N2N2N2N2@f@7\\?Gf@7\\?Gf@7c?N2N2Nhkn0"}, "image_id": 809, "id": 13561}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 98.0, 18.0, 34.0], "area": 612, "segmentation": {"size": [512, 512], "counts": "R3R1n>000000000000000000000000000000000nlf7"}, "image_id": 809, "id": 13562}, {"iscrowd": 0, "category_id": 1, "bbox": [96.0, 132.0, 80.0, 51.0], "area": 2293, "segmentation": {"size": [512, 512], "counts": "eT`1m011O1N2O1O1O1O0000000000000110000000000000000000000001O0O1N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2NmjW5"}, "image_id": 809, "id": 13563}, {"iscrowd": 0, "category_id": 1, "bbox": [226.0, 144.0, 77.0, 73.0], "area": 2616, "segmentation": {"size": [512, 512], "counts": "TVa31n?2N2N2N2oNHWB:g=HWB:g=HWB:g=HWB:g=HWB:g=HWB:g=HlAE2e0P>HlAE2e0P>HlAE2e0P>HlAE2e0P>1nA1P>Q1N2N2N2N2N02N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N1O000000000000000001O2N2N2N2OO2N2N2N2N2N2N1O002N2N2N2N2N2N2N2N2NiZX3"}, "image_id": 809, "id": 13564}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 171.0, 8.0, 72.0], "area": 384, "segmentation": {"size": [512, 512], "counts": "[5X2h=00000`1`N000000Uik7"}, "image_id": 809, "id": 13565}, {"iscrowd": 0, "category_id": 1, "bbox": [281.0, 185.0, 95.0, 89.0], "area": 3929, "segmentation": {"size": [512, 512], "counts": "gf\\41n?2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2O100PBSNl=m1RBUNn=o100000000000000000000000000000000000000000N2N2N2N000000002N2N4L2N2N2N2N2N20000O1N2N2N2N2N1O001O2N2N2N2N2N2N2N2N2N2N2N2N2NchS2"}, "image_id": 809, "id": 13566}, {"iscrowd": 0, "category_id": 1, "bbox": [13.0, 190.0, 83.0, 76.0], "area": 3452, "segmentation": {"size": [512, 512], "counts": "Vg63k?2M4_@JT?8i@LS?7k@KS?c0L3N2O20O01O010O00010O010O00010O010O0010O0010O0010OO1O2N1O2N1O2N1O1O2N1O2N1O1O2N1O2N00000000000000002N2N2N2N2N2N200000kNXB1h=L[B4e=J]B4e=J]B4e=J]B4e=J]B4e=J]B4d=K^B3d=K^B3d=K^B3d=_OiA8e07d=_OiA8e06i=HYB6i=HXB7j=GXB7j=GXB7j>N2N2N[h_6"}, "image_id": 809, "id": 13567}, {"iscrowd": 0, "category_id": 1, "bbox": [162.0, 200.0, 70.0, 75.0], "area": 2417, "segmentation": {"size": [512, 512], "counts": 
"lVa22m?2N2N2N2N2N2N2N2N2]A]Of=e0XB]Of=e0XB]Of=e0XB]Of=e0XB]Of=e0XB]Of=e0XB]Of=e0XB]Of=e0XB]Of=e0XB]Of=e0XB]Of=e0XB]Oh=c0VB_Oj=\\1000000000N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N0000000000000000002N2N2N2N2N2N2N2N2N2N2N2N2Njh[4"}, "image_id": 809, "id": 13568}, {"iscrowd": 0, "category_id": 1, "bbox": [241.0, 236.0, 93.0, 78.0], "area": 3314, "segmentation": {"size": [512, 512], "counts": "Shh32m?2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2O100000000000000cA`NY>`1eAbN[>b1000000000000000000000000000000000O1N2N1O000000002N2N2N2N2N2000000000O1N000001O2N2N2O10000N2N2N2N2N2N2N2N2N2N2N2N2N2N2NRgh2"}, "image_id": 809, "id": 13569}, {"iscrowd": 0, "category_id": 1, "bbox": [223.0, 246.0, 10.0, 8.0], "area": 45, "segmentation": {"size": [512, 512], "counts": "kg_31n?1O2N2M2000O2N2N2NXX[4"}, "image_id": 809, "id": 13570}, {"iscrowd": 0, "category_id": 1, "bbox": [93.0, 250.0, 70.0, 74.0], "area": 2587, "segmentation": {"size": [512, 512], "counts": "mh^12m?2N2N2N2N2O1N2N2N2N2N2N2O1000000000000O1N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N0000000000000000000000000200O1N2N2N2O1N2TOdA1^>GaAF3a0^>GaAF3a0^>GaAF3a0^>GjA7X>GjA7X>GjA7X>GjA7X>GjA7Q?N2N2NkV^5"}, "image_id": 809, "id": 13571}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 272.0, 44.0, 53.0], "area": 1502, "segmentation": {"size": [512, 512], "counts": "b8U1l>O010O01O010UAmNg>V110O010O010O01O01O010O010O0O2M2M4M1O01O3M2N3L3N3M2N3M2M3N3M2N3M2M4M2N3M2NZgY7"}, "image_id": 809, "id": 13572}, {"iscrowd": 0, "category_id": 1, "bbox": [219.0, 273.0, 74.0, 91.0], "area": 3290, "segmentation": {"size": [512, 512], "counts": "nh]31n?2N2i@L_>6_ALMOP>7QBLMOP>7QBLMOP>7QBLMOP>7QBLMOP>7QBLMOP>7QBLMOP>7QBLMOP>7QB>m=DQB>m=l0O1000000TBnMi=U200000000000000000000O1N2N2O100000N2N2N2oNXBGj=7XBGj=7XBGj=7XBGj=7XBGj=8WBFk=:UBDm=QB@Q>`0oA^OS>b0mA\\OU>d0kAZOW>f0iAXOY>h0gAVO[>j0eATO]>l0<000000000000O1N2N2N2N2N2N2N2N2N2N2N2N2NkU]3"}, "image_id": 809, "id": 13573}, {"iscrowd": 0, "category_id": 1, "bbox": [433.0, 288.0, 66.0, 76.0], "area": 2624, "segmentation": {"size": [512, 512], "counts": "\\jh62m?2N2N2XOJcA8[>JcA8[>JcA8[>JcA8[>KbA7\\>KbA7\\>LaA6]>LaA6]>LaA6]>h0N2N2N2hAYNT>k1N2N2N2N2N20O1O100\\OTBSOl=k0VBUOh=k0ZBUOd=k0^BUOb=j0_BVOa=j0_BTOc=l0]BROe=n0]BnNe=R1[BlNg=T1YBjNi=V1WBhNk=X1TBgNn=Y1RBeNP>[1;0000000001O000N2N2N2N2N2N2N2N2N2N2ZOi@`0]?N2N2N2N2N2N2N2NeU6"}, "image_id": 809, "id": 13574}, {"iscrowd": 0, "category_id": 1, "bbox": [41.0, 308.0, 74.0, 73.0], "area": 2765, "segmentation": {"size": [512, 512], "counts": "ajd01m?3M2GNb@5[?Mc@5[?:M2N3M2N3L3O2O010O01O010O010O01O010O010fAmNe=S1YBPOg=P1VBROj=n0SBVOm=j0PBXOP>h0nA[OR>d0lA^OT>c0iA@W>T1010O010O00010O010O010O010O010O0O2M2N3L3N2N00000000O04M2N3M2N2N3M2M4M2N3M2N3M2N3M2MPVV6"}, "image_id": 809, "id": 13575}, {"iscrowd": 0, "category_id": 1, "bbox": [378.0, 322.0, 78.0, 95.0], "area": 3353, "segmentation": {"size": [512, 512], "counts": "U[m52m?2N2N2N2N2N2N2N2N2N2N2N2N2N2N2JROWAP1g>4000002N2N2N2N2N2N2N2N2kAYNn=i1PBYNn=n1N20000000000000000000000O1N2N20O1N2N2O10[BoMZ=Q2dBQN\\=o1bBSN^=m1`BUN`=<]Bk01kNb=8_BW1b=gN`BW1b=gN`BDJa1h=iNbBU1`=iNbBS1DeNl=6bBS1b=kN`BS1b=kN`BS1b=kN`BS1X>N2N2N2N2XOm@`0U?^Om@`0[?N2N2N2N2N2N2N2Njck0"}, "image_id": 809, "id": 13576}, {"iscrowd": 0, "category_id": 1, "bbox": [133.0, 324.0, 11.0, 11.0], "area": 69, "segmentation": {"size": [512, 512], "counts": "YjR23l?2N2N1O2N11N2N2N2N2Nheg5"}, "image_id": 809, "id": 13577}, {"iscrowd": 0, "category_id": 1, "bbox": [164.0, 330.0, 69.0, 77.0], "area": 2623, "segmentation": {"size": [512, 512], "counts": 
"T[b21n?2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N20000000000O1N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2O10000000000000000000000000O1N2N2N2eNkALO`0X>BkALO`0X>BkALO`0X>BkALO`0X>BkALO`0X>BkALO`0X>BWBN2N2N2N2N2NlS[4"}, "image_id": 809, "id": 13578}, {"iscrowd": 0, "category_id": 1, "bbox": [125.0, 331.0, 10.0, 11.0], "area": 62, "segmentation": {"size": [512, 512], "counts": "_jn13l?2N2N1O2000M2O2N2NbUl5"}, "image_id": 809, "id": 13579}, {"iscrowd": 0, "category_id": 1, "bbox": [508.0, 349.0, 4.0, 11.0], "area": 26, "segmentation": {"size": [512, 512], "counts": "U[n72l?3L3M3SE"}, "image_id": 809, "id": 13580}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 358.0, 58.0, 64.0], "area": 1678, "segmentation": {"size": [512, 512], "counts": "V;c1]>001O00POhA9W>\\OTBd0d>10O010O010O010O010O010O01O010O010N1O1O2N1O2N1O01O2N2N2N2N0000000000000000001O2N2N2N2N2N2N2N2N2N2N2N2N2NjcR7"}, "image_id": 809, "id": 13581}, {"iscrowd": 0, "category_id": 1, "bbox": [336.0, 370.0, 77.0, 79.0], "area": 2828, "segmentation": {"size": [512, 512], "counts": "a\\X51n?2N2N2N2N2N2N2N2N2N2H]OPAe0n>]OPAe0n>8N2N2N2N2N10O1O0001O2O1N2N2N2N200000000000000nA]Nf=c1XB_Nh=a1VBaNj=_1TBcNl=]1RBeNn=[1PBgNP>d11N2N2N2N2N2O10000N2N200000000000000lN`Ag0`>WObAh0_>VOcAh0_>VOcAh0_>VOcAh0_>VOcAh0l>N2N2N2N2N2N2N2N2N2N2N2NbRa1"}, "image_id": 809, "id": 13582}, {"iscrowd": 0, "category_id": 1, "bbox": [478.0, 379.0, 34.0, 59.0], "area": 1098, "segmentation": {"size": [512, 512], "counts": "c\\_72m?2N2N2N2N2N2N2N2N2N2N2N2N2N2N2M3N2N2N2N2N2N2N200000000000000000000jC"}, "image_id": 809, "id": 13583}, {"iscrowd": 0, "category_id": 1, "bbox": [120.0, 381.0, 85.0, 64.0], "area": 2660, "segmentation": {"size": [512, 512], "counts": "[\\l12m?2N2N2N2N2N2N2N2N2N2N2N2000000000000N2N2N2N2N2N2N2N2000000000000000000000O1000000000O1N2N2N2N2N2N2N000001O2N2N2N2N2N2N2N2N2N2O1O1M3N2N2O1OO2N2OO2N2M3N2N2N2N2N2N2N2N2N2NSSi4"}, "image_id": 809, "id": 13584}, {"iscrowd": 0, "category_id": 1, "bbox": [76.0, 414.0, 73.0, 70.0], "area": 2663, "segmentation": {"size": [512, 512], "counts": "o]V11m?2N3M2N3M2N3N1N3M2N3M2N30O010O01M2N3M2N3M2N3O0010O010O010O0100O010O010O010O010`AcN[>]1cAfN]>^110O010O010O010O010O010O010O010O010O01cNbAW1]>gNeAY1b>00M2WOZA6i>\\OWA839l>DVA;k>CXA:k>CWA;Y?M2N3M2NdQe5"}, "image_id": 809, "id": 13585}, {"iscrowd": 0, "category_id": 1, "bbox": [453.0, 422.0, 59.0, 90.0], "area": 3271, "segmentation": {"size": [512, 512], "counts": "jnR72m?2kNMZB5e=LYB6e=LYB6e=LQBLB:[>LQBLB:[>MPBKC:[>MPB=n=EPB=n=EPB=n=EQB?Jh=FkA>?Jh=FkA>?Jh=GjA=a0In=5TBI^O"}, "image_id": 809, "id": 13586}, {"iscrowd": 0, "category_id": 1, "bbox": [287.0, 424.0, 98.0, 75.0], "area": 3657, "segmentation": {"size": [512, 512], "counts": "Pn_42m?1N3M2O2M3M2N3h@@m>a0QAAm>b0PAAm>a0RA@m>k0M3M2O2N110O0100O0100O010ORO\\A`0e>]O^Ac0b>ZO`Af0`>YObAg0^>VOeAi0[>UOgAl0g>0O01M2O2000O0O2N2M2N3N1N3M3N1N3M2N3N2M2N300O0100O01iAXNU>j1O010O01000O010O10O1]NmAX1R>fNPB[1P>bNSB\\1n=bNTB\\1Z>N110O0100M2010O0100O01M2N3M2N3N1N3M2N3VOn@c0X?N3M2N3M2O2M2N3M`Qo1"}, "image_id": 809, "id": 13587}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 426.0, 27.0, 41.0], "area": 584, "segmentation": {"size": [512, 512], "counts": "_=T1m>N2N2N2N000000001N10000000002N2N2N2N2N2N2N2N2N2N2N2N[Rb7"}, "image_id": 809, "id": 13588}, {"iscrowd": 0, "category_id": 1, "bbox": [11.0, 446.0, 63.0, 66.0], "area": 2358, "segmentation": {"size": [512, 512], "counts": 
"To51n?4K2O2N2N1O2M3N1O2N2^O[OeAg0Z>[OdAg0Y>\\OeAf0Y>[OfAf0Y>\\OdAg0Z>[OdAg0Y>[OfAg0X>[OfAf0Z>a0O10O10nA[Ng=f1VB\\Nk=c1TB^Nm=b1QB_No=i11O1OO1001O1O001O1O001O1O001O1O001BfATO[>j0gAUOY>i0jAVOW>j0hAVOY>i0hAVOX>k0hATOY>l0gASOZ>k0gAUOY>i0jAVOW>h0kAVOW>0^Ab0=\\OW>0_Ab0X?N3N2M2O2M3N1N3NSaj6"}, "image_id": 809, "id": 13589}, {"iscrowd": 0, "category_id": 1, "bbox": [415.0, 453.0, 12.0, 10.0], "area": 63, "segmentation": {"size": [512, 512], "counts": "Yn_61o?1N2N2N1O200O01N2N2N2NgQZ1"}, "image_id": 809, "id": 13590}, {"iscrowd": 0, "category_id": 1, "bbox": [397.0, 457.0, 10.0, 11.0], "area": 63, "segmentation": {"size": [512, 512], "counts": "^nV62l?3N2N2N2O01M3N2M3NdQd1"}, "image_id": 809, "id": 13591}, {"iscrowd": 0, "category_id": 1, "bbox": [429.0, 474.0, 19.0, 21.0], "area": 230, "segmentation": {"size": [512, 512], "counts": "Vof63k?2N3M2N3M2N300O010O010OO2M2N3M2O2M2NRao0"}, "image_id": 809, "id": 13592}, {"iscrowd": 0, "category_id": 1, "bbox": [229.0, 475.0, 96.0, 37.0], "area": 1465, "segmentation": {"size": [512, 512], "counts": "oob31n?1N2N2N2O1N2N21O001O001O1O001O001O001O1O001O001ON2O1N2N200001O1O001O001O001O1O001O001O001O00N2N2O1N2N2N2O1N2N2HAm@a0Q?Bm@?Q?9O1N2N2O1001O00O1N21O001O001O1O001O001O001O1O001NO10O2N3M2O2M2N3M3N1N3M2N3NjPm2"}, "image_id": 809, "id": 13593}, {"iscrowd": 0, "category_id": 1, "bbox": [384.0, 490.0, 60.0, 22.0], "area": 869, "segmentation": {"size": [512, 512], "counts": "o_P61m?2N2N2O1N2N2O1N2N2N2O1N200001O001O1O001O001O001OO1N2O11O001O1O001O001O001O001O001ON2N2001O001O001O001O1O001O001O001LY@Mg?1\\@NloQ1"}, "image_id": 809, "id": 13594}, {"iscrowd": 0, "category_id": 1, "bbox": [83.0, 498.0, 60.0, 14.0], "area": 466, "segmentation": {"size": [512, 512], "counts": "noY12k?3M300001O00001O00001O00N20000001O00001O00001O0000N2M3M3N200001O00001O00001O00001O00001O00001O00001O00001O00001O00001LWPh5"}, "image_id": 809, "id": 13595}, {"iscrowd": 0, "category_id": 1, "bbox": [446.0, 502.0, 25.0, 10.0], "area": 135, "segmentation": {"size": [512, 512], "counts": "o_o61n?1N2O1N2O1O1O1001O001O001O1O001O001O1O001O001O00QPd0"}, "image_id": 809, "id": 13596}, {"iscrowd": 0, "category_id": 1, "bbox": [45.0, 0.0, 141.0, 130.0], "area": 8755, "segmentation": {"size": [512, 512], "counts": "P`f01o?1O1O2N1O1O2N1O2N1O1O2N1O1O2N1O2N1O1O2N1O1O2N1O2N1O1O2N1O1O2N1O1O2N1O2N1O1O2N1O1O2N1O2N1O1O2N1O1O2N1O2N1O1O2N1O1O2O00010N1N3N1N2N3M2O1O2N1O2N1O1O2N1O1O2N1O2N1O1O2N1O1O2N1O1O2N1O2N1OmMmC:Rh0eAZOZ>h0dAZOZ>g0dA[O[>f0cA]O[>c0eA^OZ>a0fABX>?fACY>>fADX>=fAEY>R1OO100O1O100O1O1O12N1O2N101RB\\N^=:XBi08oN`=6ZBl04POb=1[BP11QOd=NZBS10ROe=I[BX1NPOh=EZB]1LPOj=BZBP2i=2N3NO0000101N3M2O2M2ZNmA\\1U>bNmA]1U>`NmA^1[>O01O0002N2O2M2mNZAk0f>SO\\Al0f>QO\\Am0n>M2O2M2N3N1N3M2O1N3M2O2M2NT^]1"}, "image_id": 810, "id": 13598}, {"iscrowd": 0, "category_id": 1, "bbox": [416.0, 0.0, 35.0, 31.0], "area": 609, "segmentation": {"size": [512, 512], "counts": "VP`61n?2N2N2N2O2M2N2O1O1O2N1O1O1O2N1O1O1O1O2N1O01N2O1N3M2N2N2O1N3A`@9f?M2N2N2O]_n0"}, "image_id": 810, "id": 13599}, {"iscrowd": 0, "category_id": 1, "bbox": [447.0, 0.0, 29.0, 16.0], "area": 269, "segmentation": {"size": [512, 512], "counts": "P`o61o?1O1O2N1O2N1O1O2N1O1O2N00O100O1O1O100O1O100O1O1O100O1LW@0j?OTPb0"}, "image_id": 810, "id": 13600}, {"iscrowd": 0, "category_id": 1, "bbox": [179.0, 26.0, 50.0, 54.0], "area": 1455, "segmentation": {"size": [512, 512], "counts": "Zai22=OR?3m@OQ?3l@OR?3l@OR?4k@OS?2l@OR?b0M0001O01O01O01O3M2O1N3M2O1N3M2N3N1O110O00N3M2N3N1N2N3N1N2N3M2O2M2N2O2M2N3M2O1N3M2N2O2MT^]4"}, "image_id": 810, "id": 13601}, {"iscrowd": 0, "category_id": 1, 
"bbox": [418.0, 26.0, 19.0, 24.0], "area": 242, "segmentation": {"size": [512, 512], "counts": "QQa63l?2N3KI]@8b?5M2N3N1O110O01M2N3N1N3M2N3N1Nh^U1"}, "image_id": 810, "id": 13602}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 62.0, 71.0, 106.0], "area": 3148, "segmentation": {"size": [512, 512], "counts": "n1Z3f<02M2N2nLWCk2P=N2N3M2O2M2N010O002N3eM^BT2c=jM_BT2h=N3N1N3M2N2O2M2N3N1N2N3M2O2M2N010O0_OXAKh>GYA521f>JZA312d>K]A121a>N`AN12`>OaAN02n=MTB4OK21k=2RB2c0J[=6PB0P?2n@NQ?4j@F06V?`01O01O00012M2N2O2O0010O0N2O2M2N3M2O1N3M2O2M2N2N3N1Nm\\l6"}, "image_id": 810, "id": 13603}, {"iscrowd": 0, "category_id": 1, "bbox": [216.0, 80.0, 47.0, 53.0], "area": 1274, "segmentation": {"size": [512, 512], "counts": "TS\\31n?2N2O2M2N2N3g@Ck>>SAEj>=TAEj>>TACk>>SAEh>GVAS1h>5N1N3M010O0000010O000010O002N3N1N2N3M2O2M2N2N3N1N2N3N1N2N3M2O2M2N2N3Nc\\l3"}, "image_id": 810, "id": 13604}, {"iscrowd": 0, "category_id": 1, "bbox": [185.0, 92.0, 23.0, 21.0], "area": 240, "segmentation": {"size": [512, 512], "counts": "Wcl22m?3M2O2M2N3N0O00010O000010O00010O3M2N3N1N2N3Nklg4"}, "image_id": 810, "id": 13605}, {"iscrowd": 0, "category_id": 1, "bbox": [118.0, 119.0, 18.0, 21.0], "area": 186, "segmentation": {"size": [512, 512], "counts": "lSk13m?1N2N3M2O1N3M21O01O01M2O1N3M2N2O2Mlkk5"}, "image_id": 810, "id": 13606}, {"iscrowd": 0, "category_id": 1, "bbox": [238.0, 119.0, 74.0, 69.0], "area": 2552, "segmentation": {"size": [512, 512], "counts": "kTg31n?2O1N3M2N2O2M2N2N3N1N2N3N1N3M2N2O2M2N2N3N1N2N3N1N1O00010O000010O000010O0000010O000010O000010O00000101N3M2O2M2N2N3N1N2N3M2O2M2N2O2M2N2N3N1N3M2N2O2MZkS3"}, "image_id": 810, "id": 13607}, {"iscrowd": 0, "category_id": 1, "bbox": [449.0, 135.0, 63.0, 68.0], "area": 2108, "segmentation": {"size": [512, 512], "counts": "mdP71n?3M2O2M2N2N3N1N3M2O2M1O010O002O2M2N2N3N1N3M2O2\\AhN[>Z1cAhN[>a1N1N3M2O2M2N210OO2N1N2N3N1N3M2O2M2N3N1N2N3M2O2M2N3N1N3M2O2M2N2O2M2N3M2O2M2N]J"}, "image_id": 810, "id": 13608}, {"iscrowd": 0, "category_id": 1, "bbox": [499.0, 136.0, 13.0, 27.0], "area": 181, "segmentation": {"size": [512, 512], "counts": "adi71n?3N1N2N3M2O2M2N2O2M2N3M2gK"}, "image_id": 810, "id": 13609}, {"iscrowd": 0, "category_id": 1, "bbox": [165.0, 154.0, 21.0, 22.0], "area": 237, "segmentation": {"size": [512, 512], "counts": "Peb21n?2O1N3M2N3N1N2N3N1O2O01N1N3M2O1N3M2N3N1NijR5"}, "image_id": 810, "id": 13610}, {"iscrowd": 0, "category_id": 1, "bbox": [176.0, 158.0, 49.0, 53.0], "area": 1296, "segmentation": {"size": [512, 512], "counts": "eUh21n?3M2O1N3M2O1N3M2O1N3M2N3N1N2N3N1N2N3N1N2N3M10O00010O000010O0000L`AjNa>T18HWAUOj>j0WATOl>i07N3N1N2N3M2O2M2N3N1N2N3N[Z_4"}, "image_id": 810, "id": 13611}, {"iscrowd": 0, "category_id": 1, "bbox": [497.0, 187.0, 15.0, 27.0], "area": 241, "segmentation": {"size": [512, 512], "counts": "Ufh72m?2N3N1N3M2O2M2N3N1N3M10O01O0VJ"}, "image_id": 810, "id": 13612}, {"iscrowd": 0, "category_id": 1, "bbox": [207.0, 210.0, 25.0, 22.0], "area": 229, "segmentation": {"size": [512, 512], "counts": "PgW32m?2N2O2M2N2OO00010O0000010O00001O01O01O002O1N2N3MZi[4"}, "image_id": 810, "id": 13613}, {"iscrowd": 0, "category_id": 1, "bbox": [504.0, 232.0, 8.0, 25.0], "area": 111, "segmentation": {"size": [512, 512], "counts": "eWl72m?3FMc@4[?Nc@4\\?Na@5\\?9N2N3fH"}, "image_id": 810, "id": 13614}, {"iscrowd": 0, "category_id": 1, "bbox": [321.0, 245.0, 53.0, 50.0], "area": 1295, "segmentation": {"size": [512, 512], "counts": "_hP53m?1N3M2N3N1N3M3N1N3M2N3N1N3M2OO01O00010O0000010O0001O01O00010O0001O01O0002O1N2N3M2O2M2N2O2M2N2N3N1N3M2O1NggT2"}, "image_id": 810, "id": 13615}, {"iscrowd": 0, 
"category_id": 1, "bbox": [289.0, 279.0, 142.0, 157.0], "area": 10190, "segmentation": {"size": [512, 512], "counts": "_j`42m?2O2M2N2N3N1N2N3M2O2M2N2O2M2N2N3N1N2N3M2O2M2N2N3N1N2N3N1N2N3M2O2M2N2N3N1N2N3M2O1N3M2O2M2N2N3N1N2N3M2O1N3M2N3N1N2N3N1N2N3M2O2M2N2N3N1N2N3M2O1N3MkKkDb3T;]LnDc3P;^LQEa3n:_LUE^3k:bLWE\\3j:cLYEZ3h:fLYEY3h:eLZEY3i:dLZEY3h:eLZEZ3h:dLYEZ3i:dLZEY3h:eLZEY3i:eLXEZ3i;M2N3M2O1N3M2N2O2M2N2O2M2N2N10O00000100O3M2O1YNYBR1j=kNXBS1j=kNXBS1j=kNYBS1i=kNXBS1j=kNXBS1k=jNXBS1j=kNXBT1i=kNYBR1\\>N2N3N1N2N3N1N001O01O01O0001O01O3M2O1N3M2O1N3M2N3NnSX1"}, "image_id": 810, "id": 13616}, {"iscrowd": 0, "category_id": 1, "bbox": [149.0, 332.0, 21.0, 23.0], "area": 248, "segmentation": {"size": [512, 512], "counts": "bjZ22n?1N3M2O1N3M2N3N1N201O00O2M2N3M2O1N3M2O2MVeZ5"}, "image_id": 810, "id": 13617}, {"iscrowd": 0, "category_id": 1, "bbox": [16.0, 335.0, 218.0, 177.0], "area": 20632, "segmentation": {"size": [512, 512], "counts": "T_82n?1N3M2N2O2M2N3N1N2N3M2O1N3M2O2M2iNnN]CT1akNhAT1Z>iNiAT1Y>jNiAT1Y>kNhAT1d>M2N3M2O1N3M2N2O2M2N3N1N2N3M2O2McaZ4"}, "image_id": 810, "id": 13618}, {"iscrowd": 0, "category_id": 1, "bbox": [419.0, 455.0, 93.0, 57.0], "area": 3009, "segmentation": {"size": [512, 512], "counts": "ooa61n?100O1O100O1O1O100O1O100O1O1O100O1O1O100O1O100O1O1O100O1O100O1M3O1O100O1O1O100O1O100O1O1O100O1O100O1O1O100O1O100O1O1O100O1O100O1O1O100O1O1O100O1O100O11O1O1O2N1O2N1O1O2N1O1O2N1OO100O1O1"}, "image_id": 810, "id": 13619}, {"iscrowd": 0, "category_id": 1, "bbox": [192.0, 361.0, 17.0, 42.0], "area": 352, "segmentation": {"size": [512, 512], "counts": "]\\P34h?4L5K4K5L4L4L5K2OO5K4K5L4L5K4L4LiTg4"}, "image_id": 811, "id": 13620}, {"iscrowd": 0, "category_id": 1, "bbox": [456.0, 386.0, 22.0, 14.0], "area": 204, "segmentation": {"size": [512, 512], "counts": "X\\T74h?4N200010O0000010O00000010O00000010O000M3Koc`0"}, "image_id": 811, "id": 13621}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 391.0, 169.0, 121.0], "area": 15466, "segmentation": {"size": [512, 512], "counts": "W0000O1000000000000000000000000000000000000000O100000000000000000000000000000000000000000000001OQlm4"}, "image_id": 813, "id": 13631}, {"iscrowd": 0, "category_id": 1, "bbox": [326.0, 154.0, 186.0, 186.0], "area": 16015, "segmentation": {"size": [512, 512], "counts": "bWS51n?2N2N1O2M3N2N2N2N1N3N2N2N1N1000O10002N2N2M21cN]O\\Cc0b<_O^C?beee<`1N2M3N2N1O2M3N2O1000O10O100000O1000O1N2N2[DSLT;n3jDULT;m3jDULT;m3jDTLT;o3jDSLU;n3iDTLW;l3fDWLY;Y4000O100O1M2O2N00001N3N200000O10O100000O1000O100000O1000O100VM[DX1e;fN\\D[1c;dN_D\\1a;aNbD_1^;_NdD`1];^NeDb1[;\\NgDd1Y;TN\\DWOX1dAgN]>W18EYAZOi>d0YAZOi>d0YAZOi>c0ZA[Oh6"}, "image_id": 814, "id": 13632}, {"iscrowd": 0, "category_id": 1, "bbox": [178.0, 249.0, 146.0, 172.0], "area": 12863, "segmentation": {"size": [512, 512], "counts": "[Zi22m?2N1O2M3N2N2N2N1N3N2N2N2M2O2N2N2N2M3N1O2N2M3N2N2N1O2[O\\NjBg1T=[NjBg1T=ZNkBh1R=[NlBf1S=\\NjBg1T=[NjBg1T=[NjBg1S=\\NkBf1S=[NlBg1R=d0O2N2M3N2N1O2000000O10O100000O1N1O2N2N2M3N2N1O2N2M3N2N2N1N3N2N2N2O010000000O10O10000000O010000000O10O10000000O01000000000O01kL\\Dm1d;QN^Do1b;nM`DS2`;kMbDU2];jMeDU2\\;iMfDW2Z;gMhDY2X;dMkD\\2U;`M\\D]O`0T3T;\\MQEd2n:[MTEd2m:ZMUEd2m:ZMUEd2m:ZMTEe2n:XMUEf2l:YMVEd2m:ZMUEd2m:ZMUEd2ULcA2^>LdA2_>LcA2_>LcA2_>LcA2_>KdA2_>LbA3_>LcA2_>LcA2_>LcA2W?M\\dm2"}, "image_id": 814, "id": 13633}, {"iscrowd": 0, "category_id": 1, "bbox": [360.0, 315.0, 60.0, 58.0], "area": 1621, "segmentation": {"size": [512, 512], "counts": "aZd52l?3N2N2N1O2M3N2N2N2N1N3N2N2N2N2M3N1O200000000O01000000000O01000000000O01000000000OO2N2N2M3N2N1O2N2M3N2N2N1O2M3N2N2N2N1NUe]1"}, 
"image_id": 814, "id": 13634}, {"iscrowd": 0, "category_id": 1, "bbox": [112.0, 361.0, 88.0, 83.0], "area": 4103, "segmentation": {"size": [512, 512], "counts": "X]h11n?2N2N2ALo@6o>Lo@6o>Lo@6o>Lo@5P?Mn@5o>`0N2N2N2N2M3N2N2N2N2M10000000O0100000001N3N2N1LWNPBk1n=5N00000O1000O1000110O1000O100000O0100000000O0100000O1000O10O01N2N2N2N1cNlAl0W>ROkAl0W>ROkAl0W>QOlAm0U>ROmAl0U>ROmAk0V>SOlAk0e>O2N2N2M3N2N2N1N3N2N2N2N2MUck4"}, "image_id": 814, "id": 13635}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 464.0, 85.0, 48.0], "area": 2317, "segmentation": {"size": [512, 512], "counts": "c?9f?2N1O2N2N2M2O1O1O1O1N2O1O1O1O1N2O1O1O1O1N2O1O1O1O1O1N2O1O1O1001O1O1O1O1O001O1O1O1O001O1O1O1O1O001O1O1O1O1O001O1O1O1O001O1O1O1O1O001O1O1O1O001O1O1O1O1O001O1O1O1O001N2N2NUPe6"}, "image_id": 814, "id": 13636}, {"iscrowd": 0, "category_id": 1, "bbox": [306.0, 496.0, 32.0, 16.0], "area": 278, "segmentation": {"size": [512, 512], "counts": "n_i42m?1O1O1N2O1O1O1N2O1O1O1O1001O1O1O1O001O1O1O1O001O1O1O1O001O1O1OQ`f2"}, "image_id": 814, "id": 13637}, {"iscrowd": 0, "category_id": 1, "bbox": [105.0, 0.0, 49.0, 16.0], "area": 365, "segmentation": {"size": [512, 512], "counts": "P`d12n?1O1O2N1O2N1O1O2N1O2NO1O100O1O100O1O1O100O1O100O1O1O100O1O11O1O2N1O1O2NO100O1O100O1O1O100O1O100OQPc5"}, "image_id": 815, "id": 13638}, {"iscrowd": 0, "category_id": 1, "bbox": [184.0, 0.0, 66.0, 52.0], "area": 1684, "segmentation": {"size": [512, 512], "counts": "]Pl22m?2M2O2M3N1O2M3N2M201O1O001O1O1O001O1O001O1O001O1O1O001O1O001O1OlNVAS1l>O01000O01000O1oNSAn0P?0M3N11000O010000O010ROTAg0l>WOVAi0i>UOZAh0P?O2M3N2N1N3N2M2O2N2M3N1NRoR4"}, "image_id": 815, "id": 13639}, {"iscrowd": 0, "category_id": 1, "bbox": [290.0, 0.0, 70.0, 42.0], "area": 1712, "segmentation": {"size": [512, 512], "counts": "SPa42m?2N3N1N2O1O1O2N1O1O1O2N1O1O1O2N1O1O1O2N1O1OO2N2N03M2O1O1O2N1O1O1O2N1O1OO1O100O1O1O100O1O1O1O101N2N2N3M10O0000010O0003M2N2O2M2N2N2N3N1N2N2Nco[2"}, "image_id": 815, "id": 13640}, {"iscrowd": 0, "category_id": 1, "bbox": [378.0, 0.0, 4.0, 2.0], "area": 6, "segmentation": {"size": [512, 512], "counts": "PPm51o?001O0PPQ2"}, "image_id": 815, "id": 13641}, {"iscrowd": 0, "category_id": 1, "bbox": [381.0, 0.0, 64.0, 56.0], "area": 2212, "segmentation": {"size": [512, 512], "counts": "_`n52m?2N3_@JT?7k@KR?7l@KR?8k@J1Jl>>QAK0In>=QA1l>c0N2OO01O003N1N2N3N1N2N3M2O1OO1O100O1O1O100O1O3M2O2M2N2O2M21O01O0N01O0001O100O0010O00000101N2N3M2O1N3M2^Oe@=a?M2N2N3I[_Q1"}, "image_id": 815, "id": 13642}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 10.0, 29.0, 63.0], "area": 1494, "segmentation": {"size": [512, 512], "counts": "m0j0V?b0^O0000000]Oc000000000000000000000000000000000000Ka1dN000000Z^a7"}, "image_id": 815, "id": 13643}, {"iscrowd": 0, "category_id": 1, "bbox": [38.0, 23.0, 75.0, 64.0], "area": 2275, "segmentation": {"size": [512, 512], "counts": "iQc01n?2N2N2N2N2N2N2N2N2N2N2N3M2N2N2N2N2N2O10N2N2N2N20000000000000N2N2N2N2N2FiNhAY1V>iNhAW1X>kNfAU1Z>8000000000000000000002N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N1O2N2Nb^W6"}, "image_id": 815, "id": 13644}, {"iscrowd": 0, "category_id": 1, "bbox": [134.0, 23.0, 28.0, 28.0], "area": 412, "segmentation": {"size": [512, 512], "counts": "WQS21m?3N1N3N2M2O2M3N1N3N2N110O10O10O10N2M2O2M3N1N3N2N1N3N2MPo^5"}, "image_id": 815, "id": 13645}, {"iscrowd": 0, "category_id": 1, "bbox": [157.0, 26.0, 52.0, 51.0], "area": 1282, "segmentation": {"size": [512, 512], "counts": "aa^23l?1O2M3N1O2M3N1O2M3N1N3N2N2M201O10O10M3N1N3N200O01000O01POYAf0g>XOZAi0f>TO]Ak0c>TO_Al0j>10O1000O10O1N1N3N2M2O2N2M2O2M3N2M2O2M3NY^g4"}, "image_id": 815, "id": 13646}, 
{"iscrowd": 0, "category_id": 1, "bbox": [325.0, 36.0, 66.0, 58.0], "area": 1988, "segmentation": {"size": [512, 512], "counts": "jaR52n?1N3M2N3N1N3M2k@@h>b0UAAh>a0VAAh>b0TAYOO6l>n0M0010O000101N3M2O2M2N3NO0001O01O00010O00012M2N3N1N3O010O0O2M2N2O2M2N3N1N3M2O0O00010O0000101N2N3N1N3M2O2M2N2Ok]l1"}, "image_id": 815, "id": 13647}, {"iscrowd": 0, "category_id": 1, "bbox": [225.0, 47.0, 63.0, 64.0], "area": 1960, "segmentation": {"size": [512, 512], "counts": "Wb`32m?2N2O2M2N2O2M2N3N1N2N3M2O1N3M2O2N100O2M01O03M2O2M2N3N1N3M2O2M1O0010O00010YOlAAT>?oA^OQ>b0QB\\OP>d0RBZOo=d0SBZOP>c0SBZOo=e0RBZOP>c0SBZOo=d0SBZOm=g0TBXOl=g0f00010O1O2O2M2N3M2O2M2N2O2M2N3Nmm_3"}, "image_id": 815, "id": 13648}, {"iscrowd": 0, "category_id": 1, "bbox": [208.0, 72.0, 23.0, 26.0], "area": 316, "segmentation": {"size": [512, 512], "counts": "`RX31n?2N2]@LW?7f@KY?6e@LY?7d@LY??N10O01O02O1O2M2N3N1N2N3M2O1N3M2OX]\\4"}, "image_id": 815, "id": 13649}, {"iscrowd": 0, "category_id": 1, "bbox": [159.0, 73.0, 28.0, 28.0], "area": 412, "segmentation": {"size": [512, 512], "counts": "fb_22m?2N2N2N2N2N2N2O1N2N2N2N2N2N11N2N2N2N2N2N2N2N3M2N2O1N2NZ]R5"}, "image_id": 815, "id": 13650}, {"iscrowd": 0, "category_id": 1, "bbox": [90.0, 77.0, 62.0, 51.0], "area": 1689, "segmentation": {"size": [512, 512], "counts": "PS]12m?2N2N2O1N2N2N3M2N2N2N2N2N2O1N2N2N2O100N3M2N2N2N2000000000O1N2N2O1N2N3M1O0000000000000000002O1N2N2N2N2N2N2N3M2N2N2N2N2N2O1NPmc5"}, "image_id": 815, "id": 13651}, {"iscrowd": 0, "category_id": 1, "bbox": [262.0, 95.0, 62.0, 50.0], "area": 1876, "segmentation": {"size": [512, 512], "counts": "_SS42m?3N1Z@K^?7a@K0OV?8h@K1NV??h@CV?f0M2N3N1N2N3N1N3O0O0O01O01O01O02N1O010O0002N3N1N21N1O1N01O3N1N2N2O0O000010O0000010O02N2N3N1N3M2N2O2M2N3N1N2N3M2O[lm2"}, "image_id": 815, "id": 13652}, {"iscrowd": 0, "category_id": 1, "bbox": [192.0, 113.0, 61.0, 70.0], "area": 2143, "segmentation": {"size": [512, 512], "counts": "kSP32n?2M2N2O2M2N2N3N1N3M2O1N3M2O2O01O010O00010O01N1O1N3M2O1N3M2N3N1N2N3N1N2N3N1N3M2N201O01N1N2O2mNnA3T>KnA3U>JnA3T>LmA3U>JmA4U>JnA3T>KnA4T>JnA3T>KnA3U>JnA3T>LmA3U>JmA4U>JnA3T>LmA3R?MgZQ4"}, "image_id": 815, "id": 13653}, {"iscrowd": 0, "category_id": 1, "bbox": [139.0, 129.0, 81.0, 89.0], "area": 2954, "segmentation": {"size": [512, 512], "counts": "hdU22m?2N3N1N2N3N1N3M2N3N0O0001O01O2N3N1N3M2N2O2M2N3N1N3M2N2O2M2N3N1N2O2O0010O010ORBUNf=k1YBVNg=2VBb10_Nj=LXBm1h=RNYBo1m=O010O00001N1N3M2O2M2N2N3N1N3M2N2O2M2N3N1N3M2N2O2M00010O1O3M2O1N3M2N2O2M2N3Nnia4"}, "image_id": 815, "id": 13654}, {"iscrowd": 0, "category_id": 1, "bbox": [442.0, 149.0, 70.0, 72.0], "area": 2972, "segmentation": {"size": [512, 512], "counts": "bUm61n?2O2M2N2N3N1e@Do>?n@CP??n@CQ?>m@EP?g0N3M2O1N3M2O2M2N2N3N1N3M200N2N00010O0000010O1O2O0O001O01O01O01O00101N2N01O03M2N2O2M2N3N1N2N3M2O2M2N2O2M2N2N3O0O2M2O1N3M]J"}, "image_id": 815, "id": 13655}, {"iscrowd": 0, "category_id": 1, "bbox": [3.0, 167.0, 61.0, 76.0], "area": 2057, "segmentation": {"size": [512, 512], "counts": "Rg12m?2N2N2N2N2N2N2DBSA`0k>BSA`0k>BSA`0k>BSA`0k>nNeAR1\\>oNcAQ1\\>QObAo0^>90010O0000010O000102M2N2N3N1N2N3M2O2M1O1O01O000101N2Bh@1[?Lg@2[?Mg@1Z?Mh@1[?Lg@2YWU6"}, "image_id": 815, "id": 13659}, {"iscrowd": 0, "category_id": 1, "bbox": [439.0, 240.0, 35.0, 36.0], "area": 641, "segmentation": {"size": [512, 512], "counts": "kgk61n?2O1N3M2O2M2N3N1N3M2O1N3M2O2M2N3N10O1O2M2N3N1N3M2O2M2N3N1N3M3N1N3M2Oggb0"}, "image_id": 815, "id": 13660}, {"iscrowd": 0, "category_id": 1, "bbox": [297.0, 251.0, 59.0, 64.0], "area": 1827, "segmentation": {"size": [512, 512], "counts": 
"bhd41n?2O2M2N2N3N1N2N3N1N3M2N2O2O000N3M2N2O2M2N3M2O1N3M2O1N3M2N2O1N1O00010OZOPB]OP>d0QBZOR>c0PB[OR>c0QB[OP>d0QBZOR>c0PB[OR>f0nAXOS>h0mAVOV>i0kATOW>m0?N1N2N3N1N3M2N2O2M2N2O2M2N3MWg]2"}, "image_id": 815, "id": 13661}, {"iscrowd": 0, "category_id": 1, "bbox": [401.0, 256.0, 28.0, 26.0], "area": 385, "segmentation": {"size": [512, 512], "counts": "ZhX61n?3N1N3M2O1N3M2N3N1N3M01O01O01O2O1N3M2O2M210O0O2M2O1N3M`WY1"}, "image_id": 815, "id": 13662}, {"iscrowd": 0, "category_id": 1, "bbox": [457.0, 270.0, 55.0, 76.0], "area": 2135, "segmentation": {"size": [512, 512], "counts": "WiT72m?2N2N2N2N2N2N2N2N2M3N2N2N2N1O2N2N2N2N2N2N2N2N2N200000000000000000000000000000000000000000000000000000N2N2N1OfF"}, "image_id": 815, "id": 13663}, {"iscrowd": 0, "category_id": 1, "bbox": [377.0, 283.0, 16.0, 16.0], "area": 145, "segmentation": {"size": [512, 512], "counts": "Pil53l?2O2M2N2O2M2N1001N3M2N3N1N2N3NkVk1"}, "image_id": 815, "id": 13664}, {"iscrowd": 0, "category_id": 1, "bbox": [258.0, 288.0, 52.0, 50.0], "area": 1324, "segmentation": {"size": [512, 512], "counts": "hYQ42m?3N1N3M2N2O2M2N2O2M2N3M2O1N3M2O2M2N00010O0001O01O00010O0001O01O0003N1N2N3M2O2N10001M2N2O2M2Jb@Ga?66O1N3M2OWfT3"}, "image_id": 815, "id": 13665}, {"iscrowd": 0, "category_id": 1, "bbox": [357.0, 291.0, 36.0, 39.0], "area": 788, "segmentation": {"size": [512, 512], "counts": "_ib52m?3N1N2N3M2e@Fn>;QAFm>=PAEn>=PAFm>h0O0O001O01O01O2O110OO1N3M2O1N3M2N2O2M2N2N3N1N2N3M2O1NVVk1"}, "image_id": 815, "id": 13666}, {"iscrowd": 0, "category_id": 1, "bbox": [118.0, 293.0, 63.0, 75.0], "area": 1909, "segmentation": {"size": [512, 512], "counts": "dYk11n?2N3N1N3M2N2O2M2N3N1N3M2O1N3M2O2M2N2O2M2N3O00010O010O010O01O010O010O00010O010O010O001M2N3N1N3M2O2M2N2O2M2N3M2O2M2N3N1N3M2O2MPUU5"}, "image_id": 815, "id": 13667}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 298.0, 17.0, 20.0], "area": 141, "segmentation": {"size": [512, 512], "counts": "]YR63l?2N2O2M201O01O01O01O000O2M2N3M2OVVe1"}, "image_id": 815, "id": 13668}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 314.0, 41.0, 44.0], "area": 1072, "segmentation": {"size": [512, 512], "counts": "S:h0W?2O1N3M21O01O01ON2N010O00010O0000010O00010O00000101N2N3N1N2N3M2O2M2N2O2M2N3M2O1NcU[7"}, "image_id": 815, "id": 13669}, {"iscrowd": 0, "category_id": 1, "bbox": [96.0, 317.0, 56.0, 68.0], "area": 1310, "segmentation": {"size": [512, 512], "counts": "VZ`12m?3N1N2N3N1N3M2O1N3M2O2M2N2O2O00010O010O00010O01O01O010O00010O010O00010O01O01O010O00O2M2O2M2N2O2M2N2N3N1N3M2N2O[dc5"}, "image_id": 815, "id": 13670}, {"iscrowd": 0, "category_id": 1, "bbox": [418.0, 323.0, 62.0, 55.0], "area": 1709, "segmentation": {"size": [512, 512], "counts": "gZa61n?2N2O2M2N2N3N1N3M2N2O2M2N2O2N1010O0010O0O1N3M2O2M2N1O010O0001O01O0001O01O00010O0001O0L]AmNb>T16N3M2N3N1N2N3N1N2N3M2O2M2N2N3N1N2NWe?"}, "image_id": 815, "id": 13671}, {"iscrowd": 0, "category_id": 1, "bbox": [294.0, 325.0, 23.0, 22.0], "area": 261, "segmentation": {"size": [512, 512], "counts": "_Zc41n?3M2O2M2N3N1N3M01O01O01O01O01O2O2M2N3N1N3M2NbUQ3"}, "image_id": 815, "id": 13672}, {"iscrowd": 0, "category_id": 1, "bbox": [475.0, 326.0, 15.0, 16.0], "area": 124, "segmentation": {"size": [512, 512], "counts": "[j]72m?2N2O2M2N2N3N10N3M2N2O2M2N2Nbe:"}, "image_id": 815, "id": 13673}, {"iscrowd": 0, "category_id": 1, "bbox": [300.0, 339.0, 50.0, 48.0], "area": 1210, "segmentation": {"size": [512, 512], "counts": "[[f42n?1N2N3M2O2M2N2O2M2N2N3N1N3M2N2O2M0010O0001O01O0001O01O00010O0001O02N2N2O2M2N3M2O1N3M2O1N3M2N3N1N2Nid`2"}, "image_id": 815, "id": 13674}, {"iscrowd": 0, "category_id": 1, 
"bbox": [58.0, 341.0, 61.0, 71.0], "area": 1695, "segmentation": {"size": [512, 512], "counts": "Q[m02n?1N3M2O1N3M2N3N1N2N3N1N2N3M2O2M2N2O2N1010O00010O00010O010O00010O01O01O010O00010O010M2O1N3M2O1N3M2N3N1N2N3M2O1N3M2O2M2N2NdST6"}, "image_id": 815, "id": 13675}, {"iscrowd": 0, "category_id": 1, "bbox": [396.0, 341.0, 26.0, 26.0], "area": 342, "segmentation": {"size": [512, 512], "counts": "Q[V61n?3M2N2O2M2N2N3N1N2N2N01O0001O02N3M2O2M2N2N3N1N2N3Nmd\\1"}, "image_id": 815, "id": 13676}, {"iscrowd": 0, "category_id": 1, "bbox": [35.0, 362.0, 57.0, 69.0], "area": 1319, "segmentation": {"size": [512, 512], "counts": "dka01n?2O2M2N2O2M2N3M2O1N3M2O2M2N201O0010O0010O0010O0010O00010O010O00010O0010O0010O0010O001N1N2N3N1N3M2N2O2M2N3N1N2N3Mmba6"}, "image_id": 815, "id": 13677}, {"iscrowd": 0, "category_id": 1, "bbox": [445.0, 366.0, 63.0, 66.0], "area": 2026, "segmentation": {"size": [512, 512], "counts": "mln62m?3M2O1XOJcA9[>HcA:[>HdA:Y>IdA9Z>IdA9[>IbA:[>HdA9Z>IdA9[>IbA:[>HcA9\\>f0O01O00012M2N00010O0001O01O003NOK_AlNb>T1400010O0000010O0000010O0003N1N2N3M2O1N3M2N3N1N2N3N1N2N3M2O2M2Njc1"}, "image_id": 815, "id": 13678}, {"iscrowd": 0, "category_id": 1, "bbox": [330.0, 368.0, 35.0, 25.0], "area": 386, "segmentation": {"size": [512, 512], "counts": "n[U51n?3N1N3M2N2O2M2N10O0001O01O00010O001O2O1N3M01O01O0001O01O010O2N2N3N1N\\TY2"}, "image_id": 815, "id": 13679}, {"iscrowd": 0, "category_id": 1, "bbox": [365.0, 368.0, 50.0, 48.0], "area": 1261, "segmentation": {"size": [512, 512], "counts": "Wlf53l?2N2O2M2N3M2O1N3M2O1N3M2N3N1N2N3NO0001O01O00010O0000010O0001O01O2N2O1N3M201O000O2M2N2N3N1Ic@H`?57O1N3MjS`1"}, "image_id": 815, "id": 13680}, {"iscrowd": 0, "category_id": 1, "bbox": [177.0, 374.0, 22.0, 21.0], "area": 245, "segmentation": {"size": [512, 512], "counts": "okh23m?1N3M2O2M2N3N0O000010O00010O2N3N1N3M2N3N1NQTl4"}, "image_id": 815, "id": 13681}, {"iscrowd": 0, "category_id": 1, "bbox": [158.0, 393.0, 51.0, 54.0], "area": 1449, "segmentation": {"size": [512, 512], "counts": "R]_21n?2N3N1N2N3M2O1N3M2O1N3M2N3N1N2N3N1N2N3M2O1N1O00010O00010O0000010O0001O3N1N2N3M2VOTA>n>_OTA?n>_OTA?n>@TA>X?N2N3M2O2M2N2NiRg4"}, "image_id": 815, "id": 13682}, {"iscrowd": 0, "category_id": 1, "bbox": [10.0, 405.0, 48.0, 52.0], "area": 1170, "segmentation": {"size": [512, 512], "counts": "S]51n?2N2O2M2N2N3N1N2N3M2O2M2N2O2M2N2N3N1N2O20O01O01O01O01O000N3N1N2N3M2O2M2N2O2M2N2N3N1N2N3M2O2M2NWbR7"}, "image_id": 815, "id": 13683}, {"iscrowd": 0, "category_id": 1, "bbox": [469.0, 411.0, 43.0, 46.0], "area": 1129, "segmentation": {"size": [512, 512], "counts": "amZ77g?3N3M2N3N1O1O2M200O2N1O1000O2M0000010O0000010O01O1O1O101N1O1O100O1O2N01O0001O01O0000VC"}, "image_id": 815, "id": 13684}, {"iscrowd": 0, "category_id": 1, "bbox": [389.0, 412.0, 51.0, 48.0], "area": 1298, "segmentation": {"size": [512, 512], "counts": "cmR62m?2N2O2M2N3M2O1N3M2O1N3M2N3N1N2N3M2O0O00010O000010O000010O00000100O3M2O1N3M2N3O00001M2O2M2N2Ia@Jb?4_@Kb?37N3N\\bS1"}, "image_id": 815, "id": 13685}, {"iscrowd": 0, "category_id": 1, "bbox": [107.0, 421.0, 66.0, 54.0], "area": 1868, "segmentation": {"size": [512, 512], "counts": "^me11n?2O1N3M2Z@Hb?c0TA[On>c0:N1N2N3M2O2M2N2O2M2NTRY5"}, "image_id": 815, "id": 13686}, {"iscrowd": 0, "category_id": 1, "bbox": [218.0, 432.0, 22.0, 18.0], "area": 260, "segmentation": {"size": [512, 512], "counts": "e]]32n?4K5L3M000O0100000O01000O0100000O0104L4K4MUbW4"}, "image_id": 815, "id": 13687}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 449.0, 61.0, 58.0], "area": 1689, "segmentation": {"size": [512, 512], "counts": 
"Qoo21o?1N2N3N1N2\\OI\\A9b>I]A9`>J]A8b>I\\A9b>I]A9`>J]A8a>J]A8b>I]A9`>e0N2N3M2O1N3OO2M2O1N2N3M2N2O2M2N2N3N1N2N3M2O1N1O00010O0000010O0000010O002N2O2M2N2N3N1N2N3M2O1NUaQ4"}, "image_id": 815, "id": 13688}, {"iscrowd": 0, "category_id": 1, "bbox": [423.0, 449.0, 52.0, 50.0], "area": 1333, "segmentation": {"size": [512, 512], "counts": "inc62m?2N2O2M2N3M2O1N3M2O2M2N3M2O1N3M2O2M001O01O00010O00010O0000010O00010O2N3M2O1N3M2O2M2N2N3N1N3M2O2M2N2N3NWQb0"}, "image_id": 815, "id": 13689}, {"iscrowd": 0, "category_id": 1, "bbox": [154.0, 457.0, 32.0, 29.0], "area": 444, "segmentation": {"size": [512, 512], "counts": "i^]22n?1N3M2O2M2N2O2M2N1O01O01O01O01O01O00010O0001O02N3N1N3M2O2M2N2N]aR5"}, "image_id": 815, "id": 13690}, {"iscrowd": 0, "category_id": 1, "bbox": [374.0, 457.0, 27.0, 27.0], "area": 395, "segmentation": {"size": [512, 512], "counts": "d^k51n?2N3N1N3M2O1N3M2N3N1N2N10O0000101N3M2O2M2N3N1N2N3M2OYQg1"}, "image_id": 815, "id": 13691}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 460.0, 25.0, 31.0], "area": 469, "segmentation": {"size": [512, 512], "counts": "e>a0^?2N3N1N2N1O010O0001O01O02N2N2O2M2N2N3N1N2N3N1N3MUQc7"}, "image_id": 815, "id": 13692}, {"iscrowd": 0, "category_id": 1, "bbox": [43.0, 470.0, 48.0, 42.0], "area": 1266, "segmentation": {"size": [512, 512], "counts": "eoe02l?2N3M2N3M2N3M2N3M2N3M2N3M3M2N3N1010O001O001O001O001O001O001O001O001O1N1N3M2N3N1N3M2N3M2N3M2N3MlPb6"}, "image_id": 815, "id": 13693}, {"iscrowd": 0, "category_id": 1, "bbox": [387.0, 474.0, 28.0, 29.0], "area": 433, "segmentation": {"size": [512, 512], "counts": "UoQ62n?1N3M2O2M2N2N3N1N3M2O1N2N00010O02N3M2O2M2N2O2M2N3M2O1NhP`1"}, "image_id": 815, "id": 13694}, {"iscrowd": 0, "category_id": 1, "bbox": [111.0, 476.0, 35.0, 34.0], "area": 559, "segmentation": {"size": [512, 512], "counts": "Zog11n?2O1N3M10O01O00010O2N2O2a@CV??h@DU?e0O1N3M2O1N1O03M2O2M2N3N1N3M2N2O2M2N3N1N3M``f5"}, "image_id": 815, "id": 13695}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 480.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "Poo72n0"}, "image_id": 815, "id": 13696}, {"iscrowd": 0, "category_id": 1, "bbox": [505.0, 487.0, 7.0, 14.0], "area": 58, "segmentation": {"size": [512, 512], "counts": "[ol72m?2N3N1N2N3N1h@"}, "image_id": 815, "id": 13697}, {"iscrowd": 0, "category_id": 1, "bbox": [157.0, 488.0, 36.0, 24.0], "area": 517, "segmentation": {"size": [512, 512], "counts": "eo^23l?2O1N3M2N3N1N1O100O1O1O100O1O1O100O11O1O1O2N1O1O2N1O1O2N1O1O2N1O2N1O1ORPo4"}, "image_id": 815, "id": 13698}, {"iscrowd": 0, "category_id": 1, "bbox": [449.0, 490.0, 45.0, 22.0], "area": 503, "segmentation": {"size": [512, 512], "counts": "ooP71n?100O1O1O100O1O100O1O100O1O1O100O1O100O1O1O100O1O100O1O1O11O2N1O1O2N1O2N1O1O2N1O2N1O1O2NQ`8"}, "image_id": 815, "id": 13699}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 500.0, 17.0, 12.0], "area": 135, "segmentation": {"size": [512, 512], "counts": "j?6j?0O1O1O1O1O100O1001O1O1O2M2N2N2OTPg7"}, "image_id": 815, "id": 13700}, {"iscrowd": 0, "category_id": 1, "bbox": [262.0, 511.0, 2.0, 1.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "o_S41o?0Q`k3"}, "image_id": 815, "id": 13701}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 37.0, 298.0, 217.0], "area": 39700, "segmentation": {"size": [512, 512], "counts": 
"Y3l1T>010O00010O010O00010O010O0010O0010O0010O0010O010O0010O00N3M2M4M2M3N3L3N3L3N3L3N2M4M2M4M2M3N3SObLSEa3k:aLSEb3i:bLSEa3k:aLSEa3n:_LnDe3Q;[LmDg3T;XLjDk3U;VLgDm3Y;SLeDP4[;:0010N1N3N11O010O01O010O01O01O010O01O01O010O01O010O01O010O01O01O010O01O01O010O010O00010O010O00010O010O0010O0010O0010O0010O0010O0010O0010O010O00O2L3N3M2M3N3L3N3L310O01O01O010O01O01O010O010O00010O010O00010O011N01O01O01_OcE^K\\:`4fE`K[:]4hEbKX:[4kEfKT:X4oEgKR:U4QFlKn9R4TFnKm9n3WFQLi9m3YFTLf9i3]FWLd9f3\\F]Lc98eE[2h0`Md92fE\\26nL4g0o9MjEZ23UM1g0R:GmE[20ZMNf0V:BoEZ2MaMLf0W:\\OSF[2JeMIg0[:VOTF[2IROR:`NYF[2HTOo9_N[FZ2IWOm9[N^F[2HZOf:c0]E]Oc:a0_E@a:b1N3O010O01O01O010O010O01O01O010O01O010O01OM4M2N3L3N2N3L3N3L3N3M2M3N3L3N3M2M3N3LPhd4"}, "image_id": 816, "id": 13707}, {"iscrowd": 0, "category_id": 1, "bbox": [21.0, 292.0, 377.0, 220.0], "area": 38980, "segmentation": {"size": [512, 512], "counts": "hk:2l?3g@M`>6]ANHM]>8hAMHN]>8hANGN^>6iA9U>JgA9V>JhA9U>j0QBYNb=i1\\B[Na=h1\\BZNa=i1]BZNa=S210O01OIdMjB\\2S=hMlBU2U=mMlBP2S=TNlBl1R=VNoBi1o0001O001ON2M3N2N2M3N2M3N2M3N2M3N2M3N20000001O001O00001O001O00001O001O00001O001O001O00001O001O00001O001O00001QNVBf1j=XNXBh1Q>O001O00001O001O00001O001O00001O001O001O00001O001O00001O001kN`Ah0`>UOcAk0]>SOfA0Lc0_>ZOgA1Le0i>YOYAg0Q?Ih@DX?:j@FW?6l@JT?4o@KQ?2RANo>OTA0l>NVA2]?O00001O00Q`h1"}, "image_id": 816, "id": 13708}, {"iscrowd": 0, "category_id": 1, "bbox": [21.0, 495.0, 27.0, 17.0], "area": 391, "segmentation": {"size": [512, 512], "counts": "ho:8f?2N2O1O1O100O100O100000000000000001O001O001O1O1O2N2NX`W7"}, "image_id": 816, "id": 13709}, {"iscrowd": 0, "category_id": 1, "bbox": [55.0, 503.0, 16.0, 9.0], "area": 92, "segmentation": {"size": [512, 512], "counts": "nok02l?2N2M31O00001O001O001O00001O0NVPl6"}, "image_id": 816, "id": 13710}, {"iscrowd": 0, "category_id": 1, "bbox": [22.0, 0.0, 12.0, 5.0], "area": 34, "segmentation": {"size": [512, 512], "counts": "PP;1o?001O001O00001O001OO1NRP_7"}, "image_id": 818, "id": 13711}, {"iscrowd": 0, "category_id": 1, "bbox": [108.0, 0.0, 7.0, 3.0], "area": 14, "segmentation": {"size": [512, 512], "counts": "PPf11o?001O00001O0P`V6"}, "image_id": 818, "id": 13712}, {"iscrowd": 0, "category_id": 1, "bbox": [113.0, 0.0, 25.0, 20.0], "area": 329, "segmentation": {"size": [512, 512], "counts": "[`h11m?3M2N2M4M201O001O00001O001O001O001OO1N3M2M4M2N2MQPk5"}, "image_id": 818, "id": 13713}, {"iscrowd": 0, "category_id": 1, "bbox": [201.0, 0.0, 19.0, 7.0], "area": 78, "segmentation": {"size": [512, 512], "counts": "P`T31o?001O001O00001O001O00001O001O0000N2MSPb4"}, "image_id": 818, "id": 13714}, {"iscrowd": 0, "category_id": 1, "bbox": [250.0, 0.0, 39.0, 47.0], "area": 1149, "segmentation": {"size": [512, 512], "counts": "VPm31m?2M3O2TAIm=7QBKo=6mANR>2lA0T>0jA2V>OfA5Y>KeA7[>IbA;]>F`A<`>D^A?a>B[Aa0e>=01O001O001O00001O001ON2M3N2N2N2N2N2N2@o@OS?OPAMS?0PANR?0QAMQ?0RANP?0a`_3"}, "image_id": 818, "id": 13715}, {"iscrowd": 0, "category_id": 1, "bbox": [301.0, 0.0, 53.0, 57.0], "area": 1967, "segmentation": {"size": [512, 512], "counts": "\\af46e?5N2O1O1O2O0O1L4K5L4K6@lNmAY1S>hNgA]1Y>50001O000000001O000000001O000000001O00000000001OL4K5K5K5K5J6K5K5K5M300001OM3LTP_2"}, "image_id": 818, "id": 13716}, {"iscrowd": 0, "category_id": 1, "bbox": [363.0, 0.0, 47.0, 83.0], "area": 2488, "segmentation": {"size": [512, 512], "counts": "^ae52e?9H8_AAY=g0_BAY=g0_BA`=`0XBHh=X1001O01O0M3H8N20000000001O000000000000001O0000\\NfBb0Z=UOPCj0P=SOSCj0P=VOPCb0X=^OhB:`=F`B2h=NXBIQ>7oAAY>?d000L4G9N2000000000PPc1"}, "image_id": 818, "id": 13717}, {"iscrowd": 0, "category_id": 1, "bbox": [415.0, 0.0, 56.0, 30.0], 
"area": 937, "segmentation": {"size": [512, 512], "counts": "V`_61n?2N2M3N1O2O1O1O001O1O1O001O1O001O1O1O001O1O1O001O1O1O001O1O001OO1N2O1O1N2O1O1N2O1N2O1O10000O1O1N2O1O1N2O1O1N2OQ`d0"}, "image_id": 818, "id": 13718}, {"iscrowd": 0, "category_id": 1, "bbox": [460.0, 4.0, 52.0, 63.0], "area": 1788, "segmentation": {"size": [512, 512], "counts": "]QV72m?2N2M2O2M3N1O2M3N2_O\\OcAe0\\>]OaAf0]>\\OaAf0\\>\\ObAg0\\>[ObAf0]>\\OaAf0\\>\\ObAg0\\>?N3N2N20O1000O10O10O10M3N2N1N3N2N2M2O2M3N1O20000O0ROTAi0l>TOWAi0R?N1N3N2N1N300O10nN"}, "image_id": 818, "id": 13719}, {"iscrowd": 0, "category_id": 1, "bbox": [54.0, 6.0, 25.0, 17.0], "area": 241, "segmentation": {"size": [512, 512], "counts": "^Pk01m?2M4M2O2O0010O010O0010O010O0010O00O2M20N2M4M2N3Li_h6"}, "image_id": 818, "id": 13720}, {"iscrowd": 0, "category_id": 1, "bbox": [211.0, 10.0, 21.0, 26.0], "area": 292, "segmentation": {"size": [512, 512], "counts": "m`Y32l?2N3M2M4M2N3M2M4N10010ON3L3N3M2N2M4M2N3Mco[4"}, "image_id": 818, "id": 13721}, {"iscrowd": 0, "category_id": 1, "bbox": [5.0, 12.0, 43.0, 56.0], "area": 1412, "segmentation": {"size": [512, 512], "counts": "ia22k?3M4M2ZOFcA=[>FbA=[>EbA>[>EcA>Z>EbA>\\>DaA?\\>e0N110O01O010O00010O010O00010L3N1OO03N3L3N2M4M2N3L3N3L3N2M4M2M4M2N^oW7"}, "image_id": 818, "id": 13722}, {"iscrowd": 0, "category_id": 1, "bbox": [177.0, 22.0, 23.0, 25.0], "area": 322, "segmentation": {"size": [512, 512], "counts": "Xah21m?3M2N3M2M3N3M2N3N110O010O01O0N3L3N3M2N3M2N2NVok4"}, "image_id": 818, "id": 13723}, {"iscrowd": 0, "category_id": 1, "bbox": [37.0, 28.0, 58.0, 65.0], "area": 2134, "segmentation": {"size": [512, 512], "counts": "\\bb01m?2M4M2N3L3N2N3L3N3M2010O01JTOWAk0f>YOYAg0e>[O[Af0a>^O\\Ad0b>?M2M4M2M4O00010O0N3O001O010O01O01O010OM4M2M4M2M3N3M2M4M2M3N3M2M4M2M4M2M3N3M2M4Mj^`6"}, "image_id": 818, "id": 13724}, {"iscrowd": 0, "category_id": 1, "bbox": [159.0, 43.0, 63.0, 99.0], "area": 2634, "segmentation": {"size": [512, 512], "counts": "oc_23k?3M2M3N3L3N3M2M4M2N2M4M2O20O010O00010OBoNlAS1Q>oNlAT1R>oNkAT1Q>?N2M4M2M4M2M100O10O10O10O0100OBcB\\Na=a1aB]Na=a1aB]Nb=a1`B[Nc=e1<01N1N3L3N2M4M2M4M2M3N3L3N3M2M4M2M3N3L3N]n`4"}, "image_id": 818, "id": 13725}, {"iscrowd": 0, "category_id": 1, "bbox": [96.0, 49.0, 46.0, 60.0], "area": 1628, "segmentation": {"size": [512, 512], "counts": "PS`11m?3L3SOLlA7Q>LlA6R>LkA8Q>LlA6R>LkA7S>LjA7S>KkA7U>IhA;X>EeA=[>d010O010O0010O0010O0010O0010O010ON2N3M2M4M2M3N3M2M4M2M4M2N2M4M2M4M2MSnh5"}, "image_id": 818, "id": 13726}, {"iscrowd": 0, "category_id": 1, "bbox": [268.0, 63.0, 31.0, 34.0], "area": 597, "segmentation": {"size": [512, 512], "counts": "hRV41m?2N3M2N2N3L3N3M2N3M2N3M20010O010O010O01N1M4M2N2N3M2M4M2N3M2Nk]Z3"}, "image_id": 818, "id": 13727}, {"iscrowd": 0, "category_id": 1, "bbox": [403.0, 68.0, 78.0, 62.0], "area": 2596, "segmentation": {"size": [512, 512], "counts": "kbY62k?3N3L3N3M2M4M2N2M4N110TATOb>l0\\AVOe>j0XAYOg>P110O0010O001M2N2N3M210O010O01O01O010O01N1M310O010O010O00010O010N1M4M2N2M4dAdNR>\\1kAgNU>Z1hAiNX>_10010O010O0010O0010OUOiAMX>OkA2T>LnA4R>IRB6o=GSB:l=DWB;j=AYB?g=_O\\B>g=_O[B?g=^O\\B?e>M2M4M2N3Lf\\?"}, "image_id": 818, "id": 13728}, {"iscrowd": 0, "category_id": 1, "bbox": [206.0, 79.0, 54.0, 78.0], "area": 1977, "segmentation": {"size": [512, 512], "counts": "aTW32k?3M4TOJjA8S>KjA8T>KiA8S>KjA8T>JjA9U>HgA;Z>DfA=Y>CgA=Y>DeA=\\>BbAa0]>a0N3M201O01O01O010O010O00O2M2M2OO010O010O010O1O0101N4M2M3N3L3N3L3N3L3N2M4M2M4M2M3N]mm3"}, "image_id": 818, "id": 13729}, {"iscrowd": 0, "category_id": 1, "bbox": [261.0, 92.0, 26.0, 27.0], "area": 410, "segmentation": {"size": [512, 512], "counts": 
"]cR42k?4M2N3M2N3M2N2N30O010O010O010O010O0N3M2N2N3M2N3M2Nn\\`3"}, "image_id": 818, "id": 13730}, {"iscrowd": 0, "category_id": 1, "bbox": [508.0, 94.0, 4.0, 10.0], "area": 24, "segmentation": {"size": [512, 512], "counts": "USn72l?2M4M2RM"}, "image_id": 818, "id": 13731}, {"iscrowd": 0, "category_id": 1, "bbox": [364.0, 99.0, 32.0, 47.0], "area": 781, "segmentation": {"size": [512, 512], "counts": "UTf51l?3N3M2M4M2N2FAPAc0O^Og>1XAc0O^Of>2YAj0d>:N3N1001N1N3M201M2N3L3N3M2M4M2N3L3N2N3L3N3Mfli1"}, "image_id": 818, "id": 13732}, {"iscrowd": 0, "category_id": 1, "bbox": [303.0, 111.0, 64.0, 87.0], "area": 2099, "segmentation": {"size": [512, 512], "counts": "leg42k?3M3mNJZB9c=JYB9d=JZB9c=IlAG5c0l=JkAG6a0m=JkAG5c0l=3QBOo=1nA3Q>n01O01O010O01O01O010ON3M2M3N3L3N3L3N2M4M2M3N0O100O100O010O1O1Ml@\\OT?d03000O1101010O010O01O010O010L3N3M2N2N3M2N3L\\\\X2"}, "image_id": 818, "id": 13733}, {"iscrowd": 0, "category_id": 1, "bbox": [402.0, 113.0, 21.0, 21.0], "area": 234, "segmentation": {"size": [512, 512], "counts": "eSY63j?3O101O01O01O0^@LY?3d@0\\?:010O00010O000N3L3M3M4L\\\\\\1"}, "image_id": 818, "id": 13734}, {"iscrowd": 0, "category_id": 1, "bbox": [499.0, 127.0, 13.0, 18.0], "area": 164, "segmentation": {"size": [512, 512], "counts": "Ydi73k?2N3M2N201N101O0O2O00001PL"}, "image_id": 818, "id": 13735}, {"iscrowd": 0, "category_id": 1, "bbox": [255.0, 128.0, 46.0, 52.0], "area": 1255, "segmentation": {"size": [512, 512], "counts": "Veo33k?2M4M2N3M2M3010O010O01M2N2N3L3N3M2N3L3N3M2M3O20O010O001M2N2M4M2N3M2M4M2N2N3L3N3M2M4M2N3M2Mi[Y3"}, "image_id": 818, "id": 13736}, {"iscrowd": 0, "category_id": 1, "bbox": [413.0, 136.0, 28.0, 27.0], "area": 433, "segmentation": {"size": [512, 512], "counts": "jd^61m?2M3N3L3M4M2M4O00010O010O010O00010O010ON3M2M3N3M2M4M2Nb[S1"}, "image_id": 818, "id": 13737}, {"iscrowd": 0, "category_id": 1, "bbox": [510.0, 150.0, 2.0, 4.0], "area": 5, "segmentation": {"size": [512, 512], "counts": "iTo71l?3ZK"}, "image_id": 818, "id": 13738}, {"iscrowd": 0, "category_id": 1, "bbox": [460.0, 153.0, 27.0, 28.0], "area": 452, "segmentation": {"size": [512, 512], "counts": "\\UV71l?4M2M3N3L3M4M2O110O0010O0010O0010O01O01L3M4M2M3N3L3MS[<"}, "image_id": 818, "id": 13739}, {"iscrowd": 0, "category_id": 1, "bbox": [353.0, 163.0, 46.0, 59.0], "area": 1456, "segmentation": {"size": [512, 512], "counts": "df`51m?2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N201O010O01O0O1N3L3N3L3N2M4M2M3NO0100O4M2M3N3L3N3L3L4MmZh1"}, "image_id": 818, "id": 13740}, {"iscrowd": 0, "category_id": 1, "bbox": [401.0, 178.0, 45.0, 56.0], "area": 1392, "segmentation": {"size": [512, 512], "counts": "nfX61m?2M4M2M4M2N3L3N2M4M2M4M2M4M2M4M2N2M40O01O010O01O01O01M2M4M2N3L3N2M4M2M4M2M3N3L3N3M2M3N3LVjP1"}, "image_id": 818, "id": 13741}, {"iscrowd": 0, "category_id": 1, "bbox": [506.0, 184.0, 6.0, 15.0], "area": 48, "segmentation": {"size": [512, 512], "counts": "TVm71m?3M2M4M2M3XJ"}, "image_id": 818, "id": 13742}, {"iscrowd": 0, "category_id": 1, "bbox": [453.0, 188.0, 44.0, 63.0], "area": 1626, "segmentation": {"size": [512, 512], "counts": "`gR71m?3^ONQA5l>NRA5f>HSA635h>2VA1f>2XA0f>f0M4M2N3N1010O0001L3N3L3O20O00010M2N3L3N3M2M3N3M2M4M2M3N3M2M4M2N3L3N2M4M2NlY7"}, "image_id": 818, "id": 13743}, {"iscrowd": 0, "category_id": 1, "bbox": [496.0, 229.0, 16.0, 39.0], "area": 367, "segmentation": {"size": [512, 512], "counts": "lWh72k?3N3L3N3M2M4M2N2m@[Oj>g0SA\\Ol>l01O010OM4M2kH"}, "image_id": 818, "id": 13744}, {"iscrowd": 0, "category_id": 1, "bbox": [98.0, 231.0, 55.0, 76.0], "area": 2270, "segmentation": {"size": [512, 512], "counts": 
"PXa12l?3L3N2N3L3m@Ac>b0YABc>a0[AAc>a0[AAc>b0YABc>o0O20O00SBdNW=\\1fBfNZ=Z1dBiN\\=W1`BlN`=T1^BoNa=Q1\\BROe=n0XBTOh=l0UBXOk=^1010O0010O0010O0010O01N1N3M2M4M2M3N3L3N3L3N3L3N3L3N3L3N2M4M2M4M2N3LXXc5"}, "image_id": 818, "id": 13745}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 345.0, 140.0, 167.0], "area": 12266, "segmentation": {"size": [512, 512], "counts": "a??^?3N2M3N2N2M3N2M3N2M3N2N2M3_OdN\\B^1b=dN[B_1b=dN\\B^1b=eNZB^1d=dNZB^1c=a0N2N2N2M3N2N2M3N2N2M300001O001O001OO1N2M3N2MB\\CfMbYOmAh0W>ROkAn0Z>kNhAT1c>N1O2M3N1O2M3N2M2O2N2M3N1N3N2N1N3N2M`fi4"}, "image_id": 819, "id": 13753}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 447.0, 52.0, 65.0], "area": 2252, "segmentation": {"size": [512, 512], "counts": "\\>16V1[>kNcAX1Z>kNdAW1Z>8O2N2M3N1O2M2O11O1O001O1O1O001O1ON2O00O010O2O2N2M3N1N3N2N2M2O2M3N2N1N3N2M3N1N3N2N2M2O2M3N2N1N3N2M_aU7"}, "image_id": 819, "id": 13754}, {"iscrowd": 0, "category_id": 1, "bbox": [322.0, 0.0, 34.0, 48.0], "area": 1132, "segmentation": {"size": [512, 512], "counts": "YQQ52i?5L4K6K4K5K5L4K6K400000001O0000001O000000001O00M3L4K5K5L4K5L4K5L4KUP^2"}, "image_id": 820, "id": 13755}, {"iscrowd": 0, "category_id": 1, "bbox": [342.0, 0.0, 170.0, 310.0], "area": 41700, "segmentation": {"size": [512, 512], "counts": "ZX[55g?4L4K6K4L4K5L5K4L4K5L4L5J5L4L4K6K4L4K5L5K4K5WNhLfF\\3V9hLfF\\3U9iLfF]3U9hLfF\\3V9hLfF\\3U9iLgF[3U9iLfF]3U9hLfF\\3U9iLgF[3U9iLfF\\3X9fLdF_3[9aLaFc3_9_1001O01O0001O01L3L4K5L5K4K5L4L4K6K4L4K5L5K4K5L4L5L300000001O0000001O0000001O000000001O0000001O0000001O0000001O000000001O0000001O0000001O000000001O0000001O0000001O000000001O0000001O0000001O000000001O0000001O0000001O000000001O0000001O0000001O000000001O0000001O0000001O0000"}, "image_id": 820, "id": 13756}, {"iscrowd": 0, "category_id": 1, "bbox": [285.0, 117.0, 57.0, 110.0], "area": 3617, "segmentation": {"size": [512, 512], "counts": "hf^44h?4L4L4K6K4L4L4L5J5L4L4L4L5J5L4L4L5K4K5L4L5K4K5N210O00000010O00000O2K4L4L4K6K4L4L4L5J5L4L4L5K4K5L4L4L5J5L4L4L5K4KXld2"}, "image_id": 820, "id": 13757}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 158.0, 87.0, 159.0], "area": 6679, "segmentation": {"size": [512, 512], "counts": "n4o4Q;O2M3N2N2N2N1N3N2N2N2M3N1O2N2N2M3N2N1O2N2M3N2N2N1O2M3N2N2N2N1N3N2N2N2N2M2O2N2N2N2M3N1O2N2O100O10O0100O100O10O010000XOTBZOm=d0UB\\Ok=a0XB^Oh=a0ZB_Of=?]B_Od=?^B_Od=?^B_Od=>`B_Ob=?`B_Oa=`0aB]Ob=a0aB\\Oa=b0P1M3N2N1O2N2N200O10O10000000N2N1O2NlXd6"}, "image_id": 820, "id": 13758}, {"iscrowd": 0, "category_id": 1, "bbox": [115.0, 170.0, 18.0, 18.0], "area": 167, "segmentation": {"size": [512, 512], "counts": "aei12m?2N2M3N2N2N20O1000000O1N2N2N1O2N2M^Zm5"}, "image_id": 820, "id": 13759}, {"iscrowd": 0, "category_id": 1, "bbox": [93.0, 187.0, 19.0, 20.0], "area": 207, "segmentation": {"size": [512, 512], "counts": "Uf^13l?2N2N1O2N2M3N2N2N10O2M3N2N2N2N2N1N3NoiW6"}, "image_id": 820, "id": 13760}, {"iscrowd": 0, "category_id": 1, "bbox": [79.0, 202.0, 21.0, 21.0], "area": 228, "segmentation": {"size": [512, 512], "counts": "ffW11n?1O2M3N2N2N2N1N3N2N2O01M3N2N2N1N3N2N2N2M`i]6"}, "image_id": 820, "id": 13761}, {"iscrowd": 0, "category_id": 1, "bbox": [505.0, 315.0, 7.0, 29.0], "area": 104, "segmentation": {"size": [512, 512], "counts": "ejl71k?4K6K4L4K5L5TF"}, "image_id": 820, "id": 13762}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 336.0, 3.0, 6.0], "area": 10, "segmentation": {"size": [512, 512], "counts": "`:6k?M3N_Un7"}, "image_id": 820, "id": 13763}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 152.0, 53.0], "area": 5499, "segmentation": {"size": [512, 512], "counts": 
"0e0[?0000001O0000001O0000001O0000001O0000001O000000001O0000001O0000001O0000001O000000001O0000001O0000001O0000001O000000001O0000001O0000001O0000001O000000001O0000001O0000001O0000001O0000001O000000001O0000001O0000001O0000001O000000001O0000001O0000001O0000001O000000001O0000001O000000O1L4L4K5L4L4L4K5L4L4L4K5LTPd5"}, "image_id": 822, "id": 13764}, {"iscrowd": 0, "category_id": 1, "bbox": [208.0, 0.0, 48.0, 99.0], "area": 2681, "segmentation": {"size": [512, 512], "counts": "bRX32j?4L5J5L4i@@j>m0L4K6K4L4K5L5K4K5L4L4K6K4L4L4O2O0000001OO1K5L4L4K5L4L4K5L4L4L4K5L4L4K5L4L4K5L4L4L4K5LTPP4"}, "image_id": 822, "id": 13765}, {"iscrowd": 0, "category_id": 1, "bbox": [313.0, 0.0, 188.0, 151.0], "area": 21813, "segmentation": {"size": [512, 512], "counts": "ocl44g?6K4L4K5L5J5L4K5L4K6K4K5L4K6K4K5L4K5L5J5L4K5L5K4K5L4K5L5M2000001O000000001O0000001O0000001O000000001O0000001O000000001O0000001O000000001O0000001O000000001O0000001O000000001O00O1K5L4K5L4L4K5L4N21O0000001O000000001O00000010O000000010O0000010O000000010O0000010O000001O01O0000010O0000010O000000010O0000010O000000010O0000010O000000010O0000010O000000010O0cLlCn2TK5L4K6KU^5"}, "image_id": 822, "id": 13766}, {"iscrowd": 0, "category_id": 1, "bbox": [151.0, 7.0, 28.0, 37.0], "area": 679, "segmentation": {"size": [512, 512], "counts": "Ta[23i?4L4L4L5K4M3L4N3O0000010O0000010O00010OM3L4L5K4L4M3L5Kh_V5"}, "image_id": 822, "id": 13767}, {"iscrowd": 0, "category_id": 1, "bbox": [264.0, 51.0, 13.0, 21.0], "area": 170, "segmentation": {"size": [512, 512], "counts": "RRT44h?4L5K4M3010O000N2L5K4L4L_^e3"}, "image_id": 822, "id": 13768}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 73.0, 54.0, 91.0], "area": 2915, "segmentation": {"size": [512, 512], "counts": "Y2P2Q>O00000010O0000010O0000010OYNPB^1P>^NTBb1U>001O01O0001O01O0001VB\\NV=d1fB`N[=_1aBeN_=[1]BiNc=k1O01O0001OM4K4L4K5N3O0001OM3K6K4L4L4L5K4K5L4L5K4LUmT7"}, "image_id": 822, "id": 13769}, {"iscrowd": 0, "category_id": 1, "bbox": [57.0, 116.0, 63.0, 63.0], "area": 3118, "segmentation": {"size": [512, 512], "counts": "bdl04f?_?7I5L4L4M2N3L3N2N2N2O1N1O2N2O0O2O0O2O001N10001N10000000001O000000O100000001O0O10001N101O0O2O0O2N2O0O2N2N2N2N2N3M2M4L4M3K6J;AkZ_4"}, "image_id": 822, "id": 13772}, {"iscrowd": 0, "category_id": 1, "bbox": [234.0, 156.0, 152.0, 86.0], "area": 7919, "segmentation": {"size": [512, 512], "counts": "`Ve32j?5J5L4L4K6K4K5L4K6K4L4L41O01O0001O01O000001O01O0001O01O000001O01O0001O01O000001O01O0001O01O0001O01O000001O01O0001O01O000001O01O0001O01O000001O01O0001O01O0001O01O000001O01O0001O01O000001O01O0001O01O000001O01O0001O01O0001O01O000001O01O0001O01O000001O01O0001O01O000001O01O0001O01O00K6K4L4K5L4K6K4K5L4L5J5LXjn1"}, "image_id": 822, "id": 13773}, {"iscrowd": 0, "category_id": 1, "bbox": [87.0, 184.0, 35.0, 36.0], "area": 858, "segmentation": {"size": [512, 512], "counts": "Xf[11k?4K5L5M20000010Ok@Bh>>TAFl>:o@KQ?b010O000M301O0001O01O0000010O000001M2L4K5L5K4L4LXjR6"}, "image_id": 822, "id": 13774}, {"iscrowd": 0, "category_id": 1, "bbox": [152.0, 282.0, 18.0, 31.0], "area": 345, "segmentation": {"size": [512, 512], "counts": "eY\\22j?4K6J5L4K5L401O0001O000K5L5J5K5L4KYgZ5"}, "image_id": 822, "id": 13775}, {"iscrowd": 0, "category_id": 1, "bbox": [129.0, 322.0, 245.0, 122.0], "area": 16739, "segmentation": {"size": [512, 512], "counts": 
"UlP24g?5L4L5J5L4L4L5J5L4L4K5O20O000001O01O0001O000001O01O0000010O01O2N101N1O2N101N1dN`AV1e>00010O0000010O0000001O0K5L4O110O0000010O0N2L4L5K4001O01O0001O0001O0001O01O0001O01O0001O0001O01O0001O0001O01O0001O01O0001O0001O0001O01O0001O01O0L4L4L4K6K4L4L4N3O01O0001O01O0001O01O0SOSCgNmh0f0Z4^AOa>2]AOc>2ZA1e>OYA3g>NWA4h>LVA7i>JUA7k>JRA9m>>O001O001O1O001O001O1OO1N2N2O100001OO1HSA[Oo>c0TAZOn>d0TA[Om>d08N2N2O1N2N2O1N2O1N2N2O1NR`V5"}, "image_id": 823, "id": 13779}, {"iscrowd": 0, "category_id": 1, "bbox": [218.0, 0.0, 52.0, 44.0], "area": 1533, "segmentation": {"size": [512, 512], "counts": "oP]31m?2N3M2N3N1N3IBh@a0m>AUAe0i>]OVAe0g>^OVAd0j>:O001O001O001O00N2N2001O001O001O001O001O1O001ON2O1N2N2N2N2N2O1N2N2N2N2^Og@;[?Cg@<`?M2L5M2Nkoh3"}, "image_id": 823, "id": 13780}, {"iscrowd": 0, "category_id": 1, "bbox": [307.0, 0.0, 29.0, 20.0], "area": 384, "segmentation": {"size": [512, 512], "counts": "Z`i41m?3M2N3M2N3O00001O001O001O001O001O001O0000N2N2N2M3N2N3M2NQPh2"}, "image_id": 823, "id": 13781}, {"iscrowd": 0, "category_id": 1, "bbox": [337.0, 0.0, 35.0, 22.0], "area": 503, "segmentation": {"size": [512, 512], "counts": "X`X53j?3N3L30001O001O00001O001O00001O001O00001O00001O00^Oe@`0]?01O010N1N2M4M2M3NloU2"}, "image_id": 823, "id": 13782}, {"iscrowd": 0, "category_id": 1, "bbox": [376.0, 0.0, 10.0, 4.0], "area": 24, "segmentation": {"size": [512, 512], "counts": "PPl51o?001O00001O001O00NRPo1"}, "image_id": 823, "id": 13783}, {"iscrowd": 0, "category_id": 1, "bbox": [393.0, 0.0, 52.0, 38.0], "area": 1284, "segmentation": {"size": [512, 512], "counts": "``T63j?3M4M2M3N3L301g@\\OT?i0O00001O001O00001O001O00001O00001O001O00001O001O00001O001OO1M3_OVAJl>3WAJl>5UAIm>8SADP?<:1O00O1M3N2M3M3NR`Q1"}, "image_id": 823, "id": 13784}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 34.0, 54.0, 52.0], "area": 1636, "segmentation": {"size": [512, 512], "counts": "Tbm22m?1N3M2N3M2N3M2N3M2N3M2N3M2N3M2O2M2N03N110O010O0100O01N110O010O010O010O01O0N30O010O010fN[AU1j>M3M2N3Bn@JT?4n@JU?3n@KS?3o@KT?2o@KS?3?M^^W4"}, "image_id": 823, "id": 13785}, {"iscrowd": 0, "category_id": 1, "bbox": [338.0, 36.0, 72.0, 59.0], "area": 2292, "segmentation": {"size": [512, 512], "counts": "SRY52l?2M3N3L3N3L30001L3N3M2M4M2O1010O010O01O000M4O010O010O0N2N30O01O01O010O010O01O01O010O01O010O01O010O01OTO_A9b>D`A=_>AdA>]>^OfAb0Z>\\OhAe0X>XOkAg0g>10O0010O0010M2M4M2N3L3N2M4M2Njmb1"}, "image_id": 823, "id": 13786}, {"iscrowd": 0, "category_id": 1, "bbox": [258.0, 37.0, 48.0, 84.0], "area": 2377, "segmentation": {"size": [512, 512], "counts": "^SQ43[?NTA4i>OTA5i>NTA4Q>H[B7A4R>GZB9@4R>G[Bk0c=WOZBm0b=WO[Bk0c=WOZBl0c=WO[Bl0a=k0O20O00O2L3O1010O010O0N210O010O000dMcBS2`=jMbBg1MbNk=Z1XBdNj=Y1ZBcNj=Z1XBcNk=Z1`0M2M4M2M3N3L3M4M2M4M2M4M2M4MYnV3"}, "image_id": 823, "id": 13787}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 60.0, 50.0, 63.0], "area": 1843, "segmentation": {"size": [512, 512], "counts": "]2T1k>1O2M3O10O100000O0O2N2M3N1O2N1N10O10O10O100O101O2M3N2N1N3N2N2000O10O1QOVAg0j>WOWAh0j>VOYAh0i>VOYAg0R?N2N1N3N2N2N1N3N2N2M3NYmV7"}, "image_id": 823, "id": 13788}, {"iscrowd": 0, "category_id": 1, "bbox": [35.0, 81.0, 62.0, 72.0], "area": 2048, "segmentation": {"size": [512, 512], "counts": "Uda01m?3N2N1N3N2N2M2O2b@@Y?d0N3N2N2M2O2N2M3N1EjNkAX1R>kNkAX1S>jNkAX1S>iNlAX1R>=N000O10O10O10O10O12M3N2N1N3N2N2M2O2N2M3N1POVAi0l>TOVAm0o>O100O0O2N2M3N1O2M3N2N1N3N2N2M2O2Nd\\_6"}, "image_id": 823, "id": 13789}, {"iscrowd": 0, "category_id": 1, "bbox": [298.0, 81.0, 53.0, 63.0], "area": 1927, "segmentation": {"size": [512, 512], "counts": 
"[Se43k?2M4M2M4L3N2M4M2M4N11O010ObASOi=n0SBVOi=m0UBUOj=l0SBXOl=i0QBYOP>f0mA]OS>d0jA_OV>U101N10010O010O00010O0010O001O0N2M4M2M4M2M3N3HXASOk>k06M4L3N3L3N2M4M2M3NU]`2"}, "image_id": 823, "id": 13790}, {"iscrowd": 0, "category_id": 1, "bbox": [338.0, 100.0, 57.0, 59.0], "area": 1918, "segmentation": {"size": [512, 512], "counts": "ZTY54j?2N3L3N3L3N2M4M2010O0010O010M2N3M2M301O013L1M2N3M2N3M2N3M201O01N101O0O2O001N1N3L3N3L3N2M4M201O0N3M2M4M2He@I^?4e@I]?59M2Mc\\j1"}, "image_id": 823, "id": 13791}, {"iscrowd": 0, "category_id": 1, "bbox": [69.0, 119.0, 59.0, 65.0], "area": 1782, "segmentation": {"size": [512, 512], "counts": "ZeR13l?1N3N2M2N3N2M2O2M3M2O2M3N1N3M3N1N3N11N0O10O010O010O010O011N3N1N3N2M2O1O02M2O2M3N1N3N1O2N2M2O2M3N1N3N2M2^Og@<_?N3N2M2O2M3Ngko5"}, "image_id": 823, "id": 13792}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 157.0, 57.0, 73.0], "area": 2070, "segmentation": {"size": [512, 512], "counts": "Z5:e?2N3N1N2N3M2O1N3M2N2O2O001M2N2O2M2N2N3N1N3M20010O00010O01O01O010O00010O01OO00O002O2M2N3M2O1WOYA6j>_OVA31@VA11>j>_OWA12=Z?N3M2O1N3M2N2OYYS7"}, "image_id": 823, "id": 13793}, {"iscrowd": 0, "category_id": 1, "bbox": [178.0, 179.0, 55.0, 53.0], "area": 1663, "segmentation": {"size": [512, 512], "counts": "gVi22k?4M2M4M2N2M4M2M4M2M3N3L3N3L3O110O0010O010O00010O010O0010O0010O001hN\\AS1c>kN_AU1g>0O0001L3N5J3N3M200010O010OM3N3L3N3L3N2MiY[4"}, "image_id": 823, "id": 13794}, {"iscrowd": 0, "category_id": 1, "bbox": [233.0, 215.0, 50.0, 81.0], "area": 2543, "segmentation": {"size": [512, 512], "counts": "igd33k?2c@Lk>7RAL60P>8gAK6Om=LgA<3L6Om=j0mAZOR>g0kA[OU>e0hA^OY>U1O010O00010O010O0003NO0010O0010OM6K2M2N4M2M3N3L3N3L3N3L3N2M4M2M4M[Xe4"}, "image_id": 823, "id": 13796}, {"iscrowd": 0, "category_id": 1, "bbox": [272.0, 229.0, 67.0, 55.0], "area": 2149, "segmentation": {"size": [512, 512], "counts": "\\XX41m?201L3N2M3N0O2O3L3M4N10010M2N2M4L3N3L3O101O010O3NO00010O010O00010O010O0001L3N3O00010OO2L3N2M4L3N3L30001M2N30O00010ON3L3N2M4M2M4L3N2McXf2"}, "image_id": 823, "id": 13797}, {"iscrowd": 0, "category_id": 1, "bbox": [101.0, 272.0, 77.0, 77.0], "area": 2532, "segmentation": {"size": [512, 512], "counts": "[ib11n?2N2N2N2N1O2M3N2N2N2N2N2N1O2N2N2N2N2N2O10O1000N2N2M3N2N2N1O2N2N2iAYNT>j1000N2O1O10O100000gNiAh0W>VOjAk0V>SOlAm0T>QOnAn0S>POoAP1P>oNRBQ1n=lNUBT1k=jNWBV1Z>0O1000O1000000O1N1O2N2N2N2M3N2N2N1O2N2N2M3N2N2N2N1O2NjeV5"}, "image_id": 823, "id": 13798}, {"iscrowd": 0, "category_id": 1, "bbox": [257.0, 284.0, 59.0, 53.0], "area": 1963, "segmentation": {"size": [512, 512], "counts": "kiP43k?3L3N3M2M3N3L3N2N3L3N3L30010O000WAoNd>V101O01O010O00010O01O0O1M4N10010M2O2O0001O0N2M4L3N3L3M3010O0010O0M4M2M5K4M2M4L3N2M4MkfQ3"}, "image_id": 823, "id": 13799}, {"iscrowd": 0, "category_id": 1, "bbox": [208.0, 295.0, 14.0, 13.0], "area": 110, "segmentation": {"size": [512, 512], "counts": "^YX32k?3N3M20010O010O00010ON3M2Mhf`4"}, "image_id": 823, "id": 13800}, {"iscrowd": 0, "category_id": 1, "bbox": [18.0, 315.0, 70.0, 59.0], "area": 1829, "segmentation": {"size": [512, 512], "counts": "S[91e?1b@1\\?1b@1\\?1b@1\\?1b@1\\?;N2N2N2N002N2N2N20000000000O1N2N1O000000000O10000000O100000001O2N2N2N2N2N2N1O000O1000000000000002N2N20N2N2N2N2N2N2N2N2N2N2N2N2N2Nhec6"}, "image_id": 823, "id": 13801}, {"iscrowd": 0, "category_id": 1, "bbox": [413.0, 316.0, 89.0, 111.0], "area": 5615, "segmentation": {"size": [512, 512], "counts": "el^62l?2M3N3L3N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N2M4M2M3N3L3N3L3N2M4M2N3O01O01O010O01O010O00010O010O00010O001M2M4M2M3M4M2M4M2M3N3L3N3L3M4M2M3N3L3N3L3M3N3L3N3L3N3L3N2M4L3N3L3N3Lae4"}, "image_id": 823, 
"id": 13802}, {"iscrowd": 0, "category_id": 1, "bbox": [217.0, 337.0, 62.0, 57.0], "area": 2069, "segmentation": {"size": [512, 512], "counts": "\\k\\32l?3L3N3M2N3L3N2N3L3N3M2N30O00010O010O010O00010O010O010O010O00010OUAPOf>U11O010O01O010M2N2N3M2M4O010O00010O00XOeAL[>1hANX>0jA1U>MnA0U>LnA1U>MnA0U>MmA1U>LnA1U>MnA0U>MmA1U>LoA0\\cd3"}, "image_id": 823, "id": 13803}, {"iscrowd": 0, "category_id": 1, "bbox": [56.0, 339.0, 66.0, 69.0], "area": 1911, "segmentation": {"size": [512, 512], "counts": "n[l01=0R?2m@0Q?2m@0Q?2m@OR?3l@OR?3l@OR?3k@0R?a0N2N2N1O2N2M3N000000O1000O10000000000O10O10000000002N2N2N2N1N3N2N0000000000O1000O100001100N2M2O2N2N2N2N2N2M3N1O2N2N2NQeR6"}, "image_id": 823, "id": 13804}, {"iscrowd": 0, "category_id": 1, "bbox": [184.0, 349.0, 18.0, 17.0], "area": 176, "segmentation": {"size": [512, 512], "counts": "V[l23k?3L3M4N110O01N10010O011N010L3N3M2Nndj4"}, "image_id": 823, "id": 13805}, {"iscrowd": 0, "category_id": 1, "bbox": [481.0, 370.0, 31.0, 80.0], "area": 1751, "segmentation": {"size": [512, 512], "counts": "cl`71R10g=3VB0h=3UBOh=4UB0h=2VB0g=4UB0h=2UB1h=3UBOi=3TB1h=2VB0h=3TB1h=2VB0k=R1010O01O01O01O01O010O01N1N2M4M2N3O01O]D"}, "image_id": 823, "id": 13806}, {"iscrowd": 0, "category_id": 1, "bbox": [94.0, 371.0, 74.0, 83.0], "area": 2702, "segmentation": {"size": [512, 512], "counts": "i]_11m?2M3N3L3N3L3N2010O010O0001M2M4M2M3N3L3N3^OhNWB[1f=iNWBY1g=iNVB[1f=hNXBZ1f=iNVB[1f=a0N2N3L3O20O001L3N2M4M2M4M2M3N3L3N3M2M3dNcAS1f>M4M2M3N3N110O01O01O010O010O01O01O010O01ON3L3N3L3N2M4M2Mhc[5"}, "image_id": 823, "id": 13807}, {"iscrowd": 0, "category_id": 1, "bbox": [174.0, 389.0, 63.0, 46.0], "area": 1718, "segmentation": {"size": [512, 512], "counts": "k\\g22k?4M2M4M2M3N3M2M4M20010O010O00010O010M2o@TOl>Q101O010O010ON201O010O01O01O010O010O01O01O010O01O01O010O01O01O010O0O2M2TOQAd0W?N2N3L3N3L3N2MYSY4"}, "image_id": 823, "id": 13808}, {"iscrowd": 0, "category_id": 1, "bbox": [145.0, 423.0, 13.0, 16.0], "area": 123, "segmentation": {"size": [512, 512], "counts": "amX23k?3L3M3N3O01O010N1M3M4L3Nhb`5"}, "image_id": 823, "id": 13809}, {"iscrowd": 0, "category_id": 1, "bbox": [233.0, 424.0, 25.0, 26.0], "area": 404, "segmentation": {"size": [512, 512], "counts": "hmd34i?3N3L3M3N3L31O01O010O00010O01O01O01M2N2M4L3N3L3Mdbn3"}, "image_id": 823, "id": 13810}, {"iscrowd": 0, "category_id": 1, "bbox": [149.0, 429.0, 47.0, 70.0], "area": 1922, "segmentation": {"size": [512, 512], "counts": "inZ21m?3L3N3L3N3e@Co>?o@Cn>`0PACm>i0M4M2M3N3L3N3L3N2M4M210O01O010O01O01O010O010O001UOnAGQ>6RBJo=3TBMk=0XB0h=N[B2e=J^B6b=H`B8a=DcB9_=EcB9`=CdB9_=EcB9`=DbB9e>M2N3LUam4"}, "image_id": 823, "id": 13811}, {"iscrowd": 0, "category_id": 1, "bbox": [192.0, 461.0, 49.0, 49.0], "area": 1421, "segmentation": {"size": [512, 512], "counts": "`_P31l?4M2N3L3N2M4M2M4M2M3N3L3N3L300010O01O01O010O01O01O010O010O00010N1M4M2M3N3L3N3N11O01M2N3L3N3L3N2MYQW4"}, "image_id": 823, "id": 13812}, {"iscrowd": 0, "category_id": 1, "bbox": [389.0, 486.0, 55.0, 26.0], "area": 912, "segmentation": {"size": [512, 512], "counts": "loR62l?2M3N3M2M4M2001O001O00001O001O001O00001O001OO1N2M3N2M3N2N2001O001O00001O001O00001O001O00001O001O0O2M2M3N3L3Nb`Q1"}, "image_id": 823, "id": 13813}, {"iscrowd": 0, "category_id": 1, "bbox": [6.0, 0.0, 100.0, 53.0], "area": 2710, "segmentation": {"size": [512, 512], "counts": "PP32n?7I8H7I6J0000O1000000000000O10\\AXOP>h0PB]Ok=c0UB\\Ol=e0SB[Om=l0lATOT>\\1000000000O100000000GhAjNX>U1:0000O1000000000000A_A^Ob>a0?000000000000O1000000000000O1000000000000O1000000000000O1000000000000O1000000000000O1000000000000O10000000003M5J7JboZ6"}, 
"image_id": 826, "id": 13814}, {"iscrowd": 0, "category_id": 1, "bbox": [449.0, 0.0, 41.0, 7.0], "area": 160, "segmentation": {"size": [512, 512], "counts": "P`P77i?000000O1000000000000O10000000000O10000000000O10000000000O10000000000O1000000000PP;"}, "image_id": 826, "id": 13815}, {"iscrowd": 0, "category_id": 1, "bbox": [455.0, 8.0, 57.0, 280.0], "area": 9926, "segmentation": {"size": [512, 512], "counts": "a`S72n?6J7H7J7I6J7I6J6J7H7J7I6J7I6J6I8I6J7I6J7I6J6I8I6J7I6J7I6I8I6J6J7I6J7I6I8I6J6J7I6J7H6K00000000000O01000000000O010H"}, "image_id": 826, "id": 13816}, {"iscrowd": 0, "category_id": 1, "bbox": [390.0, 26.0, 55.0, 36.0], "area": 1421, "segmentation": {"size": [512, 512], "counts": "RQS62n?7I7I8H4L00O01000000000O01000000000O01000000000O10O10000000O10O10000000O10O10000000O10O10000000O1000O17I7I8Ga^Q1"}, "image_id": 826, "id": 13817}, {"iscrowd": 0, "category_id": 1, "bbox": [87.0, 39.0, 54.0, 140.0], "area": 4769, "segmentation": {"size": [512, 512], "counts": "]a[14k?8I6J7I7I7I6I8I7I7I6J7I7I7H7J7I7I7I6J7H3N000000000O01000000000O10O2O7I6J7I7I7I6I8I7I7I6J7I7I7H7J7I7I7I6J7HiZi5"}, "image_id": 826, "id": 13818}, {"iscrowd": 0, "category_id": 1, "bbox": [22.0, 58.0, 23.0, 22.0], "area": 398, "segmentation": {"size": [512, 512], "counts": "lQ;3m?7I7H4M0000O1000000000O10O1000000000004K8I7Id]Y7"}, "image_id": 826, "id": 13819}, {"iscrowd": 0, "category_id": 1, "bbox": [24.0, 103.0, 46.0, 74.0], "area": 2458, "segmentation": {"size": [512, 512], "counts": "YS<6j?7I7I7I7I8G8I7I7I7I2N0O010000000003M7H1000O1000000000O10O1000000000O10O11O8H7I7I7H8I7I7I7Ihjl6"}, "image_id": 826, "id": 13820}, {"iscrowd": 0, "category_id": 1, "bbox": [407.0, 103.0, 55.0, 36.0], "area": 1528, "segmentation": {"size": [512, 512], "counts": "^c[63l?:G8H9G000000000O10O1000000000O1000O1000000000O10O1000000000O10O1000000000O10O100000000000O10O10003M6J6J6J6Iokh0"}, "image_id": 826, "id": 13821}, {"iscrowd": 0, "category_id": 1, "bbox": [373.0, 137.0, 113.0, 184.0], "area": 13364, "segmentation": {"size": [512, 512], "counts": "idj55j?7J7I6J6J6J6I7J7I6J6J6J6J6I8I6J0000003L7J6J5K6J5K6I7J5K6J6J5K5J100000O100000O10O10000000O10O10000000O0100000000^N^EfMa:[2eE^M\\:a2jEZMV:f2PFTMP:l2UFoLk9Q3[FiLe9W3aFcL_9\\3gF^LY9c3mFWLS9i3b1000000O01000000000O010000000O10O10000000O1000O1000O100000O1002M7J6J6J6J6TOlB`N[=Y1kB`N\\=Z1iBaN]=Y1iBaN]=Y1k0I7J7I6J6J6Jih<"}, "image_id": 826, "id": 13822}, {"iscrowd": 0, "category_id": 1, "bbox": [113.0, 192.0, 60.0, 211.0], "area": 6302, "segmentation": {"size": [512, 512], "counts": "Wfh12n?7I7I6J7I7H7J7I7I7I6J7H8I6J7I7I7I6I8I7I6J7I7I7I6G:I7I7H7M4I7LM7I6J7I7H8I6J7I7I6J7I7H8I6J7I7I7I6_OkBjM]=o1jBjM]=o1`0I8I7I7I6J7I7I6I8InSY5"}, "image_id": 826, "id": 13823}, {"iscrowd": 0, "category_id": 1, "bbox": [38.0, 217.0, 49.0, 46.0], "area": 1170, "segmentation": {"size": [512, 512], "counts": "lVc01o?6J5K6J5J7J6J5K3M000O10O100000O10O1000002N5K6I7J2N000Ei@MW?3h@MX?4h@LX?4h@LX?3i@MW?3i@MZ?0e@1a?I_@6f?0000000O010000000O10O10005KSXd6"}, "image_id": 826, "id": 13824}, {"iscrowd": 0, "category_id": 1, "bbox": [51.0, 289.0, 51.0, 101.0], "area": 3601, "segmentation": {"size": [512, 512], "counts": "Vii04l?7H8I7I7I7I6J7I7I7H8I7I7I6J3M000O1000O1000000000O10O100000000000O01000006J7I7H8I7I7I6J7I7I7H8I7I6J7IPd\\6"}, "image_id": 826, "id": 13825}, {"iscrowd": 0, "category_id": 1, "bbox": [466.0, 475.0, 46.0, 37.0], "area": 1630, "segmentation": {"size": [512, 512], "counts": "m^Y7=c?f0ZO00000O1000000000000000000000000000000000000000000000000000000000000000000000000000000O1"}, "image_id": 826, "id": 13826}, {"iscrowd": 0, 
"category_id": 1, "bbox": [294.0, 65.0, 168.0, 226.0], "area": 30305, "segmentation": {"size": [512, 512], "counts": "oTc4:f?j0VOj0VOk0UOj0VOa0_O0TMZL^If3b6TMdHl2\\7nMjGR2V8l2000000000000000000000000000000000O10O100000000000000000000000000000000000000000000000O10O10000000000005K;E00O100000000000000000000000000000O10000000000000000000O100000000000000000000000000000O10000000000000000000O1000000GfIlFZ6T990000000000000000O10O1\\LPG8P9HkG]OU8c0eHcN[7]1_IiMa6W2ZJnLf5R3d30000000000000000000004Lk0UOj0VO5^O=00000000000000000O10O10002Nmkh0"}, "image_id": 827, "id": 13827}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 101.0, 121.0, 147.0], "area": 13936, "segmentation": {"size": [512, 512], "counts": "\\3n09W2[;iMdDX2\\;hMdDX2\\;iMcDW2];V100O10O1000000000O10O1000000000O10O100000000000O0100000000000O10O1000000000O10O1000000000O10O100000002N7I7H8I3MO10000000O1000O10000004K7J00000000000O0100000000000O0100000000000O01000000000000O01000000000007H8I7I7I7I7I7I8G8I7I7I7I7I7IjXS6"}, "image_id": 827, "id": 13828}, {"iscrowd": 0, "category_id": 1, "bbox": [503.0, 239.0, 9.0, 24.0], "area": 172, "segmentation": {"size": [512, 512], "counts": "Rhk75k?0^Ob00000000000O1aH"}, "image_id": 827, "id": 13829}, {"iscrowd": 0, "category_id": 1, "bbox": [310.0, 332.0, 153.0, 82.0], "area": 9131, "segmentation": {"size": [512, 512], "counts": "W\\k43m?b0^O0000000000000000000000000000000^OCaA=_>b00000000000000000000OlNROXCn0ha1j:HhDfN?b1i:HhDfN?b1Q;@`DnN?b1Z;WOWDWO>c1[;VOVDXO?b1[;UOWDXO?c1Z;UOWDXO?c1Z;WOUDVOa0b1[;EeD;Z;IcD7];1[D0d;X2N2N3M2N3M2N1N2O001O001O0000]LnDX2R;hMWEn1j:RNYEk1f:VNZEi1g:WNYEi1g:VNZEj1f:VNZEi1g:WNYEi1g:WNYEh1h:XNXEh1h:XNXEh1h:XNXEg1h:ZNXEf1h:ZNXEe1i:[NWEe1i:ZNXEf1n:TNREk1W;mMiDS2W;mMiDR2X;nMhDR2X;nMhDR2X;nMhDQ2Z;nMfDR2c;eM]DZ2n;[MSDe2d<00O2O0O10000O101N9H7I7H8I7H9H7I7H8I7HYk[6"}, "image_id": 828, "id": 13835}, {"iscrowd": 0, "category_id": 1, "bbox": [468.0, 23.0, 43.0, 34.0], "area": 753, "segmentation": {"size": [512, 512], "counts": "YQZ71m?2N3L3N3M2N2M4M210O010O01O010O01O010O010O01O01O010O010O01O01O010O010ON3M2M3N3M2M4M2Nl>"}, "image_id": 828, "id": 13836}, {"iscrowd": 0, "category_id": 1, "bbox": [394.0, 76.0, 118.0, 144.0], "area": 9545, "segmentation": {"size": [512, 512], "counts": "\\UU61m?2M4M2N2M4M2M4M2M3N3L3N3L3N2N3L3N3L3N2M4M2M4M2M3N3M2M4M2M4M2M3N2M10O010O12M3N3N110O00010O010O01OgCnLd;S3YDoLh;P3UDSMk;n2RDUMm;k2QDWMPT1M3UOfNPCZ1l;JTD6f;0ZD8^;HbD8^;H[D?e;n1G9000000000000000000000000000000000000000000000000000N3O00000000000000000000000000000000000000000000000000000000000001O000001O0000000000000000000000000000000000000000000000000000000000000001OYOg0lNT1lNU1lNfVl0"}, "image_id": 828, "id": 13839}, {"iscrowd": 0, "category_id": 1, "bbox": [30.0, 345.0, 17.0, 14.0], "area": 211, "segmentation": {"size": [512, 512], "counts": "jZ?2n?;E000000000O100000O10000000002NUUX7"}, "image_id": 828, "id": 13840}, {"iscrowd": 0, "category_id": 1, "bbox": [124.0, 348.0, 42.0, 84.0], "area": 2681, "segmentation": {"size": [512, 512], "counts": "P[n14k?9H9G9G8H9G8H9G9F8I00000000000000O0100000000000000O01000000000007I8H9F9H9G9G8H9G8Hib\\5"}, "image_id": 828, "id": 13841}, {"iscrowd": 0, "category_id": 1, "bbox": [36.0, 357.0, 64.0, 125.0], "area": 6262, "segmentation": {"size": [512, 512], "counts": "Z[b01o?:F9G:F9G:F:F9G:F9F;F9G:F4L000000000O100000O100000000000O1000O100000000000O100000O100000000000O1000O101O:F:F9G:F9G:F:@?G:E:G:F`a]6"}, "image_id": 828, "id": 13842}, {"iscrowd": 0, "category_id": 1, "bbox": [501.0, 383.0, 11.0, 60.0], "area": 281, "segmentation": {"size": [512, 512], "counts": 
"amj79g?0000001O000WOi0WOi00000QD"}, "image_id": 828, "id": 13843}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 483.0, 47.0, 29.0], "area": 1204, "segmentation": {"size": [512, 512], "counts": "X?h0W?1000000000000000000O1000000000000000000O10000000000000000O1000000000000000000O10000009G:F9GQPX7"}, "image_id": 828, "id": 13844}, {"iscrowd": 0, "category_id": 1, "bbox": [349.0, 508.0, 25.0, 4.0], "area": 47, "segmentation": {"size": [512, 512], "counts": "oo^51n?11O000000N2001O00001O0000N2O11O00001O00001O0000Q`T2"}, "image_id": 828, "id": 13845}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 339.0, 2.0, 3.0], "area": 4, "segmentation": {"size": [512, 512], "counts": "c:3m?N_en7"}, "image_id": 829, "id": 13846}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 354.0, 25.0, 43.0], "area": 633, "segmentation": {"size": [512, 512], "counts": "R;W1j>00O01000O10O01N2N1N3N2M3N1O2M3N1N3N2N1N3N2M3N1N_Tc7"}, "image_id": 829, "id": 13847}, {"iscrowd": 0, "category_id": 1, "bbox": [200.0, 495.0, 22.0, 17.0], "area": 185, "segmentation": {"size": [512, 512], "counts": "o_T31n?1O1O1O1N2O1O1O1N2O1O1O1N21N1O2M3N2N2N2N1N]``4"}, "image_id": 830, "id": 13848}, {"iscrowd": 0, "category_id": 1, "bbox": [276.0, 496.0, 73.0, 16.0], "area": 607, "segmentation": {"size": [512, 512], "counts": "o_Z41k?4L4L4M30000001O0000001O0000001O000000001O0000001O0000001O000000001O0000001O0000001O000000001O0000001O0000001O000000001O0000001O0000001O00000000QPa2"}, "image_id": 830, "id": 13849}, {"iscrowd": 0, "category_id": 1, "bbox": [216.0, 502.0, 15.0, 10.0], "area": 89, "segmentation": {"size": [512, 512], "counts": "o_\\31m?2O1O1O1N2O1O11O1O1O1O001O1MVP\\4"}, "image_id": 830, "id": 13850}, {"iscrowd": 0, "category_id": 1, "bbox": [188.0, 0.0, 25.0, 21.0], "area": 239, "segmentation": {"size": [512, 512], "counts": "^Pn21n?1O2M3N2N2N1O2M3N00O10001O1O1N2O1O1O1N2O1O1O1O1NR`e4"}, "image_id": 831, "id": 13851}, {"iscrowd": 0, "category_id": 1, "bbox": [198.0, 0.0, 31.0, 28.0], "area": 343, "segmentation": {"size": [512, 512], "counts": "ePS31m?3N2N1O2M3N2N2N1OO01000000O01000001N2O1O1O1O1N2O1O1O1N2O1O1OQ`]4"}, "image_id": 831, "id": 13852}, {"iscrowd": 0, "category_id": 1, "bbox": [222.0, 0.0, 290.0, 299.0], "area": 68638, "segmentation": {"size": [512, 512], "counts": "YW_32i?5L4L4L5J5L4L3L5L4L4K6K4L4K5VBUN[=P2`BTN\\=[2J5L4L4K6K4L4K5L5K4K5L4L4K6K4L4L4K6K4L4K5L5K4K5L4L4K6K4L4K5L5K4K5L4L5J5L4LO104O10000001O0000001O000000001O0000001O000RHXHd7h7WH^Hh7m7000001O000000001O0000001O0000001O000000001O0000001O0000001O000000001O0000001O0000N2L4L400001O000000001O0000010O0000010O0000010O000000010O0000010O0000010O000000010O0000010O0000010O000000010O0000010O0000010O000000010O0000010O0000010O000000010O0000010O0000010O000000010O0000010O0000010O000000010O0000010O0000010O000000010O0000010O0000010O00000010O00000010O0000010O0000010O000000010O0000010O0000010O000000010O0000010O0000010O00000kN"}, "image_id": 831, "id": 13853}, {"iscrowd": 0, "category_id": 1, "bbox": [76.0, 156.0, 24.0, 21.0], "area": 352, "segmentation": {"size": [512, 512], "counts": "\\UV11j?5L4K5N2010O000001O01O0000010O000000010O0L4L4KUk]6"}, "image_id": 831, "id": 13854}, {"iscrowd": 0, "category_id": 1, "bbox": [55.0, 177.0, 21.0, 10.0], "area": 163, "segmentation": {"size": [512, 512], "counts": "hek01h?70000010O00000000000000010O000000000000]ji6"}, "image_id": 831, "id": 13855}, {"iscrowd": 0, "category_id": 1, "bbox": [400.0, 369.0, 112.0, 143.0], "area": 12852, "segmentation": {"size": [512, 512], "counts": 
"o_X61k?4L4K5L4L4L4L4K5L4L4L4L4K5L4L4L4L4K5L4L4L4L4L4K5L4L4L4L4K5L4L4L4L4L40000001O0000001O000000001O0000001O0000001O0000001O0000001O0000001O000000001O0000001O0000001O0000001O0000001O000000001O0000001O0000001O0000001O0000001O0000"}, "image_id": 831, "id": 13856}, {"iscrowd": 0, "category_id": 1, "bbox": [31.0, 0.0, 31.0, 16.0], "area": 289, "segmentation": {"size": [512, 512], "counts": "R`?1m?3O1O001O1O001O1O001O1O1O001O1O001O1O0000N2O1N2O1O1N2O1N2O1OQPQ7"}, "image_id": 832, "id": 13857}, {"iscrowd": 0, "category_id": 1, "bbox": [64.0, 0.0, 77.0, 43.0], "area": 2279, "segmentation": {"size": [512, 512], "counts": "`PP14i?3M3M4L3M301O01O01O0f@^OW?e000010O0001O0M3O2O00001O00001O00001O00001O001O00001O00001O00001O00001O00001O00001O00001O00001O0000O1M3M3M3M3M3M3000000M3M300M3M3M3MS`i5"}, "image_id": 832, "id": 13858}, {"iscrowd": 0, "category_id": 1, "bbox": [224.0, 0.0, 87.0, 44.0], "area": 2223, "segmentation": {"size": [512, 512], "counts": "XP`32l?2O2M3M2O2O001O001O1O001O001O001O1O001O0000N2O11O001O001O001O1O001O001O001O1O003M001O001O1O001O001O0000001O001O001O1O001O001O001O1O001OO1O1N2N2N2O1N2N2N2O1N2N2Ki@_OY?8g@L2KX?7i@K]?3e@L\\?39N2NR`T3"}, "image_id": 832, "id": 13859}, {"iscrowd": 0, "category_id": 1, "bbox": [385.0, 0.0, 85.0, 42.0], "area": 1832, "segmentation": {"size": [512, 512], "counts": "P`P61o?1O001O001O1O001O001O1O001O001O001O1O001O001O1O001O001O1O001O001O1O001O001O1O001O001O1O001O001O1O001O001O1O001O001O1O001O001O001O1O001O001ON2N2N2O1N2N2@QAMQ?2PALR?2QAKQ?3QAKQ?3RAKo>4RAJP?4?N2NRPe0"}, "image_id": 832, "id": 13860}, {"iscrowd": 0, "category_id": 1, "bbox": [483.0, 0.0, 21.0, 8.0], "area": 83, "segmentation": {"size": [512, 512], "counts": "P`a71o?1O1O2NO1O1O1001O1O1O1O1O1OO1O1O1O1O1O1OQP4"}, "image_id": 832, "id": 13861}, {"iscrowd": 0, "category_id": 1, "bbox": [302.0, 11.0, 79.0, 61.0], "area": 2692, "segmentation": {"size": [512, 512], "counts": "[Qg42l?3M2O2M2N3N2M2N3N1g@[OU?i0N3N2M2N3O00100O0N3O00100O0100O0kNYAR1j>10O10O010O1O0N3N10N2O2N1010O0N3M3M2O2M2N3M2O2M3O0010O010N2M2N3N1N3M3N1N3C[A[Og>d0ZA[Oh>b0[A[Oh>b0ZA\\Oh>e0:O001N2M2N3N1N3M3N1N3MW_Q2"}, "image_id": 832, "id": 13862}, {"iscrowd": 0, "category_id": 1, "bbox": [365.0, 25.0, 76.0, 81.0], "area": 2893, "segmentation": {"size": [512, 512], "counts": "^bf52l?2N3N1N3M2O2M3M2N3N1N300O010O0100O0100O0\\OWORBi0k=ZOUBe0j=]OUBd0h=^OYBb0d=A[B?d=B]B>`=E_B;_=HaB8\\=JeB6Y=LfB4X=NiB2T=1kBOU=2kBNT=2mBMT=3kBNU=1lBOS=2^BTO9j0Z=1[BWO;i0Y=6dBL]=4aBN^=2`B1`=O]B3d=L[B6d=S110O01M3M2O2M2N3N1N3M3M2O2M2N3N2M2N3N1N3M2N3GPA_OR?>PA@R??9M2N3N2M2N3N1Na^S1"}, "image_id": 832, "id": 13863}, {"iscrowd": 0, "category_id": 1, "bbox": [477.0, 98.0, 35.0, 68.0], "area": 1361, "segmentation": {"size": [512, 512], "counts": "[d^73l?1O2N2N2M3N2N1O2M3G\\OSAf0k>\\OSAf0k>[OTAf0k>\\OSAf0j>:N2N2N1N101O2N2N2M2O2N2N2M3N20O010000000OiL"}, "image_id": 832, "id": 13864}, {"iscrowd": 0, "category_id": 1, "bbox": [42.0, 0.0, 72.0, 44.0], "area": 1558, "segmentation": {"size": [512, 512], "counts": "]Pe03k?2M4M2N3M2N3O00001O001O001O001O00001O001O010O0010O0010O010O010O0010O010O0010O010O0010O010O0010O010O010O0010O010O0010O010O0010N1M4M2N3M2M4M2N2N\\oV6"}, "image_id": 833, "id": 13865}, {"iscrowd": 0, "category_id": 1, "bbox": [126.0, 0.0, 28.0, 28.0], "area": 462, "segmentation": {"size": [512, 512], "counts": "cPo13k?2N3M2N2N3L3N3M2N3O001O001O00O1N2N2N2N2M3N2N2N2N2N2M3NRPc5"}, "image_id": 833, "id": 13866}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 262.0, 18.0, 19.0], "area": 220, "segmentation": {"size": [512, 512], "counts": 
"V8>b?0010O010O010O00010O010O0N2N3L3N3Megf7"}, "image_id": 833, "id": 13867}, {"iscrowd": 0, "category_id": 1, "bbox": [92.0, 301.0, 27.0, 21.0], "area": 316, "segmentation": {"size": [512, 512], "counts": "gY^12k?4M2N2M4O010O010O00010O010O010O00010O010O01O0M3N3M2M]VT6"}, "image_id": 833, "id": 13868}, {"iscrowd": 0, "category_id": 1, "bbox": [438.0, 467.0, 19.0, 14.0], "area": 156, "segmentation": {"size": [512, 512], "counts": "i^k62k?4M2O1010O01O010O01O01O010O01O0N2N3MYQk0"}, "image_id": 833, "id": 13869}, {"iscrowd": 0, "category_id": 1, "bbox": [489.0, 498.0, 23.0, 14.0], "area": 178, "segmentation": {"size": [512, 512], "counts": "fod72l?3M210O01OZ@L`?5]@Md?700001O001O001O00001O001O001O00"}, "image_id": 833, "id": 13870}, {"iscrowd": 0, "category_id": 1, "bbox": [306.0, 25.0, 206.0, 177.0], "area": 22267, "segmentation": {"size": [512, 512], "counts": "fSi42l?2M4M2M4M2M3N3L3N3M2M3N3L3N3L3N2N3L3N3L3N2M4M2N3L3N3L3N2M4M2UC[M[FTA;k>BYA=h>@ZA>V?L3N3M2M3Nob5"}, "image_id": 834, "id": 13889}, {"iscrowd": 0, "category_id": 1, "bbox": [331.0, 385.0, 50.0, 40.0], "area": 1775, "segmentation": {"size": [512, 512], "counts": "RlU5W1i>000000000000000000000000000O100000000000000008H0O10O100000000000000000000000000000000000000000000hSQ2"}, "image_id": 834, "id": 13890}, {"iscrowd": 0, "category_id": 1, "bbox": [298.0, 407.0, 35.0, 35.0], "area": 693, "segmentation": {"size": [512, 512], "counts": "]]e42k?3N3M2M3N3M2M4M2N3N100010O010O010O00010O010O010ZOo@;Q?CQA=o>ATA?V?01O0N3L3N2N3L3NgRi2"}, "image_id": 834, "id": 13891}, {"iscrowd": 0, "category_id": 1, "bbox": [405.0, 408.0, 27.0, 28.0], "area": 451, "segmentation": {"size": [512, 512], "counts": "ZmZ63k?2N2N3L3N3M2N3M2O20O010O0010O0010O001M2M4M2N3M2N3M2MScW1"}, "image_id": 834, "id": 13892}, {"iscrowd": 0, "category_id": 1, "bbox": [160.0, 417.0, 28.0, 28.0], "area": 480, "segmentation": {"size": [512, 512], "counts": "c]`23j?3N3L3N2M4L3N30O00010O010O00010O00010O0O2M2M3N3L3M4M2MjbQ5"}, "image_id": 834, "id": 13893}, {"iscrowd": 0, "category_id": 1, "bbox": [186.0, 423.0, 57.0, 43.0], "area": 1491, "segmentation": {"size": [512, 512], "counts": "n]m22l?2M4M2N2M4M2M4M2M3O2O010O00010O010O00010O010O00010O010N1M3N3O010O010O00010O010O00010O010OO2L3N2M4M2M4M2M3N3L3N3L`RV4"}, "image_id": 834, "id": 13894}, {"iscrowd": 0, "category_id": 1, "bbox": [412.0, 433.0, 63.0, 56.0], "area": 1944, "segmentation": {"size": [512, 512], "counts": "b^^62l?2N2M4M2M4M2M3N3M2010O0010O0010O01N1N2M4M2N3L3N2010O01O010O01O01O010O01O01O010O01O010O01O01oN_Ab0`>[OcAe0^>XOdAh0\\>UOhAk0W>SOkAm0e>0O00O2M2M4M2N3L3N2M4M2MdQb0"}, "image_id": 834, "id": 13895}, {"iscrowd": 0, "category_id": 1, "bbox": [243.0, 442.0, 66.0, 49.0], "area": 1867, "segmentation": {"size": [512, 512], "counts": "cni32l?3L3N2N3M2M4M2N3M2O20O00010O010O010O010O010O00010O010O010O01O0N2N3L3N3M2N3O010O01O01O010O010O010O01O0N2M4M2N3M2M4M2N3M2M4M2N2N3L3NlQU3"}, "image_id": 834, "id": 13896}, {"iscrowd": 0, "category_id": 1, "bbox": [163.0, 467.0, 54.0, 45.0], "area": 1392, "segmentation": {"size": [512, 512], "counts": "Yoa21m?2N3L3N2M4M2N3L3N3N11O010O01O010O01O010O01O01O010O01n@WOk>i0RA[On>k0010O0010O0010O010ON2N200001O001O00001L3APALS?1o@MS?0QALS?1o@MS?1PALR?1`Qc4"}, "image_id": 834, "id": 13897}, {"iscrowd": 0, "category_id": 1, "bbox": [228.0, 474.0, 25.0, 27.0], "area": 409, "segmentation": {"size": [512, 512], "counts": "[_b33k?2M4M2M4M2N2M4O010O01O01O010O01O01OM4M2M4M2M3N3MQQQ4"}, "image_id": 834, "id": 13898}, {"iscrowd": 0, "category_id": 1, "bbox": [399.0, 495.0, 31.0, 17.0], "area": 312, "segmentation": 
{"size": [512, 512], "counts": "ooW61l?3N2M3N2M3M3001O00001O001O00001O00001O0F_@5f?0001O00001O001O000OU`X1"}, "image_id": 834, "id": 13899}, {"iscrowd": 0, "category_id": 1, "bbox": [358.0, 499.0, 24.0, 13.0], "area": 199, "segmentation": {"size": [512, 512], "counts": "n_c52l?2M3N2O11O001O00O1N2N20000001O001O0K\\@Ne?O^@0i?0001O00R`P2"}, "image_id": 834, "id": 13900}, {"iscrowd": 0, "category_id": 1, "bbox": [316.0, 510.0, 4.0, 2.0], "area": 5, "segmentation": {"size": [512, 512], "counts": "o_n41n?11O00Q`o2"}, "image_id": 834, "id": 13901}, {"iscrowd": 0, "category_id": 1, "bbox": [350.0, 511.0, 2.0, 1.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "o__51o?0Q`_2"}, "image_id": 834, "id": 13902}, {"iscrowd": 0, "category_id": 1, "bbox": [181.0, 0.0, 34.0, 13.0], "area": 234, "segmentation": {"size": [512, 512], "counts": "P`j21o?001O001O00001O001O001O00001O001O001O00001O001O001O00001OO1M3N2N2MS`d4"}, "image_id": 835, "id": 13903}, {"iscrowd": 0, "category_id": 1, "bbox": [212.0, 0.0, 65.0, 38.0], "area": 1251, "segmentation": {"size": [512, 512], "counts": "hPZ32l?3L3N3O00010O010O00010O0N2N3L3N3L3M3N3L301O00001O001O00001O0000N2M3M3N2M3N2M3O1001O00001O001O00001OO1M3N2M3N2M3M3N2M30000001O00OQ`e3"}, "image_id": 835, "id": 13904}, {"iscrowd": 0, "category_id": 1, "bbox": [381.0, 0.0, 48.0, 20.0], "area": 657, "segmentation": {"size": [512, 512], "counts": "Y`n51l?3N3L3O101O001O00001O001O00001O001O00M3O100001O001O00001O001O00001O001O00001O00O1M3N2M3N2M3N2MS`Y1"}, "image_id": 835, "id": 13905}, {"iscrowd": 0, "category_id": 1, "bbox": [458.0, 0.0, 37.0, 24.0], "area": 608, "segmentation": {"size": [512, 512], "counts": "]PU71m?3L3M4M2M301O00001O001O00001O001O00001O00001O001O000000N2M3M3N2M3N2M3M3NR`8"}, "image_id": 835, "id": 13906}, {"iscrowd": 0, "category_id": 1, "bbox": [495.0, 8.0, 17.0, 37.0], "area": 377, "segmentation": {"size": [512, 512], "counts": "Rag71m?3L3N2M4M2a@CX?d0N3M2N3O01O010ON3L3N2H"}, "image_id": 835, "id": 13907}, {"iscrowd": 0, "category_id": 1, "bbox": [421.0, 15.0, 27.0, 27.0], "area": 463, "segmentation": {"size": [512, 512], "counts": "Qab62k?3N3L3N3L3N2M4O001O01O010O01O01O010O000N3L3N3L3N2M4M\\oo0"}, "image_id": 835, "id": 13908}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 19.0, 29.0, 49.0], "area": 741, "segmentation": {"size": [512, 512], "counts": "o0U1j>00O0100000000000O011O2N2N2N2N2M3N2N1O2N2N2N2M3N2N1O2N2No^a7"}, "image_id": 835, "id": 13909}, {"iscrowd": 0, "category_id": 1, "bbox": [348.0, 35.0, 98.0, 69.0], "area": 3129, "segmentation": {"size": [512, 512], "counts": "TR^52l?3M2M4M2M3N3L3N3M2M3N3L3N3M210O01O01O010O01O01O010O01O010O01O010O01O01O01kN[An0d>oN_AQ1h>010O010O00010O010O00010O010O010O00010O010O00010O010O00010O010O010O00010O010O00010O0M4M2N21L3N2N3L3N3L3N2M4M2N3L3N3L3NnmP1"}, "image_id": 835, "id": 13910}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 48.0, 70.0, 87.0], "area": 3136, "segmentation": {"size": [512, 512], "counts": "l2l0R?3M2N210O0010O0010O0010O010O0010O0010O00N3L3N3L3N3M2M3N3L3N3M20010N1M4M2N2M3NO010O103L3N2N3L3N3L3N3M2M3N3L3N3M2M3N3L3N3M201O0Hj@GU?7m@HU?4o@IS?5o@HU?4=N2NWnl6"}, "image_id": 835, "id": 13911}, {"iscrowd": 0, "category_id": 1, "bbox": [68.0, 76.0, 57.0, 82.0], "area": 2567, "segmentation": {"size": [512, 512], "counts": "cTR11\\>1SC1k<2QC1l<2RC1k<2QC1l<2RC1k<1RC2l<1QC1l<2RC1l<1PC2P=NnB5Q=KlB8U=HhB:X=FeB>[=BbB`0^=@_Bd0`=\\O^Bf0c=k0010O010O00010O01O0NO012M3N3L3N2APBlNS>P1PBmNS>Q1PBlNP>S1SBjNm=Y1>0O010OO1N3L3N3M2M3N3L3N3L3N2M4M2M4M2N^]Q6"}, "image_id": 835, "id": 13912}, {"iscrowd": 0, "category_id": 1, 
"bbox": [127.0, 85.0, 97.0, 75.0], "area": 3425, "segmentation": {"size": [512, 512], "counts": "]co11?1l>3PA0n>2PA0m>4PAOk>5RANk>f0M2N3L3N201O010O010O01O01O010O010O00010O010OnN_Ae0b>WOaAj0^>TOeAk0[>SOgAm0f>10O010O010O00010O010O0010O0010O010O0010O0010O010O00010O010O010O00010O01L3N3M2M3N3O001O010O01O01O01M2M4M2N3L3N2N3L3N3M2M4M2M3N3M2M4M[l_4"}, "image_id": 835, "id": 13913}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 91.0, 78.0, 91.0], "area": 3800, "segmentation": {"size": [512, 512], "counts": "VUi62k?3N3M2M4M2M3N3L301O01OM4M2N3L3N3L3N2M4N11M2N3L3N2N11N2]AjNY>Z1cAjNZ>_1N3L3N2M4M2M4M2N2010O01O01O010O01O010O01OO2L2O0O10O011N3N3D]BVNf=g1]BWNe=g1UO[Ak0n>M2N2N3L3N3M2M4M2N2MV\\d1"}, "image_id": 835, "id": 13915}, {"iscrowd": 0, "category_id": 1, "bbox": [302.0, 139.0, 109.0, 76.0], "area": 3725, "segmentation": {"size": [512, 512], "counts": "]Ug43k?3L3N3L3M3N3L3N3L3N2M4M2M4N10010O00010O010O00010O010O00010O010OO1N3O010O00010O010O00010nNYAi0g>TO]Al0k>01O01O010O01O01O010O01O01O010O01OQATOj>R1O01O010O01O01O010O01O0N2N3N101O01O010O00010O010O00010O010O00010M2M4M2M3N3L3N3L3N2M4M2M3M4M`Zb1"}, "image_id": 835, "id": 13916}, {"iscrowd": 0, "category_id": 1, "bbox": [111.0, 150.0, 93.0, 70.0], "area": 2856, "segmentation": {"size": [512, 512], "counts": "feg14j?2N2M4M2M4M2N2M4M2N3L3N3L3O101O010O01O010O01O010O01O01O010OhN^AR1h>1O01O010O01O04L01O010O010O01O01O010O010O00010O010O01O01O010O010O01O01O010O010OROTAh0l>VOVAj0Q?0O010O01O01O010O010O01N1N2M4M2N3L3N2M4M2Nnii4"}, "image_id": 835, "id": 13917}, {"iscrowd": 0, "category_id": 1, "bbox": [498.0, 151.0, 14.0, 30.0], "area": 253, "segmentation": {"size": [512, 512], "counts": "^Ui71m?3L3N3M2M3N3L3N3L3O1010O01WK"}, "image_id": 835, "id": 13918}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 171.0, 4.0, 9.0], "area": 22, "segmentation": {"size": [512, 512], "counts": "[59h?N2M3Ncjm7"}, "image_id": 835, "id": 13919}, {"iscrowd": 0, "category_id": 1, "bbox": [21.0, 184.0, 55.0, 75.0], "area": 2393, "segmentation": {"size": [512, 512], "counts": "ef:2l?3M2M3N3L3N3L3N2N3L3N3`AQOn=n0PBUOo=l0mAWO2Ie=o0WB[O1Hh=m0UB]O0Jj=j0RB_O2Im=g1O01O01O010O0O1N3L3N3O010O01O000M4M2M4M2M3N3M2M4M2M4M2N2M4M2M4M2M3N3@g@5[?Hi@4c?N3Lgii6"}, "image_id": 835, "id": 13920}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 186.0, 2.0, 5.0], "area": 7, "segmentation": {"size": [512, 512], "counts": "j55l?MWjn7"}, "image_id": 835, "id": 13921}, {"iscrowd": 0, "category_id": 1, "bbox": [101.0, 206.0, 77.0, 66.0], "area": 2689, "segmentation": {"size": [512, 512], "counts": "`gb12l?3M2N3M2O2M2N3M3M2N3M2O2M2N3M2N3M2010O0100O0100O010OO2L3N3dA_NV>g10O10O10O010O010O010WOkAEU>9mAHS>7nAHR>8nAIR>7nAHS>7mAJR>7nAIR>6nAJR>7nAGT>9kAEW>;jABY>>gA_O[>a0eA]O^>c0bAZO`>f0`AYOb>g0^AVOd>j0\\ATOg>l07O010O0100O010O010O01WOo@`0R?^OPA`0Z?M3M2N3N1N3M2NdhV5"}, "image_id": 835, "id": 13922}, {"iscrowd": 0, "category_id": 1, "bbox": [2.0, 236.0, 25.0, 25.0], "area": 408, "segmentation": {"size": [512, 512], "counts": "oW11k?4L4M3L5K4000010O0000010O000010O0000010K4M3L5K4LcXb7"}, "image_id": 835, "id": 13923}, {"iscrowd": 0, "category_id": 1, "bbox": [238.0, 249.0, 74.0, 100.0], "area": 3437, "segmentation": {"size": [512, 512], "counts": "\\Zg32l?2M4M2M3N3L301O00010O010O000O2M2N3L3N2M4M2M4M2M3N3L3N3UO[NVCh1ha17N4L3N2M4M2M40O0001M2N3L3N2M4M2M4M2M3N3L3NSgX2"}, "image_id": 835, "id": 13927}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 279.0, 12.0, 64.0], "area": 392, "segmentation": {"size": [512, 512], "counts": "g8P2P>L5K4K5L4lNbAb0c>XObAd0P?K4K5L4L4Lmfi7"}, "image_id": 835, "id": 13928}, {"iscrowd": 0, 
"category_id": 1, "bbox": [15.0, 286.0, 81.0, 75.0], "area": 3182, "segmentation": {"size": [512, 512], "counts": "Pj72m?3N2M2N3N2M3N1N3N2M3M3N1N3N2M3N1N3N2M1O1IdNjA[1W>gNfAY1Z>iNeAW1Z>710O010O0N3O01O010O0102M2O0O1O010O01O010O2O2M2O2N2000O10O100000O0100O1M2O2M3N2M2O2M3N2M3M2O2M3N2M010O00102M2H`@Nb?0`@Mc?0ce_6"}, "image_id": 835, "id": 13929}, {"iscrowd": 0, "category_id": 1, "bbox": [485.0, 326.0, 27.0, 46.0], "area": 795, "segmentation": {"size": [512, 512], "counts": "Rkb71k?1V@1h?5N2N2N2N2N2G@o@b0o>@o@b0o>@o@b0o>9N2N2N2N2N2N2N01O2O100N2N2N2N2NdE"}, "image_id": 835, "id": 13930}, {"iscrowd": 0, "category_id": 1, "bbox": [347.0, 345.0, 62.0, 95.0], "area": 3270, "segmentation": {"size": [512, 512], "counts": "el]53n>NnA5n=NoA5n=OnA4P>NmA6o=NnA4H@n=?WB4HAm=>XB>i=BSBa0m=_OPBe0o=f0M4L3M3N3M20010O00010O0010O0010O000M4O00010O0010O0UOcBSO]=k0eBVO[=f0hBZOX=c0lB\\OT=a0oB@Q=NdBD>=ne0h<\\OcB1:K?g0dY1010O00010O010O0010OUO\\A:d>D^A=b>_OaAa0_>]OdAb0]>[OeAd0\\>YOhAd0k>M3N3L3N3L3N2NZST3"}, "image_id": 835, "id": 13933}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 381.0, 47.0, 61.0], "area": 1702, "segmentation": {"size": [512, 512], "counts": "Q\\i66j?9G9G7I0O10000000000000O01000000000000000O010001O9G9G9G00O10O1000000001O9G9F9H5K0000006J9G9G[R?"}, "image_id": 835, "id": 13934}, {"iscrowd": 0, "category_id": 1, "bbox": [209.0, 407.0, 69.0, 75.0], "area": 2855, "segmentation": {"size": [512, 512], "counts": "amX31m?2N3L3N2M4M2M4M2N3L3N210O0hASO^=m0^BWOa=i0]BYOc=g0ZB\\Og=d0VB_Oi=a0UBAl=?PBEo=;oAGR>9jAKU>P10010O010O00010O010O00010O010O010O00010O010O00010O010O010O0001M2M4M2EcAPO`>m0dAPO_>m0;M4M2N2M4M2M4M2M4IY@1l?N`bd3"}, "image_id": 835, "id": 13935}, {"iscrowd": 0, "category_id": 1, "bbox": [318.0, 445.0, 90.0, 67.0], "area": 3472, "segmentation": {"size": [512, 512], "counts": "a^o41m?2V@0a?9N3M2N2N2N02O2O01g@_OR?`0l@BT?f0N1N2N3M2WAnNe>R1XAPOh>U10O010O03M10O010O0010O010O01O010O01O01O01cAhNQ>W1mAlNR>U1kAmNV>R1hAQOW>\\110O01O001O00O1N2N2N2M3N2O11O001O001O00001O001N1N3M2WOhAG\\>6gAG[>9eAE^>:cAE]>:cAB`>?_A_Oc>a0^A\\Oe>d0ZAZOh>f0YAWOj>2RAb0Y?010O01O0M4M2N3M2N3M2M_ac1"}, "image_id": 835, "id": 13936}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 455.0, 1.0, 4.0], "area": 4, "segmentation": {"size": [512, 512], "counts": "Wno74e1"}, "image_id": 835, "id": 13937}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 468.0, 17.0, 41.0], "area": 412, "segmentation": {"size": [512, 512], "counts": "g>V1j>O1000000M3F70300000000N3E:L400JaQg7"}, "image_id": 835, "id": 13938}, {"iscrowd": 0, "category_id": 1, "bbox": [33.0, 468.0, 45.0, 44.0], "area": 1499, "segmentation": {"size": [512, 512], "counts": "go`01f?9N200]@KZ?>L400000000G9J60000000000000000001O00L4O1000000001J50000001O00000001O00000K5F:G9Gcah6"}, "image_id": 835, "id": 13939}, {"iscrowd": 0, "category_id": 1, "bbox": [154.0, 477.0, 65.0, 35.0], "area": 1598, "segmentation": {"size": [512, 512], "counts": "m_]23m?000C0j@0V?4e@M[?6b@J^?DQA=o>:O1000000O1000000O1000000O10000O1000000O1003M3M3MO1000000O10000O1000000O1000000OLRAXOn>h0501O3L5L4L3L5L4LXPb4"}, "image_id": 835, "id": 13940}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 480.0, 1.0, 3.0], "area": 3, "segmentation": {"size": [512, 512], "counts": "Poo73m0"}, "image_id": 835, "id": 13941}, {"iscrowd": 0, "category_id": 1, "bbox": [421.0, 481.0, 49.0, 31.0], "area": 1132, "segmentation": {"size": [512, 512], "counts": "mob62k?4M2M3M3O100N2M3N2M3N2M300001O00001O001O00001OM3N200001O00001O001O00001O001O00001O001YOk@a0Z?01Hc@I_?5d@H^?59Mc`d0"}, "image_id": 835, "id": 13942}, {"iscrowd": 0, "category_id": 
1, "bbox": [511.0, 488.0, 1.0, 3.0], "area": 3, "segmentation": {"size": [512, 512], "counts": "Xoo73e0"}, "image_id": 835, "id": 13943}, {"iscrowd": 0, "category_id": 1, "bbox": [92.0, 493.0, 32.0, 19.0], "area": 526, "segmentation": {"size": [512, 512], "counts": "n_^12d?:I700001O0000000000000000001O0000000000000000001O00000000000Ig`Q6"}, "image_id": 835, "id": 13944}, {"iscrowd": 0, "category_id": 1, "bbox": [299.0, 499.0, 29.0, 13.0], "area": 191, "segmentation": {"size": [512, 512], "counts": "ooe41n?1O1O1O1O1O100O1O1O1O1O1O11O1O2N1O1O1O1O1OO1O1001O1O1O1OR`k2"}, "image_id": 835, "id": 13945}, {"iscrowd": 0, "category_id": 1, "bbox": [33.0, 0.0, 64.0, 33.0], "area": 819, "segmentation": {"size": [512, 512], "counts": "P``01o?00001O001O00001O001O00001O001O00001O001`@HV?8h@KX?4e@O[?;10O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O0O2L3N2M4M2Mb__6"}, "image_id": 836, "id": 13946}, {"iscrowd": 0, "category_id": 1, "bbox": [132.0, 0.0, 33.0, 29.0], "area": 558, "segmentation": {"size": [512, 512], "counts": "hPR21m?3L3HKc@8Y?Kd@8Z?8M4M2O2O00O1N2N2M3N2O11O00001O001O00O1M3M3N2N2001OG^@4g?O1Nm_]5"}, "image_id": 836, "id": 13947}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 0.0, 100.0, 35.0], "area": 2136, "segmentation": {"size": [512, 512], "counts": "P`o2N^A4`>N]A6IG\\>7hA4JH[>6iA5IG[>7iA?U>DhA?U>g0M4M2M3N3L3N3O001O01O000N3L3N3O01OO2L3N3L3N2\\OfA_O^>>eA_O]>?eA_O^>=fA_O]>?eA_O^>=eA@^>>c0M4M2N3L3Nh_c1"}, "image_id": 836, "id": 13949}, {"iscrowd": 0, "category_id": 1, "bbox": [427.0, 3.0, 73.0, 76.0], "area": 2778, "segmentation": {"size": [512, 512], "counts": "^ae65k?7I8H7I6JO0100000000000O10O100000000VOTO`Bk0a=\\OXBd0h=DoA=Q>i0O100000CSBgNm=X1>000O1000000L310O1000000000O1AfAXOZ>h0mAQOR>o0`00LoNYAQ1f>500000004L000O100000O100000O100001N8I8H7I6J00002N8Ggn5"}, "image_id": 836, "id": 13950}, {"iscrowd": 0, "category_id": 1, "bbox": [240.0, 38.0, 67.0, 47.0], "area": 1947, "segmentation": {"size": [512, 512], "counts": "kQh31l?4M2M4d@Gm>;PAHm>;QAGP?9l@KS?a0010O0010O0010O0010O0010ON2M4M2M4M2N210ON2N3L3O2O01O010O01O01O010O01O01O010O01O01O010O010O00010O01L3N2M4M2N3L3N2M4M2M4M[^V3"}, "image_id": 836, "id": 13951}, {"iscrowd": 0, "category_id": 1, "bbox": [211.0, 62.0, 31.0, 16.0], "area": 434, "segmentation": {"size": [512, 512], "counts": "oaY3=c?2N0000O100000000000000000O10000000000000000000O1000000000d00000000000000O0100000000000000c0]O2N00O10000000000000O11O4L0000O10000000000M30O1000000000000000000000O9Hd0\\Omkd4"}, "image_id": 836, "id": 13953}, {"iscrowd": 0, "category_id": 1, "bbox": [118.0, 110.0, 20.0, 16.0], "area": 226, "segmentation": {"size": [512, 512], "counts": "^Sk18h?0000001O0000000^@H\\??O0001O000000000I7001`lj5"}, "image_id": 836, "id": 13954}, {"iscrowd": 0, "category_id": 1, "bbox": [122.0, 143.0, 29.0, 27.0], "area": 730, "segmentation": {"size": [512, 512], "counts": "aTm17h?c0^O00000000000000000000000000000000000000000000O0100004L][d5"}, "image_id": 836, "id": 13955}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 360.0, 44.0, 61.0], "area": 1650, "segmentation": {"size": [512, 512], "counts": "X;a1`>00O01000O01000O0100O0100O01000O01000O001M2O2M3N10100N1N3N2M2O2M2O2M30O0100N1N3M3N1D_@3OOg?0[@N`SZ7"}, "image_id": 836, "id": 13956}, {"iscrowd": 0, "category_id": 1, "bbox": [135.0, 467.0, 36.0, 45.0], "area": 874, "segmentation": {"size": [512, 512], "counts": "joS22k?3W@Mc?9N2N2N2M3N2N2N2N2N2M3N2N2N2N2N2N20O2M2N3M2M4M2N3M2N2N3L3N3M2N3M2M4MWQZ5"}, "image_id": 836, "id": 13957}, {"iscrowd": 0, "category_id": 1, "bbox": [48.0, 507.0, 14.0, 5.0], "area": 43, 
"segmentation": {"size": [512, 512], "counts": "o_h01m?2N200001O00001O001O001O00Q`P7"}, "image_id": 836, "id": 13958}, {"iscrowd": 0, "category_id": 1, "bbox": [225.0, 75.0, 23.0, 15.0], "area": 185, "segmentation": {"size": [512, 512], "counts": "bb`31l?3N3M200010O010O00010O0010O00010O01O01O01M2MamS4"}, "image_id": 838, "id": 13959}, {"iscrowd": 0, "category_id": 1, "bbox": [281.0, 0.0, 29.0, 25.0], "area": 486, "segmentation": {"size": [512, 512], "counts": "^`\\43k?3M2M3N3M2O2N1O2O00001O001O001O00001O00M3N2N2M3N2N2M3N2MSPU3"}, "image_id": 839, "id": 13960}, {"iscrowd": 0, "category_id": 1, "bbox": [366.0, 0.0, 25.0, 12.0], "area": 163, "segmentation": {"size": [512, 512], "counts": "PPg51o?1O001O1O1O1O1O001O1O1O1O1O00O1O1O1O1O1O1O1O1O1NR`l1"}, "image_id": 839, "id": 13961}, {"iscrowd": 0, "category_id": 1, "bbox": [412.0, 0.0, 7.0, 4.0], "area": 18, "segmentation": {"size": [512, 512], "counts": "PP^61o?1O1O001OO1OQ`^1"}, "image_id": 839, "id": 13962}, {"iscrowd": 0, "category_id": 1, "bbox": [495.0, 0.0, 17.0, 33.0], "area": 341, "segmentation": {"size": [512, 512], "counts": "P`g71o?0d@0h>1VA1i>0TA3k>NSA4l>MRA4n>LPA7o>Jo@8P?In@9Q?Hm@9S?;O1O1O001O1O1O"}, "image_id": 839, "id": 13963}, {"iscrowd": 0, "category_id": 1, "bbox": [207.0, 1.0, 59.0, 49.0], "area": 1797, "segmentation": {"size": [512, 512], "counts": "m`W33k?2N3M2N2N3M2N2M4M2N3M2N2N3N11O01O010O01O01O01O01O010O01O01O01OM4O000010O010O00010O00010O010OM3010N1M3N3@TAGo>6UAFn>8TAFo>6UAFn>7`0M2Mhoj3"}, "image_id": 839, "id": 13964}, {"iscrowd": 0, "category_id": 1, "bbox": [308.0, 11.0, 70.0, 99.0], "area": 3412, "segmentation": {"size": [512, 512], "counts": "fRj41n?2M3N2N2N2N2N2N2N1O2h@]OP?e0n@]Oo>l0N1O2VOPOaBR1]=oNbBS1lN2N2M3N2N2N1O2N2NkmR2"}, "image_id": 839, "id": 13965}, {"iscrowd": 0, "category_id": 1, "bbox": [428.0, 17.0, 70.0, 75.0], "area": 2366, "segmentation": {"size": [512, 512], "counts": "ZQf61n?2N2N2M2O2N2N2N2N1O2N2N2N2N2N1O2M3N2N2N1O2N2N2000O0100000000O0100000aAdNW>^1gAdNX>\\1gAfNY>a1O010000000O0100\\NgA`1]>0000O010000000O1N1POaA>a>@aA>a>@aA>a>_OaA`0a>^OaA?a>@aA>a>@aA>R?N1N3N2N2N2M3N1Oim6"}, "image_id": 839, "id": 13966}, {"iscrowd": 0, "category_id": 1, "bbox": [172.0, 25.0, 28.0, 29.0], "area": 402, "segmentation": {"size": [512, 512], "counts": "[Qf22m?2M2O2N2N2M3N1O2N2N2M3N00000001N3N2N2N2N1N3N2N2N1N3N2Nonk4"}, "image_id": 839, "id": 13967}, {"iscrowd": 0, "category_id": 1, "bbox": [416.0, 25.0, 16.0, 16.0], "area": 140, "segmentation": {"size": [512, 512], "counts": "oP`63l?1O2N2N2N2N2000O100O1N2N1O2M3NPoW1"}, "image_id": 839, "id": 13968}, {"iscrowd": 0, "category_id": 1, "bbox": [156.0, 29.0, 16.0, 17.0], "area": 130, "segmentation": {"size": [512, 512], "counts": "XQ^21n?1N3N2N1N3N2M3N02M2O2M3N1O2M3NonY5"}, "image_id": 839, "id": 13969}, {"iscrowd": 0, "category_id": 1, "bbox": [384.0, 75.0, 70.0, 75.0], "area": 2429, "segmentation": {"size": [512, 512], "counts": "PSP61n?2N2M3N2N2N2N1O2N2N2N2N200000000000000O1N1O2N2N2N2M3N2N2N2000000O1N2N2N1O2N2N5K2N20O10000000000000OPOPB1P>MRB3n=MRB3n=LSB4m=JUB6k=GXB7j=GXB7j=GXB7j=GXB7j=CgAIa0b0j=CgAIa0b0j=C[B;h=CZB;h=CZB;g>N2N2N2N2N2Nlkl0"}, "image_id": 839, "id": 13970}, {"iscrowd": 0, "category_id": 1, "bbox": [348.0, 112.0, 72.0, 82.0], "area": 2562, "segmentation": {"size": [512, 512], "counts": "XT^51n?2M2O2N2N2N2M3N1O2N2N2M3N2N1O2M3N2N2N2N1010000000O01000000000O010000O1N2fAdNm=]1QBfNo=Z1oAhNQ>X1mAjNR>W1lAkNT>a10O100000O1000O100000O10[NkA^1U>`NmA_1T>_NnAa1Y>N2nNaA?b>^O`Aa0a>^OaA`0a>^OaA`0a>^OaA`0a>^OaA?b>^OaA`0`>_OaA`0Q?N2N2M3N2N1O2Nej]1"}, "image_id": 839, "id": 
13971}, {"iscrowd": 0, "category_id": 1, "bbox": [64.0, 144.0, 16.0, 16.0], "area": 134, "segmentation": {"size": [512, 512], "counts": "iTP11n?2N2N2N2M3N2N1O11N2M3N2N2N1O2N[kg6"}, "image_id": 839, "id": 13972}, {"iscrowd": 0, "category_id": 1, "bbox": [286.0, 151.0, 87.0, 81.0], "area": 3326, "segmentation": {"size": [512, 512], "counts": "UU_42m?2N2N2N2M3N2N1O2N2N2N2N2N20000000O10000000O1O1N2N2N2N1O2N2N200O10000000000O0100000O1N2N2O100O10O10cA`NY>`1eAbN[>b1N2N1O2N2N2O1O100000O10000000O100000N2N2POlA1V>MlA1V>MlA1U>MnA1T>KPB3R>JQB6o=HSB8m=FUB8m=FUB7n=GSB8n=GTB7n=GTB7n=GTB6m>N2N2N[YU2"}, "image_id": 839, "id": 13973}, {"iscrowd": 0, "category_id": 1, "bbox": [116.0, 161.0, 81.0, 82.0], "area": 3482, "segmentation": {"size": [512, 512], "counts": "YVj12m?2N2N2N2N2N2M3N2N2N2N2N2N2TATOa>n0]ATOa>n0]ASOb>n0]ATOa>n0]ATOa>W1N2N2N2N2N0O1000001O2N2N2N2N2N2N1O2N2N2N2000000000O1N2M3N2N2N1O2N2N2N2N2N2N2N2N2N2N2N2N2M3N2N2N2N1O2N2N2N2N2N2N2N2N2N2N2M3N2NhYm4"}, "image_id": 839, "id": 13974}, {"iscrowd": 0, "category_id": 1, "bbox": [503.0, 176.0, 9.0, 18.0], "area": 91, "segmentation": {"size": [512, 512], "counts": "iek72m?2N2N2M3N1O2N2N2_J"}, "image_id": 839, "id": 13975}, {"iscrowd": 0, "category_id": 1, "bbox": [48.0, 177.0, 37.0, 36.0], "area": 642, "segmentation": {"size": [512, 512], "counts": "oUh01n?2M3N2N2N2N1N3N2N2N2N2N20O10O1000000000O10O100000N2N2N1N3N2N2N2N2N2M2O2NlYe6"}, "image_id": 839, "id": 13976}, {"iscrowd": 0, "category_id": 1, "bbox": [260.0, 199.0, 81.0, 85.0], "area": 3291, "segmentation": {"size": [512, 512], "counts": "PWR41n?2N2M3N1O2N2N2M3N1O2N2N2M3N2N1O2M3N2N2N2N110O10000000O10O100000O1000O1N2N2N1O2N2M3O1O10OoAXNk=h1SBZNm=f1PB]NP>j100O10O10000000O10O100000O10SNQBd01:n=POSBd01;o=QOnAb05=k=SOPB>6?i=UOQB99?g=YOPB7;>e=]OPB3=>c=APBO?>a=EPBKa0>a=GnAHc0?b=IkAFe0?b=M`B1b=M`B1a=0_BNc=0_BMd=1]BNe=0]BNe=0]BNe=0]BNe=0TZe2"}, "image_id": 839, "id": 13977}, {"iscrowd": 0, "category_id": 1, "bbox": [476.0, 209.0, 36.0, 61.0], "area": 1331, "segmentation": {"size": [512, 512], "counts": "RW^71n?2O1N1O2O1M3N2M3RABW>`0gABV>`0iABU>`0iABV>?hACW>>gADY>:bAI^>7`AK_>5`AM^>k0O2M3N2N2M3000N2N1O2N2N2N1O2000O0O2WI"}, "image_id": 839, "id": 13978}, {"iscrowd": 0, "category_id": 1, "bbox": [131.0, 234.0, 41.0, 39.0], "area": 754, "segmentation": {"size": [512, 512], "counts": "hgQ22l?3N2N1O2N2N2M3N2N2N1O2N2O100000000O0100000000000O010000N2M3N2N1O2N2N2M3N2N2N1O2NPhY5"}, "image_id": 839, "id": 13979}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 236.0, 89.0, 58.0], "area": 2744, "segmentation": {"size": [512, 512], "counts": "a7j0S?4M21O00010O0001O01O0001O01O01O01O0001O01O01O01O0001O01O00010O0001O01O0001O01O01O01O0001O01O01O01O0001O01O00010O0001O01O0001O01OPAUOk>Q1O01O0001O01O002OO01O01O01O00000L5L3L4M4K4L4M3LnWc6"}, "image_id": 839, "id": 13980}, {"iscrowd": 0, "category_id": 1, "bbox": [231.0, 257.0, 68.0, 71.0], "area": 2628, "segmentation": {"size": [512, 512], "counts": "fhc32m?2N1O2N2N2N2M3N2N2N1O2N2N2N2M3N2O01000O1000000O10OO2N2N2M3N2N2M30OfA_NU>b1iA`NW>d11000O10O1000O1O0N3N2N2M2O200O10O1000OQOUBIl=4WBJk=4WBIl=5UBJl=^OnA?91k=]OoA`080S>NoA0R>NPB1R>MPB0S>NoA0S>MPB1Q>NPB0S>NoA0S>MPB1[eZ3"}, "image_id": 839, "id": 13981}, {"iscrowd": 0, "category_id": 1, "bbox": [414.0, 265.0, 85.0, 76.0], "area": 2482, "segmentation": {"size": [512, 512], "counts": "PY_61n?2N2N2N2N2N2N1O2N2N2N2N2N2N1O2M3N2N2N2N1O0002N2N1O2N2000000000000000000O100005K00O10O1N1O11O1O1O100000O10nNTAo0o>O1000000000O1TAPOg>P1WAROi>S100O1N2N1O00000001O2N2N2N2@TAIn>5TAIn>5TAIn>5TAIn>5TAIn>5TAIn>5`0N1O2N^V6"}, "image_id": 839, "id": 13982}, 
{"iscrowd": 0, "category_id": 1, "bbox": [93.0, 266.0, 30.0, 15.0], "area": 337, "segmentation": {"size": [512, 512], "counts": "`h^15e?7O00000001O0001O00000001O0001O00000001O0001O00000001O000IiWR6"}, "image_id": 839, "id": 13983}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 291.0, 65.0, 81.0], "area": 2689, "segmentation": {"size": [512, 512], "counts": "Vjo23l?2N1O2N2N2M3N1O2N2N2M3N2N1O2N2N2M3N2N1O2N2M3N2N1O2N2N2lAZNj=i1TBXNk=j1SBXNk=P2O1000O10O100000O010000000O01000ROVBDk=:WBDk=:VBEk=9XBDk=:WBDk=:WBDi=k@HT?c0]AWOn=i0PBXOQ>h0mAZOS>f0kA\\OS>e0lA]OR>e0kA^OS>c0lA_OR>a0oA_Oo=d0QB\\Om=f0SBZOk=h0TBYOk=h0SBZOm=e0QB^Oo=b0oA@Q>W101000000000O0dNoAj0Q>TOQBl0o=ROSBn0m=POTBQ1l=mNVBR1k=lNWBR1k=kNXBS1i=lNYBQ1j=mNXBQ1\\>01O1000000000O10O1000N2N2O10O0N3N2N2N2NO102N2N1O1N02O1O2N2N2M3N2N1Ih@EZ?8h@GZ?7h@GZ?79N2N2MRed0"}, "image_id": 839, "id": 13986}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 318.0, 16.0, 54.0], "area": 439, "segmentation": {"size": [512, 512], "counts": "n9f1[>K4M3M4L3M3M4L3M3M4L3L4M4L3M3MPfg7"}, "image_id": 839, "id": 13987}, {"iscrowd": 0, "category_id": 1, "bbox": [71.0, 328.0, 61.0, 79.0], "area": 2568, "segmentation": {"size": [512, 512], "counts": "XlS11n?2PONnA5o=OnA3P>OnA3P>0mA2Q>0mA1Q>2lA1R>2kA0S>2kA0S>3jAOS>4jAOT>4iANU>P1N2M2O2N00O010000O01000001N101O1O1N2O1JmA^NT>a16O1O1O1N2O001O1O1N2O1O1O1O1N2O1O001O1O1N3N1O1Ek@JW?5j@IX?5k@GX?6=Kcem5"}, "image_id": 839, "id": 13988}, {"iscrowd": 0, "category_id": 1, "bbox": [6.0, 338.0, 56.0, 83.0], "area": 2639, "segmentation": {"size": [512, 512], "counts": "P\\35h?3M3M4L3M3M4M21O01O01OM4L3M3M4hAkN`=Y1\\BjNa=[1YBjNc=[1XBhNe=k1M4N1001O01O01O01O01O01O01O01O01O01OO2L3M3M4^OVBiNm=T1VBiNm=T1VBiNn=S1a0M3M4L3L4M4L3M3M4L3M3MVeP7"}, "image_id": 839, "id": 13989}, {"iscrowd": 0, "category_id": 1, "bbox": [142.0, 344.0, 69.0, 73.0], "area": 2516, "segmentation": {"size": [512, 512], "counts": "b[W21m?3N1O2N2N2N2N2M3N2N1O20000O100000O1000O1N2N2N2N2M2O2N2N2N2N2N2M21000O100O1N2N2N1N3N2N2N2N2O1000O10O100000QOSBJm=4UBKl=3VBKl=3VBKk=4WBJk=4WBIl=4VBKl=3VBKl=AfA91I?;o=JdAI?;o=JYB4h=KZB3h=KZB2i=KZB3j>O2NaSf4"}, "image_id": 839, "id": 13990}, {"iscrowd": 0, "category_id": 1, "bbox": [354.0, 353.0, 70.0, 79.0], "area": 2686, "segmentation": {"size": [512, 512], "counts": "[\\a52m?2N1WOLeA6Y>LdA7Z>KdA7Z>KdA7Z>KdA6[>LcA6Z>LdA7Z>KdA7Z>KdA7Z>KdA6[>LbA7\\>h0M3N2O10O1N2N1010000000O010000000O10O100000M3O0100000O10O10000eNiAl0V>SOlAm0T>QOmAo0T>nNoAR1Q>lNQBT1o=jNSBU1m=jNUBT1\\>N3N2N2N2N1N3N2N2N2M2O2N2N2M3N2N1O2M[c[1"}, "image_id": 839, "id": 13991}, {"iscrowd": 0, "category_id": 1, "bbox": [447.0, 367.0, 65.0, 53.0], "area": 1996, "segmentation": {"size": [512, 512], "counts": "_lo61n?2M3N1O2N2N2M2O2N2N2M3N1O2N2N2M3N1O2000000O0100O1O001O1O1N101O1O0000000000000O010004LO10000000O1N2O001O1O1O1O1N2O001O1O1O1O1N2O0bD"}, "image_id": 839, "id": 13992}, {"iscrowd": 0, "category_id": 1, "bbox": [90.0, 394.0, 71.0, 74.0], "area": 2809, "segmentation": {"size": [512, 512], "counts": "R]]12m?2N2N2N2N2N2M3N1O2N2N2N20000O1M3N2N1O2N2N2N2N2N2M3N2000O10O1000000000000N2N1O2N2O1O1N2O1O1O1O1O001O1O1O1OoNVBMj=1XBNi=OYB0i=NYB0i=NYB0i=NYB0i=NYB0i=NYB0h=OZBOh=OZBNi=0YBNi=OZBOh=OYB0i=NYB0o=HSB6m>N2N2NoQ_5"}, "image_id": 839, "id": 13993}, {"iscrowd": 0, "category_id": 1, "bbox": [315.0, 399.0, 78.0, 74.0], "area": 2838, "segmentation": {"size": [512, 512], "counts": "Smm42m?2N2N2M3n@F[>=bAE\\>=bAE\\>;bAF]>;`AG`>m000O1O0O2N2N2N2N2N2N0001N3N2N2N1100000000O100000O10O1:F00000000O01000000N2M3N1O2N2M3N2N1O0O101O00000O10001O2M3N2N2N2N2M3N2N2N1N]Rk1"}, "image_id": 839, "id": 13994}, {"iscrowd": 
0, "category_id": 1, "bbox": [437.0, 400.0, 25.0, 24.0], "area": 279, "segmentation": {"size": [512, 512], "counts": "glj62l?3N2N2N1O2N2O100O100000O1000000000OO2N2N2M3N2N2NRch0"}, "image_id": 839, "id": 13995}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 425.0, 52.0, 67.0], "area": 2045, "segmentation": {"size": [512, 512], "counts": "b=_1b>000O001N2N200000OO2N2M3N2N1O0002M3N2N2N2N1O2M10000000O01KaAjN_>V1cAhN]>X1502M3N2N2N1O2M3N2N2N2N1N3N2N2N2N2N2M2O2NXbU7"}, "image_id": 839, "id": 13996}, {"iscrowd": 0, "category_id": 1, "bbox": [482.0, 437.0, 15.0, 15.0], "area": 127, "segmentation": {"size": [512, 512], "counts": "m]a72m?2M3N2N2N1O2N10O2N2N2M3N2N1OWR7"}, "image_id": 839, "id": 13997}, {"iscrowd": 0, "category_id": 1, "bbox": [394.0, 439.0, 73.0, 73.0], "area": 2994, "segmentation": {"size": [512, 512], "counts": "o_U61Z?0YA1f>1XA0g>2WAOh>3UAOj>3TANk>4SAMl>5RAL`>HfA>HK`>JeA=JJ`>KdA=JIa>LcAi0\\>YObAh0]>YObAh0]>>O1N2O1O1O1O1O1O11O001O1O1O1M3N000000O3N2N1O2N1O00000O1000O10000000001O2N2M3N2N2N2N1O2N2N2M3N2N2N2N2N1O2Ej@IX?5j@HY?6i@HY?6:N2M3NeQf0"}, "image_id": 839, "id": 13998}, {"iscrowd": 0, "category_id": 1, "bbox": [251.0, 440.0, 71.0, 72.0], "area": 2725, "segmentation": {"size": [512, 512], "counts": "nnm32m?2M3N2N1O2AEUA>i>DUA>h>EVA=h>EVAFTA=j>DUA>h>?N2N1O2M3N2N2N0O1101000O010000000O0100000N2N1O2M3N2000O010000000iNPB
    CRB=n=ATB>m=@TB`0m=]OVBb0k=\\OWBc0i=\\OYBc0h=[OZBd0g=YO[Bg0f=WO\\Bh0e=VO]Bi0c=VO_Bi0]>O001O1O1M3N1O2N2M3N2N1O2M3N2Na`n2"}, "image_id": 839, "id": 13999}, {"iscrowd": 0, "category_id": 1, "bbox": [35.0, 450.0, 72.0, 62.0], "area": 2664, "segmentation": {"size": [512, 512], "counts": "]oa03l?2N2N2N2M2O2N2N2N2M3N2N1O2N2M3HPO]AR1a>PO]AR1`>9N1O1O1O1N2O1O11O001O0O000O11N3N2N2N1O2O01O001O1N2M3N2N1O2N2M3N2N2O010O1000O1N11N2N1UOWA>k>ZOUA13b0o>\\OSAb0W?N2N2M2O2N2N2N2M2OkPZ6"}, "image_id": 839, "id": 14000}, {"iscrowd": 0, "category_id": 1, "bbox": [468.0, 454.0, 44.0, 48.0], "area": 1281, "segmentation": {"size": [512, 512], "counts": "V_Z71n?1O2M3N2N2N2N1N3N2N2N2N1N3N2N2N2M3N1O2N000O01000002N2N2O1O1O0O2OO1O1O1O2N2M3N2O0100000fA"}, "image_id": 839, "id": 14001}, {"iscrowd": 0, "category_id": 1, "bbox": [225.0, 487.0, 44.0, 25.0], "area": 690, "segmentation": {"size": [512, 512], "counts": "eo`32m?2N2N2M3N1`@EZ?=d@D[?a0N2O1O1O1O11O1O001O1O1O1O1O1OO1O1O1O1001O1O1O1O1O1O001O1O1O1O1O1O1O1N2N1OWPi3"}, "image_id": 839, "id": 14002}, {"iscrowd": 0, "category_id": 1, "bbox": [54.0, 0.0, 9.0, 5.0], "area": 23, "segmentation": {"size": [512, 512], "counts": "PPk01o?001O1O1O1OO1N2OQ`P7"}, "image_id": 840, "id": 14003}, {"iscrowd": 0, "category_id": 1, "bbox": [73.0, 0.0, 3.0, 2.0], "area": 4, "segmentation": {"size": [512, 512], "counts": "P`T11o?1OOQPj6"}, "image_id": 840, "id": 14004}, {"iscrowd": 0, "category_id": 1, "bbox": [221.0, 0.0, 74.0, 63.0], "area": 2388, "segmentation": {"size": [512, 512], "counts": "c`^33l?2N2N1O2N2M3N2DATAa0k>@SAb0l>_ORAb0n>_Oo@d0P?7O1O1O1O1O1O0000N2O11O1O1O001O1O1O1O1O1O1O1O001O1O1O1O1O1O1O1O00\\NfAc1\\>fNdAo0\\>oNeAR1[>lNgAT1Y>iNjAW1V>gNlAY1^>0000O10000N1O2M3N2N2N2N2N2M3N2N1O2N2N2M3N2N2N2Nj^\\3"}, "image_id": 840, "id": 14005}, {"iscrowd": 0, "category_id": 1, "bbox": [297.0, 0.0, 13.0, 7.0], "area": 48, "segmentation": {"size": [512, 512], "counts": "P`d41o?1O1O001O1O1O1OO1O1N2O1OQPU3"}, "image_id": 840, "id": 14006}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 0.0, 38.0, 24.0], "area": 530, "segmentation": {"size": [512, 512], "counts": "VPR62m?2M3N2N2N2O001O1O1O1O1O1O1O1O001O1O1OO1O1O1O1N2O1O1O1O1O1O1O1N2O1O1O1O2N2NooZ1"}, "image_id": 840, "id": 14007}, {"iscrowd": 0, "category_id": 1, "bbox": [422.0, 4.0, 59.0, 84.0], "area": 2333, "segmentation": {"size": [512, 512], "counts": "kQc62m?2N2`@KR?7k@LS?6k@LR?7l@KR?7l@KR?b0O2M3N2N2N1O2M3N2N2N2N0O100E`NWB`1i=bNUB]1l=eNRB[1n=gNoAZ1Q>:O10O100000O010000000O0102N1O2N2M3N2N1O2N2M3_OZAEh>9ZAEh>8[AFf>9\\AEf>9\\ADg>:[ADg>:ZAEh>8b0N1O2N2N\\_?"}, "image_id": 840, "id": 14008}, {"iscrowd": 0, "category_id": 1, "bbox": [311.0, 21.0, 73.0, 98.0], "area": 3243, "segmentation": {"size": [512, 512], "counts": "Wbk43l?1O2M3RAHQ>:mAHQ>:mAGQ>;lAGR>;lAGQ>;nAFQ>;lAGS>:kAHU>8iAJW>o01000N2N2N1N3N2N2N1N01000FoMfBQ2Z=QNcBP2]=RNaBm1_=VN_Bj1a=:000O010G_BTNa=l1aBRN_=m1dBPN\\=Q2eBnM[=R2902M3N2N2N2M3N2N2N1N3N2N2N2M3N2N2N1N3N2N2N2M3N2N2N2M2O2N2N2M3N2N`no1"}, "image_id": 840, "id": 14009}, {"iscrowd": 0, "category_id": 1, "bbox": [121.0, 33.0, 87.0, 72.0], "area": 3064, "segmentation": {"size": [512, 512], "counts": "abl12m?2N2N3N1N3M2O2M2N2O2M2N3M2O1N3M2OO00010O002N3N1N0001O0JUOYAj0g>XOWAi0i>601O2O2M2N3M2O0O1O01O000010O0000010O1O2O0O0010O0000010O000010O0001O01O0002O1N3M2N3J[AmNf>Q17N1N2N3N1N3M2O2M2N2N3N1N3M2JY@1j]h4"}, "image_id": 840, "id": 14010}, {"iscrowd": 0, "category_id": 1, "bbox": [477.0, 46.0, 35.0, 68.0], "area": 1394, "segmentation": {"size": [512, 512], "counts": 
"kb^72m?2N3M2N2N2N2N2N2N2N2BZO_Ah0_>ZO_Ah0Z>VOaA43h0Z>WO`A34h0[>_ObAd0[>b0N2N2N2N1O0000000001O2N2N01O000000000cN"}, "image_id": 840, "id": 14011}, {"iscrowd": 0, "category_id": 1, "bbox": [201.0, 78.0, 77.0, 67.0], "area": 2588, "segmentation": {"size": [512, 512], "counts": "gcT32m?2BNk@4S?Nk@4S?Nk@4R?Nl@5R?Ml@5R?=O2N2N2M3N2N2N1O2N2N2M3N2N2N2N110O10000000N2N2N2N1O2N2M3N2N2N2N1O2N1N1000000000O10O10000000O111O000OO100000002N2M3N2N2N2N2M2O2N2N2N2N2M3N2NUmd3"}, "image_id": 840, "id": 14012}, {"iscrowd": 0, "category_id": 1, "bbox": [338.0, 101.0, 65.0, 71.0], "area": 2432, "segmentation": {"size": [512, 512], "counts": "iTY52m?2M3N2N2N2N2N2\\O@eAa0Z>AdAa0Z>A`ALIf0e>@`AKJg0d>@`Ae0]>]ObAe0\\>]ObAe0\\>a0N2N1O2M3N1OO100000O10000000O101O2N2O0100000M2OO12N2N2N2N2N2M2O2N2N2N20000O1M3N1O2Ij@AX?@jAHNj0V>@jAHNj0U>@kAj0S>c0N1O2N2N2M2O0000O10002N2N2M3N20000O1N00000O3N2N2N2N2N2M3hN[AR1j>O1OO10O1000000000O0100000000O012000O10N2M3N2N2N2N2M3N1O2N2M3N2N2N2Nl[o2"}, "image_id": 840, "id": 14016}, {"iscrowd": 0, "category_id": 1, "bbox": [507.0, 119.0, 5.0, 10.0], "area": 30, "segmentation": {"size": [512, 512], "counts": "kcm72m?2N2N2N2XL"}, "image_id": 840, "id": 14017}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 126.0, 8.0, 31.0], "area": 248, "segmentation": {"size": [512, 512], "counts": "n3o0Q?0000000000000Rlk7"}, "image_id": 840, "id": 14018}, {"iscrowd": 0, "category_id": 1, "bbox": [453.0, 132.0, 51.0, 65.0], "area": 1789, "segmentation": {"size": [512, 512], "counts": "cdR71o?1N3M2N3N1N3M2O2M2N3N1N3M2O4K3M2N3N1N3M2O2O010O010O0N3N1N3M2O2M1O1O01O00003M2UOjAIY>NhA^O2b0W>NjA^O0b0Y>MQB0Q>OPBOS>NPBOR>OPBOR>0oANT>OoAOR>OPBOS>OoANS>0Xl3"}, "image_id": 840, "id": 14019}, {"iscrowd": 0, "category_id": 1, "bbox": [387.0, 158.0, 82.0, 85.0], "area": 3374, "segmentation": {"size": [512, 512], "counts": "deQ63l?2N3L3N3M2N3M2M4M2N3M2cAROf=P1XBQOf=Q1XBROe=Q1XBQOg=P1XBROe=Q1XBQOg=P1WBSOf=P1XBQOf=Q1XBROf=e1N3M2O2M2N3N10O1O2M2N3N2M2N3mMVBl1k=SNWBj1Q>N3N1N3M2O2M2N3NO01O01O01O01O01O2O1N3M2O2M2N3N1N2N010O00010O00010O00100O1O2O0O1O101N2N3N1N3JnYe0"}, "image_id": 840, "id": 14020}, {"iscrowd": 0, "category_id": 1, "bbox": [271.0, 165.0, 74.0, 89.0], "area": 3203, "segmentation": {"size": [512, 512], "counts": "VgW42m?3M2O1N2N2N2N2N3M2N2O1N2N2SOVOaBl0]=VOaBm0\\=UObBm0\\=UObBm0\\=VOaBl0]=VObBk0]=VOaBk0^=WO`Bi0`=YO^Bg0b=[O\\Be0d=^OYBb0g=@WBa0h=AWB>i=l000001O0001O0000000000000010O00001O2N2N3M2O1N2N2N2N2N3N10000bNaAY1d>N2O1N2N3M2N2WOPA?R?_OPA?R?_OPA?Z?N2N3M2O1N2N3M^Yc2"}, "image_id": 840, "id": 14021}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 181.0, 2.0, 4.0], "area": 6, "segmentation": {"size": [512, 512], "counts": "e54m?N[jn7"}, "image_id": 840, "id": 14022}, {"iscrowd": 0, "category_id": 1, "bbox": [109.0, 182.0, 88.0, 65.0], "area": 2773, "segmentation": {"size": [512, 512], "counts": "^ff13k?NV@4g?4O2M3N2M1O10Oe@BU?`0j@AU?>k@ER?d0010O010O0030O10000N3M2M3N2M3M3NO0010O010O00010O010O01O01O0102M3N1N3M010HXAWOh>i0[AUOd>k0^ASOc>m07010O010O00010O010O010O00010O3N1N010O0002O1N3N2M3N1N3M3N2M2O2M3N2M2N3NaYm4"}, "image_id": 840, "id": 14023}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 191.0, 30.0, 72.0], "area": 1176, "segmentation": {"size": [512, 512], "counts": "o5X2j=M2N2N2N3M2O1N2O2_NgAY1X>eNjA[1V>dNkA[1V>cNlA[1_>M2N2N2N3M2O1N2N3M2N2N2_Of@9]?De@;b?M2N2N2N`h`7"}, "image_id": 840, "id": 14024}, {"iscrowd": 0, "category_id": 1, "bbox": [223.0, 214.0, 17.0, 20.0], "area": 153, "segmentation": {"size": [512, 512], "counts": "jf_31n?2O2M3N2M3M2O11M30O10O1N2M3M2O2MkhW4"}, "image_id": 840, "id": 14025}, {"iscrowd": 0, 
"category_id": 1, "bbox": [331.0, 219.0, 66.0, 61.0], "area": 2185, "segmentation": {"size": [512, 512], "counts": "YgU53m?2M2N3N1N3M2N3N1N3M3o@WOg>j0XAXOe>k0XAWOf>k0YAWOe>S1O110O010O010N1O2M2N2O0O00010O00010O0000010O00010OKdAgN\\>Y1gAdNZ>[18M2O2N10010O010M2N3N1N3M2O1N3M2O2M2N3M2O2M2N2O2MSXi1"}, "image_id": 840, "id": 14026}, {"iscrowd": 0, "category_id": 1, "bbox": [136.0, 230.0, 84.0, 83.0], "area": 3088, "segmentation": {"size": [512, 512], "counts": "]XT22l?2O2N2N2N2N2M3N1O2N2N2N2O100000OWAWO[>j0cAXO\\>i0bAYO\\>i0bAYO\\>h0cAZO[>e0fA\\OY>d0fA_OX>b0gA@V>b0iA@U>b0iA@U>W1N2N101000000O10N2N2N000000000O01000DkAnNU>R1mAlNS>T1oAjNQ>V1QBgNo=Z1RBeNn=]1RBaNn=a170002N2N2N1N10000000O10O102N2N1O2N2M3N2N2N2N1O2N2M3N2N2N2N1O2M3N2NZha4"}, "image_id": 840, "id": 14027}, {"iscrowd": 0, "category_id": 1, "bbox": [286.0, 254.0, 69.0, 58.0], "area": 2219, "segmentation": {"size": [512, 512], "counts": "aX_43l?2O2M2N3N1N3M2O2M2N3i@XOR?l0N3M2O2M2N3N1N3M2O2M2N3NO002O2M1O10O00010O00010O000102M2N3N101O001M2N100O00010O00011N2N3N1N3M2O2M2N3N1N3M2O2M2N3N1NTW^2"}, "image_id": 840, "id": 14028}, {"iscrowd": 0, "category_id": 1, "bbox": [180.0, 273.0, 58.0, 76.0], "area": 2307, "segmentation": {"size": [512, 512], "counts": "ZZj21o?2M2ZOM\\A6b>L\\A6a>M\\A6a>M]A4b>M\\A6a>M]A5`>N]A5a>M\\A6a>e0O10O00010O0010O0010OHdNmA\\1T>eNjA\\1U>gNiAX1W>810O01O0LgAcNX>]1jAaNW>^151O01O010O01O01O010O00010O0010O01O3N1N3SOZA;i>CXABZAON0k>NZA0L1k>M[A0M0k>NZAOM1k>NfA0[UY4"}, "image_id": 840, "id": 14029}, {"iscrowd": 0, "category_id": 1, "bbox": [225.0, 295.0, 74.0, 64.0], "area": 2488, "segmentation": {"size": [512, 512], "counts": "bj`31n?3N1N3M2O2M2N3N2M2N3N1N3N1N1[OSOVBm0j=VOTBj0l=WORBi0n=ZOoAf0Q>\\OnAd0R>^OkAb0U>AhA?X>CgA=Y>EdA;\\>e00O000100O2OO3N1N3NO01O010O00010O00010O00101N3N1N20N01O01O01O01O01O01O02N2O2L3N3M3N1N3M2Ad@8^?Fc@8_?Fd@7d?O2M2OoUZ3"}, "image_id": 840, "id": 14030}, {"iscrowd": 0, "category_id": 1, "bbox": [449.0, 340.0, 58.0, 78.0], "area": 2334, "segmentation": {"size": [512, 512], "counts": "VkP71n?3N1N3M2O2M2N3M2O2M2N3N1N3M2N3N1N3M2O2M2N3M2O2eA^NS>d1kA^NS>j10O01O010O010O001N1N3N1N1O01O00101nNTBMo=1RBMP>1SBMo=0SBNo=1RBMP>1SBLP>1RBNo=0TBMo=1RBMP>1RBNo=0TBMo=1RBMP>1SBLP>1RBNo=0SBNo=1_U2"}, "image_id": 840, "id": 14031}, {"iscrowd": 0, "category_id": 1, "bbox": [13.0, 346.0, 72.0, 82.0], "area": 2875, "segmentation": {"size": [512, 512], "counts": "fk61n?2N2N2M3N2N1N3N2N2M3N2N1N3N2O10000N2M2O2N2M3N2N2M2O2O1O10000000O100000O100iA\\NQ>c1nA_NR>a1kAbNU>e1000000O010000000O1N2M21000000O0O2ROoAJS>4oAJS>4oAJS>4nAJU>3nAKT>3nAKT>3nAKT>2oALS>2oALR>3PBJS>4oAJS>3PBKR>3PBKR>3o0NVTe6"}, "image_id": 840, "id": 14032}, {"iscrowd": 0, "category_id": 1, "bbox": [386.0, 347.0, 67.0, 89.0], "area": 3122, "segmentation": {"size": [512, 512], "counts": "j[Q61o?1N3N1N2N3N1N3M2O2M2N3N1N3M2O2M2N3N1HkNbAX1[>kNbAW1]>jNbAX1[>8N3N1N2N3N1N3M2O2M2N3N1N3M2O2M2N3N10N2N3N1N3M2O2M2N3N1N3M2O2M2oNmA2V>KmA3T>LmA2V>EgAE6c0U>GgAC6e0U>ESB8o=FSB8P>FRB8o=FSB8o=FTB7o=GRB8m>M2N3NPSm0"}, "image_id": 840, "id": 14033}, {"iscrowd": 0, "category_id": 1, "bbox": [336.0, 387.0, 68.0, 89.0], "area": 2865, "segmentation": {"size": [512, 512], "counts": "U]X53m?1N2N3N1N3M2O2M2N3N1N3M2O2M2N0010O000101IPO[AS1b>oN\\AS1b>8N1N3I6L5N1N3M2O1N3M2O2N1O2N101N1O2N1O20O0SNgBQ1[=lNgBS1Z=kNiBR1Z=lNgBR1[=lNhBFFU1d=ROhBHET1e=SOhBFFT1e=SOnBl0S=ROPCk0R=TOoBj0T=SOoBk0R=SOPCk0U>M2O1N3M2O2M2N3M2O2M2N3Neae1"}, "image_id": 840, "id": 14034}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 390.0, 44.0, 83.0], "area": 2000, "segmentation": {"size": [512, 512], "counts": 
"V00000O10000000O100000O10000000YNkAb1U>\\NmAd1X>O100000O1000000000`NeAZ1[>cNhA\\1_>00N2M2O2N2N2N2_ORALQ?1RAMP?1RAMP?1RAMP?1RALQ?1RAMo>2a0NZbY7"}, "image_id": 840, "id": 14035}, {"iscrowd": 0, "category_id": 1, "bbox": [485.0, 406.0, 27.0, 54.0], "area": 777, "segmentation": {"size": [512, 512], "counts": "Xmb71n?3N1N3M2O1N3M2O2M2N3M2O2M2N3N1N2N3N1N3M2N3N1N3M2OO00[C"}, "image_id": 840, "id": 14036}, {"iscrowd": 0, "category_id": 1, "bbox": [70.0, 409.0, 70.0, 84.0], "area": 3195, "segmentation": {"size": [512, 512], "counts": "[^S12n?1k@OX>4ZAKO3e>4[AJN5d>4[AJO3e>4ZAKO2f>6XAKO0h>l01O01O00101N2_OgNXB[1g=fNWBZ1i=iNUBV1k=lNSBT1n=nNoAS1P>oNoAP1Q>SOlAm0T>`00O00010O00010O00010O00010O00010O000010O000010O01O2OO00010O002O1N3M2O2M2N3N1N3VO[A4g>K[A3f>K\\A3g>J\\A3f>L[A3g>J\\A3f>K\\A3g>K[A3f>K\\A3Z?Nkai5"}, "image_id": 840, "id": 14037}, {"iscrowd": 0, "category_id": 1, "bbox": [276.0, 435.0, 82.0, 77.0], "area": 3334, "segmentation": {"size": [512, 512], "counts": "\\^Z46k0Ko=;nAHP>:mAHQ>:nAHo=9PBIGHl=?\\BLEFi=0RB?`0LCFj=d0bBIBCl=d0aBKABm=e0`B0_=1aBO_=1`B0_=0aB1^=OcB1]=0aB1^=2_BO`=3^BMc=5[BJe=8YBIf=X11O011N3M2N3N2M2N3N1N3M1O01O01LbAeN^>\\1200010O0001O01O01O01O01O01O00010O00010O01O3N1N3M3N1N3M2N3N2M1O010O00010O2N3N1N3M2O2M2N3N1N3Mca\\2"}, "image_id": 840, "id": 14038}, {"iscrowd": 0, "category_id": 1, "bbox": [105.0, 458.0, 75.0, 54.0], "area": 2367, "segmentation": {"size": [512, 512], "counts": "hod11n?3V@Mb?5\\@Ld?5[@Kd?9O100O1O1O100O1O100O1O1O1M]Oi@c0V?4O100O1O100O1EUO`Al0_>VO_Ak0`>XO^Ah0b>YO\\Ah0c>;O100O1O1O100O1O1O100O1O100O1O1O12N1O2N1O0000O1O11O1O2N1O1O2N1O2N1O2M2UOSAa0n>^OTA?o>^OSA`0o>^OSA`0Y?N1N3M2N3N1N3M\\`U5"}, "image_id": 840, "id": 14039}, {"iscrowd": 0, "category_id": 1, "bbox": [442.0, 459.0, 70.0, 53.0], "area": 2320, "segmentation": {"size": [512, 512], "counts": "k^m62n?1N3M2O2M2N3N1N3N1N2N3N1N3M2O2M2N3O010O0010O010O0OO100O1O100O1O100O1O1O100O1O100O1O100O1O100O1O11MeA_N]>^14N3N1N2N3N101O0N3N1N3M2O2M2N3Je@C\\?;7N1N3Mg@"}, "image_id": 840, "id": 14040}, {"iscrowd": 0, "category_id": 1, "bbox": [393.0, 486.0, 45.0, 26.0], "area": 650, "segmentation": {"size": [512, 512], "counts": "doT62m?2[@M[?5b@L]?6b@J]?9`@H`?c?N2M3N1O2N2NcPl7"}, "image_id": 840, "id": 14042}, {"iscrowd": 0, "category_id": 1, "bbox": [28.0, 500.0, 31.0, 12.0], "area": 195, "segmentation": {"size": [512, 512], "counts": "o_>1n?1O1O1O1O1N2O11O1O1O1OO1O1O1O1O1O1O11O1O1O1O1O1O1O1O1O1O001OQPR7"}, "image_id": 840, "id": 14043}, {"iscrowd": 0, "category_id": 1, "bbox": [318.0, 507.0, 17.0, 5.0], "area": 49, "segmentation": {"size": [512, 512], "counts": "o_o41o?000O1000000O10000O10000O100003MRPh2"}, "image_id": 840, "id": 14044}, {"iscrowd": 0, "category_id": 1, "bbox": [18.0, 508.0, 8.0, 4.0], "area": 17, "segmentation": {"size": [512, 512], "counts": "o_91n?1O1O11O1O1O00Q`b7"}, "image_id": 840, "id": 14045}, {"iscrowd": 0, "category_id": 1, "bbox": [374.0, 509.0, 8.0, 3.0], "area": 13, "segmentation": {"size": [512, 512], "counts": "o_k51o?00000O10000O1S`P2"}, "image_id": 840, "id": 14046}, {"iscrowd": 0, "category_id": 1, "bbox": [195.0, 511.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "ooQ31PPn4"}, "image_id": 840, "id": 14047}, {"iscrowd": 0, "category_id": 1, "bbox": [445.0, 511.0, 2.0, 1.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "oon61o?0QPP1"}, "image_id": 840, "id": 14048}, {"iscrowd": 0, "category_id": 1, "bbox": [241.0, 0.0, 49.0, 48.0], "area": 1401, "segmentation": {"size": [512, 512], "counts": 
"n`h32k?4M2M4M2M3N3e@]OU?i0L300010O010O010ON2N3L3N3L3O101O001O00001O001ON2N2M3N2M3N2N2M3N2M3N2M3N2N2M3N2M3N2MSP_3"}, "image_id": 841, "id": 14049}, {"iscrowd": 0, "category_id": 1, "bbox": [364.0, 5.0, 116.0, 86.0], "area": 5182, "segmentation": {"size": [512, 512], "counts": "UQf53k?3L3N3M2M3N3UA^OS>e0kA^OR>e0jA^OS>e0kA]OS>e0jA_OR>e0kA]OS>e0jA_OR>Y100010O010O00010O010O00010O010O000N3L3N3L3O101O001M2M310O0010O0010O0010O0010O00lAhNa=X1\\BkNc=U1ZBnNf=S1WBPOi=o0TBTOl=l0RBVOo=j0nAYOQ>[1010O0010O0010O0010O010O00010O010OM3N3L3N3L30010O001M2N201O010O01O01O010O01O01O010O01ON3M2M4M2M3N3L3N3L3N2M4M2N3Len?"}, "image_id": 841, "id": 14050}, {"iscrowd": 0, "category_id": 1, "bbox": [272.0, 6.0, 72.0, 80.0], "area": 2386, "segmentation": {"size": [512, 512], "counts": "XRX41l?4M2M4M2M3N3L301O01O01O010O0010OO1N3L3N3L3N3L3N2M4M2M4M2M4M2M3N2M01O010O003N3L3M4M2M3M4M2M4L3N2N30O01O01O01O010O0N3L3N2N3L3N3L3N2010OH\\@4h?1OO2M2M\\oc2"}, "image_id": 841, "id": 14051}, {"iscrowd": 0, "category_id": 1, "bbox": [123.0, 40.0, 157.0, 119.0], "area": 9981, "segmentation": {"size": [512, 512], "counts": "Ucm12k?3N3L3N2M4M2M4M2j@ZOP?l0M3N3L3N3L3N3M2M4M2M3N3L3N3L3N3M20010O0010O010O0010O0010O010O010O00010O010O01O01O010O010O01O01O010O010O01O010O01O010O01O010O01O010O01O010O01O010O01O010O010ON3N10010O010O010O00010O010O010O01O01O010O010O01O01O010O010O01O01O010O010O01O010O01O010M2N2M4M2N3L3N3M2M3N3M2M4M2N3M2M3N3M2M4M2N3L3N2N3L3N3MRmc3"}, "image_id": 841, "id": 14052}, {"iscrowd": 0, "category_id": 1, "bbox": [478.0, 65.0, 34.0, 34.0], "area": 740, "segmentation": {"size": [512, 512], "counts": "cR_73j?3N3L3N2M4M2M4O01O010O01O01O010O010O00010O010O01O01O010O01N1N2M4M2nM"}, "image_id": 841, "id": 14053}, {"iscrowd": 0, "category_id": 1, "bbox": [42.0, 101.0, 43.0, 36.0], "area": 808, "segmentation": {"size": [512, 512], "counts": "mSe02l?2M4L3N3L3N2M4O001L3N2O20O00010O010O000XOPAa0P?\\ORAd0U?10O0010O0001L3N3L3O1010O0010O0010ON2N3L3NZ\\e6"}, "image_id": 841, "id": 14054}, {"iscrowd": 0, "category_id": 1, "bbox": [284.0, 102.0, 197.0, 100.0], "area": 8508, "segmentation": {"size": [512, 512], "counts": "VT^43j?3N3L3N2M4TA@T>b0jAAR>c0jA@T>b0jAAS>a0jABS>b0jA@T>b0jAAR>X1N30O00010O010O00010O010O00010O0O2L3N2M4M2M4M2N210O010O00010O010O00010O010O0010jAjN`=V1^BlNb=U1ZBoNf=P1XBROh=n0UBUOl=k0QBXOn=h0oA[OQ>[10O0010O0010O010O00010O010O00010O001L3N2M4M2M4M2N201O010O01O01O010O010O00010O010O00010O010O00010O010O00010O010O000kNZAQ1k>O01O01O010O010O01O01ON3L3N3L3N2N30O010O00010O010O0010O0010O0010O0010O0010O0010O0010O0010O001N1M3N3L3N3L3N2N3O0010O00010O010O00010O010O01O01O01N1M4M2M3N3L3N3L3N2MnZ?"}, "image_id": 841, "id": 14055}, {"iscrowd": 0, "category_id": 1, "bbox": [510.0, 117.0, 2.0, 5.0], "area": 6, "segmentation": {"size": [512, 512], "counts": "hSo71l?4ZL"}, "image_id": 841, "id": 14056}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 126.0, 41.0, 54.0], "area": 1256, "segmentation": {"size": [512, 512], "counts": "^4V1j>0O00010O0001O01O01O01O01O01O01O00010O00010O00010O1O3N1N3M2N3N1N3\\Of@=\\?Ag@=_?N3M2O2M2N2NZ[[7"}, "image_id": 841, "id": 14057}, {"iscrowd": 0, "category_id": 1, "bbox": [112.0, 142.0, 51.0, 75.0], "area": 2293, "segmentation": {"size": [512, 512], "counts": "]Vh11m?2M4nNJVB8^=IlA1c0:]=IlA0e09]=IlA1c09^=4`BO]=4_BO^=4`BN`=2]B2c=NYB5g=KWB8g=Q10010O0010O0010O0010O00010O010N1M3N3L3N3L3N2M4L3N3L3N2M4M2M4M2M3N3L3M4M2M3NV[^5"}, "image_id": 841, "id": 14058}, {"iscrowd": 0, "category_id": 1, "bbox": [16.0, 171.0, 58.0, 58.0], "area": 1729, "segmentation": {"size": [512, 512], "counts": 
"_V81m?3N1O2N2N2N2M3N1O2N2N2M3N2N2N1O2N2M3N2N2N2N1O2M3N2NO100000O12N1N3N2N2N2N20O1000O100N2M3N1O2N2N2M3N2]Oh@;Z?Ch@;Y?Di@:`?N2M2O2N2Niij6"}, "image_id": 841, "id": 14059}, {"iscrowd": 0, "category_id": 1, "bbox": [277.0, 171.0, 23.0, 18.0], "area": 240, "segmentation": {"size": [512, 512], "counts": "ceZ43k?2M4M2O1010O01O01O010O010O01O01O010O0N3M2N2MajY3"}, "image_id": 841, "id": 14060}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 191.0, 59.0, 76.0], "area": 2124, "segmentation": {"size": [512, 512], "counts": "ngm22l?3M2FJi@8T?Ki@7U?Ki@8U?Ji@8T?;N3N1N3M3N1N3M2O2M3M2O2M2N2O0O00010O00010O3M10O00011N3M2O2M3M2O2M2N3N2M2N3N1N3M3N1N3M2O2M3M2O2M2N3N2M2NdiT4"}, "image_id": 841, "id": 14061}, {"iscrowd": 0, "category_id": 1, "bbox": [80.0, 195.0, 28.0, 29.0], "area": 452, "segmentation": {"size": [512, 512], "counts": "cVX11n?2M3N2N1N3N2N2N1N3N2N2N101N2O10O1N2M3N1O2N2M3N2N1O2G[@3k?NaiY6"}, "image_id": 841, "id": 14062}, {"iscrowd": 0, "category_id": 1, "bbox": [145.0, 195.0, 29.0, 30.0], "area": 535, "segmentation": {"size": [512, 512], "counts": "ifX21m?2M4M2M4M2M3N3L3N201O010O01O01O010O01O000M4M2N3L3N3L3N2MiiX5"}, "image_id": 841, "id": 14063}, {"iscrowd": 0, "category_id": 1, "bbox": [396.0, 196.0, 27.0, 31.0], "area": 509, "segmentation": {"size": [512, 512], "counts": "jVV61l?3M4L3N2M4L3M3N30O00010O00010O010O00010O000^Om@8S?DPA:\\?M4L3M^Y\\1"}, "image_id": 841, "id": 14064}, {"iscrowd": 0, "category_id": 1, "bbox": [351.0, 197.0, 38.0, 27.0], "area": 572, "segmentation": {"size": [512, 512], "counts": "af_53k?3L3O101O010O01O01O01O01N1N3L3M310O0010O0010O0010O0010O0010O001L3N2M4M2M3NeYm1"}, "image_id": 841, "id": 14065}, {"iscrowd": 0, "category_id": 1, "bbox": [241.0, 206.0, 69.0, 85.0], "area": 2795, "segmentation": {"size": [512, 512], "counts": "ahh32l?3L3N3M2N2M4M2N3M2M4M2O110O010O0010O010O0010O0BQOkAP1Q>TOnAl0P>VOnAm0o=UOnAn0o=VOnAl0P>VOnAm0o=c0N3L3N3M2N3L3N2NO3N2N3M2M4M2N2N3L3N3M2M4M2N3M2M3N3M2N3L3N3M2M4M2N2N3L3N3M2N3LXiT3"}, "image_id": 841, "id": 14066}, {"iscrowd": 0, "category_id": 1, "bbox": [508.0, 206.0, 4.0, 9.0], "area": 20, "segmentation": {"size": [512, 512], "counts": "eVn71m?2M4M2bI"}, "image_id": 841, "id": 14067}, {"iscrowd": 0, "category_id": 1, "bbox": [26.0, 227.0, 43.0, 36.0], "area": 863, "segmentation": {"size": [512, 512], "counts": "fW=3j?3N2M4M2M4M2N3M20010O010O00010O010O010O00010O010O0010O0010O010O0010M2M4M2N2M4M2N3L3NaXm6"}, "image_id": 841, "id": 14068}, {"iscrowd": 0, "category_id": 1, "bbox": [85.0, 228.0, 49.0, 53.0], "area": 1540, "segmentation": {"size": [512, 512], "counts": "WhZ13j?3N2_OJUA:g>IWA9g>JUA:g>IWA9i>HTA:l>>0O01O0M4M2O110O010O01O01O010O010O00010O010O01O01O01QO\\Aa0c>]O`Ab0a>ZObAf0^>XOdAi0[>TOiAk0f>O2M2M3N3L3N3M2M4M2MRhl5"}, "image_id": 841, "id": 14069}, {"iscrowd": 0, "category_id": 1, "bbox": [407.0, 233.0, 22.0, 18.0], "area": 224, "segmentation": {"size": [512, 512], "counts": "ag[63k?2M4M2O110O010O0010O0010O010O010O00O2M2N3LbXY1"}, "image_id": 841, "id": 14070}, {"iscrowd": 0, "category_id": 1, "bbox": [339.0, 238.0, 53.0, 69.0], "area": 2199, "segmentation": {"size": [512, 512], "counts": "UhY51m?2N3M2SAIn=;oAGo=;oAGo=9kAJT>6iAMW>o00O010O010O010O010O010O010O0010O01N1010O0010O010O001[OfACY>;iAEX>8kAHT>6nAIT>4oAIS>5oAIT>3oAKS>3PBJS>3oAJT>4oAIT>4nAJT>4n0MPhk1"}, "image_id": 841, "id": 14071}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 260.0, 33.0, 62.0], "area": 1217, "segmentation": {"size": [512, 512], "counts": "T8f1[>O010O00010O010O00010O0oNfA=Z>_OjA`0V>^OlAb0T>[OoAf0Q>WORBh0n=UOUBl0k=QOXBn0]>0010O0N3M2N2M4M2M4M2M3N3L3NlV_7"}, 
"image_id": 841, "id": 14072}, {"iscrowd": 0, "category_id": 1, "bbox": [410.0, 292.0, 102.0, 54.0], "area": 3960, "segmentation": {"size": [512, 512], "counts": "TZ]67c?7H7J6I7J60000010O000000000010O000000000010O0000000000010O00000000010O0000000000010O000000000010O000000000010O0000000000010O00000000010O0000000000010O000000000010L3N200001O01O00000001O0001O00L4I7J7H7I7JhF"}, "image_id": 841, "id": 14073}, {"iscrowd": 0, "category_id": 1, "bbox": [90.0, 295.0, 25.0, 25.0], "area": 515, "segmentation": {"size": [512, 512], "counts": "eY]19_?8J60000001O0001O0000000001O0001O0000000001L3H8HnVV6"}, "image_id": 841, "id": 14074}, {"iscrowd": 0, "category_id": 1, "bbox": [38.0, 304.0, 74.0, 44.0], "area": 1903, "segmentation": {"size": [512, 512], "counts": "oYc05i?200010O0000000000]@2T?=N2N200001O0001O0000000K5J601O000000000001O01O000000000001O01O000000WOWA;i>]O_Ac0P?O01O0000000001O0001O0000000001O0001O0000000001O01OK5I7HTfW6"}, "image_id": 841, "id": 14075}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 333.0, 12.0, 32.0], "area": 220, "segmentation": {"size": [512, 512], "counts": "]:P1P?N3M2O2M2N3M3M2C`@8f?M2O2M\\ei7"}, "image_id": 841, "id": 14076}, {"iscrowd": 0, "category_id": 1, "bbox": [239.0, 335.0, 83.0, 54.0], "area": 2408, "segmentation": {"size": [512, 512], "counts": "Ukg33k?2M4M2N2N3M2M4M2N3M2010O0010O0010O010O010O0010O0010OO2M201O010O00010O010O010O010O01O01O010O010O010O01O01O010O010O0N3M2M4N11O010O010O0N3M2N2M4M2N3M2M4M2N3M2N3L3N2N3MQen2"}, "image_id": 841, "id": 14077}, {"iscrowd": 0, "category_id": 1, "bbox": [359.0, 353.0, 99.0, 41.0], "area": 2428, "segmentation": {"size": [512, 512], "counts": "`kc56f?4K5K5O20O000000010O000001O01O00j@\\OP?k0O01O000001O01O0001O0001N1L400010O000000010O000001O01O0000010O000000010O0000010O000000010f@[OW?g001O0001O01O0001O0001O000L4K5L5OM3000001O01O0000010O000001O0K5L4K5L5J5K5Lmdj0"}, "image_id": 841, "id": 14078}, {"iscrowd": 0, "category_id": 1, "bbox": [23.0, 363.0, 88.0, 44.0], "area": 2670, "segmentation": {"size": [512, 512], "counts": "nk;9g?:F6J000O10000000O1000000000O10000000O100000000000O1E@VA`0j>;000O100000000000O10000000O1000000000O10000000O100000000000O10000000O1000000000O10000000O1000000_O[AGe>8b00O1000000000000009F]TX6"}, "image_id": 841, "id": 14079}, {"iscrowd": 0, "category_id": 1, "bbox": [203.0, 382.0, 43.0, 48.0], "area": 1182, "segmentation": {"size": [512, 512], "counts": "olU33k?2M4M2N3M2N3M2N3M2N3L3N3M2N2N3N1010O010O010O010O010O0N3M2N2M4M2N3M2N3M2N3M2N3L3N3M2NhcT4"}, "image_id": 841, "id": 14080}, {"iscrowd": 0, "category_id": 1, "bbox": [245.0, 387.0, 47.0, 43.0], "area": 1104, "segmentation": {"size": [512, 512], "counts": "jlj32l?3M2N3M2N3M2N3M2N3M2N3N110O010O010O010O010O010O010O010O010O010O010O10N1N3M2N3M2N3M2N3M2N3M2N^c]3"}, "image_id": 841, "id": 14081}, {"iscrowd": 0, "category_id": 1, "bbox": [298.0, 389.0, 30.0, 25.0], "area": 369, "segmentation": {"size": [512, 512], "counts": "c\\e41m?3M2N3M2O20O010O01N1M4M2O20O010O010@h@6X?Hj@6Y?Gj@8_?10O010O010O0O2M2N\\ck2"}, "image_id": 841, "id": 14082}, {"iscrowd": 0, "category_id": 1, "bbox": [394.0, 402.0, 44.0, 54.0], "area": 1503, "segmentation": {"size": [512, 512], "counts": "\\]U67f?6L2N2N1O2N1i@_Oo>a0n@BQ?g001M2M3L5L3N200010O00010O0001O0O1M4N100010O000M3M4fN_Ao0e>nN^Ao0k>M4L3M3M4K4M3M4L3MVcT1"}, "image_id": 841, "id": 14083}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 414.0, 19.0, 13.0], "area": 216, "segmentation": {"size": [512, 512], "counts": "o<NWANm>OVANm>NXANl>NWANZQZ6"}, "image_id": 841, "id": 14086}, {"iscrowd": 0, "category_id": 1, "bbox": [201.0, 
423.0, 10.0, 12.0], "area": 103, "segmentation": {"size": [512, 512], "counts": "`mT32e?90001O0000000000KnRf4"}, "image_id": 841, "id": 14087}, {"iscrowd": 0, "category_id": 1, "bbox": [283.0, 432.0, 32.0, 30.0], "area": 500, "segmentation": {"size": [512, 512], "counts": "Sn]41m?3N1N3M2N3M2N3M2010O010O0100O001N1N3M0002N2O2M3M2N3M2N3M2O2M2N]RR3"}, "image_id": 841, "id": 14088}, {"iscrowd": 0, "category_id": 1, "bbox": [315.0, 447.0, 21.0, 24.0], "area": 273, "segmentation": {"size": [512, 512], "counts": "_nm43k?2N3M2O2M2N3M2N3N1010N1N3M2N3N1N3M2N3M2Nnag2"}, "image_id": 841, "id": 14089}, {"iscrowd": 0, "category_id": 1, "bbox": [438.0, 447.0, 31.0, 32.0], "area": 580, "segmentation": {"size": [512, 512], "counts": "f^k61m?2M4M2M4M2N2M4M2M4N11O010O010O01O01O010O000M4M2N3L3N3L3N2N3LkQe0"}, "image_id": 841, "id": 14090}, {"iscrowd": 0, "category_id": 1, "bbox": [229.0, 449.0, 55.0, 52.0], "area": 1580, "segmentation": {"size": [512, 512], "counts": "nnb33k?2N3L3N3M2N3L3N2N3O0010On@WOm>n01O010O01O010O010O010O0O1N3L3N3M2N3M2O20O0010OO2M2N3M2M3N3M2N3L3N3M2N3M2M4M2N2N3L3Ngaa3"}, "image_id": 841, "id": 14091}, {"iscrowd": 0, "category_id": 1, "bbox": [356.0, 452.0, 24.0, 27.0], "area": 382, "segmentation": {"size": [512, 512], "counts": "e^b53k?3M2M4M2M4M2M3010O010O00010O010M2M3N3M2M4M2M4MgaQ2"}, "image_id": 841, "id": 14092}, {"iscrowd": 0, "category_id": 1, "bbox": [50.0, 464.0, 88.0, 48.0], "area": 2465, "segmentation": {"size": [512, 512], "counts": "b_i02m?3N2M4M2M3N1O0O100O100O10000O100O100O10000O100O100O10000O100O100O1002N3M2ND]O]Aa0c>A\\A>d>DZAHXA8h>JUA7j>`000O10000O100O100O10000O100O1000O0100O010O0100O0100O010O0100O4M2N2M4M2M3N3L3N2N3L3N2M4M2M``j5"}, "image_id": 841, "id": 14093}, {"iscrowd": 0, "category_id": 1, "bbox": [317.0, 474.0, 50.0, 38.0], "area": 1245, "segmentation": {"size": [512, 512], "counts": "oon41l?3N2N2N2M3N2N2M3N2N2M3N2N2N2M3N2001O001O001O00001O001O001O001O00001O001O001O00001M2N3L3N3M2N2M4M2NjPX2"}, "image_id": 841, "id": 14094}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 479.0, 62.0, 33.0], "area": 1273, "segmentation": {"size": [512, 512], "counts": "\\_R63k?3L3N3L3N2O2O0010O0010O001l@^Oi>a0UAAk>`0QACP?]>DaA>]>DaA=^>F`A;^>GaA9_>H`A8_>J`A6_>M]A5b>e0O1O1O01O00000001O2WO"}, "image_id": 842, "id": 14098}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 43.0, 46.0, 82.0], "area": 2033, "segmentation": {"size": [512, 512], "counts": "[1b2_=N2N2O2M2N2N002N2O1O2O01O0001M2N2O1WNlAc1V>[NlAc1Z>N2N2O2M2N2N2N3M2O1N2N3M1O101N2N2N3M2O1N2N2N3M2N2O1NQmX7"}, "image_id": 842, "id": 14099}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 244.0, 7.0, 14.0], "area": 53, "segmentation": {"size": [512, 512], "counts": "d7>c?N3M2N2N2O1NVXl7"}, "image_id": 842, "id": 14100}, {"iscrowd": 0, "category_id": 1, "bbox": [117.0, 279.0, 80.0, 110.0], "area": 3907, "segmentation": {"size": [512, 512], "counts": "gij13l?2N2M2O2m@G\\>;bAG[>:cAG\\>;bAG[>;bAG\\>:cAG\\>;aAH\\>P1N2N1N[OkADS>;RBEk=;XBDg=<[BDc=;_BF_=:cBE^=9dBE^=8eBE]=:dBF]=7fBIZ=5hBJY=4iBLV=2lBOT=OnB1R=MPC2P=LSC4jc0bA@^>a0_AAa>?\\AEc>;[AGe>k0O00001O001O00001O001O001ON2N2M3N2M3N2N2M3N2M3N2M3N2M3N2N2M3NRP[5"}, "image_id": 844, "id": 14104}, {"iscrowd": 0, "category_id": 1, "bbox": [185.0, 0.0, 16.0, 9.0], "area": 92, "segmentation": {"size": [512, 512], "counts": "S`l21m?3N10001O001O001O00001OO1N2M3NR`k4"}, "image_id": 844, "id": 14105}, {"iscrowd": 0, "category_id": 1, "bbox": [500.0, 0.0, 5.0, 1.0], "area": 5, "segmentation": {"size": [512, 512], "counts": "PPj71o?0000000P`3"}, "image_id": 844, "id": 14106}, {"iscrowd": 0, 
"category_id": 1, "bbox": [186.0, 4.0, 40.0, 49.0], "area": 1135, "segmentation": {"size": [512, 512], "counts": "XQm23k?3L3N2M4M2M4M2M3N3L3N3L30001M2N3M21O01O010O010O00M4M2M4M2M3N3L3N3L3N2N3L3N3L3Neo^4"}, "image_id": 844, "id": 14107}, {"iscrowd": 0, "category_id": 1, "bbox": [223.0, 16.0, 57.0, 55.0], "area": 1857, "segmentation": {"size": [512, 512], "counts": "fa_31l?3N3L3N3L3N2N3L3N3L3N2M4M2M4M2O110O010O0010O0010O0010O0010O0010O0010O0N30O00010O0M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3NToc3"}, "image_id": 844, "id": 14108}, {"iscrowd": 0, "category_id": 1, "bbox": [22.0, 26.0, 67.0, 41.0], "area": 1505, "segmentation": {"size": [512, 512], "counts": "bQ;1m?3M2M4M2M4M2M3N3M2M40O00010O010O00010O010O0010O001O0M4N10010O010O01O01O010O01O0M3N3L3N3L3N201O0010O0010O010O0010OO1O2N101N1O1O2N1O2Lg^c6"}, "image_id": 844, "id": 14109}, {"iscrowd": 0, "category_id": 1, "bbox": [275.0, 44.0, 56.0, 45.0], "area": 1426, "segmentation": {"size": [512, 512], "counts": "WbY43k?2M4M2M4M2O110O010O00010L3N3L3N2M40O0010O0010O00010O010O00010O010O0001N1M4L3N2N21M2M3N3L3N3L3M3N3L3N3L3N2M4M2Ma^j2"}, "image_id": 844, "id": 14110}, {"iscrowd": 0, "category_id": 1, "bbox": [331.0, 46.0, 27.0, 27.0], "area": 452, "segmentation": {"size": [512, 512], "counts": "oaU52l?3L3N3L3N2M4M201O00010O010O01O01O010O01L3N2N3L3N3L3N]n\\2"}, "image_id": 844, "id": 14111}, {"iscrowd": 0, "category_id": 1, "bbox": [80.0, 52.0, 63.0, 54.0], "area": 1804, "segmentation": {"size": [512, 512], "counts": "hRX11m?3L3M4M2M3N3L3N2010O01O01O01O01O010O01O01O01O01O010O01O0O1M4M2M4M2M3M4M2M3O2O010O00010O0001M2N3L3N2M4L3N2M4L3N3L3M3N3L3N3L3MV^h5"}, "image_id": 844, "id": 14112}, {"iscrowd": 0, "category_id": 1, "bbox": [134.0, 66.0, 64.0, 64.0], "area": 2065, "segmentation": {"size": [512, 512], "counts": "^SS23k?2M4M2M3N3M2M4M2O110O010O00010O010O00010O010O00010N1N3L3N2M4M2M4M2M4M2M3N3M2O20O0010ON3M2N2M4M2M4M2M3N3M2M4M2M3N3L3N3M2M3N3L3Nfml4"}, "image_id": 844, "id": 14113}, {"iscrowd": 0, "category_id": 1, "bbox": [323.0, 66.0, 59.0, 59.0], "area": 1917, "segmentation": {"size": [512, 512], "counts": "ZcQ52k?4M2M3N3L3M4M2M3N3L310O0001N10010O010OQASOk>Q1010O0010O0M3N3L3N3L3N2M4M201O01O010O000M4M2M4M2M3N3L3M3N3L3N3L3N2M4L3N3L3N2MhmP2"}, "image_id": 844, "id": 14114}, {"iscrowd": 0, "category_id": 1, "bbox": [201.0, 67.0, 27.0, 29.0], "area": 480, "segmentation": {"size": [512, 512], "counts": "hbT32l?2M3N3L3N3L3M3N3N10010O0010O0010O0001M2M4M2M3N3L3M4Mhm]4"}, "image_id": 844, "id": 14115}, {"iscrowd": 0, "category_id": 1, "bbox": [141.0, 78.0, 13.0, 14.0], "area": 113, "segmentation": {"size": [512, 512], "counts": "fbV23j?3M3N30O00010O00010L3M3Mbmb5"}, "image_id": 844, "id": 14116}, {"iscrowd": 0, "category_id": 1, "bbox": [372.0, 82.0, 62.0, 72.0], "area": 2085, "segmentation": {"size": [512, 512], "counts": "bSj53k?3N1N3N1N3N02N1100O0100OO2M3VAAo=a0mADQ>>kAES>>jADU>=iAFT>=iAEU>=jADU>>hAEU>T1N2O2O000100O0001N1N2N3NN100O3M4L3M4J6M2O2M2N3N2M2N3N1N3M2N3N2M2N3N1N3M2O2M3M2OQmV1"}, "image_id": 844, "id": 14117}, {"iscrowd": 0, "category_id": 1, "bbox": [287.0, 90.0, 21.0, 19.0], "area": 252, "segmentation": {"size": [512, 512], "counts": "Vc_41l?4L3N3L3O1010O0010O0010O00010O001M2M3M4MRmU3"}, "image_id": 844, "id": 14118}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 92.0, 32.0, 61.0], "area": 1215, "segmentation": {"size": [512, 512], "counts": "X3]1c>0N3M2M4M2N3N100010O0N3M2M3N3L3N3L3N2N3L3N3L3N3L3N2M4M2N3L3N2Mnl_7"}, "image_id": 844, "id": 14119}, {"iscrowd": 0, "category_id": 1, "bbox": [66.0, 92.0, 16.0, 13.0], "area": 119, "segmentation": {"size": [512, 
512], "counts": "RSQ12l?2N2N3O0010O010O010O00010M2N3LRmf6"}, "image_id": 844, "id": 14120}, {"iscrowd": 0, "category_id": 1, "bbox": [428.0, 101.0, 62.0, 69.0], "area": 1870, "segmentation": {"size": [512, 512], "counts": "^Tf61m?2M4M2N3L3N2M4M201O0010O0010O010O0010O00QA[Od>e0YA^Of>b0WAAi>l00O0010O010O0O2M2M3N3M2M4M1N1000O010O101O2M4M2N3L3N2N3L3N3M2M3N3L3N3M2M3N3M2Mgl:"}, "image_id": 844, "id": 14121}, {"iscrowd": 0, "category_id": 1, "bbox": [25.0, 104.0, 68.0, 76.0], "area": 2879, "segmentation": {"size": [512, 512], "counts": "Xd<2l?3M2h@La>7\\AMa>6\\ALc>5ZAOe>2WA1j>NTA5k>`01O01O010O010O0010O0010O0010O010O0010O001DoNhAP1V>SOiAn0T>TOmAk0Q>WOoAi0n=[OQBf0l=\\OUBc0i=@TBc0h=@UBc0i=l0M201O00010O010O010O01O01OO2L3N3M2N3L3\\OiA]OZ>`0hA^OZ>?jA^OY>>jA_OY>?jA^OX>?kA_OX>=kAAW>=h0L3M4M2M_\\a6"}, "image_id": 844, "id": 14122}, {"iscrowd": 0, "category_id": 1, "bbox": [247.0, 108.0, 57.0, 65.0], "area": 2089, "segmentation": {"size": [512, 512], "counts": "odk31l?4M2HKb@9Z?8N2M4M2M3N3N110O0N2M4L3N3L3N2M4O01O010O01O01O010O01O01O010O0O1M3NO00010O03N2M3M4M2M3N3L3M4M2M3N3L3M4M2M3NalW3"}, "image_id": 844, "id": 14123}, {"iscrowd": 0, "category_id": 1, "bbox": [488.0, 128.0, 24.0, 49.0], "area": 691, "segmentation": {"size": [512, 512], "counts": "UUd73j?3N3L3N3L3N2M4M2M4M20001O010O01ON3M2M4M2M3N3O0PL"}, "image_id": 844, "id": 14124}, {"iscrowd": 0, "category_id": 1, "bbox": [99.0, 132.0, 68.0, 70.0], "area": 2383, "segmentation": {"size": [512, 512], "counts": "fea11m?3N2M2O2M3M2O2M3N1N3N1N3M3N1N3N2M210O10ON3M3N1N3N1N3N2M2N3N1N01O010O010O2O1N3M3N1N3N2M201000O01O0O2M3N1N3N2M2N3N2M2O2M2O2M3M2O2M3N1N3NZ[\\5"}, "image_id": 844, "id": 14125}, {"iscrowd": 0, "category_id": 1, "bbox": [302.0, 147.0, 101.0, 66.0], "area": 3268, "segmentation": {"size": [512, 512], "counts": "ZUg42m?2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N000000000001O2N2N200000000000000000000000O10000000O1N2N2O1000000000000000O1N2N2N2N1O000000002M3N2N2N2N2N2N2N2N2N2N1O1O0000O10002N2N2N2N2N2N2N2UO]A6e>H]A6e>H]A6e>H]A6e>H]A6e>H]A6e>H]A6e>H]A6e>H]A6X?N2N2NVZf1"}, "image_id": 844, "id": 14126}, {"iscrowd": 0, "category_id": 1, "bbox": [393.0, 168.0, 70.0, 69.0], "area": 2228, "segmentation": {"size": [512, 512], "counts": "efT63k?3X@Kc?5[@Mf?601M2N3L3N3N10010O010O01O01O010O010M2N2M4M2N3L3N3M2M3N3M2O20O00010O010O010O00N1N01000O010001N3N3M2M4M2M4M2N2M4M2N3O001N1N2M4M2N3L3N3MaZh0"}, "image_id": 844, "id": 14127}, {"iscrowd": 0, "category_id": 1, "bbox": [156.0, 171.0, 63.0, 63.0], "area": 1964, "segmentation": {"size": [512, 512], "counts": "cV^21o?1N2N2N2N2N3M2N2N2O1N2N2N2N3M2N2N2N2JlN^AU1`>mN^AU1`>lN_AT1a>510O00000000000000010O000000000000000101N2N2N2N2N000000011N3M2N2N2N2N2N2N2Fc@O_?Nc@0_?Nc@0_?Nc@0dib4"}, "image_id": 844, "id": 14128}, {"iscrowd": 0, "category_id": 1, "bbox": [466.0, 176.0, 30.0, 25.0], "area": 466, "segmentation": {"size": [512, 512], "counts": "PVY73k?2N3L3N3M2N2N3N1010O000N3N110O010O010O010O01O01M2N3M2M4M2NYj7"}, "image_id": 844, "id": 14129}, {"iscrowd": 0, "category_id": 1, "bbox": [246.0, 190.0, 32.0, 32.0], "area": 534, "segmentation": {"size": [512, 512], "counts": "_Vk31n?2N1O2N2M3N2N2N2N2N1N3N2N2N2N2O10N1O2N2N2N2N2N2M3N1O2N2N2N2M3Nfid3"}, "image_id": 844, "id": 14130}, {"iscrowd": 0, "category_id": 1, "bbox": [280.0, 208.0, 61.0, 43.0], "area": 1875, "segmentation": {"size": [512, 512], "counts": "YW\\44h?4L5K4L4L4L5N10000010O0000010O0000010O0000010O000010O0000010O0000010K4M31O01O0001O01O0001O01O0001O01O0001O01O01XOYA5g>G]A7e>F^A6f>F_A5X?LkXe2"}, "image_id": 844, "id": 14131}, {"iscrowd": 0, "category_id": 1, "bbox": [499.0, 
229.0, 13.0, 37.0], "area": 309, "segmentation": {"size": [512, 512], "counts": "lgi72l?2Z@N\\?5a@M\\?>M2N2M4M2M4M2N3O01OjH"}, "image_id": 844, "id": 14132}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 246.0, 72.0, 52.0], "area": 1865, "segmentation": {"size": [512, 512], "counts": "eho23m?2N3L3N3L4M2N3L100O0100O01000O01H[OUAd0l>_OPAb0o>80O10O10O10O010O10O10O010O10O10O10O10O010O10O10O010O10O10O10O10O010O10O10O010O10O10O13L3N3L4M2N3L3N3L4M2N`Wl3"}, "image_id": 844, "id": 14133}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 247.0, 24.0, 62.0], "area": 758, "segmentation": {"size": [512, 512], "counts": "g7n1S>M2N3M2M4M2N3M2N3M2N3L3N3M2N3M2N2N3M2M4M2N3M2NPhc7"}, "image_id": 844, "id": 14134}, {"iscrowd": 0, "category_id": 1, "bbox": [274.0, 259.0, 14.0, 14.0], "area": 112, "segmentation": {"size": [512, 512], "counts": "ZXY41o?3L3N1N1M210O10O10O04M2M3Nfg_3"}, "image_id": 844, "id": 14135}, {"iscrowd": 0, "category_id": 1, "bbox": [287.0, 262.0, 35.0, 30.0], "area": 599, "segmentation": {"size": [512, 512], "counts": "^h_42n?2M4M3L3N3M3L3N0O0100O0100O01000O010O12O200O0N000O012N1N3K`@Fa?95N1N3N1OUgn2"}, "image_id": 844, "id": 14136}, {"iscrowd": 0, "category_id": 1, "bbox": [355.0, 266.0, 50.0, 67.0], "area": 1902, "segmentation": {"size": [512, 512], "counts": "Sja51m?2M3M4L3M3M4D_OTAd0i>_OTAd0i>HfA:X>IdA:Y>IdA;X>IeA9Y>IdA:Y>IeA:Y>g0O2L3N2N3O010O01O01O01O01N1O20O001O0M3N3L3N3M2M4M2N3L3N2N3L3N3L3N3M2M3N3M2M4M2NWgc6"}, "image_id": 844, "id": 14138}, {"iscrowd": 0, "category_id": 1, "bbox": [438.0, 271.0, 67.0, 59.0], "area": 2067, "segmentation": {"size": [512, 512], "counts": "[Yk62k?4L3N2M4M2M3N3L3N3L3N201O010O01OUAQOe>U1010O0010O0010O0010O0010O001nNXAk0g>RO\\An0d>PO_AP1h>01O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O000_OQAOR?MRAOQ?OQAOQ?NSANQ?OQAOee3"}, "image_id": 844, "id": 14139}, {"iscrowd": 0, "category_id": 1, "bbox": [221.0, 291.0, 71.0, 54.0], "area": 2081, "segmentation": {"size": [512, 512], "counts": "ji^34l?3M3L4M3M4K4M3M3L3N00O01000O01000O01000O01000O10O10O10O10O2OJQO^Al0b>WO]Af0c>^OZAb0f><00O0100JXATOg>l072N0O10O1000O01000O01000O01000O01000O010002M4M3M4K4M3M3M3L4M3Moe]3"}, "image_id": 844, "id": 14140}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 298.0, 31.0, 45.0], "area": 817, "segmentation": {"size": [512, 512], "counts": "W:9d?4M2M3N3M210N1N2M4M2N3L3N3L300010O0N3L3N2M4M2M4Ik@_OW??7L3N2M4M2NbV`7"}, "image_id": 844, "id": 14141}, {"iscrowd": 0, "category_id": 1, "bbox": [510.0, 300.0, 2.0, 4.0], "area": 6, "segmentation": {"size": [512, 512], "counts": "^Yo72l?2dF"}, "image_id": 844, "id": 14142}, {"iscrowd": 0, "category_id": 1, "bbox": [88.0, 311.0, 56.0, 67.0], "area": 2303, "segmentation": {"size": [512, 512], "counts": "YZ\\11o?3M3L4M3GCh@`0V?Cg@`0V?8L4M3M2M4M3M3L4M2M2OO10O10003L4M00O01000O010O10O10O01000O012N2M4M3M3L4M2M4M3M3L4M2N3L4M3M3L3N3M3Lfdg5"}, "image_id": 844, "id": 14143}, {"iscrowd": 0, "category_id": 1, "bbox": [321.0, 314.0, 29.0, 29.0], "area": 490, "segmentation": {"size": [512, 512], "counts": "cjP52l?2M4M2M3N3L3N2M1O1003O01O01O010O01O01O01O01M2M3N3L3N2M4LRf`2"}, "image_id": 844, "id": 14144}, {"iscrowd": 0, "category_id": 1, "bbox": [372.0, 325.0, 51.0, 70.0], "area": 1734, "segmentation": {"size": [512, 512], "counts": "R\\j51l?3N3M2M3N3M2M4M2M4M2N2M4M2M4M2N2M4M2N3L3N0O102N3L3O20O01O01UOWB\\Oj=c0WB[Ol=d0TBYOo=f0SBWOo=g0SBWOP>e0TBWOo=g0SBWOP>e0TBWOo=g0g0M2M3N3N110O01OO2L3N3M2MdU\\1"}, "image_id": 844, "id": 14145}, {"iscrowd": 0, "category_id": 1, "bbox": [237.0, 336.0, 72.0, 95.0], "area": 3338, "segmentation": {"size": [512, 512], 
"counts": "bkf31n?3M2O1N3M2O1N3M2O1@@]Ac0a>^O]Ad0a>_O]Ab0a>@]Ac0a>_O\\Ac0b>_O]Ab0a>a0N1N2N3ZB]Nkd0_A[Ob>c0`A[Ob>d0`AYOc>d0_AZOc>d0_A[Oc>b0?N3M2O1N3M2N3M2NkTU3"}, "image_id": 844, "id": 14146}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 343.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "g:1Xeo7"}, "image_id": 844, "id": 14147}, {"iscrowd": 0, "category_id": 1, "bbox": [54.0, 343.0, 33.0, 27.0], "area": 534, "segmentation": {"size": [512, 512], "counts": "S[k01l?3N3M2N2N3O0_@E]??10O01O01O0M4O0010O010O0010O0010O010N1N3M2M3N3M2N3LTUd6"}, "image_id": 844, "id": 14148}, {"iscrowd": 0, "category_id": 1, "bbox": [312.0, 343.0, 27.0, 26.0], "area": 438, "segmentation": {"size": [512, 512], "counts": "X[l42k?4M2M3N3M2M4M200010O010O00010O010O001O0N2M4M2M4M2N2MUUf2"}, "image_id": 844, "id": 14149}, {"iscrowd": 0, "category_id": 1, "bbox": [444.0, 343.0, 56.0, 45.0], "area": 1483, "segmentation": {"size": [512, 512], "counts": "a[n63j?4M2M4L3N2M4L3N3L3N201O010O01O01O010O010O01O01O010O0N2O2O0010O0010O0010O0010ON2M4M2M4M2M3N3L3N3M21O010ON3M2M3Nnd5"}, "image_id": 844, "id": 14150}, {"iscrowd": 0, "category_id": 1, "bbox": [33.0, 355.0, 41.0, 32.0], "area": 734, "segmentation": {"size": [512, 512], "counts": "dk`02k?4M2M3N3L3M4N11O01O01O01O010O01O01O010O00010O01O012M010O00010O010O00001L3N2M4L3N`dj6"}, "image_id": 844, "id": 14151}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 375.0, 39.0, 61.0], "area": 1424, "segmentation": {"size": [512, 512], "counts": "ig0QBWOQ>g0PBWOT>e0g0M3L4M3M2M4Mob\\5"}, "image_id": 844, "id": 14153}, {"iscrowd": 0, "category_id": 1, "bbox": [303.0, 378.0, 52.0, 73.0], "area": 2104, "segmentation": {"size": [512, 512], "counts": "fmg42k?4M2M4M2M3\\OAeAb0W>BfA`0X>BeAb0X>AeAa0X>BeAb0X>@fAb0W>e0O2O010O01O010O01O010O0O1N3L3N11N0O10O03N3M2M3N3L3N3L3N3M2M3N3L3N3M2M3N3L3J_@Kd?1_@Mc?1UT^2"}, "image_id": 844, "id": 14154}, {"iscrowd": 0, "category_id": 1, "bbox": [46.0, 385.0, 56.0, 72.0], "area": 2136, "segmentation": {"size": [512, 512], "counts": "f]g03j?3M3N3L3M4M2M3M4M2M3M4M2N3N1O1N3aAcNX>^1fAdNY>b1O2M2O2N1O1O2M2O1O2N1O20O1O1M3L4L3N3L4L4M3L3M4M3L4L4M2N3N2M3N2N2N1N3N2N2N2NZc\\6"}, "image_id": 844, "id": 14155}, {"iscrowd": 0, "category_id": 1, "bbox": [365.0, 410.0, 48.0, 66.0], "area": 1823, "segmentation": {"size": [512, 512], "counts": "_nf52l?3L3N2M4M2XOAlAc0P>@nAb0P>AlAb0Q>AmAb0P>AlAb0Q>AmAb0P>@mAc0Q>g001O010O01O01O010O01O01O010OO2M2M3N3L3N3L3N2M4M2M4M2M3N3L3N2M4L3N3L3NkRa1"}, "image_id": 844, "id": 14156}, {"iscrowd": 0, "category_id": 1, "bbox": [405.0, 433.0, 55.0, 65.0], "area": 1947, "segmentation": {"size": [512, 512], "counts": "RoZ63j?3N3L3N2M4M2M4M2M3O20O01O0N2N3L3N3L3O1010O01O0N2N3L3N3M2M301O010OO2L3N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3LXbi0"}, "image_id": 844, "id": 14157}, {"iscrowd": 0, "category_id": 1, "bbox": [203.0, 499.0, 35.0, 13.0], "area": 233, "segmentation": {"size": [512, 512], "counts": "noU32l?2M3N2M3O11O001O001O00001O001O00001O001O00001O001O00001O001O00001O00Q`X4"}, "image_id": 844, "id": 14158}, {"iscrowd": 0, "category_id": 1, "bbox": [19.0, 0.0, 40.0, 4.0], "area": 94, "segmentation": {"size": [512, 512], "counts": "P`91o?000000000000000000001O000000000000000000001O000000000000000000001O00000000000P`R7"}, "image_id": 845, "id": 14159}, {"iscrowd": 0, "category_id": 1, "bbox": [209.0, 0.0, 12.0, 3.0], "area": 21, "segmentation": {"size": [512, 512], "counts": "P`X31o?000000001O000000001O0P`a4"}, "image_id": 845, "id": 14160}, {"iscrowd": 0, "category_id": 1, "bbox": [301.0, 0.0, 59.0, 25.0], "area": 1074, 
"segmentation": {"size": [512, 512], "counts": "P`f41o?0000?A9G0000000000000000000000000000000000O1000000000000Hm@CS?=800O10000000000000000000000000000000000O10000000000000:Ffo[2"}, "image_id": 845, "id": 14161}, {"iscrowd": 0, "category_id": 1, "bbox": [370.0, 1.0, 78.0, 52.0], "area": 2400, "segmentation": {"size": [512, 512], "counts": "cPi59g?`0@00O10000002N8H0000000O10O100000000000LPOXAP1h>4O10000O100O10O10O10O0100O04MO0100O010O010O10O10O010O10O010O02O3MO01KSAWOl>i060O010O010O01Mn@XOQ?h040O03N2M3N2M3N2M3M3N2M2O\\oo0"}, "image_id": 845, "id": 14162}, {"iscrowd": 0, "category_id": 1, "bbox": [63.0, 8.0, 75.0, 62.0], "area": 2757, "segmentation": {"size": [512, 512], "counts": "bao02j?5K4K5L5K4L4K501O0ZAoN]>Q1^ATOb>V10O00000010O000001O01O00N3O000000010O0000010O00000010O0000010K4L40001O01L3L4L4K10O3N40000010O000000010O0000010N1L4L4K6K4L4L4L5Jhoj5"}, "image_id": 845, "id": 14163}, {"iscrowd": 0, "category_id": 1, "bbox": [145.0, 12.0, 81.0, 60.0], "area": 2538, "segmentation": {"size": [512, 512], "counts": "kaX22g?7J6J7H7K500000001O01O00000001O004MO01O00000001O01O00000001O01O0000000001O01OM3J6I7N3O0^Og@>]?000000000O1M004K4M3M3M4L3M3O20O0001O01O01O01O01OM4L3L4M4N10001N1L4Eh@K\\?0h@M[?0io^4"}, "image_id": 845, "id": 14164}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 16.0, 48.0, 53.0], "area": 1756, "segmentation": {"size": [512, 512], "counts": "`0b1^>00000000000000000001O012mA1P>1oA0o=3nA0P>1nA1\\OGa>;QB3l=ORB3m=OPB4m=NRB3l=ORB4l=NQB4m=NRB4k=ORB2m=0QB0P>2nAMR>Q10O2N3N1N2N3N1N3M2O2M2N010O0000010O00010LYAnNg>R13010O000BWOgAi0Y>>00100O1O100O1O100O1O1O100O1O10O01O100O1O100O1O100O1O100O?A`0@W^Y2"}, "image_id": 845, "id": 14166}, {"iscrowd": 0, "category_id": 1, "bbox": [52.0, 81.0, 88.0, 56.0], "area": 2860, "segmentation": {"size": [512, 512], "counts": "kSj05f?6J5K5K5O2O01O000001O01O0001O0001O01O0000010I6L40000010O000001O0001O01O0O1L1O2M5M3000001O01O000001ON3K400001O01O000fNbAR1f>1O000001lNYAj0Q?K41O000001O01O000001O01O000001O01O000001L3K5K5Kmli5"}, "image_id": 845, "id": 14167}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 94.0, 46.0, 42.0], "area": 1638, "segmentation": {"size": [512, 512], "counts": "n2S1m>00000001O0001O000000000000000001O01O3M1O00000000000001O0000000000000001O0001O0000000G9E;DZmX7"}, "image_id": 845, "id": 14168}, {"iscrowd": 0, "category_id": 1, "bbox": [506.0, 107.0, 6.0, 12.0], "area": 38, "segmentation": {"size": [512, 512], "counts": "]Sm71o?2M2O2M3N2cL"}, "image_id": 845, "id": 14169}, {"iscrowd": 0, "category_id": 1, "bbox": [417.0, 109.0, 95.0, 71.0], "area": 3616, "segmentation": {"size": [512, 512], "counts": "gd`62n?1N3M3N2M2O2M3N2G]OPAe0n>^Oo@e0[>YOlA4Ji0Z>TOjA5Jg0\\>WOhAQ1Y>QOdAP1[>=M3N0O01O010O01O010O01O01O0101N201O10O1000O10O1000O1M2N3N1N01O010O01O010O01O010O01O010O01OHaAPO`>P1bAnN]>R190O010O000100O2O2M10O01O010O01O010O01O010O3N2M3N1N3M3N2M2O2M3N2MiK"}, "image_id": 845, "id": 14170}, {"iscrowd": 0, "category_id": 1, "bbox": [133.0, 128.0, 63.0, 71.0], "area": 2450, "segmentation": {"size": [512, 512], "counts": "aeR22m?2M3N1Z@Ja?;O2N2M3N2N1O2N2N2M3N1O2N2N2M3N1O2000O1N2M2O2N1O0O1000O1E:21CfNRB[1m==O001O1O1O0000O1O1N2O1O1O1N101LhA_NY>`14O1N2O1O1O4K;F001O1Nc0^OUkm4"}, "image_id": 845, "id": 14171}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 150.0, 34.0, 54.0], "area": 1304, "segmentation": {"size": [512, 512], "counts": "f4d0>MP>2PBNP>2iA5W>j00000000001O000001O00000000000001O0000ROnANS>EYB;f>0000000001O0001OJ6Dcj^7"}, "image_id": 845, "id": 14172}, {"iscrowd": 0, "category_id": 1, "bbox": [323.0, 155.0, 63.0, 45.0], "area": 1646, "segmentation": 
{"size": [512, 512], "counts": "`eQ51m?3M3N1N3M2O2M3O010O010N1N3M5K2O2M2O2000O010O010O01000O010O01000O01N1N3N0O1O02N3N110O010O1N1N3N110O010O1M2VOm@e0X?O2M3M2O2M2N3M3N1Nfjn1"}, "image_id": 845, "id": 14173}, {"iscrowd": 0, "category_id": 1, "bbox": [481.0, 171.0, 31.0, 45.0], "area": 1175, "segmentation": {"size": [512, 512], "counts": "Qf`7g0Y?000000000000000ZO[OQBe0o=f00000000000000000000000000000000000000O10eJ"}, "image_id": 845, "id": 14174}, {"iscrowd": 0, "category_id": 1, "bbox": [50.0, 173.0, 98.0, 59.0], "area": 2905, "segmentation": {"size": [512, 512], "counts": "mUi02l?2O2N2M2O2M3N1O2M3N2M201000O10O1000O01000OO2N2M3N101000O01000O0100000O01000O010000O010000O01000O01N2M3N1O2M3N1N3N2M3OO1O2M3N1N3N2N2M2O2N20O10O1000OO2M30O01000O0100000O0100UOo@e0Q?ZOQAd0V?N3N2M3N1N3N2N2M2O2M`ie5"}, "image_id": 845, "id": 14175}, {"iscrowd": 0, "category_id": 1, "bbox": [305.0, 185.0, 60.0, 49.0], "area": 1455, "segmentation": {"size": [512, 512], "counts": "]fh43k?2N3N1N3M2O2M20100O010N1N3M2N3N1N3000O010O0102NO010O0100O0100O0100O0100O010O0100O0100N1O2M2N3N2O010N1N3M2N3N2M2N3N1N3M`YY2"}, "image_id": 845, "id": 14176}, {"iscrowd": 0, "category_id": 1, "bbox": [286.0, 216.0, 82.0, 54.0], "area": 1822, "segmentation": {"size": [512, 512], "counts": "WW_41m?3M2O2M3M2O2M2N3N101O010O10O10O010O01000O010O0100O0100O010M2N30O010O0100O0100O010O01000ON3M2010O10O10O010O010O10O010O10O1O0O2O00100ZOn@m1100O010O01000O010O010O10O10ROPBJQ>3RBMm=1UBOl=OVB1i=MYB3h=WOnA`0=9h=DZB=e=A^B>c=_O_Bb0`=]ObBb0_=[OdBe0[=YOgBh0Y=UOjBj0V>10M2N3M2N3N2M2N3M2N3M2OQgS6"}, "image_id": 845, "id": 14180}, {"iscrowd": 0, "category_id": 1, "bbox": [463.0, 233.0, 49.0, 42.0], "area": 1298, "segmentation": {"size": [512, 512], "counts": "mgW72l?2N3L3N3L3N2N3L3O2O01O01O010O010O00010O010O01O01O010O010O00010O0N3M2M3O2O010O01O01O010O010O00010`H"}, "image_id": 845, "id": 14181}, {"iscrowd": 0, "category_id": 1, "bbox": [268.0, 240.0, 82.0, 55.0], "area": 1853, "segmentation": {"size": [512, 512], "counts": "oWV41n?2M2N3M2O2M2N3M3N1010O0100O0100O010O0100O0100O010O0100O010M2O2N110O10O10O010O10O010O10O010M3N1010O010O01000O010O010O01000O0N30O010O01[Om@P?@RAa0n>]OTAc0T?010O01M3M2O2M2N3M2O2MZg`2"}, "image_id": 845, "id": 14182}, {"iscrowd": 0, "category_id": 1, "bbox": [249.0, 262.0, 80.0, 55.0], "area": 1825, "segmentation": {"size": [512, 512], "counts": "ehl32l?3M2N3M2N3M3M2O2O010O01000O010O010O010O10O010O010O0100O0N3O0010O0100O010O010O10O010O01O0N3O10O010O10O010O10O010O10O010O01M201000O\\Om@;T?Bo@>P?@SA?n>_OSAb0V?O0100OO2M3M2O2M2N3M2OcVk2"}, "image_id": 845, "id": 14183}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 272.0, 12.0, 39.0], "area": 403, "segmentation": {"size": [512, 512], "counts": "`8V1j>000000001O000001J5B>Bmgi7"}, "image_id": 845, "id": 14184}, {"iscrowd": 0, "category_id": 1, "bbox": [424.0, 276.0, 88.0, 70.0], "area": 3066, "segmentation": {"size": [512, 512], "counts": "gYd61m?2M4M2M3N3M2M4O01O01O010O010O00010O010O00010O010O01O01D]OYAc0e>_O\\Aa0a>B[Aa0b>B\\Aa0a>`0N2N3M2010O00010O010O00010O010O010O00010O010O00010O010O00010O010O010O0mNcAc0]>[OfAe0Z>XOhAh0X>UOkAl0U>QOnAn0R>oNQBQ1`>0O010O0001QORAk0m>SOVAl0o>10O010O0N2N3L3NTF"}, "image_id": 845, "id": 14185}, {"iscrowd": 0, "category_id": 1, "bbox": [233.0, 288.0, 81.0, 54.0], "area": 1837, "segmentation": {"size": [512, 512], "counts": "_id32l?2O2M3M2O2M2N3N1N3O010O10O010O10O010O10O10O010O010O01000O0O2M201O010O01000O010O010O01000N1O2M2100O010O01000O010O010O10O10O0N3O10O01[Om@Q?_ORAa0W?0O010O01O1N1N3M2O2M3M2OieR3"}, "image_id": 845, "id": 14186}, {"iscrowd": 0, "category_id": 1, 
"bbox": [13.0, 315.0, 84.0, 56.0], "area": 3024, "segmentation": {"size": [512, 512], "counts": "Uk62h?6I7J6J60001M2K5K5M31O01O000000000^AhN]>^1O0000000000010O00000000nNbAb0^>WOiAj0f>01K41O000000000010O0000000001O0O1N2N2O1N2N3O01O00O1O1N3M2O1N2N2O0O02N3N1N3M2N3M3N1N3M2N3CPAGR?7o@GS?7PAFS?7o@GS?8FlA:T>DoA
    ASB?n=^OUBb0j=\\OXBd0i=XOZBh0f=VO]Bj0b=SOaBm0Z>0O00010O010O00010O010O01O01O0O2M2N3L3O1010O01O01O001M2M3N3L3N3L3N2N3L3N3L3N2M4Mkd;"}, "image_id": 845, "id": 14188}, {"iscrowd": 0, "category_id": 1, "bbox": [172.0, 322.0, 80.0, 78.0], "area": 2981, "segmentation": {"size": [512, 512], "counts": "l[f21m?3N1O2K4O2GDk@=S?Fj@=S?El@=Q?:N3M2O2M2N3M201O10O0N3N110OO2M2N3M2N3O10O010O010O010O10O10O01M2N3NO0001O000001O2O2M2N3M2O2M2N3M1O01O0O100002O1N3M3M2N3N1N3M3M2N3N1N3M3M2N3N1NgeQ4"}, "image_id": 845, "id": 14189}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 323.0, 1.0, 9.0], "area": 9, "segmentation": {"size": [512, 512], "counts": "S:9deo7"}, "image_id": 845, "id": 14190}, {"iscrowd": 0, "category_id": 1, "bbox": [219.0, 346.0, 73.0, 82.0], "area": 2914, "segmentation": {"size": [512, 512], "counts": "cl]32l?3N1N3M2N3N2M2N3M2O2M2N3N2M2N3M2O2M2N3M3N1N3M2N3N2M2N3M2OPOUBNh=2[BMc=3`BM]=4dBMZ=2iBMX=0kB0U=MmB3S=KPC5P=IRC6nDoA;T>f02N3N1N3M3M2O2M2N3M2O2M3M2O2M2N3M2O2M3M2N3N1N3Mad]3"}, "image_id": 845, "id": 14191}, {"iscrowd": 0, "category_id": 1, "bbox": [504.0, 355.0, 8.0, 20.0], "area": 85, "segmentation": {"size": [512, 512], "counts": "d[l71l?3N2N3L3N3L3N2mD"}, "image_id": 845, "id": 14192}, {"iscrowd": 0, "category_id": 1, "bbox": [485.0, 372.0, 16.0, 18.0], "area": 162, "segmentation": {"size": [512, 512], "counts": "Qlb71l?3N3L3M3N30O01O01O010L3N2M4L3NZT5"}, "image_id": 845, "id": 14193}, {"iscrowd": 0, "category_id": 1, "bbox": [348.0, 384.0, 69.0, 92.0], "area": 3051, "segmentation": {"size": [512, 512], "counts": "Q]^52l?2^@MT?6j@LT?7h@LU?7i@LT?a0M4N11O04L010O0oAoNT=P1jBROV=o0fBUOY=k0eBWO\\=i0`BZO`=f0^B]Ob=b0[BAe=`0XBCg==VBFk=:RBHn=V10O01O01O010O01O01O010O01N1M10O010O010O010O010O102M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N2M4MiS_1"}, "image_id": 845, "id": 14194}, {"iscrowd": 0, "category_id": 1, "bbox": [407.0, 400.0, 73.0, 94.0], "area": 3574, "segmentation": {"size": [512, 512], "counts": "Sn[63k?3L3N3M2M3N3L3N3N11O_AWOk=j0RBXOn=h0PB[OP>e0lA^OT>b0jAAV>?fADZ>:bAI^>l0010O0010O0010O01XOaNnB^1oGUA7m>FWA6m>GUA7m>GVA5\\?N3Mcb?"}, "image_id": 845, "id": 14195}, {"iscrowd": 0, "category_id": 1, "bbox": [488.0, 418.0, 24.0, 69.0], "area": 931, "segmentation": {"size": [512, 512], "counts": "^^d71;2R?2j@3R?0k@3Q?`0M3M4M21O01O00010O0001OO2K4M3^AeN\\>a1L5L3L4M3M4mB"}, "image_id": 845, "id": 14196}, {"iscrowd": 0, "category_id": 1, "bbox": [142.0, 487.0, 74.0, 25.0], "area": 831, "segmentation": {"size": [512, 512], "counts": "n_W22k?3N2N2N2O11O001O001O001O00001O001O001O00O1N2001O001O001O001O00001O001O001ON2N2M3N2N2N2N20000001O00O1N2N2N2N2O11O001O001O001O00001O001N1N3M2N3L3N3Mb`c4"}, "image_id": 845, "id": 14197}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 77.0, 185.0, 323.0], "area": 38538, "segmentation": {"size": [512, 512], "counts": "]2k9U61O01O010O01OWFmIe9X6O010O01L3O20O0010O0010M2M2OO10O2O3L3N2M4M2M4M2TGTIa8o6]GTIa8n6[GUIf8S7O00010\\GhH^8X7`GkH`8Z701OO2L301O01O01O010O010O00010O010O01O01O010OTIiHo4X7nJjHR5V7kJnHU5Q7iJQIW5P7fJSIZ5l6ZJhHTO?b6j6WJiHUO?d6h6TJmHTO?h6c6TJfIl5[6SJfIm5Y6SJgIm5Z6oIjIQ6U6mImIS6S6kIoIU6R6gIRJY6m5eIUJ[6l5aIXJ_6g5_I[Ja6f5[I]Je6c5YI`Jg6\\7010O00010O010O00010O010O0010O0010O010O00010O010O00010O01M2M3N3L3O2O01O010O010O01O01O010O01O01OfN^FVLc9g3_FZL`9c3dF\\L]9a3eF_L[9_3hFaLW9\\3lFdLU9Y3mFhLR9U3RGjLo8S3SGmLm8P3WGPMh8n2ZGRMg8j2]GVMb8h2`GXMa8e2aG\\M^8a2fG^M[8_2gGaMY8\\2kGdMT8Z2nGfMS8V2PHiMQ8U2RHhMQ8T2RHiMQ8U2RHhMP8U2SHhMQ8U2QHiMQ8U2RHgMR8U2QHiMQ8U2RHhMP8U2SHhMQ8U2QHiMQ8T2SHhMQ8U2QHiMQ8T2SHhMP8V2o2M2M4M2M4M2M3N3L3N3M2M3N3L3N3L3N2M4M2M4M2N2M4M2M_YS5"}, "image_id": 849, "id": 
14198}, {"iscrowd": 0, "category_id": 1, "bbox": [240.0, 0.0, 61.0, 90.0], "area": 4632, "segmentation": {"size": [512, 512], "counts": "PPh3j2V=00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000bM^20000000000000000000P`Y3"}, "image_id": 850, "id": 14199}, {"iscrowd": 0, "category_id": 1, "bbox": [348.0, 0.0, 67.0, 144.0], "area": 8126, "segmentation": {"size": [512, 512], "counts": "PP^5e0[?00000000000000000000k3UL0000000000000000000000000000000000000000000000000000000004L000000000000000000000000000000000000000000000000Q2oMk]`1"}, "image_id": 850, "id": 14200}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 1.0, 11.0, 25.0], "area": 275, "segmentation": {"size": [512, 512], "counts": "1i0W?0000000000000000000o_j7"}, "image_id": 850, "id": 14201}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 51.0, 12.0, 25.0], "area": 293, "segmentation": {"size": [512, 512], "counts": "c1i0W?00000000000000000007IVni7"}, "image_id": 850, "id": 14202}, {"iscrowd": 0, "category_id": 1, "bbox": [8.0, 220.0, 241.0, 116.0], "area": 9790, "segmentation": {"size": [512, 512], "counts": "XW4o0Q?000F:0000000000000000000000000000000004L00000000000O1000000000000000000000000000000000XOjAFV>:jAFV>:jAFV>:jAA[>?c0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000O1000000000000000000000000000000000000WAGk=9UBG9Dgm0nAQOS>h0UBVOl=j0UBSOm=m0c00000000000000000000000000000000000000000000l0TOeVS4"}, "image_id": 850, "id": 14203}, {"iscrowd": 0, "category_id": 1, "bbox": [223.0, 228.0, 31.0, 32.0], "area": 954, "segmentation": {"size": [512, 512], "counts": "Ug_3c0]?c00000000000000000000000000000000000000000000000I^OfALZ>4i000000000000000000000000000000000000lWf5"}, "image_id": 850, "id": 14205}, {"iscrowd": 0, "category_id": 1, "bbox": [19.0, 310.0, 21.0, 10.0], "area": 177, "segmentation": {"size": [512, 512], "counts": "hi93l?6K0000000000000000O1000000000O100000000Zf[7"}, "image_id": 850, "id": 14206}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 417.0, 21.0, 38.0], "area": 677, "segmentation": {"size": [512, 512], "counts": "Q=V1j>O100O100O1O100O100O100O100O1O100O100O10oRe7"}, "image_id": 850, "id": 14207}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 463.0, 68.0, 49.0], "area": 2118, "segmentation": {"size": [512, 512], "counts": "T?l0S?1O1O100O1O1O100O1O1O100O1O1O100O1O1O100O1O100O1O1O100O1001O1O2N1O1O1O2N1O1O2N1O1O1O2N1O1O2N1O1O1O2N1O1O001O1O2N1O1O2N1O1O2N1O1O2N1O1OR`m6"}, "image_id": 850, "id": 14208}, {"iscrowd": 0, "category_id": 1, "bbox": [51.0, 0.0, 104.0, 124.0], "area": 11042, "segmentation": {"size": [512, 512], "counts": "P`i0:f?b3^L000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000YLQD]3o;cLPG>P9BPG>P9BPG>P9BPG>P9BPG>P9BPG>P9BPG>P9BPG>P9BPG>P9BPG>Y<0000000000000g\\b5"}, "image_id": 851, "id": 14209}, {"iscrowd": 0, "category_id": 1, "bbox": [225.0, 0.0, 287.0, 184.0], "area": 39886, "segmentation": {"size": [512, 512], "counts": 
"P``3l0T?0000000000000000000000O10000000000000000000000000000000000000000000000000000^2bMQ1oN0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000O100000000000000000000000000000000000000000000000000000000000000000000000000000000>B0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000O1000000000000000000000000000000000000000000000000000000R1nN0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000O1000000000000000000000000000000000000000000"}, "image_id": 851, "id": 14210}, {"iscrowd": 0, "category_id": 1, "bbox": [485.0, 240.0, 27.0, 46.0], "area": 1121, "segmentation": {"size": [512, 512], "counts": "jhb72_??A?D<0001O000000000001O0000000000000001O00000000000_H"}, "image_id": 851, "id": 14211}, {"iscrowd": 0, "category_id": 1, "bbox": [438.0, 241.0, 44.0, 43.0], "area": 1665, "segmentation": {"size": [512, 512], "counts": "hXk61_?`0@`0I700000001O0000000000000001O00000000000001O0000000000000001O000000000001O0000O1@`0Alh>"}, "image_id": 851, "id": 14212}, {"iscrowd": 0, "category_id": 1, "bbox": [372.0, 437.0, 16.0, 18.0], "area": 272, "segmentation": {"size": [512, 512], "counts": "e]j5a0_?000000010O0000000000000000000Zbm1"}, "image_id": 851, "id": 14213}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 444.0, 49.0, 36.0], "area": 1256, "segmentation": {"size": [512, 512], "counts": "Unm23m?5J6K5K6J3M0O1000O1000O1000O1000O1000O1000O1000O1000O1000O100000O10O100000O10O100000O11N6K6J5K5K^aY4"}, "image_id": 851, "id": 14214}, {"iscrowd": 0, "category_id": 1, "bbox": [407.0, 445.0, 48.0, 35.0], "area": 1220, "segmentation": {"size": [512, 512], "counts": "Qn[6?]?40000k@_Ok>k000000010O0000000000000000000000000000010O00000000000009JM01O000000000001O0000000000J6@WRl0"}, "image_id": 851, "id": 14215}, {"iscrowd": 0, "category_id": 1, "bbox": [486.0, 463.0, 26.0, 41.0], "area": 829, "segmentation": {"size": [512, 512], "counts": "a_c74^?>C?N000000001OM300001O00000001O000000L4L400000001`A"}, "image_id": 851, "id": 14216}, {"iscrowd": 0, "category_id": 1, "bbox": [159.0, 466.0, 44.0, 46.0], "area": 763, "segmentation": {"size": [512, 512], "counts": "ln_21n?2N2N2N2O1N2N2N3M2N2N20000000000g@ZOW?h00000010O0000000000000XOj@e0Y?001OO1O1O2N1O1N2N2N2N2N2N2N2NZPj4"}, "image_id": 851, "id": 14217}, {"iscrowd": 0, "category_id": 1, "bbox": [342.0, 477.0, 53.0, 35.0], "area": 1668, "segmentation": {"size": [512, 512], "counts": "h_[58Y??D<00000000001O000000000000000I7000005K000000i@^Oj>o00000000000000000006J0000J600000000000000001O000000000A\\ABd>OkA1W_j1"}, "image_id": 851, "id": 14218}, {"iscrowd": 0, "category_id": 1, "bbox": [398.0, 478.0, 8.0, 7.0], "area": 52, "segmentation": {"size": [512, 512], "counts": "n^W63m?4L00000000000Rad1"}, "image_id": 851, "id": 14219}, {"iscrowd": 0, "category_id": 1, "bbox": [408.0, 482.0, 43.0, 30.0], "area": 998, "segmentation": {"size": [512, 512], "counts": "h_\\68h?000000000000000[Oe0O11O00000000000000000000000000000N20000000001O0000000000000000000@\\Qn0"}, "image_id": 851, "id": 14220}, {"iscrowd": 0, "category_id": 1, "bbox": [263.0, 489.0, 36.0, 23.0], "area": 475, "segmentation": {"size": [512, 512], "counts": "doS41n?2N3[@KZ?7d@J[?8c@I\\?9b@H]?>O1O100O1O1O11O1O1O1O1O1O00O11O2N1O1O1O1O1O1O1O2N1O1O1O1O1OQPZ3"}, "image_id": 851, "id": 14221}, {"iscrowd": 0, "category_id": 1, "bbox": [201.0, 502.0, 19.0, 10.0], "area": 99, "segmentation": {"size": [512, 512], "counts": "ooT31n?1O1O1O1O1O100O1O1O11O2N1O1O1O1O1O1OQ`a4"}, 
"image_id": 851, "id": 14222}, {"iscrowd": 0, "category_id": 1, "bbox": [474.0, 507.0, 22.0, 5.0], "area": 104, "segmentation": {"size": [512, 512], "counts": "o_]71k?40000000000000000000000000000000000001O00T`7"}, "image_id": 851, "id": 14223}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 0.0, 164.0, 69.0], "area": 10539, "segmentation": {"size": [512, 512], "counts": "P`]1U2k=00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000kNU1000000000000000000000000000000000000000P`P4"}, "image_id": 852, "id": 14224}, {"iscrowd": 0, "category_id": 1, "bbox": [428.0, 220.0, 60.0, 20.0], "area": 1141, "segmentation": {"size": [512, 512], "counts": "lVf6=c?7I00000000000000000000000000000000000000O10000000000000000000000000000000000000000000000000000000000000000000000000=Cgh;"}, "image_id": 852, "id": 14225}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 254.0, 232.0, 258.0], "area": 47144, "segmentation": {"size": [512, 512], "counts": "S8m7S800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000OWOj0000000000000000000000000000000000000mNS1000000000000000000000000000000000000000O1000000000000000000000000000000000000000000000000000000000000000000000000O1000000000O10000000000000000000000000000000000000000000000000000gNXGZKh8f4Y10000000O1000000000000000000000000000000000000000000000000000000000000000000000000O1000000000O1000000000R2nM]2cMcc[4"}, "image_id": 852, "id": 14226}, {"iscrowd": 0, "category_id": 1, "bbox": [406.0, 274.0, 67.0, 46.0], "area": 1522, "segmentation": {"size": [512, 512], "counts": "UY[64l?4L3L4M3M3L3N1O4K10O10O10O10O10IQA]Oo>c061000O01000O01000O01000O01000O010000O01000O01000O01000O01000O01000O10O10O10O13L4OO2M4L3L4M3M3L5L3MbVc0"}, "image_id": 852, "id": 14227}, {"iscrowd": 0, "category_id": 1, "bbox": [372.0, 459.0, 53.0, 53.0], "area": 2809, "segmentation": {"size": [512, 512], "counts": "[^j5e1[>0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000eQ[1"}, "image_id": 852, "id": 14228}, {"iscrowd": 0, "category_id": 1, "bbox": [128.0, 500.0, 40.0, 12.0], "area": 480, "segmentation": {"size": [512, 512], "counts": "d_P2"}, "image_id": 854, "id": 14233}, {"iscrowd": 0, "category_id": 1, "bbox": [310.0, 61.0, 31.0, 38.0], "area": 367, "segmentation": {"size": [512, 512], "counts": "mRk42m?2M2O2M3N1N3N000O10O010O10O10O010O10O010O10O101N3N1O2M3N2M2OP^e2"}, "image_id": 854, "id": 14234}, {"iscrowd": 0, "category_id": 1, "bbox": [344.0, 90.0, 26.0, 33.0], "area": 279, "segmentation": {"size": [512, 512], "counts": "fS\\51n?1N3N2M2N3N2M10O010O01O010O010O010O02N2O2M2O2M3N1NUmV2"}, "image_id": 854, "id": 14235}, {"iscrowd": 0, "category_id": 1, "bbox": [165.0, 103.0, 52.0, 53.0], "area": 1672, "segmentation": {"size": [512, 512], "counts": "Udb22l?3N2X@Kb?6]@L`?oNnAR1P>POnAS1P>oNnAS1o=POnAR1P>POnAR1Q>?N2O1N1O10O010O0lNWB3i=NYB0g=O[BOe=1^BMb=3_BKa=5bBH_=8cBF_=7cBG`=7bBG_=7dBCZOHU>b0cBDc=:_BDb=:`BDc=:_BDb=:T1M3N1N3N]bk5"}, "image_id": 855, "id": 14238}, {"iscrowd": 0, "category_id": 1, "bbox": [65.0, 509.0, 5.0, 3.0], "area": 8, "segmentation": {"size": [512, 512], "counts": "ooP11m?21O1O00Q`l6"}, "image_id": 855, "id": 14239}, {"iscrowd": 0, "category_id": 1, "bbox": [9.0, 0.0, 11.0, 6.0], "area": 37, "segmentation": {"size": [512, 
512], "counts": "P`41o?1O001O1O1O001ON2O1OQPf7"}, "image_id": 856, "id": 14240}, {"iscrowd": 0, "category_id": 1, "bbox": [25.0, 92.0, 50.0, 49.0], "area": 1377, "segmentation": {"size": [512, 512], "counts": "hc<2m?1CNk@4S?Nj@5S?Nk@4S?Nk@3T?Nj@5S?>N1N3N2N2M210O1000O10O1O1N110O10O1000O10O1O1M2O2N2M3N10100O001N2M30OO2M3N2N1N3E]@6h?N2M3NY\\j6"}, "image_id": 856, "id": 14241}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 167.0, 25.0, 40.0], "area": 613, "segmentation": {"size": [512, 512], "counts": "[5o0P?2N2M3O010O10O1N1N3N2M3N1O2M3N1N3N2N2M2O2M3N1O2M]Zc7"}, "image_id": 856, "id": 14242}, {"iscrowd": 0, "category_id": 1, "bbox": [57.0, 228.0, 61.0, 67.0], "area": 2278, "segmentation": {"size": [512, 512], "counts": "bhl02l?3N1N3N2]OF]A;a>G]AF]A<`>G]AF]A;b>F]A<`>G]AF]A;a>c0O0001N3N2M3N11N2O0010000M2O2M3N1O2M3N2M201000O0100000O01000O0ZOkABU>9oAGR>7PBFR>9PBER>8QBFQ>8PBFR>8QBFQ>8QBER>9oAFS>7PBGQ>8QBER>8m0N1O2M3NZhT6"}, "image_id": 856, "id": 14243}, {"iscrowd": 0, "category_id": 1, "bbox": [60.0, 315.0, 31.0, 33.0], "area": 490, "segmentation": {"size": [512, 512], "counts": "cZn01m?2O2N2M3N1N3N2M2O2N2M2O2M10O01000O3N2M2OO02O2N1N3N2M3N1O2M3NPVb6"}, "image_id": 856, "id": 14244}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 95.0, 155.0], "area": 14713, "segmentation": {"size": [512, 512], "counts": "0k4U;00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000O1000000000000000000000P``6"}, "image_id": 857, "id": 14245}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 242.0, 403.0, 270.0], "area": 77617, "segmentation": {"size": [512, 512], "counts": "b7a4Y2SMY5m2gJSMY5m2gJSMY5m2gJSMY5m2gJSMY5m2gJSMY5m2gJSMY5m2gJSMY5m2gJSMY5m2gJSMY5m2gJSMY5m2gJSMY5m2gJSMY5m2gJSMY5m2gJSMY5m2gJSMY5m2gJSMY5m2gJSMb5d2^J\\MT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1kHoNU7Q1kHoNU7Q1kHoNU7Q1kHoNU7Q1kHoNU7Q1kHoNU7Q1kHoNU7Q1kHoNU7Q1kHoNU7Q1kHoNU7Q1kHoNU7Q1kHoNU7Q1kHoNU7Q1kHoNU7Q1kHoNU7Q1kHoNU7Q1kHoNU7Q1kHoNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7R1lHnNT7Q2mGoMS8Q2mGoMS8Q2mGoMS8Q2mGoMS8Q2mGoMS8o4000000000000O1000000000000000000000000000000000000000000000000000000000000000000000000000000000000mNjI\\HV6d7S10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000O10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000\\NkG]KU8c4d10000K5000000000000000000000O100000000000000000000000O100000000M300000000000000000000000000000000nNRFRLn9n3R100000000000000000000000000000000000000TMfZf1"}, "image_id": 857, "id": 14246}, {"iscrowd": 0, "category_id": 1, "bbox": [403.0, 259.0, 
109.0, 253.0], "area": 24020, "segmentation": {"size": [512, 512], "counts": "\\jY6k3n9W200000000000000000000000000000000000000O100000000000000000000000000000000000000000k0UO1O000000000000000000000000000000000000000000G900000000000000V1jN0000000000000000000000000000000000000000000000000O1000000000000000000"}, "image_id": 857, "id": 14247}, {"iscrowd": 0, "category_id": 1, "bbox": [3.0, 0.0, 377.0, 62.0], "area": 13078, "segmentation": {"size": [512, 512], "counts": "P`11o?:F:F:F:F9G1O000000O10000000000000000O10000006J7I0000000000000000O1000000000000000000O1CTBfNl=Z1=0000000000000000O10000000000000000O1000000000000000000O1001O7I00000000O100000000000000EkAkNU>U1;0000000000O1000000000000000000O1000000000000000000O1000000000000000000O10000000000000000O1000000000000000000O1000000000000000000O1000000000000000000O1000000000000000000O10000000000000000O1000000000000000000O1000000000000000000O1000000000000000000O1000000000000000000O10000000000000000O1000000000000000000O1000000000000000000O1000000000000000000O1000000000000000000O1000000000000000000O10000000000000000O13M:F3M0000000000O1000000000000000000O10000000000000000O1000000000000000000O1000000000000000000O1000000000000000000O1000000000000000000O10000000000000000O10000000009G:F]oQ2"}, "image_id": 858, "id": 14248}, {"iscrowd": 0, "category_id": 1, "bbox": [62.0, 69.0, 306.0, 93.0], "area": 16652, "segmentation": {"size": [512, 512], "counts": "oRo08h?:F:F9GO010000J60000000003MO10O1000000000000000O10O1000000000000000O10O1000000000000000O10O1000000000000000O10O1000000000000000O10O1000000000000000O10O1000000000000000O10O100000000I7O1000O10000000000000O1000O10000000006JO100000O10000000KeNeA[1[>50000O10O1000000000000000O10O100000005K0000O1000O10000000000000O1009F5L0L31000O10000000000000O1FUO_Ak0a>9100007I:F:F:F:E3N0000000000O100000O100000000000O100000O100000000000O100000O10000008H:F:E5L00000000000000A^N`Bb1`=>01000000000000:F4L0O10O1000000000000000O10O1000000000000000O10O1000000000000000O10O1000000000000000O10O1000000000000000O10O1000000000000000O10O1000000005K:F:F:E;F:F]kW2"}, "image_id": 858, "id": 14249}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 146.0, 28.0, 46.0], "area": 1100, "segmentation": {"size": [512, 512], "counts": "d4\\1d>000000O1000O1000000000O1000O1000000000O10008H8H8H7I8HWja7"}, "image_id": 858, "id": 14250}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 158.0, 357.0, 160.0], "area": 43113, "segmentation": {"size": [512, 512], "counts": "S6k3U<000000000O010000000000000000O01000000000000000O010000000000000000O01000000000000000O010000000000000000O01000000000000000O010000000000000000O01000000000000000O010000000000000000O010000000000000000O01000000000000000O01000000000000000O010000000000000000O010000000000000000O01000000000000000O01000000000000000O010000000000000000O010000000000000000O01000000000000000O010000000000000000O01000000000000000O010000000000000000O01000000000000000O010000000000000000O010000000000000000O01000000000000000O01000000000000000O010000000000000000O010000000000000000O01000000000000000O01000000000000000O010000000000000000O010000000000000000O01000000000000000O010000000000000000O01000000000000000O0103M3ON9G9G9G:E:G9G9G:F9G9G:F9G9FYW]2"}, "image_id": 858, "id": 14251}, {"iscrowd": 0, "category_id": 1, "bbox": [181.0, 460.0, 8.0, 5.0], "area": 40, "segmentation": {"size": [512, 512], "counts": "\\nj25k?0000000000000dQQ5"}, "image_id": 858, "id": 14252}, {"iscrowd": 0, "category_id": 1, "bbox": [164.0, 461.0, 7.0, 5.0], "area": 27, "segmentation": {"size": [512, 512], "counts": "^^b22n?2N0000000O1cQZ5"}, "image_id": 
858, "id": 14253}, {"iscrowd": 0, "category_id": 1, "bbox": [253.0, 462.0, 41.0, 50.0], "area": 1752, "segmentation": {"size": [512, 512], "counts": "bnn35k?9F;F:F9G3M000000000O1000000000000000000O1000000000000000000O1000000005K:F:F9G:FV`\\3"}, "image_id": 858, "id": 14254}, {"iscrowd": 0, "category_id": 1, "bbox": [322.0, 464.0, 37.0, 48.0], "area": 1354, "segmentation": {"size": [512, 512], "counts": "U_Q57i?4L0]OK[A5e>4QAMo>b004K9H4L0000000000000O100002N6J0000O10000000000000000008H9G9G9GVP\\2"}, "image_id": 858, "id": 14255}, {"iscrowd": 0, "category_id": 1, "bbox": [370.0, 470.0, 46.0, 42.0], "area": 1514, "segmentation": {"size": [512, 512], "counts": "W_i58h?0O10000DHPA8P?0h@0X?<06J8H3L100000000000000O10000000000000000O100000000000000O1000N2000007I9F9H8HZ`_1"}, "image_id": 858, "id": 14256}, {"iscrowd": 0, "category_id": 1, "bbox": [175.0, 471.0, 45.0, 33.0], "area": 1158, "segmentation": {"size": [512, 512], "counts": "Xog24l?9YODXA?d>KSA4n>?00000000000000O01000000000000000O01000000000003M2N0O100000000000O1000O10003M9G9G``a4"}, "image_id": 858, "id": 14257}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 491.0, 30.0, 21.0], "area": 565, "segmentation": {"size": [512, 512], "counts": "^?b0^?0O10000000000000000O10000000000000000O10000000000001O:F9GQ``7"}, "image_id": 858, "id": 14258}, {"iscrowd": 0, "category_id": 1, "bbox": [308.0, 503.0, 17.0, 9.0], "area": 136, "segmentation": {"size": [512, 512], "counts": "i_j47h?100000000000000000000O10000004LUPm2"}, "image_id": 858, "id": 14259}, {"iscrowd": 0, "category_id": 1, "bbox": [135.0, 510.0, 13.0, 2.0], "area": 18, "segmentation": {"size": [512, 512], "counts": "ooS21o?0000000000000O100000000R`e5"}, "image_id": 858, "id": 14260}, {"iscrowd": 0, "category_id": 1, "bbox": [197.0, 510.0, 12.0, 2.0], "area": 16, "segmentation": {"size": [512, 512], "counts": "ooR31o?0000000000000O1000000RPg4"}, "image_id": 858, "id": 14261}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 221.0, 149.0], "area": 32587, "segmentation": {"size": [512, 512], "counts": "0e4[;00000000000000000000000000000000000000O1000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000O100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000b2^M^]a4"}, "image_id": 859, "id": 14262}, {"iscrowd": 0, "category_id": 1, "bbox": [401.0, 0.0, 111.0, 131.0], "area": 12911, "segmentation": {"size": [512, 512], "counts": "R`X6i0W?0000000000000000000000000Na1aNi1WN0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001O0gLXDZ2h;fMkD0k<"}, "image_id": 859, "id": 14263}, {"iscrowd": 0, "category_id": 1, "bbox": [488.0, 250.0, 22.0, 26.0], "area": 268, "segmentation": {"size": [512, 512], "counts": "oWd73m?1N3M2O1N3M2O2M210O0010O001N1N3M2O2M2N2N3Ncg0"}, "image_id": 859, "id": 14264}, {"iscrowd": 0, "category_id": 1, "bbox": [39.0, 281.0, 40.0, 8.0], "area": 320, "segmentation": {"size": [512, 512], "counts": "ihc08h?00000000000000000000000000000000000000000000000000000000000000000000000000000WWh6"}, "image_id": 859, "id": 14265}, {"iscrowd": 0, "category_id": 1, "bbox": [170.0, 286.0, 89.0, 194.0], "area": 12433, "segmentation": {"size": [512, 512], "counts": 
"oXe26j?k0UOj0VO?A1O2N1O2N2N000000000O100000000000000000000VEmMR8h1nGXNR8h1nGXNR8h1nGXNR8X2^GhMb8R3dFnL\\9l3jETLV:o4L8H0000O10000000000000000000000000000000O100000000000000000O1000000000000000000000000000000000000000b0^Ok0UOj0VOk0UOj0VOj0UOl0UOWQn3"}, "image_id": 859, "id": 14266}, {"iscrowd": 0, "category_id": 1, "bbox": [306.0, 290.0, 206.0, 222.0], "area": 22992, "segmentation": {"size": [512, 512], "counts": "eYi44k?P1QOo0QO:F1O2eCbMP;`2mDcMQ;^2mDeM5Jc9c2VFdM5Lc9b2UFeM6Jd9c2TFeM5Ke9a2TFgM5Je9a2cEXM7?>Kf9`2bEXM8??Ke9`2bEVM:a0[Om8mNgFf1;^OP9iNhFf19AP9gNiFg16CS9dNiFf15FT9aNjFf12JV9^NjFf10LW9\\NkFf1NOY9YNkFf1L1[9VNlFf1I5c:I\\E8e:F\\E:f:DYE=i:@XE`0j:^OUEc0l:[OUEe0m:XOSEi0m:WOSEi0n:UOREl0o:SOPEn0R;oNoDQ1S;RNPDf0l0Y1V;nMQDf0j0\\1\\;bNcD_1^;_NcDa1_;]N`Dd1b;YN_Dg1c;VN]Dk1e;SN[Dm1g;PNYDQ2h;nMWDS2k;jMVDV2i<100O100O100O100O1002N1O2N2N2N1O2N2N1O2N2N2N1O2N2N1O00O100O100O100O1O100O100O100O1OcNnAn0R>ROoAm0P>SOQBm0o=ROSBm0l=TOUBk0j=UOYBi0g=WO[Bg0d=YO_Be0a=[OaBc0^=]OdBb0\\=^OfB`0Y=@jB>U=BnB00H8J600001O00D=N10000000000000000000000000001O01O00000M3[NXBm0`>1O000000000000000000000F:C=0001O0000000000000001O000000000C=A?A?@l__3"}, "image_id": 863, "id": 14274}, {"iscrowd": 0, "category_id": 1, "bbox": [74.0, 26.0, 75.0, 73.0], "area": 2959, "segmentation": {"size": [512, 512], "counts": "bRU1:f?7I00000O10000000O1000lNDdB14I;HO1000O100000000000O1000O100000000000O1000O100000000000O1000O1000000000D]O[Ac0e><000O1000000000O100000O1000000000O18H9G9F:G[mP7"}, "image_id": 863, "id": 14276}, {"iscrowd": 0, "category_id": 1, "bbox": [297.0, 72.0, 79.0, 60.0], "area": 2773, "segmentation": {"size": [512, 512], "counts": "Zcd49[?=Ca10aNkAS1a>000000000000000010O000000000000000000010O000J6D<0000001O000001O00000000000001O0000M4O01O000000000000000001O0H8E`mS2"}, "image_id": 863, "id": 14277}, {"iscrowd": 0, "category_id": 1, "bbox": [478.0, 81.0, 34.0, 48.0], "area": 1304, "segmentation": {"size": [512, 512], "counts": "fS_78`?80000K5C=E;10O000000000000000000001O00000001O000000:F1O0001O00000RM"}, "image_id": 863, "id": 14278}, {"iscrowd": 0, "category_id": 1, "bbox": [202.0, 102.0, 70.0, 55.0], "area": 2512, "segmentation": {"size": [512, 512], "counts": "cSU3f0Z?0000000000000000000000000J602N000000000000000000000000O10000000000000000000000c0]O0000000000000O100L4O10000000000000000000000O1000[OWBSOi=m0e0000n0ROlkg3"}, "image_id": 863, "id": 14279}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 103.0, 68.0, 48.0], "area": 2233, "segmentation": {"size": [512, 512], "counts": "\\3h0W?10O1000O104L3L5L3M3M3L10O10O10O4M1O0O10O1000O01000O10O1001N5L3M3L10O1000O10000000000O100000O10000000O10000000000000O10O10000000004Lf0UBPOj=R1a000O101O200N2N2N1N3N2N2Hk@BV?=l@@W?=8N2N1N3N2N2MY\\a5"}, "image_id": 863, "id": 14281}, {"iscrowd": 0, "category_id": 1, "bbox": [288.0, 146.0, 86.0, 50.0], "area": 3094, "segmentation": {"size": [512, 512], "counts": "_U`4<]?70000000001O00000J6I70000000000000010O000000000000000000000010O000000000000000000L4I701O000001O0000000004L000000000001O00000000000000000000001O0001O000000H8@`00000000001L]kT2"}, "image_id": 863, "id": 14282}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 147.0, 50.0, 54.0], "area": 2124, "segmentation": {"size": [512, 512], "counts": "\\UW79_?81O00g@Jh>g000I7E;0001O00000000000001O000001O00000000000001O0O10000YOiAEW>4PBLP>4PBLP>4PBMo=3QBMo=3QBMo=3QBMP>2PBNP>2PBNP>2PBNP>2PBNP>2PBNP>2PBNP>3PBLP>4PBLP>4PBMo=3QBMo=3QBMY9"}, "image_id": 863, "id": 14283}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 155.0, 67.0, 46.0], "area": 2125, "segmentation": {"size": [512, 512], "counts": 
"S5b0^?0O10000000O14L9BSOWAU1a>5000000000000O0100000000000000000O01000000000:F3M0M210000000000000O1000O10000000000000O10O1001O7I0000000O10005K:FTZn6"}, "image_id": 863, "id": 14284}, {"iscrowd": 0, "category_id": 1, "bbox": [203.0, 167.0, 71.0, 69.0], "area": 2852, "segmentation": {"size": [512, 512], "counts": "meU31o?8H9G3M0O1000008H3M00O10O100000000000O10O100000000000O1000O100000000000O10O10000000008H8H3^OeNXB\\1h=>000O100000O1000000000O100000O1007I9G8G9H9G8H8HPif3"}, "image_id": 863, "id": 14285}, {"iscrowd": 0, "category_id": 1, "bbox": [82.0, 182.0, 82.0, 69.0], "area": 3177, "segmentation": {"size": [512, 512], "counts": "hUY14l?9G9G9G9F9H0000O1000000000O1003L9H3M0000000000000O03N8H9G3M0000O1000O1000000000O100000O1000000000O100TOjANU>3TBDl=<]B[Oc=d0m000000000000O10O1000000000000Mm@YOR?h0300000000O1000O13M8H9G_i]5"}, "image_id": 863, "id": 14286}, {"iscrowd": 0, "category_id": 1, "bbox": [279.0, 203.0, 94.0, 67.0], "area": 3215, "segmentation": {"size": [512, 512], "counts": "Yg[41m?2N3M2N3M2N3M2N2N30O010O01O010O010O010O0O2M2N3N1010O010O010O010OUASOc>n0ZAUOf>j0XAXOh>Q10O010O010O010O001M2010O010O01O010O010O01M2N3M2N3M2N3M2N3O0010O000M10000000010110O01N1N3M2N3O0O2M2N3M2N3M2_OPAJ1LR?7PAJ1LQ?8PAJY?3;N3M]YU2"}, "image_id": 863, "id": 14287}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 211.0, 82.0, 40.0], "area": 2447, "segmentation": {"size": [512, 512], "counts": "V7;V?0n@8j>a0NO100000000000O01000000000000O010000000000000O10O1000000000O1000O1001O3M0O100000000000O0100000000000000O015N0M0000O10O10000000000000O010000000000000O10O7J8H9G_hf6"}, "image_id": 863, "id": 14288}, {"iscrowd": 0, "category_id": 1, "bbox": [460.0, 212.0, 52.0, 45.0], "area": 2061, "segmentation": {"size": [512, 512], "counts": "oVV79b0KT>`0aAK_>k001O0000000001O0000000001O000000000001O0000000O1000000000001O01O000000000000000001O01OmN^Ah0m>00000000000mH"}, "image_id": 863, "id": 14289}, {"iscrowd": 0, "category_id": 1, "bbox": [190.0, 250.0, 78.0, 68.0], "area": 3293, "segmentation": {"size": [512, 512], "counts": "hXo22m?6K5K5K5K5J6K5K1O000O0100000ZOiNcBW1\\=oN^BQ1c=TOXBl0h=YOSBg0m=f000O010000000O01000000JRBZNm=f170000000O0102N5K5K5K5J10O10000000O010000000O0100000000O010D<0000000O100000000O2O0000000G9Dj04O10000000O10O1000000000O01000000000O01000000000O010000000000O7J1O000000O10O10000000O10O5L6J6JVWi6"}, "image_id": 863, "id": 14291}, {"iscrowd": 0, "category_id": 1, "bbox": [101.0, 263.0, 80.0, 69.0], "area": 3238, "segmentation": {"size": [512, 512], "counts": "bib17i?9F:G1O00000YOVOXBi0a=_O_Ba0Z=FfB:V=JjB6W=IhB8X=HhB8X=IgB7Y=IgB7Y=IgB7Y=IgB6[=IeB7[=JdB6\\=JdB6\\=JcB7]=IcB7^=HbB2_O@o=>bB2d=O[B1e=O[B1d=0\\B0b=2[B1e=OVB6j=JPBi00O10000000O1001O000O10001O00000000001N102N0N2L4K5L4M4O000QOVAh0Q?00000000O10000000000000000O1000O18H8H9GdVU5"}, "image_id": 863, "id": 14292}, {"iscrowd": 0, "category_id": 1, "bbox": [446.0, 267.0, 66.0, 47.0], "area": 2287, "segmentation": {"size": [512, 512], "counts": "oXo68Z?>J7O0WAZOX>W10000001O00000000000001O0000000000eNeAQ1e>0001O000000000N2L400000000000001O0000000001O0000000000000001O0000000001O0F:1O0000000000`G"}, "image_id": 863, "id": 14293}, {"iscrowd": 0, "category_id": 1, "bbox": [272.0, 274.0, 68.0, 69.0], "area": 2773, "segmentation": {"size": [512, 512], "counts": "_YX44a?=L3N3M2O2M2O2M2N2UAnNf>W1N3N1N3M20010O0100O010O10OO2N2M2N3N1N1O0010O00010O01ON3K4L4M4K5L5O10N1O100O1O2O0O1O1O101N1O100O1L`AgNb>W14O100O1N3J5YOTA5Q?FTA5^?KQge2"}, "image_id": 863, "id": 14294}, {"iscrowd": 0, "category_id": 1, "bbox": [7.0, 309.0, 80.0, 40.0], "area": 2353, "segmentation": {"size": [512, 
512], "counts": "ii31?2n>3m@3m>2n@5j>e0LO10000000O10O100000O100000O1000O100000O5L0000O1000O10000000O1000O100000O1000O10000000O10O100000006J2N0O10O10000000O10O10000000O10O1000000000O10O15K6J7IeUd6"}, "image_id": 863, "id": 14295}, {"iscrowd": 0, "category_id": 1, "bbox": [422.0, 322.0, 90.0, 47.0], "area": 3055, "segmentation": {"size": [512, 512], "counts": "eZc6f0V?40000000000001O000000000004L0000000000000000000000001O000000000000000M30000000001OO100000000000000000000000G9O1000000000M300000000000M30000000M30000000000000000000000000001O0000mE"}, "image_id": 863, "id": 14296}, {"iscrowd": 0, "category_id": 1, "bbox": [221.0, 332.0, 74.0, 56.0], "area": 2549, "segmentation": {"size": [512, 512], "counts": "ej^34l?5K6`@Eo>`0l@Eo>k0J7J5K0000O10O100000O10O10000003L5L4L4LO1O010O1O1O10O01O3N1N1O10O01O1O010O00100O00100OK_AlNa>T1cAiN]>W15000012M2N3N1N2N3M2O2M1O010O1O3N1N2N3N1N3M2N3N1NmT\\3"}, "image_id": 863, "id": 14297}, {"iscrowd": 0, "category_id": 1, "bbox": [169.0, 333.0, 56.0, 74.0], "area": 2548, "segmentation": {"size": [512, 512], "counts": "hjd25j?7J6J5K0PA^Ob>b0^AD\\>7kAJT>6lAJT>6kAKU>5kAKU>5kAJV>8hAHW>?cAA]>Q100000O10O10000000OMaNfA`1Z>30000O104L6J6J2N0O01000000000O06K5K6J6J6I6K6J6J00000O7J5K6J6J_S_4"}, "image_id": 863, "id": 14298}, {"iscrowd": 0, "category_id": 1, "bbox": [114.0, 353.0, 55.0, 64.0], "area": 2377, "segmentation": {"size": [512, 512], "counts": "[[i15k?6J6J0VABR>>nAHl=8TBMf=4YBLh=3YBMg=6VBJj=b0jA^OV>V1O01003M1J\\NnAd1R>5000O1000O100000O1000O100000L4000O1000O1000006J6J6I7J6J1O00O10O10003M6J6J6I]S[5"}, "image_id": 863, "id": 14299}, {"iscrowd": 0, "category_id": 1, "bbox": [4.0, 354.0, 59.0, 86.0], "area": 3043, "segmentation": {"size": [512, 512], "counts": "b[24l?5K6J5K6I7J5K6J5K6J1N100000O10O10000000jNmA>T>BRB8m=IXB2h=N\\BN[=ETB=`0O\\=9eBG[=9eBF\\=:dBF\\=:dBF\\=:cBG\\=:dBF\\=`0^B_Oc=f0XBZOh=k0SBUOm=d1J7J5K3MO1000O1000O104L5K6J5K6I7J5K6J5K6J6I6K6J6JaRP7"}, "image_id": 863, "id": 14300}, {"iscrowd": 0, "category_id": 1, "bbox": [65.0, 355.0, 46.0, 70.0], "area": 2356, "segmentation": {"size": [512, 512], "counts": "`lP14l?5K6J6hN@cBd0Z=B`B>W=XO[B`088]=^OUB?93b=\\OVAe0S?O0000010O000000010O000N2L5J5L4LlQ5"}, "image_id": 863, "id": 14302}, {"iscrowd": 0, "category_id": 1, "bbox": [322.0, 455.0, 68.0, 57.0], "area": 2353, "segmentation": {"size": [512, 512], "counts": "k^Q52l?2O2N2M3N1N3N2N1N3N2M3N1O2O1000OdATOe=l0XBVOh=k0VBVOk=i0TBXOm=h0PBZOP>g0nAZOQ>h0lAZOS>g0lAZOU>f0iA[OW>f0fA\\O[>c0dA^O]>R1001O1O1O001O1O001O00O1N2O1O1N2O1N2O1O1N200001O1M3N1N3N2N2ZO^AJd>4_AJc>4_AJc>4^AJd>4_AJc>4_AId>5^AId>4^AKc>4f0M3N1O]al1"}, "image_id": 863, "id": 14303}, {"iscrowd": 0, "category_id": 1, "bbox": [412.0, 471.0, 85.0, 41.0], "area": 2046, "segmentation": {"size": [512, 512], "counts": "Z_^61m?3M2N3M2N3M2N3M2N3N110O010O010O010O010O010O010O010O010O010O010N1N6J2O2O01O001O001O001O001O001O001O00N2O11O001O001O001O0N3M201O010O01N1N30O010O001O001N1N3M2N3M2N3M2N3M2NfP7"}, "image_id": 863, "id": 14304}, {"iscrowd": 0, "category_id": 1, "bbox": [192.0, 492.0, 40.0, 20.0], "area": 571, "segmentation": {"size": [512, 512], "counts": "f_P35k?5K00000O100000000O1000000MF`@:_?400000000O100000000O100000000O100000000O12N6J5K5KR`[4"}, "image_id": 863, "id": 14305}, {"iscrowd": 0, "category_id": 1, "bbox": [262.0, 493.0, 48.0, 19.0], "area": 669, "segmentation": {"size": [512, 512], "counts": "f_S42n?5J4M0000000O100000000O100000000O100000000O100000000O100000000O100000000O100000000O1001O5K5K5KS`T3"}, "image_id": 863, "id": 14306}, {"iscrowd": 0, "category_id": 1, "bbox": [392.0, 502.0, 20.0, 10.0], "area": 
104, "segmentation": {"size": [512, 512], "counts": "n_T62m?1O1N2O1N2O11O1O001O1O001O1O1O001O1O00Q`a1"}, "image_id": 863, "id": 14307}, {"iscrowd": 0, "category_id": 1, "bbox": [125.0, 508.0, 17.0, 4.0], "area": 35, "segmentation": {"size": [512, 512], "counts": "oon11o?000000000O100000000O100000000O1T`h5"}, "image_id": 863, "id": 14308}, {"iscrowd": 0, "category_id": 1, "bbox": [416.0, 509.0, 7.0, 3.0], "area": 14, "segmentation": {"size": [512, 512], "counts": "o_`61m?200001O1O00QP\\1"}, "image_id": 863, "id": 14309}, {"iscrowd": 0, "category_id": 1, "bbox": [151.0, 510.0, 7.0, 2.0], "area": 9, "segmentation": {"size": [512, 512], "counts": "oo[21o?0000000O100R``5"}, "image_id": 863, "id": 14310}, {"iscrowd": 0, "category_id": 1, "bbox": [426.0, 511.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "o_e61P`Z1"}, "image_id": 863, "id": 14311}, {"iscrowd": 0, "category_id": 1, "bbox": [270.0, 79.0, 67.0, 54.0], "area": 2088, "segmentation": {"size": [512, 512], "counts": "ZSW43k?2M4M2M4M2M3N3L3N3L3N201O010O01O01O010O01O01O010O01O01O010N1M3N3O001O01O010O01O01O010O01O01O010O01O01ZO[AOe>O]A2c>J`A6`>HcA8\\>EgA;Z>BiA=W>@lAa0T>\\OnAd0h>0OO2L3N2M4M2M4M_\\g2"}, "image_id": 864, "id": 14312}, {"iscrowd": 0, "category_id": 1, "bbox": [350.0, 83.0, 47.0, 33.0], "area": 848, "segmentation": {"size": [512, 512], "counts": "TS_52k?3M4L3M3N3L30010O01O01O01O01O010O00010O00010O01O01O01O01O01O01O010O00010O01O01O01L3M3N3L3M3MR]i1"}, "image_id": 864, "id": 14313}, {"iscrowd": 0, "category_id": 1, "bbox": [386.0, 171.0, 70.0, 36.0], "area": 1694, "segmentation": {"size": [512, 512], "counts": "WVQ66b?8G9H9L30000001O000001O00000001O000000000H8O1YOj@d0Z?0O000000000001O0001O0000000001O0001O00000001O00000001O0001O0000000001O01O000000000001OM4G8Gbjk0"}, "image_id": 864, "id": 14314}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 13.0, 26.0], "area": 192, "segmentation": {"size": [512, 512], "counts": "0j0V?N2O1N2N2N2N2N2N2N2N2N2MS`i7"}, "image_id": 866, "id": 14315}, {"iscrowd": 0, "category_id": 1, "bbox": [37.0, 0.0, 42.0, 20.0], "area": 438, "segmentation": {"size": [512, 512], "counts": "[`b01m?2M3M4M2O101O001O00001O00001O00001O001O00001ON2M3N2M3M3M3N200001O00001O00001O00N2NR`h6"}, "image_id": 866, "id": 14316}, {"iscrowd": 0, "category_id": 1, "bbox": [73.0, 0.0, 47.0, 31.0], "area": 951, "segmentation": {"size": [512, 512], "counts": "c`T12k?3N2M4L3N3L3M4O00001O001O00001O001O00001O001O001O00N2N2M3N2M3N2001O00001O001O00M3N2M3N2N2M3NRPT6"}, "image_id": 866, "id": 14317}, {"iscrowd": 0, "category_id": 1, "bbox": [145.0, 0.0, 51.0, 18.0], "area": 481, "segmentation": {"size": [512, 512], "counts": "P`X21o?001O00001O001O00001O001O00001O001O00001O001O001O00001O001O00001O001O00001O001O00001O00N2N2M3N2N2M3NRPn4"}, "image_id": 866, "id": 14318}, {"iscrowd": 0, "category_id": 1, "bbox": [283.0, 0.0, 60.0, 68.0], "area": 2255, "segmentation": {"size": [512, 512], "counts": "fa]41m?3L3N3M2M4M2M3N3M2M4M2N3L3N2M4M2N3L3N2M4M2N3M201O00001O001O00O1N2M3N2N2M3N2N2M3N2M3N2N2M3N2M3N2N2M3N2N2M3N2M3N2N2M3N2NR`d2"}, "image_id": 866, "id": 14319}, {"iscrowd": 0, "category_id": 1, "bbox": [396.0, 0.0, 26.0, 15.0], "area": 242, "segmentation": {"size": [512, 512], "counts": "UPV61l?4M201O00001O001O001O00001O001O001O001ON2M3N2N2M3NRP]1"}, "image_id": 866, "id": 14320}, {"iscrowd": 0, "category_id": 1, "bbox": [450.0, 0.0, 53.0, 47.0], "area": 1595, "segmentation": {"size": [512, 512], "counts": 
"nPQ73k?2M3M4M2M4L3N201O00010O0010OM3N3FTO[Ao0e>QOXAR1h>4O001O00001O001O00001O00001O001O00001O0000N2M3N2@[ABh>;\\AAg>=?M3N2M3M3NR`4"}, "image_id": 866, "id": 14321}, {"iscrowd": 0, "category_id": 1, "bbox": [110.0, 6.0, 48.0, 40.0], "area": 1108, "segmentation": {"size": [512, 512], "counts": "PQg14j?2M4M2N2M4M2M4M2M3N3M2010O010O00010O010O0010O0010L3N3L3N2M4N101ON3L3O2O01O010N1N3L3N2N3L3N3L3Ndo`5"}, "image_id": 866, "id": 14322}, {"iscrowd": 0, "category_id": 1, "bbox": [146.0, 15.0, 43.0, 43.0], "area": 1079, "segmentation": {"size": [512, 512], "counts": "_QY23k?2M4M2M3N3L3N3L3N2M4L3N3M21O010O01O01O010M2N2M4M21O010O01SOQAg0T?10O010O000O2M2M4M2M4M2M3NR_Q5"}, "image_id": 866, "id": 14323}, {"iscrowd": 0, "category_id": 1, "bbox": [17.0, 30.0, 61.0, 54.0], "area": 1747, "segmentation": {"size": [512, 512], "counts": "ma82k?4M2M3N3L3N3L3N2M4M2M4M2O110O010O00010O010O00010OkNWAS1l>0O01O01O010O01O01O010O01O01O010O01O0UOTAc0k>ZOYAe0h>XOZAi0n>01O010O01OO2L3N3L3N2M4M2M4MUnh6"}, "image_id": 866, "id": 14324}, {"iscrowd": 0, "category_id": 1, "bbox": [336.0, 33.0, 80.0, 65.0], "area": 2803, "segmentation": {"size": [512, 512], "counts": "WRX52l?3L3M3N3L3M4M200010O01O0M3N3L3M3O20O01O01M2N3L3M3N3L3N2O2O010OWAAQ>?mACS>>iAFW>9fAJZ>6dAL\\>5`AO]>3aAO\\>5`AO]>k0M301O01O01O010O01O01N1M3N3L301O01O01O01O0O2L3M3N3L3M3N3L3M4M2M3M4M2M4L3N2M4L3N2Min_1"}, "image_id": 866, "id": 14325}, {"iscrowd": 0, "category_id": 1, "bbox": [411.0, 60.0, 72.0, 66.0], "area": 2681, "segmentation": {"size": [512, 512], "counts": "Wc]62l?2M3N3M2N3L3N3M2M3N3M2N3L3N30O01O01O010O010O01O01O010O010O010OO2L3N2N3L3N3M2N3M200010O010O010O01OO2M2N3M2M4M2N3L3N2N3L3N3M2N3L3N2N3L3N3M2N3L3Ni]>"}, "image_id": 866, "id": 14326}, {"iscrowd": 0, "category_id": 1, "bbox": [80.0, 70.0, 69.0, 44.0], "area": 1619, "segmentation": {"size": [512, 512], "counts": "iRX12k?3N2M4M2M4M200010O0010O0010O0010O0010O0010O0010O0001M2M3N3L3O2O01O010O01O01O010O01O01O01O01O010O01O01O0RORAk0R?O01O01O01O010ON3M2M3N3L3M4M2M3NZ]e5"}, "image_id": 866, "id": 14327}, {"iscrowd": 0, "category_id": 1, "bbox": [52.0, 84.0, 34.0, 34.0], "area": 645, "segmentation": {"size": [512, 512], "counts": "hRj04j?2N30O00g@Jh>7TAMl>2QA1o>On@4S?Lj@7U?:0010O010O01O01O01O010O00010O01O01O0N3L3N2M4L3N2MSmd6"}, "image_id": 866, "id": 14328}, {"iscrowd": 0, "category_id": 1, "bbox": [149.0, 99.0, 19.0, 23.0], "area": 253, "segmentation": {"size": [512, 512], "counts": "dcZ22l?3L3N2M4M2M4M20010O0001L3N3L3N2M4M2Mll[5"}, "image_id": 866, "id": 14329}, {"iscrowd": 0, "category_id": 1, "bbox": [491.0, 103.0, 21.0, 46.0], "area": 560, "segmentation": {"size": [512, 512], "counts": "Zde72l?3M2M4M2M3N3L3N3L3N3N10010N1N3L3N2M4O001hL"}, "image_id": 866, "id": 14330}, {"iscrowd": 0, "category_id": 1, "bbox": [73.0, 110.0, 57.0, 55.0], "area": 1741, "segmentation": {"size": [512, 512], "counts": "XdT14i?3M3N3L3M3i@^Oo>d0m@@S?f000010O010O0001L3N3L3N2M4N110O00010O010O00010O01O0kN^Am0a>PObAP1g>10O0010O0010O0nNUAo0n>10O01O01O010N1M3N3M2M4Gf@H\\?68M4Mjkn5"}, "image_id": 866, "id": 14331}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 127.0, 55.0, 51.0], "area": 1465, "segmentation": {"size": [512, 512], "counts": "o3S1m>1O010O01O01O010O01POUAi0k>UOXAk0n>010O00010O010O00010O010O0001M210O00010O010O00010O010O00010O010O0010ON2M4M2M4M2M3N3L3NW[T7"}, "image_id": 866, "id": 14332}, {"iscrowd": 0, "category_id": 1, "bbox": [228.0, 135.0, 56.0, 83.0], "area": 2343, "segmentation": {"size": [512, 512], "counts": 
"]Vb32k?4M2M3N3L3N3L3N2M4L3N3L3N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2N02M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3M4M`ka3"}, "image_id": 866, "id": 14333}, {"iscrowd": 0, "category_id": 1, "bbox": [56.0, 159.0, 72.0, 45.0], "area": 1788, "segmentation": {"size": [512, 512], "counts": "bUl04i?3M4L3M3M4L3O1010O00010O0010O00010O0l@XOQ?k000010O0010O00010O01O01O01O010O00010O0001L300010OPAROn>Q10O00010O001N10001N101N10001N101O0O101O0O2O0N2M4M2M4M2M_jo5"}, "image_id": 866, "id": 14334}, {"iscrowd": 0, "category_id": 1, "bbox": [291.0, 168.0, 49.0, 66.0], "area": 1726, "segmentation": {"size": [512, 512], "counts": "nfa42l?3L3N3L3N2M4M2J[On@i0o>5M3N3L3M4O00010M2M4M2M3N3L3N3M2001O0M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3Naje2"}, "image_id": 866, "id": 14335}, {"iscrowd": 0, "category_id": 1, "bbox": [362.0, 190.0, 49.0, 70.0], "area": 2067, "segmentation": {"size": [512, 512], "counts": "`We52k?3N3L3M3N3d@BQ?a0l@BR?g0M4L3N3L3M3N3L3N3L3M3N3L300010O01O01O010O00010O0O1M4M2M4M2M3M4M2M4A[A^Oh>`0ZA^Oh>?[A^Oi>?=M3M4M2M4LjYb1"}, "image_id": 866, "id": 14336}, {"iscrowd": 0, "category_id": 1, "bbox": [412.0, 215.0, 62.0, 83.0], "area": 2618, "segmentation": {"size": [512, 512], "counts": "jX^62k?4M2N3M2N3L3N3M2N2M4M2N3M2N3L3N3M2N2N3L3N3M2N3L3N3M2N3M2M3N30N1N3M2N3M2M3N3M2N3L3N3M2N3M2M3N3M2N3M2M4M2N3M2N3L3N2N3M2M4M2Nmhb0"}, "image_id": 866, "id": 14337}, {"iscrowd": 0, "category_id": 1, "bbox": [301.0, 240.0, 27.0, 27.0], "area": 423, "segmentation": {"size": [512, 512], "counts": "Phf42l?3M2M4M2N3M2N3N1010O0010O010O0010O010M2N3L3N3M2N3M2M[hk2"}, "image_id": 866, "id": 14338}, {"iscrowd": 0, "category_id": 1, "bbox": [476.0, 245.0, 36.0, 63.0], "area": 1184, "segmentation": {"size": [512, 512], "counts": "SY^71m?2N3M2N3M2N3M2M4M2010O01O01O010O01M2N3M2M4M2N3M2N3M2N3M2M3N3M210O010O0YH"}, "image_id": 866, "id": 14339}, {"iscrowd": 0, "category_id": 1, "bbox": [200.0, 246.0, 55.0, 44.0], "area": 1325, "segmentation": {"size": [512, 512], "counts": "]XT32l?2M4M2N3L3N2N3L3N3M2010O00010O010O010O00010O010O010O00010O010O01O01O010O010O01O01O010O010M2N2M4M2N3L3N3M2M3NjWP4"}, "image_id": 866, "id": 14340}, {"iscrowd": 0, "category_id": 1, "bbox": [285.0, 260.0, 29.0, 36.0], "area": 582, "segmentation": {"size": [512, 512], "counts": "ih^41l?3N3M2M3N3L3N3k@\\Oi>g0UA[Oj>f0SA]Om>l0O010O01O01O01N1N3L3N3L3N2N3L3N3L3N2MhgR3"}, "image_id": 866, "id": 14341}, {"iscrowd": 0, "category_id": 1, "bbox": [370.0, 269.0, 20.0, 13.0], "area": 156, "segmentation": {"size": [512, 512], "counts": "aXi53k?3O000010OO2O00001M2010O0010O00O2M2M4Magl1"}, "image_id": 866, "id": 14342}, {"iscrowd": 0, "category_id": 1, "bbox": [5.0, 272.0, 73.0, 63.0], "area": 2042, "segmentation": {"size": [512, 512], "counts": "`i22l?2N3M2N3L3N3M2N3M2N3M2M4M2N2N3O0010O01O010O010O01O010O010O0oN[Ah0e>UO]Ak0c>SO_An0j>O010O010O0010O010O010O010O0010O0010O01XOPA?o>_OTA`0m>]OUAd0j>ZOYAe0Q?1O010O010O01O010O0M4M2N3M2N3M2N3MUfh6"}, "image_id": 866, "id": 14343}, {"iscrowd": 0, "category_id": 1, "bbox": [448.0, 289.0, 28.0, 29.0], "area": 494, "segmentation": {"size": [512, 512], "counts": "cYP72l?2M4M2N3M2N3L3N201O010O010O010O00010O010O01M2N3Bc@4`?Jc@3f?N3Mbfa0"}, "image_id": 866, "id": 14344}, {"iscrowd": 0, "category_id": 1, "bbox": [315.0, 291.0, 37.0, 28.0], "area": 696, "segmentation": {"size": [512, 512], "counts": "[im43l?5L4L4L4K2O00O10O1000O10O1000O10O1000O0100000O0100000O0100000O3N4L4L4K5L[f_2"}, "image_id": 866, "id": 14345}, {"iscrowd": 0, "category_id": 1, "bbox": [183.0, 292.0, 63.0, 42.0], "area": 1663, "segmentation": {"size": [512, 512], "counts": 
"oik22j?4L5J5L4K5L4O2O01O0001O01O000001O01O0001O01O0001O0001O0001O01O0001O01O000001O01O0001O01O0001O0001O0001OYORA_OUAa0U?010O0000O1L5K4K5LYfT4"}, "image_id": 866, "id": 14346}, {"iscrowd": 0, "category_id": 1, "bbox": [360.0, 305.0, 60.0, 60.0], "area": 2130, "segmentation": {"size": [512, 512], "counts": "QZd53m?3M3L4M3MO02O3M3L4M3M3L4M3M2M4M3M0O01000O01000O01000O01000O01000O01000O01000OFeAPO\\>P1gAmNY>S1jAjNU>V1;000O010O10O4M2N3L4M3M3L4M3M3L4M3M_e]1"}, "image_id": 866, "id": 14347}, {"iscrowd": 0, "category_id": 1, "bbox": [265.0, 308.0, 32.0, 40.0], "area": 736, "segmentation": {"size": [512, 512], "counts": "ajT43k?3M2M4M2M3N3M2M4M2M3N3M2O2O010O00010ON3M2N2M4M2M4M2N3L3N2M4M2NWV[3"}, "image_id": 866, "id": 14348}, {"iscrowd": 0, "category_id": 1, "bbox": [287.0, 329.0, 70.0, 61.0], "area": 2235, "segmentation": {"size": [512, 512], "counts": "Sk_42l?3N2M2O2M3N1N3N2M2O2N110O10O10O10O10O10O10O10O10O10OHXOXAi0f>YOWAi0g>ZOWAh0g>:M2N3N2N10100O010N2M2O2O00100O01000O01000OO2N2M2O2M3_O`A_Oa>`0aA]Ob>`0`A_Oa>`0aA]Ob>`0`A^Oc>`0_A^Ob>`0a0N2M2N3N2M2O2MXU]2"}, "image_id": 866, "id": 14349}, {"iscrowd": 0, "category_id": 1, "bbox": [509.0, 329.0, 3.0, 7.0], "area": 12, "segmentation": {"size": [512, 512], "counts": "^jn71l?3N3fE"}, "image_id": 866, "id": 14350}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 333.0, 42.0, 49.0], "area": 1315, "segmentation": {"size": [512, 512], "counts": "h:i0U?3M2M4M2N2010O010O010O00010O010O010O00010O010O010O00010mNWAm0i>QOZAo0k>01L3N3M2M3N3M2M4M2N3L3NPeZ7"}, "image_id": 866, "id": 14351}, {"iscrowd": 0, "category_id": 1, "bbox": [499.0, 340.0, 13.0, 16.0], "area": 146, "segmentation": {"size": [512, 512], "counts": "oji73j?3M3M4M200010O00001L3M3M]E"}, "image_id": 866, "id": 14352}, {"iscrowd": 0, "category_id": 1, "bbox": [163.0, 341.0, 87.0, 61.0], "area": 2510, "segmentation": {"size": [512, 512], "counts": "`ka21m?2N3L3N2M4M2N3L3N3L3N2O2O010O01O01O010O010O01O01O010O010O00010O010O01O01O010O010O01O01O010O01O01O010O010O01O01O010O010O00010O010O01O01O010O010O00010O010O01[ORA6n>GUA:j>DYA;h>BZA=g>@]A=T?M3N3M2MQdR4"}, "image_id": 866, "id": 14353}, {"iscrowd": 0, "category_id": 1, "bbox": [442.0, 353.0, 59.0, 63.0], "area": 1990, "segmentation": {"size": [512, 512], "counts": "]\\m63k?3M2N3[@F`??M2M4M2O20O01O001M2N3M2M3N3M2N3M2M4M2N3M2N3O01O01OO2L3N3M2N3M2N3L3N3M2O02M2N3M11N3M2N2M4M2N3M2N3L3N3M2N3M2M3NkT5"}, "image_id": 866, "id": 14354}, {"iscrowd": 0, "category_id": 1, "bbox": [143.0, 389.0, 53.0, 40.0], "area": 1454, "segmentation": {"size": [512, 512], "counts": "olW24g?5L4K6K4K5M30010O0000010O000000010O00000010O00000010O000000010O0000010O000000010O00000010O000K5L5J5L4K5Kfcm4"}, "image_id": 866, "id": 14355}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 390.0, 25.0, 48.0], "area": 667, "segmentation": {"size": [512, 512], "counts": "V0O00nNZAj0g>SO\\Am0c>QO_Ao0i>10O010O0010OZOSA9m>EUAAWA?i>^O[Ab0Q?01O001L3N2N3L3N3MmRc7"}, "image_id": 866, "id": 14356}, {"iscrowd": 0, "category_id": 1, "bbox": [272.0, 405.0, 60.0, 45.0], "area": 1828, "segmentation": {"size": [512, 512], "counts": "S]X41o?4L4FJa@;Z?:L4L3M4K4M000O0100000O0100000O01000O10O4M2N0O10O1000O0100000O0100000O010000O0100000O01000O12M4M3M3L4M3M4L3L4M3M]bi2"}, "image_id": 866, "id": 14357}, {"iscrowd": 0, "category_id": 1, "bbox": [227.0, 407.0, 27.0, 26.0], "area": 424, "segmentation": {"size": [512, 512], "counts": "Yma31l?4M2M3N3L3N3L30010O0010O0010O0010O001O0M3N3L3M3N3L3NTcP4"}, "image_id": 866, "id": 14358}, {"iscrowd": 0, "category_id": 1, "bbox": [505.0, 412.0, 7.0, 17.0], "area": 63, "segmentation": 
{"size": [512, 512], "counts": "Zml71m?2M3N3L3N3M2TC"}, "image_id": 866, "id": 14359}, {"iscrowd": 0, "category_id": 1, "bbox": [123.0, 436.0, 60.0, 49.0], "area": 1549, "segmentation": {"size": [512, 512], "counts": "Xnm12m?2N2N2N2N2N2N2N2M3N2N1O2N2N2N2N2N1O1O2O010O01O100O100O1O100O100O1O010O010O00010O010O00010O010O000O2M2M4M2M3N3L3N3L3N2MiQT5"}, "image_id": 866, "id": 14360}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 441.0, 19.0, 27.0], "area": 356, "segmentation": {"size": [512, 512], "counts": "R>a0_?NO3M4M2O20O00010O001O0M3M4M2M3M4M2MURf7"}, "image_id": 866, "id": 14361}, {"iscrowd": 0, "category_id": 1, "bbox": [202.0, 450.0, 30.0, 40.0], "area": 741, "segmentation": {"size": [512, 512], "counts": "f^U32;OU?3i@OT?4i@OU?4h@OT?`0N2M4L3N3O01O010O01O01O01O01M2M4M2M3N3L3N3L3N2M4L3Nha[4"}, "image_id": 866, "id": 14362}, {"iscrowd": 0, "category_id": 1, "bbox": [98.0, 470.0, 54.0, 42.0], "area": 1392, "segmentation": {"size": [512, 512], "counts": "k_a12l?2M4M2M3N3L3M4M2M3N2M3O100O1M3N2N21O001O00001O001O00001O0ROXAd0i>YOYAg0g>VO\\Aj0n>O001O00001O00001O001O00001O00001M2M4L3M3N3Le`c5"}, "image_id": 866, "id": 14363}, {"iscrowd": 0, "category_id": 1, "bbox": [272.0, 474.0, 60.0, 38.0], "area": 1345, "segmentation": {"size": [512, 512], "counts": "f_X41n?3M2O1N3M2O2M1O100O1O1O100O1O100O1O1O100O1O100O1O100O1O1O100O1O100O1O1O100O1O100001O2N1N2N3N1N3M2N2O2M2N3N1N3M2N2O2M2N^`i2"}, "image_id": 866, "id": 14364}, {"iscrowd": 0, "category_id": 1, "bbox": [430.0, 489.0, 34.0, 23.0], "area": 451, "segmentation": {"size": [512, 512], "counts": "o_g61l?3M3M3M3M3M3M3O100001O00001O00001O00001Ee@O[?Nh@2X?Kl@4`?000001O00001O00001O0NV`g0"}, "image_id": 866, "id": 14365}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 500.0, 12.0, 12.0], "area": 111, "segmentation": {"size": [512, 512], "counts": "d?2N2N2N2N2N2O1N3M2N1001O\\BPNY=o1fBSN[=k1dBWN\\=h1cB[N\\=e1bB]N^=c1aB^N^=c1`B_N^=S2N2O1O101O00000001O000001O01OO1OZNgBf0X=POhBB2]1V=POjBA2]1U=QOkB@2\\1U=TOjB^O3\\1T=UOVCh0mN2N2N2N2N2N2N2N3M2N2N2N2N2N2N2NU]Q7"}, "image_id": 867, "id": 14368}, {"iscrowd": 0, "category_id": 1, "bbox": [96.0, 83.0, 13.0, 13.0], "area": 128, "segmentation": {"size": [512, 512], "counts": "hR`13k?4M3M101N100001O0O2O1N2M[]Y6"}, "image_id": 867, "id": 14369}, {"iscrowd": 0, "category_id": 1, "bbox": [169.0, 169.0, 35.0, 35.0], "area": 627, "segmentation": {"size": [512, 512], "counts": "ked22m?3M2O1N2N3M2N2N2O1N3M2N2N001O01O00000000010O2N2N2N2N3M2O1N2N3M2N2N2NZji4"}, "image_id": 867, "id": 14370}, {"iscrowd": 0, "category_id": 1, "bbox": [448.0, 447.0, 52.0, 65.0], "area": 2601, "segmentation": {"size": [512, 512], "counts": "l_P74f?6J6K5J6J6J6K5J6J6J6M31O000000001O00000000001O00000000001O00000000001O000000001O000O1K5J6J7I6K5J6J6K5JPb5"}, "image_id": 867, "id": 14371}, {"iscrowd": 0, "category_id": 1, "bbox": [501.0, 492.0, 11.0, 20.0], "area": 174, "segmentation": {"size": [512, 512], "counts": "ooj71j?5J6J6N20000001O0000"}, "image_id": 867, "id": 14372}, {"iscrowd": 0, "category_id": 1, "bbox": [250.0, 0.0, 103.0, 42.0], "area": 1977, "segmentation": {"size": [512, 512], "counts": "RPm32l?201O001O00001O001O003M00001O001ON21O00001O001O00001O001O00001O001O001O00001O001O00001O001O001O01O010O01O01O010O010O00010O010ON3N11O01O010O01O01k@VOS?k001O010O01O000O20O01ON30O010O00010O010M2N2M4M2M4M2N201O0M4M`__2"}, "image_id": 868, "id": 14373}, {"iscrowd": 0, "category_id": 1, "bbox": [469.0, 0.0, 43.0, 38.0], "area": 1328, "segmentation": {"size": [512, 512], "counts": 
"c`Z73m?5]OMo@8l>Mn@9m>>O0O010000000O010000000O010000000O010000000O10O100000000O100000000O100000000O1"}, "image_id": 868, "id": 14374}, {"iscrowd": 0, "category_id": 1, "bbox": [216.0, 20.0, 104.0, 75.0], "area": 3484, "segmentation": {"size": [512, 512], "counts": "kQ\\32k?4M2M4M2N3L3N2N3L3N3M2M3N3N101N1M4M2O110O0010O010O0010O0010O0010O00O2O010O001fNbAQ1]>mNeAS1[>kNgAV1b>O0WOXA:i>CYA>f>@]A?d>]O_Ad0`>ZOcAe0DZOV?h00010O010O01O01O0M4M2N3L3N201O010O0010O0010O0010O0010O010O0010O0010O01M2N2N3L3N3M2M40M2M4N11N1N3M2M4M2N2M4M2N3LYno2"}, "image_id": 868, "id": 14375}, {"iscrowd": 0, "category_id": 1, "bbox": [134.0, 43.0, 58.0, 59.0], "area": 1871, "segmentation": {"size": [512, 512], "counts": "]RS22m?2N2N3N1N2DFo@=n>EPA=o>DPA=n>EPA=n>=M2O1N3M2N2N3N1N2N1O01O01O0001O01O00000100O3M2N2N2OO0001O01O000001E\\AZOe>e0\\AYOf>e0\\AYOf>e0]AXOf>e0[1O010O01O01O010O010O00010O010O0001N1M4M2N2M40O01O0M4M2O1010O01O01O010O01O01ON3M2M4M2M3N3L3N3L3N2M4M2Mnl\\6"}, "image_id": 868, "id": 14377}, {"iscrowd": 0, "category_id": 1, "bbox": [194.0, 62.0, 26.0, 28.0], "area": 425, "segmentation": {"size": [512, 512], "counts": "bRQ31m?2M4M2N3L3N2N3L3O2O01O010O01O01O010M2M3N3L3N3L3N2Moma4"}, "image_id": 868, "id": 14378}, {"iscrowd": 0, "category_id": 1, "bbox": [437.0, 70.0, 57.0, 55.0], "area": 1762, "segmentation": {"size": [512, 512], "counts": "Zcj61l?3N3L3N3M2M3N3L3N3M2N30OO1N6I3N2M4O0001M210O00010O010O00010O010O010O00010mN^Af0b>XOaAh0_>TOdAl0\\>ROfAn0g>0O010O0001M2N3M2M4M2M3N3L3N3M2MQm8"}, "image_id": 868, "id": 14379}, {"iscrowd": 0, "category_id": 1, "bbox": [200.0, 90.0, 63.0, 57.0], "area": 1994, "segmentation": {"size": [512, 512], "counts": "mST33k?3L3N2M4M2M4M2M3N3L3M4M2M3N30O01O01O010O00010O010O00010O010jN]Am0c>QO`An0`>oNcAR1e>010O01O01O010O01O01O0N3M2O1010O0O2M2M3M4M2M4M2M3N3L3N3L3N2Md\\l3"}, "image_id": 868, "id": 14380}, {"iscrowd": 0, "category_id": 1, "bbox": [508.0, 91.0, 4.0, 13.0], "area": 33, "segmentation": {"size": [512, 512], "counts": "SSn74i?3N2M4TM"}, "image_id": 868, "id": 14381}, {"iscrowd": 0, "category_id": 1, "bbox": [112.0, 95.0, 28.0, 22.0], "area": 342, "segmentation": {"size": [512, 512], "counts": "YSh12k?4M2N3L301O01O010O01O010O01O010O01O010O01O010M2N2N3L3Njli5"}, "image_id": 868, "id": 14382}, {"iscrowd": 0, "category_id": 1, "bbox": [370.0, 129.0, 24.0, 24.0], "area": 281, "segmentation": {"size": [512, 512], "counts": "_Ti51n?2N2N1O2N2N2N2N2N2N2M10O101O2N2N1O2N2N2N2N2N2Nhkj1"}, "image_id": 868, "id": 14383}, {"iscrowd": 0, "category_id": 1, "bbox": [411.0, 135.0, 52.0, 33.0], "area": 1463, "segmentation": {"size": [512, 512], "counts": "fd]6`0V?:01O000000000000000000000000000000000001O000000000000000000J60001O00000000000000000000000000000>B000O\\[h0"}, "image_id": 868, "id": 14384}, {"iscrowd": 0, "category_id": 1, "bbox": [95.0, 142.0, 54.0, 63.0], "area": 1602, "segmentation": {"size": [512, 512], "counts": "ie_11n?2N2O1N2N2N2N2N3M2N2N2N2N2[OWOPBk0n=WOPBk0n=WOPBk0o=VOoAl0o=VOoAl0o=VOoAk0P>WOnAi0R>YOlAg0T>[OjAe0V>d0N2N2N10O2FnAcNT>[1nAcNT>[1nAcNT>[1nAcNU>Z1:N2N2N2N2N2O1N2N2N2N2N2N3M2N2N2N2N2N2N2O1NgZe5"}, "image_id": 868, "id": 14385}, {"iscrowd": 0, "category_id": 1, "bbox": [159.0, 144.0, 70.0, 86.0], "area": 2418, "segmentation": {"size": [512, 512], "counts": "We_22k?3N2N3L3N3M2M3N3iA[OnfBDZ=ZO^Af0b>XO`Ai0_>TOeAk0h>010O010O0010O0010O0010O010O0010O0010O0O2M2N3L3N2M4M2N3L3N3MeY]4"}, "image_id": 868, "id": 14386}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 148.0, 73.0, 84.0], "area": 3957, "segmentation": {"size": [512, 512], "counts": 
"\\6G^A6e>F^A8d>F^A7f>E^A8d>F^A7X?M2MbZk6"}, "image_id": 868, "id": 14387}, {"iscrowd": 0, "category_id": 1, "bbox": [459.0, 154.0, 53.0, 45.0], "area": 1558, "segmentation": {"size": [512, 512], "counts": "jeU72l?3L3N2M4M2M4M2O101O001O00001O001O00001O001O00001O00NO10O03N3L3O2O01O01O010O01O01O010O01O01O010O01O2L2M3NoJ"}, "image_id": 868, "id": 14388}, {"iscrowd": 0, "category_id": 1, "bbox": [359.0, 172.0, 63.0, 47.0], "area": 1831, "segmentation": {"size": [512, 512], "counts": "Wfc53j?3M3M4L3M3M4L3M3M4O000010O00010O00010O00010O00010O00010O00010O00010OO1M40O00010O00010O00010O0010O00010OM3M4L3M3M4L3M3M4L3M3MZj\\1"}, "image_id": 868, "id": 14389}, {"iscrowd": 0, "category_id": 1, "bbox": [482.0, 201.0, 27.0, 28.0], "area": 486, "segmentation": {"size": [512, 512], "counts": "lVa72k?4M2M3N3L3M3N3N101O01O01O010O01O01O01O0N2M4L3N3L3N2MdY1"}, "image_id": 868, "id": 14390}, {"iscrowd": 0, "category_id": 1, "bbox": [136.0, 204.0, 28.0, 37.0], "area": 621, "segmentation": {"size": [512, 512], "counts": "oVT23k?2[@NZ?4d@OX?>N2M4M2N3L310O01O010O01O01M2M4M2N2M4O0M4M2N3L3N2M_i]5"}, "image_id": 868, "id": 14391}, {"iscrowd": 0, "category_id": 1, "bbox": [62.0, 209.0, 31.0, 36.0], "area": 661, "segmentation": {"size": [512, 512], "counts": "[Wo02l?2N3L3N3L3N3M2M3N3M2M4O0010O00010O010O0O2L3N2M4M2M4M2M3N3M2M[Ya6"}, "image_id": 868, "id": 14392}, {"iscrowd": 0, "category_id": 1, "bbox": [338.0, 220.0, 56.0, 46.0], "area": 1645, "segmentation": {"size": [512, 512], "counts": "eWY53k?2Z@L^?7_@K_?=M2N2M4M2N3M1O001O201O010O010O010O01O010O01O001M2N3N10010O010O0010O010O010O0O2M2N3M2M3N3M2N3M2M4M2N3M2N3Lkhj1"}, "image_id": 868, "id": 14393}, {"iscrowd": 0, "category_id": 1, "bbox": [476.0, 222.0, 22.0, 17.0], "area": 198, "segmentation": {"size": [512, 512], "counts": "UW^73k?2M3N30O01O010O01O010O01O01O010O01OO2M2M4Mlh6"}, "image_id": 868, "id": 14394}, {"iscrowd": 0, "category_id": 1, "bbox": [317.0, 224.0, 21.0, 22.0], "area": 274, "segmentation": {"size": [512, 512], "counts": "`gn41m?3M2M4M2N2M4M201O010O01O000N3M2M4M2M3N3Mmhf2"}, "image_id": 868, "id": 14395}, {"iscrowd": 0, "category_id": 1, "bbox": [505.0, 224.0, 7.0, 19.0], "area": 75, "segmentation": {"size": [512, 512], "counts": "^gl72l?3L3N3L3N2N3oH"}, "image_id": 868, "id": 14396}, {"iscrowd": 0, "category_id": 1, "bbox": [86.0, 232.0, 35.0, 26.0], "area": 560, "segmentation": {"size": [512, 512], "counts": "eW[13k?2M3M4M2M301O0010O0010O0010O00O2M21O01O010O00010O01\\Og@`0]?O001N1M3N3L3N2M`XS6"}, "image_id": 868, "id": 14397}, {"iscrowd": 0, "category_id": 1, "bbox": [148.0, 234.0, 59.0, 61.0], "area": 1939, "segmentation": {"size": [512, 512], "counts": "`XZ23k?2N3L3N3M2M4M2N3L3N2N3L3N3M2M4M2O1010O010O01O01O010O010O01OfNbAR1_>kNdAT1\\>jNfAW1b>O01O010O01O010O010O0mNZAm0e>QO^An0j>010OO2M2N2M4M2N3L3N3M2M4M2NgWh4"}, "image_id": 868, "id": 14398}, {"iscrowd": 0, "category_id": 1, "bbox": [298.0, 257.0, 67.0, 80.0], "area": 2603, "segmentation": {"size": [512, 512], "counts": "`Ye43k?2M3N3L3N3L3N2M4M2M4M2M3O2cAmNl=R1RBPOn=Q1nAROR>n0lAUOT>j0iAYOW>Y10O00010O010O00010O010O00010OO2L3NO010O0010O010O03N2M4M2M4M2M3N3L3N3L3N2O20O01O0N2N3L3N3L3N2M4M2MiWY2"}, "image_id": 868, "id": 14399}, {"iscrowd": 0, "category_id": 1, "bbox": [403.0, 257.0, 69.0, 44.0], "area": 1795, "segmentation": {"size": [512, 512], "counts": "fhY63j?3N3M2M3N3M2N3M210O0O101O010O010O0001N1N3O0010O001M2N3L3O2O01O010O01O010O01O010O01O010O001M2M310O0010O010O0010O010OM3ROTAf0o>VOUAg0S?M4M2N2N3L3N3M2Mbgc0"}, "image_id": 868, "id": 14400}, {"iscrowd": 0, "category_id": 1, "bbox": [507.0, 258.0, 5.0, 
13.0], "area": 35, "segmentation": {"size": [512, 512], "counts": "\\hm71l?3N3L3N3mG"}, "image_id": 868, "id": 14401}, {"iscrowd": 0, "category_id": 1, "bbox": [33.0, 271.0, 45.0, 35.0], "area": 815, "segmentation": {"size": [512, 512], "counts": "Pi`02l?2M4M2M4M2M4M21O010O010O0010O0010O010O0010O0010O010O0010O0010O0010O010O0010N1N3L3N3L3N2MTgh6"}, "image_id": 868, "id": 14402}, {"iscrowd": 0, "category_id": 1, "bbox": [352.0, 291.0, 64.0, 81.0], "area": 2861, "segmentation": {"size": [512, 512], "counts": "T[`52l?3L3O20ODIo@8m>KTA4j>OUA2g>1YAOe>4[ALa>7_AI_>9bAGZ>=eACY>?hAAT>c0kA\\OT>f0mAYOP>j0nAXOo=l0mAXOP>j0nAXOo=_1M2M3N3L3N3O00010O010O00010O010O00N3M2M2O0O2O2M4M2M4M2M3N3L3M4M2M3N3L3N3L3N2M4M2M4L3N2M4Mef_1"}, "image_id": 868, "id": 14403}, {"iscrowd": 0, "category_id": 1, "bbox": [9.0, 312.0, 61.0, 58.0], "area": 1836, "segmentation": {"size": [512, 512], "counts": "fj43j?3N3M2M4M2N210O010O010O00010O0O2L3N2M4M2M4O0010O0010O001O0M4M2N2O20O01_AcN^>_1010O01O01O010O010WObAN^>0dA1[>MhA2Y>JjA6V>HmA8S>EoA;Q>BSB>m=_OUBa0k=]OWBa0k=\\OYBa0d>N3L3N3M2M3NTel6"}, "image_id": 868, "id": 14404}, {"iscrowd": 0, "category_id": 1, "bbox": [494.0, 325.0, 18.0, 44.0], "area": 473, "segmentation": {"size": [512, 512], "counts": "X[g72k?4M2M4M2M3N3L3N3L3N2M4M2M4M20010O0jE"}, "image_id": 868, "id": 14405}, {"iscrowd": 0, "category_id": 1, "bbox": [426.0, 334.0, 63.0, 86.0], "area": 3342, "segmentation": {"size": [512, 512], "counts": "d\\e62l?2]ONUA6h>MUA5h>NUA6h>MTA6i>MUA5i>c0L3N2M4L3N3L3N2M4M2M4L3N2M4M2O20O00010O0010O0010O00010O010N1M3N2M010O3N3L3KRBUNQ>h16O000EkAlNU>Q1nAlNT>Q1oAlNU>Q1>M3N3L3N2O2O001Eh@MW?0l@MW?1k@MX?OgU;"}, "image_id": 868, "id": 14406}, {"iscrowd": 0, "category_id": 1, "bbox": [67.0, 346.0, 70.0, 46.0], "area": 1788, "segmentation": {"size": [512, 512], "counts": "^kQ13j?3N2N3L3N3M2N3O01O010O01O010O01O01O0N3L3N3N11O010O010O01O01O010O010O00010O010O010O00010O010O010O0001N1N3O001O01O0O2M2N3L3N2M4M2N3L3N3M2M3NiTk5"}, "image_id": 868, "id": 14407}, {"iscrowd": 0, "category_id": 1, "bbox": [487.0, 372.0, 25.0, 28.0], "area": 461, "segmentation": {"size": [512, 512], "counts": "Ulc72l?3M2M3N3L3N3L30010O0010O0010O0010O010O00010N1N3LXD"}, "image_id": 868, "id": 14408}, {"iscrowd": 0, "category_id": 1, "bbox": [66.0, 379.0, 48.0, 57.0], "area": 1553, "segmentation": {"size": [512, 512], "counts": "Z\\Q12k?4j@K]>7aAK\\>8aAK\\>9bAI[>9fAFX>=gADY>;hADX>=gAFW>9jAFV>:jAFV>;iAFW>9fAJZ>7cAL]>3`A0`>0^A2b>g00O0010O0010O00010O010O00010O010O00010O010YO]AOc>NaA1_>McA4]>HgA7Y>FjA7Z>FhA8Z>EjA7Y>GiA6S?M2MPcV6"}, "image_id": 868, "id": 14409}, {"iscrowd": 0, "category_id": 1, "bbox": [258.0, 383.0, 55.0, 53.0], "area": 1499, "segmentation": {"size": [512, 512], "counts": "o\\Q42l?2N3L3N3M2M4M2N2N3L3N3M2M4M200010O010O01O010O01O010O0SOYAb0f>[O]Ae0d>XO^Ai0a>UObAj0j>010O010O0010O0010O010O0010O0N3M2N3L3N3M2N2M4M2NWSS3"}, "image_id": 868, "id": 14410}, {"iscrowd": 0, "category_id": 1, "bbox": [17.0, 389.0, 36.0, 29.0], "area": 622, "segmentation": {"size": [512, 512], "counts": "fl81l?4M2M3N3M2M4M20001O010O01O01O010O01O01O01O010O010O00010O0O2M2M3M4M2M4MbSU7"}, "image_id": 868, "id": 14411}, {"iscrowd": 0, "category_id": 1, "bbox": [312.0, 389.0, 27.0, 27.0], "area": 418, "segmentation": {"size": [512, 512], "counts": "f\\l41m?3M2N3L3N3M2N2N30O010O010O010O0010O0010M2N3M2M4M2N3MdSf2"}, "image_id": 868, "id": 14412}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 416.0, 40.0, 49.0], "area": 1142, "segmentation": {"size": [512, 512], "counts": 
"P=o0Q?01O01O01O01M21O01O010O01OUASOc>n0ZATOf>T10O00010hNZAT1i>010O00TOXA`0i>\\OZAe0e>XO^Ah0n>0O00010O0001M2M4L3N2M4L3M3NXb[7"}, "image_id": 868, "id": 14413}, {"iscrowd": 0, "category_id": 1, "bbox": [430.0, 418.0, 30.0, 25.0], "area": 418, "segmentation": {"size": [512, 512], "counts": "`]g61m?2M3N3M2N3L3010O010O0010O0010O010O010O0010O0010O0N3L3N3M2Nfbi0"}, "image_id": 868, "id": 14414}, {"iscrowd": 0, "category_id": 1, "bbox": [457.0, 433.0, 27.0, 25.0], "area": 394, "segmentation": {"size": [512, 512], "counts": "omT73k?2N3L3N3M2M3010O01O010O01O010O01O010O01M2M4M2N2M4M2NYb="}, "image_id": 868, "id": 14415}, {"iscrowd": 0, "category_id": 1, "bbox": [393.0, 439.0, 57.0, 59.0], "area": 1661, "segmentation": {"size": [512, 512], "counts": "knT62k?3N3M2M4M2N3L3O1010O010O01m@ZOk>e0SA^Ol>b0QAAo>i00O0010O010O0010O0010L3N3M2M2O000O102N2M4M2000M4M2N3L3N2N3L3N3M2M4M2N2M4M2N3L3N3MSbn0"}, "image_id": 868, "id": 14416}, {"iscrowd": 0, "category_id": 1, "bbox": [250.0, 443.0, 58.0, 42.0], "area": 1478, "segmentation": {"size": [512, 512], "counts": "d^m31l?3M3M4L3N2M4L3M4M200010O00010O00010O0010O0010O00010O00010O00010O010O00010O0001L3M301O01O01O01L3M4M2M3M4L3M3N3L3M3MoaU3"}, "image_id": 868, "id": 14417}, {"iscrowd": 0, "category_id": 1, "bbox": [55.0, 446.0, 59.0, 46.0], "area": 1553, "segmentation": {"size": [512, 512], "counts": "gnk03j?3N3L3N2M4M2M4L3N2N3O010O01O01O01O01O010O01O01O010O01O01O010O01O01O010O01O01O01O01O010O0N2O2ON3L3N3L3N2M4M2M4M2M3M4MeaV6"}, "image_id": 868, "id": 14418}, {"iscrowd": 0, "category_id": 1, "bbox": [322.0, 451.0, 34.0, 37.0], "area": 671, "segmentation": {"size": [512, 512], "counts": "_^Q52l?2N3M2M3N3N1010O01O0l@Bg>>VADj>9QAIo>d0010O010O00010O010O0O1M4M2M4M2M3N3L3N3Mda]2"}, "image_id": 868, "id": 14419}, {"iscrowd": 0, "category_id": 1, "bbox": [12.0, 469.0, 18.0, 16.0], "area": 179, "segmentation": {"size": [512, 512], "counts": "o^62k?3N2M4M20001O010O01O01O01N1M4M2M3NYa`7"}, "image_id": 868, "id": 14420}, {"iscrowd": 0, "category_id": 1, "bbox": [501.0, 470.0, 11.0, 28.0], "area": 153, "segmentation": {"size": [512, 512], "counts": "]oj71m?2N2N3L3N3M2M4M2N3L3ZA"}, "image_id": 868, "id": 14421}, {"iscrowd": 0, "category_id": 1, "bbox": [226.0, 475.0, 92.0, 37.0], "area": 1656, "segmentation": {"size": [512, 512], "counts": "d_a32k?4M2N3M2M4M2N3L3N2N3M201O0010O0010O010O010O0001O001O001ROo@k0S?01O001O00001O001O001O001O00001O001O001O00001O001O001O001O00001O001O001O00001O001O001O001O00001O001O001O00001O001O001O001O0000Q`P3"}, "image_id": 868, "id": 14422}, {"iscrowd": 0, "category_id": 1, "bbox": [433.0, 477.0, 30.0, 31.0], "area": 575, "segmentation": {"size": [512, 512], "counts": "boh63j?3N3L3N2M4M2M4M2O1010O01O01O010O01O01O001M2M3N3L3N3L3N2M4MmPh0"}, "image_id": 868, "id": 14423}, {"iscrowd": 0, "category_id": 1, "bbox": [27.0, 487.0, 75.0, 25.0], "area": 1130, "segmentation": {"size": [512, 512], "counts": "no=2l?2M3N2M3N2M3N2M3N2O1001O00001O001O00001O001O00001O001O00001O001O000Ad@:a?0001O001O00001OO1N2M3N2M3001O001O00001O001O00001O001O00001O001O00001O001O0N2M4MZ`\\6"}, "image_id": 868, "id": 14424}, {"iscrowd": 0, "category_id": 1, "bbox": [474.0, 501.0, 31.0, 11.0], "area": 186, "segmentation": {"size": [512, 512], "counts": "n_]72l?2M3N2N200001O001O00001O001O00001O001O001O00001O001O001O0000QP3"}, "image_id": 868, "id": 14425}, {"iscrowd": 0, "category_id": 1, "bbox": [24.0, 0.0, 64.0, 26.0], "area": 830, "segmentation": {"size": [512, 512], "counts": 
"ZP<2m?2N2N2M3N1O2N2M3O1O001O1O1O1O001O1ON2O1O1O1N2O1O1O1O11OO1O1O1N2O1O11O0000N2O1O100001O1O1O1O001O1O1O1O001OO1O1N2O1O1O1N2O1O1O1NRPd6"}, "image_id": 869, "id": 14426}, {"iscrowd": 0, "category_id": 1, "bbox": [143.0, 0.0, 47.0, 54.0], "area": 1547, "segmentation": {"size": [512, 512], "counts": "P`W22n?2N2N3M2N2l@F^>=_AF^><_AG_><^AG_>;^AH`>:^AI_>o0M2N3M0000O10000O100O10000O100O100O100ZOdAH\\>8gADZ>=lA_OU>a0mA]OS>b0QB[Oo=e0f000O1]Oh@>X?Al@;U?Em@9S?FPA7Q?IRA4n>KUA3k>Md00PPQ5"}, "image_id": 869, "id": 14427}, {"iscrowd": 0, "category_id": 1, "bbox": [243.0, 0.0, 13.0, 2.0], "area": 20, "segmentation": {"size": [512, 512], "counts": "P`i32n?000000000000O1000000000PPP4"}, "image_id": 869, "id": 14428}, {"iscrowd": 0, "category_id": 1, "bbox": [447.0, 0.0, 65.0, 56.0], "area": 2188, "segmentation": {"size": [512, 512], "counts": "Qao62m?2N2M2O2N2N2N2IAj@a0T?Ai@b0T?6O2N2N2N2N2M3N2N1O2N2N2N2M3N2O00O1O1O1O1O1O1N2O1O1O1O1O2M3N1O2N2N2N2N2N2000O100000O100000O1N2O10O10O1000O1N2M^O"}, "image_id": 869, "id": 14429}, {"iscrowd": 0, "category_id": 1, "bbox": [66.0, 2.0, 68.0, 87.0], "area": 3154, "segmentation": {"size": [512, 512], "counts": "URQ11n?2N2N2N2N2N2KDc@>[?5N3M2N2N2N2N2N2N2N2N2N1JgNdAY1\\>iNbAW1^>5\\OlN]BU1b=mN\\BS1d=oNZBQ1f=QOXBo0h=SOVBm0j=d000001O0000000000000000001O1001O1O1O1O1O1O00O1O2N3M2N2UOoADS>:oADS>:oADS>:oADS>;nACT>;nACT>;nACT>;nAC[>4gAJ[>4gAKZ>LoA2Q?N2NYnl5"}, "image_id": 869, "id": 14430}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 45.0, 22.0, 62.0], "area": 829, "segmentation": {"size": [512, 512], "counts": "]1i1W>1000001ON3M2N2N2N2N2WO^A1d>M^A1d>M^A1e>K^A3d>K^A3d>K_A2c>L_A2c>L_A2c>L_A2c>L_A2Y?NXmd7"}, "image_id": 869, "id": 14431}, {"iscrowd": 0, "category_id": 1, "bbox": [412.0, 45.0, 68.0, 67.0], "area": 2489, "segmentation": {"size": [512, 512], "counts": "UR^61n?2X@N_?4_@N_?4_@N_?3`@O^?;M3N1O2N2N2M3N2N2N1O2N2O10000O010N2N2N2M3N2N1O2N200000000O0100000000000O01O1M3N2N2N1O000O3N2N2N2N2N1N3N2N2Hm@@U?>m@@U?=n@AT?=8O2N2N2N2M3Ngm?"}, "image_id": 869, "id": 14432}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 87.0, 64.0, 67.0], "area": 2161, "segmentation": {"size": [512, 512], "counts": "f3k0RAWOl>o0N3N2N2N2N2N1N3N2N2N2N2M201000000OO2N2NPM"}, "image_id": 869, "id": 14434}, {"iscrowd": 0, "category_id": 1, "bbox": [265.0, 112.0, 81.0, 93.0], "area": 3310, "segmentation": {"size": [512, 512], "counts": "leT41m?3N1O2N2N2M3N2N2N2N1O2M3N2N2N2N2M3N2BjNoAW1`=iNdB2JU1b=kNbB1KS1d=mN`B2IR1g=nN^B2IP1i=PO[B3Jm0f=iN]B91Z1b=_NZB:2W1d=oNZBQ1f=c0010002N2N1O2N000O12N2N2N1O2N2M3N2N2N2N2N1O2M3[NgA`1^>N2N2N2N2N1O2000000O10000lNVAP1m>O2M3N2N2N2N2N2N1N3N2N2N2N2N2N2NUkb2"}, "image_id": 869, "id": 14435}, {"iscrowd": 0, "category_id": 1, "bbox": [182.0, 113.0, 74.0, 57.0], "area": 2463, "segmentation": {"size": [512, 512], "counts": "cTk26b?8G:M2001O000000000001O01O000000000006J0001OK5G9L40000010O0000000000000103L000000000001O000ROdA9[>]OoAc0Q>SOYBm0]>01O00000001O000001O0000000001O000001O0UOUAa0V?O00000H8Gako3"}, "image_id": 869, "id": 14436}, {"iscrowd": 0, "category_id": 1, "bbox": [129.0, 154.0, 76.0, 69.0], "area": 2676, "segmentation": {"size": [512, 512], "counts": "XfP21o?2M2N3M2O2M2N3HAk@`0T?Bi@a0T?8M1]OTORBl0n=UOPBl0o=WOnAl0o=VOPBk0n=XOoAk0o=WOnAi0R>YOlAg0T>\\OjAd0V>b0000101N3M3M10O01O01O00012M2N3N1N3M2O1N3M2N3N1N3M2O1N3M2O0O000010O00010O00010O000002O1N3M2O2M2N3N1N2N3N1N3M2N3NTZi4"}, "image_id": 869, "id": 14437}, {"iscrowd": 0, "category_id": 1, "bbox": [316.0, 160.0, 66.0, 87.0], "area": 2770, "segmentation": {"size": [512, 512], "counts": 
"SWn41n?2M3N2\\OI\\A9b>I\\A9a>J]A8a>J]A7b>K[A8c>I\\A9b>I\\A9b>I\\A9a>J]A8a>e0N1O2N2O10000N2N1N3N00OJYNVBg1j=[NTBe1l=700000O0101O2N1O2N2N20N2M2SOWBBh=?ZB_Of=`0]B^Oc=b0_B\\Oa=d0aBZO_=f0cBWO^=h0eBVO]=h0dBWO^=g0dBWO]=h0eBVO]=g0fBWO\\=g0fBVO]=h0eBVO]=h0o0M2O2N2N2M3N2N2N1N3N2N2NajP2"}, "image_id": 869, "id": 14438}, {"iscrowd": 0, "category_id": 1, "bbox": [222.0, 201.0, 82.0, 72.0], "area": 2984, "segmentation": {"size": [512, 512], "counts": "lW_31m?3UOMfA5X>MfA5X>MfA5X>MfA5W>NgA4W>NgA4W>NgA3X>OfA3X>NfA5X>MfA5W>NgA4W>NgA4W>l0N2N2N2M2O2N2N2N2O00O2N2M3N2N2N1O2M3N2N2N2N1O2M3N2N2N2N2N1O2N2O1O1O10O10O10000000O01000N2M2O2N2N2M12N2M2O2N2N2M3N1O2M3N2N2N1N3N2N2N2M2OfhW3"}, "image_id": 869, "id": 14439}, {"iscrowd": 0, "category_id": 1, "bbox": [78.0, 203.0, 75.0, 86.0], "area": 2789, "segmentation": {"size": [512, 512], "counts": "oVW11m?3N2N2N2M3N2N2N2M3N1O2k@XOP?m00000000000O1N00002M3N2N2N2N2N1O2O10000SBaNX=_1eBdN[=\\1cBfN\\=[1bBgN^=Y1`BiN`=W1^BkNb=U1\\BmNd=S1ZBnNg=DUBZ12TOi=@VBQ2m=00000000000000N2N2N1O2N2N2N2N2M3N2[O_AHc>6_AHc>6_AHb>7`AGb>8_AFc>:]ACf>=ZAAh>?YA^Oi>`0=N2N2N2N2N1O2M3NZXc5"}, "image_id": 869, "id": 14440}, {"iscrowd": 0, "category_id": 1, "bbox": [371.0, 204.0, 88.0, 89.0], "area": 3642, "segmentation": {"size": [512, 512], "counts": "Zgi52m?1O2[AMZ=5dBMZ=5dBMZ=5dBMY=6eBLY=6eBLY=6PB@<[BDb=>]BDa=>\\BEb==\\BEb==\\BDc=^1N2M3N1O0000O3N2N2N2N1O2M3N2N00002N2N2M3N2N2N1O2N2M3N2N2N2N2N1O2M10000000O10O1000000000O10O12N2N1O0O10O101O2N2N1O2M3N2N2N2N2N2N1N3N2N2N2N2N2MWYj0"}, "image_id": 869, "id": 14441}, {"iscrowd": 0, "category_id": 1, "bbox": [425.0, 231.0, 66.0, 83.0], "area": 2654, "segmentation": {"size": [512, 512], "counts": "Uid61m?3N2N2N1O2M3N2N2N2N2M2O2N2N2DUO`Am0^>UO_An0Q>oNWB5Fn0Q>oNWB\\1f=gNXBZ1g=hNWBZ1g=gNWB\\1g=fNWB\\1g=`0N1N10000000O1000O12N2N2N2M2O2N2N2N2N2M3N1O2N2N2N2N2M300O0N3SOTAc0n>[OTAc0n>[OTAc0U?O2N2N2M3N2N2N2N1O2MgW:"}, "image_id": 869, "id": 14442}, {"iscrowd": 0, "category_id": 1, "bbox": [45.0, 238.0, 69.0, 89.0], "area": 2628, "segmentation": {"size": [512, 512], "counts": "ihf01n?2N2ALo@6o>Lo@6o>Lo@6o>Lo@6o>Lo@6o>Lo@6o>?_OROmAP1Q>ROmAP1Q>ROlAo0T>SOjAn0U>TOiAn0U>TOiAn0V>?0000000O1_NhAY1X>eNjA[1V>cNlA]1T>aNnA_1Z>0gAaNP>_1nAcNR>]1lAeNT>[1jAgNV>b10000000000O10O10000000000O1M3N2N2N2N2N2N1O2N2N2N2N2N2Kn@YOT?e05N2N2N2N2N2N2N1O2N2M3NjfV6"}, "image_id": 869, "id": 14443}, {"iscrowd": 0, "category_id": 1, "bbox": [301.0, 270.0, 76.0, 76.0], "area": 3084, "segmentation": {"size": [512, 512], "counts": "Zjf41n?2N1O2M3N2N2N2M2O2N2nNA`Ba0^=A`Ba0^=AlAG`0j0b=E\\B=b=E\\B=b=E\\B=b=E[B>d=CZB>e=DYB>f=CXB=i=l01N2O1O2N20O1M3N2N1O00O01001O1N3N2O10000O1M2O2N2N2M3N2N20O01000000O1M3N1O2N2N2N2M3N1O2N2N2N2M3N2N1O2N2N2M3N2NaVS2"}, "image_id": 869, "id": 14444}, {"iscrowd": 0, "category_id": 1, "bbox": [131.0, 272.0, 87.0, 74.0], "area": 3407, "segmentation": {"size": [512, 512], "counts": "SiQ21n?2N2N2N2N2N2N2N2N2N2N2N2N2N2M3N2N2O10000`AmNR>S1lAoNT>Q1jAQOV>o0hASOX>m0fAUOZ>k0dAWO\\>W100N2N2N2N2N20000000000000000OO001O2N2N2N2N2N2N2N000000000002N1O0000000000002N0000000002N2N2M3N2L5M2N2N2N2Jm@\\OU?2k@:[?Dg@:a?N2N2N2M3Nifb4"}, "image_id": 869, "id": 14445}, {"iscrowd": 0, "category_id": 1, "bbox": [460.0, 295.0, 52.0, 74.0], "area": 1939, "segmentation": {"size": [512, 512], "counts": 
"fZV71n?1N3N2N1N3N2O1O001O1N101O1O1M2j@\\Oo>f0o@\\Oo>l0M2O2M3DkNjAW1T>kNjAV1U>kNiAX1T>kNjAW1T>;fAC\\>;eAC^>;dAC^>:eAC]>;eAD]>A`Aa0^>A`Aa0^>@aAb0\\>AbAa0\\>AbAa0\\>AaAa0^>A`Aa0^>b0M3N2N00000O1000O11O2M2O2N2N2O1000O100000O1000O100000OhNTB=l=AVB?j=_OWBa0j=\\OYBd0g=ZO[Bf0e=XO]Bh0b=WO`Bi0`=UObBk0^=ROdBn0]=POeBn0]=POeBn0]=POeBm0^=QOdBm0Y>N1O2N2M3N2N2N1O2M3N2N2N2N1NVdW3"}, "image_id": 869, "id": 14448}, {"iscrowd": 0, "category_id": 1, "bbox": [96.0, 339.0, 71.0, 62.0], "area": 2407, "segmentation": {"size": [512, 512], "counts": "X[`12m?2N1N3N2N2N2N2M2O2N2N2N2N2M3N1100000O1000O10000000O1N2N1O2N2N2M3N2N1100000000N11000OO2N2N2N2M3N2N1O1O02N1O2M3N2ZOfAC\\>;fAC\\>;fAC\\>;fAB\\>;fAC\\>;fAC\\>;fAC\\>;fAC\\>;e0N3N2N2N2NnT\\5"}, "image_id": 869, "id": 14449}, {"iscrowd": 0, "category_id": 1, "bbox": [386.0, 345.0, 98.0, 70.0], "area": 3704, "segmentation": {"size": [512, 512], "counts": "X[Q6X1kAjNT>W1jAkNU>V1iAlNV>T1iAnNV>_1O1O0010000000O0N3N2N2N2N2O0100000N2N1O2N2N2O1000O100000000N1O2N2M30000O10N2O100000000N2N1O2N2M3DSABo><1bAT12mNY>Z1eAgNZ>a1M3N2M2_D"}, "image_id": 869, "id": 14451}, {"iscrowd": 0, "category_id": 1, "bbox": [202.0, 388.0, 74.0, 77.0], "area": 3220, "segmentation": {"size": [512, 512], "counts": "h\\U33l?2N2N2N2N1N3N2N2N2N2_AZOe=h0YBZOd=h0[BZOd=h0XB[Oh=e0VB]Oj=c0TB_Ol=a0RBAn=?oACQ>=nAER>;lAGT>9jAIS>:jAIT>S1N2N1O2N2M3N2N1O02N2N1O2M300000000O010000O1N2O01N2N2M3N2N1O2N2M3N2N2N2N1N3N2N2N2M3N1O2N2Fl@EV?9l@DW?:k@DW?9:N1O2N2Mmbe3"}, "image_id": 869, "id": 14452}, {"iscrowd": 0, "category_id": 1, "bbox": [331.0, 391.0, 74.0, 76.0], "area": 2792, "segmentation": {"size": [512, 512], "counts": "llU51n?2N2N2N2N2N2M2O2N2N2N2N200O0O2M3N2N2N2N2N1O200000000O10O100000000000O01000O1\\AhN]>Z1aAhN]>^1O2N2N2N2N2O1O001N2O100O1O10QOSBIn=4UBLj=3WBNi=0YB0g=N[B0g=N[B0g=N[B0g=M\\B3c=L_B4a=J`B7`=GbB8_=FcB9^=EdB9]=EfB9\\=EfB9\\=EfBNQO6[>JeBORO4\\>KdBORO4g?N2NhQe1"}, "image_id": 869, "id": 14453}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 398.0, 38.0, 60.0], "area": 1312, "segmentation": {"size": [512, 512], "counts": "eO0001O01O01O01O00011N3M2O1N3N1010ON3M2O1N3M2O2M2N3M2O1N3M2O2M2N3N1N2N3M2O\\b\\7"}, "image_id": 869, "id": 14454}, {"iscrowd": 0, "category_id": 1, "bbox": [448.0, 421.0, 62.0, 67.0], "area": 2187, "segmentation": {"size": [512, 512], "counts": "d^P72l?3N2N1O2N2M3N2J@i@b0T?Aj@`0n>]OWAj0g>XOWAj0g>WOXAk0f>WOXAk0f>9M2O2N2N000O102N2N1O2N2M3N2N2N0002N2N2N1O2M3N2N2N2N1N3N2N2N2N2M2O2N2N2N2N2M3N1O2N2N2M3N2N1O2N2MSb0"}, "image_id": 869, "id": 14455}, {"iscrowd": 0, "category_id": 1, "bbox": [2.0, 437.0, 62.0, 59.0], "area": 1994, "segmentation": {"size": [512, 512], "counts": "b^11n?2N3[@KZ?7e@JZ?7d@LY?7d@KZ??O2M2N3N1N2N3M2O2M2N10O01O01O01O00010O0001O01O011N3M2N10O01O01O0001O01O2O1N3M2O1N3M2ITAWOo>f0TAXOm>g07N3M2N3N1N3M2O1N3M2O2M`ao6"}, "image_id": 869, "id": 14456}, {"iscrowd": 0, "category_id": 1, "bbox": [292.0, 439.0, 89.0, 70.0], "area": 2911, "segmentation": {"size": [512, 512], "counts": "]^b42m?2M3N1O2N2M3N2N2M2O2N2N2M3N2N2M2O20000O1000O1000000000O1[AkN^>T1`AoN`>Q1^AQOb>W10O100000O1000O1000000000O10O100OO0O1000O1000O010000O012N2M30000000O10N2N2N2N0O10O100002M3N2N2N2N2M3N2N2N2N2M3N2N1O]QQ2"}, "image_id": 869, "id": 14457}, {"iscrowd": 0, "category_id": 1, "bbox": [137.0, 441.0, 73.0, 71.0], "area": 2728, "segmentation": {"size": [512, 512], "counts": "RoT21n?2N2N2M3N2N1O2N2G@o@b0n>Ao@b0o>_OPAc0n>9N1O2N2M3N2N2N2N1N3N2N2N2N2N2M2O2N2O10000000O1O001O1O1O1O1O1O0aNnAP1S>nNnAR1S>lNoAS1R>jNQBU1P>iNRBV1o=hNSBW1\\>O1O001O1O1SOVAb0k>\\OWAc0j>[OXAd0i>ZOYAe0g>ZO[Ae0P?01O1O1O1O1O1O0O2M3N2N2N2N2M\\`f4"}, 
"image_id": 869, "id": 14458}, {"iscrowd": 0, "category_id": 1, "bbox": [495.0, 468.0, 17.0, 34.0], "area": 300, "segmentation": {"size": [512, 512], "counts": "Wog72m?2N2N1O2N2M3N2N2N1O2M3N2N2N2N1N3[A"}, "image_id": 869, "id": 14459}, {"iscrowd": 0, "category_id": 1, "bbox": [405.0, 470.0, 67.0, 42.0], "area": 1677, "segmentation": {"size": [512, 512], "counts": "foZ61n?2N1O2N2N2M3N2N2N1O2M3N1O1O1O1N2O1O1001O1O1O1O1OO1O1O1N2O1O1O1O1O1N2O1O1O1O11O1O1O001O1O1O1O1O1O001O1O1O1Mn@VOS?h04M2O2N2N2N2N2M3N2N1O2N2Nc`c0"}, "image_id": 869, "id": 14460}, {"iscrowd": 0, "category_id": 1, "bbox": [243.0, 478.0, 82.0, 34.0], "area": 1364, "segmentation": {"size": [512, 512], "counts": "eoi31m?3N2N1O2N2N2M3N2N2N1O2M3N1O1O1O1O1N2O11O1O1O1O001O1O1O1O1O001O1O1O1O1O1O001O1O1O1O1O001O1O1O1O1O1O001O1OO1N2O1O1N2O1O1N2O1O1001O1O1O1O1O1O001O1O1O1O1O1O1O1O1O1O1ORPm2"}, "image_id": 869, "id": 14461}, {"iscrowd": 0, "category_id": 1, "bbox": [107.0, 482.0, 17.0, 15.0], "area": 128, "segmentation": {"size": [512, 512], "counts": "Yoe12n?2M2N2O2M001O01O01O01O1O3N1N3M2Oh`Q6"}, "image_id": 869, "id": 14462}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 491.0, 2.0, 5.0], "area": 8, "segmentation": {"size": [512, 512], "counts": "[?5l?Ne`n7"}, "image_id": 869, "id": 14463}, {"iscrowd": 0, "category_id": 1, "bbox": [104.0, 495.0, 42.0, 17.0], "area": 441, "segmentation": {"size": [512, 512], "counts": "o_d11n?1O1O1O1N2O1O1O1O1N2O1O1O11O1O1O1O1O001OO1O1O1N2O1O11O001O1O1O1O1O001O1O1O1O1N1O2NV`f5"}, "image_id": 869, "id": 14464}, {"iscrowd": 0, "category_id": 1, "bbox": [472.0, 504.0, 15.0, 8.0], "area": 61, "segmentation": {"size": [512, 512], "counts": "o_\\71n?1O1O1O1O1N21O1O1O1O001O1O1OQP<"}, "image_id": 869, "id": 14465}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 37.0, 37.0], "area": 798, "segmentation": {"size": [512, 512], "counts": "0R1n>001O1O1O00O1O1O1N2O1O1N2O1O1N2O1O1O1N2001O00N2O1O1N2O1O1O1N2O1O1N2O1O1NR`]7"}, "image_id": 870, "id": 14466}, {"iscrowd": 0, "category_id": 1, "bbox": [54.0, 0.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "PPk01ooT7"}, "image_id": 870, "id": 14467}, {"iscrowd": 0, "category_id": 1, "bbox": [164.0, 0.0, 84.0, 55.0], "area": 2415, "segmentation": {"size": [512, 512], "counts": "QPb22m?2O001O1O1O1O001O1O1O1O001O1O1O1O001O1O1O1O001O1O1O1O001O1O1O1O001O1O1O1O001O1O1O1O001O1O1O1O001O1O1O1O001O1O1O1O001O1O1O1O001O1O1ON2O1O1O2M3[O_AHc>6_AHc>5`AIa>6aAGb>7_AHc>6_AHc>5`AHc>6_AHb>7`AGb>7e0M3N2NeoS4"}, "image_id": 870, "id": 14468}, {"iscrowd": 0, "category_id": 1, "bbox": [249.0, 0.0, 25.0, 12.0], "area": 164, "segmentation": {"size": [512, 512], "counts": "P`l31o?1O1O001O1O1O1O001O1O1O1O1O0000O1N2O1O1O1O1N2O1OQPg3"}, "image_id": 870, "id": 14469}, {"iscrowd": 0, "category_id": 1, "bbox": [124.0, 22.0, 87.0, 79.0], "area": 3312, "segmentation": {"size": [512, 512], "counts": "]Qn11n?2N2N2M3N1O2N2M3N2N1O2N2M3N2O010O1000O1N20O010000000O01000N2O1MROSAm0l>UOTAk0j>VOVAk0g>XOYAh0e>ZO[Af0c>0cAN_>0cAN^>1dAL_>1cAN_>0cAN_>0cAN_>0cAM_>1dAM^>1cAN_>0cAN_>0Q_f4"}, "image_id": 870, "id": 14470}, {"iscrowd": 0, "category_id": 1, "bbox": [77.0, 74.0, 102.0, 86.0], "area": 4267, "segmentation": {"size": [512, 512], "counts": "QcV11n?2N1O2M3N2N2N2N1N3N2N2N2M2O2N2O10000O0O2M100001O20O10000000O0100N2N2N1O2N2M3N200O0100000000O0100000000O010000000O0100000N2M3N1O2N2N2M3N1O2N2O1O1000O1000O1000O100000OPOWBJi=3ZBMf=1[BNg=0[BMh=1ZBMg=1\\BMf=1\\BMf=1\\BMf=1[BMh=1ZBMh=0[BNf=1\\BMf=1\\BLg=2ZBMh=0[BNg=0[BNf=1\\BMf=1V1Nj\\V5"}, "image_id": 870, "id": 14471}, {"iscrowd": 0, "category_id": 1, 
"bbox": [48.0, 112.0, 66.0, 62.0], "area": 2139, "segmentation": {"size": [512, 512], "counts": "aTh02m?1N3N2N2N2M2O2N2N2M3@CZA>e>CZA?d>CZA?d>`0N2O001N2OO1O1O1O101O010000000O100000O1O001O1O1O1N11O1N2N200mN]Ah0c>UO_Al0k>00O1000O10N2N2N2N1N3N2N5K2M2O2N2N2M3N1OWkV6"}, "image_id": 870, "id": 14472}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 165.0, 75.0, 73.0], "area": 2748, "segmentation": {"size": [512, 512], "counts": "_5d0Z?3N2N1O2N2N2M3N2O10O10O100000000O010000000O100000O10O100N2N2N2N1O200O10000000O010000O1O1O1O001N2O1O1ON3N2N2N1O2N2M;F1O2N2N2N2M3N2N2N1O2N2M3N2N2N2N1OaYj6"}, "image_id": 870, "id": 14473}, {"iscrowd": 0, "category_id": 1, "bbox": [169.0, 165.0, 17.0, 15.0], "area": 138, "segmentation": {"size": [512, 512], "counts": "\\ed21m?3N2N1N3N2O1O010000O0O2N2M3N1O2NejR5"}, "image_id": 870, "id": 14474}, {"iscrowd": 0, "category_id": 1, "bbox": [104.0, 169.0, 62.0, 65.0], "area": 2147, "segmentation": {"size": [512, 512], "counts": "_Vd11n?1O2M3N2N2M2O2N2M3N2_OD[A>c>DZA>d>a0N2N2N2N1O2OO1O1N2O0101O1000O10O10000000O10O1O100O001O1O1O1000O1gNgAh0Z>VOiAg0Z>WOhAg0`>QObAm0j>M3N2N2N1N3N2N2M3N2N2M3N2N^i\\5"}, "image_id": 870, "id": 14475}, {"iscrowd": 0, "category_id": 1, "bbox": [167.0, 210.0, 81.0, 80.0], "area": 3426, "segmentation": {"size": [512, 512], "counts": "dhc21m?2O2N2iNJaB8]=J`B9^=I`B8_=J_B8^=K`B7^=J`B9^=IYBDFe0o=IYBDFe0n=JYBDGc0o=KXB?f=BYB`0e=BYB`0d=CYB`0e=BYB?f=m00000O10O102N2N1O2M3N2N2N101000000O0100O0O00O0100000O2O2N2N1N3N1OO10O1000000O101O2N1N3N2N2N2M2O2N2N2M3N1O2N2M3N2N1O2N2M3N2N1OnhS4"}, "image_id": 870, "id": 14476}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 214.0, 40.0, 70.0], "area": 1469, "segmentation": {"size": [512, 512], "counts": "g6d1[>200O100000000O10dNhAn0Y>POhAQ1X>lNkAT1T>kNnAT1S>jNoAT1S>iNoAW1^>00000O0100000O1000N2N1N3N2N2N1YOn@?T?_On@?T?^Oo@?[?N2N2N2N2N2N2Mjg[7"}, "image_id": 870, "id": 14477}, {"iscrowd": 0, "category_id": 1, "bbox": [53.0, 236.0, 14.0, 14.0], "area": 106, "segmentation": {"size": [512, 512], "counts": "agj02m?2N2N2N2N10100000N2M2O2N2N_Xn6"}, "image_id": 870, "id": 14478}, {"iscrowd": 0, "category_id": 1, "bbox": [47.0, 239.0, 79.0, 95.0], "area": 3052, "segmentation": {"size": [512, 512], "counts": "oig02m?2N2UOKhA6V>MgA6W>LgA6W>LgA6W>LgA6W>LgA6V>LhA7V>KhA6W>LgA6W>LgA6W>LgA6V>l0N2O00O2N2N2N1N010000000O010000000O0100F_NVBa1j=aNTB_1l=cNRB\\1n=fNPB[1P>gNnAY1R>:000O01000000002M3N2N1O2N2M3N2N2N1O2M3N2N2N2N2M2O1O00000O2O2N2N2N2M3N1O2N2N2N2MogP6"}, "image_id": 870, "id": 14479}, {"iscrowd": 0, "category_id": 1, "bbox": [207.0, 251.0, 72.0, 92.0], "area": 3680, "segmentation": {"size": [512, 512], "counts": "`iW32n?5K6J6I7J6J5K3MO1000O10000000O1000O1000000000O4M00000000O10O1000bNEWC;iLSVd3"}, "image_id": 870, "id": 14480}, {"iscrowd": 0, "category_id": 1, "bbox": [129.0, 266.0, 36.0, 33.0], "area": 648, "segmentation": {"size": [512, 512], "counts": "PiP21f?1`@1^?1_@2_?0_@2_?9N2M3N2N2N1O2N2N02N2N2N200N1O2N0000000O01001O2N2N1O2N2M3N2N2N2N_W]5"}, "image_id": 870, "id": 14481}, {"iscrowd": 0, "category_id": 1, "bbox": [99.0, 293.0, 74.0, 67.0], "area": 2298, "segmentation": {"size": [512, 512], "counts": "Sja12m?1O2N2M3N2N2N1O2M3N2N2N1N3N2N2N2M2O2N2N2M3N1O2N2N2000O1000O100000O01000N2N2N1O2N2O1O010O1ROZA`0g>^OZAc0e>[O^Ad0c>ZO_Af0a>XOaAh0k>10O1000N2M21000O1000M3N2N1N3N2N2M2O2N2N2M2O2N2M_UY5"}, "image_id": 870, "id": 14482}, {"iscrowd": 0, "category_id": 1, "bbox": [23.0, 318.0, 21.0, 20.0], "area": 207, "segmentation": {"size": [512, 512], "counts": "Vj;1n?2M3N2N1O2N2N200000O0100000N2N2N1O2N2N2MieY7"}, "image_id": 870, "id": 14483}, 
{"iscrowd": 0, "category_id": 1, "bbox": [68.0, 327.0, 69.0, 56.0], "area": 2136, "segmentation": {"size": [512, 512], "counts": "W[R12m?2N2M2O2M3N2N1d@AU?b0i@_Om>1WAf0g>\\OWAf0g>\\OVAg0h>9O1O0O3N2N1O2N200O010000O1000O10O1O1O1O1O001O1N2O1O001O1O1N2O1O0VOSAa0n>]OTAd0k>ZOWAe0j>XOXAh0Q?O1N2O00000O010000000O1000O1000O10004L5K6I[Tk5"}, "image_id": 870, "id": 14484}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 337.0, 37.0, 79.0], "area": 1885, "segmentation": {"size": [512, 512], "counts": "^;b1]>000XOeNlBZ1U=h01O1O1N2O1O1O1O1N101O1O1N2O1O1O1N2O3M4L1N2O1O001N2O1O1O0O2O9G9G9FfT]7"}, "image_id": 870, "id": 14485}, {"iscrowd": 0, "category_id": 1, "bbox": [204.0, 351.0, 76.0, 46.0], "area": 2278, "segmentation": {"size": [512, 512], "counts": "f[V36j?7I6J4K10000000O10O10000000O10O1000000B[OaAd0`>BZA>f>>O10000000O10O10NWAmNi>S120000O01000000000O01000000000O10O10000000O10O10000000O102N3M0000000O10O10000002M7J6J6J7IRdc3"}, "image_id": 870, "id": 14486}, {"iscrowd": 0, "category_id": 1, "bbox": [18.0, 374.0, 68.0, 75.0], "area": 2586, "segmentation": {"size": [512, 512], "counts": "W]92m?2N2M3N2N1O2N2M3N2N2N1O2M3N2N2N1O1O02O1O1N2O00O1O1^OROPBQ1n=QOPBP1o=ROoAP1o=ROoAP1o=QOPBQ1m=ROPBQ1n=QOPBQ1n=a0O2N2M3O100000O100O0O2N2M3N2N2N1O2M3N2N2N2E`ASOb>k0`AROb>m0`AQOb>l0aAROa>l0;N2N2]Ok@8V?Gk@7X?Gj@7X?Gj@7a?N2N2MYcd6"}, "image_id": 870, "id": 14487}, {"iscrowd": 0, "category_id": 1, "bbox": [220.0, 393.0, 63.0, 58.0], "area": 2675, "segmentation": {"size": [512, 512], "counts": "n\\^35k?6^OGPA`0j>Go@9P?<0006J7I7I000O010000000000O01004L0000O10O1000000000O10O100000000MN4100000O100000O1000O10000000O1000O100006J7H8I7I7I6J_Rb3"}, "image_id": 870, "id": 14488}, {"iscrowd": 0, "category_id": 1, "bbox": [124.0, 396.0, 53.0, 62.0], "area": 2325, "segmentation": {"size": [512, 512], "counts": "b\\n11o?8H8H8H9G8G9H4L000000O11N3N00000000O10OMmA[NS>e14000000000O0100000000000O010000]OgA_OY>a0oAWOQ>h0WBQOi=o0d00O10000000O103M7I7I8H7IdRW5"}, "image_id": 870, "id": 14489}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 437.0, 37.0, 63.0], "area": 1183, "segmentation": {"size": [512, 512], "counts": "X>\\1b>0100000O10O100000O10O100000O12M2O2N2N2N2N2M2O2N2N2N2M3N1O2N2N2M3N2N1O2NkQ]7"}, "image_id": 870, "id": 14490}, {"iscrowd": 0, "category_id": 1, "bbox": [96.0, 443.0, 15.0, 17.0], "area": 144, "segmentation": {"size": [512, 512], "counts": "o]`12n?2M3N3L3N2NO010O012M3N3M2M3NjQX6"}, "image_id": 870, "id": 14491}, {"iscrowd": 0, "category_id": 1, "bbox": [31.0, 452.0, 81.0, 60.0], "area": 2624, "segmentation": {"size": [512, 512], "counts": "jo?1n?1O2V@Mc?6[@Kd?9O1O1JDf@>Y?De@=Z?5O1N2O1O1O1N2O1O1O1N2O11O1O001O1O1O00O1O1O1O1O1N2O1O1O1O1O1N2O1O1O1O1O1N2O1O1O1O1O1N2O1O1001O001O1DkAjNW>T1kAjNW>T1jAjNX>T1kAjNW>T140O10O107I6J1O00000O10000000000O1000000000000O100000O4M6J6J7I6J7H7J6JT`g3"}, "image_id": 870, "id": 14493}, {"iscrowd": 0, "category_id": 1, "bbox": [137.0, 483.0, 54.0, 29.0], "area": 1050, "segmentation": {"size": [512, 512], "counts": "`oT23m?3L3N2M4M2M2O000O100O100O100O10000O100O100O100O10000O100O1002N2N3M2N2N1OO100O10000O100O1002N2N2N3M2N3M2N2NSPP5"}, "image_id": 870, "id": 14494}, {"iscrowd": 0, "category_id": 1, "bbox": [465.0, 496.0, 19.0, 16.0], "area": 163, "segmentation": {"size": [512, 512], "counts": "foX72m?2M3N2N2N2O10O^@D`?>1O1O1O1O1I[@1f?L]@3h?01O1O1OQ`="}, "image_id": 870, "id": 14495}, {"iscrowd": 0, "category_id": 1, "bbox": [233.0, 36.0, 50.0, 102.0], "area": 3295, "segmentation": {"size": [512, 512], "counts": 
"Sdd33f?7I7I7I7J6I8H7I7I7I7J6I7I80O0000K5N2000001O000001O00000001O0000N3G8I7H8H8H8H8H8H9K41O00000000M4VOPA;^?Hi^b3"}, "image_id": 871, "id": 14496}, {"iscrowd": 0, "category_id": 1, "bbox": [228.0, 140.0, 44.0, 48.0], "area": 1098, "segmentation": {"size": [512, 512], "counts": "\\Ub32l?2O2M3N1N3N2M2N3N1N3N2M2O2M3M2O2M2O2M3M20100O01M3M2O2M3N1N3M2O2M3N1N3N2M2N3N2M2O2M2N3NWkg3"}, "image_id": 871, "id": 14497}, {"iscrowd": 0, "category_id": 1, "bbox": [217.0, 403.0, 81.0, 88.0], "area": 3646, "segmentation": {"size": [512, 512], "counts": "Xn\\31c00e>3XA1e>2XA0e>3YA0d>3XA0f>2XA0e>3XA1e>f001O01O01O0N3M2M4M2M3N3M2M4M0O1011001O010O01O010O01O_NUBm0l=oNWBR1h=lN[BS1f=iN]BW1c=gN_BZ1`=cNdB\\1]=aNeB`1n=O00010O010OO2M1O0O10O10O01000O0100O010O10O10O2O2M4M2N2M4M2M3N3L3N3M2M3N3L3N2M4M2NYcZ3"}, "image_id": 871, "id": 14498}, {"iscrowd": 0, "category_id": 1, "bbox": [286.0, 425.0, 75.0, 87.0], "area": 3455, "segmentation": {"size": [512, 512], "counts": "f^_43k?2M4M2M3N3L3O20OaA]Oa=c0\\BAc=`0ZBBg==jA]O39S>;fA_O4:V>6dAC39Y>=dAG[>o01O010O01O01O010O00001O001O000000N2N2M3N2M3N2N2M3N2M3N2N2M3N2M3N200001M2M4AbBWNa=f1cBWN_=g1cBVNa=f1>M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N2M`R[2"}, "image_id": 871, "id": 14499}, {"iscrowd": 0, "category_id": 1, "bbox": [349.0, 456.0, 74.0, 56.0], "area": 2066, "segmentation": {"size": [512, 512], "counts": "mo^52l?2M4M2N2M3N2M3N2N2O1001O001O00001O001O0000N200001O001O00001O0000N2M3N2N2M3N2M3N2N2M3N2M300001O001O001O00N2M10O012N2M4M2N2M4M2N3L3N2M4M2N2M4M2N3L3NdQ\\1"}, "image_id": 871, "id": 14500}, {"iscrowd": 0, "category_id": 1, "bbox": [451.0, 477.0, 33.0, 35.0], "area": 718, "segmentation": {"size": [512, 512], "counts": "noQ72l?2M3N2M3N2N2M3N2M3N2N2M3N2N200001O00001O001O0N2N3L3M3N3L3M4M2M3MPa="}, "image_id": 871, "id": 14501}, {"iscrowd": 0, "category_id": 1, "bbox": [416.0, 497.0, 21.0, 15.0], "area": 219, "segmentation": {"size": [512, 512], "counts": "n_`62k?3N2M3N2M3001O001O00001O001O001O00001L3N[PU1"}, "image_id": 871, "id": 14502}, {"iscrowd": 0, "category_id": 1, "bbox": [169.0, 0.0, 12.0, 6.0], "area": 41, "segmentation": {"size": [512, 512], "counts": "P`d21o?1O001O001O1O001OO1N2OQ`U5"}, "image_id": 873, "id": 14503}, {"iscrowd": 0, "category_id": 1, "bbox": [206.0, 0.0, 43.0, 28.0], "area": 674, "segmentation": {"size": [512, 512], "counts": "YPW31n?2N2N2N2N2KFa@<^?4O1O1O1O1O1O1O1O1O1O1O1O00O1O1O1O1O1O1O1O1O1O1O1O1Kb@G_?85O1O1O1O1O1O1O1O1OQ`S4"}, "image_id": 873, "id": 14504}, {"iscrowd": 0, "category_id": 1, "bbox": [240.0, 0.0, 71.0, 77.0], "area": 2889, "segmentation": {"size": [512, 512], "counts": "RRh34j?2N3L3N2N3WOAlAb0S>@jAc0f=[OVB40d0i=ZOUB4Oe0j=YOUB4Ne0l=XOUB5Kf0o=WOTBY1j=iNTBX1k=jNSBX1k=?O1O2M10N2O1N2O2N3L4M3M3M21N2N1O2N2O1O00O1O1O1O1O1O1O1O1O2N2N2N2N2N2N2N2N2O1N2N2N2N2N2N00002N2N2N2N2N2N2N2N2N2NZ_T3"}, "image_id": 873, "id": 14505}, {"iscrowd": 0, "category_id": 1, "bbox": [367.0, 0.0, 43.0, 25.0], "area": 611, "segmentation": {"size": [512, 512], "counts": "V`g52m?2N1O2N2N2N2O1O1O1O1O1O1O1O1O1O1O1O1O0000O1O1O1O1N2O1O1O1O1O1O1O1O1O1O1O1O1O1O1N2O1OQPc1"}, "image_id": 873, "id": 14506}, {"iscrowd": 0, "category_id": 1, "bbox": [418.0, 0.0, 67.0, 61.0], "area": 2116, "segmentation": {"size": [512, 512], "counts": "VQa61n?2N2N2N2N1O2N2N2ZOBfAa0X>AfAa0X>AfAa0X>AfAa0JXOX>9lAa0JXOX>9lAa0JXOX>9lAg0R>[OlAg0R>[OlAg0R>c01O1O1N2O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1N2O1O1O1O1O1O1O1O1001O1OO1O1O2N2N2N2N2M3N2N2N2N2N2N2N2Nd_="}, "image_id": 873, "id": 14507}, {"iscrowd": 0, "category_id": 1, "bbox": [458.0, 28.0, 54.0, 75.0], "area": 2422, "segmentation": {"size": [512, 512], 
"counts": "RRU71n?1O2N2N2N2h@Hd>;ZAGd>;ZAGd>;ZAGd>:[AHc>:[AHc>:ZAI\\>DfAe0LH]>EdAW1Z>9N2N2N2N2M3N1O2N2N1O000O1002N2M3N2N2N1O2N2N2M3N2N2N2N1O2N1N1000O1002N1N01000000QO"}, "image_id": 873, "id": 14508}, {"iscrowd": 0, "category_id": 1, "bbox": [296.0, 51.0, 81.0, 61.0], "area": 3001, "segmentation": {"size": [512, 512], "counts": "fRd41l?4M2N3L3N2M4M2M4M2M3N3M2N3O01O010O01O010O01O01O010O01O01O010O01O010O01O01O010O01O001M2M3N3L3N3L31O010O01O01O010O01O01O010N1M3N3L3N30O0010O0010O]O^AHc>5_AIc>4`AIc>5`AHc>4`AIc>5`AHc>4f0N2MS^S2"}, "image_id": 873, "id": 14509}, {"iscrowd": 0, "category_id": 1, "bbox": [65.0, 81.0, 59.0, 98.0], "area": 3938, "segmentation": {"size": [512, 512], "counts": "TcP15Q2N_;9YDOf;2RD6n;KjC=Ul0000000001O01O02N0000001O00M4F9G9J6N2M3N2M301O01O001O00001G8H9G8H8H9L3J601O0L5G8H8K6Mj\\P4"}, "image_id": 873, "id": 14512}, {"iscrowd": 0, "category_id": 1, "bbox": [383.0, 98.0, 74.0, 68.0], "area": 3185, "segmentation": {"size": [512, 512], "counts": "^do52l?3M2N3N1N3M2N3M2N3M3EVO\\Al0b>VO\\Am0a>VO\\Al0c>UO\\Am0a>:N3M2N3M2100O010O010O010O0100O010O01N1N3M2N3O00100ON3M2N3N101O010O010O010O10O01N110O010O010O01O1M2N3WOcAK_>DbA416`>D`A524`>EaA416`>CaA525j>HYA5j>HXA6[?M2Nc[k0"}, "image_id": 873, "id": 14513}, {"iscrowd": 0, "category_id": 1, "bbox": [135.0, 116.0, 54.0, 72.0], "area": 2707, "segmentation": {"size": [512, 512], "counts": "deS24d?8H8H8H8G:G8H8K50000000000010O000000000000010O000000M3000001O000O10H8H8H8H8N20001O0001O000000K5H9L300L4H8Hb\\Q5"}, "image_id": 873, "id": 14514}, {"iscrowd": 0, "category_id": 1, "bbox": [256.0, 151.0, 66.0, 63.0], "area": 2577, "segmentation": {"size": [512, 512], "counts": "[VP43h?6J5L4000010O000000000M4J5J6K5K5J7J5O1000010O000L4000001O0001O0001O01O001O01O0000VOnAFR>5TBJm=5SBKm=6RBES>;mA@W>a0iAZOZ>h0fARO`>n091O0001O000001O01O000001OL4K:E6K5KVkn2"}, "image_id": 873, "id": 14515}, {"iscrowd": 0, "category_id": 1, "bbox": [457.0, 153.0, 55.0, 71.0], "area": 2179, "segmentation": {"size": [512, 512], "counts": "YfT71n?2N2M3N2N2N1O2N2N2N2C\\OZAg0d>[OZAg0d>[OZAg0d>[OZAf0e>\\OYAf0d>=N2N2N2N2N2N1N3N2N2N1OO100000O100001N3N2N1O2N2N2N2M3N2N2N2N1O2N2N2M3N2N2N2Cj@MW?2k@Ll9"}, "image_id": 873, "id": 14516}, {"iscrowd": 0, "category_id": 1, "bbox": [334.0, 188.0, 92.0, 49.0], "area": 3032, "segmentation": {"size": [512, 512], "counts": "jVW52h?6K5J6K5N3O00000001O01O000000010O00000001O01O000000010O00000001O01O000000010O0L6H6N20001O0001O0001O000001O0001O0001O0001O000001O0001O0001O0001O000001O0001O0001O0001O0000O2I6K5J6K5J7IQjZ1"}, "image_id": 873, "id": 14517}, {"iscrowd": 0, "category_id": 1, "bbox": [225.0, 193.0, 20.0, 15.0], "area": 229, "segmentation": {"size": [512, 512], "counts": "^f`31g?81O00000000K5O1000000010O00000000000MQZU4"}, "image_id": 873, "id": 14518}, {"iscrowd": 0, "category_id": 1, "bbox": [59.0, 204.0, 53.0, 70.0], "area": 2952, "segmentation": {"size": [512, 512], "counts": "Phm09c?4J6G9H8G:F9K5000000000010O00000nAVNn=n10000010O000000L4001O0001O0000000001O000001O00000000N3L300YOTBZOl==]BCc=4fBMY=KoB5Q=BXC>U>00G]hW6"}, "image_id": 873, "id": 14519}, {"iscrowd": 0, "category_id": 1, "bbox": [134.0, 205.0, 61.0, 78.0], "area": 2859, "segmentation": {"size": [512, 512], "counts": "\\XS25R?LbA8Z>LaA9[>KaA:^>F]A?c>BXAb0i>:0001O0001O01O0001O0001O0001O01O0001O00WOmNeBT1U=QOjBP1R=TOiBQ1V=POeBU1\\=kN_BZ1`=fN[B_1e=>001O01O0000010O000001O01O00L401O0BVBbNn=Y1WBbNn=Z1?J5L4K5L4K6K4K5L4KaYn4"}, "image_id": 873, "id": 14520}, {"iscrowd": 0, "category_id": 1, "bbox": [204.0, 221.0, 58.0, 88.0], "area": 2661, "segmentation": {"size": [512, 512], "counts": 
"`XV34g?5L5J5O10001O01OL4K5N30O0000010O000000RAROj>R1cAROi=n0RBVOn=j0mA[OS>e0iA_OW>V10OK5L4K6J51O000001M2L4K5O101O00N2K6K4K5L4K6J5L4K5L4K6K4K5K5L5J5L4KTil3"}, "image_id": 873, "id": 14521}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 221.0, 27.0, 25.0], "area": 485, "segmentation": {"size": [512, 512], "counts": "_Wi63i?4K5K5L5O000001O01O0001O01O000001O01O000001N1K5L4K5KTYi0"}, "image_id": 873, "id": 14522}, {"iscrowd": 0, "category_id": 1, "bbox": [272.0, 231.0, 54.0, 85.0], "area": 2715, "segmentation": {"size": [512, 512], "counts": "TXX45g?4K5L4K6K4K5N200010O00_BnNT^O^Ad0`>^O]Ae0NUO^>:aAg0]>`0L3N3M200010O01O0O2L3N1O0O3N3L3N2M4N110O00010O010O00010O010O01N1N2YO^BmNf=P1\\BnNf=o0^BmNf=P1\\BnNf=o0]BnNf=P1]BmNf=o0]BoNe=o0h0M2M3N3L3N3L3N2M4M2M4MRXk1"}, "image_id": 873, "id": 14524}, {"iscrowd": 0, "category_id": 1, "bbox": [387.0, 260.0, 66.0, 75.0], "area": 2696, "segmentation": {"size": [512, 512], "counts": "SjQ64j?2N3L3N3M2M3O2O0010O01O0N2M4M2N3L3N3^OkNUBW1i=lNSBV1l=lNRBT1m=oNQBQ1o=>10O2O3O01O010O01M2O2O01O010OM4O010O01O01O01M2DTBbNo=Z1UBcNm=[1UBbNo=[1g0ZA@f>m000001O0001O00000YAlNb>Z10O000N2M30N2O11O000001O00000001O0001ON2I7K501O00000L4H8I7Ikdn6"}, "image_id": 873, "id": 14527}, {"iscrowd": 0, "category_id": 1, "bbox": [75.0, 369.0, 53.0, 78.0], "area": 2522, "segmentation": {"size": [512, 512], "counts": "blU13`?=I701O000I7J60000000000000000010ORBTOhNTB2l=C_B=b>1O01O000000000000000001O01I6E^cU5"}, "image_id": 873, "id": 14529}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 384.0, 39.0, 55.0], "area": 1759, "segmentation": {"size": [512, 512], "counts": "\\mm24a?;E;E;E;000001O0000000000000001O00aAcN[>a10000000000000]NhA^1]>1O000001O0000000000000TOfA3Y>BRB3RR_4"}, "image_id": 873, "id": 14530}, {"iscrowd": 0, "category_id": 1, "bbox": [2.0, 416.0, 63.0, 42.0], "area": 1839, "segmentation": {"size": [512, 512], "counts": "R^11h?7H8000000L4I8M2001O0000N2K6O000001O00000001O0001O00000001O0001O00000001O000001O00000001O0001PO[Ac0R?O000001O00000001O0000000I7H8IdRo6"}, "image_id": 873, "id": 14531}, {"iscrowd": 0, "category_id": 1, "bbox": [311.0, 433.0, 37.0, 51.0], "area": 1667, "segmentation": {"size": [512, 512], "counts": "ink48h>1aA>V>KhA6X>JhA6X>JbA<^>c00000000000001O000001O000000000000000001O00000001O00000000H8B>Bkba2"}, "image_id": 873, "id": 14532}, {"iscrowd": 0, "category_id": 1, "bbox": [362.0, 436.0, 53.0, 58.0], "area": 2206, "segmentation": {"size": [512, 512], "counts": "S^e58_?9J6TA[O^>d0WAGi>h0000000000000_AmNV>R1bAVO^>[1K1O0000000001O0001O000000000001O01O00M300000001O000XOfAJZ>0lA1S>OmA1S>OmA1S>OmA1S>OnA0S>KQB5o=CYB=e>00001O0001J5H_Q`1"}, "image_id": 873, "id": 14533}, {"iscrowd": 0, "category_id": 1, "bbox": [144.0, 453.0, 86.0, 43.0], "area": 2632, "segmentation": {"size": [512, 512], "counts": "P_X28_?:G8H800000000001O01O000000000000010O000O1M31O00000001O00000001O000001O00K5000001O0000000001O0001O000004L00000000010O00000000RO\\A`0R?00001O0000000001O000001O00000001O00000M3G9H^a\\4"}, "image_id": 873, "id": 14534}, {"iscrowd": 0, "category_id": 1, "bbox": [484.0, 462.0, 28.0, 50.0], "area": 1076, "segmentation": {"size": [512, 512], "counts": "U_b7:`?601O000000000i@Gg>i0I7F:0000000000000000000_NeA]1_>000000000001eN^AV1"}, "image_id": 873, "id": 14535}, {"iscrowd": 0, "category_id": 1, "bbox": [422.0, 465.0, 55.0, 47.0], "area": 1947, "segmentation": {"size": [512, 512], "counts": "Y_c6;\\?9F:K500000000UAWO_>U100001O000000000000000fNcAQ1f>01O000000000000M3N20000000000000000001O000J6G90000000000010O000000H8FaQa0"}, "image_id": 873, "id": 14536}, {"iscrowd": 0, 
"category_id": 1, "bbox": [2.0, 467.0, 49.0, 45.0], "area": 1503, "segmentation": {"size": [512, 512], "counts": "k_15k?01O0000000000N2G9G9H8J6001O00000000000000N2J60000001O00000000000000001O00000000000I7G9N3O0L4H8GcQV7"}, "image_id": 873, "id": 14537}, {"iscrowd": 0, "category_id": 1, "bbox": [67.0, 477.0, 46.0, 35.0], "area": 1451, "segmentation": {"size": [512, 512], "counts": "goQ19X??H80000001O00000000000000000000000000001O00M3N20000000000000000000000001O0000000000000I7AaQW6"}, "image_id": 873, "id": 14538}, {"iscrowd": 0, "category_id": 1, "bbox": [306.0, 499.0, 82.0, 13.0], "area": 633, "segmentation": {"size": [512, 512], "counts": "j_i46f?400M31O00000000000000001O00000F\\@8f?00000001O000000M30000N2000000000000001O000000000000003M004L000000000000001O0000000000000000001O00000000000000001O000000000000000OS`m1"}, "image_id": 873, "id": 14539}, {"iscrowd": 0, "category_id": 1, "bbox": [177.0, 510.0, 17.0, 2.0], "area": 26, "segmentation": {"size": [512, 512], "counts": "noh22n?0000000000000001O00000000000000Q`n4"}, "image_id": 873, "id": 14540}, {"iscrowd": 0, "category_id": 1, "bbox": [18.0, 20.0, 42.0, 27.0], "area": 647, "segmentation": {"size": [512, 512], "counts": "PQ91l?4M2M3N3N1010O0010O00010O010O00010L3N3N11O010O01O01O010O01O01O010O01O01L3N3L3N2M4MUoQ7"}, "image_id": 874, "id": 14541}, {"iscrowd": 0, "category_id": 1, "bbox": [430.0, 48.0, 32.0, 20.0], "area": 333, "segmentation": {"size": [512, 512], "counts": "iQg61m?3L3M3O2O01O01O010O00010O01O01O010O00010O01O01O010O0001O0M4M2MYnh0"}, "image_id": 874, "id": 14542}, {"iscrowd": 0, "category_id": 1, "bbox": [444.0, 338.0, 68.0, 78.0], "area": 3386, "segmentation": {"size": [512, 512], "counts": "g[n62m?1i@O]>4aANZ>7cAK\\>6cALZ>7cALZ>6eAKZ>7cALZ>7dAJ[>7cALZ>7cAL[>m0N3M3N1N3N2M2O2M2N3N2N101000N1N010O3N2M2O2M3M2O2VNlAd1Z>N2M2O2M3M2100O0N30O01000O01000O01000O010O01O1M201000O0100O01000gD"}, "image_id": 874, "id": 14543}, {"iscrowd": 0, "category_id": 1, "bbox": [200.0, 479.0, 42.0, 33.0], "area": 705, "segmentation": {"size": [512, 512], "counts": "]_T31n?1O2M3N2N1N3N2N2M2O2N2O10O0100000O0100000O0100000O001O1O1O001O1O1O001M3N2N1N3N2N2M]`V4"}, "image_id": 874, "id": 14544}, {"iscrowd": 0, "category_id": 1, "bbox": [226.0, 0.0, 8.0, 4.0], "area": 20, "segmentation": {"size": [512, 512], "counts": "PPa31o?1O001O1O00O1NRP[4"}, "image_id": 875, "id": 14545}, {"iscrowd": 0, "category_id": 1, "bbox": [420.0, 226.0, 27.0, 21.0], "area": 311, "segmentation": {"size": [512, 512], "counts": "\\Wb62k?4M2N3L3010O0010O010O00010O010O00010O010O00N3M2M4M2NgXP1"}, "image_id": 875, "id": 14546}, {"iscrowd": 0, "category_id": 1, "bbox": [502.0, 259.0, 10.0, 17.0], "area": 105, "segmentation": {"size": [512, 512], "counts": "_Xk71m?2M4M2M3N30O010O01jG"}, "image_id": 875, "id": 14547}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 488.0, 79.0, 24.0], "area": 1083, "segmentation": {"size": [512, 512], "counts": "ooo21o?0O1O1O1O100O1O1O1O100O1O1O1O1001O1O2N1OO1O1O1O100O1O1O1O100001O1O2NO1O1O1O100O1O1O1O100O1001O1O2N1O1O1O00O100O1O1O1O100O1O1O1O100O11O1O1O1N3M2O1N2N3M2N2O1N[`h3"}, "image_id": 875, "id": 14548}, {"iscrowd": 0, "category_id": 1, "bbox": [273.0, 0.0, 76.0, 58.0], "area": 1837, "segmentation": {"size": [512, 512], "counts": "m`X42l?3L3N2M4M2M4L3N2M4M2M3O2O001O00001O001O00001O001O00001OYO\\A2d>K`A5_>IcA7]>FgA9Y>EiAAmA?S>_OPBa0o=\\OTBd0e>O01O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O01O010O01O01M2N3L3N2M4L3Nj^a2"}, "image_id": 876, "id": 14549}, {"iscrowd": 0, "category_id": 1, "bbox": [385.0, 35.0, 71.0, 57.0], "area": 2195, 
"segmentation": {"size": [512, 512], "counts": "UbP61l?3M3N3L3N2M4L310O00010O01O01O001L3M3N3L3N3L3N210O00010O010O00010O0010O001N11O01O01O010O01O01O01jN\\Ao0c>oN_AQ1i>O01O01O010O01O01O010O01OM4M2M3M4Eg@K[?3h@I\\?3:M3NVnk0"}, "image_id": 876, "id": 14550}, {"iscrowd": 0, "category_id": 1, "bbox": [222.0, 50.0, 17.0, 16.0], "area": 163, "segmentation": {"size": [512, 512], "counts": "lQ_31l?3N2N3L3010O010O0010O0010O0N3L3N[^X4"}, "image_id": 876, "id": 14551}, {"iscrowd": 0, "category_id": 1, "bbox": [237.0, 50.0, 93.0, 51.0], "area": 1604, "segmentation": {"size": [512, 512], "counts": "Qbf31m?2N2M4M2M4M2O1010O010O01O01O010O01O01O010O010O01O01O010O01O01O010O010O01O01O010O01O01O010O010O01O01O010O01O010O01O010O01O01O010O010O01O01O010O01O01O010O010O01O01O010O01O01O0O2L3N3L3N2N]mj2"}, "image_id": 876, "id": 14552}, {"iscrowd": 0, "category_id": 1, "bbox": [185.0, 491.0, 18.0, 15.0], "area": 168, "segmentation": {"size": [512, 512], "counts": "bol23k?2M4M2010O00010O010O010O00001M2M4MaPj4"}, "image_id": 876, "id": 14553}, {"iscrowd": 0, "category_id": 1, "bbox": [128.0, 0.0, 14.0, 21.0], "area": 208, "segmentation": {"size": [512, 512], "counts": "`PP23h?5L4K6M200000001O00M3L4K5KUPi5"}, "image_id": 877, "id": 14554}, {"iscrowd": 0, "category_id": 1, "bbox": [164.0, 0.0, 16.0, 27.0], "area": 265, "segmentation": {"size": [512, 512], "counts": "gPb23i?4K5L4L5J5O10000000L4L4K5L4K5LTPV5"}, "image_id": 877, "id": 14555}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 37.0, 181.0, 294.0], "area": 40403, "segmentation": {"size": [512, 512], "counts": "U1V8k70O0000010O0000010O000000010O0000010O0000010O000000010O0000010O0000010O000000010O0000010O0000010O000000010O0000010O0000010O000000010O0000010O0000010O0000010O000000010O0000010O0000010O000000010O0000010O0000010O000000010O0000010O00000WHlGg7W8O00000010O0M3L4L5J5L4L4K6K4L4O10001O0[OUGVJj8f5ZGZJf8b5^G^Jc8\\5bGdJ^8X5fGiJY8S5lGlJT8o4QHQKP8j4THVKl7f4XH[Kg7`4_H_Ka7]4cHcK]7Y4gHgKZ7S4kHmKU7o3PIQLo6k3UIULk6f3ZIZLg6a3]I_Lc6]3aIdL^6W3hIhLX6T3lIlLU6o2oIQMQ6j2TJWMk5e2ZJZMf5b2^JYMg5b2^JZMg5a2]J[Mg5a2n3L4K6K4L4K5L5K4K5L4L4K6K4L4K5L5K4K5Le]U5"}, "image_id": 877, "id": 14556}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 387.0, 188.0, 125.0], "area": 18526, "segmentation": {"size": [512, 512], "counts": "SV1`AmN`>Y1010O10O10ON3M201O001O1O001O001O001O1O00N2N2N2O1N2N3M2O2M2N3M2O2M3TOPAf0P?XOSAh0R?010O01O1M2N3M2O2M2N3M3N1N3M_oc3"}, "image_id": 878, "id": 14559}, {"iscrowd": 0, "category_id": 1, "bbox": [275.0, 0.0, 23.0, 15.0], "area": 196, "segmentation": {"size": [512, 512], "counts": "X`Y43k?2N3N2L30D^@;a?F_@9a?4OD`@8`?31F_@3c?41N2M31O001O1ON2O1N2N2OQP[3"}, "image_id": 878, "id": 14560}, {"iscrowd": 0, "category_id": 1, "bbox": [484.0, 37.0, 28.0, 37.0], "area": 659, "segmentation": {"size": [512, 512], "counts": "hQb72l?3N2M2N3M2N3N1N3M3M2O2O010O0100O0100O010O001M2O2N20O01hN"}, "image_id": 878, "id": 14561}, {"iscrowd": 0, "category_id": 1, "bbox": [150.0, 39.0, 80.0, 80.0], "area": 3188, "segmentation": {"size": [512, 512], "counts": "dR[21m?2O2M2N3M3N1N3M2O2M2O200O0100O0100O0100O01M2N3M3N1N3M2O2M3M2N3N1N3eA\\NV>i1N2M2N3M2O2M20100O010O010O10O10O010O01000ON3M2N3N4K3M2\\OjA]OX>`0kA]OW>c0iA[OZ>e0fAXO\\>h0dAWO^>i0bATOa>i0bATO`>j0=N1N3M3N1N3M2N3N1N3M3NQn\\4"}, "image_id": 878, "id": 14562}, {"iscrowd": 0, "category_id": 1, "bbox": [19.0, 52.0, 77.0, 76.0], "area": 3008, "segmentation": {"size": [512, 512], "counts": 
"hb93l?1N3M2N3N2M2N3N1N3G\\ORAg0k>[OSAg0k>9N1N3M2O2M3N1010O0100N1O01N3O010O010O10O010O10O10O010O10O10O010O01000O0100OgAcNo=^1oAcNR>]1kAfNT>Z1jAiNV>`110O010O01N2N1N3M201O1\\O_AFb>8aAEb>8`AFb>9`ADc>9`AEb>8`AFb>9`ADc>:^AEc>j@ES?=k@ES?f0M2N3M2N3M2N3N110ON3M3M2010O010O010O0YAmN_>V1^AlN`>\\1M2N3N1N3M2O2O010O10O10O010O010O010O01O0N3M3M2N3M2N3M2N3M2N3M2N3N20O0VM"}, "image_id": 878, "id": 14564}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 103.0, 33.0, 66.0], "area": 1148, "segmentation": {"size": [512, 512], "counts": "W3P2Q>0000N2N2M2O2N2N1O0O2O2N2N2M3N1O2HXAUOj>i0XAUOj>h09N1O2N2M3N2N2N1O2M3N2N2N1OS\\_7"}, "image_id": 878, "id": 14565}, {"iscrowd": 0, "category_id": 1, "bbox": [108.0, 108.0, 86.0, 68.0], "area": 2960, "segmentation": {"size": [512, 512], "counts": "TTf12l?3N2M2O2M2O2M3M2O2M3N1N3M2O2M3O01000O01000O010O01000O01000O010O010XAoN`>Q1^AQOa>o0]ASOd>U100O0100O0100O01000M2N3N2N1100O0100OO2M3M100O010O01O01O010O3N2M2O2O010O10O10O1M2O2M3VOn@c0S?\\On@a10O10O010O010O10O10O010O010O10O0QOcA<]>AeA?\\>_OfAa0Y>]OiAc0X>ZOkAc0W>[OkAc0X>[OjAb0X>\\OkAb0j>N3M2N3N1N3M3M2OoZb0"}, "image_id": 878, "id": 14567}, {"iscrowd": 0, "category_id": 1, "bbox": [19.0, 132.0, 76.0, 67.0], "area": 2703, "segmentation": {"size": [512, 512], "counts": "^e91n?2N2M2GJg@8W?Jg@8W?Ih@9U?Jh@9V?9O2N2M3N2N1O2M3N2N1O0O2O20000O0100M3N2O010000000O0100M3N2N1O2M3N2N1O0O1000O10O10O10001O2M3N2N2J`AiNa>V16N2N2M3N1O2N2M3N2N1O2M3N2N2N1N3N2N2N2MZ[`6"}, "image_id": 878, "id": 14568}, {"iscrowd": 0, "category_id": 1, "bbox": [381.0, 156.0, 74.0, 76.0], "area": 2980, "segmentation": {"size": [512, 512], "counts": "\\en52h0O\\>2bA1[>2bA1[>1dA0Z>3cA0Z>3cAO\\>2bA1[>2bA0\\>2cA0Z>3cA0\\>0bA2^>i00O010O10O10O010O010O10O1dAaNV>^1iAcNW>d100O0100O010O0100O0100O010O0100O010O0100O010O001M2N3M3N1N3M2N3N1N3M3M2N3N1N3O010N2M2N3N1N3M2N3N1NSZl0"}, "image_id": 878, "id": 14569}, {"iscrowd": 0, "category_id": 1, "bbox": [336.0, 219.0, 80.0, 67.0], "area": 2994, "segmentation": {"size": [512, 512], "counts": "hWX51m?3M2O2M3M2N3M2O2M2N3M2N3O00100O0100O010O010O01N1O2M3M2O2O0100O010O010O01[AhNa>[1O2M3M2N3M2O2O010O010O10O10O010O010O010O010O10O1O0N3M2O2M2N3M2N3nNZAg0g>WO[Ah0g>UO\\Aj0m>N3M2Gh@H[?5h@IY?5i@IZ?4:N3NZh_1"}, "image_id": 878, "id": 14570}, {"iscrowd": 0, "category_id": 1, "bbox": [325.0, 241.0, 11.0, 12.0], "area": 84, "segmentation": {"size": [512, 512], "counts": "hgR52l?3L3N3O0010O001L3N3L_hg2"}, "image_id": 878, "id": 14571}, {"iscrowd": 0, "category_id": 1, "bbox": [196.0, 261.0, 17.0, 18.0], "area": 217, "segmentation": {"size": [512, 512], "counts": "_XR34i?4L3L40001O01O00010O0001O01L3L4LlWe4"}, "image_id": 878, "id": 14572}, {"iscrowd": 0, "category_id": 1, "bbox": [312.0, 272.0, 88.0, 67.0], "area": 2925, "segmentation": {"size": [512, 512], "counts": "XYl43l?1N3M2N3N2M2N3N1N3M2N3N2M2N30O10O10O01N1N30O0100O0100O0100O0100O010O0100O0100O0100XAkNc>Y1O2N2O010O01000O010O0100O0100O0O2M2O2O10M3N1N3M2O2M2N3M3N1N3M2O2M3M210O010N1N3M3N1N3M2N3N2Mefg1"}, "image_id": 878, "id": 14573}, {"iscrowd": 0, "category_id": 1, "bbox": [479.0, 277.0, 33.0, 54.0], "area": 1202, "segmentation": {"size": [512, 512], "counts": "li_71m?3M2_OMSA6j>MTA4k>MSA6j>MSA6k>KSA7n>>O010M2N3M2O2M3M2N3N110OO2N2O0010O010O010O01000O0SG"}, "image_id": 878, "id": 14574}, {"iscrowd": 0, "category_id": 1, "bbox": [302.0, 321.0, 67.0, 66.0], "area": 2154, "segmentation": {"size": [512, 512], "counts": "bZg41m?2N3M2N3M2O2M2n@B`>a0]AB`>`0^ABc>>ZAEe>;YAGh>i0O10O10O010O10O010O0100O010O0100O0100O010O3NO0100O010O0100O0100O010O010O10O010O10OO2M2O2M2N3DPADR?;PACR?:QACQ?;QACR?:;N3N2M2NRUW2"}, 
"image_id": 878, "id": 14575}, {"iscrowd": 0, "category_id": 1, "bbox": [467.0, 328.0, 45.0, 65.0], "area": 1845, "segmentation": {"size": [512, 512], "counts": "bkY71m?2VOOcA4Z>OcA3[>OdA3Y>0dA2Z>0dA3Z>OcA4Z>NdA4Z>OdA3Z>NdA4\\>MaA6_>e0010O0100O0100O010O0O2O010O010O10O010O10O010O010O10O010O10O010O01UE"}, "image_id": 878, "id": 14576}, {"iscrowd": 0, "category_id": 1, "bbox": [36.0, 333.0, 18.0, 19.0], "area": 206, "segmentation": {"size": [512, 512], "counts": "iZb02l?2N2N3M2N3M2010O010O010O0O2M2M4M2MaeT7"}, "image_id": 878, "id": 14577}, {"iscrowd": 0, "category_id": 1, "bbox": [141.0, 355.0, 20.0, 20.0], "area": 221, "segmentation": {"size": [512, 512], "counts": "^kV22m?1N3N2N2M2O2N2M3O10O1O0O2N2M2O2M3N2M2OgT_5"}, "image_id": 878, "id": 14578}, {"iscrowd": 0, "category_id": 1, "bbox": [442.0, 374.0, 70.0, 71.0], "area": 2695, "segmentation": {"size": [512, 512], "counts": "V\\m61m?2l@0V>3gA0V>3hANV>4hAOU>4hANW>3gA0V>3gAOW>3hAOV>3gA0Y>OeA3[>NbA5^>f0010O0100O010OO2N20O010O010O010O10O10O010O010O10O010O010O10O10O010O0100O0100O0lN`Ai0`>UOaAi0a>UObAi0`>TObAj0`>TOcAi0l>M2O2O010O010O10O010O10O010OmB"}, "image_id": 878, "id": 14579}, {"iscrowd": 0, "category_id": 1, "bbox": [414.0, 434.0, 68.0, 73.0], "area": 2311, "segmentation": {"size": [512, 512], "counts": "S^_63k?2N3M2O2M2N3M3M2N3O001j@WOT?j010PAWOg>j0WAXOi>g0UA\\Oj>m010O010O010O01000O010cAmNk=S1SBPOm=P1PBROP>n0nAUOR>k0kAWOU>i0jAYOV>g0gA[OY>W10O10O010O10O010O010O010O10O01M2O2M2N3M2N3N2M2N3M2TOPAf0V?N3N1N3M2N3M3M2O2M]a>"}, "image_id": 878, "id": 14580}, {"iscrowd": 0, "category_id": 1, "bbox": [477.0, 486.0, 35.0, 26.0], "area": 521, "segmentation": {"size": [512, 512], "counts": "no^72m?1O100O100O1O100O1O100O100O1KHa@9_?4O1O100O100O1O100O1O100O100O1O11O2N1O"}, "image_id": 878, "id": 14581}, {"iscrowd": 0, "category_id": 1, "bbox": [474.0, 0.0, 38.0, 54.0], "area": 1323, "segmentation": {"size": [512, 512], "counts": "RQ]72m?3M3N1oNHWB;g=GWB;g=FWB=g=EVB=i=EUB=i=DUB?i=CTB`0j=AUB?k=CRB>n=DoAEmA:T>HiA9W>HhA7Y>KdA5]>g00O100O1O100O1O100O1O100D^AYOc>g0^AXOb>g0aAVO`>j0aATO`>k0;O100O1"}, "image_id": 879, "id": 14582}, {"iscrowd": 0, "category_id": 1, "bbox": [495.0, 58.0, 17.0, 30.0], "area": 310, "segmentation": {"size": [512, 512], "counts": "Sbg72n?1N3M3N1N3N2M2N3N1N3N2M2N10O0101UN"}, "image_id": 879, "id": 14583}, {"iscrowd": 0, "category_id": 1, "bbox": [170.0, 469.0, 19.0, 43.0], "area": 420, "segmentation": {"size": [512, 512], "counts": "m_e23h?5L4K5L4K5L4K5L4L40L4L4K6K4K5L4L4K6K]QQ5"}, "image_id": 880, "id": 14584}, {"iscrowd": 0, "category_id": 1, "bbox": [132.0, 509.0, 11.0, 3.0], "area": 26, "segmentation": {"size": [512, 512], "counts": "m_R23m?00000001O000000001OQPh5"}, "image_id": 880, "id": 14585}, {"iscrowd": 0, "category_id": 1, "bbox": [12.0, 0.0, 95.0, 66.0], "area": 4200, "segmentation": {"size": [512, 512], "counts": "UQ62l?2N3L3N2M4M2M4M2M3N3M2M4M2M4M2O101O001O00001O001O00001O001O001O00001O001O00001O001O00001O001O001O00001O001O00001O001O00001O001O001O000000N2M3N2M3N2N2M3N2M3N2M3N2N2M3N2M3N2M3N2M3N2N2M3N2M3NR`Z6"}, "image_id": 881, "id": 14586}, {"iscrowd": 0, "category_id": 1, "bbox": [172.0, 0.0, 88.0, 52.0], "area": 1511, "segmentation": {"size": [512, 512], "counts": "PPf21o?00001O001O0T@Nj?40001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00010O01O01O010O01O01O010O01O01O010O01O01O0VA\\OY>d0dA_O[>a0bAB_>=^AGa>:\\AHd>j010OXO[A5f>H\\A5g>H\\A6f>H]A4g>H\\A6f>H]A4Y?N2MSom3"}, "image_id": 881, "id": 14587}, {"iscrowd": 0, "category_id": 1, "bbox": [286.0, 0.0, 226.0, 86.0], "area": 10126, 
"segmentation": {"size": [512, 512], "counts": "PP_41o?00001O001O00001O001O00001O001O001O00001O001O00001O001O00001O001O00001O001O001O00001O001O00001O001O00001O001O00001O001O00001O001O001O00001O001O00001O001O00001O001O00001O001O001O00001O001O00001O001O00001O001O00001O001O00001O001O001O00001O001O00001O001O00001O001O00001O001O001O00001O001O00001O001O00001O001O00001O001O00001O001O001O00001O001O00001O001O00001O001O00001O001O001O00001O001O00001O001O00001O001O00001O001O00001ON2M3N2M3N2M3N2N2M3N2M3N2M3N2M3N2"}, "image_id": 881, "id": 14588}, {"iscrowd": 0, "category_id": 1, "bbox": [186.0, 13.0, 21.0, 15.0], "area": 167, "segmentation": {"size": [512, 512], "counts": "dPm21m?2N3L30010O010O0010O010O0010O0010O001M2N^_h4"}, "image_id": 881, "id": 14589}, {"iscrowd": 0, "category_id": 1, "bbox": [214.0, 24.0, 22.0, 16.0], "area": 174, "segmentation": {"size": [512, 512], "counts": "oP[31l?4M2N30O0010O0010O0010O0010O010O00010M2M4MRoY4"}, "image_id": 881, "id": 14590}, {"iscrowd": 0, "category_id": 1, "bbox": [292.0, 36.0, 24.0, 21.0], "area": 291, "segmentation": {"size": [512, 512], "counts": "`Qb41l?4M2N3L3N2010O010O0010O0010O010O0010OO1M4M2N3LgnQ3"}, "image_id": 881, "id": 14591}, {"iscrowd": 0, "category_id": 1, "bbox": [316.0, 46.0, 28.0, 18.0], "area": 267, "segmentation": {"size": [512, 512], "counts": "bQn44i?3O101O010O10O01O10O01O0O101O010O0001D^@9d?00010O0001L3N2M\\nc2"}, "image_id": 881, "id": 14592}, {"iscrowd": 0, "category_id": 1, "bbox": [369.0, 57.0, 29.0, 22.0], "area": 323, "segmentation": {"size": [512, 512], "counts": "Sbh51m?2N3N101L3N3O00010O010O010O010O010O010O010O0010M2N3M2M4Momh1"}, "image_id": 881, "id": 14593}, {"iscrowd": 0, "category_id": 1, "bbox": [129.0, 68.0, 42.0, 10.0], "area": 359, "segmentation": {"size": [512, 512], "counts": "[bP21h?70001O000000000000000000000000000001O00000001O000000000000000000000000000001O000Nl]Z5"}, "image_id": 881, "id": 14594}, {"iscrowd": 0, "category_id": 1, "bbox": [151.0, 80.0, 108.0, 110.0], "area": 9883, "segmentation": {"size": [512, 512], "counts": "]e[2;e>P1POP1SOm001O0000000000000000003M000000000001O000000000L400I7A?00001O0000000000000000000000000000000000000001O0000000000000000000001O0000000000000000000000000RCeMWT1lNT1WOi01O00000000000000000000K50000000000005K00001O0001O000000_O^CdMb<[2_CcMc<]2]CcMc<]2]CcMc<]2`0000000000000000000001O00000000000000000001O000000000000000000000000000000000b0^O000000000000000000000000000001O000000000001O0000000000000000YOg0lNT1lN[^`1"}, "image_id": 881, "id": 14597}, {"iscrowd": 0, "category_id": 1, "bbox": [282.0, 98.0, 24.0, 57.0], "area": 962, "segmentation": {"size": [512, 512], "counts": "hS]41Y?f0000000m@5Q>m0000000000000001kNjA>l>000000000000000000G`lV3"}, "image_id": 881, "id": 14598}, {"iscrowd": 0, "category_id": 1, "bbox": [64.0, 124.0, 37.0, 93.0], "area": 2383, "segmentation": {"size": [512, 512], "counts": "dUP1U1k>0000000000000000000`NUOkCk0Uh>j000000000000000000001O0E;00000000000000000000000M30H800001O000000000001OlNmA;S>EmA2\\>Nk00000000000000oZW3"}, "image_id": 881, "id": 14602}, {"iscrowd": 0, "category_id": 1, "bbox": [416.0, 172.0, 28.0, 56.0], "area": 1320, "segmentation": {"size": [512, 512], "counts": "ZV`6i0c>d00000000000F:000000000000000000001O009G0000000001OH8mN]kQ1"}, "image_id": 881, "id": 14603}, {"iscrowd": 0, "category_id": 1, "bbox": [112.0, 187.0, 32.0, 37.0], "area": 1053, "segmentation": {"size": [512, 512], "counts": "kUh1o0Q?000000000000000000000000001O00005K00000000000000000000000000POUkg5"}, "image_id": 881, "id": 14604}, {"iscrowd": 0, "category_id": 1, 
"bbox": [125.0, 227.0, 22.0, 21.0], "area": 457, "segmentation": {"size": [512, 512], "counts": "Xgn1`0[?50000000000000000000000000000000000000000mXf5"}, "image_id": 881, "id": 14605}, {"iscrowd": 0, "category_id": 1, "bbox": [2.0, 363.0, 246.0, 149.0], "area": 20369, "segmentation": {"size": [512, 512], "counts": "d_1P1L400001O0000000000000000000000000000000000000000000000000000001O00000000000000000000000000000000000000000000000000001O00000000000000000000000000000000000000000000000000001O0000000000000000000000000000000000K5L4K5L4K5L4K5L4N200000000000000000000000000000000000_Oa0\\O1c0[Oe000VCoMe;V30000000000000000000000000000000000000000UOk000000000000000000000000000000000001O0000000000000000000G9[Oe0J6000000000000000000000001O000001O00000000000000000000000000000000000000010O0000000000000000000000000000E;]Oc000VOeBoN[=7_CIa<\\OZDd0a=0gbS4"}, "image_id": 881, "id": 14606}, {"iscrowd": 0, "category_id": 1, "bbox": [475.0, 367.0, 37.0, 56.0], "area": 1988, "segmentation": {"size": [512, 512], "counts": "[l]7k0Y>l00000000000000000001O00000000000000000000000000000000000001O00000000000`D"}, "image_id": 881, "id": 14607}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 374.0, 39.0, 64.0], "area": 2082, "segmentation": {"size": [512, 512], "counts": "o;d1\\>003M00000000000000M3000000000G900001O0000000000009G00000000000000000000000hNXU\\7"}, "image_id": 881, "id": 14608}, {"iscrowd": 0, "category_id": 1, "bbox": [264.0, 375.0, 60.0, 116.0], "area": 6485, "segmentation": {"size": [512, 512], "counts": "^^T4l0j=Z1B>0000H8YOg0000000000000000000000000000000000000001O000000000001O000000000000000000000000000000000000000000000000E;eN[1fNbem2"}, "image_id": 881, "id": 14609}, {"iscrowd": 0, "category_id": 1, "bbox": [347.0, 377.0, 50.0, 88.0], "area": 4119, "segmentation": {"size": [512, 512], "counts": "im]5f0X>R1ROn00000001O01O00000000000000000000000000000000000000000000000000000000000000010O0000000000000g0VO3nN`Ti1"}, "image_id": 881, "id": 14610}, {"iscrowd": 0, "category_id": 1, "bbox": [448.0, 439.0, 64.0, 73.0], "area": 4532, "segmentation": {"size": [512, 512], "counts": "a_P7?b>o0UOk00000000000000000000000000000000000001O0000000000000000000000000000000000000000000000000000000000001O0000000000000000000000"}, "image_id": 881, "id": 14611}, {"iscrowd": 0, "category_id": 1, "bbox": [382.0, 473.0, 10.0, 21.0], "area": 182, "segmentation": {"size": [512, 512], "counts": "U_o59[?<000000000000000@gak1"}, "image_id": 881, "id": 14612}, {"iscrowd": 0, "category_id": 1, "bbox": [467.0, 0.0, 45.0, 92.0], "area": 3976, "segmentation": {"size": [512, 512], "counts": "P`Y79g?P1POo0QOd0\\O00000000000000000000000000000000000000000000000000000000000000O1000000000000000000"}, "image_id": 882, "id": 14613}, {"iscrowd": 0, "category_id": 1, "bbox": [237.0, 4.0, 123.0, 69.0], "area": 7687, "segmentation": {"size": [512, 512], "counts": "Y`f3=c?f0ZOf0ZO7I000000000000000000000O1000O10000000000000000000000000000000000000O1000O10000000000000000000000000000000000000O1000O10000000000000000000000000000000000000O1000O10000000000000000000000000000000000000O1000O1000000000000000000000000000>Bf0ZOf0ZORn[2"}, "image_id": 882, "id": 14614}, {"iscrowd": 0, "category_id": 1, "bbox": [207.0, 78.0, 142.0, 72.0], "area": 7835, "segmentation": {"size": [512, 512], "counts": 
"cbW37i?d0\\Oc0\\Oe0\\O1O00000000000000000000000000000000O10O1000000000000000000000000000000000O10O100000000000000000000000000000000000O10O1000000000000000000000000000000000O10O100000000000000000000000000000000000O10=Cc0]O9G00000000000000000000O10000000O100000000000000000000000000000O10000000O10009GR\\a2"}, "image_id": 882, "id": 14615}, {"iscrowd": 0, "category_id": 1, "bbox": [4.0, 82.0, 68.0, 144.0], "area": 8838, "segmentation": {"size": [512, 512], "counts": "eR25k?e0[Of0ZOf0ZOf0YOg0ZOe0[O6J0000000000000000000000O100000000000O100000000000000000000000000000O100000000000O100000000000000000000000000004Km0TOk0UOl0TOl0TOkik6"}, "image_id": 882, "id": 14616}, {"iscrowd": 0, "category_id": 1, "bbox": [469.0, 149.0, 43.0, 76.0], "area": 2973, "segmentation": {"size": [512, 512], "counts": "ndZ7>b?i0WOj0VO2FTN\\Bl1d=80000O10000000000000000000000000000008G100000000000000000000000000000000000SK"}, "image_id": 882, "id": 14617}, {"iscrowd": 0, "category_id": 1, "bbox": [272.0, 161.0, 84.0, 56.0], "area": 3633, "segmentation": {"size": [512, 512], "counts": "QVX46j?b0^O0000000000000000000M300O1000000000000000O10O100000000WOLfA4Z>i0000000000000000000000000000000000000O10O10000000000000000000000000000000000000000000000000O10O1000000000i0WOVj]2"}, "image_id": 882, "id": 14618}, {"iscrowd": 0, "category_id": 1, "bbox": [213.0, 233.0, 20.0, 10.0], "area": 200, "segmentation": {"size": [512, 512], "counts": "YgZ3:f?0000000000000000000000000000000000000gX[4"}, "image_id": 882, "id": 14619}, {"iscrowd": 0, "category_id": 1, "bbox": [194.0, 236.0, 17.0, 9.0], "area": 153, "segmentation": {"size": [512, 512], "counts": "\\WQ39g?0000000000000000000000000000000dXf4"}, "image_id": 882, "id": 14620}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 239.0, 11.0, 20.0], "area": 214, "segmentation": {"size": [512, 512], "counts": "_7d0\\?00000000O1000000000aXj7"}, "image_id": 882, "id": 14621}, {"iscrowd": 0, "category_id": 1, "bbox": [12.0, 269.0, 71.0, 170.0], "area": 10379, "segmentation": {"size": [512, 512], "counts": "`X6=c?a0_Ob0^Ob0^Ob0^Ob0^Ob0^Oa0_Ob0^OBa0_Ob0^Ob0^Ob0^Ob0^Ob0^Oa0_Ob0^OgRf6"}, "image_id": 882, "id": 14622}, {"iscrowd": 0, "category_id": 1, "bbox": [475.0, 306.0, 37.0, 85.0], "area": 2928, "segmentation": {"size": [512, 512], "counts": "ci]7>b?c0]Ob0^Oc0]O>B00000000O10000000000000000000O10000000000000000000000000000000O_F"}, "image_id": 882, "id": 14623}, {"iscrowd": 0, "category_id": 1, "bbox": [479.0, 415.0, 33.0, 83.0], "area": 2481, "segmentation": {"size": [512, 512], "counts": "Qm_77i?c0]Ob0^Oc0]Ob0^O000000000000O0100000000000000000000000000000000000O016JkB"}, "image_id": 882, "id": 14624}, {"iscrowd": 0, "category_id": 1, "bbox": [25.0, 492.0, 32.0, 20.0], "area": 630, "segmentation": {"size": [512, 512], "counts": "]o^1oA`NS>]1PBaNR>]1oAbNR>]1:N2N2N2M3N2N1O2N2M3N2N2N1O2N2M3N2N2N2N1N3N2NaRV3"}, "image_id": 886, "id": 14637}, {"iscrowd": 0, "category_id": 1, "bbox": [147.0, 451.0, 123.0, 61.0], "area": 3787, "segmentation": {"size": [512, 512], "counts": "ooY21n?1N2O1O1O1N2O1O1O1O1N2O1O1O1N2O1O1O1N2O1O1O1O1N2O1O1O1N2O1O1O1O1N2O1O1O1N2O1O1O1O1N2O1O1O1N2O1O1O11O1O1O1O001O1O1O1O1O001O1O1O1O001O1O1O1O001O1O1O1O1O001O1O1O1O001O1O1O1O1O001O1O1O1O001O1O1O1O001O1O1O1O1O001O1O1O1O001O1O1O1O1O001O1O1O1O001O1O1OQ`h3"}, "image_id": 886, "id": 14638}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 61.0, 70.0], "area": 2340, "segmentation": {"size": [512, 512], "counts": 
"e0=a?3N1N3N2N1N3N2M2O2M3N1O2M3N1O2O1O1O001O1O001O1O001O1O001fA^NT>g101O1O001O1OfNoAe0Q>ZOPBg0o=WOTBi0k=VOWBi0i=UOYBl0f=SO\\Bm0c=RO_Bm0a=QOaBP1^=oNdBQ1[=mNhBS1W=lNkBS1U=kNmB6ZOd0h=UOPC4[Of0c>XO_Af0b>XO`Af0c>XO_Af0o>M2O2M3N1N3N2M2O2N2M`^Q7"}, "image_id": 888, "id": 14639}, {"iscrowd": 0, "category_id": 1, "bbox": [65.0, 0.0, 43.0, 28.0], "area": 650, "segmentation": {"size": [512, 512], "counts": "P`P11o?1O1O1O1O1O1O1O1O1O1O2N1O1O1O1O1O1O1O1O1O1O1O2N1O1OO1O1O1O1O100O2N3M2N2N2N2N200O1N2KV@1`_Z6"}, "image_id": 888, "id": 14640}, {"iscrowd": 0, "category_id": 1, "bbox": [269.0, 0.0, 44.0, 22.0], "area": 527, "segmentation": {"size": [512, 512], "counts": "P`V41o?1O1O1O1O1O2N1O1O1O1O2N1O1O1O1O2N1O0000O1O1O1002N1O1O0000Fh@KY?5h@IY?6j@GW?89O1O1O1O100O1O1O1O1O10P`S3"}, "image_id": 888, "id": 14641}, {"iscrowd": 0, "category_id": 1, "bbox": [362.0, 0.0, 8.0, 5.0], "area": 25, "segmentation": {"size": [512, 512], "counts": "PPe52n?001O001O1OO1NRPW2"}, "image_id": 888, "id": 14642}, {"iscrowd": 0, "category_id": 1, "bbox": [369.0, 0.0, 15.0, 14.0], "area": 132, "segmentation": {"size": [512, 512], "counts": "V`h53k?2N3M201O001O1O001OD`@8d?O2M2N3Mloo1"}, "image_id": 888, "id": 14643}, {"iscrowd": 0, "category_id": 1, "bbox": [427.0, 0.0, 37.0, 15.0], "area": 255, "segmentation": {"size": [512, 512], "counts": "P`e61o?001O1O1O001O1O1O1O001O1O1O001O1O1O001OO1O1O1N2O1O1N2O1O1O1N2001O001O00NRPh0"}, "image_id": 888, "id": 14644}, {"iscrowd": 0, "category_id": 1, "bbox": [307.0, 3.0, 57.0, 63.0], "area": 1883, "segmentation": {"size": [512, 512], "counts": "`ai41S?0hA2V>OhA3V>OhA3V>OhA3V>0gA2W>0gA2W>0gA3V>O]AC8`0Z>1dA1Z>2dAOZ>3dAOZ>2eA0Y>1fA2W>m00O1N3M1O00010O0000002N2N2OO0000L_AjNb>U1`AiN`>W141O000001O2N2N3N1N2N2N2N2N3M2O1N2N2N2N3M2N2O1N2NXoY2"}, "image_id": 888, "id": 14645}, {"iscrowd": 0, "category_id": 1, "bbox": [380.0, 7.0, 16.0, 17.0], "area": 149, "segmentation": {"size": [512, 512], "counts": "aPn52m?2M2O2M3M2O2N110OO2M2O2M3N1N3Neoi1"}, "image_id": 888, "id": 14646}, {"iscrowd": 0, "category_id": 1, "bbox": [382.0, 11.0, 50.0, 69.0], "area": 1782, "segmentation": {"size": [512, 512], "counts": "`Qo52m?2N2M3N1O2M3N2N1N3N2N2M2O2N2TAROZ>2eAo0OPO[>3dAn0ORO[>1eAV1X>;N1O2O1O10O10N2M3N1O2O1000TOTBBl=`1eAbNX>e1N1UBYN\\=i1bBYN\\=i1bBXN\\=k1bBWN\\=k1aBXN]=i1bBYN[=V2001O1O2M3N200O1nN[BHb=9`BE`=;bBC_=;cBD_=:cBC_=5O1N1O00001O01O001O3M10O0001O000001O01O2N2N2N3N1N2N2N2N00010O00000001O01O02N2N2N1O10O001O3M2O1N2N2N3M2N2O1N2N3M2N2N2O_^m3"}, "image_id": 888, "id": 14649}, {"iscrowd": 0, "category_id": 1, "bbox": [339.0, 55.0, 56.0, 68.0], "area": 1790, "segmentation": {"size": [512, 512], "counts": "[bY51m?3N2N2N1N3N2N2N2N2M2O2^AZOg=h0WBYOh=i0VBYOg=j0VBYOh=h0WBZOg=h0WBZOh=g0VB[Oj=e0SB]On=c0PB_OP>a0nAAR>>mADR>=lAET>S110000000OO2N2M3N2N1O2M3N2N2N2N1N3N2JRAWOP?g040002M3N1O2N2N2M3N2N2N1N3N^]j1"}, "image_id": 888, "id": 14650}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 72.0, 32.0, 47.0], "area": 953, "segmentation": {"size": [512, 512], "counts": "X2Z1g>O0100O010O01000O01N1O0O1O003N1N3M2O2O1O0O2N1N3M3M2O2M2N3N1N3M^m_7"}, "image_id": 888, "id": 14651}, {"iscrowd": 0, "category_id": 1, "bbox": [218.0, 72.0, 77.0, 58.0], "area": 2030, "segmentation": {"size": [512, 512], "counts": "XS]32m?3N1N2N2N2O2M2N2N2N3N1N2N2N2O2M2N2N2N3N0O02O0O001O000001O01O000001O0001O001O2O2M2N2N2N3NO000001O0001O0001O0002N2N2N100O0001O2N3N1N2N2N3M2N2O1N2N3M2N2O1NX]\\3"}, "image_id": 888, "id": 14652}, {"iscrowd": 0, "category_id": 1, "bbox": [496.0, 79.0, 16.0, 33.0], "area": 281, "segmentation": {"size": [512, 512], "counts": 
"SSh72l?2O2N2M3N1O2M3N2N1N3N2N2M2O2N2`M"}, "image_id": 888, "id": 14653}, {"iscrowd": 0, "category_id": 1, "bbox": [142.0, 88.0, 83.0, 68.0], "area": 2669, "segmentation": {"size": [512, 512], "counts": "^SW22m?2N1O2N2B1g@2W?=N1O2N2N2O1O1O007I1O1O1O1O0000000000000010O10O1008H00O010000000O010000000O010000000O01N2N2M3N1O10O2N2N2O001O1N2O1O001000000O01POYAe0h>YOZAe0h>YOZAd0i>ZOYAd0S?N1O2M3N2N2N1N3N2N2Ng[_4"}, "image_id": 888, "id": 14654}, {"iscrowd": 0, "category_id": 1, "bbox": [288.0, 103.0, 67.0, 56.0], "area": 1787, "segmentation": {"size": [512, 512], "counts": "hS`41n?2N3M2N2O1N2N2N3M2N2N2N2N3M2O1N2N2N3M2ZAiNa>\\1000010O000000010ON2N2N2N3N1N2N2N2N2N10O0000000000010O00003M1O01O002N2O2M2N2N2N2N3M2O1N2N2N2NR\\^2"}, "image_id": 888, "id": 14655}, {"iscrowd": 0, "category_id": 1, "bbox": [401.0, 105.0, 89.0, 50.0], "area": 2378, "segmentation": {"size": [512, 512], "counts": "SdX62l?3N1O2N2N2N2N2N2N2N2N2M3N1O2N2N2N2O10000000000O0100N2N2N2N2N2N2N2N2M11O2N2N2N2N1N3N2N2N2N2N000000000O01000002N2N2N2N2000O100000O10000000000000O10O1N2N2N2N2N2N2M3N2N1O2N2N2N2N2Nik:"}, "image_id": 888, "id": 14656}, {"iscrowd": 0, "category_id": 1, "bbox": [332.0, 117.0, 67.0, 103.0], "area": 2942, "segmentation": {"size": [512, 512], "counts": "\\UV51n?3N1N3M3N2M2O2M3N2M2N3N2M3QASOg>n0XATOe>U1O2M3M3N1N3N2M3N1N2N010O010O01O01O010XOSN\\CU1\\O@X=^OZCQ1@@U=BXCn0F]OR=HVCk0JZOP=NSCh00XOl<3mBj08QOkN2M3M3NeZh1"}, "image_id": 888, "id": 14657}, {"iscrowd": 0, "category_id": 1, "bbox": [81.0, 147.0, 53.0, 69.0], "area": 1966, "segmentation": {"size": [512, 512], "counts": "aeX11m?3M2N3M2N3M2XABl=a0QBBl=`0RBBl=a0QBBm=?QBCn=?oADP>:jAIU>7iAKX>5eANY>m0N3M2010OO2M2N3M2O11N1N3M2N3M2N3M2N3M2N3I_AkNc>S1402N2N3M2O2M2N3M3M2N3M2N3M2N3MTkl5"}, "image_id": 888, "id": 14658}, {"iscrowd": 0, "category_id": 1, "bbox": [229.0, 149.0, 70.0, 74.0], "area": 2556, "segmentation": {"size": [512, 512], "counts": "Xeb31n?2N3N1N2N2VAGj=;TBGj=;TBGj=Q1000O1N2N3M1O01O00002O2M2N2N2N2N[OTBkNO4n=P1UBjNO4l=R1WBhNO4j=U1aBhN_=X1cBfN]=Z1eBeNZ=[1e001O00010O2N2N2N2N3N1N2N2N2N1O10O02N2N2N2N2N3N1N2N2N2N3MfZZ3"}, "image_id": 888, "id": 14659}, {"iscrowd": 0, "category_id": 1, "bbox": [508.0, 171.0, 4.0, 7.0], "area": 17, "segmentation": {"size": [512, 512], "counts": "]Un71n?3N1N2dJ"}, "image_id": 888, "id": 14660}, {"iscrowd": 0, "category_id": 1, "bbox": [452.0, 177.0, 55.0, 91.0], "area": 2690, "segmentation": {"size": [512, 512], "counts": "]VR72n?2M3N2M3M3N2M3N2M3JXOPAj0n>5N3N2M3bAiNn=Z1oAiNo=Y1oAiNn=Z1oAiNo=Y1nAhNQ>d10OJTNYBm1f=UNXBk1i=WNUBj1i=9N2M3M3N2M3N2M3NO012M3N1N3N3L3N1eN]B6f=H\\B6f=G]B6f=H\\B6e=H^B5e=I]B@C>R>O^B@C>P>2fBLZ=3iBJY=5iBIW=6lBGV=7mBGT=8nBET=9\\1LcY2"}, "image_id": 888, "id": 14661}, {"iscrowd": 0, "category_id": 1, "bbox": [123.0, 179.0, 48.0, 71.0], "area": 1918, "segmentation": {"size": [512, 512], "counts": "lfm13k?2O2M2N3M2N3M2N3M2N3FWOYAk0e>WOYAl0d>9N3M2dAdNR>_1kAdNS>]1kAeNV>c1O010N1N3N1010O010O10O010O01UOPBEo=:RBGn=6UBIk=5XBKh=2ZBNf=0]B0c=M_B3a=KbB2a=KaB3a=KbB2a=KaB3a=KbB2a=KaB3a=LaB1b=L`B2b=LaB1bgZ5"}, "image_id": 888, "id": 14662}, {"iscrowd": 0, "category_id": 1, "bbox": [273.0, 210.0, 75.0, 79.0], "area": 2984, "segmentation": {"size": [512, 512], "counts": "VgX41n?2O1N3M2N2N2N2N2N2N2O1N2N3M2N2N2N2N2N2O1N201O0000000cAgNR>Y1lAiNT>W1jAkNW>T1gAnNY>R1eAPO[>[1000001O000000O1N2OO3M2N2N2N2000N2N2N1O000001O000G_NTBa1k=bNTB]1l=;0FTBaNl=_1VB_Nk=9RBg05nNj=;hA53^OV>=hA34^OV>>gA2d>M^A1d>N]A0d>O_AOb>O`AOc>M`A1Y?N2N2N`ha2"}, "image_id": 888, "id": 14663}, {"iscrowd": 0, "category_id": 1, "bbox": [379.0, 219.0, 76.0, 67.0], "area": 2344, 
"segmentation": {"size": [512, 512], "counts": "ggm53m?2M3N2M3N2M3N2M2OO010O010O010O010O010O03N2M3N2M010O010HRO]An0d>TOZAl0e>WOXAi0i>7O010O102M3N2M3N2N0O10O010O3N2M3O10000000M3N2M3N2OO2cNeAP1^>nNdAP1^>mNeAP1^>nNdAP1h>M3N2M010O3N2M3N2M3N2M3N2M3N2M^Wl0"}, "image_id": 888, "id": 14664}, {"iscrowd": 0, "category_id": 1, "bbox": [172.0, 224.0, 60.0, 94.0], "area": 3138, "segmentation": {"size": [512, 512], "counts": "]Yf22l?3L3N3L3M3N3L3N3L3N2[OSOTBQ1h=ROUBQ1i=ROTBP1i=SOTBQ1i=ROTBP1i=SOTBQ1i=d0M3M4M2M4M2M3010O01O01O010O01O01O010OO1O20SOlBlNT=R1nBmNS=P1PCmNT=P1oBmNS=P1PCmNS=Q1PClNS=P1PCmNS=P1PCmNT=P1oBmNS=P1PCmNS=Q1P1L3N3L3M3N3L3N2M4M2M4Mfh[4"}, "image_id": 888, "id": 14665}, {"iscrowd": 0, "category_id": 1, "bbox": [62.0, 226.0, 76.0, 57.0], "area": 2828, "segmentation": {"size": [512, 512], "counts": "ZXo03j?4K4AHSA;j>HSAHUA;h>`0M4L3N21O01O01O01O01O01O00010O00010O00010O00010O00010O0001O01O01OO2M20010O00M4L3O101O01O01O01OnN]Ag0c>UOaAk0j>10O00010O00010O000010O00010O00N3L3M3Fi@HZ?5;LThj5"}, "image_id": 888, "id": 14666}, {"iscrowd": 0, "category_id": 1, "bbox": [230.0, 273.0, 70.0, 55.0], "area": 2604, "segmentation": {"size": [512, 512], "counts": "mYc33`?=DObA4]>h0000000EDVOaAj0`>XO^Ah0a>;100O01000O010O01000O010O10O10O10O010O10O10OE]AZOd>c0bAZO^>c0hAZOW>g0a0O10O10O01000O010O10O10O01002M3N3M3L4M2M4M3MPUW3"}, "image_id": 888, "id": 14671}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 380.0, 23.0, 18.0], "area": 387, "segmentation": {"size": [512, 512], "counts": "mko2a0_?000000000000O01000000000000000000000000003MQdd4"}, "image_id": 888, "id": 14672}, {"iscrowd": 0, "category_id": 1, "bbox": [60.0, 402.0, 62.0, 72.0], "area": 3202, "segmentation": {"size": [512, 512], "counts": "l\\n01o?b0^Ob0^O1O000HQO_Ao0a>80000O10000000000000000000O100:F?A00000O1004K100000000000000000000000O10000000VOaBROb=l0cBoN]=Q1cBQO[=o0eBQO[=o0eBPO\\=P1dBPO[=Q1eBoN[=Q1eBoN[=Q1j0000004L1NOeA1[>OeA1[>OeA1[>OeA2Z>NjAN[?H10lag4"}, "image_id": 888, "id": 14674}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 72.0, 93.0], "area": 3561, "segmentation": {"size": [512, 512], "counts": "i0R1m>01000O0100000O010000O010000bAROZ=0aBn06UOV=0aBk08ZOR=0bBf0<^OnN2O10O0100000O010M3N2N2N1N3N2N2M2O0002N1N3N2N2N1N3N2N2M2^OQA2Q?LQA2Q?KRA2Q?LPA3R?KPA3Q?KRA3_?M2Od[]7"}, "image_id": 889, "id": 14676}, {"iscrowd": 0, "category_id": 1, "bbox": [391.0, 379.0, 63.0, 68.0], "area": 2095, "segmentation": {"size": [512, 512], "counts": "YmS61n?2N2M2O2M3N2N1N3N2N1N3O1O1N1N3L4M3N1O2M3N1N3N2N2M2O2N2M3OO1O2M3N2N1N100O01000O01000O102N1N3N2M2O2N2M3N1O2M3Gk@DW?:k@DV?;k@CX?:9N2N1N3N2Micl0"}, "image_id": 889, "id": 14677}, {"iscrowd": 0, "category_id": 1, "bbox": [33.0, 431.0, 12.0, 12.0], "area": 86, "segmentation": {"size": [512, 512], "counts": "em`03k?2N3M30O010O001M2O2M2O^RY7"}, "image_id": 889, "id": 14678}, {"iscrowd": 0, "category_id": 1, "bbox": [487.0, 468.0, 25.0, 37.0], "area": 539, "segmentation": {"size": [512, 512], "counts": "Xoc72m?2N1N3N2N2M2O2N2M3N1O2M3000O10O100O0N2O002M3N2N1\\A"}, "image_id": 889, "id": 14679}, {"iscrowd": 0, "category_id": 1, "bbox": [371.0, 0.0, 28.0, 13.0], "area": 240, "segmentation": {"size": [512, 512], "counts": "R`i53k?201O00001O001O00001O001O00001O00001O001O0000O1M3M3N2MS`h1"}, "image_id": 890, "id": 14680}, {"iscrowd": 0, "category_id": 1, "bbox": [419.0, 0.0, 34.0, 28.0], "area": 616, "segmentation": {"size": [512, 512], "counts": "Z`a62l?3L3N3M2O101O0e@BT??i@CW?c001O001O001O00001O001O00001OO1M3N2N2M3N2M3N2M3N2MS`m0"}, "image_id": 890, "id": 14681}, {"iscrowd": 0, "category_id": 1, "bbox": [468.0, 0.0, 
43.0, 49.0], "area": 1333, "segmentation": {"size": [512, 512], "counts": "UQZ73k?2M3N3M2ZOFcA>Z>EcA=[>EbA?[>DbA>^>B`A`0`>@]Ad0b>=01O00001O001O001O00001O00O1M3N2M3N2N2M3N2N2M3N2N2M3N2N3L3N3L3NP`0"}, "image_id": 890, "id": 14682}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 58.0, 67.0, 142.0], "area": 5778, "segmentation": {"size": [512, 512], "counts": "R2o3o;3M2N2N30O01O010O01O010O01O010O01O0O2M2M3N3M2M4M2N2M4M2M4M2N3L3N2N3L3N3M2M4M2N2M4M2M4M2N3L3N2N3L3N3M2M4M2M3N3M2M4M2N2M4M2N3L3N3M2M3N^]n6"}, "image_id": 890, "id": 14683}, {"iscrowd": 0, "category_id": 1, "bbox": [26.0, 80.0, 185.0, 207.0], "area": 16109, "segmentation": {"size": [512, 512], "counts": "TX=1m?3M2N3M2N30O0010O010O00010O010O0010O0010O010O00010L3N3M2M4M2M3N3M2M4M2M3N3M2M4M2N3L3N2M4M2N3L3N3L3N2N3L3N3L3N2N3L3N3L3N3M2M100O01000O0100O0100O0100O0100O01001N4M2M4M2N3L3N2M4M2N3L30001O010O010O0M3N3EWK[Ek4b:XK[El4b::N2N30O01O0N3O00010O010O01O0N2M4M2M4M2N3L3N2M4M2N3L3N2M4M2N3L3N3M2M3N3L3N3M2M3N3L3N3M2M4M2M3N3M2M4M2M3N3M2M4M2M4M2N2M4M2N3L3N2M4M2N3M210O01O01O010O0M3N3XOk@a0[?M4M2M4M2N2M`\\f4"}, "image_id": 890, "id": 14684}, {"iscrowd": 0, "category_id": 1, "bbox": [208.0, 140.0, 248.0, 310.0], "area": 50096, "segmentation": {"size": [512, 512], "counts": "[\\X31l?3N2M4M2M4M2N2M4M2M4M2M3N3L3N3M2M3N3L3N3L3N2N3L3N3L3N2M4M2M4M2N2M4M2M4M2M3N3M2M4M2M3N3L3N3L3N3M2M3N3L3N3L3N2N3L3N3L3N2M4M2M4M2N2M4M2M4M2VFPJa9S6\\FPJb9X6N3L3N3L3N2M4M2M4M2N2M4M2M4M2M3N3M201O01O01N1M4M2M3N3M2M4M2M3N3L3N3MO03N2M3N3L3O2O01O010O01O01O010O01O01O0N3O01O010O01O01O010O01O01O010O01M2M3N3L3N3L3N2M4M2M4M20010O010O00010O010OO1N3L3N3N11O01O010O01O01O010O01O01O010O01O01O010O010O00010O010O00010O010O00010O010OhGiHg7W7WHlHh7U7THnHm7Q7QHRIn7o6nGTIS8\\701O0hIWHZ4i7cKZH\\4f7aK]H`4b7^KaHe1FAj7hNaHd1HDg7iN`Ha1KGe7gNaH^1NJa7iN`H[11M^7hNbHW140[7hNaHV162Y7iN`HR1:4X7aMSHm0<[1<3X7cMRH7O8:i1?3X7bMTH6N<8e1b03X7cMRH7O>4c1e03c7iMgGa01`1h0oMnNl1f8QNfGb00_1i0oMmNm1W9bNVG_1P1Lm7cNVG^1P1Lm7cNUG^1Q1Lm7cNUG_1Q1Km7cNUG^1Q1Lm7cNUG_1P1Lm7cNVG]1Q1Mo81SGLP91TGLo81SGLP91SGMP90SGLP91SGMP90SGLP91SGMo81SGLQ90SGMo81SGLQ90SGMo81SGLP91SGMP90a3Mjgk0"}, "image_id": 890, "id": 14685}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 206.0, 22.0, 16.0], "area": 192, "segmentation": {"size": [512, 512], "counts": "eVi62k?3N2N3O010O00010O010O010O00010O010O00O2M2M^ik0"}, "image_id": 890, "id": 14686}, {"iscrowd": 0, "category_id": 1, "bbox": [468.0, 209.0, 44.0, 43.0], "area": 1185, "segmentation": {"size": [512, 512], "counts": "oVZ71m?3L3N3e@Gj>9PAJQ?c0O000PATOm>P1O01O01O010O01O01O010O01O01O010O01O0N2O20O010O00010OO2APAIS?5o@IS?4PAIT?4=M4MRI"}, "image_id": 890, "id": 14687}, {"iscrowd": 0, "category_id": 1, "bbox": [248.0, 421.0, 30.0, 22.0], "area": 343, "segmentation": {"size": [512, 512], "counts": "^]l33k?2M4M2N2010O010O00010O010O01O01O010O01O01O010O010M2N2N3L3Ncbd3"}, "image_id": 890, "id": 14688}, {"iscrowd": 0, "category_id": 1, "bbox": [336.0, 498.0, 38.0, 14.0], "area": 285, "segmentation": {"size": [512, 512], "counts": "m_X53k?2M3N2M3O1001O001O00001O001O001O00001O001O00001O001O00001O001O00001O001O00Q`T2"}, "image_id": 890, "id": 14689}, {"iscrowd": 0, "category_id": 1, "bbox": [506.0, 499.0, 6.0, 13.0], "area": 40, "segmentation": {"size": [512, 512], "counts": "o_m71m?2N2M3N2M3"}, "image_id": 890, "id": 14690}, {"iscrowd": 0, "category_id": 1, "bbox": [47.0, 0.0, 315.0, 235.0], "area": 40819, "segmentation": {"size": [512, 512], "counts": 
"fbg01n?2N2N1O2M3N2N2N2M2O2N2N2M3N1O2N2N2M3N1O2N2M3N2N2N1O2M3N2O10O100O1N1O2N2N2M30O100000O1N2M2O1O0000O3N1O2N2M3N2N2N1O2M3N2N2N1N3N2N2N2N1N3N2N2N2M3N1O2N2M3N2N1O2N2M3N2N2N1N3N2N2N2N1N3N2N2N2M2O2N2N2M3N2N101O1O1O1O001O1O1O1O1O001O1O1O1O001O1O1O1O001\\EPKV:Q5gERKX:o4fESKY:n4eETKZ:m4dETK\\:l4cEVK\\:X5O1O1O001O1O1O1O001O1O1O1O1O001O1O1O1O001O1O1O1O1O001O1O1O1O001O1O1O1O1O001O1O1O1O001O1O1O1O001O1O1O1O1O001O1O1O1O001O1O1O1O1O001O1O1O1OO1O1O1N2O1O1O1N2O1O1O1O1N2O1O1O1N2O1O1O1O1N2O1O1O1N2O1O1O1N3N2N2N2N2M2O2N2N2M3N1O2N2N2M3N2N1O2M3N2N2N1N3N2N2N2N2M2O2N2N2M3N1O2N2N2M3N1O2N2M3N2N2N1O2M3N2N2N1N3N2N2N2M3N1O2N2N2M3N1O2N2M3N2N1O2N2M3N2N2N1N3N2N2N2M2O2N2N2N2M3N1O2N2Bn@JU?4m@JU?3n@KS?4n@KT?3n@KT?3>MhmZ2"}, "image_id": 891, "id": 14691}, {"iscrowd": 0, "category_id": 1, "bbox": [293.0, 114.0, 219.0, 288.0], "area": 35204, "segmentation": {"size": [512, 512], "counts": "Xhb41n?2N2N2N1N3N2N2N2N2M2O2N2N2N2M3N1O2N2M3N2N2N1O2M3N2N2N2N1N3N2N2N2N2M21000O100000O1000O100000O1000O100000O1000N2M3N2N1O2N2M3N2N2N1O2M3N2N2N2N1N3N2N2N2N2M2O2N2N2N2M3N1O2N2N2M3N2N1O2N2M3N2N2N1O2M3N2N2N2N1N3N2N2N2M3N1O2N2N2M3N2N1O2N2M3N2N2N1O2M3N2N2N2N1N3N2N2N2N2M2O2N2N2N2M3N1O2N2N2M3N2N1O2N2M3N2N2N1O2M3N2N2N2N1N3N2N2N2N2O0100000O10[OnHZHS7c7PI[HR7c7PI[HR7e7nHXHU7h7jHWHW7j7iHTHY7l7gHRH[7n7eHPH]7o7dHnG_7R88O0O2M3N2N2N1_OiGZIU8f6mGXIS8h6nGTIMGU8U7PHRIU8l6mGRIU8n6kGoHX8Q7hGmHZ8S7:0O01000000000O0100N2N2M3N1O2N2N2M3N2N1O2N2M3UO\\FWKf9g4\\FWKf9g4\\FWKe9h4]FVKe9h4]FUKf9h4\\FWKf9g4\\FWKf9g4\\FWKh4"}, "image_id": 891, "id": 14692}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 256.0, 11.0, 21.0], "area": 126, "segmentation": {"size": [512, 512], "counts": "P8e0[?O2N2N2M3N2N1O2N2M3NiWj7"}, "image_id": 891, "id": 14693}, {"iscrowd": 0, "category_id": 1, "bbox": [268.0, 338.0, 65.0, 58.0], "area": 1385, "segmentation": {"size": [512, 512], "counts": "oZV43l?2N2N1N3N2N2N2M2O2N2N2O10O010000000O0100000000O0100000000O010000000O010000000O0100000000O010000000O010000N2N2N1O2M3N2N2N1N3N2N2NUTi2"}, "image_id": 891, "id": 14694}, {"iscrowd": 0, "category_id": 1, "bbox": [102.0, 352.0, 51.0, 56.0], "area": 1141, "segmentation": {"size": [512, 512], "counts": "^\\c12m?2N1JL_@6_?L^@7`?6N2N2M2O2N2N000O1000O11O2M2O0000000O010000000O010000000O010001O2N1N3N2N2N2N2M2O2N2N2N2M3N1OfTc5"}, "image_id": 891, "id": 14695}, {"iscrowd": 0, "category_id": 1, "bbox": [142.0, 375.0, 77.0, 85.0], "area": 1784, "segmentation": {"size": [512, 512], "counts": "o]W22l?2O2N2N2N2N2N2M3N1O2N2N2N2N0O0100000000000O0100000000000O0100000000000O0100000000000O0100000000000O0100000000000O01000000000002M3N2N2N1O2N2N2M3N2N2N2N1OoSb4"}, "image_id": 891, "id": 14696}, {"iscrowd": 0, "category_id": 1, "bbox": [203.0, 408.0, 16.0, 15.0], "area": 146, "segmentation": {"size": [512, 512], "counts": "RmU31m?2M4M2M301O010O00010O01M2M3N3LWSb4"}, "image_id": 891, "id": 14697}, {"iscrowd": 0, "category_id": 1, "bbox": [340.0, 463.0, 26.0, 34.0], "area": 630, "segmentation": {"size": [512, 512], "counts": "X_Z55f?6J5K5K5K5010O000000010O000000010O00000L4K6J5K5K5KbaX2"}, "image_id": 891, "id": 14698}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 41.0, 46.0], "area": 930, "segmentation": {"size": [512, 512], "counts": ";3l?3M2O2M2N2O2M2N3N1N3M2N3N1N2N3N1O2O0010O00010M2O2M2N3M2O2M2N2O2M2N3N1N3M2N2O2M2NQ_[7"}, "image_id": 893, "id": 14699}, {"iscrowd": 0, "category_id": 1, "bbox": [30.0, 0.0, 94.0, 58.0], "area": 2490, "segmentation": {"size": [512, 512], "counts": 
"UP?1o?1N2N3M2O1N3M2O1O2N1O1O1O2N1O1O2N1O1O2N1O1O2N1O1OO1O100O1O100O1O1O100O1O100O1O1O100O1001O2N1O2N1O1O2N1O2N1O1O2O00010O0010O00010O010O0O1N3M2O2O00010N1O2M2N2O2M2N3M2O1N3M100O0000101N3M2O1NfnQ6"}, "image_id": 893, "id": 14700}, {"iscrowd": 0, "category_id": 1, "bbox": [164.0, 0.0, 7.0, 4.0], "area": 18, "segmentation": {"size": [512, 512], "counts": "PPb21o?2N1O00O1O1OQ`Z5"}, "image_id": 893, "id": 14701}, {"iscrowd": 0, "category_id": 1, "bbox": [208.0, 0.0, 51.0, 19.0], "area": 592, "segmentation": {"size": [512, 512], "counts": "QPX31n?3N1O1O2N1O2N1O2N1O1O2N0000O1O1O100O1O1O11O2N1O1O2N00O100O1O1O100O1O1O100O1O1O100O1O100O1O1O100O1O1OQ`n3"}, "image_id": 893, "id": 14702}, {"iscrowd": 0, "category_id": 1, "bbox": [411.0, 0.0, 99.0, 78.0], "area": 3934, "segmentation": {"size": [512, 512], "counts": "a`]61o?1N3M2O2M2N2O2M2N3M2O2M2N2O2M2N3N1N2N0010O000010O01002N1cAiNo=Y1nAiNQ>X1mAkNQ>W1mAjNR>W1lAkNS>c1N1O2N1O2N1O2N1O1O2N^N\\Bi0c=VO_Bk0_=UOcBj0\\=UOfBj0Z=UOiBi0W=VOkBh0V=XOkBf0V=YOmBe0S=ZOmBf0T=ZOkBf0V=YOkBf0V=YOjBh0V=XOiBh0X=WOiBh0X=WOiBi0W=WOiBh0X=WOiBh0X=WOhBj0X=UOhBk0Y=UOfBk0[=TOfBk0[=TOeBm0[=SOdBm0]=ROdBm0]=ROcBo0]=QObBo0_=PObBo0_=POaBQ1a=lN_BT1c=kN\\BU1X>O01O01O01O01O01O02N2O2M2N2N3N1N3M2O2M2N2O2M2N3M2O2Min0"}, "image_id": 893, "id": 14703}, {"iscrowd": 0, "category_id": 1, "bbox": [202.0, 15.0, 23.0, 24.0], "area": 283, "segmentation": {"size": [512, 512], "counts": "hPU31n?2N2N3N1N2N3M2O2M2N2N20O1N3M2N3N1N2N3M2O1N3MS__4"}, "image_id": 893, "id": 14704}, {"iscrowd": 0, "category_id": 1, "bbox": [235.0, 18.0, 56.0, 54.0], "area": 1585, "segmentation": {"size": [512, 512], "counts": "\\ae32n?2M2N3M2O1N3M2O2M2N2N3N1N3M2O1N3M2N3N0O0001O02N2N2OO0000010O00010O0L_AjNa>V1411N2N3M2O1N3M2O2M2N2N3N1N3M2N2O2M2N2O2Me^^3"}, "image_id": 893, "id": 14705}, {"iscrowd": 0, "category_id": 1, "bbox": [141.0, 21.0, 69.0, 58.0], "area": 2189, "segmentation": {"size": [512, 512], "counts": "UaV21n?2N2d@Nh>4VAMh>5VANg>5VAMh>5WALh>5VANg>5VAMh>5VANh>f0N2N3M2O2M2N2O1N001O01O2N2O2M2N3N1N2N3M2O2M0010O0001O01O01O01O0001O01O01O01O00010O0001O101N3M2O1N3M2N3N1N2N3N1N3M2N2Ohnf4"}, "image_id": 893, "id": 14706}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 40.0, 45.0, 63.0], "area": 1779, "segmentation": {"size": [512, 512], "counts": "]1^1b>1N3M2O1N3M2N2O2O00N3N1N2N3M2O1N3M2OO01O00010O0000010O0WO_A4a>LaA3^>NcA0^>OeAN[>2gAL\\>1dA0]>OcA0_>NaA2b>K^A5d>I\\A8e>G[A8h>EZA9h>E[A9W?N2N3M2Od]Y7"}, "image_id": 893, "id": 14707}, {"iscrowd": 0, "category_id": 1, "bbox": [261.0, 42.0, 76.0, 69.0], "area": 2142, "segmentation": {"size": [512, 512], "counts": "ebR41n?2N2O2M2N2N3N1N2N3M2O1N3M2N2O2M2N2N3N11M2O1N3M2N010O0000010O0001O01O0001O01O00@UOlAl0S>WOjAi0V>YOiAf0X>[OfAe0Z>^OcAc0\\>_ObAa0^>AaA`0^>b0N2N000101N2N2N3N1N3M2N2O2M2N2N3N1N2N3M2O1N3M2N2O2M2N2N3Nf]g2"}, "image_id": 893, "id": 14708}, {"iscrowd": 0, "category_id": 1, "bbox": [197.0, 44.0, 27.0, 25.0], "area": 344, "segmentation": {"size": [512, 512], "counts": "iaR33l?2O1N3M2O2M2N2N010O000010O000010O000101N2N3M2O1N3M2N\\n_4"}, "image_id": 893, "id": 14709}, {"iscrowd": 0, "category_id": 1, "bbox": [96.0, 63.0, 53.0, 48.0], "area": 1264, "segmentation": {"size": [512, 512], "counts": "hR`12m?3N1N2N3N1N2N3M2O1N3M2O1N3M2N2O1N1O01O0001O01O01O01O0001O01O00010O0000101N2N3M2O1N3M2N2O2M2N3N1N2N3M2O1N]]e5"}, "image_id": 893, "id": 14710}, {"iscrowd": 0, "category_id": 1, "bbox": [482.0, 76.0, 30.0, 48.0], "area": 952, "segmentation": {"size": [512, 512], "counts": "PSa73l?2O2M2N3GGf@;X?Gg@:X?9M2N3N1N3M2O1N3M2N3N1O10N3M10O00010O0001O011NdM"}, "image_id": 893, "id": 14711}, 
{"iscrowd": 0, "category_id": 1, "bbox": [233.0, 92.0, 29.0, 28.0], "area": 426, "segmentation": {"size": [512, 512], "counts": "Xcd31n?2N2N2O2M2N2N2O2M2N2N2O2M001O01O1O2O2M2N2N3N1N2N3M2O1N3Mfll3"}, "image_id": 893, "id": 14712}, {"iscrowd": 0, "category_id": 1, "bbox": [88.0, 98.0, 24.0, 27.0], "area": 336, "segmentation": {"size": [512, 512], "counts": "ZS\\13l?2N2O2M2N2N3N1N3M2O1N30O0N3N1N2N3N1N3M2N2O2M2N^lW6"}, "image_id": 893, "id": 14713}, {"iscrowd": 0, "category_id": 1, "bbox": [283.0, 113.0, 52.0, 59.0], "area": 1430, "segmentation": {"size": [512, 512], "counts": "md]41k?6J5BGPA>n>Co@`0o>Cn@?P?;M2O2M2N2O0O0001O01O00H]ATOd>k0^ATOa>m0aAPO_>P19O01O01O01O0001O01O01O01O01O01O02N2N3N1N3M2O2M2N2O2M2N3M2O2M2Nk[h2"}, "image_id": 893, "id": 14714}, {"iscrowd": 0, "category_id": 1, "bbox": [22.0, 114.0, 54.0, 48.0], "area": 1474, "segmentation": {"size": [512, 512], "counts": "YT;1o?1N3M2N2O2M2N3N1N3M2O2GZOSAh0k>ZOSAh0k>9M2O2M2N100O0001O01O02N2O2M2N2OO0001O01O00010O02N3N1N3M10O0001O010Ak@2W?Lk@2X?Kk@3V?Kl@3W?Kj@3d?N1Nhki6"}, "image_id": 893, "id": 14715}, {"iscrowd": 0, "category_id": 1, "bbox": [96.0, 120.0, 29.0, 28.0], "area": 430, "segmentation": {"size": [512, 512], "counts": "TT`11n?2N3M2O1N3M2N3N1N3M2O1N1O00010O01O2N3N1N3M2N2O2M2N2N3N1Nj[Q6"}, "image_id": 893, "id": 14716}, {"iscrowd": 0, "category_id": 1, "bbox": [500.0, 123.0, 12.0, 19.0], "area": 142, "segmentation": {"size": [512, 512], "counts": "RTj73m?1N3M2O2M3M2OO01O01O01UL"}, "image_id": 893, "id": 14717}, {"iscrowd": 0, "category_id": 1, "bbox": [507.0, 141.0, 5.0, 11.0], "area": 33, "segmentation": {"size": [512, 512], "counts": "`dm72m?2N3M2O2aK"}, "image_id": 893, "id": 14718}, {"iscrowd": 0, "category_id": 1, "bbox": [251.0, 142.0, 18.0, 18.0], "area": 160, "segmentation": {"size": [512, 512], "counts": "ddm31o?1N2N3N1N3M2O1N3M12M2N2O2M2N3N1N2NX[i3"}, "image_id": 893, "id": 14719}, {"iscrowd": 0, "category_id": 1, "bbox": [127.0, 148.0, 47.0, 41.0], "area": 879, "segmentation": {"size": [512, 512], "counts": "\\eo11n?2N3M2O2M2N2O2M2N3N1N3M010O000010O00010O000010O000010O00010O000010O02N2O2M2N2O2M2N3N1N3M2N2OljX5"}, "image_id": 893, "id": 14720}, {"iscrowd": 0, "category_id": 1, "bbox": [53.0, 153.0, 59.0, 56.0], "area": 1685, "segmentation": {"size": [512, 512], "counts": "dej01n?2N3N1^@KV?8h@IV?9h@JU?8i@JV?a0N3M2O1N2N3M2O1N1O01O01O01O0001O01O01O01O0001O01O01O01O0001O01O3N1N2N3M2O2M2N2O0O001O102M2N3N1N2N3KZ@Lg?26NajW6"}, "image_id": 893, "id": 14721}, {"iscrowd": 0, "category_id": 1, "bbox": [11.0, 154.0, 20.0, 23.0], "area": 235, "segmentation": {"size": [512, 512], "counts": "Pe52m?2N3N1N3M2O1N3M201O01O0O2M2N2O2M2N3M2OgZ`7"}, "image_id": 893, "id": 14722}, {"iscrowd": 0, "category_id": 1, "bbox": [227.0, 154.0, 35.0, 32.0], "area": 566, "segmentation": {"size": [512, 512], "counts": "Xea3120i?5N2O2O0O1N3N1N2N3M2O1N001O01O00010O0001O01O002O1N3M2O1N3M2N2O2M2N2Ohjl3"}, "image_id": 893, "id": 14723}, {"iscrowd": 0, "category_id": 1, "bbox": [117.0, 156.0, 18.0, 16.0], "area": 147, "segmentation": {"size": [512, 512], "counts": "Uej11n?3N1N2N3M100O00010O000010O3M2N3N1NoZl5"}, "image_id": 893, "id": 14724}, {"iscrowd": 0, "category_id": 1, "bbox": [346.0, 161.0, 64.0, 52.0], "area": 1569, "segmentation": {"size": [512, 512], "counts": "RV]51n?2N3M2O2M2N2O2M2N2O2M2N3N1N00010O0001O01O010O2N3M100O00010O00010O0001O01O01O01O01O00010O0001O01O3N1N3M2N3N1N2N3N1N3M2N2O2M2N3NYjb1"}, "image_id": 893, "id": 14725}, {"iscrowd": 0, "category_id": 1, "bbox": [275.0, 168.0, 18.0, 24.0], "area": 241, "segmentation": {"size": [512, 512], 
"counts": "]eY435Na?4^@N`?4]@Na?;M2O1N3M21M2O2M2N3N1N3M2O2M2NZZ]3"}, "image_id": 893, "id": 14726}, {"iscrowd": 0, "category_id": 1, "bbox": [78.0, 196.0, 81.0, 59.0], "area": 2228, "segmentation": {"size": [512, 512], "counts": "RWW11o?2M2N2W@Ld?9N3M2O1N3M2N3N1N3M2O1N3M2N3N0O1O01O002OO00010O0000010O00010O00010O0000010O00010O0003M2O1N3M2O2M2N2O2M000010O000010O00010O000010O000010O3M2N3N1N3M2O1N3M2O^Y`5"}, "image_id": 893, "id": 14727}, {"iscrowd": 0, "category_id": 1, "bbox": [376.0, 197.0, 61.0, 59.0], "area": 1601, "segmentation": {"size": [512, 512], "counts": "\\Wl51o?1N2N3N1N3M2GEj@=T?Fj@Q120001O01O01O01O0001O01O01O0001O01O011N3M2N3N1N2N3M2O1N3M2O2M2N2N3NVYU1"}, "image_id": 893, "id": 14728}, {"iscrowd": 0, "category_id": 1, "bbox": [299.0, 198.0, 56.0, 56.0], "area": 1554, "segmentation": {"size": [512, 512], "counts": "Tge43m?1N3M2O1N3M2N2O2M2N3M2O1N3M2N2O2M2N1O10O000010O0000010O0000010O00LYAPOg>P1\\AnNd>Q151O01O01O0001O2O1N2N1O01OJQA\\OR?a0PA]OR?a08Eb@2`?Kb@3`?Kb@3h?M2OXY^2"}, "image_id": 893, "id": 14729}, {"iscrowd": 0, "category_id": 1, "bbox": [221.0, 206.0, 52.0, 49.0], "area": 1241, "segmentation": {"size": [512, 512], "counts": "]g^32m?3N1N2N3M2O2]OC^A?a>B]A`0a>C]A?`>C^A?`>D]A?a>B]A?b>C]A`01O01O00010O0001O101N2N3N1N3M2N2O2M2N3N1N1O00010O0001O01O00012M2N3N1N2N3M2OPYg3"}, "image_id": 893, "id": 14730}, {"iscrowd": 0, "category_id": 1, "bbox": [324.0, 240.0, 61.0, 48.0], "area": 1495, "segmentation": {"size": [512, 512], "counts": "WXR52m?2O2M2N2N3N1N3M2N2O2M2N3N1N2N3O0000OO00010O0000010O0000010O00010O00000102M2N2N01O00010O00000100O2N3ON01O012M2N2Cc@5_?Ib@5`?Ic@4f?N2OoWo1"}, "image_id": 893, "id": 14731}, {"iscrowd": 0, "category_id": 1, "bbox": [182.0, 244.0, 51.0, 43.0], "area": 1210, "segmentation": {"size": [512, 512], "counts": "UXk23l?2N3N1N2N3N1N2N3M2O2M2N2N3N1N3M2O1N1O0002O2M1O1O01O01O01O0001O102M2N2O2M2N3M200010O01N1O1N3M2N2O2M2NdW[4"}, "image_id": 893, "id": 14732}, {"iscrowd": 0, "category_id": 1, "bbox": [393.0, 248.0, 55.0, 55.0], "area": 1545, "segmentation": {"size": [512, 512], "counts": "ohT62m?2N3M2O1BGRAFSAERA=l>FRAFSA?GmNaAR1_>PO_AP1b>QO\\Ao0d>8N010O0000010O000100O3M2N2OO0000010O000010O00002O1N2N3N1N3M2N2O2M2N3N1N2N3M2O1N3Mbgo0"}, "image_id": 893, "id": 14733}, {"iscrowd": 0, "category_id": 1, "bbox": [221.0, 255.0, 80.0, 59.0], "area": 2218, "segmentation": {"size": [512, 512], "counts": "Ui^32n?1N2N3M2O2M2N2N3N1N3M2O1N3M2N01O0001O01O0001O01O00010O0001O2O2M2N2N2O00O3M00010O0000010O0001O01O00010O000002O1N0001O2O2M2N3M2O1N3O01OO2N1N2N3M2O2M2N2O2M2N2N3NTWY3"}, "image_id": 893, "id": 14734}, {"iscrowd": 0, "category_id": 1, "bbox": [112.0, 262.0, 81.0, 61.0], "area": 2480, "segmentation": {"size": [512, 512], "counts": "hXh13m?1N2N3N1N3M2N2O2M2N3M2O1N3M2O2M2N2N3N1N2N3O010O00010O0010O0N3N1N2N00010O0001O01O00010O0000010O0001O01O00010O0001O01O002O2M2N2O2M2N3M2O1N3M2N3N1N2O2O0O1O1N2N2N2OoVo4"}, "image_id": 893, "id": 14735}, {"iscrowd": 0, "category_id": 1, "bbox": [361.0, 272.0, 28.0, 28.0], "area": 418, "segmentation": {"size": [512, 512], "counts": "khd52m?3N1N3M2O1N3M2N3N1N2N2O0O00010O03M2N3N1N3M2O1N3M2N3N1NQWm1"}, "image_id": 893, "id": 14736}, {"iscrowd": 0, "category_id": 1, "bbox": [308.0, 295.0, 70.0, 74.0], "area": 2459, "segmentation": {"size": [512, 512], "counts": "kZj42m?3N1N3M2N2O2M2N3E]OUAd0_>\\ObA2Me0^>\\OcA1Le0_>\\OcA1Le0_>]ObAn0\\>SOcAn0[>;0010O0000010O000010O0001O01O0KeAfN[>[1gAcNY>\\15001O01O0001O01O01O00010O02N3M2OO02N2O1N3M201O0O2M2O1N3M2O2M2N3N1N3M2N2O2KmeR2"}, "image_id": 893, "id": 14737}, {"iscrowd": 0, "category_id": 1, "bbox": [420.0, 
304.0, 58.0, 55.0], "area": 1549, "segmentation": {"size": [512, 512], "counts": "VZb61c00h>1VA1h>1WA1f>2WA0g>2WA0h>2UA1h>1WA0g>2WA0g>3VA0h>1WA0g>d0000010O0002N2O1N0000010O0001O01O0001O01O00010O0000010O1O3M2O1N3M2N2O2M0010O0001O3N1N2N3M2O1N3MPf`0"}, "image_id": 893, "id": 14738}, {"iscrowd": 0, "category_id": 1, "bbox": [148.0, 308.0, 79.0, 63.0], "area": 2286, "segmentation": {"size": [512, 512], "counts": "jZZ23=OQ?2m@0Q?2m@0Q?3l@0Q?2n@OQ?3l@0Q?`0O0001O01O01O02N00010O0000010O000010O000010O002N2O1N3M2N11N2O0O001O01O0001O01O00010O0001O01O00101N001O03M2O1N3M2N3N10001O0O1N3M2N2O2M2N3N1N2N3M2O_U^4"}, "image_id": 893, "id": 14739}, {"iscrowd": 0, "category_id": 1, "bbox": [383.0, 315.0, 31.0, 39.0], "area": 633, "segmentation": {"size": [512, 512], "counts": "Yjo51n?3N1N2N3M2N2O2M2N2N3N1N2N3M2O1N3M2N2OO3M2O1N2\\OQA4R?Jo@4S?JPA4Q?JQA4R?IPA5R?IPA5`?M2OYe`1"}, "image_id": 893, "id": 14740}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 322.0, 30.0, 52.0], "area": 859, "segmentation": {"size": [512, 512], "counts": "R:d1^>N1O1N3M2O2M2N1O01O01O003N1N2N3N1N3M2N2O2M2N3N1N2N3M2O2M2NPe`7"}, "image_id": 893, "id": 14741}, {"iscrowd": 0, "category_id": 1, "bbox": [254.0, 342.0, 53.0, 71.0], "area": 1891, "segmentation": {"size": [512, 512], "counts": "W[o31o00o=2PBOo=3nA0o=2oA0o=2PBOo=3nA0o=2oA0o=2oA1n=2PBOo=2oA1n=1PB1G[OT>g0SB3k=NSB4k=NSB4k=OSB0m=2QBOo=m0RBUNn=k1310O00010O00011N3M2N3N1N3M2L`AfNa>X16M2O2M2N3M2O1N3M010O2N2O2M2N2N3N1N3M2O2MWTV3"}, "image_id": 893, "id": 14742}, {"iscrowd": 0, "category_id": 1, "bbox": [429.0, 363.0, 63.0, 62.0], "area": 2077, "segmentation": {"size": [512, 512], "counts": "gkf62m?2N2N2O2M2a@EV?=h@FV?6UAHm>7UAFn>7TAGn>7TAHm>7`0M2N3N1Nic9"}, "image_id": 893, "id": 14743}, {"iscrowd": 0, "category_id": 1, "bbox": [41.0, 369.0, 28.0, 28.0], "area": 416, "segmentation": {"size": [512, 512], "counts": "mkd02m?3N1N3M2O2M2N3N1N3M010O00010O00010O03M2O2M2N3N1N3M2N2ORTm6"}, "image_id": 893, "id": 14744}, {"iscrowd": 0, "category_id": 1, "bbox": [327.0, 370.0, 45.0, 51.0], "area": 1223, "segmentation": {"size": [512, 512], "counts": "_lS53l?2O1N3M2O1AGTAFUAFUA=i>EUAFUA`0N1N2N3M10O01O0001O01O01O2O2M2N2N3N1N2N3M2O2M2N2N3N1N2N3N1N2N3M2O2MbcU2"}, "image_id": 893, "id": 14745}, {"iscrowd": 0, "category_id": 1, "bbox": [203.0, 380.0, 52.0, 67.0], "area": 1839, "segmentation": {"size": [512, 512], "counts": "]lU31o00o=3nA0o=2oA0o=2PBOo=3nA0o=2oA0o=2oA0P>2nA0o=2oA0o=3nAOP>3oAOo=2oA0o=3nA0o=2oAOP>3oALR>P100010O0000010O002N2O1N3M2O1N3K4O2M2N2N3N1N2N3N1N2N002O1N3M2N2O2M2N3N1N2NSSP4"}, "image_id": 893, "id": 14746}, {"iscrowd": 0, "category_id": 1, "bbox": [291.0, 385.0, 26.0, 25.0], "area": 342, "segmentation": {"size": [512, 512], "counts": "]la41n?3N1N3M2O2M2N3N1N1O01O01O00010O0002O1N3M2O2M2N3N1NeSQ3"}, "image_id": 893, "id": 14747}, {"iscrowd": 0, "category_id": 1, "bbox": [383.0, 386.0, 34.0, 36.0], "area": 643, "segmentation": {"size": [512, 512], "counts": "]lo53m?1N3M2O2M2N3N1N2N3N1N3M2O2M2N2O1N02O1N3M2N3N1N2N3M2O2M2N3N1N3M2N3NWS_1"}, "image_id": 893, "id": 14748}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 388.0, 36.0, 49.0], "area": 1027, "segmentation": {"size": [512, 512], "counts": "[01O01O00010O0001O101N0010O01O3M2O1N3M2O2M2N3M100O00102M2N2N3N1N3M2O1NWc]7"}, "image_id": 893, "id": 14749}, {"iscrowd": 0, "category_id": 1, "bbox": [255.0, 414.0, 52.0, 52.0], "area": 1438, "segmentation": {"size": [512, 512], "counts": "hmo31n?2N2O2M2N3M2`@C[?>c@D[?b0N3N1N3M2N2O2M2N2O0O000010O00010O0000010O0000010O00011N1O00010O2O2O0O1TOSAb0P?\\OQAc0P?[OSAb0W?N2Ja@Ia?4b@I`?67M2N[RV3"}, "image_id": 
893, "id": 14750}, {"iscrowd": 0, "category_id": 1, "bbox": [331.0, 418.0, 32.0, 36.0], "area": 600, "segmentation": {"size": [512, 512], "counts": "cmU52n?1N2N3N1N3M2N2O2M2N3N1N2N2N10O000010O000010O00002Cn@JT?3n@KT?3o@JS?5n@JT?3n@KT?3?NdRZ2"}, "image_id": 893, "id": 14751}, {"iscrowd": 0, "category_id": 1, "bbox": [26.0, 420.0, 59.0, 53.0], "area": 1588, "segmentation": {"size": [512, 512], "counts": "n]=2m?2O1N2N2N3M2N2N2O1N2N3M2N2N2N2O1N2N3M2N2N2N20O2N1N1O1O0000000001O01O00000000002O1N3M2N2N2N2N2O1N3M2N2N2N2N2N3N1N2N2NWRe6"}, "image_id": 893, "id": 14752}, {"iscrowd": 0, "category_id": 1, "bbox": [443.0, 424.0, 19.0, 17.0], "area": 163, "segmentation": {"size": [512, 512], "counts": "amm61o?2M2N3M2O1N1O010O0000010O02N3N1N3M2Oabh0"}, "image_id": 893, "id": 14753}, {"iscrowd": 0, "category_id": 1, "bbox": [441.0, 428.0, 53.0, 65.0], "area": 1485, "segmentation": {"size": [512, 512], "counts": "Znl61o?1N3M2N2O2M2N3M2O1N3M2N2O2M2N3PAPOl>T1M2N002O1N3M2N2O2M2N1O10oNiA8W>HkA6U>KmA3R>MPB1Q>NRBOn=2SBLm=4UBKk=4XBIh=7ZBGf=:[BEd=;_BBb==`BA`=`0bB]O^=c0dB\\O\\=c0fB[O]=b0R1O2M2N2N3N1N3M2O1NWb8"}, "image_id": 893, "id": 14754}, {"iscrowd": 0, "category_id": 1, "bbox": [287.0, 457.0, 51.0, 55.0], "area": 1578, "segmentation": {"size": [512, 512], "counts": "ao_41n?2@No@5o>Mo@4o>No@4o>No@5n>No@4P?Mo@4o>No@5n>`0N2O1N1O1O1O100KjN_AW1`>500O1O0001O01O0001O01O02N3M2O1N3M2N2O2M2N2N3N1N2N3M2O1N3M2N2O2JY@Ni?1n`f2"}, "image_id": 893, "id": 14755}, {"iscrowd": 0, "category_id": 1, "bbox": [368.0, 463.0, 66.0, 49.0], "area": 1740, "segmentation": {"size": [512, 512], "counts": "l_h51n?2O2M1O1O100O1O100O1\\OH_A9`>I^A8b>J\\A6c>L[A5d>MZA4f>NXA2g>0WA1h>1VA0i>d000O1O100O1O1O100O1O100O1O02O2M1O010O000010O000002O1N2N3N1N3M2N2O1N00011N3M2N3N1N2N3N1N2N3M2Oi`V1"}, "image_id": 893, "id": 14756}, {"iscrowd": 0, "category_id": 1, "bbox": [7.0, 469.0, 30.0, 30.0], "area": 473, "segmentation": {"size": [512, 512], "counts": "So31n?2N2N3M2N2O1N2N2N3M2N2N2O1N000001O2O1N2N2N3M2N2N2O1N2N3M2NnP]7"}, "image_id": 893, "id": 14757}, {"iscrowd": 0, "category_id": 1, "bbox": [59.0, 473.0, 65.0, 39.0], "area": 1652, "segmentation": {"size": [512, 512], "counts": "Uom02n?2M2N3M2O2M2N3N1N2N3N1N00010O2N2O2M2O2O010O01N1O2N1O2N1O0000O1O100O1O100O1O100O1O100O1O1O100001O2N1O2N1O2N1O2N1O2N1O1O2Be@3\\?Kf@4\\?Jf@4d?N3MT`Q6"}, "image_id": 893, "id": 14758}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 482.0, 16.0, 30.0], "area": 275, "segmentation": {"size": [512, 512], "counts": "R?n0S?O1O1O2M2N2O1N2N3M2N2N2N2O1N3M]`g7"}, "image_id": 893, "id": 14759}, {"iscrowd": 0, "category_id": 1, "bbox": [467.0, 499.0, 25.0, 13.0], "area": 185, "segmentation": {"size": [512, 512], "counts": "noY72m?1O100O1O1O100O1O1O100O1O100O11O1O1O2N1O1O2N1O2NQ`9"}, "image_id": 893, "id": 14760}, {"iscrowd": 0, "category_id": 1, "bbox": [284.0, 508.0, 8.0, 4.0], "area": 21, "segmentation": {"size": [512, 512], "counts": "o_^41n?1O100O1001O2NQ`]3"}, "image_id": 893, "id": 14761}, {"iscrowd": 0, "category_id": 1, "bbox": [338.0, 509.0, 5.0, 3.0], "area": 8, "segmentation": {"size": [512, 512], "counts": "o_Y51o?0O1O12NQPd2"}, "image_id": 893, "id": 14762}, {"iscrowd": 0, "category_id": 1, "bbox": [347.0, 509.0, 6.0, 3.0], "area": 12, "segmentation": {"size": [512, 512], "counts": "oo]51o?0O1O1001ORP_2"}, "image_id": 893, "id": 14763}, {"iscrowd": 0, "category_id": 1, "bbox": [25.0, 511.0, 3.0, 1.0], "area": 3, "segmentation": {"size": [512, 512], "counts": "oo<1o?000Q`a7"}, "image_id": 893, "id": 14764}, {"iscrowd": 0, "category_id": 1, "bbox": [103.0, 340.0, 26.0, 11.0], 
"area": 286, "segmentation": {"size": [512, 512], "counts": "djc1;e?0000000000000000000000000000000000000000000000000\\Uo5"}, "image_id": 894, "id": 14765}, {"iscrowd": 0, "category_id": 1, "bbox": [81.0, 341.0, 11.0, 11.0], "area": 121, "segmentation": {"size": [512, 512], "counts": "ejX1;e?0000000000000000000[ea6"}, "image_id": 894, "id": 14766}, {"iscrowd": 0, "category_id": 1, "bbox": [85.0, 368.0, 60.0, 69.0], "area": 2158, "segmentation": {"size": [512, 512], "counts": "nlZ12m?2M3N1N3N2N1N3N2F_OQAc0n>_OPAc0m>_OQAd0m>8N3N2N1N3N2M3N1N3N2M2O2M3N1O2M3O01000OO2M3N1N3N2M2O2N2M3N1N3N2M2KYAPOi>n06M2O2M3N1O2M3N2M2O2M3N1N3N2NkSg5"}, "image_id": 894, "id": 14767}, {"iscrowd": 0, "category_id": 1, "bbox": [169.0, 0.0, 50.0, 58.0], "area": 1539, "segmentation": {"size": [512, 512], "counts": "[`d23l?2N2O1N3M2N2O2M2N2O2M2N2N3N1N2O2N1O1O1O2N1O1O2N1O1O2N1O1O2N1OQOgA8X>GkA9S>GnA9Q>FQB:n=EUB9k=FWB8j=GXB8h=HZB5g=J[B4f=K\\B3e=L]B3c=M_B0b=O`BOb=O`BOb=OaBO`=0aBNb=O`BOb=OaBNa=0Qab4"}, "image_id": 897, "id": 14768}, {"iscrowd": 0, "category_id": 1, "bbox": [394.0, 1.0, 22.0, 20.0], "area": 244, "segmentation": {"size": [512, 512], "counts": "[PU61n?2O1N2N3M2N2N2O1N001O2N10O2N2N3M2N2N2N2O2Mfo_1"}, "image_id": 897, "id": 14769}, {"iscrowd": 0, "category_id": 1, "bbox": [215.0, 8.0, 31.0, 32.0], "area": 503, "segmentation": {"size": [512, 512], "counts": "c`[31o?2M2N2N2N3N1N2N3M2O1N2N3M2N2O2ON2N2O2M2N2N2O2M2N2N3M2O1N2N3MVoT4"}, "image_id": 897, "id": 14770}, {"iscrowd": 0, "category_id": 1, "bbox": [408.0, 10.0, 28.0, 28.0], "area": 447, "segmentation": {"size": [512, 512], "counts": "dP\\62n?2M3M3N2M3M2O2M3N1N1O010O010O000100O3N2M3N2M2O2M3N2M3MVoU1"}, "image_id": 897, "id": 14771}, {"iscrowd": 0, "category_id": 1, "bbox": [436.0, 15.0, 61.0, 45.0], "area": 1436, "segmentation": {"size": [512, 512], "counts": "XQj61n?3N2M3N2M3N2M3N1N3M3N0O10O010O010O010O02O2M010O010O010O010O010O010O01O01O010O010O010O010O010O0101N3N2M3N1N3N2M3N2M3N2M3Nk^7"}, "image_id": 897, "id": 14772}, {"iscrowd": 0, "category_id": 1, "bbox": [115.0, 19.0, 72.0, 52.0], "area": 1617, "segmentation": {"size": [512, 512], "counts": "Qai11n?2N3N1N2N3N1N2N3N11O01M2N2O2M2N2N3N1N2N3M2O1N3M2N2O2N1000N3M2N2O2M2N2N3N1N2N3M2O1N3M2N2O2M2N2N3N1N2N2O02M2N2N3N1N2N3M20010OO1N3M2N2O2M2N2N3N1NU^R5"}, "image_id": 897, "id": 14773}, {"iscrowd": 0, "category_id": 1, "bbox": [460.0, 58.0, 52.0, 50.0], "area": 1468, "segmentation": {"size": [512, 512], "counts": "`RV72n?2M3N2M2O2M3N2M3N2M3N1N10O010O010O010O0100O3N1N102M3N2M2OO010O010O0010O01F[AYOe>g0]AVOc>j0`ATO`>l0aARO_>n0:0O010O02N3N2M3N2M3NkM"}, "image_id": 897, "id": 14774}, {"iscrowd": 0, "category_id": 1, "bbox": [303.0, 65.0, 74.0, 56.0], "area": 1898, "segmentation": {"size": [512, 512], "counts": "Pcg41n?3M2O1N3M2O2M2N2N3N1N2N3N1N3M2N2O2M1O01O01O0001O01O01O0001O01O01O01O0001O01O00010O00012M2N2N3N1N2N3M2O0O0010O0000010O0001O01O0002O1N3M2N2O2M2N2N3Na]S2"}, "image_id": 897, "id": 14775}, {"iscrowd": 0, "category_id": 1, "bbox": [402.0, 68.0, 19.0, 18.0], "area": 177, "segmentation": {"size": [512, 512], "counts": "]RY61n?3M2O1N3M2O1N0010O0001O02N2N2O2M2N3Nd]]1"}, "image_id": 897, "id": 14776}, {"iscrowd": 0, "category_id": 1, "bbox": [258.0, 69.0, 47.0, 60.0], "area": 1379, "segmentation": {"size": [512, 512], "counts": "gRQ41n?2O1N3M2O2M2N2N3N1N2N3N1N2N3M2O2M2N2O2M2N2O2M2N2N3O010O00010OO2M2N2O2SO^A7d>G^A7e>G\\A7f>G]A7d>G^A7e>G]A6e>H]A7e>F]A8X?M2N3N1N\\\\W3"}, "image_id": 897, "id": 14777}, {"iscrowd": 0, "category_id": 1, "bbox": [365.0, 85.0, 34.0, 34.0], "area": 615, "segmentation": {"size": [512, 512], 
"counts": "Ucf52m?3M2N2O2M2N2N3M2O1N2N3M2N10O01O00000010O2N3M2N2O2M2N2N2O2M2N2N3N1Nk\\h1"}, "image_id": 897, "id": 14778}, {"iscrowd": 0, "category_id": 1, "bbox": [103.0, 90.0, 71.0, 66.0], "area": 2428, "segmentation": {"size": [512, 512], "counts": "]cc11n?3M2O2M2N2N3N1N2N3M2O2M2N2N3N1N2N3N1N3M2N2O2M2N2N3N1O20N1O2M3M2N3N1N3M2O2OO1N3M2N2O2M2N2O2M21N1N2O2M2N3M2O1N3M2N2O2M2N3N1N2YOj@a0]?M2O1N3M2N2O2M2NhkX5"}, "image_id": 897, "id": 14779}, {"iscrowd": 0, "category_id": 1, "bbox": [228.0, 99.0, 19.0, 24.0], "area": 217, "segmentation": {"size": [512, 512], "counts": "^Sb32m?2O1N3M2N2N3N1N3M1O00Ff@O[?0g@OX?2i@LW?4l@IV?5l@IV?5=N1Ni\\T4"}, "image_id": 897, "id": 14780}, {"iscrowd": 0, "category_id": 1, "bbox": [177.0, 106.0, 14.0, 14.0], "area": 109, "segmentation": {"size": [512, 512], "counts": "`ch22m?3N1N3M2O0O1O0002N3N1N3M2O_\\P5"}, "image_id": 897, "id": 14781}, {"iscrowd": 0, "category_id": 1, "bbox": [484.0, 109.0, 28.0, 36.0], "area": 649, "segmentation": {"size": [512, 512], "counts": "kSb73m?2M3N2M3N2M3N3L3N2M010O010O010O010O002O2M10O010O010O01cL"}, "image_id": 897, "id": 14782}, {"iscrowd": 0, "category_id": 1, "bbox": [203.0, 115.0, 35.0, 39.0], "area": 731, "segmentation": {"size": [512, 512], "counts": "QdU31n?2N3N1N2N3N1N2N3N1N2N3N1N3M2O1N3M2O1N1O02N2O2M2N2N3N1N2N3^Og@8[?Fg@9Z?Fg@8c?M2O1NekX4"}, "image_id": 897, "id": 14783}, {"iscrowd": 0, "category_id": 1, "bbox": [342.0, 134.0, 44.0, 52.0], "area": 1209, "segmentation": {"size": [512, 512], "counts": "[U[51n?2YO0[A3c>N[A4c>O[A3b>O\\A3b>O\\A3c>O[A3b>O\\A3b>0[A3c>N\\A3b>e001O01O01O01O01O01O00010O2N3N1N3M2N3N1N2N3N1N3M2O2M2N2N3N1N3M2O1Nojn1"}, "image_id": 897, "id": 14784}, {"iscrowd": 0, "category_id": 1, "bbox": [371.0, 139.0, 99.0, 101.0], "area": 3581, "segmentation": {"size": [512, 512], "counts": "efi52m?2N2N2N3N1N2N2N20000O2M2N2N2N2N2M4M2N2N2N200O0O00001O000001O000001O00000HfNkAZ1Q>gNjA14W1R>oNlAQ1U>POiAP1W>=O0000000001O01O000000000001O01O0000DjAoNV>Q1lAmNT>S1nAkNS>U1nAiNR>W1PBhNo=X1SBfNm=Z1<000001O01O00000000000100O2N3M2N2N2N2N2O2M2N2N2N2N200N3M2L4N2N2O1Nojd0"}, "image_id": 897, "id": 14785}, {"iscrowd": 0, "category_id": 1, "bbox": [232.0, 152.0, 19.0, 21.0], "area": 194, "segmentation": {"size": [512, 512], "counts": "lTd32n?1N3M2O1N3M2O2N10010O01M2O2M2N3N2M2OiZR4"}, "image_id": 897, "id": 14786}, {"iscrowd": 0, "category_id": 1, "bbox": [182.0, 156.0, 53.0, 46.0], "area": 1219, "segmentation": {"size": [512, 512], "counts": "dUk22m?3M2O1N3M2N3N1N2N3M2O1N3M2O1N2N1O01O0001O01O0001O01O01O01O0001O01O2N3N1N2N3M2O1N1O010O001O2O1N3M2M4M2O1NeZZ4"}, "image_id": 897, "id": 14787}, {"iscrowd": 0, "category_id": 1, "bbox": [145.0, 159.0, 26.0, 26.0], "area": 346, "segmentation": {"size": [512, 512], "counts": "ZeX22m?2N2O1N3M2N2O2M2N2N3NO01O0002N2O1N3M2N2N3N1N2N2N3NcZZ5"}, "image_id": 897, "id": 14788}, {"iscrowd": 0, "category_id": 1, "bbox": [464.0, 167.0, 28.0, 29.0], "area": 441, "segmentation": {"size": [512, 512], "counts": "bUX73l?2N2N2O1N3M2N2O1N3M2N2N2O1N11N2N2N3N1N2N3M2N2O1N3M2N2OYj9"}, "image_id": 897, "id": 14789}, {"iscrowd": 0, "category_id": 1, "bbox": [251.0, 178.0, 37.0, 30.0], "area": 578, "segmentation": {"size": [512, 512], "counts": "Vfm33i?OX@3f?6M2N2O2M00010O2N1O10O00010O000010O000010O003N1N1O1O01O01O011N2L4N3N1NYj_3"}, "image_id": 897, "id": 14790}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 181.0, 6.0, 12.0], "area": 42, "segmentation": {"size": [512, 512], "counts": 
"e51]A87IZ>1]A88IX>1^A88IY>1]A87IZ>c0eA_OX>c0fA_OX>d0eA_OY>U1N2N2N1O01O00011N2N3M2O1N3M2O1N3M2N3N1N2N3M2O1N3M1O01O01O2O1N2Be@4^?Id@5^?Jd@4e?N2N]Xc3"}, "image_id": 897, "id": 14793}, {"iscrowd": 0, "category_id": 1, "bbox": [313.0, 213.0, 33.0, 31.0], "area": 538, "segmentation": {"size": [512, 512], "counts": "Rgl41o?2M2N3N1N3M2O2M2N3N1N3M10O00010O00010O00101N3M2O2M2N3N1N3M2O2M2Nlhb2"}, "image_id": 897, "id": 14794}, {"iscrowd": 0, "category_id": 1, "bbox": [22.0, 233.0, 79.0, 71.0], "area": 2611, "segmentation": {"size": [512, 512], "counts": "bX;2m?3N1N3M2O1N3M2N2O2M2N3N1N2N3M2O2M2N2N3N1N1O010O0000010O00010O0000010O00010O0000010O000010O000010O0000010O00010O0003M2O1N3M2O2M2N2N3N1N3M2O1N3M2N2O2M2N3M2O1NkW]6"}, "image_id": 897, "id": 14795}, {"iscrowd": 0, "category_id": 1, "bbox": [258.0, 260.0, 54.0, 71.0], "area": 1977, "segmentation": {"size": [512, 512], "counts": "hYQ41o?2M2N2O2M2N2O2M2N3N1N2N3N1N1O001L3M3L5K4L4L4L5K4N2100O1O101N1O100O1O2N100O1O101N1O100O1O2M2K5L4K5K6J5K5K5KlgS3"}, "image_id": 897, "id": 14796}, {"iscrowd": 0, "category_id": 1, "bbox": [418.0, 264.0, 60.0, 50.0], "area": 1649, "segmentation": {"size": [512, 512], "counts": "hXa63l?2N3N1N2N3N1N3M2O2M2N2N3O010l@TOP?P1M2O2M2N2O2M001O100O03M2O2M2N01O01O00010O0001O01O00010O1O3N1N3M2O2M2N2N3N1N3M2O1N3M2N3N1NRg`0"}, "image_id": 897, "id": 14797}, {"iscrowd": 0, "category_id": 1, "bbox": [337.0, 267.0, 17.0, 16.0], "area": 144, "segmentation": {"size": [512, 512], "counts": "ahX51o?1N2N3M2O1N3M2N01O101N2N3M2O1N3M^g^2"}, "image_id": 897, "id": 14798}, {"iscrowd": 0, "category_id": 1, "bbox": [16.0, 286.0, 128.0, 99.0], "area": 5023, "segmentation": {"size": [512, 512], "counts": "W[82n?2M2N3M2O2M2N3N1N3M2N3N1N2N3N1N3M1O010O00010O000010O0000ClNoAU1P>mNoAR1Q>QOlAo0T>SOjAn0V>TOhAk0X>WOfAi0Z>?0O00010O00010O03M2O1N3M2N3N1N2N10O00010O0CXBcNh=]1ZBaNg=o0XBUO3Jd=Q1[BSO3Jb=T1]BoN4J`=V1^BoN3J`=U1`BnNP>P1QBnNQ>P1QBoNo=P1`010O000102M2N3N1N3M2O11M1O10O00010O0000011N2N3N1N3M2OO01O_OjAYOW>g0kAVOU>j0mAUOR>k0PBSOQ>m0QBPOo=P1SBnNm=R1VBlNi=T1YBjNh=V1b0N2N2O2M2N3N101O010O0010O0001M2O2M2N3M2O2M2N3N1N2NPfg5"}, "image_id": 897, "id": 14799}, {"iscrowd": 0, "category_id": 1, "bbox": [336.0, 296.0, 44.0, 35.0], "area": 711, "segmentation": {"size": [512, 512], "counts": "oYX52m?2N2N3N1N2N2N3N1N0000010O000001O01O0001O01O00101N30ON1O10O000001O03M2N2O2M2N2N2N3N1N2N\\fQ2"}, "image_id": 897, "id": 14800}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 300.0, 21.0, 41.0], "area": 441, "segmentation": {"size": [512, 512], "counts": "\\9Y1h>N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2NQVe7"}, "image_id": 897, "id": 14801}, {"iscrowd": 0, "category_id": 1, "bbox": [405.0, 303.0, 30.0, 34.0], "area": 585, "segmentation": {"size": [512, 512], "counts": "jiZ63l?2O2M2N3N1N2N3N1N3M2O2M2N1O101N3N02M2N3M2O1N3M2O2M2N3N1H]@0f?M\\@1deV1"}, "image_id": 897, "id": 14802}, {"iscrowd": 0, "category_id": 1, "bbox": [399.0, 333.0, 17.0, 15.0], "area": 131, "segmentation": {"size": [512, 512], "counts": "djW62n?1N2N3N1N2N1O01O01O01O3N1N3M2N2O\\e_1"}, "image_id": 897, "id": 14803}, {"iscrowd": 0, "category_id": 1, "bbox": [310.0, 337.0, 53.0, 67.0], "area": 1889, "segmentation": {"size": [512, 512], "counts": "g[k41n?2O2XON]A4a>N]A5FIb>4fA4GIa>5fA4GIa>5gA=V>FjA9U>HkA9R>JmA6Q>LPB3n=ORB2l=0TBOj=3UBNi=5TBLl=5RBKn=7QBHo=;nAER>U10O0000010O1O2O1N3M2N2O2M2N3N1N2N3M2O2M2N2O2M2N3M2O1N3M2O2M2N2N3N1N[TZ2"}, "image_id": 897, "id": 14804}, {"iscrowd": 0, "category_id": 1, "bbox": [365.0, 341.0, 24.0, 26.0], "area": 319, "segmentation": {"size": [512, 512], "counts": 
"ljf52m?2O2M2N2N3N1N3M2O2M2O1010M2O1N3M2N3N1N2N3N1N3MkTm1"}, "image_id": 897, "id": 14805}, {"iscrowd": 0, "category_id": 1, "bbox": [470.0, 346.0, 42.0, 82.0], "area": 2008, "segmentation": {"size": [512, 512], "counts": "h\\[72n?1N3_N0mB2h0003M2N000000000POoA1Q>O[BEe=;gBYOY=g0P10O1000000RE"}, "image_id": 897, "id": 14806}, {"iscrowd": 0, "category_id": 1, "bbox": [121.0, 352.0, 47.0, 48.0], "area": 1160, "segmentation": {"size": [512, 512], "counts": "fkl13l?2O2M2N2N3N1N3M2O1N3M2N2O2M2N3M2O1N00010O0000010O0001O01O000102M2JSAVOo>i06M2N3M2O1N3M2O2M2N2N3N1N\\d[5"}, "image_id": 897, "id": 14807}, {"iscrowd": 0, "category_id": 1, "bbox": [409.0, 378.0, 68.0, 71.0], "area": 2105, "segmentation": {"size": [512, 512], "counts": "\\l\\61n?3M2N2O2M2N2N3N1N3M2O1N3M2N3N1N2N3M2O2M2N2N3N1N3O00010O010ON2O2M2N3N1N2N3N1O2N1O101N101O01O010O01O01O010O00O0O01O01O2N2O2M2N2O2Dc@0_?Ob@Oa?Nb@O`?Ob@0Qba0"}, "image_id": 897, "id": 14808}, {"iscrowd": 0, "category_id": 1, "bbox": [371.0, 387.0, 29.0, 28.0], "area": 442, "segmentation": {"size": [512, 512], "counts": "]li52m?3N1N2N3N1N3M2N3N1N3M2O0O0001O2O1N3M2O2M2N210O0O2M2N4M1N[cg1"}, "image_id": 897, "id": 14809}, {"iscrowd": 0, "category_id": 1, "bbox": [109.0, 402.0, 20.0, 19.0], "area": 211, "segmentation": {"size": [512, 512], "counts": "klf12m?2N3N1N2N3M2O0O1O01O01O02N2O1N3M2N3N1NVSo5"}, "image_id": 897, "id": 14810}, {"iscrowd": 0, "category_id": 1, "bbox": [140.0, 403.0, 66.0, 58.0], "area": 1676, "segmentation": {"size": [512, 512], "counts": "g]V22m?3N1N3M2O1N3M2N3N1N2N3N1N3M2N010O0001O01O00010O0001O01O00010O0001O03M2N10O0KVAUOj>l0WASOi>l05000010O00010O0000010O1O3N1N3M2N2O2M2N3N1N2N3N1N3Mkbh4"}, "image_id": 897, "id": 14811}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 407.0, 24.0, 39.0], "area": 557, "segmentation": {"size": [512, 512], "counts": "jO01O2N2O02M2O1N3M2N2N3N1N2N3M2O1N3M2N2O2M2N2Nbbc7"}, "image_id": 897, "id": 14812}, {"iscrowd": 0, "category_id": 1, "bbox": [375.0, 409.0, 54.0, 53.0], "area": 1501, "segmentation": {"size": [512, 512], "counts": "\\mk51n?3M2O2M2N2O2M2N3M2O2M2N2N3N1N3M2O2M2N2N2OO0001O010O2O20O0010O00O2M2O1N3M2N3N1N2N3M2O1N3M2O2M2N2N3N1N3M2O1NTRY1"}, "image_id": 897, "id": 14813}, {"iscrowd": 0, "category_id": 1, "bbox": [49.0, 417.0, 53.0, 48.0], "area": 1258, "segmentation": {"size": [512, 512], "counts": "kmh01n?3N1N2N3N1N3M2N2O2M2N3N1N2N3M2O0O1O01O0001O01O01O01O01O0001O01O01O01O0001O2O2M2N3N1N2N3M2O2M2N3N1N2N3M2OZb\\6"}, "image_id": 897, "id": 14814}, {"iscrowd": 0, "category_id": 1, "bbox": [175.0, 444.0, 57.0, 53.0], "area": 1570, "segmentation": {"size": [512, 512], "counts": "ing2160b?2[@1b?1\\@1b?8JCf@`0W?Bh@?W?6N1O001010000O2M2O1N3M1O10O0000IoN`AR1_>PO`Ao0`>SO^Am0c>701O0001O010O3M2N2O2M1O0001O01O1O3N1N2N3M2O1N3M2N2O2M2N2N3N1N2N^a[4"}, "image_id": 897, "id": 14815}, {"iscrowd": 0, "category_id": 1, "bbox": [82.0, 449.0, 49.0, 50.0], "area": 1167, "segmentation": {"size": [512, 512], "counts": "_^Y11n?2N3N1N3M2N2O2M2N3N1N2N3M2O1N3M2O2M2N210O00010O010OO1N3M2N3N0O0010O02N3M2O1N3M2N2O2M2N3N1N2N3M2OnPn5"}, "image_id": 897, "id": 14816}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 451.0, 54.0, 61.0], "area": 1719, "segmentation": {"size": [512, 512], "counts": "o>43=T?Ej@=U?9M2HTOYAn0f>TOWAo0f>7N2N01O01O01O0001O01O000M]AjNa>O`AV1d>3O0000010O00010O01O2N2O2M2N010O2N3M2O2M1O01O02N2N3N1N3M2O1N3M2N3N1NWaT7"}, "image_id": 897, "id": 14817}, {"iscrowd": 0, "category_id": 1, "bbox": [477.0, 451.0, 27.0, 28.0], "area": 404, "segmentation": {"size": [512, 512], "counts": 
"]n^72m?3M2O1N3M2O1N3M2N2O2M2N2N01O2O1N3M2O1N3M2N3N1N2N3M2O]a3"}, "image_id": 897, "id": 14818}, {"iscrowd": 0, "category_id": 1, "bbox": [457.0, 463.0, 22.0, 21.0], "area": 222, "segmentation": {"size": [512, 512], "counts": "jnT72m?2O2M2N2N3N1N000010O000010O0003M2O2M2N2O2MYQ`0"}, "image_id": 897, "id": 14819}, {"iscrowd": 0, "category_id": 1, "bbox": [154.0, 488.0, 29.0, 24.0], "area": 400, "segmentation": {"size": [512, 512], "counts": "d_]21n?2O1N2N3M2N2O2M2N2N100O1O1O1O1001O1O1N3M2N2O1N3M2N2N3N1NZPT5"}, "image_id": 897, "id": 14820}, {"iscrowd": 0, "category_id": 1, "bbox": [39.0, 489.0, 47.0, 23.0], "area": 546, "segmentation": {"size": [512, 512], "counts": "ooc01n?1O100O1O100O1O1O100O1O100O1O1O100O1O100O1O1O100O1O100O1O1O11O2N1O2N1O1O2N1O2N1O1O2N1O2N1O1OQ`d6"}, "image_id": 897, "id": 14821}, {"iscrowd": 0, "category_id": 1, "bbox": [210.0, 491.0, 48.0, 21.0], "area": 530, "segmentation": {"size": [512, 512], "counts": "o_Y31n?100O1O1O100O1O100O1O1O100O1O1O100O1O1O100O1O100O1O1O100001O2N1O1O2N1O2N1O1O0000O12N1O1O2N1O1OR`n3"}, "image_id": 897, "id": 14822}, {"iscrowd": 0, "category_id": 1, "bbox": [437.0, 491.0, 44.0, 21.0], "area": 471, "segmentation": {"size": [512, 512], "counts": "ooj61o?0O1O1O100O1O100O1O100O1O100O1O1O100O1O100O1O100O1O1O100O11O2N1O2N1O1O2N1O2N1O2N1O1O2NQP?"}, "image_id": 897, "id": 14823}, {"iscrowd": 0, "category_id": 1, "bbox": [17.0, 511.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "oo81PPg7"}, "image_id": 897, "id": 14824}, {"iscrowd": 0, "category_id": 1, "bbox": [250.0, 0.0, 80.0, 24.0], "area": 994, "segmentation": {"size": [512, 512], "counts": "PPm31o?00001O00001O00001O00001O00001O00001O00001O00001O00001O0000001O00001O00001O00001O00001O00001O00001O00001O00001O00001O00001O00001O00001O00001O0000M3M3M3M3M3M3MSPk2"}, "image_id": 898, "id": 14825}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 223.0, 50.0, 85.0], "area": 2678, "segmentation": {"size": [512, 512], "counts": "ZXW7170_?2`@0V?Oi@3O0V?Oi@4N0V?Nk@W1jAlNU>R1kAPOV>m0kAUOR>l0mAVOQ>l0mAWOQ>j0nAUOR>n0kASOT>\\10010O000J6O2O002O1N3M2O2M001O01O01O0001O01O00010QI"}, "image_id": 898, "id": 14826}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 362.0, 28.0, 52.0], "area": 786, "segmentation": {"size": [512, 512], "counts": "Z;d1\\>O2N2N2N3M2N2N2N2N2N2N0000003N1N2N2N2N2N3M2N2N2N2N2N3Moca7"}, "image_id": 898, "id": 14827}, {"iscrowd": 0, "category_id": 1, "bbox": [273.0, 389.0, 127.0, 73.0], "area": 4099, "segmentation": {"size": [512, 512], "counts": "PmX41o?2M2N2O2M2N3N1N2I@k@c0R?_Ol@c0R?_Om@c0Q?6O1O2N100O2N010O0001O01O00101N3M2O1N3M100002O0O2N100O2N1O2O0O2NO2N1O100010O1O2O0O2TOTA`0m>]OUAc0l>\\OUAd0l>YOUAi0P?010O0O1K6N1001O01O00010O0001O01O0001O01O01O01O0001O01O01O01O0001O01O01O01O0001O01O00010O0001O01O0001O01O01O01O00N3K4M3N3O01O00010O00@j@6V?Fo@7^?M3L[bg1"}, "image_id": 898, "id": 14828}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 395.0, 87.0, 91.0], "area": 3563, "segmentation": {"size": [512, 512], "counts": "k=Z1f>000O01001O1N10O10O10O101O0L]AkNc>U1ElNRBT1m=QOnAP1R>SOkAl0V>?O10@iAVOX>j0kASOU>m0oAoNP>Q1TBlNl=T1`01N2O00O10O10O10O1000002M01000O0100000O01LFkNQBU1o=oNlAR1S>ROjAm0W>>00O10@kATOV>l0nAPOR>P1QBmNn=T1?O11N1000O10O10O10O1000O13L1000O01000O104L3IVAROn>j07M4K4M4L3M4K4MbRd6"}, "image_id": 898, "id": 14829}, {"iscrowd": 0, "category_id": 1, "bbox": [390.0, 395.0, 94.0, 66.0], "area": 2973, "segmentation": {"size": [512, 512], "counts": 
"h\\S62l?2a@Mo>6n@LP?6n@Mo>6m@MR?4l@OT?>010O01M2N3M210O000O20O010O010O01O010O010O010O010O010O010O010O010O010O010O01O010O010O010O01O010O010O010O010O010O010O010O010O010O01O010O010O010O010M2N2N3M2N30O010O010O01M2\\Oh@5N2`?Lb@1b?La@1mQ>"}, "image_id": 898, "id": 14830}, {"iscrowd": 0, "category_id": 1, "bbox": [86.0, 399.0, 22.0, 18.0], "area": 235, "segmentation": {"size": [512, 512], "counts": "g\\[13j?3N3L300010O010O0010O0010O010O00010O0N3L3N\\cY6"}, "image_id": 898, "id": 14831}, {"iscrowd": 0, "category_id": 1, "bbox": [480.0, 442.0, 32.0, 46.0], "area": 862, "segmentation": {"size": [512, 512], "counts": "c^`71n?2M2O2M3N1N3N2N1N3N2M2O2M2O2M3N1N3O10O010O10O10O10O10O010O10O1mA"}, "image_id": 898, "id": 14832}, {"iscrowd": 0, "category_id": 1, "bbox": [185.0, 499.0, 28.0, 13.0], "area": 195, "segmentation": {"size": [512, 512], "counts": "ool21n?100O1O1O100O1O1O100O1O1O100O1O1001O1O1O2N1O1O1O2N1O1OQPe4"}, "image_id": 898, "id": 14833}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 110.0, 17.0, 35.0], "area": 446, "segmentation": {"size": [512, 512], "counts": "`3Q1o>O10O1000O1000O10O3N4L4L4K5L4L4Lh[g7"}, "image_id": 899, "id": 14834}, {"iscrowd": 0, "category_id": 1, "bbox": [111.0, 147.0, 80.0, 64.0], "area": 2907, "segmentation": {"size": [512, 512], "counts": "jeg11l?3N2M4L3N3L3M3N3L3M3N3L3N3L3N210O01O01O010O00010O01O01O010O01O01O010O00010O01O01O010O00010O01O01O010O00010O01O01O010O00010O01O01OO2L3N3L3M3N3L3M3N3L3M4M2M3N3LgZP5"}, "image_id": 899, "id": 14835}, {"iscrowd": 0, "category_id": 1, "bbox": [422.0, 402.0, 20.0, 19.0], "area": 296, "segmentation": {"size": [512, 512], "counts": "e\\c64l?5J6K2N00000O10O100000O10O1000002N5J6KRcR1"}, "image_id": 899, "id": 14836}, {"iscrowd": 0, "category_id": 1, "bbox": [385.0, 407.0, 27.0, 20.0], "area": 420, "segmentation": {"size": [512, 512], "counts": "klP67i?7I2M100000O100000O100000O100000O100000O100000O13M7Ioba1"}, "image_id": 899, "id": 14837}, {"iscrowd": 0, "category_id": 1, "bbox": [401.0, 0.0, 103.0, 77.0], "area": 4474, "segmentation": {"size": [512, 512], "counts": "g`X62m?2N2N2N2M2O2N2N2N2N2M3N1O2N2N2N2M3N2N1O2O1O1O1O1O1O001O1O1O1O1O1O001O1O1O1O1O1O001O1O1O1O1O1O2N1O1O1O1O1O001O1O1O1O1OO1O1O1O1O1N3N2N2N1O2N2M3N2N2N2N1O2M3N2N2N2N2N1N3N2N2N0000O0102N2N2N2N1N3N2N2N2N2N2M2O2NYo3"}, "image_id": 900, "id": 14838}, {"iscrowd": 0, "category_id": 1, "bbox": [491.0, 0.0, 9.0, 5.0], "area": 25, "segmentation": {"size": [512, 512], "counts": "P`e71o?1O1O1O001ON2O1OQP6"}, "image_id": 900, "id": 14839}, {"iscrowd": 0, "category_id": 1, "bbox": [258.0, 77.0, 135.0, 170.0], "area": 9719, "segmentation": {"size": [512, 512], "counts": "XSQ42m?2M2O2N2N2N2N2M3N1O2N2N2N2M3N2N1O2N1O0O10O1000bDlNR8S1nGoNR8o0nGROS8l0_GRO^M4R;j0_GTO]M4T;h0]GUO]M6V;d0[GYO]M5X;b0YG[O]M5Z;FdDc0c2D]M5\\;AgDf0^28R9IlF9T9GjF;V9EgF>Y9AfFa0Z9_OdFc0\\9]ObFe0^9[O`Ff0`9[O^Fg0b9YO[Fj0e9VOjDXOo0d1W:SOiD[On0d1Y:QOgD]On0d1[:oNeD_On0d1]:mNcDAn0c1_:mNaDAn0e1a:jN^DDo0d1c:hN\\DFo0b1g:XOWEh0k:XOSEh0o:XOoDh0S;XOjDh0X;XOgDh0Y;ZOeDf0[;\\OcDd0];^OaDb0^;A`Da0_;@^Db0c;^O[Dd0e;[OZDg0e;ZOYDh0g;WOXDk0h;SOXDo0h;nNXDU1h;iNXDX1i;fNWD\\1i;bMTDg03i1h;^MXDg00m1h;ZMZDg0NQ2h;VM\\Df0LW2RN2N2N2M2O2N2N2N2N2ZOYAOj>OXAOi>0YANi>0XAOj>OXAOj>NYA0i>NYAOj>OXAOj>OXAOi>0Z[k1"}, "image_id": 900, "id": 14840}, {"iscrowd": 0, "category_id": 1, "bbox": [410.0, 241.0, 87.0, 86.0], "area": 3891, "segmentation": {"size": [512, 512], "counts": 
"PY]61n?2M3N2N2N1N3N2N2N2M2O2N2N2M3N1O2N2M3N2N1O2M3N2N2N1N3N2N2N2M2O2N2N2M3N1O2N2N200O010000000O010000OO00000O02O2N2N1oMZBf1i=XNYBf1i=XNYBf1i=XNYBf1R>M2O2N2N2M3N1O2N2M3N2N1O2M3N2N2N1N3N2N2N2M2O2N2N2M3N1OZW7"}, "image_id": 900, "id": 14841}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 283.0, 50.0, 101.0], "area": 2949, "segmentation": {"size": [512, 512], "counts": "fZW71n?2M3N2N1d@In>9PAIn>9PAIm>:QAHm>:QAHm>f0O2N2M3N2N2N1O2N2M3N2N2N1O2M3N2N2N2N2M2O2N2N2N2M3N1O1O00O10O1000002N2M3N2N1O2N2M3TG"}, "image_id": 900, "id": 14842}, {"iscrowd": 0, "category_id": 1, "bbox": [405.0, 295.0, 22.0, 21.0], "area": 205, "segmentation": {"size": [512, 512], "counts": "^iZ61n?2N2N2N2M3N11000000000O01000000N2N2N1O2N2M^VZ1"}, "image_id": 900, "id": 14843}, {"iscrowd": 0, "category_id": 1, "bbox": [398.0, 305.0, 36.0, 32.0], "area": 333, "segmentation": {"size": [512, 512], "counts": "fYW62m?2N2N2M210O10000000O10O10000000O10O10000000O10O10000000O10O10O1M3N2N2NfeV1"}, "image_id": 900, "id": 14844}, {"iscrowd": 0, "category_id": 1, "bbox": [439.0, 322.0, 17.0, 16.0], "area": 140, "segmentation": {"size": [512, 512], "counts": "Zjk61n?2M2O2N2M2O2O1000O10OO2M3N2N1N3Nhek0"}, "image_id": 900, "id": 14845}, {"iscrowd": 0, "category_id": 1, "bbox": [451.0, 502.0, 22.0, 10.0], "area": 119, "segmentation": {"size": [512, 512], "counts": "ooQ71o?0O1O1O100O1O100O1O100O1O1001O1O2N1O1O2N1OQPc0"}, "image_id": 900, "id": 14846}, {"iscrowd": 0, "category_id": 1, "bbox": [451.0, 0.0, 61.0, 72.0], "area": 2666, "segmentation": {"size": [512, 512], "counts": "X`Q71n?3M2O2M2N3N2M2N3N1N3M2O2N1O2N2N1O2XAnN_>R1`AoNa>Q1\\AROa>P1^AQOa>Y1M2N3N1O2N1O2N1O2N1O00O100O100O1O100O1O100O11O2N1O\\NoA[1o=^NWBd1R>2M201O01^NjAX1U>fNmAZ1T>cNoA[1R>dNPBY1]>N3N1N3M2OmN"}, "image_id": 903, "id": 14847}, {"iscrowd": 0, "category_id": 1, "bbox": [400.0, 26.0, 74.0, 77.0], "area": 2760, "segmentation": {"size": [512, 512], "counts": "VQX62n?1N3M2O2M2N3N1N3M3N1N3M2O2M2N3N1N3M2O10N2O2M2N3N2M201O]AVOo=k0oAWOQ>h0mAZOQ>i0lAZOR>g0mAZOQ>i0lAZOQ>h0mAZOR>h0lAZOS>f0kA]OU>W1010OO0O1O01O01O01O01O01O01O01O01O0102M2N3N1N3M2PO`A>b>@_A>c>@`A=c>@_A?b>@`A=c>@`A=b>A`A>b>@`A=S?N3N1N3M2O2Mamb0"}, "image_id": 903, "id": 14848}, {"iscrowd": 0, "category_id": 1, "bbox": [361.0, 47.0, 71.0, 87.0], "area": 3023, "segmentation": {"size": [512, 512], "counts": "Rcd51n?3M2N3N1N3M2O2mNA`Ba0^=B_Ba0_=@`Ba0^=B_Ba0^=A`Ba0_=A_Ba0^=AaB`0]=CbB>\\=CeB\\OUBk0i=VOVBk0h=XOUBk0h=h0O2M2N3N1N3M2O2M2N3N1N30O010M2O2M2N3N1N3M3N1N3M2O2M2N3N1N3N1N3M2^NgA[1[>bNgA\\1a>M2O2M2N3N1N3M3N1N3M2O2M2N3N1N3M2O2M2N3NZkP2"}, "image_id": 903, "id": 14851}, {"iscrowd": 0, "category_id": 1, "bbox": [454.0, 110.0, 58.0, 74.0], "area": 2410, "segmentation": {"size": [512, 512], "counts": "nSS72m?2N3N1N3M2O2M2N3N1N3M2O2M2N3N1N3M2N3N1N3aAcNV>_1iAcNU>^1iAdNU>f1N1010O01O010O001N1N3M11N3N1N3M2O2M2N2O2M2N3NO02N2N3O0010OO1N010O0001fK"}, "image_id": 903, "id": 14852}, {"iscrowd": 0, "category_id": 1, "bbox": [263.0, 117.0, 62.0, 80.0], "area": 2663, "segmentation": {"size": [512, 512], "counts": "SdS41S10h=2UB1h=1WB0g=3VB0h=1VB1h=2jAFN:U>2kAFN:V>2jAFM;V>1kAFN:U>3jAFN:V>1kAk0M2O2M2N3N1N3M2O2M0011N2N3N1N3M2O2M2N3N1N3M2O2M2N3N1N3M2010O010ON3N1N3M2O2M2N3N1N3M2O2M2N3N1N3M2O2M2NaZm2"}, "image_id": 903, "id": 14853}, {"iscrowd": 0, "category_id": 1, "bbox": [402.0, 150.0, 79.0, 74.0], "area": 3017, "segmentation": {"size": [512, 512], "counts": "QUY62m?2N3N1N3N1N3M2O2M2N3N1N3M2O2M2N3N10001O010O010M2O2M2]AfN]>`1O2M2N3N1N2N3M2O20O010O01ON3M2O2M2N3N1N2N01O02N2O2M2N3N1N3M21O0N00010O00010O03M2O2M2N3N1N3M2O2Ae@4]?Jf@3]?Kd@4f?M2NiY?"}, 
"image_id": 903, "id": 14854}, {"iscrowd": 0, "category_id": 1, "bbox": [196.0, 153.0, 76.0, 78.0], "area": 3116, "segmentation": {"size": [512, 512], "counts": "`UR31n?2N3N1a@KP?8m@JR?8k@KR?7m@KP?8m@JR?d0M2N3N1N3M2O0O0002O1N100O02N3N1N3M2O2M2N2OO00010O0012M2N3N1N3M3N1N3O010O010ON3M2O2ZNRBU1Q>hNQBV1Q>iNQBU1Q>hNQBV1Q>hNRBV1P>hNQBV1^>N0101N3N11M3N1N3N1N3M2O2M2N3N2]Of@;a?N3N1N3M2OYig3"}, "image_id": 903, "id": 14855}, {"iscrowd": 0, "category_id": 1, "bbox": [383.0, 156.0, 16.0, 17.0], "area": 148, "segmentation": {"size": [512, 512], "counts": "Qeo53m?1N3M3N1N3N1N0011N3N1N3M2O2M2NkZh1"}, "image_id": 903, "id": 14856}, {"iscrowd": 0, "category_id": 1, "bbox": [141.0, 192.0, 73.0, 72.0], "area": 2731, "segmentation": {"size": [512, 512], "counts": "ffV23l?2O2M2N3N1N2N3M2e@_OU?g0N3M2O2M2N3N1N3O001N1N1O10O00010O000010O1O2O2M2N3N1N3M2O2M20N2N3N101O010O010O000N3M2_NeA[1]>bNfA[1a>N3N1N3M2O2M2N2O0O1O2O2M2N3N1N3M2E^@6g?N3M2OZhd4"}, "image_id": 903, "id": 14857}, {"iscrowd": 0, "category_id": 1, "bbox": [358.0, 192.0, 74.0, 67.0], "area": 2490, "segmentation": {"size": [512, 512], "counts": "`Vc51o?1N3M2N3N1N3M2O2M2N3N1N3M2O2M3M2O2M2N3N1N3M2O2M2010O010O010O010O010N1O2M1O10O00010O0002O0O1O100O2N100O1O101KXAoNi>Q13O1O101O20ON3M2O2M2N3N1N3M2O2M2N3N1NjhW1"}, "image_id": 903, "id": 14858}, {"iscrowd": 0, "category_id": 1, "bbox": [308.0, 211.0, 78.0, 64.0], "area": 2503, "segmentation": {"size": [512, 512], "counts": "YWj43l?3N1N3M2O2M2O2M2N3N1N3M2O20O0100N1N3N1N3M2O2M2N3HcNiA^1V>cNhA_1V>51O01O01O01O010O0001GjAhNU>Y1lAeNV>Y1mAdNV>Y1lAfNU>Y1:N3M2O2M2O101O010O01O010O010O1O010O010OO1N3M100O002O2M2N3N1N3M2O2M2N3N2Mkgn1"}, "image_id": 903, "id": 14859}, {"iscrowd": 0, "category_id": 1, "bbox": [82.0, 233.0, 78.0, 87.0], "area": 2959, "segmentation": {"size": [512, 512], "counts": "ZXY12n?2M2N3N1N3M2O2M2N3N1N3M2O1@TOiAm0W>UObA0Kk0c>WO`Am0`>VO^Aj0b>900010O00010O00010O00011N3O0010O0_AhNZ>X1cAjN]>V1bAlN^>[1010O010O01M2N3N1N3M2O2M2O2O01]NRBR1m=lNVBS1k=kNVBV1i=hNZBW1g=fN[BY1f=fNZBY1i=dNXB\\1U>00010O000100O2TO]A8f>E]A9d>E^A9e>E]A8e>F]A9e>D^A9d>F]A8f>E]A9V?N3M2O2M`f_5"}, "image_id": 903, "id": 14860}, {"iscrowd": 0, "category_id": 1, "bbox": [260.0, 239.0, 75.0, 89.0], "area": 3301, "segmentation": {"size": [512, 512], "counts": "QYR42n?2M2N3N1N3CDQA>m>EQA=m>DQA>m>EPA>m>DRA=X>\\OYBQ1d=QOZBQ1e=QOXBR1e=POZBQ1d=ROYBQ1e=POYBR1e=QOYBQ1d=QOZBQ1e=g0M2N3N1N3M2O1N1O01O011N3M2O2M2N3N1N3M2XOVBXOl=f0VBWOl=g0VBWOm=f0VBXOk=g0VBWOm=f0VBWOl=g0VBXOj=i0WBTOi=n0WBPOj=R1VBlNl=S1a010O010OO2O0100O010N1N3N1N3M2O2M2N3N1N3M2O2M2N3N1N3MeVh2"}, "image_id": 903, "id": 14861}, {"iscrowd": 0, "category_id": 1, "bbox": [31.0, 270.0, 74.0, 85.0], "area": 3032, "segmentation": {"size": [512, 512], "counts": "^i?2m?3N1N3N1N3M2O2M3M2O2M2N3N0O001ESOcAl0]>WO`Ai0`>YO_Ai0_>YO`Ag0^>[OcAe0Z>^OcAd0\\>]ObAf0[>a0N3N1N3M6K1N3M2O2M2000O2N20O010O010OO2N1N001YOVBXOi=h0YBVOg=k0[BROf=n0[BQOf=P1ZBmNi=R1WBlNk=U1UBiNm=3mAg06TOo=3mAj04POR>4lAk02POS>3mAn0\\>oNgAQ1X>nNiAR1d>N1O2M3N1N3M2O2M2N3N1N3M2O2M2N3NcU[6"}, "image_id": 903, "id": 14862}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 286.0, 84.0, 74.0], "area": 2964, "segmentation": {"size": [512, 512], "counts": "]io22m?2N3N1N3M2O2M2N3N1N3M2O2M2N3N1N3M2O2M2N3bAcNU>^1iAeNT>^1jAcNU>f10O010O010O010ON3M2O2N02N1N3M2O2M2N3N1N3M2O1N0010O00012M2OO2O2M2N3N1N2N30O01O0O2M2N3N1000O1N0010O00102M2N3N1N3M2O2M2N3M2O\\Uf3"}, "image_id": 903, "id": 14863}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 287.0, 13.0, 11.0], "area": 73, "segmentation": {"size": [512, 512], "counts": "Uim21n?2N3N1N1O0010O002O2M2O1Nmfk4"}, "image_id": 
903, "id": 14864}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 311.0, 61.0, 81.0], "area": 2677, "segmentation": {"size": [512, 512], "counts": "U:P1o>2O0O1O01O01O03M20N3M2O2M2N3N1N3M2kAaNg=a1VBaNh=a1WBaNf=b1WB`Nh=a1VBbNg=m1N3N1N3M11O2M2N3N1N3M2O2M2N3N1N3[NiA\\1Y>bNiA]1Y>aNiA\\1_>N3N1N3M2O2M2N3N1N3M2O2M2N3N1N3M2O2M2N3N`TQ7"}, "image_id": 903, "id": 14865}, {"iscrowd": 0, "category_id": 1, "bbox": [146.0, 319.0, 77.0, 88.0], "area": 3143, "segmentation": {"size": [512, 512], "counts": "oZY22n?1N3M2^OIYA9d>JYA8e>JYA9e>IYA8e>JYA9d>JYA8f>IYA9d>c0N3N1N3M2O2M2mA\\Ng=g1WBZNh=g1VB\\Ng=P2N3N1N3O010O010O010O01M2N3N1N3M2O2M2N3N1N3M2O2M2N3N1O2N1N3N1N3M2O2M1O010O0Lo@ZOQ?g04O00010O00010O1O3N1N3M2O2M2N3N1N3M2O\\T`4"}, "image_id": 903, "id": 14866}, {"iscrowd": 0, "category_id": 1, "bbox": [510.0, 321.0, 2.0, 4.0], "area": 5, "segmentation": {"size": [512, 512], "counts": "RZo71n?3mE"}, "image_id": 903, "id": 14867}, {"iscrowd": 0, "category_id": 1, "bbox": [371.0, 338.0, 74.0, 77.0], "area": 2761, "segmentation": {"size": [512, 512], "counts": "Uli51n?3N1N3M2O2M2N3N1N3M2O2M2M4M2N2O2M2N10O00010OAkNTBU1m=mNPBT1o=nNoAR1Q>QOmAn0T>SOjAn0U>UOhAk0X>>10O2N10O00010O00010O000010O00010O00010O00010O02N2O2M2N3N1N3M2POYAe0i>XOYAf0i>XOZAe0i>YOXAf0S?M2N3N1N3M2O2M2N3N1NWTQ1"}, "image_id": 903, "id": 14868}, {"iscrowd": 0, "category_id": 1, "bbox": [87.0, 356.0, 78.0, 82.0], "area": 3166, "segmentation": {"size": [512, 512], "counts": "mk[12m?3N1FLe@7X?Kf@7Y?Ke@7X?:N3N1JVOSAm0k>TOSAn0k>6M2O2M2N2O2M2N3jA\\Nl=e1SB\\Nk=g1RB\\Nn=c1PB_No=k1N1O2O0010O010O010O001M2O2M2N3M2O2M2N3N1N2N01O02N3N101O010M10O01O00010O0001O01O011N3M2O2M2N3N1N3\\Og@<[?Bg@=`?M2N3N1N3M2OTS]5"}, "image_id": 903, "id": 14869}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 380.0, 12.0, 33.0], "area": 204, "segmentation": {"size": [512, 512], "counts": "l;Q1P?O2M2[Om@:U?El@:V?Cm@:U?Dm@:_?N1N3M2O2M\\ci7"}, "image_id": 903, "id": 14870}, {"iscrowd": 0, "category_id": 1, "bbox": [407.0, 385.0, 77.0, 78.0], "area": 3035, "segmentation": {"size": [512, 512], "counts": "em[61o?2M2N3N1N3M2N2JAi@a0U?@i@b0U?7M2CUOcAm0Z>VOcAl0\\>UObAn0[>UOcAl0[>VOcAm0[>=N1O00010O00010O00010O0001O01O01O01O01O01O01O01O01O01O00010O00010O00010O02N3N110N1N2N3N1N3M2O2M2N3VOo@a0R?]OPAa0S?]Oo@`0Z?O1N3M2O2M2N3N1Nhb="}, "image_id": 903, "id": 14871}, {"iscrowd": 0, "category_id": 1, "bbox": [279.0, 392.0, 90.0, 82.0], "area": 3439, "segmentation": {"size": [512, 512], "counts": "gm[42m?3N1N3M3N1N3M2O2M2N3N2M2N3NO00010O000102M2N3N1N3M3N1N3M2O2M00010O0010O00010O00010O0001K[NoAd1Q>^NmAc1R>51GmAdNS>]1oAaNP>_1RB_No=`181O010O01O01O01O01O010O01O010O3M3N1010ON3M2O2M3N1N3M2O2M2N3N2M2N3N1N3N2M2N3N1NcRW2"}, "image_id": 903, "id": 14872}, {"iscrowd": 0, "category_id": 1, "bbox": [40.0, 401.0, 70.0, 74.0], "area": 2665, "segmentation": {"size": [512, 512], "counts": "n\\d02n?2M2O2M2N3N1N3M2O2M2N3N1N3M2O2M2N3O0`AjNV>U1hAnNW>S1gAnNZ>Q1dAROZ>P1cARO\\>[1M2N3M2O2M2N3N1N3M2O0O02O1N3M2O2M2N3N1N3M2O2M2N3M2O2O000N3N1N3M2O2M2N3N1N3M2O2M2N3N1N3M2O2M2NmaX6"}, "image_id": 903, "id": 14873}, {"iscrowd": 0, "category_id": 1, "bbox": [440.0, 434.0, 72.0, 78.0], "area": 3117, "segmentation": {"size": [512, 512], "counts": "\\_l62m?2N3N1N3M2O2ADUA>i>DUA?i>CUA>i>ETA>j>CUA>i>?M100O1O100O100O1O1@cN]B]1b=eN\\B[1e=gNXBZ1g=iNWBV1i=lNUBT1l=nNRBR1m=POQBP1P>?01O01O01O01O010O00010O000010O01O01O01O2O2M2N010O00012M2N2O2M2N3mN[Ag0f>WO\\Ag0g>WO[Af0g>XO[Af0Q?O2M2N3N1N3M2O\\A"}, "image_id": 903, "id": 14874}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 435.0, 58.0, 75.0], "area": 2133, "segmentation": {"size": [512, 512], "counts": 
"c=W1j>POWAh0j>VOWAk0h>SO[Al0m>1O010O010O0SASOg>m0VAUOj>R10OO2M2O2M2N3N1N3M2O0O00010O00010O000010O2N2N3N1N3M2J]AmNe>P17O2O010O0N3M2O2M2N3N1N3M2O1N3M2O2M2N3Nj`R7"}, "image_id": 903, "id": 14875}, {"iscrowd": 0, "category_id": 1, "bbox": [313.0, 458.0, 85.0, 54.0], "area": 2659, "segmentation": {"size": [512, 512], "counts": "ool41n?100O1O100O1O1UOLjA4U>OhA2X>OfA2Y>0eA1Z>2dAN\\>3bAN]>5YAC19e>6XAC27f>=XADg>k0O100O1O100O1O11O1O1OO100O1O100O1O100O1O100O1O100002M2N1O01O01O01O01O01O000100O3M2O2M2N3M2O2M1O10O00010O00010O0001O2O1Ji@@Z?>5N3M2O1N3M2O2MTah1"}, "image_id": 903, "id": 14876}, {"iscrowd": 0, "category_id": 1, "bbox": [495.0, 494.0, 17.0, 18.0], "area": 199, "segmentation": {"size": [512, 512], "counts": "iog72m?2O2M2N2O0O1O100O1O100O1O1O100O1"}, "image_id": 903, "id": 14877}, {"iscrowd": 0, "category_id": 1, "bbox": [166.0, 496.0, 56.0, 16.0], "area": 489, "segmentation": {"size": [512, 512], "counts": "o_c21o?0O1O1NNV@2j?2O1O100O1O100O1O100O1O100O1O100O1002N1O2N1O2N0000O1O100001O2N1O1OO100O1O100O1O100O1O1001O2N1O2N1O2N1OR``4"}, "image_id": 903, "id": 14878}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 503.0, 7.0, 9.0], "area": 34, "segmentation": {"size": [512, 512], "counts": "g?9h?O2N1O2N1O1OQPl7"}, "image_id": 903, "id": 14879}, {"iscrowd": 0, "category_id": 1, "bbox": [398.0, 507.0, 11.0, 5.0], "area": 32, "segmentation": {"size": [512, 512], "counts": "o_W61o?0O1O100O1O1001O1O2NQPc1"}, "image_id": 903, "id": 14880}, {"iscrowd": 0, "category_id": 1, "bbox": [164.0, 0.0, 61.0, 31.0], "area": 1213, "segmentation": {"size": [512, 512], "counts": "PPb22n?2N2N3M2N2N2N3M2N2N2N3M2N2N0000O10000O100O100O10000O100O100O100O10000O100O100O10000O100O100O10000O100O100O10000O3N2M3N2Mi__4"}, "image_id": 905, "id": 14881}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 62.0, 71.0, 196.0], "area": 13637, "segmentation": {"size": [512, 512], "counts": "Q2Q6o9000000000000000000000000000O100000000000000000000000000000000000000000000O10000000000000000000000000000000000000000000000O1000000000000000lJVcl6"}, "image_id": 905, "id": 14882}, {"iscrowd": 0, "category_id": 1, "bbox": [350.0, 66.0, 93.0, 98.0], "area": 4830, "segmentation": {"size": [512, 512], "counts": "nS_51n?2O2M2N2N3YOEeAEdA=Z>FcA=Z>EdA=Z>EeAFcA=Z>EdA=Z>EdA=Z>FdAEdA=Z>f0N3M2O1N3M2N2O2M2N2N3N1N2N1O010O0000010O0000010O0000011N001O01O0001O01O00011N2N3M2O1N3M2O1N3M2N2O2M2N2N3N1N2N3M2O1N3M2N2O2M2N2N3N1N2N3M2O1N3M2N2O2M2N2N3NZ\\R1"}, "image_id": 905, "id": 14883}, {"iscrowd": 0, "category_id": 1, "bbox": [206.0, 84.0, 43.0, 41.0], "area": 821, "segmentation": {"size": [512, 512], "counts": "ZSW32n?2M2N3N1N2N3M2O2M2N2O2M002N2O0O000010JQAZOo>g0401O01O0001O01O01O01O02N2N3N1N3M2N2O2M2N3N1Nn\\S4"}, "image_id": 905, "id": 14884}, {"iscrowd": 0, "category_id": 1, "bbox": [255.0, 115.0, 156.0, 204.0], "area": 8920, "segmentation": {"size": [512, 512], "counts": "fdo31n?3M2O1N3M2N2O2\\OAbAa0\\>AbAb0[>AbAa0]>@bAa0\\>AbAb0[>AbAa0\\>AcA`0\\>AbAb0[>c0N2N3N1N3M2O1N3M2O110O0010O0010O00010O00010O010O00010O00010O010O00010O00010O0010O0010O00010O00010O010O00010O00010O010O00010O00010O00010O010O00010O00010O010O00010O00010O0010O0010O00010O00010O010O00010O00010O010O0001O0N2N3M2O1N3M2O2M2N2N3N1N2N3M2O2M2N2O2M2N2N3N1N2N3M2O2M2N2N3N1N2NlVb1"}, "image_id": 905, "id": 14885}, {"iscrowd": 0, "category_id": 1, "bbox": [139.0, 0.0, 271.0, 192.0], "area": 24504, "segmentation": {"size": [512, 512], "counts": 
"b`U2`0`?j0VOk0UOj0VOj0VOk0UOj0VO00O100O1O1O100O1O100O1O1O100O1O1O100O1O100O1O1O100O1O2N2O1N3M2O2M2N2N3N1N2N3M2OO000010O0000010O000010O000010O0000010O000010O000010O000010O0000010O000010O000010O0000010O000010O1O1O100O1O1O100O1O100O1O1O100O1O100O1O1O100O1O1O100O1O100O1O1O100O1O1O100O1O100O1O1O100O1O1O100O1O100O1O1O100O1O1O100O1O100O1O1O100O1O1O100O1O100O1O1O100O1O1O100O1O100O1O1O100O1O1O100O1O100O1O1O100O1O1O100O1O100O1O1O100O1O1O100O1O100O1O1O100O1O1O100O1O100O1O1O100O1O1O100O1O100O1O1O100O1O1O100O1O100O1O1O100O1O1O100O1O100O1O1O100O1O100O1O1O100O1O1O100OQPc1"}, "image_id": 906, "id": 14886}, {"iscrowd": 0, "category_id": 1, "bbox": [439.0, 48.0, 73.0, 146.0], "area": 6563, "segmentation": {"size": [512, 512], "counts": "Tdk61n?2O2M2N2N3N1N3M2O1N3e@\\Ob=OoCj0o;XOoCk0n;XOPDi0n;YOPDi0o;XOoCk0n;XOPDi0n;YOPDj0n;XOoCj0g;hN\\C`0k0j0g;hN\\C`0l0j0f;gN\\Cb0m0g0e;B[D?b;C_D<`;F_D:_;HaD9\\;IeD6Y;MfD4Y;LhD3VOcNk;[1nD2UOeNn;X1mD4ROgNPgBGU==gBGT=>hBFT=>hBFT=>gBGU==gBGT=>hBFT=>hBEU=g1L4K4M0000O10O1000O10O102N4L4K5L4L4L4L4K5L4L3M3L4M3M3M3L5L3M3M3L4M3M3M3M4K4M2N22O1N2N2N2O1N2N2N3N1N2N2N2O1N2N2N2O2K4L4L4L4L4L4L4L4L5K4L4L4L4L4L4L3M003M3M3M3M3M3M3M3M3M2N3M3M3M3M2N2N3M2N2N3M2N2N3M2N2N2N3M2N2N3M2N2N3M2N2N2N3M2N2NQPa4"}, "image_id": 906, "id": 14892}, {"iscrowd": 0, "category_id": 1, "bbox": [258.0, 0.0, 68.0, 76.0], "area": 2241, "segmentation": {"size": [512, 512], "counts": "mQQ41m?2O2N2N2M3N1O2N2M3N1O2IZOQAh0l>ZORAi0l>7N1kNmN\\CU1cTOlAm0U>>1N2O1O1O1N2O1O1N2O1O1O1N2O1O1O1N2O1O1N3N2N2N2M2O2N2N2M3N1O2M3N2N1O2Mfol2"}, "image_id": 907, "id": 14893}, {"iscrowd": 0, "category_id": 1, "bbox": [353.0, 22.0, 33.0, 31.0], "area": 575, "segmentation": {"size": [512, 512], "counts": "[a`53k?2N3M2N3L3N3M2N3M2N30O00010O010O0M4M2N3M2N30O010O00010OO2M2N3L3NRon1"}, "image_id": 907, "id": 14894}, {"iscrowd": 0, "category_id": 1, "bbox": [315.0, 29.0, 34.0, 32.0], "area": 626, "segmentation": {"size": [512, 512], "counts": "`am42l?3M2M3N3M2N3M2M4N110O01O010O010O010O01O010O01O010N1N3M2N3M2M3N3M2Nj^a2"}, "image_id": 907, "id": 14895}, {"iscrowd": 0, "category_id": 1, "bbox": [382.0, 35.0, 38.0, 33.0], "area": 712, "segmentation": {"size": [512, 512], "counts": "gQo51m?2M4M2M4M2N2M4M2O20O00010O010O010O00010O010O00010O010O00010L3N3L3N3L3N2N3Ldn]1"}, "image_id": 907, "id": 14896}, {"iscrowd": 0, "category_id": 1, "bbox": [71.0, 57.0, 51.0, 39.0], "area": 1013, "segmentation": {"size": [512, 512], "counts": "[bS13k?2M4M2N2M4M2N3M210O01O010O01O010O010O01O010O01O010O010O01O010O01O010O01O010O01O010OO2M2M4M2N3M2M3N3MfmR6"}, "image_id": 907, "id": 14897}, {"iscrowd": 0, "category_id": 1, "bbox": [320.0, 62.0, 16.0, 18.0], "area": 168, "segmentation": {"size": [512, 512], "counts": "YRP53k?2M3N3M201N1O2O010O01L6K2N3M2Nnmg2"}, "image_id": 907, "id": 14898}, {"iscrowd": 0, "category_id": 1, "bbox": [23.0, 73.0, 24.0, 19.0], "area": 290, "segmentation": {"size": [512, 512], "counts": "gb;1m?3L3N2M4M2N30O01O01O001M2010O00010O01O0N3L3N2Mc]X7"}, "image_id": 907, "id": 14899}, {"iscrowd": 0, "category_id": 1, "bbox": [301.0, 102.0, 14.0, 12.0], "area": 105, "segmentation": {"size": [512, 512], "counts": "\\cf41m?3L3O2O0010O010O010O001M2Mi\\R3"}, "image_id": 907, "id": 14900}, {"iscrowd": 0, "category_id": 1, "bbox": [335.0, 112.0, 42.0, 28.0], "area": 560, "segmentation": {"size": [512, 512], "counts": "lcW51m?2M4M2N3L30010O0010O010O0010O0010O010O0010O0010O010O0010O0010O010O00010O0O2L3N3M2MT\\S2"}, "image_id": 907, "id": 14901}, {"iscrowd": 0, "category_id": 1, "bbox": [231.0, 373.0, 72.0, 81.0], "area": 2756, "segmentation": {"size": [512, 512], 
"counts": "cmc31n?2N2N2N2N2N2N2N2N3M2N2N2N2N2N2N2N2N00000UOnNiBR1W=POgBP1Y=ROeBn0[=TOcBm0\\=UOVBNOm0k=WOUBLOm0m=XOSBKOm0n=ZOSBHNn0o=]OQBj0o=c0000000001O2N3M2N2N2N2N000001O2N3M2N2O1N0002N2YOjAAY>;jACX>;jACW>?nA_OR>a0PB]OP>c0RB[On=f0SBXOm=h0UBVOl=i0g0N2N2N2Fe@L]?2e@L]?2e@L]?2e@L^?1ZTX3"}, "image_id": 907, "id": 14902}, {"iscrowd": 0, "category_id": 1, "bbox": [376.0, 0.0, 54.0, 41.0], "area": 1313, "segmentation": {"size": [512, 512], "counts": "^Pl53l?2M2N3M2O2M2N3N2N101O001O1O001O001O1O001O001O1O001O001O1O001O001O1O001O001O00ROXAe0g>YO\\Ag0c>XO_Af0b>XO`Af0b>XOaAf0m>N3M3N1N3M2O2M3M2O2M_oX1"}, "image_id": 908, "id": 14903}, {"iscrowd": 0, "category_id": 1, "bbox": [439.0, 0.0, 29.0, 21.0], "area": 401, "segmentation": {"size": [512, 512], "counts": "[`k61m?2N3M2N3M2O2O001O001O001O001O001O001O00O1N2N2N2N2N2N2N2KW@1k_f0"}, "image_id": 908, "id": 14904}, {"iscrowd": 0, "category_id": 1, "bbox": [472.0, 0.0, 23.0, 28.0], "area": 591, "segmentation": {"size": [512, 512], "counts": "PP\\7h0X?4L000000000000000000000000000000000009G;E00\\_8"}, "image_id": 908, "id": 14905}, {"iscrowd": 0, "category_id": 1, "bbox": [498.0, 0.0, 6.0, 1.0], "area": 6, "segmentation": {"size": [512, 512], "counts": "PPi71o?000000000PP4"}, "image_id": 908, "id": 14906}, {"iscrowd": 0, "category_id": 1, "bbox": [510.0, 0.0, 2.0, 9.0], "area": 18, "segmentation": {"size": [512, 512], "counts": "PPo79g?00"}, "image_id": 908, "id": 14907}, {"iscrowd": 0, "category_id": 1, "bbox": [484.0, 31.0, 28.0, 36.0], "area": 604, "segmentation": {"size": [512, 512], "counts": "fQb71m?3M2M4M2M4M2N210O010O010O00010O0N3M2M3N3M210O010O01O01mN"}, "image_id": 908, "id": 14908}, {"iscrowd": 0, "category_id": 1, "bbox": [361.0, 50.0, 61.0, 64.0], "area": 1733, "segmentation": {"size": [512, 512], "counts": "nad52m?2O2M3M3N2M2O2M3N2M3N1N3M3N2M3N1N3N2M3N2M2010000000O01000N2M3N1N3N2M3N2M2N3N2M3N2M10O0010O0010O010O010O02O2M3M3N2M2O2M3NRm\\1"}, "image_id": 908, "id": 14909}, {"iscrowd": 0, "category_id": 1, "bbox": [422.0, 59.0, 29.0, 29.0], "area": 466, "segmentation": {"size": [512, 512], "counts": "[Rc62l?2O2M2O2M3N1N3M3N1N3O10O0100O0100O010N2M2O2M3N1N3N2M2N3Nk]n0"}, "image_id": 908, "id": 14910}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 65.0, 34.0, 87.0], "area": 1692, "segmentation": {"size": [512, 512], "counts": "Q2f2Z=0010O001N101HfBhMZ=V2hBkMW=R2mBmMT=P2nBnMT=P2`0L3N2M4M2N3L3N3L3N2M4M2N3L3N2M4M2N3L3N2M4M2Mem^7"}, "image_id": 908, "id": 14911}, {"iscrowd": 0, "category_id": 1, "bbox": [41.0, 65.0, 29.0, 29.0], "area": 476, "segmentation": {"size": [512, 512], "counts": "cbd02l?2N3M2N3M2N3M2N3M210O01O010O010O010O010N1N3M2N3M2N3M2N3Mgml6"}, "image_id": 908, "id": 14912}, {"iscrowd": 0, "category_id": 1, "bbox": [318.0, 69.0, 23.0, 35.0], "area": 536, "segmentation": {"size": [512, 512], "counts": "XRo41o?7H7J6J6J6JO10O10000000O10OEVABj>>\\A\\Od>d0<000002N6I7J]]e2"}, "image_id": 908, "id": 14913}, {"iscrowd": 0, "category_id": 1, "bbox": [457.0, 76.0, 55.0, 53.0], "area": 1545, "segmentation": {"size": [512, 512], "counts": "PcT73k?3M2M4M2N2N30O01O010O01O010O01O001L3N2N3L3O20O010O0010SAROf>n0XAUOg>R110O01O010O01O010O01O010oNWAi0i>UOYAl0f>QO]Ao0k>0O01O010O01O010O01O01nL"}, "image_id": 908, "id": 14914}, {"iscrowd": 0, "category_id": 1, "bbox": [277.0, 82.0, 44.0, 56.0], "area": 1627, "segmentation": {"size": [512, 512], "counts": "gbZ45k?6J6J1O000O100000O11O6I7J6J6J6J3MO01000000000O01000000000O010000000O106J6J5K6J6I7J6J6Jo[o2"}, "image_id": 908, "id": 14915}, {"iscrowd": 0, "category_id": 1, "bbox": [332.0, 93.0, 49.0, 64.0], "area": 
1876, "segmentation": {"size": [512, 512], "counts": "]SV53l?6K5K6J5K5J6K1O0000OFROcAo0]>:00000O01000002N0O1000005K6I3N00O10004L5K5J6K6J5K3MO10O100000005J6K5K6J5KY[Q2"}, "image_id": 908, "id": 14916}, {"iscrowd": 0, "category_id": 1, "bbox": [432.0, 100.0, 17.0, 16.0], "area": 153, "segmentation": {"size": [512, 512], "counts": "^Sh61n?1N3M3N1N3M20100O01O0O2M3N1N3N1Ni\\o0"}, "image_id": 908, "id": 14917}, {"iscrowd": 0, "category_id": 1, "bbox": [37.0, 106.0, 45.0, 63.0], "area": 1641, "segmentation": {"size": [512, 512], "counts": "ncb01m?3M2SAKm=8PBJn=8PBJn=9oAJn=8PBJn=9oAJn=8PBJn=9oAJn=8PBJP>7mALS>3kAOU>2hA1X>k0010O010O01O010O010O010O01M2N3M2N3M2N3M2N3M2N3Do@ES?9o@ET?7PAFR?8PAFS?7SOcAn0Z>TOgAk0V>XOjAi0S>ZOlAf0Q>_OnA`0Q>BmA`0R>g0010O0001M2M4M21N1N2M4L3N3L3N2M4M2M4L3N2M4M2010O00O2L3N2M4L3N3L3N2M4Mi[d5"}, "image_id": 908, "id": 14920}, {"iscrowd": 0, "category_id": 1, "bbox": [446.0, 130.0, 66.0, 58.0], "area": 2079, "segmentation": {"size": [512, 512], "counts": "oTo61m?3L3N3M2M3N3M2M4M2N3L30010O010O010O00010O0O2M2M4N10010O0010O010O0010O0010O010nNZAh0f>VO\\Aj0e>SO^Am0j>010O010O00010O010O00010O010O010O00010O010O01SORAg0m>WOUAi0o9"}, "image_id": 908, "id": 14921}, {"iscrowd": 0, "category_id": 1, "bbox": [208.0, 133.0, 26.0, 28.0], "area": 569, "segmentation": {"size": [512, 512], "counts": "hTX35e?7I6J6O1000001O0001O000001O0001O000001O0001K4K5J6JnkZ4"}, "image_id": 908, "id": 14922}, {"iscrowd": 0, "category_id": 1, "bbox": [156.0, 139.0, 48.0, 81.0], "area": 1933, "segmentation": {"size": [512, 512], "counts": "nU^22l?2M3M4O0N3L3M3N3L2N3]AVOn=l0oAXOm=l0oAWOc=I\\BS1OVOc=J[BR1OXOb=I\\BS1NWOc=k1O000010O0001O010O0001XO_BPO`=m0cBSO]=j0fBVO[=g0hBUO[=h0hBUO[=h0hBVO[=g0hBUO[=h0hBUO[=h0iBUOZ=h0P1M3M4M2M3M4M2M3010OO1M_ki4"}, "image_id": 908, "id": 14923}, {"iscrowd": 0, "category_id": 1, "bbox": [421.0, 140.0, 12.0, 16.0], "area": 117, "segmentation": {"size": [512, 512], "counts": "idb61l?3L4M4L300010O0N2M3L5Le[W1"}, "image_id": 908, "id": 14924}, {"iscrowd": 0, "category_id": 1, "bbox": [71.0, 155.0, 28.0, 27.0], "area": 476, "segmentation": {"size": [512, 512], "counts": "]eS12k?3N3L3N2M4M2M4O00010O0010O0010O0010O001O0N2M4M2M4M2M3NP[^6"}, "image_id": 908, "id": 14925}, {"iscrowd": 0, "category_id": 1, "bbox": [413.0, 170.0, 98.0, 65.0], "area": 2823, "segmentation": {"size": [512, 512], "counts": "Wf^62k?3M4M2M3M4M2M3M4M2M4M20010O01O01O010O00010O01O01O010O00010O01O01O010O00N3M2010O0010O00010WOPA`0P?]OSAc0V?0O0010O00j@\\OQ?d0l@_OS?g00010O010O00010M2M301O010O00010O00010O010O00010O00010O010O00010O0010M2M4M2M3M4M2M3N3L3M4Me9"}, "image_id": 908, "id": 14926}, {"iscrowd": 0, "category_id": 1, "bbox": [305.0, 176.0, 12.0, 14.0], "area": 102, "segmentation": {"size": [512, 512], "counts": "ieh43k?2N3M2N3N1001M2N2N3N1N`ZQ3"}, "image_id": 908, "id": 14927}, {"iscrowd": 0, "category_id": 1, "bbox": [215.0, 177.0, 41.0, 59.0], "area": 1664, "segmentation": {"size": [512, 512], "counts": "Ug[31j?6J5K5K5K5K6K4K5K5K5M4O01O000001O01O000001O01O000001O01O000O1K5K6J5L4K5K6J5K5K5K^jo3"}, "image_id": 908, "id": 14928}, {"iscrowd": 0, "category_id": 1, "bbox": [313.0, 179.0, 29.0, 28.0], "area": 461, "segmentation": {"size": [512, 512], "counts": "Tfl42l?2N3M2N3M2N3M2N3N1010O010O010O010O010O0N3M2N3M2N3M2N3M2NVjd2"}, "image_id": 908, "id": 14929}, {"iscrowd": 0, "category_id": 1, "bbox": [262.0, 199.0, 53.0, 72.0], "area": 2143, "segmentation": {"size": [512, 512], "counts": 
"QXS42l?3L3N2YOHdA;X>HfA:X>IeA:X>HeA;Y>GeAGeA;Y>GeAHfA:Z>g0N1N3M2N2M4M210O010O010O001O00N2M4M2N3M2N3M2M4M2N2N3M2N3L3N3M2N3M2N3L3N2N3M2N3M2M^YR3"}, "image_id": 908, "id": 14930}, {"iscrowd": 0, "category_id": 1, "bbox": [378.0, 204.0, 38.0, 33.0], "area": 690, "segmentation": {"size": [512, 512], "counts": "mVm52l?2N3N1N3M2N3M2N3M2010O010O010O010O0100O010O010O010O010O01N1O2M3M2N3M2N3M2NXi_1"}, "image_id": 908, "id": 14931}, {"iscrowd": 0, "category_id": 1, "bbox": [317.0, 205.0, 58.0, 89.0], "area": 2495, "segmentation": {"size": [512, 512], "counts": "hhn41m?2N3M2N3M2N3M2N3M2N3M2N3L3@POnAR1P>POnAS1o=POnAR1P>POnAS1o=POnAR1P>a0M2N3M2N3M2N3M2N3M1O0003M2N3M2N3M2N3L3N3M2BnAlNT>R1nAlNU>Q1nAlNT>R1nAlNU>Q1>N3M2N3M2N3M2N3M2M4M2N3M2NXYT2"}, "image_id": 908, "id": 14932}, {"iscrowd": 0, "category_id": 1, "bbox": [383.0, 249.0, 50.0, 66.0], "area": 1798, "segmentation": {"size": [512, 512], "counts": "]io51m?3M2M4M2N3M2N3M2\\O\\OjAg0S>\\OjAf0T>\\OiAh0T>[OiAg0U>[OiAh0T>[OiAg0U>d0L31O01O010O010O010O010OO2M2N3M2N3M2M4M2N2N3M2N3M2N3L3N3M2N3M2N3M2N3LjWW1"}, "image_id": 908, "id": 14933}, {"iscrowd": 0, "category_id": 1, "bbox": [422.0, 250.0, 65.0, 85.0], "area": 2834, "segmentation": {"size": [512, 512], "counts": "QZc62l?3M2N3L3N3M2O20O01M2J[Oo@g0o>[Oo@h0n>6N3L3N3M2N3M2M4M2N2N3M2N3L22M20O1N3L3N3M2N3M1O00O4M2N3M2N2N3L3N3M2N3M2M4M2N3M2N2N3L3N3M2N3M2N3\\Oi@:`?N2N3M2NfW<"}, "image_id": 908, "id": 14934}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 259.0, 38.0, 62.0], "area": 1611, "segmentation": {"size": [512, 512], "counts": "d8W1f>3N3L3N2M4L3O20O00010O01O0O1O2O01O0N3M2M3N3L3N2M4L3N3L3N2M4L3N3L3N2M4M2M4Lgg\\7"}, "image_id": 908, "id": 14935}, {"iscrowd": 0, "category_id": 1, "bbox": [33.0, 280.0, 59.0, 63.0], "area": 1942, "segmentation": {"size": [512, 512], "counts": "Tj`04j?2M3N3L3N3N11O010O010O00010OO2L3N2M4M2M4M2M3N3L3N3M2M4M2N210O0010O0010O0010O0M4M2N3L3N2M4M2M4M2M3N3L3N3M2M3N3L3N3L3Nnfa6"}, "image_id": 908, "id": 14936}, {"iscrowd": 0, "category_id": 1, "bbox": [100.0, 299.0, 49.0, 63.0], "area": 1717, "segmentation": {"size": [512, 512], "counts": "mZb13k?2M3N3L3N3M2M4M2M3N3M2M4M2M3N3M2M4M2M4M2N201O0010O001O0N3L3N2M4M2N3L3N2M4M2N3L3N3L3N2N3L3N3L3N2N]Ve5"}, "image_id": 908, "id": 14937}, {"iscrowd": 0, "category_id": 1, "bbox": [146.0, 319.0, 58.0, 65.0], "area": 2181, "segmentation": {"size": [512, 512], "counts": "_[Y23k?2M4L3N2M4M2M3M4M2M4O000010O01O0M3N3L3M3N3L3M4M2N2010O00010O01O01O010O01O01N1N3L3M3N3L3N2M4L3N3L3M3N3L3N2M4L3N3L3Mhei4"}, "image_id": 908, "id": 14938}, {"iscrowd": 0, "category_id": 1, "bbox": [9.0, 332.0, 34.0, 33.0], "area": 601, "segmentation": {"size": [512, 512], "counts": "kj43k?3M2N2M4M2N3M2010O01O01O01O010O010O003NO01O01O010]Oj@:V?Dl@=S?@QA?X?1OM4M2M4L3NTUZ7"}, "image_id": 908, "id": 14939}, {"iscrowd": 0, "category_id": 1, "bbox": [203.0, 347.0, 58.0, 62.0], "area": 1851, "segmentation": {"size": [512, 512], "counts": "YlU32k?3N3L3N3L3M3N30O010O00010O010O0001G\\OUAc0i>_OWAb0e>B[A=c>EZA>c>b0M2O2O01O010OO2L3N2M4L3O2O01O0N3M2M3N3L3N3L3N2M4M2M4M2M3N3L3N2M4M2M4M2MoTm3"}, "image_id": 908, "id": 14940}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 352.0, 28.0, 34.0], "area": 599, "segmentation": {"size": [512, 512], "counts": "b;9e?2M4M2N2N3L3N3M210O010O01O01O010O01N1N3L3N3L3N3M2M3N3Mida7"}, "image_id": 908, "id": 14941}, {"iscrowd": 0, "category_id": 1, "bbox": [66.0, 355.0, 32.0, 28.0], "area": 541, "segmentation": {"size": [512, 512], "counts": "c[Q13j?3N3L3N2M4L301O01O01O010O00010O01O01O010O01O01O01L3M4M2M3N3L3Nfd^6"}, "image_id": 908, "id": 14942}, {"iscrowd": 0, 
"category_id": 1, "bbox": [259.0, 362.0, 49.0, 65.0], "area": 1717, "segmentation": {"size": [512, 512], "counts": "nlQ43k?2M3N3L3N3M2H^Oo@f0n>\\OPAf0n>7M4M2M4O00010M2N3L3N3M2M3N3L3010O001M2M4M2M3N3M2M4M2M3N3M2M4M2M4M2N2M4M2M4M2N2M_dU3"}, "image_id": 908, "id": 14943}, {"iscrowd": 0, "category_id": 1, "bbox": [53.0, 376.0, 33.0, 34.0], "area": 675, "segmentation": {"size": [512, 512], "counts": "^lj03k?2M4M2M3N3L3N3L3N201O010O00010O010O00010O010ON2N3L3N3L3N2M4M2M4MQdd6"}, "image_id": 908, "id": 14944}, {"iscrowd": 0, "category_id": 1, "bbox": [121.0, 376.0, 29.0, 28.0], "area": 468, "segmentation": {"size": [512, 512], "counts": "[ll11m?2M4M2N3L3N2M4M210O0010O0010O0010O0010O00N3L3N3L3N3L3N2NRdd5"}, "image_id": 908, "id": 14945}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 390.0, 47.0, 77.0], "area": 1970, "segmentation": {"size": [512, 512], "counts": "V=?f0BQ>b0lA@R>b0kABS>`0jABW>=fAFZ>:dAI[>8bAJ_>5^AOa>h01OO2M2M4M2N2M4M2M4M2M3N00O103M2M4M2M3N3M2M3N3L3N2M4M2N3L3N2M4M2M3N3M2M3N3L3NcSX7"}, "image_id": 908, "id": 14946}, {"iscrowd": 0, "category_id": 1, "bbox": [319.0, 393.0, 51.0, 63.0], "area": 1917, "segmentation": {"size": [512, 512], "counts": "jmo43j?3N3L3BEUA=h>FUA>h>DVA>g>FVA=g>?M4M2N2M4M2O20O00010O010O00010O010O00010ON1N10O101N4M2M3N3M2M4M2M4M2M3N3L3N3M2M3N3L3NbcV2"}, "image_id": 908, "id": 14947}, {"iscrowd": 0, "category_id": 1, "bbox": [173.0, 397.0, 30.0, 32.0], "area": 590, "segmentation": {"size": [512, 512], "counts": "Tmf21l?4L3N3L3N2M4L3N2N3O0010O00010O010O00010O0N3M2M3M4M2M3N3L3M_Sj4"}, "image_id": 908, "id": 14948}, {"iscrowd": 0, "category_id": 1, "bbox": [366.0, 411.0, 59.0, 59.0], "area": 1783, "segmentation": {"size": [512, 512], "counts": "o]g53l?2N2N2O2M2N2N2O2M2N2N2O2M2N2N2O2M1O00010HnNaAR1^>QO`Ao0^>:O1N3N11O1O1O2O0OO11O2N100O2N1O2N100O2O0101NO1O2O0JUAVOm>g0VAWOk>f09N3L3N3L3N2N3L3NmR[1"}, "image_id": 908, "id": 14949}, {"iscrowd": 0, "category_id": 1, "bbox": [49.0, 414.0, 53.0, 78.0], "area": 2453, "segmentation": {"size": [512, 512], "counts": "Poh01S?2dA1Y>1dA2Y>2dA1X>2eA1Y>1eA1X>3dA1T>_OgAb031T>7hAMT>Q1N2M4M2M4M2O110O010O00010O010O00010O01M2M3N3L3N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3Neb\\6"}, "image_id": 908, "id": 14950}, {"iscrowd": 0, "category_id": 1, "bbox": [161.0, 424.0, 33.0, 38.0], "area": 715, "segmentation": {"size": [512, 512], "counts": "Tn`22l?3L3N2N3L3N3M2N2M4M2N3M2O1010O01O01O01O01M6J2N3M2M3N3M2N2N3L3N2N`bn4"}, "image_id": 908, "id": 14951}, {"iscrowd": 0, "category_id": 1, "bbox": [104.0, 433.0, 51.0, 79.0], "area": 2304, "segmentation": {"size": [512, 512], "counts": "m_d11m?3M2M3BIQA:Z>FkA3I9Y>HkA2H9[>GjA3I8[>GjAh0S>\\OjAf0T>\\OiAg0U>c0M3N2N2M3N2N2M3N2N21O00001O001O0M4M2N2M4M2N3L3N3`NfAV1\\>hNgAT1\\>jNfAT1e>M2M4M2N2M4M2N3L3N3L3N2N3LmQb5"}, "image_id": 908, "id": 14952}, {"iscrowd": 0, "category_id": 1, "bbox": [257.0, 434.0, 32.0, 31.0], "area": 578, "segmentation": {"size": [512, 512], "counts": "VnP41m?3M2N3L3N3M2N3L3N2010O010O01O010O01O010O010O0N2N3M2N3L3N3M2N3LWR_3"}, "image_id": 908, "id": 14953}, {"iscrowd": 0, "category_id": 1, "bbox": [216.0, 443.0, 36.0, 38.0], "area": 818, "segmentation": {"size": [512, 512], "counts": "e^\\32l?2M4M2N3M2M4M2N3M2M3N3N110O010O0010O0010O010O0001M2N3M2M4M2N3M2M3N3M2NmaQ4"}, "image_id": 908, "id": 14954}, {"iscrowd": 0, "category_id": 1, "bbox": [286.0, 444.0, 29.0, 29.0], "area": 499, "segmentation": {"size": [512, 512], "counts": "^^_42l?3L3N3M2M4M2N3M20010O010O010O0010O0010O01L3N3M2N2M4M2N3LnQR3"}, "image_id": 908, "id": 14955}, {"iscrowd": 0, "category_id": 1, "bbox": [422.0, 447.0, 47.0, 
42.0], "area": 1665, "segmentation": {"size": [512, 512], "counts": "l^c69\\?010O000000010O00N2]OWAJn>1WAJo>0c0LV_8"}, "image_id": 909, "id": 14968}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 458.0, 9.0, 25.0], "area": 125, "segmentation": {"size": [512, 512], "counts": "Z>i0W?M3N3M2M4M2M4M2NeQk7"}, "image_id": 909, "id": 14969}, {"iscrowd": 0, "category_id": 1, "bbox": [457.0, 17.0, 55.0, 51.0], "area": 1683, "segmentation": {"size": [512, 512], "counts": "]aT71l?4M2M4M2M3N3L3N3L3N2M40O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01OO2M210O00010O010OORO"}, "image_id": 911, "id": 14970}, {"iscrowd": 0, "category_id": 1, "bbox": [1.0, 41.0, 43.0, 82.0], "area": 2206, "segmentation": {"size": [512, 512], "counts": "na03j?3M3L4M4L3M301dAZOD0U=f0TC^OCOY=c0QCACO\\=a0lBDD0_=NXAOk>NXAOk>MZAOj>MYAOT]Z7"}, "image_id": 911, "id": 14971}, {"iscrowd": 0, "category_id": 1, "bbox": [444.0, 72.0, 68.0, 69.0], "area": 2508, "segmentation": {"size": [512, 512], "counts": "TSn62k?3N2N3L3N3M2M4M2N3L3N2N3O010O0010O0010O010O010O0010O0010O010O0010O0010O010O010XAQO]>o0aASO_>m0_AVOa>T100O2M2O2O010O01O010O010O01O010O01O010POdA:\\>DgABjA>W>^OmAb0R>\\OPBd0Q>YORBf0n=XOTBi0Q;"}, "image_id": 911, "id": 14972}, {"iscrowd": 0, "category_id": 1, "bbox": [59.0, 73.0, 46.0, 67.0], "area": 1806, "segmentation": {"size": [512, 512], "counts": "Pdm04i?3L4^OF[A>`>F]A=`>G\\AG\\A=`>F]A=`>c010N1M3M4N11O01O01O01O01O01O01O00N3K3N01O4L3L4M4L3M3M4L3M3M4K4M3M4L3M3M4L3Md][6"}, "image_id": 911, "id": 14973}, {"iscrowd": 0, "category_id": 1, "bbox": [108.0, 84.0, 49.0, 78.0], "area": 2094, "segmentation": {"size": [512, 512], "counts": "gTf13[?NSA5k>MSA6i>NTA4j>NSA5j>OSA4j>b0N3L3N3L3N2D`NUBd1g=`NVBb1h=`NXBa1d=cN[B]1c=eN^BZ1_=iNaBX1^=iN_BY1b=c0O0001L3N3M2M3N2N2M4M2N3L2O2N3L3N2N3L3N3M2M4M2N2M4M3M3L3N3MR]a5"}, "image_id": 911, "id": 14974}, {"iscrowd": 0, "category_id": 1, "bbox": [35.0, 104.0, 26.0, 25.0], "area": 406, "segmentation": {"size": [512, 512], "counts": "jca01l?3M3M4L3M3M4O00010O00010O00010O00010O00M3M4L3M3M4Le\\Q7"}, "image_id": 911, "id": 14975}, {"iscrowd": 0, "category_id": 1, "bbox": [158.0, 104.0, 51.0, 83.0], "area": 2413, "segmentation": {"size": [512, 512], "counts": "aU_21m?3`NOlB4Q=OmB4P=ObBJ_O9m=OaBK_O9m=ObBK^O9m=ObB>[=EbB?[=DbB>[=EbB>\\=EaB>\\=DbB>^=S1O01O01O010O010O0001N1M4M2M4M2N2M4M2M4M2N2M4M2M4M2M4M2N2M4M2M4M2N2M4M2M4M2N[\\g4"}, "image_id": 911, "id": 14976}, {"iscrowd": 0, "category_id": 1, "bbox": [387.0, 108.0, 53.0, 63.0], "area": 1532, "segmentation": {"size": [512, 512], "counts": "ReQ61l?3N3L3N2N30O01O010O0N2M4M2N3L3N2M4M2N3L3NO01000O02O2M4M2O20O0O2L3N3M2M3N3L3010O0010O0N3L3N3M2ZOj@?]?N3L3N2N3LYlS1"}, "image_id": 911, "id": 14977}, {"iscrowd": 0, "category_id": 1, "bbox": [441.0, 135.0, 55.0, 53.0], "area": 1585, "segmentation": {"size": [512, 512], "counts": "Vel62k?4L3M4M21O010OO1N3L3N2M4M2M3N3N1YAoN`>P1\\ATOd>U1O01O010ON2M4N11O010O00010O010O001aNbAY1d>L3M3M4L3M4L3N2N30O010O01O0O1M4M2N3L3NZk7"}, "image_id": 911, "id": 14978}, {"iscrowd": 0, "category_id": 1, "bbox": [209.0, 136.0, 47.0, 66.0], "area": 1879, "segmentation": {"size": [512, 512], "counts": "YeX31f00`>2]A2_>2]A1a>1]A2_>2]A1`>2^A0`>2]A2_>i0N3L3N2M4L3N201O010O01O01O010O01O000M4M2M3N3L3N3L3M3N3L3O2O0O1M4L3N3L3N2M4L3N2M_ko3"}, "image_id": 911, "id": 14979}, {"iscrowd": 0, "category_id": 1, "bbox": [266.0, 146.0, 50.0, 80.0], "area": 2360, "segmentation": {"size": [512, 512], "counts": 
"gVU42Z?1UA1h>2UA1i>2TA1S>HWB:C1S>HXB9C2R>GXBn0e=VOXBl0f=VOWBn0e=UOYBm0e=VOWBn0e=h0N210O0010O0010O0010O0010O00M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N2M4M2M4M2M3N3LRkQ3"}, "image_id": 911, "id": 14980}, {"iscrowd": 0, "category_id": 1, "bbox": [315.0, 179.0, 49.0, 65.0], "area": 1766, "segmentation": {"size": [512, 512], "counts": "Ygm41l?3M4M2M4L3YOAkAa0S>BiAb0S>AjAb0T>AiAa0T>BiAb0S>AjAb0U>f0O01O010O01O01O01O01O010O01OO2L3N2M4L10O01O012M4L3N3L3M3N3L3N2M4L3N3L3M3NZjY2"}, "image_id": 911, "id": 14981}, {"iscrowd": 0, "category_id": 1, "bbox": [373.0, 191.0, 47.0, 85.0], "area": 2227, "segmentation": {"size": [512, 512], "counts": "Yhj53j?3@MPA6n>LPA7l>MPA6m>MQA6l>`0M3M4M2M3BeNUB^1g=eNVB^1h=eNTB_1h=>N2M4L3N2N3O0010N1N3O0JaBlM^=Q2eBoM\\=n1fBoM]=n1:TACo>:>L3N3L3Nii]1"}, "image_id": 911, "id": 14982}, {"iscrowd": 0, "category_id": 1, "bbox": [294.0, 205.0, 24.0, 26.0], "area": 382, "segmentation": {"size": [512, 512], "counts": "oVc42k?3N3L3M3N3L3N3O01O010O00010O010O0N2N3L3M3N3L3N_iP3"}, "image_id": 911, "id": 14983}, {"iscrowd": 0, "category_id": 1, "bbox": [413.0, 217.0, 54.0, 79.0], "area": 2638, "segmentation": {"size": [512, 512], "counts": "eh^62l?2WOOaA4\\>NaA5\\>NbA5[>NaA5]>MaA5\\>OaA4\\>NaA5]>MaA6_>e0010O0010O001O0M3N3M2M4M2M4M2N2O2O0010O0010O0010O010O001ROYBBf=<\\BDd=9`BG`=6bBJ^=3fBL[=1gBM[=1gBM[=0iBMZ=0hBM[=0iBMZ=0hBM[=1hBL[=0hBNZ=0hBM[=0iBMZ=0XZf0"}, "image_id": 911, "id": 14984}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 218.0, 44.0, 87.0], "area": 2125, "segmentation": {"size": [512, 512], "counts": "k7c1[>2N2N3M2M4M2N3M000O101O2N3L0102N2N3M2M4M2N2N3L3N3M2N3M2M4M2N3M2M3N3M2N3M2M4M2N3M2N2M4MlhY7"}, "image_id": 911, "id": 14985}, {"iscrowd": 0, "category_id": 1, "bbox": [46.0, 243.0, 46.0, 83.0], "area": 2330, "segmentation": {"size": [512, 512], "counts": "kYg04\\?MPA6Q>JfA0d03B7P>NZBOB6Q>NZBOB6Q>NZBb0b=BZBc0b=AZBb0e=_OXBd0h=k0O01O01O01O01N1M3L5M2001O01ON2L5L3M3M4L3M3L5L3M3M4L3L4M4L3M3M4L3L4M3M4L3MXha6"}, "image_id": 911, "id": 14986}, {"iscrowd": 0, "category_id": 1, "bbox": [84.0, 251.0, 69.0, 82.0], "area": 2747, "segmentation": {"size": [512, 512], "counts": "PZZ13k?2N3M2N3M2N3M2N3M2N3O010O010O0N3M0000002N3M2N3ZOiN_BY1_=jN^BY1_=iN_BY1_=jN^BY1_=iN_BY1_=jN^BY1_=iN_BY1_=f0O010O010O01N1N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2NeWc5"}, "image_id": 911, "id": 14987}, {"iscrowd": 0, "category_id": 1, "bbox": [150.0, 286.0, 44.0, 63.0], "area": 1710, "segmentation": {"size": [512, 512], "counts": "dZ[21k?4M3M4K4M3M3[O\\OjAi0R>[OjAh0S>[OjAh0R>\\OjAi0T>XOiAk0X>>01O01OL4010O00010O00010O000010ON2M4L3L4M3M4K4M3M4K4M3M4K4M3M4Kmfn4"}, "image_id": 911, "id": 14988}, {"iscrowd": 0, "category_id": 1, "bbox": [197.0, 300.0, 62.0, 75.0], "area": 2536, "segmentation": {"size": [512, 512], "counts": "UjR31n?2M3UAKh=6VBLh=7VBKh=7UBLh=6VBLh=7VBKh=6VBMg=6WBKh=7UBLh=6VBLi=6UBLh=7UBLh=6WBKh=7UBLh=6WBLh=V101000O010O01000O0O2N2M2O2M3N1N3M01O010O03N1N3M3N1N3N2M2O2M3M2O2M2O2M3M2O2M3N1N3N2M2N3N1NTVn3"}, "image_id": 911, "id": 14989}, {"iscrowd": 0, "category_id": 1, "bbox": [11.0, 305.0, 22.0, 22.0], "area": 274, "segmentation": {"size": [512, 512], "counts": "ni52l?2N3M2N3M2N3N110O010O010O010OO2M2N3M2N3M2NZV_7"}, "image_id": 911, "id": 14990}, {"iscrowd": 0, "category_id": 1, "bbox": [246.0, 324.0, 59.0, 58.0], "area": 1612, "segmentation": {"size": [512, 512], "counts": "X[k31n?2O1N3M2N2O1N3M2N2N2O2M2N2N2N3N1N2N000010O0000010O0000O1N3N1N2O1N2100O2N1O2N100O2N1O2O0O1O2N1O2O0O1N3M2M4M2M4M2N2M4MdUW3"}, "image_id": 911, "id": 14991}, {"iscrowd": 0, "category_id": 1, "bbox": [113.0, 345.0, 
29.0, 28.0], "area": 459, "segmentation": {"size": [512, 512], "counts": "Zkh12l?3M2N3M2N3M2N2N3N110O010O010O010O010O01N1M3N3M2N3M2N3M2NPeh5"}, "image_id": 911, "id": 14992}, {"iscrowd": 0, "category_id": 1, "bbox": [317.0, 349.0, 45.0, 66.0], "area": 1802, "segmentation": {"size": [512, 512], "counts": "`kn41l?3XAMa=5\\BNa=6\\BM`=6]BMa=6eAAd0JnA8S>k0O0O1010O01O01O010O01O01O0N3L3N2M4M2M4M2M3N3L3IRAYOR?d06M3N3L3M3N3L3NidZ2"}, "image_id": 911, "id": 14993}, {"iscrowd": 0, "category_id": 1, "bbox": [4.0, 358.0, 48.0, 63.0], "area": 1582, "segmentation": {"size": [512, 512], "counts": "h\\22k?3N3M2M4M2N2M4M2N3M2M4M2N3L3N2N3L3N3M2N3L3O110O010ON2M4M2N3L3N3M2M3N3M2M4M2N3M2M4M2N2M4M2N3L3NadU7"}, "image_id": 911, "id": 14994}, {"iscrowd": 0, "category_id": 1, "bbox": [59.0, 381.0, 53.0, 71.0], "area": 2013, "segmentation": {"size": [512, 512], "counts": "fmm02l?3L3N3L3N2M4M2M4M2M3N3L3_OmNVBS1g=oNYBQ1d=SO\\Bl0b=VOYBGIT1n=TOWBJKR1n=UOSBMOn0n=TOQB01l0n=ARB>n=BoAb0Q>f001O0M4M2O1010M2N3L3N3L3N2M4M2M4M2M3N3M2M4M2M3N3L3N3L3N2M4M2MlcW6"}, "image_id": 911, "id": 14995}, {"iscrowd": 0, "category_id": 1, "bbox": [404.0, 385.0, 70.0, 89.0], "area": 3381, "segmentation": {"size": [512, 512], "counts": "Y^Z62l?2M4M2\\@H]?`0L3N2M4M2M40O00010O010OO0N2O2M4M2^OgNZB\\1c=gNZB]1c=fNZB\\1c=gNZB\\1d=fNZB]1b=b0M4M2M3N30O01O01O010O0M3N3L3N3L3M3N3L3N0O002O3L3N3O01O01O010O01OcNeAU1\\>hNfAW1[>fNhAW1c>M2M4L3N2ZOPA9T?Dn@:]?M4L3NXcb0"}, "image_id": 911, "id": 14996}, {"iscrowd": 0, "category_id": 1, "bbox": [257.0, 387.0, 34.0, 26.0], "area": 462, "segmentation": {"size": [512, 512], "counts": "`lP43k?2010O01O01O010O01M2N3L3N2M4M201O01O01O010O01O01O01N1N3L3N2M4M2M4MhS^3"}, "image_id": 911, "id": 14997}, {"iscrowd": 0, "category_id": 1, "bbox": [115.0, 398.0, 61.0, 83.0], "area": 2486, "segmentation": {"size": [512, 512], "counts": "cni13k?2M3N3M2M4M2M4M21ON3M2N3L3N2M4M2M4M2M3N3L3N3M2M3N3L3N3L3N2M13M2M3N3L3N3L3N3M2M3N3L3N3L3N2M4M2M4N10001O0M4M2M3N3L3N3M2M3NXcW5"}, "image_id": 911, "id": 14998}, {"iscrowd": 0, "category_id": 1, "bbox": [37.0, 404.0, 28.0, 28.0], "area": 455, "segmentation": {"size": [512, 512], "counts": "Vmb02k?4M2N2N3M2M4M2N30O010O0010O010O010O0001M2M4M2N3M2N3L3NUSo6"}, "image_id": 911, "id": 14999}, {"iscrowd": 0, "category_id": 1, "bbox": [196.0, 410.0, 33.0, 35.0], "area": 596, "segmentation": {"size": [512, 512], "counts": "V]R32m?2O2M2N2N3N1N2N3M2N2O1N3M2N2O2M20N2N3N1N2N3M2O1N2N3M2N2O2M2N2N3NaR]4"}, "image_id": 911, "id": 15000}, {"iscrowd": 0, "category_id": 1, "bbox": [239.0, 411.0, 85.0, 54.0], "area": 2321, "segmentation": {"size": [512, 512], "counts": "dmg31m?3M2M4M2M4M2N2M4M2M4N11O01O010O010O00010O010O01O01O010O010O000SAROi>n0TATOl>Q10O010O00010O010O010O00010O010O00010O010O0N3M2M3010O010O0010O0010O010O00010O010O0010O0N3M2M4M2M3N3M2M4MXbm2"}, "image_id": 911, "id": 15001}, {"iscrowd": 0, "category_id": 1, "bbox": [469.0, 422.0, 43.0, 67.0], "area": 1476, "segmentation": {"size": [512, 512], "counts": "fnZ71n?2N2M3N2N2N2N2O1N3O0O1N2XOZOTBg0j=[OVBe0h=]OWBd0g=^OWBd0g=^OWBd0g=^OWBc0h=@UB`0k=BSB>m=DQBSOkAd048Q>LmA4S>NkA2U>l01O01O02N2N2N2aNcAX1_>fNcAX1c>O1O1O2N2N2O1KRAUOP?i0_B"}, "image_id": 911, "id": 15002}, {"iscrowd": 0, "category_id": 1, "bbox": [205.0, 452.0, 47.0, 54.0], "area": 1713, "segmentation": {"size": [512, 512], "counts": "]oV35f?5K6I6K5K5K5K6J5001O0001O0001O000001O01O000001O01O000001O01O000TO`A8`>CfA=Y>^OlAb0U>XOPBh0d>00010O000000M3K6J5K5KUaQ4"}, "image_id": 911, "id": 15003}, {"iscrowd": 0, "category_id": 1, "bbox": [440.0, 480.0, 29.0, 32.0], "area": 570, "segmentation": {"size": [512, 
512], "counts": "a_l63j?3N2M4L3N3j@_Oh>d0VA^Oi>c0UA_Ok>b0RA@o>?QAAo>`0QA_Oo>a0QA_OP?`0QA_Oo>b0PA^OQ?a0PA^OP?c0o@]OQ?c0PA\\OQ?c0o@]OQ?d0o@[OR?d0m@]OS?b0l@@T?=m@ET?7o@IQ?5RAJo>2TANl>0WAOi>NZA2[?O0OUPe0"}, "image_id": 911, "id": 15004}, {"iscrowd": 0, "category_id": 1, "bbox": [428.0, 484.0, 13.0, 13.0], "area": 103, "segmentation": {"size": [512, 512], "counts": "Z_f63k?2M4N10010O010O010ON3M2NjPS1"}, "image_id": 911, "id": 15005}, {"iscrowd": 0, "category_id": 1, "bbox": [497.0, 493.0, 15.0, 19.0], "area": 262, "segmentation": {"size": [512, 512], "counts": "^oh72n?`0@000000000O100000000000000"}, "image_id": 911, "id": 15006}, {"iscrowd": 0, "category_id": 1, "bbox": [385.0, 494.0, 38.0, 18.0], "area": 363, "segmentation": {"size": [512, 512], "counts": "ooP61m?2O1O1001O00O1N2O1N2N2O1N2N2O11O1O001O001O1O001O001O1O001O001O1O001O001N1NWP\\1"}, "image_id": 911, "id": 15007}, {"iscrowd": 0, "category_id": 1, "bbox": [472.0, 511.0, 20.0, 1.0], "area": 20, "segmentation": {"size": [512, 512], "counts": "o_\\71o?0000000000000000000000000000000000000Q`9"}, "image_id": 911, "id": 15008}, {"iscrowd": 0, "category_id": 1, "bbox": [506.0, 245.0, 6.0, 22.0], "area": 77, "segmentation": {"size": [512, 512], "counts": "WXm73i?4L4M4K4M3[H"}, "image_id": 912, "id": 15009}, {"iscrowd": 0, "category_id": 1, "bbox": [409.0, 307.0, 68.0, 74.0], "area": 2727, "segmentation": {"size": [512, 512], "counts": "[k\\66i?3L3N1O2N2M3N2N1O2N2M3N1O001N3N2N2N1OO0100000000O1000000O10O100001N3N2N2N1O2N20000N2N2N2N2M3N2N2N1O0002N2N2VORB_OP>?RB_OP>?RB_Oo=`0SB^Oo=`0SB^Oo=`0SB]OP>`0SB^Oo=3dAO?Lo=2eA0>Lo=2eA0>Lo=2eA0>Ln=3fAO=Mo=2fAOT?O^Va0"}, "image_id": 912, "id": 15010}, {"iscrowd": 0, "category_id": 1, "bbox": [479.0, 316.0, 33.0, 81.0], "area": 2026, "segmentation": {"size": [512, 512], "counts": "Zk_71l?4L3[@Kh>OgA?V>DfAa0V>BgAa06ZO]=8YBa07ZO]=8YBb06ZO]=7ZBb05[O^=Z1_BiNa=l10O00010O00010O00010O00010O00O0N03M3M3M4L3M3MUF"}, "image_id": 912, "id": 15011}, {"iscrowd": 0, "category_id": 1, "bbox": [108.0, 351.0, 86.0, 92.0], "area": 3281, "segmentation": {"size": [512, 512], "counts": "V]f11n?2N2N2N2N2N2N2N2N2N2N2N2N2N2IUOVAm0h>UOVAm0h>7N2N2N2M2O00000000000000000001O00000000000000000000000002D`NUBb1i=`NUBb1i=`NUB`1k=bNSB^1m=dNRB[1n=;00000000001O2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2O1N2N2Ih@CZ?;h@CZ?;7N2N2N2N2NVdn4"}, "image_id": 912, "id": 15012}, {"iscrowd": 0, "category_id": 1, "bbox": [203.0, 416.0, 27.0, 25.0], "area": 338, "segmentation": {"size": [512, 512], "counts": "UmU31n?2U@Ne?4Y@Ne?8N1O200O1N200O1000O100000O1000O1N2N2N1O2N2N2N2Mcb\\4"}, "image_id": 912, "id": 15013}, {"iscrowd": 0, "category_id": 1, "bbox": [473.0, 423.0, 39.0, 57.0], "area": 1224, "segmentation": {"size": [512, 512], "counts": "om\\72m?2M3N2N1O2N2M3N2N2N2N1O2O100O1O1N1N3N00002N2M3N2N1010N20[AiN`>[101000000000O010000^B"}, "image_id": 912, "id": 15014}, {"iscrowd": 0, "category_id": 1, "bbox": [302.0, 443.0, 66.0, 69.0], "area": 2583, "segmentation": {"size": [512, 512], "counts": "n_g42m?1O1N2O1O1O1N2O1O1ADYA=f>EWA=h>EVAAlA6I:Z>BkA6H:\\>BiA7I8]>ChAl0W>VOgAk0X>WOeAk0Z>>O100000000O1O1N2O1N2N2O1N2O1N21O1O1O1O001N2O10O01O1O1O001O1XNlAa1Z>O2N2lN`Ad0c>ZOdA?^>^OeA`0\\>_OfA?\\>_OfA?\\>^OfA`0o>N2N2N2N2M3N1O2Nf`W2"}, "image_id": 912, "id": 15015}, {"iscrowd": 0, "category_id": 1, "bbox": [86.0, 465.0, 52.0, 47.0], "area": 1131, "segmentation": {"size": [512, 512], "counts": "Q_[11n?2N2N2N2N2N2N2N2N2N2N2N1O2N2N2N20000000000000000000000000000000O1O2N1O1O1O1O1O1O1O1O1N2N2N2N2N2N2N2N2N\\`j5"}, "image_id": 912, "id": 15016}, {"iscrowd": 0, "category_id": 1, "bbox": 
[362.0, 489.0, 45.0, 23.0], "area": 599, "segmentation": {"size": [512, 512], "counts": "o_e51n?1N2O1O1O1O1O1O1N2O1O1O1O1O11O1O1O001O1O1OO1N2O1O1O1O1O1N2O1O1001O1O001O1O1Ld@B]?;e@E\\?97N1O2N2N^Pd1"}, "image_id": 912, "id": 15017}, {"iscrowd": 0, "category_id": 1, "bbox": [134.0, 506.0, 11.0, 6.0], "area": 36, "segmentation": {"size": [512, 512], "counts": "o_S21n?1O1O1O1O11O1O1O1O1OQPg5"}, "image_id": 912, "id": 15018}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 25.0, 35.0], "area": 718, "segmentation": {"size": [512, 512], "counts": "0S1m>0000000000000000000000000000B>000000000014K000JQ`c7"}, "image_id": 913, "id": 15019}, {"iscrowd": 0, "category_id": 1, "bbox": [110.0, 0.0, 49.0, 51.0], "area": 1497, "segmentation": {"size": [512, 512], "counts": "mPg11m?3L3N3L3N2M4M2M4M2M3N3M201[AmN[>S1cAoN]>Q1`ASO_>W10001O001O00001O00001O001OM3N2N2M3N2M3N2N2M3N2M3N2M3N2N2M3N2M3N2NR``5"}, "image_id": 913, "id": 15020}, {"iscrowd": 0, "category_id": 1, "bbox": [264.0, 0.0, 27.0, 14.0], "area": 263, "segmentation": {"size": [512, 512], "counts": "TPT42k?4N10001O001O00001O001O00001O00001O001O0000O1N2M3N2MS`^3"}, "image_id": 913, "id": 15021}, {"iscrowd": 0, "category_id": 1, "bbox": [288.0, 0.0, 29.0, 25.0], "area": 470, "segmentation": {"size": [512, 512], "counts": "_P`42l?2N2M4M2N3L3O2O00001O001O001O00001O0010N1N2N3L3N3M2M3N3Mn_Q3"}, "image_id": 913, "id": 15022}, {"iscrowd": 0, "category_id": 1, "bbox": [329.0, 0.0, 33.0, 12.0], "area": 215, "segmentation": {"size": [512, 512], "counts": "P`T51o?001O00001O001O00001O001O001O00001O001O00001O001O00001O00N2N2M3NRP[2"}, "image_id": 913, "id": 15023}, {"iscrowd": 0, "category_id": 1, "bbox": [382.0, 0.0, 17.0, 8.0], "area": 82, "segmentation": {"size": [512, 512], "counts": "PPo52n?1O00001O001O001O00001O001OO1M3NR`h1"}, "image_id": 913, "id": 15024}, {"iscrowd": 0, "category_id": 1, "bbox": [432.0, 0.0, 30.0, 40.0], "area": 646, "segmentation": {"size": [512, 512], "counts": "bPh63j?3N2M4M2M4M2M301O001O0RAYOc>g0[A\\Od>e0YA]Og>c0WA_Oi>m0OEWA_Oi>a0WA@h>a0XA^Oh>`0ZAAe>=^ABb>;aAE_>9dAD^>9eAE]>8gAE[>9g0M3N2M3NRPi0"}, "image_id": 913, "id": 15025}, {"iscrowd": 0, "category_id": 1, "bbox": [373.0, 7.0, 15.0, 18.0], "area": 164, "segmentation": {"size": [512, 512], "counts": "c`j52l?3L3N2M4M201O01O010M2M3N3L3Nhom1"}, "image_id": 913, "id": 15026}, {"iscrowd": 0, "category_id": 1, "bbox": [172.0, 9.0, 45.0, 59.0], "area": 1731, "segmentation": {"size": [512, 512], "counts": "eQf23o>N]A203_>O^A013_>O]A203`>M^A203_>=^AF`>o0M210O00010O010O00010O010O010O00010O010O00010ON3N100H_AROa>j0cAUO]>i0eAXO[>d0iA[OW>c0kA]OU>`0nA^OU>?nA]OU>a0g0L3N2M4M2M[_c4"}, "image_id": 913, "id": 15027}, {"iscrowd": 0, "category_id": 1, "bbox": [218.0, 17.0, 52.0, 73.0], "area": 2116, "segmentation": {"size": [512, 512], "counts": "^R]31P?1kA2Q>1lA2R>1kA1S>1kA2Q>2kA1S>1kA2Q>1lA2S>0jA2W>MgA6X>KdA8\\>g00O010O00010O01M2N3M2M3N3L3N3M11M3N3M2M4M2M3N3M2M4M2N3L3N2M4M2N3L3N3M2M3N3L3N3M2MXoh3"}, "image_id": 913, "id": 15028}, {"iscrowd": 0, "category_id": 1, "bbox": [408.0, 18.0, 14.0, 13.0], "area": 109, "segmentation": {"size": [512, 512], "counts": "iP\\63k?2N2M40O010O010O00001L3N3M[o\\1"}, "image_id": 913, "id": 15029}, {"iscrowd": 0, "category_id": 1, "bbox": [484.0, 23.0, 28.0, 39.0], "area": 765, "segmentation": {"size": [512, 512], "counts": "_Qb7132c?7N2M4M2M4M2N2M4M201O0N210O010O00010O010O00N3M2M4M2N2MVO"}, "image_id": 913, "id": 15030}, {"iscrowd": 0, "category_id": 1, "bbox": [32.0, 27.0, 35.0, 48.0], "area": 946, "segmentation": {"size": [512, 512], "counts": 
"gQ`01m?3M2_@KU?8h@KU?7i@LS?8j@JT?c0M2N3M2N3M2N3L301O010O01L3N3M2N2N3M2N3M2N3M2N3M2N3L3N3M2Nn^n6"}, "image_id": 913, "id": 15031}, {"iscrowd": 0, "category_id": 1, "bbox": [80.0, 27.0, 27.0, 28.0], "area": 453, "segmentation": {"size": [512, 512], "counts": "^QX11m?3L3N3L3N2M4M2N30O0010O0010O0010O0010O0N3L3N2M4M2M3NP_Z6"}, "image_id": 913, "id": 15032}, {"iscrowd": 0, "category_id": 1, "bbox": [337.0, 29.0, 25.0, 31.0], "area": 442, "segmentation": {"size": [512, 512], "counts": "caX53k?2N3M2M4M2N3M2M3N3M210O0010O0N3M2N3M2M4M2N2N3L3NonZ2"}, "image_id": 913, "id": 15033}, {"iscrowd": 0, "category_id": 1, "bbox": [424.0, 30.0, 12.0, 14.0], "area": 110, "segmentation": {"size": [512, 512], "counts": "WQd63k?2M4L3O110O0001O0M4L3NQoU1"}, "image_id": 913, "id": 15034}, {"iscrowd": 0, "category_id": 1, "bbox": [319.0, 34.0, 19.0, 29.0], "area": 278, "segmentation": {"size": [512, 512], "counts": "iao43j?3N2M4M2N3L3N2M4M10N4M2N3L3N3L3N2N3Lmnf2"}, "image_id": 913, "id": 15035}, {"iscrowd": 0, "category_id": 1, "bbox": [383.0, 41.0, 30.0, 41.0], "area": 605, "segmentation": {"size": [512, 512], "counts": "[bo51m?2N3L3N3M2M3N3M2M4M2N3L3N2N3M11M4M2N3L3N2N3L3N3M2M4M2N2M4Mc^a1"}, "image_id": 913, "id": 15036}, {"iscrowd": 0, "category_id": 1, "bbox": [275.0, 48.0, 38.0, 51.0], "area": 1540, "segmentation": {"size": [512, 512], "counts": "XbY4f0Z?000000000000000000000XOh000000000000000000005K00000000000000000000000000000`^S3"}, "image_id": 913, "id": 15037}, {"iscrowd": 0, "category_id": 1, "bbox": [441.0, 57.0, 29.0, 27.0], "area": 461, "segmentation": {"size": [512, 512], "counts": "Zbl62l?2M4M2M3N3L3N3O01O010O01O01O010O01O01O010O0M3N3L3M4M2M3NQnd0"}, "image_id": 913, "id": 15038}, {"iscrowd": 0, "category_id": 1, "bbox": [70.0, 62.0, 27.0, 23.0], "area": 358, "segmentation": {"size": [512, 512], "counts": "ZRS12k?3N3L3N3M20010O010O0010O0010O010O00010O010OO1M4M2M4Mk]_6"}, "image_id": 913, "id": 15039}, {"iscrowd": 0, "category_id": 1, "bbox": [321.0, 72.0, 53.0, 59.0], "area": 1700, "segmentation": {"size": [512, 512], "counts": "dcP51l?4M2M4M2N2M4O010O0010O0001M2^O]OdAg0Y>\\OdAf0Y>]OeAe0Y>^OcAf0Y>]OeAe0Z>b0O01O010O01O01O010O01O01O010O0M3N3L3N3M2M3N3L3N3L3N3L3N2N3L3N3L3N]mT2"}, "image_id": 913, "id": 15040}, {"iscrowd": 0, "category_id": 1, "bbox": [488.0, 80.0, 24.0, 35.0], "area": 538, "segmentation": {"size": [512, 512], "counts": "YSd71l?3N2M4M2N3L3N3L3N201O010M2N201O010OTOQAg0U?O010O01O0XM"}, "image_id": 913, "id": 15041}, {"iscrowd": 0, "category_id": 1, "bbox": [37.0, 97.0, 58.0, 52.0], "area": 1927, "segmentation": {"size": [512, 512], "counts": "Tdb03k?2M4L3N3L3N2M4L3N2M4L3N3O00010O01O01O010O01O01O010O00001M2M3M4N1010O0010O0001O0M3N3L3M4M2M3M4O01O010O01O0Dl@LT?1o@MT?Oo@NT?0o@MT?OU]`6"}, "image_id": 913, "id": 15042}, {"iscrowd": 0, "category_id": 1, "bbox": [373.0, 98.0, 48.0, 55.0], "area": 1654, "segmentation": {"size": [512, 512], "counts": "Pdj51m?2M4_@LQ?7m@KQ?8k@KR?8l@KQ?c0M4M2M3N3L3N3O000010O01O01O010O01O01O010O00010O010O0N2M4M2M4M2M3N3L3N3L3N2M4L3N3L3Nc\\]1"}, "image_id": 913, "id": 15043}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 105.0, 3.0, 7.0], "area": 13, "segmentation": {"size": [512, 512], "counts": "Y37i?N3Li\\n7"}, "image_id": 913, "id": 15044}, {"iscrowd": 0, "category_id": 1, "bbox": [142.0, 109.0, 59.0, 52.0], "area": 1656, "segmentation": {"size": [512, 512], "counts": "\\TW23k?3L3N3L3N2M4M2M4M2M3N3L3O2O00010O010O00010O010O0001oNZAg0e>VO^Aj0c>SO_An0j>O0010O0010O0010O0010O0010O0O2M2M31O0M4M2M3N3L3N3L3N2M4M2M4MP\\k4"}, "image_id": 913, "id": 15045}, {"iscrowd": 0, 
"category_id": 1, "bbox": [424.0, 113.0, 48.0, 54.0], "area": 1347, "segmentation": {"size": [512, 512], "counts": "iTd62k?4L3N3L3N2N3O01O01O010O000N3L3N3L3N2M4M2M4M200010O010O0001NO0O011N4M2M3N3L3M3N3L3N3L3N2M4M2M4M[lc0"}, "image_id": 913, "id": 15046}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 133.0, 16.0, 26.0], "area": 298, "segmentation": {"size": [512, 512], "counts": "U4g0Y?10O00010O01O01O0O2L3M3N3L3N3Lhkg7"}, "image_id": 913, "id": 15047}, {"iscrowd": 0, "category_id": 1, "bbox": [106.0, 133.0, 33.0, 44.0], "area": 815, "segmentation": {"size": [512, 512], "counts": "WUe14j?2M4M2M3N3L3M4M2M3N3L3N3L3O110O0001O0M4M2M3N3L3N3L3N2M4M2M4M2M3Nf[j5"}, "image_id": 913, "id": 15048}, {"iscrowd": 0, "category_id": 1, "bbox": [468.0, 135.0, 44.0, 59.0], "area": 1606, "segmentation": {"size": [512, 512], "counts": "bUZ72l?3L3N2M4M2N3M21O01O010A]O]Ae0a>^O[Af0b>\\O\\Af0a>]O]Ae0a>?M2M4N110O01O010O01O01O010O01O010OO2L3N3L3N2N3L3N3M2M4M`K"}, "image_id": 913, "id": 15049}, {"iscrowd": 0, "category_id": 1, "bbox": [85.0, 141.0, 24.0, 26.0], "area": 391, "segmentation": {"size": [512, 512], "counts": "odZ13k?2M3M4L3M3N3M2010O00010O00010O000N3L3M4L3N2M4L`[Y6"}, "image_id": 913, "id": 15050}, {"iscrowd": 0, "category_id": 1, "bbox": [82.0, 168.0, 31.0, 30.0], "area": 552, "segmentation": {"size": [512, 512], "counts": "mUY11m?2M4M2M3N3L3N3L3O1010O0010O0010O0010O0010O001L3N2M4L3N3L3N2McZW6"}, "image_id": 913, "id": 15051}, {"iscrowd": 0, "category_id": 1, "bbox": [113.0, 178.0, 16.0, 13.0], "area": 116, "segmentation": {"size": [512, 512], "counts": "heh12l?2M3O20O01O01O010O00010O01OM4M[Zo5"}, "image_id": 913, "id": 15052}, {"iscrowd": 0, "category_id": 1, "bbox": [182.0, 184.0, 56.0, 42.0], "area": 1348, "segmentation": {"size": [512, 512], "counts": "XVk21o?3M2M4M3M3L3N3M3L4M0O10O10O10O10O10O10O010O10O10O10O10O10O10O010O10O10O10O10O10O10O010O10O103M3L3N3M3L4M2M4M3M_iX4"}, "image_id": 913, "id": 15053}, {"iscrowd": 0, "category_id": 1, "bbox": [151.0, 187.0, 26.0, 27.0], "area": 461, "segmentation": {"size": [512, 512], "counts": "Qf[22n?3L4M3M3L4M3M1N1000O01000O01000O01000O4M3M3L4M3M3LdYW5"}, "image_id": 913, "id": 15054}, {"iscrowd": 0, "category_id": 1, "bbox": [7.0, 190.0, 63.0, 61.0], "area": 2055, "segmentation": {"size": [512, 512], "counts": "Vg32l?2M3N3L3N3L3N2M4L3N3L30001M2N3L3N2M40O01O01O010O01O01O010O00010O01O01jNcAg0]>WOeAj0[>ROiAm0W>POlAQ1a>01O010O01O01O010O01O01O0O2M2M3N3L3N3L3N2M4M2M3MVil6"}, "image_id": 913, "id": 15055}, {"iscrowd": 0, "category_id": 1, "bbox": [264.0, 196.0, 56.0, 63.0], "area": 1700, "segmentation": {"size": [512, 512], "counts": "dWT41m?3L3N2M4M2N3L301O00010O010O010M2M3N3L3N3M2M3010O01N1M4M2M3N3M2M4M12M2M3N3M2M4M2N3L3N2M4M2N3L3N2M4M2N3L3N3M2M3Neio2"}, "image_id": 913, "id": 15056}, {"iscrowd": 0, "category_id": 1, "bbox": [127.0, 205.0, 25.0, 20.0], "area": 308, "segmentation": {"size": [512, 512], "counts": "ffo14i?3N2M4N100010O010O00010O01O01O010O01O01O0N3L3N2M_ic5"}, "image_id": 913, "id": 15057}, {"iscrowd": 0, "category_id": 1, "bbox": [80.0, 219.0, 14.0, 18.0], "area": 147, "segmentation": {"size": [512, 512], "counts": "XWX12k?3N2M4M2N3N1010O0M4M2M4M2NTi`6"}, "image_id": 913, "id": 15058}, {"iscrowd": 0, "category_id": 1, "bbox": [330.0, 228.0, 53.0, 65.0], "area": 1757, "segmentation": {"size": [512, 512], "counts": "YXU51m?2M4L3N2M4HBj@`0T?8M20001N110O000O2O000010O01O01OM4L3N3L3M3010O01lA`Nf=`1WBcNj=\\1TBgNk=Z1QBiNP>V1nAmNQ>a11L3N2M4L3N3L3N2M4M2M4L3Go@AS?7`AL^>6`AM_>4^ANc>1ZA2f>OWA4i>KTA8l>?O01O001M2M3N3L3N3L3N3M2M3N3L3NcXd7"}, "image_id": 913, "id": 
15060}, {"iscrowd": 0, "category_id": 1, "bbox": [378.0, 248.0, 50.0, 53.0], "area": 1418, "segmentation": {"size": [512, 512], "counts": "mXm53k?2N3L3N3L3O101O010O0N3M2M3N3M201O01O01O001L3N2M4M2N3L310O01O01OO2M2N3L3N2M4M2M4M2N3L3N2M4M2N3L3N2MRhY1"}, "image_id": 913, "id": 15061}, {"iscrowd": 0, "category_id": 1, "bbox": [139.0, 260.0, 72.0, 59.0], "area": 2471, "segmentation": {"size": [512, 512], "counts": "khU24j?2N3\\@JY?9e@IY?a0L3n@ZOh>h0UA[Oi>P1L3N2N11N3M200010O01O010O01O010O01O01O010O010O01O01O010O0O1N1N4M2N210O010O00010O010O01VOaA2^>LeA3[>JhA6Y>GjA9U>DnAAoA`0P>^OSBa0n=[OTBf0l=XOVBi0a>O010O010L3N2N1N4M2M3N3M2MlVf4"}, "image_id": 913, "id": 15062}, {"iscrowd": 0, "category_id": 1, "bbox": [285.0, 264.0, 19.0, 25.0], "area": 273, "segmentation": {"size": [512, 512], "counts": "ih^44j?2M3N3M2M4M2N3N11O010N1M4M2M3N3M2M4MegW3"}, "image_id": 913, "id": 15063}, {"iscrowd": 0, "category_id": 1, "bbox": [264.0, 270.0, 14.0, 13.0], "area": 109, "segmentation": {"size": [512, 512], "counts": "dXT42l?3M2N3O0010O010O010N1N3M2N`gd3"}, "image_id": 913, "id": 15064}, {"iscrowd": 0, "category_id": 1, "bbox": [71.0, 271.0, 36.0, 39.0], "area": 839, "segmentation": {"size": [512, 512], "counts": "[iS11l?4M2M4L3N2M4M2M3M4M2N30O00010O01O01O010O00010O010N1M3M4M2M3N3L3M4M2M3NZWZ6"}, "image_id": 913, "id": 15065}, {"iscrowd": 0, "category_id": 1, "bbox": [298.0, 273.0, 36.0, 30.0], "area": 611, "segmentation": {"size": [512, 512], "counts": "SYe41l?3N3L3M4M2M3N30O01O01O010O01O01O010O00010O010O00010O010O0M3N3L3N3L3N2MWgh2"}, "image_id": 913, "id": 15066}, {"iscrowd": 0, "category_id": 1, "bbox": [475.0, 281.0, 37.0, 52.0], "area": 1068, "segmentation": {"size": [512, 512], "counts": "ki]72k?4M2N3M2M301O010O010O001N1N2M4M2N3M2M4O010O00010O010O0O2M2N3L3N2N30O010LXG"}, "image_id": 913, "id": 15067}, {"iscrowd": 0, "category_id": 1, "bbox": [420.0, 282.0, 48.0, 52.0], "area": 1445, "segmentation": {"size": [512, 512], "counts": "PZb62l?3L3N2M4M2M4M2M3N3L3N3L3010O0010O001N11O01O010M2N2M4M2N30O001N1M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3NPge0"}, "image_id": 913, "id": 15068}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 298.0, 59.0, 77.0], "area": 2527, "segmentation": {"size": [512, 512], "counts": "j:g0Y?01O010O00010O01O0N2M4M2M3M4M2M4L3N2M00013L3M3N3L3M3N3L3O2O01O01O01O0O1N3L3M4M2M3M4M2M3M4M2M4L3N2M4L3N2M4L3N3L3M3M4M]VR7"}, "image_id": 913, "id": 15069}, {"iscrowd": 0, "category_id": 1, "bbox": [260.0, 308.0, 54.0, 44.0], "area": 1441, "segmentation": {"size": [512, 512], "counts": "^ZR42l?2M4M2M4M2N2M4M2N3L3N2010O01O010O01O010O01O010O01O01L3010O00010O010O010O00010NO0O04M2N3L3N2M4M2N3L3N3L3N2NVfR3"}, "image_id": 913, "id": 15070}, {"iscrowd": 0, "category_id": 1, "bbox": [329.0, 313.0, 21.0, 24.0], "area": 311, "segmentation": {"size": [512, 512], "counts": "YjT52k?4M2M4M2N3L3O101O010O01O01ON3M2N3L3N3L3NTf`2"}, "image_id": 913, "id": 15071}, {"iscrowd": 0, "category_id": 1, "bbox": [351.0, 321.0, 24.0, 25.0], "area": 373, "segmentation": {"size": [512, 512], "counts": "`j_53k?3M2M3N3L3N3N10010O0010O0010O0010N1N3L3N2M4M2MlUT2"}, "image_id": 913, "id": 15072}, {"iscrowd": 0, "category_id": 1, "bbox": [453.0, 332.0, 28.0, 27.0], "area": 451, "segmentation": {"size": [512, 512], "counts": "TkR72l?2K6M2M2N011N3N3M210O01O01O010O01O01O01N1N3L3N2M4M2M4M^U?"}, "image_id": 913, "id": 15073}, {"iscrowd": 0, "category_id": 1, "bbox": [381.0, 340.0, 42.0, 60.0], "area": 1237, "segmentation": {"size": [512, 512], "counts": "Uln54j?2M4M2N3N1O1N3L3JZOPAi0l>7N2N3L3N3L3N3L3N1O0O03N2M4M2M3N3M2M4M210O0O1M4M2M4M2N2M4M2M4M2NVU\\1"}, 
"image_id": 913, "id": 15074}, {"iscrowd": 0, "category_id": 1, "bbox": [62.0, 342.0, 40.0, 55.0], "area": 1410, "segmentation": {"size": [512, 512], "counts": "Q\\o01l?3M4L3M4K4M4M21O01[O]OjAd0V>d0O0O101O001O001O00001O0O2O001OO1N2M3N2M3N2N1N3N2M8I7I8G9Hfd\\6"}, "image_id": 913, "id": 15075}, {"iscrowd": 0, "category_id": 1, "bbox": [210.0, 343.0, 46.0, 69.0], "area": 1905, "segmentation": {"size": [512, 512], "counts": "i[Y31l?4L3N2BIPA;l>HQA;l>HRA:a0@d=o0YBSOd=P1YBSOf=o0VBUOi=k0UBWOk=i0RBZOo=f0mA^OR>X1010O0010O00010O0010O0010L3N210O010OM3ZORBXOQ>d0RBPON0S>m0RBPON0S>n0g0L3M4M2M3M4M2M3M4M2MTeo3"}, "image_id": 913, "id": 15076}, {"iscrowd": 0, "category_id": 1, "bbox": [132.0, 355.0, 48.0, 61.0], "area": 1621, "segmentation": {"size": [512, 512], "counts": "d\\R22k?3N2M4M2M4M2M3N3L3N3L3N2N3L3N3L3N2M4M2N30O00010O010O000N3M2M4M2M4M2N2M4M2M4M2M3N3L3N3L3N2M4M2MedU5"}, "image_id": 913, "id": 15077}, {"iscrowd": 0, "category_id": 1, "bbox": [240.0, 375.0, 40.0, 53.0], "area": 1217, "segmentation": {"size": [512, 512], "counts": "o\\h34j?2M4M2M3N3L3N3L3N2M4M2N3L3N2M4M2O101O010O01O01OM4M2M3M4M2M4M2M3N3L3M4M2M3N3L3MTdc3"}, "image_id": 913, "id": 15078}, {"iscrowd": 0, "category_id": 1, "bbox": [489.0, 386.0, 23.0, 20.0], "area": 277, "segmentation": {"size": [512, 512], "counts": "]ld72l?2N3M2M4M2010O010O0010O0010O010O001O0M3N3M2MjC"}, "image_id": 913, "id": 15079}, {"iscrowd": 0, "category_id": 1, "bbox": [97.0, 394.0, 29.0, 44.0], "area": 787, "segmentation": {"size": [512, 512], "counts": "\\m`13j?3M4L3EDo@?n>Do@`0m>CPA`0m>;M4M21O01O01O01O01O01O01O0YOYA6f>G]A9d>C`A<`>AcA=_>@dA=R?L3M3M4LPcP6"}, "image_id": 913, "id": 15080}, {"iscrowd": 0, "category_id": 1, "bbox": [1.0, 400.0, 28.0, 23.0], "area": 324, "segmentation": {"size": [512, 512], "counts": "Qm01l?3M4O000KIb@7Z?Mb@6[?910O00010O00010O00M4L3M3M4O00010O00010M2M\\Sa7"}, "image_id": 913, "id": 15081}, {"iscrowd": 0, "category_id": 1, "bbox": [182.0, 408.0, 48.0, 66.0], "area": 1873, "segmentation": {"size": [512, 512], "counts": "k]k21l?3N3L3CGQA;m>GPA=l>GQA;m>GPA=l>=cAPOh=R1VBQOg=R1UBQOl=n0RBTOn=m0nAWOQ>i0mAYOT>Z1O01O01O010O01O01O010O01ON3L30M4M2M3N3FeAjN^>T1dAjN_>R19N2M4M2M4M2M3N3L3N3L3N2MRc\\4"}, "image_id": 913, "id": 15082}, {"iscrowd": 0, "category_id": 1, "bbox": [272.0, 415.0, 65.0, 62.0], "area": 1979, "segmentation": {"size": [512, 512], "counts": "[^X41m?3L3N2M4M2M3M4M2N3O00010O010O00010O0010O0010O00010O010O00001M2M4L3N2M4M2M4M2M3M4M2O110OO1N3L3N3L3N2M4L3N2M4M2M4M2M3M4M2M4M2M3N3LkRg2"}, "image_id": 913, "id": 15083}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 433.0, 8.0, 20.0], "area": 86, "segmentation": {"size": [512, 512], "counts": "a=d0]?M2N3M2M3N3M2N^bk7"}, "image_id": 913, "id": 15084}, {"iscrowd": 0, "category_id": 1, "bbox": [14.0, 436.0, 63.0, 76.0], "area": 2448, "segmentation": {"size": [512, 512], "counts": "n_72k?3N2N2M3N2M3O10000M3N2N2M3N2M3N2N2M3N2M3N2M3N2N2M3N2M3N2N2M3N2M3N2001O001L3N2M4M2N3L3N3L3N2N3L3N3L3N2M4M2N3L3N3L3N2N3L3N3L3NRRi6"}, "image_id": 913, "id": 15085}, {"iscrowd": 0, "category_id": 1, "bbox": [367.0, 438.0, 109.0, 74.0], "area": 4354, "segmentation": {"size": [512, 512], "counts": "bog51m?2M4M2N3L3N2M4M2M4M2N2M4M2M4N110O00010O010O01O01O010O01O01O010O01O00001O001O001O00001O001O00001O001O001O00001O001O0000N2M3N2N2M3N2M3N2N2M3N2M3N2M3N2N2M3N21O00001O0O2O00010N1M4M2M3N3HkA`NX>]18M2N3L3N2M4M2M4M2N2M4M2M4M2M4M2Noaa0"}, "image_id": 913, "id": 15086}, {"iscrowd": 0, "category_id": 1, "bbox": [104.0, 448.0, 30.0, 35.0], "area": 585, "segmentation": {"size": [512, 512], "counts": 
"j^d13k?2M4L3N3L3N201O01O01O0M4L3N2M4N1001O0M3N3L3M4M2M3M4M2M3N3Lnal5"}, "image_id": 913, "id": 15087}, {"iscrowd": 0, "category_id": 1, "bbox": [156.0, 468.0, 58.0, 44.0], "area": 1367, "segmentation": {"size": [512, 512], "counts": "o_^21l?3N2N2O1O1001O00001O001O00O1M3M3N2M3N2M3N2M3M3N2M3N2M3N200001O00001O0000N21O0N3M2M3N3L3N3O000010O01M2M3N3L3M3N3L3NVad4"}, "image_id": 913, "id": 15088}, {"iscrowd": 0, "category_id": 1, "bbox": [65.0, 472.0, 38.0, 40.0], "area": 1153, "segmentation": {"size": [512, 512], "counts": "_oP12l?2M4b@Km>7QAKl>8QAKm>8PAJn>8PAJm>f0N2M3O11O001O00001O001O001OM3N21O00001O001N1N2010O01DSABn>4QAK3On>3SAJ3On>4QAK[?2]Q\\6"}, "image_id": 913, "id": 15089}, {"iscrowd": 0, "category_id": 1, "bbox": [470.0, 479.0, 29.0, 29.0], "area": 446, "segmentation": {"size": [512, 512], "counts": "[_[71n?3N1N2N3M2N2O2M2N2N3N1N2N0001O010O2N3M2O1N3M2N2O1N3M2N2OcP6"}, "image_id": 913, "id": 15090}, {"iscrowd": 0, "category_id": 1, "bbox": [275.0, 489.0, 61.0, 23.0], "area": 801, "segmentation": {"size": [512, 512], "counts": "noY42l?2M3N2M3M3N2M3N2O1001O00001O001O00001O00001O001O00001O001O00001O00001O001O00001O001O00001O00001O001O00001O001O00001O000OU`g2"}, "image_id": 913, "id": 15091}, {"iscrowd": 0, "category_id": 1, "bbox": [216.0, 498.0, 48.0, 14.0], "area": 414, "segmentation": {"size": [512, 512], "counts": "n_\\32k?3N2N2M3N200001O001O00001O001O001ON200001O001O001O00001O001O00001O001OM3N200001O00001O001O0N2MY`k3"}, "image_id": 913, "id": 15092}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 187.0, 30.0, 67.0], "area": 1242, "segmentation": {"size": [512, 512], "counts": "V6d1[>2N2N2N1O2N1N01000002N2N2M3N2N2N1EcARO_>l0cARO_>l0cARO_>8\\A77^O`>8[A9P?ERA8Q?FQA8P?GRA7P?GRA7]?N2N2M2OYi`7"}, "image_id": 915, "id": 15093}, {"iscrowd": 0, "category_id": 1, "bbox": [20.0, 230.0, 74.0, 63.0], "area": 2237, "segmentation": {"size": [512, 512], "counts": "VX:1n?2N1N3N2N1O2N2N1N3N2N2N2N2N2N2M3N02NSAROk>m0TAUOk>Q100000O01N2N2N2N2N2M20100000000OO2N2M100000O1000O100000O1000O10002N2N2N2N1N3N2N2N2N2N2M2O2N2N2N2N2M3N1O2N2NYh`6"}, "image_id": 915, "id": 15094}, {"iscrowd": 0, "category_id": 1, "bbox": [84.0, 267.0, 62.0, 73.0], "area": 2320, "segmentation": {"size": [512, 512], "counts": "UYZ11n?2o@OP>3nAOo=3PBOn=3oA0o=2oA0o=2oAOP>3`AC94aAB99iAJU>8iAJU>7jAKU>6iAKW>P1000000000ON1000O1000O10000000O01000000001O2M2O2N2N2N2N2N1O2M3N2N2N2N1O2N2M3N2N2N1O2N2N2M3N2N1O2NPgf5"}, "image_id": 915, "id": 15095}, {"iscrowd": 0, "category_id": 1, "bbox": [118.0, 316.0, 77.0, 66.0], "area": 2594, "segmentation": {"size": [512, 512], "counts": "dZk12m?2N1O2M3N2N2N2N1O2M3N2N2N2N2M2WASO^>o0`ARO_>P1_ARO`>o0^ASOb>V100000O10O10000000O10000000O1000O10O1M2O0000000O0100000000O01000000000O3N2N2N2N1N3N2N2N2N2M3N1O2N2N2M3N2N2N2N1O2M3NbUn4"}, "image_id": 915, "id": 15096}, {"iscrowd": 0, "category_id": 1, "bbox": [176.0, 357.0, 68.0, 67.0], "area": 2372, "segmentation": {"size": [512, 512], "counts": "X\\h22m?1N3N2N2N2M3N1O2N2N2M3N1o@XOi>j0UAWOj>k0TAWOi>R1N2N2N1O2M3N2N2N2M2O2N2N2000O100O1N2N2N2N1N3N2N000O10O1000JaAlN_>T1bAjN_>U1dAiN[>X1601O2M3N2N1O2N2M3N2N2N2M3N2N1O2N2M3N2N2N\\dU4"}, "image_id": 915, "id": 15097}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 367.0, 19.0, 36.0], "area": 383, "segmentation": {"size": [512, 512], "counts": "_;R1o>0O10N2N2N2M2O2N2N2M3N1O2M3N2N1O2M3NTTf7"}, "image_id": 915, "id": 15098}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 390.0, 65.0, 74.0], "area": 2615, "segmentation": {"size": [512, 512], "counts": 
"l=>a?1O2N2M3N2N2N1CXO`Ak0T>TOlA3Nj0U>UOkA2Oi0V>WOiA2Og0W>ZOgA20c0Z>f00000O3N2N2N1N3N2N2000M3N1O2N1N01000000O01000001N3N2N2N1N3N2N2N20O10kN\\Am0d>QO^An0c>oN`Ao0j>N2N1N3N2N2N2N1N3N2N2N2M2O2N2NmRo6"}, "image_id": 915, "id": 15099}, {"iscrowd": 0, "category_id": 1, "bbox": [220.0, 395.0, 61.0, 76.0], "area": 2512, "segmentation": {"size": [512, 512], "counts": "Q]^31n?2N2N2N2N1N3_@D[?>c@D[?b0N2^AZOg=h0WBZOg=h0WBZOg=g0XB[Oe=h0YBZOe=h0XBZOg=h0WBZOg=h0WBZOg=h0WBZOg=h0WBZOg=h0WBZOg=g0XB[Oh=_1000O01000000OO0O01002O100O10O01N2N2N2M3N2N2N1FeAnN]>P1eAnN]>P1eAmN^>Q1dAmN^>Q1:ETA_On>?TA^On>a0SA^Oo>`0:M3N2N2N2M2O2N2NkRc3"}, "image_id": 915, "id": 15100}, {"iscrowd": 0, "category_id": 1, "bbox": [42.0, 448.0, 74.0, 64.0], "area": 2687, "segmentation": {"size": [512, 512], "counts": "k^e02l?3N2N2N1HGh@;U?Hh@;V?Gh@;V?9[AXOo=j0oAXOn=j0QBWOn=k0oAXOo=j0oAXOo=j0oAXOo=j0oAXOn=k0PBWOn=j0PBXOP>]1000O001O1O1O1O1ON2O1O1O1O1O02N1O2N2JjA^NY>`15N2N1O2M3N2N2N2N1N3N0000000O01000002N2O1O01N2M3N2N2N1N3N2N2N2N2M2O2N2N2N2MbaU6"}, "image_id": 915, "id": 15101}, {"iscrowd": 0, "category_id": 1, "bbox": [269.0, 462.0, 74.0, 50.0], "area": 2280, "segmentation": {"size": [512, 512], "counts": "YoV41n?2N2N2N2M2O2N2N2N2M3N1O2N20000000OO2QAUOf>m0XATOg>n0VATOi>R1N2O1O1O1O1N2O1O1O1O11O1O1O1O1O001O1O1O1O1O001O1OO1N2O1O1O1O0O0102N2N2N2M3N1O2N2N2M3N2N2N2N2M2O2N2N2N2M3NoPd2"}, "image_id": 915, "id": 15102}, {"iscrowd": 0, "category_id": 1, "bbox": [98.0, 477.0, 64.0, 35.0], "area": 1364, "segmentation": {"size": [512, 512], "counts": "g_a11n?2M3N1O2N2N2M3N1O2N2N1N2O11O1OO1O1O1O1N2O1O1O1O1N2001O1O1O001O1O1O1O001O1O00O11O001O1O1OO1001O1O1O1O001O1O1O1O1O0O2M3N2N2N2N1N\\`^5"}, "image_id": 915, "id": 15103}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 482.0, 6.0, 13.0], "area": 45, "segmentation": {"size": [512, 512], "counts": "R?=d?N2N2M3N2Nj`l7"}, "image_id": 915, "id": 15104}, {"iscrowd": 0, "category_id": 1, "bbox": [6.0, 496.0, 36.0, 16.0], "area": 313, "segmentation": {"size": [512, 512], "counts": "n_31m?2O2N1O1O1N2O1O1O1O1N2001O1O1O001O1O1O1O1O00O1O11O001O1O1O1O001O1O1O1OQ`Z7"}, "image_id": 915, "id": 15105}, {"iscrowd": 0, "category_id": 1, "bbox": [343.0, 502.0, 21.0, 10.0], "area": 114, "segmentation": {"size": [512, 512], "counts": "oo[51n?1O1O1O1N2O1O1O1001O1O001O1O1O1O1O1O001OQ`Y2"}, "image_id": 915, "id": 15106}, {"iscrowd": 0, "category_id": 1, "bbox": [365.0, 502.0, 20.0, 10.0], "area": 107, "segmentation": {"size": [512, 512], "counts": "oof51n?1O1O1N2O1O1O1O11O001O1O1O1O1O1O001O1OQPo1"}, "image_id": 915, "id": 15107}, {"iscrowd": 0, "category_id": 1, "bbox": [340.0, 511.0, 2.0, 1.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "o_Z51o?0Q`d2"}, "image_id": 915, "id": 15108}, {"iscrowd": 0, "category_id": 1, "bbox": [56.0, 0.0, 40.0, 11.0], "area": 269, "segmentation": {"size": [512, 512], "counts": "PPl01o?001O001O001O00001O001O00001O001O001O00001O001O00O1N2O11O001O001O00001OM3N2M3NRP`6"}, "image_id": 916, "id": 15109}, {"iscrowd": 0, "category_id": 1, "bbox": [151.0, 0.0, 26.0, 25.0], "area": 446, "segmentation": {"size": [512, 512], "counts": "_`[24i?3M3L5L3N20001O00010O000010O000010O000010L3M3M4K4Mn_W5"}, "image_id": 916, "id": 15110}, {"iscrowd": 0, "category_id": 1, "bbox": [171.0, 0.0, 12.0, 3.0], "area": 21, "segmentation": {"size": [512, 512], "counts": "P`e21o?000000001O000000001O0P`T5"}, "image_id": 916, "id": 15111}, {"iscrowd": 0, "category_id": 1, "bbox": [197.0, 0.0, 68.0, 27.0], "area": 1170, "segmentation": {"size": [512, 512], "counts": 
"U`R34i?3N20001O00001O00001O00001O00001O00001O00001O0000001O00001O00001O00001O00001O00001O0000001O00001O00001O00001O00001O0000M3M3M3M3L4M3M3MS`k3"}, "image_id": 916, "id": 15112}, {"iscrowd": 0, "category_id": 1, "bbox": [387.0, 0.0, 65.0, 15.0], "area": 623, "segmentation": {"size": [512, 512], "counts": "T`Q62j?4000000000001O00000000000000000000001O000000000000000000001O000000000000000000001O0000000000000[@Ia?;00000001O00000000000000000000KUPn0"}, "image_id": 916, "id": 15113}, {"iscrowd": 0, "category_id": 1, "bbox": [456.0, 0.0, 27.0, 7.0], "area": 114, "segmentation": {"size": [512, 512], "counts": "QPT71n?100000001O0000001O000000001O000000001O0000001O0000LT`>"}, "image_id": 916, "id": 15114}, {"iscrowd": 0, "category_id": 1, "bbox": [33.0, 4.0, 29.0, 32.0], "area": 531, "segmentation": {"size": [512, 512], "counts": "j``02l?3L3N2M4M2M4M2N3L31O010O01O01O010O010N1M3N3L3N3L3N2N3L3NfoP7"}, "image_id": 916, "id": 15115}, {"iscrowd": 0, "category_id": 1, "bbox": [497.0, 17.0, 10.0, 12.0], "area": 79, "segmentation": {"size": [512, 512], "counts": "h`h73k?2M4M20010O00N3M2M`_2"}, "image_id": 916, "id": 15116}, {"iscrowd": 0, "category_id": 1, "bbox": [363.0, 25.0, 67.0, 46.0], "area": 1827, "segmentation": {"size": [512, 512], "counts": "bae52k?3M3M4M2M4L3M3N3M21O01O01O010O01O01O01O01O010O00010O01O01O01O01O010O01O01O01O01O01O0O1M4L3M4M2O1010O00010M2N3L3M3N3L3M3M4M2M3M4M2M4LSoX1"}, "image_id": 916, "id": 15117}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 25.0, 35.0, 28.0], "area": 609, "segmentation": {"size": [512, 512], "counts": "[Qi61l?3M4L3M3N3L3O101O01O01O01O01O01O010O00010O00010O00010O000N3L3M3M4L3MQ_e0"}, "image_id": 916, "id": 15118}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 30.0, 62.0, 50.0], "area": 1675, "segmentation": {"size": [512, 512], "counts": "iao21l?4M2N3L3N3M2N2M4M2N3L3O2O01O010O01O010O01O010O01O010O01O010O01O010O01O010O010O01O01O010O010O01O010ON3L3N3M2M3N3M2M4M2N3L3N_^Q4"}, "image_id": 916, "id": 15119}, {"iscrowd": 0, "category_id": 1, "bbox": [89.0, 36.0, 54.0, 59.0], "area": 1935, "segmentation": {"size": [512, 512], "counts": "la\\12l?2M3N3L3N3L3N2M4[AXOn=j0PBYOn=i0nAZOS>e0kA^OT>b0iAAX>?eAC[>=bAG^>m001O01O010O01M2N2N3O0010O00010O010O0N2N3L3N3L3N3L31O010O01ROUAd0l>YOWAe0k>YOWAe0T?L3N2M4M2M4M2M\\^h5"}, "image_id": 916, "id": 15120}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 39.0, 13.0, 27.0], "area": 227, "segmentation": {"size": [512, 512], "counts": "W1j0W?O010O000N3L3N3L3N2N3L3Nf^i7"}, "image_id": 916, "id": 15121}, {"iscrowd": 0, "category_id": 1, "bbox": [140.0, 55.0, 33.0, 43.0], "area": 779, "segmentation": {"size": [512, 512], "counts": "gRV23k?3L3N2M4M2N3L3N3L3N2M4M2M40O0010O010ON2N3L3N3L3N2N3L3N3L3N2M4M2MT^Y5"}, "image_id": 916, "id": 15122}, {"iscrowd": 0, "category_id": 1, "bbox": [18.0, 58.0, 50.0, 70.0], "area": 1897, "segmentation": {"size": [512, 512], "counts": "dS91m?2M4M2M4M2N2M4M2M4M2M3@QOmAR1P>POmAS1P>QOmAR1P>POnAR1o=QOnAR1P>a0M2O20O010O0010O0010O001L3N2M4M2M4M2N5J4M2M4M2M3N3L3N3L3N2N3L3N3Limm6"}, "image_id": 916, "id": 15123}, {"iscrowd": 0, "category_id": 1, "bbox": [161.0, 64.0, 39.0, 53.0], "area": 1113, "segmentation": {"size": [512, 512], "counts": "[c`21m?3L3N2M4M2M4L3N2M4M2M4M2M3N3L3N3L30001O010OM4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3Lkmk4"}, "image_id": 916, "id": 15124}, {"iscrowd": 0, "category_id": 1, "bbox": [479.0, 67.0, 33.0, 29.0], "area": 795, "segmentation": {"size": [512, 512], "counts": "gb_78^?:00001O00000000000000E;0000000000000001O0000000000000001O000000lM"}, "image_id": 916, "id": 15125}, {"iscrowd": 0, 
"category_id": 1, "bbox": [194.0, 84.0, 46.0, 61.0], "area": 1727, "segmentation": {"size": [512, 512], "counts": "fSQ33j?3N2N3L3N3L3n@]Oe>f0WA^Oe>e0YA]Oe>e0XA_Oe>o0M310O0010O0010O0010O01JcNeA_1Y>5M4N1010O0010O010O00M4^NhAX1Z>fNhAW1c>M2N3L3N3L3_Ol@4V?Jl@4W?Hm@4V?Jl@4b?LdlW4"}, "image_id": 916, "id": 15126}, {"iscrowd": 0, "category_id": 1, "bbox": [72.0, 93.0, 50.0, 57.0], "area": 1574, "segmentation": {"size": [512, 512], "counts": "[TT11m?2N3L3N3L3N2A@[Ad0a>_O]Ac0a>@[Ad0a>_O]Ac0a>_O]Ac0a>`0L3N3O001O01O010O010O00010O0O2L3N2M4M2N3L3N2M4M2M4N11O01O010O0M4M2N2M4M2M4MglR6"}, "image_id": 916, "id": 15127}, {"iscrowd": 0, "category_id": 1, "bbox": [339.0, 94.0, 61.0, 44.0], "area": 1567, "segmentation": {"size": [512, 512], "counts": "fcY51m?3M2M3N3M2M4M2N3L3N2O20O01O010O01O010O01O010O01O010O01O010O01O010O01O010O01O01L3N3M2M4N1000M4M2N3L3N3M2M3N3M2M4M2N3L3N2Nmlg1"}, "image_id": 916, "id": 15128}, {"iscrowd": 0, "category_id": 1, "bbox": [406.0, 97.0, 22.0, 20.0], "area": 258, "segmentation": {"size": [512, 512], "counts": "\\S[62k?4M2N3L3O20O0010O010O00010O010O01M2N2N3L3NjlY1"}, "image_id": 916, "id": 15129}, {"iscrowd": 0, "category_id": 1, "bbox": [125.0, 117.0, 47.0, 61.0], "area": 1728, "segmentation": {"size": [512, 512], "counts": "bdn11m?3L3N2M4g@Dj>>TADj>?RAEj>>TADj>>TADi>l0M201O0O1N3M2eAcNS>^1jAdNV>d10O010O010O00010O010O00001L3N3L3N2M4M2N3L3N3L3N2M4M2N3L3H_@Oc?OWlY5"}, "image_id": 916, "id": 15130}, {"iscrowd": 0, "category_id": 1, "bbox": [227.0, 132.0, 18.0, 20.0], "area": 219, "segmentation": {"size": [512, 512], "counts": "bda32l?2M4M2M4M2N201O010O01O0N2M4M2N3L3Nj[U4"}, "image_id": 916, "id": 15131}, {"iscrowd": 0, "category_id": 1, "bbox": [176.0, 133.0, 17.0, 22.0], "area": 212, "segmentation": {"size": [512, 512], "counts": "fTh21l?4M2N3L3N2N3L301O01M2N3M2M3N3L3Nj[o4"}, "image_id": 916, "id": 15132}, {"iscrowd": 0, "category_id": 1, "bbox": [455.0, 133.0, 57.0, 36.0], "area": 1365, "segmentation": {"size": [512, 512], "counts": "edS71l?3N3M2N3L3N6J210O010O00010M201O010O010O01O010O01O010O2O0O01O001O000001O00M301O00L5O01O0000000000010O000000000001bK"}, "image_id": 916, "id": 15133}, {"iscrowd": 0, "category_id": 1, "bbox": [303.0, 137.0, 53.0, 55.0], "area": 1702, "segmentation": {"size": [512, 512], "counts": "`eg42l?2M4M2M3N3L3N3L3N2M4M2O20O0001M2M4M2M3N30O010O00010O010O00010O010O00010N1N3L3N2M4M2M4M2M3N3L3N3L3N2M4M2M]k]2"}, "image_id": 916, "id": 15134}, {"iscrowd": 0, "category_id": 1, "bbox": [75.0, 151.0, 27.0, 26.0], "area": 410, "segmentation": {"size": [512, 512], "counts": "WeU12l?3M2N3M2N3M2M3O20O010O01O01O010O010O0O1N3M2M4M2N3L3NSk\\6"}, "image_id": 916, "id": 15135}, {"iscrowd": 0, "category_id": 1, "bbox": [177.0, 153.0, 44.0, 52.0], "area": 1390, "segmentation": {"size": [512, 512], "counts": "Qfh21m?2M4BLl@6R?Lk@8R?Jl@8Q?=N3L3N3L3N2M4O0010O010O00010O010O00010O010O0O1M4M2M4M2N3L3N2M4M2M4M2M3N3M2MnZa4"}, "image_id": 916, "id": 15136}, {"iscrowd": 0, "category_id": 1, "bbox": [348.0, 161.0, 25.0, 27.0], "area": 402, "segmentation": {"size": [512, 512], "counts": "dU^51l?3N3M2N3L3N3M2N2O2O010O010O0010O0N3M2N3M2M4M2N2NkZU2"}, "image_id": 916, "id": 15137}, {"iscrowd": 0, "category_id": 1, "bbox": [498.0, 176.0, 14.0, 15.0], "area": 135, "segmentation": {"size": [512, 512], "counts": "jUi71l?3N2M4M21O010O00010O00010O\\J"}, "image_id": 916, "id": 15138}, {"iscrowd": 0, "category_id": 1, "bbox": [290.0, 187.0, 43.0, 59.0], "area": 1514, "segmentation": {"size": [512, 512], "counts": 
"ZWa42n>0mA4P>OmA3P>0nA3o=0mA3P>0nA3o=OnA4o=0nA2R>NkA6U>JhA8X>HeAe01O010O01O01O010O01O01O010O0N3L3N2N3L3N3L3N2M4M2M4M2M3N3L3N3L3NiYi2"}, "image_id": 916, "id": 15139}, {"iscrowd": 0, "category_id": 1, "bbox": [401.0, 194.0, 28.0, 30.0], "area": 483, "segmentation": {"size": [512, 512], "counts": "hfX61l?3N2M4L3N3L3M301O0M310O010O0001YOj@c0Y?01O01O01N1M3N3L3M4M2MfYY1"}, "image_id": 916, "id": 15140}, {"iscrowd": 0, "category_id": 1, "bbox": [338.0, 203.0, 49.0, 57.0], "area": 1520, "segmentation": {"size": [512, 512], "counts": "UWY53m?2^@LS?7j@LT?6j@LS?7j@LQ?Im@<1LP?f0N2M3N1N010O00010GlNeAT1\\>mNcAS1\\>POaAP1`>8O010O010O010O01O01O0101N3N2M3N2M3N1N3N2M3N2M3M3N1N3N2M3N2M3NbXn1"}, "image_id": 916, "id": 15141}, {"iscrowd": 0, "category_id": 1, "bbox": [457.0, 210.0, 29.0, 34.0], "area": 581, "segmentation": {"size": [512, 512], "counts": "\\gT71m?2M3M4L3N2M4L3N2M4M210O00010O00010O0010M2M4L3M3N3L3M3N3LZi<"}, "image_id": 916, "id": 15142}, {"iscrowd": 0, "category_id": 1, "bbox": [389.0, 236.0, 58.0, 57.0], "area": 1707, "segmentation": {"size": [512, 512], "counts": "fhR61l?3N3L3M3N3L3N210O0010O00010O0010O00010O001O0M3N3L3M3N3L3M4M2M3M4M20001O010O0001O0M4L3N2M4L3N2M4L3N3L3M3M4M2M3M4M2M_XP1"}, "image_id": 916, "id": 15143}, {"iscrowd": 0, "category_id": 1, "bbox": [288.0, 242.0, 25.0, 23.0], "area": 358, "segmentation": {"size": [512, 512], "counts": "PX`42l?2M4L3N3L3O101O01O01O01O01O010O01O01OO2L3M3N3L3M[XS3"}, "image_id": 916, "id": 15144}, {"iscrowd": 0, "category_id": 1, "bbox": [454.0, 246.0, 54.0, 65.0], "area": 1626, "segmentation": {"size": [512, 512], "counts": "VYS71n?2M2O2N2N2M3N1O2N2M3N2N1O2M3N2N2N1N3N2M3L2O1N2O1N101O1N2O02O2N1O2M2O1O2N1N3N1O1O2M2O2L3N2M4M2M4M2N2M4M2M4MRh1"}, "image_id": 916, "id": 15145}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 250.0, 1.0, 3.0], "area": 3, "segmentation": {"size": [512, 512], "counts": "j73Sho7"}, "image_id": 916, "id": 15146}, {"iscrowd": 0, "category_id": 1, "bbox": [6.0, 276.0, 49.0, 49.0], "area": 1319, "segmentation": {"size": [512, 512], "counts": "dY34j?2M3N3L3N3L3N2M4M201O0010O0010O0010O0010O0010O0010L3N3L3N2M4M2O20L3N2M4M2M4M2M3N3L3N3L3N2M4M2M4MWWT7"}, "image_id": 916, "id": 15147}, {"iscrowd": 0, "category_id": 1, "bbox": [509.0, 279.0, 3.0, 7.0], "area": 12, "segmentation": {"size": [512, 512], "counts": "lhn71m?3L3YG"}, "image_id": 916, "id": 15148}, {"iscrowd": 0, "category_id": 1, "bbox": [62.0, 292.0, 51.0, 54.0], "area": 1460, "segmentation": {"size": [512, 512], "counts": "YZo04j?2M4M2M3N3N101O01O010O01O01O01N1N3L3N2M4M2M4M2M3N3L3O2O00010O010O00N3L3N3L3N2M4M2M4M2M3N3L3N3L3N2M4MeVW6"}, "image_id": 916, "id": 15149}, {"iscrowd": 0, "category_id": 1, "bbox": [122.0, 309.0, 47.0, 56.0], "area": 1432, "segmentation": {"size": [512, 512], "counts": "P[m11l?3N2M4L3N2M4N110O00010N1M4M2M3M4M2M3N3L3M4M2O110O010O00010O01O01O01M2AbAZO`>d0bAYOb>c0aAZOb>d0aAYOb>c0?M3N3L3N3L3M3NTV[5"}, "image_id": 916, "id": 15150}, {"iscrowd": 0, "category_id": 1, "bbox": [172.0, 327.0, 56.0, 48.0], "area": 1579, "segmentation": {"size": [512, 512], "counts": "jZf22l?2M4M2c@Ho>;n@Io>:n@Ho>f0N3O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O00O2M2M4MO010O4M2M4M2M3N3L3M4M2M3N3L3N3Lfe]4"}, "image_id": 916, "id": 15151}, {"iscrowd": 0, "category_id": 1, "bbox": [92.0, 358.0, 25.0, 25.0], "area": 390, "segmentation": {"size": [512, 512], "counts": "g[^12l?2M3M4M2M4M2N201O01O01O010O01O01O01N1M4M2M3N3L3MgTU6"}, "image_id": 916, "id": 15152}, {"iscrowd": 0, "category_id": 1, "bbox": [234.0, 359.0, 53.0, 42.0], "area": 1230, "segmentation": {"size": [512, 
512], "counts": "m[e33k?2M3N3M2M4O01O01O010O010N1M3N3M2M4M21O01O010O01O01O010O010O01O01O010O01O01O010O010ON3L3N2N3L3N3L3N2N3L3N]T`3"}, "image_id": 916, "id": 15153}, {"iscrowd": 0, "category_id": 1, "bbox": [21.0, 361.0, 24.0, 26.0], "area": 398, "segmentation": {"size": [512, 512], "counts": "ik:4j?2M4L3N2M4M2O2O01O01O010O00010O01M2N2M4L3N3L3NcTY7"}, "image_id": 916, "id": 15154}, {"iscrowd": 0, "category_id": 1, "bbox": [298.0, 366.0, 18.0, 22.0], "area": 230, "segmentation": {"size": [512, 512], "counts": "m[e43j?3N3L3N3M2M3010O010O0N2M4M2N3L3N2MadQ3"}, "image_id": 916, "id": 15155}, {"iscrowd": 0, "category_id": 1, "bbox": [172.0, 373.0, 23.0, 22.0], "area": 317, "segmentation": {"size": [512, 512], "counts": "T\\f21l?3M4L3M4L300010O0010O00010O00010O00M4L3N2M4LXTn4"}, "image_id": 916, "id": 15156}, {"iscrowd": 0, "category_id": 1, "bbox": [290.0, 391.0, 52.0, 42.0], "area": 1317, "segmentation": {"size": [512, 512], "counts": "o\\a42l?2M4M2M4M2N2M4M2M4M20010O010O00010O010O00010O010O0N2N30O01O01O010O01O01O010O01OO2L3N3L3N2M4M2M4M2M3N3L`cd2"}, "image_id": 916, "id": 15157}, {"iscrowd": 0, "category_id": 1, "bbox": [386.0, 393.0, 25.0, 22.0], "area": 313, "segmentation": {"size": [512, 512], "counts": "f\\Q62l?2N2N3M2N3M2O20O010O010O010O0010O01O0N2N3L3N3M2NaSb1"}, "image_id": 916, "id": 15158}, {"iscrowd": 0, "category_id": 1, "bbox": [147.0, 397.0, 42.0, 49.0], "area": 1240, "segmentation": {"size": [512, 512], "counts": "amY23k?3BLk@7S?Kk@7R?Mk@6R?=N3O00010N1M4M2M3N3M210O01O01O010O010O0001N1N3M2M3N3L3N3L3N2M4M2N3L3N3L3NZSQ5"}, "image_id": 916, "id": 15159}, {"iscrowd": 0, "category_id": 1, "bbox": [248.0, 397.0, 26.0, 23.0], "area": 327, "segmentation": {"size": [512, 512], "counts": "k\\l31m?2N2M4M2N3L310O01O01O010O010O010O00010ON3M2M4M2N3M\\cf3"}, "image_id": 916, "id": 15160}, {"iscrowd": 0, "category_id": 1, "bbox": [348.0, 412.0, 52.0, 53.0], "area": 1382, "segmentation": {"size": [512, 512], "counts": "R^^51m?3L3N3L3N2M40O010O00010O010O0001O0N3L3N2M4M2M4M2M3N3L3N30O01O01O010N1N2M4M2N3L3N2M4M2M4M2M3N3L3N3L3N2Mnbg1"}, "image_id": 916, "id": 15161}, {"iscrowd": 0, "category_id": 1, "bbox": [412.0, 420.0, 49.0, 62.0], "area": 1734, "segmentation": {"size": [512, 512], "counts": "]^^62k?4M2M3M4L3N2O20O00010O01N1m@ZOi>i0UAYOi>Q1L3M4L3N2M4L3M301O01O01O01O01O010O0001O0M4L3N2M4@_A\\Od>b0^A[Oe>b0^A\\Oe>`0_A\\Od>a0?N3L3M4L3MfRi0"}, "image_id": 916, "id": 15162}, {"iscrowd": 0, "category_id": 1, "bbox": [178.0, 422.0, 21.0, 30.0], "area": 342, "segmentation": {"size": [512, 512], "counts": "m]i22l?3L3N3L3N2M4M2N3L3O110OM4M2M3N3M2M4M2M3NhRl4"}, "image_id": 916, "id": 15163}, {"iscrowd": 0, "category_id": 1, "bbox": [67.0, 423.0, 48.0, 57.0], "area": 1629, "segmentation": {"size": [512, 512], "counts": "kmQ13j?3N3N101]@G;RAI:0`>`0^AB`>`0^AB_>b0]AB`>`0^AB_>R1O010O00010O010O0010O0010O010O0010O0010OO1N3L3N3L3N2N3L3N3M2M3N3L3N3M2M3N3M\\RV6"}, "image_id": 916, "id": 15164}, {"iscrowd": 0, "category_id": 1, "bbox": [216.0, 425.0, 43.0, 50.0], "area": 1244, "segmentation": {"size": [512, 512], "counts": "^^\\31m?3L3N3L3N3M2M3N3L3N3L3N2N3L3N3N11O010O01O01O010O010O01ON3M2N3L3N2M4M2M4M2M3N3M2M4M2M_Rn3"}, "image_id": 916, "id": 15165}, {"iscrowd": 0, "category_id": 1, "bbox": [192.0, 427.0, 28.0, 37.0], "area": 561, "segmentation": {"size": [512, 512], "counts": "X^P32l?2M4M2M4M2M3N3L3N3L3N2N3O010O01M2M3N3L3N3L3N2M4M2M4L3Naba4"}, "image_id": 916, "id": 15166}, {"iscrowd": 0, "category_id": 1, "bbox": [31.0, 443.0, 34.0, 28.0], "area": 430, "segmentation": {"size": [512, 512], "counts": 
"Vn?1m?3N1N3M3N1N3N11000O010O01000O010O01000O0100O0100O0100N1O2M2N3N2M2NgQo6"}, "image_id": 916, "id": 15167}, {"iscrowd": 0, "category_id": 1, "bbox": [247.0, 453.0, 24.0, 33.0], "area": 423, "segmentation": {"size": [512, 512], "counts": "onk33k?2M4M2M3N3M2M4M2M4M2N21O0N3L3N2N3L3N3L3N2N3L3NhQh3"}, "image_id": 916, "id": 15168}, {"iscrowd": 0, "category_id": 1, "bbox": [463.0, 461.0, 49.0, 43.0], "area": 1377, "segmentation": {"size": [512, 512], "counts": "WoW71m?2M4M2N2M4M2M4M2M3N3M210O01O01O010O01O01O010O000N3O0010O01O01O010O010O00010O01M1N10O4M2N3L3N2M4MaA"}, "image_id": 916, "id": 15169}, {"iscrowd": 0, "category_id": 1, "bbox": [264.0, 462.0, 43.0, 37.0], "area": 936, "segmentation": {"size": [512, 512], "counts": "S_T43j?3N3M2M3N3L3N3M2N2010O010O010O00010O010O010O00010O01M200010O010M2N2N3L3N3L3N3M2M3N3MYQV3"}, "image_id": 916, "id": 15170}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 464.0, 25.0, 48.0], "area": 1130, "segmentation": {"size": [512, 512], "counts": "`>`1`>00000000000000000000000001O000000000000000M3_Oa0@oQc7"}, "image_id": 916, "id": 15171}, {"iscrowd": 0, "category_id": 1, "bbox": [177.0, 464.0, 27.0, 35.0], "area": 607, "segmentation": {"size": [512, 512], "counts": "Zoh23j?3M4L3M3M4L3M3M4M21O01O01O01O01O01O01M1N3L4M3M4L3M3Jbai4"}, "image_id": 916, "id": 15172}, {"iscrowd": 0, "category_id": 1, "bbox": [385.0, 468.0, 32.0, 38.0], "area": 688, "segmentation": {"size": [512, 512], "counts": "`oP62l?2M4M2M4M2N2M4M2N3L3N3M21O01O010O01O010M2M3N3M2M4M2N3L3N2M4M2NVQ_1"}, "image_id": 916, "id": 15173}, {"iscrowd": 0, "category_id": 1, "bbox": [323.0, 469.0, 60.0, 43.0], "area": 1556, "segmentation": {"size": [512, 512], "counts": "koQ53k?3L3N3M2M3N2M3N2M3N2M3N2N2001O001O00001O001O00001O001O00001O001O0000M3N2M3N2M3N02M3N2M4M2N3L3N2N30O010O01O0N2M4M2M4M2NVQP2"}, "image_id": 916, "id": 15174}, {"iscrowd": 0, "category_id": 1, "bbox": [90.0, 481.0, 18.0, 23.0], "area": 227, "segmentation": {"size": [512, 512], "counts": "b_]11m?3L3N3L3N2N3L310O0010M2M4M2N2M4M2Mn`Y6"}, "image_id": 916, "id": 15175}, {"iscrowd": 0, "category_id": 1, "bbox": [122.0, 490.0, 42.0, 22.0], "area": 559, "segmentation": {"size": [512, 512], "counts": "o_m11m?2M3O100O1M300001O00001O00O1N2M3N2M3N21O001O00001O001O00001O001O00001O001O0N2M4M2M``]5"}, "image_id": 916, "id": 15176}, {"iscrowd": 0, "category_id": 1, "bbox": [266.0, 498.0, 26.0, 14.0], "area": 206, "segmentation": {"size": [512, 512], "counts": "n_U42l?2M3N2O11OO1M3O1001O001O00001O0I]@1d?L^@4g?01O001O00001O00R`]3"}, "image_id": 916, "id": 15177}, {"iscrowd": 0, "category_id": 1, "bbox": [383.0, 503.0, 17.0, 9.0], "area": 105, "segmentation": {"size": [512, 512], "counts": "noo52k?3N2N2001O00001O001O001O00001O00T`g1"}, "image_id": 916, "id": 15178}, {"iscrowd": 0, "category_id": 1, "bbox": [440.0, 504.0, 22.0, 8.0], "area": 105, "segmentation": {"size": [512, 512], "counts": "n_l62k?3N2O100001O001O00001O001O00001O001O00001OQ`h0"}, "image_id": 916, "id": 15179}, {"iscrowd": 0, "category_id": 1, "bbox": [295.0, 508.0, 11.0, 4.0], "area": 26, "segmentation": {"size": [512, 512], "counts": "ooc41m?2O1001O001O00001O00Q`V3"}, "image_id": 916, "id": 15180}, {"iscrowd": 0, "category_id": 1, "bbox": [82.0, 93.0, 18.0, 21.0], "area": 250, "segmentation": {"size": [512, 512], "counts": "[SY12j?5K4L4N3O0010O00010O01O01O0Ae@9a?M4L3Mll]6"}, "image_id": 917, "id": 15181}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 123.0, 52.0, 99.0], "area": 2580, "segmentation": {"size": [512, 512], "counts": 
"e4Y2e=2N1O2NJdBkM\\=T271O00000002O1O20O010O01O0O2M2N3M2N3M2N3M2N000000000000002N3M2N3M2N3M2N3M2N3M2Lh@^O[??4N3M2N3M2N3MnkU7"}, "image_id": 917, "id": 15182}, {"iscrowd": 0, "category_id": 1, "bbox": [49.0, 155.0, 34.0, 36.0], "area": 720, "segmentation": {"size": [512, 512], "counts": "ceh01m?3L3N3M2M4M2N2M4O001O010O01O010O01O010O0N2N3M2M40O001[OSA5m>HWA5l>HVA5m>IUA5m>HWA5[?M4McZf6"}, "image_id": 917, "id": 15183}, {"iscrowd": 0, "category_id": 1, "bbox": [6.0, 218.0, 43.0, 33.0], "area": 803, "segmentation": {"size": [512, 512], "counts": "\\W31m?2M4M2N3M2N3M2M4O0010O0010O0010O010O010O010O010O0010O0010O010ON1O01O2N3M2M4M2N3M2N3MoXW7"}, "image_id": 917, "id": 15184}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 265.0, 26.0, 71.0], "area": 1183, "segmentation": {"size": [512, 512], "counts": "Y8U2l=O0O2O010O010O00010O0N3UOPBBR>;PBBR>;PBBR>;PBAS>ROSBj0P>SOSBk0o=SOSBj0P>SOTBi0o=UOSBi0o=TOUBh0c>N2M2N3N2M3M4M2M4M\\of2"}, "image_id": 918, "id": 15191}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 9.0, 3.0, 6.0], "area": 12, "segmentation": {"size": [512, 512], "counts": "96k?N2Nf_n7"}, "image_id": 918, "id": 15192}, {"iscrowd": 0, "category_id": 1, "bbox": [337.0, 14.0, 52.0, 67.0], "area": 2254, "segmentation": {"size": [512, 512], "counts": "gaX59g?8H8H1O0O100000O10000000O10000lN\\OlBc0U=EcB;]=NZB2f=6RBJn=T10000O10O10000000000000O10O100000000FTB`Nl=`19010000000008H8H8G9H8H9GQ^m1"}, "image_id": 918, "id": 15193}, {"iscrowd": 0, "category_id": 1, "bbox": [16.0, 16.0, 24.0, 24.0], "area": 513, "segmentation": {"size": [512, 512], "counts": "RQ85Y?b01O000000000000000000000000000000000001O0000Mbo[7"}, "image_id": 918, "id": 15194}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 18.0, 11.0, 22.0], "area": 136, "segmentation": {"size": [512, 512], "counts": "b0f0[?N2N2O1N2N3M2N2N2N2OS_j7"}, "image_id": 918, "id": 15195}, {"iscrowd": 0, "category_id": 1, "bbox": [124.0, 35.0, 14.0, 13.0], "area": 110, "segmentation": {"size": [512, 512], "counts": "XQn13k?3L3010O00010O01O010ON3M2Mlnj5"}, "image_id": 918, "id": 15196}, {"iscrowd": 0, "category_id": 1, "bbox": [408.0, 39.0, 54.0, 71.0], "area": 1949, "segmentation": {"size": [512, 512], "counts": "[R\\63k?3L3M4L3N2M4L3N2M4L3O20OdAPOh=P1VBSOi=n0SBUOn=j0oAYOQ>g0lA]OS>d0jA^OW>U1010O01O01O01O01O01O010OM4M2M1O01O01O01O03M4M2M4L3M3N3L3M3N3L3M4M2M3M4Menh0"}, "image_id": 918, "id": 15197}, {"iscrowd": 0, "category_id": 1, "bbox": [468.0, 44.0, 44.0, 63.0], "area": 1471, "segmentation": {"size": [512, 512], "counts": "eRZ72m?2N2N2N2N3M2O1N2N2N2N3M2O1N2N2N3M2N0010OO1N2O1O1O1O2M2O1O1O1O1101N1O2N1O2N1O2O0O1O2N1O`N"}, "image_id": 918, "id": 15198}, {"iscrowd": 0, "category_id": 1, "bbox": [36.0, 52.0, 49.0, 59.0], "area": 1405, "segmentation": {"size": [512, 512], "counts": "jRb02m?2O2M2N2N2N2N2N2N3M2N2N2N2M3M3L5L2M2O1O1N2O1N2O11O2N1O1O2N1O2N1O2N1O2N1N2M4M2N3M2M4M2N3L3N2N3M2MU^e6"}, "image_id": 918, "id": 15199}, {"iscrowd": 0, "category_id": 1, "bbox": [108.0, 56.0, 33.0, 32.0], "area": 531, "segmentation": {"size": [512, 512], "counts": "URf11n?2N2O1N3M2N2N2N2N2N2N3M2O1N2N2O11N1N2N2N2N2N2N2O2M2N2N2N2N2N2N2Of]i5"}, "image_id": 918, "id": 15200}, {"iscrowd": 0, "category_id": 1, "bbox": [378.0, 79.0, 28.0, 33.0], "area": 539, "segmentation": {"size": [512, 512], "counts": "WSm52k?3N3L3N3L3N2M4M2M4N10010O0010O0010ON2M4L3N2M4M2M4M2M3N]md1"}, "image_id": 918, "id": 15201}, {"iscrowd": 0, "category_id": 1, "bbox": [318.0, 85.0, 13.0, 12.0], "area": 119, "segmentation": {"size": [512, 512], "counts": "mRo42j?4L5O0000010O000001O0O1K^]j2"}, "image_id": 918, "id": 
15202}, {"iscrowd": 0, "category_id": 1, "bbox": [70.0, 92.0, 69.0, 58.0], "area": 1847, "segmentation": {"size": [512, 512], "counts": "lSS11o?4L4L4K5L3M4L2M1000O10O1000O10O1000O0100000O0100O1O001O1O001O1O1O001O1O1O001O0000000001O000000000000003M2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2Nc\\j5"}, "image_id": 918, "id": 15203}, {"iscrowd": 0, "category_id": 1, "bbox": [502.0, 93.0, 10.0, 15.0], "area": 97, "segmentation": {"size": [512, 512], "counts": "WSk73k?3L3M3N30O000M4M2MTM"}, "image_id": 918, "id": 15204}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 100.0, 68.0, 57.0], "area": 2621, "segmentation": {"size": [512, 512], "counts": "[dm21l?4M2M3N3L3N3L3N2M4L3N2M4ZAlN\\>V1aAmN\\>^1M2O2O00010O01O01O01M2N3O01O0O2M200000N3M21O010O01O01O010M2O1010O01O01O010O0M3N3L3hN]AQ1j>M3M4M2M4M2M3N3L3N2M4M2M`\\P4"}, "image_id": 918, "id": 15205}, {"iscrowd": 0, "category_id": 1, "bbox": [305.0, 115.0, 14.0, 13.0], "area": 112, "segmentation": {"size": [512, 512], "counts": "kch42k?3M3N30O00010O00010O0N2M4L\\\\P3"}, "image_id": 918, "id": 15206}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 117.0, 62.0, 73.0], "area": 2306, "segmentation": {"size": [512, 512], "counts": "eTi62l?2M4M2M3N3L3M4N1l@[On>d0o@_OQ?i00O00010O0010O001_AROo=m0nAWOQ>i0mAYOS>h0iA[OX>d0fA_OY>b0cAA^>Q1M2N2O2O00010O010L3N2M4L3N2M02N3N2M4M2M3M4M2M4M2M3N3L3M4M2M3N3L3M3N3L3N3L3MVl7"}, "image_id": 918, "id": 15207}, {"iscrowd": 0, "category_id": 1, "bbox": [498.0, 121.0, 14.0, 21.0], "area": 206, "segmentation": {"size": [512, 512], "counts": "XTi72k?3N3L3M4L3O101O01O01O01L3MXL"}, "image_id": 918, "id": 15208}, {"iscrowd": 0, "category_id": 1, "bbox": [155.0, 130.0, 27.0, 36.0], "area": 548, "segmentation": {"size": [512, 512], "counts": "nd]22l?2N3L3N2N3L3N2N3L3N3M2M310O0010O001O0N2YOSA9P?DRA:P?CSA;[?L3N3M2N_kT5"}, "image_id": 918, "id": 15209}, {"iscrowd": 0, "category_id": 1, "bbox": [274.0, 138.0, 43.0, 78.0], "area": 2187, "segmentation": {"size": [512, 512], "counts": "dUY4b0^?0000O10000000O10000000000hNGiB9W=9WBGi=b1F9G00O01000000000000000000000000000000000O4Mb0^Ob0^Ob0^Ob0^O[YQ3"}, "image_id": 918, "id": 15210}, {"iscrowd": 0, "category_id": 1, "bbox": [54.0, 171.0, 59.0, 48.0], "area": 1747, "segmentation": {"size": [512, 512], "counts": "XVk02l?2N3L3N2N3L3N3M2M4M2N2O20O01O01O010O010O00010O010OO1N3M2010O01L3N201O010O01O010O01O0N03L3N2N3L3N3M2M4M2N2M4M2N3L3N3M_ZW6"}, "image_id": 918, "id": 15211}, {"iscrowd": 0, "category_id": 1, "bbox": [470.0, 171.0, 42.0, 97.0], "area": 1954, "segmentation": {"size": [512, 512], "counts": "iW[72k?4M2\\@I]?:`@H`?=0O010O00N3M2M4M2M3N3L3N3L3O110O0M4M2M3G_NQBd1k=`NQBc1l=;L3M3M4L3M1O1O001O02VOfBnN\\=o0gBnN]=o0eBoN]=n0gBnN\\=P1fBnN]=n0gBnNo7"}, "image_id": 918, "id": 15212}, {"iscrowd": 0, "category_id": 1, "bbox": [334.0, 174.0, 48.0, 50.0], "area": 1266, "segmentation": {"size": [512, 512], "counts": "YVW51l?3N2M4L3N2M4M210O00010O00010O010O00001M2PAZOc>i0[AZOa>j0[AYOc>S1M4O01O010O01O01O01OO2L3N3L3M3N3L3M3N3L3M4M2M3M4M[jP2"}, "image_id": 918, "id": 15213}, {"iscrowd": 0, "category_id": 1, "bbox": [137.0, 178.0, 61.0, 91.0], "area": 2637, "segmentation": {"size": [512, 512], "counts": "ogT23k?2M4M2M4M2M3N3L3N3L3N2M4M2N3O0N2M4J5N3L3N2M4M2M4M2M3N3L2O0O0101N3N2M4M2ZOYBPOj=n0XBPOk=l0XBQOk=n0WBoNl=P1TBmNo=T1QBhNR>X1nAfNU>Y1910O01ON3L3N3L3N2M4M2M4M2M3N3L3N3LVjl4"}, "image_id": 918, "id": 15214}, {"iscrowd": 0, "category_id": 1, "bbox": [411.0, 178.0, 31.0, 31.0], "area": 507, "segmentation": {"size": [512, 512], "counts": "oe]62m?2N2N2N2N2N2N2N2N2N2N2N2N2O100000O1N2N2N2N2N2N2N2N2N2N2N2N2NoiR1"}, "image_id": 
918, "id": 15215}, {"iscrowd": 0, "category_id": 1, "bbox": [212.0, 205.0, 30.0, 29.0], "area": 465, "segmentation": {"size": [512, 512], "counts": "mVZ31m?2O2N2M3N1N3N2N1N3N2M3O010O1000O010O1N2M2O2N2M2O2M3N2N1N3NXiV4"}, "image_id": 918, "id": 15216}, {"iscrowd": 0, "category_id": 1, "bbox": [26.0, 214.0, 55.0, 61.0], "area": 1983, "segmentation": {"size": [512, 512], "counts": "RX=3k?3M2M3N3M2M4O000M4M2N3L3JQOYAQ1e>QOYAR1d>6M3N3M2N3N10010O0010O0010O0010O0010O0010O0O2M2M3N3L3N3L3M3N3L3N2M4M2010OGd@N\\?0f@N]?Nf@O]?OXYg6"}, "image_id": 918, "id": 15217}, {"iscrowd": 0, "category_id": 1, "bbox": [377.0, 219.0, 70.0, 47.0], "area": 1755, "segmentation": {"size": [512, 512], "counts": "]gl52m?3W@Ma?5]@N_?d0RA^Ok>f0QA]Om>m0L310O0fAmNh=R1VBQOi=P1SBSOm=m0QBUOP>j0mAZOR>g0kA[OU>Y10O0010O0010O0010O0010O0010ON2N0O10O0100O3JeAeN^>X16M4M2M3N3L3N3L3M3N3L3N3L3N2M\\ge5"}, "image_id": 918, "id": 15223}, {"iscrowd": 0, "category_id": 1, "bbox": [427.0, 275.0, 64.0, 111.0], "area": 3581, "segmentation": {"size": [512, 512], "counts": "\\je61?1m>2PA1l>2QA1m>2PA1l>2QA1l>c0N3M020010O00010O010O00N3M2M4VOdNlB^1Q=fNkB]1S=eNjB_1R=eNkB]1S=eNjB^1U=cNiB`1W=`NeBc1[=b0O0TC_MZ<`2dCcM[<^2aCeM_<[2_CgMboNYAn0o>M2M3M4M2M4M2M3N3L3McVU7"}, "image_id": 918, "id": 15228}, {"iscrowd": 0, "category_id": 1, "bbox": [58.0, 308.0, 13.0, 17.0], "area": 131, "segmentation": {"size": [512, 512], "counts": "PZm02k?3N3L3N2N30O01OO2M2M4M2M\\Vl6"}, "image_id": 918, "id": 15229}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 310.0, 51.0, 61.0], "area": 1596, "segmentation": {"size": [512, 512], "counts": "ijm22l?2M4M2N2N3L3N3M2N3O0O1M4M2N3M2M4M2010O00010O`AgNY>Z1dAhN]>]1010O010O01O010O01VOdAN\\>0fA1Z>KiA5W>IlA7T>FnA:R>DQBN2M4M2N3M2M4M2NSeX4"}, "image_id": 918, "id": 15230}, {"iscrowd": 0, "category_id": 1, "bbox": [126.0, 318.0, 32.0, 37.0], "area": 705, "segmentation": {"size": [512, 512], "counts": "gZo13k?2M4M2N3L3N2M4M2M3N3N1010O0010O0010O0010O0001XOQA>n>@TA`0l>]OXA`0U?M3N3L3N2M4Lce`5"}, "image_id": 918, "id": 15231}, {"iscrowd": 0, "category_id": 1, "bbox": [499.0, 329.0, 13.0, 14.0], "area": 106, "segmentation": {"size": [512, 512], "counts": "bji72l?2M4M2N30O010O000M4M2N3MeE"}, "image_id": 918, "id": 15232}, {"iscrowd": 0, "category_id": 1, "bbox": [73.0, 343.0, 45.0, 55.0], "area": 1507, "segmentation": {"size": [512, 512], "counts": "RlT11]?1PA3m>0PA2n>0PA2m>2o@2n>0PA2n>b0L3N3M2M3N3M201O010O010O00010O010O010O0001M2N3L3N3M2M3N3M2N3L3N3M2M3N3M2M4M2NndT6"}, "image_id": 918, "id": 15233}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 346.0, 31.0, 44.0], "area": 992, "segmentation": {"size": [512, 512], "counts": "T;m0S?010O0000001J5L4N201O0001O0001O0001O01O0001K4L4K5L5K4K5L4L4KWU`7"}, "image_id": 918, "id": 15234}, {"iscrowd": 0, "category_id": 1, "bbox": [171.0, 351.0, 32.0, 32.0], "area": 602, "segmentation": {"size": [512, 512], "counts": "cke22l?3M2M3N3M2M4M2M4N10010O0010O010O0010O0010O010M2M3N3M2M4M2N3L3NjTj4"}, "image_id": 918, "id": 15235}, {"iscrowd": 0, "category_id": 1, "bbox": [290.0, 353.0, 50.0, 74.0], "area": 1794, "segmentation": {"size": [512, 512], "counts": "P]a41m?2M4M2N3L3N2N3N1O2M2N3L3N2N3YOnN[BU1c=mNnAN9X1e=QOYBQ1e=ROWBR1i=mNUBU1k=`0O2N1010N1N2N3L3N3M2M4M2N2IaAjN`>U141001N4M2N3L3N201N1N3L3Id@H_?4e@I]?58Nlde2"}, "image_id": 918, "id": 15236}, {"iscrowd": 0, "category_id": 1, "bbox": [487.0, 359.0, 19.0, 16.0], "area": 174, "segmentation": {"size": [512, 512], "counts": "`kc71m?3L3N3M20010O010O010O0010O001M2M4M2Ned2"}, "image_id": 918, "id": 15237}, {"iscrowd": 0, "category_id": 1, "bbox": [117.0, 367.0, 47.0, 
60.0], "area": 1580, "segmentation": {"size": [512, 512], "counts": "Pmj11l?4M2M4L3N2M4M2M4L3N2M4M2M3M4M2M4M2M3N30O010O00010O0O2L3N2M4L3NO0102M3M3N3L3N3L3M3N3L3N2M4L3N_d]5"}, "image_id": 918, "id": 15238}, {"iscrowd": 0, "category_id": 1, "bbox": [500.0, 367.0, 12.0, 26.0], "area": 202, "segmentation": {"size": [512, 512], "counts": "o[j72l?2M4M2[@I^?>N3M20010O0O2M2aD"}, "image_id": 918, "id": 15239}, {"iscrowd": 0, "category_id": 1, "bbox": [24.0, 377.0, 32.0, 32.0], "area": 564, "segmentation": {"size": [512, 512], "counts": "_\\<3k?3M2M3N3M2M4M2O20O0010O010O001M2N2M3OO2M4M2N3M2M3N3M201N1M4M2NTdS7"}, "image_id": 918, "id": 15240}, {"iscrowd": 0, "category_id": 1, "bbox": [149.0, 383.0, 67.0, 74.0], "area": 2130, "segmentation": {"size": [512, 512], "counts": "emZ22m?1N3M3N1N3M2O2M3M2O2M2N3N2M2N3M201000O010O01000O01O0N3M3N1N3M00010O00010O000010O00010O001O3N1N3M2O2M3M2O2M2N3N2M2N3N1N3M3N1N3M2N3N1Ngcc4"}, "image_id": 918, "id": 15241}, {"iscrowd": 0, "category_id": 1, "bbox": [326.0, 396.0, 31.0, 38.0], "area": 687, "segmentation": {"size": [512, 512], "counts": "X]S52k?4L3N2M4M2M4M2M3N3L3N210O010O00010O010ON2M4M2M4M2M3N3L3N3L3N^S]2"}, "image_id": 918, "id": 15242}, {"iscrowd": 0, "category_id": 1, "bbox": [457.0, 407.0, 55.0, 75.0], "area": 1902, "segmentation": {"size": [512, 512], "counts": "amT71l?4M2N2M4M2N3L3N3L3N2N30O010O010O00010O010O0010O0010O010O00010O010O010O0001fAROa=m0\\BWOc=j0ZBXOg=g0WB\\Oh=d0UB_Ol=a0QBAo=?oADP>=lAFU>9iAJV>P110O00010O010O01OfB"}, "image_id": 918, "id": 15243}, {"iscrowd": 0, "category_id": 1, "bbox": [69.0, 408.0, 24.0, 20.0], "area": 272, "segmentation": {"size": [512, 512], "counts": "RmR13j?3N2N3L310O010O00010O010O0010O0010O01N1M3N3M2MSSa6"}, "image_id": 918, "id": 15244}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 413.0, 25.0, 26.0], "area": 383, "segmentation": {"size": [512, 512], "counts": "^]i61m?3L3N3M2M3N3M2O20O010O00010O010O010M2N2N3L3N3M2NnRj0"}, "image_id": 918, "id": 15245}, {"iscrowd": 0, "category_id": 1, "bbox": [248.0, 428.0, 62.0, 75.0], "area": 2341, "segmentation": {"size": [512, 512], "counts": "__l31l?3N3L3N2M4M2M4N10001M2M4M2M3M4M2M3N3O00O2M2M4L3O11O0M3N3L3N3L3M3N3L3N2M4M2N30O00010M2N3L3N2M4M2M4CeAQO]>l0fAQO]>m0=L3M4M2M3N3L3N3L3N2M^bT3"}, "image_id": 918, "id": 15246}, {"iscrowd": 0, "category_id": 1, "bbox": [369.0, 437.0, 20.0, 24.0], "area": 292, "segmentation": {"size": [512, 512], "counts": "Vnh52l?3L3M3N3L3M4N10010O01O01OO2M2M3M4M2M4LYRm1"}, "image_id": 918, "id": 15247}, {"iscrowd": 0, "category_id": 1, "bbox": [301.0, 456.0, 55.0, 56.0], "area": 1843, "segmentation": {"size": [512, 512], "counts": "gof44j?2M4M2M3N3M210O00010O010O0O1_O]ObAf0[>^ObAd0\\>^OaAe0\\>_OaAc0]>a0N2N2001O001O00001OO1M3M31O00001O001O001O0M3N3L3N3nNYAf0j>WOYAg0i>WOYAf0S?M2N2M4M2M3N3L3NYa]2"}, "image_id": 918, "id": 15248}, {"iscrowd": 0, "category_id": 1, "bbox": [413.0, 456.0, 30.0, 36.0], "area": 558, "segmentation": {"size": [512, 512], "counts": "Vo^63j?3N2M4L3M3M4M2M3M4L3N11M4M2M3M4L3O110O00010O010O0N2M4L3M3MeQR1"}, "image_id": 918, "id": 15249}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 478.0, 36.0, 34.0], "area": 883, "segmentation": {"size": [512, 512], "counts": "a??_?2N2M3N2N2M3N2N2O1001O00001O001O00001O001O00001O001O0M4M2M3N3L3N3L3N2Nj`]7"}, "image_id": 918, "id": 15250}, {"iscrowd": 0, "category_id": 1, "bbox": [356.0, 491.0, 44.0, 21.0], "area": 665, "segmentation": {"size": [512, 512], "counts": "o_b51l?3M3M3M3N2001O00001O00M3N2M3001O001O00001O00O1O11O00001O00001O00001O00001O00001M2M3M4L``g1"}, "image_id": 918, "id": 15251}, 
{"iscrowd": 0, "category_id": 1, "bbox": [457.0, 492.0, 27.0, 20.0], "area": 370, "segmentation": {"size": [512, 512], "counts": "noT71m?2N3L3N2N2M3N2N200001O001O001O00001O001O00001M2M4M2N_`="}, "image_id": 918, "id": 15252}, {"iscrowd": 0, "category_id": 1, "bbox": [55.0, 509.0, 7.0, 3.0], "area": 12, "segmentation": {"size": [512, 512], "counts": "nok02m?11O001O0000Q`P7"}, "image_id": 918, "id": 15253}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 26.0, 35.0], "area": 643, "segmentation": {"size": [512, 512], "counts": "0o0Q?1O00001O001O00001O00M3N200O1N2M3N2M3N2M3N2M3N2M3NRPc7"}, "image_id": 921, "id": 15254}, {"iscrowd": 0, "category_id": 1, "bbox": [35.0, 0.0, 47.0, 57.0], "area": 1626, "segmentation": {"size": [512, 512], "counts": "\\aa02l?2M4@KQA7m>Lo@8m>KQA7m>KPA8m>`0M2O20O0010O001M2M3N3L3N3N10001O001O00001ON3L3N2M4M2M4M2M3N3L3N3L3N2N3L3N3L3N2Mlof6"}, "image_id": 921, "id": 15255}, {"iscrowd": 0, "category_id": 1, "bbox": [316.0, 0.0, 24.0, 9.0], "area": 119, "segmentation": {"size": [512, 512], "counts": "PPn41o?001O001O00001O001O00001O001O00001O001O00N2N2MSPf2"}, "image_id": 921, "id": 15256}, {"iscrowd": 0, "category_id": 1, "bbox": [356.0, 0.0, 40.0, 40.0], "area": 971, "segmentation": {"size": [512, 512], "counts": "PPb51o?001O0b@Om>1PA2P?Ol@5S?Kj@8V?Hg@;9Ea>b0[AAe>?XAEg>;VAHj>g001O00001O00001O00001O0000N2M3M3M3M3M31O00N2M3M3M3M3M3M3MSPj1"}, "image_id": 921, "id": 15257}, {"iscrowd": 0, "category_id": 1, "bbox": [461.0, 0.0, 18.0, 7.0], "area": 69, "segmentation": {"size": [512, 512], "counts": "P`V71o?001O00001O001O00001O001O001O00N2MS``0"}, "image_id": 921, "id": 15258}, {"iscrowd": 0, "category_id": 1, "bbox": [391.0, 7.0, 52.0, 63.0], "area": 1721, "segmentation": {"size": [512, 512], "counts": "baS61l?3N2M4^@GY?;e@HX?a0M4M2O1010O010O0010O0O2M2M4M2M3N3L3N3M2M3N3L301O010ON3L3N3M2M3N3L3N3L3N2N3L3N3L3N3M2M3N3L3N3Lb_R1"}, "image_id": 921, "id": 15259}, {"iscrowd": 0, "category_id": 1, "bbox": [78.0, 13.0, 60.0, 65.0], "area": 1965, "segmentation": {"size": [512, 512], "counts": "PRW12k?4M2M3N3L3N3L3N2M4OO2O001O00010O010OO2L31O01L3N3L3N3M2M3N3L3N3M2M4M2001O0N2M4M2N3L3N2N3L3N2M4M2N3L3N2N3L3N3M2M3N3M2M4M[oj5"}, "image_id": 921, "id": 15260}, {"iscrowd": 0, "category_id": 1, "bbox": [440.0, 31.0, 47.0, 59.0], "area": 1565, "segmentation": {"size": [512, 512], "counts": "]Rl61m?3L3N3L3N2M4L3N3L3N2M5L2M3M4M2M4M2M3010O01O01O010O01O01N1N3L3N2M4M2M4L3N2M4M2M4M2M3N3L3M3N3Li^<"}, "image_id": 921, "id": 15261}, {"iscrowd": 0, "category_id": 1, "bbox": [130.0, 42.0, 55.0, 52.0], "area": 1573, "segmentation": {"size": [512, 512], "counts": "_RQ21m?2M3N3L3N3L30001O010O01O01O0N3L3N2M4N110O00010M2SAPOg>V1N3L3N3L3001M201O01O01O010O0N2N3L3N3L3N2M4L3N3L3N2M4M2M4M2M_^S5"}, "image_id": 921, "id": 15262}, {"iscrowd": 0, "category_id": 1, "bbox": [181.0, 59.0, 60.0, 55.0], "area": 1787, "segmentation": {"size": [512, 512], "counts": "Rcj21l?3M4M2M3N3L3N2M4L3N3M20010O0010O0010O00010O0010O0010O0010O0010O0N3L3N2M4M2M3M4N10O2M2M4M2M3M4M2M4M2M3M4M2M3N3L3M4M2M3NP^W4"}, "image_id": 921, "id": 15263}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 60.0, 62.0, 66.0], "area": 2066, "segmentation": {"size": [512, 512], "counts": "c2:a0Hc>8YAKg>5WANi>2TA0l>0QA3P?Mm@6R?<010O01N1N2N3O0010O010O0010O0M4M2N3L31O01O010O01O010OO2M000O10O10O10O102N2M4M2N3L3N2N3L3N3O01O01N1N2M4L3N3L3N2MPnP7"}, "image_id": 921, "id": 15264}, {"iscrowd": 0, "category_id": 1, "bbox": [39.0, 84.0, 42.0, 51.0], "area": 1068, "segmentation": {"size": [512, 512], "counts": 
"Qdc01l?3JN\\@5a?6M4M2N2M4M2N3L3N3M2M3N2N0O010120N1N2M4M2N3L3N3L30010O010O01O01O0M4M2M4M2N2M4MT]g6"}, "image_id": 921, "id": 15265}, {"iscrowd": 0, "category_id": 1, "bbox": [243.0, 91.0, 45.0, 57.0], "area": 1626, "segmentation": {"size": [512, 512], "counts": "Vdi34j?2UOLgA8V>JhA8U>KhA8V>KgA8U>KhA8V>KgA8V>JgA9Y>GeA;[>f00O010O00010O010O00010OO2L300010O0N3L3M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3Nnl_3"}, "image_id": 921, "id": 15266}, {"iscrowd": 0, "category_id": 1, "bbox": [501.0, 91.0, 11.0, 32.0], "area": 189, "segmentation": {"size": [512, 512], "counts": "ecj72k?4M2M3N3L3M4M2M3N3L3UM"}, "image_id": 921, "id": 15267}, {"iscrowd": 0, "category_id": 1, "bbox": [89.0, 102.0, 38.0, 48.0], "area": 778, "segmentation": {"size": [512, 512], "counts": "bd\\11m?2M3N3L3N3L30001L0010O010O01O2O2M4M2M40O010O00010O010M2N2M4M2M4M2M3N3L3N3Le\\P6"}, "image_id": 921, "id": 15268}, {"iscrowd": 0, "category_id": 1, "bbox": [150.0, 118.0, 33.0, 39.0], "area": 708, "segmentation": {"size": [512, 512], "counts": "cT[21l?4M2N2M4M2N3L3N3M2N3L3N2O20O010O010O01ON3M2M4M2N3M2M3N3M2M4M2N3MS\\T5"}, "image_id": 921, "id": 15269}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 134.0, 37.0, 64.0], "area": 1649, "segmentation": {"size": [512, 512], "counts": "[4j0V?000001O0000000000]ACc=]10000J60000000010O0000000000000000000000gNeBMg>0000000000BkZ]7"}, "image_id": 921, "id": 15270}, {"iscrowd": 0, "category_id": 1, "bbox": [286.0, 139.0, 56.0, 44.0], "area": 1478, "segmentation": {"size": [512, 512], "counts": "VU_41m?2M4M2M3N3L3M4M2M3N3M21O01O010O01O01O010O01O01O01O01O010O01O01O010O01O01M2N3L3O101ON3M2M4M2M3N3L3M4M2M3N3L3N3L`kd2"}, "image_id": 921, "id": 15271}, {"iscrowd": 0, "category_id": 1, "bbox": [34.0, 155.0, 11.0, 10.0], "area": 103, "segmentation": {"size": [512, 512], "counts": "kTa09g?0000000000001O00000U[Y7"}, "image_id": 921, "id": 15272}, {"iscrowd": 0, "category_id": 1, "bbox": [144.0, 156.0, 13.0, 14.0], "area": 111, "segmentation": {"size": [512, 512], "counts": "VUX21l?3M3M4N1010O0010OO1N3L3MT[a5"}, "image_id": 921, "id": 15273}, {"iscrowd": 0, "category_id": 1, "bbox": [202.0, 156.0, 65.0, 54.0], "area": 1838, "segmentation": {"size": [512, 512], "counts": "nUU32l?2M4M2N3L3N3N10010O0O2M2M4M2N3L30010O010O010O0010O0010O010O010O00010O010O0M4M2N3M0O010002N2M4M2N2M4M2N3M2N3O01O01O0N3M2N3L3N3M2MoZj3"}, "image_id": 921, "id": 15274}, {"iscrowd": 0, "category_id": 1, "bbox": [47.0, 168.0, 64.0, 51.0], "area": 2279, "segmentation": {"size": [512, 512], "counts": "_fg07[?>K500000010O0000000000000000000000010O00000000000000000F]OWAc0_>\\ObA2Nb0^>^OdA0Nb0^>^OdA0Oa0^>^OcA1Ob0]>]OdA1Ob0]>]OdA1Nc0^>\\ObAP1]>:000000000000000001O0000000000000000000001O01OG9B>C=CT[X6"}, "image_id": 921, "id": 15275}, {"iscrowd": 0, "category_id": 1, "bbox": [124.0, 171.0, 13.0, 21.0], "area": 222, "segmentation": {"size": [512, 512], "counts": "hUn17b?7J600001O0001O000000L5GkZk5"}, "image_id": 921, "id": 15276}, {"iscrowd": 0, "category_id": 1, "bbox": [403.0, 173.0, 61.0, 60.0], "area": 1955, "segmentation": {"size": [512, 512], "counts": "bfY61m?2M3N3L3N2M4O010O00010M2M4M2M3N3L3N3L3N2N3O010O00010O01O01O010O01O01O0_AcN`>_1O01O01O01O01O010OZOaAK`>2bAN^>OeA2Z>LiA3X>IkA8T>FoA9R>CQB=o=ASB`0l=]OXBb0c>O2L3M3N3L3N3L\\ig0"}, "image_id": 921, "id": 15277}, {"iscrowd": 0, "category_id": 1, "bbox": [346.0, 177.0, 57.0, 47.0], "area": 1580, "segmentation": {"size": [512, 512], "counts": "[V]54j?2M4M2M3N3L3N3M2N210O010O01O01O010O01O01O010O010N1M3N3L3N3N11O01O010O01O010O01O01O010L3N2M4M2M4M2N2M4M2M4M2M3N3LWZf1"}, "image_id": 921, "id": 15278}, {"iscrowd": 
0, "category_id": 1, "bbox": [252.0, 179.0, 32.0, 30.0], "area": 553, "segmentation": {"size": [512, 512], "counts": "ZVn32l?2N3M2N2N3L3N3M2N3M2N3ON3M2N30O010O010O0010O001L3N3M2N3M2M4M2NVja3"}, "image_id": 921, "id": 15279}, {"iscrowd": 0, "category_id": 1, "bbox": [463.0, 200.0, 49.0, 52.0], "area": 1582, "segmentation": {"size": [512, 512], "counts": "YgW73j?3N2M4L3M4M2M3M4L3M3N3L30001O010O01O01O01O01O01O01O01nNZAi0e>TO^Al0l>O01O01O010O00010O01O0N2N3O000010O0M3M4L[I"}, "image_id": 921, "id": 15280}, {"iscrowd": 0, "category_id": 1, "bbox": [135.0, 203.0, 58.0, 91.0], "area": 3331, "segmentation": {"size": [512, 512], "counts": "jhS23i?4L5J5L4O101O0M3L4L4POROjBS1Q=QOjBT1S=oNiBU1S=POhBT1T=POhBU1T=nNhBV1X=i000010O0000010O0000010O00000010O00000010O000N2L5K4K5L4L5K4L4K5L5K4L4L4K6K4L4L4L5K_Yo4"}, "image_id": 921, "id": 15281}, {"iscrowd": 0, "category_id": 1, "bbox": [312.0, 212.0, 22.0, 25.0], "area": 313, "segmentation": {"size": [512, 512], "counts": "VWl42l?2N3L3N2M4M2M4N10010O010O00N3L3N3L3N2N3L3NYih2"}, "image_id": 921, "id": 15282}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 220.0, 27.0, 62.0], "area": 1403, "segmentation": {"size": [512, 512], "counts": "l6[1e>0dAmNk=d100000000001O0000014K000000000000000000010C1fA3V>0gA3MIf=8ZB1MKf=6ZB3LJg=6ZB3LJg=7YB3LJg=6ZBf0c=m0010OO1M4L300010O00010O00010O0O1M4L3N3L3M3M4L3M3M4L3M3hN`Am0d>oN`Am0k>M3M4L3M3M4L3M3McXT4"}, "image_id": 921, "id": 15285}, {"iscrowd": 0, "category_id": 1, "bbox": [34.0, 236.0, 69.0, 52.0], "area": 2211, "segmentation": {"size": [512, 512], "counts": "_Xa05k?0000000V@Lc?;E;D?XADg>>VAEi>;TAHm>7QAKo>d0M210O00010O0O2M2M3N3L3O20O00010O0010O0010O0010O0010QObA:^>CeA=\\>@gA`0X>]OkAc0U>[OmAf0S>VOQBi0o=UOSBk0b>0O0010O0010O0010O0010O0010O0010O00M4M2M4M2M3N3L3N3LUg0"}, "image_id": 921, "id": 15287}, {"iscrowd": 0, "category_id": 1, "bbox": [379.0, 249.0, 35.0, 32.0], "area": 637, "segmentation": {"size": [512, 512], "counts": "\\hm52l?2M4M2M4M2N2M4M210O010O01O01O010O01O01O010O010O01O01OM4M2N3L3N2M4M2Nng`1"}, "image_id": 921, "id": 15288}, {"iscrowd": 0, "category_id": 1, "bbox": [244.0, 251.0, 61.0, 66.0], "area": 2325, "segmentation": {"size": [512, 512], "counts": "]Yj31l?3L4M4L3M3L5O01O01L3M3L5O0001O01O00010O0SAoNk>S110L3M3M4L3L4M4L3N200010O00010O0001O01O01O01O0M3M4L3M3L5L3M3M3L5L3M3L5L3M3M4K4MoWW3"}, "image_id": 921, "id": 15289}, {"iscrowd": 0, "category_id": 1, "bbox": [490.0, 256.0, 22.0, 23.0], "area": 305, "segmentation": {"size": [512, 512], "counts": "^Xe72k?3N3M2N3L3N3O0010O0010O010O010ON2M4M2N3M2MmG"}, "image_id": 921, "id": 15290}, {"iscrowd": 0, "category_id": 1, "bbox": [312.0, 274.0, 58.0, 67.0], "area": 1931, "segmentation": {"size": [512, 512], "counts": "XYl42k?3N3L3M3N3L3N3L3O110O0010O0010O00010O0eAXO^=i0_BYOb=f0[B^Od=b0YBAh=?UBDj=7lAMS>3jA0V>n00O0010O00010O010O0NO1O2O3L3N2M4L3N3L3M3N3L3N2M4L3N3L3N2M4LTgV2"}, "image_id": 921, "id": 15291}, {"iscrowd": 0, "category_id": 1, "bbox": [376.0, 295.0, 58.0, 70.0], "area": 2090, "segmentation": {"size": [512, 512], "counts": "PZl51m?2M4M2M4M2N3L3N2N3L3O2O010O0jASOZ=l0dBVO\\=k0`BXOa=g0]B\\Ob=e0ZB^Of=b0XBAh=>VBDj==RBFo=9oAJP>7mAKT>Q1O0010O0010O0010O010O0010ON2N3M2M4M2M4M2N2M4M2M4M2N2M4M2N3L3N3L3N2NXfV1"}, "image_id": 921, "id": 15292}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 317.0, 56.0, 69.0], "area": 1996, "segmentation": {"size": [512, 512], "counts": "`[i63k?2M4M2M3N3M2O20O01O01O010O010O00O2L3N3L3N3M2M3N3L3N3M2M3N3L3N3L3N201O0010O0M4M2N2M4M2M4M2N2M4M2M4^OUAKm>2VAKn>2UAKm>3UAKm>2WAJm>3a0Mne:"}, "image_id": 921, "id": 15293}, {"iscrowd": 0, "category_id": 1, 
"bbox": [55.0, 371.0, 59.0, 91.0], "area": 3454, "segmentation": {"size": [512, 512], "counts": "Unk03e?8G9O100cNBYC>^3gA3;[O[=_1`BgN_=Y1[BmNe=g100001O01O00000001O01O00004MO0000000M301M2K5J6J6K6I6K5J6J6K5J7JncZ5"}, "image_id": 921, "id": 15296}, {"iscrowd": 0, "category_id": 1, "bbox": [184.0, 396.0, 43.0, 79.0], "area": 2218, "segmentation": {"size": [512, 512], "counts": "P^l22j?4L4m@Gl=0oA=1Gl=0oA>0Fm=0nA?0Gm=NoA?0Gm=_10000010O000001M2L4L401O01O0001O01O000M3L5K4L4L4L5K4K5L4L5K4L4L4L4L5K4LbS^4"}, "image_id": 921, "id": 15297}, {"iscrowd": 0, "category_id": 1, "bbox": [231.0, 401.0, 60.0, 52.0], "area": 1773, "segmentation": {"size": [512, 512], "counts": "emc31l?3L4M3M4K4M3M4L3O101O01O00O2L3M3M4O01O01O01O01O01O0001O01O01O01O01O01O00010O0UO[A;e>A`A?_>^OdAb0]>ZOfAf0k>0O00010O0000010O0001M2M3M4K4M3M3LfR^3"}, "image_id": 921, "id": 15298}, {"iscrowd": 0, "category_id": 1, "bbox": [312.0, 428.0, 52.0, 57.0], "area": 1833, "segmentation": {"size": [512, 512], "counts": "U^l41>1n>3o@0n>2o@1n>3o@0n>2o@1n>b0N3L3M4M2N201O010O01O01O010O01O01O01O010O01O01O01ObNbAZ1b>1O01O01O010ON3M2M3N3L3N3L3N2M4M2M4L3N2M4MSbY2"}, "image_id": 921, "id": 15299}, {"iscrowd": 0, "category_id": 1, "bbox": [367.0, 455.0, 70.0, 57.0], "area": 2501, "segmentation": {"size": [512, 512], "counts": "Xog52k?4M2N3L3N3M2N3L3N2N3M201O010O01O010O010O01O010O000N3M2N3L3^AfN\\>`1N3O0010O0010O0010OO2O01O001O00001O001O001O0N3M2N2N3L3O2O010M2N2N3L3N3Dg@M[?1h@K\\?2f@L\\?1;N]QU1"}, "image_id": 921, "id": 15300}, {"iscrowd": 0, "category_id": 1, "bbox": [216.0, 457.0, 62.0, 55.0], "area": 1924, "segmentation": {"size": [512, 512], "counts": "^_\\32k?4L3M3M4M20010O00010O0M3M3M4L3M3N30O0001O0M3M4L30010O00010O00010O00010O00010O000010O000VObA3^>IeA7[>FhA:X>ClA=T>_OoAa0Q>\\ORBd0n=YOVBf0b>0001O000O2L3M3M4L3M3Mg`d3"}, "image_id": 921, "id": 15301}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 474.0, 4.0, 18.0], "area": 61, "segmentation": {"size": [512, 512], "counts": "j>b0_?O000H]am7"}, "image_id": 921, "id": 15302}, {"iscrowd": 0, "category_id": 1, "bbox": [51.0, 486.0, 82.0, 26.0], "area": 1670, "segmentation": {"size": [512, 512], "counts": "koi02j?5K4M3L5K4O10001O01O0000001O00001O0000001O0000M3O1001O0000001O0000001O0000001O00001O0000001O0000001O00001O00L4L4O1001O0000001O00001O0000001O000Cg@3Y?Jj@6W?En@:\\?000001O000O2K]Pm5"}, "image_id": 921, "id": 15303}, {"iscrowd": 0, "category_id": 1, "bbox": [280.0, 488.0, 50.0, 24.0], "area": 717, "segmentation": {"size": [512, 512], "counts": "o_\\41l?3N2N21O00001O001OO1N2M3N2M30000001O001ON2M3N2M31O001O00001O001O00001O001O00001O001O00001O0M4M2M3N``j2"}, "image_id": 921, "id": 15304}, {"iscrowd": 0, "category_id": 1, "bbox": [158.0, 492.0, 47.0, 20.0], "area": 579, "segmentation": {"size": [512, 512], "counts": "n__22l?2M3N20000001O001O0000M3N2M3N2M30000001O001O00001O001O00001O001O001O00001O001O00001O001L3N2M\\Pi4"}, "image_id": 921, "id": 15305}, {"iscrowd": 0, "category_id": 1, "bbox": [103.0, 197.0, 40.0, 23.0], "area": 360, "segmentation": {"size": [512, 512], "counts": "\\fc12l?2M3N3O010O0010O0010O010O00010O010O01O01O010O010O01O010O01O01O010O010O000N3M2N^Yh5"}, "image_id": 922, "id": 15306}, {"iscrowd": 0, "category_id": 1, "bbox": [154.0, 302.0, 115.0, 122.0], "area": 8098, "segmentation": {"size": [512, 512], "counts": "V\\]21l?3M3N3L3N3L3M4M2M3N3L3M4M2M3N3M2010O01O01O010O01O001M2M3N3M2M4M2N3L3N2M4M2N3L3N2N3L3N3L3N3M2M301O010O010O00010O010O0010O0010O010O0010O0010O0010O0010O0010O010O0000NO2O3M2M4M2M4M2N2M4M2N3\\OdB_N_=_1cB_N`=]1dB`N^=^1dB_N_=^1dB`N_=]1c0M4M2M4M2M3N3L3N3L3N3M2M3N3L3N3LmUi3"}, 
"image_id": 922, "id": 15307}, {"iscrowd": 0, "category_id": 1, "bbox": [254.0, 327.0, 129.0, 185.0], "area": 10362, "segmentation": {"size": [512, 512], "counts": "o_o31m?2M3N2N2M3N2M3N2N2M3N2N2M3N2N2M3N2M3N2N2M3N2N2M3N2M3N2N2M3N2N2M3N2N2M3N2M3N2N2M3N2N2M3N2N2M3N2M3N2N2M3N2N2M3N2M3N2N2M2O000O01000O010O10O10O10O10O10O10O010002M4M2N3L3N2N3L3N3M2M3N3L3N3M2M4M2N2M4M2N3L3N3M2M3N3M2M4M2M4M2N2M4M2N3L3N3M2M3N3M2M4M2M3N3M2M4M2N3L3NVUP2"}, "image_id": 922, "id": 15308}, {"iscrowd": 0, "category_id": 1, "bbox": [428.0, 374.0, 84.0, 138.0], "area": 6894, "segmentation": {"size": [512, 512], "counts": "n_f62l?2M3N2M3N2M3N2M3N2M3N2M3N2M3N2M3N2N2M3N2M3N2M3N2M3N2M3N2M3N2M3N2M3N2M3N2M3N2M3N2M3N2M3N2N2M3N2M3N2M3N2M3N2M3N200001O001L3N2M4M2N3L3N3L3N2M4M2N3L3N3L3N2M4M2N3L3N3L3N2NQD"}, "image_id": 922, "id": 15309}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 120.0, 29.0, 20.0], "area": 302, "segmentation": {"size": [512, 512], "counts": "h3;e?00010O010O00010O010O0010O0010O010O00010O010O0001O0N3M2No[a7"}, "image_id": 923, "id": 15310}, {"iscrowd": 0, "category_id": 1, "bbox": [372.0, 164.0, 21.0, 17.0], "area": 250, "segmentation": {"size": [512, 512], "counts": "WUj53m?4K6K2NO1000O1000O1000O10O10002N3M0O104LcZk1"}, "image_id": 923, "id": 15311}, {"iscrowd": 0, "category_id": 1, "bbox": [392.0, 486.0, 53.0, 26.0], "area": 750, "segmentation": {"size": [512, 512], "counts": "e_T62k?3N3L3N2N3L310O00010O010O0010O0010O010O00010O01O00001O001O001O00001O001O00001O001O001O00001O001O000N3M2NYPQ1"}, "image_id": 923, "id": 15312}, {"iscrowd": 0, "category_id": 1, "bbox": [8.0, 45.0, 105.0, 60.0], "area": 5547, "segmentation": {"size": [512, 512], "counts": "dQ4b0^?o0QO4L000000000000000O100000000000000000000000000000000000O1000K\\NnAd1R>5000000000000000000000000O1000000000000000000000000000O100000000000007I0000000000O1000000000000000000000000000000000O10000000000000003ZOlAZOT>f0c0000k0UO_]W6"}, "image_id": 924, "id": 15313}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 116.0, 102.0, 282.0], "area": 19351, "segmentation": {"size": [512, 512], "counts": "f3Q8o71O1O1O1O1O1O1O1O1O1O1O0O2O1O1O1O1O1O1O1O1O1O1O1O00000000000000000O100000000007I00000000000000000000O010000000i0WOj0VOi0WOi0WOj0VOi0WOj0VOh0XO0000O10000000000000000000O1000000000000000000000000000O10000000000000000j0VOi0WO[d\\6"}, "image_id": 924, "id": 15314}, {"iscrowd": 0, "category_id": 1, "bbox": [269.0, 127.0, 221.0, 111.0], "area": 22947, "segmentation": {"size": [512, 512], "counts": "mdV4h1X>000000000000000000000000;E000000000000000000000000000SO[1B00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000O10000000000000000000000000000000O100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000P1POQk:"}, "image_id": 924, "id": 15315}, {"iscrowd": 0, "category_id": 1, "bbox": [250.0, 298.0, 247.0, 86.0], "area": 18832, "segmentation": {"size": [512, 512], "counts": 
"bYm3Q2o=0000000000000000000000000000000000000000000000000000000000=C0006J00000000000000000000O10000000000000000C=000000000000000000000000000000000000000000000000000000000000000000=C0000000000000000000000000000000000000000000000000000000000000000000000000000000000C=0O10000000000000000000000000000000=C00000000000000000000000000000000000000000000000000000000000000000000000000000000C=0000000000000000000000000000000000000000000000000000000000000000000030000001O00000000000000000000000000000000000000000000000000000000000000WbQ7"}, "image_id": 924, "id": 15319}, {"iscrowd": 0, "category_id": 1, "bbox": [429.0, 450.0, 83.0, 62.0], "area": 977, "segmentation": {"size": [512, 512], "counts": "iof67i?0000000000000000000000000K5000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000O1_Na1"}, "image_id": 924, "id": 15320}, {"iscrowd": 0, "category_id": 1, "bbox": [125.0, 459.0, 11.0, 10.0], "area": 109, "segmentation": {"size": [512, 512], "counts": "[nn1:f?000000000000000000Ofak5"}, "image_id": 924, "id": 15321}, {"iscrowd": 0, "category_id": 1, "bbox": [57.0, 0.0, 406.0, 102.0], "area": 32852, "segmentation": {"size": [512, 512], "counts": "Qal01o?3L4M3M3L5L3M3L4M3oNQOkBR1R=ROjBR1R=QOjBS1S=POjBS1S=POjBS1S=POiBT1T=oNiBT1T=oNiBT1T=P1L3M3M3M2N0000000000000000000000000000O100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000O10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000O10000O10000O10000O1000000O10000O10000O10000O10000O10000O1000000O10000O10000O10000O10000O1000000O10000O10000O100VOeBoN[=P1hBmNY=S1iBkNW=T1lBjNT=U1oBhNR=X1PCfNP=Y1SCeNm<[1UCcNk<\\1XCaNi<^1YCaNg<_1[C_Ne<`1n0O100O100O1O100O1O100O100O1O100O1O100O100O1O100O1O100O100O1O100O1O100O100O1O100O1O100O100O1O100O1O100O100O1O100O1O100O100O1O100O1O100O100O1O100O1O100O100OQ`h0"}, "image_id": 925, "id": 15322}, {"iscrowd": 0, "category_id": 1, "bbox": [15.0, 338.0, 12.0, 43.0], "area": 392, "segmentation": {"size": [512, 512], "counts": "ck79V?a0@`00000010O00000K5@`0_OnUb7"}, "image_id": 925, "id": 15323}, {"iscrowd": 0, "category_id": 1, "bbox": [116.0, 343.0, 170.0, 25.0], "area": 1658, "segmentation": {"size": [512, 512], "counts": "jZj17f?300001O000001O0000000000000001O0001O0000000000000001O000001O0000000000000001O0001O0000000000000001O000001O0000000000000001O0001O0000000000000001O000001O0000000000000001O0001O0000000000000001O000001O0000000000000001O0001O0000000000000001O0001O000000000000000001O0001O0000000000000001O0001O000000000000000001O0001O0000000000000001O0001O00Lnd`3"}, "image_id": 925, "id": 15324}, {"iscrowd": 0, "category_id": 1, "bbox": [466.0, 372.0, 46.0, 40.0], "area": 1782, "segmentation": {"size": [512, 512], "counts": "d[Y7W1i>0001O000000000000000000000001O00000000000000000000000000000000000000000000000000000000000[D"}, "image_id": 925, "id": 15325}, {"iscrowd": 0, "category_id": 1, "bbox": [472.0, 414.0, 37.0, 35.0], "area": 1092, "segmentation": {"size": [512, 512], "counts": "Q]\\7:f?8H9G5K000000000O1000O100000000000O1000O1000000000O1000O100000003M8H7I7IYR1"}, "image_id": 925, "id": 15326}, {"iscrowd": 0, "category_id": 1, "bbox": [510.0, 489.0, 2.0, 3.0], "area": 5, "segmentation": {"size": [512, 512], "counts": 
"Y_o72n?1f@"}, "image_id": 925, "id": 15327}, {"iscrowd": 0, "category_id": 1, "bbox": [490.0, 127.0, 22.0, 81.0], "area": 1570, "segmentation": {"size": [512, 512], "counts": "PVe7>S??B>B>A?I8O000000000000000001O0000000001O0PL"}, "image_id": 926, "id": 15328}, {"iscrowd": 0, "category_id": 1, "bbox": [9.0, 144.0, 22.0, 19.0], "area": 390, "segmentation": {"size": [512, 512], "counts": "md45^?=000000000000000000000001O000000000001O00_[`7"}, "image_id": 926, "id": 15329}, {"iscrowd": 0, "category_id": 1, "bbox": [57.0, 229.0, 218.0, 167.0], "area": 19379, "segmentation": {"size": [512, 512], "counts": "bkl04V?f0YOg0E;00000000000000001O0000000000000001O0000000000000000000000000001O0000000005K6J5K000000000000001O00000000000001O0000000000000001O0000000Db0^Ob0_Oa0^Ob0^Ob0K50000001O00000000000001O00000000000000000001O000000YG"}, "image_id": 926, "id": 15331}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 87.0, 107.0], "area": 4864, "segmentation": {"size": [512, 512], "counts": "W139`1g=bNWB`1h=aNSBK0g1j==N00000000101N2N2N2N2N01O00000H`BRN_=n1cBPN]=P2eBnM]=P2eBnM]=Q2:N2N1O2N2N01O0000010O3M2N2N2N0EmMjBS2V=oMiBP2W=RNdBLNR2^=UNbBm1^=:10OMbBeM^=[2310O1O2N2O100N001O2N2N3EPBbNQ>]1QB`NQ>^1QB`NQ>^1QB`NQ>^1:N3M2O1N2N2N2N3M2O10000010OO1N2N2N2O2M2N2Ja@Ha?66N2N3Mh^d6"}, "image_id": 927, "id": 15332}, {"iscrowd": 0, "category_id": 1, "bbox": [380.0, 23.0, 75.0, 56.0], "area": 2501, "segmentation": {"size": [512, 512], "counts": "gQn53k?2M4M2M3M4M2M4L3N2N30O00010O010O00010O00010M2O20N1M3N3L3M3O20O01O01O010O00010O01O01O010O01O01O01O01O010O01O01O01O01O010O01O01O010oN]Ac0c>[O_Ae0a>XOcAf0_>VOdAg0l>M4M2M4M2M3JZ@OTnl0"}, "image_id": 927, "id": 15333}, {"iscrowd": 0, "category_id": 1, "bbox": [368.0, 71.0, 78.0, 64.0], "area": 2768, "segmentation": {"size": [512, 512], "counts": "^Sh51m?2O2M2N3M2N3M2JAi@b0T?6N3M2N3M2N3M2N3M2N30M2N3N110O010O010O01[AfNc>[1010O010O010O010O010O010O010O10O010O010O010O0O2M2N3M2N30O0N3M2N3M2N3M2N3M2N3M2JPAZOS?c06N3M2N3M2N3M2N3MXmP1"}, "image_id": 927, "id": 15334}, {"iscrowd": 0, "category_id": 1, "bbox": [450.0, 79.0, 62.0, 77.0], "area": 2706, "segmentation": {"size": [512, 512], "counts": "RTQ73k?2M3M3M4M2M3N3k@[Om>e0o@@Q?g00010N1M3M4L3N2M4_AcNZ>c1M3N3M200010O000010O00010M2N2M4L3M310O00010OO1M4K4M3M4L3M3M4L3M3L5L3M3M4L3M3M4L3M3L5L3M[M"}, "image_id": 927, "id": 15335}, {"iscrowd": 0, "category_id": 1, "bbox": [508.0, 134.0, 4.0, 12.0], "area": 29, "segmentation": {"size": [512, 512], "counts": "_Tn72k?4L3M3jK"}, "image_id": 927, "id": 15336}, {"iscrowd": 0, "category_id": 1, "bbox": [39.0, 264.0, 72.0, 85.0], "area": 2947, "segmentation": {"size": [512, 512], "counts": "Wjc02m?2M3N1N3N2N1N3N2M3N1O2M3N2M2O2_OoNPBS1n=oNoAS1o=POoAR1o=oNPBS1n=oNoAT1n=oNPBR1o=oNoAT1o=`0M2O2M3N2N1N2O0O010000O3NO0101O1N3N2M2O2N2M3XNlAa1U>^NmA_1[>O2N2M2mN_Ah0a>VOaAj0_>SOcAn0]>POeAo0[>POgAP1e>0O01M3N1O2M3N2M2O2N2M2O2M3N2N1N3NaVX6"}, "image_id": 927, "id": 15337}, {"iscrowd": 0, "category_id": 1, "bbox": [502.0, 334.0, 10.0, 24.0], "area": 134, "segmentation": {"size": [512, 512], "counts": "nZk73k?2N3M2O2M2N3M3N1N3aE"}, "image_id": 927, "id": 15338}, {"iscrowd": 0, "category_id": 1, "bbox": [364.0, 381.0, 84.0, 75.0], "area": 3164, "segmentation": {"size": [512, 512], "counts": "W]f52l?3M2O2M2N3M2O2M3M2N3N1N30O0100O010O0100O01M1O2O2M2N3M2O2M3O001M2O2M2N3M3N1N30O010O01000O010O010O01000O010O0`NPBQ1P>lNRBT1n=kNTBU1l=hNVBY1j=dNYB[1g=cN[B]1U>N2M2N3M2O2nNUAk0m>SOVAl0P?0O010O010OO2M3M2O2M2N3M2O2M3M2N3Nfbo0"}, "image_id": 927, "id": 15339}, {"iscrowd": 0, "category_id": 1, "bbox": [35.0, 222.0, 373.0, 290.0], 
"area": 54442, "segmentation": {"size": [512, 512], "counts": "noa02m?1O1O1N2O1O1O1N2O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1O11O1O1O1M2O2N2N2M2O2N2M3N1O2N2M3N1O2M3N2N2N1N3N2N2N1N3N2N2M2O2N2N2M3N1O2M3N2N1O2M3N2N1N3N2N2N1N3N2N2N2M2O2N2M3N1O2N2M3N1O2M3N2N1O2M3N2N2N1N3N2N2M2O2N2N2M2O2N2M3N1O2N2M3N2N1N3N2N2N1N3N2N2N1N3N2N2M3N1O2N2M3N1O2M3N2N1O2M3N2N1O2M3N2N2M2O2N2N2M2O2N2M3N1O2N2M3N1O2M3N2N2N1N3N2N2N1N3N2N2M2O2N2Neec1"}, "image_id": 928, "id": 15340}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 294.0, 25.0, 50.0], "area": 649, "segmentation": {"size": [512, 512], "counts": "V9b1_>N1O2N2M3N1O2M3N2N2N1N3N2N2M2O2N2N2M2O2N2N2M3N1OYVc7"}, "image_id": 928, "id": 15341}, {"iscrowd": 0, "category_id": 1, "bbox": [406.0, 277.0, 106.0, 144.0], "area": 7395, "segmentation": {"size": [512, 512], "counts": "P\\[61n?2N2N2M3N1O2N2M3N2N1N3N2N2N1N3N2N2N2M2O2N2N2M2O2N2M3N2N1O2M3N2N2N1N3N2N2N1N3N2N2M3N1O2N2M3N1O00O0100000O01000O10O1000O01000O10O1000O10O1000O10O10O10O1000O10O1000O10O10O102N2N2M3N1O2M3N2N1O2M3N2N2N1N3N2N2M2O2N2NlF"}, "image_id": 929, "id": 15342}, {"iscrowd": 0, "category_id": 1, "bbox": [468.0, 483.0, 14.0, 14.0], "area": 108, "segmentation": {"size": [512, 512], "counts": "Y_Z72m?2N2N2N2M201000O1M2O2N2N2Mi`>"}, "image_id": 929, "id": 15343}, {"iscrowd": 0, "category_id": 1, "bbox": [482.0, 492.0, 9.0, 8.0], "area": 46, "segmentation": {"size": [512, 512], "counts": "__a72m?1O2N2O10O001N2McP:"}, "image_id": 929, "id": 15344}, {"iscrowd": 0, "category_id": 1, "bbox": [483.0, 51.0, 29.0, 50.0], "area": 790, "segmentation": {"size": [512, 512], "counts": "naa72m?2N2N2Y@K`?00000000000000000B>K50000000000001O01O000000000000@`0^Ob000000000001O00000000000000_NjAX1_>00000000000000000000000000000001O01O000000000000000000000000000000000000000000000000010O000000L4VOSYb6"}, "image_id": 930, "id": 15347}, {"iscrowd": 0, "category_id": 1, "bbox": [40.0, 298.0, 17.0, 20.0], "area": 333, "segmentation": {"size": [512, 512], "counts": "ZYd0c0]?0000000000001O00000000000000000fVS7"}, "image_id": 930, "id": 15348}, {"iscrowd": 0, "category_id": 1, "bbox": [13.0, 0.0, 8.0, 4.0], "area": 18, "segmentation": {"size": [512, 512], "counts": "P`62n?1O1OO1O100O10P`e7"}, "image_id": 931, "id": 15349}, {"iscrowd": 0, "category_id": 1, "bbox": [45.0, 0.0, 53.0, 31.0], "area": 1055, "segmentation": {"size": [512, 512], "counts": "]`f01m?3L3N2M4M2O2O00001O001O001O00001O001O00001O001O00001O001O001O00001O001O00001O001OM3N2M3N2M3N2N2M3N2M3N2NRP_6"}, "image_id": 931, "id": 15350}, {"iscrowd": 0, "category_id": 1, "bbox": [142.0, 0.0, 56.0, 32.0], "area": 1050, "segmentation": {"size": [512, 512], "counts": "VPW23l?1N3M2O2O001O001O1O001O001O001O001O001O1O001O001O001O001O1O001O001O001O001O1O001O010OO2M2N3M2N3N1N3M3M2N3M2O2Mjol4"}, "image_id": 931, "id": 15351}, {"iscrowd": 0, "category_id": 1, "bbox": [217.0, 0.0, 37.0, 64.0], "area": 2058, "segmentation": {"size": [512, 512], "counts": "Ya\\3f0V?4000000H]OSAc0P>:PBFP>:PBFP>:PBFP>:PBFP>:PBFP>:PBFP>:PBFP>:PBFP>:PBFP>U10000000VOPBDP>H8001O0001O0000000000000000000001O0001O00O1A?Bb^f1"}, "image_id": 931, "id": 15355}, {"iscrowd": 0, "category_id": 1, "bbox": [426.0, 81.0, 83.0, 18.0], "area": 
1023, "segmentation": {"size": [512, 512], "counts": "aRe6=c?00000001O00000000000000000001O0000000001O00000000000000000001O0000000001O000000000000000001O000000000001O000000000000000001O0000000001O00000000000000000001O000000Hb]1"}, "image_id": 931, "id": 15356}, {"iscrowd": 0, "category_id": 1, "bbox": [324.0, 98.0, 67.0, 31.0], "area": 1756, "segmentation": {"size": [512, 512], "counts": "_SR5>U?=000000001O00000001O0000000000000000000001O000001O000000000000000000000001O000001O0000000000000000000001O000001O0000000000000000J6AY]l1"}, "image_id": 931, "id": 15357}, {"iscrowd": 0, "category_id": 1, "bbox": [223.0, 127.0, 289.0, 147.0], "area": 28337, "segmentation": {"size": [512, 512], "counts": "kf_3>Q?a0^Ob00000000000000000000000000001O000B>^Ob0_Oa0H8000000000000000000000001O0001O0000000000000000000000000001O000001O0000000000000000000000000001O0001O000000000000000000000000000001O0001O0000000000000000000000000001O000001O0000000000000000000000000001O0001O000000000000000000000000000001O0001O0000000000000000000000000001OI7^Oc0^Oa0_Oa0000000001O00000000000lChN`9X1nEZOR:f0]EKc:5kD=U;CYDo0g;e11eLeDo1[;_MWEa2T<000000001O000001O0000000000000000L4D<000001O0001O000000000000000000000000000001O0001O0000000000000000000000000001O0001O000000000000000000000000000001O0001O000000000000000000000000000001O0001O000000000kLeCe2k<000000000000000WJ"}, "image_id": 931, "id": 15358}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 128.0, 80.0, 85.0], "area": 6141, "segmentation": {"size": [512, 512], "counts": "P4`2`=000000000001O0000000001O000000000000000001O0000000001O0000000000000001O0000000001O000000000000000001O00000001O000000000000000001O0000000001O000000M3B>A?B>A`0AXlg6"}, "image_id": 931, "id": 15359}, {"iscrowd": 0, "category_id": 1, "bbox": [352.0, 262.0, 32.0, 50.0], "area": 1451, "segmentation": {"size": [512, 512], "counts": "^Y`59Z?=BDXA<[>1eAO[>1eAO[>1eAO[>1eAO[>1eAO[>1eAO[>1eAO[>1eAO[>k000000001O000000000000000000000000000000^O\\ho1"}, "image_id": 931, "id": 15360}, {"iscrowd": 0, "category_id": 1, "bbox": [414.0, 272.0, 84.0, 18.0], "area": 1020, "segmentation": {"size": [512, 512], "counts": "cX_6:c?300001O0000000000000000000001O000001O0000000000000000000001O000001O0000000000000000000001O000001O0000000000000000000001O000001O0000000000000000000001O000001O0000000Gcg6"}, "image_id": 931, "id": 15361}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 279.0, 63.0, 116.0], "area": 4155, "segmentation": {"size": [512, 512], "counts": "g8^3b<00001O000000000000000001O000000000002N001OVNSDAm;MeD3[;[OWEe0i:iNiEW1`<0000000000000000000000000010O00000000000000000000000000000000010O00000G9^O_UP7"}, "image_id": 931, "id": 15362}, {"iscrowd": 0, "category_id": 1, "bbox": [240.0, 314.0, 51.0, 75.0], "area": 2372, "segmentation": {"size": [512, 512], "counts": "\\[h3a0l>c000VAQOc>V10000000000000000000000000001O0000000000000L4UOk000000000000000000I7000000J6SOm000000000M3TO\\AS??B>A?A?M30001O000000000001O00000000000001O000000000001O0000000000000001O000000000001O0000000000000001O0000000001O0000000000000001O000000000001O0000000000000001O000000000001O0000000000000001O0000000001O0000000000000001O000000000001O00000000000000mE"}, "image_id": 931, "id": 15364}, {"iscrowd": 0, "category_id": 1, "bbox": [294.0, 320.0, 46.0, 44.0], "area": 1639, "segmentation": {"size": [512, 512], "counts": "Q[c49Q?f00000EHVB8i>0000000000000000LPhn2"}, "image_id": 932, "id": 15369}, {"iscrowd": 0, "category_id": 1, "bbox": [228.0, 229.0, 28.0, 44.0], "area": 1059, "segmentation": {"size": [512, 512], "counts": 
"QXb3>S??C=000000000001O01O00000000000000000000000001O01C0000000000000000000001O000001O000000000000000000000F:AVim4"}, "image_id": 932, "id": 15371}, {"iscrowd": 0, "category_id": 1, "bbox": [255.0, 232.0, 29.0, 35.0], "area": 925, "segmentation": {"size": [512, 512], "counts": "ngo3;V?`0H70000000000000000000000000010O00000000000000000000H8AVia3"}, "image_id": 932, "id": 15372}, {"iscrowd": 0, "category_id": 1, "bbox": [197.0, 233.0, 30.0, 32.0], "area": 808, "segmentation": {"size": [512, 512], "counts": "ShR34b?:E;K50000000010O0000000000000000010O0000000000000000L4En0000000000007I0003M000000000000001O0000000000000000000001O0000000000000000RO[Ya5"}, "image_id": 932, "id": 15375}, {"iscrowd": 0, "category_id": 1, "bbox": [381.0, 240.0, 40.0, 53.0], "area": 1962, "segmentation": {"size": [512, 512], "counts": "hhn5j0B>000000000000000000001O000000000000000001O000000000000000000000000000000B?UOXY]1"}, "image_id": 932, "id": 15376}, {"iscrowd": 0, "category_id": 1, "bbox": [72.0, 244.0, 31.0, 32.0], "area": 967, "segmentation": {"size": [512, 512], "counts": "dWT1o0Q?0000000000000000000000000000000000000000000000001O000000000\\X\\6"}, "image_id": 932, "id": 15377}, {"iscrowd": 0, "category_id": 1, "bbox": [157.0, 264.0, 42.0, 25.0], "area": 861, "segmentation": {"size": [512, 512], "counts": "mh^21e?:E;00001O000001O00000000000001O0001O00000000000001O000001O00000000000001O00000L4FnWl4"}, "image_id": 932, "id": 15378}, {"iscrowd": 0, "category_id": 1, "bbox": [346.0, 304.0, 58.0, 36.0], "area": 1695, "segmentation": {"size": [512, 512], "counts": "ZZ]52`?>0001O00000D<000000000000001O00000001O000000004L001O00000001O0000000000000000000001O00000001O00000000000000000J6@mfe1"}, "image_id": 932, "id": 15379}, {"iscrowd": 0, "category_id": 1, "bbox": [348.0, 342.0, 46.0, 37.0], "area": 1325, "segmentation": {"size": [512, 512], "counts": "^[^51`??G90000001O01O00000000000000000k@^Ol>k00000001O00000000000001O00000000000001O00000000000001OA?Afej1"}, "image_id": 932, "id": 15380}, {"iscrowd": 0, "category_id": 1, "bbox": [221.0, 345.0, 52.0, 29.0], "area": 1387, "segmentation": {"size": [512, 512], "counts": "]k^37U?d00000000000000000000000000001O000000000001O000000000000000000000000000000000000000000000001O00000000F`Ug3"}, "image_id": 932, "id": 15381}, {"iscrowd": 0, "category_id": 1, "bbox": [162.0, 348.0, 54.0, 35.0], "area": 1644, "segmentation": {"size": [512, 512], "counts": "h[a24_?=B>O100000000001O00000001O000000000000000001O00000001O0000000000000001O00000001O000000000000000001O000K5B^ec4"}, "image_id": 932, "id": 15382}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 373.0, 22.0, 39.0], "area": 858, "segmentation": {"size": [512, 512], "counts": "e;W1i>00000000000000000000000000000000000000000[dd7"}, "image_id": 932, "id": 15383}, {"iscrowd": 0, "category_id": 1, "bbox": [352.0, 378.0, 47.0, 32.0], "area": 1243, "segmentation": {"size": [512, 512], "counts": "\\\\`59_?8G9O10001O0000000001O0001O000000000001O0001O0000000001O0001O000000000001O0001O00000000O2F9HYTh1"}, "image_id": 932, "id": 15384}, {"iscrowd": 0, "category_id": 1, "bbox": [170.0, 382.0, 81.0, 32.0], "area": 2197, "segmentation": {"size": [512, 512], "counts": "g\\e23Z?c0J600000000001O000001O000000000000000000000000000001O000001O00000000000000000000000000000001O0001O00000000000000000000000000000001O000001O000000000000000000K5]OaTR4"}, "image_id": 932, "id": 15385}, {"iscrowd": 0, "category_id": 1, "bbox": [413.0, 397.0, 99.0, 115.0], "area": 10578, "segmentation": {"size": [512, 512], "counts": 
"ao^6?Q?`0_Oa0@`0_Oa0@`0M300000000000000000000000000001O00000000@`000000000001O000000000000000000000000000000000000001O000000000000000000000000000000000000001O000000000000000000000000000000000000001O000000000000"}, "image_id": 932, "id": 15386}, {"iscrowd": 0, "category_id": 1, "bbox": [339.0, 408.0, 48.0, 32.0], "area": 1136, "segmentation": {"size": [512, 512], "counts": "XmY58a?70001O0001O00M3I704L1O0000000001O0001O0000000001O000001O00000000O2G8000000000000010O000000L4H\\Sn1"}, "image_id": 932, "id": 15387}, {"iscrowd": 0, "category_id": 1, "bbox": [168.0, 411.0, 82.0, 31.0], "area": 2142, "segmentation": {"size": [512, 512], "counts": "X]d2>U?=001O0000000000000000000001O000000000001O0000000000000000000001O00000000000001O00000000000000000001O00000000000001O0000000000000000000001O000000000001O000000000FZcR4"}, "image_id": 932, "id": 15388}, {"iscrowd": 0, "category_id": 1, "bbox": [2.0, 412.0, 52.0, 34.0], "area": 1495, "segmentation": {"size": [512, 512], "counts": "P]18h?;E;E00O10O10000000000000000000O010000000000000000000O01000000000000000000000O010000000000000000007Hf0000000000000000000000000000000000000001O0000000000000001O000000gNlAf0g>0000000000000000000000000L4000L4H80000000000000000000000000000000000000000000001O000001O00EPbV4"}, "image_id": 932, "id": 15391}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 466.0, 52.0, 46.0], "area": 1367, "segmentation": {"size": [512, 512], "counts": "X?6i?2O2M2N3N1N3M2O2M3M2O2M2O0O1O100O1O100O1O10O01O01O01O01O010O00010O00102M3M2O2M2O2M2N3N1N3M2O2M3M2O2M2Nf`U7"}, "image_id": 932, "id": 15392}, {"iscrowd": 0, "category_id": 1, "bbox": [51.0, 488.0, 50.0, 24.0], "area": 763, "segmentation": {"size": [512, 512], "counts": "noi02l?2N2M3N2M3N2N2M3N2O1001O00001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O0O1N3L[P]6"}, "image_id": 932, "id": 15393}, {"iscrowd": 0, "category_id": 1, "bbox": [146.0, 497.0, 36.0, 15.0], "area": 291, "segmentation": {"size": [512, 512], "counts": "o_Y21m?2N2O1N2N2N2N2O100001O1O001O001O001O001O001O1O001O001O001O001O001O001OQ`T5"}, "image_id": 932, "id": 15394}, {"iscrowd": 0, "category_id": 1, "bbox": [223.0, 500.0, 31.0, 12.0], "area": 364, "segmentation": {"size": [512, 512], "counts": "ho_38d?4000000000000000000000000000000000000000000000000001O000000[`P4"}, "image_id": 932, "id": 15395}, {"iscrowd": 0, "category_id": 1, "bbox": [446.0, 0.0, 66.0, 74.0], "area": 2484, "segmentation": {"size": [512, 512], "counts": "PPo6b0^?00000000000000000000000000000000000000000000000000000000000000000fA2b_?3L4M3M3L4M3M3M3L4000000000000O100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000dNQOgCo0Yn1R>00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000O1000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000G90000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000P2PNXPS4"}, "image_id": 933, "id": 15400}, {"iscrowd": 0, "category_id": 1, "bbox": [321.0, 52.0, 40.0, 28.0], "area": 535, "segmentation": {"size": [512, 512], "counts": "naP53j?3N3M2N3N10010O010O010O00010O010O010O0010O0010O010O0010O0010O010O0010M2N3M2M4Mo][2"}, "image_id": 934, "id": 15401}, {"iscrowd": 0, "category_id": 1, "bbox": [505.0, 118.0, 7.0, 12.0], 
"area": 61, "segmentation": {"size": [512, 512], "counts": "mcl73j?3N2N3O010O0YL"}, "image_id": 934, "id": 15402}, {"iscrowd": 0, "category_id": 1, "bbox": [359.0, 201.0, 104.0, 47.0], "area": 3114, "segmentation": {"size": [512, 512], "counts": "dfc5:f?b0900000O10000000000000O101O000M300000000O1000O100000000008H;ETYh0"}, "image_id": 934, "id": 15403}, {"iscrowd": 0, "category_id": 1, "bbox": [212.0, 482.0, 38.0, 30.0], "area": 761, "segmentation": {"size": [512, 512], "counts": "n_Z32k?3N2N2M3N2N2M3N2N2M3N2N200001O001O00001O001O001O00001O001O0O2L3N2N3L3N3M2Mg`R4"}, "image_id": 934, "id": 15404}, {"iscrowd": 0, "category_id": 1, "bbox": [202.0, 509.0, 8.0, 3.0], "area": 17, "segmentation": {"size": [512, 512], "counts": "n_U32m?100001O001O00Q`f4"}, "image_id": 934, "id": 15405}, {"iscrowd": 0, "category_id": 1, "bbox": [9.0, 0.0, 28.0, 20.0], "area": 383, "segmentation": {"size": [512, 512], "counts": "Y`42k?3N3M2N3O001O001O00001O001O001O001O00001OO1N3M2M3N3M2NP`]7"}, "image_id": 937, "id": 15406}, {"iscrowd": 0, "category_id": 1, "bbox": [50.0, 0.0, 49.0, 20.0], "area": 569, "segmentation": {"size": [512, 512], "counts": "RPi01m?3O001O001O00001O001O001O00001O001O001O00001O001O001O00001O001O001O00001O001O0000N2N2M3N2N2N2M3NR`^6"}, "image_id": 937, "id": 15407}, {"iscrowd": 0, "category_id": 1, "bbox": [20.0, 26.0, 55.0, 58.0], "area": 1700, "segmentation": {"size": [512, 512], "counts": "PR:3j?3N3L3@GWA;g>GVA=f>FXAHWA`0M3O20O01O01O010O01O01O010O01O01O010O0QO]Ab0b>[OaAe0`>XOcAg0]>VOfAk0Z>ROiAm0e>1O01O010O010O00010O010O001M2M3N3L3N3L3N2M4M2MY^j6"}, "image_id": 937, "id": 15408}, {"iscrowd": 0, "category_id": 1, "bbox": [46.0, 88.0, 29.0, 36.0], "area": 634, "segmentation": {"size": [512, 512], "counts": "dSg01l?4L3M3M4L3M3M4L3N2N3O0010O00010O00010N1M3M4L3M3M4L3M4L3MU]j6"}, "image_id": 937, "id": 15409}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 89.0, 44.0, 48.0], "area": 1209, "segmentation": {"size": [512, 512], "counts": "i2R1n>1O010O01O01O010O00010O01O01O010O01O01O010O00010O[OUA5l>HVA8j>EYAA^A>c>_O_Ab0o>01O010O01O01O0O2M2M3M4M2M3NZlY7"}, "image_id": 937, "id": 15410}, {"iscrowd": 0, "category_id": 1, "bbox": [11.0, 213.0, 27.0, 26.0], "area": 426, "segmentation": {"size": [512, 512], "counts": "Tg53k?2M4M2M3N3L3010O00010O010O00010O010O00010L3N3L3N2M4MUi\\7"}, "image_id": 937, "id": 15411}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 216.0, 8.0, 25.0], "area": 108, "segmentation": {"size": [512, 512], "counts": "h6i0X?L3M3M4M2M3M4LXik7"}, "image_id": 937, "id": 15412}, {"iscrowd": 0, "category_id": 1, "bbox": [287.0, 0.0, 42.0, 31.0], "area": 854, "segmentation": {"size": [512, 512], "counts": "b`_41l?4M2M3N3L3N3L30001O001O00001O00001O001O00001O001O00001O001ON2M3N2M3N2M3N2M3N2M3M3NR`k2"}, "image_id": 938, "id": 15413}, {"iscrowd": 0, "category_id": 1, "bbox": [448.0, 0.0, 52.0, 52.0], "area": 1521, "segmentation": {"size": [512, 512], "counts": "oPP71l?4@Om@3Q?Ol@4Q?0l@3Q?Om@3P?a0M2O2O010O01O01O010O010O00010O010O0010O0010OO2N11O01O010OUAROd>U1010O0O2M2N2M4M2M4M2M3N3M2M4M2M3Nao5"}, "image_id": 938, "id": 15414}, {"iscrowd": 0, "category_id": 1, "bbox": [502.0, 12.0, 10.0, 14.0], "area": 96, "segmentation": {"size": [512, 512], "counts": "ePk72l?2M4M2N3O0010O0N3MD"}, "image_id": 938, "id": 15415}, {"iscrowd": 0, "category_id": 1, "bbox": [428.0, 32.0, 26.0, 21.0], "area": 300, "segmentation": {"size": [512, 512], "counts": "ZQf62l?3M2N3L3O20O0010O0010O010O010O0010O010O00O2L3N3M2Ninl0"}, "image_id": 938, "id": 15416}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 50.0, 12.0, 34.0], 
"area": 215, "segmentation": {"size": [512, 512], "counts": "b1R1n>N3M2M4L3N3L3M3N3L3M4M[ni7"}, "image_id": 938, "id": 15417}, {"iscrowd": 0, "category_id": 1, "bbox": [424.0, 56.0, 49.0, 43.0], "area": 1195, "segmentation": {"size": [512, 512], "counts": "`Rd62l?3M2N3L3N3M2M4M2N2N3M210O0010O0010O010O010O00010O010O010O0010O0010O010O0010N1N3M2M4M2N3L3N2N3M2Mk]c0"}, "image_id": 938, "id": 15418}, {"iscrowd": 0, "category_id": 1, "bbox": [485.0, 57.0, 27.0, 56.0], "area": 1000, "segmentation": {"size": [512, 512], "counts": "Xcb73h?6K4K5N21O01O000M3K6K4L4K5L4K6M20000010O000000010O00UN"}, "image_id": 938, "id": 15419}, {"iscrowd": 0, "category_id": 1, "bbox": [8.0, 60.0, 63.0, 80.0], "area": 2599, "segmentation": {"size": [512, 512], "counts": "jS41m?2M4M2M7J20010O010M2N2M4M2M4M2N2M4M2M4M2M3N3L3N3L3N2N3L3N3L3O2O01O01O01M2N3L3N2N3L3N3L3N2M4M2M4M2M3N3M2M4M2M3N3L3N3L3N3M2M3Nj]l6"}, "image_id": 938, "id": 15420}, {"iscrowd": 0, "category_id": 1, "bbox": [79.0, 72.0, 24.0, 22.0], "area": 344, "segmentation": {"size": [512, 512], "counts": "ebW13j?4L3M3M4N1010O00010O00010O0010O0001O0M3M4M2M3Me]\\6"}, "image_id": 938, "id": 15421}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 95.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "o22olo7"}, "image_id": 938, "id": 15422}, {"iscrowd": 0, "category_id": 1, "bbox": [68.0, 97.0, 44.0, 54.0], "area": 1300, "segmentation": {"size": [512, 512], "counts": "YTR12l?2M4M2O101N1M4M2N2M4M2M4M2M4M2M3N3L3O2O01O01O010O01O01O010ON3L3jN_Ak0d>RO^Ak0n>M2M3N3L3N3M2M3N3L3N_lW6"}, "image_id": 938, "id": 15423}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 123.0, 4.0, 10.0], "area": 20, "segmentation": {"size": [512, 512], "counts": "k3:g?L3M3NVlm7"}, "image_id": 938, "id": 15424}, {"iscrowd": 0, "category_id": 1, "bbox": [135.0, 134.0, 56.0, 39.0], "area": 1300, "segmentation": {"size": [512, 512], "counts": "hdS23j?3N2M4M2M4M2M301O01O01O010O01O01O010O00010O010O00010O01O01h@[OQ?k0M4M210O01L31O010O01O01O010O01M2M3M4M2M3N3L3N3L3M3Nb[P5"}, "image_id": 938, "id": 15425}, {"iscrowd": 0, "category_id": 1, "bbox": [109.0, 153.0, 24.0, 28.0], "area": 394, "segmentation": {"size": [512, 512], "counts": "]ef12l?3L3N2N3L3N3L3N3N10010O010O00O2L3N3M2M4M2M3N3MS[m5"}, "image_id": 938, "id": 15426}, {"iscrowd": 0, "category_id": 1, "bbox": [15.0, 157.0, 23.0, 22.0], "area": 283, "segmentation": {"size": [512, 512], "counts": "Te72l?3M2M4O00010O0`@F[?:a@J^?<010O0010O0010O001M2M4M2N2Moj\\7"}, "image_id": 938, "id": 15427}, {"iscrowd": 0, "category_id": 1, "bbox": [41.0, 158.0, 61.0, 63.0], "area": 2211, "segmentation": {"size": [512, 512], "counts": "Wfd04j?2M4M2M3N3L3O20O00O2L3N3L3N2M4M2M4M2M3N3O010O00010O010O00010O010O00010O010O00010O010OdNhAP1X>nNkAQ1V>kNmAV1R>hNQBW1\\>O1N3L3N3L3N2M4M2M4M2M3N3L3NXj\\6"}, "image_id": 938, "id": 15428}, {"iscrowd": 0, "category_id": 1, "bbox": [195.0, 161.0, 63.0, 49.0], "area": 1782, "segmentation": {"size": [512, 512], "counts": "leQ33k?2M4M2M3N3M2M4M2M3N3L310O01O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O01O0M4M2M310OO2M2M4M2M3N3L3N3L3N2M4M2M4M2Mfjn3"}, "image_id": 938, "id": 15429}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 173.0, 23.0, 45.0], "area": 665, "segmentation": {"size": [512, 512], "counts": "]5Z1f>010O00010O0010O0M3N3L3M3N3L3N2M4M2M3N3L3N2M_Zd7"}, "image_id": 938, "id": 15430}, {"iscrowd": 0, "category_id": 1, "bbox": [128.0, 213.0, 25.0, 20.0], "area": 284, "segmentation": {"size": [512, 512], "counts": "oVP21m?3M2N3L3O2O0010O0010O010O0010O0010O010O01L3N2N3LVYc5"}, "image_id": 938, "id": 15431}, 
{"iscrowd": 0, "category_id": 1, "bbox": [197.0, 216.0, 36.0, 31.0], "area": 654, "segmentation": {"size": [512, 512], "counts": "YgR32k?4M2M3N3M2M4M200010O010O0010O0010O010O00010O010O00010O010M2N2M4M2N3L3NoX[4"}, "image_id": 938, "id": 15432}, {"iscrowd": 0, "category_id": 1, "bbox": [32.0, 222.0, 61.0, 82.0], "area": 2803, "segmentation": {"size": [512, 512], "counts": "oX`02k?4L3N3L3N3M200010O\\O^OjAc0S>_OlAb0Q>BlAa0P>BnA`0P>BmAa0P>CmA`0o=CoA?n=l0M2M4L3M3O20O01O01O010O01O010O00010O01N1M3M4M2M3N3L3M4M2M3M4M2M3M4M2M4L3N2M4L3N2M4M2M4LfXa6"}, "image_id": 938, "id": 15433}, {"iscrowd": 0, "category_id": 1, "bbox": [98.0, 227.0, 23.0, 33.0], "area": 415, "segmentation": {"size": [512, 512], "counts": "lWa13k?3L3N3L3N3M2M3N3L3N3N11N1N3L3N2M4M2N3L3N2M4MjXS6"}, "image_id": 938, "id": 15434}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 232.0, 3.0, 7.0], "area": 12, "segmentation": {"size": [512, 512], "counts": "X77i?M4MiXn7"}, "image_id": 938, "id": 15435}, {"iscrowd": 0, "category_id": 1, "bbox": [133.0, 240.0, 88.0, 60.0], "area": 2617, "segmentation": {"size": [512, 512], "counts": "\\hR22l?2M4M2M3N3L3N3L3N2M4M2O20O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O0010O010O00010O010O00010O010O00010O010O00010O010O00010O010O00010O010O00010O01M2N2M4M2M4M2N2M4M2M4McWa4"}, "image_id": 938, "id": 15436}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 242.0, 32.0, 40.0], "area": 616, "segmentation": {"size": [512, 512], "counts": "b83k?3M2N3L3N3M2O1O2M2N3L3N1O00O011O2M4M2O2O010O00001M2N3M2M4M2N3LZh_7"}, "image_id": 938, "id": 15437}, {"iscrowd": 0, "category_id": 1, "bbox": [118.0, 277.0, 88.0, 59.0], "area": 2558, "segmentation": {"size": [512, 512], "counts": "_Yk13k?2M3N3M2M4M2M4M2M3N3M210O3M010O01O01O010O01O010O0O1O2O010O01O01O010O01O01O010O01O010O01O010O01O01O010O01O01O010O01O01O010O010O01O01O010O01O01O010O01O01O0O2M2M4M2M3N3M2M4M2M3N^fh4"}, "image_id": 938, "id": 15438}, {"iscrowd": 0, "category_id": 1, "bbox": [94.0, 293.0, 18.0, 22.0], "area": 243, "segmentation": {"size": [512, 512], "counts": "fY_12k?3N2M4L3N3L3O1010O001O0N2M4L3N2M4MifW6"}, "image_id": 938, "id": 15439}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 314.0, 49.0, 86.0], "area": 2328, "segmentation": {"size": [512, 512], "counts": "];l0Q?4M2M4M2M3N3L3N3L3N2M4M2M4M2N2M4M2M3N0O11N3N3L3N3L3N2M4M2M4M2M3N3L3N3M2M3N3L3N3L3N2M4M2M4M2M3N3LnUW7"}, "image_id": 938, "id": 15440}, {"iscrowd": 0, "category_id": 1, "bbox": [48.0, 363.0, 16.0, 22.0], "area": 211, "segmentation": {"size": [512, 512], "counts": "k[h04i?3N2M4L3N2M4O0001N1N3L3M3M4M2Medo6"}, "image_id": 938, "id": 15441}, {"iscrowd": 0, "category_id": 1, "bbox": [82.0, 363.0, 89.0, 61.0], "area": 2513, "segmentation": {"size": [512, 512], "counts": "W\\Y12k?3N2M4M2N3L3N2M4M2M4M2O2O00010O010O00010O010O010O00010O010O00O2M210O010O01O01O0UOPAg0T?0010O0010O0010O03N0O010O00010O010O0O1N30O01O010O01O01O010O01O010O01O010ON3M2M3N3M2M4M2M4M2N2M4MgSZ5"}, "image_id": 938, "id": 15442}, {"iscrowd": 0, "category_id": 1, "bbox": [56.0, 389.0, 22.0, 17.0], "area": 217, "segmentation": {"size": [512, 512], "counts": "_\\l01l?3M3N3M210O00010O0010O01O01O01O01O0N3L3N2Ngch6"}, "image_id": 938, "id": 15443}, {"iscrowd": 0, "category_id": 1, "bbox": [19.0, 391.0, 27.0, 26.0], "area": 439, "segmentation": {"size": [512, 512], "counts": "gl93j?3N3L3N2M4M2O2O01O01O010O01O01O010O01O0N2N3L3N3L3N2MecX7"}, "image_id": 938, "id": 15444}, {"iscrowd": 0, "category_id": 1, "bbox": [11.0, 410.0, 23.0, 19.0], "area": 241, "segmentation": {"size": [512, 512], "counts": 
"Sm52l?2M4M2N30O01O010O01O010O010O01O010O01M2N3M2NPc^7"}, "image_id": 938, "id": 15445}, {"iscrowd": 0, "category_id": 1, "bbox": [56.0, 413.0, 94.0, 62.0], "area": 2663, "segmentation": {"size": [512, 512], "counts": "g]l02l?2N3L3N2M4M2N3L3N2M4M2010O010O0010O0010O0010O010O0010O0010O0010O010O0010O0010O0010O0010O010O0010O0010O0010O010O0010O0010O0010O010O0010O0010O0010O010O0010O0010O0010O0N3M2M4M2M3N3M2M4M2N2MTbd5"}, "image_id": 938, "id": 15446}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 426.0, 20.0, 56.0], "area": 590, "segmentation": {"size": [512, 512], "counts": "Z=h1X>N3L3N3M2M3N3L3N3M2M4M2M3N3M2M4M2N3L3N`be7"}, "image_id": 938, "id": 15447}, {"iscrowd": 0, "category_id": 1, "bbox": [35.0, 448.0, 31.0, 29.0], "area": 514, "segmentation": {"size": [512, 512], "counts": "bna01l?3N3M2N3L3N3M2N210O010O0010O0010O010O010O0010N1N3M2N3L3N3M2Nhan6"}, "image_id": 938, "id": 15448}, {"iscrowd": 0, "category_id": 1, "bbox": [51.0, 487.0, 50.0, 25.0], "area": 741, "segmentation": {"size": [512, 512], "counts": "ooi01m?2N2M3N2N2N2M3N2N2M3O100001O001O001O001O00001O001O001O00001O001O001O0Bb@:b?01O001O001O00001O001O001O0MXP]6"}, "image_id": 938, "id": 15449}, {"iscrowd": 0, "category_id": 1, "bbox": [31.0, 498.0, 13.0, 14.0], "area": 119, "segmentation": {"size": [512, 512], "counts": "io?4j?2M3N3O0010O00010N1M4M2M^`Y7"}, "image_id": 938, "id": 15450}, {"iscrowd": 0, "category_id": 1, "bbox": [14.0, 505.0, 16.0, 7.0], "area": 60, "segmentation": {"size": [512, 512], "counts": "m_73k?2N21O001O001O001O00001O001O00Q``7"}, "image_id": 938, "id": 15451}, {"iscrowd": 0, "category_id": 1, "bbox": [439.0, 0.0, 70.0, 38.0], "area": 1657, "segmentation": {"size": [512, 512], "counts": "b`k62l?2N3L3N2N3L3010O01N1N2N3O001O001O00001O001O001O00001O001O001O00001O0010O010OO1O2O001O0000M3N2N2M3N2N2M3N2N2N2001O001O001O0000O1M3N2N2M4M2NQ`1"}, "image_id": 941, "id": 15452}, {"iscrowd": 0, "category_id": 1, "bbox": [490.0, 33.0, 22.0, 31.0], "area": 463, "segmentation": {"size": [512, 512], "counts": "eQe73j?3N3L3N2N3L3N3L31O010O01O010O01O010O01O01OiN"}, "image_id": 941, "id": 15453}, {"iscrowd": 0, "category_id": 1, "bbox": [426.0, 48.0, 41.0, 53.0], "area": 1437, "segmentation": {"size": [512, 512], "counts": "ZRe64X?0XA4d>0XA5c>OYA5c>OYA5b>i0L2N000O0100000O0100000O0100000O0100000O04M4L4L4L5J5L4L000O100If@HY?8803M4L^]f0"}, "image_id": 941, "id": 15454}, {"iscrowd": 0, "category_id": 1, "bbox": [466.0, 87.0, 28.0, 31.0], "area": 495, "segmentation": {"size": [512, 512], "counts": "^SY71l?3M4L3M3L5L3M3O20O00010O00010O00001@k@4T?Io@7Q?FRA:n>CVA=V?0001L3M3M3Mjl8"}, "image_id": 941, "id": 15455}, {"iscrowd": 0, "category_id": 1, "bbox": [347.0, 194.0, 64.0, 46.0], "area": 1595, "segmentation": {"size": [512, 512], "counts": "Wg]51m?3M2N2M4M2010O01O0O2L3N2M4M2N3O010O01O0N2N3L3M4M2001N1M4M2M3N3L3N3O01O01O010O01O01O010O00010O0M4M1O02M4M2M4M2M3N3L3N3L3N2M4M2MlYb1"}, "image_id": 941, "id": 15456}, {"iscrowd": 0, "category_id": 1, "bbox": [416.0, 197.0, 47.0, 68.0], "area": 1755, "segmentation": {"size": [512, 512], "counts": "oW`61l?3N3kNMXB6f=LXB7d=MXB6f=LXB6e=MXB7e=LXB6e=MXB7e=LXB6h=JUB:k=ESB=m=DPB>Q>i0O0010O0010O010O0001L3N3[NiA]1Y>aNiA\\1`>M2M4M2M3N3L3N3O000010ON3M2M3N3M2M4M2MZYh0"}, "image_id": 941, "id": 15457}, {"iscrowd": 0, "category_id": 1, "bbox": [458.0, 239.0, 54.0, 46.0], "area": 1525, "segmentation": {"size": [512, 512], "counts": "[XU71l?3N3L3N3M2M4M2N2M4M2O2O010O01O01O010O01O010O01O010M2N2N3L310O0010O0010O0010O010O0N3M2N2M4M2N3L3N3M2M3N3L3NYH"}, "image_id": 941, "id": 15458}, {"iscrowd": 0, "category_id": 1, 
"bbox": [349.0, 259.0, 48.0, 58.0], "area": 1571, "segmentation": {"size": [512, 512], "counts": "_i^54j?2N3L3N2^OD]A`0_>C_A?_>D^A?_>C^A`0_>D^A>`>c0L3N3O010O00010O010O0010O0010OO2L3N2N3L3N3L3N2N3L3N3M201O00010M2N3L3N3L3N2NaWi1"}, "image_id": 941, "id": 15459}, {"iscrowd": 0, "category_id": 1, "bbox": [410.0, 265.0, 27.0, 28.0], "area": 454, "segmentation": {"size": [512, 512], "counts": "kX]62l?3L3N3L3N2M4M2O20O01O01O010O01O01O010N1M3N3L3N3L3N3MaWU1"}, "image_id": 941, "id": 15460}, {"iscrowd": 0, "category_id": 1, "bbox": [465.0, 286.0, 30.0, 28.0], "area": 490, "segmentation": {"size": [512, 512], "counts": "^iX73k?2N3L3N2N3L3N3O010O00010O010O0010O0010O010ON2N3M2M4M2M4M2NkV8"}, "image_id": 941, "id": 15461}, {"iscrowd": 0, "category_id": 1, "bbox": [301.0, 303.0, 99.0, 61.0], "area": 2459, "segmentation": {"size": [512, 512], "counts": "Vjf42l?2M4L3N3L3N2M4L3N2010O01O01O01O010O01O01O01O010O01O01O01O010O01O0N2N3O010O01ON3O0001N101O01O010O00010O010O0002O0m@YOk>g0RA]Om>k01O010O01O01O010O01OSORAh0T?O010O00010O01O01O010O01O01O010O00010O01O01M2M4M2M3M4M2M4M2M_eg1"}, "image_id": 941, "id": 15462}, {"iscrowd": 0, "category_id": 1, "bbox": [418.0, 307.0, 61.0, 50.0], "area": 1653, "segmentation": {"size": [512, 512], "counts": "VZa62l?2M4M2N3L3N2k@_Oh>e0UA]Oi>e0UA]Ok>m0O010O010O00010O010O00010O010O00010O010O02O0O0010O0010O0010O0010O0010O010O0010OM13L3N2M4M2M4M2N3L3N2M4MkU`0"}, "image_id": 941, "id": 15463}, {"iscrowd": 0, "category_id": 1, "bbox": [483.0, 309.0, 23.0, 29.0], "area": 354, "segmentation": {"size": [512, 512], "counts": "\\ja71m?3M2M4M2N3M2M3N3M2M4O0001L3N3M2N3L3N2N3M2M4MXf2"}, "image_id": 941, "id": 15464}, {"iscrowd": 0, "category_id": 1, "bbox": [286.0, 341.0, 99.0, 60.0], "area": 2449, "segmentation": {"size": [512, 512], "counts": "[[_42k?3N2M4M2N3L3N2M4M201O00010O010O00010O010O00010O010O0n@VOn>o00O0010O0010O010O00010O010UOSAa0m>\\OVAd0j>ZOYAf0P?0010O0010O0010O0010O0010O0N3O00010O010OO1N3L3O2O01O010O01O01O010O01O01O010O01O01O010O01O01O001M2M3N3L3N3L3N2M4MYTo1"}, "image_id": 941, "id": 15465}, {"iscrowd": 0, "category_id": 1, "bbox": [444.0, 359.0, 24.0, 24.0], "area": 340, "segmentation": {"size": [512, 512], "counts": "f[n62l?2M4M2N2M4M2O2O010O01O010O01O010OO2L3N2N3L3N3Mdde0"}, "image_id": 941, "id": 15466}, {"iscrowd": 0, "category_id": 1, "bbox": [376.0, 360.0, 68.0, 122.0], "area": 3697, "segmentation": {"size": [512, 512], "counts": "e^l53j?3M4L3M3N3L3M3010O0010O0010O00N3L3M3M4L3M3N0O3O2O01OcNaNYD`1c;cN\\D^1b;eNmCJBd1^e>^O^A?e>^O^AMJ=k>DbA9U?L3M3MncQ1"}, "image_id": 941, "id": 15467}, {"iscrowd": 0, "category_id": 1, "bbox": [447.0, 393.0, 63.0, 107.0], "area": 3565, "segmentation": {"size": [512, 512], "counts": "Vno63k?2SALk=7SBKk=8SBJi=9UBJh=9VBHg=;WBHf=:YBHc=X1>J2M4M2N3L3N2N3L3N3L3N3M2MXc0"}, "image_id": 941, "id": 15468}, {"iscrowd": 0, "category_id": 1, "bbox": [281.0, 398.0, 58.0, 72.0], "area": 2055, "segmentation": {"size": [512, 512], "counts": "gm\\42l?3L3N2N3M2M4M2N3L3N2N3L3N3M2N3L3N3M200010O01O010O01O010O010jNdAf0\\>WOhAi0W>UOkAk0V>ROlAo0S>nNQBQ1_>010M2M4]AhNZ>Z1cAiN]>]110O01CcAVO\\>g0gAYOY>e0iA[OX>a0lA_OS>?oAAR>;QBFn=8UBGl=5WBKi=3ZBMe=0^B0c=M_B4`=IdB5^=HdB6d>M4MfQf2"}, "image_id": 941, "id": 15469}, {"iscrowd": 0, "category_id": 1, "bbox": [352.0, 403.0, 30.0, 28.0], "area": 509, "segmentation": {"size": [512, 512], "counts": "U]`51m?3L3M3N3L3M4M20010O00010O0010O0010O00010O010OO1M4M2M3N3L3MXcP2"}, "image_id": 941, "id": 15470}, {"iscrowd": 0, "category_id": 1, "bbox": [479.0, 464.0, 33.0, 48.0], "area": 242, "segmentation": {"size": [512, 512], "counts": 
"mo_73m?000000000001O000000000000000000000000000000001O000000E;_Oa0_Oa0N200"}, "image_id": 941, "id": 15471}, {"iscrowd": 0, "category_id": 1, "bbox": [252.0, 467.0, 51.0, 45.0], "area": 1334, "segmentation": {"size": [512, 512], "counts": "n_n32k?3N2M3N2O100001O001O00ZOGfA8X>KdA8Y>KeA7Y>LdA6Z>LcA7Z>McA5]>KaA7_>I^A:c>a0001O001O00001O001O001O00001O001O001N1N2M4M2N3L3N2N3L3N3M2M4MQQX3"}, "image_id": 941, "id": 15472}, {"iscrowd": 0, "category_id": 1, "bbox": [14.0, 0.0, 58.0, 43.0], "area": 1363, "segmentation": {"size": [512, 512], "counts": "hP72m?1O2N2EJi@8U?Jh@9U?Ji@8U?Ji@7V?;N2N2M3N1O2O1O1O1O1O001O1O1OO1O1O1N2O1O1O1O1O1N2O1O1O1O1O1N2O1O1O1O1N2O1O1O1O1O1N2O1O1O1O1O1N2O1O1OQPl6"}, "image_id": 942, "id": 15473}, {"iscrowd": 0, "category_id": 1, "bbox": [221.0, 0.0, 53.0, 26.0], "area": 726, "segmentation": {"size": [512, 512], "counts": "P`^31o?1O001O1O1O1O1O1O001O1O1O1O1O1O001O1O1O1O1O1O001O1O1O1O1O1O0000O1O1O1N2O1O1O1O1O1N2O1O1O1O1O1N2O1O1O1O1OQPg3"}, "image_id": 942, "id": 15474}, {"iscrowd": 0, "category_id": 1, "bbox": [55.0, 8.0, 72.0, 76.0], "area": 2655, "segmentation": {"size": [512, 512], "counts": "lak02m?2N2M3N1O2N2N2GBm@`0Q?Bl@a0R?8N2N1N3N2N2N2M3N2N2N1O2M3N2N2N2NO01000000O10O100000O1000O3N1O2N1O2N1N2O2N2N2N2N2M3O1oNUAl0k>QOXAn0n>000O10O100M3N2M2O2N2M3N2N2M2O2M3N2Nj^P6"}, "image_id": 942, "id": 15475}, {"iscrowd": 0, "category_id": 1, "bbox": [293.0, 9.0, 63.0, 79.0], "area": 2326, "segmentation": {"size": [512, 512], "counts": "Xbb42S?OfA3X>OfA3X>OfA3X>OfA3X>OfA3W>O]AG3<_>O\\AF4=^>5`AM^>5`AM^>5`AM^>l0M3N1O2N2N2M2OO10000000O10O10000000OJQB^No=b1SB[Nn=d1702N1O2N2N2M3N2N2N1O2N2M3N2N2N2N1O200O100O1M3N1O2N2N2N2M3N2N1Oon]2"}, "image_id": 942, "id": 15476}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 31.0, 25.0, 52.0], "area": 682, "segmentation": {"size": [512, 512], "counts": "o0d1]>N2N2M3N1O2N2M3N2N1O2N2M3N2N2N1N3N2N2N2N1N3N2N2N^^c7"}, "image_id": 942, "id": 15477}, {"iscrowd": 0, "category_id": 1, "bbox": [329.0, 62.0, 73.0, 63.0], "area": 2249, "segmentation": {"size": [512, 512], "counts": "nbT51n?2N2M3N1O2N2N2M3N2N2N2N2M2O2N2N1O0TAPOi>o0XAROi>n0UATOk>Q100000000N1O2N2N2N2M3O010O100000N2N2M10O100000O10O100000O10O1002N2N2M2O2N2N2N2M3N1O2N2N2M3N2N2N1O2M3N2N2Namf1"}, "image_id": 942, "id": 15478}, {"iscrowd": 0, "category_id": 1, "bbox": [100.0, 66.0, 64.0, 58.0], "area": 2107, "segmentation": {"size": [512, 512], "counts": "RSb12l?3N2N2N2N1N3N2N2N2N2M2O2N2N2N2M3N1O2N2N2N2M3N1O2N2N2N2000O1O0O2N2N2N0000000O3N2N2N1O2O1O1O1N0000O010001O1O2HSAYOP?e07N2N2N1N3I`@Kb?3`@Kb?28Ncm]5"}, "image_id": 942, "id": 15479}, {"iscrowd": 0, "category_id": 1, "bbox": [13.0, 72.0, 61.0, 79.0], "area": 2215, "segmentation": {"size": [512, 512], "counts": "ac61e01b>1\\A1b>0]A2`>1]A2a>0]A1b>0]A2a>0]A2a>0]A2a>O^A3_>0_A2_>0_A2_>O`A3^>i0N2N1N3N2N1O00000O0100000000O0ImAdNS>\\1PBaNP>_1RB_Nn=a18O10O100001O2N1N3N2N2N2N2N2N1N3N2N2N2N2N2M2O2N2N2N2N2N2M2O2NUmj6"}, "image_id": 942, "id": 15480}, {"iscrowd": 0, "category_id": 1, "bbox": [395.0, 96.0, 61.0, 73.0], "area": 2291, "segmentation": {"size": [512, 512], "counts": "jcU62n0OP>3nAOP>3nANP>4nAOP>3nAOo=4oAMP>5`AC8:V>5`AB83aAC7;W>:gAHW>:gAHW>:fAIZ>o000000000O1N1O00000O1000O1000O100000O10O11O2N2N2M3N2N2N1O2N2M3N2N2N1O2N2M3N2N2N1O2N2M3N2N2N1O2N2M[lk0"}, "image_id": 942, "id": 15481}, {"iscrowd": 0, "category_id": 1, "bbox": [48.0, 114.0, 79.0, 66.0], "area": 2660, "segmentation": {"size": [512, 512], "counts": 
"jTh02m?2M3N1O2N2N2N2N2N2M3N1O2N20000N2N2N1O001O2M3N2N2N2N2N1O2N2M3000O1N2N2N2N2N1N2O0002N2M3N1O2N000000O0100000O101O2N2N2M2O2N2N2M3N1O2N2M3N2N2N1O2M3N2N2N1N3N2N2Nl[P6"}, "image_id": 942, "id": 15482}, {"iscrowd": 0, "category_id": 1, "bbox": [420.0, 148.0, 73.0, 64.0], "area": 2634, "segmentation": {"size": [512, 512], "counts": "cUb62m?2N2N1O2N2M3N2N1O2c@_OX?e0O2N2N2N2M3O001000000000O100000O1N2N1O1O0O0100001O2M3N1O2N2N2M3N2N1O2N2O1000O1N2N2N1N30000O1N2M2O2N2N2N2M3N1O2^OTAMn>1TAMn>0UANm>0TANo>0SANn>1TAMn>1TAMn>0X[9"}, "image_id": 942, "id": 15483}, {"iscrowd": 0, "category_id": 1, "bbox": [209.0, 150.0, 67.0, 62.0], "area": 2271, "segmentation": {"size": [512, 512], "counts": "TeX31n?2N2j@L]>5bAM\\>5bAL]>6aAL\\>7bAK\\>7bAK\\>7aAL]>6aAL]>5bAM\\>5bAL\\>7bAK^>5`AM`>j0O100000O1000O100000O100000O1N1O00000O10O2O2N1O2N2N2M2O00000000O010001O2N2M3N2N2N1O2N2M3N2N2N2N1N3N2N2N2Njje3"}, "image_id": 942, "id": 15484}, {"iscrowd": 0, "category_id": 1, "bbox": [145.0, 186.0, 87.0, 93.0], "area": 3390, "segmentation": {"size": [512, 512], "counts": "VhX21n?2N2M3N1O2N2N2N2M3N2N2N1O2M3ZOSOVBo0h=SOVBo0h=SOVBo0d=nNnA6;o0e=VOYBk0f=WOXBh0i=YOVBg0i=\\OUBd0k=^ORBc0n=g0000O2O2N00O100LWNRBh1o=4O1O100O10O10000000001N3N2N1O2N2N2M3N2N2N1O2N2M30000O01000000000O01000N2N2N2N1O2N2M3N2N2N2N1N3N2N2N2N2M2O2NTi[4"}, "image_id": 942, "id": 15485}, {"iscrowd": 0, "category_id": 1, "bbox": [265.0, 188.0, 61.0, 69.0], "area": 2171, "segmentation": {"size": [512, 512], "counts": "^gT41m?3N2N2N1O2N2N2M3@_O^Ac0_>@_Ab0V>\\OfA42a0W>]OeA42a0W>]OeA42a0W>]OeA42a0W>j0N2M3N1O2N2N20N2N2M3N00O1000O101O2N2N2M3N1O2N00001N3N2N2N2N2M3N2N1O2N2M3N2N2N2N2M2O2N2N2N2N`il2"}, "image_id": 942, "id": 15486}, {"iscrowd": 0, "category_id": 1, "bbox": [479.0, 200.0, 33.0, 49.0], "area": 914, "segmentation": {"size": [512, 512], "counts": "of_72m?2N2M3N1O2N2N2N2N2N2M2O2N2N2N2O100O10O100000N2N00002M3N2N2N1O2N2gI"}, "image_id": 942, "id": 15487}, {"iscrowd": 0, "category_id": 1, "bbox": [313.0, 222.0, 62.0, 72.0], "area": 2394, "segmentation": {"size": [512, 512], "counts": "Xhl42m?2`@No>4o@Me>0XA42Nd>0WA53Md>0WA52Ne>OWA52Me>`0YABe>n0FhNiAZ1U>gNjAZ1U>hNhA[1V>9M3N2O001000N0O10000000O10O101O2N1O2N2N2M3N1O2N2N2N2M2O2N2N2O10O1000O1O1N1O2M3N2N2N1N3N2N2N2M2O2N2NXXT2"}, "image_id": 942, "id": 15488}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 237.0, 27.0, 40.0], "area": 632, "segmentation": {"size": [512, 512], "counts": "]7W1j>N11000O1N2N1N2OO1000O2O2N2N2N1N3N2N2N2M2O2N2N2M3N1OXXb7"}, "image_id": 942, "id": 15489}, {"iscrowd": 0, "category_id": 1, "bbox": [202.0, 249.0, 73.0, 70.0], "area": 2564, "segmentation": {"size": [512, 512], "counts": "fXU31n?2M3N2N2N1O2M3N2e@_OT?c0j@_OT?h0M3N2O00100000O1NWATOa>j0_AXO`>h0_AZO_>g0_A[O`>g0^A[O`>g0^A[O`>T1N2M2O2N2N2M3N2N110O01N2N2N2N2M2O1O00O010000000O01KeAfN[>Z1fAeN[>Z1gAdN[>Y18N2N2N2M2O2N2N2N2M2O2N2N2M3N2N1O2M3N2N2NfWf3"}, "image_id": 942, "id": 15490}, {"iscrowd": 0, "category_id": 1, "bbox": [356.0, 270.0, 74.0, 68.0], "area": 2579, "segmentation": {"size": [512, 512], "counts": "eYb52m?2N2M3N2N1O2JCf@?W?7N2N2N2N1N3HPO]AR1a>PO]AR1a>oN^AS1`>8N1O2M3N2N2N2N1O2000000000O01000O1N2N2N2N1N2O001BcAXO^>g0dAWO]>h0eAVO\\>i0fATO\\>k0eATO\\>j0gATOZ>k0=0O1000O1000O2O11000000O0O2M3N2N2N2M2O2N2N2N2M3N1O2NTgX1"}, "image_id": 942, "id": 15491}, {"iscrowd": 0, "category_id": 1, "bbox": [257.0, 279.0, 66.0, 68.0], "area": 2225, "segmentation": {"size": [512, 512], "counts": 
"PjP41m?3N2N1O2N2N2M3N2N2N1O2M3N2N2N2N000O2O1O2N2N2M3N2O0100000O1N2N2N1O2M3N2N2N1OO010000002N2M3N2N2AfAUO\\>h0gAVOZ>i0gAUO\\>i0fAUO\\>i0fAUO\\>i0fAUO\\>i0?M2O2N2N2N2M3N2N1O2N2N2MgVn2"}, "image_id": 942, "id": 15492}, {"iscrowd": 0, "category_id": 1, "bbox": [79.0, 301.0, 73.0, 73.0], "area": 2767, "segmentation": {"size": [512, 512], "counts": "PjW12m?1O2M3YAJa=8]BIb=9\\BIb=8]BJa=8jAC88k=8kAA99j=8jAB:8j=8jAB:7k=b0SB@j=c0SB@k=a0TBAj=a0TBAj=a0TB@m=Z10000O1000N2N2N1O2N2N1N100000O1000O10001O1O2N2N2M3N2N1O2N2N2M3N1O1OO1000O10O10000002M2O2N2N2N2N1O2M3N2N2N2N1O2M3NXfc5"}, "image_id": 942, "id": 15493}, {"iscrowd": 0, "category_id": 1, "bbox": [413.0, 303.0, 66.0, 82.0], "area": 2777, "segmentation": {"size": [512, 512], "counts": "[k^62m?2N2N1N3nNIXB9f=IhAM3HiAN2HiAM3=R>HiAM3JiAL2=S>IhAM5:Q>8oAHo=:oAHo=:oAHo=V1M2O2N2N2N2N2M3N1O20N2M3N2N2N2N1O2M3N2N2N2N1N3N2N2aNdAW1^>gNdAW1d>N2M1000002N110O0O2M3N1O2N1N3N2N2M3N2N2N2M3N2N2N_U`0"}, "image_id": 942, "id": 15494}, {"iscrowd": 0, "category_id": 1, "bbox": [308.0, 325.0, 72.0, 76.0], "area": 2727, "segmentation": {"size": [512, 512], "counts": "Q[j42l?3M2O2M3N2N2N2N1O2N2[A[Ol=g0RBZOn=g0PB[OP>e0mA^OR>b0mA@S>`0kAAV>?hACX>=fAEZ>;dAF\\>;aAH_>m0N2N2N2M3N2N2N3L2O0000O01000000O11N3N2N2N1O2N2M3N2N2N1O2N2M2O00O12N2N2M3N1O2N2N2M3N2N1O2N2N2M3N1O2N2N2NUeQ2"}, "image_id": 942, "id": 15495}, {"iscrowd": 0, "category_id": 1, "bbox": [113.0, 344.0, 65.0, 70.0], "area": 2176, "segmentation": {"size": [512, 512], "counts": "kkh12m?2N2N2N2M2O2N2N2N2M3N1O2N2N2N2000O10O100O1M3N2N1O2N2N2M3N2N1O2N2O10000O010O1N2N1O000OVOoAHQ>8QBFo=:SBCn==TBAl=>WB@h=a0ZB]Oh=a0YB^Oi=`0YB]Oj=a0XB]Oj=`0YB^Oi=`0YB^Oh=a0ZB]Oh=a0m0N2M3N2N2N1O2M3N2NkdV5"}, "image_id": 942, "id": 15496}, {"iscrowd": 0, "category_id": 1, "bbox": [457.0, 361.0, 55.0, 75.0], "area": 2410, "segmentation": {"size": [512, 512], "counts": "YlT72m?1O2M3N2N2FFk@;T?Gi@`16N1O2M3N2N2M2O2N2N0O1000O10O1002^D"}, "image_id": 942, "id": 15497}, {"iscrowd": 0, "category_id": 1, "bbox": [11.0, 362.0, 77.0, 75.0], "area": 2714, "segmentation": {"size": [512, 512], "counts": "hl52m?2N2M2O2N2N2N2N2N2M3N2N1100000O1N1O00002N2N1N3N2N2N2N2EfNmA\\1Q>fNmA\\1Q>fNlA\\1S>:M3N2N2N2N2N1O0O11N3N2N2N1O2N2M3N2N2N2JeAdN]>Z15O2N2M3N5K2N2N2N20O01N2NO1000O2O2N2N2M2O2N2N2N2M3Nlcc6"}, "image_id": 942, "id": 15498}, {"iscrowd": 0, "category_id": 1, "bbox": [356.0, 374.0, 73.0, 73.0], "area": 2615, "segmentation": {"size": [512, 512], "counts": "^\\b51n?1O2N2M3N2N2N2N1O2M3N2N2N2N1bAVOd=m0ZBTOg=l0WBVOi=j0TBYOl=g0RBZOo=f0oA\\OP>d0oA]OQ>d0mA^OQ>d0lA_OR>c0lA_OQ>d0mA^OQ>Z1N2N2N2M3O1ON3N2N2N2N2M3N1O2N2M3N2N2J`AiNa>V1aAhNa>W15N0000000O010000000O3N2N2N2N2N1N3N2N2N2N2M3N1O2N2N2M3NiSY1"}, "image_id": 942, "id": 15499}, {"iscrowd": 0, "category_id": 1, "bbox": [153.0, 390.0, 63.0, 75.0], "area": 2484, "segmentation": {"size": [512, 512], "counts": "[m\\22m?2M2O2N2N2N2M2O2N2N2M3N2N1O2O100000O10N2N2M3N00002ZAkN]>X1aAjN]>]1O2N2N2M3N1O2N2M3N2N2N10100000N1O000UOWB_Oj=a0XB]Oh=b0[B\\Og=c0YB\\Oi=c0XB[Oj=d0WBZOk=d0WBZOj=e0XBXOk=e0XBYOj=e0WBZOk=d0WBZOk=a0eAZOb03k=`0[B^Of=6aA2]?Ld@3^?Kd@3^?Kd@3f?MWcc4"}, "image_id": 942, "id": 15500}, {"iscrowd": 0, "category_id": 1, "bbox": [72.0, 398.0, 73.0, 83.0], "area": 2967, "segmentation": {"size": [512, 512], "counts": "_]T11n?2N2M2O2N2N2YADg=>WBDg=>VBEg==XBEf==XBDg=>WBDg=>VBEh==VBEj=:UBHk=8RBKn=5PBLP>5nAMR>3lAOT>1iA2U>n0N2M2O2N1O000O0100000O100000O010000002N1N3N2N2N2N1O2N2M3N2N2N1O1O1N2O2N2N2N2N1O2M3N2N2N1O2N2M3N2N1O2N2N2NjRg5"}, "image_id": 942, "id": 15501}, {"iscrowd": 0, "category_id": 1, "bbox": [409.0, 424.0, 66.0, 65.0], "area": 
2227, "segmentation": {"size": [512, 512], "counts": "Xn\\61n?2N2N2N1N3N2N2N2N2N2M2O2N2n@WOj>k0TAWOj>Q1N2M2O2N2N2N2N2O10O10O100000000OO000000000O2O2N0000000O010000002N2M3N1O2N2N2N2N2M3N1[Ol@=V?Al@=V?Al@=]?N1N3N2N2N2NRRb0"}, "image_id": 942, "id": 15502}, {"iscrowd": 0, "category_id": 1, "bbox": [225.0, 439.0, 66.0, 64.0], "area": 2165, "segmentation": {"size": [512, 512], "counts": "gn`32m?2N2N1N3N2e@En>=PAEn>=PAEm>=RAEl>=RAEl>h0LQOVAQ1g>6N2N2N2M2O2N2N2N2M20100OO2N2N2M3N1O2N2N2N2M3N1O2N00000O10O100000O10O100002N2N20OO2M3N2N2N2M2O2N2N2N2M2O2N2NlQ^3"}, "image_id": 942, "id": 15503}, {"iscrowd": 0, "category_id": 1, "bbox": [506.0, 446.0, 6.0, 11.0], "area": 37, "segmentation": {"size": [512, 512], "counts": "T^m71m?3N1O2N2N2QB"}, "image_id": 942, "id": 15504}, {"iscrowd": 0, "category_id": 1, "bbox": [118.0, 447.0, 72.0, 65.0], "area": 2553, "segmentation": {"size": [512, 512], "counts": "k^k11n?2N2M2O2N2N2N2N2N2XA_Om=c0RB^Om=d0QB^Om=d0PB_OP>a0nAAR>?lACT>=jADV>=hADY>:bAH_>8_AI`>l0O1N2O1O1O1O1O1O1N2O1O1O1O1O1O1N20O2M3N1O2N2N2M3N2N2N2N1O2M10O11O1O2N2M3N2N2N2N2N1N3N2N2N2N2N2M2O2N2N2N[aP5"}, "image_id": 942, "id": 15505}, {"iscrowd": 0, "category_id": 1, "bbox": [451.0, 457.0, 61.0, 55.0], "area": 2095, "segmentation": {"size": [512, 512], "counts": "goQ71n?2N2N1O2N2M3N2N2N1O2M2O1J[OPAf0o>\\Oo@e0P?6O1N2O1O1O1O1O1N2O1O1O1O1N2O1O1O1O1O1N2O11O1O1O1O001O1O1O1O1O1O001O1O1K[AlNg>R15N1N3N2N2N2XOj@c0[?N2M3No@"}, "image_id": 942, "id": 15506}, {"iscrowd": 0, "category_id": 1, "bbox": [262.0, 476.0, 52.0, 36.0], "area": 983, "segmentation": {"size": [512, 512], "counts": "o_S41n?1O1O1O1O1O1O1O1001O00O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1O1N2O1O1O1O1O11O001O1O1O001O1N2N1^Ol@7V?Gk@7X?Gj@7X?Gj@7a?M2O2N2N_`R3"}, "image_id": 942, "id": 15507}, {"iscrowd": 0, "category_id": 1, "bbox": [173.0, 497.0, 61.0, 15.0], "area": 487, "segmentation": {"size": [512, 512], "counts": "oof21n?1O1N2O1O1O1O11O1O1O1O00O1O1O1O1O1N2O1O1O1001O1O001O1O1O1O1O001O1O1O1O0000O1O1O1O1N2O1O1O1001O1O1O001O1O1O1O1O001O1O1O1OQ`Z4"}, "image_id": 942, "id": 15508}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 34.0, 43.0], "area": 1215, "segmentation": {"size": [512, 512], "counts": "0V1j>001O00000000001O00000000001O00000000001O000000001ON2J6J6K5J6J6J6KUP_7"}, "image_id": 943, "id": 15509}, {"iscrowd": 0, "category_id": 1, "bbox": [46.0, 0.0, 14.0, 14.0], "area": 153, "segmentation": {"size": [512, 512], "counts": "TPg05j?3L3O1N10001O00O101O0O2N3LnoQ7"}, "image_id": 943, "id": 15510}, {"iscrowd": 0, "category_id": 1, "bbox": [138.0, 0.0, 30.0, 15.0], "area": 240, "segmentation": {"size": [512, 512], "counts": "PPU21o?1O2N1O1O1O2N1O1O1O2N1O00O1O1O100O1O1O1O100O1O1O1O100O1O1OQP\\5"}, "image_id": 943, "id": 15511}, {"iscrowd": 0, "category_id": 1, "bbox": [242.0, 265.0, 27.0, 32.0], "area": 544, "segmentation": {"size": [512, 512], "counts": "hXi31f?1`@3]?Oa@3]?0`@3\\?;N3L3NO10O100O3N0O0100O012N2M4M2M3N3M2M4M2M4MoVi3"}, "image_id": 943, "id": 15512}, {"iscrowd": 0, "category_id": 1, "bbox": [437.0, 313.0, 50.0, 52.0], "area": 1328, "segmentation": {"size": [512, 512], "counts": "^jj62m?3M2N2O1N2N2N3M2N2N2O1N2N2N3M2N2N2O1N2N2N3M2N2N2000N2N2N3M2N2N2O1N2N2N3M2N2N2N2O1N2N3M2N2N2N2N2O1N\\U<"}, "image_id": 943, "id": 15513}, {"iscrowd": 0, "category_id": 1, "bbox": [211.0, 318.0, 98.0, 61.0], "area": 3229, "segmentation": {"size": [512, 512], "counts": 
"ljY31o?2M4M3L3N3M2M4M2M4M1O0GVO[Ak0e>XOXAg0i>8O010O10O10O010O10O10O0KnN^AR1b>6O0100O0100O0100O0100O0102N2M2OO012N2M4M2N3L3N3L3N00O010O10O10O2O3L3N3M2M4M2M4M2N3L3N3L3N1O0O010O01000O010O01000O01000O010O01002M4M2M4M2N3L3NhTU3"}, "image_id": 943, "id": 15514}, {"iscrowd": 0, "category_id": 1, "bbox": [355.0, 417.0, 12.0, 12.0], "area": 115, "segmentation": {"size": [512, 512], "counts": "Uma55i?3N2O1N100001O000O2N3MlRX2"}, "image_id": 943, "id": 15515}, {"iscrowd": 0, "category_id": 1, "bbox": [250.0, 0.0, 40.0, 17.0], "area": 382, "segmentation": {"size": [512, 512], "counts": "RPm31m?201O001O001O00001O001O001O00001O001O00001O001O001O00001O001O001OO1M3N2N2M3N2NRP_3"}, "image_id": 944, "id": 15516}, {"iscrowd": 0, "category_id": 1, "bbox": [478.0, 0.0, 34.0, 19.0], "area": 615, "segmentation": {"size": [512, 512], "counts": "XP_79_?80001O000000000000000000000000000000001O00000000000000000000000000"}, "image_id": 944, "id": 15517}, {"iscrowd": 0, "category_id": 1, "bbox": [312.0, 23.0, 27.0, 22.0], "area": 350, "segmentation": {"size": [512, 512], "counts": "RQl42k?4M2M4M2O1010O010O00010O010O00010O010O00010O0M4M2M3NS_f2"}, "image_id": 944, "id": 15518}, {"iscrowd": 0, "category_id": 1, "bbox": [241.0, 40.0, 63.0, 55.0], "area": 1654, "segmentation": {"size": [512, 512], "counts": "oah32m?1O2N2M3N2N2N1`@C\\?a0O2N2N2N2N2N1O2N2N2N2N2M1001O2M3N2O100000O10O10000000O1000O100000O1000O1000000N1O2N2N2N2M3N1O2N2N2N2M3N1O2N2NgmW3"}, "image_id": 944, "id": 15519}, {"iscrowd": 0, "category_id": 1, "bbox": [432.0, 59.0, 47.0, 28.0], "area": 1231, "segmentation": {"size": [512, 512], "counts": "aRh65U?f0000000000000000000000000000000000000000000001O0000000000000000000001O00000000000000000000KY^`0"}, "image_id": 944, "id": 15520}, {"iscrowd": 0, "category_id": 1, "bbox": [210.0, 92.0, 56.0, 48.0], "area": 1446, "segmentation": {"size": [512, 512], "counts": "bSY33k?2N2M4M2M4M2M3N3M2O20O01O01O010O010O00010O010O00010OPAWOi>P11O01O010O01O010O01O010M2M310ON2N3L3N3L3N2N3L3N3L3N3L3N2Nilj3"}, "image_id": 944, "id": 15521}, {"iscrowd": 0, "category_id": 1, "bbox": [318.0, 94.0, 31.0, 28.0], "area": 502, "segmentation": {"size": [512, 512], "counts": "^So43k?2M3N3M2M4M2N210O0010O010O0010O0010O0010O0010O0M4M2M4M2M3N3Mj\\a2"}, "image_id": 944, "id": 15522}, {"iscrowd": 0, "category_id": 1, "bbox": [480.0, 96.0, 32.0, 31.0], "area": 645, "segmentation": {"size": [512, 512], "counts": "dS`71m?3M2M3N3L3N3L3N3N100010O010O00010O010O00010O010O001O0N2M4M2M4MiL"}, "image_id": 944, "id": 15523}, {"iscrowd": 0, "category_id": 1, "bbox": [278.0, 106.0, 36.0, 37.0], "area": 784, "segmentation": {"size": [512, 512], "counts": "TT[41m?3L3N2M4L3N3L3N2M4M2O2O01O010O00010O010O00010O01OO2L3M4M2M3N3L3N3L3N2M`lR3"}, "image_id": 944, "id": 15524}, {"iscrowd": 0, "category_id": 1, "bbox": [163.0, 122.0, 43.0, 36.0], "area": 879, "segmentation": {"size": [512, 512], "counts": "eda23k?3M2M3N3M2M4M2N2M4M2N2N3O0010O0O1N3L3M4M2O1010O01O01O01O01O010O01O01O01N1M3N3L3M4M2Mnkh4"}, "image_id": 944, "id": 15525}, {"iscrowd": 0, "category_id": 1, "bbox": [257.0, 127.0, 16.0, 17.0], "area": 167, "segmentation": {"size": [512, 512], "counts": "YdP43j?4L3N2N30O01O01O010O0O1M4L3M3No[g3"}, "image_id": 944, "id": 15526}, {"iscrowd": 0, "category_id": 1, "bbox": [497.0, 130.0, 15.0, 26.0], "area": 268, "segmentation": {"size": [512, 512], "counts": "ddh73k?2M3M4M2M3N3L301O01O010O01O0lK"}, "image_id": 944, "id": 15527}, {"iscrowd": 0, "category_id": 1, "bbox": [414.0, 132.0, 49.0, 63.0], "area": 2334, "segmentation": {"size": [512, 512], "counts": 
"[U_6b0^?000000000RO3iAMW>3iAMW>3iAMW>3iAMW>3iAMn=l0nAWOR>i0kAYOU>g0hA]OW>V110O01O010O010O01L3N2N3M2M4O00N2N3M2M4M2FiAiNZ>T1iAiNY>U1iAhNZ>V1:L3N3M2N3L3N3M2M3N3M2N3L3NQjf1"}, "image_id": 944, "id": 15531}, {"iscrowd": 0, "category_id": 1, "bbox": [135.0, 203.0, 47.0, 65.0], "area": 1632, "segmentation": {"size": [512, 512], "counts": "mgS21l?3N2N3L3N3M2M4F^ORAd0l>^ORAe0j>:N3L3N3L3N3L3N2N3O010O0mAWNP>l101O01O0M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N2M4M2M]iT5"}, "image_id": 944, "id": 15532}, {"iscrowd": 0, "category_id": 1, "bbox": [382.0, 227.0, 69.0, 80.0], "area": 2496, "segmentation": {"size": [512, 512], "counts": "YYo53k?2M3N3L3M4M2M3010O01O01O01N1M3N3L3M4M2M12M3N3L2OO012M3N2M4M2O2O01O010OO2L3M100O02N3N3O0010O0O1M4M2M4M2O101ON3L3M4M2M3N3L3N3L3[OTA3o>KSA3P?ITA3o>KSA3P?ITA3^?NZXn0"}, "image_id": 944, "id": 15533}, {"iscrowd": 0, "category_id": 1, "bbox": [158.0, 232.0, 81.0, 142.0], "area": 4043, "segmentation": {"size": [512, 512], "counts": "X[_23k?3L3N3L3N2M4M2M4M2M3N3L3N3L3N3L3N2M4M2M4L3N3L3N2M4M0O10O010O0100O010O10O0100O010OROdB\\O\\=d0gBXOY=h0jBVOV=j0mBSOR=n0PCoNQ=T1nBjNQ=Y1PCcNQ=`1nB^NQ=e1PCXNP=j1PCSNP=Q2`0O01O01N1N3L3N3L3N2N3L3N3M2M2O002M4M2N3L3N3M2M3N3L3N3M2M3NbXX4"}, "image_id": 944, "id": 15534}, {"iscrowd": 0, "category_id": 1, "bbox": [247.0, 232.0, 61.0, 81.0], "area": 2763, "segmentation": {"size": [512, 512], "counts": "_hk33j?3M3M4L3M3M4L3O20O00O2M2M3M4L3ZAjN_>^1TBaNR=b1kBaNU=_1hBeNW=\\1eBgN[=Y1bBjN_=U1^BoNa=h10010O01O01O01O010O00010O0010O00M4L3N3L3M3M4M2M4L3M3M4M2M4L3M3N3L3M4L3M3N3L3M[hU3"}, "image_id": 944, "id": 15535}, {"iscrowd": 0, "category_id": 1, "bbox": [510.0, 238.0, 2.0, 5.0], "area": 8, "segmentation": {"size": [512, 512], "counts": "`Wo73k?2bH"}, "image_id": 944, "id": 15536}, {"iscrowd": 0, "category_id": 1, "bbox": [320.0, 247.0, 53.0, 68.0], "area": 2016, "segmentation": {"size": [512, 512], "counts": "dXP51m?3L3N3L3N3L3N2M4M2M4M2N3kAmN[=S1QBmN=2c=U1ZBoNe=Q1XBROi=n0SBUOm=k0PBYOo=g0nA\\OS>X1010O0010O01O01O01O010O010O00001L3M4L3N3L3M3N3L3M4M2M4L3M4M2M3M4M2MkWU2"}, "image_id": 944, "id": 15537}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 264.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "Xho71g7"}, "image_id": 944, "id": 15538}, {"iscrowd": 0, "category_id": 1, "bbox": [86.0, 270.0, 63.0, 80.0], "area": 2595, "segmentation": {"size": [512, 512], "counts": "dZ[14j?2M3M4M2GBm@b0o>Bn@`0P?9M4M2M3N3L3M4M21O010O000M2N010O010O01O2O3M21O01O001M2M3N3L12M3M4M2N2010O01O01O010L30N2N3L3N2fN^AS1i>M4M2M4M2Cl@KW?3l@IX?3k@KW?3=L[We5"}, "image_id": 944, "id": 15539}, {"iscrowd": 0, "category_id": 1, "bbox": [436.0, 271.0, 69.0, 73.0], "area": 2196, "segmentation": {"size": [512, 512], "counts": "TZj62k?4L3N3L3M3N3L3O1010O010O00010O00010O010O0001j@WOT?k0O01O01O010O00010O01OO1M3N2M4L3N3L3M3N3L3M3N1N3M3N2M01O3N3CRBcNQ>[1QBbNR>[1QBcNR>Y1DcA?Z>DcA`0Y>DcA?Z>DcA?Z>f0O00010O01O010O00010O0010O01O0N3L3N1O0O011O3M2M4M2N2M4M2N3M2M4M2N2M4M2N[Uk3"}, "image_id": 944, "id": 15543}, {"iscrowd": 0, "category_id": 1, "bbox": [281.0, 342.0, 53.0, 64.0], "area": 1738, "segmentation": {"size": [512, 512], "counts": "Wl\\41m?3N1N3N1TOHnA;o=GPB;n=GoA;o=HnA;P>FoAGnA;o=GoAFoA
    EmA=T>h00O01000O010N2M2N3N1N3N2M2O2M3N1N3M100O010O2O1N3N1N3M3N1N3N2M2O2M3M2O2Mmdh2"}, "image_id": 944, "id": 15544}, {"iscrowd": 0, "category_id": 1, "bbox": [454.0, 344.0, 35.0, 25.0], "area": 497, "segmentation": {"size": [512, 512], "counts": "U[S72l?2M4M2N2O2O010O0010O0010O010O001O0N3M2N2O20O010OO02M2N3M1N2O2N3L3N3MWU;"}, "image_id": 944, "id": 15545}, {"iscrowd": 0, "category_id": 1, "bbox": [481.0, 346.0, 31.0, 37.0], "area": 655, "segmentation": {"size": [512, 512], "counts": "bk`73j?4M2M3M4M2M4N10010O01O01O010O00010O00010O01O01O0N3L1O01O003NXE"}, "image_id": 944, "id": 15546}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 368.0, 15.0, 11.0], "area": 106, "segmentation": {"size": [512, 512], "counts": "e[W72k?3N3O001O01O010O00010O001M2M_Ta0"}, "image_id": 944, "id": 15547}, {"iscrowd": 0, "category_id": 1, "bbox": [344.0, 369.0, 53.0, 60.0], "area": 1668, "segmentation": {"size": [512, 512], "counts": "P\\\\51m?2M4M2M4M2M3RA_O]>b0_ABa>=]AEc>9WAJi>5TANl>d00O00010O010O0001^AnNU>Q1iAQOW>P1eATO[>k0cAWO]>V1010O010O00010O010O00010O0N3L3N2M4M2M3N3L3M4M2M3N3L3N3L3NoSi1"}, "image_id": 944, "id": 15548}, {"iscrowd": 0, "category_id": 1, "bbox": [468.0, 378.0, 33.0, 38.0], "area": 752, "segmentation": {"size": [512, 512], "counts": "f\\Z73j?3N2M4M2M3M4M2M4M2M3N3O010O01O01O010O0001O0M4M2M3M4M2M4M2M3N3L3MQT5"}, "image_id": 944, "id": 15549}, {"iscrowd": 0, "category_id": 1, "bbox": [407.0, 393.0, 51.0, 67.0], "area": 1806, "segmentation": {"size": [512, 512], "counts": "Qn[61l?4M2M3M4M2M4L3M3N3L3M3N3L3M3M4M2M4L30010O00010O0010O00001L3M3M10O0001O2O2M4L3N2M4L3M4M2M3M4M2M3M4L3Ndcj0"}, "image_id": 944, "id": 15550}, {"iscrowd": 0, "category_id": 1, "bbox": [461.0, 425.0, 51.0, 56.0], "area": 1717, "segmentation": {"size": [512, 512], "counts": "\\nV71l?4M2N3M2M3N3M2N3L3N3M2N3L3VAlNf>Y1M2O2O010O01O010O01O010O010O01O010jN^Al0b>RO`Ao0`>nNcAQ1]>mNeAT1d>O010O010N1N1O0O10O1000O103M2N3MaB"}, "image_id": 944, "id": 15551}, {"iscrowd": 0, "category_id": 1, "bbox": [346.0, 428.0, 29.0, 32.0], "area": 558, "segmentation": {"size": [512, 512], "counts": "Q^]53k?3M2M4M2N3L3N2N3L3010O010O00010O010O0O2M2N2M4M2N3L3N3M2M_RT2"}, "image_id": 944, "id": 15552}, {"iscrowd": 0, "category_id": 1, "bbox": [35.0, 435.0, 65.0, 53.0], "area": 1957, "segmentation": {"size": [512, 512], "counts": "ena02k?3N2M4M2M4M2M3N3O0010O01O01O010O01O01O010O000M4NO2M3N3M2M4M2M4M2O1010O010O00010O010O00010O010O0010O0010O001nN[Ah0d>VO_Ai0a>TObAm0^>POeAo0f>N2N3@l@OW?Ok@OX?Ml@OW?Ok@OW?NUb]6"}, "image_id": 944, "id": 15553}, {"iscrowd": 0, "category_id": 1, "bbox": [412.0, 463.0, 25.0, 26.0], "area": 422, "segmentation": {"size": [512, 512], "counts": "R_^61l?4L3M3M4L3M3O2O0000010O00010O00010O00M4L3M3M4L3L`QU1"}, "image_id": 944, "id": 15554}, {"iscrowd": 0, "category_id": 1, "bbox": [138.0, 476.0, 58.0, 36.0], "area": 1391, "segmentation": {"size": [512, 512], "counts": "j_U22k?4L3N3L3M3N3N10010O01OM3N2M3O1001O00001O00001O00O1N2N2M3O11O001O00001O00001O00001O001O00001O0Jo@]OQ?`0RA@o>=SACm>:WAEj>7YAIg>4\\ALd>2_AMb>OaA1W?01O00Q`m4"}, "image_id": 944, "id": 15555}, {"iscrowd": 0, "category_id": 1, "bbox": [244.0, 496.0, 28.0, 16.0], "area": 306, "segmentation": {"size": [512, 512], "counts": "o_j31l?3N2M3M3N2N200001O001O00001O001O00001O001O00001O001L3NZ`g3"}, "image_id": 944, "id": 15556}, {"iscrowd": 0, "category_id": 1, "bbox": [222.0, 200.0, 92.0, 97.0], "area": 3695, "segmentation": {"size": [512, 512], "counts": 
"gX_32m?2N2N2N1O2M3N2N2N2N2N1O2M3N2N2N2N2N2M100000000000O01hNSO^Cm0a4000O0100000000000O0100000000000O0100000000000O0100000000000O0100000000000O016J00005K7I7HXo8"}, "image_id": 947, "id": 15561}, {"iscrowd": 0, "category_id": 1, "bbox": [176.0, 10.0, 92.0, 57.0], "area": 2579, "segmentation": {"size": [512, 512], "counts": "iPh22l?4L4L4L5L100O2O001N100O2O0010O0010O0100O0001O0O2O0O101N101N10001N101N100O11N100O2O0O2O0O101N101N10001O010O010O00010O010O02N2O2M2O2M2N3N1N2OO0O101N101N10001N102M3N3M2M4M2M4M2N3L3N3L3NXni3"}, "image_id": 947, "id": 15562}, {"iscrowd": 0, "category_id": 1, "bbox": [294.0, 12.0, 98.0, 33.0], "area": 2302, "segmentation": {"size": [512, 512], "counts": "hPc4d0Z?200000000000000001O0N2000000000000000000001O00000000000000000001O000001O000000000000000F:0000000001O00000001O00000000000000000O1O1O1O1O10000000000000000000MN500000000000000000000000000000000000dok1"}, "image_id": 947, "id": 15563}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 17.0, 35.0, 48.0], "area": 680, "segmentation": {"size": [512, 512], "counts": "o0R1o>L3N3L3N2M4M1N010O0101N4M2O110O0010O0010O0010O0010O00O2L3N3L3N2M4M2MZ_^7"}, "image_id": 947, "id": 15564}, {"iscrowd": 0, "category_id": 1, "bbox": [467.0, 58.0, 45.0, 93.0], "area": 2967, "segmentation": {"size": [512, 512], "counts": "_bY76i?8I7I7I7I7I6J7H8I7I4L0000000AkMRCV2nP1000001O01ON2L4L5J5L4K5O2O01O0000010O0000O2K4L4N201O01O000N2L401DSBcNm=Y1XBgNg=U1]BkNc=P1bBmNb=n0bBnNb=m0k0L4L4K6K4K5Ljmk2"}, "image_id": 947, "id": 15567}, {"iscrowd": 0, "category_id": 1, "bbox": [423.0, 80.0, 37.0, 57.0], "area": 1584, "segmentation": {"size": [512, 512], "counts": "dbc63m?8H7I7I7I8G8I6J0000O1000O10000000O1000O1000000000O10O100007I7I7H9H7I7I7Inki0"}, "image_id": 947, "id": 15568}, {"iscrowd": 0, "category_id": 1, "bbox": [336.0, 81.0, 46.0, 86.0], "area": 2918, "segmentation": {"size": [512, 512], "counts": "XTX53a?bNlA[1^>N3L3N2M4L3N3L3N2M4M2M4L3N2M4MeZT6"}, "image_id": 947, "id": 15572}, {"iscrowd": 0, "category_id": 1, "bbox": [113.0, 188.0, 70.0, 72.0], "area": 2905, "segmentation": {"size": [512, 512], "counts": "]gh11l?4M2M4M2d@DQ??l@DR??k@Cj>K]Al0_>WObAh0\\>[OcAf0Z>\\OfAd0W>@iA?U>CkA>Q>EPB:n=IPB9l=JRB8l=KPB9n=m00010O0010O0010O0010O0010O0010O0010ON2M4M2O2O000O2L3N1N10O0100O3N3L3N2M4M2N3L30010O0010O00M4M2M4M2N2M4M2MoYT5"}, "image_id": 947, "id": 15573}, {"iscrowd": 0, "category_id": 1, "bbox": [184.0, 201.0, 59.0, 83.0], "area": 2849, "segmentation": {"size": [512, 512], "counts": "]Xl22k?3hNMaB6[=MbB6\\=M`B6]=MaB6[=NcB3[=OfB1V=2hB0V=3gBOV=4jBMS=6mBIQ=9oBHP=8PCHP=9mBIT=6iBNV=3gBOZ=0cB3]=MaB6^=K^B8c=G[BM2M3N3L3M3N3L3N3L3N2M4L3N3L3N2MTYV4"}, "image_id": 947, "id": 15574}, {"iscrowd": 0, "category_id": 1, "bbox": [238.0, 232.0, 65.0, 74.0], "area": 2502, "segmentation": {"size": [512, 512], "counts": "mXg33k?2M4M2M3N3L3N3L3N2M4M2UAoNd>Y1M2M4M2N2M4M2M4M2M3O20O010O00010O010O00010O0PNUBl1k=QNWBo1m=M4M2M4M2M3N3L3hN_Ao0j>M3WOQA?R?]OQAc0W?0O0010O0010O0010O001O0N2N3L3N3L3N2MhWX3"}, "image_id": 947, "id": 15575}, {"iscrowd": 0, "category_id": 1, "bbox": [439.0, 240.0, 63.0, 73.0], "area": 2799, "segmentation": {"size": [512, 512], "counts": "egk64l?7I7I7H8I7I7I7I7I7I1N10000000O1000O10000000O1000O10000000O1000O1007I7I7H8I7I7I3M00000O0100000000000O0100000000000O012N7I7I7I_f4"}, "image_id": 947, "id": 15576}, {"iscrowd": 0, "category_id": 1, "bbox": [367.0, 248.0, 47.0, 83.0], "area": 2601, "segmentation": {"size": [512, 512], "counts": "[hg51o?7I7I00O0100000000VAEo=R=^OUC;R=^OUC:S=^OUC;_>I7Ike`1"}, "image_id": 947, "id": 15577}, {"iscrowd": 0, "category_id": 1, "bbox": 
[9.0, 256.0, 59.0, 78.0], "area": 2318, "segmentation": {"size": [512, 512], "counts": "Rj43k?2M4M2M3N3M2M4M2N2M4M2M4M2N3L3N2M4M2N3L3N3L2OO10O010O11N4M201O01O010O010O0001M2M4M2M4M2N2M4M2M4M2N2M4M2M4M2N3L3N2M4Megm6"}, "image_id": 947, "id": 15578}, {"iscrowd": 0, "category_id": 1, "bbox": [296.0, 281.0, 58.0, 68.0], "area": 2191, "segmentation": {"size": [512, 512], "counts": "_Yd47i?9G0000000000OBHUA9k>>O104L4L0000O1000001O1O001O1O0O2O1O01N7J0000000000OHPObAP1^>9000001O9G2N0O1000002N5K0000bNZBb0f=^OcB9\\=HmBOS=1VCFj<:_C]OaFQAFPA=n>EPA=n>oNlAQ1T>QOjAo0U>TOiAn0U>?O00000000000002N2N2N2N2M3N1O00O1000000001O1O2N2N2N2N2N0000000O100000000000001O2N2N2N2N2N2N2N2N2N2N2N2M3N2NfVf5"}, "image_id": 947, "id": 15580}, {"iscrowd": 0, "category_id": 1, "bbox": [146.0, 309.0, 75.0, 65.0], "area": 2451, "segmentation": {"size": [512, 512], "counts": "WZY21n?2N2O2M2N2N2N3N1N2N2N3M2O1N2N3M2N2O1N3M2N2N2O20O0000010O0000010O0000010O000OO0001O0001O0001O0001O0001O0001O03M2N2N2O2IUATOm>k0TASOn>k05N3M2N2O1N3M2N2N2O2M2N2N2N]Ua4"}, "image_id": 947, "id": 15581}, {"iscrowd": 0, "category_id": 1, "bbox": [227.0, 326.0, 29.0, 30.0], "area": 556, "segmentation": {"size": [512, 512], "counts": "dja3171\\?3`@0]?;10O001M2M3N3M21O01O01O010O00010O00001M2M3M4L3M3M4M2Mfeo3"}, "image_id": 947, "id": 15582}, {"iscrowd": 0, "category_id": 1, "bbox": [395.0, 347.0, 93.0, 59.0], "area": 2671, "segmentation": {"size": [512, 512], "counts": "akU63k?2M4M2M4M2O101O010O01O010O01O01OO2M2N3N101N100O2O0O2O000O2O0N3N1010O0010O010O00010O010O00010O010OkNWAS1l>0O00010O010O00010O010O0010O0010O010O00010O01QOTAi0k>UOWAk0i>SOZAl0m>10O010O01O01O010N1N2M4M2M4M2M3N3M2MWd;"}, "image_id": 947, "id": 15583}, {"iscrowd": 0, "category_id": 1, "bbox": [278.0, 349.0, 59.0, 91.0], "area": 2640, "segmentation": {"size": [512, 512], "counts": "i[[41l?3N2M4M2M4M2M3N3L3N3L31O010O01^BROPZ=CcB?]=S10O010O00010O0O2L3N4K3N3L3N2M4M2M4M2M3N3L3N3L3N2M4M2In@]OV?`06M3N3L3N3LaTg2"}, "image_id": 947, "id": 15584}, {"iscrowd": 0, "category_id": 1, "bbox": [103.0, 357.0, 64.0, 58.0], "area": 2184, "segmentation": {"size": [512, 512], "counts": "Qlc11n?3JOY@4d?7L3N3M3L3N3L3N3M3L010O10O10O10O10O010O1HkNgAR1Z>QOeAm0Z>VOcAk0]>XO`Ag0a>6^AL`>6]AN_>5_AM_>6^ALa>5\\AOd>1YA1g>e00O01M2N2N3M2M4M2N3L3N2010O010O010O00010O010O010O00010O010ON3L310O01O01O010O010O01O010O01O010O01O010O0N0ZOWBWOh=j0[BROf=n0\\BPOd=o0_BoNd=n0_BoNc=o0_BnNe=n0_BoNc=P1^BnNd=R1f0O010O010O01O010O01O0N3EQAAQ?UOUAl0i>60O10O10O010O10O102O02MO0100O0100O0100O0100O0100K4100O0100O0100O0100O012N3L3O2O0O2M2N3L3N2M4M2Ndan4"}, "image_id": 947, "id": 15588}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 438.0, 14.0, 25.0], "area": 185, "segmentation": {"size": [512, 512], "counts": "f=i0X?N3M2N2N100O3M2N200N2N2N2Nnah7"}, "image_id": 947, "id": 15589}, {"iscrowd": 0, "category_id": 1, "bbox": [199.0, 451.0, 24.0, 24.0], "area": 430, "segmentation": {"size": [512, 512], "counts": "WnS35k?4K6K4L2N000O10O100000O0100000O10O10002N5J6K4L]Q`4"}, "image_id": 947, "id": 15590}, {"iscrowd": 0, "category_id": 1, "bbox": [238.0, 456.0, 59.0, 56.0], "area": 1691, "segmentation": {"size": [512, 512], "counts": "Y_g32m?2N2g@Kd>6[AKd>7ZAJe>8YAIf>9XAHg>:WAGg>=VADi>>UACj>?TABk>j0O1O1O1O1O1O1O11O1O1OO1OO[AiNd>W120000000000000000O10O1002N2N200O1N1O2N2N2N2N2N2N2N2N2N2N2N2M3N2N2NUQ[3"}, "image_id": 947, "id": 15591}, {"iscrowd": 0, "category_id": 1, "bbox": [501.0, 465.0, 11.0, 31.0], "area": 194, "segmentation": {"size": [512, 512], "counts": "Toj72l?2N3M2M4M2b@DV?d0M4M2O2O0_A"}, "image_id": 947, "id": 15592}, {"iscrowd": 0, "category_id": 1, "bbox": 
[315.0, 471.0, 49.0, 41.0], "area": 1249, "segmentation": {"size": [512, 512], "counts": "nom42l?2M3N2M3N2M3N2M3N2N21O001O00001O001O000000M3N2M3N2N2000000M3N2N21O00001O001O000O2M2M0DSAIo>5TAGo>3o@F55n>3WAJm>2b0NYaY2"}, "image_id": 947, "id": 15593}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 483.0, 45.0, 29.0], "area": 1076, "segmentation": {"size": [512, 512], "counts": "]?c0]?00000I7000000000000000000N2O1000000000000000000000000000000:F000000000000000000000O10?ATPY7"}, "image_id": 947, "id": 15594}, {"iscrowd": 0, "category_id": 1, "bbox": [173.0, 494.0, 37.0, 18.0], "area": 493, "segmentation": {"size": [512, 512], "counts": "oof21j?50000000000000000000000F:00000000N2000000000000000000000000000000001O00a`f4"}, "image_id": 947, "id": 15595}, {"iscrowd": 0, "category_id": 1, "bbox": [398.0, 495.0, 30.0, 17.0], "area": 345, "segmentation": {"size": [512, 512], "counts": "m_W63k?2M3M3N2M3O1001O00001O001O00001O001O00001O001O00001O0N3M2M[`Y1"}, "image_id": 947, "id": 15596}, {"iscrowd": 0, "category_id": 1, "bbox": [506.0, 499.0, 6.0, 13.0], "area": 44, "segmentation": {"size": [512, 512], "counts": "n_m72l?2N2N2M3N2"}, "image_id": 947, "id": 15597}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 42.0, 9.0], "area": 359, "segmentation": {"size": [512, 512], "counts": "09g?00000000000000000000000000000000000000000000O100000000000000000000000000000000000PP[7"}, "image_id": 948, "id": 15598}, {"iscrowd": 0, "category_id": 1, "bbox": [153.0, 0.0, 61.0, 26.0], "area": 1466, "segmentation": {"size": [512, 512], "counts": "a`\\27X?a00000000001O000000000000000000000001OO10000000000000000000001O000000000000000000000000000000000000000000000000D=N11O000ood4"}, "image_id": 948, "id": 15599}, {"iscrowd": 0, "category_id": 1, "bbox": [233.0, 0.0, 35.0, 24.0], "area": 518, "segmentation": {"size": [512, 512], "counts": "W`d32m?2M3N2N2N2N2O1O1O1O001O1O1O1O1O1O00O1O1O1O1O1O1O1N2O1O1O1O1O2N2N2N2Nmoi3"}, "image_id": 948, "id": 15600}, {"iscrowd": 0, "category_id": 1, "bbox": [281.0, 0.0, 74.0, 39.0], "area": 1134, "segmentation": {"size": [512, 512], "counts": "i`\\44j?2M4M2M3N3L3N3L301O01O010O01O01O010O01O01O010O010M2N2M4M0O10O010O02O2M3N2N2M31O00001O00001O003M00001O001O00001O001O00O1M3N2O1001O001O00001ON2N2M3NR`^2"}, "image_id": 948, "id": 15601}, {"iscrowd": 0, "category_id": 1, "bbox": [354.0, 0.0, 71.0, 51.0], "area": 1647, "segmentation": {"size": [512, 512], "counts": "lPa52l?3L3d@Im>:o@In>:PAHn>:o@Jn>e0M4M20001O010O010O00010O010O01O00ON3N1N3N1N3N1O2M3N101O010O010O010O10O0N3N1100O100O100O10000001O00001O00001O001ON2M3N2M3N2M3N2M3NR`[1"}, "image_id": 948, "id": 15602}, {"iscrowd": 0, "category_id": 1, "bbox": [504.0, 0.0, 8.0, 7.0], "area": 40, "segmentation": {"size": [512, 512], "counts": "SPl71m?2O2O001O001O00"}, "image_id": 948, "id": 15603}, {"iscrowd": 0, "category_id": 1, "bbox": [215.0, 22.0, 14.0, 10.0], "area": 135, "segmentation": {"size": [512, 512], "counts": "f`[39g?00001O000000000000000000N\\_]4"}, "image_id": 948, "id": 15604}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 23.0, 54.0, 59.0], "area": 1988, "segmentation": {"size": [512, 512], "counts": "R1Q1P?0O01O01O010O010O01O01O010O01O01N1N3L3N3L3N2N3L310O01O010O01O010O01O01OM4M2N3L3N3L3N2N3L3N3L3N2N3L3N3L3N3MnnT7"}, "image_id": 948, "id": 15605}, {"iscrowd": 0, "category_id": 1, "bbox": [437.0, 24.0, 75.0, 90.0], "area": 3625, "segmentation": {"size": [512, 512], "counts": 
"hbj61n?2M3N2N1N3N2N2N1N3N2N2M2O2N2N2O10OXOQO`Bo0^=SOaBn0]=TOaBn0\\=TObBn0]=TOaBn0]=TOaBn0\\=UOaBn0]=SObBn0]=TOaBn0\\=UOaBn0]=TOaBn0]=TOaBm0]=m0N2N2N1O01N3N2N2M2O2N2M3N1O2M3N2N1OO01000O010000O011O2M3N1O2N2M3N1O2M3N2N1N3N2N2M2O2N2N2Gi@ES>"}, "image_id": 948, "id": 15606}, {"iscrowd": 0, "category_id": 1, "bbox": [146.0, 33.0, 98.0, 39.0], "area": 3029, "segmentation": {"size": [512, 512], "counts": "dQY2a0Q?>00000000000000K5001O0000000000005K000000000001O00000000000000000000000001O000000000000000LM431O000QAUOi>R1O000001O6J0000001O000000000000000000000000000001O0000000000000001O0000000000000000000000000ZO\\oU4"}, "image_id": 948, "id": 15607}, {"iscrowd": 0, "category_id": 1, "bbox": [289.0, 54.0, 69.0, 73.0], "area": 2682, "segmentation": {"size": [512, 512], "counts": "Qc`43k?2M4M2N3L3O101O010O0O2M2N2M4M2N3L3N3M2M3N3L3O2O010O00010O010O01O01O001L3N3M200PBUNk=Q20O010O010O00010lNTB2l=KWB6i=TOUB:5a0f=SOXB94e0c=PO[B85h0k=UOXBj0h=TOZBl0g=PO]Bm0\\>O101O0M4M2N3L3N2N3L3N3L3Nol\\2"}, "image_id": 948, "id": 15608}, {"iscrowd": 0, "category_id": 1, "bbox": [230.0, 81.0, 23.0, 23.0], "area": 349, "segmentation": {"size": [512, 512], "counts": "QSc32j?4M4K4L4O2O01O00010O0000010O0001O01M2L4L4M4K^]Q4"}, "image_id": 948, "id": 15609}, {"iscrowd": 0, "category_id": 1, "bbox": [473.0, 94.0, 39.0, 60.0], "area": 1264, "segmentation": {"size": [512, 512], "counts": "Pd\\72m?2N1N3N2N2M2O2N2M2O2N2M3N1O2M2O002M21000O10O1000O0N3N200N1O2M3N2N1N3N2N2M2O1RM"}, "image_id": 948, "id": 15610}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 98.0, 34.0, 41.0], "area": 892, "segmentation": {"size": [512, 512], "counts": "R3o0Q?1O010O01O010O01O010O01O010O01O010OROQAk0R?010O010O00N3M2N3L3N3M2M3N3M2Mal^7"}, "image_id": 948, "id": 15611}, {"iscrowd": 0, "category_id": 1, "bbox": [136.0, 98.0, 63.0, 42.0], "area": 1550, "segmentation": {"size": [512, 512], "counts": "hST21m?2M3N3L3N3L3N2M4M2010O00010O010O00010O010O00010O010O01O01O010O01O01O010O01O01OO2M2M4M2M301O010O0001M2M4M2M3N3L3N3L3N2M4M2M4Mh\\l4"}, "image_id": 948, "id": 15612}, {"iscrowd": 0, "category_id": 1, "bbox": [219.0, 99.0, 27.0, 27.0], "area": 438, "segmentation": {"size": [512, 512], "counts": "ec]31m?3M2M4M2N3M2M4M20010O010O010O00010O01O0M4M2N2M4M2N3MglT4"}, "image_id": 948, "id": 15613}, {"iscrowd": 0, "category_id": 1, "bbox": [245.0, 121.0, 91.0, 62.0], "area": 3268, "segmentation": {"size": [512, 512], "counts": "jdj32k?3N3L3N2M4M2M4M2M3N3L3N30O0010O0010O0010O0010O010O00010O010O00010O010O00N3M2M4M2M3O2O010O01O010O01O01O010O01O01O010O01O01O010O01O01O010L3N2M4M2M4M2M3NO02O3L3N30O0010O0N3M2M3N3L3N3Lkkg2"}, "image_id": 948, "id": 15614}, {"iscrowd": 0, "category_id": 1, "bbox": [115.0, 141.0, 71.0, 44.0], "area": 2018, "segmentation": {"size": [512, 512], "counts": "\\ei11j?5ALm@9n>Lm@9n>`0N1001O0001O0001O0001O0001O0001O0001O0001O01O0N2001O01XOo@`0P?[OUAf0R?00000L4N3O000001O01O000001O01O0000010O000000010O000000010O000000L5J5K5K5K5KZkR5"}, "image_id": 948, "id": 15615}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 162.0, 15.0, 29.0], "area": 284, "segmentation": {"size": [512, 512], "counts": "R5j0W?0O010O010OO1N3M2M4M2N3L3N2MkZh7"}, "image_id": 948, "id": 15616}, {"iscrowd": 0, "category_id": 1, "bbox": [229.0, 181.0, 89.0, 69.0], "area": 2982, "segmentation": {"size": [512, 512], "counts": "ffb31m?3M2M3N3L3N3M2M4N11O01O010L3N2N3L3N3L301O01O010O01O010O01O010O01O01O010O01O010O01O010O01O010L3N2010O010O00010O010O010SObA6^>HdA8\\>FgA:Y>BjA>IXOX>8RBd0o=YOSBg0m=VOVBk0i=SOZBl0]>10O01O01O010O001M2N2010O01O0N2N3L3N3M2M4M2MQiP3"}, "image_id": 948, "id": 15617}, 
{"iscrowd": 0, "category_id": 1, "bbox": [177.0, 184.0, 33.0, 47.0], "area": 859, "segmentation": {"size": [512, 512], "counts": "mfh24i?3M3L5L3M3O2O0M\\Ok@c0Q?Ao@`0h>[OZA8N=e>L[A3c>O]A2c>M]A3c>N]A1c>O]A2c>M]A3c>N]A1c>O]A2c>M]A0f>b03_OWAHl>8TAEP?:PACS?>m@^OV?b05O000M4L3M3L5LTjf4"}, "image_id": 948, "id": 15618}, {"iscrowd": 0, "category_id": 1, "bbox": [370.0, 184.0, 64.0, 61.0], "area": 2074, "segmentation": {"size": [512, 512], "counts": "hVi51i?1Z@1d?1Z@1c?7O2M30O10O1M3L]Oh@d0W?4N2N2N1N3N2N2N110O1000O10O10O10N2M3N1O2M3N2M2O2N2M3O0100O0N3N2N2M2O2N2M3N1N3QOYAb0i>[OYAe0g>ZO[Ae0f>YO\\Ae0Q?M2O2N2M3N1O2M3N2M2O2N[iV1"}, "image_id": 948, "id": 15619}, {"iscrowd": 0, "category_id": 1, "bbox": [94.0, 200.0, 64.0, 45.0], "area": 1738, "segmentation": {"size": [512, 512], "counts": "QW_14i?3M3M3L5L3M301O01O01O01O01O01O00M4L3M301O00010O00010O0001O01O01O01O00010O00POUAk0P?1O01O00010O0001O01O01O01O01O01O000N3L3L4M3M4L3L4MZi`5"}, "image_id": 948, "id": 15620}, {"iscrowd": 0, "category_id": 1, "bbox": [408.0, 223.0, 62.0, 87.0], "area": 2970, "segmentation": {"size": [512, 512], "counts": "cX\\61m?3`@No>3o@0n>3PANo>4n@Ol>IPA:2O0Fg>`0XALOEh>U1M3N1O2M3N2N1N100O2O2N2M3N1O2M3N2N1N3N000O10O010O10O10O10001N3N2N1N3N2N2M2O200POjA4W>IkA8T>GnA8S>EPB9R>EPB8S>FnA9S>EPB9R>EPB8S>FoA8R>FPB9R>EPB8S>FoA8m>O2M3N2Mfgd0"}, "image_id": 948, "id": 15621}, {"iscrowd": 0, "category_id": 1, "bbox": [342.0, 237.0, 65.0, 47.0], "area": 1598, "segmentation": {"size": [512, 512], "counts": "TX[51m?2O2N2M3N1O2M3KAf@a0W?5O2N2M3O010N2M30O10O1000O10O1000O10O1000O10O10O1O1M2O2N0O01000O01000O010002M3N20O1N2M2O2N2M3N1O2M3N2N1N3N2N2M2OXXd1"}, "image_id": 948, "id": 15622}, {"iscrowd": 0, "category_id": 1, "bbox": [196.0, 248.0, 68.0, 95.0], "area": 2997, "segmentation": {"size": [512, 512], "counts": "gXR31o?2M2N3N2M2O2M3M3N1N3N2RAWO`>k0_AWO_>k0^AXO_>k0^AWO`>k0_AWO_>V1N3N2M2N3N1N3N2M2N3N1N3N2M2N3N1N2O0O00010OnNfB_O[=`0gB^OY=b0jB\\OU=e0mBXOT=g0nBXOQ=i0QCTOP=a0fBQO<9hAKV>5iAMj=FRB=10m=EPB:24m=EmA845n=FlA566m=6SBKm=5PBNo=2oA2P>NnA4Q>n001O0O2O0O1M4M2N3O0001N1M4L3O110O0010O01OWG"}, "image_id": 948, "id": 15626}, {"iscrowd": 0, "category_id": 1, "bbox": [273.0, 288.0, 84.0, 75.0], "area": 2133, "segmentation": {"size": [512, 512], "counts": "ZjX41n?2N2M2O2N2M3N1O2N2M3N11000i@[OQ?j0100000O0100000PAVOh>j0WAXOi>h0UAYOl>n000M3N1OO010002M1000B\\AAc>`0^A^Oc>a0`A]O`>c0bA[O^>e0dAXO\\>i0eAVO[>k0fASOZ>m0;10O2O2N20O10O1000OOOWOn@g0R?ZOo@c0Q?@m@`0S?60O10O10O10O10O10O101O2000000OO2N2N2M2O2N2M3N1O2M3N2N1O2MeV]2"}, "image_id": 948, "id": 15627}, {"iscrowd": 0, "category_id": 1, "bbox": [27.0, 295.0, 66.0, 88.0], "area": 2864, "segmentation": {"size": [512, 512], "counts": "_k=1l?4M2M3N3M2M4M2M3010O01O01ON3M2N3L3N3L3N2M4]OeN]B]1a=eN]B^1`=dN]B_1`=eN]B]1a=eN\\B_1`=b0N3L30001O010O010OM4M2M3N3L3N3M2M3N3L3N3L3N2N3L3N3L3N3L3N2N3L3N3L3N2M4M2N3L3N]Va6"}, "image_id": 948, "id": 15628}, {"iscrowd": 0, "category_id": 1, "bbox": [95.0, 296.0, 32.0, 37.0], "area": 684, "segmentation": {"size": [512, 512], "counts": "Sj_12k?4M2N3L3N2M4M2N3L3N2M40O010O01O01O010O0O1N3L3N3M2M4M2M3N3M2M4MaVP6"}, "image_id": 948, "id": 15629}, {"iscrowd": 0, "category_id": 1, "bbox": [343.0, 320.0, 69.0, 77.0], "area": 3376, "segmentation": {"size": [512, 512], "counts": "jk[53k?2O2nNJVB8h=JVB9g=JWB8g=JVB8h=JVB9g=JWB7h=JVB9g=JVB9g=JWB7g=KWB8g=JVB8h=JWB8f=KWB7h=R1000O010O01000O010O01000O001M1O010O00102M2N3OlAXNP>l11O10O010O0100O010O1O010POoAOQ>OQB2o=LSB3n=JTB7k=GXB8i=FXB;g=C[B=f=@]B?d=_O]B`0d=^O_B?d=_O]B?f=^O]B?e=_O]B>e>M2N2N3L3N_da1"}, "image_id": 948, "id": 15630}, {"iscrowd": 0, "category_id": 
1, "bbox": [92.0, 335.0, 50.0, 65.0], "area": 1802, "segmentation": {"size": [512, 512], "counts": "S\\^12k?3N3L3N2N3L3N3L3N3L3N2N3L3N3L3N2M4M2N3L3N2O2O010O0010O001L3N2M4M2N3L3N2M4M2M4M2N2M4M2M4M2M4M2N2M4MXeh5"}, "image_id": 948, "id": 15631}, {"iscrowd": 0, "category_id": 1, "bbox": [202.0, 339.0, 20.0, 22.0], "area": 275, "segmentation": {"size": [512, 512], "counts": "S[U31m?2N3M3M2N3N1N3M2O2O01N1N30O010OFc@1^?Le@3[?Kh@2[?Lf@2dTa4"}, "image_id": 948, "id": 15632}, {"iscrowd": 0, "category_id": 1, "bbox": [129.0, 343.0, 65.0, 85.0], "area": 2584, "segmentation": {"size": [512, 512], "counts": "glP23k?2M4M2N2O2_@D\\?`010O010O0O1N3L3N3L3N2O2O010O0DmNlAR1R>POlAS1Q>POkAS1R>POlAS1Q>POlAR1Q>a0M2M3N3M2M4M2M3N30O0M4M2N2M4M2ZOWBROm=k0UBSOm=j0WBROl=l0VBROm=j0VBTOl=m0TBoNP>P1PBnNR>S1>O00010O0O2L3N2M4M2M4M2N2M4M2M4Mndn4"}, "image_id": 948, "id": 15633}, {"iscrowd": 0, "category_id": 1, "bbox": [216.0, 349.0, 31.0, 31.0], "area": 532, "segmentation": {"size": [512, 512], "counts": "a[\\31m?3N1N3M2N3M2N3M3N1N3M2010O010O010O010O1M2O2M2N3M2N3M2O2M2N3MkTT4"}, "image_id": 948, "id": 15634}, {"iscrowd": 0, "category_id": 1, "bbox": [397.0, 382.0, 102.0, 62.0], "area": 3186, "segmentation": {"size": [512, 512], "counts": "jlV61l?3N2M4M2N3L3N2M4M2N3O00010O010O01O01O010O010O00010O010O000O2M210O01O01O010O010O01O01O010O01O01O010O01O01O010O010O00010O010O01O01O010O01O01O010N1N2M4M2M4M2M4M2M3N3M21M2M3N3L3N3L3N2G\\ATOg>i0[ATOh>i0:M2M4M2M3N3L3N3M2MmS6"}, "image_id": 948, "id": 15635}, {"iscrowd": 0, "category_id": 1, "bbox": [238.0, 386.0, 75.0, 86.0], "area": 2685, "segmentation": {"size": [512, 512], "counts": "R^g33j?3M3M4L3M3M40O00010O00010O00010O00010O00h@[OU?i00O00HYOWAh0e>[O\\Ad0a>_O_Aa0^>CaA>[>EfA:W>IfA:X>i0O00010O00010O00010O00010O0O2L3M3N30M2M3M1O000000000000004L3M3M4L3M4L3M3M4L3M3M4M2M3M4L3MkSS3"}, "image_id": 948, "id": 15636}, {"iscrowd": 0, "category_id": 1, "bbox": [313.0, 386.0, 28.0, 27.0], "area": 456, "segmentation": {"size": [512, 512], "counts": "dll42l?2M3M4M2M4M2M310O0010O00010O0010O0010O001M2M3N3L3N3L3MiSe2"}, "image_id": 948, "id": 15637}, {"iscrowd": 0, "category_id": 1, "bbox": [180.0, 391.0, 67.0, 60.0], "area": 2073, "segmentation": {"size": [512, 512], "counts": "[]j22l?2N3M2N3N1N3M2N3N1010O01O0h@[OS?j0N3M2N3M2010O010O010O0QOXAg0h>VOZAk0e>SO^Al0k>10ON3M2N3M2N3M2O2M2N3M2N2N0002N3M2N3N1N3M2N3N2O0O2M2N3N1N3M2N3M2N3M2N3M2N3M2O^ST4"}, "image_id": 948, "id": 15638}, {"iscrowd": 0, "category_id": 1, "bbox": [352.0, 425.0, 61.0, 87.0], "area": 2598, "segmentation": {"size": [512, 512], "counts": "c_`54j?2N3[@H]?;a@G_?=010O01M2N2N3L3N3L3N2N2N200CkNoAU1o=nNnAT1o=oNoAS1o=POmAS1P>`0N2M3M3N2M3N2M3N2M3N21O001M2M3N3UO_BSOc=j0`BSOd=j0_BSOc=j0`BTOc=l0\\BQOg=o0ZBnNh=R1XBkNl=U1TBhNn=W1?L3N3M2M3N3L3N3M2M3N3M2M4M2M^Ra1"}, "image_id": 948, "id": 15639}, {"iscrowd": 0, "category_id": 1, "bbox": [418.0, 428.0, 19.0, 22.0], "area": 224, "segmentation": {"size": [512, 512], "counts": "m]a61m?2N3M2N3L3N3M2N210O00O2M2M4M2N3M2N3MaRU1"}, "image_id": 948, "id": 15640}, {"iscrowd": 0, "category_id": 1, "bbox": [305.0, 431.0, 56.0, 63.0], "area": 1874, "segmentation": {"size": [512, 512], "counts": "Qoh41m?2M3M4L3M3N3N11O01O01O01O010O0N2N3L3M3M4L3M3N3L3M3M4L3N3N100010O00010O00001L3M3M4L3N2M4L3M4L3N2M4L3M3M4M2M3M4L[R[2"}, "image_id": 948, "id": 15641}, {"iscrowd": 0, "category_id": 1, "bbox": [423.0, 459.0, 48.0, 53.0], "area": 1507, "segmentation": {"size": [512, 512], "counts": "noc62l?2M3N2N2M3N2N2N2M3N2^OZOhAh0V>[OfAh0X>ZOfAh0X>[OeAg0X>\\OfAf0Y>a01O00001O001O001O001O001O00001O0N3L3N3M2N3L3N3M2M3N3M2N3L3N3M2MZQd0"}, 
"image_id": 948, "id": 15642}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 464.0, 23.0, 48.0], "area": 721, "segmentation": {"size": [512, 512], "counts": "`>`1`>01O001O001O000N3M2M4M2N2M4M2N3L3N3L3N2N3L3NYQd7"}, "image_id": 948, "id": 15643}, {"iscrowd": 0, "category_id": 1, "bbox": [26.0, 478.0, 52.0, 34.0], "area": 1245, "segmentation": {"size": [512, 512], "counts": "g_=2k?4L3M3`@FX?=e@EX?c0M3N2M3M300001O00001O001O00001O00001O001O00001O00001O00001O001O000O2O01O01O001O000M4M2M3M4M2Mf`h6"}, "image_id": 948, "id": 15644}, {"iscrowd": 0, "category_id": 1, "bbox": [488.0, 492.0, 24.0, 20.0], "area": 377, "segmentation": {"size": [512, 512], "counts": "k_d73j?3N3L3N3M21O001O0000M3N200001O00001O001O00001O"}, "image_id": 948, "id": 15645}, {"iscrowd": 0, "category_id": 1, "bbox": [98.0, 506.0, 17.0, 6.0], "area": 55, "segmentation": {"size": [512, 512], "counts": "o_a11m?2M3001O00001O001O00001O001O0000QPV6"}, "image_id": 948, "id": 15646}, {"iscrowd": 0, "category_id": 1, "bbox": [186.0, 119.0, 15.0, 15.0], "area": 153, "segmentation": {"size": [512, 512], "counts": "QTm22k?3M3L5O00010O0001O01O00N3L3MY\\k4"}, "image_id": 949, "id": 15647}, {"iscrowd": 0, "category_id": 1, "bbox": [157.0, 176.0, 59.0, 53.0], "area": 1852, "segmentation": {"size": [512, 512], "counts": "Pf^22k?3N3M2N3j@Dc>?[ADb>?[ACc>?ZADg>;WAHh>9UAIl>f0O010O010O00010O010O001N110O01O010O01O010O010O01O010O01O010O010O01O010N1O1010O01M2N3O000An@NU?On@MU?1m@MV?0m@MU?0n@NoXd4"}, "image_id": 949, "id": 15648}, {"iscrowd": 0, "category_id": 1, "bbox": [332.0, 243.0, 53.0, 82.0], "area": 1982, "segmentation": {"size": [512, 512], "counts": "jYV52l?2N3L3N3L3N2N3L3N3M2M4M2M3N3M2M4M2M4M2N2M4M2M3NO10O10O10O0103M2M4M2M3N3M2M4M2N2M4M2M4M2N3L3N2M4M2N3L3N3MUXo1"}, "image_id": 949, "id": 15649}, {"iscrowd": 0, "category_id": 1, "bbox": [107.0, 291.0, 54.0, 61.0], "area": 1818, "segmentation": {"size": [512, 512], "counts": "dje11m?2M3N3M2M4M2M4M2M3N3M2M4M2M3N3L3N3M2M4M2O1010O010O00010O0O2M2N2M4M2M4M2N2M4M2O2O0010O00N3M2M4M2M3N3L3N3M2McV_5"}, "image_id": 949, "id": 15650}, {"iscrowd": 0, "category_id": 1, "bbox": [230.0, 296.0, 109.0, 64.0], "area": 2642, "segmentation": {"size": [512, 512], "counts": "mYc31m?2N3L3N3M2M3N3M2M4O0010O00010O010O01O01O010O010O00010O010O010O00010O010O010O00010O010O01O01O010O010O01O01O010O010O00010O010O010O00010O010O010O00010O010O01O01O010O010O00010O010O010O00010O010O010O00010N1N3M2M4M2N2M4M2NaUf2"}, "image_id": 949, "id": 15651}, {"iscrowd": 0, "category_id": 1, "bbox": [211.0, 326.0, 20.0, 22.0], "area": 259, "segmentation": {"size": [512, 512], "counts": "ejY32k?3M4M2M3M4N11O01O01O01O010OM4M2M3M4L3NgU\\4"}, "image_id": 949, "id": 15652}, {"iscrowd": 0, "category_id": 1, "bbox": [64.0, 357.0, 60.0, 56.0], "area": 1995, "segmentation": {"size": [512, 512], "counts": "Y\\P13j?4L3M3M4L3M3M4L3M3M4L3M310O00010O00010O00010O00010O00010O0003N0O00010O00010O00010O00010O000kN[AP1j>00010OM3M4L3M3M4L3M3M4L3MVdQ6"}, "image_id": 949, "id": 15653}, {"iscrowd": 0, "category_id": 1, "bbox": [144.0, 427.0, 13.0, 10.0], "area": 75, "segmentation": {"size": [512, 512], "counts": "`]X21m?3M2O20O0010O0010O001M2NcRa5"}, "image_id": 949, "id": 15654}, {"iscrowd": 0, "category_id": 1, "bbox": [477.0, 460.0, 25.0, 26.0], "area": 368, "segmentation": {"size": [512, 512], "counts": "hn^71l?3N3L3N3M200010Oc@CW?>f@D[?a0O0010O0010O0010N1M4M2N3L3N2M`a4"}, "image_id": 950, "id": 15655}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 3.0, 3.0], "area": 6, "segmentation": {"size": [512, 512], "counts": "03m?O1OQ`n7"}, "image_id": 951, "id": 
15656}, {"iscrowd": 0, "category_id": 1, "bbox": [8.0, 0.0, 64.0, 55.0], "area": 1901, "segmentation": {"size": [512, 512], "counts": "[P42n?2M2N2O2M2N3M2O1N3M2O2M1O0010O02O1O1O2O001O01O010O01O01O01M2N3N1N2N3M2O1N0001O01O00010O0001O3N1N2N3N1N3M2N2O2M2N3N1N2Ag@4\\?Je@4]?Jf@4d?N2NPok6"}, "image_id": 951, "id": 15657}, {"iscrowd": 0, "category_id": 1, "bbox": [82.0, 0.0, 38.0, 19.0], "area": 368, "segmentation": {"size": [512, 512], "counts": "PPY12n?1O1O2N1O2N1O2N1O1O2N1O2NO1O1O100O1O100O1O100O1O1O100O1O100O1O100O1O1O100OQPT6"}, "image_id": 951, "id": 15658}, {"iscrowd": 0, "category_id": 1, "bbox": [275.0, 0.0, 22.0, 16.0], "area": 210, "segmentation": {"size": [512, 512], "counts": "U`Y42n?1N3M2O1N3M2N2O00O1O1O100O1O100O1O2N3N1N2Nl_[3"}, "image_id": 951, "id": 15659}, {"iscrowd": 0, "category_id": 1, "bbox": [298.0, 0.0, 8.0, 4.0], "area": 19, "segmentation": {"size": [512, 512], "counts": "PPe41o?1O1O1OO100O1OQPW3"}, "image_id": 951, "id": 15660}, {"iscrowd": 0, "category_id": 1, "bbox": [315.0, 0.0, 53.0, 49.0], "area": 1462, "segmentation": {"size": [512, 512], "counts": "g`m41n?2N2N3N1N3`@EV?=h@EW?TO[Ao0c>SOZAo0d>7O0001O01O01O01O01O1O100O1O101N1O00100O1O100O1O1O2O2M2N2O2M2N3M2O1N3M2O2M2N2N^oW2"}, "image_id": 951, "id": 15661}, {"iscrowd": 0, "category_id": 1, "bbox": [371.0, 0.0, 31.0, 16.0], "area": 268, "segmentation": {"size": [512, 512], "counts": "P`i51o?1O2N1O2N1O1O2N1O1O2N1OO100O1O1O100O1O1O100O1O100O1O1O100O2NPPg1"}, "image_id": 951, "id": 15662}, {"iscrowd": 0, "category_id": 1, "bbox": [384.0, 0.0, 34.0, 37.0], "area": 650, "segmentation": {"size": [512, 512], "counts": "aPP61n?2O2M2N3M2O1N3M2O2M2N2O2M2N2OO01O01O2N010O0001O01Am@0S?0o@NQ?2RALn>3TAKn>4SAJo>4TAIo>4SAKn>4a0Mjo^1"}, "image_id": 951, "id": 15663}, {"iscrowd": 0, "category_id": 1, "bbox": [468.0, 0.0, 26.0, 14.0], "area": 203, "segmentation": {"size": [512, 512], "counts": "PPZ72n?1O1O2N1O1O2N1O2N1OO100O1O1O100O1O100O1O1O100O1O2NPP9"}, "image_id": 951, "id": 15664}, {"iscrowd": 0, "category_id": 1, "bbox": [108.0, 14.0, 56.0, 51.0], "area": 1532, "segmentation": {"size": [512, 512], "counts": "fQf11n?2]OOTA4i>NUA4i>NVA3i>OTA4i>NUA4i>OUA3i>NUA3j>a000010O00010O000012M2N1O010O00010O0000010O01O3NO00010O02N2N3N1N3M2O0O00101N3M2N3N1N210M2N3KW@Oj?OQo]5"}, "image_id": 951, "id": 15665}, {"iscrowd": 0, "category_id": 1, "bbox": [454.0, 21.0, 49.0, 77.0], "area": 1841, "segmentation": {"size": [512, 512], "counts": "_QS73l?2O1N3M2O1N3M2N2O2M2N2N3N1N2N2XAPO^>R1_APO_>R1`AoN^>S1`APO9Jh=a1VBaNi=a1UBaNh=a1VBaNh=a1VB_Nj=d1TB\\Nl=k1000010O1O1O01O003NOdNTBe0l=\\OUBKO4m=0WBJM4l=2YBHN4h=4]BEM5f=7^BBO4d=9_BBN4d=8aBAM5d=5PBD`00O4c=6dBEJ3e=6UCIl<5a1N3M2OQ_4"}, "image_id": 951, "id": 15666}, {"iscrowd": 0, "category_id": 1, "bbox": [345.0, 42.0, 78.0, 54.0], "area": 2040, "segmentation": {"size": [512, 512], "counts": "ka\\53m?1N2N3M2O1N3M2N2O2M2N3N1N2n@UOl>R1M2O1N3M2N3N1N2N3M12M2O2M2N0001O01O003N1mN\\Ag0f>XO[Af0h>WOZAg0h>WO[Ag0P?N1O0010O0000010O0001O01O0001O01O0001O01O00010O0000010O0001O101N3M2N2O2M2N3M2O1N[^\\1"}, "image_id": 951, "id": 15667}, {"iscrowd": 0, "category_id": 1, "bbox": [45.0, 52.0, 44.0, 54.0], "area": 1108, "segmentation": {"size": [512, 512], "counts": "oaf03l?2O2M2N3N1N3M2O1N3M2i@ZOQ?m0N1N3M2O2M201O01O010O010O010O0N3N1N3M2O2M2N3N1N3M2N3N1N3M2O2M2N3NT]c6"}, "image_id": 951, "id": 15668}, {"iscrowd": 0, "category_id": 1, "bbox": [135.0, 53.0, 19.0, 21.0], "area": 210, "segmentation": {"size": [512, 512], "counts": "oaS22n?1N2N2N3N1N2N2N2OO0001O01O011Hb@K`?3c@J_?49MWnb5"}, "image_id": 951, "id": 15669}, {"iscrowd": 0, 
"category_id": 1, "bbox": [0.0, 57.0, 1.0, 30.0], "area": 30, "segmentation": {"size": [512, 512], "counts": "i1n0Ymo7"}, "image_id": 951, "id": 15670}, {"iscrowd": 0, "category_id": 1, "bbox": [404.0, 58.0, 42.0, 36.0], "area": 765, "segmentation": {"size": [512, 512], "counts": "]RZ61n?3N1N2N3N1N3M2O2M2N3M2O0O1O01O01O01O02N2OO01OMPAWOQ?i0200000102M2N2O2M2N3N1N3N11O02M2M2NcmP1"}, "image_id": 951, "id": 15671}, {"iscrowd": 0, "category_id": 1, "bbox": [144.0, 79.0, 64.0, 64.0], "area": 2257, "segmentation": {"size": [512, 512], "counts": "PSX23l?2N2O2[@H]?:b@H[?:c@H[?`0O2M2N2N3N1N3M2O1N3M2N201M2O2M2N2N3N1N2O2O010OO1N10O02N2N3N1N3M01O02N3M20001O0N2O2M2N3M2O1N3M2N2O2M2N3M2O1N3M2O1N3MXlg4"}, "image_id": 951, "id": 15672}, {"iscrowd": 0, "category_id": 1, "bbox": [102.0, 80.0, 15.0, 15.0], "area": 114, "segmentation": {"size": [512, 512], "counts": "eRc11n?2O2M2N3N1N3M0102M2N3N1N3M2OW]U6"}, "image_id": 951, "id": 15673}, {"iscrowd": 0, "category_id": 1, "bbox": [387.0, 95.0, 57.0, 54.0], "area": 1587, "segmentation": {"size": [512, 512], "counts": "hcQ62m?2N3N1N3`@GT?;j@GU?:j@HS?;j@GT?e0N1GSO\\AP1b>RO[AP1c>RO\\Ao0b>7010O01O3N1N1O10O0001O01HdAkN^>T1dAjN]>T1eAjN]>T180O0001O01O01O2O2M2N3N1N3M2N2O2M2N3N1N3M010O000N[@Kd?63O3M2N`lQ1"}, "image_id": 951, "id": 15674}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 99.0, 19.0, 18.0], "area": 167, "segmentation": {"size": [512, 512], "counts": "]SW71n?2O1N3M2N2N2OO000000010O001O3M2O1N2Ni\\?"}, "image_id": 951, "id": 15675}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 103.0, 39.0, 57.0], "area": 2151, "segmentation": {"size": [512, 512], "counts": "W3i1W>0000000000000000000000000001O000000000000000000000000000000000000000000000`NX^\\7"}, "image_id": 951, "id": 15676}, {"iscrowd": 0, "category_id": 1, "bbox": [62.0, 115.0, 24.0, 31.0], "area": 258, "segmentation": {"size": [512, 512], "counts": "gSo01n?3M2O2M2O2M3O010O10O10O10O01000O0N3N2N1N3N2M2Obkd6"}, "image_id": 951, "id": 15677}, {"iscrowd": 0, "category_id": 1, "bbox": [449.0, 121.0, 27.0, 17.0], "area": 220, "segmentation": {"size": [512, 512], "counts": "TdP71n?2N3N2M1N2O010O00010O01O3N1N1O0100O3M2OO00011N2N3N1NSla0"}, "image_id": 951, "id": 15678}, {"iscrowd": 0, "category_id": 1, "bbox": [488.0, 128.0, 23.0, 22.0], "area": 258, "segmentation": {"size": [512, 512], "counts": "[Td72m?2O1N3M2N2O2M2N0010O0000010O002N2O2M2N2N3N1Nh;"}, "image_id": 951, "id": 15679}, {"iscrowd": 0, "category_id": 1, "bbox": [423.0, 142.0, 52.0, 58.0], "area": 1588, "segmentation": {"size": [512, 512], "counts": "eec61n?2N2AMo@5n>No@4P?Mn@5P?Mn@6o>Mo@4o>No@4P?`0M2N2N3N1N2N001LfNbAY1^>410O2N1O01O0001OMeAbN\\>]13000101N3M2N2O2M2N2N3N1N2N3M2O1N3M2N2O2M2N2N3N1NgZb0"}, "image_id": 951, "id": 15680}, {"iscrowd": 0, "category_id": 1, "bbox": [479.0, 147.0, 33.0, 26.0], "area": 279, "segmentation": {"size": [512, 512], "counts": "Ve_73l?2O2M2N10O0001O01O01O01O01O01O01O01O00010O00011N00010O00011N2N3MZK"}, "image_id": 951, "id": 15681}, {"iscrowd": 0, "category_id": 1, "bbox": [180.0, 158.0, 72.0, 52.0], "area": 1963, "segmentation": {"size": [512, 512], "counts": "ZUj22n?1N3M2N3N1N2N3N1N3M2O1N3M2QATOg>o0VASOi>n0VATOg>T1N2O2N10010O01O000N3M2N3NO01O00010O00010O0000010O000010O02N3N1N3M2N01O0001O01O01O00010O00102M2N3M2O1N3M2N3N1N3McjQ4"}, "image_id": 951, "id": 15682}, {"iscrowd": 0, "category_id": 1, "bbox": [246.0, 173.0, 40.0, 42.0], "area": 869, "segmentation": {"size": [512, 512], "counts": "kUk32m?2O1N3M2O1N3M2N3N1N2N3M2O1N3M2O2M2N2N3N02N1N3M2O1N3M2N2O2M2N3M2O1N3M2O1N3M2N3Nhi`3"}, "image_id": 951, "id": 15683}, 
{"iscrowd": 0, "category_id": 1, "bbox": [133.0, 202.0, 23.0, 32.0], "area": 466, "segmentation": {"size": [512, 512], "counts": "bfR21h?2[@3a?:L4L4K5L3MO3N3M000O10O10O10002N4K5L4L4L4Kmha5"}, "image_id": 951, "id": 15684}, {"iscrowd": 0, "category_id": 1, "bbox": [422.0, 206.0, 33.0, 33.0], "area": 560, "segmentation": {"size": [512, 512], "counts": "kVc61n?2N3N1N3M2N2O2M2N3M2O1N3M2O1N000010O2N3M2O1N3M2N3N1N2N3N1N2N3M2OoXl0"}, "image_id": 951, "id": 15685}, {"iscrowd": 0, "category_id": 1, "bbox": [449.0, 211.0, 49.0, 77.0], "area": 1791, "segmentation": {"size": [512, 512], "counts": "bgP72n?1N3M2O1N3M2N2O2M2N2O2M2N2@VOgAm0V>VOhAk0W>VOgAl0W>WOfAl0W>VOhAk0V>WOhAk0W>a0M2N2N3O01O01OnNPB3Q>JQB7n=HSB8m=FVB9i=FYB;e=D]BBh0XOh0XOi0WOh0XOQaS4"}, "image_id": 951, "id": 15687}, {"iscrowd": 0, "category_id": 1, "bbox": [303.0, 422.0, 35.0, 33.0], "area": 391, "segmentation": {"size": [512, 512], "counts": "Zmg44k?200N1N3N2O10000000000O1000O100000000000O10000000O1000000N1O2N2N2N2NRbf2"}, "image_id": 951, "id": 15688}, {"iscrowd": 0, "category_id": 1, "bbox": [332.0, 457.0, 87.0, 54.0], "area": 3917, "segmentation": {"size": [512, 512], "counts": "]^V53m?T1lN;E0000000N2000000000000000000000O1000000000000000O1000000000000000000000000000000000000000O100000000000000000O100001O1O2N1O1O2N1O1O2N1O2N1O1O2N1O1O2M2O2N1O1O2N1O1O2N1O2NbP^1"}, "image_id": 951, "id": 15689}, {"iscrowd": 0, "category_id": 1, "bbox": [275.0, 486.0, 46.0, 26.0], "area": 1103, "segmentation": {"size": [512, 512], "counts": "XoY4;e?=C0000000000000000000000000000000000000O10000000000000000000000000000000000000000O10000a0_OYPo2"}, "image_id": 951, "id": 15690}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 88.0, 52.0], "area": 4356, "segmentation": {"size": [512, 512], "counts": "0d1\\>0000000000O10000000000000000000000000000000000000000000000O1000000000000000000000000000000000000000000000000O10000000000000000000000000000000000000000000000O1000000000007Ih0XOQoc6"}, "image_id": 952, "id": 15691}, {"iscrowd": 0, "category_id": 1, "bbox": [276.0, 0.0, 46.0, 42.0], "area": 1837, "segmentation": {"size": [512, 512], "counts": "PPZ4d0\\?f0ZO000000O10000000000000000000000000000000000000000000000000000000000000000O100000000000h0XOXon2"}, "image_id": 952, "id": 15692}, {"iscrowd": 0, "category_id": 1, "bbox": [164.0, 53.0, 41.0, 44.0], "area": 1757, "segmentation": {"size": [512, 512], "counts": "fQb2[1e>00000O100000O10000000000000000000000000000000000000000000000000000000000000009GR^i4"}, "image_id": 952, "id": 15693}, {"iscrowd": 0, "category_id": 1, "bbox": [500.0, 65.0, 12.0, 193.0], "area": 2313, "segmentation": {"size": [512, 512], "counts": "RRj7P6P:000O10000000000000000oM"}, "image_id": 952, "id": 15694}, {"iscrowd": 0, "category_id": 1, "bbox": [259.0, 74.0, 41.0, 325.0], "area": 13253, "segmentation": {"size": [512, 512], "counts": "[bQ4Q:o500000000001O00000000000000000000001N100000000000000000001O000000000000000000000fmY3"}, "image_id": 952, "id": 15695}, {"iscrowd": 0, "category_id": 1, "bbox": [357.0, 126.0, 98.0, 247.0], "area": 23854, "segmentation": {"size": [512, 512], "counts": "ncb5_7a8001O00000000000000000000001O00000000000000000000001O00000000000000000000001O00000000000000000000001O00000000000000000000001O00000000000000000000001O00000000000000000000001O000000000000000000000R\\l0"}, "image_id": 952, "id": 15696}, {"iscrowd": 0, "category_id": 1, "bbox": [93.0, 245.0, 154.0, 100.0], "area": 8556, "segmentation": {"size": [512, 512], "counts": 
"lh^12n?2M4TAIi=9UBJg=:UBIi=9UBJh=9TBJi=9UBJh=9UBIh=:UBJh=9UBIi=9TBJi=:TBFl=W1O0100O0100O0100O0100O0100O0100O0100O0100O0100O0100O0100O0100O0100O0100O0100O0100O0100O0100O0100O0100O0100O0100O0100O0100O0100O0100O0100O0100O0100O0100O0100O0100O0100O01000030O010O01N1N3L3N3L3N3M0O10O010O10O10O010O10O10O0101O2M10O01001N3N3L3N3M0O0101N3N3M2M4M2M4M2N3L3N3L3N3Cd@0^?Nd@O`?Nc@O_?NiVT4"}, "image_id": 952, "id": 15697}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 431.0, 204.0, 81.0], "area": 15076, "segmentation": {"size": [512, 512], "counts": "_=a2_=000000000000000000000000000000000008H0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000O100000000000000000000000000000000000000000000000000000000000000Zbi4"}, "image_id": 952, "id": 15698}, {"iscrowd": 0, "category_id": 1, "bbox": [279.0, 433.0, 170.0, 79.0], "area": 11446, "segmentation": {"size": [512, 512], "counts": "Vn[4Q1o>1O00000000000000000000000000000\\O\\1XO00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000O10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000S1mN000000000000000000000000000000000000000000000000000000000000i0WOcPo0"}, "image_id": 952, "id": 15699}, {"iscrowd": 0, "category_id": 1, "bbox": [498.0, 326.0, 14.0, 29.0], "area": 219, "segmentation": {"size": [512, 512], "counts": "^Zi72n?2M2N3N1N2N3N1N3M2O2M2N3N1iE"}, "image_id": 953, "id": 15700}, {"iscrowd": 0, "category_id": 1, "bbox": [163.0, 331.0, 69.0, 61.0], "area": 2119, "segmentation": {"size": [512, 512], "counts": "[ka21n?3N1N3M2O2M3M2O2M2N3N1N3N1N3M2O2M3M2O0O1O010O00010O00010O01O01O01O01O01O01O010O00010O01O01O01O01O01O2O1N3M3N1N3N1N3M2O2M2N3N1N3M3N1N3M2Ojd[4"}, "image_id": 953, "id": 15701}, {"iscrowd": 0, "category_id": 1, "bbox": [445.0, 349.0, 67.0, 85.0], "area": 2950, "segmentation": {"size": [512, 512], "counts": "\\kn61o?2M2N3N1N3M2e@CQ??l@CR??l@DQ?>n@CQ?g0N3M2O2M2N3N1N3M2O1N0010O002OPBbN`=[1`BhN_=V1bBkN_=S1`BoN`=P1_BSOa=l0^BUOb=k0\\BXOd=h0YBZOg=f0WB]Oh=d0VB]Ok=b0SBAl=Z110OO2M2N3N1N3M2O1N0010O00010O00010O00010O00010O01O3N1N3M2N3N1NWD"}, "image_id": 953, "id": 15702}, {"iscrowd": 0, "category_id": 1, "bbox": [390.0, 379.0, 84.0, 91.0], "area": 3327, "segmentation": {"size": [512, 512], "counts": "\\\\S61o?1N3M2O2KH]@:a?5M2O2M2N3N1N3M010O0XAVO[>j0eAXOZ>h0fAZOW>f0iA\\OV>c0jA@T>a0kA@T>b0iAAT>a0jAAT>b0jA@T>W1N3NO3N110O01QBVNf=i1YBYNf=h1WBZNj=e1TB^Nk=l1O2O0010O0100OO2M2N3N1N3N1UNmAf1X>N3N1N3M2O2M2N10O00010O00010O00010O00010O00G[AXOh>e0ZAZOg>e0[AXOh>e0ZAYOh>e0`NmAa1S>6O00010O00010O02N2O2M2N3M2O2M2N3N1N20N2N3N1N1O01O01O02N3N1WOZA5i>HZA5h>IZA6h>HZA5h>IZA5i>HZA6g>IZA5i>HZA5Z?N3Ndae1"}, "image_id": 953, "id": 15704}, {"iscrowd": 0, "category_id": 1, "bbox": [488.0, 429.0, 24.0, 43.0], "area": 525, "segmentation": {"size": [512, 512], "counts": "j]d71o?1N3M2O2M2N3N1N2N10O02N3N1N3M2N3N1N3M2O2O010O0^B"}, "image_id": 953, "id": 15705}, {"iscrowd": 0, "category_id": 1, "bbox": [283.0, 462.0, 53.0, 49.0], "area": 1340, "segmentation": {"size": [512, 512], "counts": "Uo]41o?2M3M2O2M2N3N1N3M2O2M2O2M2N3N1N3M10O01O01O01O01O01O01O01O01O01O01O01O01O2O2M2N3N1N3M2O2M2N3N1N3M2O2M2N3Nj`g2"}, "image_id": 953, "id": 15706}, {"iscrowd": 0, "category_id": 1, "bbox": [442.0, 465.0, 70.0, 47.0], "area": 1926, 
"segmentation": {"size": [512, 512], "counts": "k^m61n?3M2]@KY?7d@KZ?7d@LY?7e@JZ??N3M2N3N1N3M2O2M210O010O01O0O2N1O2N1O1O2N1O2N1O2N1O2N1OH@RA>n>CQA=n>Fo@;P?Go@9Q?;O1O100O1O1O100O1O100O1O11O1O2N1O1O2N1O2N1O2N1O2N1O2N1O2N1O2N1O"}, "image_id": 953, "id": 15707}, {"iscrowd": 0, "category_id": 1, "bbox": [394.0, 500.0, 30.0, 12.0], "area": 171, "segmentation": {"size": [512, 512], "counts": "o_U61n?100O1O100O1O100O1O100O1O100O1O1001O2N1O2N1O2N1O1O00O1001OQ`[1"}, "image_id": 953, "id": 15708}, {"iscrowd": 0, "category_id": 1, "bbox": [436.0, 6.0, 22.0, 22.0], "area": 286, "segmentation": {"size": [512, 512], "counts": "\\Pj62n?2M3N2M4M2M3N1N10O10O010O0101N3N2M3N3L3N2M]oj0"}, "image_id": 958, "id": 15709}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 201.0, 133.0, 98.0], "area": 9850, "segmentation": {"size": [512, 512], "counts": "n6]2b=00010O000010O002O0O1O12N00000000000O01000000000ChMRCX2n<=00000O1000000000O10K500000000O100000O10000000000000O1O105K2N00000O1000000000O10000000O10000000001O0000000O10O1000000000_O\\BfNd=Z1fB\\NZ=d1a0000O105K;E00000000O1000O0100000N2000000000000O010000000000O100000O10000004L;D;F:F;E:F:FeWm5"}, "image_id": 958, "id": 15710}, {"iscrowd": 0, "category_id": 1, "bbox": [177.0, 217.0, 60.0, 87.0], "area": 2585, "segmentation": {"size": [512, 512], "counts": "Tih21l?3N2N3L3N3L3N2N3L3N3L3N2M4M2N3L3N2M4M2M4M2N2M4M2M3N3L3N3M2M3NO4M2M3N3L3N3L3N2M4M2M4M2M3M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3NnXY4"}, "image_id": 958, "id": 15711}, {"iscrowd": 0, "category_id": 1, "bbox": [481.0, 229.0, 31.0, 40.0], "area": 832, "segmentation": {"size": [512, 512], "counts": "og`72l?2M4M2M3M4M2M4M2M3N3N1010O01O01O010O01O01O010O01O01O010O01O0dH"}, "image_id": 958, "id": 15712}, {"iscrowd": 0, "category_id": 1, "bbox": [236.0, 238.0, 76.0, 97.0], "area": 3627, "segmentation": {"size": [512, 512], "counts": "SYf32l?2M4M2N2M4M2M4M2M3N3L3N3L3N2N3L3N3L3N2M4M2N30O0010O0001M2N3OdBYN_g0600000O1000000000O10008H9GcVn5"}, "image_id": 958, "id": 15715}, {"iscrowd": 0, "category_id": 1, "bbox": [150.0, 309.0, 50.0, 63.0], "area": 1756, "segmentation": {"size": [512, 512], "counts": "W[[22l?2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N2M4M2M40O0010O0010O0010O00N3M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3NQfk4"}, "image_id": 958, "id": 15716}, {"iscrowd": 0, "category_id": 1, "bbox": [223.0, 324.0, 37.0, 31.0], "area": 700, "segmentation": {"size": [512, 512], "counts": "gj_32k?3N3L3M3M4L3N201O010O00010O00010O01O01O01O010O01O01O010O000N3L3M4L3N2M4Ldem3"}, "image_id": 958, "id": 15717}, {"iscrowd": 0, "category_id": 1, "bbox": [461.0, 334.0, 51.0, 51.0], "area": 1606, "segmentation": {"size": [512, 512], "counts": "]kV73k?3L3M4M2M3N3L3N3L3N2M4M2O2O00010O010O00010O010O00010O01mNYAl0f>QO^An0j>010O00010O010O00010O010L3N2N11M3N3L3N3L3NVE"}, "image_id": 958, "id": 15718}, {"iscrowd": 0, "category_id": 1, "bbox": [261.0, 361.0, 64.0, 50.0], "area": 1988, "segmentation": {"size": [512, 512], "counts": "_lR42f?8I7I7K5000001O0001O00000001O01O0000000010O0000000EUOaAk0_>XO_Ag0`>]O]Ad0a>@\\Ae0_>>O100001O00010O0O101O0O101O0O2H\\AROd>n0_AnNd>o08O0O10O10O012M4M2M4M2M4M2N3L3N3LiSm2"}, "image_id": 958, "id": 15719}, {"iscrowd": 0, "category_id": 1, "bbox": [101.0, 376.0, 69.0, 86.0], "area": 2579, "segmentation": {"size": [512, 512], "counts": "lmb12l?2O2M3M2O2M2N3N2M2O2M3M2O2M2N3N2M2O2M3M2O2M2N3N2M2O2M00010O010O00010O0010O0010O000102M2O2M3M2O2M2N3N1N3N2M2N3N1N3M2O2M3N1N3M2O2M2O2M3M2OjcZ5"}, "image_id": 958, "id": 15720}, {"iscrowd": 0, "category_id": 1, "bbox": [446.0, 387.0, 51.0, 39.0], "area": 1268, "segmentation": {"size": [512, 512], "counts": 
"l\\o62k?4K4L4M3L5L3L4010O000010O0000010O0000010O000010O000010O0000010O0000010O000010O000010O00M3L5K4M3L4L5KeS7"}, "image_id": 958, "id": 15721}, {"iscrowd": 0, "category_id": 1, "bbox": [497.0, 400.0, 15.0, 34.0], "area": 301, "segmentation": {"size": [512, 512], "counts": "[mh71m?3L3N3L3N3M2M3N3L3N3L31O010O^C"}, "image_id": 958, "id": 15722}, {"iscrowd": 0, "category_id": 1, "bbox": [165.0, 417.0, 50.0, 33.0], "area": 803, "segmentation": {"size": [512, 512], "counts": "`mb21l?4L3N3L3M3O20O010O00010O010O00010O010O00010O010O00010O010O00010O010O00010O010O00010O001M2M3N3L3N2MaRd4"}, "image_id": 958, "id": 15723}, {"iscrowd": 0, "category_id": 1, "bbox": [223.0, 422.0, 73.0, 56.0], "area": 2042, "segmentation": {"size": [512, 512], "counts": "mm_32l?2M4L3N3L3M4N11O01O010O010O00010O010O0010O01O01O010O01O010O00010O010O0010O00N3QAXOa>k0\\AYOa>j0\\AXOa>U1M4M2M4O00010O00O2L3M4M20010O010N1M3N3M2M4M2M3N3L3N3L3N2M4M2Mcb[3"}, "image_id": 958, "id": 15724}, {"iscrowd": 0, "category_id": 1, "bbox": [146.0, 445.0, 61.0, 50.0], "area": 1742, "segmentation": {"size": [512, 512], "counts": "n^Y21m?2N3L3N2M4M2N3L3N3M2M3N3L3N30O001M200010O010O010O00010O010O01O01O010O010O00001O010O010O00010O01N1N4L2N3L3N2N3L3N3M2N3L3NaQh4"}, "image_id": 958, "id": 15725}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 451.0, 54.0, 61.0], "area": 1731, "segmentation": {"size": [512, 512], "counts": "S>U1l>0O10O01000O010O10O10O01000O010O10O10O10O10O01000O010O10O10O01000O010O10O10N2M2O2M2N3N2M2O2M3M2O2M2O2M3M2Om`T7"}, "image_id": 958, "id": 15726}, {"iscrowd": 0, "category_id": 1, "bbox": [62.0, 461.0, 55.0, 51.0], "area": 1595, "segmentation": {"size": [512, 512], "counts": "i_o03l?2M2N3N1N3M3N1N2O1N2N2O1N2N2O1N2O1N2N2O1N2N2O1N2N2O1N200001O001O1O0O2M2N3N2M2N3N1N3N2M2N3N2M2N3N1N3N2M2N3N1NVQU6"}, "image_id": 958, "id": 15727}, {"iscrowd": 0, "category_id": 1, "bbox": [218.0, 474.0, 76.0, 38.0], "area": 1401, "segmentation": {"size": [512, 512], "counts": "m_]32k?3N3M2M3N2001O00N2M3N2M3N2N2M3N2M3N2001O00001O001O00001O001O00001O001O00001\\OPA6P?GTA8m>EUA;k>BXA>V?O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00R`\\3"}, "image_id": 958, "id": 15728}, {"iscrowd": 0, "category_id": 1, "bbox": [131.0, 477.0, 25.0, 20.0], "area": 293, "segmentation": {"size": [512, 512], "counts": "XoQ21m?3M2M4M2N3O00010O010O0010O010O0010O01O0N2N3M2M4Mm`a5"}, "image_id": 958, "id": 15729}, {"iscrowd": 0, "category_id": 1, "bbox": [400.0, 494.0, 53.0, 18.0], "area": 699, "segmentation": {"size": [512, 512], "counts": "m_X63k?2M3N2M3N2M3001O00001O001O00001O001O001O00N2M31O001O00001O001O00001O0000M3O11O00001O001O00001O001N1N2M4M\\Pm0"}, "image_id": 958, "id": 15730}, {"iscrowd": 0, "category_id": 1, "bbox": [129.0, 146.0, 193.0, 198.0], "area": 17847, "segmentation": {"size": [512, 512], "counts": "mfP21m?3N2N1O2N2M3N2N2N1N3N2N2N2N1N3N2N2N2N2M2O2N2N2N2M3N1O2N2M3N2N1O2N2M3N2N2N1O2M3N2N2N2N1N3N2N2N2M3N1O2N2N2M3N1O2N2N2M3N2N1O2N2O100O10O100000O1000O1000kDQLX:o3eETLZ:m3dEUL\\:j3cEXL]:h3aEYL`:g3^E[Lb:e3[E^Le:b3YE`Lf:a3XEaLh:^3WEdLi:\\3UEeLl:[3QEhLo:X3oDjLQ;V3mDlLR;R40O100000O1000O100000O1000O100000O10O10000000O10O100O1N1O2M3N2N2N2N1N3N2N2N2N2M210O1N2N2N1N3N2N2N2N2M2O2N2N2N2M3N1O2N2M3N2N2N1O2M3N2N2N1O2M3N2N2N2N1N3N2N2N2M3N1O2N2N2M3N2N1O2N2M3N2N1O2N2M3N2N2N1N3N2N2N2N2M2O2NUhn2"}, "image_id": 959, "id": 15731}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 265.0, 138.0, 180.0], "area": 13729, "segmentation": {"size": [512, 512], "counts": 
"j8W3h<2N1O2M3N2N2N2N1O2M3N2N2N2N1O20000O100000O1000O100000O1000O100000O1000O100000O100000O100000O1000O100000O1000O100000O1000O100000O100000O100000O100O0N3N2N2N2N2M2O2N2N2N2N2M2O2N2N2N2M3N2N1O2N2M3N2N2N1O2M3N2N2N2N1O2M3N2N2N2N1N3N2N2N2N2M3N1O2N2N2M3N2N1O2N2N2M3N2N1O2N2M3N2N2N1O2Mcdj5"}, "image_id": 959, "id": 15732}, {"iscrowd": 0, "category_id": 1, "bbox": [106.0, 324.0, 141.0, 122.0], "area": 6274, "segmentation": {"size": [512, 512], "counts": "X]e11n?2N2M3N2N1O2N2M3N2N1O000O1000O1000O1000O1001N3N1O200O100000O1000O100IUOXAk0e>XO[Ag0d>ZO[Ah0c>ZO[Ah0c>10O00010O010OTAROf>T10010O01O0M4N11OQOSAk0R?O010O010O01O01ON3PAQOn>Q1O010O00N3O010O00010O010O0001N1N3N11OUAnNh>U1010O001O0O2O00[AnN\\>S1aAoN`>P1]ATOb>U110OlN^Aj0b>TO`Al0a>QOaAP1^>mNfAo0g>N3M21O010O01O010O01N1n@TOn>P1O20O010O000N3O010O010O01O01O010O01L30010O001N1N12L3N3L012N3L3N3L5L2N6I4MYnl3"}, "image_id": 962, "id": 15737}, {"iscrowd": 0, "category_id": 1, "bbox": [458.0, 8.0, 35.0, 40.0], "area": 602, "segmentation": {"size": [512, 512], "counts": "UQU72k?4M2O20O0010O00100O010N1N3L3N2M4M2M4M2M3M3NO2O2M4M2M3M4M2M4M2M3N3L3Nf_9"}, "image_id": 962, "id": 15738}, {"iscrowd": 0, "category_id": 1, "bbox": [369.0, 29.0, 103.0, 64.0], "area": 2832, "segmentation": {"size": [512, 512], "counts": "dah52l?2N3L3N3M2N3M2M3N3M2O20O010O00010O01M2O20O0010O010O00010O010OQORAl0R?0O01O01O010O010O01O01O010O010O00010O010O0010O0010O010O00010O013L00010O010O00O2O001O010O01O010O01O01O010O01O01O010O010O01M2N2N3L3N3L3N2N3L3N3Momc0"}, "image_id": 962, "id": 15739}, {"iscrowd": 0, "category_id": 1, "bbox": [476.0, 42.0, 30.0, 29.0], "area": 509, "segmentation": {"size": [512, 512], "counts": "kQ^73k?2N3L3N3M2M4M2O110O0010O010O0010O0010O010O0M4M2N2M4M2N3L3N_n2"}, "image_id": 962, "id": 15740}, {"iscrowd": 0, "category_id": 1, "bbox": [508.0, 51.0, 4.0, 12.0], "area": 32, "segmentation": {"size": [512, 512], "counts": "jQn74j?2M4M2]N"}, "image_id": 962, "id": 15741}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 71.0, 6.0, 14.0], "area": 48, "segmentation": {"size": [512, 512], "counts": "W2>d?N2M3N2M3N_ml7"}, "image_id": 962, "id": 15742}, {"iscrowd": 0, "category_id": 1, "bbox": [347.0, 78.0, 136.0, 81.0], "area": 4286, "segmentation": {"size": [512, 512], "counts": "Yc]53k?2M4M2M4L3N2M4M2M4M2N30O00010O010O00010O010O010O00010O010O00010O010O010O00010O010O010O00010O010O00010O010O010O00010O010O00010O010O010O00010O010O010O00010O010O00010O010O010O00010O010O010O00010O010O00010O010O010O00010O010O00010O010O010O00010O010M2N3L3N2N3O010O000Bf@7Z?Fi@6Z?Hh@6b?N3Li[>"}, "image_id": 962, "id": 15743}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 100.0, 29.0, 39.0], "area": 697, "segmentation": {"size": [512, 512], "counts": "]3n0Q?010O010O010O010O010O0010O010O011N3N2M3N2M3M3N2M3N2M2O2MW\\a7"}, "image_id": 962, "id": 15744}, {"iscrowd": 0, "category_id": 1, "bbox": [321.0, 153.0, 123.0, 55.0], "area": 3997, "segmentation": {"size": [512, 512], "counts": "heP52i?5K5J6K6J5N2000010O00000010O000000N3L31O000001O01O0001VAoNc>W11O01O000001O01OL4N2oNVAl0o>0001O0001O0001O0001O0001O0001O0O1M30010O00000001O01O000001O01O0001O000oNUAn0n>01O000000010O000000010O000000010O00L4000001O01O0000010O000000010O000000011N001O00000L5J5K5K5L4KejQ1"}, "image_id": 962, "id": 15745}, {"iscrowd": 0, "category_id": 1, "bbox": [509.0, 172.0, 3.0, 7.0], "area": 11, "segmentation": {"size": [512, 512], "counts": "aen71m?2M4cJ"}, "image_id": 962, "id": 15746}, {"iscrowd": 0, "category_id": 1, "bbox": [72.0, 183.0, 13.0, 15.0], "area": 195, "segmentation": {"size": [512, 512], "counts": 
"gUT1?a?00000000000000000000000YZe6"}, "image_id": 962, "id": 15747}, {"iscrowd": 0, "category_id": 1, "bbox": [25.0, 194.0, 44.0, 41.0], "area": 1459, "segmentation": {"size": [512, 512], "counts": "Sf<i070O1000000006J=CWYm6"}, "image_id": 962, "id": 15748}, {"iscrowd": 0, "category_id": 1, "bbox": [478.0, 197.0, 26.0, 25.0], "area": 428, "segmentation": {"size": [512, 512], "counts": "hV_71l?3M3L4M4L3M3010O00010O0001O01O01O01O00001L3M3M4K4Mhi3"}, "image_id": 962, "id": 15749}, {"iscrowd": 0, "category_id": 1, "bbox": [509.0, 205.0, 3.0, 7.0], "area": 13, "segmentation": {"size": [512, 512], "counts": "^fn72n?2M3aI"}, "image_id": 962, "id": 15750}, {"iscrowd": 0, "category_id": 1, "bbox": [107.0, 215.0, 68.0, 45.0], "area": 1946, "segmentation": {"size": [512, 512], "counts": "[ge12h?6J7I6N200001O01O0o@]Oe>c0TADl>h0001O01O0002N000001O0001O000001O0001O0005K000001O01ON2M3TORAf0T?001O000001O0001O000001O0001O000001O0001O000001O0L4J6J6JnXX5"}, "image_id": 962, "id": 15751}, {"iscrowd": 0, "category_id": 1, "bbox": [363.0, 227.0, 34.0, 9.0], "area": 306, "segmentation": {"size": [512, 512], "counts": "Sge59g?00000000000000000000000000000000000000000000000000000000000000000mXi1"}, "image_id": 962, "id": 15752}, {"iscrowd": 0, "category_id": 1, "bbox": [12.0, 244.0, 35.0, 26.0], "area": 647, "segmentation": {"size": [512, 512], "counts": "WX61j?5K5K6K400000010O000000010O000000010O000000010O000000010O0000N3J5L4K[XX7"}, "image_id": 962, "id": 15753}, {"iscrowd": 0, "category_id": 1, "bbox": [117.0, 266.0, 45.0, 35.0], "area": 895, "segmentation": {"size": [512, 512], "counts": "hhj13m?1N3M2O2M2N3N2M2d@]OX?g0O0O2N101O02O0O1O00O1O010O1O010OO1010O010O00010O03M2O2M2N3N1N3M2O2M2N3NUg^5"}, "image_id": 962, "id": 15754}, {"iscrowd": 0, "category_id": 1, "bbox": [410.0, 266.0, 43.0, 26.0], "area": 477, "segmentation": {"size": [512, 512], "counts": "hX]62m?2N3N1N2N3M2O1N2N00010O0000010O00000101N2N3M2O1N3M2N11N2N2O1N000010O0000010O2N2N2O1NbWm0"}, "image_id": 962, "id": 15755}, {"iscrowd": 0, "category_id": 1, "bbox": [322.0, 267.0, 39.0, 42.0], "area": 1477, "segmentation": {"size": [512, 512], "counts": "[XQ5d0\\?0000e0[O0000000000000001O0000000000000001O0000000000000000000000000001O000DbNjA]1W>500011N3M10O00IeAjN[>W1fAgN[>X1hAfNW>Z1710O0000010O0001O01O000N3O01O0001O01O00010O0001O01O01O3N1N2N3M2O20O0010O001M2N2O2M2N3N1N2N3M2O2M2N2Ofdd0"}, "image_id": 962, "id": 15761}, {"iscrowd": 0, "category_id": 1, "bbox": [17.0, 340.0, 12.0, 15.0], "area": 146, "segmentation": {"size": [512, 512], "counts": "fj84k?8I2N0000O1000O100007IUUa7"}, "image_id": 962, "id": 15762}, {"iscrowd": 0, "category_id": 1, "bbox": [38.0, 345.0, 43.0, 45.0], "area": 872, "segmentation": {"size": [512, 512], "counts": "`[c06i?:G7H001O01O01O01O00010O0001N11O01O01O0001O01O01O01O01O01O00012M2N3N1N3M2O1N3M2N3N1NhTg6"}, "image_id": 962, "id": 15763}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 350.0, 13.0, 26.0], "area": 184, "segmentation": {"size": [512, 512], "counts": "n:j0V?05K8G10000000000000009G\\Ti7"}, "image_id": 962, "id": 15764}, {"iscrowd": 0, "category_id": 1, "bbox": [176.0, 360.0, 59.0, 55.0], "area": 1778, "segmentation": {"size": [512, 512], "counts": 
"o[h22m?3N1N3M2`@FW?2n?1N3M2O1N3M2O2M2N3M2O2M2N2O2M2N3N1N3M2O0O1O01O01O00010O00010O0001OYOgNhBZ1X=hNeBX1[=jNdBU1\\=nNaBS1_=nN_BR1a=QO]Bn0c=TO[Bm0e=TOYBl0g=WOVBi0j=YOUBg0k=f000010O0000010O000ROTBJl=5VBIj=8XBEh=;ZBDf=;]BBc=?^B_Ob=a0aB]O_=b0cB\\O]=e0eBXO[=h0gBWOY=h0iBVOW=k0kBROU=n0mBPOS=P1PCnNP=Q1o01O01O01O01O01O01O01O0001O01O01O01O01O01O01O01O02N3N1N3M2N3N1N3M2O2M2N2O2M2N3N1NXcY6"}, "image_id": 962, "id": 15768}, {"iscrowd": 0, "category_id": 1, "bbox": [125.0, 386.0, 39.0, 35.0], "area": 616, "segmentation": {"size": [512, 512], "counts": "gln13l?2O2M2N2N3N1N3M2O0O000010O000010O000010O000010O000010O00002O2M2N2O2M2N3M2O1Ncc]5"}, "image_id": 962, "id": 15769}, {"iscrowd": 0, "category_id": 1, "bbox": [202.0, 404.0, 69.0, 62.0], "area": 2053, "segmentation": {"size": [512, 512], "counts": "b]U31n?3N1N2N3M2`@FX?o04O2M2N2N3N1N2N3M2O1N3M2N2O2M2NQb`0"}, "image_id": 962, "id": 15772}, {"iscrowd": 0, "category_id": 1, "bbox": [161.0, 431.0, 35.0, 30.0], "area": 558, "segmentation": {"size": [512, 512], "counts": "Qn`21n?3N1KL[@7b?5O0O001O01O02N3M2O1N3M2O0O001O01O00101N2N3N1N2N3M2O2M2N2O2M2NRbm4"}, "image_id": 962, "id": 15773}, {"iscrowd": 0, "category_id": 1, "bbox": [99.0, 451.0, 21.0, 22.0], "area": 230, "segmentation": {"size": [512, 512], "counts": "Yna11n?2O1N3M2O2M2N2N3N1O1010M2N3N1N2N3N1N2N3M`aS6"}, "image_id": 962, "id": 15774}, {"iscrowd": 0, "category_id": 1, "bbox": [461.0, 462.0, 51.0, 50.0], "area": 1711, "segmentation": {"size": [512, 512], "counts": "onV72n?1N3M2O2M2N2O2M2N3N1N3M2O1N3M2O2N100010OO2M2N100O1O100O1O1O100O0010O002N3N110N00O0000010OL[AnNe>R1]AlNd>T1300010bA"}, "image_id": 962, "id": 15775}, {"iscrowd": 0, "category_id": 1, "bbox": [250.0, 467.0, 59.0, 45.0], "area": 1512, "segmentation": {"size": [512, 512], "counts": "S_m32m?2O1N3M2N3N1N2N3N1N3N11ON3N1N3M2O1N3M2N2O0O1O1001O1O2N1N2O0O0001O01O00010O0001O01O002O2M2N2O2M2N3M2O1N3M2O2M2N2N3N1NhPU3"}, "image_id": 962, "id": 15776}, {"iscrowd": 0, "category_id": 1, "bbox": [60.0, 479.0, 82.0, 33.0], "area": 1333, "segmentation": {"size": [512, 512], "counts": "l_n0120i?4O100O1O1O12N00O100O1O100O1O1001O2N1O2N00O1O1O100O1O1O100O1O100O1O1O100O1O11O0000O1O100O1O1O100O1O100O1O1O100O1O1O100O1O11O1O2N1O2N1O1O2N1O2N1O1O2N1O1O2M2N3N1N2NW`h5"}, "image_id": 962, "id": 15777}, {"iscrowd": 0, "category_id": 1, "bbox": [206.0, 494.0, 25.0, 18.0], "area": 289, "segmentation": {"size": [512, 512], "counts": "i_W33m?1N3M2O0O1O1O100O1O100O1O1O100O11O1O1O2N1O2I\\@Ne?0]@Od?O^@0j_\\4"}, "image_id": 962, "id": 15778}, {"iscrowd": 0, "category_id": 1, "bbox": [323.0, 511.0, 2.0, 1.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "ooQ51o?0QPm2"}, "image_id": 962, "id": 15779}, {"iscrowd": 0, "category_id": 1, "bbox": [101.0, 319.0, 95.0, 77.0], "area": 3025, "segmentation": {"size": [512, 512], "counts": "`kb12m?2O2M3M2O2M2N3N1N3M3N1N3N1N3M2O2M2N01O01O01O010O01O01O01O01O01O01O010O01O01O01O01O01O01O010O01O01O01O01O01O01O010O01O01O01O01O01O01O010O01O01O01O01O01O01O102M3M2O2M2O2M2N3N2M2N3N1N3M2O2M3NWem4"}, "image_id": 963, "id": 15780}, {"iscrowd": 0, "category_id": 1, "bbox": [197.0, 8.0, 32.0, 22.0], "area": 559, "segmentation": {"size": [512, 512], "counts": "]`R39g?8H000K5001O3M0O100000O10000000000000O100000O10000000000000O7J`_]4"}, "image_id": 964, "id": 15781}, {"iscrowd": 0, "category_id": 1, "bbox": [400.0, 464.0, 10.0, 17.0], "area": 163, "segmentation": {"size": [512, 512], "counts": "g^X6:_?70000000000000000`ab1"}, "image_id": 964, "id": 15782}, {"iscrowd": 0, "category_id": 1, "bbox": [26.0, 0.0, 110.0, 53.0], "area": 2777, 
"segmentation": {"size": [512, 512], "counts": "hP=2m?2N2O2M2N2N3N1N3M2O1N3M2N2O2M2N3M2O1N3M2O0O001O01O01O01O0001O01O00100O1O100O1O1O100O1O100O1O1O100O1O1O100O1O100O1O1O100O1O1O100O1O11O1O1O2N1O1OO100O1O1O100O1O100O1O1O100O1O1O100O1O100O1O1O100O1O1O100O1O100O1O1O100O1O10PPl5"}, "image_id": 965, "id": 15783}, {"iscrowd": 0, "category_id": 1, "bbox": [210.0, 0.0, 23.0, 13.0], "area": 167, "segmentation": {"size": [512, 512], "counts": "PPY31o?2N1O2N1O1O2N1O2NO100O1O1O100O1O100O1O1O2O1No_[4"}, "image_id": 965, "id": 15784}, {"iscrowd": 0, "category_id": 1, "bbox": [270.0, 0.0, 7.0, 3.0], "area": 14, "segmentation": {"size": [512, 512], "counts": "PPW41o?1O1O00O100OQ`e3"}, "image_id": 965, "id": 15785}, {"iscrowd": 0, "category_id": 1, "bbox": [284.0, 0.0, 56.0, 54.0], "area": 1513, "segmentation": {"size": [512, 512], "counts": "[Q^41n?2ZONWA1C3T?OVA8h>IVA9h>JVA7h>KVA8h>IVA9h>JUA8i>`0O10O000010O02N2N10O00010O0000010O00010O0001O01O0001O01O01O100O2N2N2O2M2N3N1N2N3M2O2M2N3N1N2N]oe2"}, "image_id": 965, "id": 15786}, {"iscrowd": 0, "category_id": 1, "bbox": [484.0, 0.0, 8.0, 4.0], "area": 19, "segmentation": {"size": [512, 512], "counts": "PPb71o?2N1O00O1O1O10PP:"}, "image_id": 965, "id": 15787}, {"iscrowd": 0, "category_id": 1, "bbox": [230.0, 27.0, 49.0, 42.0], "area": 1146, "segmentation": {"size": [512, 512], "counts": "ZQc32m?3N1N2N3M2O2M2N2O2M2N2N3N1N3M2O0O1O00010O111O010O00N3M2N10O01O01O0001O2O2M2N2O2M2N2N3N1N3M2O1N3M_^d3"}, "image_id": 965, "id": 15788}, {"iscrowd": 0, "category_id": 1, "bbox": [5.0, 31.0, 20.0, 18.0], "area": 186, "segmentation": {"size": [512, 512], "counts": "Ya21n?2N3M2O1N3M1O10O0000010O001O3N1N2N3M2Oj^c7"}, "image_id": 965, "id": 15789}, {"iscrowd": 0, "category_id": 1, "bbox": [307.0, 43.0, 62.0, 52.0], "area": 1675, "segmentation": {"size": [512, 512], "counts": "Ubi41n?3M2N3LH\\@9b?4N1O010O00010O3j@_Oh>c0RA^ON1n>d0RABl>j0N3M2N10O01O2O1N00000100O2N0010O00010O00000101N2N2O2M2N3M2O1N000010O01O2O1N3M2N2O2M2N3M2O1N3MP^W2"}, "image_id": 965, "id": 15790}, {"iscrowd": 0, "category_id": 1, "bbox": [9.0, 46.0, 28.0, 26.0], "area": 410, "segmentation": {"size": [512, 512], "counts": "ha41n?3N1N3M3N1N3N2M2O2M1O10O010O01O01O101N3N2M3M2O2M3N1N3MT^]7"}, "image_id": 965, "id": 15791}, {"iscrowd": 0, "category_id": 1, "bbox": [54.0, 50.0, 28.0, 32.0], "area": 496, "segmentation": {"size": [512, 512], "counts": "YRk01c?1f@1X?1f@2W?0g@2X?0f@1X?1f@1X?>M10O01O0001O01O0001O2O1N3M2N2O2M2N2N3N1N2NQnf6"}, "image_id": 965, "id": 15792}, {"iscrowd": 0, "category_id": 1, "bbox": [127.0, 62.0, 66.0, 65.0], "area": 2029, "segmentation": {"size": [512, 512], "counts": "ebo13l?2N2O2M2N2N3n@B^>?aAB]>`0aAB]>a0`AB]>`0aAB^>?aAB]>a0`AB]>`0aAB]>R1O2O00O2M2N2O2M2N2N3N1N2N3M10O000AUOkAj0U>YOhAh0W>ZOgAf0Z>[OdAe0\\>^ObAa0^>A`A?`>C^A>b>?0000010O0000101N3M2N2O2M2N2N3N1N2N3M2O1N3M2N2O2M2N2N3NW]o4"}, "image_id": 965, "id": 15793}, {"iscrowd": 0, "category_id": 1, "bbox": [66.0, 72.0, 31.0, 30.0], "area": 498, "segmentation": {"size": [512, 512], "counts": "dRQ12n?1N3M2O2M2N3M2O1N3M2O2M1O10O0000010O2N2O2M2N2N3N1N3M2O1N3M2NY]_6"}, "image_id": 965, "id": 15794}, {"iscrowd": 0, "category_id": 1, "bbox": [256.0, 73.0, 38.0, 36.0], "area": 719, "segmentation": {"size": [512, 512], "counts": "WSP41c?0g@2X?0f@2W?0g@1X?1f@OZ?4c@L^?:01O002O1N2N3M2O0O001O01O0001O01O0001O02N3M2O1N3M2N2O2M2N2O2M2NXm\\3"}, "image_id": 965, "id": 15795}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 79.0, 25.0, 42.0], "area": 575, "segmentation": {"size": [512, 512], "counts": 
"_2T1m>O2M2N2O2M2N3M2O10N2N3N1N3M21O010[Oo@7R?GPA7S?Gn@7T?Go@7R?GPA7_?M2O2M_\\c7"}, "image_id": 965, "id": 15796}, {"iscrowd": 0, "category_id": 1, "bbox": [330.0, 92.0, 69.0, 50.0], "area": 1767, "segmentation": {"size": [512, 512], "counts": "\\SU51n?3M2O1N3M2O2M2N2N3N1N3M2O1O20O0001N1O2M2N2O2M2N2N01O01O01O2O1N2N3M2O2M1O01O0001O01O01O01O0001O101N0010O0001O01O03M2N3N1N2N3N1N3M2N2O2M2Nd\\h1"}, "image_id": 965, "id": 15797}, {"iscrowd": 0, "category_id": 1, "bbox": [287.0, 113.0, 34.0, 32.0], "area": 549, "segmentation": {"size": [512, 512], "counts": "Rd_41n?2N3M2O1N3M2N2O2M2N2O1N001O01O01O0001O01O0002N2O1N3M2N2O2M2N2O2M2NR\\o2"}, "image_id": 965, "id": 15798}, {"iscrowd": 0, "category_id": 1, "bbox": [77.0, 115.0, 30.0, 19.0], "area": 547, "segmentation": {"size": [512, 512], "counts": "dcV1b0^?0000000000000000000000000000000000000000000O1000000000000]\\Z6"}, "image_id": 965, "id": 15799}, {"iscrowd": 0, "category_id": 1, "bbox": [116.0, 121.0, 18.0, 13.0], "area": 222, "segmentation": {"size": [512, 512], "counts": "iSj1=c?0000000000000000000000000000000ISA4o>JSA4P?ISA5o>IRA5P?ISA5]?N3M3NmZ[7"}, "image_id": 965, "id": 15804}, {"iscrowd": 0, "category_id": 1, "bbox": [180.0, 138.0, 16.0, 30.0], "area": 368, "segmentation": {"size": [512, 512], "counts": "ZTj28h?0000000Y@=Q?900000000000000000O1[O[lm4"}, "image_id": 965, "id": 15805}, {"iscrowd": 0, "category_id": 1, "bbox": [238.0, 140.0, 27.0, 23.0], "area": 285, "segmentation": {"size": [512, 512], "counts": "fTg31n?2N2O1N2N2O0O00010O00011N2N3N1010O010O0O2M2O2M2N3N1NU[k3"}, "image_id": 965, "id": 15806}, {"iscrowd": 0, "category_id": 1, "bbox": [120.0, 144.0, 20.0, 27.0], "area": 420, "segmentation": {"size": [512, 512], "counts": "mTl18h?00000I7N8J0L400000000000000006J000000Zki5"}, "image_id": 965, "id": 15807}, {"iscrowd": 0, "category_id": 1, "bbox": [332.0, 150.0, 27.0, 25.0], "area": 332, "segmentation": {"size": [512, 512], "counts": "TUV51n?3M2N2O2M2N2N3N0O0000010O0001O01O0001O2O1N3M2N3N1N2NR[\\2"}, "image_id": 965, "id": 15808}, {"iscrowd": 0, "category_id": 1, "bbox": [264.0, 160.0, 12.0, 11.0], "area": 71, "segmentation": {"size": [512, 512], "counts": "SUT42m?2N2O1N2O2OO1O2M2N2N2Oije3"}, "image_id": 965, "id": 15809}, {"iscrowd": 0, "category_id": 1, "bbox": [167.0, 170.0, 49.0, 64.0], "area": 1934, "segmentation": {"size": [512, 512], "counts": "Pgc21k?4L4L5K4L4M4K4L4L4L5K4L4O2O01O00010O00010O0001O01O00010O00O2K4M2NO4M4L3L4O1010OM3@_A^Oe>=_A_Oe>>^A_Oe>=b0L3L4M3Mejc4"}, "image_id": 965, "id": 15810}, {"iscrowd": 0, "category_id": 1, "bbox": [421.0, 170.0, 43.0, 41.0], "area": 995, "segmentation": {"size": [512, 512], "counts": "ieb62m?5L1N3M2N2O2N100O2M2N2O2M2N3M2O1N1O01O0001O01O00010O2N3N1N2N3M2O1O11M1O12M2_Oe@:]?De@:b?O2I[@Ng?0Vjg0"}, "image_id": 965, "id": 15811}, {"iscrowd": 0, "category_id": 1, "bbox": [16.0, 172.0, 35.0, 34.0], "area": 618, "segmentation": {"size": [512, 512], "counts": "mU82n?1N2N3M2O2M2N2O2M2N2N3N0O0010O0000010O000010O0002O1N3M2N3N1N2N3N1KZ@Mi?0ZZV7"}, "image_id": 965, "id": 15812}, {"iscrowd": 0, "category_id": 1, "bbox": [69.0, 189.0, 50.0, 43.0], "area": 1475, "segmentation": {"size": [512, 512], "counts": "efR14l??A0000000000000^Ob0000O100000000000000000000000000000K5000000000000O1000O105K0[OVA4j>Le00000000000000004LjYT6"}, "image_id": 965, "id": 15813}, {"iscrowd": 0, "category_id": 1, "bbox": [253.0, 202.0, 80.0, 74.0], "area": 3485, "segmentation": {"size": [512, 512], "counts": "Rhn3b0^?0bNHTC8lBO1IN_@5_?M^@5`?7N3M2N2O1N3M2N2N2O2M20O1O1N2N3M2N2O1N3M2N2N3N1N2N2N3M2O1N\\Xi2"}, "image_id": 965, 
"id": 15814}, {"iscrowd": 0, "category_id": 1, "bbox": [414.0, 202.0, 22.0, 22.0], "area": 253, "segmentation": {"size": [512, 512], "counts": "bV_61n?2O1N3M2N2O2M2N2N3N1N11N2O2M2N2N3N1N2N3M2OYiU1"}, "image_id": 965, "id": 15815}, {"iscrowd": 0, "category_id": 1, "bbox": [405.0, 206.0, 87.0, 81.0], "area": 3232, "segmentation": {"size": [512, 512], "counts": "VhZ61n?3N1N2N3N1N3M2N2O2M2N2N3N1N2FTO]Ao0`>TO^Am0a>TO]An0a>:N10O0000010O00010O0000010O0000010O00010O0000010O0001McNcA\\1]>30010O0001ONaNdA`1[>21O01O02N1O10O000010O03O00N2O2M2N2N2OO01O3N1N2N3M2ZOo@;R?CPA;S?Bo@AjAb0V>^OjAb0V>^OjAb0V>^OjAb0V>^OjAb0V>^OmA?S>AmA?S>AmA?S>g000000`NmAS1S>mNmAS1`>00000O100000000000000000000000000000000000000000O100000000000d0\\O0000000000000000000000000000000000000000000000000000000M3000900000000000000000000O10000000000009G00000O10000000000000000000O1000000000000000000000000d0\\OVTa4"}, "image_id": 965, "id": 15831}, {"iscrowd": 0, "category_id": 1, "bbox": [90.0, 351.0, 38.0, 35.0], "area": 1179, "segmentation": {"size": [512, 512], "counts": "W[]1k0U?0000000O100000000000000000000J60000000000O10000000000000000000000000000l0TOUdo5"}, "image_id": 965, "id": 15832}, {"iscrowd": 0, "category_id": 1, "bbox": [62.0, 370.0, 20.0, 20.0], "area": 365, "segmentation": {"size": [512, 512], "counts": "c[o0:f?9G0000000000O1000000000O1000000000000^df6"}, "image_id": 965, "id": 15833}, {"iscrowd": 0, "category_id": 1, "bbox": [197.0, 389.0, 38.0, 26.0], "area": 688, "segmentation": {"size": [512, 512], "counts": "\\lR33l?2O000O100MK[@5c?`0E004L000000000000L40000000000000000000001O0K500000000000O16JeSZ4"}, "image_id": 965, "id": 15834}, {"iscrowd": 0, "category_id": 1, "bbox": [107.0, 396.0, 60.0, 43.0], "area": 1927, "segmentation": {"size": [512, 512], "counts": "Ume14l?>B000O100000000000000000000000000YOFhA:X>g000000000000O1000000000000000000000O12N0000001O000000000000OJ7000000000000000007I[S\\5"}, "image_id": 965, "id": 15835}, {"iscrowd": 0, "category_id": 1, "bbox": [509.0, 445.0, 3.0, 5.0], "area": 8, "segmentation": {"size": [512, 512], "counts": "nmn71o?1N3QB"}, "image_id": 965, "id": 15836}, {"iscrowd": 0, "category_id": 1, "bbox": [230.0, 126.0, 10.0, 11.0], "area": 102, "segmentation": {"size": [512, 512], "counts": "oSc3:f?0000000000000O100RlW4"}, "image_id": 968, "id": 15837}, {"iscrowd": 0, "category_id": 1, "bbox": [453.0, 170.0, 32.0, 31.0], "area": 889, "segmentation": {"size": [512, 512], "counts": "beR7e0S?81O00000000000001O000000000000000000000001O00000000000001O00_OU[="}, "image_id": 968, "id": 15838}, {"iscrowd": 0, "category_id": 1, "bbox": [215.0, 258.0, 29.0, 12.0], "area": 324, "segmentation": {"size": [512, 512], "counts": "Rh[3;e?00000000000000000000001O0000000001O0000000000000000000OngU4"}, "image_id": 968, "id": 15839}, {"iscrowd": 0, "category_id": 1, "bbox": [311.0, 311.0, 18.0, 13.0], "area": 206, "segmentation": {"size": [512, 512], "counts": "iik4:d?200000000000010O0000000000000000H`Vk2"}, "image_id": 968, "id": 15840}, {"iscrowd": 0, "category_id": 1, "bbox": [227.0, 333.0, 38.0, 30.0], "area": 950, "segmentation": {"size": [512, 512], "counts": "oja38_?9G90001O0000000001O000001O000000000001O000001O0000000001O000001O0000M3G9GiUk3"}, "image_id": 968, "id": 15841}, {"iscrowd": 0, "category_id": 1, "bbox": [247.0, 365.0, 18.0, 20.0], "area": 305, "segmentation": {"size": [512, 512], "counts": "jkk35c?8K6O00000001O0001O0000000001O0N2HiTk3"}, "image_id": 968, "id": 15842}, {"iscrowd": 0, "category_id": 1, "bbox": [78.0, 369.0, 35.0, 19.0], "area": 466, "segmentation": {"size": [512, 512], 
"counts": "g[W1:`?70O0000000000000000010O000000000000000001O01M7J101O0001O00000000000VTW6"}, "image_id": 968, "id": 15843}, {"iscrowd": 0, "category_id": 1, "bbox": [184.0, 381.0, 42.0, 18.0], "area": 577, "segmentation": {"size": [512, 512], "counts": "[\\l21b?=O11O000000000000000001O000001O00000000000000000001O0001O00000000000000000001O00GYd^4"}, "image_id": 968, "id": 15844}, {"iscrowd": 0, "category_id": 1, "bbox": [255.0, 390.0, 35.0, 29.0], "area": 749, "segmentation": {"size": [512, 512], "counts": "ilo34g?5K6J5L4000001O01O000001O01O0001O0001O0001O0001O0001O0001O0O1K5K5K6Jic^3"}, "image_id": 968, "id": 15845}, {"iscrowd": 0, "category_id": 1, "bbox": [226.0, 426.0, 30.0, 34.0], "area": 426, "segmentation": {"size": [512, 512], "counts": "U^a36g?3C=000000000001ON2A?000K50001N1K51O0000000000009G0000000Icbo3"}, "image_id": 968, "id": 15846}, {"iscrowd": 0, "category_id": 1, "bbox": [398.0, 429.0, 24.0, 21.0], "area": 467, "segmentation": {"size": [512, 512], "counts": "k]W66\\?>0000000000000000000000000000000000010O000000bb\\1"}, "image_id": 968, "id": 15847}, {"iscrowd": 0, "category_id": 1, "bbox": [221.0, 460.0, 100.0, 52.0], "area": 3386, "segmentation": {"size": [512, 512], "counts": "Yo^33m?2M4M2M3N3M2M3N2M3N0O100O10000O100O100O100O100O10000O100O100O100O10000O1002N2N2N2N2N3M2N2N2N3M2N2N2N2N3M1O00O100O100O10000iNW1K500000000000000000000000000000000000000000000000000000000000H800000000000dQo2"}, "image_id": 968, "id": 15848}, {"iscrowd": 0, "category_id": 1, "bbox": [114.0, 277.0, 100.0, 156.0], "area": 9990, "segmentation": {"size": [512, 512], "counts": "P]i11k?4L5K4L4L4K6K4L4L4L5J5L4L4L5K4L4K5L4L5K4L4L4K6K4L4L4L5K4K5L4L5K4L4N201O01O0001O0001O0001O01O0001O01O0001O01O0001O01O0001O01O0001O0001M2L4L4K6K4L4L4L5K4K5L4L5K4000001^OoBSNQ=h1TCYNk`0XA@h><\\ADd>7aAI_>3fAL[>NjA2R?0001O000000001O0000001O000000001O000000Q`e5"}, "image_id": 969, "id": 15855}, {"iscrowd": 0, "category_id": 1, "bbox": [99.0, 0.0, 64.0, 34.0], "area": 1399, "segmentation": {"size": [512, 512], "counts": "]`a11l?3N3M2M3N3N101O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O001O00001O001O00001O001O00O1N2M3N2M3N2N2M3N2M3N2N2M3NR`^5"}, "image_id": 970, "id": 15856}, {"iscrowd": 0, "category_id": 1, "bbox": [179.0, 0.0, 34.0, 29.0], "area": 579, "segmentation": {"size": [512, 512], "counts": "a`i22l?2M4M2N2M4M2N3N11O010O01O01O010O010O01O01O010O01O01O0O2L3N2N3L3N3Lh_e4"}, "image_id": 970, "id": 15857}, {"iscrowd": 0, "category_id": 1, "bbox": [201.0, 0.0, 20.0, 10.0], "area": 124, "segmentation": {"size": [512, 512], "counts": "P`T33m?1O001O00001O001O001O00001O001OO1M3N2MS`a4"}, "image_id": 970, "id": 15858}, {"iscrowd": 0, "category_id": 1, "bbox": [214.0, 0.0, 30.0, 33.0], "area": 599, "segmentation": {"size": [512, 512], "counts": "lP[32l?2M4L3M3N3L3M3N3L2N02N30000001O00001O00001O01L4L4L4L4L4L4LloU4"}, "image_id": 970, "id": 15859}, {"iscrowd": 0, "category_id": 1, "bbox": [250.0, 0.0, 23.0, 5.0], "area": 67, "segmentation": {"size": [512, 512], "counts": "PPm31o?0000001O000000001O000000001O0000001O000000MS`g3"}, "image_id": 970, "id": 15860}, {"iscrowd": 0, "category_id": 1, "bbox": [281.0, 0.0, 52.0, 63.0], "area": 1887, "segmentation": {"size": [512, 512], "counts": "f`\\42l?2M3M4M2M3[ACd=a0YBBd=`0YBCd=a0YBAf=`0WBDi=;TBHm=8PBKo=5nANS>2jA1U>m01O10M2O2M201O00001O00O1M3N2N2M3N2N2M3N2N2M3N2N2M3N2N2M3N2N2M3N2N2M3N2N2M3NR`i2"}, "image_id": 970, "id": 15861}, {"iscrowd": 0, "category_id": 1, "bbox": [366.0, 0.0, 59.0, 32.0], "area": 1199, "segmentation": {"size": [512, 512], "counts": 
"\\Pg51l?3N2M4M2N3O00001O001O00001O001O001O00001O001O00001O001O00001O001O001O00001O001O00001O001O001ON2M3N2N2M3N2M3N2M3N2N2MS`[1"}, "image_id": 970, "id": 15862}, {"iscrowd": 0, "category_id": 1, "bbox": [431.0, 0.0, 39.0, 18.0], "area": 402, "segmentation": {"size": [512, 512], "counts": "P`g63m?001O00001O001O00001O001O000Z@Ic?:01O00001O001O00001O001O000010O01O01O0N3M2M3N3Lood0"}, "image_id": 970, "id": 15863}, {"iscrowd": 0, "category_id": 1, "bbox": [235.0, 30.0, 24.0, 29.0], "area": 395, "segmentation": {"size": [512, 512], "counts": "Zae31n?2N2O2M2N2_@GX?;d@H[??O2O0O1O2N1O1O2O_On@3T?Km@5T?Im@6U?Ik@6X?Gh@:Y?Dh@;`?L3M3Mh^n3"}, "image_id": 970, "id": 15864}, {"iscrowd": 0, "category_id": 1, "bbox": [86.0, 33.0, 62.0, 43.0], "area": 1606, "segmentation": {"size": [512, 512], "counts": "dQ[12m?2N2N2N2N2N2N2N2M3N2N2N2N2N2N2N2O000O1O1010O1O1O001O1O1O100SOPAh0Q?VOPAj0S?0000000010O0000000000010O000000000000010O0000000000010K4H8Ifne5"}, "image_id": 970, "id": 15865}, {"iscrowd": 0, "category_id": 1, "bbox": [405.0, 40.0, 44.0, 40.0], "area": 1075, "segmentation": {"size": [512, 512], "counts": "naZ61m?3L3N3M2N2M4M2N3O0010O000102M010O01O01O010O0O2O0010O010ON2N3M2N3M2001N11ON3L3N3ZOPA11JQ?3PA11JR?2PA1Y?Lj@1mmo0"}, "image_id": 970, "id": 15866}, {"iscrowd": 0, "category_id": 1, "bbox": [257.0, 42.0, 31.0, 32.0], "area": 501, "segmentation": {"size": [512, 512], "counts": "jaP43j?4L3M3M4L3O1010O000010O00010O00010O0g@\\OV?h0O01Dk@KU?2o@NP?OSA1n>KUA5\\?0O00010ON2MQn_3"}, "image_id": 970, "id": 15867}, {"iscrowd": 0, "category_id": 1, "bbox": [156.0, 46.0, 28.0, 29.0], "area": 489, "segmentation": {"size": [512, 512], "counts": "PR^23k?2M4L3N3L3N2M4O010O00010O010O00010O010ON2M4M2M4M2N2M4M\\nS5"}, "image_id": 970, "id": 15868}, {"iscrowd": 0, "category_id": 1, "bbox": [332.0, 51.0, 79.0, 56.0], "area": 2287, "segmentation": {"size": [512, 512], "counts": "hRV51l?3N3M2M4M2N2010O01O01N1M4M2O02L3N3M2N3M2M3N3M2N3M2M40O00010O010O010O00010O010O010O00010O010O010O00010O010O01O01O010O010O01O01O0O2M2N3L3N2N3L3N3M2M4M2N2M4M2Ni]b1"}, "image_id": 970, "id": 15869}, {"iscrowd": 0, "category_id": 1, "bbox": [225.0, 75.0, 81.0, 75.0], "area": 2874, "segmentation": {"size": [512, 512], "counts": "ac`34j?2M3N3L3M4M2M4M2M4ESO^AP1`>9O2N1O2O001O01O010O105J10O010M2N2010O010O010O0010O0010N1N3N110O01OiN\\AR1i>@XAI5EX>?eAL1GZ>;hAMLJ]>6iAAnA>S>_OoAb0P>\\ORBd0n=ZOUBf0a>1N2N3L3N3M2N3L3N3N1001O0N3L3N3L3N2N3L3N3L3N3L3N3M2M3N3LflV3"}, "image_id": 970, "id": 15870}, {"iscrowd": 0, "category_id": 1, "bbox": [56.0, 79.0, 77.0, 50.0], "area": 2085, "segmentation": {"size": [512, 512], "counts": "YSl01l?3N3M2M4M2N2M4M2N3L3N3O010O01O01O010O010O0010O0010O00010O01O01O01O0010O00010O01O00010O00010O0001O01O00010O0001O01O00010O0001O01O00010O000N3L3L4M3L5L3M3Lo\\m5"}, "image_id": 970, "id": 15871}, {"iscrowd": 0, "category_id": 1, "bbox": [114.0, 83.0, 20.0, 16.0], "area": 180, "segmentation": {"size": [512, 512], "counts": "jRi13k?2N2N3N1010O0010O0010O0010O0010M2N3L3NYml5"}, "image_id": 970, "id": 15872}, {"iscrowd": 0, "category_id": 1, "bbox": [138.0, 93.0, 27.0, 26.0], "area": 434, "segmentation": {"size": [512, 512], "counts": "^SU22l?2M4M2M4M2M3N30O01O01O010O00010O01O01O001L3M3N3L3N2Mo\\]5"}, "image_id": 970, "id": 15873}, {"iscrowd": 0, "category_id": 1, "bbox": [410.0, 113.0, 15.0, 14.0], "area": 130, "segmentation": {"size": [512, 512], "counts": "hS]63k?2N3L3010O00010O010O0O2M2M3N]\\[1"}, "image_id": 970, "id": 15874}, {"iscrowd": 0, "category_id": 1, "bbox": [42.0, 122.0, 71.0, 61.0], "area": 2206, "segmentation": {"size": [512, 512], 
"counts": "iTe02l?2N3L3N2M4M2M4M2N3L3N2M4M210O00010O010O010O00010O010O00010O010O001N11O010O01O012M001M20010O010O01O01OWO_A3b>JaA6^>GeA9\\>DfA=Y>AjA>W>^OlAb0T>\\OoAd0f>010O0010O0N3L3N2N3L3N3L3NoZW6"}, "image_id": 970, "id": 15875}, {"iscrowd": 0, "category_id": 1, "bbox": [335.0, 125.0, 64.0, 46.0], "area": 2031, "segmentation": {"size": [512, 512], "counts": "PeW53h?5K6J5K5K5K5010O00J60001O000001O01O000000010O0000000010O0iN]AQ1j>O0001O00O2M200001O0001O0001O000001O01O000000010O00000001J5K5K5K5K5Ki[h1"}, "image_id": 970, "id": 15876}, {"iscrowd": 0, "category_id": 1, "bbox": [293.0, 128.0, 36.0, 34.0], "area": 725, "segmentation": {"size": [512, 512], "counts": "fdb41m?3L3N3M2M4M2M3N3M2O20O010O010O0010O0010O010O010O00001M2N3M2M4M2N3M2M3Ng[k2"}, "image_id": 970, "id": 15877}, {"iscrowd": 0, "category_id": 1, "bbox": [4.0, 159.0, 57.0, 81.0], "area": 2385, "segmentation": {"size": [512, 512], "counts": "oV21m?3M2_NKTC9iQ=AmBa0S=@iBd0W=[OgBg0Y=ZOdBh0]=WO`Bm0_=TO^Bn0b=RO\\BQ1d=nNZBT1f=mNVBW1j=?0010OVNVB^1j=_NYBb1g=[N\\Bd1d=ZN^Bg1n=O0010O0010O010O0010OO1N3L3N3M2M4M2N2M4M2N3L3N3M2M3N3M2M4M2NSZQ7"}, "image_id": 970, "id": 15878}, {"iscrowd": 0, "category_id": 1, "bbox": [49.0, 165.0, 25.0, 23.0], "area": 351, "segmentation": {"size": [512, 512], "counts": "ceh02k?4M2M3N3L3O20O00010O010O00010O010O000O2L3N3L3M3Nfjj6"}, "image_id": 970, "id": 15879}, {"iscrowd": 0, "category_id": 1, "bbox": [303.0, 176.0, 49.0, 90.0], "area": 2509, "segmentation": {"size": [512, 512], "counts": "mgg42l?3L3Y@I_?1b@:[?8M2N2M4M2M4UOPObBR1[=ROaBQ1]=QOaBR1[=QOcBQ1Z=SObBQ1[=QOcBQ1Z=SObBP1[=SOcBP1Z=l0M4N110OO1M4M2M4M2O2N1N2N3L3N3L3N2N3L301O010O01O01jNcA41O]>JfA4OO_>JdA41O]>KeA300]>JfA31Ol>NVA0aX`2"}, "image_id": 970, "id": 15880}, {"iscrowd": 0, "category_id": 1, "bbox": [69.0, 178.0, 33.0, 27.0], "area": 543, "segmentation": {"size": [512, 512], "counts": "VfR11l?3N3L3N2N3L3N3L300010O01O01O010O0N2N3M210O01O01O01O01N1M4M2M3N3LXj\\6"}, "image_id": 970, "id": 15881}, {"iscrowd": 0, "category_id": 1, "bbox": [379.0, 181.0, 17.0, 13.0], "area": 142, "segmentation": {"size": [512, 512], "counts": "lem51l?4L3O2O00010O00010O010O0001O0M4MXji1"}, "image_id": 970, "id": 15882}, {"iscrowd": 0, "category_id": 1, "bbox": [83.0, 208.0, 43.0, 60.0], "area": 1539, "segmentation": {"size": [512, 512], "counts": "QgY11l?3TAMi=6SBMj=6TBLi=7TBMi=6SBMj=6TBMj=4SBOm=2oA1R>NlA5S>LiA7W>IfA:[>f0O010O00010O01O01O010O01O01ON3M2M4L3N2M4L3N3L3M3N3L3M3N3L3N3LUiP6"}, "image_id": 970, "id": 15883}, {"iscrowd": 0, "category_id": 1, "bbox": [334.0, 254.0, 30.0, 30.0], "area": 547, "segmentation": {"size": [512, 512], "counts": "aXW53j?3N3M2M3N3L3N3M2010O00010O010O00010O010O0O2M2M3N3L3N3M2M3NlgY2"}, "image_id": 970, "id": 15884}, {"iscrowd": 0, "category_id": 1, "bbox": [239.0, 277.0, 73.0, 53.0], "area": 2004, "segmentation": {"size": [512, 512], "counts": "big31o?2M3N2M3N2FFi@=T?Fj@l0bAPO^>o0;M2N3L3N2N3M2M4M2N3L3NSW`2"}, "image_id": 970, "id": 15886}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 316.0, 13.0, 34.0], "area": 224, "segmentation": {"size": [512, 512], "counts": "l9R1o>M2N3L3N2M4M2N3L3N3M2M3NQVi7"}, "image_id": 970, "id": 15887}, {"iscrowd": 0, "category_id": 1, "bbox": [282.0, 328.0, 26.0, 26.0], "area": 443, "segmentation": {"size": [512, 512], "counts": "^Z]42n?3M3L4M3M4K3N000O0100000O01000O01000O0103M4K5L3M4KWeU3"}, "image_id": 970, "id": 15888}, {"iscrowd": 0, "category_id": 1, "bbox": [11.0, 334.0, 61.0, 69.0], "area": 2583, "segmentation": {"size": [512, 512], "counts": 
"Rl51m?2oN0oA4m=OQB3m=OPB5l=OQB3m=OPB4m=0PB3l=0QB3n=NPB5P>KlA8T>HjA:V>i00O010O0010O0010O0010O0010O0010O00010O010O00010O010O00010O01O0N2M4M2M4L3N3L3N2M4M2M4M2M3N3L3N3L3Nodk6"}, "image_id": 970, "id": 15889}, {"iscrowd": 0, "category_id": 1, "bbox": [236.0, 357.0, 68.0, 48.0], "area": 1882, "segmentation": {"size": [512, 512], "counts": "l[f32k?4M2N2N3L3N3M2M3N3M2010O00010O010O00010O010O01O01O010O010O00010O010O00010O01M2N2M4M201O0010O0010O0010O001ROYAb0f>[O]Ae0c>YO`Ag0`>UOcAk0i>01O0N3L3N2M4M2M4M2N3LVdW3"}, "image_id": 970, "id": 15890}, {"iscrowd": 0, "category_id": 1, "bbox": [70.0, 373.0, 44.0, 59.0], "area": 1549, "segmentation": {"size": [512, 512], "counts": "V]S11m?3L3M3N3[ODaA?\\>DaA`0\\>BbA`0[>DaA?\\>DaA`0\\>CaA?\\>d010O0N3M200010O01O01O01O01O0N3M2M3M2O2M3M3N3L3N2M4L3N3L3M3N3L3N3LVdV6"}, "image_id": 970, "id": 15891}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 386.0, 15.0, 25.0], "area": 258, "segmentation": {"size": [512, 512], "counts": "RZOmAi0T>b0O000O2M200010L3M3M4M12O000010O001N101N101K4M3L5L3L4M4K4M4K4M3L5Kfc]5"}, "image_id": 970, "id": 15893}, {"iscrowd": 0, "category_id": 1, "bbox": [198.0, 403.0, 59.0, 47.0], "area": 1712, "segmentation": {"size": [512, 512], "counts": "^]S32k?3N2N3L3N3M2M4M2N2M4M2O20O01O01O010O01O010O01O010O01O0O2M2N2010O010O010O00010O010O01M0002N3L3N3M2M3N3M2M4M2N3L3N2N3LXSo3"}, "image_id": 970, "id": 15894}, {"iscrowd": 0, "category_id": 1, "bbox": [21.0, 421.0, 23.0, 18.0], "area": 216, "segmentation": {"size": [512, 512], "counts": "^m:1m?2N3M2M40O0010O010O010O010O0010OO20O0O2M2N3MebY7"}, "image_id": 970, "id": 15895}, {"iscrowd": 0, "category_id": 1, "bbox": [39.0, 424.0, 25.0, 32.0], "area": 449, "segmentation": {"size": [512, 512], "counts": "Rnc01m?2M4M2M4M2M3N3L3N3L3O1010O000N3L3N3L3N2N3L3N3L3Nebo6"}, "image_id": 970, "id": 15896}, {"iscrowd": 0, "category_id": 1, "bbox": [258.0, 429.0, 32.0, 40.0], "area": 718, "segmentation": {"size": [512, 512], "counts": "]^Q41m?2M4M2N3L3N2M4M2N3L3N3L3N2O20O010O000O2M2M4M2N2M4M2M4M2N3L3N2M_b^3"}, "image_id": 970, "id": 15897}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 440.0, 27.0, 26.0], "area": 455, "segmentation": {"size": [512, 512], "counts": "m=620^?:M40O010O00O20O01O01O010O01O010O01O001M2M3N3M2M4M2NQRb7"}, "image_id": 970, "id": 15898}, {"iscrowd": 0, "category_id": 1, "bbox": [82.0, 456.0, 39.0, 35.0], "area": 830, "segmentation": {"size": [512, 512], "counts": "n^Y12l?2M3N3L3N3L3N2M4M201O01O010O01O01O010O01O01O010O01O01O010O0O1N3L3N3L3N2N3L3N_QS6"}, "image_id": 970, "id": 15899}, {"iscrowd": 0, "category_id": 1, "bbox": [179.0, 467.0, 51.0, 38.0], "area": 1698, "segmentation": {"size": [512, 512], "counts": "dni29g?c0]O9G000000000O10000000000000O100000000000003M000000000000000000004K10N200000000O100000000003M0000b0^Od`\\4"}, "image_id": 970, "id": 15900}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 468.0, 9.0, 23.0], "area": 119, "segmentation": {"size": [512, 512], "counts": "d>g0Y?O1M4M2N3L3N3M2M\\Qk7"}, "image_id": 970, "id": 15901}, {"iscrowd": 0, "category_id": 1, "bbox": [48.0, 492.0, 52.0, 20.0], "area": 911, "segmentation": {"size": [512, 512], "counts": "k_h05^?=N20000000000000000001O000000000000000000O1001O0000000000000000001O00000000000@d@<`?0000000001O000000000La`]6"}, "image_id": 970, "id": 15902}, {"iscrowd": 0, "category_id": 1, "bbox": [244.0, 510.0, 3.0, 2.0], "area": 4, "segmentation": {"size": [512, 512], "counts": "n_j32o?O00QPT4"}, "image_id": 970, "id": 15903}, {"iscrowd": 0, "category_id": 1, "bbox": [47.0, 0.0, 52.0, 27.0], "area": 1118, "segmentation": {"size": 
[512, 512], "counts": "X`g0:^?800000001O0000000000000000001O0000000000000000001O2N0000003M00000000001O00000000M3000000001O000000G9FZ`^6"}, "image_id": 971, "id": 15904}, {"iscrowd": 0, "category_id": 1, "bbox": [227.0, 0.0, 28.0, 27.0], "area": 498, "segmentation": {"size": [512, 512], "counts": "``a33j?3N3L3N2N3L3O2O0010O0010O010N1N3O00001ON2M4\\Oi@<^?1OO2M5K2Mh_P4"}, "image_id": 971, "id": 15905}, {"iscrowd": 0, "category_id": 1, "bbox": [271.0, 1.0, 86.0, 73.0], "area": 3083, "segmentation": {"size": [512, 512], "counts": "SaW41m?3L3N3M2M3N3M2M4M2N3L3N2N3L3O2O01O010O010O00010O010O00010O01]AiN\\>V1bAlN^>[1010O010O00bAaN[>b110O01O01OQOfA8[>EhA;W>BlA>U>_OnA`0R>]OnAg0Q>WOoAk0R>QOnAS1]>2N201O0010O0010O0010O0010O0010O0010O0010O001N1N3L3N2M4M2M4M2M3N3L3N3L3Nl^]2"}, "image_id": 971, "id": 15906}, {"iscrowd": 0, "category_id": 1, "bbox": [89.0, 11.0, 62.0, 56.0], "area": 1787, "segmentation": {"size": [512, 512], "counts": "\\a\\11m?3M2N3M2N3M2N2N3M2N3O010O0010O010O010OO2M2M4M2N2N3M2N3N1010N1010O010O0010O0010O01RO^A=a>AaA?`>^OcAb0\\>\\OfAd0[>YOhAg0W>WOkAi0V>TOmAl0c>0010O0N3M2N3M2N3L3N3M2N3M2Ng^d5"}, "image_id": 971, "id": 15907}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 12.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "<2boo7"}, "image_id": 971, "id": 15908}, {"iscrowd": 0, "category_id": 1, "bbox": [243.0, 18.0, 28.0, 32.0], "area": 531, "segmentation": {"size": [512, 512], "counts": "Yai32l?2M4M2M4M2M4M2O101O0M4M21O010O01O01O0M4M2M3N3L3N3L3N3LZ_h3"}, "image_id": 971, "id": 15909}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 24.0, 3.0, 6.0], "area": 10, "segmentation": {"size": [512, 512], "counts": "h06k?M2NY_n7"}, "image_id": 971, "id": 15910}, {"iscrowd": 0, "category_id": 1, "bbox": [26.0, 28.0, 37.0, 26.0], "area": 591, "segmentation": {"size": [512, 512], "counts": "ZQ=3k?4L3L3N3L3N3O000010O010O00010O010O0O1N3M2O2O010O00010O010O00010N1N3M2M3Nk^P7"}, "image_id": 971, "id": 15911}, {"iscrowd": 0, "category_id": 1, "bbox": [158.0, 30.0, 65.0, 41.0], "area": 2013, "segmentation": {"size": [512, 512], "counts": "TQ_2P1P?000000000000000000000000000000000000000000000000000000000000003M0O1000000O1000M3O01000O0100000O01000O10O1000O102N4K5L4L3M4K5L4LU^`4"}, "image_id": 971, "id": 15912}, {"iscrowd": 0, "category_id": 1, "bbox": [8.0, 33.0, 16.0, 20.0], "area": 193, "segmentation": {"size": [512, 512], "counts": "_Q43j?3M3M4L3N2010O00010N1M3M4L3M3Monc7"}, "image_id": 971, "id": 15913}, {"iscrowd": 0, "category_id": 1, "bbox": [74.0, 45.0, 15.0, 17.0], "area": 154, "segmentation": {"size": [512, 512], "counts": "iQU11m?2M4L3M4N10010O0010N1N3L3N2Mc^c6"}, "image_id": 971, "id": 15914}, {"iscrowd": 0, "category_id": 1, "bbox": [63.0, 66.0, 20.0, 24.0], "area": 289, "segmentation": {"size": [512, 512], "counts": "cbo02k?4M2M4L3N2M4N100010O010O0N3L3N2M4L3N3Ll]f6"}, "image_id": 971, "id": 15915}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 77.0, 51.0, 74.0], "area": 2299, "segmentation": {"size": [512, 512], "counts": "i3b0[?3N3M2O2N1N3M2M3N3L3N3M2M3N3L3N3L3O110O0010O010O0001L3N3L03N2N3L3N2M4M2N3L3N2M4M2M4M2WOTABTA;P?BSA;o>BTA;[?M2M3N3MR]V7"}, "image_id": 971, "id": 15916}, {"iscrowd": 0, "category_id": 1, "bbox": [231.0, 77.0, 15.0, 12.0], "area": 106, "segmentation": {"size": [512, 512], "counts": "ebc31m?2M4M2O2O01M21O02O0O0010OO1MamT4"}, "image_id": 971, "id": 15917}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 81.0, 3.0, 12.0], "area": 22, "segmentation": {"size": [512, 512], "counts": 
"a21lA3P>1lA2R>0lA2Q>1lA3P>1mA1Q>1lA3P>0nA2R>OjA4W>KgA8X>h010O00010O01O01O010O01O01O010O01M2N2M4M2M4L3N2M4M2M4M2M3N3L3N2M4M^l^6"}, "image_id": 971, "id": 15923}, {"iscrowd": 0, "category_id": 1, "bbox": [207.0, 114.0, 25.0, 31.0], "area": 399, "segmentation": {"size": [512, 512], "counts": "\\dW32k?3N2N3O0O1M4M2M4M0O2N3N3O01O0N2M4M2M3N3N1O1N3L3M]l[4"}, "image_id": 971, "id": 15924}, {"iscrowd": 0, "category_id": 1, "bbox": [148.0, 118.0, 53.0, 74.0], "area": 2043, "segmentation": {"size": [512, 512], "counts": "UUZ23l?3N2M3N1N3N2M3N2M3N2M3N2M2OON3L3N3M2M3N3L3N3M2M4M2N30O100O101N100O2O0O100O2M2L4M4K4M3L4M4K4M3L5K4M3L4M4KW\\k4"}, "image_id": 971, "id": 15925}, {"iscrowd": 0, "category_id": 1, "bbox": [344.0, 120.0, 67.0, 61.0], "area": 2087, "segmentation": {"size": [512, 512], "counts": "aT\\51n?2O1N2N2N2N3M2O1N2N2N2N3M2O1N2N2N2N3M2O1N1O002N2N2O1N2N2N3M20000001O0O1N2N2O101M2N2N2N2N2N3M2O0O0000002N2N2O1N2N2N3M2N2N2O1N2N3M2N2NW[b1"}, "image_id": 971, "id": 15926}, {"iscrowd": 0, "category_id": 1, "bbox": [101.0, 123.0, 44.0, 54.0], "area": 1478, "segmentation": {"size": [512, 512], "counts": "Veb11l?3N2AMn@7n>Mn@6j>EUA7O7h>EVAj0h>8N3L3N201O010O00010O010O00010O010O01O01O01N1N3L3M3N3L3N3L3N3L3M3N3L3N3L3Nj[g5"}, "image_id": 971, "id": 15927}, {"iscrowd": 0, "category_id": 1, "bbox": [438.0, 145.0, 66.0, 63.0], "area": 2058, "segmentation": {"size": [512, 512], "counts": "iUk61n?2N2N2N3M2O1N2N2N2N3M2N2O1N2N3M2BoNjAS1T>oNjAS1T>oNkAR1S>POkAQ1T>QOjAo0W>SOfAm0Z><000001O01O000000012M2N2N2N2N2N2OO000001O0001O0002N2N3M2N2O1N2N3M2N2N2O1N2N3M2N2N2N2Oij3"}, "image_id": 971, "id": 15928}, {"iscrowd": 0, "category_id": 1, "bbox": [391.0, 176.0, 29.0, 29.0], "area": 443, "segmentation": {"size": [512, 512], "counts": "leS62m?2O2M2N2N2N2O1N3M2N2N2N2O1N0001O2N2O2M2N2N2N3M2O1N2N2N3MRj]1"}, "image_id": 971, "id": 15929}, {"iscrowd": 0, "category_id": 1, "bbox": [171.0, 200.0, 28.0, 24.0], "area": 376, "segmentation": {"size": [512, 512], "counts": "afe22m?3N2M3N1N3N2M100O010O010O01O03M3N2ON3N2M3N2M3N2M3N2M2OVYl4"}, "image_id": 971, "id": 15930}, {"iscrowd": 0, "category_id": 1, "bbox": [409.0, 200.0, 16.0, 14.0], "area": 128, "segmentation": {"size": [512, 512], "counts": "af\\62m?101N2K401N00001010O1O1N2N3M2NbY[1"}, "image_id": 971, "id": 15931}, {"iscrowd": 0, "category_id": 1, "bbox": [219.0, 203.0, 85.0, 59.0], "area": 2560, "segmentation": {"size": [512, 512], "counts": "Xg]32m?1N3N2N2M2O2M3N1N3N2M2O2N2M2O2M3N1N3XAjNc>Z1N3O1O10O10O10O10O10O10O10O1M2O2M30O01000O010000O01000O01000M2O2M3N1N3N20O10O1N1N10OJl@BU?>m@@R?b051N3N2M2O2M3OO2N1N3N2M3N1N3N2M2O2M3N1N3N2MSiW3"}, "image_id": 971, "id": 15932}, {"iscrowd": 0, "category_id": 1, "bbox": [484.0, 204.0, 28.0, 50.0], "area": 763, "segmentation": {"size": [512, 512], "counts": "RWb71n?2N2O1N3M2N2N2O2M2N2N2N2O2M2N2N2N2N3N1N2N2N3M10O0001O0eI"}, "image_id": 971, "id": 15933}, {"iscrowd": 0, "category_id": 1, "bbox": [452.0, 211.0, 27.0, 27.0], "area": 380, "segmentation": {"size": [512, 512], "counts": "oVR72m?2N2N3N1N2N2N3M2O1N2N2N000010O3M2N2N2O1N3M2N2N2O2M2NPY`0"}, "image_id": 971, "id": 15934}, {"iscrowd": 0, "category_id": 1, "bbox": [404.0, 220.0, 30.0, 31.0], "area": 526, "segmentation": {"size": [512, 512], "counts": "aWZ61l?4M2N3M2N2N3M2N3M2N3O010O010O010O010O01M2N3M2N3M2M4M2N3M2NmhV1"}, "image_id": 971, "id": 15935}, {"iscrowd": 0, "category_id": 1, "bbox": [165.0, 226.0, 60.0, 57.0], "area": 1609, "segmentation": {"size": [512, 512], "counts": 
"bgb21n?2O2M2N2N3N1N3M2O1N3M2N2O2M2N3N1N2N3M2O1N3M21O010O01O01O01ON3M2N2O2M2N3M2O1N00010O0000100O2N2N010O2N2N3N1N3M2O1N3M2N2OnW_4"}, "image_id": 971, "id": 15936}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 233.0, 29.0, 58.0], "area": 1059, "segmentation": {"size": [512, 512], "counts": "Y7g1Z>O0010O010O0010O001M2N2M4M2N3L3N3M2M3N3L301O0Ig@GY?6k@HW?5k@IW?5YOYAf0g>:000O010O01000O010O10O10O011N4M3M2M4M2M0100O01000O0100O4M2N3L3N3L3N`Vo4"}, "image_id": 971, "id": 15940}, {"iscrowd": 0, "category_id": 1, "bbox": [256.0, 284.0, 24.0, 22.0], "area": 251, "segmentation": {"size": [512, 512], "counts": "PYP41n?3N1N2N3N1N3N1OO2010O010O01ON00010O02N2O2M3M2Ogfc3"}, "image_id": 971, "id": 15941}, {"iscrowd": 0, "category_id": 1, "bbox": [13.0, 289.0, 55.0, 55.0], "area": 1595, "segmentation": {"size": [512, 512], "counts": "Uj61m?2N3M2N3M2N3N1010OO2M2N3M2N3M201O010O0O2M2XAnN`>U1]AnN`>Z1N3M00010110O010O010O010M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2N3M2Ndfm6"}, "image_id": 971, "id": 15942}, {"iscrowd": 0, "category_id": 1, "bbox": [481.0, 291.0, 31.0, 49.0], "area": 888, "segmentation": {"size": [512, 512], "counts": "mi`71m?2N3N2M2N3N1N3M3N1N3M2O2M3M2N3O0VAoNe>V110O010O10O10O01hNZAU1i>0O010O010aF"}, "image_id": 971, "id": 15943}, {"iscrowd": 0, "category_id": 1, "bbox": [276.0, 297.0, 39.0, 38.0], "area": 764, "segmentation": {"size": [512, 512], "counts": "jYZ41o?1N2N2N3N1N2N2N3N1N2N3M2O1N2N3M2O0O0000010O0001O2O1N3M2N2O2M2N3Ka@Da?:4O2M2N2N3NTVR3"}, "image_id": 971, "id": 15944}, {"iscrowd": 0, "category_id": 1, "bbox": [446.0, 301.0, 61.0, 62.0], "area": 1225, "segmentation": {"size": [512, 512], "counts": "iYo63l?2M2N3N1N3M3N1O2O010O10O10O010OO2O10O10O010O10O10O01o@@d><[AEe>9VAHk>7SALl>5QANo>1PA0P?a00O10O10WOPA`0P?^OSAb0m>[OUAe0T?000O010O01000O010O01O1O010O010Gf@J[?3g@NX?0k@0U?Nm@OU?Om@OV?Nm@0]d2"}, "image_id": 971, "id": 15945}, {"iscrowd": 0, "category_id": 1, "bbox": [85.0, 310.0, 41.0, 49.0], "area": 1681, "segmentation": {"size": [512, 512], "counts": "\\jZ11o??A9G000L400_O]OeAc0[>a0000000000000000000000O10O100000000000000000000000000000000032Kg0YO^eP6"}, "image_id": 971, "id": 15946}, {"iscrowd": 0, "category_id": 1, "bbox": [362.0, 324.0, 60.0, 75.0], "area": 2118, "segmentation": {"size": [512, 512], "counts": "P\\e52l?3M2N3M3N1N3M2N3M2N3N1N3M2N3M3M2O2M2N3M2N3M2N3N1N2N00000010O0000101N3M2N3N1N3M2N3N1N3M2N3N1N3M2N3N1N3M3M2O2M2N3M2O2M2Nae\\1"}, "image_id": 971, "id": 15947}, {"iscrowd": 0, "category_id": 1, "bbox": [140.0, 335.0, 23.0, 33.0], "area": 408, "segmentation": {"size": [512, 512], "counts": "bZV24l?2N3L3[@F`??M4M2N3L3N3M11M4M2M4M0O2O3M2M4M2M3N3MgT^5"}, "image_id": 971, "id": 15948}, {"iscrowd": 0, "category_id": 1, "bbox": [408.0, 337.0, 63.0, 107.0], "area": 3236, "segmentation": {"size": [512, 512], "counts": "Y]\\62l?3N2M2O2M2N3N2M2PO]OaBf0]=[ObBf0\\=]OaBf0\\=]OaBf0]=[ObBf0\\=]OaBf0\\=\\ObBf0]=\\OaBf0\\=]OaBf0\\=\\ObBf0]=\\OaBf0\\=\\ObBf0\\=]OaBf0]=P1M2N3N1N3N2M00010O00010O0003N1N3M2O2nNfBZO]=c0eB[O]=c0fB[O\\=c0fBZO]=c0eB[O\\=d0gBZOX=g0iBWOX=h0kBUOW=i0lBUOU=j0lBTOW=i0lBTOV=j0lBUOV=i0lBTOW=i0Q1N3N1]Oi@;Z?Bi@;_?O2M2O2M3MQUd0"}, "image_id": 971, "id": 15949}, {"iscrowd": 0, "category_id": 1, "bbox": [55.0, 348.0, 30.0, 32.0], "area": 563, "segmentation": {"size": [512, 512], "counts": "akk03k?2M3M4M2M3M4M2M3010O00010O00010O00010\\Ok@;U?Bn@>[?0O0010O0N2M4M2M3MhTe6"}, "image_id": 971, "id": 15950}, {"iscrowd": 0, "category_id": 1, "bbox": [36.0, 350.0, 22.0, 21.0], "area": 293, "segmentation": {"size": [512, 512], "counts": "Z[b03k?2M3N3M2M4O000010O010O00010O010O0N2M4M2M4MndR7"}, 
"image_id": 971, "id": 15951}, {"iscrowd": 0, "category_id": 1, "bbox": [466.0, 371.0, 46.0, 108.0], "area": 2829, "segmentation": {"size": [512, 512], "counts": "^^Y71n?1N3M2N3N1N3M2N3N2M2PO\\ObBg0[=\\OcBe0\\=\\ObBg0[=\\ObBf0\\=\\ObBg0[=\\OcBf0[=[OcBg0[=\\ObBg0[=[OcBg0[=\\OcBf0Z=]OcBe0\\=\\ObBg0[=P1N3M3M2O2M2N3M10O0001O01O2N3N2M2N3M2N3oNeBZO\\=d0gBYO\\=d0fBZO]=c0fBZOc1"}, "image_id": 971, "id": 15952}, {"iscrowd": 0, "category_id": 1, "bbox": [392.0, 391.0, 20.0, 24.0], "area": 230, "segmentation": {"size": [512, 512], "counts": "i\\T61m?2O2M3M2N3M2O2M2N3M03N2M2N3N1N3M2N3N2Meca1"}, "image_id": 971, "id": 15953}, {"iscrowd": 0, "category_id": 1, "bbox": [452.0, 424.0, 20.0, 23.0], "area": 231, "segmentation": {"size": [512, 512], "counts": "h]R72m?1N3N2M2N3N2M2O2M2OO102M3N1N3N1N3M3N1Nebc0"}, "image_id": 971, "id": 15954}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 446.0, 20.0, 34.0], "area": 430, "segmentation": {"size": [512, 512], "counts": "n=R1o>O2M2N3N1N00010O0110001M2N100If@G\\?7f@G]?7:K2Nbae7"}, "image_id": 971, "id": 15955}, {"iscrowd": 0, "category_id": 1, "bbox": [41.0, 488.0, 49.0, 24.0], "area": 638, "segmentation": {"size": [512, 512], "counts": "jod02n?2M2N2N100O1O1O100O1O100O1O1O100O1O1O100O1O100O1O1001O2N1O2N00001O1O2N1O1E]@7g?O1O2N1O1OO100O11O2N1OQ`b6"}, "image_id": 971, "id": 15956}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 500.0, 22.0, 12.0], "area": 164, "segmentation": {"size": [512, 512], "counts": "g?9g?02N2NO1O100O1O1O100O1O11O1O1O2N1O1O2N1O1OQ`d7"}, "image_id": 971, "id": 15957}, {"iscrowd": 0, "category_id": 1, "bbox": [477.0, 511.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "oo^71PPa0"}, "image_id": 971, "id": 15958}, {"iscrowd": 0, "category_id": 1, "bbox": [2.0, 0.0, 83.0, 38.0], "area": 1699, "segmentation": {"size": [512, 512], "counts": "TP11m?2N3O001O1O001O001O001O001O001O1O001O001O001O001O1O001O001O001O001h@ZOT?j0O001O1O001O001O001O001O1O001O001O001O001OO1N2N2O1N2N2N2N2O100001O00O1N2N2N2N2O10000O1N2N2O1N2N2NR`e6"}, "image_id": 974, "id": 15959}, {"iscrowd": 0, "category_id": 1, "bbox": [158.0, 0.0, 74.0, 65.0], "area": 2593, "segmentation": {"size": [512, 512], "counts": "iP_22m?1N3M2N3M3M2N3H_Om@c0Q?@m@b0P?8O2O001O001O001O001O1O001O001O001O001O001O001ON2001O003M1O001O001O001O001O1O001O00fNfAn0Z>QOhAn0X>POjAP1V>nNmAR1NjNl=2XBT1JlNo=MZBW1DoN_>Y110O01FaARO^>l0eARO]>l0dARO^>l0eAQO^>2\\Ad0V?N3M2N3M2N3N1N3M2NUo[4"}, "image_id": 974, "id": 15960}, {"iscrowd": 0, "category_id": 1, "bbox": [254.0, 0.0, 9.0, 9.0], "area": 55, "segmentation": {"size": [512, 512], "counts": "TPo33k?2N3O001O00N3M2NQ`l3"}, "image_id": 974, "id": 15961}, {"iscrowd": 0, "category_id": 1, "bbox": [260.0, 0.0, 91.0, 59.0], "area": 3248, "segmentation": {"size": [512, 512], "counts": "[PR43k?2O2M2N3M3N1O2O001O1O001O1l@]Oi>c0UA_Ok>b0SA@l>`0RACm>h001O001O001O1O001O001O001O1O001O001O001O1O010O010O01000O010O010O010M3M2N3N101O001O1O001OO1O1N2N2N2O1N2N2N2O11O01N1N3M2GWAYOl>d0WAZOj>e0WAYOl>f080OO2N1N3M2N3N2M2N3M2Og_`2"}, "image_id": 974, "id": 15962}, {"iscrowd": 0, "category_id": 1, "bbox": [348.0, 0.0, 13.0, 7.0], "area": 49, "segmentation": {"size": [512, 512], "counts": "PP^51o?1O1O1O001O1O1OO1O1N2O1OQ`[2"}, "image_id": 974, "id": 15963}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 27.0, 55.0, 67.0], "area": 2171, "segmentation": {"size": [512, 512], "counts": "X1R1o>0O0VAPOe>P1XAROh>T1N1O2M3M2N3N1N3M2N30O0100O010O0100O0100O010OO2M3M2N3N1N3M2N3CZA]Og>a0[A]Oh>`0[A]Oh>b0XA\\Oj>d0:0O010O10O10O01M2O2M2N3M2O2M3M2N`^T7"}, "image_id": 974, "id": 15964}, 
{"iscrowd": 0, "category_id": 1, "bbox": [228.0, 41.0, 84.0, 75.0], "area": 3072, "segmentation": {"size": [512, 512], "counts": "[Rb32l?2N3N1N3M3M2N3N1N3M2N3M210O10O010O010O10O010O10O010O01M2N3M2N3N2M2N3M2N3N1N3O01000O010O010O010O01000O010O010O010O10O10O01POjA5U>InA6S>HnA9Q>ERB:o=CSB>m=_OVBa0i=]OYBc0h=[OZBe0e=YO^Bf0c=WO_Bj0`=TOcBk0Y>O2M2O2M2N3M3M2O2M2N3M2NUmS3"}, "image_id": 974, "id": 15965}, {"iscrowd": 0, "category_id": 1, "bbox": [132.0, 47.0, 77.0, 60.0], "area": 2605, "segmentation": {"size": [512, 512], "counts": "cRR22l?2N3M2N3N1N3M2N3G]OQAe0m>^OPAe0m>8N3M3M2O2M201O010O01M2N3M3O010O010O010O01O1M2N3O0010O010O10O0^AeN`>]10100O010O010O010O0100O0N3O001O0N3M2N3M2O2M2N5K2O2O0O2N1N3M2N3M2N3M2Oh]g4"}, "image_id": 974, "id": 15966}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 91.0, 16.0, 29.0], "area": 285, "segmentation": {"size": [512, 512], "counts": "k2k0U?010O010OO2M2N3M2O2M3M2N3M2N3Molg7"}, "image_id": 974, "id": 15967}, {"iscrowd": 0, "category_id": 1, "bbox": [463.0, 92.0, 49.0, 46.0], "area": 1584, "segmentation": {"size": [512, 512], "counts": "[cW72n?5K5K5J6K2NO1000O1000O1000O1000O1004K6K1O0000O010000JPO\\AP1d>5100000O1000O1000O1000O1000O101O1O000O010TM"}, "image_id": 974, "id": 15968}, {"iscrowd": 0, "category_id": 1, "bbox": [103.0, 97.0, 80.0, 62.0], "area": 2625, "segmentation": {"size": [512, 512], "counts": "Zdc12l?2N3N1N3BGQAFRAGQAFSA;l>>M2N3M2N3N2M2N3N1010O010O010O1M2N3M2N3N110O0mNZAm0f>PO]Ao0j>10O010O01000O010O010O010O01000O010O010OO2N1N3M3M2N2OO002N3N1N3M3M2N3N1N3M2N3M2O2M3M2N3M2O2M2NZ\\T5"}, "image_id": 974, "id": 15969}, {"iscrowd": 0, "category_id": 1, "bbox": [371.0, 100.0, 81.0, 86.0], "area": 3515, "segmentation": {"size": [512, 512], "counts": "edi52l?3M3N1N3M2N3M2O2M3M2N3N1O2FROaAm0]>VOcAj0Z>XOfAh0X>[OeAh0Y>YOfAi0W>ZOfAh0X>b0M2N3N1N3O00100O0100O010O010O0100O0100O010O0100O0100O010O010O0hNTB=l=@VBa0i=^OYBa0h=\\O[Bd0e=YO]Bg0c=WO`Bi0`=TObBl0^=SOdBm0\\=POfBP1Z=nNiBR1W=kNjBW1m=2N12M3M2N3M2O2M2N3M2O2M3M2N3M2O2M2N3MZkm0"}, "image_id": 974, "id": 15970}, {"iscrowd": 0, "category_id": 1, "bbox": [68.0, 144.0, 75.0, 67.0], "area": 2581, "segmentation": {"size": [512, 512], "counts": "kUR12l?3N1N3M2N3ACWA`0g>AXA`0f>CWA`0f>CWA?g>CWA`0f>>O2M2N3M3M2O2N1010O10O010O01M2N3M2N3O10O010O010O10O010O10O010O010OkN^Am0b>POaAo0_>oNcAR1f>O10O010O010O10O010O10O010OO2M2N3M2N3N2M2N3M2N3N1N3M2N3NYZh5"}, "image_id": 974, "id": 15971}, {"iscrowd": 0, "category_id": 1, "bbox": [467.0, 147.0, 45.0, 61.0], "area": 1814, "segmentation": {"size": [512, 512], "counts": "neY72n?5K5J5L2N000AA]A?c>FWA;i>>0O10O100000O10O100000O\\OQOYBo0g=WORBj0n=[OmAe0R>AiA?W>e00O1000O1000O1000O1000O100000O1000O1000]K"}, "image_id": 974, "id": 15972}, {"iscrowd": 0, "category_id": 1, "bbox": [154.0, 158.0, 107.0, 64.0], "area": 3113, "segmentation": {"size": [512, 512], "counts": "cU]22l?2N3M3N1O2O01000O010O010O01000OO2N1N3N2ON3M3N1N3M2N3N2M2010O01O0N3N2M2N3N1O2O10O010O10O010O10O010O10O010O10O10WAlNc>Y1N3O1O010O0100O0100O0100O010O0100O0100O01M1O0011N3M2O2M3M2N3N1O200O0100O01O0N3M3M2O2M2N3N1N3M3M2O2M2NZZm3"}, "image_id": 974, "id": 15973}, {"iscrowd": 0, "category_id": 1, "bbox": [341.0, 171.0, 89.0, 76.0], "area": 3064, "segmentation": {"size": [512, 512], "counts": "ZfZ52l?2N3M3M2N3N1N3M2N3M2010O010O010O01000O0O2O0010O010O0O2M2N3N1N3M2N3M2N3M3M210O010O010O010O0100O010O0100O010O010O010O0gA`NS>`1jAcNU>e10O10OdNjAn0W>oNlAQ1S>mNPBR1Q>kNQBV1n=hNUBW1Z>10O010O010O0N3M2N3N1N3SOQAe0V?N3M2N3M2N3N1N3M2NYiX1"}, "image_id": 974, "id": 15974}, {"iscrowd": 0, "category_id": 1, "bbox": [454.0, 179.0, 7.0, 9.0], "area": 55, 
"segmentation": {"size": [512, 512], "counts": "hUS73h?6O000001O00\\Zi0"}, "image_id": 974, "id": 15975}, {"iscrowd": 0, "category_id": 1, "bbox": [139.0, 207.0, 82.0, 69.0], "area": 2771, "segmentation": {"size": [512, 512], "counts": "bgU21m?3M2N3M2M4M2N2O2O010O010O010O0010O0010O010O010O010O001[O[OkAg0S>[OjAh0T>[OiAh0T>ZOjAh0V>XOhAk0X>UOeAm0[>SObAQ1^>8010ON3M20010O010O010O010O0010O0010O010O010O010O0010O001SOgA2X>KkA5V>HmA8R>FPB:Q>CRB=m=AUB?l=]OXBc0g=[O[Be0e=YO]Bh0c=UO`Bh0]>N3L3N3M2N3M2M4M2NUXa4"}, "image_id": 974, "id": 15976}, {"iscrowd": 0, "category_id": 1, "bbox": [456.0, 210.0, 7.0, 10.0], "area": 57, "segmentation": {"size": [512, 512], "counts": "cVT73m?5J2OO100000^Yh0"}, "image_id": 974, "id": 15977}, {"iscrowd": 0, "category_id": 1, "bbox": [314.0, 213.0, 87.0, 69.0], "area": 3241, "segmentation": {"size": [512, 512], "counts": "jWm42\\?1QA2l>0RA3l>ORA2l>0RA3k>0RA2l>0RA3k>b0O2M3M2N3N200O10O010O010O10O10O010O010O01M3M21O001O010O010O10O010O10O010aNeAW1[>gNgAZ1Y>dNiA[1^>1000O010O001M2N3N2M2010O010O010O01000O010O010O01000O01O0N3N1N3M3M2N3N1N3M2N3N1N3MRXg1"}, "image_id": 974, "id": 15978}, {"iscrowd": 0, "category_id": 1, "bbox": [476.0, 218.0, 36.0, 56.0], "area": 1441, "segmentation": {"size": [512, 512], "counts": "jW^74l?4K4M4L4K4WO[OQBg0m=\\OPBc0Q>AkA?T>EhAGeA8\\>L`A4`>f0O01000O10O10O10O1000O10O10O10O10O10O1000O10O10VI"}, "image_id": 974, "id": 15979}, {"iscrowd": 0, "category_id": 1, "bbox": [437.0, 248.0, 16.0, 15.0], "area": 132, "segmentation": {"size": [512, 512], "counts": "ngj62m?2O2M2N3N1N001O01O101N3M2O2M2NRXm0"}, "image_id": 974, "id": 15980}, {"iscrowd": 0, "category_id": 1, "bbox": [113.0, 270.0, 92.0, 59.0], "area": 3126, "segmentation": {"size": [512, 512], "counts": "mhh13k?2M3_@IW?:f@IW?9g@IW?a0N3M2010O010O010O01O010O010O0N3O0010O010O010O01O010O01M201O010O01L3N2N3M210O010O010O0\\AiN_>X1^AkNb>Y1010O010O01N1N3M2N30O010O01O010O010O01O0N3M2N3M2N2N3M2N3M2O2O001M2N3M2N2M4M2N3M2N3M2NUWi4"}, "image_id": 974, "id": 15981}, {"iscrowd": 0, "category_id": 1, "bbox": [383.0, 273.0, 79.0, 67.0], "area": 2751, "segmentation": {"size": [512, 512], "counts": "Zio51o?2M2N3N1N3M3^@C]?b0M2O2M3M2O2M2N3N1N00101N3M3N1N3N2M201O10N1O2M1O01O010O00010O0010O0010O0011N2N1IaAmN_>R1dAkN\\>V16010O00010O0010O0010O00010O0102M2N3N2M2O2M3M2O2M3_Ob@010O0100O0100O010O010O01000O010O010O0100O0100O01kN_Ag0c>XO^Af0e>WO^Af0d>XO_Ae0o>O2O010O01000O010O010O010O01000O0N3M2O2M2N3M3M2O2MkeT7"}, "image_id": 974, "id": 15984}, {"iscrowd": 0, "category_id": 1, "bbox": [491.0, 286.0, 21.0, 69.0], "area": 716, "segmentation": {"size": [512, 512], "counts": "Rje73m?1N3M2O2M2ADWA>h>DVA>g>DWA>g>EVA>h>CWA>g>EVA>g>?O2CfNoA\\1o=gNnA\\1MaNn=5TB[1LbNo=4SBa1j=U1]AnNb>X1010O0010O0010ON2M40O01O01O010O01O010O01O010O01O01O0N3L3N2M4M2M4M2M3N3N1010O00010O01N1M3N3L3N3L3N^eY5"}, "image_id": 974, "id": 15986}, {"iscrowd": 0, "category_id": 1, "bbox": [257.0, 324.0, 72.0, 70.0], "area": 2730, "segmentation": {"size": [512, 512], "counts": "akP43k?2ALPA6n>Mo@6n>Lo@7o>Ln@7n>LPA6n>`00OO2M2M3N3M2N3L3N3N110O00010O01N1N3N110O0010O0010O010O010O0dNcAU1^>hNdAY1b>O01O010O010O01O0jN[AR1e>kN]AU1g>10O010O010O0010O0010O010VOWA;i>CYA>g>_O\\A?e>_O]A?f>]O]Aa0S?L3N3M2N3L3N]Tk2"}, "image_id": 974, "id": 15987}, {"iscrowd": 0, "category_id": 1, "bbox": [428.0, 324.0, 71.0, 66.0], "area": 2170, "segmentation": {"size": [512, 512], "counts": "`[f63m?1N3M2O2M2N3M3N1N3M2O2M3GTOYAl0g>VOXAi0b>UO`AR1_>PO_AP1b>601O01O01O01O01O01O0001O01O01O01O01O01O01O01O01O00010O0001O101N3M3N1N3M2O2M1O10O000100O2N3N1N3M2N3N2M2N3N1NUU6"}, "image_id": 974, "id": 15988}, 
{"iscrowd": 0, "category_id": 1, "bbox": [0.0, 347.0, 36.0, 51.0], "area": 963, "segmentation": {"size": [512, 512], "counts": "k:^1c>0O10O010O010O0lN_Aj0a>TO`Ak0b>ROaAk0a>SOaAk0l>M3M2O2N110O010O01000O010O010O001M2N3N2M2N3M2N3NXd]7"}, "image_id": 974, "id": 15989}, {"iscrowd": 0, "category_id": 1, "bbox": [59.0, 365.0, 91.0, 72.0], "area": 3708, "segmentation": {"size": [512, 512], "counts": "Pmm04d?8H8L400001O01G8I7H80000aAbN]>`101O0000000001O0001O0000000001O0001O0K5I7001O000001O00000001O0XNTB\\1X>001O000001O000001O3M000001O000001OPOkA5U>CSB=m=\\OZBd0a>0000000010O00000000000010O000000000000010O00N2H8I7H]cd5"}, "image_id": 974, "id": 15990}, {"iscrowd": 0, "category_id": 1, "bbox": [448.0, 379.0, 64.0, 65.0], "area": 2595, "segmentation": {"size": [512, 512], "counts": "Z\\P72n?1N3M2O2M2N3N1N3M2g@\\OT?i0N3M2O2M2N3N1N3M2O2M2N10O02010O010O010O01M2N3N1N11O2M2N10O01O01O01O01O01O01O01O01O01O102JbAeN`>Y16N1N3M2O2M2N3N1NeC"}, "image_id": 974, "id": 15991}, {"iscrowd": 0, "category_id": 1, "bbox": [234.0, 393.0, 70.0, 58.0], "area": 2418, "segmentation": {"size": [512, 512], "counts": "k\\e31m?3l@OU>3iAOU>3iA0T>3iAOU>3iAOT>5iANT>4iAOU>4hAOV>2hA0X>1eA2Z>j010O010O010O010O01O010O0N2N3M201O010O0010O010O010O0010O010O0iN]AR1c>kN`AT1f>01M2N3M210O01O010O010O01O010O0TOPAi0T?O0O1N3M2N3M2N3M2M4M2NdbW3"}, "image_id": 974, "id": 15992}, {"iscrowd": 0, "category_id": 1, "bbox": [307.0, 394.0, 79.0, 84.0], "area": 3257, "segmentation": {"size": [512, 512], "counts": "]ni42m?2N3N2M2O2BFQAFQA=m>EQAFQA=l>=OO01O01O010O00010ODQOhAo0Y>SOdAP1Y>ROfAo0X>TOeAn0Z>;A^NaBc1^=_N`Ba1a=aN]B^1c=dN[B]1d=fNYBZ1h=hNVBW1j=kNTBV1k=?1O01O010O01O01O01O01O01O011N3M2O2M3N1N2N01O01O30O01N1N3N1N3M3N1mNWAl0l>QOVAm0Q?N1N3M3N1N3N1N3M2O2M3M2O2MVbn1"}, "image_id": 974, "id": 15993}, {"iscrowd": 0, "category_id": 1, "bbox": [308.0, 405.0, 9.0, 10.0], "area": 54, "segmentation": {"size": [512, 512], "counts": "i\\j42k?4N110O010O010OM[SQ3"}, "image_id": 974, "id": 15994}, {"iscrowd": 0, "category_id": 1, "bbox": [42.0, 412.0, 10.0, 12.0], "area": 84, "segmentation": {"size": [512, 512], "counts": "S]e03j?3M4N101O01O001K4LVcU7"}, "image_id": 974, "id": 15995}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 420.0, 9.0, 21.0], "area": 110, "segmentation": {"size": [512, 512], "counts": "T=e0[?N3N1N3M2N3N2M2NjRk7"}, "image_id": 974, "id": 15996}, {"iscrowd": 0, "category_id": 1, "bbox": [53.0, 443.0, 89.0, 51.0], "area": 2999, "segmentation": {"size": [512, 512], "counts": "enj03m?FdA:\\>e0000000O100000O1000000000O1000M3000000000O10000000O10000000O1000000000O2O001O001O00001O001N10009G8KL100000O100000000000O100000O1000000000001O:F:Fl`h5"}, "image_id": 974, "id": 15997}, {"iscrowd": 0, "category_id": 1, "bbox": [487.0, 445.0, 25.0, 44.0], "area": 650, "segmentation": {"size": [512, 512], "counts": "Unc75j?3N1N3N1N3M2O2M2N3N2M1O0100101M2010OO2M2O2M2N3N1RB"}, "image_id": 974, "id": 15998}, {"iscrowd": 0, "category_id": 1, "bbox": [345.0, 457.0, 75.0, 55.0], "area": 2364, "segmentation": {"size": [512, 512], "counts": "Zo\\51n?3N1N3M2N3N1N3M2N3N1N3M2N3N1N1002N1OO1O100O1O100O1O100O1O1O100O1O100O1O100O1O1O100O0010O00010O01O00010O2N20N2N3N1N3M2O2M3M2O2M2N3XOj@b0W?\\Ol@a0[?N3N1N3M2O2M2N3Nc`]1"}, "image_id": 974, "id": 15999}, {"iscrowd": 0, "category_id": 1, "bbox": [227.0, 464.0, 85.0, 48.0], "area": 2424, "segmentation": {"size": [512, 512], "counts": "`oa32m?2O2M2N3N1N3N2M2N3N0O00010O00010O000l@[Oo>g0o@YOP?i0n@XOQ?l000O100O1O100O1O100O1O100O1O100O11O2N1O00O100O1O100O100O1O100O1O10O01O01O01O03M2O2M2N3N2M2N01O010O0002O2M2N3N1N3M2O2M3M2O2M2N3Ne`S3"}, 
"image_id": 974, "id": 16000}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 486.0, 27.0, 26.0], "area": 449, "segmentation": {"size": [512, 512], "counts": "Y?g0Y?0O100O1O11O1O2N1O2N2N00O1O1002N1O2N1O2N1O2N2N1O2M2ORPb7"}, "image_id": 974, "id": 16001}, {"iscrowd": 0, "category_id": 1, "bbox": [29.0, 486.0, 10.0, 11.0], "area": 64, "segmentation": {"size": [512, 512], "counts": "Xo>3m?1N3N1O1N21N1N3M3NcP\\7"}, "image_id": 974, "id": 16002}, {"iscrowd": 0, "category_id": 1, "bbox": [111.0, 493.0, 38.0, 19.0], "area": 474, "segmentation": {"size": [512, 512], "counts": "hog12n?3L3N1O0O100O10000O10000O100O10000O100O10000O100O10000O100003M2N3M2N3M2N3MQPe5"}, "image_id": 974, "id": 16003}, {"iscrowd": 0, "category_id": 1, "bbox": [411.0, 499.0, 27.0, 13.0], "area": 185, "segmentation": {"size": [512, 512], "counts": "oo]61n?100O1O1O100O1O100O1O100O1O100O1O11O1O1O2N1O2N1O2N1OQ`T1"}, "image_id": 974, "id": 16004}, {"iscrowd": 0, "category_id": 1, "bbox": [5.0, 0.0, 23.0, 7.0], "area": 93, "segmentation": {"size": [512, 512], "counts": "P`21o?001O001O001O001O1O001O00O1N2O11O001O0000O1NRPb7"}, "image_id": 975, "id": 16005}, {"iscrowd": 0, "category_id": 1, "bbox": [37.0, 0.0, 49.0, 14.0], "area": 366, "segmentation": {"size": [512, 512], "counts": "P`b01o?1O1O001O001O001O001O1O001O001O001O001O001O1O0000N2N2N2O1N2N21O001O001O1O001O001O001O00O1N2O1N2NRPe6"}, "image_id": 975, "id": 16006}, {"iscrowd": 0, "category_id": 1, "bbox": [255.0, 0.0, 60.0, 32.0], "area": 1121, "segmentation": {"size": [512, 512], "counts": "W`o31m?3N1N3M201O1O001O001O001O1O001O001O001O1O001O001O001O1O001O001O001O1O001O001O001O1OO1N2O1N21O1ON2Hm@AU?>l@@V?>7N2N2O1N2N2N2O1NR`R3"}, "image_id": 975, "id": 16007}, {"iscrowd": 0, "category_id": 1, "bbox": [329.0, 0.0, 57.0, 42.0], "area": 1101, "segmentation": {"size": [512, 512], "counts": "_`T51m?2N3M2O2M2N3M2N3O001O1O001O010O2OO010O10O010O10O010O010O10O010O10O010O010O001O10O010O010O01[Oh@a0W?^Oj@b0[?0OO2M2N3M3N1N3M2NWon1"}, "image_id": 975, "id": 16008}, {"iscrowd": 0, "category_id": 1, "bbox": [354.0, 0.0, 32.0, 11.0], "area": 159, "segmentation": {"size": [512, 512], "counts": "QPa51n?2O001O00O1N200001O001O001O001O1O001O001O001O1O001OO1N2N2O1N2NRPo1"}, "image_id": 975, "id": 16009}, {"iscrowd": 0, "category_id": 1, "bbox": [406.0, 0.0, 10.0, 7.0], "area": 47, "segmentation": {"size": [512, 512], "counts": "SP[61n?2M201O001O00O1N2NRP`1"}, "image_id": 975, "id": 16010}, {"iscrowd": 0, "category_id": 1, "bbox": [424.0, 0.0, 17.0, 6.0], "area": 53, "segmentation": {"size": [512, 512], "counts": "PPd61o?001O001O001O001O1O00N2O1O11O00NR`S1"}, "image_id": 975, "id": 16011}, {"iscrowd": 0, "category_id": 1, "bbox": [452.0, 0.0, 60.0, 17.0], "area": 844, "segmentation": {"size": [512, 512], "counts": "PPR73m?=C1O00000000000000000000000000O1000000000000000000000000O1000000000000000000000000O1000000000000000000Gc@O]?19000000000000"}, "image_id": 975, "id": 16012}, {"iscrowd": 0, "category_id": 1, "bbox": [219.0, 22.0, 16.0, 21.0], "area": 158, "segmentation": {"size": [512, 512], "counts": "Ua]33k?2N3N1N3M2N3M0002N2N3M2N2N3M2NZ_Z4"}, "image_id": 975, "id": 16013}, {"iscrowd": 0, "category_id": 1, "bbox": [389.0, 27.0, 11.0, 12.0], "area": 72, "segmentation": {"size": [512, 512], "counts": "SaR61m?2N3M2N30O010N1N3M3NRog1"}, "image_id": 975, "id": 16014}, {"iscrowd": 0, "category_id": 1, "bbox": [234.0, 32.0, 71.0, 54.0], "area": 2037, "segmentation": {"size": [512, 512], "counts": 
"kQe32l?2O2M2N3M3M2N3N1N3M201M2N3N101O1M2N3O010O010O01M3M210O0100O010O010O0100O010O01N2M210O010O010O010O010O10O10O010ON3M2N3M2O2M2N3M3M2N3N1N3M2N3MZ^W3"}, "image_id": 975, "id": 16015}, {"iscrowd": 0, "category_id": 1, "bbox": [341.0, 69.0, 27.0, 28.0], "area": 443, "segmentation": {"size": [512, 512], "counts": "fbZ52l?3M2N3M3M2O2M2N3M210O010O010O010O01N1O2M2N3M2N3M3M2NdmW2"}, "image_id": 975, "id": 16016}, {"iscrowd": 0, "category_id": 1, "bbox": [195.0, 77.0, 72.0, 46.0], "area": 1972, "segmentation": {"size": [512, 512], "counts": "QcQ32k?3N2N3L3N3L3N2N3N1010O01O01O010O010O00O2M2010O010O00010O010O0010O002OO010O01O01O001NN4M2N30O01O01O010O010O00010O010O0O101O0010OCSAGn>6UAJj>3YAMg>1[AMh>0[ALh>1[AMh>0[AMg>0k]j3"}, "image_id": 975, "id": 16017}, {"iscrowd": 0, "category_id": 1, "bbox": [335.0, 91.0, 13.0, 10.0], "area": 77, "segmentation": {"size": [512, 512], "counts": "PcW51m?3M2O2O010O010O01O0O2M2ORma2"}, "image_id": 975, "id": 16018}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 92.0, 18.0, 45.0], "area": 451, "segmentation": {"size": [512, 512], "counts": "l2X1h>10O010O0ZO[A2e>K]A6c>H_A7a>GaA:_>CdA;]>CeA<]>AfA<\\>BfAU?@l@>]?N1N3M3M2O2MRil4"}, "image_id": 975, "id": 16026}, {"iscrowd": 0, "category_id": 1, "bbox": [206.0, 213.0, 30.0, 29.0], "area": 483, "segmentation": {"size": [512, 512], "counts": "WWW31m?2N3N1N3M3M2N3M2O2M3O0010O0100O0100O0O2M2O2M2N3M2O2M2N3M3NRiY4"}, "image_id": 975, "id": 16027}, {"iscrowd": 0, "category_id": 1, "bbox": [232.0, 226.0, 24.0, 20.0], "area": 242, "segmentation": {"size": [512, 512], "counts": "[Wd31n?1N3N2N1N3N1O20O01000O010O01000O010O0N3N2M2O2Mfho3"}, "image_id": 975, "id": 16028}, {"iscrowd": 0, "category_id": 1, "bbox": [108.0, 241.0, 58.0, 56.0], "area": 1932, "segmentation": {"size": [512, 512], "counts": "nWf13j?3M4j@K[>9bAJ[>9bAJ^>6_AMa>4[A0d>f01O01O00010O00010O0000010O000100O1O100OVAnNf>V11O100O1O1O10O00010O00010N1N2010O000010ON2M4L3L4M4L3M3L4M4L3Mog\\5"}, "image_id": 975, "id": 16029}, {"iscrowd": 0, "category_id": 1, "bbox": [160.0, 286.0, 43.0, 44.0], "area": 1058, "segmentation": {"size": [512, 512], "counts": "jY`22l?3M2N3N2M2N3M2O2M2N3M3N1N3M2N3N1010O10O10O010O010O001N2M2N3M2N3N1N3M3M2O2M2N3M2O2M3MeVj4"}, "image_id": 975, "id": 16030}, {"iscrowd": 0, "category_id": 1, "bbox": [85.0, 294.0, 64.0, 66.0], "area": 2405, "segmentation": {"size": [512, 512], "counts": "]jZ11S?1cA4X>1dA2X>2dA2Y>1dA2\\>O_A6`>J\\A:d>a01O01O00010O0001O01O00010O0000010O0001O01O00010O0001O01OhN\\AT1h>010O0000010]AmNW>S1eAQO[>o0bATO_>W1O0000010O0000010O00M4K4E\\AWOg>e0HPAHPA=k>>L400010O000010O00010O0001O01O01O00O2O000010O000010O0001O01L3M3N3O01O00010O000010O00010O0001O01O01O0001O0O1O2M2O1N2O2M2O1O1N3N1N2O2N11O1O10O01O1O100O00100O1O1O10O01O1O10O01O010O001O010O001O0O2N1N3M2N3M2N3N1N3M2N3M2O2M2N3M2N3M2O2M2N3M2N3N1NYSZ5"}, "image_id": 975, "id": 16032}, {"iscrowd": 0, "category_id": 1, "bbox": [152.0, 0.0, 11.0, 4.0], "area": 27, "segmentation": {"size": [512, 512], "counts": "PP\\21o?00001O001O00001O00OQ`^5"}, "image_id": 977, "id": 16033}, {"iscrowd": 0, "category_id": 1, "bbox": [357.0, 0.0, 7.0, 3.0], "area": 13, "segmentation": {"size": [512, 512], "counts": "P`b51o?001O1O00O1OQPZ2"}, "image_id": 977, "id": 16034}, {"iscrowd": 0, "category_id": 1, "bbox": [460.0, 0.0, 48.0, 21.0], "area": 597, "segmentation": {"size": [512, 512], "counts": "SPV71l?301O00001O00001O000X@Ke?80001O00001O001O00001O001O00001O001O00001O001O00001O00001O00M3N2M3M3M3M3MSP2"}, "image_id": 977, "id": 16035}, {"iscrowd": 0, "category_id": 1, "bbox": [103.0, 4.0, 63.0, 47.0], "area": 1699, 
"segmentation": {"size": [512, 512], "counts": "i`c13i?4L4L5J7K30001O01O00N30O0000010O00000010O0000010O00000010O0000010O0000010O0000010OSAUOe>k0WAYOi>P1O01O0001O01O0001O01O0M3L4O1M4K4L4K5L5Kco\\5"}, "image_id": 977, "id": 16036}, {"iscrowd": 0, "category_id": 1, "bbox": [276.0, 7.0, 50.0, 55.0], "area": 1312, "segmentation": {"size": [512, 512], "counts": "VQZ43k?2O2M2N3N1N3M3M2O2M2N3M2O2M3M2N3N1N3O010O10O010O10OUO]A:c>C`A<`>BbA?^>_OdA`0\\>^OfAc0Z>ZOiAe0X>YOiAh0V>VOmAj0e>O010O010O0O2N2M2N3M2O2M2N3M3N1Nhnl2"}, "image_id": 977, "id": 16037}, {"iscrowd": 0, "category_id": 1, "bbox": [10.0, 13.0, 28.0, 28.0], "area": 447, "segmentation": {"size": [512, 512], "counts": "mP53k?2N3M2O2M3M2N3N1N30O0100O010O0100O001M2N3M2O2M2N3M3N1N[o\\7"}, "image_id": 977, "id": 16038}, {"iscrowd": 0, "category_id": 1, "bbox": [510.0, 19.0, 2.0, 11.0], "area": 14, "segmentation": {"size": [512, 512], "counts": "kPo73e?8]O"}, "image_id": 977, "id": 16039}, {"iscrowd": 0, "category_id": 1, "bbox": [438.0, 25.0, 57.0, 49.0], "area": 1361, "segmentation": {"size": [512, 512], "counts": "_Qk61n?2M3M2O2M2O2M3M2O2M2N3N2M2O2O01000O010O01000O010O01000O0100O0100O0100O0100O0100O0100OO2M3M2O2M2N3N2M2N3N1N3M3N1N`^8"}, "image_id": 977, "id": 16040}, {"iscrowd": 0, "category_id": 1, "bbox": [56.0, 29.0, 37.0, 38.0], "area": 700, "segmentation": {"size": [512, 512], "counts": "ZQl02m?2N2N2O1N2N2N2N3M2N2N2N2O1N2N3N1001O0000000N2N2N2O2M2N2N2N2N2N2N3M2O1N2N]^a6"}, "image_id": 977, "id": 16041}, {"iscrowd": 0, "category_id": 1, "bbox": [162.0, 30.0, 23.0, 24.0], "area": 366, "segmentation": {"size": [512, 512], "counts": "`Qa21l?4K4L4M4K4000001O01O0001O01O01O01N1L4M3L5K4MQ_S5"}, "image_id": 977, "id": 16042}, {"iscrowd": 0, "category_id": 1, "bbox": [222.0, 47.0, 24.0, 22.0], "area": 289, "segmentation": {"size": [512, 512], "counts": "iQ_33k?2N3N1N3M3N1010O01000O010O010O010N2M2N3N1N3M3NXnT4"}, "image_id": 977, "id": 16043}, {"iscrowd": 0, "category_id": 1, "bbox": [507.0, 51.0, 5.0, 11.0], "area": 29, "segmentation": {"size": [512, 512], "counts": "jam71m?3N1N3M3\\N"}, "image_id": 977, "id": 16044}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 64.0, 14.0, 15.0], "area": 127, "segmentation": {"size": [512, 512], "counts": "T27g?2N2010O010O010O010M2M4M2Nmmh7"}, "image_id": 977, "id": 16045}, {"iscrowd": 0, "category_id": 1, "bbox": [108.0, 66.0, 49.0, 48.0], "area": 1488, "segmentation": {"size": [512, 512], "counts": "ZRf13m?5K4L5J5L00O1000O10O1000O1000O1001N6K4L5K4K4M00O1000O1000O10O1000O1000O1000O102N4L5K4K6K4L5K4K6Kh\\a5"}, "image_id": 977, "id": 16046}, {"iscrowd": 0, "category_id": 1, "bbox": [171.0, 68.0, 26.0, 30.0], "area": 488, "segmentation": {"size": [512, 512], "counts": "ibe22k?4L3N3L3M3N3L3N201O010O00010O00010ON3L3N2M4L3M3N3Li]m4"}, "image_id": 977, "id": 16047}, {"iscrowd": 0, "category_id": 1, "bbox": [416.0, 68.0, 59.0, 47.0], "area": 1519, "segmentation": {"size": [512, 512], "counts": "lR`63k?3M2M3N3L3N3M2M3N3N101M2010O0010O010O0010O0010O010O00010O010O0010O0QORAl0R?0O0010O0010O010O0010O0010O01O0N2M4M2N3L3N2N3L3NX]b0"}, "image_id": 977, "id": 16048}, {"iscrowd": 0, "category_id": 1, "bbox": [229.0, 71.0, 57.0, 50.0], "area": 1381, "segmentation": {"size": [512, 512], "counts": "lbb33k?2O2M3M2O2M2N3N2M2O2M3M2O20O10O10O10O010O10O10O10O010O10O10O10O010O10O10O10O010O10O10O0N3M3N1N3N2M2N3N1N3M3N1N3Nol`3"}, "image_id": 977, "id": 16049}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 82.0, 4.0, 9.0], "area": 20, "segmentation": {"size": [512, 512], "counts": "b29h?M2N3M^mm7"}, "image_id": 977, "id": 16050}, {"iscrowd": 
0, "category_id": 1, "bbox": [487.0, 84.0, 25.0, 38.0], "area": 537, "segmentation": {"size": [512, 512], "counts": "]cc73k?2O2M2N3M3N1N3M2O2N2O0100O0100O0N3N1N1O01O01O03M]M"}, "image_id": 977, "id": 16051}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 91.0, 23.0, 29.0], "area": 421, "segmentation": {"size": [512, 512], "counts": "Y3:c?3N30O01L3N2N3L310O010O01O0O1N3L3N3M2M4M2N3MP]d7"}, "image_id": 977, "id": 16052}, {"iscrowd": 0, "category_id": 1, "bbox": [25.0, 92.0, 67.0, 46.0], "area": 1845, "segmentation": {"size": [512, 512], "counts": "ec<1m?2M4M2M4M2M3M4M2M3N3O010O01O01O010O01O01O010O01O01O010O00010O010O00010O01O01O001L3N2N3O010O000O2N101O01O0O2L3N2M4M2M4M2M3N3L3M3N3L3Nmla6"}, "image_id": 977, "id": 16053}, {"iscrowd": 0, "category_id": 1, "bbox": [184.0, 98.0, 52.0, 56.0], "area": 1349, "segmentation": {"size": [512, 512], "counts": "hSl21n?2N2N2N2N1O2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N200000000000000000000000jN]An0c>PO_An0c>PO_An0k>N2N2N2N2N2N2N2N2N2N2N2N2N2N2NjkY4"}, "image_id": 977, "id": 16054}, {"iscrowd": 0, "category_id": 1, "bbox": [387.0, 119.0, 68.0, 49.0], "area": 1720, "segmentation": {"size": [512, 512], "counts": "[dQ61n?2M2O2M3N1N3N2M2O2M2O2M3N1O2000O01000O01000O01000O01000O010O1O001O10O01O1O0000n@XOl>o0O00000000000001O000001O0000000000000001N1_OQA2n>B^A>R?000000O2Cb[l0"}, "image_id": 977, "id": 16055}, {"iscrowd": 0, "category_id": 1, "bbox": [456.0, 139.0, 37.0, 44.0], "area": 842, "segmentation": {"size": [512, 512], "counts": "ZUT73k?3N1N3M2N3N1N3M3M2O2M2N3M2O2M3M2N3N01O2M2N3M3N1N3M2N3N1N3M2N3N2M2N3M2O2M][9"}, "image_id": 977, "id": 16056}, {"iscrowd": 0, "category_id": 1, "bbox": [360.0, 155.0, 80.0, 78.0], "area": 2153, "segmentation": {"size": [512, 512], "counts": "aUd51m?2O2M3N1N3N2M2O2M3N1O2M3N1N3N20O01000O01000O01000O01000O01000O01000O01XAPO_>P1_AROa>n0\\ATOd>l0[AVOe>S10O010000O01000O010000O010YOZA3f>GZAE2d0d>EbA;^>CdA=\\>@gA?Y>@hAa0X>\\OkAd0i>O01000O01000O01000O010O1M2O2M3N1N3N2M2O2NWiS1"}, "image_id": 977, "id": 16057}, {"iscrowd": 0, "category_id": 1, "bbox": [424.0, 180.0, 32.0, 31.0], "area": 519, "segmentation": {"size": [512, 512], "counts": "TVd62m?1O2M3N1N3N2M2O2M3N2N1O2O10O10O10O10O10N2M2O2M3N2M2O2M3N1O2M3NPjk0"}, "image_id": 977, "id": 16058}, {"iscrowd": 0, "category_id": 1, "bbox": [329.0, 190.0, 48.0, 57.0], "area": 1430, "segmentation": {"size": [512, 512], "counts": "SgT51n?2N2N2@KRA7l>KRA7l>KRA7i>DQA747i>CRA837i>NUA4i>c0N2N2N2N2N2N2N2N2N2N02N2N2N2N2N2N2N2N2N2N2N2N1N3N2N2N2N2N2N2N2N2N2N2N2N2N2NYYS2"}, "image_id": 977, "id": 16059}, {"iscrowd": 0, "category_id": 1, "bbox": [468.0, 190.0, 44.0, 56.0], "area": 1240, "segmentation": {"size": [512, 512], "counts": "kVZ71n?2N2N2N2N2N1N3N2N2N2N2N20000000000000O0100N2N2N2N2N2M3N2N1O00000000000000000O010000000SJ"}, "image_id": 977, "id": 16060}, {"iscrowd": 0, "category_id": 1, "bbox": [62.0, 200.0, 44.0, 57.0], "area": 1436, "segmentation": {"size": [512, 512], "counts": "aVo01o?3M2M4M3L3N3M2M4M3L3N3M3L3N3M2M4M3L3N000O010O10O100O4M2N3L3N3L4M2N3L3N3M3L3N3L4M2N3L3N\\hZ6"}, "image_id": 977, "id": 16061}, {"iscrowd": 0, "category_id": 1, "bbox": [287.0, 224.0, 43.0, 59.0], "area": 1467, "segmentation": {"size": [512, 512], "counts": "Xg_41o?3M3L3N3M3L4M3M3L4M3M3L4M3M3L4M3M3L10000O01000O0100O4M3M3L4M3M3L4M3M3L4M3M3L4M3M3L4M`gj2"}, "image_id": 977, "id": 16062}, {"iscrowd": 0, "category_id": 1, "bbox": [381.0, 224.0, 30.0, 29.0], "area": 450, "segmentation": {"size": [512, 512], "counts": "^gn51n?2N2N2N2N2N2N2N2N2N2N2N2N2N2O01N2N2N2N2N2N2N2N2N2N2N2N2N2NcXb1"}, "image_id": 977, "id": 16063}, {"iscrowd": 0, 
"category_id": 1, "bbox": [136.0, 236.0, 15.0, 8.0], "area": 120, "segmentation": {"size": [512, 512], "counts": "\\WT28h?000000000000000000000000000dXd5"}, "image_id": 977, "id": 16064}, {"iscrowd": 0, "category_id": 1, "bbox": [437.0, 238.0, 36.0, 31.0], "area": 667, "segmentation": {"size": [512, 512], "counts": "Phj62k?4M2M4M2M3M4M210O00010O00010O010O00010O010O00010O00010O01M2N2M4L3N3L3NYXc0"}, "image_id": 977, "id": 16065}, {"iscrowd": 0, "category_id": 1, "bbox": [237.0, 241.0, 33.0, 52.0], "area": 1469, "segmentation": {"size": [512, 512], "counts": "fgf3:f?6J0L40>B?A2N0000000000000000O01000000000000000000000000000O`0Ac0]O]gh3"}, "image_id": 977, "id": 16066}, {"iscrowd": 0, "category_id": 1, "bbox": [121.0, 245.0, 49.0, 55.0], "area": 1615, "segmentation": {"size": [512, 512], "counts": "ihl11m?3N2N2N1O2M3N2N2N1N3N2N2F[OVAf0b>B\\Aa0a>a0O1O1M20100O1O1O0000N2O2N1O1N2O1O1O1N3N1O1O1O1N2O1O2N1N2O1O1O1M3I8H]hZ5"}, "image_id": 977, "id": 16067}, {"iscrowd": 0, "category_id": 1, "bbox": [191.0, 249.0, 31.0, 82.0], "area": 2053, "segmentation": {"size": [512, 512], "counts": "[io2>R>KUCd0\\B?APg`4"}, "image_id": 977, "id": 16068}, {"iscrowd": 0, "category_id": 1, "bbox": [47.0, 256.0, 11.0, 10.0], "area": 68, "segmentation": {"size": [512, 512], "counts": "Vhg01m?2M4N110O0010O001M2NogR7"}, "image_id": 977, "id": 16069}, {"iscrowd": 0, "category_id": 1, "bbox": [23.0, 260.0, 47.0, 28.0], "area": 603, "segmentation": {"size": [512, 512], "counts": "bh;1m?3M2N3M3M2O2M2O2O010O010O010O010O0100O010O010O010M2N3M2N3M2001O0010O010O010O010O010O001M3N1N\\gl6"}, "image_id": 977, "id": 16070}, {"iscrowd": 0, "category_id": 1, "bbox": [84.0, 262.0, 27.0, 27.0], "area": 470, "segmentation": {"size": [512, 512], "counts": "hXZ13j?3M4L3N2M4L3O1010O00010O00010O0010O0001N1M3M4L3M3M4MeWX6"}, "image_id": 977, "id": 16071}, {"iscrowd": 0, "category_id": 1, "bbox": [353.0, 269.0, 36.0, 30.0], "area": 708, "segmentation": {"size": [512, 512], "counts": "eh`52n?4L4L3L5L4L1N10O1000O10O1000O0100000O0100000O01000O10O1000O04M4L4L4K4MQWm1"}, "image_id": 977, "id": 16072}, {"iscrowd": 0, "category_id": 1, "bbox": [393.0, 286.0, 66.0, 31.0], "area": 1698, "segmentation": {"size": [512, 512], "counts": "SiT68h?>B4L0O100000000000000000O10000000O100000000000000000O10000000O100000000000000000O10000000O100000000000000000O10000000O1000000BXVj0"}, "image_id": 977, "id": 16073}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 290.0, 52.0, 41.0], "area": 1485, "segmentation": {"size": [512, 512], "counts": "R9W1i>0000002N000000N>D000000000000001O00000000000000000000000000000000000000000000000000000000000000000000afU7"}, "image_id": 977, "id": 16074}, {"iscrowd": 0, "category_id": 1, "bbox": [95.0, 293.0, 30.0, 40.0], "area": 813, "segmentation": {"size": [512, 512], "counts": "Rj_13f?0[@3b?8L3M3M4M2M3M6K2010O00010O00010O000M4L30001O01OM4L3Aj@1Y?Lk@0Y?Lj@1meQ6"}, "image_id": 977, "id": 16075}, {"iscrowd": 0, "category_id": 1, "bbox": [298.0, 305.0, 27.0, 27.0], "area": 469, "segmentation": {"size": [512, 512], "counts": "hYe42n?3L4M3M3L4M3M0O10O10O10O10O10O10O10O10O12M4M3M3L4M3MnUm2"}, "image_id": 977, "id": 16076}, {"iscrowd": 0, "category_id": 1, "bbox": [142.0, 315.0, 21.0, 20.0], "area": 407, "segmentation": {"size": [512, 512], "counts": "kYW2d0\\?00000000000000000000000000000000000000CbV^5"}, "image_id": 977, "id": 16077}, {"iscrowd": 0, "category_id": 1, "bbox": [239.0, 317.0, 48.0, 22.0], "area": 739, "segmentation": {"size": [512, 512], "counts": 
"Pjg36j?;D3N0000000000000000000O010000000000000000000O10O1000007I4L00000000O1000O1000000000000000O14LfU`3"}, "image_id": 977, "id": 16078}, {"iscrowd": 0, "category_id": 1, "bbox": [482.0, 318.0, 11.0, 9.0], "area": 93, "segmentation": {"size": [512, 512], "counts": "oYa77h?2O0000000000000004LnU9"}, "image_id": 977, "id": 16079}, {"iscrowd": 0, "category_id": 1, "bbox": [433.0, 322.0, 12.0, 12.0], "area": 117, "segmentation": {"size": [512, 512], "counts": "Sjh64l?7I00000000O010000007IgUQ1"}, "image_id": 977, "id": 16080}, {"iscrowd": 0, "category_id": 1, "bbox": [446.0, 322.0, 26.0, 27.0], "area": 615, "segmentation": {"size": [512, 512], "counts": "TZo6;e?>A100000000000O100000000000O100000000000O1000:F=CWec0"}, "image_id": 977, "id": 16081}, {"iscrowd": 0, "category_id": 1, "bbox": [390.0, 331.0, 23.0, 22.0], "area": 440, "segmentation": {"size": [512, 512], "counts": "]ZS65k?=C2M10000000O1000000000000000O10000000O10;EZUa1"}, "image_id": 977, "id": 16082}, {"iscrowd": 0, "category_id": 1, "bbox": [290.0, 335.0, 8.0, 6.0], "area": 44, "segmentation": {"size": [512, 512], "counts": "`Za45k?00000O1000000aeZ3"}, "image_id": 977, "id": 16083}, {"iscrowd": 0, "category_id": 1, "bbox": [54.0, 338.0, 12.0, 8.0], "area": 92, "segmentation": {"size": [512, 512], "counts": "bZk08h?00000000000001O000000]en6"}, "image_id": 977, "id": 16084}, {"iscrowd": 0, "category_id": 1, "bbox": [277.0, 342.0, 23.0, 23.0], "area": 429, "segmentation": {"size": [512, 512], "counts": "ijZ41n?9H9G2N0000000000O0100000000000000O010006J8HldY3"}, "image_id": 977, "id": 16085}, {"iscrowd": 0, "category_id": 1, "bbox": [57.0, 348.0, 22.0, 22.0], "area": 456, "segmentation": {"size": [512, 512], "counts": "mjl0e0[?00000000000O100000000000000000000000O10b0^ObTh6"}, "image_id": 977, "id": 16086}, {"iscrowd": 0, "category_id": 1, "bbox": [159.0, 350.0, 26.0, 32.0], "area": 726, "segmentation": {"size": [512, 512], "counts": "oj_23m?=Cc001O00001O0O2O00001@gNWBY1i=`0O000OO2N2N2M3N2M3N2M3N2Mc0^Ob0]ObSb4"}, "image_id": 977, "id": 16094}, {"iscrowd": 0, "category_id": 1, "bbox": [285.0, 380.0, 49.0, 50.0], "area": 1627, "segmentation": {"size": [512, 512], "counts": "cl^46j?9G000000000O0BMQA3o>?0Do@IQ?7750O100000O10000000001O=CCkbh2"}, "image_id": 977, "id": 16095}, {"iscrowd": 0, "category_id": 1, "bbox": [350.0, 380.0, 38.0, 43.0], "area": 1300, "segmentation": {"size": [512, 512], "counts": "n[_5>b?6J0000000O10006J?A0000000000000000000000000000O10O10000000000000ZO\\A0d>0f00009Gkcm1"}, "image_id": 977, "id": 16096}, {"iscrowd": 0, "category_id": 1, "bbox": [240.0, 382.0, 33.0, 52.0], "area": 1313, "segmentation": {"size": [512, 512], "counts": "]]h31g?8I8H7I7H8I7M30001O0001O00000001O0000000000010O000000I7I7I7I7I7HXTg3"}, "image_id": 977, "id": 16097}, {"iscrowd": 0, "category_id": 1, "bbox": [71.0, 388.0, 37.0, 64.0], "area": 1563, "segmentation": {"size": [512, 512], "counts": "alS12n?4L5BIh@=S?B>B>B3L1000000I70O1000000000000000O100000000000O7J00007I>B>B?AoQR7"}, "image_id": 977, "id": 16099}, {"iscrowd": 0, "category_id": 1, "bbox": [121.0, 395.0, 43.0, 50.0], "area": 1553, "segmentation": {"size": [512, 512], "counts": "ell19g?;E00000000000000000000O10G@RAd0j>j0_O0O1000000000000000000000O1000007I1O00O10000000000=Cb0^O_b]5"}, "image_id": 977, "id": 16100}, {"iscrowd": 0, "category_id": 1, "bbox": [444.0, 398.0, 8.0, 11.0], "area": 80, "segmentation": {"size": [512, 512], "counts": "^\\n68h?3M0000000005K]cm0"}, "image_id": 977, "id": 16101}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 401.0, 7.0, 54.0], "area": 282, 
"segmentation": {"size": [512, 512], "counts": "aM3N2M5L>B?@gRl7"}, "image_id": 977, "id": 16102}, {"iscrowd": 0, "category_id": 1, "bbox": [431.0, 0.0, 78.0, 37.0], "area": 2276, "segmentation": {"size": [512, 512], "counts": "P`g62n?6J6J6J6J6J5K0000000000O10000000000O10000000000O10000000000O10000000000O10000000000O10000000000O10000000000O10000000000O10000000000O1000000O1L4M3M3M3M3M3LT`1"}, "image_id": 978, "id": 16103}, {"iscrowd": 0, "category_id": 1, "bbox": [247.0, 6.0, 131.0, 224.0], "area": 19789, "segmentation": {"size": [512, 512], "counts": "Xek34l?6cMIeD=U;HfD>T;HeD`0T;FfD`0S;GgD?h8kNRHl0PO`0`8PO[Hf0oN]OaAc0e>WO[Ai0n>00O10O1000000000O01000000000O10O1000000000O2O6J7I6J7IVWS5"}, "image_id": 978, "id": 16105}, {"iscrowd": 0, "category_id": 1, "bbox": [443.0, 58.0, 69.0, 59.0], "area": 3246, "segmentation": {"size": [512, 512], "counts": "Ubm66i?7J6J7I6J7I6I5L0000O100000O1000O100000O100000O1000O100000O100000O100000O1000O100000O100000O1000O100000O100000O1000O100000O100000O100000OWN"}, "image_id": 978, "id": 16106}, {"iscrowd": 0, "category_id": 1, "bbox": [398.0, 394.0, 84.0, 115.0], "area": 8360, "segmentation": {"size": [512, 512], "counts": "]\\W6?a?l0TOl0TOk0UO>B000O10000000000000000000000000000000O1000000000000000000000O10000000000000000000000000000000O10000000000000000000O1009Gl0TO4L0000000000000000000000O10O105Kl0TOk0UOb`>"}, "image_id": 978, "id": 16107}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 441.0, 91.0, 71.0], "area": 4486, "segmentation": {"size": [512, 512], "counts": "k>U1k>00000000000000000000000000000000000000000000000000000000000000000000000000000O1000000000000SO_O[Ba0e=m0000000O10000000000O1000000000000O10000000000O10000000000001O001O001O1O1lN[BKf=4T1O2M3N3M`0@YQb6"}, "image_id": 978, "id": 16108}, {"iscrowd": 0, "category_id": 1, "bbox": [225.0, 470.0, 144.0, 42.0], "area": 5547, "segmentation": {"size": [512, 512], "counts": "mn`3]Ob0^Oc0]Oa`m7"}, "image_id": 979, "id": 16116}, {"iscrowd": 0, "category_id": 1, "bbox": [55.0, 467.0, 56.0, 27.0], "area": 1254, "segmentation": {"size": [512, 512], "counts": "lnk0_OXAa0g>[O]Ae0o>010OL4M3L5L3M3L^_d6"}, "image_id": 980, "id": 16118}, {"iscrowd": 0, "category_id": 1, "bbox": [116.0, 0.0, 24.0, 8.0], "area": 122, "segmentation": {"size": [512, 512], "counts": "PPj12n?00001O001O00001O00001O001O00001O0000N2001ON2MSPj5"}, "image_id": 980, "id": 16119}, {"iscrowd": 0, "category_id": 1, "bbox": [195.0, 0.0, 40.0, 27.0], "area": 581, "segmentation": {"size": [512, 512], "counts": "P`Q31o?001O00001O001O00001O0c@Ln>5n@OQ?1m@1S?0j@2V?Ng@6X?901O001O00001O001O00001O0000M3N2N2M3N2M3N2M3N2NR`Z4"}, "image_id": 980, "id": 16120}, {"iscrowd": 0, "category_id": 1, "bbox": [234.0, 0.0, 54.0, 33.0], "area": 1135, "segmentation": {"size": [512, 512], "counts": "`Pe31l?4L3N3L3N2010O00010O010O00010OM4M2N201O001O00001O001O00001O001O00001O00001O001O00O1M3N2M3M3N2M3N2M3N2M3N2MSP`3"}, "image_id": 980, "id": 16121}, {"iscrowd": 0, "category_id": 1, "bbox": [354.0, 0.0, 64.0, 81.0], "area": 2727, "segmentation": {"size": [512, 512], "counts": "QRa53k?2M3N3L3N3L31O01O0M4M2M3N3L3N3L3N2M4L3N3B]NZBf1d=]NYBe1d=^NYBf1d=M3N3L3N3L3N2M4M2M4M2O110L3N3L3N2M4M2M3Mjo^1"}, "image_id": 980, "id": 16122}, {"iscrowd": 0, "category_id": 1, "bbox": [99.0, 1.0, 30.0, 30.0], "area": 566, "segmentation": {"size": [512, 512], "counts": "g`a11l?3N2M4L3M3M4L3N30O00010O0010O00010O00010O00M4N11OGg@KZ?2i@MW?0l@1S?LPA3`?L\\_o5"}, "image_id": 980, "id": 16123}, {"iscrowd": 0, "category_id": 1, "bbox": [172.0, 1.0, 25.0, 27.0], "area": 395, "segmentation": {"size": [512, 
512], "counts": "dPf21m?3L3N2M4M2M4M2N210O010O00010O010OO1N3L3N3M2M3N3Lk_m4"}, "image_id": 980, "id": 16124}, {"iscrowd": 0, "category_id": 1, "bbox": [162.0, 26.0, 29.0, 29.0], "area": 505, "segmentation": {"size": [512, 512], "counts": "\\Qa22l?3M2M4M2N2M4M2N3O001O01O010O010O010O01OO2M2N3M2M4M2N2M4MP_P5"}, "image_id": 980, "id": 16125}, {"iscrowd": 0, "category_id": 1, "bbox": [432.0, 35.0, 59.0, 58.0], "area": 1859, "segmentation": {"size": [512, 512], "counts": "ZRh63k?3L3N2M4M2N30O00010O010O01M2N2M4M2M4M21O01O010O01OM7J2M4M2M4M00110010O00010O010M2M3N3L3N3L3N3M2M3N3L3N3L3N2M4M2N3L3Ne^:"}, "image_id": 980, "id": 16126}, {"iscrowd": 0, "category_id": 1, "bbox": [23.0, 52.0, 82.0, 55.0], "area": 2394, "segmentation": {"size": [512, 512], "counts": "_b;1l?3N2M4M2M4M2M3N3L3N3M21O01O010O01O01O010O01O01O010O01O01O010O01O01O010O010O00010O010O00010O010O00010O001L3N2O2O010O00010O010O01O01O010O01L3N2M4M2M4M2M3N3L3N3M2M3Nj][6"}, "image_id": 980, "id": 16127}, {"iscrowd": 0, "category_id": 1, "bbox": [486.0, 54.0, 26.0, 61.0], "area": 1102, "segmentation": {"size": [512, 512], "counts": "XSc71l?3N3[OMXA6f>LXA6f>LXA7d>MXA6f>LXA7d>d0N2N3L3N3L3O2O00010O010O00010O010O0VN"}, "image_id": 980, "id": 16128}, {"iscrowd": 0, "category_id": 1, "bbox": [138.0, 59.0, 98.0, 52.0], "area": 2989, "segmentation": {"size": [512, 512], "counts": "QSU22k?3N3L3N3L3N3N11O010O01O010L\\Oj@d0S?_Ol@b0R?8L3N3L3N3M2M3N3O0010O010O0N3M2N2M4M2N3L3N2N3O010O01O01O010ON3M2O20O00010O010O01O01O010O01O010O01O01O010O010O01O0000000O1000000000000000000O10O10000000000000O8I9G9GolY4"}, "image_id": 980, "id": 16129}, {"iscrowd": 0, "category_id": 1, "bbox": [117.0, 63.0, 29.0, 30.0], "area": 502, "segmentation": {"size": [512, 512], "counts": "dbj11m?3M2M4M2N3L3N2N3L310O0010O0010O010O0001M2M4M2N3L3N2N3L3Nkmf5"}, "image_id": 980, "id": 16130}, {"iscrowd": 0, "category_id": 1, "bbox": [398.0, 73.0, 32.0, 35.0], "area": 670, "segmentation": {"size": [512, 512], "counts": "RSW62l?2N3L3N2M4M2N3L3N2M4N10010O0010O00010O010O0N2M4M2M4M2M3N3L3M4MamX1"}, "image_id": 980, "id": 16131}, {"iscrowd": 0, "category_id": 1, "bbox": [13.0, 97.0, 70.0, 63.0], "area": 2034, "segmentation": {"size": [512, 512], "counts": "cc61n?2N3M2O1N3M2N2N3N1N2N3M2N2O2M2N2N3M2O1N3M2N1100O2O0O100O2N100O2O0O101N1O101N100O010O0010O0010O0010O010O0010O0010O0010M2N3L3N3M2M3N3L3N3M2Mo[f6"}, "image_id": 980, "id": 16132}, {"iscrowd": 0, "category_id": 1, "bbox": [341.0, 101.0, 64.0, 48.0], "area": 1764, "segmentation": {"size": [512, 512], "counts": "lcZ52k?4M2M4M2M3N3L3N3L300010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O01L3N2N30O01O01O010O01O01O010O01O01WOWA9i>EZA;f>A]AB]A;e>B^A;U?N3L3N3LQ\\e1"}, "image_id": 980, "id": 16133}, {"iscrowd": 0, "category_id": 1, "bbox": [469.0, 103.0, 17.0, 15.0], "area": 154, "segmentation": {"size": [512, 512], "counts": "`cZ71m?2M4M2N3O00010O010O00010OO2L3N2Ngl<"}, "image_id": 980, "id": 16134}, {"iscrowd": 0, "category_id": 1, "bbox": [424.0, 110.0, 54.0, 45.0], "area": 1345, "segmentation": {"size": [512, 512], "counts": "RTd63k?2M3M4M2M4M2M3O2O0O20O0010O00010n@WOk>i0SAZOm>l000010O0010O0010O00010O010O00010O010L31O010O01O01O010OYORABTA>m>^OWAb0S?N2M4M2M3N3Lik`0"}, "image_id": 980, "id": 16135}, {"iscrowd": 0, "category_id": 1, "bbox": [159.0, 112.0, 53.0, 56.0], "area": 1590, "segmentation": {"size": [512, 512], "counts": "fd_21m?3L3N3L3N2M4M2M4L3N2M4M2M3N3L310O00010O010O00010O01jN]An0b>PO`AP1`>mNdAR1e>1O010O01O01O010O01O0POWAj0h>SO[Am0l>10O0O2L3N2M4M2M4L3N2M4Mdke4"}, "image_id": 980, "id": 16136}, {"iscrowd": 0, "category_id": 1, "bbox": 
[492.0, 129.0, 20.0, 32.0], "area": 434, "segmentation": {"size": [512, 512], "counts": "gTf72l?3L3N3L3N2M4M2M4M20010O0010O0010O0010OkK"}, "image_id": 980, "id": 16137}, {"iscrowd": 0, "category_id": 1, "bbox": [118.0, 145.0, 27.0, 28.0], "area": 463, "segmentation": {"size": [512, 512], "counts": "TUk12l?2M3N3L3N3M2M4M20010O0010O0010O010O00O2M2N3L3N3M2M3NZ[g5"}, "image_id": 980, "id": 16138}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 146.0, 28.0, 55.0], "area": 1127, "segmentation": {"size": [512, 512], "counts": "b4S1m>0]ATOR>l0eA]O[>T101O01O000000000bNmAo0b>001O000001O00000001O0000M3H9F9GUka7"}, "image_id": 980, "id": 16139}, {"iscrowd": 0, "category_id": 1, "bbox": [325.0, 150.0, 68.0, 52.0], "area": 2119, "segmentation": {"size": [512, 512], "counts": "beR52k?4M2M3N3L3M4M2M3N3L3N30O0010O0010O0010O00010O010O00010O0010O0010O0010O0010O001O0N2M4M21O010O01O01O010O01O01O010O000M4M2SOWA`0m>\\OVAa0m>]OVA`0V?M3N3L3N3LeZk1"}, "image_id": 980, "id": 16140}, {"iscrowd": 0, "category_id": 1, "bbox": [408.0, 162.0, 38.0, 34.0], "area": 742, "segmentation": {"size": [512, 512], "counts": "eU\\63k?2N3L3N2N3L3N3M2O20O0010O010O0010O0010O010O0010O010O0010OO1N3L3N3M2M4M2N2MejP1"}, "image_id": 980, "id": 16141}, {"iscrowd": 0, "category_id": 1, "bbox": [56.0, 169.0, 33.0, 34.0], "area": 591, "segmentation": {"size": [512, 512], "counts": "gUl02m?2N3N1N2N2N3M2O1N2N3M2N2O1N3M1O0001O2N3N1N2N3M2N2O1N3M2N2N2O2M2NVZc6"}, "image_id": 980, "id": 16142}, {"iscrowd": 0, "category_id": 1, "bbox": [132.0, 170.0, 61.0, 59.0], "area": 2095, "segmentation": {"size": [512, 512], "counts": "]VR22l?2M301O3NO01]@F\\?`0M4M2M4M2M3N3L3N3L3N2M4N01N3O0010O010O00010O010O00010O010O00010O010O00010O010O00mNaAe0`>WOcAj0\\>TOgAk0Z>QOiAn0f>L3N3M2M3N3L3N3L3N2MkYo4"}, "image_id": 980, "id": 16143}, {"iscrowd": 0, "category_id": 1, "bbox": [442.0, 179.0, 22.0, 24.0], "area": 327, "segmentation": {"size": [512, 512], "counts": "TVm62l?2M4M2M3M4M2N2010O010O00010O000M4M2M4L3N2M[jg0"}, "image_id": 980, "id": 16144}, {"iscrowd": 0, "category_id": 1, "bbox": [302.0, 202.0, 69.0, 49.0], "area": 1990, "segmentation": {"size": [512, 512], "counts": "RWg43j?3N3L3N3M2M3N3M2M4M210O01O01O010O01O010O01O010O01O01O010O010O01O01O010O01O010O01O0N3M2N2M4M2N3O010O01O01OM4M2N3L3N2N3L3N3M2M4M2M3N3M2M4M_YV2"}, "image_id": 980, "id": 16145}, {"iscrowd": 0, "category_id": 1, "bbox": [51.0, 203.0, 34.0, 41.0], "area": 770, "segmentation": {"size": [512, 512], "counts": "gfi03l?2N2O2M2N2N2O2M2N2N2N3N1N2N2N3OO101O000O10N2N2N101N12SOUA?n>_OUABSA;Q?CPA9T?Eo@7_?L4MPYe6"}, "image_id": 980, "id": 16146}, {"iscrowd": 0, "category_id": 1, "bbox": [15.0, 204.0, 30.0, 29.0], "area": 510, "segmentation": {"size": [512, 512], "counts": "nf72k?3N3L3N3L3N2M40O010O00010O010O00010O010O00010L3N3L3N2M4M2M^YY7"}, "image_id": 980, "id": 16147}, {"iscrowd": 0, "category_id": 1, "bbox": [383.0, 209.0, 39.0, 29.0], "area": 557, "segmentation": {"size": [512, 512], "counts": "mfo53k?3M2M4M2M310O0010O0010O0010O010O0010O0010O0010O010O00@c@>_?01O010O01O01M2M4M2N2MRi\\1"}, "image_id": 980, "id": 16148}, {"iscrowd": 0, "category_id": 1, "bbox": [413.0, 213.0, 35.0, 20.0], "area": 244, "segmentation": {"size": [512, 512], "counts": "jf^61l?4M210O01O01O010O010O01O01O010O010O010O01O01O0O2O0010O01O01O010L3N3MQio0"}, "image_id": 980, "id": 16149}, {"iscrowd": 0, "category_id": 1, "bbox": [110.0, 225.0, 66.0, 58.0], "area": 2047, "segmentation": {"size": [512, 512], "counts": 
"lWg11l?3N3L3N2M4M2M4L3N2M4N10010O0010O0010O0010O0010O0010O0001VAQOb>n0[AVOd>S110O00010O010O00010O01O01O010O00010O010O00010O010O00WOZA8g>E[A;e>B^AA^A;e>B^AB_A:U?N2M4L3NlgW5"}, "image_id": 980, "id": 16150}, {"iscrowd": 0, "category_id": 1, "bbox": [278.0, 249.0, 71.0, 50.0], "area": 2131, "segmentation": {"size": [512, 512], "counts": "cX[43k?2M4L3N2M4L3N3L3N2M4O00010O0010O0010O00012M00010O010O0001N11O01O010O00010O01O01O010O0001O0N3L3N210O010O00010ON2M4M2M4L3N201O01O01O0Dj@OV?Mm@3S?JPA4R?JQA2`?N^Wa2"}, "image_id": 980, "id": 16151}, {"iscrowd": 0, "category_id": 1, "bbox": [73.0, 259.0, 27.0, 26.0], "area": 414, "segmentation": {"size": [512, 512], "counts": "bhT13j?3N3M2M3N3M2O2O01O010O010O01O01O010O010M2M3N3L3N3M2Mhg]6"}, "image_id": 980, "id": 16152}, {"iscrowd": 0, "category_id": 1, "bbox": [359.0, 259.0, 36.0, 29.0], "area": 611, "segmentation": {"size": [512, 512], "counts": "ghc51m?2M4M2M3N3L3M3N3O0010O00010O01O01O010O01ON3L3010O0001Ad@9[?Di@;^?1O01O0N2M4L3N_Wj1"}, "image_id": 980, "id": 16153}, {"iscrowd": 0, "category_id": 1, "bbox": [95.0, 284.0, 54.0, 57.0], "area": 1469, "segmentation": {"size": [512, 512], "counts": "^i_12m?3M2O1N2N2N2N3M2N2O1N2N2N3M2N2O1N2N3M2N2N2000001O0001O000001O00N3M2O1N2N2N2N3M2N2O1N2N2N3M2N2N2O1N3M2N2N2NPVe5"}, "image_id": 980, "id": 16154}, {"iscrowd": 0, "category_id": 1, "bbox": [395.0, 286.0, 10.0, 11.0], "area": 74, "segmentation": {"size": [512, 512], "counts": "UiU62k?3M3O2O00010OO1N3LSWe1"}, "image_id": 980, "id": 16155}, {"iscrowd": 0, "category_id": 1, "bbox": [245.0, 291.0, 75.0, 51.0], "area": 2060, "segmentation": {"size": [512, 512], "counts": "jij31m?3M2M4M2N3L3O101O010O010O00010h@^OP?b0n@@S?g0M2N3L3N3M2OO3L300010O01O010O01O010O01O010O01O010O01O010O01O01O010O010O01O01O010O01O010O01O0N3L3N2N3L3N3M2M4M2N2M4MYfo2"}, "image_id": 980, "id": 16156}, {"iscrowd": 0, "category_id": 1, "bbox": [329.0, 303.0, 30.0, 30.0], "area": 529, "segmentation": {"size": [512, 512], "counts": "TjT51l?3N3L3M4M2M3N3L310O01O01O010O01O01O010O01O0M3N3L3N3L3N2M4MZV\\2"}, "image_id": 980, "id": 16157}, {"iscrowd": 0, "category_id": 1, "bbox": [30.0, 305.0, 25.0, 26.0], "area": 341, "segmentation": {"size": [512, 512], "counts": "PZ?2m?2N2N2N2N1N3N2N2N2N2M2O1OO3N1O2N2N2M3N2N1O2N2M3NWVT7"}, "image_id": 980, "id": 16158}, {"iscrowd": 0, "category_id": 1, "bbox": [56.0, 317.0, 28.0, 22.0], "area": 344, "segmentation": {"size": [512, 512], "counts": "YZl01l?4M2M3N3M210O00010O010O0010O0010O010O00010O01N1M3N3L3Nlee6"}, "image_id": 980, "id": 16159}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 325.0, 23.0, 44.0], "area": 530, "segmentation": {"size": [512, 512], "counts": "V:[1d>01O2N2M3N2N2N1O2M3N2N2N1O2M3N2N2N2M2O2N2N2N[Ud7"}, "image_id": 980, "id": 16160}, {"iscrowd": 0, "category_id": 1, "bbox": [33.0, 345.0, 15.0, 15.0], "area": 119, "segmentation": {"size": [512, 512], "counts": "oj`01n?2N2N2N2N2N2O1000N2N2N2N2N2NQeW7"}, "image_id": 980, "id": 16161}, {"iscrowd": 0, "category_id": 1, "bbox": [62.0, 348.0, 52.0, 57.0], "area": 1443, "segmentation": {"size": [512, 512], "counts": "e[o01n?2N2N1O2N2N2M3N2N2N2N2N2N1O2N2M3N2N2N2N2N2N2N2O0100000000000O10gN`AQ1`>mNbAS1^>kNdAT1]>jNdAW1c>O1N2N2N2N2M3N2[Ok@ZO^Ag0b>VOaAi0k>1O01O010O01O01ON3M201O01O01O010O01O01O010O01O0SOSAh0l>UOWAk0o>01O0N3L3N2M4M2M4L3NST\\3"}, "image_id": 980, "id": 16163}, {"iscrowd": 0, "category_id": 1, "bbox": [10.0, 358.0, 32.0, 36.0], "area": 583, "segmentation": {"size": [512, 512], "counts": "k[52l?3N2N2N2N1O2N2N2N2N2N2N2N2N2M1000000002N2N2N2N2N2Hf@G\\?7f@G\\?77O2N2N2N`dZ7"}, "image_id": 980, "id": 
16164}, {"iscrowd": 0, "category_id": 1, "bbox": [29.0, 383.0, 50.0, 60.0], "area": 1386, "segmentation": {"size": [512, 512], "counts": "Zm>2m?1O2N2N2M3N2N1O2M3N2N2N1_OYOhAi0V>YOgAj0W>WOhAk0U>XOiAj0U>XOiAj0U>XOiAi0V>YOgAj0W>>0100002N2M3N2N1O2M3N2N2N1O2M3N2N2N2M2O2N2N2N2M2O2N2N2N2M3N\\Sh6"}, "image_id": 980, "id": 16165}, {"iscrowd": 0, "category_id": 1, "bbox": [197.0, 400.0, 55.0, 55.0], "area": 1559, "segmentation": {"size": [512, 512], "counts": "jmR31m?3L3N2N3L3N3N110O01O01B@XAb0f>AWAb0e>AYAa0e>>N3L3N3M21O01O010O010O00010O01M2M3N3L3N3M2M4M2M3N3N1010O00O2L310O00001L3N3M2M4MScQ4"}, "image_id": 980, "id": 16166}, {"iscrowd": 0, "category_id": 1, "bbox": [309.0, 402.0, 51.0, 74.0], "area": 1938, "segmentation": {"size": [512, 512], "counts": "Xnj43k?2M4L3N3L3N210O0010O00j@]OU>0^Bd0YO_OV>1^Bi0_=YO^Bj0_=YO^Bk0^=YO_Bi0_=YO^Bj0`=YO\\Bk0c=UO[Bm0f=ROWBQ1i=d00O01O01O010O0N2M4L3N2M4M2M4L3N2M4M2M4L3N2M4L3N2M4M2M4L3N2MUc[2"}, "image_id": 980, "id": 16167}, {"iscrowd": 0, "category_id": 1, "bbox": [261.0, 412.0, 41.0, 24.0], "area": 542, "segmentation": {"size": [512, 512], "counts": "WmR42l?3L3M4O01O010O01O01O010O00001N1001M2M3N3N110O01O01O010O01O01O01O010O0N2N3L3N3L3NnbX3"}, "image_id": 980, "id": 16168}, {"iscrowd": 0, "category_id": 1, "bbox": [305.0, 482.0, 31.0, 30.0], "area": 631, "segmentation": {"size": [512, 512], "counts": "ooh41f?0`@2]?1`@2^?9M3N2M3N2M3N21O00001O001O00001O001O00001N1N3L3N2M4M2M4Mg`g2"}, "image_id": 980, "id": 16169}, {"iscrowd": 0, "category_id": 1, "bbox": [129.0, 486.0, 58.0, 26.0], "area": 954, "segmentation": {"size": [512, 512], "counts": "eoP21n?2N2N2N2N2N2N1O2N2N2N2N1N2O11O1O1O1O1O1O1O1O1OO1O1O1N2O1O1001O1O1O1O1O1O1O1O1O0000O1O1O1O11O1O1O1O001O1M3N2N2N2N2NYPR5"}, "image_id": 980, "id": 16170}, {"iscrowd": 0, "category_id": 1, "bbox": [201.0, 495.0, 23.0, 17.0], "area": 280, "segmentation": {"size": [512, 512], "counts": "ooT31l?3M3M3M3M3O1001O00001O00001O00001O00001M2M3M_`_4"}, "image_id": 980, "id": 16171}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 18.0, 34.0], "area": 347, "segmentation": {"size": [512, 512], "counts": "0R1n>M3M3N2M31O00M3N2O11O00O2L3N3L3N2MQPg7"}, "image_id": 981, "id": 16172}, {"iscrowd": 0, "category_id": 1, "bbox": [97.0, 0.0, 88.0, 65.0], "area": 3063, "segmentation": {"size": [512, 512], "counts": "Ta`13S?ObA4\\>NbA4[>OYAK0:d>NXAK29c>OYAJ1;e>g00001O001O00001O001O00001O001O000000N2M3O1001O001O00001O001O00001O010O0001kN_Aj0`>SOdAl0]>QOeAP1e>01O010O01OnNVAn0OSOk>R1O010O01O01O010O01O01O010O01O01O010O01O0UOVAa0i>]OZAb0g>ZO\\Ag0n>O2M2M4N11O01O010Bc@7]?Ge@:a?L3N3L_^S5"}, "image_id": 981, "id": 16173}, {"iscrowd": 0, "category_id": 1, "bbox": [162.0, 0.0, 12.0, 4.0], "area": 27, "segmentation": {"size": [512, 512], "counts": "PPa21o?00001O0000001O00001OOQPY5"}, "image_id": 981, "id": 16174}, {"iscrowd": 0, "category_id": 1, "bbox": [200.0, 0.0, 8.0, 3.0], "area": 14, "segmentation": {"size": [512, 512], "counts": "PPT31o?00001O001O00NRPh4"}, "image_id": 981, "id": 16175}, {"iscrowd": 0, "category_id": 1, "bbox": [376.0, 0.0, 51.0, 27.0], "area": 738, "segmentation": {"size": [512, 512], "counts": "PPl51o?001O1Z@N\\?2c@0\\?1a@1_?0_@2`?8O001OO1O1N2O1001O001O1O001O1O001O1O1O001O1O001O1O001O1O001O1O00O1O1Ak@1W?Mk@1W?Ml@1U?Nm@OU?Om@0T?N`0001O000P`Z1"}, "image_id": 981, "id": 16176}, {"iscrowd": 0, "category_id": 1, "bbox": [456.0, 8.0, 46.0, 101.0], "area": 3069, "segmentation": {"size": [512, 512], "counts": 
"WQT76d?7J5J6mAYOhc0kA_OU>b0hA@X>`0eADW>?gACW>`0fACW>?fADW>T1O20O01O010O01O010O01O01O010O01OO2M2N3L3N3N11OnN^Af0c>WO`Ai0_>UOcAk0^>QOfAo0e>00010O010O00010O010O010O00010O010O00010O010O010O00010OO2M2M3N3L3N3M2M4M2M3N3M[m]5"}, "image_id": 981, "id": 16180}, {"iscrowd": 0, "category_id": 1, "bbox": [387.0, 48.0, 47.0, 62.0], "area": 1637, "segmentation": {"size": [512, 512], "counts": "maQ61o?2M3N3L3N2M3N3L3N2N2M3N3L3N2M3N3L3N2N2M4M2M2O0O10O010O10O04M2M3N2M4M2N2M3N3L3WOQA=Q?@SAWOYAi0f>YOXAg0i>8O01O01O2O2M2O2M3M2O2M3N1O02N1N3N2M2O2M3M2O2M3N1N3M3N2M2O2M3M2O2M3N1N3N2M3M^[n2"}, "image_id": 981, "id": 16185}, {"iscrowd": 0, "category_id": 1, "bbox": [71.0, 117.0, 83.0, 81.0], "area": 2895, "segmentation": {"size": [512, 512], "counts": "TdS12l?2O2M3N2M2O2M3N1N3N200O0YAZOU>f0iA\\OW>d0gA^OX>b0fAAZ>?dAB]>>`AE_><_AFa>9^AHc>k0O1000O01O1N2M2O2M3N01O2M2O2O10O01000O10O10O10O1eN`AU1_>iNdAW1\\>gNfAY1`>010002M3N00O10O10O10O10ZAmN^>S1_APO`>Q1^APOc>W100O10O1N2M2O2M3000OO2N2M2YOPA=R?APAj1010O0010O010O00010O01O0N2O2O0POiA8W>^OiAH2j0U>\\OlAF3m0R>YOWBh0h=VOZBj0f=SO^Bm0Z>01O010O01O010O01O01O010O01O010O01O010O01O01O010O01O010O01N1010O0010O0010O001L3VOo@c0X?M3N3M2M4M2M4MTZg3"}, "image_id": 981, "id": 16187}, {"iscrowd": 0, "category_id": 1, "bbox": [21.0, 147.0, 79.0, 52.0], "area": 2146, "segmentation": {"size": [512, 512], "counts": "Ze:1n?2M2N3N2M2N3d@BS??k@DR??k@CS?g0N1N3M3N1N30O10O10O010O10O10O01N10O2O001O001O10O010O10O10O10O010O1N1N3M2O2M3N1100O0100O0100N1O20O0100O0100O010O0N3N2M2N3N1N3M3N1100O01G[@4i?N3NZj]6"}, "image_id": 981, "id": 16188}, {"iscrowd": 0, "category_id": 1, "bbox": [79.0, 199.0, 61.0, 65.0], "area": 2090, "segmentation": {"size": [512, 512], "counts": "^gW12k?3N3L3N2M4M2M4L30010O010O00010O010O00010O0o@YOj>f0SA]Om>k001O0N2M4M2M4M2M3M4M2M4N10010O0010O00010N1M4M2M3N3L3G`AoNd>m0_AQOc>m09M4UOl@f0X?M4M2M3N3L3M4M_ii5"}, "image_id": 981, "id": 16189}, {"iscrowd": 0, "category_id": 1, "bbox": [450.0, 204.0, 47.0, 63.0], "area": 1678, "segmentation": {"size": [512, 512], "counts": "fVQ7191X?2e@2X?0f@2W?>\\A\\Oj=d0SB@l=`0RBBn=?QBDj=>VBCi=>VBBj=>SBFm=9SBGm=:RBFo=9RBGm=:RBFn=:QBHo=7nALR>5kANU>YOgAf014[>h001O010O00010O010O0010N1N3L3N2M4M2M3N3L3N3M2M3N3L3N3L3N2MVY7"}, "image_id": 981, "id": 16190}, {"iscrowd": 0, "category_id": 1, "bbox": [178.0, 206.0, 71.0, 49.0], "area": 1951, "segmentation": {"size": [512, 512], "counts": "WWi22k?3M3N3L3N3L3M3N3L3O20O00010O00010O010Oo@TOn>P1O01O010O01O01O01ON3N1010O0010O0010O001O0N2010O0010O0010O0010O0010O00010O0010O0001L3M4M2M3N3L3M3N3L3NPYS4"}, "image_id": 981, "id": 16191}, {"iscrowd": 0, "category_id": 1, "bbox": [378.0, 213.0, 51.0, 50.0], "area": 1491, "segmentation": {"size": [512, 512], "counts": "VWm53m?2M4M2M3N2M3N2M3N3M2M3N2M3N2M10O010O0100O010O0100O010O010O010O010O0101N30M3N2M3N2M4VOm@a0U?\\On@a0[?N2N2M4M2M3N2MXXY1"}, "image_id": 981, "id": 16192}, {"iscrowd": 0, "category_id": 1, "bbox": [143.0, 246.0, 83.0, 70.0], "area": 2815, "segmentation": {"size": [512, 512], "counts": "dhW23k?2M4M2M4M2M3M4M2M3N3O0010O00010O010O0001M2M4M2O1010O0010O0010O00010OiAgNg=Z1VBhNk=W1RBlNn=T1PBoNo=R1PBnNQ>Q1PBoNo=R1PBnNQ>Q1mAQOS>o0jAUOU>LkAS1V>iNmAW1S>gNPBY1P>cNSB]1X>10O00010O010O00010M2M4L3N2M40O01O01O01QOSAj0Q?00010O010O0001N1M3M4M2M4M2M3N3LVg^4"}, "image_id": 981, "id": 16193}, {"iscrowd": 0, "category_id": 1, "bbox": [503.0, 254.0, 9.0, 22.0], "area": 103, "segmentation": {"size": [512, 512], "counts": "_hk71m?3M2N3M2M3N3M2N3QH"}, "image_id": 981, "id": 16194}, {"iscrowd": 0, "category_id": 1, "bbox": [322.0, 266.0, 66.0, 49.0], "area": 1783, "segmentation": {"size": [512, 
512], "counts": "RYQ53k?2M4M2M3N3L3M4N11O010O01O01O010O01O01OM4M2M4M200010O01O01O010O01O01O01O01O010O01O01nNVAl0j>ROYAn0l>0010O0010O0010O0010O0010M2M4M2M3N3L3M4M2M3NSgm1"}, "image_id": 981, "id": 16195}, {"iscrowd": 0, "category_id": 1, "bbox": [420.0, 272.0, 47.0, 27.0], "area": 926, "segmentation": {"size": [512, 512], "counts": "fXb61o?5J10008H6J000000O010000000000000O010000000000000O010000000000000O0100000005K00O10O1001O8H8C]@Mbff0"}, "image_id": 981, "id": 16196}, {"iscrowd": 0, "category_id": 1, "bbox": [471.0, 274.0, 31.0, 37.0], "area": 675, "segmentation": {"size": [512, 512], "counts": "Wi[72l?2M4M2M3d@DR??j@ER?>l@DR?g0L3O1010O01O01O010L3N2O2O0010ON2M4M2M4L3N2M4M2MYg4"}, "image_id": 981, "id": 16197}, {"iscrowd": 0, "category_id": 1, "bbox": [504.0, 277.0, 8.0, 24.0], "area": 107, "segmentation": {"size": [512, 512], "counts": "XYl72l?2JN\\@5a?6N3M2N3L3[G"}, "image_id": 981, "id": 16198}, {"iscrowd": 0, "category_id": 1, "bbox": [394.0, 292.0, 29.0, 29.0], "area": 435, "segmentation": {"size": [512, 512], "counts": "bYU61n?2N2N2N2N2N2N2N2N2N2N2N2N2N2OO2N2N2N2N2N2N2N2N2N2N2N2N2N`V\\1"}, "image_id": 981, "id": 16199}, {"iscrowd": 0, "category_id": 1, "bbox": [420.0, 303.0, 70.0, 102.0], "area": 3131, "segmentation": {"size": [512, 512], "counts": "h[b61n?1N3N2N2N2M2O2N2N2M2O2N2N2M3N1O2M3N2N2N1N3N2oNdN[C^1b?bA_O_>`0cA^O_>?dA^O_>`0bA_O`>?bA_O`>?bA_O_>`0b0M3N2N2N1N3N2N2Nee:"}, "image_id": 981, "id": 16200}, {"iscrowd": 0, "category_id": 1, "bbox": [298.0, 313.0, 84.0, 54.0], "area": 2573, "segmentation": {"size": [512, 512], "counts": "jZe41k?4M4K4L4M3L5L3L4L410O000010O0000010O000010O000010O00000mNWAP1l>01O01O0001O01O0001O01O01O01O0001O01O01O0001O01O01O0001O01O01O01O0001O01L3000010O000010O00001M2M3L4M4K4L4M3L5KfeP2"}, "image_id": 981, "id": 16201}, {"iscrowd": 0, "category_id": 1, "bbox": [18.0, 327.0, 68.0, 83.0], "area": 2966, "segmentation": {"size": [512, 512], "counts": "Y\\92l?2M4L3N2M4L3N2M4L3O2O00010QASOi>R110O0^OnNVBR1g=QOZBo0b=UO\\Bl0b=VO[Bn0a=UO]Bm0`=WO\\Bl0b=VO\\Bm0`=VO]Bm0`=i001020O00010O0010O0010N1M3M4M2M4OO101OO2CSBcNo=Z1TBcNo=[1SBbNQ>Z1^OXAb0i>ZO[Ae0e>YO]Ah0m>01O010O01OM4M2M3M4M2M4M2MSed6"}, "image_id": 981, "id": 16202}, {"iscrowd": 0, "category_id": 1, "bbox": [385.0, 327.0, 28.0, 30.0], "area": 481, "segmentation": {"size": [512, 512], "counts": "ljP62l?2M3N3L3N3M2M3N3M2010O010O00010O010N1N3L3N2N3L3N3L3N2NdUa1"}, "image_id": 981, "id": 16203}, {"iscrowd": 0, "category_id": 1, "bbox": [10.0, 339.0, 20.0, 32.0], "area": 327, "segmentation": {"size": [512, 512], "counts": "_[51l?3L5L3M3M4L3M3M4L3001K4M3M4L3M3M3M4L3M]e`7"}, "image_id": 981, "id": 16204}, {"iscrowd": 0, "category_id": 1, "bbox": [97.0, 363.0, 85.0, 59.0], "area": 2660, "segmentation": {"size": [512, 512], "counts": "Tl`11l?3N3L3M4M2M3M4j@[Ol>h0QA[Om>m0O20O01O01O010O00010O01O01O01O01O010O01O01O01O01O010O01O01O01O01O010O01N11O010O00010O01O01O010O00010O01O01O010O00010O01O01O010O000M4M2M3M4M2M4L3N2M4L3MmcT5"}, "image_id": 981, "id": 16205}, {"iscrowd": 0, "category_id": 1, "bbox": [291.0, 370.0, 65.0, 51.0], "area": 1935, "segmentation": {"size": [512, 512], "counts": "_la42l?2M4M2M3N3M2M4M2M3N3L3N30O010O00010O010O00010O010O010O00010O010O00010O010O00010O010O010ON2M4M2N3O00M4M2M4M2N2M4M2M4M2M3N3M2M4M2MVd]2"}, "image_id": 981, "id": 16206}, {"iscrowd": 0, "category_id": 1, "bbox": [365.0, 372.0, 27.0, 25.0], "area": 457, "segmentation": {"size": [512, 512], "counts": "Slf54i?4K4L4M3O20O000010O000010O0000010O00010O000O1L5K4M3LZdk1"}, "image_id": 981, "id": 16207}, {"iscrowd": 0, "category_id": 1, "bbox": 
[483.0, 402.0, 26.0, 24.0], "area": 374, "segmentation": {"size": [512, 512], "counts": "Pma72l?3L3N3L3N2N30O010O00010O010O00010O010O0O1N3L3N3L3NXS1"}, "image_id": 981, "id": 16208}, {"iscrowd": 0, "category_id": 1, "bbox": [393.0, 412.0, 80.0, 60.0], "area": 2362, "segmentation": {"size": [512, 512], "counts": "hmT64j?2M3N3L3N3L3N2M4M2M4O01O010O01O01O010O01L3N2N30O010O00010O010O00010O010O000POYAh0g>TO\\Al0d>RO_An0i>01O01O010O01O01O010O01O01O010O01O01O010O01O01OTORAf0T?1O01O010O01OO2M2M4M2M3N3L3N3LRRc0"}, "image_id": 981, "id": 16209}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 421.0, 34.0, 87.0], "area": 1612, "segmentation": {"size": [512, 512], "counts": "\\=`2a=N1O2M1000000O0100000O2O2ROXBAj==XBAi=>YB_Oj=>YB@i=>YB@i=>XB@k=>WB@j=>YB@i=>YB@i=>YB_Oj=?WB@k==XBAi=>YB@i=>n0N2M3N1O2N2M3N[b^7"}, "image_id": 981, "id": 16210}, {"iscrowd": 0, "category_id": 1, "bbox": [82.0, 423.0, 79.0, 56.0], "area": 2198, "segmentation": {"size": [512, 512], "counts": "P^Y13k?2N2M4M2M4M2M3N3M2M4N101O01O010O01O01O01M2O2O010O01O010O01O01O010O01O010OPOVAj0k>SOXAl0n>10O010O00010O010O01O01O010O010O01O01O010O0TOo@j0T?O0010O010O0010O001N1M3N3L3N3M2M4M2MkQ_5"}, "image_id": 981, "id": 16211}, {"iscrowd": 0, "category_id": 1, "bbox": [262.0, 423.0, 70.0, 48.0], "area": 1861, "segmentation": {"size": [512, 512], "counts": "P^S41l?3M4M2M3N3L3N3L3M3O2O010O00010O01O01O010O01O01O010O01O01O01O01O010O01O01O010O01O01O010O00010N1M3N3N1010O00010O01ON3L3N3L3M3N3L3N3L3M3N3L3N]bi2"}, "image_id": 981, "id": 16212}, {"iscrowd": 0, "category_id": 1, "bbox": [338.0, 430.0, 32.0, 37.0], "area": 666, "segmentation": {"size": [512, 512], "counts": "Z^Y51m?3M2M4M2N2M4M2N3L3N3M2M301O010O010O01O0M3N3M2M4M2N3L3N2N3L3N3M\\bV2"}, "image_id": 981, "id": 16213}, {"iscrowd": 0, "category_id": 1, "bbox": [58.0, 449.0, 28.0, 27.0], "area": 429, "segmentation": {"size": [512, 512], "counts": "c^m01m?2N3L3N3M2N3L3N2010O010O010O01O01O010N1N3M2N3M2M3N3M2Niad6"}, "image_id": 981, "id": 16214}, {"iscrowd": 0, "category_id": 1, "bbox": [227.0, 467.0, 53.0, 45.0], "area": 1534, "segmentation": {"size": [512, 512], "counts": "noa32n?01O00O1M3ELh@6U?Mh@6V?Mg@5V?10O010O00001OO1N21O001O001O00001O001O001O001O001O00001O001O0Ai@5X?Hj@8V?Fm@9]?0001O001O001O001O001O00001O0O2MX`j0"}, "image_id": 981, "id": 16216}, {"iscrowd": 0, "category_id": 1, "bbox": [66.0, 473.0, 70.0, 39.0], "area": 1599, "segmentation": {"size": [512, 512], "counts": "c_Q12k?3N2N5J301O01OO2M2N3L3010O0010O010O0O2M2M3N3M2N2001O001O001O00001O001O001O0UOSAc0m>ZOVAf0R?01O001O001O00001O001O001O00001O001O001O00001O001O001L3N2N3L_`k5"}, "image_id": 981, "id": 16217}, {"iscrowd": 0, "category_id": 1, "bbox": [315.0, 478.0, 23.0, 28.0], "area": 454, "segmentation": {"size": [512, 512], "counts": "bom44f?7I6L4O1M30010O0001O00010O001O000010OM5I5J6JQaf2"}, "image_id": 981, "id": 16218}, {"iscrowd": 0, "category_id": 1, "bbox": [336.0, 487.0, 27.0, 25.0], "area": 442, "segmentation": {"size": [512, 512], "counts": "g_X54j?2M4M2M3M4M2O2O01O01O010O01O00001O001O0N2N3L3N3L3N2MePZ2"}, "image_id": 981, "id": 16219}, {"iscrowd": 0, "category_id": 1, "bbox": [361.0, 496.0, 19.0, 16.0], "area": 181, "segmentation": {"size": [512, 512], "counts": "iod52l?2N3L3N3O01O010O010O01O01O001M2N3L3N\\`Q2"}, "image_id": 981, "id": 16220}, {"iscrowd": 0, "category_id": 1, "bbox": [90.0, 0.0, 41.0, 15.0], "area": 328, "segmentation": {"size": [512, 512], "counts": "PP]11o?001O00001O001O001O00001O001O001O00001O001O001O001O00001O001O001O0000N2N2N2N2M3NR`n5"}, "image_id": 984, "id": 16221}, {"iscrowd": 0, "category_id": 1, 
"bbox": [217.0, 0.0, 50.0, 43.0], "area": 1448, "segmentation": {"size": [512, 512], "counts": "l`\\32l?2M4EIi@9U?Ih@:U?Ii@:T?:M4M2O101O00001O001O00001O001O00001O001O00001O001O00001ON2M3N2M3N2M3M3N2M3N2M3N2M3N2M3NR`j3"}, "image_id": 984, "id": 16222}, {"iscrowd": 0, "category_id": 1, "bbox": [351.0, 0.0, 5.0, 2.0], "area": 6, "segmentation": {"size": [512, 512], "counts": "P`_51o?00001OOQP^2"}, "image_id": 984, "id": 16223}, {"iscrowd": 0, "category_id": 1, "bbox": [413.0, 0.0, 43.0, 13.0], "area": 284, "segmentation": {"size": [512, 512], "counts": "P`^61o?001O001O00001O001O001O001O001OO1N200001O00001O001O001O001O001O00001O001OO1N2N2M3N2NRPl0"}, "image_id": 984, "id": 16224}, {"iscrowd": 0, "category_id": 1, "bbox": [271.0, 3.0, 51.0, 59.0], "area": 1588, "segmentation": {"size": [512, 512], "counts": "\\aW41m?2O2N2N2N2M2O2N2N2M3N1O2N2M3N2N1O2M3N2N2M2L5J4M2101O1O1N3N1O1O1N2O1O2N1N2O1O1O2M2O1O1O1N2O2N1M3J6K5Joon2"}, "image_id": 984, "id": 16225}, {"iscrowd": 0, "category_id": 1, "bbox": [41.0, 6.0, 81.0, 45.0], "area": 2001, "segmentation": {"size": [512, 512], "counts": "m`d02k?3N3L3N3L3N2M4M2M40O00010O010O00010O010O00O2O010O00010O010O00010O001O00010O01O01O010O01O01O010O01O01O01L3N2M4M2N30M2N3M200010O010O00010O010O00010O0M4M2M3M4M2M4MYoR6"}, "image_id": 984, "id": 16226}, {"iscrowd": 0, "category_id": 1, "bbox": [340.0, 15.0, 47.0, 65.0], "area": 1690, "segmentation": {"size": [512, 512], "counts": "SRZ53k?2M3N3M2M4M2XO@nAc0o=@mAc0Q>_OmAc0P>AmAb0P>@mAc0Q>@lAc0P>@nAb0R>f010O0010O010O0010O0010N1N3L3N3M2M3N3M2M4M2M4M2N2M4M2N3L3N2N3L3N3MU_n1"}, "image_id": 984, "id": 16227}, {"iscrowd": 0, "category_id": 1, "bbox": [377.0, 21.0, 59.0, 85.0], "area": 2682, "segmentation": {"size": [512, 512], "counts": "mbl51l?3M4M2M4M2M3N3L3N3L3M3N3L3N2M4M2M4L3N2M4M2M4M2M3N3L3M4M20010OM3N3L3O20OM3N3L3M3N3L3JjA`NY>]16M3N3L3N3O000O2L3N3L3M3N3Ed@M_?0e@M]?1ZoU1"}, "image_id": 984, "id": 16228}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 32.0, 30.0, 38.0], "area": 685, "segmentation": {"size": [512, 512], "counts": "f1:d?2O110M2M4M2M3N3L3N3L30001O010O01O0N2M4M2M4M2M4M2M3N3L3N3Lln`7"}, "image_id": 984, "id": 16229}, {"iscrowd": 0, "category_id": 1, "bbox": [481.0, 64.0, 31.0, 86.0], "area": 1226, "segmentation": {"size": [512, 512], "counts": "Yd`71l?3N3L3N3M2M3N3L3N3N11O010OO2M2M4M2M3N3M2@eNXB_1d=dNZB^1d=eNXB^1f=dNXB_1d=`0N3L3N3M2N2PN"}, "image_id": 984, "id": 16230}, {"iscrowd": 0, "category_id": 1, "bbox": [22.0, 65.0, 63.0, 42.0], "area": 1673, "segmentation": {"size": [512, 512], "counts": "hR;4j?2N2M4M2M4M2N2M4M2N30O01O01O010O01O01O010O010O01O01O010O01O01O010O010O01O01O0O2O00O10000000O10O1000000000O01000000004L7I8G9Hj\\e6"}, "image_id": 984, "id": 16231}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 77.0, 9.0, 13.0], "area": 84, "segmentation": {"size": [512, 512], "counts": "]2;e?1O01O010N1N3M2Nb]k7"}, "image_id": 984, "id": 16232}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 89.0, 9.0, 18.0], "area": 97, "segmentation": {"size": [512, 512], "counts": "i2b0_?O001M2M3N3M2N3MU]k7"}, "image_id": 984, "id": 16233}, {"iscrowd": 0, "category_id": 1, "bbox": [150.0, 105.0, 68.0, 94.0], "area": 3178, "segmentation": {"size": [512, 512], "counts": "gU[21l?3N3L3N2N3L3N3N101N1M3M4O001O010O0O1M4L3M4L3M4^O_N`Bd1]=_N`Bd1]=_N`Be1\\=_NaBc1]=b0L3M3N3O010O0010O0O1N3M0O012M3N2M4M2M3N3L3N2M4M2aNiAS1Z>iNiAT1Z>iNiAU1c>L3N2M4M2M3N3L3N2M4M2N3L3NWlb4"}, "image_id": 984, "id": 16234}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 111.0, 82.0, 58.0], "area": 2893, "segmentation": {"size": [512, 512], "counts": 
"Z44i?4M2M4M2N2M4M2N30O00010O010OM3N3L3N3N11O010O01O01O010O01O01O010O01O010O01O01O010O01O01O010OoNZAh0g>UO[Ak0m>10ON3M2M3N3M2M4M2M3N3O0010O0010O0010O001N1N2M4M2M4^OZAFh>7[AFi>7ZAFh>8ZAEi>8[AEh>8a0M4M2M[lf6"}, "image_id": 984, "id": 16235}, {"iscrowd": 0, "category_id": 1, "bbox": [229.0, 137.0, 48.0, 65.0], "area": 1878, "segmentation": {"size": [512, 512], "counts": "neb32k?3M4WOJdA8Y>KdA9X>KdA8Z>JdA8Y>KdA9X>KdA8Z>JdA8\\>g00O010O00010O00M4L3N3M20010O00010OO1N3L3M4M2M3M4M2M3M4M2M4L3N2M4L3N2M4L3N3L`[e3"}, "image_id": 984, "id": 16236}, {"iscrowd": 0, "category_id": 1, "bbox": [277.0, 161.0, 55.0, 66.0], "area": 1867, "segmentation": {"size": [512, 512], "counts": "hfZ41l?3N2M4M2M3M4M2M4L3N2M4O0010O0001N1N2M4L3N3L3N2M4L3N2M4O0001L3N2M4M2M4L3N2M4L3N2M4M210O01ON3M2M4L3N2M4L3N2M4Mgji2"}, "image_id": 984, "id": 16237}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 174.0, 44.0, 43.0], "area": 1186, "segmentation": {"size": [512, 512], "counts": "^5n0R?1O010O01O010O01O01O010O01O01O010O01O010O01O01O010O01O010O01O01O001M2M3N3L3N3M2M3N3L3NSjY7"}, "image_id": 984, "id": 16238}, {"iscrowd": 0, "category_id": 1, "bbox": [333.0, 179.0, 57.0, 62.0], "area": 2103, "segmentation": {"size": [512, 512], "counts": "efV54j?2M3N3f@Ej>>SAFj>=SAEj>>SAFj>i0M3N3L3N3N11O010O01O01O010O01O01O010O01O01OM4M2M3N0O03N2M4M2M4M2M3N3L3O20O0001O0M4M2M3N3L3N3L3N2M4MVjl1"}, "image_id": 984, "id": 16239}, {"iscrowd": 0, "category_id": 1, "bbox": [189.0, 188.0, 32.0, 39.0], "area": 674, "segmentation": {"size": [512, 512], "counts": "dfn21l?3N3M2M3N3L3N3M2M3N3O010O0010O0010O010O0n@UOo>o00HQA]Oo>`0TA@m>=VACi>;YAEg>8]AHc>7]AFf>;YACj><=Ea@4f?NhYa4"}, "image_id": 984, "id": 16240}, {"iscrowd": 0, "category_id": 1, "bbox": [141.0, 199.0, 31.0, 28.0], "area": 553, "segmentation": {"size": [512, 512], "counts": "ifV23j?3N3L3M4L3N2N3O00010O00010O0010O0010O0001L3M310O010Ec@1]?Me@3[?Ji@4b?M3NZiY5"}, "image_id": 984, "id": 16241}, {"iscrowd": 0, "category_id": 1, "bbox": [390.0, 212.0, 55.0, 65.0], "area": 2159, "segmentation": {"size": [512, 512], "counts": "XXS62l?3M2M3N3WOEiA=U>EhA?T>DjA>T>EhA>V>DhA?T>EiA=U>EhA?U>CiA?W>f00O0010O010O0010O0010O010O0001L0100O0103M201O01O01O010N1N3L3N2N3L3N3WOPA=S?@QA=Q?AQA=[?L3N2N3L3NlXQ1"}, "image_id": 984, "id": 16242}, {"iscrowd": 0, "category_id": 1, "bbox": [177.0, 216.0, 31.0, 33.0], "area": 641, "segmentation": {"size": [512, 512], "counts": "Wgh2430_?3_@O_?:010O0001L3N2M4L310O00010O01O01O01O010O0O1M4L3M3M4L3N3L3MSig4"}, "image_id": 984, "id": 16243}, {"iscrowd": 0, "category_id": 1, "bbox": [245.0, 217.0, 29.0, 28.0], "area": 494, "segmentation": {"size": [512, 512], "counts": "[gj32k?3N3M2M4M2M3N3N1010O010O00010O010O00010O0O2M2M4M2M3N3L3NQif3"}, "image_id": 984, "id": 16244}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 228.0, 35.0, 46.0], "area": 1115, "segmentation": {"size": [512, 512], "counts": "T7V1j>0010O00010O010O00010O010O00010O00010O010O000N3M2M4L3N2M4M2M3M4M2M4MaX^7"}, "image_id": 984, "id": 16245}, {"iscrowd": 0, "category_id": 1, "bbox": [300.0, 228.0, 32.0, 38.0], "area": 733, "segmentation": {"size": [512, 512], "counts": "PXf42l?2M4M2M4L3N2M4M2M3M4M201O01O01O010O01O01OO2L3N3L3M3N3L3N2M4L3Nghi2"}, "image_id": 984, "id": 16246}, {"iscrowd": 0, "category_id": 1, "bbox": [448.0, 231.0, 49.0, 58.0], "area": 1712, "segmentation": {"size": [512, 512], "counts": "eXP71m?2XOO^A4_>0^A3_>O^A4_>O_A4^>O^A4_>O_A3b>MZA7e>b010O00010O010L3N2M4M2O20O00010O010O00010O0N3M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3M2M3N`X7"}, "image_id": 984, "id": 16247}, {"iscrowd": 0, "category_id": 1, "bbox": [118.0, 232.0, 
50.0, 67.0], "area": 1827, "segmentation": {"size": [512, 512], "counts": "gXk12l?3L3N2N3^@FY?b001O010O0M3N3M2M4M2N3L3N2M4M2N3L3N3M2M310O0010O0010ON3M2N2M4M2N3L3N3L3EYA[Oi>b0ZA\\Oi>a0ZA[Oi>b0=M2N2M4M2N3L`h[5"}, "image_id": 984, "id": 16248}, {"iscrowd": 0, "category_id": 1, "bbox": [234.0, 238.0, 33.0, 34.0], "area": 697, "segmentation": {"size": [512, 512], "counts": "UXe33k?2N3M2IGe@NlA4Q>0aAOI4c>0aAOI5c>NaA?\\>EcA;Z>HgA8U>LjA4S>OnA0P>2oA0n=3nA0R>o0010O0010O0010O0010O01O01O01O010O0N2N3L3N2N0O010O103L3N2M4M2M4M2M4M2M3N3M2M4M2M3NQh[4"}, "image_id": 984, "id": 16250}, {"iscrowd": 0, "category_id": 1, "bbox": [292.0, 261.0, 28.0, 29.0], "area": 500, "segmentation": {"size": [512, 512], "counts": "hXb43j?3N2M4M2M4M2M3O2O010O00010O010O00010O01M2M3N3L3N3L3N2Mggo2"}, "image_id": 984, "id": 16251}, {"iscrowd": 0, "category_id": 1, "bbox": [357.0, 262.0, 32.0, 30.0], "area": 580, "segmentation": {"size": [512, 512], "counts": "hhb52k?4M2M4M2M3N3L3010O0010O0010O0010O0010O0010O0010O0N3L3N2M4M2M4MbWm1"}, "image_id": 984, "id": 16252}, {"iscrowd": 0, "category_id": 1, "bbox": [224.0, 277.0, 58.0, 67.0], "area": 2278, "segmentation": {"size": [512, 512], "counts": "XZ`32k?3N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3M4M2M4O000010O01O01O010O01O01O010O01O01N1M4M2M3N3L3N3L3M3N3L3N3L3N2M4M2M4M2M3N3LPgb3"}, "image_id": 984, "id": 16253}, {"iscrowd": 0, "category_id": 1, "bbox": [501.0, 278.0, 11.0, 36.0], "area": 187, "segmentation": {"size": [512, 512], "counts": "eij72k?3N2M4M2M4M2M3M4HWOUAk0h>8ZG"}, "image_id": 984, "id": 16254}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 279.0, 5.0, 13.0], "area": 36, "segmentation": {"size": [512, 512], "counts": "g8=c?M4M2M3NZWm7"}, "image_id": 984, "id": 16255}, {"iscrowd": 0, "category_id": 1, "bbox": [410.0, 285.0, 32.0, 30.0], "area": 562, "segmentation": {"size": [512, 512], "counts": "_Y]62l?2M3N3M2N3M2N3M2O2O01O01O010O010O010O010O00010ON3L3N3M2N3L3N3MkfR1"}, "image_id": 984, "id": 16256}, {"iscrowd": 0, "category_id": 1, "bbox": [347.0, 286.0, 45.0, 39.0], "area": 880, "segmentation": {"size": [512, 512], "counts": "`i]51m?3L3N3L3N2M4M2O20O0010O0i@\\OR?j00O010O00010O010WOl@d0X?010O010O00010O010O00010O010O00N3M2M4M2M3N3L3Nafk1"}, "image_id": 984, "id": 16257}, {"iscrowd": 0, "category_id": 1, "bbox": [287.0, 297.0, 51.0, 67.0], "area": 1817, "segmentation": {"size": [512, 512], "counts": "dj_42k?3N3L3N2M4L3N3O01Oo@\\Of>e0WA]Oi>c0TAAl>i0010ON2M4M2M4L3N2M4M2M4L3N2010O00010M2M3M4M2M4M2M3M4M2M4M2M3M4M2M3N3L3M4M2M3N`ff2"}, "image_id": 984, "id": 16258}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 308.0, 9.0, 21.0], "area": 117, "segmentation": {"size": [512, 512], "counts": "d9e0\\?O0O1N3L3N3M2M3N[Vk7"}, "image_id": 984, "id": 16259}, {"iscrowd": 0, "category_id": 1, "bbox": [457.0, 308.0, 37.0, 49.0], "area": 981, "segmentation": {"size": [512, 512], "counts": "kjT73k?2M4M2M3N3L3N3L3N2M4M2M4M2M3N3M210O01O000M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3NVf8"}, "image_id": 984, "id": 16260}, {"iscrowd": 0, "category_id": 1, "bbox": [395.0, 313.0, 32.0, 40.0], "area": 724, "segmentation": {"size": [512, 512], "counts": "fjU62l?3L3N2N3M2M4M2N3M2M4M2N2N3O010O010O010ON3L3N2N3M2M4M2Lc@C`?95N3M2NQVZ1"}, "image_id": 984, "id": 16261}, {"iscrowd": 0, "category_id": 1, "bbox": [332.0, 322.0, 51.0, 73.0], "area": 2098, "segmentation": {"size": [512, 512], "counts": "d[V52k?4M2M4M2M3N3L3N3L3N2M4M2M4M2N2M4M2M4M2M3010O01PBUNj=j1TBXNl=o10O01O01O010O01O01O010O0VOTB^On=?UB_On=>UB^On=?UB_On=>UB^On=?UB_Om=?UB^Oo=>UB_Om=?l0L3N2M4M2MeUP2"}, "image_id": 984, "id": 16262}, {"iscrowd": 0, "category_id": 1, 
"bbox": [388.0, 354.0, 58.0, 59.0], "area": 1852, "segmentation": {"size": [512, 512], "counts": "[\\R63j?4M2M4L3N2M4N11O01O001L3O1010O00010O010O0001O0M3M4GoN_AS1^>:M20001O0M4M21O01O01O010O0M3N3L3M3N3L3M4M2M3N3L3M3N3L3M4M2M3MhdP1"}, "image_id": 984, "id": 16263}, {"iscrowd": 0, "category_id": 1, "bbox": [451.0, 364.0, 54.0, 71.0], "area": 1998, "segmentation": {"size": [512, 512], "counts": "PmQ72k?3N3L3N2M4M2N3O01O01N1N3L3RATOf>n0XATOe>V1M2M3N3L3N3L3N2M4M2M4O01O01O01N1N3L3N2M4M2M4M2M3N3L3N3L3N3L3N2M4M2M4M2M3N3L3N[T3"}, "image_id": 984, "id": 16264}, {"iscrowd": 0, "category_id": 1, "bbox": [509.0, 372.0, 3.0, 6.0], "area": 11, "segmentation": {"size": [512, 512], "counts": "hkn71m?3M2\\D"}, "image_id": 984, "id": 16265}, {"iscrowd": 0, "category_id": 1, "bbox": [51.0, 382.0, 70.0, 56.0], "area": 2334, "segmentation": {"size": [512, 512], "counts": "nli01l?3N2M4M2M4M2M3N3L3N3L3O1010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010OO2M2M3N3L3N3N11O010O01O01O010O01O01O010mNdA`0\\>^OfA?]>^OfA`0]>]OfA?]>^OfA`0]>]OfA?o>N2M4M2M4MXSS6"}, "image_id": 984, "id": 16266}, {"iscrowd": 0, "category_id": 1, "bbox": [507.0, 424.0, 5.0, 13.0], "area": 37, "segmentation": {"size": [512, 512], "counts": "bmm71l?4M2M4M2hB"}, "image_id": 984, "id": 16267}, {"iscrowd": 0, "category_id": 1, "bbox": [117.0, 427.0, 30.0, 34.0], "area": 608, "segmentation": {"size": [512, 512], "counts": "Rnj14j?2N2M4M2M4I@j@b0S?8M21O01O010O010O01O01O01O0N3L3N3M2M3N3M2M4M2M_Rf5"}, "image_id": 984, "id": 16268}, {"iscrowd": 0, "category_id": 1, "bbox": [141.0, 433.0, 60.0, 63.0], "area": 1991, "segmentation": {"size": [512, 512], "counts": "nnV22k?4M2M3N3L3N3O01O01O010O01O01O010N1M3N3L3N3L3N2M4M2M4M2N3L3N2O2O010O00010O010O00N3M2M4M2M3N3M2M4M2M3N3L3N3L3N2M4M2N3L3NVRk4"}, "image_id": 984, "id": 16269}, {"iscrowd": 0, "category_id": 1, "bbox": [36.0, 447.0, 62.0, 53.0], "area": 2058, "segmentation": {"size": [512, 512], "counts": "S_b02k?3M3L5L3M3M4L3M3M4O01O01O00010O00010O00010O00010O0M3M3M4L3O1010O00010O00010O00010O00010O00010O000M4K4M3M4L3M3M3M4L3L4M4L3Mja^6"}, "image_id": 984, "id": 16270}, {"iscrowd": 0, "category_id": 1, "bbox": [103.0, 451.0, 28.0, 24.0], "area": 367, "segmentation": {"size": [512, 512], "counts": "^nc13k?2N3L3N2N30O010O010O01O010O01O010O010O01O010OO2L3N3M2NeQn5"}, "image_id": 984, "id": 16271}, {"iscrowd": 0, "category_id": 1, "bbox": [195.0, 456.0, 59.0, 56.0], "area": 1749, "segmentation": {"size": [512, 512], "counts": "noQ32l?2M3N2M3N2N2O11O00001O001O00M3N2M3N2N2M3N2M3N2N2M3N2M3N2M3N2N2O11O001O00001O001O0N3L3N2N3L3N3L3N2N3L3N3L3N3M2M3N3L3N_aP4"}, "image_id": 984, "id": 16272}, {"iscrowd": 0, "category_id": 1, "bbox": [252.0, 472.0, 47.0, 40.0], "area": 1139, "segmentation": {"size": [512, 512], "counts": "o_n31m?2M3M3N2M3001O0000O1N2M3N2M3M3N2M3N2M3N2O11O00001O001O00001O00001O001O0N2N3L3N3L3N2M4M2M4L3NPQZ3"}, "image_id": 984, "id": 16273}, {"iscrowd": 0, "category_id": 1, "bbox": [117.0, 491.0, 28.0, 21.0], "area": 406, "segmentation": {"size": [512, 512], "counts": "noj12k?3N2M3M3N2M3N2O11O00001O00001O001O00001O001O000M4M2M4L`Pg5"}, "image_id": 984, "id": 16274}, {"iscrowd": 0, "category_id": 1, "bbox": [380.0, 506.0, 19.0, 6.0], "area": 75, "segmentation": {"size": [512, 512], "counts": "n_n52k?31O00001O001ON2N200001O001O00001O0NUPh1"}, "image_id": 984, "id": 16275}, {"iscrowd": 0, "category_id": 1, "bbox": [153.0, 277.0, 41.0, 45.0], "area": 727, "segmentation": {"size": [512, 512], "counts": "hi\\21n?1O2N2M3N2N1O2M3N2N2M2O2N1O0O0100000O0100000O010000O01002N2M3N1O2N2M3N2N1O2M3N2NRgn4"}, "image_id": 
985, "id": 16276}, {"iscrowd": 0, "category_id": 1, "bbox": [147.0, 312.0, 55.0, 65.0], "area": 1467, "segmentation": {"size": [512, 512], "counts": "YkY22m?2N1N3N2N2N2N2M201O1N2N2M3N1O2N2N2M2O000AmNRBS1n=oNoAR1Q>POmAo0T>SOjAm0V>UOhAk0W>WOhAi0X>YOeAg0\\>>00O12N2M3N2N2N2M3N1O2N2N2M3N2N2N2N1N3N2N2N2M3N2N2N1Odej4"}, "image_id": 985, "id": 16277}, {"iscrowd": 0, "category_id": 1, "bbox": [137.0, 387.0, 62.0, 66.0], "area": 1955, "segmentation": {"size": [512, 512], "counts": "\\mT21n?2M2O2N2N2N2M3N1O2N2O100000O010000N2M2O2_OVOiAl0U>VOiAl0U>VOhAm0V>TOiAn0T>UOjAl0U>VOiAl0U>a0N2N2M2O00O102N2M3N2N2M20100O1N2M3N2N2HZAROh>n0XAQOj>o04O1O1N2N2N2M2O2Ie@F]?8e@F]?78N2M3NUSl4"}, "image_id": 985, "id": 16278}, {"iscrowd": 0, "category_id": 1, "bbox": [427.0, 396.0, 85.0, 77.0], "area": 3275, "segmentation": {"size": [512, 512], "counts": "Xme61n?2N2M2O2N2N2N2M3N2N1O2M3N2N2N2N1N3N2N2N2M3N1O2000000O1000O100000bAaNY>`1eAbN[>b10O10000000O1N1O2M3N2N2N20O10O10000000N2N1N3N2N2N2N2M2O2N2N2N2N2N20O01N2N2N2M3N1O2N2N2M3N2N1O2N2N2M3N^B"}, "image_id": 985, "id": 16279}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 508.0, 8.0, 4.0], "area": 20, "segmentation": {"size": [512, 512], "counts": "o_R61n?1O1O1001O1O1OQ`i1"}, "image_id": 985, "id": 16280}, {"iscrowd": 0, "category_id": 1, "bbox": [10.0, 0.0, 56.0, 29.0], "area": 879, "segmentation": {"size": [512, 512], "counts": "SP51m?2O2O001O00001O001O00001O001O00001O0g@Gk>:RAIm>7QAKo>5n@OQ?2l@0T?>01O001O0000O1M3N2N2M300001O00001O001O00001O001OO1M3N2M3N2M3M3N2MSPo6"}, "image_id": 986, "id": 16281}, {"iscrowd": 0, "category_id": 1, "bbox": [72.0, 0.0, 35.0, 17.0], "area": 370, "segmentation": {"size": [512, 512], "counts": "UPT11l?3N3O001O00001O001O00001O001O00001O001O00001O001O00001OO1M3N2M3N2M3NR`Z6"}, "image_id": 986, "id": 16282}, {"iscrowd": 0, "category_id": 1, "bbox": [133.0, 0.0, 49.0, 51.0], "area": 1363, "segmentation": {"size": [512, 512], "counts": "a`R22m?2N2N3M2N2N2N2N2N2N2N2N2N2N2N2N2N2O1O1O1O1O1O1O1O1O1O1000000000O1N3M2N2N2UOUA>m>@UA>m>@UA>m>@UA>m>@UA>X?N2N2N2N2N2N2NPoT5"}, "image_id": 986, "id": 16283}, {"iscrowd": 0, "category_id": 1, "bbox": [316.0, 0.0, 25.0, 12.0], "area": 184, "segmentation": {"size": [512, 512], "counts": "PPn4141e?OX@4h?201O00001O00001O00001O00001O00M3M3M3O100001O0P`e2"}, "image_id": 986, "id": 16284}, {"iscrowd": 0, "category_id": 1, "bbox": [339.0, 0.0, 59.0, 40.0], "area": 1153, "segmentation": {"size": [512, 512], "counts": "f`Y52m?2N3M2N2N2KDc@>[?Dd@=Z?6N2N3N1N2N2N2N1O0000000001O1O1O1O100O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O100O1O1OQPi1"}, "image_id": 986, "id": 16285}, {"iscrowd": 0, "category_id": 1, "bbox": [438.0, 0.0, 51.0, 27.0], "area": 822, "segmentation": {"size": [512, 512], "counts": "PPk61o?2N2N2N2N2N2N2N2N2N2N2N2N2NO100O100O100O100O100O100O100O100O100O100O100O100O100O100O100O100O102M3N2Mk_;"}, "image_id": 986, "id": 16286}, {"iscrowd": 0, "category_id": 1, "bbox": [179.0, 8.0, 13.0, 21.0], "area": 243, "segmentation": {"size": [512, 512], "counts": "X`i23m?b0^O0000000000000000000OTA8OKl>NSA90Jk>OTA8OLj>OTAc0j>;O2M2N2N3M01O00010O2N2N3NO01O0000010O0000010O002N2O1N2N3M2O1N3M2N2O2M2N2N3M2O1N3M2N2Oo^a3"}, "image_id": 986, "id": 16288}, {"iscrowd": 0, "category_id": 1, "bbox": [96.0, 16.0, 33.0, 33.0], "area": 552, "segmentation": {"size": [512, 512], "counts": "RQ`12m?2N2N2N2N2N2N2N2N2N2N2N2N2N1O000O11O2N2N2N2N2N2N2N2N2N2N2N2N2N2NS_o5"}, "image_id": 986, "id": 16289}, {"iscrowd": 0, "category_id": 1, "bbox": [203.0, 19.0, 20.0, 21.0], "area": 408, "segmentation": {"size": [512, 512], "counts": 
"d`U3d0\\?000000000000000000000O100000000000000]_`4"}, "image_id": 986, "id": 16290}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 36.0, 79.0, 62.0], "area": 2479, "segmentation": {"size": [512, 512], "counts": "T2l0T?000000000000000G90001O00000001O00000000000000000000000POYAg0P?00000000001O0LVORAj0l>XOTAh0i>9N2MI8@`02M4M2N3L3N2N3N1010O010O00010O010O0010O0010O010O001M2M3N3M2M4M2N3L3N2N3L3Nb^h6"}, "image_id": 986, "id": 16291}, {"iscrowd": 0, "category_id": 1, "bbox": [458.0, 46.0, 49.0, 39.0], "area": 1413, "segmentation": {"size": [512, 512], "counts": "aQU79g?9G8H9G1O000O1000000000O100000O1000000000O1000O100000000003L9H5K0000000000000O010000000000007I9Fd]2"}, "image_id": 986, "id": 16292}, {"iscrowd": 0, "category_id": 1, "bbox": [506.0, 54.0, 6.0, 22.0], "area": 97, "segmentation": {"size": [512, 512], "counts": "gQm73m?8H8H2N0O1ZN"}, "image_id": 986, "id": 16293}, {"iscrowd": 0, "category_id": 1, "bbox": [371.0, 60.0, 64.0, 39.0], "area": 1856, "segmentation": {"size": [512, 512], "counts": "^bi51o?YOgAg0i>00L4G:EXmW2"}, "image_id": 986, "id": 16296}, {"iscrowd": 0, "category_id": 1, "bbox": [270.0, 76.0, 49.0, 41.0], "area": 1274, "segmentation": {"size": [512, 512], "counts": "ZSW45c?9O000001O000001O00000001O00K5H9I61O0000000000010O00000000000001I6O11O00000001O000001O0I7I7H8H8Hj]P3"}, "image_id": 986, "id": 16297}, {"iscrowd": 0, "category_id": 1, "bbox": [111.0, 83.0, 36.0, 41.0], "area": 1108, "segmentation": {"size": [512, 512], "counts": "^cg1:]?9F:H9O01O00000000000001O0001O000000000POZAf0P?001O0000000001O00000001OO1G9FZ]f5"}, "image_id": 986, "id": 16298}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 93.0, 1.0, 9.0], "area": 9, "segmentation": {"size": [512, 512], "counts": "mbo79j<"}, "image_id": 986, "id": 16299}, {"iscrowd": 0, "category_id": 1, "bbox": [64.0, 97.0, 15.0, 18.0], "area": 142, "segmentation": {"size": [512, 512], "counts": "]SP11m?2N2N3M2N3M201O00Fa@4_?Ic@7]?Gf@9`?N3M2Mi\\h6"}, "image_id": 986, "id": 16300}, {"iscrowd": 0, "category_id": 1, "bbox": [462.0, 98.0, 48.0, 50.0], "area": 1531, "segmentation": {"size": [512, 512], "counts": "ZTW73f?7J6I7O100010O00N2L4001O01O000L4J6K501O000001O000001O0001O000001O00PO_Aa0a>YOeAg0\\>ROjAn0d>010O0000_OQA1o>IWA7i>B^A>R?0010I6IRl0"}, "image_id": 986, "id": 16301}, {"iscrowd": 0, "category_id": 1, "bbox": [24.0, 104.0, 32.0, 23.0], "area": 636, "segmentation": {"size": [512, 512], "counts": "bS<P1\\AROd>U1010O01O01O01O01O010O0001M2M3O2O0010O0010ON2M4M2M3M4L3N2O2O001N1M3010ON2M4L3M3N3L3MUkm0"}, "image_id": 986, "id": 16308}, {"iscrowd": 0, "category_id": 1, "bbox": [508.0, 160.0, 4.0, 20.0], "area": 58, "segmentation": {"size": [512, 512], "counts": "]Un76c?7J601oJ"}, "image_id": 986, "id": 16309}, {"iscrowd": 0, "category_id": 1, "bbox": [303.0, 161.0, 21.0, 24.0], "area": 294, "segmentation": {"size": [512, 512], "counts": "deg41m?2M3N3L3N3L3N2N3O010O0001O0M4M2M3N3L3N3Mljm2"}, "image_id": 986, "id": 16310}, {"iscrowd": 0, "category_id": 1, "bbox": [323.0, 161.0, 50.0, 40.0], "area": 1251, "segmentation": {"size": [512, 512], "counts": "ieQ52k?3N2N3L3N3M2M4M2N2M40O010O010O00010O010O010O00010O010OO1N3L3N3O001O01O001L3N3L3N2N3L3N3M2M4M2N2M4MjZU2"}, "image_id": 986, "id": 16311}, {"iscrowd": 0, "category_id": 1, "bbox": [458.0, 164.0, 48.0, 40.0], "area": 1365, "segmentation": {"size": [512, 512], "counts": "SVU71i?7J5J6K5J6M310O0000000010O00000000010O0000000010O0000000010O000QORAl0R?O0001O0001O0001O0000N3J5J6J6Kfj2"}, "image_id": 986, "id": 16312}, {"iscrowd": 0, "category_id": 1, "bbox": [72.0, 165.0, 53.0, 49.0], 
"area": 1445, "segmentation": {"size": [512, 512], "counts": "TVT11m?2M4M2N2M4M2M4M2M3N3M2M4M21O010O01O010O01O010O01O01O010O01O010O01O01O010O01nNVAl0j>ROXAo0m>O01ON3M2M4M2M4M2M3N3M2M4MWZQ6"}, "image_id": 986, "id": 16313}, {"iscrowd": 0, "category_id": 1, "bbox": [127.0, 166.0, 20.0, 24.0], "area": 275, "segmentation": {"size": [512, 512], "counts": "feo13k?2N3L3N3M2M3N3O0010O01ON3M2M4M2N3L3N2MiZf5"}, "image_id": 986, "id": 16314}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 180.0, 5.0, 12.0], "area": 44, "segmentation": {"size": [512, 512], "counts": "d5;e?10N1M3M^Zm7"}, "image_id": 986, "id": 16315}, {"iscrowd": 0, "category_id": 1, "bbox": [225.0, 180.0, 71.0, 40.0], "area": 1483, "segmentation": {"size": [512, 512], "counts": "Xf`34j?2N3L3N2N3O010O010O01O01O010O010O00010O0N3M2N2M4M2N3M2010O0010O010O0010O0010OO1M4M2N3L3N3N10010O0010O010O00010O010O0010O0010O010O00M4M2N3L3Nli[3"}, "image_id": 986, "id": 16316}, {"iscrowd": 0, "category_id": 1, "bbox": [302.0, 190.0, 32.0, 29.0], "area": 557, "segmentation": {"size": [512, 512], "counts": "aVg41l?3N3M2M4M2M3N3M201O01O01O010O010O01O01O010O01O0N2N3L3N3L3N3M2Mlih2"}, "image_id": 986, "id": 16317}, {"iscrowd": 0, "category_id": 1, "bbox": [53.0, 202.0, 36.0, 27.0], "area": 531, "segmentation": {"size": [512, 512], "counts": "efj04j?2N2N3L3N3O0010O00010O010O010O010O00010O010O010O00010O010O0O2M2M4M2N2M]Yc6"}, "image_id": 986, "id": 16318}, {"iscrowd": 0, "category_id": 1, "bbox": [417.0, 220.0, 48.0, 56.0], "area": 1543, "segmentation": {"size": [512, 512], "counts": "Qh`62l?2M3N3L3N3M2M3N3L3N3L3N2M4M2N3N11O010O010O01O01O010O01O01O010O01O01O010\\O^AHb>5aAL^>2eAM\\>OgA1Y>MjA3V>JlA6T>GoA:Q>CRB9Q>DRB:l>L3N2MoWg0"}, "image_id": 986, "id": 16319}, {"iscrowd": 0, "category_id": 1, "bbox": [183.0, 229.0, 24.0, 27.0], "area": 390, "segmentation": {"size": [512, 512], "counts": "ggk22k?4M2M3N3L3N3L3010O0010O0010O0010OM3N3L3N3L3N3LgXh4"}, "image_id": 986, "id": 16320}, {"iscrowd": 0, "category_id": 1, "bbox": [43.0, 231.0, 41.0, 48.0], "area": 1479, "segmentation": {"size": [512, 512], "counts": "Whe0=T??B>M30001O0001O0000000000000000002N01O000000lN]Ak0l>00000000000000000000SOaA:R?00000001O0FRhe6"}, "image_id": 986, "id": 16321}, {"iscrowd": 0, "category_id": 1, "bbox": [213.0, 235.0, 52.0, 42.0], "area": 1225, "segmentation": {"size": [512, 512], "counts": "QhZ33k?2M3N3M2M4O010O00010O010N1N3L3N2N3L3010O010O0010O0010O010O0010O0010O010O0010O0010N1N3L3N3L3N2N3L3N3M2MZXk3"}, "image_id": 986, "id": 16322}, {"iscrowd": 0, "category_id": 1, "bbox": [363.0, 245.0, 25.0, 25.0], "area": 375, "segmentation": {"size": [512, 512], "counts": "The52l?3L3N3M2M4M2O110O010O00010O010O010O00M4M2N3L3N2MWhm1"}, "image_id": 986, "id": 16323}, {"iscrowd": 0, "category_id": 1, "bbox": [295.0, 255.0, 44.0, 51.0], "area": 1305, "segmentation": {"size": [512, 512], "counts": "Vic41m?2M4M2M3N3L3N3L3N2M4M2M4M2N2M4M2010O0010O0010O010O00010O0M4M2M3N3M2M4M2M3N3L3N3L3N2M4MhWf2"}, "image_id": 986, "id": 16324}, {"iscrowd": 0, "category_id": 1, "bbox": [263.0, 264.0, 26.0, 27.0], "area": 425, "segmentation": {"size": [512, 512], "counts": "hhS43k?3L3N2N3L3N3M20010O0010O010O0010O0010L3N3M2M3N3L3NcW_3"}, "image_id": 986, "id": 16325}, {"iscrowd": 0, "category_id": 1, "bbox": [338.0, 268.0, 55.0, 43.0], "area": 1325, "segmentation": {"size": [512, 512], "counts": "QYY54j?2M4M2N2M4M2M4M2N3O01O010O01O01O010O010O01O01O010O01O01O010O01O01O010OO2O001O01O010O01O01L3N3L3N3L3N2N3L3N3LVWk1"}, "image_id": 986, "id": 16326}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 269.0, 28.0, 26.0], "area": 
453, "segmentation": {"size": [512, 512], "counts": "e8:c?3N2M4O010O01O01O010O01O01O01O010O01O01O010N1M3N3L3N3L\\ga7"}, "image_id": 986, "id": 16327}, {"iscrowd": 0, "category_id": 1, "bbox": [411.0, 278.0, 56.0, 53.0], "area": 1667, "segmentation": {"size": [512, 512], "counts": "Tj]61i?7H7J6I7I7J6I701O0001O00000001O0001O000001O0001oNbA>^>[OiAe0W>UOoAk0c>010O000000000N3M2ML5L4K5K8M2010O0010O010O0010O0010L3N3M2MmVf0"}, "image_id": 986, "id": 16328}, {"iscrowd": 0, "category_id": 1, "bbox": [29.0, 285.0, 17.0, 14.0], "area": 137, "segmentation": {"size": [512, 512], "counts": "Ti>1l?4M2N3O00010O010O0010O010N1N3M2NPgX7"}, "image_id": 986, "id": 16329}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 286.0, 52.0, 42.0], "area": 1321, "segmentation": {"size": [512, 512], "counts": "fim22k?3N3L3N2N3N1010O01O0O1M4M2M4M2010O0010O0010O0010O0M4M2010O00010O010O0010O0010O001M2N2M4UOPAb0X?N2M4M2M4M2N2MfVX4"}, "image_id": 986, "id": 16330}, {"iscrowd": 0, "category_id": 1, "bbox": [250.0, 288.0, 50.0, 30.0], "area": 817, "segmentation": {"size": [512, 512], "counts": "fYm31m?2M4M2O101L3N3M2M4M2N201O01O0M3N3L301O00010O010O010O00010O010O010O00010O010O010O00010O0O2L3N3M2M3NcfY3"}, "image_id": 986, "id": 16331}, {"iscrowd": 0, "category_id": 1, "bbox": [55.0, 297.0, 18.0, 14.0], "area": 137, "segmentation": {"size": [512, 512], "counts": "_ik01l?4M2O2O0010O0010O010O010O0010M2N3McVk6"}, "image_id": 986, "id": 16332}, {"iscrowd": 0, "category_id": 1, "bbox": [2.0, 301.0, 55.0, 58.0], "area": 1717, "segmentation": {"size": [512, 512], "counts": "ZZ12l?3L3N3M2M3N3O0010O010O0001i@]OP?b0n@@R?a0k@BU?d001O01O010OO2M2N3L3N2N3L3N3M2N3O01O010O010O01O01O010O010YOgAEY>9iAHV>6lAJU>2oAJT>4nAJU>3nAJT>3oAJU>3nAJT>4nAJT>3o0M[VS7"}, "image_id": 986, "id": 16333}, {"iscrowd": 0, "category_id": 1, "bbox": [324.0, 303.0, 42.0, 60.0], "area": 1332, "segmentation": {"size": [512, 512], "counts": "P[R53k?2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3M2000N3M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N[fX2"}, "image_id": 986, "id": 16334}, {"iscrowd": 0, "category_id": 1, "bbox": [195.0, 330.0, 29.0, 27.0], "area": 468, "segmentation": {"size": [512, 512], "counts": "ljQ31m?2M4M2M4M2M3N3N101O01O010O01O01O010O01O01N1M4M2M3N3L3N2Mbe_4"}, "image_id": 986, "id": 16335}, {"iscrowd": 0, "category_id": 1, "bbox": [361.0, 346.0, 73.0, 51.0], "area": 1759, "segmentation": {"size": [512, 512], "counts": "_kd53k?2M3N3M2M40O00010O010O010O00010O010O010O00010O010O0001N1N3L3N3L3N2N3L310O010O00010O010O010O00010O01mNWAn0h>POZAP1k>010O010O010O00010O01YORA;m>CVAAWA`0h>]O[Ac0R?N1N3L3N2M4M2NWdV1"}, "image_id": 986, "id": 16336}, {"iscrowd": 0, "category_id": 1, "bbox": [215.0, 347.0, 29.0, 34.0], "area": 603, "segmentation": {"size": [512, 512], "counts": "ck[32d?0a@4[?0b@2\\?;M2N3L3N210O010O01O01O010O01O010M2N2M4M2N3M2M4M2N2ModU4"}, "image_id": 986, "id": 16337}, {"iscrowd": 0, "category_id": 1, "bbox": [154.0, 350.0, 46.0, 57.0], "area": 1626, "segmentation": {"size": [512, 512], "counts": "d[]21<2Q?2k@1R?2k@2Q?1m@1P?a0M4M2M4M21O01O01O01`AhNV>X1gAkNZ>T1dAnN\\>\\1O010O00010O00010O00010O01O000M4L3M3M4M2M3M4L3M3M4L3N2M4L3Mhdk4"}, "image_id": 986, "id": 16338}, {"iscrowd": 0, "category_id": 1, "bbox": [428.0, 351.0, 29.0, 28.0], "area": 469, "segmentation": {"size": [512, 512], "counts": "b[f61m?2M4M2M4M2M3N3M210O00010O010O00010O010O01M2M3N3M2M4M2M3NkTk0"}, "image_id": 986, "id": 16339}, {"iscrowd": 0, "category_id": 1, "bbox": [350.0, 382.0, 20.0, 22.0], "area": 260, "segmentation": {"size": [512, 512], "counts": "^\\_52l?2M3N3L3N3L3O1010O0010O00O2L3N3L3N2M4MocV2"}, 
"image_id": 986, "id": 16340}, {"iscrowd": 0, "category_id": 1, "bbox": [222.0, 384.0, 58.0, 55.0], "area": 1499, "segmentation": {"size": [512, 512], "counts": "S]_31m?3M2N3M2N3N1N3M30O10O010O010O010O010O10OO2M2O2M2N3M2N3M2O2M3M2N3M2N3M12M2O2M2N3M2N3M2N3N1N3M3M2N3M2O2M2N3M2N3M2N3Necc3"}, "image_id": 986, "id": 16341}, {"iscrowd": 0, "category_id": 1, "bbox": [368.0, 385.0, 29.0, 28.0], "area": 473, "segmentation": {"size": [512, 512], "counts": "c\\h51m?3M2M3N3M2N3L3N3O0010O0010O010O010O00010ON3M2N3L3N3M2M3NiSi1"}, "image_id": 986, "id": 16342}, {"iscrowd": 0, "category_id": 1, "bbox": [282.0, 401.0, 24.0, 25.0], "area": 373, "segmentation": {"size": [512, 512], "counts": "Q]]43j?3N3L3N2M4L301O01O010O00010O01O01M2N3L3N2M4L3N[cV3"}, "image_id": 986, "id": 16343}, {"iscrowd": 0, "category_id": 1, "bbox": [300.0, 409.0, 47.0, 57.0], "area": 1536, "segmentation": {"size": [512, 512], "counts": "T^f43m?2M3ZOJ]A8a>K]A7`>L]A7a>J^A8_>K^A6b>L\\A4c>OZA1g>0XAOh>e00OHmNdAT1\\>nNaAR1_>QO_Ao0`>81O010O010O010O00011N2O0O010O0010O0100O3N2M2O2@RAIQ?4RAIQ?5QAIP?5RAIQ?5QAIQ?4`0M3NcRb2"}, "image_id": 986, "id": 16344}, {"iscrowd": 0, "category_id": 1, "bbox": [347.0, 422.0, 61.0, 60.0], "area": 1806, "segmentation": {"size": [512, 512], "counts": "en]53k?2M4M2M3N3L3N3L3N2M4M2M4M2M3N3O010O00010O010O00010OO2M2M3N3L3N3N1001L3N2M2O0O0100O4M2M4M2N201O010O01O01O0O2M2M3N3L3N3L3Ndbc1"}, "image_id": 986, "id": 16345}, {"iscrowd": 0, "category_id": 1, "bbox": [290.0, 430.0, 12.0, 17.0], "area": 114, "segmentation": {"size": [512, 512], "counts": "k]a41l?4L3M3M4N101ON3L3M3M4MbbX3"}, "image_id": 986, "id": 16346}, {"iscrowd": 0, "category_id": 1, "bbox": [497.0, 453.0, 15.0, 41.0], "area": 365, "segmentation": {"size": [512, 512], "counts": "Uoh74j?2M3M4L3N2M4L3M3N3L3M4M20001jA"}, "image_id": 986, "id": 16347}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 465.0, 6.0, 16.0], "area": 52, "segmentation": {"size": [512, 512], "counts": "a>`0a?M2M4M2M3N_al7"}, "image_id": 986, "id": 16348}, {"iscrowd": 0, "category_id": 1, "bbox": [80.0, 474.0, 28.0, 28.0], "area": 469, "segmentation": {"size": [512, 512], "counts": "[_X12l?3M2N3M2M4M2N3M200010O010O010O01O010O01M2N3M2M4M2N3M2NPaY6"}, "image_id": 986, "id": 16349}, {"iscrowd": 0, "category_id": 1, "bbox": [12.0, 483.0, 50.0, 29.0], "area": 983, "segmentation": {"size": [512, 512], "counts": "o_61m?2M3N21O0000M3N2M3M3N21O00001OO1N2M3N2M31O00001O001O00001O001O00O1M3O1001O00001O000N3L3N3L3M3N3L3Nj`P7"}, "image_id": 986, "id": 16350}, {"iscrowd": 0, "category_id": 1, "bbox": [171.0, 508.0, 10.0, 4.0], "area": 22, "segmentation": {"size": [512, 512], "counts": "ooe21l?31O00001O00001O00QPU5"}, "image_id": 986, "id": 16351}, {"iscrowd": 0, "category_id": 1, "bbox": [76.0, 509.0, 8.0, 3.0], "area": 16, "segmentation": {"size": [512, 512], "counts": "n_V12m?1001O00001O00Q`e6"}, "image_id": 986, "id": 16352}, {"iscrowd": 0, "category_id": 1, "bbox": [9.0, 0.0, 44.0, 32.0], "area": 983, "segmentation": {"size": [512, 512], "counts": "``44i?3N3DIj@:V?Fh@3n@74Hl>3n@74Hl>a0RAAk>b0SA@k>l0N2N2N2N2N2N2N20N2N2N2N2N000000000000A"}, "image_id": 987, "id": 16356}, {"iscrowd": 0, "category_id": 1, "bbox": [181.0, 17.0, 18.0, 23.0], "area": 238, "segmentation": {"size": [512, 512], "counts": "Saj21l?4L3N3L3N2M4M21O01O01M2N2M4M2M4M2M^_l4"}, "image_id": 987, "id": 16357}, {"iscrowd": 0, "category_id": 1, "bbox": [125.0, 19.0, 51.0, 52.0], "area": 1446, "segmentation": {"size": [512, 512], "counts": 
"gan13k?3L3N2M4M201M2N2M4O00010O0010O0010O001M2M3N3L3N3L3N2M4O0010O0010O00O2L3M3N3L3N3L3N2M4M2M4M2M3M4M2M4MVoW5"}, "image_id": 987, "id": 16358}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 30.0, 6.0, 16.0], "area": 52, "segmentation": {"size": [512, 512], "counts": "n0`0`?M4M2M3N3LSol7"}, "image_id": 987, "id": 16359}, {"iscrowd": 0, "category_id": 1, "bbox": [185.0, 36.0, 49.0, 55.0], "area": 1557, "segmentation": {"size": [512, 512], "counts": "\\bl21m?3L3N3M2N3M2M3N3M2N3L3N3M2N3L3N2N3M2010O01O010O010O01O010O01O01L3N3M2N3L3N3M2N2N3L3N3M2N3L3N3M2NanZ4"}, "image_id": 987, "id": 16360}, {"iscrowd": 0, "category_id": 1, "bbox": [45.0, 42.0, 59.0, 65.0], "area": 1985, "segmentation": {"size": [512, 512], "counts": "dbf02l?2N3_OKRA8l>JRA9k>JRA8l>JRA9j>`0N3M2O1010O010O]AkN[>V1bAlN_>[1O0010O010O010O0010O010O0010M000O010000000O102N3M2M4M2N3M2N2M4O0010O01M2N3M2M4M2N2N3M_n[6"}, "image_id": 987, "id": 16361}, {"iscrowd": 0, "category_id": 1, "bbox": [162.0, 67.0, 12.0, 16.0], "area": 113, "segmentation": {"size": [512, 512], "counts": "_Ra21m?3L3N3L3N2010O0M3N3L3NmmX5"}, "image_id": 987, "id": 16362}, {"iscrowd": 0, "category_id": 1, "bbox": [111.0, 82.0, 41.0, 52.0], "area": 1235, "segmentation": {"size": [512, 512], "counts": "icg14j?2N3L3N3L3N2M4M2JXORAk0j>7N2M4M2O2O000N3N1010O01O01O0N3L3N2M4M2M4M2N2M4M2M4M2M4M2N2M4MWmc5"}, "image_id": 987, "id": 16363}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 89.0, 13.0, 28.0], "area": 225, "segmentation": {"size": [512, 512], "counts": "i2k0U?01O01ON3L3N3M2M3N3L3N3LU]i7"}, "image_id": 987, "id": 16364}, {"iscrowd": 0, "category_id": 1, "bbox": [146.0, 93.0, 58.0, 64.0], "area": 2115, "segmentation": {"size": [512, 512], "counts": "oSY23j?3N3M2M3N3g@Bk>a0SAAk>b0RAAk>k0M4M2N3L3N2bAbNW>e110O01O01O010O01O01O010O010O01O0`NiAX1V>fNlAZ1U>bNoA^1Y>01O0N2M4M2N1N10O201010O0M4M2M4M2N2M4M2N3L3N2M4M2Ncli4"}, "image_id": 987, "id": 16365}, {"iscrowd": 0, "category_id": 1, "bbox": [83.0, 95.0, 25.0, 26.0], "area": 395, "segmentation": {"size": [512, 512], "counts": "`cY13j?3N2N3L3N3M2M4O00010O010O01O01O01N1M4M2N3L3N2M4MllY6"}, "image_id": 987, "id": 16366}, {"iscrowd": 0, "category_id": 1, "bbox": [309.0, 96.0, 53.0, 47.0], "area": 1435, "segmentation": {"size": [512, 512], "counts": "Pdj41m?2M4L3N2M4M2M4L3N2M4M200010O01O01O010O00010O01O01O010O00010O01O01ON3M2M4L2OO3M3N3L3M3N3L3M3N3L3M4M2M3M4MmlZ2"}, "image_id": 987, "id": 16367}, {"iscrowd": 0, "category_id": 1, "bbox": [27.0, 101.0, 23.0, 29.0], "area": 407, "segmentation": {"size": [512, 512], "counts": "jc=3j?4M2M4M2M3N3L3N2O20O01O01O001L3N2M4M2M4L3N2MilV7"}, "image_id": 987, "id": 16368}, {"iscrowd": 0, "category_id": 1, "bbox": [369.0, 119.0, 62.0, 44.0], "area": 1513, "segmentation": {"size": [512, 512], "counts": "cdh54l?5J7J1O000BEWA:j>KQA5n>?0000000O10O100000O1000O100000O10O100000O1000O1000O100000O1000O1000O1000O0[OQA;n>GVA4i>N\\ALc>6aAE^>=a0200000O1N2N2N3M2N2N2N2Nn[X1"}, "image_id": 987, "id": 16369}, {"iscrowd": 0, "category_id": 1, "bbox": [82.0, 124.0, 26.0, 26.0], "area": 416, "segmentation": {"size": [512, 512], "counts": "\\TY13k?2M4L3N3L3M301O0010O010O00010O010O0O2M2M3N3L3N3L3NokY6"}, "image_id": 987, "id": 16370}, {"iscrowd": 0, "category_id": 1, "bbox": [287.0, 125.0, 20.0, 23.0], "area": 278, "segmentation": {"size": [512, 512], "counts": "^d_42k?4M2M3M4M2M4N10010O0001O0N3L3M3N3L3N2MR\\V3"}, "image_id": 987, "id": 16371}, {"iscrowd": 0, "category_id": 1, "bbox": [17.0, 142.0, 47.0, 58.0], "area": 1457, "segmentation": {"size": [512, 512], "counts": 
"fe82m?2N1BLn@7P?Kn@7P?Kn@7o>Ko@7g>C\\A8K7f>D]A7K7f>D\\Ak0b>WO\\Aj0b>ROZAn0m>0O0010O0010O010O00010O010O001N1N2M40O010M2M3N3M2M4M2M4M2M3N3M2M4MeZ]5"}, "image_id": 987, "id": 16374}, {"iscrowd": 0, "category_id": 1, "bbox": [310.0, 157.0, 24.0, 19.0], "area": 267, "segmentation": {"size": [512, 512], "counts": "WUk42k?3N3L3N3O01O01O010O01O010O01O010O01O001L3N2N3Lnjh2"}, "image_id": 987, "id": 16375}, {"iscrowd": 0, "category_id": 1, "bbox": [274.0, 166.0, 50.0, 43.0], "area": 1248, "segmentation": {"size": [512, 512], "counts": "nUY43j?3N3M2M4M2M3N3L3N3M20010O0010O010O0010O0010O0010O0010O0010O010O0010O0010O0010O0M4M2M3N3M2M4M2M4M2N\\jm2"}, "image_id": 987, "id": 16376}, {"iscrowd": 0, "category_id": 1, "bbox": [60.0, 174.0, 29.0, 26.0], "area": 453, "segmentation": {"size": [512, 512], "counts": "gUn02l?2N3M2]@JZ?8d@J[??O01O010O01O01O010O01O01O010O010O000N3M2N3L3N2M\\Zc6"}, "image_id": 987, "id": 16377}, {"iscrowd": 0, "category_id": 1, "bbox": [347.0, 177.0, 60.0, 52.0], "area": 1675, "segmentation": {"size": [512, 512], "counts": "bf]52l?2M3N3M2N3L3N3M2N3M2M310O0010O010O010O010O00010O010O010O010O00O2M2M4M2N3M2M4M20M4M2N2N3M2M4M2N3M2M4N101M2N2N3M2M4M2N3MXZd1"}, "image_id": 987, "id": 16378}, {"iscrowd": 0, "category_id": 1, "bbox": [428.0, 194.0, 62.0, 49.0], "area": 1623, "segmentation": {"size": [512, 512], "counts": "^Vf61n?3M2N2N2N2N3M2N2N2N2N3O0000001O000O1N2N3M2N2O1N3M2N2O1000O2N1N2N2N10O00000002O1N3M20001O01OWOPAa0P?]ORAc0o>ZOSAf0T?1O01N1O1N2N2N2N2N3M2N2N2Nih:"}, "image_id": 987, "id": 16379}, {"iscrowd": 0, "category_id": 1, "bbox": [84.0, 200.0, 67.0, 52.0], "area": 1730, "segmentation": {"size": [512, 512], "counts": "UWZ11l?3N2M4M2M4L3N2M4M2M4M2O110O010O00010O01O01O010O01O0ROSAj0Q?0010O0010O0010O0010O00010O010O00010O010O00010O01O010O01O01O010L3N2M4L3N3L3M3N3LPYd5"}, "image_id": 987, "id": 16380}, {"iscrowd": 0, "category_id": 1, "bbox": [251.0, 216.0, 78.0, 40.0], "area": 1812, "segmentation": {"size": [512, 512], "counts": "]gm33k?2M4M2N3L3N2M4O010O0010O0010O0010O0001M2M4M200010O010O01O01O010O010O00010O01M2M2O03M2N3L3N2N3M201O01O010O01O010O01O01O010O01O01O010O01O01O010N1N3L3N2M4M2MlXk2"}, "image_id": 987, "id": 16381}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 220.0, 32.0, 30.0], "area": 576, "segmentation": {"size": [512, 512], "counts": "`WR61m?2M4M2N2M4M2N3L3O2O00010O010O010O00010O010O01O0N2M4M2N3L3N3M2Mnh]1"}, "image_id": 987, "id": 16382}, {"iscrowd": 0, "category_id": 1, "bbox": [344.0, 232.0, 29.0, 29.0], "area": 522, "segmentation": {"size": [512, 512], "counts": "jW\\53k?2M4M2M4M2N2M4N110O00010O010O00010O010O0001L3N3L3N2N3L3NbXU2"}, "image_id": 987, "id": 16383}, {"iscrowd": 0, "category_id": 1, "bbox": [421.0, 247.0, 29.0, 18.0], "area": 252, "segmentation": {"size": [512, 512], "counts": "ngb61m?2M4M201O01O01O010O01O01O010O01O01O010O01O01O010O01N1M3NRhn0"}, "image_id": 987, "id": 16384}, {"iscrowd": 0, "category_id": 1, "bbox": [449.0, 256.0, 32.0, 22.0], "area": 338, "segmentation": {"size": [512, 512], "counts": "XhP73j?3N2M4O010O010O00010O010O00010O010O0010O0010O0010O0010OM4M2M3NgW?"}, "image_id": 987, "id": 16385}, {"iscrowd": 0, "category_id": 1, "bbox": [234.0, 269.0, 59.0, 40.0], "area": 1391, "segmentation": {"size": [512, 512], "counts": "nXe33k?3M2M4M2N3L3O110O010O0010OO2M2N201O010O010O00010O010O010O00010O010O010O01O01O001M2N3N110O01O0O1M4M2N3L3N3M2N2M4M2N3L[W]3"}, "image_id": 987, "id": 16386}, {"iscrowd": 0, "category_id": 1, "bbox": [352.0, 273.0, 156.0, 65.0], "area": 4792, "segmentation": {"size": [512, 512], "counts": 
"mY`52k?4M2M3N3L3IAj@b0S?8L3M4L3N2M4L3M3O20M2M3M4M2M3M4N11O01O01O010O01O01O01O0N00O03M301O010O01O01O010O01O01O010O01O010N1N2N3O0010O0010O0010O001L3N210O0[Ol@>U?^On@c0X?Oe@]OY?e0010O0010O0011N1O01O01O01O0f@\\OY?f0O01O01O0N2M4N101O01O010OSAUOe>k0XAYOg>P11O010O00010O0010O0010O0N3N100010O01O01O01O01O0N3M2N2M01O3001O01O01O010O00010O01O000O2O000FYA\\Og>`0]A_Oc>>`AB`>;c0M4L3N2Mgf1"}, "image_id": 987, "id": 16387}, {"iscrowd": 0, "category_id": 1, "bbox": [427.0, 273.0, 13.0, 14.0], "area": 118, "segmentation": {"size": [512, 512], "counts": "ihe63i?4M3O2O01O01O01O01OM3M4K`gS1"}, "image_id": 987, "id": 16388}, {"iscrowd": 0, "category_id": 1, "bbox": [321.0, 280.0, 31.0, 29.0], "area": 548, "segmentation": {"size": [512, 512], "counts": "[iP51l?3M4M2M4M2M3M4O0010O0010O0010O00010O0010O0010O0N3L3M3N3L3N2MSg_2"}, "image_id": 987, "id": 16389}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 283.0, 17.0, 45.0], "area": 401, "segmentation": {"size": [512, 512], "counts": "k8]1c>N3M2M4M2N3M2M4M2N2N3L3N3M2N3L3NPWg7"}, "image_id": 987, "id": 16390}, {"iscrowd": 0, "category_id": 1, "bbox": [51.0, 300.0, 58.0, 48.0], "area": 1436, "segmentation": {"size": [512, 512], "counts": "Wji01l?4M2N2M4M2N3L3N3M2M3N3N1010O01O01O010O010O01O01O010O0ROTAi0Q?0010O0010O0010O0010O010O0010O0010O010ON2N1O0O3N3M2M3L5L3N3MRVY6"}, "image_id": 987, "id": 16391}, {"iscrowd": 0, "category_id": 1, "bbox": [7.0, 307.0, 34.0, 25.0], "area": 458, "segmentation": {"size": [512, 512], "counts": "Pj31m?2M4M2M4M2O110O010O00010O010O00010O0010O0010O0010O0010O00N3M2M4M2MUV[7"}, "image_id": 987, "id": 16392}, {"iscrowd": 0, "category_id": 1, "bbox": [211.0, 325.0, 66.0, 56.0], "area": 2121, "segmentation": {"size": [512, 512], "counts": "UkY32k?4M2N2M4M2N3L3N2M4M2N3L3N2O2O010O0010O0010O010O00010O010O010O00010O010O00010O010O01O01O010O010O00010O010O01N1M3N3L3TOPAf0V?M4L3N2M4M2M4MTUe3"}, "image_id": 987, "id": 16393}, {"iscrowd": 0, "category_id": 1, "bbox": [10.0, 327.0, 42.0, 26.0], "area": 511, "segmentation": {"size": [512, 512], "counts": "bZ52l?2M4M2M3O2O010O01O01O010O00010O01O01O010O01N10010O0D^@;c?01O01O01O010O01O01O010O0M3N3L\\eU7"}, "image_id": 987, "id": 16394}, {"iscrowd": 0, "category_id": 1, "bbox": [280.0, 339.0, 28.0, 27.0], "area": 456, "segmentation": {"size": [512, 512], "counts": "U[\\42l?2M4M2M3N3L3N3N10010O0010O0010O0010O001M2N2M4M2M4M2M3NXeU3"}, "image_id": 987, "id": 16395}, {"iscrowd": 0, "category_id": 1, "bbox": [481.0, 342.0, 13.0, 15.0], "area": 113, "segmentation": {"size": [512, 512], "counts": "Pk`72l?2M4L3N30O0010O00M4M2M4MXe8"}, "image_id": 987, "id": 16396}, {"iscrowd": 0, "category_id": 1, "bbox": [397.0, 348.0, 90.0, 79.0], "area": 2730, "segmentation": {"size": [512, 512], "counts": "clV61o?2M2N2N2N2N3N1N2N2N2N3M2O1N2N2N3M2N2O1N1MiN\\AW1d>210O0000000001O01O00000001O01O00000000010O0002N2NO1N3M2O1N2N2N2101N1O1^OcA@_>?bA_O_>`0cA^O^>a0eA\\O]>c0dA\\O\\>c0gAZOZ>e0hAYOZ>e0a010O00010O00010O0010O0010O00010O00010L3M4M2M3M4M2MjT<"}, "image_id": 987, "id": 16397}, {"iscrowd": 0, "category_id": 1, "bbox": [494.0, 357.0, 18.0, 30.0], "area": 364, "segmentation": {"size": [512, 512], "counts": "j[g72k?3N3L3N3L3M3N3L3010O00010O010O0001gD"}, "image_id": 987, "id": 16398}, {"iscrowd": 0, "category_id": 1, "bbox": [20.0, 360.0, 49.0, 46.0], "area": 1386, "segmentation": {"size": [512, 512], "counts": "T\\:1m?2M4M2M4M2M3N3L3N3L3N201O010O01O01O01O010ON3M2O110O0010O0010O0010O0010O0010nNWAk0i>RO[Am0l>1O010O]OQA3Q?JSA3P?JRA3Q?JSA3^?M3NlSm6"}, "image_id": 987, "id": 16399}, {"iscrowd": 0, "category_id": 1, "bbox": [357.0, 378.0, 
26.0, 32.0], "area": 593, "segmentation": {"size": [512, 512], "counts": "nkb52n?4K5L5K4L4L5J1000000O0100000O0100000002M5L5K4L4K6K^SP2"}, "image_id": 987, "id": 16400}, {"iscrowd": 0, "category_id": 1, "bbox": [186.0, 380.0, 60.0, 44.0], "area": 1586, "segmentation": {"size": [512, 512], "counts": "e\\m21m?2M4M2M3N3L3N3L3M3N3O0010O00010O01O01O010O01O01O010O01O01O010O01O01O010O01L3N2M4N1010O0010ON2M4M2M4M2M3N3L3N3L3N2M4L3NncT4"}, "image_id": 987, "id": 16401}, {"iscrowd": 0, "category_id": 1, "bbox": [479.0, 385.0, 28.0, 29.0], "area": 479, "segmentation": {"size": [512, 512], "counts": "el_71l?3N2N3L3N3L3N2N3N1010O01O01O010O01O010ON3L3N3L3N2N3L3NjS2"}, "image_id": 987, "id": 16402}, {"iscrowd": 0, "category_id": 1, "bbox": [268.0, 393.0, 37.0, 38.0], "area": 855, "segmentation": {"size": [512, 512], "counts": "S]V42l?2M4M2M4M2M3N3L3N3L3O110O010O00010O010O00010O010O0001M2N3L3N2M4M2M4M2M3N_SW3"}, "image_id": 987, "id": 16403}, {"iscrowd": 0, "category_id": 1, "bbox": [3.0, 411.0, 50.0, 51.0], "area": 1316, "segmentation": {"size": [512, 512], "counts": "im13k?2N3L3N2N3L3N3M2M4M2N2M4M210O0010O0010O010O0010lNYAm0g>QO\\An0k>1O010O010O01O01O010O010O01YORA:n>CUA>j>@XA`0i>]OZAc0Q?N3M2N3L3N2N3LVRU7"}, "image_id": 987, "id": 16404}, {"iscrowd": 0, "category_id": 1, "bbox": [448.0, 424.0, 16.0, 22.0], "area": 202, "segmentation": {"size": [512, 512], "counts": "h]P73j?3M4L3M3M4N10010OO1M4L3M3M4L3Ngbg0"}, "image_id": 987, "id": 16405}, {"iscrowd": 0, "category_id": 1, "bbox": [171.0, 428.0, 60.0, 47.0], "area": 1653, "segmentation": {"size": [512, 512], "counts": "Tne21m?3M2M3N3M2M4M2M4M2N2O2O010O00010O010O0010O0010O010O0010O0010O010O00010O010N1N3M2O1010O010O00010O010O01O01ZOWA3i>KZA2i>JZA3i>KYA3j>JYA2j>KYA3\\?MgQ\\4"}, "image_id": 987, "id": 16406}, {"iscrowd": 0, "category_id": 1, "bbox": [473.0, 428.0, 39.0, 66.0], "area": 1373, "segmentation": {"size": [512, 512], "counts": "Sn\\72m?2N2N2N2N2N2N2N3M2N2N2N2N2N2N2N2N2N3M2N2N2N2O1N20000000N2O1001O00000aA`N]>b10001O0UB"}, "image_id": 987, "id": 16407}, {"iscrowd": 0, "category_id": 1, "bbox": [259.0, 442.0, 29.0, 30.0], "area": 536, "segmentation": {"size": [512, 512], "counts": "`nQ41l?4L3M3M4M2M3M4M2010O0010O00010O00010O0001M2M3M4L3M3N3L3MRb_3"}, "image_id": 987, "id": 16408}, {"iscrowd": 0, "category_id": 1, "bbox": [358.0, 442.0, 88.0, 37.0], "area": 2499, "segmentation": {"size": [512, 512], "counts": "n]c5m0S?00000000000000000O10O1000000000000000000000000000N2000000O1000000000000000000000000000000000O100000000006J00000000010O0L4K5J6J6J7M200001O0001O0K5J6K5JSX4"}, "image_id": 988, "id": 16418}, {"iscrowd": 0, "category_id": 1, "bbox": [16.0, 0.0, 34.0, 12.0], "area": 224, "segmentation": {"size": [512, 512], "counts": "PP81o?001O00001O001O00001O001O001O00001O001O00001O001O001O000000M3N2M3NRPW7"}, "image_id": 989, "id": 16419}, {"iscrowd": 0, "category_id": 1, "bbox": [62.0, 0.0, 31.0, 16.0], "area": 309, "segmentation": {"size": [512, 512], "counts": "YPo01m?2M3N3M201O001O00001O001O00001OO1N2M3N20000001O001O00N2N2N2MS`a6"}, "image_id": 989, "id": 16420}, {"iscrowd": 0, "category_id": 1, "bbox": [134.0, 0.0, 61.0, 54.0], "area": 1969, "segmentation": {"size": [512, 512], "counts": "UQS22l?3L3N2M4M2M4L3O110O01O01O010O01OM4M2EUO^Ao0_>TO^An0_>;O2O001O00001O00001O001O00001O001O0000N2N2M3N2M3N2M3M3N2M3N2M3N2M3M3N2M3N2M3N2MS`n4"}, "image_id": 989, "id": 16421}, {"iscrowd": 0, "category_id": 1, "bbox": [218.0, 0.0, 45.0, 22.0], "area": 569, "segmentation": {"size": [512, 512], "counts": 
"PP]31o?00001O001O00001X@N`?2]@1c?7O001O00001O001O00001O001O00001O001O001O00001O001O00O1M3N2M3N2N2M3N2MS`l3"}, "image_id": 989, "id": 16422}, {"iscrowd": 0, "category_id": 1, "bbox": [262.0, 0.0, 73.0, 45.0], "area": 1890, "segmentation": {"size": [512, 512], "counts": "PQS41m?2M4L3N2M4M2M4L3N2M4M2M4N10001O00001O001O00001O001O00001O00001O00O1M3N2M3M3N2M3N2M3M3N2M300001O001O00001O001O00001O00001O001O0000N2N2M3M3N2M3N2MS`h2"}, "image_id": 989, "id": 16423}, {"iscrowd": 0, "category_id": 1, "bbox": [400.0, 0.0, 48.0, 17.0], "area": 431, "segmentation": {"size": [512, 512], "counts": "PPX61o?00001O001O00001O001O00001O001O001O00001O001O00001O001O00001O001O001O00001O001O0000N2N2M3N2M3NRPP1"}, "image_id": 989, "id": 16424}, {"iscrowd": 0, "category_id": 1, "bbox": [340.0, 14.0, 54.0, 47.0], "area": 1432, "segmentation": {"size": [512, 512], "counts": "XQZ52l?3L3N3M2N3L3N3M2M3N3M2O20O01O010O01O010O010O01O010O01O010O010O01O01O010O010O01O010OO2M2M4M2N3M2M3N3M2N3L3NRoj1"}, "image_id": 989, "id": 16425}, {"iscrowd": 0, "category_id": 1, "bbox": [190.0, 22.0, 55.0, 62.0], "area": 1734, "segmentation": {"size": [512, 512], "counts": "TRo21n?1N3M2N3N1N3M3N1N3M2O2M3M2N3N1N3M3N1N3M2N3N1N3M3N1N3M21O0O2M2N3M3N1N3M2O2M2N3N2M2N3M2O2M3M2O2M2N3M3N1N3M2O2Ml^U4"}, "image_id": 989, "id": 16426}, {"iscrowd": 0, "category_id": 1, "bbox": [235.0, 44.0, 74.0, 69.0], "area": 2706, "segmentation": {"size": [512, 512], "counts": "Sce33j?3N2M4M2M3N3L3M4M2M3N3L3N3L3M3N3L3N3L3M3N3N10010O010O00010O01O0M3N3L3N3L3N2M4L3N2M4M2N3O00010O010O00010O01O01O010O01O01O010O000N3L3N3L3N2M4L3N2M4MR^U3"}, "image_id": 989, "id": 16427}, {"iscrowd": 0, "category_id": 1, "bbox": [508.0, 50.0, 4.0, 11.0], "area": 27, "segmentation": {"size": [512, 512], "counts": "iQn73k?2M3N3]N"}, "image_id": 989, "id": 16428}, {"iscrowd": 0, "category_id": 1, "bbox": [460.0, 55.0, 52.0, 55.0], "area": 1738, "segmentation": {"size": [512, 512], "counts": "nRV71l?4M2N3L3N210O010O001M2N2N3L3N3M2M4M2N2M4M201O010O01O01O0O2M2N3N10010O01N1M4M2N201kN\\Am0c>QO_Ao0b>nNaAQ1g>M3NO1002M3N3M2MSN"}, "image_id": 989, "id": 16429}, {"iscrowd": 0, "category_id": 1, "bbox": [309.0, 67.0, 71.0, 75.0], "area": 2925, "segmentation": {"size": [512, 512], "counts": "`cj43j?3DMi@6T?Mi@5U?Mi@6T?S1eAoN[>Q1cARO]>m0aAUO]>Y1M2M4M21ON3L3N3L3M4O0010O010O0010OUN[B[1f=bN\\B]1e=aN^B\\1d=aN_B]1d=`N^B]1R>000000O10O10O102N2N3L3N3M2M3N3M2N3L3N3M2M3N3M2MgmQ2"}, "image_id": 989, "id": 16430}, {"iscrowd": 0, "category_id": 1, "bbox": [390.0, 100.0, 52.0, 76.0], "area": 1953, "segmentation": {"size": [512, 512], "counts": "fTS62l?2N3M2M4M2N3M2M3N3M2ZAUOU>n0hATOU>o0hAUOU>n0hATOV>n0hATOV>]1L3N3M2N3L3N3M10O2N3L3N3M201M2N3M2N2M4M2N3M2M4M2N3M2M3N3M2N3L3N3M2N3L3N2NblR1"}, "image_id": 989, "id": 16431}, {"iscrowd": 0, "category_id": 1, "bbox": [452.0, 119.0, 47.0, 57.0], "area": 1637, "segmentation": {"size": [512, 512], "counts": "[TR74j?2M4M2o@EZ>>cAEZ>?cADZ>>cAEZ>?cAC[>?bAE]>;aAG_>n00O0010_AbN^>a1O01O010O01O01O010O01O01O010O0O1M013L3N2M4M2M4L3N2M4M2M4M2M3N3L3NP\\6"}, "image_id": 989, "id": 16432}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 135.0, 51.0, 49.0], "area": 1921, "segmentation": {"size": [512, 512], "counts": "^4i0W?5K5K6J1N10O10000000O010000000O010000000O10O10000000O010000000O010000000O010000000004K7CTAZOm>e0612J9K5K6InZV7"}, "image_id": 989, "id": 16433}, {"iscrowd": 0, "category_id": 1, "bbox": [357.0, 136.0, 27.0, 26.0], "area": 420, "segmentation": {"size": [512, 512], "counts": "jdb51m?2M3N3M2M4M2N3M20010O010O0010O0010O01O0N3M2M3N3M2M4Mbko1"}, "image_id": 989, "id": 16434}, {"iscrowd": 
0, "category_id": 1, "bbox": [66.0, 156.0, 47.0, 40.0], "area": 1223, "segmentation": {"size": [512, 512], "counts": "eUQ14h?5K4N20010O00000N3I6K5M301O0001O01O0001O01O0001O01O0001O01O0001O01O0000QOTAj0R?0O0000010OL4M3L4M9FfZW6"}, "image_id": 989, "id": 16435}, {"iscrowd": 0, "category_id": 1, "bbox": [124.0, 168.0, 62.0, 62.0], "area": 1982, "segmentation": {"size": [512, 512], "counts": "lVn11m?3L3M3N3L3M3010O010OO1N3L3M3N3L3M4M2M3M4M2M4M2M03N30O001L3O110O00010OO1CmAkNV>Q1mAlNV>Q1mAlNW>Q1=QOVAi0i>UOZAj0n>0010O010O00010TOo@g0U?0010O010M2M3N3L3M3N3L3MTjR5"}, "image_id": 989, "id": 16436}, {"iscrowd": 0, "category_id": 1, "bbox": [13.0, 191.0, 31.0, 35.0], "area": 634, "segmentation": {"size": [512, 512], "counts": "if62l?2M3N3L3N3L3N3L3N2N3M2010O0010O0010O001M2N2M4M2M4M2N2M4M2M4MkiY7"}, "image_id": 989, "id": 16437}, {"iscrowd": 0, "category_id": 1, "bbox": [202.0, 199.0, 52.0, 61.0], "area": 1571, "segmentation": {"size": [512, 512], "counts": "hWU32l?2N2N3L3N3M2M4M2N3M2M3010O01O010O001L3N2N3L2O2N2N3L3N3M1O01O2N3L3N3M2N2N3M2N3M2N3L3N3M2N2M4M2N3L3N3M2NbiP4"}, "image_id": 989, "id": 16438}, {"iscrowd": 0, "category_id": 1, "bbox": [20.0, 215.0, 74.0, 76.0], "area": 2805, "segmentation": {"size": [512, 512], "counts": "^X:4j?2M4M2M3N3L3N3L30010O01O01O010O01O01O0O2L3N2M4M2M4FgNiA[1T>hNiA[1U>:L3N3L3M301O010O01O01O010O00010ON3M2M3N3`NoAl0T>QOoAm0T>POoAl0T>QOoAm0S>QOoAl0e>M2010O0010O0010O00010OO2M2M3N3L3N3L3N2M4M[h`6"}, "image_id": 989, "id": 16439}, {"iscrowd": 0, "category_id": 1, "bbox": [250.0, 215.0, 28.0, 30.0], "area": 516, "segmentation": {"size": [512, 512], "counts": "ZWm34j?2M4M2M3N3L3N3M200010O0010O0010O0010OO1M4M2M4M2M3N3L3MUid3"}, "image_id": 989, "id": 16440}, {"iscrowd": 0, "category_id": 1, "bbox": [162.0, 236.0, 30.0, 37.0], "area": 640, "segmentation": {"size": [512, 512], "counts": "WXa23j?3N2M4M2M4M2M3N3M2M4M21O01O010O01O01OM4M2M4M2M3N3L3N3L3N2M`ho4"}, "image_id": 989, "id": 16441}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 238.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "^72`ho7"}, "image_id": 989, "id": 16442}, {"iscrowd": 0, "category_id": 1, "bbox": [100.0, 240.0, 65.0, 77.0], "area": 2712, "segmentation": {"size": [512, 512], "counts": "oXb11l?4@Mo@5o>Mo@6n>Mn@68FW>7_A67EX>j0fAXOX>j0eAZOW>j0fAXOX>Z1O00010O010O010O00010O010O010O00010O010N1M4O0001O0N000O0100O010000O4M2N2M4M2N3L3N3L3N2N3M210N1N2M4M2N3L3N3L3N2NYX]5"}, "image_id": 989, "id": 16443}, {"iscrowd": 0, "category_id": 1, "bbox": [242.0, 249.0, 40.0, 53.0], "area": 1212, "segmentation": {"size": [512, 512], "counts": "mXi32k?3N2M4L3M4_O_O^Ad0_>_O_Ac0^>@_Ad0`>]O\\Af0d>;10O00010O0010O0001]AhN]>W1`AlN`>Z110O00cNaAY1c>1O01O0M3N3L3M4L3M3YOk@a0[?M3M4L3N2Migb3"}, "image_id": 989, "id": 16444}, {"iscrowd": 0, "category_id": 1, "bbox": [306.0, 253.0, 29.0, 32.0], "area": 576, "segmentation": {"size": [512, 512], "counts": "eXi41m?3L3M4L3N2M4L3N2M4O001O01O010O00010O000N3M2M4L3N2M4L3M3NoWh2"}, "image_id": 989, "id": 16445}, {"iscrowd": 0, "category_id": 1, "bbox": [349.0, 260.0, 77.0, 56.0], "area": 2420, "segmentation": {"size": [512, 512], "counts": "Ri^52k?4M2M4L3N2M4M2M3M4M2M4N11O010O00010O01O01O010O01O01O010O00010O01O01O010O01O01O010O00010O01O01O010O01O01O01O0N2010O01O01O010O00001L3M3N3L3N3L3M3N3L3N3L3MYgZ1"}, "image_id": 989, "id": 16446}, {"iscrowd": 0, "category_id": 1, "bbox": [149.0, 277.0, 63.0, 48.0], "area": 1866, "segmentation": {"size": [512, 512], "counts": 
"`iZ23k?2N2M4M2M4M2M3N3L3N3O01O01O010O00010O0010O01O01O01OO2N1009G0M3M3M31O0000001O00001O00001O00001O00010O000G:^Oa0O101N100O1N3L3N2NTge4"}, "image_id": 989, "id": 16447}, {"iscrowd": 0, "category_id": 1, "bbox": [208.0, 305.0, 59.0, 68.0], "area": 1959, "segmentation": {"size": [512, 512], "counts": "lZX31l?3N2M4L3M3M4L3N30O0010O00010O00010O00010L3N2M4M210O00010O0O1M4L3aAeNV>^1gAfNV>c1M3M4O00010O00010O00[OPB[OP>a0SB_Om=NPBL75i=LSBL79f=GWBL6=c=DZBM6>a=A\\BN6b0]=^OVCb0U>O0O1M4L3M3M4LPUj3"}, "image_id": 989, "id": 16448}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 313.0, 20.0, 26.0], "area": 350, "segmentation": {"size": [512, 512], "counts": "i9e0[?10O00010O010O00010O010O0N2M4M2M3N3L3NRfe7"}, "image_id": 989, "id": 16449}, {"iscrowd": 0, "category_id": 1, "bbox": [335.0, 313.0, 56.0, 54.0], "area": 1927, "segmentation": {"size": [512, 512], "counts": "`jW53m?5J5L5K5K5K4K100000000O010000000O010000000O0100000O0DQOiAo0W>UOeAk0Z>[OaAe0_>>0O1000O102N000O1000O1000O10M3O103M5K5K4K6K5K5K4L5JoTl1"}, "image_id": 989, "id": 16450}, {"iscrowd": 0, "category_id": 1, "bbox": [100.0, 320.0, 28.0, 27.0], "area": 518, "segmentation": {"size": [512, 512], "counts": "aZb14i?4L3L4M3L5O000010O000010O000010O000010O0001M2M3M3L5L3Lneo5"}, "image_id": 989, "id": 16451}, {"iscrowd": 0, "category_id": 1, "bbox": [269.0, 324.0, 58.0, 61.0], "area": 1692, "segmentation": {"size": [512, 512], "counts": "fkV44j?2M3M4M2M3M4M2M4L3N2M4L3N2M4L3N3L3M3N30L3N3L3M3N3L3M4M2M4L3N2M4O010O00010O0010O0010O0010O00010O010O00010O01L3M4M7HZUl2"}, "image_id": 989, "id": 16452}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 348.0, 45.0, 55.0], "area": 1398, "segmentation": {"size": [512, 512], "counts": "e;d0\\?1O01O010O01O01O01O0O1N3L3M3N3L3M4L3N2M4L300010O00010O0N2M4L3M3N3L3M4L3N2M4L3N2M4L3M3N3LoTY7"}, "image_id": 989, "id": 16453}, {"iscrowd": 0, "category_id": 1, "bbox": [106.0, 357.0, 80.0, 120.0], "area": 3816, "segmentation": {"size": [512, 512], "counts": "\\^e12l?3L3N2M4L3N3L300010O0010O0010O00010OO2L3N2M4M2M3M4M2M4M2M3N3L3M4M2M3N3L3N3VOdMkC_2SXO`Ak0]>WO`Am0\\>>M2M4M3M3N101O10N1O2N1N3N1O1N3N1O2N0O01O10O2O2N1N3L3M3L5K4M4K4M4K4L4MdTY6"}, "image_id": 989, "id": 16455}, {"iscrowd": 0, "category_id": 1, "bbox": [194.0, 365.0, 40.0, 31.0], "area": 698, "segmentation": {"size": [512, 512], "counts": "k[Q32l?2M3N3L3N3b@AW?e010O0010O001M2O1010O01O01O010O0f@_OV?`0h@BX?d0O0N2N3O00M4N110O0N2N3L3N3L3N2M4M]dZ4"}, "image_id": 989, "id": 16456}, {"iscrowd": 0, "category_id": 1, "bbox": [236.0, 377.0, 75.0, 103.0], "area": 3105, "segmentation": {"size": [512, 512], "counts": "Z\\f34j?2Z@K_?7_@K^?=N3M2M4M2O11L3010O010j@YOQ?g0m@[OS?j00O010O010O0O1M40O0aBYOf;g0WD[Oi;e0UD]Ol;b0RDAm;`0cBZOZ17gNIX=j0PDBP<>jCUOQOdC[OWO6U=?bC]OXO6V==bC\\OVO9X=:cC2]ASA=o>ATAW1O0010O0010O010O000N3L3N3L3N2M4M2N3L011N3N3N11O01O010O01O010O01O01O010O01O010O01QOYBCg=;[BFd=7`BHa=5aBL^=1fBN[=OgB1Y=LjB5U=InB6S=FPC;ocNmA^1[>O00010O01O01O010O01ON3L3N2M4M2M3N3L3@i@5Z?Hi@4Z?Ii@4c?N\\fQ5"}, "image_id": 990, "id": 16484}, {"iscrowd": 0, "category_id": 1, "bbox": [54.0, 311.0, 37.0, 78.0], "area": 2283, "segmentation": {"size": [512, 512], "counts": "VZk0:[?;WB\\OVB>BfVb6"}, "image_id": 990, "id": 16485}, {"iscrowd": 0, "category_id": 1, "bbox": [189.0, 344.0, 75.0, 53.0], "area": 2197, "segmentation": {"size": [512, 512], "counts": "hkn22l?3M2M3N3L3N3L3N2M4M210O0O2M2M3N3N1010O0010O0010O010hNZAT1i>01M2N2010O010O00010O010O00010O010O00010O010O00010O010O01O01O010O01N100010O01M2N2M4M2M4M2M3N3L3N`dk3"}, "image_id": 990, "id": 16486}, {"iscrowd": 0, "category_id": 1, "bbox": [173.0, 366.0, 
15.0, 13.0], "area": 129, "segmentation": {"size": [512, 512], "counts": "ekf22k?4L3O1010O00010O010O00001L3MadQ5"}, "image_id": 990, "id": 16487}, {"iscrowd": 0, "category_id": 1, "bbox": [420.0, 366.0, 47.0, 72.0], "area": 2399, "segmentation": {"size": [512, 512], "counts": "l[b6f0T?6000000000000000000000000000000lAWOQ=i0SB3m=P1H80000000000000000006KO0000000000000000000000000000L4UOk0TOXUf0"}, "image_id": 990, "id": 16488}, {"iscrowd": 0, "category_id": 1, "bbox": [480.0, 371.0, 32.0, 85.0], "area": 2144, "segmentation": {"size": [512, 512], "counts": "U\\`77a?8oAJX<6aC1_nI]A9`>J]A:`>H^A:`>I\\A;]>KaA7]>h0M2M3N3O001O01O010O010O01O01O01nAUNn=n101O01O010O010M2O101O010O0SOnAMR>0QBOo=OSB2l=KXB4i=IYB7g=G\\B9d=C_B=a=AaBN3L3N2NRbX5"}, "image_id": 990, "id": 16490}, {"iscrowd": 0, "category_id": 1, "bbox": [52.0, 412.0, 55.0, 62.0], "area": 2326, "segmentation": {"size": [512, 512], "counts": "^]j0:Z?i000K51O000001O0000000000000001O0001O00000000000000iNTB:l=[O_Be0^>00001O01O000000000000000N2000001O000L40000K5D_RZ6"}, "image_id": 990, "id": 16491}, {"iscrowd": 0, "category_id": 1, "bbox": [346.0, 424.0, 87.0, 58.0], "area": 2619, "segmentation": {"size": [512, 512], "counts": "[^]52j?4L4M4K4L4N201O01O0001O01O01O0001O01O01O0001O01O0001O01O00O2L30010O00000O2K4L4L4L5O00000010O0000010O00010O0000010O000RObA:^>CeA>[>]OjAb0V>ZOnAf0R>VORBj0b>10O0000010O00010O0000010O0000010M2L4L4L5K4M3LmQW1"}, "image_id": 990, "id": 16492}, {"iscrowd": 0, "category_id": 1, "bbox": [193.0, 440.0, 67.0, 49.0], "area": 2425, "segmentation": {"size": [512, 512], "counts": "^nP30001O0000000006J0000000000000000000000000000000000000000000000000000000000000000000000L4@`000000VA"}, "image_id": 990, "id": 16496}, {"iscrowd": 0, "category_id": 1, "bbox": [135.0, 479.0, 31.0, 33.0], "area": 915, "segmentation": {"size": [512, 512], "counts": "\\oS2:f?:[O_OWAa0i>;O100000000000000000000000000O10000000000000000002N=C>BT`\\5"}, "image_id": 990, "id": 16497}, {"iscrowd": 0, "category_id": 1, "bbox": [349.0, 487.0, 74.0, 25.0], "area": 1338, "segmentation": {"size": [512, 512], "counts": "Xo^5=c?:F000000001O00000000000000000O10000000000000000000000000000000000000000000000000000000000b0^O000000O10000000000000000000000000000000000000000000000XP\\1"}, "image_id": 990, "id": 16498}, {"iscrowd": 0, "category_id": 1, "bbox": [69.0, 492.0, 35.0, 20.0], "area": 583, "segmentation": {"size": [512, 512], "counts": "`oR15k?7I3M000O10001O0000000O100000000000000O1000000000000O1000000004L7I7IR`[6"}, "image_id": 990, "id": 16499}, {"iscrowd": 0, "category_id": 1, "bbox": [235.0, 503.0, 27.0, 9.0], "area": 231, "segmentation": {"size": [512, 512], "counts": "hoe38h?0000000000000000000O100000000000000000000000000001OX`l3"}, "image_id": 990, "id": 16500}, {"iscrowd": 0, "category_id": 1, "bbox": [366.0, 0.0, 116.0, 65.0], "area": 3970, "segmentation": {"size": [512, 512], "counts": "PPg51o?1O1O1O1O001O1O1O1O1O001O1O1O1O1O001O1O1O1O001O1O1O1O1O001O1O1O1O1O001O1O1O1O1O001O1O1O1O1O001O1O1O1O001O1O1O1O1O001O1O1O1O1O001O1O1O1O1O001O1O1O1O1O001O1OO1N2O1O1O1O1N2O1O1O1O1N2O1O1O1N2O1O2N2N2M3N2N1O2N2M3N2N2N1O2M3N2N2N2N1N3N2N`o>"}, "image_id": 991, "id": 16501}, {"iscrowd": 0, "category_id": 1, "bbox": [232.0, 111.0, 71.0, 75.0], "area": 2639, "segmentation": {"size": [512, 512], "counts": "mTd32m?2M3N2N2N1O2M3N2N2N2N1N3N2N2N2N1N3N2N2N2N2M2O2N2N2N2M3N1O2N2N2M3N0000O10O1002N2M3N2N2N1O2M3N2N2N2N1N3N2N2N2N2M2O2N2N2N2M3N1O2N2N2M3N2N1O2M3Ng[X3"}, "image_id": 991, "id": 16502}, {"iscrowd": 0, "category_id": 1, "bbox": [102.0, 248.0, 148.0, 181.0], "area": 6756, 
"segmentation": {"size": [512, 512], "counts": "f\\c12m?2M3N1O2N2M3N1O2N2M3N2N1O2M3N2N1O2N2M3N2N1O2M3N2N2N1N3N00O1000O10O1000O0100000O010000O10O1000O10O1000O10O10O100000O010000O0100000O01000O1000O10O1000O0100000O010000O0100000O10O1000O10O10O1000O10O10000O0100000O01000O10O1000O1000O0100000O010000O0100000O10O1002M3N1O2M3N2N2N1VOVABUABTA=n>ATABUABUA`0YACg>=WAFh>:UAIk>f001O00001O001O00001O00001O00001O00001O00001O001OO1M3M3M3M3O11O00001O00001O00001O00001O001O00001O00001O00001O00001O00001O001O00001O00001O00001O00001O001O00001O00001O00001O00001O00001O001O00001O00001O00001O00001O001O00001O00001O00001O00001O00001O001O00001O00001O00001O00001O001O00001O00001O00001O00001O00001O001O00001O00001O00001O00001O001O00001O00001O00001O00001O00001O001O00001O00N2M3M3M3M3N2M3M3M3M3N2M3M3M3M3M3N2M3M3M3M3N2M3M3M3M3M3N2M3MSPa1"}, "image_id": 992, "id": 16505}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 0.0, 124.0, 139.0], "area": 12808, "segmentation": {"size": [512, 512], "counts": "XSR61l?3M3M4M2M3M4L3M3M4M2M4L3M3M4L3M3N3L3M3M4L3M3N3L3M3M4L3M4M2M3M4L3M3M4L3N2N3O00001O00001O00001O001O00001O00001O00001O00001O01O01N101O00001O00001O00001O00001O00001O001O00001O00001O00001O00001O00001O000000M3N2M3M3M3M3M3M3N2M3M3M3M3M3N2M3M3M3M3M3M3N2M3"}, "image_id": 992, "id": 16506}, {"iscrowd": 0, "category_id": 1, "bbox": [19.0, 25.0, 43.0, 27.0], "area": 647, "segmentation": {"size": [512, 512], "counts": "Va92j?4M4L3M30001O01O01O01O01O01O01O01O01O01O01O01O01O0001O01O01O01O01O01O01O01O01L3M3M4LnnP7"}, "image_id": 992, "id": 16507}, {"iscrowd": 0, "category_id": 1, "bbox": [124.0, 37.0, 43.0, 30.0], "area": 638, "segmentation": {"size": [512, 512], "counts": "bQn12k?4M2M3N3L31O01O010O01O01O010O01O01O010O01O01O01O010O01O01O01O010O01O01O010ON3L3N2M4M^^\\5"}, "image_id": 992, "id": 16508}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 57.0, 42.0, 22.0], "area": 337, "segmentation": {"size": [512, 512], "counts": "Pbm21m?2M3N30O01O01O01O010O01O01O010O01O01O01O01O010O01O01O010O01O01O01O01O010O01O0O1M4Mj]]4"}, "image_id": 992, "id": 16509}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 92.0, 36.0, 60.0], "area": 1361, "segmentation": {"size": [512, 512], "counts": "l2c088R>HoA7R>ImA8R>HnA8R>HoA7R>ImA8R>HnA8R>InA6S>ImA8R>HnA8R>InA6S>ImA8R>HnA8R>InA6S>ImA8R>HoA7Q>JnA6S>IkA:T>FiA=W>f01M2M3M4L3M3M4L3M3N3L3M3M4L3M3M4Lkl]7"}, "image_id": 992, "id": 16510}, {"iscrowd": 0, "category_id": 1, "bbox": [113.0, 106.0, 28.0, 43.0], "area": 620, "segmentation": {"size": [512, 512], "counts": "_dh11l?3M3M4L3M3M4L3N3L3M3M4L3N2010M2M3M4L3M4M2M3M4L3M3M4L3Md\\i5"}, "image_id": 992, "id": 16511}, {"iscrowd": 0, "category_id": 1, "bbox": [95.0, 119.0, 14.0, 21.0], "area": 160, "segmentation": {"size": [512, 512], "counts": "Xd_12k?3M3M4L3M3N3O00M4L3M3M4M2MZ\\Y6"}, "image_id": 992, "id": 16512}, {"iscrowd": 0, "category_id": 1, "bbox": [14.0, 142.0, 332.0, 282.0], "area": 60350, "segmentation": {"size": [512, 512], "counts": 
"ZZ71l?3M4L3M3M4M2M3M4L3M3M4L3M4L3N2M4L3M3M4L3M3M4L3N2M4L3M3M4L3M3M4L3M3N3L3M4L3M3M4L3M3M4M2M3M4L3M3M4L3M3M4M2M3M4L3M4L3M3M4L3N2M4L3M3M4O01O010O00010O00010O00010O01O01O01O01O01O01O01O01O01O01O01O01O010O00010O01O01O01O01O01O01O01O01O01O01O01O01O01O01O010O01O01O01O01O01O01O01O01O01O01O01O01O01O010\\G_Ig7h0cGk4c0`Jj7b0fGn4=dJm7:jGR55gJQ84mGU5OjJU8MoGY5JmJV8HSH[5CPKZ8BVH^5]OSK^8[OXH_6g7^I]Ha6a8010O0010O00010O00010O00010O00010O00010O00010O00010O0010O0010O00001M2M3N3L3M3N3L3M3N3N10010O00010O00010O00010O0010O00010N1M3M4L3M4L3N2M4L3M3M4L3M3M4L3N2M4L3M2N003M4L3N2010O0010O00010O0010O0O1M4M2M3M4L3M3M4L3M22L3N2M4L3M301O00010O00010O00010O00010O0001^LbGA]8dMROc:=mG=cMSOd:h8_O[G>h8@[GWORAi0o>31O1O100O1O1O1O1O100O1O1O1O1O1O100O11O1O2N1O1O10001M2N2O1O1O1O2N1O11O0001N1N2N2N2O10O1N2N3N1N2NI"}, "image_id": 993, "id": 16518}, {"iscrowd": 0, "category_id": 1, "bbox": [313.0, 15.0, 101.0, 69.0], "area": 3193, "segmentation": {"size": [512, 512], "counts": "]al41m?3L3N2N3L3N3M2M3N3M2n@UOl>Q1N2N30O01OO2N110O0010N1N30O0010O0010O001jNXAS1j>010O0010O0010UAnNf>V110O0010O0M4O001O01O010O01O010O01O010O01O010O01O01O010O010O00010O010O0010O0010O010O0010O0010O001XOSABWA=j>@XAa0g>\\O]Ac0c>[O_Ae0o>0OO1M4M2N3L3N3M2MRn`1"}, "image_id": 993, "id": 16519}, {"iscrowd": 0, "category_id": 1, "bbox": [409.0, 24.0, 27.0, 23.0], "area": 366, "segmentation": {"size": [512, 512], "counts": "Va\\61l?3N3L3M4M2O1010O00010O010O00010O00010O010N1M3N3L3M3NSoU1"}, "image_id": 993, "id": 16520}, {"iscrowd": 0, "category_id": 1, "bbox": [450.0, 29.0, 60.0, 85.0], "area": 2235, "segmentation": {"size": [512, 512], "counts": "dQQ72m?3M2N2O1N3M2N2N3M2O1N2N3M2N2N3N1N2N2N3M2N2O1N3M2N2N3M2O100RBTNg=m1VBUNj=k1TBWNm=o1O00[NUBT1k=kNVBU1j=iNXBW1i=fNZBZ1e=dN]B\\1c=bN_B^1a=`NaB`1_=_NbBa1P>0OO1N2N2N3M2N2VOXA:i>DYA:i>DZA9h>EZA9i>DYA:i>EXA9j>EXA:Y?M2N2N2Nol0"}, "image_id": 993, "id": 16521}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 64.0, 22.0, 39.0], "area": 472, "segmentation": {"size": [512, 512], "counts": "W2P1o>000O01000001N2O2N2N2N2M3N1O2N2M3N2N1O2N2Mgmd7"}, "image_id": 993, "id": 16522}, {"iscrowd": 0, "category_id": 1, "bbox": [26.0, 64.0, 61.0, 60.0], "area": 1791, "segmentation": {"size": [512, 512], "counts": "YS=2l?3L3N3L3N2M4N1010O00010O010O00010O010O01O01ON3L3N3M2M3N3L3N3M2M3N3L3N3O01O010O01OM4M2M4M2M3N3M2M4M2M4M2M3N3L3N3L3N2N3L3Nh]d6"}, "image_id": 993, "id": 16523}, {"iscrowd": 0, "category_id": 1, "bbox": [295.0, 67.0, 56.0, 51.0], "area": 1800, "segmentation": {"size": [512, 512], "counts": "Xcc42j?5K4L4K5L4L5K4L4L4010O00000010O000000010O000000nNZAj0g>PO^AQ1h>0000010O004L0001O0001OM3010O000001O01O0000010OL4K5L4K6J5L4Ka]`2"}, "image_id": 993, "id": 16524}, {"iscrowd": 0, "category_id": 1, "bbox": [437.0, 68.0, 41.0, 47.0], "area": 913, "segmentation": {"size": [512, 512], "counts": "_bj62m?2N2O2M2N2^@E\\?=b@E]?`0N3M2N2N2O2M2N2010O000010O000N210O00N2N3M2N2O2M2N2N2O2M2N2N3M2O1N2Nnl`0"}, "image_id": 993, "id": 16525}, {"iscrowd": 0, "category_id": 1, "bbox": [86.0, 86.0, 56.0, 61.0], "area": 1617, "segmentation": {"size": [512, 512], "counts": "QT[11n?2N2N2N2N2N2N2N2N2N2N2N2N2_OWOhAk0V>WOhAk0V>WOhAk0V>WOhAk0V>WOhAj0W>YOfAg0Z>?00000000000000002N2N2N2N2N2N2N2N1O20000O1N2Jo@ZOQ?f0QAXOo>h06N2N2N2N2N2N2M3N2N2N2Nelh5"}, "image_id": 993, "id": 16526}, {"iscrowd": 0, "category_id": 1, "bbox": [407.0, 106.0, 64.0, 50.0], "area": 1808, "segmentation": {"size": [512, 512], "counts": "Vd[61m?2M4M2N2M4M2M4M2N3L3N2N3O010O00010O010O0010O0010O010O00010O010O00010O010O010O00010O010O00010O0M40O01O01N1M4M2M3N3L3N3L3N2M4L3NW\\d0"}, "image_id": 993, "id": 16527}, 
{"iscrowd": 0, "category_id": 1, "bbox": [338.0, 114.0, 37.0, 44.0], "area": 948, "segmentation": {"size": [512, 512], "counts": "aTY54j?2N2N3L3N3M2M3N3M2N3L3N3M2N2010O010O01O01O010N1M4M2N2M4M2N3L3N3M2M3N3L3NW\\T2"}, "image_id": 993, "id": 16528}, {"iscrowd": 0, "category_id": 1, "bbox": [265.0, 117.0, 57.0, 61.0], "area": 2015, "segmentation": {"size": [512, 512], "counts": "odT43k?3M2M3N3L3N3M2M3N3L3N3M2M3N3L3N3M2N201O010O01O01O010O01O01O010O01O01O_NhAZ1_>10O00010O0N3M2M3N3L3N3L3N2M4M2M4M2M3N3L3Nhkn2"}, "image_id": 993, "id": 16529}, {"iscrowd": 0, "category_id": 1, "bbox": [35.0, 133.0, 33.0, 28.0], "area": 522, "segmentation": {"size": [512, 512], "counts": "dda04i?3N3N11O010O00010OO2L3M3M4M21O010O00010O01O01O01O01L3N3L3M3M4M2Mgkm6"}, "image_id": 993, "id": 16530}, {"iscrowd": 0, "category_id": 1, "bbox": [62.0, 140.0, 44.0, 39.0], "area": 989, "segmentation": {"size": [512, 512], "counts": "RUo02k?3M4M2M4L3N2M4M2O2O01O010O01O01O0100O1O010O01O01O010O02N10O0010O0010OO1N3L3N3L3N2M4M2MXkZ6"}, "image_id": 993, "id": 16531}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 143.0, 24.0, 31.0], "area": 481, "segmentation": {"size": [512, 512], "counts": "m4"}, "image_id": 993, "id": 16534}, {"iscrowd": 0, "category_id": 1, "bbox": [386.0, 169.0, 71.0, 55.0], "area": 2156, "segmentation": {"size": [512, 512], "counts": "XVQ61l?4L3N2M4L301O0M3N3L3M4O01O01O010O00010N1N3L3M301O010O00010O0010O0010O0010O00010O010O0PO[Ae0e>YO^Ag0b>UOaAk0_>ROeAn0f>00010O0010O00M4M210O0001M2M3N3L3N3L3N2M4M2M3N3LSZk0"}, "image_id": 993, "id": 16535}, {"iscrowd": 0, "category_id": 1, "bbox": [315.0, 190.0, 40.0, 31.0], "area": 685, "segmentation": {"size": [512, 512], "counts": "cfm41m?3L3N2M4M2M4M2M4N10010O010O00010O010O01O01O01N1M4M2N2M4N110O010O01O01O0O2M2N2MiY^2"}, "image_id": 993, "id": 16536}, {"iscrowd": 0, "category_id": 1, "bbox": [242.0, 192.0, 64.0, 46.0], "area": 1685, "segmentation": {"size": [512, 512], "counts": "iVi31l?4M2M4M2M3N3L3N3L300010O01O01O010O01O010O01O01O010O01O01O010O01O010O01O01O010O01O0N3L3N2N3L3N3O00M4M2M4M2M3N3L3N3L3N2N3L3N3L3NliV3"}, "image_id": 993, "id": 16537}, {"iscrowd": 0, "category_id": 1, "bbox": [30.0, 193.0, 61.0, 53.0], "area": 1990, "segmentation": {"size": [512, 512], "counts": "QW?3j?3N3M2M4M2N2M4M2N3M2M4M2N2O20O010O010O00010O010O010O00N3M2N3N10010O010O010O00010O010ON3M2N1N3N3M2M3N3M2M4M2N2M4M2N3L3N3MfYb6"}, "image_id": 993, "id": 16538}, {"iscrowd": 0, "category_id": 1, "bbox": [359.0, 197.0, 28.0, 38.0], "area": 662, "segmentation": {"size": [512, 512], "counts": "jfc5190Z?2d@1X?3d@0Z?2d@1X?=N2M4M2M4N11O01O010O01O000M4M2M4M2M3N3L3N2M4M2MgYn1"}, "image_id": 993, "id": 16539}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 199.0, 26.0, 29.0], "area": 366, "segmentation": {"size": [512, 512], "counts": "e66h?3L3N3L3N2N3O0010O001Eh@MW?0l@0T?No@2Q?JRA6n>HTA9Z?O00N3O010O01N1N2N3LYib7"}, "image_id": 993, "id": 16540}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 223.0, 16.0, 27.0], "area": 283, "segmentation": {"size": [512, 512], "counts": "o6h0Y?O00010O010O010M2N3M2M3N3M2M4Mlhg7"}, "image_id": 993, "id": 16541}, {"iscrowd": 0, "category_id": 1, "bbox": [369.0, 229.0, 62.0, 59.0], "area": 2326, "segmentation": {"size": [512, 512], "counts": "Zhh53_?0i@3U?Oi@3T?1h@3U?=M4L3N2M4O010O01O01O001L3M3O2O01O01O010O01O01O010O01O01O010O01O01O01O01O010O01O01O010O01O01OdNaAU1f>M4M2M3N3L3M4M2M3Hc@K`?19MYXX1"}, "image_id": 993, "id": 16542}, {"iscrowd": 0, "category_id": 1, "bbox": [293.0, 248.0, 45.0, 36.0], "area": 901, "segmentation": {"size": [512, 512], "counts": 
"\\hb41l?4L3N2M4L3N3L3O110O0010O010O00010O010O00010O010O0010O0010O010O00010O010O0N2N3L3N3L3N3M2Mlgf2"}, "image_id": 993, "id": 16543}, {"iscrowd": 0, "category_id": 1, "bbox": [219.0, 249.0, 68.0, 47.0], "area": 1925, "segmentation": {"size": [512, 512], "counts": "bh]31m?2M4M2_@HW?KhA5W>HlA8U>EmAARB>o=_OSBIBd0[>@ZB>h=@ZB=j=_OZB>e>M4M2M3NRfh1"}, "image_id": 993, "id": 16547}, {"iscrowd": 0, "category_id": 1, "bbox": [491.0, 305.0, 14.0, 13.0], "area": 111, "segmentation": {"size": [512, 512], "counts": "hie72l?3L3N2010O010O00010O0N3L3M^V3"}, "image_id": 993, "id": 16548}, {"iscrowd": 0, "category_id": 1, "bbox": [506.0, 307.0, 6.0, 18.0], "area": 65, "segmentation": {"size": [512, 512], "counts": "PZm74j?2M3M4M2M3]F"}, "image_id": 993, "id": 16549}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 324.0, 29.0, 40.0], "area": 713, "segmentation": {"size": [512, 512], "counts": "T:P1Q?O0010O0010O0010O0010O010O00010XOQA=o>ATA?l>]OWAc0T?O01O01N1M4M2M4M2M3NXUa7"}, "image_id": 993, "id": 16550}, {"iscrowd": 0, "category_id": 1, "bbox": [173.0, 345.0, 24.0, 21.0], "area": 263, "segmentation": {"size": [512, 512], "counts": "Skf21n?3N1N3M2O2M2N2OO010O00010O00010O102M2N3N1N3M3NmTm4"}, "image_id": 993, "id": 16551}, {"iscrowd": 0, "category_id": 1, "bbox": [487.0, 352.0, 25.0, 33.0], "area": 649, "segmentation": {"size": [512, 512], "counts": "gkc75f?5L4L4K5L5N1000010O000000010O0000010O0000010O000lD"}, "image_id": 993, "id": 16552}, {"iscrowd": 0, "category_id": 1, "bbox": [244.0, 354.0, 21.0, 24.0], "area": 305, "segmentation": {"size": [512, 512], "counts": "b[j33k?3L3N3M2M3N3M210O010O01O000N3M2M4M2M3N3MjTk3"}, "image_id": 993, "id": 16553}, {"iscrowd": 0, "category_id": 1, "bbox": [174.0, 370.0, 61.0, 34.0], "area": 1663, "segmentation": {"size": [512, 512], "counts": "\\\\g23a?010O0010O0010O0010O0010ON3L3N2O20OHPA@Q?=QACo>:UACm>;>M2M4M2MnQo4"}, "image_id": 993, "id": 16555}, {"iscrowd": 0, "category_id": 1, "bbox": [199.0, 437.0, 29.0, 28.0], "area": 506, "segmentation": {"size": [512, 512], "counts": "YnS31l?4M2M3N3L3N2M4M20001O01O010O00010O01O01O0O1M4L3N2M4L3M3MXb]4"}, "image_id": 993, "id": 16556}, {"iscrowd": 0, "category_id": 1, "bbox": [441.0, 468.0, 52.0, 44.0], "area": 1275, "segmentation": {"size": [512, 512], "counts": "nol62m?1N2N2O1N2O1N2N2O1N2O1N2O1N2N2O1N2O1N2O1N2N2O1N2O1N2O1001O1O001O001O1M2O2M2N3N2M2N3N1N3M3N1N3M3N1N3N1NPQ9"}, "image_id": 993, "id": 16557}, {"iscrowd": 0, "category_id": 1, "bbox": [500.0, 500.0, 12.0, 12.0], "area": 104, "segmentation": {"size": [512, 512], "counts": "o_j71l?3N2M3N200001O001OO1N2"}, "image_id": 993, "id": 16558}, {"iscrowd": 0, "category_id": 1, "bbox": [249.0, 220.0, 20.0, 21.0], "area": 256, "segmentation": {"size": [512, 512], "counts": "Xgl33k?3L3N3M2M310O010O0010O0010O00M4M2M4M2NPYi3"}, "image_id": 994, "id": 16559}, {"iscrowd": 0, "category_id": 1, "bbox": [325.0, 252.0, 47.0, 34.0], "area": 838, "segmentation": {"size": [512, 512], "counts": "ZhR54j?2M4M2N3L3N2010O010O00010O010O00010O010O00010O010O010O00010O010O00010O010O00010O0M4M2M4M2N2MggU2"}, "image_id": 994, "id": 16560}, {"iscrowd": 0, "category_id": 1, "bbox": [219.0, 296.0, 23.0, 20.0], "area": 276, "segmentation": {"size": [512, 512], "counts": "ai]34j?2M4L3O1010O010O00010O01O01O010O00010L3M4M2MdfV4"}, "image_id": 994, "id": 16561}, {"iscrowd": 0, "category_id": 1, "bbox": [494.0, 0.0, 18.0, 105.0], "area": 1779, "segmentation": {"size": [512, 512], "counts": "PPg75k?i2WM;E000000000000000000000000000000"}, "image_id": 996, "id": 16562}, {"iscrowd": 0, "category_id": 1, "bbox": 
[142.0, 152.0, 207.0, 119.0], "area": 17817, "segmentation": {"size": [512, 512], "counts": "jTW29g?00000000000000000000000000000000000Nd0^O3M000000000000000000000000000000000000000000000000000UACS>=mACS>=mACS>=mACS>=mACS>=mACS>=mACS>=mA]OY>c0gA]OY>c0gA]OY>c0gA]OY>c0gA]OY>c0gA]OY>c0gA]O0Z1]Bd0\\Od0\\Oe0ZOe0\\Od0\\Od0\\Od0\\Od0\\Oe0[Od0\\OoRe6"}, "image_id": 996, "id": 16564}, {"iscrowd": 0, "category_id": 1, "bbox": [186.0, 283.0, 167.0, 229.0], "area": 22953, "segmentation": {"size": [512, 512], "counts": "j]m2>Q?a0]B[OZ;V1fDjNm:c1SE]Nm:d1RE\\Nn:d1RE\\Nn:d1dDjN\\;V1SD[Om;\\200000000B>O1000000001O000000000000000000000000000000001O00000000CVLdDj3h:jLXEV3T:^MlEb2a9QN_Fo1m8eNSG[1f8lNZGT1f8lNZGT1f8lNZGT1f8lNZGT1f8lNZGT1f8lNZGT1f8lNZGT1f8lN[GS1e8mN[GS1e8nNZGR1f8nNZGR1g8S300000000000000000000000000000000000000000000001mLcGaN]8l0VHTOj7:hHFX7KWI5i6KWI5i6KWI5i6KWI5i6_MZGT2m1=Q7CoH=Q7CoH=Q7CoH=Q7CoH=o6EQI;o6EQI;o6EQI;o6FPI:R7DnHO1O1O1O1O100O1O1O1O1O1O100O1O1O1O1O100O20N2O1N3N1000oNYAf0j>WOYAg0h>WOZAg0h>WOZAg0Q?N2N2N3N0O00001O2N2N2O1N3M2Nne8"}, "image_id": 996, "id": 16566}, {"iscrowd": 0, "category_id": 1, "bbox": [206.0, 355.0, 20.0, 10.0], "area": 157, "segmentation": {"size": [512, 512], "counts": "S[W38h?000000000000010O00000000000000010O00Mnd^4"}, "image_id": 996, "id": 16567}, {"iscrowd": 0, "category_id": 1, "bbox": [86.0, 399.0, 32.0, 51.0], "area": 1516, "segmentation": {"size": [512, 512], "counts": "_\\[19g?k0UO?A0000000000000000000000000000000000000000000000000000Ob0_Of0ZOZbT6"}, "image_id": 996, "id": 16568}, {"iscrowd": 0, "category_id": 1, "bbox": [319.0, 428.0, 121.0, 84.0], "area": 7641, "segmentation": {"size": [512, 512], "counts": "goo49V?a0_Oa0^Ob0_Oa0J600000000000000001O00000000000000000000000000000000000000008H2N1O2N1O1O2N1O2N1O1OO10000O100O10000O100O100O1O1O100O1O100O1O100O1O1O100O1O100O1O1O10LbBfM]=Z2eBeMZ=[260O01O2O1N3M2N3N1N2N3N1N3M210O0O1N3M2O2M2N3N1N2N3N1N3M2N2O2M2N3N1N3M2O1N3M2N3N1NQaS1"}, "image_id": 996, "id": 16569}, {"iscrowd": 0, "category_id": 1, "bbox": [102.0, 450.0, 54.0, 31.0], "area": 1568, "segmentation": {"size": [512, 512], "counts": "T^c1l0T?1O00000000000000000O10000000O100000000000000000000000000000000000000000000000O10000000O10000000000000005Kiaa5"}, "image_id": 996, "id": 16570}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 319.0, 143.0], "area": 40152, "segmentation": {"size": [512, 512], "counts": "0_4a;00000000000000000000000000000000000000000000000000O1000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000O10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000WOi0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0]O00000O1000000000000000000000000000000000000000000000000000000000000000000000000000000000g0YO000000000000000000000000000000000000000000000000f^P3"}, "image_id": 997, "id": 16571}, {"iscrowd": 0, "category_id": 1, "bbox": [338.0, 0.0, 174.0, 150.0], "area": 25849, "segmentation": {"size": [512, 512], "counts": 
"PPY5d0\\?R4nK000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000O1000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"}, "image_id": 997, "id": 16572}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 281.0, 20.0, 9.0], "area": 171, "segmentation": {"size": [512, 512], "counts": "iXR69g?00000000000000000001O0000000000000000Vgc1"}, "image_id": 997, "id": 16573}, {"iscrowd": 0, "category_id": 1, "bbox": [398.0, 324.0, 114.0, 188.0], "area": 17957, "segmentation": {"size": [512, 512], "counts": "h\\W6U2X;c2000000000000000000000000000000000000000000000000000000000000O10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000S1mN000000000000000000000000000000000"}, "image_id": 997, "id": 16574}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 182.0, 59.0, 95.0], "area": 2406, "segmentation": {"size": [512, 512], "counts": "f5o2Q=N3N1N3M2N3M3M2O2M2N3M2N3N1N3M3M2N3N1N3M2N3M2N3N2M2N3M2N3N1N3O0010O01000O010O010O010O01000O010O01N1N3M2O2M3M2N3M2N3N[YR7"}, "image_id": 998, "id": 16575}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 281.0, 15.0, 25.0], "area": 234, "segmentation": {"size": [512, 512], "counts": "i8f0[?O010O10O10O0N3M2N3M2O2M2N3MQWh7"}, "image_id": 998, "id": 16576}, {"iscrowd": 0, "category_id": 1, "bbox": [99.0, 281.0, 35.0, 33.0], "area": 590, "segmentation": {"size": [512, 512], "counts": "\\ia11n?2N2N2N3M2N2N2O1N2N2N3M1O001O01O000000001O2O2M2N00000000002O2Fb@M`?1b@M`?19NRgl5"}, "image_id": 998, "id": 16577}, {"iscrowd": 0, "category_id": 1, "bbox": [20.0, 380.0, 21.0, 20.0], "area": 198, "segmentation": {"size": [512, 512], "counts": "X\\:1n?2N3N1N2N2N2N001O0001O0000002N2O1N2N2N2NoS[7"}, "image_id": 998, "id": 16578}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 387.0, 21.0, 38.0], "area": 354, "segmentation": {"size": [512, 512], "counts": "SN2ETA^On>a0TA]On>a0TA]Om>b0UA\\Om>b09O2N2M201O010O010O1O0N3M2N3McSe7"}, "image_id": 998, "id": 16579}, {"iscrowd": 0, "category_id": 1, "bbox": [447.0, 3.0, 65.0, 31.0], "area": 1894, "segmentation": {"size": [512, 512], "counts": "T`o6b0^?XOaAi0]>YOaAj0]>XO`Ak0]>WObAj0\\><03M3N1N3M3M20100O010O010O10O10O01@QBkNQ>T1PBkNR>R1QBkNQ>T1QBiNR>T1PBkNQ>S1`0N2M2N3N1N3O0100O01000O001M2O2@QOkAR1R>QOkAQ1T>POjAS1S>POjAR1T>POjAS1T>>N01O2M3N1N3M2010O10O010O0100O010N2M2N300O010O10QOiA3W>KkA6U>GnA6T>ImA6U>GnA6T>HoA5T>HnA6U>GnA6T>HnA7T>GnA6T>HnA6P?M2N3NkZm0"}, "image_id": 999, "id": 16581}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 43.0, 47.0, 41.0], "area": 1081, "segmentation": {"size": [512, 512], "counts": "b1d0\\?000000005OL00000000000000092TOi@9W?Gi@9_?00000000000000000000000000000EHk@;U?Ek@;b>k0000000000000000000000000e^X7"}, "image_id": 1000, "id": 16582}, {"iscrowd": 0, "category_id": 1, "bbox": [292.0, 191.0, 55.0, 81.0], "area": 1618, "segmentation": {"size": [512, 512], "counts": "ZXb42l?3M2N3M2N3M2M4M2N0000000O1000003M2N2M4M2N3M2N3M2N3L1000O100000O01000002N2M3N3M2N3L3N3M2N3M2M3N3M2N3L3N3M2N2NlYb2"}, "image_id": 1001, "id": 16583}, {"iscrowd": 0, "category_id": 1, "bbox": [374.0, 314.0, 11.0, 17.0], "area": 168, "segmentation": {"size": [512, 512], "counts": "nYk5=_?400000000000000000AeVo1"}, "image_id": 1001, "id": 16584}, {"iscrowd": 0, "category_id": 1, "bbox": [256.0, 381.0, 29.0, 24.0], "area": 324, "segmentation": {"size": [512, 
512], "counts": "V\\P41m?3N2M2N3N1O200O010O01000O010O01000O010O01000OO2M2O2M2N3NgSa3"}, "image_id": 1001, "id": 16585}, {"iscrowd": 0, "category_id": 1, "bbox": [59.0, 455.0, 41.0, 29.0], "area": 437, "segmentation": {"size": [512, 512], "counts": "^nm02l?2O2M2N3O00100O0100O0100O010O010O01000O010O010O010O10O10O010O10O010O010N2M2O2M2NXa]6"}, "image_id": 1001, "id": 16586}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 153.0, 32.0], "area": 4391, "segmentation": {"size": [512, 512], "counts": "3m0S?00000000000000000000000000000000000000000000000000000O1000000000O10000000000000000000000000000000000000000000000000000000N20000000000000000000000000000000O100000000000000000000000000000000000000000000000000000000000000000000000000000000000000O10000000000000000000000000000000000000000000:@h@DX?<600000000000f_c5"}, "image_id": 1002, "id": 16587}, {"iscrowd": 0, "category_id": 1, "bbox": [368.0, 133.0, 62.0, 58.0], "area": 1511, "segmentation": {"size": [512, 512], "counts": "cTh52m?1O2N2M3N1O2M3N2N1QA^O^>e0_A^O_>d0_A^Oa>b0]A@b>a0\\AAd>>[ACf>=XAEh>j0O1000ZOZA1f>M\\A3c>L_A3b>JaA6_>HbA9^>EdA;\\>BgA>X>AjA?V>_OkAb0i>0N3N2N2M3N2N1N1000O01000000O01000002N2N1N3N2N2N2N2M2O2N2N2N2M3N1O2NTkX1"}, "image_id": 1002, "id": 16588}, {"iscrowd": 0, "category_id": 1, "bbox": [276.0, 187.0, 73.0, 56.0], "area": 2159, "segmentation": {"size": [512, 512], "counts": "bVZ41n?1N3N2M2O2N2M3N1O2M3N2N1N3N200O0100000O010n@SOP?o000O01000JROXAn0f>TO[Aj0e>XO[Ah0b>[O]Ae0b>>N2N20O10O10O1000O10O1000O10O1M2O2N2M3N1OO01000O0101N3N1O2M3N1O0O3N2N1N3N2N2D]@7h?N2N1NfYa2"}, "image_id": 1002, "id": 16589}, {"iscrowd": 0, "category_id": 1, "bbox": [501.0, 253.0, 11.0, 65.0], "area": 472, "segmentation": {"size": [512, 512], "counts": "[hj7l0T?0B>00000000001O00^AYOk=^1RH"}, "image_id": 1002, "id": 16590}, {"iscrowd": 0, "category_id": 1, "bbox": [429.0, 0.0, 83.0, 143.0], "area": 10061, "segmentation": {"size": [512, 512], "counts": "P`f6l0T?0000000000000000000000000000P1POc2]M000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"}, "image_id": 1004, "id": 16591}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 12.0, 136.0, 137.0], "area": 15019, "segmentation": {"size": [512, 512], "counts": "`2U2k=0000000000000000000000000000000000000000000000000lMFaD;_;S2000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004aLeEQ1[:oN[3000000000000O1000000000000000`ok5"}, "image_id": 1004, "id": 16592}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 226.0, 63.0, 107.0], "area": 4699, "segmentation": {"size": [512, 512], "counts": "R7[3e<000000000000000000000000M3C>O0001O0000000000000000000000000001O0002N0000[NWC3i<[OiCf0VT??A>A?B>B>B>A?B>B>B>A?C=00000001O00000000000000000001O00000001O0000000UHZJ\\5f5UJiJk5W5eIYK[6g4VIhKj6X4gHWLY7X6001O00000000000000000000000000001O0000000000000000000000000000001O00000000000000000000000000001O0000000000000000000000000000001O00000000000000000000000000001O0000000000000000000000000000001O00000000000000000000000000001O0000000000000000000000000000001O00000000000000000000000000001O0000000000000000000000000000001O00000000000000000000000000001O0000000000000000000000000000001O0F:@`0A?A?@`0A?@`0A?@`0A?@`0A?@`0A`0_O`0AZXh1"}, "image_id": 1004, "id": 16594}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 401.0, 55.0, 111.0], "area": 3578, "segmentation": {"size": [512, 512], "counts": 
"a<_3a<00000000000000gLjCh2g<00000000000001O00000000000A?[Oe0^Ob0000O1N20001O0000000000000001O0000000000000000000001O00N2\\Od0\\OcST7"}, "image_id": 1004, "id": 16595}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 46.0, 88.0], "area": 2744, "segmentation": {"size": [512, 512], "counts": "0c2]=00000000000000000000001O00003M00000000RN[Ca0eW3e3nNjL_M]O<4W3e3nNjL_M]O<4W3e3nNjL_M]O<4W3e3nNjL_M]O<4W3e3nNjL_M]O<4W3e3oNiL^M^O<4W3f3nNhL_M^O<5V3e3oNhL`M]O;6V3e3oNhL`M^O:5W3e3oNhL`M^O:5W3e3oNhL`M^O:5W3e3oNhL`M^O:5W3e3oNhL`M^O:5W3e3oNiL_M]O;5W3e3oNiL_M]O;5W3e3oNiL_M]O<4V3f3oNiL_M]O<4V3f3oNiL_M]O<4V3f3oNiL_M]O<4V3f3oNiL_M]O<4V3f3POhL^M^O<4V3f3POhL^M^O<5U3f3POgL`M]O;6U3f3POgL`M^O:5V3f3POgL`M^O0?`3\\3POgL`MV1`3S2POgL`MV1`3S2POgL`MV1`3S2POhL_MU1a3S2POhL_MU1a3S2POhL_MU1a3S2POhL_MD4N]3f3POhL_M^O::Q3`3VOhL_M^O::Q3`3VOhL_M^O::Q3`3VOhL_M^O::Q3`3WOgL[MC<6R3a3VOPNhM_NR3a3VOPNhM_NR3a3VOPNhM_NR3a3VOPNhM_NR3a3VOPNhM_NR3a3VOPNhM`NQ3`3WOPNhM`NQ3`3WOPNhM`NQ3`3WOPNiM_NP3a3WOPNiM_NP3a3WOPNiM_NP3a3WOPNiM_NP3a3WOPNiM_NP3a3WOPNiM[NT3e3TOPNgMPN`3P4iNfL]MA:J_3P4iNeL^MA:J_3P4iNeL^MA:J_3P4iNeL_M@9K_3P4iNeL_M@9K_3P4iNeL_M@9K_3P4iNeL_M@9K_3P4iNeL_M@9K_3P4iNeL_M@9K_3c3VOSMQM_O;J^3T3FcMaL_O;J^3e25RNRL_O;J^3U2e0bNbK_O;J^3f1T1QOSK_O;J^3V1e1@bJ@;J^3g0T2AaJOLJ^36e2B`J>]OJ^3GT3AaJn0mNJ^3WO^5n0TGKo<5QCKo<5QCKe_10000000001O0000000001O000000000000000001O000000000001O0000000F:A?@VWT1"}, "image_id": 1005, "id": 16597}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 182.0, 36.0, 93.0], "area": 1759, "segmentation": {"size": [512, 512], "counts": "i5j2V=00000000D<^Ob0^Oc0K41O0L4000000000000000000000000000010O00000000000000H8^Okj]7"}, "image_id": 1005, "id": 16598}, {"iscrowd": 0, "category_id": 1, "bbox": [487.0, 183.0, 25.0, 242.0], "area": 4638, "segmentation": {"size": [512, 512], "counts": "]fc7?a?d0\\Oe0[Od0\\Od0\\Od0\\Od0\\Od0\\Oe0[Od0\\Od0gN`IiGd6T8d000000000000000000O10O100YJ"}, "image_id": 1005, "id": 16599}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 323.0, 30.0, 78.0], "area": 2132, "segmentation": {"size": [512, 512], "counts": "S:]2c=0000000001O0000000000000001O000000000001O0000000M3A?B>A?BYf`7"}, "image_id": 1005, "id": 16600}, {"iscrowd": 0, "category_id": 1, "bbox": [505.0, 92.0, 7.0, 19.0], "area": 77, "segmentation": {"size": [512, 512], "counts": "[cl72k?3M3M4M2M3O2SM"}, "image_id": 1008, "id": 16601}, {"iscrowd": 0, "category_id": 1, "bbox": [493.0, 116.0, 19.0, 30.0], "area": 405, "segmentation": {"size": [512, 512], "counts": "Ydf72k?4L3M3M4L3M3M4O001O01O01O01O01O01O01XL"}, "image_id": 1008, "id": 16602}, {"iscrowd": 0, "category_id": 1, "bbox": [59.0, 0.0, 86.0, 49.0], "area": 1892, "segmentation": {"size": [512, 512], "counts": "P`m01o?001O1O001O1O001O1O001O1O001O1O001O1O001O1O001O1O001O001O1O001O1O001O1O001O1O001O1O001O1O2N001O1O001O1O001O1O001O1O001O001O1ORO\\A`0d>_O^Aa0a>]ObAc0]>[OeAe0[>TO_A38i0[>UOhAk0f>010O10O1O0N3N2M2O2M3N1N3N2M2O2M3NT_g5"}, "image_id": 1009, "id": 16603}, {"iscrowd": 0, "category_id": 1, "bbox": [239.0, 0.0, 85.0, 72.0], "area": 3122, "segmentation": {"size": [512, 512], "counts": "Xag33k?2N3L3N3L3N2M4M2N3L3N3N10010O0010OO1M4L3M4M2M30N21O010O01O010O01O\\OdAD]>7gAIY>4jAMV>OnA0R>0nA1Q>OPB0Q>0nA0R>0oA0Q>OoA1Q>0nA1Q>OPB0Q>0nA0R>0oA0Q>OoA1Q>0nA1Q>OPB0Q>0nA0R>0oA0Q>0nA0R>0nA1Q>OPB0Q>0nA0R>0mA2S>NjA4V>LhA7W>IfA:[>e0010O01O01O010O01O01YOcAI]>4fAMZ>0hA0X>MlA3S>KoA5R>GRB8n=FTB;l=BVB>j=_OZBa0e=]O]Ba0c>M2M3N3L3N3M]nm2"}, "image_id": 1009, "id": 16604}, {"iscrowd": 0, "category_id": 1, "bbox": [312.0, 0.0, 13.0, 5.0], "area": 39, "segmentation": {"size": [512, 512], "counts": 
"PPl41o?001O00001O001O00001O00NR`m2"}, "image_id": 1009, "id": 16605}, {"iscrowd": 0, "category_id": 1, "bbox": [338.0, 0.0, 14.0, 11.0], "area": 102, "segmentation": {"size": [512, 512], "counts": "UPY52l?2M4O00001O001O001ON2N2N3MQP`2"}, "image_id": 1009, "id": 16606}, {"iscrowd": 0, "category_id": 1, "bbox": [343.0, 0.0, 39.0, 29.0], "area": 589, "segmentation": {"size": [512, 512], "counts": "h`[53k?2N3M2N3M2O11M2N3M2N2NO2N3M4M2M3O2O001O00001O001O00001O000010N1M3N3L3N3L3M3NooP2"}, "image_id": 1009, "id": 16607}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 0.0, 53.0, 19.0], "area": 517, "segmentation": {"size": [512, 512], "counts": "PPR61o?001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001OO1N2M3N2M3N2M3NR`S1"}, "image_id": 1009, "id": 16608}, {"iscrowd": 0, "category_id": 1, "bbox": [25.0, 7.0, 85.0, 83.0], "area": 3244, "segmentation": {"size": [512, 512], "counts": "ba<1m?3M2O2M3N1N3N2M2O2M2O2M3M2O2M3N1N3N1N3N2M2O2M3M2O2M2OO101NRBZNg=d1YB^Ng=a1WBaNi=`1TBcNl=\\1SBfNm=f10100O0100M2`NPBo0R>oNPBn00gNn=8TBP1NjNm=5WBn0LoNn=0YBo0HTOn=K\\BR1DTOQ>H]B^1S>0O010O10O10O01000O01000O010OO2M3N1N3M10O0102M30O01M3M2O2M2O2M3N1M4M3N1N3M3N\\nX6"}, "image_id": 1009, "id": 16609}, {"iscrowd": 0, "category_id": 1, "bbox": [331.0, 14.0, 13.0, 13.0], "area": 107, "segmentation": {"size": [512, 512], "counts": "e`U53k?3L3N30O00010O001N1N2M4M`oc2"}, "image_id": 1009, "id": 16610}, {"iscrowd": 0, "category_id": 1, "bbox": [365.0, 45.0, 60.0, 43.0], "area": 1598, "segmentation": {"size": [512, 512], "counts": "Tbf51l?3N3L3N2M4M2N3M2010O00i@\\OS?h001O01O010N1N20N3M2M4N100010O010O00010O010O0010O0010O010ON2N3L310O00N3L3N3M2M4M2M3N3L3N3M2M3N3L_^[1"}, "image_id": 1009, "id": 16611}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 54.0, 75.0, 88.0], "area": 3086, "segmentation": {"size": [512, 512], "counts": "f1d1]>O01O1O00100O001O010O1O00100O001O10O0mNnA9Q>CSB>l=^OYBa0h=ZO]Bf0b=VObBk0]=QOhBn0V>0O01000O010O1N1O1O0010O00000011N2N3M2O1N3M2N2O1N3M20010O000010N1N2O2bNfAQ1\\>mNfA0Ni0^>VOeAOOi0_>UOdA1Ni0Q?M2N2N3M2O1N2N3M2O1N3M2NR\\j6"}, "image_id": 1009, "id": 16612}, {"iscrowd": 0, "category_id": 1, "bbox": [215.0, 58.0, 85.0, 72.0], "area": 2955, "segmentation": {"size": [512, 512], "counts": "Qc[31l?4M2N2M4M2O20O01M2N2M4M2N3L3N2M4M2N3L3O2O00010O0010O0010O0010O010O00010fNeAm0[>QOgAP1Y>lNkAS1a>0010O010O01O01O010OXAlNd>X10010O010O00010O01O01O010O01O01O010O00010O01O0N2M4M2M4M2M3N3L3M4M2M3N3LTmY3"}, "image_id": 1009, "id": 16613}, {"iscrowd": 0, "category_id": 1, "bbox": [476.0, 66.0, 36.0, 51.0], "area": 1093, "segmentation": {"size": [512, 512], "counts": "oR^71n?2M3N2N2N2N2M2O2N2N2N2N2M3N1O2N2N2N200O1N1N3N2N2N000O11O1O2M3N2N2N2M2OjM"}, "image_id": 1009, "id": 16614}, {"iscrowd": 0, "category_id": 1, "bbox": [310.0, 77.0, 9.0, 11.0], "area": 59, "segmentation": {"size": [512, 512], "counts": "eRk41l?3M4M200010M2M4Mc]P3"}, "image_id": 1009, "id": 16615}, {"iscrowd": 0, "category_id": 1, "bbox": [389.0, 93.0, 26.0, 25.0], "area": 411, "segmentation": {"size": [512, 512], "counts": "^cR61m?3L3N2M4M2N3L30001O010O01O01O010O01O0O1N3M2M4M2M3No\\`1"}, "image_id": 1009, "id": 16616}, {"iscrowd": 0, "category_id": 1, "bbox": [178.0, 117.0, 105.0, 63.0], "area": 3269, "segmentation": {"size": [512, 512], "counts": "nSi2h0X?00000000000000000000000000000000000000000000001O8C501O00001O0N3M20001O001O00001O001O001O000010O01O0000_NYBj0^>0010O00010O01O01O010O01O01O01O010O001N1O1N3N1O2O01N2N1O000O0100000000000O2O2N2N2N2N2N2N2M2O2N2N2N2N2N2M_[b3"}, "image_id": 1009, "id": 16617}, {"iscrowd": 0, 
"category_id": 1, "bbox": [490.0, 117.0, 19.0, 17.0], "area": 205, "segmentation": {"size": [512, 512], "counts": "PTe72l?3M2N2M4M2000O1010O010O010O01L3N2N3MX\\1"}, "image_id": 1009, "id": 16618}, {"iscrowd": 0, "category_id": 1, "bbox": [347.0, 118.0, 58.0, 43.0], "area": 1451, "segmentation": {"size": [512, 512], "counts": "^d]51l?3N3L3M3M4M2M4L3N30O00010O010O010O010O00010O010O010O0010O0010O010O0010O0O2L3N3M2N2010O0N3M2M3N3M2M4M2N3L3N2N3L3N3MS\\e1"}, "image_id": 1009, "id": 16619}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 138.0, 61.0, 73.0], "area": 2348, "segmentation": {"size": [512, 512], "counts": "Z4^17iNn=W1PBkNP>V1mAlNT>`1000001O000001O00000O1N2N2N2N3M2N2N2O1N2N2N2O01N2N2N00001O000002N2N2N0001ON20001O0001OKUAVOk>j0WATOi>l07N2Kk@]OV?a0l@]OV?a07M2N2N2O1N2N2N3MlZQ7"}, "image_id": 1009, "id": 16620}, {"iscrowd": 0, "category_id": 1, "bbox": [449.0, 139.0, 43.0, 56.0], "area": 1514, "segmentation": {"size": [512, 512], "counts": "gdP73k?2RAMl=7bAI14Z>5bAK12[>5bAK04Z>5cAI14\\>a0aAA_>Q1O010O010O00010O010O010O00010O010O01O0O1M4M2M4M2N2M4M2M4M2N3L3N2N3L3N3LYk9"}, "image_id": 1009, "id": 16621}, {"iscrowd": 0, "category_id": 1, "bbox": [397.0, 158.0, 13.0, 15.0], "area": 116, "segmentation": {"size": [512, 512], "counts": "WeV63k?2N2M4M201O010OO2L3N2N3LRkb1"}, "image_id": 1009, "id": 16622}, {"iscrowd": 0, "category_id": 1, "bbox": [131.0, 159.0, 71.0, 83.0], "area": 3415, "segmentation": {"size": [512, 512], "counts": "ZfQ22k?3N3L3N3M2M3N3VA\\OV>d0hA_OW>b0eAA\\>>bAE]><_AG^><`AF^>P1M2M4M2M3N3L3N3M2M4O01O01O010OfN]B7d=F^B:b=CaB>^=@eB?\\=^OfBc0Y=\\OiBc0X=]OgBc0Y=]OgBd0X=\\OiBc0X=]OgBd0X=\\OiBc0V=_OiBa0U=AkB`0R=BoB=Q=DnB=Q=ClB`0T=AiBb0W=]OgBe0Y=[OeBe0]=m03L3N3L3N2M4M2M4M2N2dNeAP1_>lNeAQ1]>mNeAP1g>N3L3N2N3L3N3L3N2M4M2N_jj4"}, "image_id": 1009, "id": 16623}, {"iscrowd": 0, "category_id": 1, "bbox": [510.0, 159.0, 2.0, 6.0], "area": 9, "segmentation": {"size": [512, 512], "counts": "RUo73j?3QK"}, "image_id": 1009, "id": 16624}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 168.0, 1.0, 1.0], "area": 1, "segmentation": {"size": [512, 512], "counts": "Xeo71g:"}, "image_id": 1009, "id": 16625}, {"iscrowd": 0, "category_id": 1, "bbox": [371.0, 195.0, 65.0, 45.0], "area": 1847, "segmentation": {"size": [512, 512], "counts": "kfi51l?3L5L3L4L4M4N100010O00RAXOd>h0XA]Oh>c0TA@l>k0O00010O000010O01O01O01O00010OM3L4L5O00010O0000010O000010O0O1L4M4L3O11O01O0001O01O0001M2L4L4L5K4L4L4KniU1"}, "image_id": 1009, "id": 16626}, {"iscrowd": 0, "category_id": 1, "bbox": [440.0, 195.0, 29.0, 30.0], "area": 503, "segmentation": {"size": [512, 512], "counts": "hVl61m?2N3L3N3M2M4M2N3M2O110O010O010O0010O001M2N3L3N3M2N2M4M2NgYe0"}, "image_id": 1009, "id": 16627}, {"iscrowd": 0, "category_id": 1, "bbox": [200.0, 196.0, 63.0, 93.0], "area": 2818, "segmentation": {"size": [512, 512], "counts": "YWT33k?3L3M4M2M3N3L3N210l@YOm>m010PBQOmg15N2M4L3N3L3N2M4L3N2M4M2M4L3N2M4M2M4LfYl3"}, "image_id": 1009, "id": 16628}, {"iscrowd": 0, "category_id": 1, "bbox": [473.0, 210.0, 39.0, 36.0], "area": 825, "segmentation": {"size": [512, 512], "counts": "Sg\\73k?2M4M2N2M4M2N3N1010O010O00010O010O010O00010O010O010O00010O010O010O00010O010OQI"}, "image_id": 1009, "id": 16629}, {"iscrowd": 0, "category_id": 1, "bbox": [250.0, 211.0, 85.0, 71.0], "area": 2711, "segmentation": {"size": [512, 512], "counts": "YXm31n?3N1N3M2O2M2N2AA[Aa0b>A\\Aa0b>A\\Ab0b>@\\Aa0b>A\\A?Z>ZOiA:J<^>[OgAR1Y>POeAQ1Z>ROcAn0^>SOaAl0_>:010O0001O01O00010O002N3N1N3M01O01O0001O01O0001O00010O0000010O0000010O000101O1O2O0O101N1O101N100O2N100O100O10O00010OO1N3L3N3L3N2M4M2NfXh2"}, 
"image_id": 1009, "id": 16630}, {"iscrowd": 0, "category_id": 1, "bbox": [510.0, 216.0, 2.0, 3.0], "area": 4, "segmentation": {"size": [512, 512], "counts": "jVo71m?2XI"}, "image_id": 1009, "id": 16631}, {"iscrowd": 0, "category_id": 1, "bbox": [66.0, 259.0, 83.0, 97.0], "area": 3772, "segmentation": {"size": [512, 512], "counts": "aZQ11[>1VC1i<0UC2i<0VC1h<1VC1h<1VC2g<0UBFk0c0hA[OX>e0kAXOV>g0lAWOT>i0nAUOR>l0oASOP>m0SBPOm=P1UBnNk=R1b0O01O000001O01O000001O01O00000001O2O1N2N3M2M3M3L4L5L3LdWe5"}, "image_id": 1009, "id": 16632}, {"iscrowd": 0, "category_id": 1, "bbox": [268.0, 266.0, 57.0, 55.0], "area": 1507, "segmentation": {"size": [512, 512], "counts": "YYV42n?2M3N2M3N2M3N2M3N2M3N2M010O010O010O10O010OO2N101N1O2N101N1O2N101N1101N101N101N101N2M2N3N1N3M2N3N2M2N3M2O2M2N3M3NZWm2"}, "image_id": 1009, "id": 16633}, {"iscrowd": 0, "category_id": 1, "bbox": [449.0, 277.0, 30.0, 32.0], "area": 583, "segmentation": {"size": [512, 512], "counts": "SiP74j?2N3L3`@FX?WO_Aj0`>TObAl0_>POeAo0f>10O01O01O010L04M2M3M4M2M4M2M3M4M2M4L3N2010O000O2L3N3L3N2M4L3HbAlNa>P1bAmNa>Q18M4M2M4L3N2M4M2M3M4M2MmV^4"}, "image_id": 1009, "id": 16635}, {"iscrowd": 0, "category_id": 1, "bbox": [343.0, 285.0, 65.0, 48.0], "area": 1669, "segmentation": {"size": [512, 512], "counts": "ei[51l?3N3M2M4M2N3L3N3M2N30O010O01O01O010OO2L3O5L0010O0O2M2010O00010O010O010OQOUAi0k>UOWAk0P?0O010O0010O0010O010O0010O0010O010OO1M4M2N3L3N3M2M3N3M]fc1"}, "image_id": 1009, "id": 16636}, {"iscrowd": 0, "category_id": 1, "bbox": [480.0, 309.0, 32.0, 46.0], "area": 984, "segmentation": {"size": [512, 512], "counts": "dZ`73k?2M3N3L3N3L3N2N3L3N3L3N2N3O010O01O01O010O010O00010O010O00POXAh0i>UOYAl0f>RO]Am0g4"}, "image_id": 1009, "id": 16637}, {"iscrowd": 0, "category_id": 1, "bbox": [297.0, 312.0, 21.0, 23.0], "area": 269, "segmentation": {"size": [512, 512], "counts": "Yjd41m?2N3M2N3M2M4M2N201O010O01M2N3M2N3M2M3N3MUfP3"}, "image_id": 1009, "id": 16638}, {"iscrowd": 0, "category_id": 1, "bbox": [417.0, 327.0, 30.0, 30.0], "area": 541, "segmentation": {"size": [512, 512], "counts": "kj`61m?3L3N3L3N2N3L3N3N10010O0010O0010O0010O0010ON3L3N2M4M2N3L3NcUP1"}, "image_id": 1009, "id": 16639}, {"iscrowd": 0, "category_id": 1, "bbox": [221.0, 341.0, 93.0, 59.0], "area": 2857, "segmentation": {"size": [512, 512], "counts": "nk^32l?2N3L3N3M2O1010O010O_OC_A>^>DcA;Z>HfA8X>KgA6V>LiA5T>OhA5U>MiA5W>LfA7Z>HcA;]>d010O0010O010O0010O0010O010O00O2M2N3N10O1N3M2M4M2N3L3N2N3L3N3M2M4M21O010O010O01O01N1N3N1010O00010O010O010O00010O010O0N3N11O010O01ZOh@c0Z?01M2N2N3L3N3M2MidR3"}, "image_id": 1009, "id": 16640}, {"iscrowd": 0, "category_id": 1, "bbox": [436.0, 350.0, 64.0, 59.0], "area": 1729, "segmentation": {"size": [512, 512], "counts": "l[j63k?2N3M2N3M2N3M2N3N1N3M2N3M2N3M2O2O010O010O010O010O010O010ORO[Aa0f>\\O]Ad0b>ZO`Af0a>WObAi0]>UOeAk0i>0O010O010O010O010O010O10O010O010O010O0O2M2N3M2N3M3M2N3M2N3Moc5"}, "image_id": 1009, "id": 16641}, {"iscrowd": 0, "category_id": 1, "bbox": [75.0, 369.0, 94.0, 71.0], "area": 3350, "segmentation": {"size": [512, 512], "counts": "\\lU12l?3L3M3N3L3N2M4O001O01O010O01O01Oh@]OS?h010O00010O0010O00O2QATOf>n0WAUOj>Q1O00010O010O00010O012M00010O01O01O010O00010OO2L300010O01O01SAoNi>T110O01O01O000M4M2N3L3N2M4M2M4M2M3O2O0010O001ROWBDh=:[BBh=;[BBi=:[BCg=;[BBi=:ZBDh=:[BBh=;[BCh=:ZBCi=:P1N3L3N3L[T[5"}, "image_id": 1009, "id": 16642}, {"iscrowd": 0, "category_id": 1, "bbox": [313.0, 378.0, 88.0, 64.0], "area": 3202, "segmentation": {"size": [512, 512], "counts": 
"ell43?0i>4SAOi>5TAOh>4UAOh>4UAOi>f0O00010N1M3L5L3N02M3L5L3M3O1010O00010O0001O01O01O01O01O01O01O01_AoNQ>Q1lAROU>m0hAWOW>j0eAYO[>V110O0000010O00010O00010O00010O00010M1M12N3M3M3M4K4M3M4L3001O01O00010O0001O01O00010O0M3M3L5L3L4MkSg1"}, "image_id": 1009, "id": 16643}, {"iscrowd": 0, "category_id": 1, "bbox": [424.0, 416.0, 82.0, 65.0], "area": 3030, "segmentation": {"size": [512, 512], "counts": "a^d61l?3M3L5L3ZOEdA>X>FeA>W>EfA>V>GfAGeA>Z>d00010O00010O000010O0001O01O000010OCgASOY>j0kAUOU>k0kAUOV>j0jAWOU>IhAj02]O]>c0dA\\O]>c0cA^O\\>b0dA^O\\>c0dA\\O\\>d0dA\\O]>c0cA^O\\>c0dA\\O\\>d0dA\\O]>c0cA^O\\>c0eA[O[>e0eA[O[>e0fA[OZ>d0fA\\OZ>e0eA[O[>e0fAZOZ>f0fA[OZ>e0eA[O[>e0eA[O[>e0fA[OZ>d0fA\\OZ>e0eA[O[>e0fAYO[>g0eAVO_>j0`ASOc>m0800010O00010L3N200010O00010O0000010K4M3L4L5L3L4L5L]b2"}, "image_id": 1009, "id": 16644}, {"iscrowd": 0, "category_id": 1, "bbox": [186.0, 420.0, 67.0, 68.0], "area": 2293, "segmentation": {"size": [512, 512], "counts": "m]m21l?4M2M4M2N2M4M2M4N10010O0010O0010O0010O00O2L3N3M20010O010O0jAmN_=S1^BQOb=o0YBUOg=k0VBXOk=h0QB\\On=d0oA_OR>`0kADT>T10010O010O00010O010O01O0N2N3L3M4M2M3M4M2M4L3N3L3M3N3L3N3L3M3N]RQ4"}, "image_id": 1009, "id": 16645}, {"iscrowd": 0, "category_id": 1, "bbox": [281.0, 440.0, 46.0, 72.0], "area": 2167, "segmentation": {"size": [512, 512], "counts": "Zo\\42Q?OhA4T>0iA3LJd=6]B4JJf=6\\B3JKg=5\\B3JKg=5\\B4HKi=5[Be0c=]OZBf0f=j01O00001O00001O0000001O00001O00001O0L4M4L3M3M4L3M3M4L3M3M4L3L4M4L3M3M4L3MoQl2"}, "image_id": 1009, "id": 16646}, {"iscrowd": 0, "category_id": 1, "bbox": [322.0, 459.0, 59.0, 53.0], "area": 2068, "segmentation": {"size": [512, 512], "counts": "h_Q55g?4M3L4N3O00000M4K4L40010O000001O000000O1L4L4M3L4L4M3001O0000001O00001O0000001O0000001O0000001O0L4L4L5L3L4L4L5K4L4L4LaQQ2"}, "image_id": 1009, "id": 16647}, {"iscrowd": 0, "category_id": 1, "bbox": [403.0, 479.0, 44.0, 33.0], "area": 994, "segmentation": {"size": [512, 512], "counts": "ooY61n?100O100O1N2L4L4L4L4L4L4M31O0000001O0000001O000000N2O100001O0000001O000O2K4M3L4L5L3L4LQQP1"}, "image_id": 1009, "id": 16648}, {"iscrowd": 0, "category_id": 1, "bbox": [458.0, 488.0, 38.0, 24.0], "area": 632, "segmentation": {"size": [512, 512], "counts": "o_U71k?4M3L4L4N200001O0000001O00001O0000K5L41O0000001O0000001O0000001O0N2L5K4L4Lg`7"}, "image_id": 1009, "id": 16649}, {"iscrowd": 0, "category_id": 1, "bbox": [83.0, 510.0, 3.0, 2.0], "area": 4, "segmentation": {"size": [512, 512], "counts": "ooY11n?11OQ`d6"}, "image_id": 1009, "id": 16650}, {"iscrowd": 0, "category_id": 1, "bbox": [32.0, 0.0, 72.0, 80.0], "area": 2809, "segmentation": {"size": [512, 512], "counts": "XQ`03l?2N1O2M3N2N1N3N2N2N2M2100000O010000000N1N3N2N2M2O2N2N2M3N1O2M3N2N1O2M3N2N2M2O2N2N2O01000O1M3N1O2NRO`B\\O`=d0bB[O_=d0cBZO^=e0dBYO]=f0eBWO]=g0eBXO]=f0eBXO\\=g0fBVO]=g0fBWO\\=g0fBWO\\=g0eBXO\\=g0fBVO]=i0dBUO^=j0cBTO_=j0l0O2Ej@HY?5j@IX?5j@IX?5i@JX?5;M3Ndo[6"}, "image_id": 1010, "id": 16651}, {"iscrowd": 0, "category_id": 1, "bbox": [284.0, 0.0, 23.0, 7.0], "area": 93, "segmentation": {"size": [512, 512], "counts": "PP^41o?00001O00001O00001O00001O00001O00001O0000O1MS`V3"}, "image_id": 1010, "id": 16652}, {"iscrowd": 0, "category_id": 1, "bbox": [338.0, 0.0, 31.0, 13.0], "area": 269, "segmentation": {"size": [512, 512], "counts": "PPY51o?00001O000S@3g?3000001O0000001O0000001O0000001O0000001O0000O1L4LT`W2"}, "image_id": 1010, "id": 16653}, {"iscrowd": 0, "category_id": 1, "bbox": [402.0, 0.0, 37.0, 36.0], "area": 1005, "segmentation": {"size": [512, 512], "counts": 
"RPY63k?5M2N00001k@H`>8\\AMc>3ZA0f>0VA4j>MRA6n>?O0000001O0000001O0000001O000000O100O1001O00_OVALj>0ZAOg>M^ANf>O]ANf>N^ANfnT1"}, "image_id": 1010, "id": 16654}, {"iscrowd": 0, "category_id": 1, "bbox": [448.0, 0.0, 44.0, 56.0], "area": 1635, "segmentation": {"size": [512, 512], "counts": "cPP72i?7M2O2M1O1TACU>>fAGX>:dAJY>9cAKY>9cAKZ>P1O0000001O0000001O00001O00000000001O1OO1JhAdNX>X1nAeNS>W1=K5L4L4K5O1O100O1Bf@6Z?Fk@8a?Jio9"}, "image_id": 1010, "id": 16655}, {"iscrowd": 0, "category_id": 1, "bbox": [130.0, 39.0, 55.0, 66.0], "area": 2345, "segmentation": {"size": [512, 512], "counts": "jRQ22U?0_A3R>NcA382R>NcA283R>NcA284Q>;jAHV>9fAKX>o00001N101O00010O00010O0010O0010O00010O00010O010O00010ON2N3L3M3dNaAT1h>M2M3M4M2N30O00O2M2M3M4M2M4L3MX^S5"}, "image_id": 1010, "id": 16656}, {"iscrowd": 0, "category_id": 1, "bbox": [188.0, 77.0, 69.0, 67.0], "area": 2290, "segmentation": {"size": [512, 512], "counts": "QSn21m?3N2M3N1O2M3N2N1O2000000M2O2M3N2N1N3N2N2000O10N2fAkNe=X1YBiNh=W1VBjNk=U1kAkN23S>R1iAmN22U>a1100O10O01O10O01O100O00100O1O010O10O01O100O001O1cN_AX1f>mNZAi0h>UOZAh0i>UOYAj0h>UOZAh0Q?N2M2O2M3N2M2O2M3N2M2OY\\o3"}, "image_id": 1010, "id": 16657}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 86.0, 86.0, 58.0], "area": 3338, "segmentation": {"size": [512, 512], "counts": "Z3?a?2M3M3N2M2O2M3N2M2OO010O010O1O100O100O1000000000000n0bAQO_>k0eATO]>g0`0L4L4L5K4LY\\g1"}, "image_id": 1010, "id": 16659}, {"iscrowd": 0, "category_id": 1, "bbox": [88.0, 128.0, 79.0, 66.0], "area": 2730, "segmentation": {"size": [512, 512], "counts": "WU\\13j?3M3M4L3M3N3L3M3M4N11O01O01O01O01O01O01O01O0XARO_>n0]AUOc>T1010O0010O00010O00010O00010O00010O000N3L3M000000000003N3L310O01O01O01O01O01O01O01O01M2M3M4L3M3M4L3M4M2M3M4L3Mi[\\5"}, "image_id": 1010, "id": 16660}, {"iscrowd": 0, "category_id": 1, "bbox": [405.0, 129.0, 48.0, 65.0], "area": 1977, "segmentation": {"size": [512, 512], "counts": "mdZ64g?7I6J7J400O2O0O11O01O0001O01O0bAROj=n0RBVOk=m0QBXOk=l0PBXOm=k0oAYOQ>[110O0000010O0000010O0000010O0000L5K4L4L4L5K4L4L4L6J6J5Kh[m0"}, "image_id": 1010, "id": 16661}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 137.0, 2.0, 3.0], "area": 4, "segmentation": {"size": [512, 512], "counts": "Y43o?Nekn7"}, "image_id": 1010, "id": 16662}, {"iscrowd": 0, "category_id": 1, "bbox": [176.0, 155.0, 63.0, 58.0], "area": 2509, "segmentation": {"size": [512, 512], "counts": "oUh24g?5L4K5L4K6J500VAROb>n0ZAVOf>R110O000001O01O00L4L50O0TASOe>T101O01O0001O01O00N2K6N10000010O00000010O00000O1L5M2001O01O0000M4K4K5L4N3O0CXABh>9]ACg>8^ACg>9c0JV[X4"}, "image_id": 1010, "id": 16663}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 171.0, 52.0, 44.0], "area": 1826, "segmentation": {"size": [512, 512], "counts": "[5\\1d>000>B00000000000000000000000000000000000000000000O100000000000000E;0000000000000000000000000000000000cjU7"}, "image_id": 1010, "id": 16664}, {"iscrowd": 0, "category_id": 1, "bbox": [324.0, 177.0, 50.0, 52.0], "area": 1880, "segmentation": {"size": [512, 512], "counts": "_VR52g?7J6h@Ia>=XAJa>n0L5O000001O000001O000001O000001O00bNgAU1c>O0000000L40001O00000001O000001O00000001O0001O0N2H8I7I7IWjT2"}, "image_id": 1010, "id": 16665}, {"iscrowd": 0, "category_id": 1, "bbox": [367.0, 178.0, 17.0, 11.0], "area": 142, "segmentation": {"size": [512, 512], "counts": "feg55g?4000001O01O000001O01O0000000M3Kajo1"}, "image_id": 1010, "id": 16666}, {"iscrowd": 0, "category_id": 1, "bbox": [385.0, 196.0, 59.0, 35.0], "area": 1512, "segmentation": {"size": [512, 512], "counts": 
"lfP63g?6J6J6J601O00000000010O0000000001O01O0000000001O01O00000001O01O00000001O0001O000001O0001O000001O000001O0001O0J6J6J6JjiQ1"}, "image_id": 1010, "id": 16667}, {"iscrowd": 0, "category_id": 1, "bbox": [62.0, 202.0, 78.0, 46.0], "area": 1873, "segmentation": {"size": [512, 512], "counts": "oVo04i?3M3M3M4N10000010O000010O000M4L3L41O01O0001O01O01O00010O0001O01O00010O000010O000010O000010O0000010O3M010O0001O01O00010O00XORA>o>^OTAb0U?1O01O00010O00001O0O1O1N3K4M3Lohi5"}, "image_id": 1010, "id": 16668}, {"iscrowd": 0, "category_id": 1, "bbox": [418.0, 231.0, 24.0, 24.0], "area": 430, "segmentation": {"size": [512, 512], "counts": "kWa61i?6J6J6N2010O00000000010O00000000010O00000K5J6JlhR1"}, "image_id": 1010, "id": 16669}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 237.0, 36.0, 48.0], "area": 1229, "segmentation": {"size": [512, 512], "counts": "_7T1m>0O01O01O010O010O00010O010O0O1M4L3O2O010O01O01O01M2M4M2BVACm>;UACn>9VACm>;UABo>:=N2M4L_h]7"}, "image_id": 1010, "id": 16670}, {"iscrowd": 0, "category_id": 1, "bbox": [165.0, 242.0, 64.0, 51.0], "area": 2354, "segmentation": {"size": [512, 512], "counts": "dhb2?S?>0000000000000000000000000000I700ER1N2I70000000001O00000000000H800000000000M30001O000000000_NiAY1_>000000000000000000000000I7WOjhY2"}, "image_id": 1010, "id": 16672}, {"iscrowd": 0, "category_id": 1, "bbox": [55.0, 248.0, 79.0, 44.0], "area": 1883, "segmentation": {"size": [512, 512], "counts": "]hk01k?5L3L4M3O20O00010O0001O01O01M2M3L5O00001O01O00010O0001O01O01O00010O00010O0000010O00010O000010O0001O01O00010O0001O01O0001O01WOPA`0P?]OSAd0U?O0000010O00010O00000N3K4M3L5Kcgl5"}, "image_id": 1010, "id": 16673}, {"iscrowd": 0, "category_id": 1, "bbox": [381.0, 263.0, 55.0, 30.0], "area": 1455, "segmentation": {"size": [512, 512], "counts": "nhn55Y?b0000001O00000000000000000000000000000000000001O00M3M30000000000000000000001O0000000000000000000000000000000igU1"}, "image_id": 1010, "id": 16674}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 296.0, 67.0, 85.0], "area": 2509, "segmentation": {"size": [512, 512], "counts": "P;h0V?2M4M2M3O2O010O01O01O010O01M1OO010O010O010O01001O3O010O0O1M4M2M4M1N3N210O01O01O010N1N2M1O0101N3N3L3N3L3N2N3L3N3L3N2M4M2M4M2M3N3L3N3LcVn6"}, "image_id": 1010, "id": 16675}, {"iscrowd": 0, "category_id": 1, "bbox": [416.0, 298.0, 21.0, 21.0], "area": 416, "segmentation": {"size": [512, 512], "counts": "ZY`61o?c0]O000000001O000000000000000000000000000fVU1"}, "image_id": 1010, "id": 16676}, {"iscrowd": 0, "category_id": 1, "bbox": [76.0, 310.0, 44.0, 70.0], "area": 2209, "segmentation": {"size": [512, 512], "counts": "R[V15b?9K5000000E^OXAb0_>H_A9[>j0O00000000000iAiNe=W1RBROn=`11O0001O000000000N2M3001O01O0000000000000001E:G9F:F:G9FbfS6"}, "image_id": 1010, "id": 16677}, {"iscrowd": 0, "category_id": 1, "bbox": [155.0, 312.0, 71.0, 50.0], "area": 2592, "segmentation": {"size": [512, 512], "counts": "\\j]2k0P?5000000000000000000000000B>O1000000000000000O10000004L00000000000000L400000000000000000008H=C000000000000000000000000000000000000O10000006J^e^4"}, "image_id": 1010, "id": 16678}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 319.0, 6.0, 15.0], "area": 51, "segmentation": {"size": [512, 512], "counts": "o9?a?N2M4M2N3LRfl7"}, "image_id": 1010, "id": 16679}, {"iscrowd": 0, "category_id": 1, "bbox": [325.0, 330.0, 75.0, 43.0], "area": 2456, "segmentation": {"size": [512, 512], "counts": "hjR5b0^?;E0DWOaAi0_>7N00000000000O10O10000000000000O10O8I9G9G:F9GkbU1"}, "image_id": 1010, "id": 16684}, {"iscrowd": 0, "category_id": 1, "bbox": [320.0, 384.0, 53.0, 51.0], "area": 1980, 
"segmentation": {"size": [512, 512], "counts": "T\\P55k?7I7H8I5K00O1002N7I7H2OO1000000000O10O1000000000O10O100000001O7I0O0M400000000000O10O10000000005K8G8I7I7IhRU2"}, "image_id": 1010, "id": 16685}, {"iscrowd": 0, "category_id": 1, "bbox": [51.0, 388.0, 60.0, 53.0], "area": 2182, "segmentation": {"size": [512, 512], "counts": "ili05k?6J6I8I6J2N0000O10O10000000000O0KSOYAm0g>600000000O0100003M000O100JgNeAY1[>60O10O1000000000O010000000000O10XOcAL^>4iAEW>;QB]Oo=c0g0O0100000O5K8G8I\\SX6"}, "image_id": 1010, "id": 16686}, {"iscrowd": 0, "category_id": 1, "bbox": [136.0, 388.0, 53.0, 94.0], "area": 2844, "segmentation": {"size": [512, 512], "counts": "`\\T21o?6EJ`@M3L5L3M]Q_7"}, "image_id": 1010, "id": 16688}, {"iscrowd": 0, "category_id": 1, "bbox": [59.0, 440.0, 61.0, 53.0], "area": 2144, "segmentation": {"size": [512, 512], "counts": "lnm01o?6J6I5L0^OCaA=_>I[A7e>a001000000000O010000000000O01000000000FoNeAP1\\>:000000O10O10000000O1000O100000O1000L`AgNa>Y13O1000O1005K6I7JO100004L7I6IXaS6"}, "image_id": 1010, "id": 16689}, {"iscrowd": 0, "category_id": 1, "bbox": [392.0, 442.0, 50.0, 31.0], "area": 1235, "segmentation": {"size": [512, 512], "counts": "o]T64l?8H8H6J000O1000000000O100000O10000000O100000O10000000O100000O10000000O100000O10000000O1000006J9G8H_aR1"}, "image_id": 1010, "id": 16690}, {"iscrowd": 0, "category_id": 1, "bbox": [335.0, 454.0, 47.0, 42.0], "area": 1468, "segmentation": {"size": [512, 512], "counts": "ZnW58h?9G4L00000O106J8H3M0000000O10O100000000000O10O10000000000000O0100000000000009G8G9H4L0000008He`P2"}, "image_id": 1010, "id": 16691}, {"iscrowd": 0, "category_id": 1, "bbox": [426.0, 469.0, 25.0, 43.0], "area": 812, "segmentation": {"size": [512, 512], "counts": "h^e66j?8H7H9H7I4L0000O10O1000000000O1000O1007I7I8H7H9HVPn0"}, "image_id": 1010, "id": 16692}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 0.0, 74.0, 128.0], "area": 4018, "segmentation": {"size": [512, 512], "counts": "W3?_?2M4M2N3L3N2N3L3N3M2M3N3M2M4M2M3N3M2M4M2N3L3N2N3L3N00O010O10O01000O010O0100O01000O010O102M3N3M2M3N3L3N3L3N2N3L3N3L3N3M2M3N3L3N3L3N2N3L3N3L3N2M4M2Ngoj6"}, "image_id": 1012, "id": 16693}, {"iscrowd": 0, "category_id": 1, "bbox": [149.0, 0.0, 51.0, 24.0], "area": 642, "segmentation": {"size": [512, 512], "counts": "P`Z21o?00001O00001O00001O001O00001O00001O000]@K[?6a@N^?:0001O001O00001O00001O00001O001O00001O00001OO1M3N2M3M3N2M4LRPl4"}, "image_id": 1012, "id": 16694}, {"iscrowd": 0, "category_id": 1, "bbox": [242.0, 0.0, 27.0, 14.0], "area": 252, "segmentation": {"size": [512, 512], "counts": "TPi32k?3O2O00001O001O00001O001O00001O001O00001O00O1M3M3N2MS`i3"}, "image_id": 1012, "id": 16695}, {"iscrowd": 0, "category_id": 1, "bbox": [273.0, 12.0, 29.0, 27.0], "area": 470, "segmentation": {"size": [512, 512], "counts": "QaX41m?3M2M3N3L3N3M2M4M21O01O01M2N3N1010O0010O0010ON2N3L3N3M2M_oX3"}, "image_id": 1012, "id": 16696}, {"iscrowd": 0, "category_id": 1, "bbox": [198.0, 21.0, 56.0, 46.0], "area": 1634, "segmentation": {"size": [512, 512], "counts": "fQS33j?3N2M4M2N3L3N201O010O010N1N1OO0102N2M3N3O010O010O00010O010O00010O010O010N1M3N3M2010O01O01O0N3M2M4M2N2_OQANS?NQAOQ?OQAOR?NQANR?OQAOV^Q4"}, "image_id": 1012, "id": 16697}, {"iscrowd": 0, "category_id": 1, "bbox": [275.0, 38.0, 45.0, 53.0], "area": 1412, "segmentation": {"size": [512, 512], "counts": "\\bY43k?2N3L3N3L3N2N3L3n@XOj>k0SAWOj>R1M2M4M2N2M4M21N1M4O00010O010O0001N0OO4M2M3N3M2M4M2M3N3L3N3M2M4M2M3N3Mdno2"}, "image_id": 1012, "id": 16698}, {"iscrowd": 0, "category_id": 1, "bbox": [318.0, 47.0, 61.0, 55.0], "area": 1947, "segmentation": {"size": [512, 
512], "counts": "_Ro41m?3L3N2M4L3N3L3O110O010O00010O010O00010M2N3L31O010O00010O01RAROi>m0UAUOj>Q1N3L3N2M4L3O20O00010O010O00001M200010O01O0ZOaAL^>1fAM\\>0fAM]>0gAM\\>OgAN\\>0fAN\\>OhAM\\>0fAM]>0S_R2"}, "image_id": 1012, "id": 16699}, {"iscrowd": 0, "category_id": 1, "bbox": [73.0, 73.0, 67.0, 72.0], "area": 2843, "segmentation": {"size": [512, 512], "counts": "dcT11m?2N3L3`@IU?:h@HV?:h@IU?b0M4M2N3M2M3N3M2N3L3N3M2N3L30010O010O010O00010O010O010O00010O010O010O01O01O010ON3M2M4M2N2M4M2N3L3N3M2M3N3M2N3L3N3M2M3N3M2MVmi5"}, "image_id": 1012, "id": 16700}, {"iscrowd": 0, "category_id": 1, "bbox": [377.0, 77.0, 77.0, 76.0], "area": 3062, "segmentation": {"size": [512, 512], "counts": "Rdl53k?3L3M4M2M3N3L3N30O0010O0010O0010O00010O010O0HXOXAh0f>[OZAe0b>^O^Ab0`>A`A?\\>DdAGgA9W>JiA6S>MmA3Q>0oA0m=3SBMk=5UBKh=9XBGe=;[BEd==[BDe=;\\BDd=<\\BDd==\\BCd=<\\BDd==[BDe=;XBHh=X1010O010O00010O0N3L3N2M4M2M3N0O011N4L3N2M4M2M4M2M3N3L3N3L3N2M4M2M3M^ml0"}, "image_id": 1012, "id": 16701}, {"iscrowd": 0, "category_id": 1, "bbox": [158.0, 81.0, 74.0, 58.0], "area": 2380, "segmentation": {"size": [512, 512], "counts": "\\S_23k?2W@Lc?:N3L3M4M2M3O10N210O0010O0010O0010m@YOk>g0SA[Om>l010O010O00010O010L3N2010O01O01O010O01O01O010O01L3M3N3L3N3L300010O01O01M2M4M2M3N3L3N3L3N2M4L3N2M4M2M4M2M3N3LYm[4"}, "image_id": 1012, "id": 16702}, {"iscrowd": 0, "category_id": 1, "bbox": [443.0, 105.0, 54.0, 45.0], "area": 1395, "segmentation": {"size": [512, 512], "counts": "Sdm63j?3N2M4L3N2M4M2M4L3N210O010O00010O01O01O010O00010O01O01O010O00010O01O01O010O00010O010OO1N3L3N3L3M3N3L3M3N3LY\\7"}, "image_id": 1012, "id": 16703}, {"iscrowd": 0, "category_id": 1, "bbox": [252.0, 108.0, 42.0, 52.0], "area": 1266, "segmentation": {"size": [512, 512], "counts": "fTn31m?2M4L3N2M4M2M4M2HYOTAj0i>ZOTAi0h>9N3L3N2O2O0010O0010O0010O001O0N2M4M2M4L3N2M4M2M3N3L3M4M2M3N3L]l\\3"}, "image_id": 1012, "id": 16704}, {"iscrowd": 0, "category_id": 1, "bbox": [336.0, 115.0, 32.0, 29.0], "area": 587, "segmentation": {"size": [512, 512], "counts": "VTX51l?4M2M4M2M4M2M3O20O010O00010O010ON2O2O010O0001N1M4M2M3N3L3N3M2MZlW2"}, "image_id": 1012, "id": 16705}, {"iscrowd": 0, "category_id": 1, "bbox": [148.0, 120.0, 13.0, 18.0], "area": 114, "segmentation": {"size": [512, 512], "counts": "WTZ21m?3M2M4M2N2M20O2N2M4M2N3LY\\_5"}, "image_id": 1012, "id": 16706}, {"iscrowd": 0, "category_id": 1, "bbox": [217.0, 123.0, 19.0, 23.0], "area": 258, "segmentation": {"size": [512, 512], "counts": "\\d\\32k?3N3M2M4M2N3L300010O001L3N2N3L3N3M2MTlY4"}, "image_id": 1012, "id": 16707}, {"iscrowd": 0, "category_id": 1, "bbox": [508.0, 143.0, 4.0, 18.0], "area": 41, "segmentation": {"size": [512, 512], "counts": "_Tn72n?6J5K5\\K"}, "image_id": 1012, "id": 16708}, {"iscrowd": 0, "category_id": 1, "bbox": [113.0, 155.0, 91.0, 114.0], "area": 3033, "segmentation": {"size": [512, 512], "counts": "Rhh11m?3L3N2M4M2M4M2N210O010O00010O00010O010O000O2L3N3L100O010O010O0201010O00010O01N1M3N3O0010O0fN[OYCf0d<]OYCe0e<]OXCf0e<]OXCg0e<\\OXCf0e<]OXCg0e<\\OXCf0f<\\OWCf0g<]OWCb0jo001O0M4M2N2M4M2N3M201O01O001M2O1010O010O010O01jNbAh0^>VOdAk0[>ROiAm0X>POjAQ1b>YOTA8o>ETA8n>EUA9n>DUA8n>FTA8[?N3M2MPZd1"}, "image_id": 1012, "id": 16710}, {"iscrowd": 0, "category_id": 1, "bbox": [411.0, 172.0, 66.0, 63.0], "area": 2348, "segmentation": {"size": [512, 512], "counts": "jf]62k?4M2M4M2N3L3N2N3M210O010O00010O010O01OO2M2N3L3N3M2M3N3L3N3M2M4M2O110O01N1N3M2N210O0010O010O00010O01ROfA5Y>IiA7W>FmA:S>CoA=Q>@SB?n=^OTBc0k=[OWBc0l=YOXBd0j=ZOXBc0e>M2N3L3N2M4M2N_Ya0"}, "image_id": 1012, "id": 16711}, {"iscrowd": 0, "category_id": 1, "bbox": 
[195.0, 173.0, 32.0, 38.0], "area": 670, "segmentation": {"size": [512, 512], "counts": "YfQ32l?2M4M2N3M2N3L3N3M2N3L3N2O2O010O010O01M2M4M2N2N3L3N3M2N3L3N3M2N]Z^4"}, "image_id": 1012, "id": 16712}, {"iscrowd": 0, "category_id": 1, "bbox": [272.0, 188.0, 56.0, 49.0], "area": 1635, "segmentation": {"size": [512, 512], "counts": "QWX43k?3L3M4EEl@>Q?Fl@n0mAVOR>j0lAXOU>Z1O00010O010O01O01O010OO2@kAPOX>n0kAoNW>n0lAoNX>n0kAoNW>o0?L3N2M4M2M4M2N3L3N2MiXm1"}, "image_id": 1012, "id": 16715}, {"iscrowd": 0, "category_id": 1, "bbox": [466.0, 224.0, 46.0, 35.0], "area": 934, "segmentation": {"size": [512, 512], "counts": "aWY74i?3M4M2M3M4M2O1010O0010O0010O00010O00010O0010O0010O00010O00010O0010O0010O00010O0M3M4M2M4L3MeH"}, "image_id": 1012, "id": 16716}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 231.0, 51.0, 50.0], "area": 1537, "segmentation": {"size": [512, 512], "counts": "Shm21m?3L3M3M4M2M4i@^Ok>e0RA^Ok>e0SA]Ok>n0N100010O01O01O010O00010O00010O01O01O010O0001N1M3M4L12M4L3M3N3L3M4L3N2M4L3M3N3L3M3MghX4"}, "image_id": 1012, "id": 16717}, {"iscrowd": 0, "category_id": 1, "bbox": [252.0, 240.0, 50.0, 51.0], "area": 1394, "segmentation": {"size": [512, 512], "counts": "]Xn33k?3L3N2M4O010a@DX?i0kAYOU>g0hA\\OX>d0eA@[>R101N1M3M4M2N210O0010O00010O0010OO0N1O3N3L3ChAoN[>n0iAoNZ>n0hAoN[>n0=M4M2M4L3M3N3L3M3NZXm0"}, "image_id": 1012, "id": 16719}, {"iscrowd": 0, "category_id": 1, "bbox": [230.0, 274.0, 23.0, 26.0], "area": 374, "segmentation": {"size": [512, 512], "counts": "SYc33k?2M4L3M4M2M3O2O01O01O01O010O01OO2L3M3N3L3M4MZWQ4"}, "image_id": 1012, "id": 16720}, {"iscrowd": 0, "category_id": 1, "bbox": [304.0, 287.0, 56.0, 67.0], "area": 1837, "segmentation": {"size": [512, 512], "counts": "jYh42l?2M4M2N3L3N2N3M2M4M2N1O12O00010O010O010O01O0fAROc=m0[BUOe=l0XBWOh=h0UB[Ok=f0RB\\Oo=c0oA@P>a0lABT>>jAEV>:hAHX>Q10O0010O0010O010O010N1N3M2N2@_A^Oe>?]A_Oe>>^A@e>=^A_Oe>?]A_Of>=`0N2N3M2M4M`f[2"}, "image_id": 1012, "id": 16721}, {"iscrowd": 0, "category_id": 1, "bbox": [501.0, 311.0, 11.0, 23.0], "area": 154, "segmentation": {"size": [512, 512], "counts": "Yjj71l?3N3L3N2M4M2M40O0001WF"}, "image_id": 1012, "id": 16722}, {"iscrowd": 0, "category_id": 1, "bbox": [428.0, 316.0, 21.0, 30.0], "area": 366, "segmentation": {"size": [512, 512], "counts": "cZf62k?3N3L3M3M4M2010O0001M2M3M4OO1N3L3M3Dg@O\\?Oe@N^?O]Vo0"}, "image_id": 1012, "id": 16723}, {"iscrowd": 0, "category_id": 1, "bbox": [372.0, 317.0, 51.0, 54.0], "area": 1592, "segmentation": {"size": [512, 512], "counts": "T[j53k?2M3M4M2M4N11O01M2M4M2M3M4O000010O010OM3N3L3N3L3N2010O0010O0010O00001M2M4L3N2M4M2M4L3N2M4M2M3M4M2M4MkU\\1"}, "image_id": 1012, "id": 16724}, {"iscrowd": 0, "category_id": 1, "bbox": [482.0, 335.0, 13.0, 13.0], "area": 100, "segmentation": {"size": [512, 512], "counts": "gZa71m?3M2N3M2010O010O0N3N1N3M_U8"}, "image_id": 1012, "id": 16725}, {"iscrowd": 0, "category_id": 1, "bbox": [435.0, 349.0, 42.0, 44.0], "area": 1128, "segmentation": {"size": [512, 512], "counts": "bki62k?4L3N2M4L3g@@R?a0k@BT?e010O0010O00010O01O010O01O01L3M4M2O20O0001L3O020O01O01O_O[AGf>5]AKc>2aAN^>0dAM`>OcAN`>OdAM_>0dAM`>0gUa0"}, "image_id": 1012, "id": 16726}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 366.0, 38.0, 54.0], "area": 1295, "segmentation": {"size": [512, 512], "counts": "X1\\Ac06^O[>2]Ac04_O\\>j0bAXO\\>X1L3N2M4M2N3L3N2N3N110O01N1N2M4M2N3L3N2M4M2M4M2M40OUOYA=g>A[A`0e>\\O_Ac0a>[OaAe0`>WOcAj0j>N1N3L3N3L3N2N3L3Nhcc6"}, "image_id": 1012, "id": 16728}, {"iscrowd": 0, "category_id": 1, "bbox": [500.0, 371.0, 12.0, 22.0], "area": 189, "segmentation": {"size": [512, 512], 
"counts": "R\\j73j?3M4M2M3M4N101O01O01O0\\D"}, "image_id": 1012, "id": 16729}, {"iscrowd": 0, "category_id": 1, "bbox": [96.0, 390.0, 53.0, 67.0], "area": 1808, "segmentation": {"size": [512, 512], "counts": "i]`12l?3L3N2N3L3BCWA`0f>CVA`0g>CWA?g>?L3[AiN`>\\11O010O010O00010O010O0001O0M4M1O0O0100O10O12M3N3L3N2M4M2M4M2M3O20O01N1N2M4M2M4M2M3NeSe5"}, "image_id": 1012, "id": 16730}, {"iscrowd": 0, "category_id": 1, "bbox": [133.0, 430.0, 54.0, 56.0], "area": 1584, "segmentation": {"size": [512, 512], "counts": "enR23j?3N2M4L3N3L30010O010O00010O010O000O2M2M4M2M3M4M2M4M2M3M40O01O01O010O01OO2M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3NZRR5"}, "image_id": 1012, "id": 16731}, {"iscrowd": 0, "category_id": 1, "bbox": [186.0, 435.0, 62.0, 67.0], "area": 2065, "segmentation": {"size": [512, 512], "counts": "Y_m22k?3M3M4M2M4L3M3M4M2M4L3M3M4M2M4M21O010O010O01O01O010O010O000O2L3N3M2N0O0100O1000O011O2N2N3L3N3M2N2O2O010O010O0O1M4L3N3L3M3MYbS4"}, "image_id": 1012, "id": 16732}, {"iscrowd": 0, "category_id": 1, "bbox": [12.0, 448.0, 48.0, 64.0], "area": 1831, "segmentation": {"size": [512, 512], "counts": "__61l?4M2N3M2M4j@B_>0[A`04B^>2[A?4A_>i0^AZO_>i0_AYO_>U1M3N2M3N2M3N2O100001O00001O0O1N3M2N2M4M2N2N3L30001SOYA?g>^O\\Ac0c>ZOaAe0`>WOcAj0i>01L3N2M4L3N2M4M2M\\aQ7"}, "image_id": 1012, "id": 16733}, {"iscrowd": 0, "category_id": 1, "bbox": [242.0, 468.0, 52.0, 43.0], "area": 1342, "segmentation": {"size": [512, 512], "counts": "]_i32k?4M2M4L3N3L3M3N3L3O2O01O010O010O00010O010O00010O01O0O1010O01O010O01O01O010O01O001M2M3N3L3N3L3N2M4M2M4Mo`\\3"}, "image_id": 1012, "id": 16734}, {"iscrowd": 0, "category_id": 1, "bbox": [65.0, 476.0, 48.0, 36.0], "area": 1017, "segmentation": {"size": [512, 512], "counts": "goP12l?3L3\\@J[?9c@I[??N200001O001O00001O001O00001OO1M3N2N2N2M3N2N2N2O11O00001O001M2N2M4O00001M2N3L3N2M4L3N3LoPW6"}, "image_id": 1012, "id": 16735}, {"iscrowd": 0, "category_id": 1, "bbox": [361.0, 485.0, 23.0, 24.0], "area": 345, "segmentation": {"size": [512, 512], "counts": "dod53j?3N3L3M3M4N1010O00010O00010O00010L3N3L3M3M4Lh`o1"}, "image_id": 1012, "id": 16736}, {"iscrowd": 0, "category_id": 1, "bbox": [310.0, 497.0, 22.0, 15.0], "area": 225, "segmentation": {"size": [512, 512], "counts": "o_k41m?2M3M3N2M3O1001O001O00001O00001O001O0N2N3L\\`i2"}, "image_id": 1012, "id": 16737}, {"iscrowd": 0, "category_id": 1, "bbox": [162.0, 499.0, 26.0, 13.0], "area": 224, "segmentation": {"size": [512, 512], "counts": "m_a23k?2M3N2N2O1001O00001O001O001O00001O001O00001O001N1NW`Q5"}, "image_id": 1012, "id": 16738}, {"iscrowd": 0, "category_id": 1, "bbox": [107.0, 501.0, 24.0, 11.0], "area": 161, "segmentation": {"size": [512, 512], "counts": "noe12l?2M3N2N21O00001O001O00001O001O00001O001O00001NTPn5"}, "image_id": 1012, "id": 16739}, {"iscrowd": 0, "category_id": 1, "bbox": [254.0, 509.0, 8.0, 3.0], "area": 14, "segmentation": {"size": [512, 512], "counts": "o_o31m?2001O001O0000Q`l3"}, "image_id": 1012, "id": 16740}, {"iscrowd": 0, "category_id": 1, "bbox": [43.0, 0.0, 6.0, 3.0], "area": 10, "segmentation": {"size": [512, 512], "counts": "P`e01o?001O001ONR`W7"}, "image_id": 1014, "id": 16741}, {"iscrowd": 0, "category_id": 1, "bbox": [61.0, 0.0, 54.0, 26.0], "area": 895, "segmentation": {"size": [512, 512], "counts": "U`n04i?3N201O001O00001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O00001O001O00001O00M4M2M3N3L3M3N3LP`V6"}, "image_id": 1014, "id": 16742}, {"iscrowd": 0, "category_id": 1, "bbox": [132.0, 0.0, 51.0, 28.0], "area": 1025, "segmentation": {"size": [512, 512], "counts": 
"cPR23j?3M3N3L3N3L3M301O001O00001O00001OO1M3M3N2O1001O00001O001O00001O001O00001O00001O001O00M3M3N2M3N2M3G\\@3j?No_T5"}, "image_id": 1014, "id": 16743}, {"iscrowd": 0, "category_id": 1, "bbox": [192.0, 0.0, 51.0, 54.0], "area": 1643, "segmentation": {"size": [512, 512], "counts": "aPP32l?2M3N3M2N3L3N3N101\\AZOl=f0QB]Oo=c0oA@P>a0lABT>>jADV>=fAGY>9dAJ\\>7aAL^>k00001O001O00001O001O00001OO1N2M3M3N2M3N2M3N2M3N2M3N2M3M3N2M3N2M3N2MS`V4"}, "image_id": 1014, "id": 16744}, {"iscrowd": 0, "category_id": 1, "bbox": [274.0, 0.0, 72.0, 52.0], "area": 2084, "segmentation": {"size": [512, 512], "counts": "WPY42m?2V@Md?5Z@Md?9N2N2N2N2O1O1O1O1O2N1O1O1O1O1O1O1O1O1O1O1O1O1O2N1O1O1O1O1O1O1001O00000N2N2N2N2N2N2N1O00000001O0000000001O002N2N3M2N2N2N2N2O1N2N2N2N2N2N2N_ob2"}, "image_id": 1014, "id": 16745}, {"iscrowd": 0, "category_id": 1, "bbox": [410.0, 0.0, 46.0, 19.0], "area": 526, "segmentation": {"size": [512, 512], "counts": "RP]62l?3O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O0000M3N2M3N2M3N2MSPl0"}, "image_id": 1014, "id": 16746}, {"iscrowd": 0, "category_id": 1, "bbox": [475.0, 5.0, 37.0, 54.0], "area": 1597, "segmentation": {"size": [512, 512], "counts": "Pa]7k0U?00000ZOf0K50000000000000000000000000000000000000F:C=01O0001O0000000000000J"}, "image_id": 1014, "id": 16747}, {"iscrowd": 0, "category_id": 1, "bbox": [29.0, 6.0, 18.0, 20.0], "area": 218, "segmentation": {"size": [512, 512], "counts": "c`>3k?2M4M2N2N3M2010O010O000N3L3N3M2N3Lh_X7"}, "image_id": 1014, "id": 16748}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 15.0, 21.0, 41.0], "area": 454, "segmentation": {"size": [512, 512], "counts": "?Y1g>O2N2N2N2N2N2N2N2N2N2N2N2N2M3N2N2N2N2N2No^e7"}, "image_id": 1014, "id": 16749}, {"iscrowd": 0, "category_id": 1, "bbox": [27.0, 17.0, 41.0, 53.0], "area": 1171, "segmentation": {"size": [512, 512], "counts": "ja=2l?3M2M4M2N2N3L3N3M2M4M2N2M4M2N3L3N3M210O01O01ON3M2N3L3N3M2N2M4M2N3L3N3M2M3N3M2N3LXom6"}, "image_id": 1014, "id": 16750}, {"iscrowd": 0, "category_id": 1, "bbox": [369.0, 39.0, 44.0, 37.0], "area": 1283, "segmentation": {"size": [512, 512], "counts": "Yah54l?9G9G9G2N0O100000000000O100000O10000004L1N100004L000O10O1000000L4O1000O100000000009F:GT^a1"}, "image_id": 1014, "id": 16751}, {"iscrowd": 0, "category_id": 1, "bbox": [237.0, 47.0, 84.0, 79.0], "area": 3756, "segmentation": {"size": [512, 512], "counts": "Ycf31g?0^@2_?2]@2`?8M4M2M3N3M2M4M2M4M20001O010O01O01O010M2M3N3L3NO0100O3N3L3O2O00010O010O00010O010O01O01O010O01O01O010O01O01O010O01O0iNQB>n=_OVB`0k=]OWBd0h=YO\\Bf0e=WO]Bi0c=TO`Bm0_=QOdBn0]=nNfBS1Y=kNjBT1Q>0010N1M4M2M3N3L3N3L3N2M4M2M4MT]o2"}, "image_id": 1014, "id": 16752}, {"iscrowd": 0, "category_id": 1, "bbox": [334.0, 51.0, 29.0, 31.0], "area": 496, "segmentation": {"size": [512, 512], "counts": "YRW51m?2N3M2N2N3L3N3M2N3M2O20O010O010O010O0M3N3M2N3M2N3M2N3M2MX^Z2"}, "image_id": 1014, "id": 16753}, {"iscrowd": 0, "category_id": 1, "bbox": [37.0, 58.0, 55.0, 74.0], "area": 2207, "segmentation": {"size": [512, 512], "counts": "fcb03j?3N3L3M4M2M3M4AXObAj0[>ZOaAi0]>YOaAj0[>YObAj0[>`0M21O010O01O01O010O01O0O1M4M2M4L3N2M11N3N3L3N2M4M2M3M4M2M4M2M3N3L3N3L3M3N3L3N3L3N2M4Moma6"}, "image_id": 1014, "id": 16754}, {"iscrowd": 0, "category_id": 1, "bbox": [118.0, 58.0, 64.0, 61.0], "area": 2146, "segmentation": {"size": [512, 512], "counts": "gRk11m?2M4M2N3M2M4O01O010O010O01N1N2M4M201O010O0010OUAVO`>j0]AYOd>g0YA\\Of>d0XA^Oi>m0N1N2M4M2N3M2O2O000O2O001O0010O010O00101WNjAd1Y>O1N3M2M4M2N3M3oNTAi0S?N2N3L3N3M2N3L4MdmT5"}, "image_id": 1014, "id": 16755}, {"iscrowd": 0, "category_id": 1, 
"bbox": [469.0, 91.0, 43.0, 48.0], "area": 1384, "segmentation": {"size": [512, 512], "counts": "ocZ71k?4L4M4K4L4L5K4L4M3N3O0000010O0001O01O0001O01O00010O00nNZAj0f>RO_Am0j>1O01O0001O01O0001O01O0001O0eL"}, "image_id": 1014, "id": 16756}, {"iscrowd": 0, "category_id": 1, "bbox": [314.0, 105.0, 32.0, 37.0], "area": 711, "segmentation": {"size": [512, 512], "counts": "TTm42k?3N2M4M2M4M2M3N3M2M4M200010O01O01O010O01OO2L3N3L3N2M4M2M4M2M3Nblb2"}, "image_id": 1014, "id": 16757}, {"iscrowd": 0, "category_id": 1, "bbox": [117.0, 117.0, 59.0, 78.0], "area": 2007, "segmentation": {"size": [512, 512], "counts": "gdj13k?3L3N3WAGg=;WBHg=9WBIi=8SBLm=3QBOn=3nA0P>2nA1n=2oA1o=2nA1n=2PB0n=S1L3N2N3L3N30O0nN]BGd=6^BKa=2cBJ`=4bBJa=3aBJb=3bBJa=5_BIc=8]BDg=;YBCi=>WB^Ol=b0VBZOm=e0UBWOm=j0e0O01O01O010O0O1M4M2N1N11N4O001O01O010O0N2N3M2M4M2M4MTlW5"}, "image_id": 1014, "id": 16758}, {"iscrowd": 0, "category_id": 1, "bbox": [74.0, 120.0, 15.0, 17.0], "area": 136, "segmentation": {"size": [512, 512], "counts": "STU12l?2O2M2N3M2N30O010M2O2M3M2N3MU\\c6"}, "image_id": 1014, "id": 16759}, {"iscrowd": 0, "category_id": 1, "bbox": [78.0, 123.0, 40.0, 54.0], "area": 1240, "segmentation": {"size": [512, 512], "counts": "TUW11m?3M2M4M2M4M2N2M4M2M4M2M3N3M2M4M2M301O010O0010O0010O0010O0N3[ObACa>;bABa>:bACa>;aACa>;bABa>:d0N3L3N2MPlT6"}, "image_id": 1014, "id": 16760}, {"iscrowd": 0, "category_id": 1, "bbox": [182.0, 123.0, 67.0, 103.0], "area": 3057, "segmentation": {"size": [512, 512], "counts": "dVk21l?4M2M4M2M3N3O001O01O01L3N3L3N2M4M2M4L3N2M4M2M3N3L3N3L3\\OPNUCS2i]10OO1N3L3N3L3N2N3M201O010O01N1N2M4M2N3L3N2M4HPB\\NR>b18M2M3N3L3N3L3N3L3N2M4M2N3L3N2M4M2Mgke0"}, "image_id": 1014, "id": 16763}, {"iscrowd": 0, "category_id": 1, "bbox": [63.0, 141.0, 19.0, 23.0], "area": 259, "segmentation": {"size": [512, 512], "counts": "ndo02l?2M4M2N2M4M2N3N1010O000N3M2M4M2N3L3Nakf6"}, "image_id": 1014, "id": 16764}, {"iscrowd": 0, "category_id": 1, "bbox": [272.0, 144.0, 29.0, 31.0], "area": 532, "segmentation": {"size": [512, 512], "counts": "UUX43k?2N3L3N3M2M3N3M2M40O0010O010O0010O001O0N3L3N2N3L3N3M2M4MY[Y3"}, "image_id": 1014, "id": 16765}, {"iscrowd": 0, "category_id": 1, "bbox": [469.0, 144.0, 43.0, 72.0], "area": 1843, "segmentation": {"size": [512, 512], "counts": "[fZ71n?1N3M2^OKWA8f>KXA6g>KWA8f>KWA8f>JYA7e>LXA7f>b0N3CiNmAZ1P>hNnAZ1P>iNnAY1P>^OWAe0f>=M2M3N3L3M301O0O2M2M3N3L3N3O01O01O010OO1M4M2M4L3N2M4M2M4M2M3M4M2M4M2]Ol@7W?Gk@6Y?Fk@7`?M3NZjf3"}, "image_id": 1014, "id": 16767}, {"iscrowd": 0, "category_id": 1, "bbox": [373.0, 188.0, 14.0, 12.0], "area": 101, "segmentation": {"size": [512, 512], "counts": "Rfj52l?2M4N110O00010O00010OO2M2MSZn1"}, "image_id": 1014, "id": 16768}, {"iscrowd": 0, "category_id": 1, "bbox": [278.0, 194.0, 80.0, 84.0], "area": 3262, "segmentation": {"size": [512, 512], "counts": "gW[42l?2N2M4M2M4M2N2M4M2N3L3N3N100010O010O010O00010O010O010O00010O01N1N2M4M2M4M2N3L3N2010O01M2M4M2M3N30O010O00010O010O0QN]Bb1c=[N_Be1a=YNbBf0K1c=VOeBg0J4o=HUB7k=GWB:i=CZB10L3N3L3N2N3L3N3M2M3NVh\\2"}, "image_id": 1014, "id": 16769}, {"iscrowd": 0, "category_id": 1, "bbox": [496.0, 209.0, 16.0, 34.0], "area": 326, "segmentation": {"size": [512, 512], "counts": "ZWh73k?2N3L3N2M4M2N3L3N30O0010N1M4M2_I"}, "image_id": 1014, "id": 16770}, {"iscrowd": 0, "category_id": 1, "bbox": [466.0, 216.0, 29.0, 27.0], "area": 456, "segmentation": {"size": [512, 512], "counts": "YWY71m?3M2M4M2N3M2M3O20O010O010O01O01O010O010O001M2M3N3M2N3L3NQY8"}, "image_id": 1014, "id": 16771}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 222.0, 42.0, 50.0], "area": 1196, 
"segmentation": {"size": [512, 512], "counts": "QXR64j?2N2M4M2N3M2M4M2N2N3L3N3M2M4M2O20O0010O010O010O0010M2N3L3N3M2N2M4M2N3M2M4M2N3M2M3NihX1"}, "image_id": 1014, "id": 16772}, {"iscrowd": 0, "category_id": 1, "bbox": [436.0, 262.0, 40.0, 50.0], "area": 1266, "segmentation": {"size": [512, 512], "counts": "QYj6193V?0h@2U?1h@2V?>L3M3N3L3O2O01O01O0N2M4M2O2O01O01O01O010O01OO2L3M3N3L3M4M2M3M4L3N2M4L3N3Ldga0"}, "image_id": 1014, "id": 16773}, {"iscrowd": 0, "category_id": 1, "bbox": [19.0, 275.0, 53.0, 53.0], "area": 1590, "segmentation": {"size": [512, 512], "counts": "hi91l?3N2M4M2M4M2M3M4M2M4M2M3N3L3O1010O010O00010O010O00010O01O01iN]Ao0c>nNaAR1f>01O01O010O00010O01N1M3N3L3N3L3M3N3L3N3L3Nifk6"}, "image_id": 1014, "id": 16774}, {"iscrowd": 0, "category_id": 1, "bbox": [476.0, 277.0, 36.0, 54.0], "area": 1204, "segmentation": {"size": [512, 512], "counts": "lY^73k?2M4M2M4M2M3N3M2010O0010O0N3L3N2M4M2M4M2M3N30O010O00010O010L3N2M4M2M4MWG"}, "image_id": 1014, "id": 16775}, {"iscrowd": 0, "category_id": 1, "bbox": [380.0, 283.0, 27.0, 28.0], "area": 466, "segmentation": {"size": [512, 512], "counts": "\\Yn53k?2M4M2N3L3N2N3N1010O01O01O010O01O010OO2M2N3L3N2M4M2NPWd1"}, "image_id": 1014, "id": 16776}, {"iscrowd": 0, "category_id": 1, "bbox": [79.0, 305.0, 55.0, 54.0], "area": 1790, "segmentation": {"size": [512, 512], "counts": "ejW12k?3N3M2N2M4M2N3L3N3M2M3N3M2M4M2O2O0010O0010O010O0N3O01O010O01O010O01O010O01ON3M2N3L3N3M2M4M2N2M4M2N3L3N3M2N2MUfl5"}, "image_id": 1014, "id": 16777}, {"iscrowd": 0, "category_id": 1, "bbox": [429.0, 309.0, 29.0, 23.0], "area": 403, "segmentation": {"size": [512, 512], "counts": "Sjf61m?3L3M3M4M2O101O01O010O01O01O010O00010O0010O0001M2M4L3M3NUfj0"}, "image_id": 1014, "id": 16778}, {"iscrowd": 0, "category_id": 1, "bbox": [457.0, 317.0, 27.0, 26.0], "area": 455, "segmentation": {"size": [512, 512], "counts": "^jT74i?3N3L3M3N3L3O1010O00010O01ON3O010O00010M2M3N3L3M4M2Moe="}, "image_id": 1014, "id": 16779}, {"iscrowd": 0, "category_id": 1, "bbox": [180.0, 323.0, 20.0, 24.0], "area": 303, "segmentation": {"size": [512, 512], "counts": "fZj21l?4K4M3M4L3M301O00010O0001O0M3L5L3M3M4Llek4"}, "image_id": 1014, "id": 16780}, {"iscrowd": 0, "category_id": 1, "bbox": [202.0, 334.0, 88.0, 70.0], "area": 3325, "segmentation": {"size": [512, 512], "counts": "e[U32k?4M2M4M2M4c@_OV?g0N2M4M2M4MO03N3L3N3N101O01O01O010O010O001hAaNn=^1oAeNQ>[1lAhNU>b1O0010ZNnA\\1R>bNPB^1Z>0O01O01OM4L3M4O01O01O010O01O01O010O01O01O01O010O01O01O010O01O01N1N3L3O110O01M2M3N3L3N3L3N2M4M2M4L3N2M4M2M3Nhd^3"}, "image_id": 1014, "id": 16781}, {"iscrowd": 0, "category_id": 1, "bbox": [133.0, 342.0, 52.0, 44.0], "area": 1201, "segmentation": {"size": [512, 512], "counts": "dkR21l?3M3N3L3M3N3M2010O00010O00010O0010O0010O00010O0010O0N3L3N2M4L3M3N3N11O01O01O0M3M4L3N3L3M3N3L3M3M4M2M4LVUS5"}, "image_id": 1014, "id": 16782}, {"iscrowd": 0, "category_id": 1, "bbox": [46.0, 351.0, 33.0, 30.0], "area": 608, "segmentation": {"size": [512, 512], "counts": "a[g03k?2M4M2M4L3N2M4O010O01O01O01O01O010O01O01O01O01O01O0M4L3N2M4M2M3MkTh6"}, "image_id": 1014, "id": 16783}, {"iscrowd": 0, "category_id": 1, "bbox": [510.0, 351.0, 2.0, 6.0], "area": 9, "segmentation": {"size": [512, 512], "counts": "R[o73j?3QE"}, "image_id": 1014, "id": 16784}, {"iscrowd": 0, "category_id": 1, "bbox": [185.0, 357.0, 18.0, 22.0], "area": 219, "segmentation": {"size": [512, 512], "counts": "fkl21l?3N3L3N3L3N2N3O010O01M2M3N3L3N3L3NiTj4"}, "image_id": 1014, "id": 16785}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 361.0, 11.0, 27.0], "area": 173, "segmentation": 
{"size": [512, 512], "counts": "Y;j0V?10N1M3N3L3M3N3L3N3LfTj7"}, "image_id": 1014, "id": 16786}, {"iscrowd": 0, "category_id": 1, "bbox": [140.0, 383.0, 25.0, 24.0], "area": 372, "segmentation": {"size": [512, 512], "counts": "_\\V21l?4M2M3N3M2M4N10010O0010O0010O010O0001M2N3L3N2M4MlS]5"}, "image_id": 1014, "id": 16787}, {"iscrowd": 0, "category_id": 1, "bbox": [347.0, 397.0, 70.0, 75.0], "area": 2630, "segmentation": {"size": [512, 512], "counts": "lm]53k?3M2M3N3L3N3M2SA\\O_>d0^A^Ob>b0\\AAd>>YAEg>g000010O010O010O00010O[OmN]BT1`=nNaBc0_ODm=McB?@Bm=1cB=A_Ol=7cB:@\\Om==cB7a=K]B7`=M\\B7c=I[B9f=GWBf010O0010O0010O010O0010N1M4M2N3L3N2N3L3N3L3N2N3L3N3M2M4M2M3N3M2M4M2NUS_1"}, "image_id": 1014, "id": 16788}, {"iscrowd": 0, "category_id": 1, "bbox": [102.0, 402.0, 56.0, 43.0], "area": 1230, "segmentation": {"size": [512, 512], "counts": "W]c11l?4M2N3L3N3M2N3L3N210O0010O010O0010O010O0010O010O0010O0010O010O0010O010O0010O010O0010O0010O010M2M4M2N3M2M3N3M2Mmb`5"}, "image_id": 1014, "id": 16789}, {"iscrowd": 0, "category_id": 1, "bbox": [159.0, 406.0, 85.0, 53.0], "area": 2256, "segmentation": {"size": [512, 512], "counts": "mm_23k?2N3M2N3N2M2N3M2N3N2M2N3M2O2O001O1N101O000O01N2NKnN`AQ1a>52N2O0O2N2N20O1000O10O1000O10O1000O6K2N0O10O1000O10O1000O10O1000O10O10O1000O10O1000O10O1000O10O1000O01000004K5L4L4L4KgbU4"}, "image_id": 1014, "id": 16790}, {"iscrowd": 0, "category_id": 1, "bbox": [44.0, 424.0, 100.0, 58.0], "area": 3373, "segmentation": {"size": [512, 512], "counts": "\\]f04l?3M4K4M3M4K4M4L3M4K4M4L1N1000O101O4L3L4M000O010000O010000O010000O01000O4M3M4L3O2OM0O0100000O01000O01002M5L3M1O0O10O102N3L10O1000O01000O13L5L2NO0100000O01000O010003M4K2O000O010000O010000O104L3L5L3M4L\\ag5"}, "image_id": 1014, "id": 16791}, {"iscrowd": 0, "category_id": 1, "bbox": [424.0, 426.0, 57.0, 53.0], "area": 1822, "segmentation": {"size": [512, 512], "counts": "\\^d62k?4M2M3M4M2M4M2M3M4M2M4M2M301O01O010O01O01O01O010O01O01O01O010O01O01O01O010O01O01O010OO2M2M3N3L3M3N3L3N3L3M3N3L3NXR?"}, "image_id": 1014, "id": 16792}, {"iscrowd": 0, "category_id": 1, "bbox": [403.0, 437.0, 17.0, 21.0], "area": 185, "segmentation": {"size": [512, 512], "counts": "UnY61m?2N3L3N2N3L3N3O010ON3L3N3M2M3N3MYb]1"}, "image_id": 1014, "id": 16793}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 443.0, 49.0, 69.0], "area": 1566, "segmentation": {"size": [512, 512], "counts": "k=d1]>01O0000000kNdAe0\\>YOfAg0Z>WOhAi0X>UOjAk0V>SOlAm0T>ROmAn0S>POoAP1Q>nNoAU1\\>2O11O0O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O00O1O1O1001N2N2O2M2H_@Nc?0_@Nc?O`@OP`W7"}, "image_id": 1014, "id": 16794}, {"iscrowd": 0, "category_id": 1, "bbox": [491.0, 452.0, 21.0, 28.0], "area": 403, "segmentation": {"size": [512, 512], "counts": "fne72k?3N3L3N3L3N3M2O1010O01O01O010O01O01O010MiA"}, "image_id": 1014, "id": 16795}, {"iscrowd": 0, "category_id": 1, "bbox": [308.0, 468.0, 65.0, 44.0], "area": 1529, "segmentation": {"size": [512, 512], "counts": "[_j41n?2N2O2M2N2N2N2N2N2N3N1N2N2N2N2N3M2O0O1O1O1O1O1O1O100O12N1O1O1O1O1O2N1O1O1O1O1O1O2N1O1O1O1O1O2N1O1O1O1O1O1O2N1O1O1O1O1O2N1O1O1O1OQPU2"}, "image_id": 1014, "id": 16796}, {"iscrowd": 0, "category_id": 1, "bbox": [438.0, 481.0, 43.0, 31.0], "area": 793, "segmentation": {"size": [512, 512], "counts": "a_k61l?4M2M3M4M2M4O00001e@]OX?e00010O0010O0010O0010O00010O0010O0010O001O00001O001O0N2M4M2M3N3L3NdP?"}, "image_id": 1014, "id": 16797}, {"iscrowd": 0, "category_id": 1, "bbox": [372.0, 482.0, 30.0, 29.0], "area": 522, "segmentation": {"size": [512, 512], "counts": "e_j51l?4M2N3M2M3N3M2M4O0010O010O0010O0010O010O001M2N2M4M2N3L3N3Mg`f1"}, "image_id": 1014, 
"id": 16798}, {"iscrowd": 0, "category_id": 1, "bbox": [484.0, 484.0, 28.0, 28.0], "area": 580, "segmentation": {"size": [512, 512], "counts": "m_b73k?2M3N2M3N2M3N2M3M3N21O00001O001O00001O001O00001O001O00"}, "image_id": 1014, "id": 16799}, {"iscrowd": 0, "category_id": 1, "bbox": [430.0, 505.0, 24.0, 7.0], "area": 90, "segmentation": {"size": [512, 512], "counts": "n_g62m?11O001ON2N2N20000001O001O001O001O001O00001O00Q`l0"}, "image_id": 1014, "id": 16800}, {"iscrowd": 0, "category_id": 1, "bbox": [284.0, 507.0, 11.0, 5.0], "area": 32, "segmentation": {"size": [512, 512], "counts": "o_^41o?0O100O100O1O1001O2NRP\\3"}, "image_id": 1014, "id": 16801}, {"iscrowd": 0, "category_id": 1, "bbox": [141.0, 0.0, 39.0, 20.0], "area": 396, "segmentation": {"size": [512, 512], "counts": "P`V21o?1O1O1O1O1O1O1O1O001O1O1O1O1O1O1O1O1O1O1OO1O1O1O1O1O1N2O1O1O1O1O1O1O1O1O1O1OQPV5"}, "image_id": 1015, "id": 16802}, {"iscrowd": 0, "category_id": 1, "bbox": [201.0, 0.0, 20.0, 8.0], "area": 97, "segmentation": {"size": [512, 512], "counts": "P`T32n?1O00001O00001O00001O00001O00001OO1M3MS`a4"}, "image_id": 1015, "id": 16803}, {"iscrowd": 0, "category_id": 1, "bbox": [231.0, 0.0, 61.0, 59.0], "area": 2345, "segmentation": {"size": [512, 512], "counts": "Y`c3l0T?000000000000000000000000000I70f0ZO0000000000000000000000000N200000000000ZOf00000000000000000000000000a0_O00000000000000000000_o]3"}, "image_id": 1015, "id": 16804}, {"iscrowd": 0, "category_id": 1, "bbox": [301.0, 0.0, 29.0, 21.0], "area": 403, "segmentation": {"size": [512, 512], "counts": "Y`f44j?2M3N3M201O00001O001O00001O001O00001O001OM3N2M3N2M3N2M3NRPk2"}, "image_id": 1015, "id": 16805}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 53.0, 16.0, 23.0], "area": 321, "segmentation": {"size": [512, 512], "counts": "f1f0Z?000000O1000O1000000000O108H8Hkmg7"}, "image_id": 1015, "id": 16806}, {"iscrowd": 0, "category_id": 1, "bbox": [360.0, 67.0, 40.0, 28.0], "area": 559, "segmentation": {"size": [512, 512], "counts": "fRd51n?2O2M2N3N2M00010O010N10010O0010O000102M3N1N3M100O01O01O01O3N1N3N2M2N3N1N3N2M2N^mg1"}, "image_id": 1015, "id": 16807}, {"iscrowd": 0, "category_id": 1, "bbox": [451.0, 79.0, 52.0, 74.0], "area": 2307, "segmentation": {"size": [512, 512], "counts": "\\dQ72k?3N3@JQA9]>FhA4H9]>EhA5H9]>EiA4H9]>FgAj0U>YOiAi0U>c0L3N2M4M2O20O0010O0010O010O00010O010O000N3L3N3L3N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3NT]4"}, "image_id": 1015, "id": 16808}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 91.0, 20.0, 23.0], "area": 415, "segmentation": {"size": [512, 512], "counts": "m2c0\\?3N000000000000O10O1000000000000004K;Fgle7"}, "image_id": 1015, "id": 16809}, {"iscrowd": 0, "category_id": 1, "bbox": [93.0, 103.0, 35.0, 30.0], "area": 846, "segmentation": {"size": [512, 512], "counts": "fc^1=]?600000000000000000000001OF;O0000000000000001O0000000000000001O0000BVmo5"}, "image_id": 1015, "id": 16810}, {"iscrowd": 0, "category_id": 1, "bbox": [364.0, 111.0, 59.0, 56.0], "area": 1637, "segmentation": {"size": [512, 512], "counts": "]Tf52l?2N3N1N3M2N3M3N1N3M2N3M2N3N1N3M3N110O010O010O010O10O10O01lNZAm0e>QO]Ao0k>0O0100O0100O010O010O0100O010N1O2M2N3M2N3N2M2N3M2N3N1N3Me[\\1"}, "image_id": 1015, "id": 16811}, {"iscrowd": 0, "category_id": 1, "bbox": [494.0, 143.0, 18.0, 39.0], "area": 369, "segmentation": {"size": [512, 512], "counts": "oTg71n?2N2O2M2N3M2O2M2J_Oj@c0U?^Oj@d0S?6N3N1N3M2O1N0bK"}, "image_id": 1015, "id": 16812}, {"iscrowd": 0, "category_id": 1, "bbox": [184.0, 155.0, 31.0, 26.0], "area": 703, "segmentation": {"size": [512, 512], "counts": 
"^Ul25\\??L400001O0001O0000000000000000000001O000001O00000000000000C`[d4"}, "image_id": 1015, "id": 16813}, {"iscrowd": 0, "category_id": 1, "bbox": [272.0, 155.0, 26.0, 39.0], "area": 767, "segmentation": {"size": [512, 512], "counts": "nTX46j?6J5K6I7J6J1O00000O10O10000000O10O10001O6J6J6I7J6JVjZ3"}, "image_id": 1015, "id": 16814}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 160.0, 15.0, 21.0], "area": 265, "segmentation": {"size": [512, 512], "counts": "P5d0\\?00000001O01O0000000001K4J6IU[h7"}, "image_id": 1015, "id": 16815}, {"iscrowd": 0, "category_id": 1, "bbox": [111.0, 162.0, 77.0, 49.0], "area": 2290, "segmentation": {"size": [512, 512], "counts": "meg14g?5L4K5K6J5O1001O01O0001O0001O0001O0001O0001O0001O0001O01O00003N0O0O1K5N201O000001O01O000001O01O000001O01ORO^A>b>]OdAb0\\>YOiAg0h>1O01O000001O01O000001O01O000001OM4K4K5K5K[jQ5"}, "image_id": 1015, "id": 16816}, {"iscrowd": 0, "category_id": 1, "bbox": [230.0, 165.0, 24.0, 25.0], "area": 528, "segmentation": {"size": [512, 512], "counts": "aUc3T1\\AoNc>W110O00010O010O00010O00010O010O0001N1M3O2O010O00010O001M2N2010O00O2L3N30L3N2M4M210O01M2N2M4M2M3M4M2M4L3N2MTiX5"}, "image_id": 1015, "id": 16819}, {"iscrowd": 0, "category_id": 1, "bbox": [314.0, 216.0, 62.0, 50.0], "area": 1897, "segmentation": {"size": [512, 512], "counts": "aWm41k?4L4L4L5K40000010O0000010O0000010O0000n@]Og>d0TA@m>i00001L3L4N201O01O0001O01O0001O01O0001O01O0001OiN]AQ1i>000010O0000010O0000010ROYAa0g>[O^A`0S?L4L4L5KahS2"}, "image_id": 1015, "id": 16820}, {"iscrowd": 0, "category_id": 1, "bbox": [254.0, 222.0, 29.0, 36.0], "area": 623, "segmentation": {"size": [512, 512], "counts": "hWo33k?2M4M2M4M2M3N3L3N3L3O2O00010O010O00N3M2M4M2N2M4M2M4M2M3NnXb3"}, "image_id": 1015, "id": 16821}, {"iscrowd": 0, "category_id": 1, "bbox": [160.0, 223.0, 72.0, 57.0], "area": 2024, "segmentation": {"size": [512, 512], "counts": "]X`21l?3M3N3L3M4M2M3M4M2M3N3L3N30O0010O01O010O01O01O010O01O00N1N3N2M2N3N2N1100O0100O0100N1O2M2O2M2011O0O110O010O01O01O010O010O0N2M4M2N3L3N2M4M2N3L3Nih[4"}, "image_id": 1015, "id": 16822}, {"iscrowd": 0, "category_id": 1, "bbox": [232.0, 253.0, 41.0, 49.0], "area": 1109, "segmentation": {"size": [512, 512], "counts": "^Xd32l?3L3N2N3L3N3L301VA\\OV>d0hA_OX>`0fABZ>?bAE]>;aAG`>9\\AJd>j00O010O00010O010O01O01O0O2L3N3L3N2M4M2N3L3N2M4M2N3L3NhWg3"}, "image_id": 1015, "id": 16823}, {"iscrowd": 0, "category_id": 1, "bbox": [467.0, 257.0, 45.0, 56.0], "area": 1466, "segmentation": {"size": [512, 512], "counts": "ViY71m?3L3N3M2M4M2M3N3N110O01O01O010O0N2N3N101O01O010O01O01O01M2M4M2M3N3L3N3N10010O0010O010O00lG"}, "image_id": 1015, "id": 16824}, {"iscrowd": 0, "category_id": 1, "bbox": [269.0, 268.0, 91.0, 50.0], "area": 2797, "segmentation": {"size": [512, 512], "counts": "\\iV43k?3L3N2M4L3N3L3M3N3L3O101O010O00010O01O01O010O00010O010O00010O01O01OnNWAm0n>10O00010O000O2O00000000O1000O1000000000O1N1N3N2M3N2M3N2N1101O001O00001N101O00001O001O0N2N3L3N3L3N2M4M2M4M2M4M2M\\g[2"}, "image_id": 1015, "id": 16825}, {"iscrowd": 0, "category_id": 1, "bbox": [32.0, 290.0, 20.0, 25.0], "area": 301, "segmentation": {"size": [512, 512], "counts": "dY`02k?4M2N3L3N3L3N2O2O010O00O2M2N3L3N3L3N2NlfU7"}, "image_id": 1015, "id": 16826}, {"iscrowd": 0, "category_id": 1, "bbox": [435.0, 314.0, 77.0, 60.0], "area": 2259, "segmentation": {"size": [512, 512], "counts": "\\ji63k?3M2N3M2N3N1N3M2N3N110O010O10O010O010O010O10O010O0N3M3O010O010O010O010O010O010O10O10O010O010O010O010O010O10O10O010O010O010O010O010N2M2N3N1010O010N1N002NfE"}, "image_id": 1015, "id": 16827}, {"iscrowd": 0, "category_id": 1, "bbox": 
[44.0, 315.0, 57.0, 43.0], "area": 1490, "segmentation": {"size": [512, 512], "counts": "cZf03j?3N3L3M3M40O01O01O01O01ON3M2M4L3N2O20O00010O0010O0010ON2N30O00010O01O01O010O00010O01O01M2N3L3N2M4L3N2M4M2M4M2M3NmU]6"}, "image_id": 1015, "id": 16828}, {"iscrowd": 0, "category_id": 1, "bbox": [105.0, 333.0, 74.0, 56.0], "area": 1853, "segmentation": {"size": [512, 512], "counts": "Xkd12k?4M2M3M4M2M4M2M3M4M2O110O0010O0010O0010O0010O00010O0010O0010O0010TORAd0n>ZOTAf0S?10O0010O0010O00010O01k@VOS?k000010O0010O00ZOm@`0S?]Oo@c0X?O01O010O00010O01O01O01O0N3L3N2M4L3N2MaTV5"}, "image_id": 1015, "id": 16829}, {"iscrowd": 0, "category_id": 1, "bbox": [399.0, 350.0, 101.0, 74.0], "area": 2960, "segmentation": {"size": [512, 512], "counts": "bkW61m?2O2N2M3N1N3N2M2O2N2M2O2M3N2O010O10O10O10O1000O10O10O10O10O1000ON3O10O01000O0100000O01000O010000O010000O01000O0100000O01000O010000O01000O0N3N20O10O1000OOO10O010O10O101N2O2M3N2N1N3N2M2O2M3N2N1N3N2M2O2Nnc5"}, "image_id": 1015, "id": 16830}, {"iscrowd": 0, "category_id": 1, "bbox": [192.0, 362.0, 49.0, 82.0], "area": 2302, "segmentation": {"size": [512, 512], "counts": "o\\P31d00c>4YA0R>NlA50OR>OkA400R>OkA50OR>NlA5O0R>OkA50OR>Y1M3N3L3N3L3M3N3O0010O0O2L3N2M4M2M4M2M310O010L3N2M4M2M4M2M3K6L300N3L3N3L3N2M4M2M4L3N[TW4"}, "image_id": 1015, "id": 16831}, {"iscrowd": 0, "category_id": 1, "bbox": [28.0, 367.0, 63.0, 44.0], "area": 1424, "segmentation": {"size": [512, 512], "counts": "V\\>2k?3N3L3N3L3N2O2O010O01O01O010O01O01O010O00010O010O00010O010O00010O01O01O0N3L3N2M4M2M4M2O1010O01OO2M2M4M2M3N3L3N3L3N2M4M2M4M2M]Tb6"}, "image_id": 1015, "id": 16832}, {"iscrowd": 0, "category_id": 1, "bbox": [101.0, 373.0, 27.0, 28.0], "area": 458, "segmentation": {"size": [512, 512], "counts": "Xlb12l?2M3M4M2M3M4L301O01O010O00010O01O01O010L3M3N3L3M3N3LWdo5"}, "image_id": 1015, "id": 16833}, {"iscrowd": 0, "category_id": 1, "bbox": [374.0, 384.0, 40.0, 55.0], "area": 1439, "segmentation": {"size": [512, 512], "counts": "W\\k52n?4K5L4L3L5L4L4L3L5L4L4L3L2O00000O01000O0100000O01000003L5L4L3M4K5L4L3L5L4L4L3Leb`1"}, "image_id": 1015, "id": 16834}, {"iscrowd": 0, "category_id": 1, "bbox": [250.0, 395.0, 44.0, 68.0], "area": 2243, "segmentation": {"size": [512, 512], "counts": "U^m35d?7I7I7I8H7J6I7I7M301O01O0000000001O01O0000000001O01O0000000001O01O000J6I7I7J6I8H7I7I7Igc\\3"}, "image_id": 1015, "id": 16835}, {"iscrowd": 0, "category_id": 1, "bbox": [307.0, 399.0, 43.0, 60.0], "area": 1722, "segmentation": {"size": [512, 512], "counts": "Vmi42n?5[ONQA8j>MQA3m>a00O10O1000001O5J6K5K5K5KO010000000O010000000O010000000O2O5K5K5K5J6K5K5K5K5J6Kna`2"}, "image_id": 1015, "id": 16836}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 406.0, 69.0, 54.0], "area": 1741, "segmentation": {"size": [512, 512], "counts": "b=2l?2M4M2M3N3L3N3L3N3M2M3O2O0010O0010O0010O0010O00UOTAc0l>YOWAg0i>WOYAi0P?0O010O00010O010O00010O010O00010O010O00010O010O00010O010O00010O010ON2N3L3N3L3N2M4M2M_Rm6"}, "image_id": 1015, "id": 16837}, {"iscrowd": 0, "category_id": 1, "bbox": [509.0, 415.0, 3.0, 13.0], "area": 31, "segmentation": {"size": [512, 512], "counts": "Pmn77i?5J0RC"}, "image_id": 1015, "id": 16838}, {"iscrowd": 0, "category_id": 1, "bbox": [70.0, 417.0, 46.0, 33.0], "area": 848, "segmentation": {"size": [512, 512], "counts": "f]S11l?4M2JK^@9_?5M3N3L3010O00010O010O0010O0010O0010O0010O0010O00M4M2M4M20010O010O00010O010O0O2M2M3NbbU6"}, "image_id": 1015, "id": 16839}, {"iscrowd": 0, "category_id": 1, "bbox": [98.0, 442.0, 13.0, 13.0], "area": 103, "segmentation": {"size": [512, 512], "counts": "R^a12l?2M4M2O20O0010O01O0M3N3LURX6"}, 
"image_id": 1015, "id": 16840}, {"iscrowd": 0, "category_id": 1, "bbox": [375.0, 444.0, 25.0, 25.0], "area": 478, "segmentation": {"size": [512, 512], "counts": "Pnk54l?5J6K6J1O000O010000000O01000000000O01000005K5J7Jdag1"}, "image_id": 1015, "id": 16841}, {"iscrowd": 0, "category_id": 1, "bbox": [185.0, 446.0, 9.0, 8.0], "area": 49, "segmentation": {"size": [512, 512], "counts": "Qnl23j?301O0001O01ON3LSbn4"}, "image_id": 1015, "id": 16842}, {"iscrowd": 0, "category_id": 1, "bbox": [293.0, 452.0, 21.0, 29.0], "area": 561, "segmentation": {"size": [512, 512], "counts": "^nb4b0T?:1O000000000001O0000000000000000000000EVbR3"}, "image_id": 1015, "id": 16843}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 454.0, 39.0, 49.0], "area": 1233, "segmentation": {"size": [512, 512], "counts": "V>X1h>0010O00010O010O00010O000mN[Al0e>PO^AP1i>010O010O00010O000M4M21O0N2M4L3N2M4L3M3N3L3M4L3N]Q\\7"}, "image_id": 1015, "id": 16844}, {"iscrowd": 0, "category_id": 1, "bbox": [140.0, 461.0, 54.0, 51.0], "area": 1839, "segmentation": {"size": [512, 512], "counts": "n_V22k?3O100O1M3M3N2YOChA`0V>DfA>X>FcA=[>G`A<^>d0O1O10000O100O100O11O001O00001O001O00001O001O00001O001O00001N1M4mNWAj0Q?N3L3N2M4L3N3L3M3NSan4"}, "image_id": 1015, "id": 16845}, {"iscrowd": 0, "category_id": 1, "bbox": [411.0, 463.0, 61.0, 49.0], "area": 1864, "segmentation": {"size": [512, 512], "counts": "oo]61m?2M3N2M3N2M3N2M3N2O100001OO1N2M3N2M3N2M3N2M3N2M31O001O00001O001O00001O001O00001O001O00001N1M4M21O01M2N3L3N2M4M2M4M2M3N3LUac0"}, "image_id": 1015, "id": 16846}, {"iscrowd": 0, "category_id": 1, "bbox": [327.0, 480.0, 20.0, 19.0], "area": 232, "segmentation": {"size": [512, 512], "counts": "ZoS52k?4M2N3M2O2O01O01O010O010O010OO2M2N3M2MmPb2"}, "image_id": 1015, "id": 16847}, {"iscrowd": 0, "category_id": 1, "bbox": [30.0, 490.0, 25.0, 22.0], "area": 367, "segmentation": {"size": [512, 512], "counts": "h_?3k?3M2M4M2M4M200010O010O00001O001O00001N1M4M2M4M2NaPT7"}, "image_id": 1015, "id": 16848}, {"iscrowd": 0, "category_id": 1, "bbox": [64.0, 491.0, 42.0, 21.0], "area": 575, "segmentation": {"size": [512, 512], "counts": "m_P13k?2M3N2M3N2M3N2O100001O001O00001O001O00001O00001O001O00001O001O00001O001O000K]@Md?0_@Oi?000Q`Z6"}, "image_id": 1015, "id": 16849}, {"iscrowd": 0, "category_id": 1, "bbox": [200.0, 493.0, 47.0, 19.0], "area": 539, "segmentation": {"size": [512, 512], "counts": "o_T31m?2N2M3001O00001O00N2N2O11O00001O001O00M3N2M3N2O100001O001O00001O001O00001O001O00001O000M4M2M^PT4"}, "image_id": 1015, "id": 16850}, {"iscrowd": 0, "category_id": 1, "bbox": [291.0, 504.0, 16.0, 8.0], "area": 71, "segmentation": {"size": [512, 512], "counts": "ooa41n?1O1O1O1O1O100O11O1O1O1O1O1O1OQPV3"}, "image_id": 1015, "id": 16851}, {"iscrowd": 0, "category_id": 1, "bbox": [282.0, 508.0, 8.0, 4.0], "area": 18, "segmentation": {"size": [512, 512], "counts": "o_]41n?100O1O11O1O1OQ`^3"}, "image_id": 1015, "id": 16852}, {"iscrowd": 0, "category_id": 1, "bbox": [118.0, 0.0, 10.0, 5.0], "area": 30, "segmentation": {"size": [512, 512], "counts": "PPk11o?1O1O1O1O00O1O1O1OQPP6"}, "image_id": 1016, "id": 16853}, {"iscrowd": 0, "category_id": 1, "bbox": [126.0, 0.0, 27.0, 21.0], "area": 343, "segmentation": {"size": [512, 512], "counts": "XPo12l?3N2N1O2N2N2N2O1O1O1O1O1O00O1O1O1O1O1O2N2N2N2N2M2O2Nk_c5"}, "image_id": 1016, "id": 16854}, {"iscrowd": 0, "category_id": 1, "bbox": [295.0, 0.0, 46.0, 28.0], "area": 717, "segmentation": {"size": [512, 512], "counts": "W`c42m?1O2M3N2N2N2O001O1O1O1O1O001O1O1O1O1O001O1O1O1ON2O1O1O1O1N2O1O1O1O1N2O1O1O1O1O1N2O1O1O1O1NR`e2"}, "image_id": 
1016, "id": 16855}, {"iscrowd": 0, "category_id": 1, "bbox": [345.0, 0.0, 5.0, 3.0], "area": 9, "segmentation": {"size": [512, 512], "counts": "P`\\51o?1O001ONRPa2"}, "image_id": 1016, "id": 16856}, {"iscrowd": 0, "category_id": 1, "bbox": [352.0, 0.0, 64.0, 40.0], "area": 1676, "segmentation": {"size": [512, 512], "counts": "aP`51n?2M3N2HJc@8[?Jc@7[?9N2N2N2O1O1O001O1O1O1O1O1O001O1O1ON2O1O1O1O1O1N2O1001O1O1O1O001O1O1O1O1O1O001O1O1O00O1O1POXAf0k>WOXAg0j>WOXAg0R?N2N1O2M3N2N2N2N2N1N3N^o_1"}, "image_id": 1016, "id": 16857}, {"iscrowd": 0, "category_id": 1, "bbox": [410.0, 29.0, 78.0, 78.0], "area": 2789, "segmentation": {"size": [512, 512], "counts": "cR]62l?3N2N1O2M3N2N2N2M3N1O2DYO[Aj0c>XO[Aj0c>WO\\Ak0a>XO]Aj0a>DhA[OnAd0S>YOPBg0P>WOQBj0o=TOSBl0m=QOVBo0i=POYBP1g=nN[BQ1f=mN\\BS1X>100000O100N2N1N3N2N2N2N2M3N1O2N2N2M3N2N1O2Ncjn0"}, "image_id": 1016, "id": 16861}, {"iscrowd": 0, "category_id": 1, "bbox": [264.0, 152.0, 149.0, 108.0], "area": 7793, "segmentation": {"size": [512, 512], "counts": "TWT41n?2M3N2N2N2N1N3N2N2N2N2N2M2O2N2N2N2O10O10OO2N2N2N2N2MDZOcAd0]>^ObAa0^>A`A?`>B_A>`>E]A;d>GZA9g>HWA9j>?O10O1000000000N1O2M3N2N2N2N2N1N3N2N2N2N2FSN]Bo1b=SN\\Bo1b=SN\\Bo1b=9M3N2N1O2N2M2O000000O10O100000O10O100002M3N1O2O1O100000O1000O100000O1000O10000000O1000O100000O1000O100N2N1O2N2M3UNXB[1j=cNXB[1j=cNWB\\1j=cNXB[1j=cNXBZ1k=cNXB[1j=cNXB[1X>N1O2M3N2N00O1000O1000002N2M3N2N1O2N2M3N2N2N1O2M3N2N2N2N]Ya1"}, "image_id": 1016, "id": 16862}, {"iscrowd": 0, "category_id": 1, "bbox": [173.0, 153.0, 62.0, 60.0], "area": 2208, "segmentation": {"size": [512, 512], "counts": "mef23l?2N2N2N2N2M2O2N2N2N2N2N2M3JSOVAn0i>TOUAn0i>6N2N2M3N2N2N0000000O02O2N2N20000O0100000000N2N1O2N2N2N1O0O10O102N2ROfA3\\>JgA4Z>KhA3Z>KhA2[>LfA3\\>KfA3\\>KfA3\\>JgA4[>JgA3\\>KfA3[>LgA2[>LfA3U?NSZZ4"}, "image_id": 1016, "id": 16863}, {"iscrowd": 0, "category_id": 1, "bbox": [457.0, 169.0, 55.0, 97.0], "area": 2835, "segmentation": {"size": [512, 512], "counts": "meT71n?2N2N3M2O1N2N3M2N2O1N2N3M2N2O1N3M2N2N2O2M2N2N2N3N11O0001O01O0001lA^Nf=c1XB`Ne=b1ZB_Ne=b1YB`Ne=c1XB_Nf=P2M2N2O1N3M2N2N2O2O0000010O0000`MjBS2Y=jMiBU2X=iMjBU2c7"}, "image_id": 1016, "id": 16864}, {"iscrowd": 0, "category_id": 1, "bbox": [420.0, 209.0, 17.0, 17.0], "area": 157, "segmentation": {"size": [512, 512], "counts": "iVb62m?2N2N2N2N2N2N2N01O2N1O2N2N2N2N2M[YU1"}, "image_id": 1016, "id": 16865}, {"iscrowd": 0, "category_id": 1, "bbox": [238.0, 218.0, 16.0, 17.0], "area": 147, "segmentation": {"size": [512, 512], "counts": "QWg32m?2N2N2M3N2N2O1000N2N2N2N2N2N1NQiP4"}, "image_id": 1016, "id": 16866}, {"iscrowd": 0, "category_id": 1, "bbox": [409.0, 237.0, 66.0, 77.0], "area": 2319, "segmentation": {"size": [512, 512], "counts": "Th\\62m?2N1O2M3N2N2N2M3N1100000O1000O1000O100N2N1O2N2N2M3N2N1O2M3N2N2N2N1N3kA_Nh=c1VB^Ni=d1UB^Nk=b1RBaNn=i1O1000O100000O10O100nNRB1n=MTB3l=JWB6h=JYB5h=KWB4k=JWB3l=JWB4k=JWB4j=KXB3j=KXB2k=JXB5j=HYB6i=HYB6i=HYB6h=IZB4i=JYB4j>O2NkVb0"}, "image_id": 1016, "id": 16867}, {"iscrowd": 0, "category_id": 1, "bbox": [160.0, 272.0, 63.0, 76.0], "area": 2518, "segmentation": {"size": [512, 512], "counts": "\\Z`22m?2M3FJg@8W?Jg@8k>GYA3K7k>HXA3J8l>GXAc0e>_OZAc0d>_OYAd0e>=N1DeNQB^1m=cNRB_1k=dNSB^1k=dNSB^1k=dNRB_1l=;N10000O01000001O2M3N2N1100000N2N2N1O2N2M3N1O00O12M3N1O2N2N2M5L2N2N2M2O2N2N2N2M3N1O2N2N2M3NeV`4"}, "image_id": 1016, "id": 16868}, {"iscrowd": 0, "category_id": 1, "bbox": [362.0, 285.0, 84.0, 74.0], "area": 3114, "segmentation": {"size": [512, 512], "counts": 
"cYe51n?2M2O2N2N2M3N2b@AX?`0g@BW?d0O2M3N2N2N2N1N3N2N2O1000O1000cAgNR>Y1lAiNS>X1kAjNU>V1hAmNX>R1gAPOY>\\10O100000O1000O10000000O10O10000O1N2M2O2N2N2N2M3N1O0000O102N2N2N2M2O2N2N200000N2N2N1N3N2N2N2XOUA7n>GTA7m>HUA6m>GVA6m>HUA6m>HTA7\\?N2M3N2NeeP1"}, "image_id": 1016, "id": 16869}, {"iscrowd": 0, "category_id": 1, "bbox": [64.0, 287.0, 78.0, 67.0], "area": 2953, "segmentation": {"size": [512, 512], "counts": "kYP1b0^?0000000000000000000E;00000000000000G:J50000001O0000000000000E;008H00000000000L40002N00000000000lAiN_=l10000000000000000d0\\O0000000bNkAQ1b>00000007I0000000000000000000CVfh5"}, "image_id": 1016, "id": 16870}, {"iscrowd": 0, "category_id": 1, "bbox": [221.0, 303.0, 69.0, 84.0], "area": 2921, "segmentation": {"size": [512, 512], "counts": "Uk^31n?2N2nNLUB6h=MVB5h=MVB5h=MUB5j=MTB5j=LUB6i=LUB6h=MVB5h=MUB6i=LUB6i=LUB5j=LUB6i=LUB6h=MXB3f=OZB1d=1\\BOb=3^BMb=3]BMc=3^BMb=3]BNc=2[B0e=0YB2g=T10O10O1000000000O10ON3N2N2N2N2M2O2N2N2N2FcAnN_>P1cAmN`>Q1aAnN`>Q1bAmN`>S170000N2N1O2N2N2M3N2N2N1O2N2M3N2N2N1O2NYe^3"}, "image_id": 1016, "id": 16871}, {"iscrowd": 0, "category_id": 1, "bbox": [325.0, 343.0, 60.0, 69.0], "area": 2273, "segmentation": {"size": [512, 512], "counts": "_kR52l?3N2N2N2N1N3N2N2N2M2O2N2N2M3N1O2N2N2O1000O10O100000O01N2N2aAeNV>]1hAeNV>\\1hAgNV>b1O2N2N2000O0100O1O1N2N1OSOUBHj=7XBHh=6ZBIh=5ZBIh=5ZBHi=6YBHi=5ZBIh=5YBJi=4YBIj=FgA8a00i=GhA6b01h=GhA6b01P>MRB0Q>NPB1R>MPB1Q?NgSo1"}, "image_id": 1016, "id": 16872}, {"iscrowd": 0, "category_id": 1, "bbox": [146.0, 359.0, 89.0, 83.0], "area": 3522, "segmentation": {"size": [512, 512], "counts": "P\\Y22m?2M3N2N1O2M3N2N2N2N2M2O2N2N20000O1000O1000O10000000O10O100000000O0100000N2N2N2M3N1O2N2N200N2M3N1O2N2M3N200O100N1N3N2N2N2M3N2N1O2O10O1cNdB3^=KdB2_=LcB2^=McB2_=KdB3^=KcB3`=K_B6c=H_B6b=CTBCN2N2N2M2O2NSSZ4"}, "image_id": 1016, "id": 16873}, {"iscrowd": 0, "category_id": 1, "bbox": [72.0, 378.0, 84.0, 84.0], "area": 3122, "segmentation": {"size": [512, 512], "counts": "a]T13l?2N2N2N1N3N2N2N2M3N1O2N2N2M3N1O2N2N0O01000XAmNd>S1\\AoNb>P1_AROa>W10O10O10000000O10O1[OeNdB[1Z=gNfBY1W=jNiBU1V=lNkBT1S=nNlBK^OQ1e=VOmBG@Q1b=[OnBB_OT1c=\\OoBd0Q=^OmBa0T=@kB`0U=BiB>V=EgBh0\\A[Od>e0YA]Oh>b0VAAj>k00000000O01000000000O1000O1O1O1O1O001O1O1O1O1O001N2O1O1O1O0O2M3N2N2N2N2M201O10QOVBGj=7XBGj=7XBGj=9VBEl=9VBDl=;UBDm=:UBDm=9VBEl=9VBEl=9VBDm=:UBDm=:TBEm=5_AHf01m=4`AHf02l=4T1N2N2NZcd2"}, "image_id": 1016, "id": 16875}, {"iscrowd": 0, "category_id": 1, "bbox": [479.0, 386.0, 33.0, 63.0], "area": 1207, "segmentation": {"size": [512, 512], "counts": "]m_72m?1O2N2N2N2N2DEPA=n>EPA=n>EPA=n>Eo@>o>:N3N2O100O1N2IiNaAZ1]>6N2N0O100000O0101O2N2000O10jC"}, "image_id": 1016, "id": 16876}, {"iscrowd": 0, "category_id": 1, "bbox": [124.0, 429.0, 75.0, 71.0], "area": 2708, "segmentation": {"size": [512, 512], "counts": "b^n13l?2N2N1O2M3N2N2N2N2M3N2N0000002M3N2RAROg>U1N2N2O1O001O1O1O1O1O1O1N101O1O1O1O1OO2N2N2N2N0O100000O10O100000O10O100000002M3N2N2N2N1N3N2N2N2N2M2O2N2N2Ec@0_?Nc@O`?Oa@0`?Ob@O`?O\\Rl4"}, "image_id": 1016, "id": 16877}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 436.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "d=2Zbo7"}, "image_id": 1016, "id": 16878}, {"iscrowd": 0, "category_id": 1, "bbox": [236.0, 442.0, 75.0, 70.0], "area": 2992, "segmentation": {"size": [512, 512], "counts": "m^f32d?Od@2[?0b@3\\?Nc@4[?Nc@4Z?;N2O1000O10N2N2M3N2N2O01000000000N2N2IkN`AV1_>lN_AV1_>7M3N2N2N2N1N3N2N2N2O10000O01O1O1O1O001O1O1O1O1O001O1O1O1O0YOiAEX>9jAFW>7lAHU>7lAHU>7kAIU>6mAIT>4oAKR>3PBLQ>2QBMP>1QBOP>NSB1m=NUB1l=MVB2k=LWB3j=JYB5h=IYB7g=H[B7h>M3N2NVPT3"}, "image_id": 
1016, "id": 16879}, {"iscrowd": 0, "category_id": 1, "bbox": [437.0, 445.0, 75.0, 67.0], "area": 3102, "segmentation": {"size": [512, 512], "counts": "Poj61n?2N2M3N2N2N2M3N1O2N2M3N2N200O10O10N2N2M3N1O02000O1IkN`AV1_>lN^AW1`>6M3N2N2N2M2O2N2N2O100O01O1O1O1O1O001O1O1O1O001O1O1O1O1O001O1O1O1O001O1O1O1O1O001PO\\Aa0f>]O\\Aa0f>]O\\Aa0f>]O\\Aa0e>^O]A`0e>]O^A`0R?Oi@"}, "image_id": 1016, "id": 16880}, {"iscrowd": 0, "category_id": 1, "bbox": [173.0, 479.0, 77.0, 33.0], "area": 1333, "segmentation": {"size": [512, 512], "counts": "hof22m?2M3N2N1O2M3N2N1O1N2O1O1O1O1N2O1O1O1N2O11O1O1O1O1O001O1O1O1O1O1O001O1O1O1O1O1O001O1O1O1O1O00O1O1O1N2O1O11O001O1O1O1O1O001O000000000000O10000001O1O1O1O1OW`R4"}, "image_id": 1016, "id": 16881}, {"iscrowd": 0, "category_id": 1, "bbox": [14.0, 481.0, 65.0, 31.0], "area": 1308, "segmentation": {"size": [512, 512], "counts": "j_71n?2N2N2N2IHc@:[?Hb@:]?5O1O1O1N2O1O1O1O1O11O1O1O1O1O1O1O001O1OO1O1O1N2001O1O1O1O1OO1N2O1O1O1O1O1N2O1O11O1O1O1O1O001O1N2M3N2N2N2N1N3N2N2N2MaPh6"}, "image_id": 1016, "id": 16882}, {"iscrowd": 0, "category_id": 1, "bbox": [412.0, 488.0, 45.0, 24.0], "area": 565, "segmentation": {"size": [512, 512], "counts": "f_^63l?2N1N3N2]@F]??N2O1O1O1N2O1001O1O1O1O1O1O001O1O1O1O1O1O1O001O1O1O00O1N21O1O1O1O1O1O001O1O1O1OQPk0"}, "image_id": 1016, "id": 16883}, {"iscrowd": 0, "category_id": 1, "bbox": [115.0, 500.0, 23.0, 12.0], "area": 145, "segmentation": {"size": [512, 512], "counts": "noi12m?1O1O1O1O1N2O1O1O11O1O1O1O001O1O1O1O1O1O001OQ`j5"}, "image_id": 1016, "id": 16884}, {"iscrowd": 0, "category_id": 1, "bbox": [85.0, 501.0, 21.0, 11.0], "area": 123, "segmentation": {"size": [512, 512], "counts": "ooZ11m?2O1O1O1O1N2O1O11O1O001O1O1O1O1O1O001O1OQ`Z6"}, "image_id": 1016, "id": 16885}, {"iscrowd": 0, "category_id": 1, "bbox": [186.0, 334.0, 14.0, 14.0], "area": 114, "segmentation": {"size": [512, 512], "counts": "gZm22l?2M4M2N30O010O001O0M4M2N2N`ek4"}, "image_id": 1017, "id": 16886}, {"iscrowd": 0, "category_id": 1, "bbox": [48.0, 0.0, 34.0, 12.0], "area": 222, "segmentation": {"size": [512, 512], "counts": "PPh01o?001O00001O001O00001O001O00001O001O00001O001O00001O001O0000N2M3N2MSPg6"}, "image_id": 1018, "id": 16887}, {"iscrowd": 0, "category_id": 1, "bbox": [130.0, 0.0, 20.0, 7.0], "area": 73, "segmentation": {"size": [512, 512], "counts": "PPQ21o?00001O00001O001O00001O001O00001OO1N2MSPe5"}, "image_id": 1018, "id": 16888}, {"iscrowd": 0, "category_id": 1, "bbox": [251.0, 0.0, 181.0, 193.0], "area": 19640, "segmentation": {"size": [512, 512], "counts": "mdm32k?4M2M4M2M3N3L3N3L3N2M4M2N3L3N2M4M2M4M2M3N3L3N3L3N2N3L3N3L301O01O010O01O01O010O01O01O010O01O01O010OTN[M]Ff2`9\\MaFc2\\9aMnEJQO05e2j:cMmELPO08b2h:dMnELPO0;KF`2n:mMmE4Co1]:oMnE4Em1Z:SNmE4Ih1X:VNmE4Kg1T:XNoE3Me1R:[NmE41a1n9^NoE33_1l9aNmE47X1k9gNoE06W1l9kNmEN7U1n9mNlEN6S1o9oNkEN6T1n9oNkEN7R1n9POlEM5T1o9POkEL3X1Q:lNmELOZ1T:jNmELL^1V:gNmELJ_1Y:eNnEd2R:]MmEd2R:\\MoEc2Q:\\MPFe2o9YMSFg2m9WMVFh2j9XMVFi2i9XMWFg2i9YMWFh2h9YMWFg2i9YMXFf2h9ZMXFg2g9ZMYFe2g9[MYFf2f9[MXFf2h9ZMUFi2k9WMSFl2l9UMQFm2o9SMnEQ3Q:_10001O001O001O00001O001O000000M3N2M3N2M3N2M3N2M3N2M3N2M3N2N2M3N2M3N2M3N2M3N2M3N2M3N2N2M3N2M3N2M3N2M3N2M3N2M3N2M3N2N2M3N2M3N2M3N2M3N2M3N2M3N3M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3M2M3N3LkoW1"}, "image_id": 1018, "id": 16889}, {"iscrowd": 0, "category_id": 1, "bbox": [498.0, 0.0, 11.0, 5.0], "area": 35, "segmentation": {"size": [512, 512], "counts": "PPi71o?1O001O1O1O0000O1O1NR`1"}, "image_id": 1018, "id": 16890}, {"iscrowd": 0, "category_id": 1, "bbox": [510.0, 57.0, 2.0, 6.0], "area": 10, "segmentation": {"size": [512, 512], "counts": 
"kQo74j?2WN"}, "image_id": 1018, "id": 16891}, {"iscrowd": 0, "category_id": 1, "bbox": [313.0, 73.0, 159.0, 286.0], "area": 16084, "segmentation": {"size": [512, 512], "counts": "ajl41l?3N2M4M2N3L3N3L3N2M4M2N3L3N2M4M2M4M2N2M4M2M4M2M4M2N2M4M2M4M2N2M4M2M4M2M3N3M2M4M2M4M2M3N3M2M4M2M3N3L3N3M2M2O0O010O01000O010O0100O0100O010O01000O010O010O01000O010O010O01000O010O010O01000O010O0100O0100O010O01000O02O2M3N2M10O10O2O3L3N2M4M2N3L3N2M4M2M4M2gL^CR3db?M3N3M2Md]m7"}, "image_id": 1018, "id": 16894}, {"iscrowd": 0, "category_id": 1, "bbox": [498.0, 85.0, 14.0, 22.0], "area": 188, "segmentation": {"size": [512, 512], "counts": "PSi71m?2N3M2M4M2`@D]??010O01O010O010VM"}, "image_id": 1018, "id": 16895}, {"iscrowd": 0, "category_id": 1, "bbox": [5.0, 104.0, 267.0, 208.0], "area": 30511, "segmentation": {"size": [512, 512], "counts": "mf23j?3N2M4M2M4M2M3N3M2M4M2M3N3L3N3L3N3M2M3N3L3N3L3N2N3L3N3L3N2M4M2M4M2N2M4M2M4M2M4M2M3N3M2M4M2M3N3M201O01O010O010O00010O010O01O01O010O01O01O010O010O00010O010O01O01O010O01O01O010O01O01O010O010O00010O010O01O01O010O01O01O010O010O00010O010O01O01O010O01O01O010O01O01O010O010O00010O010O01O01O010O01O01O010O01O01O010O010O00010O010O01O01O010O01O01O010O010O00010O010O01O01O010O01O01O010O01O01O010O010O00010O010O01O01O010O01O01O010O01O01O010O010O00001L3N3L3N2N3L3N3L3N2M4M2M4M2N3L3N2M4M2M4M2N2M4M2M4M2M3N3L3N3M2M3N3L3N3L3N3L3N2N3L3N3L3N2M4M2N3L3NQjg3"}, "image_id": 1018, "id": 16896}, {"iscrowd": 0, "category_id": 1, "bbox": [466.0, 133.0, 46.0, 71.0], "area": 1666, "segmentation": {"size": [512, 512], "counts": "SUY73j?3N2M4M2M4M2N2M4M2M4M2M310O0gAmNf=S1WBPOh=P1UBSOk=n0RBTOo=k0nAYOQ>g0mA[OT>Y1OZNjAb1V>\\NlAd1Y>0O0010iNgAe0Y>YOiAh0W>UOlAj0T>SOoAm0R>POQBP1n=mNUBS1]>0O00010O010O00010O010O00oNWAl0n>0cJ"}, "image_id": 1018, "id": 16897}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 140.0, 1.0, 2.0], "area": 2, "segmentation": {"size": [512, 512], "counts": "\\do72b;"}, "image_id": 1018, "id": 16898}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 142.0, 8.0, 18.0], "area": 94, "segmentation": {"size": [512, 512], "counts": "^4b0_?O001M2M3N3L3Nakk7"}, "image_id": 1018, "id": 16899}, {"iscrowd": 0, "category_id": 1, "bbox": [449.0, 187.0, 28.0, 29.0], "area": 471, "segmentation": {"size": [512, 512], "counts": "_fP71m?3M2M4M2N3M2N3M2M4O00010O010O010O01O0M4M2N3M2N2N3L3N3MoYa0"}, "image_id": 1018, "id": 16900}, {"iscrowd": 0, "category_id": 1, "bbox": [472.0, 217.0, 40.0, 49.0], "area": 1300, "segmentation": {"size": [512, 512], "counts": "hW\\73k?2M3M4M2M3N3L3M4M2M3N3L3010O00010O00010O010O00010O010O00010O00010O010O00010O01lH"}, "image_id": 1018, "id": 16901}, {"iscrowd": 0, "category_id": 1, "bbox": [425.0, 240.0, 31.0, 34.0], "area": 631, "segmentation": {"size": [512, 512], "counts": "Yhd62l?2M4M2M3N3L3M4M2M3N3O0010O0010O0010O001O0M3N3L3N3L3N2M4L3N2M\\hk0"}, "image_id": 1018, "id": 16902}, {"iscrowd": 0, "category_id": 1, "bbox": [461.0, 270.0, 51.0, 58.0], "area": 1718, "segmentation": {"size": [512, 512], "counts": "biV71l?4M2N3L3N3L3N210O010O00010O010O010O000N3M2M4M2N3L3N2M4M2N3N10010O010O0010O0010O010O00010O01XOdAK[>3gAMZ>0iA0V>MmA3S>KPB4Z5"}, "image_id": 1018, "id": 16903}, {"iscrowd": 0, "category_id": 1, "bbox": [409.0, 293.0, 35.0, 31.0], "area": 632, "segmentation": {"size": [512, 512], "counts": "hi\\61m?3L3N2M4M2N3L3N30O0010O0010O0010O010O00010O010O0010O001M2M3N3L3N3M2McfQ1"}, "image_id": 1018, "id": 16904}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 326.0, 59.0, 50.0], "area": 1665, "segmentation": {"size": [512, 512], "counts": 
"S[i62k?3N3L3N3L3N2M4M2M4M2M3O20O010O00010O010O00010O010O01O01O010O01O01O010O01O01O010O010O00010O010L3N2M4M2N3L3N2M4M2M4M2MZU9"}, "image_id": 1018, "id": 16905}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 332.0, 20.0, 30.0], "area": 515, "segmentation": {"size": [512, 512], "counts": "^:l0T?00000O100000O1000000000O10O101O7I8H8Hlde7"}, "image_id": 1018, "id": 16906}, {"iscrowd": 0, "category_id": 1, "bbox": [383.0, 345.0, 29.0, 38.0], "area": 670, "segmentation": {"size": [512, 512], "counts": "hko52k?4GLa@7\\?8M4M2M3M4L3M310O00010O00M4M2M4N100010ON2M4L3N2M4L3MSea1"}, "image_id": 1018, "id": 16907}, {"iscrowd": 0, "category_id": 1, "bbox": [143.0, 365.0, 25.0, 27.0], "area": 428, "segmentation": {"size": [512, 512], "counts": "nkW24j?2N2M4M2M4M2M4O000010O010O00010O0O2M2M3N3L3N3L3N_d[5"}, "image_id": 1018, "id": 16908}, {"iscrowd": 0, "category_id": 1, "bbox": [412.0, 371.0, 57.0, 56.0], "area": 1738, "segmentation": {"size": [512, 512], "counts": "f\\^63j?3N3L3N2M4M2M4M2M3N3L3N3M2M301O010O010O00010O010O00010OkN^Al0c>QO_AP1`>mNdAR1e>010O0010O0010O0010O0010O0010M2M4M2M3N3L3N3L3N2N3L3NfSe0"}, "image_id": 1018, "id": 16909}, {"iscrowd": 0, "category_id": 1, "bbox": [82.0, 385.0, 61.0, 52.0], "area": 1823, "segmentation": {"size": [512, 512], "counts": "P]Y12l?3M2N3L3N3M2N3M2M3N3M2010O010O01O01O010O010O010O01M2N3M2M3N3M210O010O0010O0010O010O01O0M4M2N2N3L3N3M2N3M2M4M2N2N3L3N3M2NdSh5"}, "image_id": 1018, "id": 16910}, {"iscrowd": 0, "category_id": 1, "bbox": [187.0, 392.0, 53.0, 67.0], "area": 2074, "segmentation": {"size": [512, 512], "counts": "olm22>O20\\>4^A03N\\>5_AO2O]>4^A12N\\>5_AO2O]>c0aA@[>T1N2M4M2O20O00010O010O00010O010O00010O010L3N2M4M2M3N3L3N3L3M3N3L3N3M21O010O01O0Be@8Z?Ei@;_?0OO2M2M3N3LScW4"}, "image_id": 1018, "id": 16911}, {"iscrowd": 0, "category_id": 1, "bbox": [363.0, 399.0, 37.0, 32.0], "area": 706, "segmentation": {"size": [512, 512], "counts": "Qme52l?3L3N3L3N2M4M2O2O0010O0010O010O00010O010O00010O010O0010O0O2L3N2N3L3N3L3NXcg1"}, "image_id": 1018, "id": 16912}, {"iscrowd": 0, "category_id": 1, "bbox": [218.0, 429.0, 35.0, 42.0], "area": 840, "segmentation": {"size": [512, 512], "counts": "]^]32l?3L3N3M2M3N3L3N3L3N2M4M2M4O00010O010O01O0O1N3L3N3L3N2M4M2M4M2M3N3L3N]RQ4"}, "image_id": 1018, "id": 16913}, {"iscrowd": 0, "category_id": 1, "bbox": [387.0, 429.0, 59.0, 50.0], "area": 1688, "segmentation": {"size": [512, 512], "counts": "YnQ61l?4M2M4M2M3N3L3N3L3N2M40O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010N1N2@o@NU?On@NT?Oo@NU?Om@OU?N`bP1"}, "image_id": 1018, "id": 16914}, {"iscrowd": 0, "category_id": 1, "bbox": [279.0, 439.0, 47.0, 73.0], "area": 2130, "segmentation": {"size": [512, 512], "counts": "]n[43k?2M3N3L3N3L3WA_OQ>c0mA@2MW=g0cB_O3MX=f0cB_O3NY=d0`BB3M^=`0]BE3M`=>ZBI2Le=Q1YBQOg=d1001O001O00001O001O00001O001O00001\\OSBPOODn=Z1UBPOMHo=T1XBPOIOo=o0ZBPOH2n=k0]BPOI4k=i0dBZO\\=c0eB_O\\=>fBBZ=;iBEW=9lBFU=6nBJR=4QCKP=1SCNn<0TCMo<0UCMn<0\\cl2"}, "image_id": 1018, "id": 16915}, {"iscrowd": 0, "category_id": 1, "bbox": [62.0, 448.0, 49.0, 58.0], "area": 1617, "segmentation": {"size": [512, 512], "counts": "\\_o01m?2M4M2M4M2N3L3N2M4M2M4M2M3N3L3N3L3N210O010O01O01O010O01O01O010O01aNfAV1Z>hNhAX1a>L3N2M4M2M4M2M3N3L3N3L3N2M4M]QX6"}, "image_id": 1018, "id": 16916}, {"iscrowd": 0, "category_id": 1, "bbox": [344.0, 451.0, 28.0, 29.0], "area": 489, "segmentation": {"size": [512, 512], "counts": "g^\\52k?3M3M4L3N3L3M3010O00010O00010O010O00010M2M3M4M2M3M4L3NhaU2"}, "image_id": 1018, "id": 16917}, {"iscrowd": 0, "category_id": 1, "bbox": [365.0, 473.0, 67.0, 
39.0], "area": 1430, "segmentation": {"size": [512, 512], "counts": "iof52k?4M2M4M2M3N3L3N3M2M4M2M3N20000001O001O00001O001O00001O0VOUA?l>]OWAc0i>[OYAe0R?O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O0N2N3L[`W1"}, "image_id": 1018, "id": 16918}, {"iscrowd": 0, "category_id": 1, "bbox": [167.0, 486.0, 28.0, 26.0], "area": 462, "segmentation": {"size": [512, 512], "counts": "ioc21m?3M2M4M2N3M2M4M2O110O010O01O001O00001O001N1N3M2M4M2N2NdPn4"}, "image_id": 1018, "id": 16919}, {"iscrowd": 0, "category_id": 1, "bbox": [135.0, 491.0, 29.0, 21.0], "area": 413, "segmentation": {"size": [512, 512], "counts": "koS24j?2M3N3L3N2M3O11O001O00001O001O00001O001O00001O0O2L3N2M4M^`]5"}, "image_id": 1018, "id": 16920}, {"iscrowd": 0, "category_id": 1, "bbox": [230.0, 503.0, 16.0, 9.0], "area": 99, "segmentation": {"size": [512, 512], "counts": "n_c32l?2M3N2001O001O00001O00001O000NW`T4"}, "image_id": 1018, "id": 16921}, {"iscrowd": 0, "category_id": 1, "bbox": [327.0, 504.0, 21.0, 8.0], "area": 89, "segmentation": {"size": [512, 512], "counts": "ooS51m?2M3N2001O001O001O00001O001O001O00001O00Q`a2"}, "image_id": 1018, "id": 16922}, {"iscrowd": 0, "category_id": 1, "bbox": [27.0, 0.0, 63.0, 96.0], "area": 3395, "segmentation": {"size": [512, 512], "counts": "c`=1l?3N3M2M3N001TAFQ>>lADR>>kAFe0JWXOWAe0S?M4M2N3L3N2N3L3NSob6"}, "image_id": 1019, "id": 16923}, {"iscrowd": 0, "category_id": 1, "bbox": [189.0, 0.0, 25.0, 10.0], "area": 161, "segmentation": {"size": [512, 512], "counts": "P`n23m?00001O001O00001O00001O00001O001O00001O0000N2N2MSPe4"}, "image_id": 1019, "id": 16924}, {"iscrowd": 0, "category_id": 1, "bbox": [336.0, 0.0, 58.0, 26.0], "area": 943, "segmentation": {"size": [512, 512], "counts": "PPX51o?00001X@O_?1^@2b?N\\@4d?5O001O00001O00001O00001O001O00001O00O11O00001O00001O001O00001O00001O00001O001O00001O00001ON2M3N2M3M3E_@5h?Lnoj1"}, "image_id": 1019, "id": 16925}, {"iscrowd": 0, "category_id": 1, "bbox": [392.0, 0.0, 74.0, 65.0], "area": 2978, "segmentation": {"size": [512, 512], "counts": "lPT61m?3L3N2N3L3N3L3N2M4M2N3QAROk>R1O0010O010O00010O010O0001\\AnNY>Q1dARO\\>l0dAVO]>g0bA]O]>`0dAB\\>>aAF_>m000N3L3N3M2O101O001O00001O001OO1O11O001O00001O001O0000XOSBZOP>c0SB[Oo=c0SBZOP>d0SBYOo=d0TBYOo=e0TBXOn=f0TBWOo=i0d0O1N2M3N2M3Id@H^?67M3NRPg0"}, "image_id": 1019, "id": 16926}, {"iscrowd": 0, "category_id": 1, "bbox": [478.0, 0.0, 34.0, 48.0], "area": 1123, "segmentation": {"size": [512, 512], "counts": "kP_71m?2M4M2M4M2M3O2O010O0j@_Om>a0QABl>j0N2M4M2M4O00001O001O00001O001O0000N2N2M3N2M3"}, "image_id": 1019, "id": 16927}, {"iscrowd": 0, "category_id": 1, "bbox": [93.0, 11.0, 64.0, 89.0], "area": 2824, "segmentation": {"size": [512, 512], "counts": "_b^13k?2O2M3M2O2M2N3M3N1N3M2O2M3M2N3N1N3M3[OeNbB\\1\\=gNaB\\1\\=fNbB\\1]=fN`B]1]=eNbB]1[=fNbB\\1]=fN`B]1]=eNbB\\1\\=f0N2M201ON3N1N3M3M2O2M2N3M3N1N3M2O2M3M2N3N1N3M3gN^AQ1c>nN^AP1k>QOSAg0P?VOSAg0T?O2M3M2O2M2N3M3N1N3Mh^a5"}, "image_id": 1019, "id": 16928}, {"iscrowd": 0, "category_id": 1, "bbox": [458.0, 33.0, 12.0, 15.0], "area": 109, "segmentation": {"size": [512, 512], "counts": "ZQU73k?2M4L3O1010O010M2M3M4Mnnd0"}, "image_id": 1019, "id": 16929}, {"iscrowd": 0, "category_id": 1, "bbox": [160.0, 35.0, 58.0, 94.0], "area": 3095, "segmentation": {"size": [512, 512], "counts": "PR`24a1Mg<5VCNg<6VCMf<6WCMg<5WCNe<6WCMg<5WCMf<7`BYO>d0o<5aBZOoBEQ=nN`Ao0k>M2O101O010O0M3M4M2M4M2M3N3L3MQnb4"}, "image_id": 1019, "id": 16930}, {"iscrowd": 0, "category_id": 1, "bbox": [445.0, 56.0, 54.0, 51.0], "area": 1539, "segmentation": {"size": [512, 512], "counts": 
"bbn62k?3N3L3M4M2M3O2O00010O0010O0010O00010NOO3M4n@WOi>k0UAXOg>R1O110O0[AiNa>[110O010O00010O00010O010OO1M6K2M3M4O0010OO1M4XOj@b0[?M4Ha@Jb?46Mk]6"}, "image_id": 1019, "id": 16931}, {"iscrowd": 0, "category_id": 1, "bbox": [213.0, 58.0, 59.0, 100.0], "area": 3360, "segmentation": {"size": [512, 512], "counts": "`dZ33_?Nk@5Z>LSB1A6Y>KTB2B3W>NTB2E1m=GRB992E0m=HSB883FOm=9ZB;c=GZBPOPBk0U>POQBk0e>M3N10O01O010O000M4M2N3L3N2M4M2MRmg3"}, "image_id": 1019, "id": 16932}, {"iscrowd": 0, "category_id": 1, "bbox": [272.0, 88.0, 67.0, 94.0], "area": 3242, "segmentation": {"size": [512, 512], "counts": "kSX42a0O60k=3lA16Oj=4mAO60k=3lA02HI8U>3nA005P>NlA024o=Z1M2M4M2O110O010O000[BlM_=Z20010O0010O001L3O1010OO2L3N2M2O2M3N1N3N2M3N1N10O010O010O011100O1M4M2M4M200010O01M2N2M4M2M4M2M3N3L3N3LS]f2"}, "image_id": 1019, "id": 16933}, {"iscrowd": 0, "category_id": 1, "bbox": [329.0, 108.0, 62.0, 111.0], "area": 3444, "segmentation": {"size": [512, 512], "counts": "_fT52k?3N3M2M3N3`MA[Ea0e:BUEb0j:@XCL`1f0W;AWCK]1h0[;_OUCM[1f0_;@TCLY1f0c;LWD8h;JTD8k;LPD7o;KlC8SkNkAS1Z>iNhAS1]>kNcAS1f>01O010O0O1N3L3N3M2M4M2N2M4M2M4M2Na[l1"}, "image_id": 1019, "id": 16934}, {"iscrowd": 0, "category_id": 1, "bbox": [359.0, 178.0, 69.0, 85.0], "area": 2804, "segmentation": {"size": [512, 512], "counts": "_gc52m?2M3N1N3N2N2M2O2N2M30O10O1N1N3N2N2M2O2M3N2N1KgNaA\\1]>5N2M2O2N2M3NO010O10O10O10O10O10O10O10O10O010O1100010000O0100N2M2TOkALW>1kAMX>1jAMX>1jAMW>1lALW>C`A::2X>B`A::2]>KfA2]>LeA2\\>MeA2]>KfA2V?N1OSiY1"}, "image_id": 1019, "id": 16935}, {"iscrowd": 0, "category_id": 1, "bbox": [112.0, 188.0, 121.0, 164.0], "area": 6962, "segmentation": {"size": [512, 512], "counts": "[Wh13j?3N3M2N3gACl<>QCFn<:PCHQ=8lBKR=6lBLS=6jBLW=3fB1Y=OeB3\\=K`BC]Oe0R>H^B>b=C[B`0e=_OYBc0g=^OVBe0g=]OWBe0h=\\OUBh0k=XORBj0n=d010O0N3N1010O010O01M2M3N3M2N3M2M4M2N3M2M4M2N2NUO[C`Nb<`1aC_N^<`1dCaN[<]1hCbNY<[1iCfNV^O_Aa0c0WOU=5ZBe0>XOX=1]Bf09\\OZ=J`Bk02^Om=a0QBBn=?oACR>:jAHW>7fALZ>n0O010O010O010O010O01O01O010O010O01L3N3M2N3M2M3N3M2N3M2M4M2N3M2N3L3N2N3M2N_V[4"}, "image_id": 1019, "id": 16936}, {"iscrowd": 0, "category_id": 1, "bbox": [414.0, 223.0, 73.0, 85.0], "area": 2961, "segmentation": {"size": [512, 512], "counts": "mX_62m?2N2N2N2N2N2N2N2N2N2N2ROZO_Bh0_=ZO_Bh0`=YO^Bi0`=YO^Bi0`=YO^Bi0`=YO^Bi0`=YO_Bi0^=YO`Bi0^=YO`Bi0^=YO`Bg0`=[O^Be0b=]O\\Bc0d=_OZBa0f=AXB?h=l001O00000000000001O002N2N2N2N2N2N2N2N2N2N2N2O1O11O01O000N2N2N2N2N2ROUAd0m>ZOUAd0m>ZOUAd0U?N2N2N2N2N2N2N2N2N2O1NeW<"}, "image_id": 1019, "id": 16937}, {"iscrowd": 0, "category_id": 1, "bbox": [432.0, 283.0, 80.0, 68.0], "area": 2074, "segmentation": {"size": [512, 512], "counts": "]Zh61n?2N2N2N2N2N2N2N2N2N2O1N2N2N2N2N2N2N1O00000001O2N2N2N2N1O1O0000000000000000000002N2N2N3M2N000000000000000000001O2N2N1O00000002N2N2IUAVOm>h0UAVOm>h07N2N2N2N2N2N2N2N2N2N2N2NeF"}, "image_id": 1019, "id": 16938}, {"iscrowd": 0, "category_id": 1, "bbox": [320.0, 309.0, 84.0, 66.0], "area": 3108, "segmentation": {"size": [512, 512], "counts": "dZP51m?3M2M3N3L3N3M2O1010O010O01O01OM4M2M4M20001O010O010O00010O010O00010O010O0010O00XAPO`>Q1]AROc>U1O1M4M2M4O00010M2M4O001O01O010O01O0N2N30O010M2N2M4M2M4M2N2M4M2M4M2O110N101\\OPA6P?HSA6n>GUA9l>DWA9k>DZA8Z?Kcee1"}, "image_id": 1019, "id": 16939}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 331.0, 191.0, 181.0], "area": 20955, "segmentation": {"size": [512, 512], "counts": 
"[:T3l<010O010O00010O010O01O01O010O01O01O010O01O010O00010O010O00010M2M4M2M4M2M3010hC_MN2i9_2WFbML2n9\\2RFeMN2o9Y2QFhML2S:V2nEkMM1V:T2jEmMM3X:P2hEQNM1\\:n1dESNM3^:j1bEWNM1b:g1_EZNL2e:e1[E\\NN1g:c1YE_NL1l:`1TEbNN0n:^1REJo:c200001O001O001O00001O001O00001O001O001O00001O001O00001O001O0O2M2M3N3L3N3L3N2N30O010O01O01O010O01O010O01O01O010O01O01O010O010O00010O010O00010O010O0010O0010OmCWLo;m31O00001O001O001O00001O001O00001O001O001N1M3N3L3N3L3N2M4M2N3L3N3L3N2M4M2M4M2M3N3L3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N3L3N2M4M2MYSP5"}, "image_id": 1019, "id": 16940}, {"iscrowd": 0, "category_id": 1, "bbox": [214.0, 362.0, 31.0, 39.0], "area": 690, "segmentation": {"size": [512, 512], "counts": "W\\[33k?3L3N2M4M2M4M2M3N3L3N3M21O010O01O01O0M4M2M3N3L3N2M4M2M4M2M3NaTU4"}, "image_id": 1019, "id": 16941}, {"iscrowd": 0, "category_id": 1, "bbox": [288.0, 384.0, 83.0, 61.0], "area": 3074, "segmentation": {"size": [512, 512], "counts": "T]`44e?7I8I6J600000001O01O00000001O01O000001O01O00000001O01O00000001O01O00000001O01OXASOY>\\1N2000010O0000000001O0001L3K5L4001O00000001O01O0000000J7G8G9N2000001O00000[OSA6n>DXA;Y?N1N3L3K\\SV2"}, "image_id": 1019, "id": 16942}, {"iscrowd": 0, "category_id": 1, "bbox": [195.0, 421.0, 82.0, 74.0], "area": 3635, "segmentation": {"size": [512, 512], "counts": "dnQ33k?2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N2M4M201O01O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O010O01O01O0O2L3N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2M3NURe3"}, "image_id": 1019, "id": 16943}, {"iscrowd": 0, "category_id": 1, "bbox": [486.0, 427.0, 26.0, 85.0], "area": 1305, "segmentation": {"size": [512, 512], "counts": "f_c71m?3N2kNL[B6c=K[B7d=KZB7c=L[B6Y=DTB8a06Y=DTB8`07Z=CTB7a07Y=DTB8a06Y=6eBLY=6dBL[=6cBK[=7dBJ[=8cBI\\=9aBI^=9`BH^=;`BF_=^1O1O1N2O1O01NgB"}, "image_id": 1019, "id": 16944}, {"iscrowd": 0, "category_id": 1, "bbox": [369.0, 437.0, 89.0, 46.0], "area": 2575, "segmentation": {"size": [512, 512], "counts": "_nh53k?2M3N3L3N3M2M3N3L3N3O00010O0010O010O0010O0010O0010O010O001L3N2M4N110O01O01O010O01L3N2N3L3000M301O010O01O01O010O010OPOTAl0Q?O01O010O01O01O010M2N2O20O010O0N2N3L3N3M20010N1N3L3N2M4M2M4Mlaj0"}, "image_id": 1019, "id": 16945}, {"iscrowd": 0, "category_id": 1, "bbox": [315.0, 472.0, 96.0, 40.0], "area": 2064, "segmentation": {"size": [512, 512], "counts": "oom41l?3N2M3N2N2M3N2M3N2M3N2M3N2M3N2N21O001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O001O00001O001O00001O001O00001O001O00001O001O00001Db@4^?Jd@6c?01O001O00001O001O00001O001O00001O00QPb1"}, "image_id": 1019, "id": 16946}, {"iscrowd": 0, "category_id": 1, "bbox": [140.0, 0.0, 19.0, 7.0], "area": 69, "segmentation": {"size": [512, 512], "counts": "PPV21o?00001O001O00001O001O00001O001OO1N2MS``5"}, "image_id": 1021, "id": 16947}, {"iscrowd": 0, "category_id": 1, "bbox": [181.0, 0.0, 7.0, 3.0], "area": 12, "segmentation": {"size": [512, 512], "counts": "P`j21o?001O00001ONRPR5"}, "image_id": 1021, "id": 16948}, {"iscrowd": 0, "category_id": 1, "bbox": [226.0, 0.0, 19.0, 15.0], "area": 194, "segmentation": {"size": [512, 512], "counts": "YPa32l?2M4L3O101O001O00001O0000O1M3N2M3M3NR`U4"}, "image_id": 1021, "id": 16949}, {"iscrowd": 0, "category_id": 1, "bbox": [291.0, 0.0, 18.0, 7.0], "area": 68, "segmentation": {"size": [512, 512], "counts": "P`a41o?001O00001O001O00001O001O00001ON2MS`U3"}, "image_id": 1021, "id": 16950}, {"iscrowd": 0, "category_id": 1, "bbox": [310.0, 0.0, 12.0, 6.0], "area": 42, "segmentation": {"size": [512, 512], "counts": "PPk42n?001O00001O001O001ON2MSPo2"}, "image_id": 1021, "id": 16951}, {"iscrowd": 0, "category_id": 1, 
"bbox": [318.0, 0.0, 32.0, 26.0], "area": 495, "segmentation": {"size": [512, 512], "counts": "dPo41m?2N3L3N3M2M3N3L3O2O001ON2M3N2O1001O00001O001OO1N2M3N2N2M3N2N2MSPa2"}, "image_id": 1021, "id": 16952}, {"iscrowd": 0, "category_id": 1, "bbox": [378.0, 0.0, 51.0, 18.0], "area": 472, "segmentation": {"size": [512, 512], "counts": "PPm51o?001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00001O001O00N2M3N2M3N2M3NR`Y1"}, "image_id": 1021, "id": 16953}, {"iscrowd": 0, "category_id": 1, "bbox": [37.0, 4.0, 70.0, 43.0], "area": 1782, "segmentation": {"size": [512, 512], "counts": "e`b02l?3L3N2O201N010O0N2N3L3N3N100010O0010O0010O0010O0010O0010O0010ON2M4M2O20O00010O010O00010O010O00010ON3L3O110O010OO1M4M2M4L3N2M4M2M4M2M3N3L3Ng_Z6"}, "image_id": 1021, "id": 16954}, {"iscrowd": 0, "category_id": 1, "bbox": [166.0, 8.0, 64.0, 70.0], "area": 2402, "segmentation": {"size": [512, 512], "counts": "RRc22^?Oo@4n>No@5o>Mo@6m>NPA4n>No@6n>?M3N3L3N30O00010O010O010O00010O010O0000M010O10O010O04M2N2M4M201O010O00010OO2M2N2M4M2M4M2N3L3N2M4M2N3L3N2M4M2N3L3N3M_o\\4"}, "image_id": 1021, "id": 16955}, {"iscrowd": 0, "category_id": 1, "bbox": [122.0, 13.0, 30.0, 29.0], "area": 557, "segmentation": {"size": [512, 512], "counts": "oPm13k?3L3N2M4L3N2M4O001O01O01O01O010O01O01O01O01O0M4L3N2M4M2M4L^oc5"}, "image_id": 1021, "id": 16956}, {"iscrowd": 0, "category_id": 1, "bbox": [249.0, 27.0, 44.0, 66.0], "area": 1693, "segmentation": {"size": [512, 512], "counts": "Vbl33k?2ZOM]A6_>N]A5a>M]A5a>N[A6a>M]A5a>M]A64]OQ>Y1mAjNQ>W1lAlNT>`1010O010O0001M2M4N101OO2M2M4M2M4M2N2M4M2M4M2N2M4M2N3L3N3L3N2N3L3N3Ln^]3"}, "image_id": 1021, "id": 16957}, {"iscrowd": 0, "category_id": 1, "bbox": [339.0, 29.0, 73.0, 40.0], "area": 2486, "segmentation": {"size": [512, 512], "counts": "QaY5d0\\?>B000000O100000MPOVAP1j>303M0000O100000000000000000000000000000O1000000000O10000000007I0000000000000000000000000000000O100000000000000000O1000000002Nkna1"}, "image_id": 1021, "id": 16958}, {"iscrowd": 0, "category_id": 1, "bbox": [17.0, 31.0, 51.0, 57.0], "area": 1538, "segmentation": {"size": [512, 512], "counts": "ia81l?4M2N3M2N3M2N3L3N3M2N3M2010O0010O0]APOW>P1fASOY>m0eAUO\\>k0aAXO^>h0`AZOa>R1010O010O01O010O010O010O010O01N1N3D[AYOg>e0\\AXOg>e0[AYOg>e0:aAL9MT>9`AM:MS>f0jA\\OT>f0iA^OV>c0gA_OY>a0eABZ>R1N2M4M2N30O01O010O01O0[NPB[1P>aNSB_1m=_NUBb1k=[NXBd1R>10O0010OO2L3N2N3M2M4M2N3L3N3M2N2M4M2N3M2M4M2N2NilW7"}, "image_id": 1021, "id": 16962}, {"iscrowd": 0, "category_id": 1, "bbox": [357.0, 84.0, 64.0, 54.0], "area": 1740, "segmentation": {"size": [512, 512], "counts": "Wcb51o?1N3M2N2N2O2M2N2N2N3N1O11O01O000O1N3M2O1N2N3M2N2O1N3M2N02N2N100O000000010O000000010O001O2N2O2M2N2N2N3N1N2N2N3M2O1N2N3M2N2O1N3Me\\]1"}, "image_id": 1021, "id": 16963}, {"iscrowd": 0, "category_id": 1, "bbox": [115.0, 97.0, 29.0, 32.0], "area": 536, "segmentation": {"size": [512, 512], "counts": "gci12l?2M4M2N3L3N3L3N2M4O0010O00010O010O010M2M3N3L3N3M2M3N3L3Njlg5"}, "image_id": 1021, "id": 16964}, {"iscrowd": 0, "category_id": 1, "bbox": [58.0, 110.0, 46.0, 53.0], "area": 1287, "segmentation": {"size": [512, 512], "counts": "gTm02l?2M3N3L3M4N10010O00010O0O2L3N2M4M2M3M4M2M4L3N2N3O01O01O01O0N2M4M2M4L3N2M4L3N2M4L3N3L3M3N3L]l[6"}, "image_id": 1021, "id": 16965}, {"iscrowd": 0, "category_id": 1, "bbox": [331.0, 117.0, 21.0, 26.0], "area": 330, "segmentation": {"size": [512, 512], "counts": "XdU52k?4L3M3M4L3M3O20O00010O00010N1L4M4L3M3M4LYl_2"}, "image_id": 1021, "id": 16966}, {"iscrowd": 0, "category_id": 1, "bbox": [111.0, 120.0, 58.0, 73.0], "area": 2052, "segmentation": 
{"size": [512, 512], "counts": "ceg11m?2M4M2M4M2M301O0010O0001L3N3L3N2N3L3N3L3N2M4M2M4M2M3N3L3N3L31O010M2N2M4M2M4M2M3N3L3N3L3N2M4M2N3L3N2M4M2M4M2M3N3L3No[[5"}, "image_id": 1021, "id": 16967}, {"iscrowd": 0, "category_id": 1, "bbox": [173.0, 123.0, 28.0, 28.0], "area": 469, "segmentation": {"size": [512, 512], "counts": "]df22l?2M4L3N3L3N2M4O010O010O00010O010O01O01OO2M2M4M2N2M4M2MP\\k4"}, "image_id": 1021, "id": 16968}, {"iscrowd": 0, "category_id": 1, "bbox": [411.0, 123.0, 59.0, 50.0], "area": 1635, "segmentation": {"size": [512, 512], "counts": "ed]63k?3L3N3M2M4M2M4M2N3L3N2010O010O010O01O010O010O01O010O010O01O010O010O01O010O010O01O010O010O01O0N3L3N3M2M4M2N2M4M2N3L3Nbkd0"}, "image_id": 1021, "id": 16969}, {"iscrowd": 0, "category_id": 1, "bbox": [240.0, 134.0, 59.0, 64.0], "area": 1991, "segmentation": {"size": [512, 512], "counts": "cTh31m?5K4L4L4M2O1N100O2O0O101N4M10Oo@TOn>o0010O0ROSAi0R?1O01O01^AUOn=j0oAZOP>g0lA\\OT>d0jA^OW>a0fACY>>cAE]>o010O001L31O01O010O0O1M4L3O11L3M3N3CeAPO^>m0eAPO_>m0dAPO^>m0N2N2N2N2N2O1N2@UOhAm0V>VOgAl0W>VOgAl0W>VOgAl0W>VOgAl0X>UOfAm0X>>O001O0001O2N2N2N3M2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2O1N2N2NTjc4"}, "image_id": 1021, "id": 16974}, {"iscrowd": 0, "category_id": 1, "bbox": [289.0, 172.0, 33.0, 29.0], "area": 545, "segmentation": {"size": [512, 512], "counts": "ne`41m?3M2N3M2N3M2N3M2N210O010O010O010O010O010OOO0001O2N3M2N3M2N3M2N2Nbjn2"}, "image_id": 1021, "id": 16975}, {"iscrowd": 0, "category_id": 1, "bbox": [229.0, 187.0, 48.0, 58.0], "area": 1574, "segmentation": {"size": [512, 512], "counts": "Ygb31m?2M3N3M2M4M2M4M2N2CYOaAh0\\>[OcAe0Z>^OgAb0V>AiA?U>CjA?R>DkA?U>BhA`0Y>_OeAd0Z>a010O0010OO1O20O010O000N3M2M4M2N3L3N2N3L3N3M2M3N3L3N3M2M4M2NlYe3"}, "image_id": 1021, "id": 16976}, {"iscrowd": 0, "category_id": 1, "bbox": [282.0, 208.0, 47.0, 57.0], "area": 1406, "segmentation": {"size": [512, 512], "counts": "[W]42i?0Y@2e?5O2M3N2j@Bg>?XACe>`0YABe>`0XACf>>YACe>`0YABe>n0N2M2O2N2M3N1O2000O1M2O2N2M3N1O2M3N2N1N3N2N2M2O2N2M3N1O2M3N2N1N3N2N2M2OlXk2"}, "image_id": 1021, "id": 16977}, {"iscrowd": 0, "category_id": 1, "bbox": [408.0, 211.0, 29.0, 32.0], "area": 581, "segmentation": {"size": [512, 512], "counts": "XW\\64j?2M4DHk@;R?Hl@:R??n@DP?g0O20O010O010O00010O010O010O00010O010O0O2M200010O010O01O01O01M2N3M2M4M2N3L3N2NoWW7"}, "image_id": 1021, "id": 16980}, {"iscrowd": 0, "category_id": 1, "bbox": [229.0, 243.0, 26.0, 25.0], "area": 371, "segmentation": {"size": [512, 512], "counts": "Shb31m?2N2M4M2N3L3N30O00010O010O010O00010O010L3N3M2M3N3MWXP4"}, "image_id": 1021, "id": 16981}, {"iscrowd": 0, "category_id": 1, "bbox": [464.0, 248.0, 30.0, 29.0], "area": 457, "segmentation": {"size": [512, 512], "counts": "WXX71m?3N2M3N1N3N2N1N3N2M20100O01000O01000OO2M3N1N3N2M2O2M3N1N3Nmg8"}, "image_id": 1021, "id": 16982}, {"iscrowd": 0, "category_id": 1, "bbox": [409.0, 255.0, 45.0, 59.0], "area": 1530, "segmentation": {"size": [512, 512], "counts": "]i\\62Z?OWA3Z>N`A144Y>N`A143Z>N`A153X>O`A152Z>N_A253Y>o010O010O010O010O010O010O010ON3M2N3M2N3M2N3M2N3M2O2M2N1O002N2N3M2N3M2L5Mfgl0"}, "image_id": 1021, "id": 16983}, {"iscrowd": 0, "category_id": 1, "bbox": [42.0, 269.0, 58.0, 70.0], "area": 1945, "segmentation": {"size": [512, 512], "counts": "QZe01m?2M3N3L3N3M2M3N3D]OVAf0h>\\OVAg0f>;M3N3L3N3L3N2N3O010O01iNmA?S>_OPBa0P>\\ORBd0n=YOVBg0i=WOYBi0h=SO[Bm0e=QO^Bo0b=mNaBS1W>0O00010O010O00010O0O2M2N3NO2M4M2M3N3M2M4M2M3N3L3N3L3N2Mgf]6"}, "image_id": 1021, "id": 16984}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 275.0, 32.0, 37.0], "area": 812, "segmentation": {"size": [512, 512], "counts": 
"c8P1Q?O0010O0010O010O00010O010L3O20O0010O010O00010N1M4M2M4M2N2M4M2MSg_7"}, "image_id": 1021, "id": 16985}, {"iscrowd": 0, "category_id": 1, "bbox": [345.0, 284.0, 31.0, 29.0], "area": 497, "segmentation": {"size": [512, 512], "counts": "]i\\52l?2N3M2N3M2M4M2N210O010O010O010O010O010O010O01M2N2M4M2N3M2N3MkfS2"}, "image_id": 1021, "id": 16986}, {"iscrowd": 0, "category_id": 1, "bbox": [443.0, 285.0, 57.0, 58.0], "area": 1583, "segmentation": {"size": [512, 512], "counts": "Rjm62m?1N3N2M3N1O2M3N1010000N1O2M3N2N1N3N2M210GnNcAR1[>oNfAQ1W>ROfAP1Y>ROeAP1X>SOeAP1Y>=N1N20O2M2O2N2M3N1O2M3N1N3N2N2M2O2M3N2N1N3N2N1N3N2M3N1O2M3N1Oaf5"}, "image_id": 1021, "id": 16987}, {"iscrowd": 0, "category_id": 1, "bbox": [106.0, 311.0, 52.0, 53.0], "area": 1450, "segmentation": {"size": [512, 512], "counts": "mZe13j?3N2M4L3N2O2O010O00010O01O01O010N1N2M4L3N2M4L3N3L3M3N3N10010O010O0001O0M3N3L3M4M2M3M4M2M3M4M2M4L3N2M4LSf`5"}, "image_id": 1021, "id": 16988}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 312.0, 20.0, 30.0], "area": 420, "segmentation": {"size": [512, 512], "counts": "h9m0S?N2010O01O01O010O01O0O1N3L3N3L3N3L3N2NSfe7"}, "image_id": 1021, "id": 16989}, {"iscrowd": 0, "category_id": 1, "bbox": [192.0, 316.0, 27.0, 27.0], "area": 444, "segmentation": {"size": [512, 512], "counts": "]ZP33j?3N2M4L3N3L3O1010O0010O0010O00010O0010OM3N3L3M3N3L3MPVb4"}, "image_id": 1021, "id": 16990}, {"iscrowd": 0, "category_id": 1, "bbox": [488.0, 327.0, 24.0, 51.0], "area": 800, "segmentation": {"size": [512, 512], "counts": "Q[d72l?2O2EKh@7V?Kh@7V?Jh@9U?Ji@76Df>g0WA\\Og>f0WA\\Og>f0WA\\Og>o0N3N2N2000O10O10N1N10O10000jE"}, "image_id": 1021, "id": 16991}, {"iscrowd": 0, "category_id": 1, "bbox": [12.0, 334.0, 74.0, 65.0], "area": 2382, "segmentation": {"size": [512, 512], "counts": "k[61l?4M2M4M2N2M4M210O01O010O01N1M4M2M3N3O0010O0001O0N3L3N2N3O010O010O00010O010O00010O010O0O2L10000O010O0102N3L3N3O010O0N2M4M2M4M2O101N1N3L3N3L3N2N3L3N[ed6"}, "image_id": 1021, "id": 16992}, {"iscrowd": 0, "category_id": 1, "bbox": [230.0, 343.0, 21.0, 24.0], "area": 312, "segmentation": {"size": [512, 512], "counts": "X[c32k?4M2M3N3L3N3M20010O0010O000O2L3M4M2M3N3LWUR4"}, "image_id": 1021, "id": 16993}, {"iscrowd": 0, "category_id": 1, "bbox": [155.0, 344.0, 53.0, 42.0], "area": 1237, "segmentation": {"size": [512, 512], "counts": "^k]22k?3N3L3M4M2O1010O01O01O010O00010O01O01O010OO2L3M3N3L3N201O0UAPOg>U1O00010O00O2M210O00010N1M3N3L3N3L3M3N3L3M4M2MReg4"}, "image_id": 1021, "id": 16994}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 349.0, 4.0, 9.0], "area": 19, "segmentation": {"size": [512, 512], "counts": "m:9g?M4M2NTem7"}, "image_id": 1021, "id": 16995}, {"iscrowd": 0, "category_id": 1, "bbox": [75.0, 355.0, 34.0, 31.0], "area": 621, "segmentation": {"size": [512, 512], "counts": "gkU11m?3L3M3M4M2M3M4N11O010O01O01O01O01O010O01O01O01O01O010L3M3N3L3M3M4MeTY6"}, "image_id": 1021, "id": 16996}, {"iscrowd": 0, "category_id": 1, "bbox": [222.0, 368.0, 50.0, 50.0], "area": 1323, "segmentation": {"size": [512, 512], "counts": "d\\_31m?2M4M2N3L3N2M4M2N3L3N210O010O0010O0010O010O00010O010M2M4M2M3N3L2O02M3N3M2M3N3L3N3L3N3M2M3N3L3N3M2M]dg3"}, "image_id": 1021, "id": 16997}, {"iscrowd": 0, "category_id": 1, "bbox": [283.0, 388.0, 47.0, 54.0], "area": 1516, "segmentation": {"size": [512, 512], "counts": "em]41m?2M4L3]OJYA:c>IZA:c>I[A9c>JYA:c>c0O101O01O0M3M3M02O2M310O3M010O00010O00010O01OO0N0100O4L3M3M4M2M3M4L3M4M2M3M4Ljcj2"}, "image_id": 1021, "id": 16998}, {"iscrowd": 0, "category_id": 1, "bbox": [124.0, 394.0, 61.0, 58.0], "area": 2077, "segmentation": {"size": 
[512, 512], "counts": "Z]n11m?3M2N3L3N3M2M3N3M2M4M2N3M2N2010O010O010O01O01O01O0M4M201O01O01O012M10O00M4O0010O010O010O0VOfANZ>OiA2W>KlA4T>JnA7R>FPB:P>CTB=l=@VB`0j=^OYB`0h=]O[Ba0h=\\O[Ba0c>N2M4M2N3L3NcRS5"}, "image_id": 1021, "id": 16999}, {"iscrowd": 0, "category_id": 1, "bbox": [339.0, 404.0, 60.0, 46.0], "area": 1579, "segmentation": {"size": [512, 512], "counts": "XmY52m?1N3M3N1N3N2M2O2M2N3N2M2O2O1O01000O0100O0100o@ROn>P11000M2N3N2N110O01N2O01000O010O10O01N1N100O1O2O2M3N1N3N1N3M3N1N3N2M2O2M3MoRh1"}, "image_id": 1021, "id": 17000}, {"iscrowd": 0, "category_id": 1, "bbox": [30.0, 405.0, 61.0, 62.0], "area": 1932, "segmentation": {"size": [512, 512], "counts": "o\\?2m?2X@N_?4_@O^?3`@O^?;N2N2N2N2O100000000l@WOn>o0N2N2N2O1N2N3N100000000000000000000000000O1N2N2O01N2O1N2N2N2kNZAm0h>QOZAm0h>QOZAm0n>N2N2N2N2N2N3M2N2N2N2N2N2O1NTRb6"}, "image_id": 1021, "id": 17001}, {"iscrowd": 0, "category_id": 1, "bbox": [184.0, 416.0, 61.0, 53.0], "area": 1767, "segmentation": {"size": [512, 512], "counts": "o]l23j?3N2GJe@:X?Ie@9X?:M2M3N3M2M4O00010O010O00010O010O00010O010O0010O001oNWAj0h>TOZAl0m>10O0010O0010O0010O0010O0010O010O0001O0N3M2M3N3L3N3L3N2M4MWRU4"}, "image_id": 1021, "id": 17002}, {"iscrowd": 0, "category_id": 1, "bbox": [253.0, 416.0, 24.0, 29.0], "area": 398, "segmentation": {"size": [512, 512], "counts": "dmn33k?2N3L3N3L3N2M4M2O2O01O01O010OM4M2M3N3L3N3L3N2MnRe3"}, "image_id": 1021, "id": 17003}, {"iscrowd": 0, "category_id": 1, "bbox": [413.0, 417.0, 26.0, 25.0], "area": 339, "segmentation": {"size": [512, 512], "counts": "]m^61m?2O2N2N2N2M3N1O2N2N2O1000O10000O0O2M3N2N2N2N2M2O2NdRT1"}, "image_id": 1021, "id": 17004}, {"iscrowd": 0, "category_id": 1, "bbox": [347.0, 439.0, 26.0, 24.0], "area": 298, "segmentation": {"size": [512, 512], "counts": "Qn]51m?3N1O2M3N1O2M3000O01000O01000O10O10O10ON3N2M2O2M3NmQU2"}, "image_id": 1021, "id": 17005}, {"iscrowd": 0, "category_id": 1, "bbox": [395.0, 449.0, 49.0, 45.0], "area": 1107, "segmentation": {"size": [512, 512], "counts": "dnU61n?2N2N3M2N2N2N2N2N2N2O1N2N2N2N2N3M2N2N2N2O100000N2N2N2N2N2N3M2N2O1N000000002N3M2N2N2N2N2N2N2O1N2N[aQ1"}, "image_id": 1021, "id": 17006}, {"iscrowd": 0, "category_id": 1, "bbox": [133.0, 450.0, 63.0, 59.0], "area": 2092, "segmentation": {"size": [512, 512], "counts": "inR23k?2N3L3N2N3M2M4M2TAYO]>j0`AXO]>k0aAXO^>i0^AZOc>e0[A^Od>o010O00010[AhN`>]10O010O01O01O010O010O010O00010O010O010O000N3M2M4M2N3L3N3M2O110O010O010O00010O010M2Ch@0[?Mg@0\\?Ng@Of?No`m4"}, "image_id": 1021, "id": 17007}, {"iscrowd": 0, "category_id": 1, "bbox": [304.0, 457.0, 60.0, 55.0], "area": 1903, "segmentation": {"size": [512, 512], "counts": "j_h43j?3M3M4L3N2M4L3M3N30O0000N2N2001O000000M3M3M3O11O00001O00001O00N2M3N2M3M3N20M3M4L3M3M4M200010ON2M4L3M301O001M2M3M4L3M3McaY2"}, "image_id": 1021, "id": 17008}, {"iscrowd": 0, "category_id": 1, "bbox": [75.0, 462.0, 47.0, 50.0], "area": 1429, "segmentation": {"size": [512, 512], "counts": "moU13k?2M3N2M3N2M3N2M3N2M3N2M3N2M3N2M3N2M3N21O001O00001O001O00001O001M2N2M4M2M4M2M3N3L3N3L3N2M4M2MZaR6"}, "image_id": 1021, "id": 17009}, {"iscrowd": 0, "category_id": 1, "bbox": [451.0, 466.0, 28.0, 28.0], "area": 411, "segmentation": {"size": [512, 512], "counts": "nnQ71o?2M2N2N2N2N3N1N2N2N3M2O1N1O0001O3M2O1N2N3M2N2N2O2M2N2NQQ`0"}, "image_id": 1021, "id": 17010}, {"iscrowd": 0, "category_id": 1, "bbox": [12.0, 484.0, 56.0, 28.0], "area": 874, "segmentation": {"size": [512, 512], "counts": "n_62l?2N2M3N2M3N2M3N2M30000N2N2001O00001O001O001O00001O0]Oh@>^?O001O001O00001O001O001O00001O001O001O00001O001O001O000O2LX`m6"}, "image_id": 1021, 
"id": 17011}, {"iscrowd": 0, "category_id": 1, "bbox": [361.0, 484.0, 47.0, 28.0], "area": 720, "segmentation": {"size": [512, 512], "counts": "ood51n?1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O100O1O11O1O1O1O1O1O1O2M2N2N2N2O1N2N2N2N\\`c1"}, "image_id": 1021, "id": 17012}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 496.0, 39.0, 16.0], "area": 434, "segmentation": {"size": [512, 512], "counts": "g_i62n?2M3N2M2O0O100O100O100O1001O2N2N0000O100O100O100O100O100O1001O2N2N2N2N2N2N2NQPc0"}, "image_id": 1021, "id": 17013}, {"iscrowd": 0, "category_id": 1, "bbox": [151.0, 507.0, 9.0, 5.0], "area": 25, "segmentation": {"size": [512, 512], "counts": "oo[21n?1O1O1O11O1O1O1OQ`_5"}, "image_id": 1021, "id": 17014}, {"iscrowd": 0, "category_id": 1, "bbox": [474.0, 507.0, 11.0, 5.0], "area": 31, "segmentation": {"size": [512, 512], "counts": "o_]71o?0O100O100O100O11O2NRP="}, "image_id": 1021, "id": 17015}, {"iscrowd": 0, "category_id": 1, "bbox": [6.0, 0.0, 268.0, 44.0], "area": 6229, "segmentation": {"size": [512, 512], "counts": "PP3?a?000000000000000000000000O1000000000000000000000000000000000000O100000000000000000000000000000000000000O1000000000000000000000000000000000000O12Nc0]OHo00000000000000000O1000000000000000000000000000000000000O100000000000000000000000000000000000000O1000000000000000000000000000000000000O1000000000000000PPg3"}, "image_id": 1022, "id": 17016}, {"iscrowd": 0, "category_id": 1, "bbox": [474.0, 0.0, 38.0, 53.0], "area": 1943, "segmentation": {"size": [512, 512], "counts": "PP]7>b?i0WO>B0000000000000000000000000000000000O10000000000000000000000000000000000"}, "image_id": 1022, "id": 17017}, {"iscrowd": 0, "category_id": 1, "bbox": [474.0, 89.0, 16.0, 42.0], "area": 564, "segmentation": {"size": [512, 512], "counts": "kR]73m?a0^Ob0_O3M000O100001O000000008Ha0_O]l:"}, "image_id": 1022, "id": 17018}, {"iscrowd": 0, "category_id": 1, "bbox": [404.0, 90.0, 41.0, 14.0], "area": 544, "segmentation": {"size": [512, 512], "counts": "kRZ6=c?0000000000000000000000000000000O1000000000000000000000000000O10000000000000003MS]Q1"}, "image_id": 1022, "id": 17019}, {"iscrowd": 0, "category_id": 1, "bbox": [446.0, 431.0, 66.0, 81.0], "area": 3174, "segmentation": {"size": [512, 512], "counts": "Z_o6f0Z?00000000000000000000000000000000000000000000000000000000000000000_Na1000000000000000000N20000H80000000000000000000000007I0000000000"}, "image_id": 1022, "id": 17020}, {"iscrowd": 0, "category_id": 1, "bbox": [211.0, 0.0, 85.0, 94.0], "area": 5978, "segmentation": {"size": [512, 512], "counts": "P`Y32V21_;6YDLf;5XDMg;3WDOi;1VD1i;OVD3i;MWCK1;g0O100000O100000O100000O1000O100000O1000O100000O100000O100000O1000O1CcNWB]1i=jNPBV1P><10000000O1000O100000O1000O10000000O10O10000000O1000O100000O1000O10M3000000000O10O10000000O10O1000000000O10O10000000O105K7I6J7I6I7J7I6J7I[mZ6"}, "image_id": 1023, "id": 17023}, {"iscrowd": 0, "category_id": 1, "bbox": [293.0, 37.0, 57.0, 90.0], "area": 3014, "segmentation": {"size": [512, 512], "counts": "Qbb41n?4M4L3M3L5L3M4K4ZOTORBo0k=UOQBo0k=TOQBP1k=UOQBo0k=TORBo0k=f0M4K4M4L3L100000O010000O01000O04M3M4L3L5L000O10003L4M3M4L3L5L3M3L5L3M4L3L4M4L3L5L3M3L_l`2"}, "image_id": 1023, "id": 17024}, {"iscrowd": 0, "category_id": 1, "bbox": [385.0, 55.0, 127.0, 105.0], "area": 13011, "segmentation": {"size": [512, 512], "counts": 
"hcP6V1V=d1C=000000000000000000000000000000000001O00000000000000000000000000000000000000000001O000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001O000000000000000000000000000001O00000000000000000000000WN"}, "image_id": 1023, "id": 17025}, {"iscrowd": 0, "category_id": 1, "bbox": [246.0, 87.0, 86.0, 125.0], "area": 5381, "segmentation": {"size": [512, 512], "counts": "dTk31o?4L3L5L3M3L5L3M4L2M10000O01000O01000aNSOkCl0Vb?d0\\Oc0]Oc0]Od0\\Oc0]O1O0O10000000000000000WOkLgDU3Y;i00000000000000000000000000000O1000000000000000000000000000000000000O1000000000000000000000000000000000000O100000000000000000000000000000000000000O1000000000000000000000000000000000000O1000000000000000000000000000000000000O100000000000000000000000000000000000000O1000000000000000000000000000000000000O1000000000000000000000000000000000000O1000000000000000000GaKQE_4o:900O100000000000000000000000000000000000000O1000000000000000000000000000000000000O1000000000000000000000000000000000000O100000000002Nc0]Oc0]Od0\\O2N000000000HXCXMhYOiA13f0S>\\OhA02d0W>IgA7X>h010O010O10O010O10O02O2M3N2M2O0O2O2M4M2M3N2M3N2N2M3N3L3N2M3N2M3No]i1"}, "image_id": 1024, "id": 17038}, {"iscrowd": 0, "category_id": 1, "bbox": [135.0, 25.0, 16.0, 11.0], "area": 105, "segmentation": {"size": [512, 512], "counts": "m`S21n?4M3M0O10O10O01000O010001N4M2NQ_d5"}, "image_id": 1024, "id": 17039}, {"iscrowd": 0, "category_id": 1, "bbox": [424.0, 28.0, 27.0, 28.0], "area": 401, "segmentation": {"size": [512, 512], "counts": "YQd62m?2N2N2N2N2N2N2N2N2N1O2N2N2000N2N2N2N2N2N2N2N2N2N2N2Nh^n0"}, "image_id": 1024, "id": 17040}, {"iscrowd": 0, "category_id": 1, "bbox": [20.0, 29.0, 21.0, 21.0], "area": 364, "segmentation": {"size": [512, 512], "counts": "]Q:3b?;K501O00000001O0000000001O0000000001H7E\\_[7"}, "image_id": 1024, "id": 17041}, {"iscrowd": 0, "category_id": 1, "bbox": [461.0, 39.0, 51.0, 70.0], "area": 1608, "segmentation": {"size": [512, 512], "counts": "ibV71n?2N3\\@LW?6g@LW?6g@LW?6g@Lg>JeAg0X>[OfAg0X>[OfAf0Y>\\OeAd0[>^OcAb0]>@aA`0_>B_A:O[Ob>U1`AiN`>W15O00000000000000000001O000000000000001O0002N2N2O1N2N2N2N2N2N2N2N2N2N2N3MZN"}, "image_id": 1024, "id": 17042}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 44.0, 44.0, 39.0], "area": 801, "segmentation": {"size": [512, 512], "counts": "b1YOYAg0g>\\OVAd0i>9100000O01000O3N00000O01000O10O3N3M4K4M4L3M4K4M4L3L^mQ3"}, "image_id": 1024, "id": 17045}, {"iscrowd": 0, "category_id": 1, "bbox": [421.0, 53.0, 15.0, 16.0], "area": 126, "segmentation": {"size": [512, 512], "counts": "mab62m?2N2N2N2N2N2N0002N2N2N2N2N2NVnU1"}, "image_id": 1024, "id": 17046}, {"iscrowd": 0, "category_id": 1, "bbox": [92.0, 58.0, 28.0, 28.0], "area": 479, "segmentation": {"size": [512, 512], "counts": "QR^11o?3M2M4M3M3L4M2M2O00O01000O010O10O10O10O12M4M3M3L4M2N3LdmS6"}, "image_id": 1024, "id": 17047}, {"iscrowd": 0, "category_id": 1, "bbox": [386.0, 74.0, 29.0, 25.0], "area": 470, "segmentation": {"size": [512, 512], "counts": "`RQ62n?3M4K5L4L2N0O0100000O010000O010000O0100000O0103M4K5L3M4LT]`1"}, "image_id": 1024, "id": 17048}, {"iscrowd": 0, "category_id": 1, "bbox": [122.0, 79.0, 56.0, 32.0], "area": 1317, "segmentation": {"size": [512, 512], "counts": "VSm12k?4K4M3L4M4K4O101O01O0001O01O01O01O0001O01O01O00O1000O1000O1000O1000O1000O1000O1000O1000O10O100000O104L5K5K5J6KilV5"}, "image_id": 1024, "id": 17049}, {"iscrowd": 0, "category_id": 1, "bbox": [323.0, 82.0, 22.0, 19.0], "area": 262, "segmentation": {"size": [512, 512], "counts": 
"nbQ52m?4M2M10OLK^@6a?410O102N0O01000O01000O04M3M3L3NS]c2"}, "image_id": 1024, "id": 17050}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 85.0, 15.0, 25.0], "area": 200, "segmentation": {"size": [512, 512], "counts": "e2i0X?N1N3M3N1N30O01O1M2O20OJZ@2g?L[@3j?MP]h7"}, "image_id": 1024, "id": 17051}, {"iscrowd": 0, "category_id": 1, "bbox": [91.0, 85.0, 7.0, 10.0], "area": 64, "segmentation": {"size": [512, 512], "counts": "eb]19g?00001O0003MXm^6"}, "image_id": 1024, "id": 17052}, {"iscrowd": 0, "category_id": 1, "bbox": [39.0, 89.0, 45.0, 45.0], "area": 1476, "segmentation": {"size": [512, 512], "counts": "Vcc0k0U?00O101O0000000000000000000000000000000000000006J00000B>000000000000000000000000000000=]OPme6"}, "image_id": 1024, "id": 17053}, {"iscrowd": 0, "category_id": 1, "bbox": [484.0, 101.0, 28.0, 50.0], "area": 770, "segmentation": {"size": [512, 512], "counts": "PTb72m?2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N2N00000000000lL"}, "image_id": 1024, "id": 17054}, {"iscrowd": 0, "category_id": 1, "bbox": [401.0, 106.0, 23.0, 15.0], "area": 339, "segmentation": {"size": [512, 512], "counts": "ZcX6?a?000000000000000000000000000000000000000006J`l[1"}, "image_id": 1024, "id": 17055}, {"iscrowd": 0, "category_id": 1, "bbox": [93.0, 114.0, 28.0, 31.0], "area": 620, "segmentation": {"size": [512, 512], "counts": "ec^1380Z?5a@O[??L4K3N00O100000O01000000O0100000O3N00O@m@5S?KQA0P?0TALl>3YAIf>8d0M4LW\\S6"}, "image_id": 1024, "id": 17056}, {"iscrowd": 0, "category_id": 1, "bbox": [145.0, 114.0, 17.0, 10.0], "area": 117, "segmentation": {"size": [512, 512], "counts": "ecX21o?3L4M000000O010000000O0100000O5LZl^5"}, "image_id": 1024, "id": 17057}, {"iscrowd": 0, "category_id": 1, "bbox": [322.0, 124.0, 15.0, 17.0], "area": 242, "segmentation": {"size": [512, 512], "counts": "mSQ58g?9H000000000000000000000004LP\\g2"}, "image_id": 1024, "id": 17058}, {"iscrowd": 0, "category_id": 1, "bbox": [391.0, 126.0, 36.0, 56.0], "area": 1369, "segmentation": {"size": [512, 512], "counts": "\\dS61n?3N2M2N3N2M3N2M2O1N1YA@n=Z1N1O100O100O100O1O100O100O11O2N2N2N2N2N2N2N1O2N2Ic[Z1"}, "image_id": 1024, "id": 17059}, {"iscrowd": 0, "category_id": 1, "bbox": [1.0, 134.0, 37.0, 32.0], "area": 608, "segmentation": {"size": [512, 512], "counts": "ed02m?1N3M3M2O2M2N3N1N3O10O010O01000O010O010O10O10O010O010N2M2O2M2N3N2M2N3M2O\\k\\7"}, "image_id": 1024, "id": 17060}, {"iscrowd": 0, "category_id": 1, "bbox": [114.0, 135.0, 87.0, 48.0], "area": 2588, "segmentation": {"size": [512, 512], "counts": "RUi14f?7J5K5J6N2000010O00000000001O01O000000K501O0000000001O01O0000000000010O000000000000POVAk0o>01O0000000001O01O0000000001O0001O000006J00000001O0001O0000QOXAf0Q?000000001O01O000000000N2H8H[[k4"}, "image_id": 1024, "id": 17061}, {"iscrowd": 0, "category_id": 1, "bbox": [277.0, 143.0, 71.0, 44.0], "area": 2216, "segmentation": {"size": [512, 512], "counts": "^eZ47^?;F;O000001O0000000000000001O0001O0000000000000001O000F:N2000000001O000001O0000000000000006J1O01O00000000ROUAg0R?00000001O01O000000000000000000F;D[ka2"}, "image_id": 1024, "id": 17062}, {"iscrowd": 0, "category_id": 1, "bbox": [49.0, 149.0, 15.0, 19.0], "area": 146, "segmentation": {"size": [512, 512], "counts": "Seh01m?3M2N3M2N3M2N30N1N3M3M2N3M2NYko6"}, "image_id": 1024, "id": 17063}, {"iscrowd": 0, "category_id": 1, "bbox": [510.0, 151.0, 2.0, 4.0], "area": 6, "segmentation": {"size": [512, 512], "counts": "hTo72m?2XK"}, "image_id": 1024, "id": 17064}, {"iscrowd": 0, "category_id": 1, "bbox": [93.0, 168.0, 31.0, 26.0], "area": 542, "segmentation": {"size": [512, 512], "counts": 
"Ze^15i?200000001d@Jm>5m@1S?Og@7Y?8000001O01O00000001O01O00000000010O000M3J6J6JjjQ6"}, "image_id": 1024, "id": 17065}, {"iscrowd": 0, "category_id": 1, "bbox": [506.0, 174.0, 6.0, 11.0], "area": 37, "segmentation": {"size": [512, 512], "counts": "bUm72n?1N2N2N2N2aJ"}, "image_id": 1024, "id": 17066}, {"iscrowd": 0, "category_id": 1, "bbox": [352.0, 179.0, 7.0, 9.0], "area": 62, "segmentation": {"size": [512, 512], "counts": "cU`58h?1O000000000]Z\\2"}, "image_id": 1024, "id": 17067}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 186.0, 24.0, 44.0], "area": 574, "segmentation": {"size": [512, 512], "counts": "m5Y1f>000001O2N2M3N2N2N2N1O2N2N2N2N2N2N2M3N2N2N2N1Ofic7"}, "image_id": 1024, "id": 17068}, {"iscrowd": 0, "category_id": 1, "bbox": [57.0, 186.0, 36.0, 64.0], "area": 1637, "segmentation": {"size": [512, 512], "counts": "mfl01=2P?1n@1_>MdA4K1_>3_AN01^>c0_A@_>b0_A_O_>S1M3N1N3O1O001N2O001O1O001N01O1N2O1N2O0O2KgAbNZ>]1gAaN[>]15O1N3N7H9G8H8H^Ya6"}, "image_id": 1024, "id": 17069}, {"iscrowd": 0, "category_id": 1, "bbox": [434.0, 190.0, 8.0, 8.0], "area": 55, "segmentation": {"size": [512, 512], "counts": "QVi65h?300000000000JXjR1"}, "image_id": 1024, "id": 17070}, {"iscrowd": 0, "category_id": 1, "bbox": [128.0, 192.0, 57.0, 40.0], "area": 1593, "segmentation": {"size": [512, 512], "counts": "gVP25b?9L400000001O000001O00e@DR?e00000L4I8O000001O0000000001O000001O000M3000000SOUAe0S?0000000010O00000000000000010O0000000I7G9GmYS5"}, "image_id": 1024, "id": 17071}, {"iscrowd": 0, "category_id": 1, "bbox": [346.0, 194.0, 24.0, 19.0], "area": 440, "segmentation": {"size": [512, 512], "counts": "\\V]59]?:0000000000000000000000000000000000000000000JTjV2"}, "image_id": 1024, "id": 17072}, {"iscrowd": 0, "category_id": 1, "bbox": [283.0, 206.0, 53.0, 33.0], "area": 1478, "segmentation": {"size": [512, 512], "counts": "Yg]42_??D<0001O00000001O00000000000000000001O00000001O00000000000000000001O00000001O00000000000000000001O00D]OdAc0\\>ZOgAe0Z>YOhAg0X>WOjAi0V>UOkAk0V>SOlAk0U>SOnAk0e>N2M3N2N1O2N2M3N2N2N1O2M3NPg`5"}, "image_id": 1024, "id": 17080}, {"iscrowd": 0, "category_id": 1, "bbox": [458.0, 261.0, 31.0, 23.0], "area": 526, "segmentation": {"size": [512, 512], "counts": "cXU74f?6K6L30000001O01O00000001O01O000001O0001O000001O01O0000L4J7IlW;"}, "image_id": 1024, "id": 17081}, {"iscrowd": 0, "category_id": 1, "bbox": [60.0, 263.0, 33.0, 32.0], "area": 547, "segmentation": {"size": [512, 512], "counts": "gXn01n?2N1N3N2N2N2N2M2O2N2N2M3N2O010O100000O1N2N1N3N2N2N2N2M2O2N2N2M3N[Wa6"}, "image_id": 1024, "id": 17082}, {"iscrowd": 0, "category_id": 1, "bbox": [262.0, 272.0, 62.0, 54.0], "area": 1497, "segmentation": {"size": [512, 512], "counts": "dYS42m?2N2N1O2M3N2N2N2N2N2N1N3N2N2N2N2N2M3H6J60O2O1O2N1O1O1O1N3N1O1O1O2N1O1N2O1O2N1O1O1O1H9K4010O000M3001O0000000001O000001O0O1Gdgm2"}, "image_id": 1024, "id": 17083}, {"iscrowd": 0, "category_id": 1, "bbox": [7.0, 282.0, 34.0, 27.0], "area": 554, "segmentation": {"size": [512, 512], "counts": "Si32n?3L4M3M3L4M1OO01000O01000O01000O01000O0100O01000O0100O2O3M3L4M3M3LgV[7"}, "image_id": 1024, "id": 17084}, {"iscrowd": 0, "category_id": 1, "bbox": [453.0, 282.0, 33.0, 26.0], "area": 624, "segmentation": {"size": [512, 512], "counts": "\\iR73h?5J6K5N200010O000000010O00000001O01O000001O01O000000010O0O1K5J6KVg<"}, "image_id": 1024, "id": 17085}, {"iscrowd": 0, "category_id": 1, "bbox": [67.0, 301.0, 55.0, 53.0], "area": 1445, "segmentation": {"size": [512, 512], "counts": 
"SjQ11m?3N2N1O2M3N2N2N1N3N2N2N1N3N2N2M3O010N2N2O10O01000000O01000000O0100000lNXAo0h>nN[AQ1j>1000O1N2M2O2N2M3N1O2N2M3N2N1O2M3N2NeeR6"}, "image_id": 1024, "id": 17086}, {"iscrowd": 0, "category_id": 1, "bbox": [409.0, 309.0, 57.0, 35.0], "area": 1479, "segmentation": {"size": [512, 512], "counts": "ji\\66i?k000000000000000000000000000000000000000BbA>^>F]A;c>HZA7f>b000000O05L3M4L4K4M4LO01000O10O1000O101O4L4K4M3M0O10O1000O5L3TOWA;m>@XA@XA;Z?M4L3LVdi6"}, "image_id": 1024, "id": 17091}, {"iscrowd": 0, "category_id": 1, "bbox": [399.0, 340.0, 8.0, 9.0], "area": 67, "segmentation": {"size": [512, 512], "counts": "ejW68h?0O10000000O2O[Ud1"}, "image_id": 1024, "id": 17092}, {"iscrowd": 0, "category_id": 1, "bbox": [226.0, 349.0, 61.0, 52.0], "area": 1926, "segmentation": {"size": [512, 512], "counts": "f[a32l?2O2M2N3M2N3M3N1N3M2N3M201O010N1N300O0100O010O010O010O01O1N1N3M201O0100O010O010O0100N1N3M2N3OlNgAa0Y>]OiAb0Y>]OhA`0Z>AeA>]>AdA<^>EbA8a>G_A7c>J]A3f>K[A3h>KZA3g>K[A3h>J[ALC5T?M^A0e>N\\A0bc`3"}, "image_id": 1024, "id": 17093}, {"iscrowd": 0, "category_id": 1, "bbox": [361.0, 369.0, 36.0, 24.0], "area": 632, "segmentation": {"size": [512, 512], "counts": "kkd51o?9G00000000000O100000OJ8O4L000O1000000000O10000000000000O1000000002N=BQTi1"}, "image_id": 1024, "id": 17094}, {"iscrowd": 0, "category_id": 1, "bbox": [503.0, 372.0, 9.0, 10.0], "area": 78, "segmentation": {"size": [512, 512], "counts": "ekk77h?101O00000000010ZD"}, "image_id": 1024, "id": 17095}, {"iscrowd": 0, "category_id": 1, "bbox": [318.0, 386.0, 9.0, 12.0], "area": 108, "segmentation": {"size": [512, 512], "counts": "R\\o4B00000000O1000000000000000]gm0"}, "image_id": 1025, "id": 17102}, {"iscrowd": 0, "category_id": 1, "bbox": [387.0, 428.0, 27.0, 29.0], "area": 385, "segmentation": {"size": [512, 512], "counts": "PnQ61n?2M3N1O2N2M3N1O2M3N2N1N2OO10O11N3N2N2M2O2N2N2M3N1O2M^b`1"}, "image_id": 1025, "id": 17103}, {"iscrowd": 0, "category_id": 1, "bbox": [77.0, 0.0, 67.0, 58.0], "area": 2077, "segmentation": {"size": [512, 512], "counts": "j`V13m?1N3M2O2M2N3N1N3M2O2IZOo@h0o>ZOPAh0n>7M2N3N1N3M2O2M1O010HaNnA_1S>bNlA]1U>eNhA\\1X>51O100O1O100AeAXO\\>h0fAUO[>j0gAUOY>j0jASOW>m0jAQOW>n0lAPOT>o0?00O11O1O2N1O1OO100O100O1O2O2M2N3N1N3M3N1N3M2O2M2N3N1N3NYog5"}, "image_id": 1028, "id": 17104}, {"iscrowd": 0, "category_id": 1, "bbox": [139.0, 0.0, 40.0, 35.0], "area": 836, "segmentation": {"size": [512, 512], "counts": "P`U22n?1O2N1O2N1\\@H^?:_@H`?>N1O2N1O2N1O2N1O2N1O2N1O2N00O1O100O1O100O1O100WOo@b0R?^OPA?Q?@QA?o>@TA=m>CTA;m>DVA:Y?N3M2O2M]_V5"}, "image_id": 1028, "id": 17105}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 90.0, 19.0, 45.0], "area": 722, "segmentation": {"size": [512, 512], "counts": "l2[1e>000O010000000O0100000000O2O5K5XOTA2Q?HUA3a?JQ\\f7"}, "image_id": 1028, "id": 17106}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 145.0, 36.0, 58.0], "area": 1133, "segmentation": {"size": [512, 512], "counts": "b4g1Y>00O1000O1002oNiA4^>FgA5^>EbA_O]Aa0Q?3M1O0O100000O10O100000O1000O1000O1000O1005K6I6K5Kni]7"}, "image_id": 1028, "id": 17107}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 216.0, 48.0, 49.0], "area": 1420, "segmentation": {"size": [512, 512], "counts": "i6`1`>O1000O01000004K4M4L0O01001N4M3M00O010000O01000O010000O010000O010000O01000O010003L4M4L3M3L5L3MWhW7"}, "image_id": 1028, "id": 17108}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 278.0, 68.0, 76.0], "area": 2604, "segmentation": {"size": [512, 512], "counts": 
"m8U2j=01O01O01O0IWBYNh=h1ZBUNg=j1[BTNe=l1712M2N3N1N2N10O00010O00010O000101N3M2O2M2N3N1N1O2N3N1N2N3NO01O01O01O01O01O02N2O0O00010O0001O2O1N3M2O2M2N3N1N3M2O2M2Ncfm6"}, "image_id": 1028, "id": 17109}, {"iscrowd": 0, "category_id": 1, "bbox": [16.0, 317.0, 77.0, 77.0], "area": 2844, "segmentation": {"size": [512, 512], "counts": "`[81n?3M2O2M3M2O2M2N3N1N3M2N3N2M2N3HnN^AS1`>oN^AQ1b>70O01O01O01O01O01GeNlA[1T>hNiAX1W>jNhAV1X>801O01O01O01O01O00010O00010O0010O00010O0001O01002O01N2M2O2nNaA>a>@aA?a>^O`Aa0b>^O]Ab0f>[OZAf0g>XO[Af0h>XOYAf0S?N1N3M3N1N3M2O2M2O2M2NiTa6"}, "image_id": 1028, "id": 17110}, {"iscrowd": 0, "category_id": 1, "bbox": [53.0, 369.0, 67.0, 78.0], "area": 2617, "segmentation": {"size": [512, 512], "counts": "^mj01o?2^OOPA3n>OPA4m>OQA2n>OPA4m>OPA3n>OQA3m>a0N3M2O1N00010O000IlNdAT1\\>mNbAS1^>7010O00010O0001HdNlA[1T>gNjAY1W>8O00010O00010O00010O00010O00012M2N3N01N2N3N1oNgA8\\>FfA7\\>GeA9]>DcA<_>CbA;a>BbA<_>BcA<`>BbA;`>CcA;_>CbA;`>CcA:S?O2M2N3N1NQcS6"}, "image_id": 1028, "id": 17111}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 395.0, 16.0, 26.0], "area": 251, "segmentation": {"size": [512, 512], "counts": "^e1lA^NQ>c1nA_NQ>`1PBbNn=^1QBdNo=Z1QBiNo=T1RBmN4Cc=_1XBQO3Af=P2WBSNh=S210O0100O010O010O010N1O2N1O2N1O1O0O101N3M2O2M2N3N1cN`AW1g>M2O2N101M2O2M1O01O02N3N1N3M2O2M2N3N1N3JZ@Mh?1kPU5"}, "image_id": 1028, "id": 17114}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 433.0, 56.0, 59.0], "area": 1918, "segmentation": {"size": [512, 512], "counts": "l=_1`>2O0O010O0002O1N3M2O1N0010O00010O0010O00010O0010O00010O00010O01O3N1N3M2O2M10O01O01O01O01O01O3Il@^OV?`06N3M2O2M2N3N1NoaS7"}, "image_id": 1028, "id": 17115}, {"iscrowd": 0, "category_id": 1, "bbox": [59.0, 470.0, 68.0, 42.0], "area": 1739, "segmentation": {"size": [512, 512], "counts": "Rom01o?2M2N3N1N3M2O2M2N3N1N2N3N1N3M2O2M2N3O01O0O2N1O00O100O1O100O1O100O1O12N1O2N1O2N1O2N1O2N00O1O100001O2N1O2N1O2N1O2N1O2N1O2N1O2N1O2N1O2N1ORPP6"}, "image_id": 1028, "id": 17116}, {"iscrowd": 0, "category_id": 1, "bbox": [12.0, 493.0, 41.0, 19.0], "area": 404, "segmentation": {"size": [512, 512], "counts": "o_61o?0O1O100O1O100O1O100O1O100O1O100O1O100O1O100O1O100O1O1001O2N1O2N1O2N1O2N1O2N1O2NQPU7"}, "image_id": 1028, "id": 17117}, {"iscrowd": 0, "category_id": 1, "bbox": [210.0, 0.0, 3.0, 2.0], "area": 5, "segmentation": {"size": [512, 512], "counts": "PPY31o?1O0P`e4"}, "image_id": 1029, "id": 17118}, {"iscrowd": 0, "category_id": 1, "bbox": [218.0, 0.0, 29.0, 25.0], "area": 495, "segmentation": {"size": [512, 512], "counts": "_P]33j?3N3L3N3M2M301O001O00001O001O001O0000O1N2N2M3N2M3N2N2M3NR`T4"}, "image_id": 1029, "id": 17119}, {"iscrowd": 0, "category_id": 1, "bbox": [255.0, 0.0, 45.0, 46.0], "area": 1441, "segmentation": {"size": [512, 512], "counts": "n`o32l?2M4M2M4M2^OD]A?c>AZAc0e>^OXAd0h>\\OUAh0j>70001O001O00001O001O00001O001O001O00001O001O00001O00N2YO_AMc>0`AMc>1_AMc>0aALb>2`AKc>2`ALb>2g0MSPZ3"}, "image_id": 1029, "id": 17120}, {"iscrowd": 0, "category_id": 1, "bbox": [400.0, 0.0, 62.0, 49.0], "area": 1979, "segmentation": {"size": [512, 512], "counts": "kPX63k?3L3N3L3M4M2M3N3L3N3N100O2O010O000101N010O00010O01O0M3010ON3M201O00001O001O00001O001O01O010L3N2N3L3N3L3N2M4M2M4M2N2M4M2M4Mjoh0"}, "image_id": 1029, "id": 17121}, {"iscrowd": 0, "category_id": 1, "bbox": [468.0, 0.0, 44.0, 53.0], "area": 1306, "segmentation": {"size": [512, 512], "counts": "]PZ71m?2N3L3N3M2N201O00001O001\\A_Og=a0WBAi=?UBDj==RBFn=:PBIo=8nAJR>6kANT>2jA0V>1gA1Z>NdA5[>h010O010O0002M1M4M2N3L3N3N11O01O010O0O2\\Oo@4T?Io@5S?IPA4g>"}, "image_id": 1029, "id": 17122}, {"iscrowd": 0, "category_id": 
1, "bbox": [0.0, 33.0, 52.0, 45.0], "area": 1533, "segmentation": {"size": [512, 512], "counts": "a1>_?4M2M4M2N2M4N1010O00010O010O00010O010O01O01OM4N110O00010O010O00010O010O0M21N3L3N2M4M2M4M2M3N3M2M4M2M3NhnU7"}, "image_id": 1029, "id": 17123}, {"iscrowd": 0, "category_id": 1, "bbox": [388.0, 47.0, 44.0, 41.0], "area": 1530, "segmentation": {"size": [512, 512], "counts": "dRR64Q?k0000000000000000000000000000000L400J6000O10000000000000003M00000000000000000000000000^nW1"}, "image_id": 1029, "id": 17124}, {"iscrowd": 0, "category_id": 1, "bbox": [41.0, 49.0, 61.0, 55.0], "area": 1807, "segmentation": {"size": [512, 512], "counts": "cbd02l?2N3L3N3L3N3M2M3N3M2M4M2N2M4N1010O010O00010O010O010O0001[AfNc>6_Al0`>RObAn0_>nNdAR1f>0O01O0N30O00010O010O010M2N2N3OO1M4M2N3L3N2M4M2N3L3N3M2M3Nlm\\6"}, "image_id": 1029, "id": 17125}, {"iscrowd": 0, "category_id": 1, "bbox": [471.0, 68.0, 41.0, 56.0], "area": 1541, "segmentation": {"size": [512, 512], "counts": "]c[73k?3L3DKk@8g>G[A3K9h>FZAf0d>]OYAf0c>=N3M2010O0010O0010O0010O0010O0010O0010O010M201O000F_ASOd>j0^ASOe>k0:L3N2M4M2N3N`M"}, "image_id": 1029, "id": 17126}, {"iscrowd": 0, "category_id": 1, "bbox": [104.0, 83.0, 61.0, 52.0], "area": 1831, "segmentation": {"size": [512, 512], "counts": "eSd11m?2N3L3N3L3N2M4M2N3L3N2M4N110O010O00010O010O00010O010M2O110O010O00010O010O0010O00N3L3NO10O03N2M3N3L3N3M2M3N3L3N3L3N2M4M2NZ]]5"}, "image_id": 1029, "id": 17127}, {"iscrowd": 0, "category_id": 1, "bbox": [32.0, 88.0, 25.0, 25.0], "area": 364, "segmentation": {"size": [512, 512], "counts": "ZS`02k?3N3L3N3M2M4M2O101O010ON3L3N210O010O0O2M2N2M4M2MU]S7"}, "image_id": 1029, "id": 17128}, {"iscrowd": 0, "category_id": 1, "bbox": [432.0, 88.0, 21.0, 23.0], "area": 304, "segmentation": {"size": [512, 512], "counts": "XSh62l?3L3N2N3M2M3N3O001O01O01O01N1N3L3N2M4M2MW]m0"}, "image_id": 1029, "id": 17129}, {"iscrowd": 0, "category_id": 1, "bbox": [160.0, 97.0, 52.0, 61.0], "area": 1629, "segmentation": {"size": [512, 512], "counts": "_T`22k?3N3L3N2M4M2O2O0010O0010O001L3N2M4M2N3L3N2M4M2M4M2M3N3O001O010ON3L3N2M4M2M4M2M3N3L3N3L3N2M4M2M4M2N2M4Mhle4"}, "image_id": 1029, "id": 17130}, {"iscrowd": 0, "category_id": 1, "bbox": [64.0, 113.0, 50.0, 41.0], "area": 1185, "segmentation": {"size": [512, 512], "counts": "TTP12k?3N3M2N3M2M4g@_Oo>c0n@@P?i0O2O010O01O01O010O01O000M4MQAZOh>g0UA[Ol>m0O010O0001L3N3M20010O0010O0010O0010OO1N3L3N3L3N2M4M2MRlV6"}, "image_id": 1029, "id": 17131}, {"iscrowd": 0, "category_id": 1, "bbox": [202.0, 121.0, 53.0, 68.0], "area": 1906, "segmentation": {"size": [512, 512], "counts": "VUU32k?4M2M3N3L3O20O0001k@_Oj>`0SADl>=QAEa>JiAl0T>WOkAi0R>ZOnAf0P>\\OQBd0l=_OQBc0l=@RBc0k=@QBc0l=@RBb0l=k0O0010O0M4M2M3O2O0010O0001O0N3L3N2N3L3N3L3N2M4M2N3L3N2M4M2M4M2N2Mn[P4"}, "image_id": 1029, "id": 17132}, {"iscrowd": 0, "category_id": 1, "bbox": [506.0, 123.0, 6.0, 16.0], "area": 54, "segmentation": {"size": [512, 512], "counts": "VTm73k?2N2N3L3N3TL"}, "image_id": 1029, "id": 17133}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 125.0, 47.0, 64.0], "area": 1733, "segmentation": {"size": [512, 512], "counts": "Y5:d?2M4M2M4M2M3N3L3N3L3N2M4L3N3L3N2M4N1010O010O00010O0M4M2M3N3L3N3M2M3N3L3N3L3N2M4M2N3L3N3L3N2Mk[X7"}, "image_id": 1029, "id": 17134}, {"iscrowd": 0, "category_id": 1, "bbox": [256.0, 125.0, 51.0, 69.0], "area": 2064, "segmentation": {"size": [512, 512], "counts": "YUP43k?3L3N3c@Fo>=n@FP?f0M4M2M3N3M2M4M2M4N11OO2M2M4M2N210O010O0010M2N3L3N2M4M2N3O00010O010@`A]Oa>`0bA^Oa>?bA]Oa>a0aA]Oa>`0cA\\Oa>a0`0M4M2N2M4M2Mj[V3"}, "image_id": 1029, "id": 17135}, {"iscrowd": 0, "category_id": 
1, "bbox": [136.0, 151.0, 29.0, 32.0], "area": 572, "segmentation": {"size": [512, 512], "counts": "\\UT24j?2N3L3N2M4M2M4M2N2010O01O01O010O01O01O0M4M2N2M4M2M4M2M3NT[]5"}, "image_id": 1029, "id": 17136}, {"iscrowd": 0, "category_id": 1, "bbox": [306.0, 159.0, 48.0, 61.0], "area": 1703, "segmentation": {"size": [512, 512], "counts": "aVi41l?4M2N2M4ZODdA>Y>FdA=Y>EdA>Z>EcA>Y>EeA=Y>EdA>Z>EcA>Z>e0010O010O00010O010O010O00O1N000O03N2N2M4M2N3L3N3M2M3N3M2M4M2N3L3N2N3L3Nlj^2"}, "image_id": 1029, "id": 17137}, {"iscrowd": 0, "category_id": 1, "bbox": [346.0, 177.0, 54.0, 66.0], "area": 1715, "segmentation": {"size": [512, 512], "counts": "oV]52k?3N3L3N2M4M2O2O00010O0m@\\Ok>d0QA_Oo>a0o@AQ?h00O010ON3M2M3N3M2M4M2M3N2M010O103L3000O1N3L3N3L3N2M4M2N3L3N2M4M2M4M2M3N3M2M4M2MZjg1"}, "image_id": 1029, "id": 17138}, {"iscrowd": 0, "category_id": 1, "bbox": [167.0, 179.0, 16.0, 17.0], "area": 169, "segmentation": {"size": [512, 512], "counts": "nec22k?4L3N2M40O00010O00010O0M4M2M3M\\ZT5"}, "image_id": 1029, "id": 17139}, {"iscrowd": 0, "category_id": 1, "bbox": [179.0, 182.0, 27.0, 27.0], "area": 443, "segmentation": {"size": [512, 512], "counts": "Xfi22k?4M2M3N3L3N2M40O01O01O010O01O01O010O000N3L3N3L3M3N3LUjh4"}, "image_id": 1029, "id": 17140}, {"iscrowd": 0, "category_id": 1, "bbox": [222.0, 187.0, 14.0, 16.0], "area": 130, "segmentation": {"size": [512, 512], "counts": "WV_31m?2N3M2M3N3N110O01N1N2N3M2MUjY4"}, "image_id": 1029, "id": 17141}, {"iscrowd": 0, "category_id": 1, "bbox": [21.0, 191.0, 90.0, 55.0], "area": 1930, "segmentation": {"size": [512, 512], "counts": "[g:1n?3N2M3NO01O010O010O010O2O2M2O2M3N0O10O01G]OUAb0l>@QAa0n>BPA=Q?8010O010O010O010O010O010O010O010O010O010O01O01O010O010O010O010O010O010O011N3N2M010O010O010O0010O0010O010O010O010O2O2M3N2M3N2M3N2M3N`YX6"}, "image_id": 1029, "id": 17142}, {"iscrowd": 0, "category_id": 1, "bbox": [232.0, 196.0, 31.0, 32.0], "area": 604, "segmentation": {"size": [512, 512], "counts": "iVd32k?4M2M4L3N2M4M2M4O010O01O01O010O010O01O01O01N1M4M2N3L3N2N3L3NeYl3"}, "image_id": 1029, "id": 17143}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 201.0, 11.0, 14.0], "area": 99, "segmentation": {"size": [512, 512], "counts": "]69f?2N010O010O02O2M3N2MaYj7"}, "image_id": 1029, "id": 17144}, {"iscrowd": 0, "category_id": 1, "bbox": [304.0, 215.0, 28.0, 28.0], "area": 460, "segmentation": {"size": [512, 512], "counts": "XWh43k?2M4M2N3L3N2N3N1010O010O010O0010O0010N1N3M2M4M2N2M4M2NSii2"}, "image_id": 1029, "id": 17145}, {"iscrowd": 0, "category_id": 1, "bbox": [384.0, 217.0, 55.0, 60.0], "area": 1856, "segmentation": {"size": [512, 512], "counts": "oWP61m?3L3N3M2M4M2010O00O2M2H[ORAi0k>ZORAh0l>8M2M4M2N2dAcNT>_1iAcNX>\\1fAgNY>`110O01OO2N1010O010O01O01O0N3L3O2N1N3M2N2M4M2N3M2M4M2N3M2M3N3M2N3L3NlXT1"}, "image_id": 1029, "id": 17146}, {"iscrowd": 0, "category_id": 1, "bbox": [438.0, 218.0, 56.0, 69.0], "area": 1938, "segmentation": {"size": [512, 512], "counts": "oWk62l?2M4M2N3L3O2O01ON3L3N3N110M2N3N11O01_APOP>P1mASOR>o0kATOR>n0lATOR>o0kATOQ>o0lATOR>^1N3M2010O010O010M2N3L3N2N3M2M4M2N3L3N2N3L3N3M2N3L3N3M2M3N3M2N3Llh8"}, "image_id": 1029, "id": 17147}, {"iscrowd": 0, "category_id": 1, "bbox": [271.0, 224.0, 25.0, 33.0], "area": 524, "segmentation": {"size": [512, 512], "counts": "cgW4161_?1_@1^?3_@O_?;L3M4M2M3O20O01O0O110O01M2N2M4M2M4M2M3N3L3Nmh[3"}, "image_id": 1029, "id": 17148}, {"iscrowd": 0, "category_id": 1, "bbox": [326.0, 231.0, 26.0, 24.0], "area": 398, "segmentation": {"size": [512, 512], "counts": "fWS52k?4M2M3N3L3N3O00010O01O01O010O00010O01O0O1M4M2M4L3Ndh_2"}, "image_id": 1029, 
"id": 17149}, {"iscrowd": 0, "category_id": 1, "bbox": [487.0, 232.0, 25.0, 55.0], "area": 754, "segmentation": {"size": [512, 512], "counts": "dhc71m?3M2M3N3M2N3N101M2M3N3M2M4M2N2M4M2M4M2N3M20010O0gH"}, "image_id": 1029, "id": 17150}, {"iscrowd": 0, "category_id": 1, "bbox": [13.0, 236.0, 127.0, 106.0], "area": 4241, "segmentation": {"size": [512, 512], "counts": "hi62m?2O2M2N3N1N3M2N2O2M2N3N1N2N3N1N3M2O1N3M2OO0001O01O01O01O00010O00010O0001O01O01O01O01O01O00010O0001O01O01O01O01O01O00010O0G^AUOb>k0aASO_>m0bAQO^>o0dAoN\\>Q1910O00010O00010O000010O000010O00010O00010O00010O0000010O00010O00010O00010O000010O000010O00101N2N3N1N3M2N3N1N3M2O1N3M2O2M2NPhi5"}, "image_id": 1029, "id": 17151}, {"iscrowd": 0, "category_id": 1, "bbox": [145.0, 236.0, 26.0, 24.0], "area": 348, "segmentation": {"size": [512, 512], "counts": "fgX21n?2N2M2O4K3N1O200000OO2M3N101000N1O2M3N2M2O2N2M2O2M^XZ5"}, "image_id": 1029, "id": 17152}, {"iscrowd": 0, "category_id": 1, "bbox": [363.0, 272.0, 23.0, 33.0], "area": 443, "segmentation": {"size": [512, 512], "counts": "Rie5180\\?2b@0\\?3a@0\\?2b@0\\?\\OSAb0X?M2N2O2M2N2N3N1NfVa5"}, "image_id": 1029, "id": 17155}, {"iscrowd": 0, "category_id": 1, "bbox": [408.0, 283.0, 33.0, 34.0], "area": 566, "segmentation": {"size": [512, 512], "counts": "gY\\61m?2M4M2N3M2M3N3M2M4M2N3L31O0N3M2N3M2N2N3M2N30O000O2O01O001L3N3L3MQWS1"}, "image_id": 1029, "id": 17156}, {"iscrowd": 0, "category_id": 1, "bbox": [469.0, 283.0, 21.0, 25.0], "area": 319, "segmentation": {"size": [512, 512], "counts": "]iZ72k?4L3N2M4M2M4M21O01O010O01O0N2N3L3M3N3L3MSg:"}, "image_id": 1029, "id": 17157}, {"iscrowd": 0, "category_id": 1, "bbox": [164.0, 284.0, 61.0, 34.0], "area": 1549, "segmentation": {"size": [512, 512], "counts": "hYb23`?=E;00000010O00000000000000000001O0001O000N20000001O000000000001O0002N0000O100O100O010O010O010O10O010O3N3L3N3M2M4M2M4M2M_V_4"}, "image_id": 1029, "id": 17158}, {"iscrowd": 0, "category_id": 1, "bbox": [244.0, 303.0, 60.0, 46.0], "area": 1572, "segmentation": {"size": [512, 512], "counts": "eYj34?Oi>3TA0i>3UAOj>3RA1n>No@5Q?Km@8S?:0001N11O01O010O01O01O01O01O01O01O010O01O01O01O01O01O01O010O01O01O01O01O01O01O010O01O01O01L3M3N3L3M3M4M2M4LPfW3"}, "image_id": 1029, "id": 17159}, {"iscrowd": 0, "category_id": 1, "bbox": [56.0, 308.0, 102.0, 103.0], "area": 4654, "segmentation": {"size": [512, 512], "counts": "mZl01o?2M2N2O2M2N3N1N2N3N1N2N3EXOZAi0e>XOYAk0d>WOZAk0d>XOZAi0e>;M2N3O01O010O01O01O010O01O01O010O00VObNQC_1ljNmAU1R>mNoAP1Q>SOnAk0R>WOnAh0R>YOoAd0Q>_OnA?R>CnA;S>FlA:S>IkA6U>LiA4W>OfA1[>j0O01O2O1N3M0001ON3O01O100O0010O2N2O2M2N2N3N1N3M2N01O01O0001O01O010O2N2N3N1N2N3M2O2M2N2O2M2NVdX3"}, "image_id": 1029, "id": 17165}, {"iscrowd": 0, "category_id": 1, "bbox": [41.0, 359.0, 26.0, 33.0], "area": 368, "segmentation": {"size": [512, 512], "counts": "\\kd01o?1N2N3N1N3M2_@D\\?b0M2O2O010O010O10O1O1M3N1O2M2N3N1N3M2MRTn6"}, "image_id": 1029, "id": 17166}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 360.0, 43.0, 64.0], "area": 1561, "segmentation": {"size": [512, 512], "counts": "m;[1d>01O0001O0JfNhAY1X>jNeAV1\\>kNbAV1]>600010O000010O0000010O03M2N3N1N2N3M2O1N3M2O1N1O0010O01O2Ah@3[?Jh@4Y?Ji@4Z?Ih@5d?M2OiSZ7"}, "image_id": 1029, "id": 17167}, {"iscrowd": 0, "category_id": 1, "bbox": [316.0, 376.0, 72.0, 45.0], "area": 1912, "segmentation": {"size": [512, 512], "counts": "^\\n41l?3M3M3M4K4M3N3O000010O00010O000103L00010O0000010O00010O00010O00010O000L5L3N21O01O01O01O01O01O01O01O01O01OPOXAh0h>UO[Ak0m>0010O00010O000L5L3M3M4O000O1M4K4Micm1"}, "image_id": 1029, "id": 17168}, {"iscrowd": 0, "category_id": 1, 
"bbox": [396.0, 398.0, 71.0, 45.0], "area": 1958, "segmentation": {"size": [512, 512], "counts": "Q]V63i?5L3L4M4L3N201O01O00010O0001O01Ok@ZOP?k0010O000010O00010O000010O00010O0000O2L3M3O20O0001O01O01O01O01O01O00010nNXAj0h>SO[Am0l>10O00010O00001L3M3M3L5N100O2L3M3MTSf0"}, "image_id": 1029, "id": 17169}, {"iscrowd": 0, "category_id": 1, "bbox": [211.0, 408.0, 21.0, 23.0], "area": 239, "segmentation": {"size": [512, 512], "counts": "nlY33m?1N2N3M2O1N2N3N1N210O00N3M2N2O1N3M2N2O2Mjb[4"}, "image_id": 1029, "id": 17170}, {"iscrowd": 0, "category_id": 1, "bbox": [181.0, 412.0, 36.0, 40.0], "area": 727, "segmentation": {"size": [512, 512], "counts": "Xmj22m?2O2M2N2N3N1N3M2N2O2M2N3M2O1N3M2O20O001M2N2N3N1N3M2O1N3M2N3N1N2N3N1N3M[Rc4"}, "image_id": 1029, "id": 17171}, {"iscrowd": 0, "category_id": 1, "bbox": [5.0, 416.0, 37.0, 39.0], "area": 811, "segmentation": {"size": [512, 512], "counts": "Wm2121h?1W@00Ne?:O2O001M2N3N1N2N3M2O2M2N10020O0N2N3N10O1N2O2M2N3M2O2M2N2O2@b@9`?Eb@9d?O2M2N3MVbZ7"}, "image_id": 1029, "id": 17172}, {"iscrowd": 0, "category_id": 1, "bbox": [114.0, 423.0, 26.0, 29.0], "area": 389, "segmentation": {"size": [512, 512], "counts": "`]i13l?2N2O1N3M2N2a@BY?`0f@AX?f0M2N2O10N2N2N2N2O1N2N3M2N2N2N2N2O2MZbi5"}, "image_id": 1029, "id": 17173}, {"iscrowd": 0, "category_id": 1, "bbox": [137.0, 435.0, 54.0, 50.0], "area": 1475, "segmentation": {"size": [512, 512], "counts": "YnT21n?2N3N1N2N3N1N3M2N3N1N2N3M2O2M2N2N3N1N3M0010O0000010O00010O1O3M2O1N2N0001O01O0001O01O2N2O1N3Dl@GV?7m@FV?8k@GV?7;N3M2O1NPRP5"}, "image_id": 1029, "id": 17174}, {"iscrowd": 0, "category_id": 1, "bbox": [275.0, 437.0, 77.0, 64.0], "area": 2598, "segmentation": {"size": [512, 512], "counts": "WnY44i?3L4M4L3M3O2O01O05K010O0010O01O100O1O1O101N2M3K4M1101N1O101O02N101N1O2O0OhN_AS1_>jNdAW1c>1N1O2N101N2N10O0O101N1O101N1O101N1O101N1O101NNKWAXOi>i0XAVOh>i0[ATOf>k0\\ATOe>k08O1O2O00010K4^Of@;b?L3M3Mga_2"}, "image_id": 1029, "id": 17175}, {"iscrowd": 0, "category_id": 1, "bbox": [465.0, 439.0, 47.0, 63.0], "area": 1657, "segmentation": {"size": [512, 512], "counts": "YoX71n?3M2O2M2N3N1N3M2O0O00010SO^O[Bb0e=AXB?h=CWB=i=DUBh00O00010O00010O00102M2N3N1N2J^AkNc>U1^AjNa>V14010O0002O1NUB"}, "image_id": 1029, "id": 17176}, {"iscrowd": 0, "category_id": 1, "bbox": [106.0, 449.0, 16.0, 16.0], "area": 130, "segmentation": {"size": [512, 512], "counts": "U^e11n?3N1N2N3M2O1O2O00N3N1N2N3M2O1NfaR6"}, "image_id": 1029, "id": 17177}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 451.0, 32.0, 47.0], "area": 884, "segmentation": {"size": [512, 512], "counts": "]>U1j>00010O00010O0000010O00010O01O2O2M2N3M2O1N3M2O2M2N2N3N1N3M2O2MWa_7"}, "image_id": 1029, "id": 17178}, {"iscrowd": 0, "category_id": 1, "bbox": [85.0, 460.0, 24.0, 26.0], "area": 326, "segmentation": {"size": [512, 512], "counts": "dnZ13l?2O1N3M2N2O2M2N2N3N1N2001M2N2O1N3M2N2N3N1N2N3MUQY6"}, "image_id": 1029, "id": 17179}, {"iscrowd": 0, "category_id": 1, "bbox": [357.0, 463.0, 98.0, 49.0], "area": 2490, "segmentation": {"size": [512, 512], "counts": 
"kob52h?O\\@3b?0\\@2a?700O1O100O11O2N00O100O1HCl@>S?Dk@=U?Ei@;V?8O100O1O1O100O1O1O100O1O100OKUAWOj>i0YAUOf>k0\\ATOd>l0700010O0000010O0001O01O00010O002000010O010O00010O01O01N1O1O10O000000001OO1O1O100O1O1O11O1O1O2N1O1O2YOPA:Q?DQA;Q?CQA;P?CRA]BGc=9YBKg=5UB0j=0QB5P>KkA9U>GfA>Z>e0O01ON2K5L5J5L4K5L5J5L4L4K5L5J500ZOeLgD[3T;jLlDW3o:nLQEQ3j:TMVEl2g:WMYEi2g:WMYEj2f:VMZEj2f:WMZEh2f:XMZEh2f:XMVEl2j:TMREQ3m:PMmDU3S;l00001O000000001O0000001O000000001O0000001O000000001O0000001O0000001O000000001O0000001O000000001O0000001O000000001O0000001O0000001O000000001O0000001O000000001O0000001O000000001O0000001O000000001O0000001O0000001O000000001O0000001O000000001O0000001O000000001O0000001O000000001O0000001O0000001O000000001O0000001O000000001O0000001O000000001O0000001O0000001O000000001O0000001O000000001O0000001O000000001O0000001O000000001O0000001O0000001O000000001O0000001O000000001O0000001O0kFfId8Z6WGkIi8U6SGoIm8b601O0000001O0000001O00000000O1L4L4K50000001O0000001O000000001O0000001O000000001O0000001O0000001O000000001O0000001O000000001O0000001O000000001O0000001O000000001O0000001O0000001O000000001O0000001O000000001O0000001O000000001O0000001O0000001O000000001O0000001O0000M3L4K5L4L4K5L4K5L4K5L4K5L4L4`NbEYMc:b2bEZMb:b2bEYMc:c2aEYMc:g2^ESMg:m2YEoLk:Q3UEjLP;V3PEfLT;Z3lDbLX;_3d0M3L4L4K5L4K5L4K5L4K5L4L4K5L4K5L4K5L4L4K5L4K5L4K5L4KUP2"}, "image_id": 1031, "id": 17185}, {"iscrowd": 0, "category_id": 1, "bbox": [273.0, 370.0, 48.0, 26.0], "area": 718, "segmentation": {"size": [512, 512], "counts": "QlX41k?4L4K5N3O01O0001O01O000001O01O0001O01O0001O01O0000010O000001O01O0001O01O0001O01O000001ON3K4K5LXTo2"}, "image_id": 1031, "id": 17186}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 379.0, 174.0, 133.0], "area": 17713, "segmentation": {"size": [512, 512], "counts": "k;U4k;000001O000000001O0000001O000000001O0000001O000000001O0000001O000000001O0000001O000000001O0000001O000000001O0000001O000000001O0000001O000000001O0000001O000000001O0000001O000000001O0000001O000000001O0000001O000000001O0000001N1L4L4K5L_CWMTYOeAi0[>WOfAh0Z>XOfAh0l;cAF\\?O00000010ON2K5L_aX5"}, "image_id": 1031, "id": 17187}, {"iscrowd": 0, "category_id": 1, "bbox": [354.0, 392.0, 39.0, 24.0], "area": 575, "segmentation": {"size": [512, 512], "counts": "f\\a52j?4L4L5M2001O01O0001O01O0001O0001O01O0001O01O0001O01O0001O01O0001O01O0M3L4L4LdSk1"}, "image_id": 1031, "id": 17188}, {"iscrowd": 0, "category_id": 1, "bbox": [96.0, 0.0, 40.0, 22.0], "area": 558, "segmentation": {"size": [512, 512], "counts": "VP`12k?4M2O2O001O00001O001O001O00001O001O001O001O00001O001O001O00001OM3N2M3N2M3N2M3NRPl5"}, "image_id": 1032, "id": 17189}, {"iscrowd": 0, "category_id": 1, "bbox": [142.0, 0.0, 12.0, 4.0], "area": 29, "segmentation": {"size": [512, 512], "counts": "PPW21o?00001O001O00001O0000MSPc5"}, "image_id": 1032, "id": 17190}, {"iscrowd": 0, "category_id": 1, "bbox": [166.0, 0.0, 44.0, 45.0], "area": 1198, "segmentation": {"size": [512, 512], "counts": "RQc22l?2M3N3L3N3L3M3N3L3EUO_Am0a>SO\\AQ1c>oNZAT1f>4O00001O001O00001O001O0000N2M3N2M3N2M3N2M3N2M3N2M3N2M3N2N2M3NRPg4"}, "image_id": 1032, "id": 17191}, {"iscrowd": 0, "category_id": 1, "bbox": [223.0, 0.0, 57.0, 52.0], "area": 1744, "segmentation": {"size": [512, 512], "counts": "Sa_32l?2N3M2N3M2N3M2N3N110M2N3M2N3M2N3M2N3M2O2O001O001O001O001O001ON2N2N2N2N2N2N2N2N2N2N2N2N2N2N2001O001O00N2N2Ag@5[?Ig@5[?Ih@4c?Nioc3"}, "image_id": 1032, "id": 17192}, {"iscrowd": 0, "category_id": 1, "bbox": [308.0, 0.0, 77.0, 40.0], "area": 1818, "segmentation": {"size": [512, 512], "counts": 
"kPj42k?3N3M2M4M2M4M2N2M4M2N30O010O00010O010O010O01O000O2M2M30N2M3N2N2N2O100O100O10000O100O10000O100O10000O10000O100O11O001O001O00001O001O00O1M3N2N2M3N2M3N2M3NR`o1"}, "image_id": 1032, "id": 17193}, {"iscrowd": 0, "category_id": 1, "bbox": [456.0, 0.0, 50.0, 18.0], "area": 460, "segmentation": {"size": [512, 512], "counts": "PPT71o?00001O001O00001O001O00001O001O001O00001O001O00001O001O00001O001O00001O001O00001O001OO1M3N2N2M3N2MSP3"}, "image_id": 1032, "id": 17194}, {"iscrowd": 0, "category_id": 1, "bbox": [402.0, 2.0, 87.0, 64.0], "area": 2611, "segmentation": {"size": [512, 512], "counts": "mPY62l?2M4M2M4M2M3N3L3N3l@VOm>P1N3O010O01O010O01O010O01O010O01O010O01O010O01O010O01O03N0O0010O010O0010O0010O001M2O20O0010O0010O010O0010O0010O01ZOSA8l>FWA9i>DZA=f>@]A?c>_O_Aa0P?10O010O010O00010O0O2M2M4M2N2M4Ma^;"}, "image_id": 1032, "id": 17195}, {"iscrowd": 0, "category_id": 1, "bbox": [373.0, 27.0, 30.0, 30.0], "area": 544, "segmentation": {"size": [512, 512], "counts": "]aj53j?3M4M2M4L3N2N3O010O01O01O010O01O010O01O010OO2L3N2M4M2M4M2Nn^f1"}, "image_id": 1032, "id": 17196}, {"iscrowd": 0, "category_id": 1, "bbox": [277.0, 49.0, 70.0, 105.0], "area": 3683, "segmentation": {"size": [512, 512], "counts": "UdZ43j?3N3L3N2M4M210M2N3L3N2kNUOUCl0hV1QBfNR>b0kAL6@Q>a0mAL5@Q>a0lAMa>1aALc>0aAMa>1aALc>1`ALb>1aAMa>1d^b2"}, "image_id": 1032, "id": 17197}, {"iscrowd": 0, "category_id": 1, "bbox": [409.0, 52.0, 62.0, 58.0], "area": 2033, "segmentation": {"size": [512, 512], "counts": "Rb\\61l?4M2n@JX>9dAJY>9eAJX>9eAIY>9dAJ]>5aAN^>3^A0c>O[A4d>d010O00010O010O00010O010O010O00010O010O00010O010O01O01O010O010O00010O010O0001L3N3M2M4M2M3N3L3N3M2M3N3L3Ng]d0"}, "image_id": 1032, "id": 17198}, {"iscrowd": 0, "category_id": 1, "bbox": [53.0, 72.0, 94.0, 69.0], "area": 3093, "segmentation": {"size": [512, 512], "counts": "[cj02k?4M2AKPA8n>JPA9l>KPA8n>JPA9n>=00010O010O010O00010O010M2O01N3L3N2M4N1010O0010O010O0010O0010]AeN_>_1O01lNaAf0^>WOfAh0[>UOgAk0Y>ROjAo0U>oNnAP1a>10O01O01O010O01O01O010O010O00010OSAQOi>S110O0010M2oNVAk0Q?0O0O1N3L3N3L3N3M20O1N3L3N2M4M2M4M2N3L3N2M4M2M4M2N2Mo\\f5"}, "image_id": 1032, "id": 17199}, {"iscrowd": 0, "category_id": 1, "bbox": [343.0, 75.0, 38.0, 41.0], "area": 796, "segmentation": {"size": [512, 512], "counts": "`c[52k?3N2M4L3N3L3O11M1O03L3N3L3M3N3L3NO3M4L3N2O2O01O01O010O00001L3N2M4L3M4M2M3Ma]Q2"}, "image_id": 1032, "id": 17200}, {"iscrowd": 0, "category_id": 1, "bbox": [146.0, 96.0, 32.0, 32.0], "area": 601, "segmentation": {"size": [512, 512], "counts": "fSY21m?2N3L3N2M4M2N3L3N2O2O0010O0010O010O00010O010ON2M4M2M4M2M3N3L3NjlV5"}, "image_id": 1032, "id": 17201}, {"iscrowd": 0, "category_id": 1, "bbox": [380.0, 107.0, 79.0, 56.0], "area": 2404, "segmentation": {"size": [512, 512], "counts": "UTn51m?3M2M4M2N2M4M2M4M2N3L30010O010O01O01O010O01O01O010O010O00010O010O01O01O010O01OO2M2M4N11O010OO2M201O00010O010OfN]AW1f>010O010O00010O010OXOZA6f>H\\A9d>C`A<`>BbA>_>_OdAa0[>\\OhAd0j>O2L3N3L3N2M4M2Nc[j0"}, "image_id": 1032, "id": 17202}, {"iscrowd": 0, "category_id": 1, "bbox": [334.0, 123.0, 32.0, 28.0], "area": 528, "segmentation": {"size": [512, 512], "counts": "\\TW51m?2M4M2M3N3L3N3O001O01O010O01O01O010O01O01O010O01ON3L3N3L3N3M2MokX2"}, "image_id": 1032, "id": 17203}, {"iscrowd": 0, "category_id": 1, "bbox": [26.0, 124.0, 62.0, 67.0], "area": 2301, "segmentation": {"size": [512, 512], "counts": "WU=4j?2M3N3M2M4M2M3N3L3N3N11O010OO2L3N2M4M2N3L3N3M21O010O01O01O010O010O00010O010O01O01O010nNiA9W>ElA;S>BPB>Q>_ORB`0n=]OUBd0k=YOWBg0i=VO[Bj0e=SO]Bm0\\>N1N2M4M2N3L3N2M4M2M4Mmjc6"}, "image_id": 1032, "id": 17204}, 
{"iscrowd": 0, "category_id": 1, "bbox": [138.0, 127.0, 52.0, 67.0], "area": 2032, "segmentation": {"size": [512, 512], "counts": "aTU21l?3N2_AKT=9iBIU=9hBKT=9iBIU=9iBJS=:iBIX=6fBMY=3eBO\\=1`B3_=M_B5b=K[B7e=IXB;h=EUB=k=CRBa0n=^OPBd0P>d03M2M4N11O010M2N3M2010O0010OO1M4M2N3L3N3M2M3N3M2M4M2M3N3M2M4M2N2M4M2NgkP5"}, "image_id": 1032, "id": 17205}, {"iscrowd": 0, "category_id": 1, "bbox": [244.0, 148.0, 62.0, 120.0], "area": 3106, "segmentation": {"size": [512, 512], "counts": "kWj33j?3N3L3M4M201O0010OC_O[Ab0F^Oi>2bAb0[>AcAb0Z>AcAa0Z>BcAb0Z>AcAa0\\>b010O0010O0010O0UO_NTCe1i<^NTCd1i<_NTCd1j<_NSCc1j<`NSCa1^K^1`KZ1eKX1f<]N^CY2bM4DfAnN\\>o0gAnN\\>f0`A_O6I]>e0`A^Ol>?WA_Ol>>0001O00000000000001O00000001O00000000000001O000001O00000000000001O00H8DWkW1"}, "image_id": 1032, "id": 17207}, {"iscrowd": 0, "category_id": 1, "bbox": [177.0, 164.0, 56.0, 78.0], "area": 2584, "segmentation": {"size": [512, 512], "counts": "Qgh21l?3N2M4M2JFc@>Z?203N2QAZOa>j0[AZOb>h0\\AZOa>i0\\AZOb>T1L3N3L3O110O0010O0010OO1N3OO1M3N3^O\\NdBf1\\=ZNdBg1[=YNfBf1Z=`0000N2M3N2M3N2N2M3N2M3N2M3N2M3N2M3N2M3N2N2M3N2MoZ[4"}, "image_id": 1032, "id": 17208}, {"iscrowd": 0, "category_id": 1, "bbox": [303.0, 187.0, 26.0, 29.0], "area": 482, "segmentation": {"size": [512, 512], "counts": "`fg41l?3M4M2M4L3N3L3N210O010O0N2010O010O0O1M4M2N3L3N3E]@5h?MPZk2"}, "image_id": 1032, "id": 17209}, {"iscrowd": 0, "category_id": 1, "bbox": [16.0, 190.0, 57.0, 51.0], "area": 1840, "segmentation": {"size": [512, 512], "counts": "oV83a?0e@4X?Ne@5X?;N3L3N3L3N2M4N1010O00010O010O00010O010O00010O010O00010O010O00010O0010O00M2O0O012M4M20001O0N3M2M3N3L3M4M2M3NjYk6"}, "image_id": 1032, "id": 17210}, {"iscrowd": 0, "category_id": 1, "bbox": [345.0, 197.0, 74.0, 55.0], "area": 2185, "segmentation": {"size": [512, 512], "counts": "kf\\543Mc?9O20O00N3M2N3M2M4M2N2M4M2010O010O0010O0010O010O0010O010O0010O010O0010O0010O010O0010O001N110O00010O010O010O0N2O2O010ON3M2M4M2N2M4M2N3L3N3M2M4M2N2NYY^1"}, "image_id": 1032, "id": 17211}, {"iscrowd": 0, "category_id": 1, "bbox": [316.0, 223.0, 26.0, 32.0], "area": 518, "segmentation": {"size": [512, 512], "counts": "YWn43j?3N3M2d@Hn>;n@HQ?9m@JR?b010O0010O0010O010O010O00O2M2N3M2Ie@F_?76N3Mjhd2"}, "image_id": 1032, "id": 17212}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 237.0, 102.0, 72.0], "area": 3268, "segmentation": {"size": [512, 512], "counts": "]7P1P?1O011N10O00010O010WOTAAWA?8\\OR>3iAa01@V>KlAe0KCe>>XAEh>i000010O010O00010kNWAQ1l>@TALl>1WA0i>L[A3e>K]A6c>F`A:`>DcA;Q?10O01O01O010O00010O010O00010O01O0N2N3TO^OUBe0c=[OkA3?e0b=D[B?c=C[B?d=C[B>d=B]B=d=C[B=e=C[B>d=A^B>c=@^B1AOP>0`BNB2o=0^BKG4k=1^BHJ7g=3_BCL8e=7_B_OO0C6k=>`B@61W=c0_B_O:LX=f0\\B@<^AFb>m00O0010O[O_AKa>3aAM`>2`AO_>1bAN_>2`AO_>1bAN^>2bAN_>2`AO_>1bAN^>2bAO^>1bAN^>2bAN_>2`AO_>1bAN^>2bAO^>1bAN^>2bAN_>2`AO_>1bAN^>2bAN_>2`AMa>3`AJc>5]AHf>5^AGe>7]AGf>5]AHf>6c0M4LXh\\6"}, "image_id": 1032, "id": 17213}, {"iscrowd": 0, "category_id": 1, "bbox": [333.0, 252.0, 58.0, 46.0], "area": 1635, "segmentation": {"size": [512, 512], "counts": "dhV53k?2M3N3M2M4M2M3N3M2N30O00010O010O00010O010OM3N3N110O01O01O010O01O01O010O01O01O010O01O01O03N0O0010M20O1N3ZOQA6R?HPA6S?FQA6R?HPA6^?M4M\\Wl1"}, "image_id": 1032, "id": 17214}, {"iscrowd": 0, "category_id": 1, "bbox": [122.0, 260.0, 77.0, 91.0], "area": 3201, "segmentation": {"size": [512, 512], "counts": "`Zm13k?3M2M4M2M3N3M2M4M2M310O0010O010O00010O01N1M3N3M2M4M2M3F`NRBc1k=_NRBd1l=8N10O010O010O3N2M310O010O0001O0N3L3N2M4M2OO2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N3M2M3N3L3N3L3N2M4MdWl4"}, "image_id": 1032, "id": 17215}, {"iscrowd": 0, "category_id": 1, "bbox": 
[274.0, 264.0, 28.0, 29.0], "area": 482, "segmentation": {"size": [512, 512], "counts": "mXY41l?3N2M4L3M3N3L3N30O00010O00010O0010O0010N1M3M4M2M3M4L3NcgX3"}, "image_id": 1032, "id": 17216}, {"iscrowd": 0, "category_id": 1, "bbox": [188.0, 276.0, 82.0, 113.0], "area": 3984, "segmentation": {"size": [512, 512], "counts": "c[n23j?3N3L3N2N3L3K^Oj@e0R?6N3M2N3L3N2N3M2N3O010O0nNbNaCc1YPOYAP1k>00010O010O010O0001N1N3M2M3N3L3N3M2M4M2N2MSVR2"}, "image_id": 1032, "id": 17219}, {"iscrowd": 0, "category_id": 1, "bbox": [450.0, 312.0, 62.0, 61.0], "area": 2048, "segmentation": {"size": [512, 512], "counts": "oZQ71m?2N2M4M2M4M2N3L3N2M4N1010O0010O0010O010O0010O0010O0010O0010O010O0010O0010O010O0001M2N3L3N2M4M2N3L3N3M2M31N1M3N3M2M4M2N3L3NVF"}, "image_id": 1032, "id": 17220}, {"iscrowd": 0, "category_id": 1, "bbox": [269.0, 314.0, 32.0, 24.0], "area": 412, "segmentation": {"size": [512, 512], "counts": "VjV42l?2M4M2M3N30O010O01O01O010O010O01O01O010O010O01O01O001M2M4M2N2NmUY3"}, "image_id": 1032, "id": 17221}, {"iscrowd": 0, "category_id": 1, "bbox": [295.0, 337.0, 50.0, 53.0], "area": 1556, "segmentation": {"size": [512, 512], "counts": "ckc42Z?1TA3h>0VA2h>1TA3i>OUA3h>0VA2h>e0M201O01O010O01M201O01O010O010O00010O010O0lN]Al0b>RO`An0a>oNbAP1g>10O01O01O010UORAb0n>\\OTAe0k>YOWAg0R?0O01O0O2M2M3N5K2M3N3LaTc2"}, "image_id": 1032, "id": 17222}, {"iscrowd": 0, "category_id": 1, "bbox": [13.0, 343.0, 23.0, 19.0], "area": 253, "segmentation": {"size": [512, 512], "counts": "Pk62l?3M2M3N3O0010O010O00010O010O010O00010M2N3L3NTe]7"}, "image_id": 1032, "id": 17223}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 348.0, 6.0, 18.0], "area": 64, "segmentation": {"size": [512, 512], "counts": "l:b0_?M2M4M2M3NTel7"}, "image_id": 1032, "id": 17224}, {"iscrowd": 0, "category_id": 1, "bbox": [53.0, 361.0, 28.0, 26.0], "area": 445, "segmentation": {"size": [512, 512], "counts": "jkj02k?3N2M4M2M4L3O101O010O01O01O010O01O01O01O0M3N3L3N3L3M3NbTg6"}, "image_id": 1032, "id": 17225}, {"iscrowd": 0, "category_id": 1, "bbox": [114.0, 363.0, 26.0, 26.0], "area": 433, "segmentation": {"size": [512, 512], "counts": "m[i12k?3M4L3N2M4L3O20O00010O00010O00010O00001L3M3M4M2M4Ladi5"}, "image_id": 1032, "id": 17226}, {"iscrowd": 0, "category_id": 1, "bbox": [455.0, 372.0, 28.0, 23.0], "area": 366, "segmentation": {"size": [512, 512], "counts": "okS73k?3L3M4L31O010O01O01O010O01O01O010O010O01O01O0O2L3N2M4MTT>"}, "image_id": 1032, "id": 17227}, {"iscrowd": 0, "category_id": 1, "bbox": [498.0, 383.0, 14.0, 25.0], "area": 240, "segmentation": {"size": [512, 512], "counts": "_\\i73j?3N3L3N3L3N3N1010O0010O001nC"}, "image_id": 1032, "id": 17228}, {"iscrowd": 0, "category_id": 1, "bbox": [280.0, 391.0, 64.0, 50.0], "area": 1870, "segmentation": {"size": [512, 512], "counts": "Q]\\43a?Nh@4U?Oh@5U?Nh@4U?>M2M3O2O010O01ON3O0010O0010O0010O0010O0010O0010O00010O010O00010O010O00010O010O00010O010O00010O010ON2M4M2M3N3L3N3L3N2M4MWcc2"}, "image_id": 1032, "id": 17229}, {"iscrowd": 0, "category_id": 1, "bbox": [438.0, 393.0, 59.0, 56.0], "area": 1793, "segmentation": {"size": [512, 512], "counts": "_]k61l?3N2M4M2M4M2M3N3L3M4M2M3N3L3N30O00010O010O00010O0010OnN]Ag0c>VO`Ak0`>QOdAn0\\>POfAP1e>10O010O00010O01O01ON3L3N3O01N1M4M2M3N3L3N3L3N2M4L3N3L3NUS7"}, "image_id": 1032, "id": 17230}, {"iscrowd": 0, "category_id": 1, "bbox": [40.0, 396.0, 52.0, 59.0], "area": 1543, "segmentation": {"size": [512, 512], "counts": "b]d01n?1O2N2M3N2N2N2GCl@?R?Ck@?T?8M3N2N2JoNZAS1c>PO[AR1c>7N1O2N2M3N2N2N20N2N2N2M2O2N2N2N2M3N1O2N2N2M3N2N1O2N2M3N2N2N1O2M3N2N2N2Nmba6"}, "image_id": 1032, "id": 17231}, 
{"iscrowd": 0, "category_id": 1, "bbox": [151.0, 397.0, 27.0, 31.0], "area": 468, "segmentation": {"size": [512, 512], "counts": "Um[21m?2M3N3L3N3L3M3N3L3O2O01O010O01OO2M2N3M2M4M2N2N3L3N3M^cV5"}, "image_id": 1032, "id": 17232}, {"iscrowd": 0, "category_id": 1, "bbox": [0.0, 402.0, 32.0, 41.0], "area": 853, "segmentation": {"size": [512, 512], "counts": "b01O010O01O010O01O010O01O01O010O01O01O010L3N3L3N2M4M2N3L3N2M4MSc_7"}, "image_id": 1032, "id": 17233}, {"iscrowd": 0, "category_id": 1, "bbox": [93.0, 416.0, 47.0, 61.0], "area": 1756, "segmentation": {"size": [512, 512], "counts": "Tn^11m?3L3N2N3N1h@Dl>=PAGf>M[A;MJe>N^A9JKf>O`Aa0\\>BdA>Z>DgAHjA8T>JlA7S>JkA7V>HhA:X>h00O01O010O01O010ON3M2M40O0O2O01O01ElAgNU>W1nAfNU>V1nAhNT>V1=M2M3N3M2N3[Ol@=S?APA6MKU?LPA6OK[?3;Mabi5"}, "image_id": 1032, "id": 17234}, {"iscrowd": 0, "category_id": 1, "bbox": [251.0, 420.0, 35.0, 33.0], "area": 564, "segmentation": {"size": [512, 512], "counts": "omm33k?2N3M2M3N3M2M4M2N2M2O00O3N3M2M4M20010O010O00010O010O0010O00N3M2N3L3Neb`3"}, "image_id": 1032, "id": 17235}, {"iscrowd": 0, "category_id": 1, "bbox": [197.0, 423.0, 30.0, 28.0], "area": 509, "segmentation": {"size": [512, 512], "counts": "jmR31m?3M2M4M2N3M2N3M2N30O0010O010O010OO2O000010O01N1]Og@=^?O2N3M2N2MaR^4"}, "image_id": 1032, "id": 17236}, {"iscrowd": 0, "category_id": 1, "bbox": [147.0, 431.0, 44.0, 65.0], "area": 1611, "segmentation": {"size": [512, 512], "counts": "VoY21m?2M4M2ROIPB;m=GQB;l=IPB:m=IQB:l=HQB;l=IQB:l=HQB;l=IQB9o=GnA=R>h001O01O010O010O00010O0M4M2M3N3L3N3L3N2M4M2M4M2M3N3L3N3L3N2M4MVRP5"}, "image_id": 1032, "id": 17237}, {"iscrowd": 0, "category_id": 1, "bbox": [511.0, 448.0, 1.0, 3.0], "area": 3, "segmentation": {"size": [512, 512], "counts": "Pno73m1"}, "image_id": 1032, "id": 17238}, {"iscrowd": 0, "category_id": 1, "bbox": [266.0, 450.0, 64.0, 54.0], "area": 1956, "segmentation": {"size": [512, 512], "counts": "S_U42l?3M2M4M2N2M4M2N3L3N2N3L3N3M2O1010O01O01O01M201O01O01OPO^Ab0c>TO]A33i0c>TO`Al0j>010O0010O0010O0010O0010O0M3N3L3N3L2OO102M3N3L3N3L3N2M4M2M4M2M3N3L3Ngaj2"}, "image_id": 1032, "id": 17239}, {"iscrowd": 0, "category_id": 1, "bbox": [510.0, 455.0, 2.0, 3.0], "area": 4, "segmentation": {"size": [512, 512], "counts": "Y^o71m?2iA"}, "image_id": 1032, "id": 17240}, {"iscrowd": 0, "category_id": 1, "bbox": [430.0, 456.0, 57.0, 36.0], "area": 1529, "segmentation": {"size": [512, 512], "counts": "l^g68`?8I8J500001O000001O000001O0001O000001O000001O000001O0001O000001O000001O000001O0001O000001O000001O000001O0J6J6I7JfQ<"}, "image_id": 1032, "id": 17241}, {"iscrowd": 0, "category_id": 1, "bbox": [492.0, 463.0, 20.0, 28.0], "area": 367, "segmentation": {"size": [512, 512], "counts": "R_f71l?3N2M4M2N3L3N3M200010O010O0010O0010O01\\A"}, "image_id": 1032, "id": 17242}, {"iscrowd": 0, "category_id": 1, "bbox": [186.0, 464.0, 47.0, 48.0], "area": 1248, "segmentation": {"size": [512, 512], "counts": "b_m22m?1N3N1N3M2O2M3M2O2M2O2M2N3N1N3N2M2N3N1N3M2010O001O001O0O2M3M2O2M2N3N2M2N3N1N3M2O2M3N1N3M2O2MTQ[4"}, "image_id": 1032, "id": 17243}, {"iscrowd": 0, "category_id": 1, "bbox": [262.0, 501.0, 38.0, 11.0], "area": 251, "segmentation": {"size": [512, 512], "counts": "n_S42k?3N2M3O11O00001O00O1O1001O001O00001O001O00001O001O00001O001O00001O00001O00Q`Y3"}, "image_id": 1032, "id": 17244}, {"iscrowd": 0, "category_id": 1, "bbox": [225.0, 503.0, 23.0, 9.0], "area": 132, "segmentation": {"size": [512, 512], "counts": "no`32k?3N2N20000001O001O00001O001O00001O001O001O00R`S4"}, "image_id": 1032, "id": 17245}, {"iscrowd": 0, "category_id": 1, "bbox": [474.0, 507.0, 
12.0, 5.0], "area": 32, "segmentation": {"size": [512, 512], "counts": "o_]71m?2N21O001O00001O001O00Q`<"}, "image_id": 1032, "id": 17246}, {"iscrowd": 0, "category_id": 1, "bbox": [432.0, 509.0, 9.0, 3.0], "area": 19, "segmentation": {"size": [512, 512], "counts": "n_h62n?0O100001O001O00QPS1"}, "image_id": 1032, "id": 17247}]} \ No newline at end of file diff --git a/mmpretrain/apis/__init__.py b/mmpretrain/apis/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6fbf443772a983c41f7273124f843bdfbb7f0f46 --- /dev/null +++ b/mmpretrain/apis/__init__.py @@ -0,0 +1,22 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from .base import BaseInferencer +from .feature_extractor import FeatureExtractor +from .image_caption import ImageCaptionInferencer +from .image_classification import ImageClassificationInferencer +from .image_retrieval import ImageRetrievalInferencer +from .model import (ModelHub, get_model, inference_model, init_model, + list_models) +from .multimodal_retrieval import (ImageToTextRetrievalInferencer, + TextToImageRetrievalInferencer) +from .nlvr import NLVRInferencer +from .visual_grounding import VisualGroundingInferencer +from .visual_question_answering import VisualQuestionAnsweringInferencer + +__all__ = [ + 'init_model', 'inference_model', 'list_models', 'get_model', 'ModelHub', + 'ImageClassificationInferencer', 'ImageRetrievalInferencer', + 'FeatureExtractor', 'ImageCaptionInferencer', + 'TextToImageRetrievalInferencer', 'VisualGroundingInferencer', + 'VisualQuestionAnsweringInferencer', 'ImageToTextRetrievalInferencer', + 'BaseInferencer', 'NLVRInferencer' +] diff --git a/mmpretrain/apis/base.py b/mmpretrain/apis/base.py new file mode 100644 index 0000000000000000000000000000000000000000..7bff6bd18675a3a0996dcd09081a15728311657f --- /dev/null +++ b/mmpretrain/apis/base.py @@ -0,0 +1,390 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from abc import abstractmethod +from math import ceil +from typing import Callable, Iterable, List, Optional, Tuple, Union + +import numpy as np +import torch +from mmengine.config import Config +from mmengine.dataset import default_collate +from mmengine.fileio import get_file_backend +from mmengine.model import BaseModel +from mmengine.runner import load_checkpoint + +from mmpretrain.structures import DataSample +from mmpretrain.utils import track +from .model import get_model, list_models + +ModelType = Union[BaseModel, str, Config] +InputType = Union[str, np.ndarray, list] + + +class BaseInferencer: + """Base inferencer for various tasks. + + The BaseInferencer provides the standard workflow for inference as follows: + + 1. Preprocess the input data by :meth:`preprocess`. + 2. Forward the data to the model by :meth:`forward`. ``BaseInferencer`` + assumes the model inherits from :class:`mmengine.models.BaseModel` and + will call `model.test_step` in :meth:`forward` by default. + 3. Visualize the results by :meth:`visualize`. + 4. Postprocess and return the results by :meth:`postprocess`. + + When we call the subclasses inherited from BaseInferencer (not overriding + ``__call__``), the workflow will be executed in order. + + All subclasses of BaseInferencer could define the following class + attributes for customization: + + - ``preprocess_kwargs``: The keys of the kwargs that will be passed to + :meth:`preprocess`. 
+ - ``forward_kwargs``: The keys of the kwargs that will be passed to + :meth:`forward` + - ``visualize_kwargs``: The keys of the kwargs that will be passed to + :meth:`visualize` + - ``postprocess_kwargs``: The keys of the kwargs that will be passed to + :meth:`postprocess` + + All attributes mentioned above should be a ``set`` of keys (strings), + and each key should not be duplicated. Actually, :meth:`__call__` will + dispatch all the arguments to the corresponding methods according to the + ``xxx_kwargs`` mentioned above. + + Subclasses inherited from ``BaseInferencer`` should implement + :meth:`_init_pipeline`, :meth:`visualize` and :meth:`postprocess`: + + - _init_pipeline: Return a callable object to preprocess the input data. + - visualize: Visualize the results returned by :meth:`forward`. + - postprocess: Postprocess the results returned by :meth:`forward` and + :meth:`visualize`. + + Args: + model (BaseModel | str | Config): A model name or a path to the config + file, or a :obj:`BaseModel` object. The model name can be found + by ``cls.list_models()`` and you can also query it in + :doc:`/modelzoo_statistics`. + pretrained (str, optional): Path to the checkpoint. If None, it will + try to find a pre-defined weight from the model you specified + (only work if the ``model`` is a model name). Defaults to None. + device (str | torch.device | None): Transfer the model to the target + device. Defaults to None. + device_map (str | dict | None): A map that specifies where each + submodule should go. It doesn't need to be refined to each + parameter/buffer name, once a given module name is inside, every + submodule of it will be sent to the same device. You can use + `device_map="auto"` to automatically generate the device map. + Defaults to None. + offload_folder (str | None): If the `device_map` contains any value + `"disk"`, the folder where we will offload weights. + **kwargs: Other keyword arguments to initialize the model (only work if + the ``model`` is a model name). + """ + + preprocess_kwargs: set = set() + forward_kwargs: set = set() + visualize_kwargs: set = set() + postprocess_kwargs: set = set() + + def __init__(self, + model: ModelType, + pretrained: Union[bool, str] = True, + device: Union[str, torch.device, None] = None, + device_map=None, + offload_folder=None, + **kwargs) -> None: + + if isinstance(model, BaseModel): + if isinstance(pretrained, str): + load_checkpoint(model, pretrained, map_location='cpu') + if device_map is not None: + from .utils import dispatch_model + model = dispatch_model( + model, + device_map=device_map, + offload_folder=offload_folder) + elif device is not None: + model.to(device) + else: + model = get_model( + model, + pretrained, + device=device, + device_map=device_map, + offload_folder=offload_folder, + **kwargs) + + model.eval() + + self.config = model._config + self.model = model + self.pipeline = self._init_pipeline(self.config) + self.visualizer = None + + def __call__( + self, + inputs, + return_datasamples: bool = False, + batch_size: int = 1, + **kwargs, + ) -> dict: + """Call the inferencer. + + Args: + inputs (InputsType): Inputs for the inferencer. + return_datasamples (bool): Whether to return results as + :obj:`BaseDataElement`. Defaults to False. + batch_size (int): Batch size. Defaults to 1. + **kwargs: Key words arguments passed to :meth:`preprocess`, + :meth:`forward`, :meth:`visualize` and :meth:`postprocess`. 
+ Each key in kwargs should be in the corresponding set of + ``preprocess_kwargs``, ``forward_kwargs``, ``visualize_kwargs`` + and ``postprocess_kwargs``. + + Returns: + dict: Inference and visualization results. + """ + ( + preprocess_kwargs, + forward_kwargs, + visualize_kwargs, + postprocess_kwargs, + ) = self._dispatch_kwargs(**kwargs) + + ori_inputs = self._inputs_to_list(inputs) + inputs = self.preprocess( + ori_inputs, batch_size=batch_size, **preprocess_kwargs) + preds = [] + for data in track( + inputs, 'Inference', total=ceil(len(ori_inputs) / batch_size)): + preds.extend(self.forward(data, **forward_kwargs)) + visualization = self.visualize(ori_inputs, preds, **visualize_kwargs) + results = self.postprocess(preds, visualization, return_datasamples, + **postprocess_kwargs) + return results + + def _inputs_to_list(self, inputs: InputType) -> list: + """Preprocess the inputs to a list. + + Cast the input data to a list of data. + + - list or tuple: return inputs + - str: + - Directory path: return all files in the directory + - other cases: return a list containing the string. The string + could be a path to file, a url or other types of string according + to the task. + - other: return a list with one item. + + Args: + inputs (str | array | list): Inputs for the inferencer. + + Returns: + list: List of input for the :meth:`preprocess`. + """ + if isinstance(inputs, str): + backend = get_file_backend(inputs) + if hasattr(backend, 'isdir') and backend.isdir(inputs): + # Backends like HttpsBackend do not implement `isdir`, so only + # those backends that implement `isdir` could accept the inputs + # as a directory + file_list = backend.list_dir_or_file(inputs, list_dir=False) + inputs = [ + backend.join_path(inputs, file) for file in file_list + ] + + if not isinstance(inputs, (list, tuple)): + inputs = [inputs] + + return list(inputs) + + def preprocess(self, inputs: InputType, batch_size: int = 1, **kwargs): + """Process the inputs into a model-feedable format. + + Customize your preprocess by overriding this method. Preprocess should + return an iterable object, of which each item will be used as the + input of ``model.test_step``. + + ``BaseInferencer.preprocess`` will return an iterable chunked data, + which will be used in __call__ like this: + + .. code-block:: python + + def __call__(self, inputs, batch_size=1, **kwargs): + chunked_data = self.preprocess(inputs, batch_size, **kwargs) + for batch in chunked_data: + preds = self.forward(batch, **kwargs) + + Args: + inputs (InputsType): Inputs given by user. + batch_size (int): batch size. Defaults to 1. + + Yields: + Any: Data processed by the ``pipeline`` and ``default_collate``. + """ + chunked_data = self._get_chunk_data( + map(self.pipeline, inputs), batch_size) + yield from map(default_collate, chunked_data) + + @torch.no_grad() + def forward(self, inputs: Union[dict, tuple], **kwargs): + """Feed the inputs to the model.""" + return self.model.test_step(inputs) + + def visualize(self, + inputs: list, + preds: List[DataSample], + show: bool = False, + **kwargs) -> List[np.ndarray]: + """Visualize predictions. + + Customize your visualization by overriding this method. visualize + should return visualization results, which could be np.ndarray or any + other objects. + + Args: + inputs (list): Inputs preprocessed by :meth:`_inputs_to_list`. + preds (Any): Predictions of the model. + show (bool): Whether to display the image in a popup window. + Defaults to False. + + Returns: + List[np.ndarray]: Visualization results. 
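+
+        A minimal override sketch (illustrative only, not part of the base
+        implementation; the drawing step is a placeholder for whatever
+        visualizer the subclass actually uses):
+
+        .. code-block:: python
+
+            def visualize(self, inputs, preds, show=False, **kwargs):
+                from mmcv.image import imread
+                images = []
+                for src, pred in zip(inputs, preds):
+                    img = imread(src)  # BGR ndarray from a path or array
+                    # draw `pred` onto `img` with the task-specific visualizer
+                    images.append(img)
+                return images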
+ """ + if show: + raise NotImplementedError( + f'The `visualize` method of {self.__class__.__name__} ' + 'is not implemented.') + + @abstractmethod + def postprocess( + self, + preds: List[DataSample], + visualization: List[np.ndarray], + return_datasample=False, + **kwargs, + ) -> dict: + """Process the predictions and visualization results from ``forward`` + and ``visualize``. + + This method should be responsible for the following tasks: + + 1. Convert datasamples into a json-serializable dict if needed. + 2. Pack the predictions and visualization results and return them. + 3. Dump or log the predictions. + + Customize your postprocess by overriding this method. Make sure + ``postprocess`` will return a dict with visualization results and + inference results. + + Args: + preds (List[Dict]): Predictions of the model. + visualization (np.ndarray): Visualized predictions. + return_datasample (bool): Whether to return results as datasamples. + Defaults to False. + + Returns: + dict: Inference and visualization results with key ``predictions`` + and ``visualization`` + + - ``visualization (Any)``: Returned by :meth:`visualize` + - ``predictions`` (dict or DataSample): Returned by + :meth:`forward` and processed in :meth:`postprocess`. + If ``return_datasample=False``, it usually should be a + json-serializable dict containing only basic data elements such + as strings and numbers. + """ + + @abstractmethod + def _init_pipeline(self, cfg: Config) -> Callable: + """Initialize the test pipeline. + + Return a pipeline to handle various input data, such as ``str``, + ``np.ndarray``. It is an abstract method in BaseInferencer, and should + be implemented in subclasses. + + The returned pipeline will be used to process a single data. + It will be used in :meth:`preprocess` like this: + + .. code-block:: python + def preprocess(self, inputs, batch_size, **kwargs): + ... + dataset = map(self.pipeline, dataset) + ... + """ + + def _get_chunk_data(self, inputs: Iterable, chunk_size: int): + """Get batch data from dataset. + + Args: + inputs (Iterable): An iterable dataset. + chunk_size (int): Equivalent to batch size. + + Yields: + list: batch data. + """ + inputs_iter = iter(inputs) + while True: + try: + chunk_data = [] + for _ in range(chunk_size): + processed_data = next(inputs_iter) + chunk_data.append(processed_data) + yield chunk_data + except StopIteration: + if chunk_data: + yield chunk_data + break + + def _dispatch_kwargs(self, **kwargs) -> Tuple[dict, dict, dict, dict]: + """Dispatch kwargs to preprocess(), forward(), visualize() and + postprocess() according to the actual demands. + + Returns: + Tuple[Dict, Dict, Dict, Dict]: kwargs passed to preprocess, + forward, visualize and postprocess respectively. 
+ """ + # Ensure each argument only matches one function + method_kwargs = self.preprocess_kwargs | self.forward_kwargs | \ + self.visualize_kwargs | self.postprocess_kwargs + + union_kwargs = method_kwargs | set(kwargs.keys()) + if union_kwargs != method_kwargs: + unknown_kwargs = union_kwargs - method_kwargs + raise ValueError( + f'unknown argument {unknown_kwargs} for `preprocess`, ' + '`forward`, `visualize` and `postprocess`') + + preprocess_kwargs = {} + forward_kwargs = {} + visualize_kwargs = {} + postprocess_kwargs = {} + + for key, value in kwargs.items(): + if key in self.preprocess_kwargs: + preprocess_kwargs[key] = value + if key in self.forward_kwargs: + forward_kwargs[key] = value + if key in self.visualize_kwargs: + visualize_kwargs[key] = value + if key in self.postprocess_kwargs: + postprocess_kwargs[key] = value + + return ( + preprocess_kwargs, + forward_kwargs, + visualize_kwargs, + postprocess_kwargs, + ) + + @staticmethod + def list_models(pattern: Optional[str] = None): + """List models defined in metafile of corresponding packages. + + Args: + pattern (str | None): A wildcard pattern to match model names. + + Returns: + List[str]: a list of model names. + """ + return list_models(pattern=pattern) diff --git a/mmpretrain/apis/feature_extractor.py b/mmpretrain/apis/feature_extractor.py new file mode 100644 index 0000000000000000000000000000000000000000..b7c52c2fcbc18bf3d93f326ac843508600b534ac --- /dev/null +++ b/mmpretrain/apis/feature_extractor.py @@ -0,0 +1,128 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Callable, List, Optional, Union + +import torch +from mmcv.image import imread +from mmengine.config import Config +from mmengine.dataset import Compose, default_collate + +from mmpretrain.registry import TRANSFORMS +from .base import BaseInferencer, InputType +from .model import list_models + + +class FeatureExtractor(BaseInferencer): + """The inferencer for extract features. + + Args: + model (BaseModel | str | Config): A model name or a path to the config + file, or a :obj:`BaseModel` object. The model name can be found + by ``FeatureExtractor.list_models()`` and you can also query it in + :doc:`/modelzoo_statistics`. + pretrained (str, optional): Path to the checkpoint. If None, it will + try to find a pre-defined weight from the model you specified + (only work if the ``model`` is a model name). Defaults to None. + device (str, optional): Device to run inference. If None, the available + device will be automatically used. Defaults to None. + **kwargs: Other keyword arguments to initialize the model (only work if + the ``model`` is a model name). + + Example: + >>> from mmpretrain import FeatureExtractor + >>> inferencer = FeatureExtractor('resnet50_8xb32_in1k', backbone=dict(out_indices=(0, 1, 2, 3))) + >>> feats = inferencer('demo/demo.JPEG', stage='backbone')[0] + >>> for feat in feats: + >>> print(feat.shape) + torch.Size([256, 56, 56]) + torch.Size([512, 28, 28]) + torch.Size([1024, 14, 14]) + torch.Size([2048, 7, 7]) + """ # noqa: E501 + + def __call__(self, + inputs: InputType, + batch_size: int = 1, + **kwargs) -> dict: + """Call the inferencer. + + Args: + inputs (str | array | list): The image path or array, or a list of + images. + batch_size (int): Batch size. Defaults to 1. + **kwargs: Other keyword arguments accepted by the `extract_feat` + method of the model. + + Returns: + tensor | Tuple[tensor]: The extracted features. 
+ """ + ori_inputs = self._inputs_to_list(inputs) + inputs = self.preprocess(ori_inputs, batch_size=batch_size) + preds = [] + for data in inputs: + preds.extend(self.forward(data, **kwargs)) + + return preds + + @torch.no_grad() + def forward(self, inputs: Union[dict, tuple], **kwargs): + inputs = self.model.data_preprocessor(inputs, False)['inputs'] + outputs = self.model.extract_feat(inputs, **kwargs) + + def scatter(feats, index): + if isinstance(feats, torch.Tensor): + return feats[index] + else: + # Sequence of tensor + return type(feats)([scatter(item, index) for item in feats]) + + results = [] + for i in range(inputs.shape[0]): + results.append(scatter(outputs, i)) + + return results + + def _init_pipeline(self, cfg: Config) -> Callable: + test_pipeline_cfg = cfg.test_dataloader.dataset.pipeline + if test_pipeline_cfg[0]['type'] == 'LoadImageFromFile': + # Image loading is finished in `self.preprocess`. + test_pipeline_cfg = test_pipeline_cfg[1:] + test_pipeline = Compose( + [TRANSFORMS.build(t) for t in test_pipeline_cfg]) + return test_pipeline + + def preprocess(self, inputs: List[InputType], batch_size: int = 1): + + def load_image(input_): + img = imread(input_) + if img is None: + raise ValueError(f'Failed to read image {input_}.') + return dict( + img=img, + img_shape=img.shape[:2], + ori_shape=img.shape[:2], + ) + + pipeline = Compose([load_image, self.pipeline]) + + chunked_data = self._get_chunk_data(map(pipeline, inputs), batch_size) + yield from map(default_collate, chunked_data) + + def visualize(self): + raise NotImplementedError( + "The FeatureExtractor doesn't support visualization.") + + def postprocess(self): + raise NotImplementedError( + "The FeatureExtractor doesn't need postprocessing.") + + @staticmethod + def list_models(pattern: Optional[str] = None): + """List all available model names. + + Args: + pattern (str | None): A wildcard pattern to match model names. + + Returns: + List[str]: a list of model names. + """ + return list_models(pattern=pattern) diff --git a/mmpretrain/apis/image_caption.py b/mmpretrain/apis/image_caption.py new file mode 100644 index 0000000000000000000000000000000000000000..aef21878112763bf1ae12d2373e9645b73049665 --- /dev/null +++ b/mmpretrain/apis/image_caption.py @@ -0,0 +1,164 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from pathlib import Path +from typing import Callable, List, Optional + +import numpy as np +from mmcv.image import imread +from mmengine.config import Config +from mmengine.dataset import Compose, default_collate + +from mmpretrain.registry import TRANSFORMS +from mmpretrain.structures import DataSample +from .base import BaseInferencer, InputType +from .model import list_models + + +class ImageCaptionInferencer(BaseInferencer): + """The inferencer for image caption. + + Args: + model (BaseModel | str | Config): A model name or a path to the config + file, or a :obj:`BaseModel` object. The model name can be found + by ``ImageCaptionInferencer.list_models()`` and you can also + query it in :doc:`/modelzoo_statistics`. + pretrained (str, optional): Path to the checkpoint. If None, it will + try to find a pre-defined weight from the model you specified + (only work if the ``model`` is a model name). Defaults to None. + device (str, optional): Device to run inference. If None, the available + device will be automatically used. Defaults to None. + **kwargs: Other keyword arguments to initialize the model (only work if + the ``model`` is a model name). 
+ + Example: + >>> from mmpretrain import ImageCaptionInferencer + >>> inferencer = ImageCaptionInferencer('blip-base_3rdparty_caption') + >>> inferencer('demo/cat-dog.png')[0] + {'pred_caption': 'a puppy and a cat sitting on a blanket'} + """ # noqa: E501 + + visualize_kwargs: set = {'resize', 'show', 'show_dir', 'wait_time'} + + def __call__(self, + images: InputType, + return_datasamples: bool = False, + batch_size: int = 1, + **kwargs) -> dict: + """Call the inferencer. + + Args: + images (str | array | list): The image path or array, or a list of + images. + return_datasamples (bool): Whether to return results as + :obj:`DataSample`. Defaults to False. + batch_size (int): Batch size. Defaults to 1. + resize (int, optional): Resize the short edge of the image to the + specified length before visualization. Defaults to None. + draw_score (bool): Whether to draw the prediction scores + of prediction categories. Defaults to True. + show (bool): Whether to display the visualization result in a + window. Defaults to False. + wait_time (float): The display time (s). Defaults to 0, which means + "forever". + show_dir (str, optional): If not None, save the visualization + results in the specified directory. Defaults to None. + + Returns: + list: The inference results. + """ + return super().__call__(images, return_datasamples, batch_size, + **kwargs) + + def _init_pipeline(self, cfg: Config) -> Callable: + test_pipeline_cfg = cfg.test_dataloader.dataset.pipeline + if test_pipeline_cfg[0]['type'] == 'LoadImageFromFile': + # Image loading is finished in `self.preprocess`. + test_pipeline_cfg = test_pipeline_cfg[1:] + test_pipeline = Compose( + [TRANSFORMS.build(t) for t in test_pipeline_cfg]) + return test_pipeline + + def preprocess(self, inputs: List[InputType], batch_size: int = 1): + + def load_image(input_): + img = imread(input_) + if img is None: + raise ValueError(f'Failed to read image {input_}.') + return dict( + img=img, + img_shape=img.shape[:2], + ori_shape=img.shape[:2], + ) + + pipeline = Compose([load_image, self.pipeline]) + + chunked_data = self._get_chunk_data(map(pipeline, inputs), batch_size) + yield from map(default_collate, chunked_data) + + def visualize(self, + ori_inputs: List[InputType], + preds: List[DataSample], + show: bool = False, + wait_time: int = 0, + resize: Optional[int] = None, + show_dir=None): + if not show and show_dir is None: + return None + + if self.visualizer is None: + from mmpretrain.visualization import UniversalVisualizer + self.visualizer = UniversalVisualizer() + + visualization = [] + for i, (input_, data_sample) in enumerate(zip(ori_inputs, preds)): + image = imread(input_) + if isinstance(input_, str): + # The image loaded from path is BGR format. 
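+                # Flip the channel axis to convert it to RGB, since the
+                # visualizer draws on RGB images.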
+ image = image[..., ::-1] + name = Path(input_).stem + else: + name = str(i) + + if show_dir is not None: + show_dir = Path(show_dir) + show_dir.mkdir(exist_ok=True) + out_file = str((show_dir / name).with_suffix('.png')) + else: + out_file = None + + self.visualizer.visualize_image_caption( + image, + data_sample, + resize=resize, + show=show, + wait_time=wait_time, + name=name, + out_file=out_file) + visualization.append(self.visualizer.get_image()) + if show: + self.visualizer.close() + return visualization + + def postprocess(self, + preds: List[DataSample], + visualization: List[np.ndarray], + return_datasamples=False) -> dict: + if return_datasamples: + return preds + + results = [] + for data_sample in preds: + results.append({'pred_caption': data_sample.get('pred_caption')}) + + return results + + @staticmethod + def list_models(pattern: Optional[str] = None): + """List all available model names. + + Args: + pattern (str | None): A wildcard pattern to match model names. + + Returns: + List[str]: a list of model names. + """ + return list_models(pattern=pattern, task='Image Caption') diff --git a/mmpretrain/apis/image_classification.py b/mmpretrain/apis/image_classification.py new file mode 100644 index 0000000000000000000000000000000000000000..081672614c3c15a8820267e823cdfdbd2b5fa858 --- /dev/null +++ b/mmpretrain/apis/image_classification.py @@ -0,0 +1,221 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from pathlib import Path +from typing import Callable, List, Optional, Union + +import numpy as np +import torch +from mmcv.image import imread +from mmengine.config import Config +from mmengine.dataset import Compose, default_collate + +from mmpretrain.registry import TRANSFORMS +from mmpretrain.structures import DataSample +from .base import BaseInferencer, InputType, ModelType +from .model import list_models + + +class ImageClassificationInferencer(BaseInferencer): + """The inferencer for image classification. + + Args: + model (BaseModel | str | Config): A model name or a path to the config + file, or a :obj:`BaseModel` object. The model name can be found + by ``ImageClassificationInferencer.list_models()`` and you can also + query it in :doc:`/modelzoo_statistics`. + pretrained (str, optional): Path to the checkpoint. If None, it will + try to find a pre-defined weight from the model you specified + (only work if the ``model`` is a model name). Defaults to None. + device (str, optional): Device to run inference. If None, the available + device will be automatically used. Defaults to None. + **kwargs: Other keyword arguments to initialize the model (only work if + the ``model`` is a model name). + + Example: + 1. Use a pre-trained model in MMPreTrain to inference an image. + + >>> from mmpretrain import ImageClassificationInferencer + >>> inferencer = ImageClassificationInferencer('resnet50_8xb32_in1k') + >>> inferencer('demo/demo.JPEG') + [{'pred_score': array([...]), + 'pred_label': 65, + 'pred_score': 0.6649367809295654, + 'pred_class': 'sea snake'}] + + 2. Use a config file and checkpoint to inference multiple images on GPU, + and save the visualization results in a folder. 
+ + >>> from mmpretrain import ImageClassificationInferencer + >>> inferencer = ImageClassificationInferencer( + model='configs/resnet/resnet50_8xb32_in1k.py', + pretrained='https://download.openmmlab.com/mmclassification/v0/resnet/resnet50_8xb32_in1k_20210831-ea4938fc.pth', + device='cuda') + >>> inferencer(['demo/dog.jpg', 'demo/bird.JPEG'], show_dir="./visualize/") + """ # noqa: E501 + + visualize_kwargs: set = { + 'resize', 'rescale_factor', 'draw_score', 'show', 'show_dir', + 'wait_time' + } + + def __init__(self, + model: ModelType, + pretrained: Union[bool, str] = True, + device: Union[str, torch.device, None] = None, + classes=None, + **kwargs) -> None: + super().__init__( + model=model, pretrained=pretrained, device=device, **kwargs) + + if classes is not None: + self.classes = classes + else: + self.classes = getattr(self.model, '_dataset_meta', + {}).get('classes') + + def __call__(self, + inputs: InputType, + return_datasamples: bool = False, + batch_size: int = 1, + **kwargs) -> dict: + """Call the inferencer. + + Args: + inputs (str | array | list): The image path or array, or a list of + images. + return_datasamples (bool): Whether to return results as + :obj:`DataSample`. Defaults to False. + batch_size (int): Batch size. Defaults to 1. + resize (int, optional): Resize the short edge of the image to the + specified length before visualization. Defaults to None. + rescale_factor (float, optional): Rescale the image by the rescale + factor for visualization. This is helpful when the image is too + large or too small for visualization. Defaults to None. + draw_score (bool): Whether to draw the prediction scores + of prediction categories. Defaults to True. + show (bool): Whether to display the visualization result in a + window. Defaults to False. + wait_time (float): The display time (s). Defaults to 0, which means + "forever". + show_dir (str, optional): If not None, save the visualization + results in the specified directory. Defaults to None. + + Returns: + list: The inference results. + """ + return super().__call__( + inputs, + return_datasamples=return_datasamples, + batch_size=batch_size, + **kwargs) + + def _init_pipeline(self, cfg: Config) -> Callable: + test_pipeline_cfg = cfg.test_dataloader.dataset.pipeline + if test_pipeline_cfg[0]['type'] == 'LoadImageFromFile': + # Image loading is finished in `self.preprocess`. 
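+            # so the transform is dropped here and the decoded arrays are
+            # fed in directly by ``preprocess``.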
+ test_pipeline_cfg = test_pipeline_cfg[1:] + test_pipeline = Compose( + [TRANSFORMS.build(t) for t in test_pipeline_cfg]) + return test_pipeline + + def preprocess(self, inputs: List[InputType], batch_size: int = 1): + + def load_image(input_): + img = imread(input_) + if img is None: + raise ValueError(f'Failed to read image {input_}.') + return dict( + img=img, + img_shape=img.shape[:2], + ori_shape=img.shape[:2], + ) + + pipeline = Compose([load_image, self.pipeline]) + + chunked_data = self._get_chunk_data(map(pipeline, inputs), batch_size) + yield from map(default_collate, chunked_data) + + def visualize(self, + ori_inputs: List[InputType], + preds: List[DataSample], + show: bool = False, + wait_time: int = 0, + resize: Optional[int] = None, + rescale_factor: Optional[float] = None, + draw_score=True, + show_dir=None): + if not show and show_dir is None: + return None + + if self.visualizer is None: + from mmpretrain.visualization import UniversalVisualizer + self.visualizer = UniversalVisualizer() + + visualization = [] + for i, (input_, data_sample) in enumerate(zip(ori_inputs, preds)): + image = imread(input_) + if isinstance(input_, str): + # The image loaded from path is BGR format. + image = image[..., ::-1] + name = Path(input_).stem + else: + name = str(i) + + if show_dir is not None: + show_dir = Path(show_dir) + show_dir.mkdir(exist_ok=True) + out_file = str((show_dir / name).with_suffix('.png')) + else: + out_file = None + + self.visualizer.visualize_cls( + image, + data_sample, + classes=self.classes, + resize=resize, + show=show, + wait_time=wait_time, + rescale_factor=rescale_factor, + draw_gt=False, + draw_pred=True, + draw_score=draw_score, + name=name, + out_file=out_file) + visualization.append(self.visualizer.get_image()) + if show: + self.visualizer.close() + return visualization + + def postprocess(self, + preds: List[DataSample], + visualization: List[np.ndarray], + return_datasamples=False) -> dict: + if return_datasamples: + return preds + + results = [] + for data_sample in preds: + pred_scores = data_sample.pred_score + pred_score = float(torch.max(pred_scores).item()) + pred_label = torch.argmax(pred_scores).item() + result = { + 'pred_scores': pred_scores.detach().cpu().numpy(), + 'pred_label': pred_label, + 'pred_score': pred_score, + } + if self.classes is not None: + result['pred_class'] = self.classes[pred_label] + results.append(result) + + return results + + @staticmethod + def list_models(pattern: Optional[str] = None): + """List all available model names. + + Args: + pattern (str | None): A wildcard pattern to match model names. + + Returns: + List[str]: a list of model names. + """ + return list_models(pattern=pattern, task='Image Classification') diff --git a/mmpretrain/apis/image_retrieval.py b/mmpretrain/apis/image_retrieval.py new file mode 100644 index 0000000000000000000000000000000000000000..980d65cc3c7922c9e4fa0cff441e106b636fa765 --- /dev/null +++ b/mmpretrain/apis/image_retrieval.py @@ -0,0 +1,285 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
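+#
+# Prototype-cache sketch (paths below are illustrative assumptions): build
+# the gallery once, dump it with ``save_prototype``, then pass the file as
+# ``prototype_cache`` on later runs to skip re-extracting the features.
+#
+#   >>> from mmpretrain import ImageRetrievalInferencer
+#   >>> inferencer = ImageRetrievalInferencer(
+#   ...     'resnet50-arcface_8xb32_inshop', prototype='./demo/')
+#   >>> inferencer.save_prototype('img_retri.pth')
+#   >>> inferencer = ImageRetrievalInferencer(
+#   ...     'resnet50-arcface_8xb32_inshop', prototype='./demo/',
+#   ...     prototype_cache='img_retri.pth')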
+from pathlib import Path +from typing import Callable, List, Optional, Union + +import numpy as np +import torch +from mmcv.image import imread +from mmengine.config import Config +from mmengine.dataset import BaseDataset, Compose, default_collate + +from mmpretrain.registry import TRANSFORMS +from mmpretrain.structures import DataSample +from .base import BaseInferencer, InputType, ModelType +from .model import list_models + + +class ImageRetrievalInferencer(BaseInferencer): + """The inferencer for image to image retrieval. + + Args: + model (BaseModel | str | Config): A model name or a path to the config + file, or a :obj:`BaseModel` object. The model name can be found + by ``ImageRetrievalInferencer.list_models()`` and you can also + query it in :doc:`/modelzoo_statistics`. + prototype (str | list | dict | DataLoader, BaseDataset): The images to + be retrieved. It can be the following types: + + - str: The directory of the the images. + - list: A list of path of the images. + - dict: A config dict of the a prototype dataset. + - BaseDataset: A prototype dataset. + - DataLoader: A data loader to load the prototype data. + + prototype_cache (str, optional): The path of the generated prototype + features. If exists, directly load the cache instead of re-generate + the prototype features. If not exists, save the generated features + to the path. Defaults to None. + pretrained (str, optional): Path to the checkpoint. If None, it will + try to find a pre-defined weight from the model you specified + (only work if the ``model`` is a model name). Defaults to None. + device (str, optional): Device to run inference. If None, the available + device will be automatically used. Defaults to None. + **kwargs: Other keyword arguments to initialize the model (only work if + the ``model`` is a model name). + + Example: + >>> from mmpretrain import ImageRetrievalInferencer + >>> inferencer = ImageRetrievalInferencer( + ... 'resnet50-arcface_8xb32_inshop', + ... prototype='./demo/', + ... 
prototype_cache='img_retri.pth') + >>> inferencer('demo/cat-dog.png', topk=2)[0][1] + {'match_score': tensor(0.4088, device='cuda:0'), + 'sample_idx': 3, + 'sample': {'img_path': './demo/dog.jpg'}} + """ # noqa: E501 + + visualize_kwargs: set = { + 'draw_score', 'resize', 'show_dir', 'show', 'wait_time', 'topk' + } + postprocess_kwargs: set = {'topk'} + + def __init__( + self, + model: ModelType, + prototype, + prototype_cache=None, + prepare_batch_size=8, + pretrained: Union[bool, str] = True, + device: Union[str, torch.device, None] = None, + **kwargs, + ) -> None: + super().__init__( + model=model, pretrained=pretrained, device=device, **kwargs) + + self.prototype_dataset = self._prepare_prototype( + prototype, prototype_cache, prepare_batch_size) + + def _prepare_prototype(self, prototype, cache=None, batch_size=8): + from mmengine.dataset import DefaultSampler + from torch.utils.data import DataLoader + + def build_dataloader(dataset): + return DataLoader( + dataset, + batch_size=batch_size, + collate_fn=default_collate, + sampler=DefaultSampler(dataset, shuffle=False), + persistent_workers=False, + ) + + if isinstance(prototype, str): + # A directory path of images + prototype = dict( + type='CustomDataset', with_label=False, data_root=prototype) + + if isinstance(prototype, list): + test_pipeline = [dict(type='LoadImageFromFile'), self.pipeline] + dataset = BaseDataset( + lazy_init=True, serialize_data=False, pipeline=test_pipeline) + dataset.data_list = [{ + 'sample_idx': i, + 'img_path': file + } for i, file in enumerate(prototype)] + dataset._fully_initialized = True + dataloader = build_dataloader(dataset) + elif isinstance(prototype, dict): + # A config of dataset + from mmpretrain.registry import DATASETS + test_pipeline = [dict(type='LoadImageFromFile'), self.pipeline] + dataset = DATASETS.build(prototype) + dataloader = build_dataloader(dataset) + elif isinstance(prototype, DataLoader): + dataset = prototype.dataset + dataloader = prototype + elif isinstance(prototype, BaseDataset): + dataset = prototype + dataloader = build_dataloader(dataset) + else: + raise TypeError(f'Unsupported prototype type {type(prototype)}.') + + if cache is not None and Path(cache).exists(): + self.model.prototype = cache + else: + self.model.prototype = dataloader + self.model.prepare_prototype() + + from mmengine.logging import MMLogger + logger = MMLogger.get_current_instance() + if cache is None: + logger.info('The prototype has been prepared, you can use ' + '`save_prototype` to dump it into a pickle ' + 'file for the future usage.') + elif not Path(cache).exists(): + self.save_prototype(cache) + logger.info(f'The prototype has been saved at {cache}.') + + return dataset + + def save_prototype(self, path): + self.model.dump_prototype(path) + + def __call__(self, + inputs: InputType, + return_datasamples: bool = False, + batch_size: int = 1, + **kwargs) -> dict: + """Call the inferencer. + + Args: + inputs (str | array | list): The image path or array, or a list of + images. + return_datasamples (bool): Whether to return results as + :obj:`DataSample`. Defaults to False. + batch_size (int): Batch size. Defaults to 1. + resize (int, optional): Resize the long edge of the image to the + specified length before visualization. Defaults to None. + draw_score (bool): Whether to draw the match scores. + Defaults to True. + show (bool): Whether to display the visualization result in a + window. Defaults to False. + wait_time (float): The display time (s). Defaults to 0, which means + "forever". 
+ show_dir (str, optional): If not None, save the visualization + results in the specified directory. Defaults to None. + + Returns: + list: The inference results. + """ + return super().__call__(inputs, return_datasamples, batch_size, + **kwargs) + + def _init_pipeline(self, cfg: Config) -> Callable: + test_pipeline_cfg = cfg.test_dataloader.dataset.pipeline + if test_pipeline_cfg[0]['type'] == 'LoadImageFromFile': + # Image loading is finished in `self.preprocess`. + test_pipeline_cfg = test_pipeline_cfg[1:] + test_pipeline = Compose( + [TRANSFORMS.build(t) for t in test_pipeline_cfg]) + return test_pipeline + + def preprocess(self, inputs: List[InputType], batch_size: int = 1): + + def load_image(input_): + img = imread(input_) + if img is None: + raise ValueError(f'Failed to read image {input_}.') + return dict( + img=img, + img_shape=img.shape[:2], + ori_shape=img.shape[:2], + ) + + pipeline = Compose([load_image, self.pipeline]) + + chunked_data = self._get_chunk_data(map(pipeline, inputs), batch_size) + yield from map(default_collate, chunked_data) + + def visualize(self, + ori_inputs: List[InputType], + preds: List[DataSample], + topk: int = 3, + resize: Optional[int] = 224, + show: bool = False, + wait_time: int = 0, + draw_score=True, + show_dir=None): + if not show and show_dir is None: + return None + + if self.visualizer is None: + from mmpretrain.visualization import UniversalVisualizer + self.visualizer = UniversalVisualizer() + + visualization = [] + for i, (input_, data_sample) in enumerate(zip(ori_inputs, preds)): + image = imread(input_) + if isinstance(input_, str): + # The image loaded from path is BGR format. + image = image[..., ::-1] + name = Path(input_).stem + else: + name = str(i) + + if show_dir is not None: + show_dir = Path(show_dir) + show_dir.mkdir(exist_ok=True) + out_file = str((show_dir / name).with_suffix('.png')) + else: + out_file = None + + self.visualizer.visualize_image_retrieval( + image, + data_sample, + self.prototype_dataset, + topk=topk, + resize=resize, + draw_score=draw_score, + show=show, + wait_time=wait_time, + name=name, + out_file=out_file) + visualization.append(self.visualizer.get_image()) + if show: + self.visualizer.close() + return visualization + + def postprocess( + self, + preds: List[DataSample], + visualization: List[np.ndarray], + return_datasamples=False, + topk=1, + ) -> dict: + if return_datasamples: + return preds + + results = [] + for data_sample in preds: + match_scores, indices = torch.topk(data_sample.pred_score, k=topk) + matches = [] + for match_score, sample_idx in zip(match_scores, indices): + sample = self.prototype_dataset.get_data_info( + sample_idx.item()) + sample_idx = sample.pop('sample_idx') + matches.append({ + 'match_score': match_score, + 'sample_idx': sample_idx, + 'sample': sample + }) + results.append(matches) + + return results + + @staticmethod + def list_models(pattern: Optional[str] = None): + """List all available model names. + + Args: + pattern (str | None): A wildcard pattern to match model names. + + Returns: + List[str]: a list of model names. + """ + return list_models(pattern=pattern, task='Image Retrieval') diff --git a/mmpretrain/apis/model.py b/mmpretrain/apis/model.py new file mode 100644 index 0000000000000000000000000000000000000000..eba475e7f791f42eb9aec384afec947f72722f27 --- /dev/null +++ b/mmpretrain/apis/model.py @@ -0,0 +1,408 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
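+#
+# ModelHub sketch (the metafile path and prefix below are illustrative
+# assumptions): registering an extra model-index file makes its entries
+# resolvable through ``get_model`` and ``list_models`` defined below.
+#
+#   >>> from mmpretrain.apis.model import ModelHub
+#   >>> ModelHub.register_model_index('my_project/model-index.yml',
+#   ...                               config_prefix='my_project/configs')
+#   >>> ModelHub.has('my-model-name')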
+import copy +import fnmatch +import os.path as osp +import re +import warnings +from os import PathLike +from pathlib import Path +from typing import List, Tuple, Union + +from mmengine.config import Config +from modelindex.load_model_index import load +from modelindex.models.Model import Model + + +class ModelHub: + """A hub to host the meta information of all pre-defined models.""" + _models_dict = {} + __mmpretrain_registered = False + + @classmethod + def register_model_index(cls, + model_index_path: Union[str, PathLike], + config_prefix: Union[str, PathLike, None] = None): + """Parse the model-index file and register all models. + + Args: + model_index_path (str | PathLike): The path of the model-index + file. + config_prefix (str | PathLike | None): The prefix of all config + file paths in the model-index file. + """ + model_index = load(str(model_index_path)) + model_index.build_models_with_collections() + + for metainfo in model_index.models: + model_name = metainfo.name.lower() + if metainfo.name in cls._models_dict: + raise ValueError( + 'The model name {} is conflict in {} and {}.'.format( + model_name, osp.abspath(metainfo.filepath), + osp.abspath(cls._models_dict[model_name].filepath))) + metainfo.config = cls._expand_config_path(metainfo, config_prefix) + cls._models_dict[model_name] = metainfo + + @classmethod + def get(cls, model_name): + """Get the model's metainfo by the model name. + + Args: + model_name (str): The name of model. + + Returns: + modelindex.models.Model: The metainfo of the specified model. + """ + cls._register_mmpretrain_models() + # lazy load config + metainfo = copy.deepcopy(cls._models_dict.get(model_name.lower())) + if metainfo is None: + raise ValueError( + f'Failed to find model "{model_name}". please use ' + '`mmpretrain.list_models` to get all available names.') + if isinstance(metainfo.config, str): + metainfo.config = Config.fromfile(metainfo.config) + return metainfo + + @staticmethod + def _expand_config_path(metainfo: Model, + config_prefix: Union[str, PathLike] = None): + if config_prefix is None: + config_prefix = osp.dirname(metainfo.filepath) + + if metainfo.config is None or osp.isabs(metainfo.config): + config_path: str = metainfo.config + else: + config_path = osp.abspath(osp.join(config_prefix, metainfo.config)) + + return config_path + + @classmethod + def _register_mmpretrain_models(cls): + # register models in mmpretrain + if not cls.__mmpretrain_registered: + from importlib_metadata import distribution + root = distribution('mmpretrain').locate_file('mmpretrain') + model_index_path = root / '.mim' / 'model-index.yml' + ModelHub.register_model_index( + model_index_path, config_prefix=root / '.mim') + cls.__mmpretrain_registered = True + + @classmethod + def has(cls, model_name): + """Whether a model name is in the ModelHub.""" + return model_name in cls._models_dict + + +def get_model(model: Union[str, Config], + pretrained: Union[str, bool] = False, + device=None, + device_map=None, + offload_folder=None, + url_mapping: Tuple[str, str] = None, + **kwargs): + """Get a pre-defined model or create a model from config. + + Args: + model (str | Config): The name of model, the config file path or a + config instance. + pretrained (bool | str): When use name to specify model, you can + use ``True`` to load the pre-defined pretrained weights. And you + can also use a string to specify the path or link of weights to + load. Defaults to False. + device (str | torch.device | None): Transfer the model to the target + device. 
Defaults to None. + device_map (str | dict | None): A map that specifies where each + submodule should go. It doesn't need to be refined to each + parameter/buffer name, once a given module name is inside, every + submodule of it will be sent to the same device. You can use + `device_map="auto"` to automatically generate the device map. + Defaults to None. + offload_folder (str | None): If the `device_map` contains any value + `"disk"`, the folder where we will offload weights. + url_mapping (Tuple[str, str], optional): The mapping of pretrained + checkpoint link. For example, load checkpoint from a local dir + instead of download by ``('https://.*/', './checkpoint')``. + Defaults to None. + **kwargs: Other keyword arguments of the model config. + + Returns: + mmengine.model.BaseModel: The result model. + + Examples: + Get a ResNet-50 model and extract images feature: + + >>> import torch + >>> from mmpretrain import get_model + >>> inputs = torch.rand(16, 3, 224, 224) + >>> model = get_model('resnet50_8xb32_in1k', pretrained=True, backbone=dict(out_indices=(0, 1, 2, 3))) + >>> feats = model.extract_feat(inputs) + >>> for feat in feats: + ... print(feat.shape) + torch.Size([16, 256]) + torch.Size([16, 512]) + torch.Size([16, 1024]) + torch.Size([16, 2048]) + + Get Swin-Transformer model with pre-trained weights and inference: + + >>> from mmpretrain import get_model, inference_model + >>> model = get_model('swin-base_16xb64_in1k', pretrained=True) + >>> result = inference_model(model, 'demo/demo.JPEG') + >>> print(result['pred_class']) + 'sea snake' + """ # noqa: E501 + if device_map is not None: + from .utils import dispatch_model + dispatch_model._verify_require() + + metainfo = None + if isinstance(model, Config): + config = copy.deepcopy(model) + if pretrained is True and 'load_from' in config: + pretrained = config.load_from + elif isinstance(model, (str, PathLike)) and Path(model).suffix == '.py': + config = Config.fromfile(model) + if pretrained is True and 'load_from' in config: + pretrained = config.load_from + elif isinstance(model, str): + metainfo = ModelHub.get(model) + config = metainfo.config + if pretrained is True and metainfo.weights is not None: + pretrained = metainfo.weights + else: + raise TypeError('model must be a name, a path or a Config object, ' + f'but got {type(config)}') + + if pretrained is True: + warnings.warn('Unable to find pre-defined checkpoint of the model.') + pretrained = None + elif pretrained is False: + pretrained = None + + if kwargs: + config.merge_from_dict({'model': kwargs}) + config.model.setdefault('data_preprocessor', + config.get('data_preprocessor', None)) + + from mmengine.registry import DefaultScope + + from mmpretrain.registry import MODELS + with DefaultScope.overwrite_default_scope('mmpretrain'): + model = MODELS.build(config.model) + + dataset_meta = {} + if pretrained: + # Mapping the weights to GPU may cause unexpected video memory leak + # which refers to https://github.com/open-mmlab/mmdetection/pull/6405 + from mmengine.runner import load_checkpoint + if url_mapping is not None: + pretrained = re.sub(url_mapping[0], url_mapping[1], pretrained) + checkpoint = load_checkpoint(model, pretrained, map_location='cpu') + if 'dataset_meta' in checkpoint.get('meta', {}): + # mmpretrain 1.x + dataset_meta = checkpoint['meta']['dataset_meta'] + elif 'CLASSES' in checkpoint.get('meta', {}): + # mmcls 0.x + dataset_meta = {'classes': checkpoint['meta']['CLASSES']} + + if len(dataset_meta) == 0 and 'test_dataloader' in config: + from 
mmpretrain.registry import DATASETS + dataset_class = DATASETS.get(config.test_dataloader.dataset.type) + dataset_meta = getattr(dataset_class, 'METAINFO', {}) + + if device_map is not None: + model = dispatch_model( + model, device_map=device_map, offload_folder=offload_folder) + elif device is not None: + model.to(device) + + model._dataset_meta = dataset_meta # save the dataset meta + model._config = config # save the config in the model + model._metainfo = metainfo # save the metainfo in the model + model.eval() + return model + + +def init_model(config, checkpoint=None, device=None, **kwargs): + """Initialize a classifier from config file (deprecated). + + It's only for compatibility, please use :func:`get_model` instead. + + Args: + config (str | :obj:`mmengine.Config`): Config file path or the config + object. + checkpoint (str, optional): Checkpoint path. If left as None, the model + will not load any weights. + device (str | torch.device | None): Transfer the model to the target + device. Defaults to None. + **kwargs: Other keyword arguments of the model config. + + Returns: + nn.Module: The constructed model. + """ + return get_model(config, checkpoint, device, **kwargs) + + +def list_models(pattern=None, exclude_patterns=None, task=None) -> List[str]: + """List all models available in MMPretrain. + + Args: + pattern (str | None): A wildcard pattern to match model names. + Defaults to None. + exclude_patterns (list | None): A list of wildcard patterns to + exclude names from the matched names. Defaults to None. + task (str | none): The evaluation task of the model. + + Returns: + List[str]: a list of model names. + + Examples: + List all models: + + >>> from mmpretrain import list_models + >>> list_models() + + List ResNet-50 models on ImageNet-1k dataset: + + >>> from mmpretrain import list_models + >>> list_models('resnet*in1k') + ['resnet50_8xb32_in1k', + 'resnet50_8xb32-fp16_in1k', + 'resnet50_8xb256-rsb-a1-600e_in1k', + 'resnet50_8xb256-rsb-a2-300e_in1k', + 'resnet50_8xb256-rsb-a3-100e_in1k'] + + List Swin-Transformer models trained from stratch and exclude + Swin-Transformer-V2 models: + + >>> from mmpretrain import list_models + >>> list_models('swin', exclude_patterns=['swinv2', '*-pre']) + ['swin-base_16xb64_in1k', + 'swin-base_3rdparty_in1k', + 'swin-base_3rdparty_in1k-384', + 'swin-large_8xb8_cub-384px', + 'swin-small_16xb64_in1k', + 'swin-small_3rdparty_in1k', + 'swin-tiny_16xb64_in1k', + 'swin-tiny_3rdparty_in1k'] + + List all EVA models for image classification task. + + >>> from mmpretrain import list_models + >>> list_models('eva', task='Image Classification') + ['eva-g-p14_30m-in21k-pre_3rdparty_in1k-336px', + 'eva-g-p14_30m-in21k-pre_3rdparty_in1k-560px', + 'eva-l-p14_mim-in21k-pre_3rdparty_in1k-196px', + 'eva-l-p14_mim-in21k-pre_3rdparty_in1k-336px', + 'eva-l-p14_mim-pre_3rdparty_in1k-196px', + 'eva-l-p14_mim-pre_3rdparty_in1k-336px'] + """ + ModelHub._register_mmpretrain_models() + matches = set(ModelHub._models_dict.keys()) + + if pattern is not None: + # Always match keys with any postfix. 
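+        # e.g. the pattern 'swin-base' also matches 'swin-base_16xb64_in1k'.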
+ matches = set(fnmatch.filter(matches, pattern + '*')) + + exclude_patterns = exclude_patterns or [] + for exclude_pattern in exclude_patterns: + exclude = set(fnmatch.filter(matches, exclude_pattern + '*')) + matches = matches - exclude + + if task is not None: + task_matches = [] + for key in matches: + metainfo = ModelHub._models_dict[key] + if metainfo.results is None and task == 'null': + task_matches.append(key) + elif metainfo.results is None: + continue + elif task in [result.task for result in metainfo.results]: + task_matches.append(key) + matches = task_matches + + return sorted(list(matches)) + + +def inference_model(model, *args, **kwargs): + """Inference an image with the inferencer. + + Automatically select inferencer to inference according to the type of + model. It's a shortcut for a quick start, and for advanced usage, please + use the correspondding inferencer class. + + Here is the mapping from task to inferencer: + + - Image Classification: :class:`ImageClassificationInferencer` + - Image Retrieval: :class:`ImageRetrievalInferencer` + - Image Caption: :class:`ImageCaptionInferencer` + - Visual Question Answering: :class:`VisualQuestionAnsweringInferencer` + - Visual Grounding: :class:`VisualGroundingInferencer` + - Text-To-Image Retrieval: :class:`TextToImageRetrievalInferencer` + - Image-To-Text Retrieval: :class:`ImageToTextRetrievalInferencer` + - NLVR: :class:`NLVRInferencer` + + Args: + model (BaseModel | str | Config): The loaded model, the model + name or the config of the model. + *args: Positional arguments to call the inferencer. + **kwargs: Other keyword arguments to initialize and call the + correspondding inferencer. + + Returns: + result (dict): The inference results. + """ # noqa: E501 + from mmengine.model import BaseModel + + if isinstance(model, BaseModel): + metainfo = getattr(model, '_metainfo', None) + else: + metainfo = ModelHub.get(model) + + from inspect import signature + + from .image_caption import ImageCaptionInferencer + from .image_classification import ImageClassificationInferencer + from .image_retrieval import ImageRetrievalInferencer + from .multimodal_retrieval import (ImageToTextRetrievalInferencer, + TextToImageRetrievalInferencer) + from .nlvr import NLVRInferencer + from .visual_grounding import VisualGroundingInferencer + from .visual_question_answering import VisualQuestionAnsweringInferencer + task_mapping = { + 'Image Classification': ImageClassificationInferencer, + 'Image Retrieval': ImageRetrievalInferencer, + 'Image Caption': ImageCaptionInferencer, + 'Visual Question Answering': VisualQuestionAnsweringInferencer, + 'Visual Grounding': VisualGroundingInferencer, + 'Text-To-Image Retrieval': TextToImageRetrievalInferencer, + 'Image-To-Text Retrieval': ImageToTextRetrievalInferencer, + 'NLVR': NLVRInferencer, + } + + inferencer_type = None + + if metainfo is not None and metainfo.results is not None: + tasks = set(result.task for result in metainfo.results) + inferencer_type = [ + task_mapping.get(task) for task in tasks if task in task_mapping + ] + if len(inferencer_type) > 1: + inferencer_names = [cls.__name__ for cls in inferencer_type] + warnings.warn('The model supports multiple tasks, auto select ' + f'{inferencer_names[0]}, you can also use other ' + f'inferencer {inferencer_names} directly.') + inferencer_type = inferencer_type[0] + + if inferencer_type is None: + raise NotImplementedError('No available inferencer for the model') + + init_kwargs = { + k: kwargs.pop(k) + for k in list(kwargs) + if k in 
signature(inferencer_type).parameters.keys() + } + + inferencer = inferencer_type(model, **init_kwargs) + return inferencer(*args, **kwargs)[0] diff --git a/mmpretrain/apis/multimodal_retrieval.py b/mmpretrain/apis/multimodal_retrieval.py new file mode 100644 index 0000000000000000000000000000000000000000..5eb9c859aca309306c1e775b7a03bf3bbc1c7717 --- /dev/null +++ b/mmpretrain/apis/multimodal_retrieval.py @@ -0,0 +1,603 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from copy import deepcopy +from pathlib import Path +from typing import Callable, List, Optional, Tuple, Union + +import mmengine +import numpy as np +import torch +from mmcv.image import imread +from mmengine.config import Config +from mmengine.dataset import BaseDataset, Compose, default_collate + +from mmpretrain.registry import TRANSFORMS +from mmpretrain.structures import DataSample +from mmpretrain.utils import track +from .base import BaseInferencer +from .base import InputType as ImageType +from .base import ModelType +from .model import list_models + + +def filter_transforms(transforms: list, data_info: dict): + """Filter pipeline to avoid KeyError with partial data info.""" + data_info = deepcopy(data_info) + filtered_transforms = [] + for t in transforms: + try: + data_info = t(data_info) + filtered_transforms.append(t) + except KeyError: + pass + return filtered_transforms + + +class TextToImageRetrievalInferencer(BaseInferencer): + """The inferencer for text to image retrieval. + + Args: + model (BaseModel | str | Config): A model name or a path to the config + file, or a :obj:`BaseModel` object. The model name can be found + by ``TextToImageRetrievalInferencer.list_models()`` and you can also + query it in :doc:`/modelzoo_statistics`. + prototype (str | list | dict | DataLoader | BaseDataset): The images to + be retrieved. It can be the following types: + + - str: The directory of the the images. + - list: A list of path of the images. + - dict: A config dict of the a prototype dataset. + - BaseDataset: A prototype dataset. + - DataLoader: A data loader to load the prototype data. + + prototype_cache (str, optional): The path of the generated prototype + features. If exists, directly load the cache instead of re-generate + the prototype features. If not exists, save the generated features + to the path. Defaults to None. + fast_match (bool): Some algorithms will record extra image features for + further matching, which may consume large memory, set True to avoid + this behavior. Defaults to True. + pretrained (str, optional): Path to the checkpoint. If None, it will + try to find a pre-defined weight from the model you specified + (only work if the ``model`` is a model name). Defaults to None. + device (str, optional): Device to run inference. If None, the available + device will be automatically used. Defaults to None. + **kwargs: Other keyword arguments to initialize the model (only work if + the ``model`` is a model name). + + Example: + >>> from mmpretrain import TextToImageRetrievalInferencer + >>> inferencer = TextToImageRetrievalInferencer( + ... 'blip-base_3rdparty_retrieval', + ... prototype='./demo/', + ... 
prototype_cache='t2i_retri.pth') + >>> inferencer('A cat and a dog.')[0] + {'match_score': tensor(0.3855, device='cuda:0'), + 'sample_idx': 1, + 'sample': {'img_path': './demo/cat-dog.png'}} + """ # noqa: E501 + + visualize_kwargs: set = { + 'draw_score', 'show_dir', 'show', 'wait_time', 'figsize', 'topk' + } + postprocess_kwargs: set = {'topk'} + + def __init__(self, + model: ModelType, + prototype, + prototype_cache=None, + fast_match=True, + prepare_batch_size=8, + pretrained: Union[bool, str] = True, + device: Union[str, torch.device, None] = None, + **kwargs) -> None: + super().__init__( + model=model, pretrained=pretrained, device=device, **kwargs) + + self.img_pipeline, self.text_pipeline = self.pipeline + + if hasattr(self.model, 'fast_match'): + self.model.fast_match = fast_match + + self.prototype_dataset = self._prepare_prototype( + prototype, prototype_cache, batch_size=prepare_batch_size) + + def _prepare_prototype(self, prototype, cache=None, batch_size=8): + from mmengine.dataset import DefaultSampler + from torch.utils.data import DataLoader + + def build_dataloader(dataset): + return DataLoader( + dataset, + batch_size=batch_size, + collate_fn=default_collate, + sampler=DefaultSampler(dataset, shuffle=False), + persistent_workers=False, + ) + + if isinstance(prototype, str): + # A directory path of images + prototype = dict( + type='CustomDataset', with_label=False, data_root=prototype) + + if isinstance(prototype, list): + test_pipeline = [dict(type='LoadImageFromFile'), self.img_pipeline] + dataset = BaseDataset( + lazy_init=True, serialize_data=False, pipeline=test_pipeline) + dataset.data_list = [{ + 'sample_idx': i, + 'img_path': file + } for i, file in enumerate(prototype)] + dataset._fully_initialized = True + dataloader = build_dataloader(dataset) + elif isinstance(prototype, dict): + # A config of dataset + from mmpretrain.registry import DATASETS + test_pipeline = [dict(type='LoadImageFromFile'), self.img_pipeline] + prototype.setdefault('pipeline', test_pipeline) + dataset = DATASETS.build(prototype) + dataloader = build_dataloader(dataset) + elif isinstance(prototype, list): + test_pipeline = [dict(type='LoadImageFromFile'), self.img_pipeline] + dataset = BaseDataset( + lazy_init=True, serialize_data=False, pipeline=test_pipeline) + dataset.data_list = [{ + 'sample_idx': i, + 'img_path': file + } for i, file in enumerate(prototype)] + dataset._fully_initialized = True + dataloader = build_dataloader(dataset) + elif isinstance(prototype, DataLoader): + dataset = prototype.dataset + dataloader = prototype + elif isinstance(prototype, BaseDataset): + dataset = prototype + dataloader = build_dataloader(dataset) + else: + raise TypeError(f'Unsupported prototype type {type(prototype)}.') + + if cache is not None and Path(cache).exists(): + self.prototype = torch.load(cache) + else: + prototype = [] + for data_batch in track(dataloader, 'Prepare prototype...'): + with torch.no_grad(): + data_batch = self.model.data_preprocessor( + data_batch, False) + feats = self.model._run_forward(data_batch, mode='tensor') + prototype.append(feats) + prototype = { + k: torch.cat([d[k] for d in prototype]) + for k in prototype[0] + } + self.prototype = prototype + + from mmengine.logging import MMLogger + logger = MMLogger.get_current_instance() + if cache is None: + logger.info('The prototype has been prepared, you can use ' + '`save_prototype` to dump it into a pickle ' + 'file for the future usage.') + elif not Path(cache).exists(): + self.save_prototype(cache) + 
logger.info(f'The prototype has been saved at {cache}.') + + return dataset + + def save_prototype(self, path): + torch.save(self.prototype, path) + + def __call__(self, + inputs: ImageType, + return_datasamples: bool = False, + batch_size: int = 1, + **kwargs) -> dict: + """Call the inferencer. + + Args: + inputs (str | array | list): The image path or array, or a list of + images. + return_datasamples (bool): Whether to return results as + :obj:`DataSample`. Defaults to False. + batch_size (int): Batch size. Defaults to 1. + resize (int, optional): Resize the long edge of the image to the + specified length before visualization. Defaults to None. + draw_score (bool): Whether to draw the match scores. + Defaults to True. + show (bool): Whether to display the visualization result in a + window. Defaults to False. + wait_time (float): The display time (s). Defaults to 0, which means + "forever". + show_dir (str, optional): If not None, save the visualization + results in the specified directory. Defaults to None. + + Returns: + list: The inference results. + """ + return super().__call__(inputs, return_datasamples, batch_size, + **kwargs) + + @torch.no_grad() + def forward(self, data: dict, **kwargs): + """Feed the inputs to the model.""" + data = self.model.data_preprocessor(data, False) + data_samples = data['data_samples'] + feats = self.prototype.copy() + feats.update(self.model.extract_feat(data_samples=data_samples)) + return self.model.predict_all(feats, data_samples, cal_i2t=False)[0] + + def _init_pipeline(self, cfg: Config) -> Callable: + test_pipeline_cfg = cfg.test_dataloader.dataset.pipeline + test_transfroms = [TRANSFORMS.build(t) for t in test_pipeline_cfg] + img_info = {'img': np.zeros((224, 224, 3), dtype=np.uint8)} + text_info = {'text': 'example'} + img_pipeline = Compose(filter_transforms(test_transfroms, img_info)) + text_pipeline = Compose(filter_transforms(test_transfroms, text_info)) + return img_pipeline, text_pipeline + + def preprocess(self, inputs: List[str], batch_size: int = 1): + + def process_text(input_: str): + return self.text_pipeline({'text': input_}) + + chunked_data = self._get_chunk_data( + map(process_text, inputs), batch_size) + yield from map(default_collate, chunked_data) + + def visualize(self, + ori_inputs: List[str], + preds: List[DataSample], + topk: int = 3, + figsize: Tuple[int, int] = (16, 9), + show: bool = False, + wait_time: int = 0, + draw_score=True, + show_dir=None): + if not show and show_dir is None: + return None + + if self.visualizer is None: + from mmpretrain.visualization import UniversalVisualizer + self.visualizer = UniversalVisualizer() + + visualization = [] + for i, (text, data_sample) in enumerate(zip(ori_inputs, preds)): + name = str(i) + + if show_dir is not None: + show_dir = Path(show_dir) + show_dir.mkdir(exist_ok=True) + out_file = str((show_dir / name).with_suffix('.png')) + else: + out_file = None + + self.visualizer.visualize_t2i_retrieval( + text, + data_sample, + self.prototype_dataset, + topk=topk, + fig_cfg=dict(figsize=figsize), + draw_score=draw_score, + show=show, + wait_time=wait_time, + name=name, + out_file=out_file) + visualization.append(self.visualizer.get_image()) + if show: + self.visualizer.close() + return visualization + + def postprocess( + self, + preds: List[DataSample], + visualization: List[np.ndarray], + return_datasamples=False, + topk=1, + ) -> dict: + if return_datasamples: + return preds + + results = [] + for data_sample in preds: + match_scores, indices = 
torch.topk(data_sample.pred_score, k=topk) + matches = [] + for match_score, sample_idx in zip(match_scores, indices): + sample = self.prototype_dataset.get_data_info( + sample_idx.item()) + sample_idx = sample.pop('sample_idx') + matches.append({ + 'match_score': match_score, + 'sample_idx': sample_idx, + 'sample': sample + }) + results.append(matches) + + return results + + @staticmethod + def list_models(pattern: Optional[str] = None): + """List all available model names. + + Args: + pattern (str | None): A wildcard pattern to match model names. + + Returns: + List[str]: a list of model names. + """ + return list_models(pattern=pattern, task='Text-To-Image Retrieval') + + +class ImageToTextRetrievalInferencer(BaseInferencer): + """The inferencer for image to text retrieval. + + Args: + model (BaseModel | str | Config): A model name or a path to the config + file, or a :obj:`BaseModel` object. The model name can be found + by ``ImageToTextRetrievalInferencer.list_models()`` and you can + also query it in :doc:`/modelzoo_statistics`. + prototype (str | list | dict | DataLoader, BaseDataset): The images to + be retrieved. It can be the following types: + + - str: The file path to load the string list. + - list: A list of string. + + prototype_cache (str, optional): The path of the generated prototype + features. If exists, directly load the cache instead of re-generate + the prototype features. If not exists, save the generated features + to the path. Defaults to None. + fast_match (bool): Some algorithms will record extra image features for + further matching, which may consume large memory, set True to avoid + this behavior. Defaults to True. + pretrained (str, optional): Path to the checkpoint. If None, it will + try to find a pre-defined weight from the model you specified + (only work if the ``model`` is a model name). Defaults to None. + device (str, optional): Device to run inference. If None, the available + device will be automatically used. Defaults to None. + **kwargs: Other keyword arguments to initialize the model (only work if + the ``model`` is a model name). + + Example: + >>> from mmpretrain import ImageToTextRetrievalInferencer + >>> inferencer = ImageToTextRetrievalInferencer( + ... 'blip-base_3rdparty_retrieval', + ... prototype=['cat', 'dog', 'snake', 'bird'], + ... 
prototype_cache='i2t_retri.pth') + >>> inferencer('demo/bird.JPEG')[0] + {'match_score': tensor(0.3855, device='cuda:0'), + 'sample_idx': 1, + 'sample': {'img_path': './demo/cat-dog.png'}} + """ # noqa: E501 + + visualize_kwargs: set = { + 'draw_score', 'resize', 'show_dir', 'show', 'wait_time', 'topk' + } + postprocess_kwargs: set = {'topk'} + + def __init__(self, + model: ModelType, + prototype, + prototype_cache=None, + fast_match=True, + prepare_batch_size=8, + pretrained: Union[bool, str] = True, + device: Union[str, torch.device, None] = None, + **kwargs) -> None: + super().__init__( + model=model, pretrained=pretrained, device=device, **kwargs) + + self.img_pipeline, self.text_pipeline = self.pipeline + + if hasattr(self.model, 'fast_match'): + self.model.fast_match = fast_match + + self.prototype_dataset = self._prepare_prototype( + prototype, cache=prototype_cache, batch_size=prepare_batch_size) + + def _prepare_prototype(self, prototype, cache=None, batch_size=8): + from mmengine.dataset import DefaultSampler + from torch.utils.data import DataLoader + + def build_dataloader(dataset): + return DataLoader( + [ + self.text_pipeline({ + 'sample_idx': i, + 'text': text + }) for i, text in enumerate(dataset) + ], + batch_size=batch_size, + collate_fn=default_collate, + sampler=DefaultSampler(dataset, shuffle=False), + persistent_workers=False, + ) + + if isinstance(prototype, str): + # A file path of a list of string + dataset = mmengine.list_from_file(prototype) + elif mmengine.utils.is_seq_of(prototype, str): + dataset = prototype + else: + raise TypeError(f'Unsupported prototype type {type(prototype)}.') + + dataloader = build_dataloader(dataset) + + if cache is not None and Path(cache).exists(): + self.prototype = torch.load(cache) + else: + prototype = [] + for data_batch in track(dataloader, 'Prepare prototype...'): + with torch.no_grad(): + data_batch = self.model.data_preprocessor( + data_batch, False) + feats = self.model._run_forward(data_batch, mode='tensor') + prototype.append(feats) + prototype = { + k: torch.cat([d[k] for d in prototype]) + for k in prototype[0] + } + self.prototype = prototype + + from mmengine.logging import MMLogger + logger = MMLogger.get_current_instance() + if cache is None: + logger.info('The prototype has been prepared, you can use ' + '`save_prototype` to dump it into a pickle ' + 'file for the future usage.') + elif not Path(cache).exists(): + self.save_prototype(cache) + logger.info(f'The prototype has been saved at {cache}.') + + return dataset + + def save_prototype(self, path): + torch.save(self.prototype, path) + + def __call__(self, + inputs: ImageType, + return_datasamples: bool = False, + batch_size: int = 1, + **kwargs) -> dict: + """Call the inferencer. + + Args: + inputs (str | array | list): The image path or array, or a list of + images. + return_datasamples (bool): Whether to return results as + :obj:`DataSample`. Defaults to False. + batch_size (int): Batch size. Defaults to 1. + resize (int, optional): Resize the long edge of the image to the + specified length before visualization. Defaults to None. + draw_score (bool): Whether to draw the match scores. + Defaults to True. + show (bool): Whether to display the visualization result in a + window. Defaults to False. + wait_time (float): The display time (s). Defaults to 0, which means + "forever". + show_dir (str, optional): If not None, save the visualization + results in the specified directory. Defaults to None. + + Returns: + list: The inference results. 
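+
+        Example:
+            A minimal call sketch (the image path and ``topk`` value are
+            illustrative; each match dict carries ``match_score``,
+            ``sample_idx`` and ``text`` keys):
+
+            >>> matches = inferencer('demo/bird.JPEG', topk=2)[0]
+            >>> [m['text'] for m in matches]  # texts from the prototype list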
+ """ + return super().__call__(inputs, return_datasamples, batch_size, + **kwargs) + + @torch.no_grad() + def forward(self, data: dict, **kwargs): + """Feed the inputs to the model.""" + data = self.model.data_preprocessor(data, False) + feats = self.prototype.copy() + feats.update(self.model.extract_feat(images=data['images'])) + return self.model.predict_all( + feats, data['data_samples'], cal_t2i=False)[0] + + def _init_pipeline(self, cfg: Config) -> Callable: + test_pipeline_cfg = cfg.test_dataloader.dataset.pipeline + test_transfroms = [TRANSFORMS.build(t) for t in test_pipeline_cfg] + img_info = {'img': np.zeros((224, 224, 3), dtype=np.uint8)} + text_info = {'text': 'example'} + img_pipeline = Compose(filter_transforms(test_transfroms, img_info)) + text_pipeline = Compose(filter_transforms(test_transfroms, text_info)) + return img_pipeline, text_pipeline + + def preprocess(self, inputs: List[ImageType], batch_size: int = 1): + + def load_image(input_): + img = imread(input_) + if img is None: + raise ValueError(f'Failed to read image {input_}.') + return dict( + img=img, + img_shape=img.shape[:2], + ori_shape=img.shape[:2], + ) + + pipeline = Compose([load_image, self.img_pipeline]) + + chunked_data = self._get_chunk_data(map(pipeline, inputs), batch_size) + yield from map(default_collate, chunked_data) + + def visualize(self, + ori_inputs: List[ImageType], + preds: List[DataSample], + topk: int = 3, + resize: Optional[int] = 224, + show: bool = False, + wait_time: int = 0, + draw_score=True, + show_dir=None): + if not show and show_dir is None: + return None + + if self.visualizer is None: + from mmpretrain.visualization import UniversalVisualizer + self.visualizer = UniversalVisualizer() + + visualization = [] + for i, (input_, data_sample) in enumerate(zip(ori_inputs, preds)): + image = imread(input_) + if isinstance(input_, str): + # The image loaded from path is BGR format. + image = image[..., ::-1] + name = Path(input_).stem + else: + name = str(i) + + if show_dir is not None: + show_dir = Path(show_dir) + show_dir.mkdir(exist_ok=True) + out_file = str((show_dir / name).with_suffix('.png')) + else: + out_file = None + + self.visualizer.visualize_i2t_retrieval( + image, + data_sample, + self.prototype_dataset, + topk=topk, + resize=resize, + draw_score=draw_score, + show=show, + wait_time=wait_time, + name=name, + out_file=out_file) + visualization.append(self.visualizer.get_image()) + if show: + self.visualizer.close() + return visualization + + def postprocess( + self, + preds: List[DataSample], + visualization: List[np.ndarray], + return_datasamples=False, + topk=1, + ) -> dict: + if return_datasamples: + return preds + + results = [] + for data_sample in preds: + match_scores, indices = torch.topk(data_sample.pred_score, k=topk) + matches = [] + for match_score, sample_idx in zip(match_scores, indices): + text = self.prototype_dataset[sample_idx.item()] + matches.append({ + 'match_score': match_score, + 'sample_idx': sample_idx, + 'text': text + }) + results.append(matches) + + return results + + @staticmethod + def list_models(pattern: Optional[str] = None): + """List all available model names. + + Args: + pattern (str | None): A wildcard pattern to match model names. + + Returns: + List[str]: a list of model names. 
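        Example:
            A short sketch; the names actually returned depend on the
            metafiles shipped with your mmpretrain installation.

            >>> from mmpretrain import ImageToTextRetrievalInferencer
            >>> ImageToTextRetrievalInferencer.list_models(pattern='blip*')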
+ """ + return list_models(pattern=pattern, task='Image-To-Text Retrieval') diff --git a/mmpretrain/apis/nlvr.py b/mmpretrain/apis/nlvr.py new file mode 100644 index 0000000000000000000000000000000000000000..9977c3b06f36fa61a3cd2edf36077a993b2030cd --- /dev/null +++ b/mmpretrain/apis/nlvr.py @@ -0,0 +1,150 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from copy import deepcopy +from typing import Callable, List, Optional, Tuple, Union + +import numpy as np +import torch +from mmcv.image import imread +from mmengine.config import Config +from mmengine.dataset import Compose, default_collate + +from mmpretrain.registry import TRANSFORMS +from mmpretrain.structures import DataSample +from .base import BaseInferencer +from .model import list_models + +InputType = Tuple[Union[str, np.ndarray], Union[str, np.ndarray], str] +InputsType = Union[List[InputType], InputType] + + +class NLVRInferencer(BaseInferencer): + """The inferencer for Natural Language for Visual Reasoning. + + Args: + model (BaseModel | str | Config): A model name or a path to the config + file, or a :obj:`BaseModel` object. The model name can be found + by ``NLVRInferencer.list_models()`` and you can also + query it in :doc:`/modelzoo_statistics`. + pretrained (str, optional): Path to the checkpoint. If None, it will + try to find a pre-defined weight from the model you specified + (only work if the ``model`` is a model name). Defaults to None. + device (str, optional): Device to run inference. If None, the available + device will be automatically used. Defaults to None. + **kwargs: Other keyword arguments to initialize the model (only work if + the ``model`` is a model name). + """ + + visualize_kwargs: set = { + 'resize', 'draw_score', 'show', 'show_dir', 'wait_time' + } + + def __call__(self, + inputs: InputsType, + return_datasamples: bool = False, + batch_size: int = 1, + **kwargs) -> dict: + """Call the inferencer. + + Args: + inputs (tuple, List[tuple]): The input data tuples, every tuple + should include three items (left image, right image, text). + The image can be a path or numpy array. + return_datasamples (bool): Whether to return results as + :obj:`DataSample`. Defaults to False. + batch_size (int): Batch size. Defaults to 1. + resize (int, optional): Resize the short edge of the image to the + specified length before visualization. Defaults to None. + draw_score (bool): Whether to draw the prediction scores + of prediction categories. Defaults to True. + show (bool): Whether to display the visualization result in a + window. Defaults to False. + wait_time (float): The display time (s). Defaults to 0, which means + "forever". + show_dir (str, optional): If not None, save the visualization + results in the specified directory. Defaults to None. + + Returns: + list: The inference results. 
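        Example:
            A minimal sketch. The checkpoint name below is an assumption;
            query ``NLVRInferencer.list_models()`` for the names available in
            your environment. The demo images are the ones used by the other
            inferencer examples in this package.

            >>> from mmpretrain import NLVRInferencer
            >>> inferencer = NLVRInferencer('blip-base_3rdparty_nlvr')
            >>> result = inferencer(('demo/cat-dog.png', 'demo/bird.JPEG',
            ...                      'There is a dog in one of the images.'))[0]
            >>> sorted(result.keys())
            ['pred_label', 'pred_score', 'pred_scores']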
+ """ + assert isinstance(inputs, (tuple, list)) + if isinstance(inputs, tuple): + inputs = [inputs] + for input_ in inputs: + assert isinstance(input_, tuple) + assert len(input_) == 3 + + return super().__call__( + inputs, + return_datasamples=return_datasamples, + batch_size=batch_size, + **kwargs) + + def _init_pipeline(self, cfg: Config) -> Callable: + test_pipeline_cfg = cfg.test_dataloader.dataset.pipeline + assert test_pipeline_cfg[0]['type'] == 'ApplyToList' + + list_pipeline = deepcopy(test_pipeline_cfg[0]) + if list_pipeline.scatter_key == 'img_path': + # Remove `LoadImageFromFile` + list_pipeline.transforms.pop(0) + list_pipeline.scatter_key = 'img' + + test_pipeline = Compose( + [TRANSFORMS.build(list_pipeline)] + + [TRANSFORMS.build(t) for t in test_pipeline_cfg[1:]]) + return test_pipeline + + def preprocess(self, inputs: InputsType, batch_size: int = 1): + + def load_image(input_): + img1 = imread(input_[0]) + img2 = imread(input_[1]) + text = input_[2] + if img1 is None: + raise ValueError(f'Failed to read image {input_[0]}.') + if img2 is None: + raise ValueError(f'Failed to read image {input_[1]}.') + return dict( + img=[img1, img2], + img_shape=[img1.shape[:2], img2.shape[:2]], + ori_shape=[img1.shape[:2], img2.shape[:2]], + text=text, + ) + + pipeline = Compose([load_image, self.pipeline]) + + chunked_data = self._get_chunk_data(map(pipeline, inputs), batch_size) + yield from map(default_collate, chunked_data) + + def postprocess(self, + preds: List[DataSample], + visualization: List[np.ndarray], + return_datasamples=False) -> dict: + if return_datasamples: + return preds + + results = [] + for data_sample in preds: + pred_scores = data_sample.pred_score + pred_score = float(torch.max(pred_scores).item()) + pred_label = torch.argmax(pred_scores).item() + result = { + 'pred_scores': pred_scores.detach().cpu().numpy(), + 'pred_label': pred_label, + 'pred_score': pred_score, + } + results.append(result) + + return results + + @staticmethod + def list_models(pattern: Optional[str] = None): + """List all available model names. + + Args: + pattern (str | None): A wildcard pattern to match model names. + + Returns: + List[str]: a list of model names. + """ + return list_models(pattern=pattern, task='NLVR') diff --git a/mmpretrain/apis/utils.py b/mmpretrain/apis/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..83e76325472f6925f78c746e3a10f3a58b0e6de4 --- /dev/null +++ b/mmpretrain/apis/utils.py @@ -0,0 +1,270 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import os +from collections import defaultdict +from contextlib import contextmanager +from itertools import chain +from typing import Dict, List, Optional, Union + +import torch +import torch.nn as nn + +from mmpretrain.utils import require + + +@require('torch>=1.9.0', 'https://pytorch.org/get-started/locally/') +@require('accelerate') +def dispatch_model( + model, + device_map: Union[str, dict], + max_memory: Optional[dict] = None, + no_split_module_classes: Optional[List[str]] = None, + offload_folder: str = None, + offload_buffers: bool = False, + preload_module_classes: Optional[List[str]] = None, +): + """Split and dispatch a model across devices. + + The function depends on the `accelerate` package. Refers to + https://huggingface.co/docs/accelerate/main/en/usage_guides/big_modeling + + Args: + model (torch.nn.Module): The model to dispatch. + device_map (str | dict | None): A map that specifies where each + submodule should go. 
It doesn't need to be refined to each + parameter/buffer name, once a given module name is inside, every + submodule of it will be sent to the same device. You can use + `device_map="auto"` to automatically generate the device map. + Defaults to None. + max_memory (dict | None): A dictionary device identifier to maximum + memory. Will default to the maximum memory available for each GPU + and the available CPU RAM if unset. Defaults to None. + no_split_module_classes (List[str] | None): A list of layer class names + that should never be split across device (for instance any layer + that has a residual connection). If None, try to get the settings + from the model class. Defaults to None. + offload_folder (str | None): If the `device_map` contains any value + `"disk"`, the folder where we will offload weights. + offload_buffers (bool): In the layers that are offloaded on the CPU + or the hard drive, whether or not to offload the buffers as + well as the parameters. Defaults to False. + preload_module_classes (List[str] | None): A list of classes whose + instances should load all their weights (even in the submodules) at + the beginning of the forward. This should only be used for classes + that have submodules which are registered but not called directly + during the forward, for instance if a `dense` linear layer is + registered, but at forward, `dense.weight` and `dense.bias` are + used in some operations instead of calling `dense` directly. + Defaults to None. + """ + from accelerate import dispatch_model, infer_auto_device_map + + # Check valid device_map string. + valid_map_option = ['auto', 'balanced', 'balanced_low_0', 'sequential'] + if isinstance(device_map, str) and device_map not in valid_map_option: + raise ValueError('If passing a string for `device_map`, please choose ' + f'from {valid_map_option}.') + + # Generate device map automatically + if isinstance(device_map, str): + if no_split_module_classes is None: + no_split_module_classes = getattr(model, '_no_split_modules', None) + if no_split_module_classes is None: + raise ValueError(f'{model.__class__.__name__} does not support ' + f"`device_map='{device_map}'` yet.") + + if device_map != 'sequential': + from accelerate.utils import get_balanced_memory + max_memory = get_balanced_memory( + model, + max_memory=max_memory, + no_split_module_classes=no_split_module_classes, + dtype=None, + low_zero=(device_map == 'balanced_low_0'), + ) + max_memory[0] *= 0.9 + device_map = infer_auto_device_map( + model, + max_memory=max_memory, + no_split_module_classes=no_split_module_classes, + dtype=None, + ) + + if 'disk' in device_map.values(): + if offload_folder is None: + raise ValueError( + 'The current `device_map` had weights offloaded to the disk. ' + 'Please provide an `offload_folder` for them.') + os.makedirs(offload_folder, exist_ok=True) + + main_device = next( + (d for d in device_map.values() if d not in ['cpu', 'disk']), 'cpu') + + model = dispatch_model( + model, + device_map=device_map, + main_device=main_device, + offload_dir=offload_folder, + offload_buffers=offload_buffers, + preload_module_classes=preload_module_classes, + ) + if hasattr(model, 'data_preprocessor'): + model.data_preprocessor._device = torch.device(main_device) + return model + + +@contextmanager +def init_empty_weights(include_buffers: bool = False): + """A context manager under which models are initialized with all parameters + on the meta device. + + With this context manager, we can create an empty model. 
Useful when just + initializing the model would blow the available RAM. + + Besides move the parameters to meta device, this method will also avoid + load checkpoint from `mmengine.runner.load_checkpoint` and + `transformers.PreTrainedModel.from_pretrained`. + + Modified from https://github.com/huggingface/accelerate + + Args: + include_buffers (bool): Whether put all buffers on the meta device + during initialization. + """ + device = torch.device('meta') + + # move parameter and buffer to meta device + old_register_parameter = nn.Module.register_parameter + if include_buffers: + old_register_buffer = nn.Module.register_buffer + # See https://github.com/huggingface/accelerate/pull/699 + tensor_constructors_to_patch = { + torch_function_name: getattr(torch, torch_function_name) + for torch_function_name in ['empty', 'zeros', 'ones', 'full'] + } + + def register_parameter(module, name, param): + old_register_parameter(module, name, param) + if param is not None: + param_cls = type(module._parameters[name]) + kwargs = module._parameters[name].__dict__ + module._parameters[name] = param_cls( + module._parameters[name].to(device), **kwargs) + + def register_buffer(module, name, buffer, *args, **kwargs): + old_register_buffer(module, name, buffer, *args, **kwargs) + if buffer is not None: + module._buffers[name] = module._buffers[name].to(device) + + def patch_tensor_constructor(fn): + + def wrapper(*args, **kwargs): + kwargs['device'] = device + return fn(*args, **kwargs) + + return wrapper + + # Patch load_checkpoint + import mmengine.runner.checkpoint as mmengine_load + old_load_checkpoint = mmengine_load.load_checkpoint + + def patch_load_checkpoint(*args, **kwargs): + return {} + + # Patch transformers from pretrained + try: + from transformers import PreTrainedModel + from transformers.models.auto.auto_factory import (AutoConfig, + _BaseAutoModelClass) + with_transformers = True + except ImportError: + with_transformers = False + + @classmethod + def patch_auto_model(cls, pretrained_model_name_or_path, *model_args, + **kwargs): + cfg = AutoConfig.from_pretrained(pretrained_model_name_or_path, + *model_args, **kwargs) + return cls.from_config(cfg) + + @classmethod + def patch_pretrained_model(cls, pretrained_model_name_or_path, *model_args, + **kwargs): + cfg = cls.config_class.from_pretrained(pretrained_model_name_or_path, + *model_args, **kwargs) + return cls(cfg) + + if with_transformers: + old_pretrained_model = PreTrainedModel.from_pretrained + old_auto_model = _BaseAutoModelClass.from_pretrained + + try: + nn.Module.register_parameter = register_parameter + mmengine_load.load_checkpoint = patch_load_checkpoint + if with_transformers: + PreTrainedModel.from_pretrained = patch_pretrained_model + _BaseAutoModelClass.from_pretrained = patch_auto_model + if include_buffers: + nn.Module.register_buffer = register_buffer + for func in tensor_constructors_to_patch.keys(): + tensor_constructor = patch_tensor_constructor( + getattr(torch, func)) + setattr(torch, func, tensor_constructor) + yield + finally: + nn.Module.register_parameter = old_register_parameter + mmengine_load.load_checkpoint = old_load_checkpoint + if with_transformers: + PreTrainedModel.from_pretrained = old_pretrained_model + _BaseAutoModelClass.from_pretrained = old_auto_model + if include_buffers: + nn.Module.register_buffer = old_register_buffer + for func, ori in tensor_constructors_to_patch.items(): + setattr(torch, func, ori) + + +def compute_module_sizes( + model: nn.Module, + dtype: Union[str, torch.dtype, None] = 
None, + special_dtypes: Optional[Dict[str, Union[str, torch.dtype]]] = None): + """Compute the size of each submodule of a given model.""" + + def get_dtype(dtype): + if isinstance(dtype, str): + dtype = getattr(torch, dtype) + if dtype is not None: + assert issubclass(dtype, torch.dtype) + return dtype + + def dtype_bytes(dtype: torch.dtype): + if dtype is torch.bool: + return 1 + if dtype.is_floating_point: + return torch.finfo(dtype).bits / 8 + else: + return torch.iinfo(dtype).bits / 8 + + if dtype is not None: + dtype = get_dtype(dtype) + dtype_size = dtype_bytes(dtype) + + if special_dtypes is not None: + special_dtypes = { + key: dtype_bytes(dtype) + for key, dtype in special_dtypes.items() + } + + module_sizes = defaultdict(int) + for name, tensor in chain( + model.named_parameters(recurse=True), + model.named_buffers(recurse=True)): + if special_dtypes is not None and name in special_dtypes: + size = tensor.numel() * special_dtypes[name] + elif dtype is None: + size = tensor.numel() * tensor.element_size() + else: + size = tensor.numel() * min(dtype_size, tensor.element_size()) + name_parts = name.split('.') + for idx in range(len(name_parts) + 1): + module_sizes['.'.join(name_parts[:idx])] += size + + return module_sizes diff --git a/mmpretrain/apis/visual_grounding.py b/mmpretrain/apis/visual_grounding.py new file mode 100644 index 0000000000000000000000000000000000000000..59a6ba8b2cb6ef14875de6a4113c3a7f7882b290 --- /dev/null +++ b/mmpretrain/apis/visual_grounding.py @@ -0,0 +1,180 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from pathlib import Path +from typing import Callable, List, Optional, Union + +import numpy as np +from mmcv.image import imread +from mmengine.config import Config +from mmengine.dataset import Compose, default_collate + +from mmpretrain.registry import TRANSFORMS +from mmpretrain.structures import DataSample +from .base import BaseInferencer +from .model import list_models + + +class VisualGroundingInferencer(BaseInferencer): + """The inferencer for visual grounding. + + Args: + model (BaseModel | str | Config): A model name or a path to the config + file, or a :obj:`BaseModel` object. The model name can be found + by ``VisualGroundingInferencer.list_models()`` and you can also + query it in :doc:`/modelzoo_statistics`. + pretrained (str, optional): Path to the checkpoint. If None, it will + try to find a pre-defined weight from the model you specified + (only work if the ``model`` is a model name). Defaults to None. + device (str, optional): Device to run inference. If None, the available + device will be automatically used. Defaults to None. + **kwargs: Other keyword arguments to initialize the model (only work if + the ``model`` is a model name). + + Example: + >>> from mmpretrain import VisualGroundingInferencer + >>> inferencer = VisualGroundingInferencer('ofa-base_3rdparty_refcoco') + >>> inferencer('demo/cat-dog.png', 'dog')[0] + {'pred_bboxes': tensor([[ 36.6000, 29.6000, 355.8000, 395.2000]])} + """ # noqa: E501 + + visualize_kwargs: set = { + 'resize', 'show', 'show_dir', 'wait_time', 'line_width', 'bbox_color' + } + + def __call__(self, + images: Union[str, np.ndarray, list], + texts: Union[str, list], + return_datasamples: bool = False, + batch_size: int = 1, + **kwargs) -> dict: + """Call the inferencer. + + Args: + images (str | array | list): The image path or array, or a list of + images. + texts (str | list): The text to do visual grounding. + return_datasamples (bool): Whether to return results as + :obj:`DataSample`. 
Defaults to False. + batch_size (int): Batch size. Defaults to 1. + resize (int, optional): Resize the short edge of the image to the + specified length before visualization. Defaults to None. + draw_score (bool): Whether to draw the prediction scores + of prediction categories. Defaults to True. + show (bool): Whether to display the visualization result in a + window. Defaults to False. + wait_time (float): The display time (s). Defaults to 0, which means + "forever". + show_dir (str, optional): If not None, save the visualization + results in the specified directory. Defaults to None. + line_width (int): The line width of the bbox. Defaults to 3. + bbox_color (str | tuple): The color of the bbox. + Defaults to 'green'. + + Returns: + list: The inference results. + """ + if not isinstance(images, (list, tuple)): + assert isinstance(texts, str) + inputs = [{'img': images, 'text': texts}] + else: + inputs = [] + for i in range(len(images)): + input_ = {'img': images[i], 'text': texts[i]} + inputs.append(input_) + + return super().__call__(inputs, return_datasamples, batch_size, + **kwargs) + + def _init_pipeline(self, cfg: Config) -> Callable: + test_pipeline_cfg = cfg.test_dataloader.dataset.pipeline + if test_pipeline_cfg[0]['type'] == 'LoadImageFromFile': + # Image loading is finished in `self.preprocess`. + test_pipeline_cfg = test_pipeline_cfg[1:] + test_pipeline = Compose( + [TRANSFORMS.build(t) for t in test_pipeline_cfg]) + return test_pipeline + + def preprocess(self, inputs: List[dict], batch_size: int = 1): + + def load_image(input_: dict): + img = imread(input_['img']) + if img is None: + raise ValueError(f'Failed to read image {input_}.') + return {**input_, 'img': img} + + pipeline = Compose([load_image, self.pipeline]) + + chunked_data = self._get_chunk_data(map(pipeline, inputs), batch_size) + yield from map(default_collate, chunked_data) + + def visualize(self, + ori_inputs: List[dict], + preds: List[DataSample], + show: bool = False, + wait_time: int = 0, + resize: Optional[int] = None, + line_width: int = 3, + bbox_color: Union[str, tuple] = 'green', + show_dir=None): + if not show and show_dir is None: + return None + + if self.visualizer is None: + from mmpretrain.visualization import UniversalVisualizer + self.visualizer = UniversalVisualizer() + + visualization = [] + for i, (input_, data_sample) in enumerate(zip(ori_inputs, preds)): + image = imread(input_['img']) + if isinstance(input_['img'], str): + # The image loaded from path is BGR format. + image = image[..., ::-1] + name = Path(input_['img']).stem + else: + name = str(i) + + if show_dir is not None: + show_dir = Path(show_dir) + show_dir.mkdir(exist_ok=True) + out_file = str((show_dir / name).with_suffix('.png')) + else: + out_file = None + + self.visualizer.visualize_visual_grounding( + image, + data_sample, + resize=resize, + show=show, + wait_time=wait_time, + line_width=line_width, + bbox_color=bbox_color, + name=name, + out_file=out_file) + visualization.append(self.visualizer.get_image()) + if show: + self.visualizer.close() + return visualization + + def postprocess(self, + preds: List[DataSample], + visualization: List[np.ndarray], + return_datasamples=False) -> dict: + if return_datasamples: + return preds + + results = [] + for data_sample in preds: + results.append({'pred_bboxes': data_sample.get('pred_bboxes')}) + + return results + + @staticmethod + def list_models(pattern: Optional[str] = None): + """List all available model names. 
+ + Args: + pattern (str | None): A wildcard pattern to match model names. + + Returns: + List[str]: a list of model names. + """ + return list_models(pattern=pattern, task='Visual Grounding') diff --git a/mmpretrain/apis/visual_question_answering.py b/mmpretrain/apis/visual_question_answering.py new file mode 100644 index 0000000000000000000000000000000000000000..2d056758f39f780c6863ae54b09b8f0cc725ade4 --- /dev/null +++ b/mmpretrain/apis/visual_question_answering.py @@ -0,0 +1,181 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from pathlib import Path +from typing import Callable, List, Optional, Union + +import numpy as np +from mmcv.image import imread +from mmengine.config import Config +from mmengine.dataset import Compose, default_collate + +from mmpretrain.registry import TRANSFORMS +from mmpretrain.structures import DataSample +from .base import BaseInferencer +from .model import list_models + + +class VisualQuestionAnsweringInferencer(BaseInferencer): + """The inferencer for visual question answering. + + Args: + model (BaseModel | str | Config): A model name or a path to the config + file, or a :obj:`BaseModel` object. The model name can be found + by ``VisualQuestionAnsweringInferencer.list_models()`` and you can + also query it in :doc:`/modelzoo_statistics`. + pretrained (str, optional): Path to the checkpoint. If None, it will + try to find a pre-defined weight from the model you specified + (only work if the ``model`` is a model name). Defaults to None. + device (str, optional): Device to run inference. If None, the available + device will be automatically used. Defaults to None. + **kwargs: Other keyword arguments to initialize the model (only work if + the ``model`` is a model name). + + Example: + >>> from mmpretrain import VisualQuestionAnsweringInferencer + >>> inferencer = VisualQuestionAnsweringInferencer('ofa-base_3rdparty-zeroshot_vqa') + >>> inferencer('demo/cat-dog.png', "What's the animal next to the dog?")[0] + {'question': "What's the animal next to the dog?", 'pred_answer': 'cat'} + """ # noqa: E501 + + visualize_kwargs: set = {'resize', 'show', 'show_dir', 'wait_time'} + + def __call__(self, + images: Union[str, np.ndarray, list], + questions: Union[str, list], + return_datasamples: bool = False, + batch_size: int = 1, + objects: Optional[List[str]] = None, + **kwargs) -> dict: + """Call the inferencer. + + Args: + images (str | array | list): The image path or array, or a list of + images. + questions (str | list): The question to the correspondding image. + return_datasamples (bool): Whether to return results as + :obj:`DataSample`. Defaults to False. + batch_size (int): Batch size. Defaults to 1. + objects (List[List[str]], optional): Some algorithms like OFA + fine-tuned VQA models requires extra object description list + for every image. Defaults to None. + resize (int, optional): Resize the short edge of the image to the + specified length before visualization. Defaults to None. + show (bool): Whether to display the visualization result in a + window. Defaults to False. + wait_time (float): The display time (s). Defaults to 0, which means + "forever". + show_dir (str, optional): If not None, save the visualization + results in the specified directory. Defaults to None. + + Returns: + list: The inference results. 
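        Example:
            A minimal sketch mirroring the class-level example above; the
            image path and question are illustrative only.

            >>> from mmpretrain import VisualQuestionAnsweringInferencer
            >>> inferencer = VisualQuestionAnsweringInferencer(
            ...     'ofa-base_3rdparty-zeroshot_vqa')
            >>> inferencer('demo/cat-dog.png',
            ...            "What's the animal next to the dog?")[0]
            {'question': "What's the animal next to the dog?", 'pred_answer': 'cat'}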
+ """ + if not isinstance(images, (list, tuple)): + assert isinstance(questions, str) + inputs = [{'img': images, 'question': questions}] + if objects is not None: + assert isinstance(objects[0], str) + inputs[0]['objects'] = objects + else: + inputs = [] + for i in range(len(images)): + input_ = {'img': images[i], 'question': questions[i]} + if objects is not None: + input_['objects'] = objects[i] + inputs.append(input_) + + return super().__call__(inputs, return_datasamples, batch_size, + **kwargs) + + def _init_pipeline(self, cfg: Config) -> Callable: + test_pipeline_cfg = cfg.test_dataloader.dataset.pipeline + if test_pipeline_cfg[0]['type'] == 'LoadImageFromFile': + # Image loading is finished in `self.preprocess`. + test_pipeline_cfg = test_pipeline_cfg[1:] + test_pipeline = Compose( + [TRANSFORMS.build(t) for t in test_pipeline_cfg]) + return test_pipeline + + def preprocess(self, inputs: List[dict], batch_size: int = 1): + + def load_image(input_: dict): + img = imread(input_['img']) + if img is None: + raise ValueError(f'Failed to read image {input_}.') + return {**input_, 'img': img} + + pipeline = Compose([load_image, self.pipeline]) + + chunked_data = self._get_chunk_data(map(pipeline, inputs), batch_size) + yield from map(default_collate, chunked_data) + + def visualize(self, + ori_inputs: List[dict], + preds: List[DataSample], + show: bool = False, + wait_time: int = 0, + resize: Optional[int] = None, + show_dir=None): + if not show and show_dir is None: + return None + + if self.visualizer is None: + from mmpretrain.visualization import UniversalVisualizer + self.visualizer = UniversalVisualizer() + + visualization = [] + for i, (input_, data_sample) in enumerate(zip(ori_inputs, preds)): + image = imread(input_['img']) + if isinstance(input_['img'], str): + # The image loaded from path is BGR format. + image = image[..., ::-1] + name = Path(input_['img']).stem + else: + name = str(i) + + if show_dir is not None: + show_dir = Path(show_dir) + show_dir.mkdir(exist_ok=True) + out_file = str((show_dir / name).with_suffix('.png')) + else: + out_file = None + + self.visualizer.visualize_vqa( + image, + data_sample, + resize=resize, + show=show, + wait_time=wait_time, + name=name, + out_file=out_file) + visualization.append(self.visualizer.get_image()) + if show: + self.visualizer.close() + return visualization + + def postprocess(self, + preds: List[DataSample], + visualization: List[np.ndarray], + return_datasamples=False) -> dict: + if return_datasamples: + return preds + + results = [] + for data_sample in preds: + results.append({ + 'question': data_sample.get('question'), + 'pred_answer': data_sample.get('pred_answer'), + }) + + return results + + @staticmethod + def list_models(pattern: Optional[str] = None): + """List all available model names. + + Args: + pattern (str | None): A wildcard pattern to match model names. + + Returns: + List[str]: a list of model names. + """ + return list_models(pattern=pattern, task='Visual Question Answering') diff --git a/mmpretrain/datasets/__init__.py b/mmpretrain/datasets/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b680fb83abbc4612fbb9a4a3d09ec6ce4de11460 --- /dev/null +++ b/mmpretrain/datasets/__init__.py @@ -0,0 +1,54 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
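The ``__init__.py`` being added here registers the standard datasets unconditionally and exposes the multimodal ones only when ``WITH_MULTIMODAL`` evaluates to true. A minimal sketch of how downstream code can mirror that guard; only the import and the ``COCOCaption`` name are taken from the module below, and the ``None`` fallback is an illustrative choice:

from mmpretrain.utils.dependency import WITH_MULTIMODAL

if WITH_MULTIMODAL:
    # Only importable when the multimodal extras are installed.
    from mmpretrain.datasets import COCOCaption
else:
    COCOCaption = None  # degrade gracefully without the extras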
+from mmpretrain.utils.dependency import WITH_MULTIMODAL +from .base_dataset import BaseDataset +from .builder import build_dataset +from .caltech101 import Caltech101 +from .cifar import CIFAR10, CIFAR100 +from .cub import CUB +from .custom import CustomDataset +from .dataset_wrappers import KFoldDataset +from .dtd import DTD +from .fgvcaircraft import FGVCAircraft +from .flowers102 import Flowers102 +from .food101 import Food101 +from .imagenet import ImageNet, ImageNet21k +from .inshop import InShop +from .mnist import MNIST, FashionMNIST +from .multi_label import MultiLabelDataset +from .multi_task import MultiTaskDataset +from .nlvr2 import NLVR2 +from .oxfordiiitpet import OxfordIIITPet +from .places205 import Places205 +from .samplers import * # noqa: F401,F403 +from .stanfordcars import StanfordCars +from .sun397 import SUN397 +from .transforms import * # noqa: F401,F403 +from .voc import VOC + +__all__ = [ + 'BaseDataset', 'CIFAR10', 'CIFAR100', 'CUB', 'Caltech101', 'CustomDataset', + 'DTD', 'FGVCAircraft', 'FashionMNIST', 'Flowers102', 'Food101', 'ImageNet', + 'ImageNet21k', 'InShop', 'KFoldDataset', 'MNIST', 'MultiLabelDataset', + 'MultiTaskDataset', 'NLVR2', 'OxfordIIITPet', 'Places205', 'SUN397', + 'StanfordCars', 'VOC', 'build_dataset' +] + +if WITH_MULTIMODAL: + from .coco_caption import COCOCaption + from .coco_retrieval import COCORetrieval + from .coco_vqa import COCOVQA + from .flamingo import FlamingoEvalCOCOCaption, FlamingoEvalCOCOVQA + from .refcoco import RefCOCO + from .scienceqa import ScienceQA + from .visual_genome import VisualGenomeQA + + __all__.extend([ + 'COCOCaption', + 'COCORetrieval', + 'COCOVQA', + 'FlamingoEvalCOCOCaption', + 'FlamingoEvalCOCOVQA', + 'RefCOCO', + 'VisualGenomeQA', + 'ScienceQA', + ]) diff --git a/mmpretrain/datasets/base_dataset.py b/mmpretrain/datasets/base_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..dffdf04772163b5fa55afabc8e15ac8c118aadd2 --- /dev/null +++ b/mmpretrain/datasets/base_dataset.py @@ -0,0 +1,219 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import os.path as osp +from os import PathLike +from typing import List, Optional, Sequence, Union + +import mmengine +import numpy as np +from mmengine.dataset import BaseDataset as _BaseDataset + +from mmpretrain.registry import DATASETS, TRANSFORMS + + +def expanduser(path): + """Expand ~ and ~user constructions. + + If user or $HOME is unknown, do nothing. + """ + if isinstance(path, (str, PathLike)): + return osp.expanduser(path) + else: + return path + + +@DATASETS.register_module() +class BaseDataset(_BaseDataset): + """Base dataset for image classification task. + + This dataset support annotation file in `OpenMMLab 2.0 style annotation + format`. + + .. _OpenMMLab 2.0 style annotation format: + https://github.com/open-mmlab/mmengine/blob/main/docs/zh_cn/tutorials/basedataset.md + + Comparing with the :class:`mmengine.BaseDataset`, this class implemented + several useful methods. + + Args: + ann_file (str): Annotation file path. + metainfo (dict, optional): Meta information for dataset, such as class + information. Defaults to None. + data_root (str): The root directory for ``data_prefix`` and + ``ann_file``. Defaults to ''. + data_prefix (str | dict): Prefix for training data. Defaults to ''. + filter_cfg (dict, optional): Config for filter data. Defaults to None. + indices (int or Sequence[int], optional): Support using first few + data in annotation file to facilitate training/testing on a smaller + dataset. 
Defaults to None, which means using all ``data_infos``. + serialize_data (bool): Whether to hold memory using serialized objects, + when enabled, data loader workers can use shared RAM from master + process instead of making a copy. Defaults to True. + pipeline (Sequence): Processing pipeline. Defaults to an empty tuple. + test_mode (bool, optional): ``test_mode=True`` means in test phase, + an error will be raised when getting an item fails, ``test_mode=False`` + means in training phase, another item will be returned randomly. + Defaults to False. + lazy_init (bool): Whether to load annotation during instantiation. + In some cases, such as visualization, only the meta information of + the dataset is needed, which is not necessary to load annotation + file. ``Basedataset`` can skip load annotations to save time by set + ``lazy_init=False``. Defaults to False. + max_refetch (int): If ``Basedataset.prepare_data`` get a None img. + The maximum extra number of cycles to get a valid image. + Defaults to 1000. + classes (str | Sequence[str], optional): Specify names of classes. + + - If is string, it should be a file path, and the every line of + the file is a name of a class. + - If is a sequence of string, every item is a name of class. + - If is None, use categories information in ``metainfo`` argument, + annotation file or the class attribute ``METAINFO``. + + Defaults to None. + """ # noqa: E501 + + def __init__(self, + ann_file: str, + metainfo: Optional[dict] = None, + data_root: str = '', + data_prefix: Union[str, dict] = '', + filter_cfg: Optional[dict] = None, + indices: Optional[Union[int, Sequence[int]]] = None, + serialize_data: bool = True, + pipeline: Sequence = (), + test_mode: bool = False, + lazy_init: bool = False, + max_refetch: int = 1000, + classes: Union[str, Sequence[str], None] = None): + if isinstance(data_prefix, str): + data_prefix = dict(img_path=expanduser(data_prefix)) + + ann_file = expanduser(ann_file) + metainfo = self._compat_classes(metainfo, classes) + + transforms = [] + for transform in pipeline: + if isinstance(transform, dict): + transforms.append(TRANSFORMS.build(transform)) + else: + transforms.append(transform) + + super().__init__( + ann_file=ann_file, + metainfo=metainfo, + data_root=data_root, + data_prefix=data_prefix, + filter_cfg=filter_cfg, + indices=indices, + serialize_data=serialize_data, + pipeline=transforms, + test_mode=test_mode, + lazy_init=lazy_init, + max_refetch=max_refetch) + + @property + def img_prefix(self): + """The prefix of images.""" + return self.data_prefix['img_path'] + + @property + def CLASSES(self): + """Return all categories names.""" + return self._metainfo.get('classes', None) + + @property + def class_to_idx(self): + """Map mapping class name to class index. + + Returns: + dict: mapping from class name to class index. + """ + + return {cat: i for i, cat in enumerate(self.CLASSES)} + + def get_gt_labels(self): + """Get all ground-truth labels (categories). + + Returns: + np.ndarray: categories for all images. + """ + + gt_labels = np.array( + [self.get_data_info(i)['gt_label'] for i in range(len(self))]) + return gt_labels + + def get_cat_ids(self, idx: int) -> List[int]: + """Get category id by index. + + Args: + idx (int): Index of data. + + Returns: + cat_ids (List[int]): Image category of specified index. 
+ """ + + return [int(self.get_data_info(idx)['gt_label'])] + + def _compat_classes(self, metainfo, classes): + """Merge the old style ``classes`` arguments to ``metainfo``.""" + if isinstance(classes, str): + # take it as a file path + class_names = mmengine.list_from_file(expanduser(classes)) + elif isinstance(classes, (tuple, list)): + class_names = classes + elif classes is not None: + raise ValueError(f'Unsupported type {type(classes)} of classes.') + + if metainfo is None: + metainfo = {} + + if classes is not None: + metainfo = {'classes': tuple(class_names), **metainfo} + + return metainfo + + def full_init(self): + """Load annotation file and set ``BaseDataset._fully_initialized`` to + True.""" + super().full_init() + + # To support the standard OpenMMLab 2.0 annotation format. Generate + # metainfo in internal format from standard metainfo format. + if 'categories' in self._metainfo and 'classes' not in self._metainfo: + categories = sorted( + self._metainfo['categories'], key=lambda x: x['id']) + self._metainfo['classes'] = tuple( + [cat['category_name'] for cat in categories]) + + def __repr__(self): + """Print the basic information of the dataset. + + Returns: + str: Formatted string. + """ + head = 'Dataset ' + self.__class__.__name__ + body = [] + if self._fully_initialized: + body.append(f'Number of samples: \t{self.__len__()}') + else: + body.append("Haven't been initialized") + + if self.CLASSES is not None: + body.append(f'Number of categories: \t{len(self.CLASSES)}') + + body.extend(self.extra_repr()) + + if len(self.pipeline.transforms) > 0: + body.append('With transforms:') + for t in self.pipeline.transforms: + body.append(f' {t}') + + lines = [head] + [' ' * 4 + line for line in body] + return '\n'.join(lines) + + def extra_repr(self) -> List[str]: + """The extra repr information of the dataset.""" + body = [] + body.append(f'Annotation file: \t{self.ann_file}') + body.append(f'Prefix of images: \t{self.img_prefix}') + return body diff --git a/mmpretrain/datasets/builder.py b/mmpretrain/datasets/builder.py new file mode 100644 index 0000000000000000000000000000000000000000..dfa3872fe9931a4946368f07dfc5f5913a3e1f9f --- /dev/null +++ b/mmpretrain/datasets/builder.py @@ -0,0 +1,25 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from mmpretrain.registry import DATASETS + + +def build_dataset(cfg): + """Build dataset. + + Examples: + >>> from mmpretrain.datasets import build_dataset + >>> mnist_train = build_dataset( + ... dict(type='MNIST', data_prefix='data/mnist/', test_mode=False)) + >>> print(mnist_train) + Dataset MNIST + Number of samples: 60000 + Number of categories: 10 + Prefix of data: data/mnist/ + >>> mnist_test = build_dataset( + ... dict(type='MNIST', data_prefix='data/mnist/', test_mode=True)) + >>> print(mnist_test) + Dataset MNIST + Number of samples: 10000 + Number of categories: 10 + Prefix of data: data/mnist/ + """ + return DATASETS.build(cfg) diff --git a/mmpretrain/datasets/caltech101.py b/mmpretrain/datasets/caltech101.py new file mode 100644 index 0000000000000000000000000000000000000000..71e5de85ff3bbf73c387a071f47113b46be36e2a --- /dev/null +++ b/mmpretrain/datasets/caltech101.py @@ -0,0 +1,113 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
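Before the ``Caltech101`` implementation below, a brief sketch of the convenience accessors that ``BaseDataset`` layers on top of the ``mmengine`` base class, driven through the ``build_dataset`` helper from the builder module above. It assumes the MNIST data is available under ``data/mnist/`` exactly as in the ``build_dataset`` docstring example; the commented values are only indicative.

from mmpretrain.datasets import build_dataset

# Mirrors the MNIST example in the build_dataset docstring.
mnist_test = build_dataset(
    dict(type='MNIST', data_prefix='data/mnist/', test_mode=True))

print(mnist_test.CLASSES)         # tuple with the 10 digit class names
print(mnist_test.class_to_idx)    # mapping from class name to class index
print(mnist_test.get_cat_ids(0))  # category id of the first sample, e.g. [7]
labels = mnist_test.get_gt_labels()  # np.ndarray, one label per sample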
+from typing import List + +from mmengine import get_file_backend, list_from_file + +from mmpretrain.registry import DATASETS +from .base_dataset import BaseDataset +from .categories import CALTECH101_CATEGORIES + + +@DATASETS.register_module() +class Caltech101(BaseDataset): + """The Caltech101 Dataset. + + Support the `Caltech101 `_ Dataset. + After downloading and decompression, the dataset directory structure is as follows. + + Caltech101 dataset directory: :: + + caltech-101 + ├── 101_ObjectCategories + │ ├── class_x + │ │ ├── xx1.jpg + │ │ ├── xx2.jpg + │ │ └── ... + │ ├── class_y + │ │ ├── yy1.jpg + │ │ ├── yy2.jpg + │ │ └── ... + │ └── ... + ├── Annotations + │ ├── class_x + │ │ ├── xx1.mat + │ │ └── ... + │ └── ... + ├── meta + │ ├── train.txt + │ └── test.txt + └── .... + + Please note that since there is no official splitting for training and + test set, you can use the train.txt and text.txt provided by us or + create your own annotation files. Here is the download + `link `_ + for the annotations. + + Args: + data_root (str): The root directory for the Caltech101 dataset. + split (str, optional): The dataset split, supports "train" and "test". + Default to "train". + + Examples: + >>> from mmpretrain.datasets import Caltech101 + >>> train_dataset = Caltech101(data_root='data/caltech-101', split='train') + >>> train_dataset + Dataset Caltech101 + Number of samples: 3060 + Number of categories: 102 + Root of dataset: data/caltech-101 + >>> test_dataset = Caltech101(data_root='data/caltech-101', split='test') + >>> test_dataset + Dataset Caltech101 + Number of samples: 6728 + Number of categories: 102 + Root of dataset: data/caltech-101 + """ # noqa: E501 + + METAINFO = {'classes': CALTECH101_CATEGORIES} + + def __init__(self, data_root: str, split: str = 'train', **kwargs): + + splits = ['train', 'test'] + assert split in splits, \ + f"The split must be one of {splits}, but get '{split}'" + self.split = split + + self.backend = get_file_backend(data_root, enable_singleton=True) + + if split == 'train': + ann_file = self.backend.join_path('meta', 'train.txt') + else: + ann_file = self.backend.join_path('meta', 'test.txt') + + data_prefix = '101_ObjectCategories' + test_mode = split == 'test' + + super(Caltech101, self).__init__( + ann_file=ann_file, + data_root=data_root, + data_prefix=data_prefix, + test_mode=test_mode, + **kwargs) + + def load_data_list(self): + """Load images and ground truth labels.""" + + pairs = list_from_file(self.ann_file) + data_list = [] + + for pair in pairs: + path, gt_label = pair.split() + img_path = self.backend.join_path(self.img_prefix, path) + info = dict(img_path=img_path, gt_label=int(gt_label)) + data_list.append(info) + + return data_list + + def extra_repr(self) -> List[str]: + """The extra repr information of the dataset.""" + body = [ + f'Root of dataset: \t{self.data_root}', + ] + return body diff --git a/mmpretrain/datasets/categories.py b/mmpretrain/datasets/categories.py new file mode 100644 index 0000000000000000000000000000000000000000..011ee5c1609ee01614c485abfa69cf0d4fc35417 --- /dev/null +++ b/mmpretrain/datasets/categories.py @@ -0,0 +1,1440 @@ +# Copyright (c) OpenMMLab. All rights reserved. +# Pre-defined categories names of various datasets. 
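These category tuples are consumed as the ``classes`` entry of a dataset's ``METAINFO``, exactly as ``Caltech101`` does above with ``CALTECH101_CATEGORIES``. A small sketch of the same pattern for a hypothetical dataset (``MyVOCStyleDataset`` is an illustrative name, not part of the codebase) reusing ``VOC2007_CATEGORIES`` defined just below:

from mmpretrain.datasets import MultiLabelDataset
from mmpretrain.datasets.categories import VOC2007_CATEGORIES
from mmpretrain.registry import DATASETS


@DATASETS.register_module()
class MyVOCStyleDataset(MultiLabelDataset):
    """Hypothetical multi-label dataset reusing the VOC-2007 class names."""

    METAINFO = {'classes': VOC2007_CATEGORIES}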
+ +VOC2007_CATEGORIES = ('aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus', + 'car', 'cat', 'chair', 'cow', 'diningtable', 'dog', + 'horse', 'motorbike', 'person', 'pottedplant', 'sheep', + 'sofa', 'train', 'tvmonitor') + +CUB_CATEGORIES = ( + 'Black_footed_Albatross', 'Laysan_Albatross', 'Sooty_Albatross', + 'Groove_billed_Ani', 'Crested_Auklet', 'Least_Auklet', 'Parakeet_Auklet', + 'Rhinoceros_Auklet', 'Brewer_Blackbird', 'Red_winged_Blackbird', + 'Rusty_Blackbird', 'Yellow_headed_Blackbird', 'Bobolink', 'Indigo_Bunting', + 'Lazuli_Bunting', 'Painted_Bunting', 'Cardinal', 'Spotted_Catbird', + 'Gray_Catbird', 'Yellow_breasted_Chat', 'Eastern_Towhee', + 'Chuck_will_Widow', 'Brandt_Cormorant', 'Red_faced_Cormorant', + 'Pelagic_Cormorant', 'Bronzed_Cowbird', 'Shiny_Cowbird', 'Brown_Creeper', + 'American_Crow', 'Fish_Crow', 'Black_billed_Cuckoo', 'Mangrove_Cuckoo', + 'Yellow_billed_Cuckoo', 'Gray_crowned_Rosy_Finch', 'Purple_Finch', + 'Northern_Flicker', 'Acadian_Flycatcher', 'Great_Crested_Flycatcher', + 'Least_Flycatcher', 'Olive_sided_Flycatcher', 'Scissor_tailed_Flycatcher', + 'Vermilion_Flycatcher', 'Yellow_bellied_Flycatcher', 'Frigatebird', + 'Northern_Fulmar', 'Gadwall', 'American_Goldfinch', 'European_Goldfinch', + 'Boat_tailed_Grackle', 'Eared_Grebe', 'Horned_Grebe', 'Pied_billed_Grebe', + 'Western_Grebe', 'Blue_Grosbeak', 'Evening_Grosbeak', 'Pine_Grosbeak', + 'Rose_breasted_Grosbeak', 'Pigeon_Guillemot', 'California_Gull', + 'Glaucous_winged_Gull', 'Heermann_Gull', 'Herring_Gull', 'Ivory_Gull', + 'Ring_billed_Gull', 'Slaty_backed_Gull', 'Western_Gull', + 'Anna_Hummingbird', 'Ruby_throated_Hummingbird', 'Rufous_Hummingbird', + 'Green_Violetear', 'Long_tailed_Jaeger', 'Pomarine_Jaeger', 'Blue_Jay', + 'Florida_Jay', 'Green_Jay', 'Dark_eyed_Junco', 'Tropical_Kingbird', + 'Gray_Kingbird', 'Belted_Kingfisher', 'Green_Kingfisher', + 'Pied_Kingfisher', 'Ringed_Kingfisher', 'White_breasted_Kingfisher', + 'Red_legged_Kittiwake', 'Horned_Lark', 'Pacific_Loon', 'Mallard', + 'Western_Meadowlark', 'Hooded_Merganser', 'Red_breasted_Merganser', + 'Mockingbird', 'Nighthawk', 'Clark_Nutcracker', 'White_breasted_Nuthatch', + 'Baltimore_Oriole', 'Hooded_Oriole', 'Orchard_Oriole', 'Scott_Oriole', + 'Ovenbird', 'Brown_Pelican', 'White_Pelican', 'Western_Wood_Pewee', + 'Sayornis', 'American_Pipit', 'Whip_poor_Will', 'Horned_Puffin', + 'Common_Raven', 'White_necked_Raven', 'American_Redstart', 'Geococcyx', + 'Loggerhead_Shrike', 'Great_Grey_Shrike', 'Baird_Sparrow', + 'Black_throated_Sparrow', 'Brewer_Sparrow', 'Chipping_Sparrow', + 'Clay_colored_Sparrow', 'House_Sparrow', 'Field_Sparrow', 'Fox_Sparrow', + 'Grasshopper_Sparrow', 'Harris_Sparrow', 'Henslow_Sparrow', + 'Le_Conte_Sparrow', 'Lincoln_Sparrow', 'Nelson_Sharp_tailed_Sparrow', + 'Savannah_Sparrow', 'Seaside_Sparrow', 'Song_Sparrow', 'Tree_Sparrow', + 'Vesper_Sparrow', 'White_crowned_Sparrow', 'White_throated_Sparrow', + 'Cape_Glossy_Starling', 'Bank_Swallow', 'Barn_Swallow', 'Cliff_Swallow', + 'Tree_Swallow', 'Scarlet_Tanager', 'Summer_Tanager', 'Artic_Tern', + 'Black_Tern', 'Caspian_Tern', 'Common_Tern', 'Elegant_Tern', + 'Forsters_Tern', 'Least_Tern', 'Green_tailed_Towhee', 'Brown_Thrasher', + 'Sage_Thrasher', 'Black_capped_Vireo', 'Blue_headed_Vireo', + 'Philadelphia_Vireo', 'Red_eyed_Vireo', 'Warbling_Vireo', + 'White_eyed_Vireo', 'Yellow_throated_Vireo', 'Bay_breasted_Warbler', + 'Black_and_white_Warbler', 'Black_throated_Blue_Warbler', + 'Blue_winged_Warbler', 'Canada_Warbler', 'Cape_May_Warbler', + 'Cerulean_Warbler', 
'Chestnut_sided_Warbler', 'Golden_winged_Warbler', + 'Hooded_Warbler', 'Kentucky_Warbler', 'Magnolia_Warbler', + 'Mourning_Warbler', 'Myrtle_Warbler', 'Nashville_Warbler', + 'Orange_crowned_Warbler', 'Palm_Warbler', 'Pine_Warbler', + 'Prairie_Warbler', 'Prothonotary_Warbler', 'Swainson_Warbler', + 'Tennessee_Warbler', 'Wilson_Warbler', 'Worm_eating_Warbler', + 'Yellow_Warbler', 'Northern_Waterthrush', 'Louisiana_Waterthrush', + 'Bohemian_Waxwing', 'Cedar_Waxwing', 'American_Three_toed_Woodpecker', + 'Pileated_Woodpecker', 'Red_bellied_Woodpecker', 'Red_cockaded_Woodpecker', + 'Red_headed_Woodpecker', 'Downy_Woodpecker', 'Bewick_Wren', 'Cactus_Wren', + 'Carolina_Wren', 'House_Wren', 'Marsh_Wren', 'Rock_Wren', 'Winter_Wren', + 'Common_Yellowthroat') + +IMAGENET_CATEGORIES = ( + 'tench, Tinca tinca', + 'goldfish, Carassius auratus', + 'great white shark, white shark, man-eater, man-eating shark, Carcharodon carcharias', # noqa: E501 + 'tiger shark, Galeocerdo cuvieri', + 'hammerhead, hammerhead shark', + 'electric ray, crampfish, numbfish, torpedo', + 'stingray', + 'cock', + 'hen', + 'ostrich, Struthio camelus', + 'brambling, Fringilla montifringilla', + 'goldfinch, Carduelis carduelis', + 'house finch, linnet, Carpodacus mexicanus', + 'junco, snowbird', + 'indigo bunting, indigo finch, indigo bird, Passerina cyanea', + 'robin, American robin, Turdus migratorius', + 'bulbul', + 'jay', + 'magpie', + 'chickadee', + 'water ouzel, dipper', + 'kite', + 'bald eagle, American eagle, Haliaeetus leucocephalus', + 'vulture', + 'great grey owl, great gray owl, Strix nebulosa', + 'European fire salamander, Salamandra salamandra', + 'common newt, Triturus vulgaris', + 'eft', + 'spotted salamander, Ambystoma maculatum', + 'axolotl, mud puppy, Ambystoma mexicanum', + 'bullfrog, Rana catesbeiana', + 'tree frog, tree-frog', + 'tailed frog, bell toad, ribbed toad, tailed toad, Ascaphus trui', + 'loggerhead, loggerhead turtle, Caretta caretta', + 'leatherback turtle, leatherback, leathery turtle, Dermochelys coriacea', # noqa: E501 + 'mud turtle', + 'terrapin', + 'box turtle, box tortoise', + 'banded gecko', + 'common iguana, iguana, Iguana iguana', + 'American chameleon, anole, Anolis carolinensis', + 'whiptail, whiptail lizard', + 'agama', + 'frilled lizard, Chlamydosaurus kingi', + 'alligator lizard', + 'Gila monster, Heloderma suspectum', + 'green lizard, Lacerta viridis', + 'African chameleon, Chamaeleo chamaeleon', + 'Komodo dragon, Komodo lizard, dragon lizard, giant lizard, Varanus komodoensis', # noqa: E501 + 'African crocodile, Nile crocodile, Crocodylus niloticus', + 'American alligator, Alligator mississipiensis', + 'triceratops', + 'thunder snake, worm snake, Carphophis amoenus', + 'ringneck snake, ring-necked snake, ring snake', + 'hognose snake, puff adder, sand viper', + 'green snake, grass snake', + 'king snake, kingsnake', + 'garter snake, grass snake', + 'water snake', + 'vine snake', + 'night snake, Hypsiglena torquata', + 'boa constrictor, Constrictor constrictor', + 'rock python, rock snake, Python sebae', + 'Indian cobra, Naja naja', + 'green mamba', + 'sea snake', + 'horned viper, cerastes, sand viper, horned asp, Cerastes cornutus', + 'diamondback, diamondback rattlesnake, Crotalus adamanteus', + 'sidewinder, horned rattlesnake, Crotalus cerastes', + 'trilobite', + 'harvestman, daddy longlegs, Phalangium opilio', + 'scorpion', + 'black and gold garden spider, Argiope aurantia', + 'barn spider, Araneus cavaticus', + 'garden spider, Aranea diademata', + 'black widow, Latrodectus mactans', 
+ 'tarantula', + 'wolf spider, hunting spider', + 'tick', + 'centipede', + 'black grouse', + 'ptarmigan', + 'ruffed grouse, partridge, Bonasa umbellus', + 'prairie chicken, prairie grouse, prairie fowl', + 'peacock', + 'quail', + 'partridge', + 'African grey, African gray, Psittacus erithacus', + 'macaw', + 'sulphur-crested cockatoo, Kakatoe galerita, Cacatua galerita', + 'lorikeet', + 'coucal', + 'bee eater', + 'hornbill', + 'hummingbird', + 'jacamar', + 'toucan', + 'drake', + 'red-breasted merganser, Mergus serrator', + 'goose', + 'black swan, Cygnus atratus', + 'tusker', + 'echidna, spiny anteater, anteater', + 'platypus, duckbill, duckbilled platypus, duck-billed platypus, Ornithorhynchus anatinus', # noqa: E501 + 'wallaby, brush kangaroo', + 'koala, koala bear, kangaroo bear, native bear, Phascolarctos cinereus', # noqa: E501 + 'wombat', + 'jellyfish', + 'sea anemone, anemone', + 'brain coral', + 'flatworm, platyhelminth', + 'nematode, nematode worm, roundworm', + 'conch', + 'snail', + 'slug', + 'sea slug, nudibranch', + 'chiton, coat-of-mail shell, sea cradle, polyplacophore', + 'chambered nautilus, pearly nautilus, nautilus', + 'Dungeness crab, Cancer magister', + 'rock crab, Cancer irroratus', + 'fiddler crab', + 'king crab, Alaska crab, Alaskan king crab, Alaska king crab, Paralithodes camtschatica', # noqa: E501 + 'American lobster, Northern lobster, Maine lobster, Homarus americanus', # noqa: E501 + 'spiny lobster, langouste, rock lobster, crawfish, crayfish, sea crawfish', # noqa: E501 + 'crayfish, crawfish, crawdad, crawdaddy', + 'hermit crab', + 'isopod', + 'white stork, Ciconia ciconia', + 'black stork, Ciconia nigra', + 'spoonbill', + 'flamingo', + 'little blue heron, Egretta caerulea', + 'American egret, great white heron, Egretta albus', + 'bittern', + 'crane', + 'limpkin, Aramus pictus', + 'European gallinule, Porphyrio porphyrio', + 'American coot, marsh hen, mud hen, water hen, Fulica americana', + 'bustard', + 'ruddy turnstone, Arenaria interpres', + 'red-backed sandpiper, dunlin, Erolia alpina', + 'redshank, Tringa totanus', + 'dowitcher', + 'oystercatcher, oyster catcher', + 'pelican', + 'king penguin, Aptenodytes patagonica', + 'albatross, mollymawk', + 'grey whale, gray whale, devilfish, Eschrichtius gibbosus, Eschrichtius robustus', # noqa: E501 + 'killer whale, killer, orca, grampus, sea wolf, Orcinus orca', + 'dugong, Dugong dugon', + 'sea lion', + 'Chihuahua', + 'Japanese spaniel', + 'Maltese dog, Maltese terrier, Maltese', + 'Pekinese, Pekingese, Peke', + 'Shih-Tzu', + 'Blenheim spaniel', + 'papillon', + 'toy terrier', + 'Rhodesian ridgeback', + 'Afghan hound, Afghan', + 'basset, basset hound', + 'beagle', + 'bloodhound, sleuthhound', + 'bluetick', + 'black-and-tan coonhound', + 'Walker hound, Walker foxhound', + 'English foxhound', + 'redbone', + 'borzoi, Russian wolfhound', + 'Irish wolfhound', + 'Italian greyhound', + 'whippet', + 'Ibizan hound, Ibizan Podenco', + 'Norwegian elkhound, elkhound', + 'otterhound, otter hound', + 'Saluki, gazelle hound', + 'Scottish deerhound, deerhound', + 'Weimaraner', + 'Staffordshire bullterrier, Staffordshire bull terrier', + 'American Staffordshire terrier, Staffordshire terrier, American pit bull terrier, pit bull terrier', # noqa: E501 + 'Bedlington terrier', + 'Border terrier', + 'Kerry blue terrier', + 'Irish terrier', + 'Norfolk terrier', + 'Norwich terrier', + 'Yorkshire terrier', + 'wire-haired fox terrier', + 'Lakeland terrier', + 'Sealyham terrier, Sealyham', + 'Airedale, Airedale terrier', + 'cairn, cairn 
terrier', + 'Australian terrier', + 'Dandie Dinmont, Dandie Dinmont terrier', + 'Boston bull, Boston terrier', + 'miniature schnauzer', + 'giant schnauzer', + 'standard schnauzer', + 'Scotch terrier, Scottish terrier, Scottie', + 'Tibetan terrier, chrysanthemum dog', + 'silky terrier, Sydney silky', + 'soft-coated wheaten terrier', + 'West Highland white terrier', + 'Lhasa, Lhasa apso', + 'flat-coated retriever', + 'curly-coated retriever', + 'golden retriever', + 'Labrador retriever', + 'Chesapeake Bay retriever', + 'German short-haired pointer', + 'vizsla, Hungarian pointer', + 'English setter', + 'Irish setter, red setter', + 'Gordon setter', + 'Brittany spaniel', + 'clumber, clumber spaniel', + 'English springer, English springer spaniel', + 'Welsh springer spaniel', + 'cocker spaniel, English cocker spaniel, cocker', + 'Sussex spaniel', + 'Irish water spaniel', + 'kuvasz', + 'schipperke', + 'groenendael', + 'malinois', + 'briard', + 'kelpie', + 'komondor', + 'Old English sheepdog, bobtail', + 'Shetland sheepdog, Shetland sheep dog, Shetland', + 'collie', + 'Border collie', + 'Bouvier des Flandres, Bouviers des Flandres', + 'Rottweiler', + 'German shepherd, German shepherd dog, German police dog, alsatian', + 'Doberman, Doberman pinscher', + 'miniature pinscher', + 'Greater Swiss Mountain dog', + 'Bernese mountain dog', + 'Appenzeller', + 'EntleBucher', + 'boxer', + 'bull mastiff', + 'Tibetan mastiff', + 'French bulldog', + 'Great Dane', + 'Saint Bernard, St Bernard', + 'Eskimo dog, husky', + 'malamute, malemute, Alaskan malamute', + 'Siberian husky', + 'dalmatian, coach dog, carriage dog', + 'affenpinscher, monkey pinscher, monkey dog', + 'basenji', + 'pug, pug-dog', + 'Leonberg', + 'Newfoundland, Newfoundland dog', + 'Great Pyrenees', + 'Samoyed, Samoyede', + 'Pomeranian', + 'chow, chow chow', + 'keeshond', + 'Brabancon griffon', + 'Pembroke, Pembroke Welsh corgi', + 'Cardigan, Cardigan Welsh corgi', + 'toy poodle', + 'miniature poodle', + 'standard poodle', + 'Mexican hairless', + 'timber wolf, grey wolf, gray wolf, Canis lupus', + 'white wolf, Arctic wolf, Canis lupus tundrarum', + 'red wolf, maned wolf, Canis rufus, Canis niger', + 'coyote, prairie wolf, brush wolf, Canis latrans', + 'dingo, warrigal, warragal, Canis dingo', + 'dhole, Cuon alpinus', + 'African hunting dog, hyena dog, Cape hunting dog, Lycaon pictus', + 'hyena, hyaena', + 'red fox, Vulpes vulpes', + 'kit fox, Vulpes macrotis', + 'Arctic fox, white fox, Alopex lagopus', + 'grey fox, gray fox, Urocyon cinereoargenteus', + 'tabby, tabby cat', + 'tiger cat', + 'Persian cat', + 'Siamese cat, Siamese', + 'Egyptian cat', + 'cougar, puma, catamount, mountain lion, painter, panther, Felis concolor', # noqa: E501 + 'lynx, catamount', + 'leopard, Panthera pardus', + 'snow leopard, ounce, Panthera uncia', + 'jaguar, panther, Panthera onca, Felis onca', + 'lion, king of beasts, Panthera leo', + 'tiger, Panthera tigris', + 'cheetah, chetah, Acinonyx jubatus', + 'brown bear, bruin, Ursus arctos', + 'American black bear, black bear, Ursus americanus, Euarctos americanus', # noqa: E501 + 'ice bear, polar bear, Ursus Maritimus, Thalarctos maritimus', + 'sloth bear, Melursus ursinus, Ursus ursinus', + 'mongoose', + 'meerkat, mierkat', + 'tiger beetle', + 'ladybug, ladybeetle, lady beetle, ladybird, ladybird beetle', + 'ground beetle, carabid beetle', + 'long-horned beetle, longicorn, longicorn beetle', + 'leaf beetle, chrysomelid', + 'dung beetle', + 'rhinoceros beetle', + 'weevil', + 'fly', + 'bee', + 'ant, emmet, pismire', + 
'grasshopper, hopper', + 'cricket', + 'walking stick, walkingstick, stick insect', + 'cockroach, roach', + 'mantis, mantid', + 'cicada, cicala', + 'leafhopper', + 'lacewing, lacewing fly', + "dragonfly, darning needle, devil's darning needle, sewing needle, snake feeder, snake doctor, mosquito hawk, skeeter hawk", # noqa: E501 + 'damselfly', + 'admiral', + 'ringlet, ringlet butterfly', + 'monarch, monarch butterfly, milkweed butterfly, Danaus plexippus', + 'cabbage butterfly', + 'sulphur butterfly, sulfur butterfly', + 'lycaenid, lycaenid butterfly', + 'starfish, sea star', + 'sea urchin', + 'sea cucumber, holothurian', + 'wood rabbit, cottontail, cottontail rabbit', + 'hare', + 'Angora, Angora rabbit', + 'hamster', + 'porcupine, hedgehog', + 'fox squirrel, eastern fox squirrel, Sciurus niger', + 'marmot', + 'beaver', + 'guinea pig, Cavia cobaya', + 'sorrel', + 'zebra', + 'hog, pig, grunter, squealer, Sus scrofa', + 'wild boar, boar, Sus scrofa', + 'warthog', + 'hippopotamus, hippo, river horse, Hippopotamus amphibius', + 'ox', + 'water buffalo, water ox, Asiatic buffalo, Bubalus bubalis', + 'bison', + 'ram, tup', + 'bighorn, bighorn sheep, cimarron, Rocky Mountain bighorn, Rocky Mountain sheep, Ovis canadensis', # noqa: E501 + 'ibex, Capra ibex', + 'hartebeest', + 'impala, Aepyceros melampus', + 'gazelle', + 'Arabian camel, dromedary, Camelus dromedarius', + 'llama', + 'weasel', + 'mink', + 'polecat, fitch, foulmart, foumart, Mustela putorius', + 'black-footed ferret, ferret, Mustela nigripes', + 'otter', + 'skunk, polecat, wood pussy', + 'badger', + 'armadillo', + 'three-toed sloth, ai, Bradypus tridactylus', + 'orangutan, orang, orangutang, Pongo pygmaeus', + 'gorilla, Gorilla gorilla', + 'chimpanzee, chimp, Pan troglodytes', + 'gibbon, Hylobates lar', + 'siamang, Hylobates syndactylus, Symphalangus syndactylus', + 'guenon, guenon monkey', + 'patas, hussar monkey, Erythrocebus patas', + 'baboon', + 'macaque', + 'langur', + 'colobus, colobus monkey', + 'proboscis monkey, Nasalis larvatus', + 'marmoset', + 'capuchin, ringtail, Cebus capucinus', + 'howler monkey, howler', + 'titi, titi monkey', + 'spider monkey, Ateles geoffroyi', + 'squirrel monkey, Saimiri sciureus', + 'Madagascar cat, ring-tailed lemur, Lemur catta', + 'indri, indris, Indri indri, Indri brevicaudatus', + 'Indian elephant, Elephas maximus', + 'African elephant, Loxodonta africana', + 'lesser panda, red panda, panda, bear cat, cat bear, Ailurus fulgens', + 'giant panda, panda, panda bear, coon bear, Ailuropoda melanoleuca', + 'barracouta, snoek', + 'eel', + 'coho, cohoe, coho salmon, blue jack, silver salmon, Oncorhynchus kisutch', # noqa: E501 + 'rock beauty, Holocanthus tricolor', + 'anemone fish', + 'sturgeon', + 'gar, garfish, garpike, billfish, Lepisosteus osseus', + 'lionfish', + 'puffer, pufferfish, blowfish, globefish', + 'abacus', + 'abaya', + "academic gown, academic robe, judge's robe", + 'accordion, piano accordion, squeeze box', + 'acoustic guitar', + 'aircraft carrier, carrier, flattop, attack aircraft carrier', + 'airliner', + 'airship, dirigible', + 'altar', + 'ambulance', + 'amphibian, amphibious vehicle', + 'analog clock', + 'apiary, bee house', + 'apron', + 'ashcan, trash can, garbage can, wastebin, ash bin, ash-bin, ashbin, dustbin, trash barrel, trash bin', # noqa: E501 + 'assault rifle, assault gun', + 'backpack, back pack, knapsack, packsack, rucksack, haversack', + 'bakery, bakeshop, bakehouse', + 'balance beam, beam', + 'balloon', + 'ballpoint, ballpoint pen, ballpen, Biro', + 'Band Aid', + 
'banjo', + 'bannister, banister, balustrade, balusters, handrail', + 'barbell', + 'barber chair', + 'barbershop', + 'barn', + 'barometer', + 'barrel, cask', + 'barrow, garden cart, lawn cart, wheelbarrow', + 'baseball', + 'basketball', + 'bassinet', + 'bassoon', + 'bathing cap, swimming cap', + 'bath towel', + 'bathtub, bathing tub, bath, tub', + 'beach wagon, station wagon, wagon, estate car, beach waggon, station waggon, waggon', # noqa: E501 + 'beacon, lighthouse, beacon light, pharos', + 'beaker', + 'bearskin, busby, shako', + 'beer bottle', + 'beer glass', + 'bell cote, bell cot', + 'bib', + 'bicycle-built-for-two, tandem bicycle, tandem', + 'bikini, two-piece', + 'binder, ring-binder', + 'binoculars, field glasses, opera glasses', + 'birdhouse', + 'boathouse', + 'bobsled, bobsleigh, bob', + 'bolo tie, bolo, bola tie, bola', + 'bonnet, poke bonnet', + 'bookcase', + 'bookshop, bookstore, bookstall', + 'bottlecap', + 'bow', + 'bow tie, bow-tie, bowtie', + 'brass, memorial tablet, plaque', + 'brassiere, bra, bandeau', + 'breakwater, groin, groyne, mole, bulwark, seawall, jetty', + 'breastplate, aegis, egis', + 'broom', + 'bucket, pail', + 'buckle', + 'bulletproof vest', + 'bullet train, bullet', + 'butcher shop, meat market', + 'cab, hack, taxi, taxicab', + 'caldron, cauldron', + 'candle, taper, wax light', + 'cannon', + 'canoe', + 'can opener, tin opener', + 'cardigan', + 'car mirror', + 'carousel, carrousel, merry-go-round, roundabout, whirligig', + "carpenter's kit, tool kit", + 'carton', + 'car wheel', + 'cash machine, cash dispenser, automated teller machine, automatic teller machine, automated teller, automatic teller, ATM', # noqa: E501 + 'cassette', + 'cassette player', + 'castle', + 'catamaran', + 'CD player', + 'cello, violoncello', + 'cellular telephone, cellular phone, cellphone, cell, mobile phone', + 'chain', + 'chainlink fence', + 'chain mail, ring mail, mail, chain armor, chain armour, ring armor, ring armour', # noqa: E501 + 'chain saw, chainsaw', + 'chest', + 'chiffonier, commode', + 'chime, bell, gong', + 'china cabinet, china closet', + 'Christmas stocking', + 'church, church building', + 'cinema, movie theater, movie theatre, movie house, picture palace', + 'cleaver, meat cleaver, chopper', + 'cliff dwelling', + 'cloak', + 'clog, geta, patten, sabot', + 'cocktail shaker', + 'coffee mug', + 'coffeepot', + 'coil, spiral, volute, whorl, helix', + 'combination lock', + 'computer keyboard, keypad', + 'confectionery, confectionary, candy store', + 'container ship, containership, container vessel', + 'convertible', + 'corkscrew, bottle screw', + 'cornet, horn, trumpet, trump', + 'cowboy boot', + 'cowboy hat, ten-gallon hat', + 'cradle', + 'crane', + 'crash helmet', + 'crate', + 'crib, cot', + 'Crock Pot', + 'croquet ball', + 'crutch', + 'cuirass', + 'dam, dike, dyke', + 'desk', + 'desktop computer', + 'dial telephone, dial phone', + 'diaper, nappy, napkin', + 'digital clock', + 'digital watch', + 'dining table, board', + 'dishrag, dishcloth', + 'dishwasher, dish washer, dishwashing machine', + 'disk brake, disc brake', + 'dock, dockage, docking facility', + 'dogsled, dog sled, dog sleigh', + 'dome', + 'doormat, welcome mat', + 'drilling platform, offshore rig', + 'drum, membranophone, tympan', + 'drumstick', + 'dumbbell', + 'Dutch oven', + 'electric fan, blower', + 'electric guitar', + 'electric locomotive', + 'entertainment center', + 'envelope', + 'espresso maker', + 'face powder', + 'feather boa, boa', + 'file, file cabinet, filing cabinet', + 'fireboat', + 'fire engine, 
fire truck', + 'fire screen, fireguard', + 'flagpole, flagstaff', + 'flute, transverse flute', + 'folding chair', + 'football helmet', + 'forklift', + 'fountain', + 'fountain pen', + 'four-poster', + 'freight car', + 'French horn, horn', + 'frying pan, frypan, skillet', + 'fur coat', + 'garbage truck, dustcart', + 'gasmask, respirator, gas helmet', + 'gas pump, gasoline pump, petrol pump, island dispenser', + 'goblet', + 'go-kart', + 'golf ball', + 'golfcart, golf cart', + 'gondola', + 'gong, tam-tam', + 'gown', + 'grand piano, grand', + 'greenhouse, nursery, glasshouse', + 'grille, radiator grille', + 'grocery store, grocery, food market, market', + 'guillotine', + 'hair slide', + 'hair spray', + 'half track', + 'hammer', + 'hamper', + 'hand blower, blow dryer, blow drier, hair dryer, hair drier', + 'hand-held computer, hand-held microcomputer', + 'handkerchief, hankie, hanky, hankey', + 'hard disc, hard disk, fixed disk', + 'harmonica, mouth organ, harp, mouth harp', + 'harp', + 'harvester, reaper', + 'hatchet', + 'holster', + 'home theater, home theatre', + 'honeycomb', + 'hook, claw', + 'hoopskirt, crinoline', + 'horizontal bar, high bar', + 'horse cart, horse-cart', + 'hourglass', + 'iPod', + 'iron, smoothing iron', + "jack-o'-lantern", + 'jean, blue jean, denim', + 'jeep, landrover', + 'jersey, T-shirt, tee shirt', + 'jigsaw puzzle', + 'jinrikisha, ricksha, rickshaw', + 'joystick', + 'kimono', + 'knee pad', + 'knot', + 'lab coat, laboratory coat', + 'ladle', + 'lampshade, lamp shade', + 'laptop, laptop computer', + 'lawn mower, mower', + 'lens cap, lens cover', + 'letter opener, paper knife, paperknife', + 'library', + 'lifeboat', + 'lighter, light, igniter, ignitor', + 'limousine, limo', + 'liner, ocean liner', + 'lipstick, lip rouge', + 'Loafer', + 'lotion', + 'loudspeaker, speaker, speaker unit, loudspeaker system, speaker system', # noqa: E501 + "loupe, jeweler's loupe", + 'lumbermill, sawmill', + 'magnetic compass', + 'mailbag, postbag', + 'mailbox, letter box', + 'maillot', + 'maillot, tank suit', + 'manhole cover', + 'maraca', + 'marimba, xylophone', + 'mask', + 'matchstick', + 'maypole', + 'maze, labyrinth', + 'measuring cup', + 'medicine chest, medicine cabinet', + 'megalith, megalithic structure', + 'microphone, mike', + 'microwave, microwave oven', + 'military uniform', + 'milk can', + 'minibus', + 'miniskirt, mini', + 'minivan', + 'missile', + 'mitten', + 'mixing bowl', + 'mobile home, manufactured home', + 'Model T', + 'modem', + 'monastery', + 'monitor', + 'moped', + 'mortar', + 'mortarboard', + 'mosque', + 'mosquito net', + 'motor scooter, scooter', + 'mountain bike, all-terrain bike, off-roader', + 'mountain tent', + 'mouse, computer mouse', + 'mousetrap', + 'moving van', + 'muzzle', + 'nail', + 'neck brace', + 'necklace', + 'nipple', + 'notebook, notebook computer', + 'obelisk', + 'oboe, hautboy, hautbois', + 'ocarina, sweet potato', + 'odometer, hodometer, mileometer, milometer', + 'oil filter', + 'organ, pipe organ', + 'oscilloscope, scope, cathode-ray oscilloscope, CRO', + 'overskirt', + 'oxcart', + 'oxygen mask', + 'packet', + 'paddle, boat paddle', + 'paddlewheel, paddle wheel', + 'padlock', + 'paintbrush', + "pajama, pyjama, pj's, jammies", + 'palace', + 'panpipe, pandean pipe, syrinx', + 'paper towel', + 'parachute, chute', + 'parallel bars, bars', + 'park bench', + 'parking meter', + 'passenger car, coach, carriage', + 'patio, terrace', + 'pay-phone, pay-station', + 'pedestal, plinth, footstall', + 'pencil box, pencil case', + 'pencil sharpener', + 'perfume, 
essence', + 'Petri dish', + 'photocopier', + 'pick, plectrum, plectron', + 'pickelhaube', + 'picket fence, paling', + 'pickup, pickup truck', + 'pier', + 'piggy bank, penny bank', + 'pill bottle', + 'pillow', + 'ping-pong ball', + 'pinwheel', + 'pirate, pirate ship', + 'pitcher, ewer', + "plane, carpenter's plane, woodworking plane", + 'planetarium', + 'plastic bag', + 'plate rack', + 'plow, plough', + "plunger, plumber's helper", + 'Polaroid camera, Polaroid Land camera', + 'pole', + 'police van, police wagon, paddy wagon, patrol wagon, wagon, black Maria', # noqa: E501 + 'poncho', + 'pool table, billiard table, snooker table', + 'pop bottle, soda bottle', + 'pot, flowerpot', + "potter's wheel", + 'power drill', + 'prayer rug, prayer mat', + 'printer', + 'prison, prison house', + 'projectile, missile', + 'projector', + 'puck, hockey puck', + 'punching bag, punch bag, punching ball, punchball', + 'purse', + 'quill, quill pen', + 'quilt, comforter, comfort, puff', + 'racer, race car, racing car', + 'racket, racquet', + 'radiator', + 'radio, wireless', + 'radio telescope, radio reflector', + 'rain barrel', + 'recreational vehicle, RV, R.V.', + 'reel', + 'reflex camera', + 'refrigerator, icebox', + 'remote control, remote', + 'restaurant, eating house, eating place, eatery', + 'revolver, six-gun, six-shooter', + 'rifle', + 'rocking chair, rocker', + 'rotisserie', + 'rubber eraser, rubber, pencil eraser', + 'rugby ball', + 'rule, ruler', + 'running shoe', + 'safe', + 'safety pin', + 'saltshaker, salt shaker', + 'sandal', + 'sarong', + 'sax, saxophone', + 'scabbard', + 'scale, weighing machine', + 'school bus', + 'schooner', + 'scoreboard', + 'screen, CRT screen', + 'screw', + 'screwdriver', + 'seat belt, seatbelt', + 'sewing machine', + 'shield, buckler', + 'shoe shop, shoe-shop, shoe store', + 'shoji', + 'shopping basket', + 'shopping cart', + 'shovel', + 'shower cap', + 'shower curtain', + 'ski', + 'ski mask', + 'sleeping bag', + 'slide rule, slipstick', + 'sliding door', + 'slot, one-armed bandit', + 'snorkel', + 'snowmobile', + 'snowplow, snowplough', + 'soap dispenser', + 'soccer ball', + 'sock', + 'solar dish, solar collector, solar furnace', + 'sombrero', + 'soup bowl', + 'space bar', + 'space heater', + 'space shuttle', + 'spatula', + 'speedboat', + "spider web, spider's web", + 'spindle', + 'sports car, sport car', + 'spotlight, spot', + 'stage', + 'steam locomotive', + 'steel arch bridge', + 'steel drum', + 'stethoscope', + 'stole', + 'stone wall', + 'stopwatch, stop watch', + 'stove', + 'strainer', + 'streetcar, tram, tramcar, trolley, trolley car', + 'stretcher', + 'studio couch, day bed', + 'stupa, tope', + 'submarine, pigboat, sub, U-boat', + 'suit, suit of clothes', + 'sundial', + 'sunglass', + 'sunglasses, dark glasses, shades', + 'sunscreen, sunblock, sun blocker', + 'suspension bridge', + 'swab, swob, mop', + 'sweatshirt', + 'swimming trunks, bathing trunks', + 'swing', + 'switch, electric switch, electrical switch', + 'syringe', + 'table lamp', + 'tank, army tank, armored combat vehicle, armoured combat vehicle', + 'tape player', + 'teapot', + 'teddy, teddy bear', + 'television, television system', + 'tennis ball', + 'thatch, thatched roof', + 'theater curtain, theatre curtain', + 'thimble', + 'thresher, thrasher, threshing machine', + 'throne', + 'tile roof', + 'toaster', + 'tobacco shop, tobacconist shop, tobacconist', + 'toilet seat', + 'torch', + 'totem pole', + 'tow truck, tow car, wrecker', + 'toyshop', + 'tractor', + 'trailer truck, tractor trailer, trucking rig, rig, 
articulated lorry, semi', # noqa: E501 + 'tray', + 'trench coat', + 'tricycle, trike, velocipede', + 'trimaran', + 'tripod', + 'triumphal arch', + 'trolleybus, trolley coach, trackless trolley', + 'trombone', + 'tub, vat', + 'turnstile', + 'typewriter keyboard', + 'umbrella', + 'unicycle, monocycle', + 'upright, upright piano', + 'vacuum, vacuum cleaner', + 'vase', + 'vault', + 'velvet', + 'vending machine', + 'vestment', + 'viaduct', + 'violin, fiddle', + 'volleyball', + 'waffle iron', + 'wall clock', + 'wallet, billfold, notecase, pocketbook', + 'wardrobe, closet, press', + 'warplane, military plane', + 'washbasin, handbasin, washbowl, lavabo, wash-hand basin', + 'washer, automatic washer, washing machine', + 'water bottle', + 'water jug', + 'water tower', + 'whiskey jug', + 'whistle', + 'wig', + 'window screen', + 'window shade', + 'Windsor tie', + 'wine bottle', + 'wing', + 'wok', + 'wooden spoon', + 'wool, woolen, woollen', + 'worm fence, snake fence, snake-rail fence, Virginia fence', + 'wreck', + 'yawl', + 'yurt', + 'web site, website, internet site, site', + 'comic book', + 'crossword puzzle, crossword', + 'street sign', + 'traffic light, traffic signal, stoplight', + 'book jacket, dust cover, dust jacket, dust wrapper', + 'menu', + 'plate', + 'guacamole', + 'consomme', + 'hot pot, hotpot', + 'trifle', + 'ice cream, icecream', + 'ice lolly, lolly, lollipop, popsicle', + 'French loaf', + 'bagel, beigel', + 'pretzel', + 'cheeseburger', + 'hotdog, hot dog, red hot', + 'mashed potato', + 'head cabbage', + 'broccoli', + 'cauliflower', + 'zucchini, courgette', + 'spaghetti squash', + 'acorn squash', + 'butternut squash', + 'cucumber, cuke', + 'artichoke, globe artichoke', + 'bell pepper', + 'cardoon', + 'mushroom', + 'Granny Smith', + 'strawberry', + 'orange', + 'lemon', + 'fig', + 'pineapple, ananas', + 'banana', + 'jackfruit, jak, jack', + 'custard apple', + 'pomegranate', + 'hay', + 'carbonara', + 'chocolate sauce, chocolate syrup', + 'dough', + 'meat loaf, meatloaf', + 'pizza, pizza pie', + 'potpie', + 'burrito', + 'red wine', + 'espresso', + 'cup', + 'eggnog', + 'alp', + 'bubble', + 'cliff, drop, drop-off', + 'coral reef', + 'geyser', + 'lakeside, lakeshore', + 'promontory, headland, head, foreland', + 'sandbar, sand bar', + 'seashore, coast, seacoast, sea-coast', + 'valley, vale', + 'volcano', + 'ballplayer, baseball player', + 'groom, bridegroom', + 'scuba diver', + 'rapeseed', + 'daisy', + "yellow lady's slipper, yellow lady-slipper, Cypripedium calceolus, Cypripedium parviflorum", # noqa: E501 + 'corn', + 'acorn', + 'hip, rose hip, rosehip', + 'buckeye, horse chestnut, conker', + 'coral fungus', + 'agaric', + 'gyromitra', + 'stinkhorn, carrion fungus', + 'earthstar', + 'hen-of-the-woods, hen of the woods, Polyporus frondosus, Grifola frondosa', # noqa: E501 + 'bolete', + 'ear, spike, capitulum', + 'toilet tissue, toilet paper, bathroom tissue') + +CIFAR10_CATEGORIES = ('airplane', 'automobile', 'bird', 'cat', 'deer', 'dog', + 'frog', 'horse', 'ship', 'truck') + +CIFAR100_CATEGORIES = ( + 'apple', 'aquarium_fish', 'baby', 'bear', 'beaver', 'bed', 'bee', 'beetle', + 'bicycle', 'bottle', 'bowl', 'boy', 'bridge', 'bus', 'butterfly', 'camel', + 'can', 'castle', 'caterpillar', 'cattle', 'chair', 'chimpanzee', 'clock', + 'cloud', 'cockroach', 'couch', 'crab', 'crocodile', 'cup', 'dinosaur', + 'dolphin', 'elephant', 'flatfish', 'forest', 'fox', 'girl', 'hamster', + 'house', 'kangaroo', 'keyboard', 'lamp', 'lawn_mower', 'leopard', 'lion', + 'lizard', 'lobster', 'man', 'maple_tree', 
'motorcycle', 'mountain', + 'mouse', 'mushroom', 'oak_tree', 'orange', 'orchid', 'otter', 'palm_tree', + 'pear', 'pickup_truck', 'pine_tree', 'plain', 'plate', 'poppy', + 'porcupine', 'possum', 'rabbit', 'raccoon', 'ray', 'road', 'rocket', + 'rose', 'sea', 'seal', 'shark', 'shrew', 'skunk', 'skyscraper', 'snail', + 'snake', 'spider', 'squirrel', 'streetcar', 'sunflower', 'sweet_pepper', + 'table', 'tank', 'telephone', 'television', 'tiger', 'tractor', 'train', + 'trout', 'tulip', 'turtle', 'wardrobe', 'whale', 'willow_tree', 'wolf', + 'woman', 'worm') + +MNIST_CATEGORITES = ('0 - zero', '1 - one', '2 - two', '3 - three', '4 - four', + '5 - five', '6 - six', '7 - seven', '8 - eight', + '9 - nine') + +FASHIONMNIST_CATEGORITES = ('T-shirt/top', 'Trouser', 'Pullover', 'Dress', + 'Coat', 'Sandal', 'Shirt', 'Sneaker', 'Bag', + 'Ankle boot') + +PLACES205_CATEGORIES = ( + 'abbey', 'airport_terminal', 'alley', 'amphitheater', 'amusement_park', + 'aquarium', 'aqueduct', 'arch', 'art_gallery', 'art_studio', + 'assembly_line', 'attic', 'auditorium', 'apartment_building/outdoor', + 'badlands', 'ballroom', 'bamboo_forest', 'banquet_hall', 'bar', + 'baseball_field', 'basement', 'basilica', 'bayou', 'beauty_salon', + 'bedroom', 'boardwalk', 'boat_deck', 'bookstore', 'botanical_garden', + 'bowling_alley', 'boxing_ring', 'bridge', 'building_facade', + 'bus_interior', 'butchers_shop', 'butte', 'bakery/shop', 'cafeteria', + 'campsite', 'candy_store', 'canyon', 'castle', 'cemetery', 'chalet', + 'classroom', 'closet', 'clothing_store', 'coast', 'cockpit', 'coffee_shop', + 'conference_center', 'conference_room', 'construction_site', 'corn_field', + 'corridor', 'cottage_garden', 'courthouse', 'courtyard', 'creek', + 'crevasse', 'crosswalk', 'cathedral/outdoor', 'church/outdoor', 'dam', + 'dining_room', 'dock', 'dorm_room', 'driveway', 'desert/sand', + 'desert/vegetation', 'dinette/home', 'doorway/outdoor', 'engine_room', + 'excavation', 'fairway', 'fire_escape', 'fire_station', 'food_court', + 'forest_path', 'forest_road', 'formal_garden', 'fountain', + 'field/cultivated', 'field/wild', 'galley', 'game_room', 'garbage_dump', + 'gas_station', 'gift_shop', 'golf_course', 'harbor', 'herb_garden', + 'highway', 'home_office', 'hospital', 'hospital_room', 'hot_spring', + 'hotel_room', 'hotel/outdoor', 'ice_cream_parlor', 'iceberg', 'igloo', + 'islet', 'ice_skating_rink/outdoor', 'inn/outdoor', 'jail_cell', 'kasbah', + 'kindergarden_classroom', 'kitchen', 'kitchenette', 'laundromat', + 'lighthouse', 'living_room', 'lobby', 'locker_room', 'mansion', 'marsh', + 'martial_arts_gym', 'mausoleum', 'medina', 'motel', 'mountain', + 'mountain_snowy', 'music_studio', 'market/outdoor', 'monastery/outdoor', + 'museum/indoor', 'nursery', 'ocean', 'office', 'office_building', + 'orchard', 'pagoda', 'palace', 'pantry', 'parking_lot', 'parlor', + 'pasture', 'patio', 'pavilion', 'phone_booth', 'picnic_area', 'playground', + 'plaza', 'pond', 'pulpit', 'racecourse', 'raft', 'railroad_track', + 'rainforest', 'reception', 'residential_neighborhood', 'restaurant', + 'restaurant_kitchen', 'restaurant_patio', 'rice_paddy', 'river', + 'rock_arch', 'rope_bridge', 'ruin', 'runway', 'sandbar', 'schoolhouse', + 'sea_cliff', 'shed', 'shoe_shop', 'shopfront', 'shower', 'ski_resort', + 'ski_slope', 'sky', 'skyscraper', 'slum', 'snowfield', 'staircase', + 'supermarket', 'swamp', 'stadium/baseball', 'stadium/football', + 'stage/indoor', 'subway_station/platform', 'swimming_pool/outdoor', + 'television_studio', 'topiary_garden', 'tower', 'train_railway', 
+ 'tree_farm', 'trench', 'temple/east_asia', 'temple/south_asia', + 'track/outdoor', 'train_station/platform', 'underwater/coral_reef', + 'valley', 'vegetable_garden', 'veranda', 'viaduct', 'volcano', + 'waiting_room', 'water_tower', 'watering_hole', 'wheat_field', 'wind_farm', + 'windmill', 'yard') + +OxfordIIITPet_CATEGORIES = ( + 'Abyssinian', 'american_bulldog', 'american_pit_bull_terrier', + 'basset_hound', 'beagle', 'Bengal', 'Birman', 'Bombay', 'boxer', + 'British_Shorthair', 'chihuahua', 'Egyptian_Mau', 'english_cocker_spaniel', + 'english_setter', 'german_shorthaired', 'great_pyrenees', 'havanese', + 'japanese_chin', 'keeshond', 'leonberger', 'Maine_Coon', + 'miniature_pinscher', 'newfoundland', 'Persian', 'pomeranian', 'pug', + 'Ragdoll', 'Russian_Blue', 'saint_bernard', 'samoyed', 'scottish_terrier', + 'shiba_inu', 'Siamese', 'Sphynx', 'staffordshire_bull_terrier', + 'wheaten_terrier', 'yorkshire_terrier') + +DTD_CATEGORIES = ('banded', 'blotchy', 'braided', 'bubbly', 'bumpy', + 'chequered', 'cobwebbed', 'cracked', 'crosshatched', + 'crystalline', 'dotted', 'fibrous', 'flecked', 'freckled', + 'frilly', 'gauzy', 'grid', 'grooved', 'honeycombed', + 'interlaced', 'knitted', 'lacelike', 'lined', 'marbled', + 'matted', 'meshed', 'paisley', 'perforated', 'pitted', + 'pleated', 'polka-dotted', 'porous', 'potholed', 'scaly', + 'smeared', 'spiralled', 'sprinkled', 'stained', 'stratified', + 'striped', 'studded', 'swirly', 'veined', 'waffled', 'woven', + 'wrinkled', 'zigzagged') + +FGVCAIRCRAFT_CATEGORIES = ( + '707-320', '727-200', '737-200', '737-300', '737-400', '737-500', + '737-600', '737-700', '737-800', '737-900', '747-100', '747-200', + '747-300', '747-400', '757-200', '757-300', '767-200', '767-300', + '767-400', '777-200', '777-300', 'A300B4', 'A310', 'A318', 'A319', 'A320', + 'A321', 'A330-200', 'A330-300', 'A340-200', 'A340-300', 'A340-500', + 'A340-600', 'A380', 'ATR-42', 'ATR-72', 'An-12', 'BAE 146-200', + 'BAE 146-300', 'BAE-125', 'Beechcraft 1900', 'Boeing 717', 'C-130', 'C-47', + 'CRJ-200', 'CRJ-700', 'CRJ-900', 'Cessna 172', 'Cessna 208', 'Cessna 525', + 'Cessna 560', 'Challenger 600', 'DC-10', 'DC-3', 'DC-6', 'DC-8', 'DC-9-30', + 'DH-82', 'DHC-1', 'DHC-6', 'DHC-8-100', 'DHC-8-300', 'DR-400', + 'Dornier 328', 'E-170', 'E-190', 'E-195', 'EMB-120', 'ERJ 135', 'ERJ 145', + 'Embraer Legacy 600', 'Eurofighter Typhoon', 'F-16A/B', 'F/A-18', + 'Falcon 2000', 'Falcon 900', 'Fokker 100', 'Fokker 50', 'Fokker 70', + 'Global Express', 'Gulfstream IV', 'Gulfstream V', 'Hawk T1', 'Il-76', + 'L-1011', 'MD-11', 'MD-80', 'MD-87', 'MD-90', 'Metroliner', 'Model B200', + 'PA-28', 'SR-20', 'Saab 2000', 'Saab 340', 'Spitfire', 'Tornado', 'Tu-134', + 'Tu-154', 'Yak-42') + +STANFORDCARS_CATEGORIES = ( + 'AM General Hummer SUV 2000', 'Acura RL Sedan 2012', 'Acura TL Sedan 2012', + 'Acura TL Type-S 2008', 'Acura TSX Sedan 2012', + 'Acura Integra Type R 2001', 'Acura ZDX Hatchback 2012', + 'Aston Martin V8 Vantage Convertible 2012', + 'Aston Martin V8 Vantage Coupe 2012', + 'Aston Martin Virage Convertible 2012', 'Aston Martin Virage Coupe 2012', + 'Audi RS 4 Convertible 2008', 'Audi A5 Coupe 2012', 'Audi TTS Coupe 2012', + 'Audi R8 Coupe 2012', 'Audi V8 Sedan 1994', 'Audi 100 Sedan 1994', + 'Audi 100 Wagon 1994', 'Audi TT Hatchback 2011', 'Audi S6 Sedan 2011', + 'Audi S5 Convertible 2012', 'Audi S5 Coupe 2012', 'Audi S4 Sedan 2012', + 'Audi S4 Sedan 2007', 'Audi TT RS Coupe 2012', + 'BMW ActiveHybrid 5 Sedan 2012', 'BMW 1 Series Convertible 2012', + 'BMW 1 Series Coupe 2012', 'BMW 3 Series Sedan 
2012', + 'BMW 3 Series Wagon 2012', 'BMW 6 Series Convertible 2007', + 'BMW X5 SUV 2007', 'BMW X6 SUV 2012', 'BMW M3 Coupe 2012', + 'BMW M5 Sedan 2010', 'BMW M6 Convertible 2010', 'BMW X3 SUV 2012', + 'BMW Z4 Convertible 2012', + 'Bentley Continental Supersports Conv. Convertible 2012', + 'Bentley Arnage Sedan 2009', 'Bentley Mulsanne Sedan 2011', + 'Bentley Continental GT Coupe 2012', 'Bentley Continental GT Coupe 2007', + 'Bentley Continental Flying Spur Sedan 2007', + 'Bugatti Veyron 16.4 Convertible 2009', 'Bugatti Veyron 16.4 Coupe 2009', + 'Buick Regal GS 2012', 'Buick Rainier SUV 2007', 'Buick Verano Sedan 2012', + 'Buick Enclave SUV 2012', 'Cadillac CTS-V Sedan 2012', + 'Cadillac SRX SUV 2012', 'Cadillac Escalade EXT Crew Cab 2007', + 'Chevrolet Silverado 1500 Hybrid Crew Cab 2012', + 'Chevrolet Corvette Convertible 2012', 'Chevrolet Corvette ZR1 2012', + 'Chevrolet Corvette Ron Fellows Edition Z06 2007', + 'Chevrolet Traverse SUV 2012', 'Chevrolet Camaro Convertible 2012', + 'Chevrolet HHR SS 2010', 'Chevrolet Impala Sedan 2007', + 'Chevrolet Tahoe Hybrid SUV 2012', 'Chevrolet Sonic Sedan 2012', + 'Chevrolet Express Cargo Van 2007', 'Chevrolet Avalanche Crew Cab 2012', + 'Chevrolet Cobalt SS 2010', 'Chevrolet Malibu Hybrid Sedan 2010', + 'Chevrolet TrailBlazer SS 2009', + 'Chevrolet Silverado 2500HD Regular Cab 2012', + 'Chevrolet Silverado 1500 Classic Extended Cab 2007', + 'Chevrolet Express Van 2007', 'Chevrolet Monte Carlo Coupe 2007', + 'Chevrolet Malibu Sedan 2007', + 'Chevrolet Silverado 1500 Extended Cab 2012', + 'Chevrolet Silverado 1500 Regular Cab 2012', 'Chrysler Aspen SUV 2009', + 'Chrysler Sebring Convertible 2010', + 'Chrysler Town and Country Minivan 2012', 'Chrysler 300 SRT-8 2010', + 'Chrysler Crossfire Convertible 2008', + 'Chrysler PT Cruiser Convertible 2008', 'Daewoo Nubira Wagon 2002', + 'Dodge Caliber Wagon 2012', 'Dodge Caliber Wagon 2007', + 'Dodge Caravan Minivan 1997', 'Dodge Ram Pickup 3500 Crew Cab 2010', + 'Dodge Ram Pickup 3500 Quad Cab 2009', 'Dodge Sprinter Cargo Van 2009', + 'Dodge Journey SUV 2012', 'Dodge Dakota Crew Cab 2010', + 'Dodge Dakota Club Cab 2007', 'Dodge Magnum Wagon 2008', + 'Dodge Challenger SRT8 2011', 'Dodge Durango SUV 2012', + 'Dodge Durango SUV 2007', 'Dodge Charger Sedan 2012', + 'Dodge Charger SRT-8 2009', 'Eagle Talon Hatchback 1998', + 'FIAT 500 Abarth 2012', 'FIAT 500 Convertible 2012', + 'Ferrari FF Coupe 2012', 'Ferrari California Convertible 2012', + 'Ferrari 458 Italia Convertible 2012', 'Ferrari 458 Italia Coupe 2012', + 'Fisker Karma Sedan 2012', 'Ford F-450 Super Duty Crew Cab 2012', + 'Ford Mustang Convertible 2007', 'Ford Freestar Minivan 2007', + 'Ford Expedition EL SUV 2009', 'Ford Edge SUV 2012', + 'Ford Ranger SuperCab 2011', 'Ford GT Coupe 2006', + 'Ford F-150 Regular Cab 2012', 'Ford F-150 Regular Cab 2007', + 'Ford Focus Sedan 2007', 'Ford E-Series Wagon Van 2012', + 'Ford Fiesta Sedan 2012', 'GMC Terrain SUV 2012', 'GMC Savana Van 2012', + 'GMC Yukon Hybrid SUV 2012', 'GMC Acadia SUV 2012', + 'GMC Canyon Extended Cab 2012', 'Geo Metro Convertible 1993', + 'HUMMER H3T Crew Cab 2010', 'HUMMER H2 SUT Crew Cab 2009', + 'Honda Odyssey Minivan 2012', 'Honda Odyssey Minivan 2007', + 'Honda Accord Coupe 2012', 'Honda Accord Sedan 2012', + 'Hyundai Veloster Hatchback 2012', 'Hyundai Santa Fe SUV 2012', + 'Hyundai Tucson SUV 2012', 'Hyundai Veracruz SUV 2012', + 'Hyundai Sonata Hybrid Sedan 2012', 'Hyundai Elantra Sedan 2007', + 'Hyundai Accent Sedan 2012', 'Hyundai Genesis Sedan 2012', + 'Hyundai Sonata Sedan 
2012', 'Hyundai Elantra Touring Hatchback 2012', + 'Hyundai Azera Sedan 2012', 'Infiniti G Coupe IPL 2012', + 'Infiniti QX56 SUV 2011', 'Isuzu Ascender SUV 2008', 'Jaguar XK XKR 2012', + 'Jeep Patriot SUV 2012', 'Jeep Wrangler SUV 2012', 'Jeep Liberty SUV 2012', + 'Jeep Grand Cherokee SUV 2012', 'Jeep Compass SUV 2012', + 'Lamborghini Reventon Coupe 2008', 'Lamborghini Aventador Coupe 2012', + 'Lamborghini Gallardo LP 570-4 Superleggera 2012', + 'Lamborghini Diablo Coupe 2001', 'Land Rover Range Rover SUV 2012', + 'Land Rover LR2 SUV 2012', 'Lincoln Town Car Sedan 2011', + 'MINI Cooper Roadster Convertible 2012', + 'Maybach Landaulet Convertible 2012', 'Mazda Tribute SUV 2011', + 'McLaren MP4-12C Coupe 2012', 'Mercedes-Benz 300-Class Convertible 1993', + 'Mercedes-Benz C-Class Sedan 2012', 'Mercedes-Benz SL-Class Coupe 2009', + 'Mercedes-Benz E-Class Sedan 2012', 'Mercedes-Benz S-Class Sedan 2012', + 'Mercedes-Benz Sprinter Van 2012', 'Mitsubishi Lancer Sedan 2012', + 'Nissan Leaf Hatchback 2012', 'Nissan NV Passenger Van 2012', + 'Nissan Juke Hatchback 2012', 'Nissan 240SX Coupe 1998', + 'Plymouth Neon Coupe 1999', 'Porsche Panamera Sedan 2012', + 'Ram C/V Cargo Van Minivan 2012', + 'Rolls-Royce Phantom Drophead Coupe Convertible 2012', + 'Rolls-Royce Ghost Sedan 2012', 'Rolls-Royce Phantom Sedan 2012', + 'Scion xD Hatchback 2012', 'Spyker C8 Convertible 2009', + 'Spyker C8 Coupe 2009', 'Suzuki Aerio Sedan 2007', + 'Suzuki Kizashi Sedan 2012', 'Suzuki SX4 Hatchback 2012', + 'Suzuki SX4 Sedan 2012', 'Tesla Model S Sedan 2012', + 'Toyota Sequoia SUV 2012', 'Toyota Camry Sedan 2012', + 'Toyota Corolla Sedan 2012', 'Toyota 4Runner SUV 2012', + 'Volkswagen Golf Hatchback 2012', 'Volkswagen Golf Hatchback 1991', + 'Volkswagen Beetle Hatchback 2012', 'Volvo C30 Hatchback 2012', + 'Volvo 240 Sedan 1993', 'Volvo XC90 SUV 2007', + 'smart fortwo Convertible 2012') + +SUN397_CATEGORIES = ( + 'abbey', 'airplane_cabin', 'airport_terminal', 'alley', 'amphitheater', + 'amusement_arcade', 'amusement_park', 'anechoic_chamber', + 'apartment_building_outdoor', 'apse_indoor', 'aquarium', 'aqueduct', + 'arch', 'archive', 'arrival_gate_outdoor', 'art_gallery', 'art_school', + 'art_studio', 'assembly_line', 'athletic_field_outdoor', 'atrium_public', + 'attic', 'auditorium', 'auto_factory', 'badlands', + 'badminton_court_indoor', 'baggage_claim', 'bakery_shop', + 'balcony_exterior', 'balcony_interior', 'ball_pit', 'ballroom', + 'bamboo_forest', 'banquet_hall', 'bar', 'barn', 'barndoor', + 'baseball_field', 'basement', 'basilica', 'basketball_court_outdoor', + 'bathroom', 'batters_box', 'bayou', 'bazaar_indoor', 'bazaar_outdoor', + 'beach', 'beauty_salon', 'bedroom', 'berth', 'biology_laboratory', + 'bistro_indoor', 'boardwalk', 'boat_deck', 'boathouse', 'bookstore', + 'booth_indoor', 'botanical_garden', 'bow_window_indoor', + 'bow_window_outdoor', 'bowling_alley', 'boxing_ring', 'brewery_indoor', + 'bridge', 'building_facade', 'bullring', 'burial_chamber', 'bus_interior', + 'butchers_shop', 'butte', 'cabin_outdoor', 'cafeteria', 'campsite', + 'campus', 'canal_natural', 'canal_urban', 'candy_store', 'canyon', + 'car_interior_backseat', 'car_interior_frontseat', 'carrousel', + 'casino_indoor', 'castle', 'catacomb', 'cathedral_indoor', + 'cathedral_outdoor', 'cavern_indoor', 'cemetery', 'chalet', + 'cheese_factory', 'chemistry_lab', 'chicken_coop_indoor', + 'chicken_coop_outdoor', 'childs_room', 'church_indoor', 'church_outdoor', + 'classroom', 'clean_room', 'cliff', 'cloister_indoor', 'closet', + 'clothing_store', 
'coast', 'cockpit', 'coffee_shop', 'computer_room', + 'conference_center', 'conference_room', 'construction_site', + 'control_room', 'control_tower_outdoor', 'corn_field', 'corral', + 'corridor', 'cottage_garden', 'courthouse', 'courtroom', 'courtyard', + 'covered_bridge_exterior', 'creek', 'crevasse', 'crosswalk', + 'cubicle_office', 'dam', 'delicatessen', 'dentists_office', 'desert_sand', + 'desert_vegetation', 'diner_indoor', 'diner_outdoor', 'dinette_home', + 'dinette_vehicle', 'dining_car', 'dining_room', 'discotheque', 'dock', + 'doorway_outdoor', 'dorm_room', 'driveway', 'driving_range_outdoor', + 'drugstore', 'electrical_substation', 'elevator_door', 'elevator_interior', + 'elevator_shaft', 'engine_room', 'escalator_indoor', 'excavation', + 'factory_indoor', 'fairway', 'fastfood_restaurant', 'field_cultivated', + 'field_wild', 'fire_escape', 'fire_station', 'firing_range_indoor', + 'fishpond', 'florist_shop_indoor', 'food_court', 'forest_broadleaf', + 'forest_needleleaf', 'forest_path', 'forest_road', 'formal_garden', + 'fountain', 'galley', 'game_room', 'garage_indoor', 'garbage_dump', + 'gas_station', 'gazebo_exterior', 'general_store_indoor', + 'general_store_outdoor', 'gift_shop', 'golf_course', 'greenhouse_indoor', + 'greenhouse_outdoor', 'gymnasium_indoor', 'hangar_indoor', + 'hangar_outdoor', 'harbor', 'hayfield', 'heliport', 'herb_garden', + 'highway', 'hill', 'home_office', 'hospital', 'hospital_room', + 'hot_spring', 'hot_tub_outdoor', 'hotel_outdoor', 'hotel_room', 'house', + 'hunting_lodge_outdoor', 'ice_cream_parlor', 'ice_floe', 'ice_shelf', + 'ice_skating_rink_indoor', 'ice_skating_rink_outdoor', 'iceberg', 'igloo', + 'industrial_area', 'inn_outdoor', 'islet', 'jacuzzi_indoor', 'jail_indoor', + 'jail_cell', 'jewelry_shop', 'kasbah', 'kennel_indoor', 'kennel_outdoor', + 'kindergarden_classroom', 'kitchen', 'kitchenette', 'labyrinth_outdoor', + 'lake_natural', 'landfill', 'landing_deck', 'laundromat', 'lecture_room', + 'library_indoor', 'library_outdoor', 'lido_deck_outdoor', 'lift_bridge', + 'lighthouse', 'limousine_interior', 'living_room', 'lobby', 'lock_chamber', + 'locker_room', 'mansion', 'manufactured_home', 'market_indoor', + 'market_outdoor', 'marsh', 'martial_arts_gym', 'mausoleum', 'medina', + 'moat_water', 'monastery_outdoor', 'mosque_indoor', 'mosque_outdoor', + 'motel', 'mountain', 'mountain_snowy', 'movie_theater_indoor', + 'museum_indoor', 'music_store', 'music_studio', + 'nuclear_power_plant_outdoor', 'nursery', 'oast_house', + 'observatory_outdoor', 'ocean', 'office', 'office_building', + 'oil_refinery_outdoor', 'oilrig', 'operating_room', 'orchard', + 'outhouse_outdoor', 'pagoda', 'palace', 'pantry', 'park', + 'parking_garage_indoor', 'parking_garage_outdoor', 'parking_lot', 'parlor', + 'pasture', 'patio', 'pavilion', 'pharmacy', 'phone_booth', + 'physics_laboratory', 'picnic_area', 'pilothouse_indoor', + 'planetarium_outdoor', 'playground', 'playroom', 'plaza', 'podium_indoor', + 'podium_outdoor', 'pond', 'poolroom_establishment', 'poolroom_home', + 'power_plant_outdoor', 'promenade_deck', 'pub_indoor', 'pulpit', + 'putting_green', 'racecourse', 'raceway', 'raft', 'railroad_track', + 'rainforest', 'reception', 'recreation_room', 'residential_neighborhood', + 'restaurant', 'restaurant_kitchen', 'restaurant_patio', 'rice_paddy', + 'riding_arena', 'river', 'rock_arch', 'rope_bridge', 'ruin', 'runway', + 'sandbar', 'sandbox', 'sauna', 'schoolhouse', 'sea_cliff', 'server_room', + 'shed', 'shoe_shop', 'shopfront', 'shopping_mall_indoor', 'shower', + 
'skatepark', 'ski_lodge', 'ski_resort', 'ski_slope', 'sky', 'skyscraper', + 'slum', 'snowfield', 'squash_court', 'stable', 'stadium_baseball', + 'stadium_football', 'stage_indoor', 'staircase', 'street', + 'subway_interior', 'subway_station_platform', 'supermarket', 'sushi_bar', + 'swamp', 'swimming_pool_indoor', 'swimming_pool_outdoor', + 'synagogue_indoor', 'synagogue_outdoor', 'television_studio', + 'temple_east_asia', 'temple_south_asia', 'tennis_court_indoor', + 'tennis_court_outdoor', 'tent_outdoor', 'theater_indoor_procenium', + 'theater_indoor_seats', 'thriftshop', 'throne_room', 'ticket_booth', + 'toll_plaza', 'topiary_garden', 'tower', 'toyshop', 'track_outdoor', + 'train_railway', 'train_station_platform', 'tree_farm', 'tree_house', + 'trench', 'underwater_coral_reef', 'utility_room', 'valley', + 'van_interior', 'vegetable_garden', 'veranda', 'veterinarians_office', + 'viaduct', 'videostore', 'village', 'vineyard', 'volcano', + 'volleyball_court_indoor', 'volleyball_court_outdoor', 'waiting_room', + 'warehouse_indoor', 'water_tower', 'waterfall_block', 'waterfall_fan', + 'waterfall_plunge', 'watering_hole', 'wave', 'wet_bar', 'wheat_field', + 'wind_farm', 'windmill', 'wine_cellar_barrel_storage', + 'wine_cellar_bottle_storage', 'wrestling_ring_indoor', 'yard', + 'youth_hostel') + +CALTECH101_CATEGORIES = ( + 'BACKGROUND_Google', 'Faces', 'Faces_easy', 'Leopards', 'Motorbikes', + 'accordion', 'airplanes', 'anchor', 'ant', 'barrel', 'bass', 'beaver', + 'binocular', 'bonsai', 'brain', 'brontosaurus', 'buddha', 'butterfly', + 'camera', 'cannon', 'car_side', 'ceiling_fan', 'cellphone', 'chair', + 'chandelier', 'cougar_body', 'cougar_face', 'crab', 'crayfish', + 'crocodile', 'crocodile_head', 'cup', 'dalmatian', 'dollar_bill', + 'dolphin', 'dragonfly', 'electric_guitar', 'elephant', 'emu', 'euphonium', + 'ewer', 'ferry', 'flamingo', 'flamingo_head', 'garfield', 'gerenuk', + 'gramophone', 'grand_piano', 'hawksbill', 'headphone', 'hedgehog', + 'helicopter', 'ibis', 'inline_skate', 'joshua_tree', 'kangaroo', 'ketch', + 'lamp', 'laptop', 'llama', 'lobster', 'lotus', 'mandolin', 'mayfly', + 'menorah', 'metronome', 'minaret', 'nautilus', 'octopus', 'okapi', + 'pagoda', 'panda', 'pigeon', 'pizza', 'platypus', 'pyramid', 'revolver', + 'rhino', 'rooster', 'saxophone', 'schooner', 'scissors', 'scorpion', + 'sea_horse', 'snoopy', 'soccer_ball', 'stapler', 'starfish', 'stegosaurus', + 'stop_sign', 'strawberry', 'sunflower', 'tick', 'trilobite', 'umbrella', + 'watch', 'water_lilly', 'wheelchair', 'wild_cat', 'windsor_chair', + 'wrench', 'yin_yang') + +FOOD101_CATEGORIES = ( + 'apple_pie', 'baby_back_ribs', 'baklava', 'beef_carpaccio', 'beef_tartare', + 'beet_salad', 'beignets', 'bibimbap', 'bread_pudding', 'breakfast_burrito', + 'bruschetta', 'caesar_salad', 'cannoli', 'caprese_salad', 'carrot_cake', + 'ceviche', 'cheesecake', 'cheese_plate', 'chicken_curry', + 'chicken_quesadilla', 'chicken_wings', 'chocolate_cake', + 'chocolate_mousse', 'churros', 'clam_chowder', 'club_sandwich', + 'crab_cakes', 'creme_brulee', 'croque_madame', 'cup_cakes', 'deviled_eggs', + 'donuts', 'dumplings', 'edamame', 'eggs_benedict', 'escargots', 'falafel', + 'filet_mignon', 'fish_and_chips', 'foie_gras', 'french_fries', + 'french_onion_soup', 'french_toast', 'fried_calamari', 'fried_rice', + 'frozen_yogurt', 'garlic_bread', 'gnocchi', 'greek_salad', + 'grilled_cheese_sandwich', 'grilled_salmon', 'guacamole', 'gyoza', + 'hamburger', 'hot_and_sour_soup', 'hot_dog', 'huevos_rancheros', 'hummus', + 'ice_cream', 'lasagna', 
'lobster_bisque', 'lobster_roll_sandwich', + 'macaroni_and_cheese', 'macarons', 'miso_soup', 'mussels', 'nachos', + 'omelette', 'onion_rings', 'oysters', 'pad_thai', 'paella', 'pancakes', + 'panna_cotta', 'peking_duck', 'pho', 'pizza', 'pork_chop', 'poutine', + 'prime_rib', 'pulled_pork_sandwich', 'ramen', 'ravioli', 'red_velvet_cake', + 'risotto', 'samosa', 'sashimi', 'scallops', 'seaweed_salad', + 'shrimp_and_grits', 'spaghetti_bolognese', 'spaghetti_carbonara', + 'spring_rolls', 'steak', 'strawberry_shortcake', 'sushi', 'tacos', + 'takoyaki', 'tiramisu', 'tuna_tartare', 'waffles') + +CIFAR100_CATEGORIES_CN = ( + '苹果', '水族馆鱼', '婴儿', '熊', '河狸', '床', '蜜蜂', '甲虫', '自行车', '瓶子', '碗', '小男孩', + '桥', '公共汽车', '蝴蝶', '骆驼', '易拉罐', '城堡', '毛毛虫', '牛', '椅子', '猩猩', '钟', '白云', + '蟑螂', '沙发', '螃蟹', '鳄鱼', '杯子', '恐龙', '海豚', '大象', '比目鱼', '森林', '狐狸', '小女孩', + '仓鼠', '屋子', '袋鼠', '键盘', '台灯', '割草机', '猎豹', '狮子', '蜥蜴', '龙虾', '男人', '枫树', + '摩托车', '山', '老鼠', '蘑菇', '橡树', '橙子橘子', '兰花', '水獭', '棕榈树', '梨', '皮卡车', '松树', + '田野', '盘子', '罂粟', '豪猪', '负鼠', '兔子', '浣熊', '鳐鱼', '公路', '火箭', '玫瑰', '大海', + '海豹', '鲨鱼', '尖嘴小鼠', '臭鼬', '摩天大楼', '蜗牛', '蛇', '蜘蛛', '松鼠', '电车', '向日葵', '甜椒', + '桌子', '坦克', '电话', '电视', '老虎', '拖拉机', '火车', '鳟鱼', '郁金香', '乌龟', '衣柜', '鲸鱼', + '柳树', '狼', '女人', '蠕虫') diff --git a/mmpretrain/datasets/cifar.py b/mmpretrain/datasets/cifar.py new file mode 100644 index 0000000000000000000000000000000000000000..2a011daee0d74e6b06613106f7587b8ad8a7ed90 --- /dev/null +++ b/mmpretrain/datasets/cifar.py @@ -0,0 +1,210 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import pickle +from typing import List, Optional + +import mmengine.dist as dist +import numpy as np +from mmengine.fileio import (LocalBackend, exists, get, get_file_backend, + join_path) +from mmengine.logging import MMLogger + +from mmpretrain.registry import DATASETS +from .base_dataset import BaseDataset +from .categories import CIFAR10_CATEGORIES, CIFAR100_CATEGORIES +from .utils import check_md5, download_and_extract_archive + + +@DATASETS.register_module() +class CIFAR10(BaseDataset): + """`CIFAR10 `_ Dataset. + + This implementation is modified from + https://github.com/pytorch/vision/blob/master/torchvision/datasets/cifar.py + + Args: + data_root (str): The root directory of the CIFAR Dataset. + split (str, optional): The dataset split, supports "train" and "test". + Default to "train". + metainfo (dict, optional): Meta information for dataset, such as + categories information. Defaults to None. + download (bool): Whether to download the dataset if not exists. + Defaults to True. + **kwargs: Other keyword arguments in :class:`BaseDataset`. 
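    A minimal usage sketch (``data/cifar10`` is an assumed placeholder path,
    not a path shipped with this repository; with the default
    ``download=True`` the archive is fetched automatically if it is missing):

        >>> from mmpretrain.datasets import CIFAR10
        >>> train_dataset = CIFAR10(data_root='data/cifar10', split='train')
        >>> test_dataset = CIFAR10(data_root='data/cifar10', split='test')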
+ """ # noqa: E501 + + base_folder = 'cifar-10-batches-py' + url = 'https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz' + filename = 'cifar-10-python.tar.gz' + tgz_md5 = 'c58f30108f718f92721af3b95e74349a' + train_list = [ + ['data_batch_1', 'c99cafc152244af753f735de768cd75f'], + ['data_batch_2', 'd4bba439e000b95fd0a9bffe97cbabec'], + ['data_batch_3', '54ebc095f3ab1f0389bbae665268c751'], + ['data_batch_4', '634d18415352ddfa80567beed471001a'], + ['data_batch_5', '482c414d41f54cd18b22e5b47cb7c3cb'], + ] + + test_list = [ + ['test_batch', '40351d587109b95175f43aff81a1287e'], + ] + meta = { + 'filename': 'batches.meta', + 'key': 'label_names', + 'md5': '5ff9c542aee3614f3951f8cda6e48888', + } + METAINFO = {'classes': CIFAR10_CATEGORIES} + + def __init__(self, + data_root: str = '', + split: str = 'train', + metainfo: Optional[dict] = None, + download: bool = True, + data_prefix: str = '', + test_mode: bool = False, + **kwargs): + + splits = ['train', 'test'] + assert split in splits, \ + f"The split must be one of {splits}, but get '{split}'" + self.split = split + + # To handle the BC-breaking + if split == 'train' and test_mode: + logger = MMLogger.get_current_instance() + logger.warning('split="train" but test_mode=True. ' + 'The training set will be used.') + + if not data_root and not data_prefix: + raise RuntimeError('Please set ``data_root`` to' + 'specify the dataset path') + + self.download = download + super().__init__( + # The CIFAR dataset doesn't need specify annotation file + ann_file='', + metainfo=metainfo, + data_root=data_root, + data_prefix=dict(root=data_prefix), + test_mode=test_mode, + **kwargs) + + def load_data_list(self): + """Load images and ground truth labels.""" + root = self.data_prefix['root'] + backend = get_file_backend(root, enable_singleton=True) + + if dist.is_main_process() and not self._check_integrity(): + if not isinstance(backend, LocalBackend): + raise RuntimeError(f'The dataset on {root} is not integrated, ' + f'please manually handle it.') + + if self.download: + download_and_extract_archive( + self.url, root, filename=self.filename, md5=self.tgz_md5) + else: + raise RuntimeError( + f'Cannot find {self.__class__.__name__} dataset in ' + f"{self.data_prefix['root']}, you can specify " + '`download=True` to download automatically.') + + dist.barrier() + assert self._check_integrity(), \ + 'Download failed or shared storage is unavailable. Please ' \ + f'download the dataset manually through {self.url}.' + + if self.split == 'train': + downloaded_list = self.train_list + else: + downloaded_list = self.test_list + + imgs = [] + gt_labels = [] + + # load the picked numpy arrays + for file_name, _ in downloaded_list: + file_path = join_path(root, self.base_folder, file_name) + entry = pickle.loads(get(file_path), encoding='latin1') + imgs.append(entry['data']) + if 'labels' in entry: + gt_labels.extend(entry['labels']) + else: + gt_labels.extend(entry['fine_labels']) + + imgs = np.vstack(imgs).reshape(-1, 3, 32, 32) + imgs = imgs.transpose((0, 2, 3, 1)) # convert to HWC + + if self.CLASSES is None: + # The metainfo in the file has the lowest priority, therefore + # we only need to load it if classes is not specified. 
+ self._load_meta() + + data_list = [] + for img, gt_label in zip(imgs, gt_labels): + info = {'img': img, 'gt_label': int(gt_label)} + data_list.append(info) + return data_list + + def _load_meta(self): + """Load categories information from metafile.""" + root = self.data_prefix['root'] + + path = join_path(root, self.base_folder, self.meta['filename']) + md5 = self.meta.get('md5', None) + if not exists(path) or (md5 is not None and not check_md5(path, md5)): + raise RuntimeError( + 'Dataset metadata file not found or corrupted.' + + ' You can use `download=True` to download it') + data = pickle.loads(get(path), encoding='latin1') + self._metainfo.setdefault('classes', data[self.meta['key']]) + + def _check_integrity(self): + """Check the integrity of data files.""" + root = self.data_prefix['root'] + + for fentry in (self.train_list + self.test_list): + filename, md5 = fentry[0], fentry[1] + fpath = join_path(root, self.base_folder, filename) + if not exists(fpath): + return False + if md5 is not None and not check_md5(fpath, md5): + return False + return True + + def extra_repr(self) -> List[str]: + """The extra repr information of the dataset.""" + body = [f"Prefix of data: \t{self.data_prefix['root']}"] + return body + + +@DATASETS.register_module() +class CIFAR100(CIFAR10): + """`CIFAR100 `_ Dataset. + + Args: + data_root (str): The root directory of the CIFAR Dataset. + split (str, optional): The dataset split, supports "train" and "test". + Default to "train". + metainfo (dict, optional): Meta information for dataset, such as + categories information. Defaults to None. + download (bool): Whether to download the dataset if not exists. + Defaults to True. + **kwargs: Other keyword arguments in :class:`BaseDataset`. + """ + + base_folder = 'cifar-100-python' + url = 'https://www.cs.toronto.edu/~kriz/cifar-100-python.tar.gz' + filename = 'cifar-100-python.tar.gz' + tgz_md5 = 'eb9058c3a382ffc7106e4002c42a8d85' + train_list = [ + ['train', '16019d7e3df5f24257cddd939b257f8d'], + ] + + test_list = [ + ['test', 'f0ef6b0ae62326f3e7ffdfab6717acfc'], + ] + meta = { + 'filename': 'meta', + 'key': 'fine_label_names', + 'md5': '7973b15100ade9c7d40fb424638fde48', + } + METAINFO = {'classes': CIFAR100_CATEGORIES} diff --git a/mmpretrain/datasets/coco_caption.py b/mmpretrain/datasets/coco_caption.py new file mode 100644 index 0000000000000000000000000000000000000000..541cda80398f7fcc7d3304d3d9f43155685ebe57 --- /dev/null +++ b/mmpretrain/datasets/coco_caption.py @@ -0,0 +1,42 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from pathlib import Path +from typing import List + +import mmengine +from mmengine.dataset import BaseDataset +from mmengine.fileio import get_file_backend + +from mmpretrain.registry import DATASETS + + +@DATASETS.register_module() +class COCOCaption(BaseDataset): + """COCO Caption dataset. + + Args: + data_root (str): The root directory for ``data_prefix`` and + ``ann_file``.. + ann_file (str): Annotation file path. + data_prefix (dict): Prefix for data field. Defaults to + ``dict(img_path='')``. + pipeline (Sequence): Processing pipeline. Defaults to an empty tuple. + **kwargs: Other keyword arguments in :class:`BaseDataset`. 
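    Each entry of the annotation file is expected to provide at least an
    ``image`` key (a path relative to ``data_prefix['img_path']``) and a
    ``caption`` key, e.g.
    ``{"image": "val2014/COCO_val2014_000000184613.jpg", "caption": "A plate of food on a table."}``
    (the file name and caption here are illustrative placeholders, not taken
    from the actual annotation file).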
+ """ + + def load_data_list(self) -> List[dict]: + """Load data list.""" + img_prefix = self.data_prefix['img_path'] + annotations = mmengine.load(self.ann_file) + file_backend = get_file_backend(img_prefix) + + data_list = [] + for ann in annotations: + data_info = { + 'image_id': Path(ann['image']).stem.split('_')[-1], + 'img_path': file_backend.join_path(img_prefix, ann['image']), + 'gt_caption': ann['caption'], + } + + data_list.append(data_info) + + return data_list diff --git a/mmpretrain/datasets/coco_retrieval.py b/mmpretrain/datasets/coco_retrieval.py new file mode 100644 index 0000000000000000000000000000000000000000..60d1586ad8672a4b57fcdc62740b3e08c3e2e20e --- /dev/null +++ b/mmpretrain/datasets/coco_retrieval.py @@ -0,0 +1,77 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import json +from collections import OrderedDict +from typing import List + +from mmengine import get_file_backend + +from mmpretrain.registry import DATASETS +from .base_dataset import BaseDataset + + +@DATASETS.register_module() +class COCORetrieval(BaseDataset): + """COCO Retrieval dataset. + + Args: + ann_file (str): Annotation file path. + test_mode (bool): Whether dataset is used for evaluation. This will + decide the annotation format in data list annotations. + Defaults to False. + data_root (str): The root directory for ``data_prefix`` and + ``ann_file``. Defaults to ''. + data_prefix (str | dict): Prefix for training data. Defaults to ''. + pipeline (Sequence): Processing pipeline. Defaults to an empty tuple. + **kwargs: Other keyword arguments in :class:`BaseDataset`. + """ + + def load_data_list(self) -> List[dict]: + """Load data list.""" + # get file backend + img_prefix = self.data_prefix['img_path'] + file_backend = get_file_backend(img_prefix) + + anno_info = json.load(open(self.ann_file, 'r')) + # mapping img_id to img filename + img_dict = OrderedDict() + for idx, img in enumerate(anno_info['images']): + if img['id'] not in img_dict: + img_rel_path = img['coco_url'].rsplit('/', 2)[-2:] + img_path = file_backend.join_path(img_prefix, *img_rel_path) + + # create new idx for image + img_dict[img['id']] = dict( + ori_id=img['id'], + image_id=idx, # will be used for evaluation + img_path=img_path, + text=[], + gt_text_id=[], + gt_image_id=[], + ) + + train_list = [] + for idx, anno in enumerate(anno_info['annotations']): + anno['text'] = anno.pop('caption') + anno['ori_id'] = anno.pop('id') + anno['text_id'] = idx # will be used for evaluation + # 1. prepare train data list item + train_data = anno.copy() + train_image = img_dict[train_data['image_id']] + train_data['img_path'] = train_image['img_path'] + train_data['image_ori_id'] = train_image['ori_id'] + train_data['image_id'] = train_image['image_id'] + train_data['is_matched'] = True + train_list.append(train_data) + # 2. 
prepare eval data list item based on img dict + img_dict[anno['image_id']]['gt_text_id'].append(anno['text_id']) + img_dict[anno['image_id']]['text'].append(anno['text']) + img_dict[anno['image_id']]['gt_image_id'].append( + train_image['image_id']) + + self.img_size = len(img_dict) + self.text_size = len(anno_info['annotations']) + + # return needed format data list + if self.test_mode: + return list(img_dict.values()) + return train_list diff --git a/mmpretrain/datasets/coco_vqa.py b/mmpretrain/datasets/coco_vqa.py new file mode 100644 index 0000000000000000000000000000000000000000..85f4bdcf39ef82ec47a2072dc198e6b8792d8768 --- /dev/null +++ b/mmpretrain/datasets/coco_vqa.py @@ -0,0 +1,114 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import os.path as osp +import re +from collections import Counter +from typing import List + +import mmengine +from mmengine.dataset import BaseDataset + +from mmpretrain.registry import DATASETS + + +@DATASETS.register_module() +class COCOVQA(BaseDataset): + """VQAv2 dataset. + + Args: + data_root (str): The root directory for ``data_prefix``, ``ann_file`` + and ``question_file``. + data_prefix (str): The directory of images. + question_file (str): Question file path. + ann_file (str, optional): Annotation file path for training and + validation. Defaults to an empty string. + **kwargs: Other keyword arguments in :class:`BaseDataset`. + """ + + def __init__(self, + data_root: str, + data_prefix: str, + question_file: str, + ann_file: str = '', + **kwarg): + self.question_file = question_file + super().__init__( + data_root=data_root, + data_prefix=dict(img_path=data_prefix), + ann_file=ann_file, + **kwarg, + ) + + def _join_prefix(self): + if not mmengine.is_abs(self.question_file) and self.question_file: + self.question_file = osp.join(self.data_root, self.question_file) + + return super()._join_prefix() + + def _create_image_index(self): + img_prefix = self.data_prefix['img_path'] + + files = mmengine.list_dir_or_file(img_prefix, list_dir=False) + image_index = {} + for file in files: + image_id = re.findall(r'\d{12}', file) + if len(image_id) > 0: + image_id = int(image_id[-1]) + image_index[image_id] = mmengine.join_path(img_prefix, file) + + return image_index + + def load_data_list(self) -> List[dict]: + """Load data list.""" + questions = mmengine.load(self.question_file)['questions'] + if self.ann_file: + annotations = mmengine.load(self.ann_file)['annotations'] + assert len(questions) == len(annotations) + else: + annotations = [None] * len(questions) + + # The original VQAv2 annotation file and question file includes + # only image id but no image file paths. + self.image_index = self._create_image_index() + + data_list = [] + for question, ann in zip(questions, annotations): + # question example + # { + # 'image_id': 262144, + # 'question': "Is the ball flying towards the batter?", + # 'question_id': 262144000 + # } + # + # ann example + # { + # 'question_type': "what are the", + # 'answer_type': "other", + # 'answers': [ + # {'answer': 'watching', + # 'answer_id': 1, + # 'answer_confidence': 'yes'}, + # ... 
+ # ], + # 'image_id': 262148, + # 'question_id': 262148000, + # 'multiple_choice_answer': 'watching', + # 'answer_type': 'other', + # } + + data_info = question + data_info['img_path'] = self.image_index[question['image_id']] + + if ann is not None: + assert ann['question_id'] == question['question_id'] + + # add answer_weight & answer_count, delete duplicate answer + answers = [item['answer'] for item in ann.pop('answers')] + count = Counter(answers) + answer_weight = [i / len(answers) for i in count.values()] + data_info['gt_answer'] = list(count.keys()) + data_info['gt_answer_weight'] = answer_weight + data_info.update(ann) + + data_list.append(data_info) + + return data_list diff --git a/mmpretrain/datasets/cub.py b/mmpretrain/datasets/cub.py new file mode 100644 index 0000000000000000000000000000000000000000..8db126216fb3408e2dd18255db04a851eb5fe08f --- /dev/null +++ b/mmpretrain/datasets/cub.py @@ -0,0 +1,142 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import List + +from mmengine import get_file_backend, list_from_file +from mmengine.logging import MMLogger + +from mmpretrain.registry import DATASETS +from .base_dataset import BaseDataset +from .categories import CUB_CATEGORIES + + +@DATASETS.register_module() +class CUB(BaseDataset): + """The CUB-200-2011 Dataset. + + Support the `CUB-200-2011 `_ Dataset. + Comparing with the `CUB-200 `_ Dataset, + there are much more pictures in `CUB-200-2011`. After downloading and decompression, the dataset + directory structure is as follows. + + CUB dataset directory: :: + + CUB_200_2011 + ├── images + │ ├── class_x + │ │ ├── xx1.jpg + │ │ ├── xx2.jpg + │ │ └── ... + │ ├── class_y + │ │ ├── yy1.jpg + │ │ ├── yy2.jpg + │ │ └── ... + │ └── ... + ├── images.txt + ├── image_class_labels.txt + ├── train_test_split.txt + └── .... + + Args: + data_root (str): The root directory for CUB-200-2011 dataset. + split (str, optional): The dataset split, supports "train" and "test". + Default to "train". + + Examples: + >>> from mmpretrain.datasets import CUB + >>> train_dataset = CUB(data_root='data/CUB_200_2011', split='train') + >>> train_dataset + Dataset CUB + Number of samples: 5994 + Number of categories: 200 + Root of dataset: data/CUB_200_2011 + >>> test_dataset = CUB(data_root='data/CUB_200_2011', split='test') + >>> test_dataset + Dataset CUB + Number of samples: 5794 + Number of categories: 200 + Root of dataset: data/CUB_200_2011 + """ # noqa: E501 + + METAINFO = {'classes': CUB_CATEGORIES} + + def __init__(self, + data_root: str, + split: str = 'train', + test_mode: bool = False, + **kwargs): + + splits = ['train', 'test'] + assert split in splits, \ + f"The split must be one of {splits}, but get '{split}'" + self.split = split + + # To handle the BC-breaking + if split == 'train' and test_mode: + logger = MMLogger.get_current_instance() + logger.warning('split="train" but test_mode=True. 
' + 'The training set will be used.') + + ann_file = 'images.txt' + data_prefix = 'images' + image_class_labels_file = 'image_class_labels.txt' + train_test_split_file = 'train_test_split.txt' + + self.backend = get_file_backend(data_root, enable_singleton=True) + self.image_class_labels_file = self.backend.join_path( + data_root, image_class_labels_file) + self.train_test_split_file = self.backend.join_path( + data_root, train_test_split_file) + super(CUB, self).__init__( + ann_file=ann_file, + data_root=data_root, + data_prefix=data_prefix, + test_mode=test_mode, + **kwargs) + + def _load_data_from_txt(self, filepath): + """load data from CUB txt file, the every line of the file is idx and a + data item.""" + pairs = list_from_file(filepath) + data_dict = dict() + for pair in pairs: + idx, data_item = pair.split() + # all the index starts from 1 in CUB files, + # here we need to '- 1' to let them start from 0. + data_dict[int(idx) - 1] = data_item + return data_dict + + def load_data_list(self): + """Load images and ground truth labels.""" + sample_dict = self._load_data_from_txt(self.ann_file) + + label_dict = self._load_data_from_txt(self.image_class_labels_file) + + split_dict = self._load_data_from_txt(self.train_test_split_file) + + assert sample_dict.keys() == label_dict.keys() == split_dict.keys(),\ + f'sample_ids should be same in files {self.ann_file}, ' \ + f'{self.image_class_labels_file} and {self.train_test_split_file}' + + data_list = [] + for sample_id in sample_dict.keys(): + if split_dict[sample_id] == '1' and self.split == 'test': + # skip train samples when split='test' + continue + elif split_dict[sample_id] == '0' and self.split == 'train': + # skip test samples when split='train' + continue + + img_path = self.backend.join_path(self.img_prefix, + sample_dict[sample_id]) + gt_label = int(label_dict[sample_id]) - 1 + info = dict(img_path=img_path, gt_label=gt_label) + data_list.append(info) + + return data_list + + def extra_repr(self) -> List[str]: + """The extra repr information of the dataset.""" + body = [ + f'Root of dataset: \t{self.data_root}', + ] + return body diff --git a/mmpretrain/datasets/custom.py b/mmpretrain/datasets/custom.py new file mode 100644 index 0000000000000000000000000000000000000000..bb491ff0cc7f816f629603d3b8be55e3f787c373 --- /dev/null +++ b/mmpretrain/datasets/custom.py @@ -0,0 +1,287 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union + +from mmengine.fileio import (BaseStorageBackend, get_file_backend, + list_from_file) +from mmengine.logging import MMLogger + +from mmpretrain.registry import DATASETS +from .base_dataset import BaseDataset + + +def find_folders( + root: str, + backend: Optional[BaseStorageBackend] = None +) -> Tuple[List[str], Dict[str, int]]: + """Find classes by folders under a root. + + Args: + root (string): root directory of folders + backend (BaseStorageBackend | None): The file backend of the root. + If None, auto infer backend from the root path. Defaults to None. + + Returns: + Tuple[List[str], Dict[str, int]]: + + - folders: The name of sub folders under the root. + - folder_to_idx: The map from folder name to class idx. + """ + # Pre-build file backend to prevent verbose file backend inference. 
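For reference, a minimal standalone sketch (not part of the diff) of how the three CUB index files above combine into samples, assuming local text files and the '1' = train / '0' = test flag convention used by the loader:

import os.path as osp

def load_cub_split(data_root, split='train'):
    def read_pairs(name):
        # every line is "<1-based id> <value>"
        with open(osp.join(data_root, name)) as f:
            return {int(i) - 1: v
                    for i, v in (ln.split() for ln in f if ln.strip())}

    samples = read_pairs('images.txt')
    labels = read_pairs('image_class_labels.txt')
    flags = read_pairs('train_test_split.txt')

    keep = '1' if split == 'train' else '0'
    return [dict(img_path=osp.join(data_root, 'images', samples[i]),
                 gt_label=int(labels[i]) - 1)
            for i in samples if flags[i] == keep]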
+ backend = backend or get_file_backend(root, enable_singleton=True) + folders = list( + backend.list_dir_or_file( + root, + list_dir=True, + list_file=False, + recursive=False, + )) + folders.sort() + folder_to_idx = {folders[i]: i for i in range(len(folders))} + return folders, folder_to_idx + + +def get_samples( + root: str, + folder_to_idx: Dict[str, int], + is_valid_file: Callable, + backend: Optional[BaseStorageBackend] = None, +): + """Make dataset by walking all images under a root. + + Args: + root (string): root directory of folders + folder_to_idx (dict): the map from class name to class idx + is_valid_file (Callable): A function that takes path of a file + and check if the file is a valid sample file. + backend (BaseStorageBackend | None): The file backend of the root. + If None, auto infer backend from the root path. Defaults to None. + + Returns: + Tuple[list, set]: + + - samples: a list of tuple where each element is (image, class_idx) + - empty_folders: The folders don't have any valid files. + """ + samples = [] + available_classes = set() + # Pre-build file backend to prevent verbose file backend inference. + backend = backend or get_file_backend(root, enable_singleton=True) + + if folder_to_idx is not None: + for folder_name in sorted(list(folder_to_idx.keys())): + _dir = backend.join_path(root, folder_name) + files = backend.list_dir_or_file( + _dir, + list_dir=False, + list_file=True, + recursive=True, + ) + for file in sorted(list(files)): + if is_valid_file(file): + path = backend.join_path(folder_name, file) + item = (path, folder_to_idx[folder_name]) + samples.append(item) + available_classes.add(folder_name) + empty_folders = set(folder_to_idx.keys()) - available_classes + else: + files = backend.list_dir_or_file( + root, + list_dir=False, + list_file=True, + recursive=True, + ) + samples = [file for file in sorted(list(files)) if is_valid_file(file)] + empty_folders = None + + return samples, empty_folders + + +@DATASETS.register_module() +class CustomDataset(BaseDataset): + """A generic dataset for multiple tasks. + + The dataset supports two kinds of style. + + 1. Use an annotation file to specify all samples, and each line indicates a + sample: + + The annotation file (for ``with_label=True``, supervised tasks.): :: + + folder_1/xxx.png 0 + folder_1/xxy.png 1 + 123.png 4 + nsdf3.png 3 + ... + + The annotation file (for ``with_label=False``, unsupervised tasks.): :: + + folder_1/xxx.png + folder_1/xxy.png + 123.png + nsdf3.png + ... + + Sample files: :: + + data_prefix/ + ├── folder_1 + │ ├── xxx.png + │ ├── xxy.png + │ └── ... + ├── 123.png + ├── nsdf3.png + └── ... + + Please use the argument ``metainfo`` to specify extra information for + the task, like ``{'classes': ('bird', 'cat', 'deer', 'dog', 'frog')}``. + + 2. Place all samples in one folder as below: + + Sample files (for ``with_label=True``, supervised tasks, we use the name + of sub-folders as the categories names): :: + + data_prefix/ + ├── class_x + │ ├── xxx.png + │ ├── xxy.png + │ └── ... + │ └── xxz.png + └── class_y + ├── 123.png + ├── nsdf3.png + ├── ... + └── asd932_.png + + Sample files (for ``with_label=False``, unsupervised tasks, we use all + sample files under the specified folder): :: + + data_prefix/ + ├── folder_1 + │ ├── xxx.png + │ ├── xxy.png + │ └── ... + ├── 123.png + ├── nsdf3.png + └── ... + + If the ``ann_file`` is specified, the dataset will be generated by the + first way, otherwise, try the second way. 
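The two helpers above can also be exercised on their own; a hedged usage sketch, assuming a local `data/train` directory laid out as class sub-folders of images:

from mmpretrain.datasets.custom import find_folders, get_samples

root = 'data/train'  # hypothetical path
classes, folder_to_idx = find_folders(root)
samples, empty_folders = get_samples(
    root,
    folder_to_idx,
    is_valid_file=lambda name: name.lower().endswith(('.jpg', '.png')),
)
# `samples` is a list of (relative_path, class_index) tuples and
# `empty_folders` collects class folders that contained no valid file.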
+ + Args: + data_root (str): The root directory for ``data_prefix`` and + ``ann_file``. Defaults to ''. + data_prefix (str | dict): Prefix for the data. Defaults to ''. + ann_file (str): Annotation file path. Defaults to ''. + with_label (bool): Whether the annotation file includes ground truth + labels, or use sub-folders to specify categories. + Defaults to True. + extensions (Sequence[str]): A sequence of allowed extensions. Defaults + to ('.jpg', '.jpeg', '.png', '.ppm', '.bmp', '.pgm', '.tif'). + metainfo (dict, optional): Meta information for dataset, such as class + information. Defaults to None. + lazy_init (bool): Whether to load annotation during instantiation. + In some cases, such as visualization, only the meta information of + the dataset is needed, which is not necessary to load annotation + file. ``Basedataset`` can skip load annotations to save time by set + ``lazy_init=False``. Defaults to False. + **kwargs: Other keyword arguments in :class:`BaseDataset`. + """ + + def __init__(self, + data_root: str = '', + data_prefix: Union[str, dict] = '', + ann_file: str = '', + with_label=True, + extensions: Sequence[str] = ('.jpg', '.jpeg', '.png', '.ppm', + '.bmp', '.pgm', '.tif'), + metainfo: Optional[dict] = None, + lazy_init: bool = False, + **kwargs): + assert (ann_file or data_prefix or data_root), \ + 'One of `ann_file`, `data_root` and `data_prefix` must '\ + 'be specified.' + + self.extensions = tuple(set([i.lower() for i in extensions])) + self.with_label = with_label + + super().__init__( + # The base class requires string ann_file but this class doesn't + ann_file=ann_file, + metainfo=metainfo, + data_root=data_root, + data_prefix=data_prefix, + # Force to lazy_init for some modification before loading data. + lazy_init=True, + **kwargs) + + # Full initialize the dataset. + if not lazy_init: + self.full_init() + + def _find_samples(self): + """find samples from ``data_prefix``.""" + if self.with_label: + classes, folder_to_idx = find_folders(self.img_prefix) + samples, empty_classes = get_samples( + self.img_prefix, + folder_to_idx, + is_valid_file=self.is_valid_file, + ) + + self.folder_to_idx = folder_to_idx + + if self.CLASSES is not None: + assert len(self.CLASSES) == len(classes), \ + f"The number of subfolders ({len(classes)}) doesn't " \ + f'match the number of specified classes ' \ + f'({len(self.CLASSES)}). Please check the data folder.' + else: + self._metainfo['classes'] = tuple(classes) + else: + samples, empty_classes = get_samples( + self.img_prefix, + None, + is_valid_file=self.is_valid_file, + ) + + if len(samples) == 0: + raise RuntimeError( + f'Found 0 files in subfolders of: {self.data_prefix}. ' + f'Supported extensions are: {",".join(self.extensions)}') + + if empty_classes: + logger = MMLogger.get_current_instance() + logger.warning( + 'Found no valid file in the folder ' + f'{", ".join(empty_classes)}. ' + f"Supported extensions are: {', '.join(self.extensions)}") + + return samples + + def load_data_list(self): + """Load image paths and gt_labels.""" + if not self.ann_file: + samples = self._find_samples() + elif self.with_label: + lines = list_from_file(self.ann_file) + samples = [x.strip().rsplit(' ', 1) for x in lines] + else: + samples = list_from_file(self.ann_file) + + # Pre-build file backend to prevent verbose file backend inference. 
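A note on the `rsplit(' ', 1)` call above: only the last space-separated token of an annotation line is the label, so file names that themselves contain spaces still parse correctly. A tiny illustration with a hypothetical line:

line = 'folder 1/some image.png 3'
filename, gt_label = line.strip().rsplit(' ', 1)
assert filename == 'folder 1/some image.png'
assert int(gt_label) == 3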
+ backend = get_file_backend(self.img_prefix, enable_singleton=True) + data_list = [] + for sample in samples: + if self.with_label: + filename, gt_label = sample + img_path = backend.join_path(self.img_prefix, filename) + info = {'img_path': img_path, 'gt_label': int(gt_label)} + else: + img_path = backend.join_path(self.img_prefix, sample) + info = {'img_path': img_path} + data_list.append(info) + return data_list + + def is_valid_file(self, filename: str) -> bool: + """Check if a file is a valid sample.""" + return filename.lower().endswith(self.extensions) diff --git a/mmpretrain/datasets/dataset_wrappers.py b/mmpretrain/datasets/dataset_wrappers.py new file mode 100644 index 0000000000000000000000000000000000000000..1adff10beb024940f9066a407cc76ddb06b27404 --- /dev/null +++ b/mmpretrain/datasets/dataset_wrappers.py @@ -0,0 +1,176 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import copy + +import numpy as np +from mmengine.dataset import BaseDataset, force_full_init + +from mmpretrain.registry import DATASETS + + +@DATASETS.register_module() +class KFoldDataset: + """A wrapper of dataset for K-Fold cross-validation. + + K-Fold cross-validation divides all the samples in groups of samples, + called folds, of almost equal sizes. And we use k-1 of folds to do training + and use the fold left to do validation. + + Args: + dataset (:obj:`mmengine.dataset.BaseDataset` | dict): The dataset to be + divided + fold (int): The fold used to do validation. Defaults to 0. + num_splits (int): The number of all folds. Defaults to 5. + test_mode (bool): Use the training dataset or validation dataset. + Defaults to False. + seed (int, optional): The seed to shuffle the dataset before splitting. + If None, not shuffle the dataset. Defaults to None. + """ + + def __init__(self, + dataset, + fold=0, + num_splits=5, + test_mode=False, + seed=None): + if isinstance(dataset, dict): + self.dataset = DATASETS.build(dataset) + # Init the dataset wrapper lazily according to the dataset setting. + lazy_init = dataset.get('lazy_init', False) + elif isinstance(dataset, BaseDataset): + self.dataset = dataset + else: + raise TypeError(f'Unsupported dataset type {type(dataset)}.') + + self._metainfo = getattr(self.dataset, 'metainfo', {}) + self.fold = fold + self.num_splits = num_splits + self.test_mode = test_mode + self.seed = seed + + self._fully_initialized = False + if not lazy_init: + self.full_init() + + @property + def metainfo(self) -> dict: + """Get the meta information of ``self.dataset``. + + Returns: + dict: Meta information of the dataset. + """ + # Prevent `self._metainfo` from being modified by outside. + return copy.deepcopy(self._metainfo) + + def full_init(self): + """fully initialize the dataset.""" + if self._fully_initialized: + return + + self.dataset.full_init() + ori_len = len(self.dataset) + indices = list(range(ori_len)) + if self.seed is not None: + rng = np.random.default_rng(self.seed) + rng.shuffle(indices) + + test_start = ori_len * self.fold // self.num_splits + test_end = ori_len * (self.fold + 1) // self.num_splits + if self.test_mode: + indices = indices[test_start:test_end] + else: + indices = indices[:test_start] + indices[test_end:] + + self._ori_indices = indices + self.dataset = self.dataset.get_subset(indices) + + self._fully_initialized = True + + @force_full_init + def _get_ori_dataset_idx(self, idx: int) -> int: + """Convert global idx to local index. + + Args: + idx (int): Global index of ``KFoldDataset``. 
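A standalone sketch of the fold arithmetic in `full_init` above; with 10 samples and 5 folds, fold 1 keeps indices 2-3 for validation and the remaining 8 for training (fold sizes may differ by one when the length is not divisible by `num_splits`):

def kfold_indices(ori_len, fold, num_splits, test_mode):
    indices = list(range(ori_len))
    test_start = ori_len * fold // num_splits
    test_end = ori_len * (fold + 1) // num_splits
    if test_mode:
        return indices[test_start:test_end]
    return indices[:test_start] + indices[test_end:]

assert kfold_indices(10, 1, 5, test_mode=True) == [2, 3]
assert kfold_indices(10, 1, 5, test_mode=False) == [0, 1, 4, 5, 6, 7, 8, 9]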
+ + Returns: + int: The original index in the whole dataset. + """ + return self._ori_indices[idx] + + @force_full_init + def get_data_info(self, idx: int) -> dict: + """Get annotation by index. + + Args: + idx (int): Global index of ``KFoldDataset``. + + Returns: + dict: The idx-th annotation of the datasets. + """ + return self.dataset.get_data_info(idx) + + @force_full_init + def __len__(self): + return len(self.dataset) + + @force_full_init + def __getitem__(self, idx): + return self.dataset[idx] + + @force_full_init + def get_cat_ids(self, idx): + return self.dataset.get_cat_ids(idx) + + @force_full_init + def get_gt_labels(self): + return self.dataset.get_gt_labels() + + @property + def CLASSES(self): + """Return all categories names.""" + return self._metainfo.get('classes', None) + + @property + def class_to_idx(self): + """Map mapping class name to class index. + + Returns: + dict: mapping from class name to class index. + """ + + return {cat: i for i, cat in enumerate(self.CLASSES)} + + def __repr__(self): + """Print the basic information of the dataset. + + Returns: + str: Formatted string. + """ + head = 'Dataset ' + self.__class__.__name__ + body = [] + type_ = 'test' if self.test_mode else 'training' + body.append(f'Type: \t{type_}') + body.append(f'Seed: \t{self.seed}') + + def ordinal(n): + # Copy from https://codegolf.stackexchange.com/a/74047 + suffix = 'tsnrhtdd'[(n // 10 % 10 != 1) * (n % 10 < 4) * n % 10::4] + return f'{n}{suffix}' + + body.append( + f'Fold: \t{ordinal(self.fold+1)} of {self.num_splits}-fold') + if self._fully_initialized: + body.append(f'Number of samples: \t{self.__len__()}') + else: + body.append("Haven't been initialized") + + if self.CLASSES is not None: + body.append(f'Number of categories: \t{len(self.CLASSES)}') + else: + body.append('The `CLASSES` meta info is not set.') + + body.append( + f'Original dataset type:\t{self.dataset.__class__.__name__}') + + lines = [head] + [' ' * 4 + line for line in body] + return '\n'.join(lines) diff --git a/mmpretrain/datasets/dtd.py b/mmpretrain/datasets/dtd.py new file mode 100644 index 0000000000000000000000000000000000000000..034d0b1b444afebfc420eeff7e138072f7d7ee1f --- /dev/null +++ b/mmpretrain/datasets/dtd.py @@ -0,0 +1,116 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import List + +import mat4py +from mmengine import get_file_backend + +from mmpretrain.registry import DATASETS +from .base_dataset import BaseDataset +from .categories import DTD_CATEGORIES + + +@DATASETS.register_module() +class DTD(BaseDataset): + """The Describable Texture Dataset (DTD). + + Support the `Describable Texture Dataset `_ Dataset. + After downloading and decompression, the dataset directory structure is as follows. + + DTD dataset directory: :: + + dtd + ├── images + │ ├── banded + | | ├──banded_0002.jpg + | | ├──banded_0004.jpg + | | └── ... + │ └── ... + ├── imdb + │ └── imdb.mat + ├── labels + | | ├──labels_joint_anno.txt + | | ├──test1.txt + | | ├──test2.txt + | | └── ... + │ └── ... + └── .... + + Args: + data_root (str): The root directory for Describable Texture dataset. + split (str, optional): The dataset split, supports "train", + "val", "trainval", and "test". Default to "trainval". 
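The `ordinal()` one-liner in `KFoldDataset.__repr__` above picks a suffix by slicing the string 'tsnrhtdd'; a few spot checks of what it actually produces:

def ordinal(n):
    suffix = 'tsnrhtdd'[(n // 10 % 10 != 1) * (n % 10 < 4) * n % 10::4]
    return f'{n}{suffix}'

assert [ordinal(n) for n in (1, 2, 3, 4, 11, 21)] == \
    ['1st', '2nd', '3rd', '4th', '11th', '21st']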
+ + Examples: + >>> from mmpretrain.datasets import DTD + >>> train_dataset = DTD(data_root='data/dtd', split='trainval') + >>> train_dataset + Dataset DTD + Number of samples: 3760 + Number of categories: 47 + Root of dataset: data/dtd + >>> test_dataset = DTD(data_root='data/dtd', split='test') + >>> test_dataset + Dataset DTD + Number of samples: 1880 + Number of categories: 47 + Root of dataset: data/dtd + """ # noqa: E501 + + METAINFO = {'classes': DTD_CATEGORIES} + + def __init__(self, data_root: str, split: str = 'trainval', **kwargs): + + splits = ['train', 'val', 'trainval', 'test'] + assert split in splits, \ + f"The split must be one of {splits}, but get '{split}'" + self.split = split + + data_prefix = 'images' + test_mode = split == 'test' + + self.backend = get_file_backend(data_root, enable_singleton=True) + ann_file = self.backend.join_path('imdb', 'imdb.mat') + + super(DTD, self).__init__( + ann_file=ann_file, + data_root=data_root, + data_prefix=data_prefix, + test_mode=test_mode, + **kwargs) + + def load_data_list(self): + """Load images and ground truth labels.""" + + data = mat4py.loadmat(self.ann_file)['images'] + names = data['name'] + labels = data['class'] + parts = data['set'] + num = len(names) + assert num == len(labels) == len(parts), 'get error ann file' + + if self.split == 'train': + target_set = {1} + elif self.split == 'val': + target_set = {2} + elif self.split == 'test': + target_set = {3} + else: + target_set = {1, 2} + + data_list = [] + for i in range(num): + if parts[i] in target_set: + img_name = names[i] + img_path = self.backend.join_path(self.img_prefix, img_name) + gt_label = labels[i] - 1 + info = dict(img_path=img_path, gt_label=gt_label) + data_list.append(info) + + return data_list + + def extra_repr(self) -> List[str]: + """The extra repr information of the dataset.""" + body = [ + f'Root of dataset: \t{self.data_root}', + ] + return body diff --git a/mmpretrain/datasets/fgvcaircraft.py b/mmpretrain/datasets/fgvcaircraft.py new file mode 100644 index 0000000000000000000000000000000000000000..696992c06bbf02f097d017a519d42f758ba5f16f --- /dev/null +++ b/mmpretrain/datasets/fgvcaircraft.py @@ -0,0 +1,98 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import List + +from mmengine import get_file_backend, list_from_file + +from mmpretrain.registry import DATASETS +from .base_dataset import BaseDataset +from .categories import FGVCAIRCRAFT_CATEGORIES + + +@DATASETS.register_module() +class FGVCAircraft(BaseDataset): + """The FGVC_Aircraft Dataset. + + Support the `FGVC_Aircraft Dataset `_ Dataset. + After downloading and decompression, the dataset directory structure is as follows. + + FGVC_Aircraft dataset directory: :: + + fgvc-aircraft-2013b + └── data + ├── images + │ ├── 1.jpg + │ ├── 2.jpg + │ └── ... + ├── images_variant_train.txt + ├── images_variant_test.txt + ├── images_variant_trainval.txt + ├── images_variant_val.txt + ├── variants.txt + └── .... + + Args: + data_root (str): The root directory for FGVC_Aircraft dataset. + split (str, optional): The dataset split, supports "train", + "val", "trainval", and "test". Default to "trainval". 
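A minimal sketch of the `imdb.mat` filtering in the DTD loader above: the `set` field marks each image as 1 (train), 2 (val) or 3 (test), the 'trainval' split merges 1 and 2, and `class` labels are 1-based:

SPLIT_TO_SETS = {'train': {1}, 'val': {2}, 'test': {3}, 'trainval': {1, 2}}

def select_dtd(names, labels, parts, split):
    target = SPLIT_TO_SETS[split]
    return [dict(img_path=name, gt_label=label - 1)
            for name, label, part in zip(names, labels, parts)
            if part in target]

# e.g. select_dtd(['banded_0002.jpg'], [1], [2], 'trainval')
# -> [{'img_path': 'banded_0002.jpg', 'gt_label': 0}]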
+ + Examples: + >>> from mmpretrain.datasets import FGVCAircraft + >>> train_dataset = FGVCAircraft(data_root='data/fgvc-aircraft-2013b', split='trainval') + >>> train_dataset + Dataset FGVCAircraft + Number of samples: 6667 + Number of categories: 100 + Root of dataset: data/fgvc-aircraft-2013b + >>> test_dataset = FGVCAircraft(data_root='data/fgvc-aircraft-2013b', split='test') + >>> test_dataset + Dataset FGVCAircraft + Number of samples: 3333 + Number of categories: 100 + Root of dataset: data/fgvc-aircraft-2013b + """ # noqa: E501 + + METAINFO = {'classes': FGVCAIRCRAFT_CATEGORIES} + + def __init__(self, data_root: str, split: str = 'trainval', **kwargs): + + splits = ['train', 'val', 'trainval', 'test'] + assert split in splits, \ + f"The split must be one of {splits}, but get '{split}'" + self.split = split + + self.backend = get_file_backend(data_root, enable_singleton=True) + ann_file = self.backend.join_path('data', + f'images_variant_{split}.txt') + data_prefix = self.backend.join_path('data', 'images') + test_mode = split == 'test' + + super(FGVCAircraft, self).__init__( + ann_file=ann_file, + data_root=data_root, + test_mode=test_mode, + data_prefix=data_prefix, + **kwargs) + + def load_data_list(self): + """Load images and ground truth labels.""" + + pairs = list_from_file(self.ann_file) + data_list = [] + for pair in pairs: + pair = pair.split() + img_name = pair[0] + class_name = ' '.join(pair[1:]) + img_name = f'{img_name}.jpg' + img_path = self.backend.join_path(self.img_prefix, img_name) + gt_label = self.METAINFO['classes'].index(class_name) + info = dict(img_path=img_path, gt_label=gt_label) + data_list.append(info) + + return data_list + + def extra_repr(self) -> List[str]: + """The extra repr information of the dataset.""" + body = [ + f'Root of dataset: \t{self.data_root}', + ] + return body diff --git a/mmpretrain/datasets/flamingo.py b/mmpretrain/datasets/flamingo.py new file mode 100644 index 0000000000000000000000000000000000000000..3b5745a1437537fccbc304d158a0f0c8d09f032a --- /dev/null +++ b/mmpretrain/datasets/flamingo.py @@ -0,0 +1,295 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import random +from abc import abstractmethod +from collections import Counter +from typing import List + +import mmengine +import numpy as np +from mmengine.dataset import BaseDataset +from pycocotools.coco import COCO + +from mmpretrain.registry import DATASETS +from .coco_vqa import COCOVQA + + +class FlamingoFewShotMixin: + """Flamingo fewshot eval dataset minin. + + Args: + num_shots (int): Number of shots to perform evaluation. + Defaults to 0. + Note: 0 does not mean a strict zero-shot in Flamingo setting. + It will use 2 only-text prompt without in context images. + num_support_examples (int): Number of support examples to get the + few shots from. Defaults to 2048. + num_query_examples (int): Number of query examples to perform the + final evaluation. Defaults to 5000. + incontext_prompt_temp (str): In context prompt template for few shot + examples. Defaults to ''. + final_prompt_temp (str): Final query prompt template. Defaults to ''. + **kwargs: Other keyword arguments in :class:`BaseDataset`. 
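Variant names in `images_variant_*.txt` may contain spaces, which is why the FGVCAircraft loader above keeps only the first token as the image id and joins the rest into the class name. A small illustration with a hypothetical line:

line = '1025794 Boeing 717'            # hypothetical annotation line
tokens = line.split()
img_name = f'{tokens[0]}.jpg'          # '1025794.jpg'
class_name = ' '.join(tokens[1:])      # 'Boeing 717'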
+ """ + + def __init__(self, + num_shots: int = 0, + num_support_examples: int = 2048, + num_query_examples: int = 5000, + incontext_prompt_temp: str = '', + final_prompt_temp: str = '', + **kwarg): + self.num_shots = num_shots + self.num_support_examples = num_support_examples + self.num_query_examples = num_query_examples + self.incontext_prompt_temp = incontext_prompt_temp + self.final_prompt_temp = final_prompt_temp + super().__init__(**kwarg) + + def get_subset_idx(self, total_num): + random_idx = np.random.choice( + total_num, + self.num_support_examples + self.num_query_examples, + replace=False) + + support_idx = random_idx[:self.num_support_examples] + query_idx = random_idx[self.num_support_examples:] + return support_idx, query_idx + + @abstractmethod + def parse_basic_anno(self, anno: dict) -> dict: + """Parse basic annotation for support and query set.""" + pass + + @abstractmethod + def parse_fewshot_anno(self, anno: dict, support_list: List) -> dict: + """Parse fewshot related annotation for query set with support list.""" + pass + + +@DATASETS.register_module() +class FlamingoEvalCOCOVQA(FlamingoFewShotMixin, COCOVQA): + """Flamingo few shot VQAv2 dataset. + + Args: + data_root (str): The root directory for ``data_prefix`` and + ``ann_file``. + ann_file (str): Annotation file path. + question_file (str): Question file path. + num_shots (int): Number of shots to perform evaluation. + Defaults to 0. + Note: 0 does not mean a strict zero-shot in Flamingo setting. + It will use 2 only-text prompt without in context images. + num_support_examples (int): Number of support examples to get the + few shots from. Defaults to 2048. + num_query_examples (int): Number of query examples to perform the + final evaluation. Defaults to 5000. + **kwargs: Other keyword arguments in :class:`BaseDataset`. + """ + + def __init__(self, + data_root: str, + question_file: str, + ann_file: str = '', + num_shots: int = 0, + num_support_examples: int = 2048, + num_query_examples: int = 5000, + **kwarg): + super().__init__( + data_root=data_root, + question_file=question_file, + ann_file=ann_file, + num_shots=num_shots, + num_support_examples=num_support_examples, + num_query_examples=num_query_examples, + **kwarg) + + def parse_basic_anno(self, ann: dict) -> dict: + """Parse basic annotation for support and query set. + + Args: + anno (dict): Annotation for single example. + + Return: + dict: Parsed annotation for single example. + """ + if ann is None: + return {} + + answers = [a['answer'] for a in ann['answers']] + count = Counter(answers) + answer_weight = [i / len(answers) for i in count.values()] + answer_info = { + 'gt_answer': list(count.keys()), + 'gt_answer_weight': answer_weight + } + return answer_info + + def parse_fewshot_anno(self, query: dict, support_list: List) -> dict: + """Parse fewshot related annotation for query set with support list. + + Args: + anno (dict): Annotation for single example. + support_list (List): List of support subset to subsample few shots. + + Return: + dict: Parsed annotation for single example. 
+ """ + # prepare n shots examples + shots = random.sample(support_list, self.num_shots) + + # append image path for n shots + img_path = [shot['img_path'] for shot in shots] + img_path.append(query['img_path']) + query['img_path'] = img_path + + query['shots'] = [ + dict( + question=item['question'], + answer=item['gt_answer'][0], + ) for item in shots + ] + return query + + def load_data_list(self) -> List[dict]: + """Load data list.""" + questions = mmengine.load(self.question_file)['questions'] + if self.ann_file: + annotations = mmengine.load(self.ann_file)['annotations'] + assert len(questions) == len(annotations) + else: + annotations = [None] * len(questions) + if self.num_shots > 0: + raise ValueError('Unable to construct few-shot examples ' + 'since no annotation file.') + + # The original VQAv2 annotation file and question file includes + # only image id but no image file paths. + self.image_index = self._create_image_index() + + num_data = len(questions) + support_idx, query_idx = self.get_subset_idx(num_data) + + # prepare support subset + if self.num_shots > 0: + support_list = [] + for idx in support_idx: + question = questions[idx] + ann = annotations[idx] + support = {**question, **self.parse_basic_anno(ann)} + support['img_path'] = self.image_index[question['image_id']] + support_list.append(support) + + # prepare query subset + data_list = [] + for idx in query_idx: + question = questions[idx] + ann = annotations[idx] + data_info = {**question, **self.parse_basic_anno(ann)} + data_info['img_path'] = self.image_index[question['image_id']] + if self.num_shots > 0: + data_info = self.parse_fewshot_anno(data_info, support_list) + data_list.append(data_info) + + return data_list + + +@DATASETS.register_module() +class FlamingoEvalCOCOCaption(FlamingoFewShotMixin, BaseDataset): + """Flamingo few shot COCO Caption dataset. + + Args: + data_root (str): The root directory for ``data_prefix`` and + ``ann_file``. + ann_file (str): Annotation file path. + data_prefix (dict): Prefix for data field. Defaults to + ``dict(img_path='')``. + num_shots (int): Number of shots to perform evaluation. + Defaults to 0. + num_support_examples (int): Number of support examples to get the + few shots from. Defaults to 2048. + num_query_examples (int): Number of query examples to perform the + final evaluation. Defaults to 5000. + **kwargs: Other keyword arguments in :class:`BaseDataset`. + """ + + def __init__(self, + data_root: str, + ann_file: str, + num_shots: int = 0, + num_support_examples: int = 2048, + num_query_examples: int = 5000, + **kwarg): + super().__init__( + data_root=data_root, + ann_file=ann_file, + num_shots=num_shots, + num_support_examples=num_support_examples, + num_query_examples=num_query_examples, + **kwarg) + + def parse_basic_anno(self, ann: dict, coco: COCO) -> dict: + """Parse basic annotation for support and query set. + + Args: + anno (dict): Annotation for single example. + coco (COCO): The coco dataset. + + Return: + dict: Parsed annotation for single example. + """ + img_prefix = self.data_prefix['img_path'] + img = coco.imgs[ann['image_id']] + data_info = dict( + img_path=mmengine.join_path(img_prefix, img['file_name']), + gt_caption=ann['caption'], + image_id=ann['image_id'], + ) + return data_info + + def parse_fewshot_anno(self, query: dict, support_list: List) -> dict: + """Parse fewshot related annotation for query set with support list. + + Args: + query (dict): Annotation for single example. 
+ support_list (List): List of support subset to subsample few shots. + coco (COCO): The coco dataset. + + Return: + dict: Parsed annotation for single example. + """ + # prepare n shots examples + shots = random.sample(support_list, self.num_shots) + + # append image path for n shots + img_path = [shot['img_path'] for shot in shots] + img_path.append(query['img_path']) + query['img_path'] = img_path + + query['shots'] = [dict(caption=item['gt_caption']) for item in shots] + return query + + def load_data_list(self) -> List[dict]: + """Load data list.""" + with mmengine.get_local_path(self.ann_file) as ann_file: + coco = COCO(ann_file) + + num_data = len(coco.anns) + support_idx, query_idx = self.get_subset_idx(num_data) + ann_ids = list(coco.anns) + + # prepare support subset + if self.num_shots > 0: + support_list = [] + for idx in support_idx: + support = self.parse_basic_anno(coco.anns[ann_ids[idx]], coco) + support_list.append(support) + + # prepare query subset + query_list = [] + for idx in query_idx: + data_info = self.parse_basic_anno(coco.anns[ann_ids[idx]], coco) + if self.num_shots > 0: + data_info = self.parse_fewshot_anno(data_info, support_list) + query_list.append(data_info) + + return query_list diff --git a/mmpretrain/datasets/flowers102.py b/mmpretrain/datasets/flowers102.py new file mode 100644 index 0000000000000000000000000000000000000000..fe76dcc8422c8692261800b204a6262b60002e81 --- /dev/null +++ b/mmpretrain/datasets/flowers102.py @@ -0,0 +1,104 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import List + +import mat4py +from mmengine import get_file_backend + +from mmpretrain.registry import DATASETS +from .base_dataset import BaseDataset + + +@DATASETS.register_module() +class Flowers102(BaseDataset): + """The Oxford 102 Flower Dataset. + + Support the `Oxford 102 Flowers Dataset `_ Dataset. + After downloading and decompression, the dataset directory structure is as follows. + + Flowers102 dataset directory: :: + + Flowers102 + ├── jpg + │ ├── image_00001.jpg + │ ├── image_00002.jpg + │ └── ... + ├── imagelabels.mat + ├── setid.mat + └── ... + + Args: + data_root (str): The root directory for Oxford 102 Flowers dataset. + split (str, optional): The dataset split, supports "train", + "val", "trainval", and "test". Default to "trainval". 
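The caption loader above relies on pycocotools bookkeeping: `coco.anns` maps annotation ids to caption records and `coco.imgs` maps image ids to image metadata. A small sketch, assuming a local COCO caption annotation file at a hypothetical path:

from pycocotools.coco import COCO

coco = COCO('annotations/captions_val2014.json')  # hypothetical path
ann = coco.anns[next(iter(coco.anns))]
img = coco.imgs[ann['image_id']]
print(img['file_name'], '->', ann['caption'])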
+ + Examples: + >>> from mmpretrain.datasets import Flowers102 + >>> train_dataset = Flowers102(data_root='data/Flowers102', split='trainval') + >>> train_dataset + Dataset Flowers102 + Number of samples: 2040 + Root of dataset: data/Flowers102 + >>> test_dataset = Flowers102(data_root='data/Flowers102', split='test') + >>> test_dataset + Dataset Flowers102 + Number of samples: 6149 + Root of dataset: data/Flowers102 + """ # noqa: E501 + + def __init__(self, data_root: str, split: str = 'trainval', **kwargs): + splits = ['train', 'val', 'trainval', 'test'] + assert split in splits, \ + f"The split must be one of {splits}, but get '{split}'" + self.split = split + + ann_file = 'imagelabels.mat' + data_prefix = 'jpg' + train_test_split_file = 'setid.mat' + test_mode = split == 'test' + + self.backend = get_file_backend(data_root, enable_singleton=True) + + self.train_test_split_file = self.backend.join_path( + data_root, train_test_split_file) + + super(Flowers102, self).__init__( + ann_file=ann_file, + data_root=data_root, + data_prefix=data_prefix, + test_mode=test_mode, + **kwargs) + + def load_data_list(self): + """Load images and ground truth labels.""" + + label_dict = mat4py.loadmat(self.ann_file)['labels'] + split_list = mat4py.loadmat(self.train_test_split_file) + + if self.split == 'train': + split_list = split_list['trnid'] + elif self.split == 'val': + split_list = split_list['valid'] + elif self.split == 'test': + split_list = split_list['tstid'] + else: + train_ids = split_list['trnid'] + val_ids = split_list['valid'] + train_ids.extend(val_ids) + split_list = train_ids + + data_list = [] + for sample_id in split_list: + img_name = 'image_%05d.jpg' % (sample_id) + img_path = self.backend.join_path(self.img_prefix, img_name) + gt_label = int(label_dict[sample_id - 1]) - 1 + info = dict(img_path=img_path, gt_label=gt_label) + data_list.append(info) + + return data_list + + def extra_repr(self) -> List[str]: + """The extra repr information of the dataset.""" + body = [ + f'Root of dataset: \t{self.data_root}', + ] + return body diff --git a/mmpretrain/datasets/food101.py b/mmpretrain/datasets/food101.py new file mode 100644 index 0000000000000000000000000000000000000000..4ce7ffeee91c6843c259149770e9de4ad9f4317a --- /dev/null +++ b/mmpretrain/datasets/food101.py @@ -0,0 +1,102 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import List + +from mmengine import get_file_backend, list_from_file + +from mmpretrain.registry import DATASETS +from .base_dataset import BaseDataset +from .categories import FOOD101_CATEGORIES + + +@DATASETS.register_module() +class Food101(BaseDataset): + """The Food101 Dataset. + + Support the `Food101 Dataset `_ Dataset. + After downloading and decompression, the dataset directory structure is as follows. + + Food101 dataset directory: :: + + food-101 + ├── images + │ ├── class_x + │ │ ├── xx1.jpg + │ │ ├── xx2.jpg + │ │ └── ... + │ ├── class_y + │ │ ├── yy1.jpg + │ │ ├── yy2.jpg + │ │ └── ... + │ └── ... + ├── meta + │ ├── train.txt + │ └── test.txt + └── .... + + Args: + data_root (str): The root directory for Food101 dataset. + split (str, optional): The dataset split, supports "train" and "test". + Default to "train". 
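A minimal sketch of how a 1-based id from `setid.mat` maps to an image file name and a 0-based label in the Flowers102 loader above:

def flowers_sample(sample_id, labels):
    # `labels` is the 1-based label list loaded from imagelabels.mat
    return dict(img_path='image_%05d.jpg' % sample_id,
                gt_label=int(labels[sample_id - 1]) - 1)

assert flowers_sample(1, [77, 3]) == {'img_path': 'image_00001.jpg',
                                      'gt_label': 76}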
+ + Examples: + >>> from mmpretrain.datasets import Food101 + >>> train_dataset = Food101(data_root='data/food-101', split='train') + >>> train_dataset + Dataset Food101 + Number of samples: 75750 + Number of categories: 101 + Root of dataset: data/food-101 + >>> test_dataset = Food101(data_root='data/food-101', split='test') + >>> test_dataset + Dataset Food101 + Number of samples: 25250 + Number of categories: 101 + Root of dataset: data/food-101 + """ # noqa: E501 + + METAINFO = {'classes': FOOD101_CATEGORIES} + + def __init__(self, data_root: str, split: str = 'train', **kwargs): + + splits = ['train', 'test'] + assert split in splits, \ + f"The split must be one of {splits}, but get '{split}'" + self.split = split + + self.backend = get_file_backend(data_root, enable_singleton=True) + if split == 'train': + ann_file = self.backend.join_path('meta', 'train.txt') + else: + ann_file = self.backend.join_path('meta', 'test.txt') + + test_mode = split == 'test' + data_prefix = 'images' + + super(Food101, self).__init__( + ann_file=ann_file, + data_root=data_root, + test_mode=test_mode, + data_prefix=data_prefix, + **kwargs) + + def load_data_list(self): + """Load images and ground truth labels.""" + + pairs = list_from_file(self.ann_file) + data_list = [] + for pair in pairs: + class_name, img_name = pair.split('/') + img_name = f'{img_name}.jpg' + img_path = self.backend.join_path(self.img_prefix, class_name, + img_name) + gt_label = self.METAINFO['classes'].index(class_name) + info = dict(img_path=img_path, gt_label=gt_label) + data_list.append(info) + return data_list + + def extra_repr(self) -> List[str]: + """The extra repr information of the dataset.""" + body = [ + f'Root of dataset: \t{self.data_root}', + ] + return body diff --git a/mmpretrain/datasets/imagenet.py b/mmpretrain/datasets/imagenet.py new file mode 100644 index 0000000000000000000000000000000000000000..e309d3af7e53f9ec4072e24f7433d1f6e33d14cb --- /dev/null +++ b/mmpretrain/datasets/imagenet.py @@ -0,0 +1,102 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Optional, Union + +from mmengine.logging import MMLogger + +from mmpretrain.registry import DATASETS +from .categories import IMAGENET_CATEGORIES +from .custom import CustomDataset + + +@DATASETS.register_module() +class ImageNet(CustomDataset): + """`ImageNet `_ Dataset. + + The dataset supports two kinds of annotation format. More details can be + found in :class:`CustomDataset`. + + Args: + data_root (str): The root directory for ``data_prefix`` and + ``ann_file``. Defaults to ''. + data_prefix (str | dict): Prefix for training data. Defaults to ''. + ann_file (str): Annotation file path. Defaults to ''. + metainfo (dict, optional): Meta information for dataset, such as class + information. Defaults to None. + **kwargs: Other keyword arguments in :class:`CustomDataset` and + :class:`BaseDataset`. + """ # noqa: E501 + + IMG_EXTENSIONS = ('.jpg', '.jpeg', '.png', '.ppm', '.bmp', '.pgm', '.tif') + METAINFO = {'classes': IMAGENET_CATEGORIES} + + def __init__(self, + data_root: str = '', + data_prefix: Union[str, dict] = '', + ann_file: str = '', + metainfo: Optional[dict] = None, + **kwargs): + kwargs = {'extensions': self.IMG_EXTENSIONS, **kwargs} + super().__init__( + data_root=data_root, + data_prefix=data_prefix, + ann_file=ann_file, + metainfo=metainfo, + **kwargs) + + +@DATASETS.register_module() +class ImageNet21k(CustomDataset): + """ImageNet21k Dataset. 
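Each line of `meta/train.txt` is `<class_name>/<image_stem>` without an extension; the Food101 loader above appends '.jpg' and looks the class up in the category list. A tiny illustration with a truncated, hypothetical class tuple:

classes = ('apple_pie', 'baby_back_ribs')   # truncated for illustration
line = 'apple_pie/1005649'
class_name, img_stem = line.split('/')
img_path = f'images/{class_name}/{img_stem}.jpg'
gt_label = classes.index(class_name)        # 0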
+ + Since the dataset ImageNet21k is extremely big, cantains 21k+ classes + and 1.4B files. We won't provide the default categories list. Please + specify it from the ``classes`` argument. + + Args: + data_root (str): The root directory for ``data_prefix`` and + ``ann_file``. Defaults to ''. + data_prefix (str | dict): Prefix for training data. Defaults to ''. + ann_file (str): Annotation file path. Defaults to ''. + metainfo (dict, optional): Meta information for dataset, such as class + information. Defaults to None. + multi_label (bool): Not implement by now. Use multi label or not. + Defaults to False. + **kwargs: Other keyword arguments in :class:`CustomDataset` and + :class:`BaseDataset`. + """ + + IMG_EXTENSIONS = ('.jpg', '.jpeg', '.png', '.ppm', '.bmp', '.pgm', '.tif') + + def __init__(self, + data_root: str = '', + data_prefix: Union[str, dict] = '', + ann_file: str = '', + metainfo: Optional[dict] = None, + multi_label: bool = False, + **kwargs): + if multi_label: + raise NotImplementedError( + 'The `multi_label` option is not supported by now.') + self.multi_label = multi_label + + logger = MMLogger.get_current_instance() + + if not ann_file: + logger.warning( + 'The ImageNet21k dataset is large, and scanning directory may ' + 'consume long time. Considering to specify the `ann_file` to ' + 'accelerate the initialization.') + + kwargs = {'extensions': self.IMG_EXTENSIONS, **kwargs} + super().__init__( + data_root=data_root, + data_prefix=data_prefix, + ann_file=ann_file, + metainfo=metainfo, + **kwargs) + + if self.CLASSES is None: + logger.warning( + 'The CLASSES is not stored in the `ImageNet21k` class. ' + 'Considering to specify the `classes` argument if you need ' + 'do inference on the ImageNet-21k dataset') diff --git a/mmpretrain/datasets/inshop.py b/mmpretrain/datasets/inshop.py new file mode 100644 index 0000000000000000000000000000000000000000..f64f1779632d4a98d0e36d59750f4a1e8cbd4aed --- /dev/null +++ b/mmpretrain/datasets/inshop.py @@ -0,0 +1,157 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from mmengine import get_file_backend, list_from_file + +from mmpretrain.registry import DATASETS +from .base_dataset import BaseDataset + + +@DATASETS.register_module() +class InShop(BaseDataset): + """InShop Dataset for Image Retrieval. + + Please download the images from the homepage + 'https://mmlab.ie.cuhk.edu.hk/projects/DeepFashion/InShopRetrieval.html' + (In-shop Clothes Retrieval Benchmark -> Img -> img.zip, + Eval/list_eval_partition.txt), and organize them as follows way: :: + + In-shop Clothes Retrieval Benchmark (data_root)/ + ├── Eval / + │ └── list_eval_partition.txt (ann_file) + ├── Img (img_prefix) + │ └── img/ + ├── README.txt + └── ..... + + Args: + data_root (str): The root directory for dataset. + split (str): Choose from 'train', 'query' and 'gallery'. + Defaults to 'train'. + data_prefix (str | dict): Prefix for training data. + Defaults to 'Img'. + ann_file (str): Annotation file path, path relative to + ``data_root``. Defaults to 'Eval/list_eval_partition.txt'. + **kwargs: Other keyword arguments in :class:`BaseDataset`. + + Examples: + >>> from mmpretrain.datasets import InShop + >>> + >>> # build train InShop dataset + >>> inshop_train_cfg = dict(data_root='data/inshop', split='train') + >>> inshop_train = InShop(**inshop_train_cfg) + >>> inshop_train + Dataset InShop + Number of samples: 25882 + The `CLASSES` meta info is not set. 
+ Root of dataset: data/inshop + >>> + >>> # build query InShop dataset + >>> inshop_query_cfg = dict(data_root='data/inshop', split='query') + >>> inshop_query = InShop(**inshop_query_cfg) + >>> inshop_query + Dataset InShop + Number of samples: 14218 + The `CLASSES` meta info is not set. + Root of dataset: data/inshop + >>> + >>> # build gallery InShop dataset + >>> inshop_gallery_cfg = dict(data_root='data/inshop', split='gallery') + >>> inshop_gallery = InShop(**inshop_gallery_cfg) + >>> inshop_gallery + Dataset InShop + Number of samples: 12612 + The `CLASSES` meta info is not set. + Root of dataset: data/inshop + """ + + def __init__(self, + data_root: str, + split: str = 'train', + data_prefix: str = 'Img', + ann_file: str = 'Eval/list_eval_partition.txt', + **kwargs): + + assert split in ('train', 'query', 'gallery'), "'split' of `InShop`" \ + f" must be one of ['train', 'query', 'gallery'], bu get '{split}'" + self.backend = get_file_backend(data_root, enable_singleton=True) + self.split = split + super().__init__( + data_root=data_root, + data_prefix=data_prefix, + ann_file=ann_file, + **kwargs) + + def _process_annotations(self): + lines = list_from_file(self.ann_file) + + anno_train = dict(metainfo=dict(), data_list=list()) + anno_gallery = dict(metainfo=dict(), data_list=list()) + + # item_id to label, each item corresponds to one class label + class_num = 0 + gt_label_train = {} + + # item_id to label, each label corresponds to several items + gallery_num = 0 + gt_label_gallery = {} + + # (lines[0], lines[1]) is the image number and the field name; + # Each line format as 'image_name, item_id, evaluation_status' + for line in lines[2:]: + img_name, item_id, status = line.split() + img_path = self.backend.join_path(self.img_prefix, img_name) + if status == 'train': + if item_id not in gt_label_train: + gt_label_train[item_id] = class_num + class_num += 1 + # item_id to class_id (for the training set) + anno_train['data_list'].append( + dict(img_path=img_path, gt_label=gt_label_train[item_id])) + elif status == 'gallery': + if item_id not in gt_label_gallery: + gt_label_gallery[item_id] = [] + # Since there are multiple images for each item, + # record the corresponding item for each image. + gt_label_gallery[item_id].append(gallery_num) + anno_gallery['data_list'].append( + dict(img_path=img_path, sample_idx=gallery_num)) + gallery_num += 1 + + if self.split == 'train': + anno_train['metainfo']['class_number'] = class_num + anno_train['metainfo']['sample_number'] = \ + len(anno_train['data_list']) + return anno_train + elif self.split == 'gallery': + anno_gallery['metainfo']['sample_number'] = gallery_num + return anno_gallery + + # Generate the label for the query(val) set + anno_query = dict(metainfo=dict(), data_list=list()) + query_num = 0 + for line in lines[2:]: + img_name, item_id, status = line.split() + img_path = self.backend.join_path(self.img_prefix, img_name) + if status == 'query': + anno_query['data_list'].append( + dict( + img_path=img_path, gt_label=gt_label_gallery[item_id])) + query_num += 1 + + anno_query['metainfo']['sample_number'] = query_num + return anno_query + + def load_data_list(self): + """load data list. + + For the train set, return image and ground truth label. For the query + set, return image and ids of images in gallery. For the gallery set, + return image and its id. 
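A condensed sketch of the `list_eval_partition.txt` bookkeeping in `_process_annotations` above: training item ids become class indices, while gallery images of one item are grouped so that a query image can be labelled with the gallery indices sharing its item id (entries here are hypothetical):

lines = [
    'img/a.jpg id_001 train',
    'img/b.jpg id_002 gallery',
    'img/c.jpg id_002 gallery',
    'img/d.jpg id_002 query',
]
train_label, gallery_of_item, gallery_idx = {}, {}, 0
for line in lines:
    _, item_id, status = line.split()
    if status == 'train':
        train_label.setdefault(item_id, len(train_label))
    elif status == 'gallery':
        gallery_of_item.setdefault(item_id, []).append(gallery_idx)
        gallery_idx += 1

assert train_label == {'id_001': 0}
assert gallery_of_item == {'id_002': [0, 1]}  # gt_label of the query image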
+ """ + data_info = self._process_annotations() + data_list = data_info['data_list'] + return data_list + + def extra_repr(self): + """The extra repr information of the dataset.""" + body = [f'Root of dataset: \t{self.data_root}'] + return body diff --git a/mmpretrain/datasets/mnist.py b/mmpretrain/datasets/mnist.py new file mode 100644 index 0000000000000000000000000000000000000000..2617146f4dad45b0899f4ccebe3e8e012ac08550 --- /dev/null +++ b/mmpretrain/datasets/mnist.py @@ -0,0 +1,220 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import codecs +from typing import List, Optional +from urllib.parse import urljoin + +import mmengine.dist as dist +import numpy as np +import torch +from mmengine.fileio import LocalBackend, exists, get_file_backend, join_path + +from mmpretrain.registry import DATASETS +from .base_dataset import BaseDataset +from .categories import FASHIONMNIST_CATEGORITES, MNIST_CATEGORITES +from .utils import (download_and_extract_archive, open_maybe_compressed_file, + rm_suffix) + + +@DATASETS.register_module() +class MNIST(BaseDataset): + """`MNIST `_ Dataset. + + This implementation is modified from + https://github.com/pytorch/vision/blob/master/torchvision/datasets/mnist.py + + Args: + data_prefix (str): Prefix for data. + test_mode (bool): ``test_mode=True`` means in test phase. + It determines to use the training set or test set. + metainfo (dict, optional): Meta information for dataset, such as + categories information. Defaults to None. + data_root (str): The root directory for ``data_prefix``. + Defaults to ''. + download (bool): Whether to download the dataset if not exists. + Defaults to True. + **kwargs: Other keyword arguments in :class:`BaseDataset`. + """ # noqa: E501 + + url_prefix = 'http://yann.lecun.com/exdb/mnist/' + # train images and labels + train_list = [ + ['train-images-idx3-ubyte.gz', 'f68b3c2dcbeaaa9fbdd348bbdeb94873'], + ['train-labels-idx1-ubyte.gz', 'd53e105ee54ea40749a09fcbcd1e9432'], + ] + # test images and labels + test_list = [ + ['t10k-images-idx3-ubyte.gz', '9fb629c4189551a2d022fa330f9573f3'], + ['t10k-labels-idx1-ubyte.gz', 'ec29112dd5afa0611ce80d1b7f02629c'], + ] + METAINFO = {'classes': MNIST_CATEGORITES} + + def __init__(self, + data_prefix: str, + test_mode: bool, + metainfo: Optional[dict] = None, + data_root: str = '', + download: bool = True, + **kwargs): + self.download = download + super().__init__( + # The MNIST dataset doesn't need specify annotation file + ann_file='', + metainfo=metainfo, + data_root=data_root, + data_prefix=dict(root=data_prefix), + test_mode=test_mode, + **kwargs) + + def load_data_list(self): + """Load images and ground truth labels.""" + root = self.data_prefix['root'] + backend = get_file_backend(root, enable_singleton=True) + + if dist.is_main_process() and not self._check_exists(): + if not isinstance(backend, LocalBackend): + raise RuntimeError(f'The dataset on {root} is not integrated, ' + f'please manually handle it.') + + if self.download: + self._download() + else: + raise RuntimeError( + f'Cannot find {self.__class__.__name__} dataset in ' + f"{self.data_prefix['root']}, you can specify " + '`download=True` to download automatically.') + + dist.barrier() + assert self._check_exists(), \ + 'Download failed or shared storage is unavailable. Please ' \ + f'download the dataset manually through {self.url_prefix}.' 
+ + if not self.test_mode: + file_list = self.train_list + else: + file_list = self.test_list + + # load data from SN3 files + imgs = read_image_file(join_path(root, rm_suffix(file_list[0][0]))) + gt_labels = read_label_file( + join_path(root, rm_suffix(file_list[1][0]))) + + data_infos = [] + for img, gt_label in zip(imgs, gt_labels): + gt_label = np.array(gt_label, dtype=np.int64) + info = {'img': img.numpy(), 'gt_label': gt_label} + data_infos.append(info) + return data_infos + + def _check_exists(self): + """Check the exists of data files.""" + root = self.data_prefix['root'] + + for filename, _ in (self.train_list + self.test_list): + # get extracted filename of data + extract_filename = rm_suffix(filename) + fpath = join_path(root, extract_filename) + if not exists(fpath): + return False + return True + + def _download(self): + """Download and extract data files.""" + root = self.data_prefix['root'] + + for filename, md5 in (self.train_list + self.test_list): + url = urljoin(self.url_prefix, filename) + download_and_extract_archive( + url, download_root=root, filename=filename, md5=md5) + + def extra_repr(self) -> List[str]: + """The extra repr information of the dataset.""" + body = [f"Prefix of data: \t{self.data_prefix['root']}"] + return body + + +@DATASETS.register_module() +class FashionMNIST(MNIST): + """`Fashion-MNIST `_ + Dataset. + + Args: + data_prefix (str): Prefix for data. + test_mode (bool): ``test_mode=True`` means in test phase. + It determines to use the training set or test set. + metainfo (dict, optional): Meta information for dataset, such as + categories information. Defaults to None. + data_root (str): The root directory for ``data_prefix``. + Defaults to ''. + download (bool): Whether to download the dataset if not exists. + Defaults to True. + **kwargs: Other keyword arguments in :class:`BaseDataset`. + """ + + url_prefix = 'http://fashion-mnist.s3-website.eu-central-1.amazonaws.com/' + # train images and labels + train_list = [ + ['train-images-idx3-ubyte.gz', '8d4fb7e6c68d591d4c3dfef9ec88bf0d'], + ['train-labels-idx1-ubyte.gz', '25c81989df183df01b3e8a0aad5dffbe'], + ] + # test images and labels + test_list = [ + ['t10k-images-idx3-ubyte.gz', 'bef4ecab320f06d8554ea6380940ec79'], + ['t10k-labels-idx1-ubyte.gz', 'bb300cfdad3c16e7a12a480ee83cd310'], + ] + METAINFO = {'classes': FASHIONMNIST_CATEGORITES} + + +def get_int(b: bytes) -> int: + """Convert bytes to int.""" + return int(codecs.encode(b, 'hex'), 16) + + +def read_sn3_pascalvincent_tensor(path: str, + strict: bool = True) -> torch.Tensor: + """Read a SN3 file in "Pascal Vincent" format (Lush file 'libidx/idx- + io.lsh'). + + Argument may be a filename, compressed filename, or file object. 
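The `get_int` helper above reads big-endian integers, and the first four bytes of an IDX file encode both the element type and the dimensionality: the low byte is the number of dimensions and the next byte is the type code (8 means uint8). For the MNIST image file the magic is 0x00000803, i.e. a 3-D uint8 tensor:

magic = int.from_bytes(b'\x00\x00\x08\x03', 'big')
nd, ty = magic % 256, magic // 256
assert (nd, ty) == (3, 8)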
+ """ + # typemap + if not hasattr(read_sn3_pascalvincent_tensor, 'typemap'): + read_sn3_pascalvincent_tensor.typemap = { + 8: (torch.uint8, np.uint8, np.uint8), + 9: (torch.int8, np.int8, np.int8), + 11: (torch.int16, np.dtype('>i2'), 'i2'), + 12: (torch.int32, np.dtype('>i4'), 'i4'), + 13: (torch.float32, np.dtype('>f4'), 'f4'), + 14: (torch.float64, np.dtype('>f8'), 'f8') + } + # read + with open_maybe_compressed_file(path) as f: + data = f.read() + # parse + magic = get_int(data[0:4]) + nd = magic % 256 + ty = magic // 256 + assert nd >= 1 and nd <= 3 + assert ty >= 8 and ty <= 14 + m = read_sn3_pascalvincent_tensor.typemap[ty] + s = [get_int(data[4 * (i + 1):4 * (i + 2)]) for i in range(nd)] + parsed = np.frombuffer(data, dtype=m[1], offset=(4 * (nd + 1))) + assert parsed.shape[0] == np.prod(s) or not strict + return torch.from_numpy(parsed.astype(m[2], copy=False)).view(*s) + + +def read_label_file(path: str) -> torch.Tensor: + """Read labels from SN3 label file.""" + with open(path, 'rb') as f: + x = read_sn3_pascalvincent_tensor(f, strict=False) + assert (x.dtype == torch.uint8) + assert (x.ndimension() == 1) + return x.long() + + +def read_image_file(path: str) -> torch.Tensor: + """Read images from SN3 image file.""" + with open(path, 'rb') as f: + x = read_sn3_pascalvincent_tensor(f, strict=False) + assert (x.dtype == torch.uint8) + assert (x.ndimension() == 3) + return x diff --git a/mmpretrain/datasets/multi_label.py b/mmpretrain/datasets/multi_label.py new file mode 100644 index 0000000000000000000000000000000000000000..58a9c7cd5f097689d29700004e2ed815934a1594 --- /dev/null +++ b/mmpretrain/datasets/multi_label.py @@ -0,0 +1,85 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import List + +from mmpretrain.registry import DATASETS +from .base_dataset import BaseDataset + + +@DATASETS.register_module() +class MultiLabelDataset(BaseDataset): + """Multi-label Dataset. + + This dataset support annotation file in `OpenMMLab 2.0 style annotation + format`. + + The annotation format is shown as follows. + + .. code-block:: none + + { + "metainfo": + { + "classes":['A', 'B', 'C'....] + }, + "data_list": + [ + { + "img_path": "test_img1.jpg", + 'gt_label': [0, 1], + }, + { + "img_path": "test_img2.jpg", + 'gt_label': [2], + }, + ] + .... + } + + + Args: + ann_file (str): Annotation file path. + metainfo (dict, optional): Meta information for dataset, such as class + information. Defaults to None. + data_root (str): The root directory for ``data_prefix`` and + ``ann_file``. Defaults to ''. + data_prefix (str | dict): Prefix for training data. Defaults to ''. + filter_cfg (dict, optional): Config for filter data. Defaults to None. + indices (int or Sequence[int], optional): Support using first few + data in annotation file to facilitate training/testing on a smaller + dataset. Defaults to None which means using all ``data_infos``. + serialize_data (bool, optional): Whether to hold memory using + serialized objects, when enabled, data loader workers can use + shared RAM from master process instead of making a copy. Defaults + to True. + pipeline (list, optional): Processing pipeline. Defaults to []. + test_mode (bool, optional): ``test_mode=True`` means in test phase. + Defaults to False. + lazy_init (bool, optional): Whether to load annotation during + instantiation. In some cases, such as visualization, only the meta + information of the dataset is needed, which is not necessary to + load annotation file. 
``Basedataset`` can skip load annotations to + save time by set ``lazy_init=False``. Defaults to False. + max_refetch (int, optional): If ``Basedataset.prepare_data`` get a + None img. The maximum extra number of cycles to get a valid + image. Defaults to 1000. + classes (str | Sequence[str], optional): Specify names of classes. + + - If is string, it should be a file path, and the every line of + the file is a name of a class. + - If is a sequence of string, every item is a name of class. + - If is None, use categories information in ``metainfo`` argument, + annotation file or the class attribute ``METAINFO``. + + Defaults to None. + """ + + def get_cat_ids(self, idx: int) -> List[int]: + """Get category ids by index. + + Args: + idx (int): Index of data. + + Returns: + cat_ids (List[int]): Image categories of specified index. + """ + return self.get_data_info(idx)['gt_label'] diff --git a/mmpretrain/datasets/multi_task.py b/mmpretrain/datasets/multi_task.py new file mode 100644 index 0000000000000000000000000000000000000000..443df0e7d7de11962d472d33b25b4bbff562524f --- /dev/null +++ b/mmpretrain/datasets/multi_task.py @@ -0,0 +1,337 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import copy +import os.path as osp +from os import PathLike +from typing import Optional, Sequence + +import mmengine +from mmcv.transforms import Compose +from mmengine.fileio import get_file_backend + +from .builder import DATASETS + + +def expanduser(path): + if isinstance(path, (str, PathLike)): + return osp.expanduser(path) + else: + return path + + +def isabs(uri): + return osp.isabs(uri) or ('://' in uri) + + +@DATASETS.register_module() +class MultiTaskDataset: + """Custom dataset for multi-task dataset. + + To use the dataset, please generate and provide an annotation file in the + below format: + + .. code-block:: json + + { + "metainfo": { + "tasks": + [ + 'gender' + 'wear' + ] + }, + "data_list": [ + { + "img_path": "a.jpg", + gt_label:{ + "gender": 0, + "wear": [1, 0, 1, 0] + } + }, + { + "img_path": "b.jpg", + gt_label:{ + "gender": 1, + "wear": [1, 0, 1, 0] + } + } + ] + } + + Assume we put our dataset in the ``data/mydataset`` folder in the + repository and organize it as the below format: :: + + mmpretrain/ + └── data + └── mydataset + ├── annotation + │   ├── train.json + │   ├── test.json + │   └── val.json + ├── train + │   ├── a.jpg + │   └── ... + ├── test + │   ├── b.jpg + │   └── ... + └── val + ├── c.jpg + └── ... + + We can use the below config to build datasets: + + .. code:: python + + >>> from mmpretrain.datasets import build_dataset + >>> train_cfg = dict( + ... type="MultiTaskDataset", + ... ann_file="annotation/train.json", + ... data_root="data/mydataset", + ... # The `img_path` field in the train annotation file is relative + ... # to the `train` folder. + ... data_prefix='train', + ... ) + >>> train_dataset = build_dataset(train_cfg) + + Or we can put all files in the same folder: :: + + mmpretrain/ + └── data + └── mydataset + ├── train.json + ├── test.json + ├── val.json + ├── a.jpg + ├── b.jpg + ├── c.jpg + └── ... + + And we can use the below config to build datasets: + + .. code:: python + + >>> from mmpretrain.datasets import build_dataset + >>> train_cfg = dict( + ... type="MultiTaskDataset", + ... ann_file="train.json", + ... data_root="data/mydataset", + ... # the `data_prefix` is not required since all paths are + ... # relative to the `data_root`. + ... ) + >>> train_dataset = build_dataset(train_cfg) + + + Args: + ann_file (str): The annotation file path. 
It can be either absolute + path or relative path to the ``data_root``. + metainfo (dict, optional): The extra meta information. It should be + a dict with the same format as the ``"metainfo"`` field in the + annotation file. Defaults to None. + data_root (str, optional): The root path of the data directory. It's + the prefix of the ``data_prefix`` and the ``ann_file``. And it can + be a remote path like "s3://openmmlab/xxx/". Defaults to None. + data_prefix (str, optional): The base folder relative to the + ``data_root`` for the ``"img_path"`` field in the annotation file. + Defaults to None. + pipeline (Sequence[dict]): A list of dict, where each element + represents a operation defined in + :mod:`mmpretrain.datasets.pipelines`. Defaults to an empty tuple. + test_mode (bool): in train mode or test mode. Defaults to False. + """ + METAINFO = dict() + + def __init__(self, + ann_file: str, + metainfo: Optional[dict] = None, + data_root: Optional[str] = None, + data_prefix: Optional[str] = None, + pipeline: Sequence = (), + test_mode: bool = False): + + self.data_root = expanduser(data_root) + + # Inference the file client + if self.data_root is not None: + self.file_backend = get_file_backend(uri=self.data_root) + else: + self.file_backend = None + + self.ann_file = self._join_root(expanduser(ann_file)) + self.data_prefix = self._join_root(data_prefix) + + self.test_mode = test_mode + self.pipeline = Compose(pipeline) + self.data_list = self.load_data_list(self.ann_file, metainfo) + + def _join_root(self, path): + """Join ``self.data_root`` with the specified path. + + If the path is an absolute path, just return the path. And if the + path is None, return ``self.data_root``. + + Examples: + >>> self.data_root = 'a/b/c' + >>> self._join_root('d/e/') + 'a/b/c/d/e' + >>> self._join_root('https://openmmlab.com') + 'https://openmmlab.com' + >>> self._join_root(None) + 'a/b/c' + """ + if path is None: + return self.data_root + if isabs(path): + return path + + joined_path = self.file_backend.join_path(self.data_root, path) + return joined_path + + @classmethod + def _get_meta_info(cls, in_metainfo: dict = None) -> dict: + """Collect meta information from the dictionary of meta. + + Args: + in_metainfo (dict): Meta information dict. + + Returns: + dict: Parsed meta information. + """ + # `cls.METAINFO` will be overwritten by in_meta + metainfo = copy.deepcopy(cls.METAINFO) + if in_metainfo is None: + return metainfo + + metainfo.update(in_metainfo) + + return metainfo + + def load_data_list(self, ann_file, metainfo_override=None): + """Load annotations from an annotation file. + + Args: + ann_file (str): Absolute annotation file path if ``self.root=None`` + or relative path if ``self.root=/path/to/data/``. + + Returns: + list[dict]: A list of annotation. + """ + annotations = mmengine.load(ann_file) + if not isinstance(annotations, dict): + raise TypeError(f'The annotations loaded from annotation file ' + f'should be a dict, but got {type(annotations)}!') + if 'data_list' not in annotations: + raise ValueError('The annotation file must have the `data_list` ' + 'field.') + metainfo = annotations.get('metainfo', {}) + raw_data_list = annotations['data_list'] + + # Set meta information. 
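+        # Merge priority (low to high): ``cls.METAINFO`` -> ``metainfo`` in
+        # the annotation file -> the ``metainfo_override`` argument, i.e.
+        # later sources override earlier ones via ``_get_meta_info``.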
+ assert isinstance(metainfo, dict), 'The `metainfo` field in the '\ + f'annotation file should be a dict, but got {type(metainfo)}' + if metainfo_override is not None: + assert isinstance(metainfo_override, dict), 'The `metainfo` ' \ + f'argument should be a dict, but got {type(metainfo_override)}' + metainfo.update(metainfo_override) + self._metainfo = self._get_meta_info(metainfo) + + data_list = [] + for i, raw_data in enumerate(raw_data_list): + try: + data_list.append(self.parse_data_info(raw_data)) + except AssertionError as e: + raise RuntimeError( + f'The format check fails during parse the item {i} of ' + f'the annotation file with error: {e}') + return data_list + + def parse_data_info(self, raw_data): + """Parse raw annotation to target format. + + This method will return a dict which contains the data information of a + sample. + + Args: + raw_data (dict): Raw data information load from ``ann_file`` + + Returns: + dict: Parsed annotation. + """ + assert isinstance(raw_data, dict), \ + f'The item should be a dict, but got {type(raw_data)}' + assert 'img_path' in raw_data, \ + "The item doesn't have `img_path` field." + data = dict( + img_path=self._join_root(raw_data['img_path']), + gt_label=raw_data['gt_label'], + ) + return data + + @property + def metainfo(self) -> dict: + """Get meta information of dataset. + + Returns: + dict: meta information collected from ``cls.METAINFO``, + annotation file and metainfo argument during instantiation. + """ + return copy.deepcopy(self._metainfo) + + def prepare_data(self, idx): + """Get data processed by ``self.pipeline``. + + Args: + idx (int): The index of ``data_info``. + + Returns: + Any: Depends on ``self.pipeline``. + """ + results = copy.deepcopy(self.data_list[idx]) + return self.pipeline(results) + + def __len__(self): + """Get the length of the whole dataset. + + Returns: + int: The length of filtered dataset. + """ + return len(self.data_list) + + def __getitem__(self, idx): + """Get the idx-th image and data information of dataset after + ``self.pipeline``. + + Args: + idx (int): The index of of the data. + + Returns: + dict: The idx-th image and data information after + ``self.pipeline``. + """ + return self.prepare_data(idx) + + def __repr__(self): + """Print the basic information of the dataset. + + Returns: + str: Formatted string. + """ + head = 'Dataset ' + self.__class__.__name__ + body = [f'Number of samples: \t{self.__len__()}'] + if self.data_root is not None: + body.append(f'Root location: \t{self.data_root}') + body.append(f'Annotation file: \t{self.ann_file}') + if self.data_prefix is not None: + body.append(f'Prefix of images: \t{self.data_prefix}') + # -------------------- extra repr -------------------- + tasks = self.metainfo['tasks'] + body.append(f'For {len(tasks)} tasks') + for task in tasks: + body.append(f' {task} ') + # ---------------------------------------------------- + + if len(self.pipeline.transforms) > 0: + body.append('With transforms:') + for t in self.pipeline.transforms: + body.append(f' {t}') + + lines = [head] + [' ' * 4 + line for line in body] + return '\n'.join(lines) diff --git a/mmpretrain/datasets/nlvr2.py b/mmpretrain/datasets/nlvr2.py new file mode 100644 index 0000000000000000000000000000000000000000..0063090657714406049a6daa6fa3c0d868422590 --- /dev/null +++ b/mmpretrain/datasets/nlvr2.py @@ -0,0 +1,36 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
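+# Each line of the annotation file is expected to be a JSON object with at
+# least the fields 'identifier', 'sentence' and 'label' ('True'/'False');
+# the two images of a pair are resolved from the identifier prefix as
+# '<prefix>-img0.png' and '<prefix>-img1.png' in ``load_data_list`` below.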
+import json
+from typing import List
+
+from mmengine.fileio import get_file_backend, list_from_file
+
+from mmpretrain.registry import DATASETS
+from .base_dataset import BaseDataset
+
+
+@DATASETS.register_module()
+class NLVR2(BaseDataset):
+    """NLVR2 dataset."""
+
+    def load_data_list(self) -> List[dict]:
+        """Load data list."""
+
+        data_list = []
+        img_prefix = self.data_prefix['img_path']
+        file_backend = get_file_backend(img_prefix)
+        examples = list_from_file(self.ann_file)
+
+        for example in examples:
+            example = json.loads(example)
+            prefix = example['identifier'].rsplit('-', 1)[0]
+            train_data = {}
+            train_data['text'] = example['sentence']
+            train_data['gt_label'] = {'True': 1, 'False': 0}[example['label']]
+            train_data['img_path'] = [
+                file_backend.join_path(img_prefix, prefix + f'-img{i}.png')
+                for i in range(2)
+            ]
+
+            data_list.append(train_data)
+
+        return data_list
diff --git a/mmpretrain/datasets/oxfordiiitpet.py b/mmpretrain/datasets/oxfordiiitpet.py
new file mode 100644
index 0000000000000000000000000000000000000000..23c8b7db8679e99c6ed2698b9eb140cd6151d445
--- /dev/null
+++ b/mmpretrain/datasets/oxfordiiitpet.py
@@ -0,0 +1,97 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+from typing import List
+
+from mmengine import get_file_backend, list_from_file
+
+from mmpretrain.registry import DATASETS
+from .base_dataset import BaseDataset
+from .categories import OxfordIIITPet_CATEGORIES
+
+
+@DATASETS.register_module()
+class OxfordIIITPet(BaseDataset):
+    """The Oxford-IIIT Pets Dataset.
+
+    Support the `Oxford-IIIT Pets Dataset `_.
+    After downloading and decompression, the dataset directory structure is as follows.
+
+    Oxford-IIIT_Pets dataset directory: ::
+
+        Oxford-IIIT_Pets
+        ├── images
+        │   ├── Abyssinian_1.jpg
+        │   ├── Abyssinian_2.jpg
+        │   └── ...
+        ├── annotations
+        │   ├── trainval.txt
+        │   ├── test.txt
+        │   ├── list.txt
+        │   └── ...
+        └── ....
+
+    Args:
+        data_root (str): The root directory for the Oxford-IIIT Pets dataset.
+        split (str, optional): The dataset split, supports "trainval" and "test".
+            Defaults to "trainval".
+ + Examples: + >>> from mmpretrain.datasets import OxfordIIITPet + >>> train_dataset = OxfordIIITPet(data_root='data/Oxford-IIIT_Pets', split='trainval') + >>> train_dataset + Dataset OxfordIIITPet + Number of samples: 3680 + Number of categories: 37 + Root of dataset: data/Oxford-IIIT_Pets + >>> test_dataset = OxfordIIITPet(data_root='data/Oxford-IIIT_Pets', split='test') + >>> test_dataset + Dataset OxfordIIITPet + Number of samples: 3669 + Number of categories: 37 + Root of dataset: data/Oxford-IIIT_Pets + """ # noqa: E501 + + METAINFO = {'classes': OxfordIIITPet_CATEGORIES} + + def __init__(self, data_root: str, split: str = 'trainval', **kwargs): + + splits = ['trainval', 'test'] + assert split in splits, \ + f"The split must be one of {splits}, but get '{split}'" + self.split = split + + self.backend = get_file_backend(data_root, enable_singleton=True) + if split == 'trainval': + ann_file = self.backend.join_path('annotations', 'trainval.txt') + else: + ann_file = self.backend.join_path('annotations', 'test.txt') + + data_prefix = 'images' + test_mode = split == 'test' + + super(OxfordIIITPet, self).__init__( + ann_file=ann_file, + data_root=data_root, + data_prefix=data_prefix, + test_mode=test_mode, + **kwargs) + + def load_data_list(self): + """Load images and ground truth labels.""" + + pairs = list_from_file(self.ann_file) + data_list = [] + for pair in pairs: + img_name, class_id, _, _ = pair.split() + img_name = f'{img_name}.jpg' + img_path = self.backend.join_path(self.img_prefix, img_name) + gt_label = int(class_id) - 1 + info = dict(img_path=img_path, gt_label=gt_label) + data_list.append(info) + return data_list + + def extra_repr(self) -> List[str]: + """The extra repr information of the dataset.""" + body = [ + f'Root of dataset: \t{self.data_root}', + ] + return body diff --git a/mmpretrain/datasets/places205.py b/mmpretrain/datasets/places205.py new file mode 100644 index 0000000000000000000000000000000000000000..f3ba1ff631a7a4840b66cf63ec53585ec064560d --- /dev/null +++ b/mmpretrain/datasets/places205.py @@ -0,0 +1,40 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Optional, Union + +from mmpretrain.registry import DATASETS +from .categories import PLACES205_CATEGORIES +from .custom import CustomDataset + + +@DATASETS.register_module() +class Places205(CustomDataset): + """`Places205 `_ Dataset. + + Args: + data_root (str): The root directory for ``data_prefix`` and + ``ann_file``. Defaults to ''. + data_prefix (str | dict): Prefix for training data. Defaults + to ''. + ann_file (str): Annotation file path. Defaults to ''. + metainfo (dict, optional): Meta information for dataset, such as class + information. Defaults to None. + **kwargs: Other keyword arguments in :class:`CustomDataset` and + :class:`BaseDataset`. 
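+
+    Examples:
+        >>> # A minimal sketch; the paths below are placeholders and assume
+        >>> # images are grouped into one subfolder per category under
+        >>> # ``data_prefix``, as supported by :class:`CustomDataset`.
+        >>> from mmpretrain.datasets import Places205
+        >>> dataset = Places205(data_root='data/places205', data_prefix='images')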
+ """ + + IMG_EXTENSIONS = ('.jpg', '.jpeg', '.png', '.ppm', '.bmp', '.pgm', '.tif') + METAINFO = {'classes': PLACES205_CATEGORIES} + + def __init__(self, + data_root: str = '', + data_prefix: Union[str, dict] = '', + ann_file: str = '', + metainfo: Optional[dict] = None, + **kwargs): + kwargs = {'extensions': self.IMG_EXTENSIONS, **kwargs} + super().__init__( + data_root=data_root, + data_prefix=data_prefix, + ann_file=ann_file, + metainfo=metainfo, + **kwargs) diff --git a/mmpretrain/datasets/refcoco.py b/mmpretrain/datasets/refcoco.py new file mode 100644 index 0000000000000000000000000000000000000000..f4f2a943f73fdab493a47bbcd1d0ea6385ec60fa --- /dev/null +++ b/mmpretrain/datasets/refcoco.py @@ -0,0 +1,81 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import os.path as osp +from typing import List + +import mmengine +import numpy as np +from mmengine.dataset import BaseDataset +from pycocotools.coco import COCO + +from mmpretrain.registry import DATASETS + + +@DATASETS.register_module() +class RefCOCO(BaseDataset): + """RefCOCO dataset. + + Args: + ann_file (str): Annotation file path. + data_root (str): The root directory for ``data_prefix`` and + ``ann_file``. Defaults to ''. + data_prefix (str): Prefix for training data. + pipeline (Sequence): Processing pipeline. Defaults to an empty tuple. + **kwargs: Other keyword arguments in :class:`BaseDataset`. + """ + + def __init__(self, + data_root, + ann_file, + data_prefix, + split_file, + split='train', + **kwargs): + self.split_file = split_file + self.split = split + + super().__init__( + data_root=data_root, + data_prefix=dict(img_path=data_prefix), + ann_file=ann_file, + **kwargs, + ) + + def _join_prefix(self): + if not mmengine.is_abs(self.split_file) and self.split_file: + self.split_file = osp.join(self.data_root, self.split_file) + + return super()._join_prefix() + + def load_data_list(self) -> List[dict]: + """Load data list.""" + with mmengine.get_local_path(self.ann_file) as ann_file: + coco = COCO(ann_file) + splits = mmengine.load(self.split_file, file_format='pkl') + img_prefix = self.data_prefix['img_path'] + + data_list = [] + join_path = mmengine.fileio.get_file_backend(img_prefix).join_path + for refer in splits: + if refer['split'] != self.split: + continue + + ann = coco.anns[refer['ann_id']] + img = coco.imgs[ann['image_id']] + sentences = refer['sentences'] + bbox = np.array(ann['bbox'], dtype=np.float32) + bbox[2:4] = bbox[0:2] + bbox[2:4] # XYWH -> XYXY + + for sent in sentences: + data_info = { + 'img_path': join_path(img_prefix, img['file_name']), + 'image_id': ann['image_id'], + 'ann_id': ann['id'], + 'text': sent['sent'], + 'gt_bboxes': bbox[None, :], + } + data_list.append(data_info) + + if len(data_list) == 0: + raise ValueError(f'No sample in split "{self.split}".') + + return data_list diff --git a/mmpretrain/datasets/samplers/__init__.py b/mmpretrain/datasets/samplers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..2bccf9c34659e19764871a696260cf5884696ca1 --- /dev/null +++ b/mmpretrain/datasets/samplers/__init__.py @@ -0,0 +1,5 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+from .repeat_aug import RepeatAugSampler +from .sequential import SequentialSampler + +__all__ = ['RepeatAugSampler', 'SequentialSampler'] diff --git a/mmpretrain/datasets/samplers/repeat_aug.py b/mmpretrain/datasets/samplers/repeat_aug.py new file mode 100644 index 0000000000000000000000000000000000000000..d833a1954d7d9d181c368d5b3b956c25df241c1a --- /dev/null +++ b/mmpretrain/datasets/samplers/repeat_aug.py @@ -0,0 +1,101 @@ +import math +from typing import Iterator, Optional, Sized + +import torch +from mmengine.dist import get_dist_info, is_main_process, sync_random_seed +from torch.utils.data import Sampler + +from mmpretrain.registry import DATA_SAMPLERS + + +@DATA_SAMPLERS.register_module() +class RepeatAugSampler(Sampler): + """Sampler that restricts data loading to a subset of the dataset for + distributed, with repeated augmentation. It ensures that different each + augmented version of a sample will be visible to a different process (GPU). + Heavily based on torch.utils.data.DistributedSampler. + + This sampler was taken from + https://github.com/facebookresearch/deit/blob/0c4b8f60/samplers.py + Used in + Copyright (c) 2015-present, Facebook, Inc. + + Args: + dataset (Sized): The dataset. + shuffle (bool): Whether shuffle the dataset or not. Defaults to True. + num_repeats (int): The repeat times of every sample. Defaults to 3. + seed (int, optional): Random seed used to shuffle the sampler if + :attr:`shuffle=True`. This number should be identical across all + processes in the distributed group. Defaults to None. + """ + + def __init__(self, + dataset: Sized, + shuffle: bool = True, + num_repeats: int = 3, + seed: Optional[int] = None): + rank, world_size = get_dist_info() + self.rank = rank + self.world_size = world_size + + self.dataset = dataset + self.shuffle = shuffle + if not self.shuffle and is_main_process(): + from mmengine.logging import MMLogger + logger = MMLogger.get_current_instance() + logger.warning('The RepeatAugSampler always picks a ' + 'fixed part of data if `shuffle=False`.') + + if seed is None: + seed = sync_random_seed() + self.seed = seed + self.epoch = 0 + self.num_repeats = num_repeats + + # The number of repeated samples in the rank + self.num_samples = math.ceil( + len(self.dataset) * num_repeats / world_size) + # The total number of repeated samples in all ranks. + self.total_size = self.num_samples * world_size + # The number of selected samples in the rank + self.num_selected_samples = math.ceil(len(self.dataset) / world_size) + + def __iter__(self) -> Iterator[int]: + """Iterate the indices.""" + # deterministically shuffle based on epoch and seed + if self.shuffle: + g = torch.Generator() + g.manual_seed(self.seed + self.epoch) + indices = torch.randperm(len(self.dataset), generator=g).tolist() + else: + indices = list(range(len(self.dataset))) + + # produce repeats e.g. [0, 0, 0, 1, 1, 1, 2, 2, 2....] + indices = [x for x in indices for _ in range(self.num_repeats)] + # add extra samples to make it evenly divisible + padding_size = self.total_size - len(indices) + indices += indices[:padding_size] + assert len(indices) == self.total_size + + # subsample per rank + indices = indices[self.rank:self.total_size:self.world_size] + assert len(indices) == self.num_samples + + # return up to num selected samples + return iter(indices[:self.num_selected_samples]) + + def __len__(self) -> int: + """The number of samples in this rank.""" + return self.num_selected_samples + + def set_epoch(self, epoch: int) -> None: + """Sets the epoch for this sampler. 
+
+        When :attr:`shuffle=True`, this ensures all replicas use a different
+        random ordering for each epoch. Otherwise, the next iteration of this
+        sampler will yield the same ordering.
+
+        Args:
+            epoch (int): Epoch number.
+        """
+        self.epoch = epoch
diff --git a/mmpretrain/datasets/samplers/sequential.py b/mmpretrain/datasets/samplers/sequential.py
new file mode 100644
index 0000000000000000000000000000000000000000..e3b940c2eabc2ab9c2401cd1923776fc067e9f6c
--- /dev/null
+++ b/mmpretrain/datasets/samplers/sequential.py
@@ -0,0 +1,56 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+from typing import Iterator
+
+import torch
+from mmengine.dataset import DefaultSampler
+
+from mmpretrain.registry import DATA_SAMPLERS
+
+
+@DATA_SAMPLERS.register_module()
+class SequentialSampler(DefaultSampler):
+    """Sequential sampler which supports different subsample policies.
+
+    Args:
+        dataset (Sized): The dataset.
+        round_up (bool): Whether to add extra samples to make the number of
+            samples evenly divisible by the world size. Defaults to True.
+        subsample_type (str): The method to subsample data on different ranks.
+            Supported types:
+
+            - ``'default'``: Original torch behavior. Assign the examples one
+              by one to each GPU in turn. For instance, 8 examples on 2 GPUs,
+              GPU0: [0,2,4,6], GPU1: [1,3,5,7]
+            - ``'sequential'``: Split all examples into n chunks sequentially.
+              For instance, 8 examples on 2 GPUs,
+              GPU0: [0,1,2,3], GPU1: [4,5,6,7]
+    """
+
+    def __init__(self, subsample_type: str = 'default', **kwargs) -> None:
+        super().__init__(shuffle=False, **kwargs)
+
+        if subsample_type not in ['default', 'sequential']:
+            raise ValueError(f'Unsupported subsample type "{subsample_type}",'
+                             ' please choose from ["default", "sequential"]')
+        self.subsample_type = subsample_type
+
+    def __iter__(self) -> Iterator[int]:
+        """Iterate the indices."""
+        indices = torch.arange(len(self.dataset)).tolist()
+
+        # add extra samples to make it evenly divisible
+        if self.round_up:
+            indices = (
+                indices *
+                int(self.total_size / len(indices) + 1))[:self.total_size]
+
+        # subsample
+        if self.subsample_type == 'default':
+            indices = indices[self.rank:self.total_size:self.world_size]
+        elif self.subsample_type == 'sequential':
+            num_samples_per_rank = self.total_size // self.world_size
+            indices = indices[self.rank *
+                              num_samples_per_rank:(self.rank + 1) *
+                              num_samples_per_rank]
+
+        return iter(indices)
diff --git a/mmpretrain/datasets/scienceqa.py b/mmpretrain/datasets/scienceqa.py
new file mode 100644
index 0000000000000000000000000000000000000000..f0205a2cdd923dbc985f4990043a9d5c16ca125c
--- /dev/null
+++ b/mmpretrain/datasets/scienceqa.py
@@ -0,0 +1,104 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import os
+from typing import Callable, List, Sequence
+
+import mmengine
+from mmengine.dataset import BaseDataset
+from mmengine.fileio import get_file_backend
+
+from mmpretrain.registry import DATASETS
+
+
+@DATASETS.register_module()
+class ScienceQA(BaseDataset):
+    """ScienceQA dataset.
+
+    This dataset is used to load the multimodal data of the ScienceQA dataset.
+
+    Args:
+        data_root (str): The root directory for ``data_prefix`` and
+            ``ann_file``.
+        split (str): The split of dataset. Options: ``train``, ``val``,
+            ``test``, ``trainval``, ``minival``, and ``minitest``.
+        split_file (str): The split file of dataset, which contains the
+            ids of data samples in the split.
+        ann_file (str): Annotation file path.
+        data_prefix (dict): Prefix for data field. Defaults to
+            ``dict(img_path='')``.
+ pipeline (Sequence): Processing pipeline. Defaults to an empty tuple. + **kwargs: Other keyword arguments in :class:`BaseDataset`. + """ + + def __init__(self, + data_root: str, + split: str, + split_file: str, + ann_file: str, + data_prefix: dict = dict(img_path=''), + pipeline: Sequence[Callable] = (), + **kwargs): + + assert split in [ + 'train', 'val', 'test', 'trainval', 'minival', 'minitest' + ], f'Invalid split {split}' + self.split = split + self.split_file = os.path.join(data_root, split_file) + + super().__init__( + data_root=data_root, + ann_file=ann_file, + data_prefix=data_prefix, + pipeline=pipeline, + **kwargs) + + def load_data_list(self) -> List[dict]: + """Load data list.""" + img_prefix = self.data_prefix['img_path'] + annotations = mmengine.load(self.ann_file) + current_data_split = mmengine.load(self.split_file)[self.split] # noqa + + file_backend = get_file_backend(img_prefix) + + data_list = [] + for data_id in current_data_split: + ann = annotations[data_id] + data_info = { + 'image_id': + data_id, + 'question': + ann['question'], + 'choices': + ann['choices'], + 'gt_answer': + ann['answer'], + 'hint': + ann['hint'], + 'image_name': + ann['image'], + 'task': + ann['task'], + 'grade': + ann['grade'], + 'subject': + ann['subject'], + 'topic': + ann['topic'], + 'category': + ann['category'], + 'skill': + ann['skill'], + 'lecture': + ann['lecture'], + 'solution': + ann['solution'], + 'split': + ann['split'], + 'img_path': + file_backend.join_path(img_prefix, data_id, ann['image']) + if ann['image'] is not None else None, + 'has_image': + True if ann['image'] is not None else False, + } + data_list.append(data_info) + + return data_list diff --git a/mmpretrain/datasets/stanfordcars.py b/mmpretrain/datasets/stanfordcars.py new file mode 100644 index 0000000000000000000000000000000000000000..355697943cf693869f35f2a0bd71abdfa0396722 --- /dev/null +++ b/mmpretrain/datasets/stanfordcars.py @@ -0,0 +1,148 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import List + +import mat4py +from mmengine import get_file_backend + +from mmpretrain.registry import DATASETS +from .base_dataset import BaseDataset +from .categories import STANFORDCARS_CATEGORIES + + +@DATASETS.register_module() +class StanfordCars(BaseDataset): + """The Stanford Cars Dataset. + + Support the `Stanford Cars Dataset `_ Dataset. + The official website provides two ways to organize the dataset. + Therefore, after downloading and decompression, the dataset directory structure is as follows. + + Stanford Cars dataset directory: :: + + Stanford_Cars + ├── car_ims + │ ├── 00001.jpg + │ ├── 00002.jpg + │ └── ... + └── cars_annos.mat + + or :: + + Stanford_Cars + ├── cars_train + │ ├── 00001.jpg + │ ├── 00002.jpg + │ └── ... + ├── cars_test + │ ├── 00001.jpg + │ ├── 00002.jpg + │ └── ... + └── devkit + ├── cars_meta.mat + ├── cars_train_annos.mat + ├── cars_test_annos.mat + ├── cars_test_annoswithlabels.mat + ├── eval_train.m + └── train_perfect_preds.txt + + Args: + data_root (str): The root directory for Stanford Cars dataset. + split (str, optional): The dataset split, supports "train" + and "test". Default to "train". 
+ + Examples: + >>> from mmpretrain.datasets import StanfordCars + >>> train_dataset = StanfordCars(data_root='data/Stanford_Cars', split='train') + >>> train_dataset + Dataset StanfordCars + Number of samples: 8144 + Number of categories: 196 + Root of dataset: data/Stanford_Cars + >>> test_dataset = StanfordCars(data_root='data/Stanford_Cars', split='test') + >>> test_dataset + Dataset StanfordCars + Number of samples: 8041 + Number of categories: 196 + Root of dataset: data/Stanford_Cars + """ # noqa: E501 + + METAINFO = {'classes': STANFORDCARS_CATEGORIES} + + def __init__(self, data_root: str, split: str = 'train', **kwargs): + + splits = ['train', 'test'] + assert split in splits, \ + f"The split must be one of {splits}, but get '{split}'" + self.split = split + + test_mode = split == 'test' + self.backend = get_file_backend(data_root, enable_singleton=True) + + anno_file_path = self.backend.join_path(data_root, 'cars_annos.mat') + if self.backend.exists(anno_file_path): + ann_file = 'cars_annos.mat' + data_prefix = '' + else: + if test_mode: + ann_file = self.backend.join_path( + 'devkit', 'cars_test_annos_withlabels.mat') + data_prefix = 'cars_test' + else: + ann_file = self.backend.join_path('devkit', + 'cars_train_annos.mat') + data_prefix = 'cars_train' + + if not self.backend.exists( + self.backend.join_path(data_root, ann_file)): + doc_url = 'https://mmpretrain.readthedocs.io/en/latest/api/datasets.html#stanfordcars' # noqa: E501 + raise RuntimeError( + f'The dataset is incorrectly organized, please \ + refer to {doc_url} and reorganize your folders.') + + super(StanfordCars, self).__init__( + ann_file=ann_file, + data_root=data_root, + data_prefix=data_prefix, + test_mode=test_mode, + **kwargs) + + def load_data_list(self): + data = mat4py.loadmat(self.ann_file)['annotations'] + + data_list = [] + if 'test' in data.keys(): + # first way + img_paths, labels, test = data['relative_im_path'], data[ + 'class'], data['test'] + num = len(img_paths) + assert num == len(labels) == len(test), 'get error ann file' + for i in range(num): + if not self.test_mode and test[i] == 1: + continue + if self.test_mode and test[i] == 0: + continue + img_path = self.backend.join_path(self.img_prefix, + img_paths[i]) + gt_label = labels[i] - 1 + info = dict(img_path=img_path, gt_label=gt_label) + data_list.append(info) + else: + # second way + img_names, labels = data['fname'], data['class'] + num = len(img_names) + assert num == len(labels), 'get error ann file' + for i in range(num): + img_path = self.backend.join_path(self.img_prefix, + img_names[i]) + gt_label = labels[i] - 1 + info = dict(img_path=img_path, gt_label=gt_label) + data_list.append(info) + + return data_list + + def extra_repr(self) -> List[str]: + """The extra repr information of the dataset.""" + body = [ + f'Root of dataset: \t{self.data_root}', + ] + return body diff --git a/mmpretrain/datasets/sun397.py b/mmpretrain/datasets/sun397.py new file mode 100644 index 0000000000000000000000000000000000000000..ac7e9efcca0ad8bdfdec5fe90afa60ed4f20fc91 --- /dev/null +++ b/mmpretrain/datasets/sun397.py @@ -0,0 +1,225 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import List + +from mmengine import get_file_backend, list_from_file + +from mmpretrain.registry import DATASETS +from .base_dataset import BaseDataset +from .categories import SUN397_CATEGORIES + +# Note that some images are not a jpg file although the name ends +# with jpg and therefore cannot be read properly. So we provide +# a list to skip these files. 
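+# The entries below use the same relative-path format as the lines in the
+# ``Partitions/*.txt`` split files, so they can be dropped with a simple
+# membership check in ``load_data_list``.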
+INVALID = [ + '/a/assembly_line/sun_ajckcfldgdrdjogj.jpg', + '/a/auto_factory/sun_apfsprenzdnzbhmt.jpg', + '/b/baggage_claim/sun_avittiqqaiibgcau.jpg', + '/b/batters_box/sun_alqlfpgtbgggezyr.jpg', + '/b/bow_window/indoor/sun_ahsholsagvlrsboa.jpg', + '/b/bow_window/indoor/sun_aioomcoujmmcxkkx.jpg', + '/b/bow_window/outdoor/sun_atgtjdpqikjmllth.jpg', + '/c/carrousel/sun_atsgphqympojgxnc.jpg', + '/c/carrousel/sun_auzitjuirwolazns.jpg', + '/c/church/outdoor/sun_boagasgfltequmal.jpg', + '/c/church/outdoor/sun_brhmnwzzbkphcvfo.jpg', + '/c/church/outdoor/sun_byjkqzybxpjnuofa.jpg', + '/c/corridor/sun_aznefxvocwpgimko.jpg', + '/d/dentists_office/sun_aaefsoauqlcsihou.jpg', + '/d/diner/indoor/sun_apswilaujhntrybg.jpg', + '/e/elevator/door/sun_aaudobqlphijkjdv.jpg', + '/f/fastfood_restaurant/sun_axeniwtesffxqedr.jpg', + '/f/fire_station/sun_bjyapttwilyyuxqm.jpg', + '/f/fountain/sun_axgmpbdyvqhtkhee.jpg', + '/h/hospital_room/sun_ahokhhxjiclpxqqa.jpg', + '/o/oast_house/sun_bqsrrygxyrutgjve.jpg', + '/r/restaurant_patio/sun_aurwypviprwycame.jpg', + '/s/ski_resort/sun_bplmntyzoiobcqhp.jpg', + '/w/wine_cellar/bottle_storage/sun_afmzwxkzmxkbamqi.jpg', + '/w/wine_cellar/bottle_storage/sun_ahyymswdjejrbhyb.jpg', + '/w/wine_cellar/bottle_storage/sun_avnttpxamufejbfe.jpg', + '/a/archive/sun_awgsrbljlsvhqjij.jpg', + '/a/art_school/sun_aabogqsjulyvmcse.jpg', + '/a/art_school/sun_apnzojafyvkariue.jpg', + '/b/ball_pit/sun_atjhwqngtoeuwhso.jpg', + '/b/bow_window/indoor/sun_asxvsqbexmmtqmht.jpg', + '/b/bow_window/indoor/sun_abeugxecxrwzmffp.jpg', + '/b/bow_window/outdoor/sun_auwcqhrtzkgihvlv.jpg', + '/b/bow_window/outdoor/sun_apnvdyecnjjmcuhi.jpg', + '/c/childs_room/sun_alggivksjwwiklmt.jpg', + '/c/control_tower/outdoor/sun_avbcxakrvpomqdgr.jpg', + '/d/diner/indoor/sun_ajmzozstvsxisvgx.jpg', + '/e/elevator/door/sun_aaqsyluqbluugqgy.jpg', + '/f/fastfood_restaurant/sun_aevchxlxoruhxgrb.jpg', + '/f/firing_range/indoor/sun_affrzvahwjorpalo.jpg', + '/f/formal_garden/sun_bjvrlaeatjufekft.jpg', + '/g/garage/indoor/sun_akbocuwclkxqlofx.jpg', + '/g/greenhouse/indoor/sun_addirvgtxfbndlwf.jpg', + '/k/kindergarden_classroom/sun_ajtpaahilrqzarri.jpg', + '/l/laundromat/sun_afrrjykuhhlwiwun.jpg', + '/m/music_studio/sun_bsntklkmwqgnjrjj.jpg', + '/t/track/outdoor/sun_aophkoiosslinihb.jpg', + '/a/archive/sun_aegmzltkiwyevpwa.jpg', + '/a/auto_factory/sun_aybymzvbxgvcrwgn.jpg', + '/b/baggage_claim/sun_atpmiqmnxjpgqsxi.jpg', + '/b/baggage_claim/sun_ajffcdpsvgqfzoxx.jpg', + '/b/bamboo_forest/sun_ausmxphosyahoyjo.jpg', + '/b/batters_box/sun_aaeheulsicxtxnbu.jpg', + '/c/carrousel/sun_arjrjcxemhttubqz.jpg', + '/c/chicken_coop/outdoor/sun_abcegmmdbizqkpgh.jpg', + '/c/control_tower/outdoor/sun_axhjfpkxdvqdfkyr.jpg', + '/d/diner/indoor/sun_apaotiublwqeowck.jpg', + '/f/fastfood_restaurant/sun_anexashcgmxdbmxq.jpg', + '/l/landing_deck/sun_aizahnjfkuurjibw.jpg', + '/n/nuclear_power_plant/outdoor/sun_aoblfvgyleweqanr.jpg', + '/w/waiting_room/sun_aicytusmthfvqcwc.jpg', + '/b/bow_window/indoor/sun_asmvdfnjlulewkpr.jpg', + '/b/bus_interior/sun_adhktvidwzmodeou.jpg', + '/c/catacomb/sun_algnawesgjzzmcqd.jpg', + '/c/church/outdoor/sun_baihxlseimcsdhdx.jpg', + '/d/diner/indoor/sun_agoyalzcawgxodbm.jpg', + '/e/elevator_shaft/sun_awaitimkinrjaybl.jpg', + '/f/fastfood_restaurant/sun_aplvzfbmtqtbsvbx.jpg', + '/g/greenhouse/indoor/sun_bkccvyfpwetwjuhk.jpg', + '/c/car_interior/backseat/sun_adexwfoqdyhowxpu.jpg', + '/c/church/outdoor/sun_blmmweiumednscuf.jpg', + '/f/fire_station/sun_bibntbsuunbsdrum.jpg', + '/g/game_room/sun_aopfaqlllpvzhrak.jpg', + 
'/u/underwater/coral_reef/sun_biiueajvszaxqopo.jpg', + '/a/airplane_cabin/sun_arqyikigkyfpegug.jpg', + '/b/badminton_court/indoor/sun_amppvxecgtjpfold.jpg', + '/c/carrousel/sun_anxtrtieimkpmhvk.jpg', + '/c/computer_room/sun_aebgvpgtwoqbfyvl.jpg', + '/f/fire_escape/sun_atbraxuwwlvdoolv.jpg', + '/k/kasbah/sun_abxkkoielpavsouu.jpg', + '/t/tower/sun_bccqnzcvqkiwicjt.jpg', + '/a/archive/sun_afngadshxudodkct.jpg', + '/b/bow_window/indoor/sun_awnrlipyxpgxxgxz.jpg', + '/c/control_tower/outdoor/sun_arohngcbtsvbthho.jpg', + '/f/fire_station/sun_brbskkfgghbfvgkk.jpg', + '/r/restaurant_patio/sun_amjfbqzfgxarrpec.jpg', + '/v/vineyard/sun_bdxhnbgbnolddswz.jpg', + '/b/baggage_claim/sun_axrtsmillrglugia.jpg', + '/d/diner/indoor/sun_alaqevbwpjaqqdqz.jpg', + '/l/landing_deck/sun_acodgoamhgnnbmvr.jpg', + '/c/carrousel/sun_adsafgyrinnekycc.jpg', + '/c/church/outdoor/sun_bzqhuwshtdgakkay.jpg', + '/c/closet/sun_absahzamlrylkxyn.jpg', + '/f/fire_escape/sun_acdthenaosuqcoqn.jpg', + '/b/butchers_shop/sun_asrdgbefoszenfex.jpg', + '/c/church/outdoor/sun_bzfyucfrdigaqneg.jpg', + '/c/church/outdoor/sun_byzxhknqrejdajxi.jpg', + '/c/cockpit/sun_ajkulpqauavrmxae.jpg', + '/l/living_room/sun_aefoqbeatyufobtx.jpg', + '/s/supermarket/sun_attvxbzocurnddbz.jpg', + '/c/closet/sun_aqnutmwfkypmrnfy.jpg', + '/f/fire_station/sun_bttrtzktpbymxkmf.jpg', + '/s/shopping_mall/indoor/sun_avwzjsijaxnwuzjx.jpg', + '/w/windmill/sun_blvczkyqbmabzeej.jpg', + '/c/chicken_coop/outdoor/sun_amaonsnnkskxwmrj.jpg', + '/s/swimming_pool/outdoor/sun_bslaihiqlhfewtzn.jpg', + '/u/underwater/coral_reef/sun_bhcrnmvbgnkvcvkr.jpg', + '/d/dining_room/sun_azlxdhiajwrhaivq.jpg', + '/c/church/outdoor/sun_bnunxbznqnvgeykx.jpg', + '/c/corridor/sun_aspwpqqlcwzfanvl.jpg', + '/r/restaurant_patio/sun_awcbpizjbudjvrhs.jpg', + '/b/ball_pit/sun_avdnmemjrgrbkwjm.jpg', +] + + +@DATASETS.register_module() +class SUN397(BaseDataset): + """The SUN397 Dataset. + + Support the `SUN397 Dataset `_ Dataset. + After downloading and decompression, the dataset directory structure is as follows. + + SUN397 dataset directory: :: + + SUN397 + ├── SUN397 + │ ├── a + │ │ ├── abbey + │ | | ├── sun_aaalbzqrimafwbiv.jpg + │ | | └── ... + │ │ ├── airplane_cabin + │ | | ├── sun_aadqdkqaslqqoblu.jpg + │ | | └── ... + │ | └── ... + │ ├── b + │ │ └── ... + │ ├── c + │ │ └── ... + │ └── ... + └── Partitions + ├── ClassName.txt + ├── Training_01.txt + ├── Testing_01.txt + └── ... + + Args: + data_root (str): The root directory for Stanford Cars dataset. + split (str, optional): The dataset split, supports "train" and "test". + Default to "train". 
+ + Examples: + >>> from mmpretrain.datasets import SUN397 + >>> train_dataset = SUN397(data_root='data/SUN397', split='train') + >>> train_dataset + Dataset SUN397 + Number of samples: 19824 + Number of categories: 397 + Root of dataset: data/SUN397 + >>> test_dataset = SUN397(data_root='data/SUN397', split='test') + >>> test_dataset + Dataset SUN397 + Number of samples: 19829 + Number of categories: 397 + Root of dataset: data/SUN397 + """ # noqa: E501 + + METAINFO = {'classes': SUN397_CATEGORIES} + + def __init__(self, data_root: str, split: str = 'train', **kwargs): + + splits = ['train', 'test'] + assert split in splits, \ + f"The split must be one of {splits}, but get '{split}'" + self.split = split + + self.backend = get_file_backend(data_root, enable_singleton=True) + if split == 'train': + ann_file = self.backend.join_path('Partitions', 'Training_01.txt') + else: + ann_file = self.backend.join_path('Partitions', 'Testing_01.txt') + + data_prefix = 'SUN397' + test_mode = split == 'test' + + super(SUN397, self).__init__( + ann_file=ann_file, + data_root=data_root, + test_mode=test_mode, + data_prefix=data_prefix, + **kwargs) + + def load_data_list(self): + pairs = list_from_file(self.ann_file) + data_list = [] + for pair in pairs: + if pair in INVALID: + continue + img_path = self.backend.join_path(self.img_prefix, pair[1:]) + items = pair.split('/') + class_name = '_'.join(items[2:-1]) + gt_label = self.METAINFO['classes'].index(class_name) + info = dict(img_path=img_path, gt_label=gt_label) + data_list.append(info) + + return data_list + + def extra_repr(self) -> List[str]: + """The extra repr information of the dataset.""" + body = [ + f'Root of dataset: \t{self.data_root}', + ] + return body diff --git a/mmpretrain/datasets/transforms/__init__.py b/mmpretrain/datasets/transforms/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..88c72ca18b2118d9c62a35704d854e8d9dad7d6e --- /dev/null +++ b/mmpretrain/datasets/transforms/__init__.py @@ -0,0 +1,36 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
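+# The MMCV transforms imported below are additionally registered into
+# mmpretrain's own ``TRANSFORMS`` registry (see the ``for`` loop after the
+# imports) so that configs can refer to them by class name within the
+# mmpretrain scope.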
+from mmcv.transforms import (CenterCrop, LoadImageFromFile, Normalize, + RandomFlip, RandomGrayscale, RandomResize, Resize) + +from mmpretrain.registry import TRANSFORMS +from .auto_augment import (AutoAugment, AutoContrast, BaseAugTransform, + Brightness, ColorTransform, Contrast, Cutout, + Equalize, GaussianBlur, Invert, Posterize, + RandAugment, Rotate, Sharpness, Shear, Solarize, + SolarizeAdd, Translate) +from .formatting import (Collect, NumpyToPIL, PackInputs, PackMultiTaskInputs, + PILToNumpy, Transpose) +from .processing import (Albumentations, BEiTMaskGenerator, CleanCaption, + ColorJitter, EfficientNetCenterCrop, + EfficientNetRandomCrop, Lighting, RandomCrop, + RandomErasing, RandomResizedCrop, RandomTranslatePad, + ResizeEdge, SimMIMMaskGenerator) +from .wrappers import ApplyToList, MultiView + +for t in (CenterCrop, LoadImageFromFile, Normalize, RandomFlip, + RandomGrayscale, RandomResize, Resize): + TRANSFORMS.register_module(module=t) + +__all__ = [ + 'NumpyToPIL', 'PILToNumpy', 'Transpose', 'Collect', 'RandomCrop', + 'RandomResizedCrop', 'Shear', 'Translate', 'Rotate', 'Invert', + 'ColorTransform', 'Solarize', 'Posterize', 'AutoContrast', 'Equalize', + 'Contrast', 'Brightness', 'Sharpness', 'AutoAugment', 'SolarizeAdd', + 'Cutout', 'RandAugment', 'Lighting', 'ColorJitter', 'RandomErasing', + 'PackInputs', 'Albumentations', 'EfficientNetRandomCrop', + 'EfficientNetCenterCrop', 'ResizeEdge', 'BaseAugTransform', + 'PackMultiTaskInputs', 'GaussianBlur', 'BEiTMaskGenerator', + 'SimMIMMaskGenerator', 'CenterCrop', 'LoadImageFromFile', 'Normalize', + 'RandomFlip', 'RandomGrayscale', 'RandomResize', 'Resize', 'MultiView', + 'ApplyToList', 'CleanCaption', 'RandomTranslatePad' +] diff --git a/mmpretrain/datasets/transforms/auto_augment.py b/mmpretrain/datasets/transforms/auto_augment.py new file mode 100644 index 0000000000000000000000000000000000000000..03b057b850a4fd797f8f5c0672f60c6c20e44273 --- /dev/null +++ b/mmpretrain/datasets/transforms/auto_augment.py @@ -0,0 +1,1244 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import inspect +from copy import deepcopy +from math import ceil +from numbers import Number +from typing import List, Optional, Sequence, Tuple, Union + +import mmcv +import numpy as np +from mmcv.transforms import BaseTransform, Compose, RandomChoice +from mmcv.transforms.utils import cache_randomness +from mmengine.utils import is_list_of, is_seq_of +from PIL import Image, ImageFilter + +from mmpretrain.registry import TRANSFORMS + + +def merge_hparams(policy: dict, hparams: dict) -> dict: + """Merge hyperparameters into policy config. + + Only merge partial hyperparameters required of the policy. + + Args: + policy (dict): Original policy config dict. + hparams (dict): Hyperparameters need to be merged. + + Returns: + dict: Policy config dict after adding ``hparams``. + """ + policy = deepcopy(policy) + op = TRANSFORMS.get(policy['type']) + assert op is not None, f'Invalid policy type "{policy["type"]}".' + + op_args = inspect.getfullargspec(op.__init__).args + for key, value in hparams.items(): + if key in op_args and key not in policy: + policy[key] = value + return policy + + +@TRANSFORMS.register_module() +class AutoAugment(RandomChoice): + """Auto augmentation. + + This data augmentation is proposed in `AutoAugment: Learning Augmentation + Policies from Data `_. + + Args: + policies (str | list[list[dict]]): The policies of auto augmentation. + If string, use preset policies collection like "imagenet". 
If list, + Each item is a sub policies, composed by several augmentation + policy dicts. When AutoAugment is called, a random sub policies in + ``policies`` will be selected to augment images. + hparams (dict): Configs of hyperparameters. Hyperparameters will be + used in policies that require these arguments if these arguments + are not set in policy dicts. Defaults to ``dict(pad_val=128)``. + + .. admonition:: Available preset policies + + - ``"imagenet"``: Policy for ImageNet, come from + `DeepVoltaire/AutoAugment`_ + + .. _DeepVoltaire/AutoAugment: https://github.com/DeepVoltaire/AutoAugment + """ + + def __init__(self, + policies: Union[str, List[List[dict]]], + hparams: dict = dict(pad_val=128)): + if isinstance(policies, str): + assert policies in AUTOAUG_POLICIES, 'Invalid policies, ' \ + f'please choose from {list(AUTOAUG_POLICIES.keys())}.' + policies = AUTOAUG_POLICIES[policies] + self.hparams = hparams + self.policies = [[merge_hparams(t, hparams) for t in sub] + for sub in policies] + transforms = [[TRANSFORMS.build(t) for t in sub] for sub in policies] + + super().__init__(transforms=transforms) + + def __repr__(self) -> str: + policies_str = '' + for sub in self.policies: + policies_str += '\n ' + ', \t'.join([t['type'] for t in sub]) + + repr_str = self.__class__.__name__ + repr_str += f'(policies:{policies_str}\n)' + return repr_str + + +@TRANSFORMS.register_module() +class RandAugment(BaseTransform): + r"""Random augmentation. + + This data augmentation is proposed in `RandAugment: Practical automated + data augmentation with a reduced search space + `_. + + Args: + policies (str | list[dict]): The policies of random augmentation. + If string, use preset policies collection like "timm_increasing". + If list, each item is one specific augmentation policy dict. + The policy dict shall should have these keys: + + - ``type`` (str), The type of augmentation. + - ``magnitude_range`` (Sequence[number], optional): For those + augmentation have magnitude, you need to specify the magnitude + level mapping range. For example, assume ``total_level`` is 10, + ``magnitude_level=3`` specify magnitude is 3 if + ``magnitude_range=(0, 10)`` while specify magnitude is 7 if + ``magnitude_range=(10, 0)``. + - other keyword arguments of the augmentation. + + num_policies (int): Number of policies to select from policies each + time. + magnitude_level (int | float): Magnitude level for all the augmentation + selected. + magnitude_std (Number | str): Deviation of magnitude noise applied. + + - If positive number, the magnitude obeys normal distribution + :math:`\mathcal{N}(magnitude_level, magnitude_std)`. + - If 0 or negative number, magnitude remains unchanged. + - If str "inf", the magnitude obeys uniform distribution + :math:`Uniform(min, magnitude)`. + total_level (int | float): Total level for the magnitude. Defaults to + 10. + hparams (dict): Configs of hyperparameters. Hyperparameters will be + used in policies that require these arguments if these arguments + are not set in policy dicts. Defaults to ``dict(pad_val=128)``. + + .. admonition:: Available preset policies + + - ``"timm_increasing"``: The ``_RAND_INCREASING_TRANSFORMS`` policy + from `timm`_ + + .. 
_timm: https://github.com/rwightman/pytorch-image-models + + Examples: + + To use "timm-increasing" policies collection, select two policies every + time, and magnitude_level of every policy is 6 (total is 10 by default) + + >>> import numpy as np + >>> from mmpretrain.datasets import RandAugment + >>> transform = RandAugment( + ... policies='timm_increasing', + ... num_policies=2, + ... magnitude_level=6, + ... ) + >>> data = {'img': np.random.randint(0, 256, (224, 224, 3))} + >>> results = transform(data) + >>> print(results['img'].shape) + (224, 224, 3) + + If you want the ``magnitude_level`` randomly changes every time, you + can use ``magnitude_std`` to specify the random distribution. For + example, a normal distribution :math:`\mathcal{N}(6, 0.5)`. + + >>> transform = RandAugment( + ... policies='timm_increasing', + ... num_policies=2, + ... magnitude_level=6, + ... magnitude_std=0.5, + ... ) + + You can also use your own policies: + + >>> policies = [ + ... dict(type='AutoContrast'), + ... dict(type='Rotate', magnitude_range=(0, 30)), + ... dict(type='ColorTransform', magnitude_range=(0, 0.9)), + ... ] + >>> transform = RandAugment( + ... policies=policies, + ... num_policies=2, + ... magnitude_level=6 + ... ) + + Note: + ``magnitude_std`` will introduce some randomness to policy, modified by + https://github.com/rwightman/pytorch-image-models. + + When magnitude_std=0, we calculate the magnitude as follows: + + .. math:: + \text{magnitude} = \frac{\text{magnitude_level}} + {\text{totallevel}} \times (\text{val2} - \text{val1}) + + \text{val1} + """ + + def __init__(self, + policies: Union[str, List[dict]], + num_policies: int, + magnitude_level: int, + magnitude_std: Union[Number, str] = 0., + total_level: int = 10, + hparams: dict = dict(pad_val=128)): + if isinstance(policies, str): + assert policies in RANDAUG_POLICIES, 'Invalid policies, ' \ + f'please choose from {list(RANDAUG_POLICIES.keys())}.' + policies = RANDAUG_POLICIES[policies] + + assert is_list_of(policies, dict), 'policies must be a list of dict.' + + assert isinstance(magnitude_std, (Number, str)), \ + '`magnitude_std` must be of number or str type, ' \ + f'got {type(magnitude_std)} instead.' + if isinstance(magnitude_std, str): + assert magnitude_std == 'inf', \ + '`magnitude_std` must be of number or "inf", ' \ + f'got "{magnitude_std}" instead.' + + assert num_policies > 0, 'num_policies must be greater than 0.' + assert magnitude_level >= 0, 'magnitude_level must be no less than 0.' + assert total_level > 0, 'total_level must be greater than 0.' + + self.num_policies = num_policies + self.magnitude_level = magnitude_level + self.magnitude_std = magnitude_std + self.total_level = total_level + self.hparams = hparams + self.policies = [] + self.transforms = [] + + randaug_cfg = dict( + magnitude_level=magnitude_level, + total_level=total_level, + magnitude_std=magnitude_std) + + for policy in policies: + self._check_policy(policy) + policy = merge_hparams(policy, hparams) + policy.pop('magnitude_key', None) # For backward compatibility + if 'magnitude_range' in policy: + policy.update(randaug_cfg) + self.policies.append(policy) + self.transforms.append(TRANSFORMS.build(policy)) + + def __iter__(self): + """Iterate all transforms.""" + return iter(self.transforms) + + def _check_policy(self, policy): + """Check whether the sub-policy dict is available.""" + assert isinstance(policy, dict) and 'type' in policy, \ + 'Each policy must be a dict with key "type".' 
+ type_name = policy['type'] + + if 'magnitude_range' in policy: + magnitude_range = policy['magnitude_range'] + assert is_seq_of(magnitude_range, Number), \ + f'`magnitude_range` of RandAugment policy {type_name} ' \ + 'should be a sequence with two numbers.' + + @cache_randomness + def random_policy_indices(self) -> np.ndarray: + """Return the random chosen transform indices.""" + indices = np.arange(len(self.policies)) + return np.random.choice(indices, size=self.num_policies).tolist() + + def transform(self, results: dict) -> Optional[dict]: + """Randomly choose a sub-policy to apply.""" + + chosen_policies = [ + self.transforms[i] for i in self.random_policy_indices() + ] + + sub_pipeline = Compose(chosen_policies) + return sub_pipeline(results) + + def __repr__(self) -> str: + policies_str = '' + for policy in self.policies: + policies_str += '\n ' + f'{policy["type"]}' + if 'magnitude_range' in policy: + val1, val2 = policy['magnitude_range'] + policies_str += f' ({val1}, {val2})' + + repr_str = self.__class__.__name__ + repr_str += f'(num_policies={self.num_policies}, ' + repr_str += f'magnitude_level={self.magnitude_level}, ' + repr_str += f'total_level={self.total_level}, ' + repr_str += f'policies:{policies_str}\n)' + return repr_str + + +class BaseAugTransform(BaseTransform): + r"""The base class of augmentation transform for RandAugment. + + This class provides several common attributions and methods to support the + magnitude level mapping and magnitude level randomness in + :class:`RandAugment`. + + Args: + magnitude_level (int | float): Magnitude level. + magnitude_range (Sequence[number], optional): For augmentation have + magnitude argument, maybe "magnitude", "angle" or other, you can + specify the magnitude level mapping range to generate the magnitude + argument. For example, assume ``total_level`` is 10, + ``magnitude_level=3`` specify magnitude is 3 if + ``magnitude_range=(0, 10)`` while specify magnitude is 7 if + ``magnitude_range=(10, 0)``. Defaults to None. + magnitude_std (Number | str): Deviation of magnitude noise applied. + + - If positive number, the magnitude obeys normal distribution + :math:`\mathcal{N}(magnitude, magnitude_std)`. + - If 0 or negative number, magnitude remains unchanged. + - If str "inf", the magnitude obeys uniform distribution + :math:`Uniform(min, magnitude)`. + + Defaults to 0. + total_level (int | float): Total level for the magnitude. Defaults to + 10. + prob (float): The probability for performing transformation therefore + should be in range [0, 1]. Defaults to 0.5. + random_negative_prob (float): The probability that turns the magnitude + negative, which should be in range [0,1]. Defaults to 0. + """ + + def __init__(self, + magnitude_level: int = 10, + magnitude_range: Tuple[float, float] = None, + magnitude_std: Union[str, float] = 0., + total_level: int = 10, + prob: float = 0.5, + random_negative_prob: float = 0.5): + self.magnitude_level = magnitude_level + self.magnitude_range = magnitude_range + self.magnitude_std = magnitude_std + self.total_level = total_level + self.prob = prob + self.random_negative_prob = random_negative_prob + + @cache_randomness + def random_disable(self): + """Randomly disable the transform.""" + return np.random.rand() > self.prob + + @cache_randomness + def random_magnitude(self): + """Randomly generate magnitude.""" + magnitude = self.magnitude_level + # if magnitude_std is positive number or 'inf', move + # magnitude_value randomly. 
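+        # After the optional random perturbation below, the level in
+        # ``[0, total_level]`` is mapped linearly onto
+        # ``magnitude_range = (val1, val2)``.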
+ if self.magnitude_std == 'inf': + magnitude = np.random.uniform(0, magnitude) + elif self.magnitude_std > 0: + magnitude = np.random.normal(magnitude, self.magnitude_std) + magnitude = np.clip(magnitude, 0, self.total_level) + + val1, val2 = self.magnitude_range + magnitude = (magnitude / self.total_level) * (val2 - val1) + val1 + return magnitude + + @cache_randomness + def random_negative(self, value): + """Randomly negative the value.""" + if np.random.rand() < self.random_negative_prob: + return -value + else: + return value + + def extra_repr(self): + """Extra repr string when auto-generating magnitude is enabled.""" + if self.magnitude_range is not None: + repr_str = f', magnitude_level={self.magnitude_level}, ' + repr_str += f'magnitude_range={self.magnitude_range}, ' + repr_str += f'magnitude_std={self.magnitude_std}, ' + repr_str += f'total_level={self.total_level}, ' + return repr_str + else: + return '' + + +@TRANSFORMS.register_module() +class Shear(BaseAugTransform): + """Shear images. + + Args: + magnitude (int | float | None): The magnitude used for shear. If None, + generate from ``magnitude_range``, see :class:`BaseAugTransform`. + Defaults to None. + pad_val (int, Sequence[int]): Pixel pad_val value for constant fill. + If a sequence of length 3, it is used to pad_val R, G, B channels + respectively. Defaults to 128. + prob (float): The probability for performing shear therefore should be + in range [0, 1]. Defaults to 0.5. + direction (str): The shearing direction. Options are 'horizontal' and + 'vertical'. Defaults to 'horizontal'. + random_negative_prob (float): The probability that turns the magnitude + negative, which should be in range [0,1]. Defaults to 0.5. + interpolation (str): Interpolation method. Options are 'nearest', + 'bilinear', 'bicubic', 'area', 'lanczos'. Defaults to 'bicubic'. + **kwargs: Other keyword arguments of :class:`BaseAugTransform`. + """ + + def __init__(self, + magnitude: Union[int, float, None] = None, + pad_val: Union[int, Sequence[int]] = 128, + prob: float = 0.5, + direction: str = 'horizontal', + random_negative_prob: float = 0.5, + interpolation: str = 'bicubic', + **kwargs): + super().__init__( + prob=prob, random_negative_prob=random_negative_prob, **kwargs) + assert (magnitude is None) ^ (self.magnitude_range is None), \ + 'Please specify only one of `magnitude` and `magnitude_range`.' + + self.magnitude = magnitude + if isinstance(pad_val, Sequence): + self.pad_val = tuple(pad_val) + else: + self.pad_val = pad_val + + assert direction in ('horizontal', 'vertical'), 'direction must be ' \ + f'either "horizontal" or "vertical", got "{direction}" instead.' 
+ self.direction = direction + + self.interpolation = interpolation + + def transform(self, results): + """Apply transform to results.""" + if self.random_disable(): + return results + + if self.magnitude is not None: + magnitude = self.random_negative(self.magnitude) + else: + magnitude = self.random_negative(self.random_magnitude()) + + img = results['img'] + img_sheared = mmcv.imshear( + img, + magnitude, + direction=self.direction, + border_value=self.pad_val, + interpolation=self.interpolation) + results['img'] = img_sheared.astype(img.dtype) + + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(magnitude={self.magnitude}, ' + repr_str += f'pad_val={self.pad_val}, ' + repr_str += f'prob={self.prob}, ' + repr_str += f'direction={self.direction}, ' + repr_str += f'random_negative_prob={self.random_negative_prob}, ' + repr_str += f'interpolation={self.interpolation}{self.extra_repr()})' + return repr_str + + +@TRANSFORMS.register_module() +class Translate(BaseAugTransform): + """Translate images. + + Args: + magnitude (int | float | None): The magnitude used for translate. Note + that the offset is calculated by magnitude * size in the + corresponding direction. With a magnitude of 1, the whole image + will be moved out of the range. If None, generate from + ``magnitude_range``, see :class:`BaseAugTransform`. + pad_val (int, Sequence[int]): Pixel pad_val value for constant fill. + If a sequence of length 3, it is used to pad_val R, G, B channels + respectively. Defaults to 128. + prob (float): The probability for performing translate therefore should + be in range [0, 1]. Defaults to 0.5. + direction (str): The translating direction. Options are 'horizontal' + and 'vertical'. Defaults to 'horizontal'. + random_negative_prob (float): The probability that turns the magnitude + negative, which should be in range [0,1]. Defaults to 0.5. + interpolation (str): Interpolation method. Options are 'nearest', + 'bilinear', 'bicubic', 'area', 'lanczos'. Defaults to 'nearest'. + **kwargs: Other keyword arguments of :class:`BaseAugTransform`. + """ + + def __init__(self, + magnitude: Union[int, float, None] = None, + pad_val: Union[int, Sequence[int]] = 128, + prob: float = 0.5, + direction: str = 'horizontal', + random_negative_prob: float = 0.5, + interpolation: str = 'nearest', + **kwargs): + super().__init__( + prob=prob, random_negative_prob=random_negative_prob, **kwargs) + assert (magnitude is None) ^ (self.magnitude_range is None), \ + 'Please specify only one of `magnitude` and `magnitude_range`.' + + self.magnitude = magnitude + if isinstance(pad_val, Sequence): + self.pad_val = tuple(pad_val) + else: + self.pad_val = pad_val + + assert direction in ('horizontal', 'vertical'), 'direction must be ' \ + f'either "horizontal" or "vertical", got "{direction}" instead.' 
+ self.direction = direction + + self.interpolation = interpolation + + def transform(self, results): + """Apply transform to results.""" + if self.random_disable(): + return results + + if self.magnitude is not None: + magnitude = self.random_negative(self.magnitude) + else: + magnitude = self.random_negative(self.random_magnitude()) + + img = results['img'] + height, width = img.shape[:2] + if self.direction == 'horizontal': + offset = magnitude * width + else: + offset = magnitude * height + img_translated = mmcv.imtranslate( + img, + offset, + direction=self.direction, + border_value=self.pad_val, + interpolation=self.interpolation) + results['img'] = img_translated.astype(img.dtype) + + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(magnitude={self.magnitude}, ' + repr_str += f'pad_val={self.pad_val}, ' + repr_str += f'prob={self.prob}, ' + repr_str += f'direction={self.direction}, ' + repr_str += f'random_negative_prob={self.random_negative_prob}, ' + repr_str += f'interpolation={self.interpolation}{self.extra_repr()})' + return repr_str + + +@TRANSFORMS.register_module() +class Rotate(BaseAugTransform): + """Rotate images. + + Args: + angle (float, optional): The angle used for rotate. Positive values + stand for clockwise rotation. If None, generate from + ``magnitude_range``, see :class:`BaseAugTransform`. + Defaults to None. + center (tuple[float], optional): Center point (w, h) of the rotation in + the source image. If None, the center of the image will be used. + Defaults to None. + scale (float): Isotropic scale factor. Defaults to 1.0. + pad_val (int, Sequence[int]): Pixel pad_val value for constant fill. + If a sequence of length 3, it is used to pad_val R, G, B channels + respectively. Defaults to 128. + prob (float): The probability for performing rotate therefore should be + in range [0, 1]. Defaults to 0.5. + random_negative_prob (float): The probability that turns the angle + negative, which should be in range [0,1]. Defaults to 0.5. + interpolation (str): Interpolation method. Options are 'nearest', + 'bilinear', 'bicubic', 'area', 'lanczos'. Defaults to 'nearest'. + **kwargs: Other keyword arguments of :class:`BaseAugTransform`. + """ + + def __init__(self, + angle: Optional[float] = None, + center: Optional[Tuple[float]] = None, + scale: float = 1.0, + pad_val: Union[int, Sequence[int]] = 128, + prob: float = 0.5, + random_negative_prob: float = 0.5, + interpolation: str = 'nearest', + **kwargs): + super().__init__( + prob=prob, random_negative_prob=random_negative_prob, **kwargs) + assert (angle is None) ^ (self.magnitude_range is None), \ + 'Please specify only one of `angle` and `magnitude_range`.' 
+ + self.angle = angle + self.center = center + self.scale = scale + if isinstance(pad_val, Sequence): + self.pad_val = tuple(pad_val) + else: + self.pad_val = pad_val + + self.interpolation = interpolation + + def transform(self, results): + """Apply transform to results.""" + if self.random_disable(): + return results + + if self.angle is not None: + angle = self.random_negative(self.angle) + else: + angle = self.random_negative(self.random_magnitude()) + + img = results['img'] + img_rotated = mmcv.imrotate( + img, + angle, + center=self.center, + scale=self.scale, + border_value=self.pad_val, + interpolation=self.interpolation) + results['img'] = img_rotated.astype(img.dtype) + + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(angle={self.angle}, ' + repr_str += f'center={self.center}, ' + repr_str += f'scale={self.scale}, ' + repr_str += f'pad_val={self.pad_val}, ' + repr_str += f'prob={self.prob}, ' + repr_str += f'random_negative_prob={self.random_negative_prob}, ' + repr_str += f'interpolation={self.interpolation}{self.extra_repr()})' + return repr_str + + +@TRANSFORMS.register_module() +class AutoContrast(BaseAugTransform): + """Auto adjust image contrast. + + Args: + prob (float): The probability for performing auto contrast + therefore should be in range [0, 1]. Defaults to 0.5. + **kwargs: Other keyword arguments of :class:`BaseAugTransform`. + """ + + def __init__(self, prob: float = 0.5, **kwargs): + super().__init__(prob=prob, **kwargs) + + def transform(self, results): + """Apply transform to results.""" + if self.random_disable(): + return results + + img = results['img'] + img_contrasted = mmcv.auto_contrast(img) + results['img'] = img_contrasted.astype(img.dtype) + + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(prob={self.prob})' + return repr_str + + +@TRANSFORMS.register_module() +class Invert(BaseAugTransform): + """Invert images. + + Args: + prob (float): The probability for performing invert therefore should + be in range [0, 1]. Defaults to 0.5. + **kwargs: Other keyword arguments of :class:`BaseAugTransform`. + """ + + def __init__(self, prob: float = 0.5, **kwargs): + super().__init__(prob=prob, **kwargs) + + def transform(self, results): + """Apply transform to results.""" + if self.random_disable(): + return results + + img = results['img'] + img_inverted = mmcv.iminvert(img) + results['img'] = img_inverted.astype(img.dtype) + + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(prob={self.prob})' + return repr_str + + +@TRANSFORMS.register_module() +class Equalize(BaseAugTransform): + """Equalize the image histogram. + + Args: + prob (float): The probability for performing equalize therefore should + be in range [0, 1]. Defaults to 0.5. + **kwargs: Other keyword arguments of :class:`BaseAugTransform`. + """ + + def __init__(self, prob: float = 0.5, **kwargs): + super().__init__(prob=prob, **kwargs) + + def transform(self, results): + """Apply transform to results.""" + if self.random_disable(): + return results + + img = results['img'] + img_equalized = mmcv.imequalize(img) + results['img'] = img_equalized.astype(img.dtype) + + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(prob={self.prob})' + return repr_str + + +@TRANSFORMS.register_module() +class Solarize(BaseAugTransform): + """Solarize images (invert all pixel values above a threshold). 
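# AutoContrast, Invert and Equalize above take no magnitude; they wrap the
# corresponding mmcv photometric ops directly. A quick sketch of those calls
# on a dummy image (illustrative only):
import numpy as np
import mmcv

img = np.random.randint(0, 256, (64, 64, 3), dtype=np.uint8)
out = mmcv.auto_contrast(img)    # stretch the intensity histogram
out = mmcv.iminvert(out)         # 255 - pixel value
out = mmcv.imequalize(out)       # per-channel histogram equalization
print(out.shape)                 # (64, 64, 3)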
+ + Args: + thr (int | float | None): The threshold above which the pixels value + will be inverted. If None, generate from ``magnitude_range``, + see :class:`BaseAugTransform`. Defaults to None. + prob (float): The probability for solarizing therefore should be in + range [0, 1]. Defaults to 0.5. + **kwargs: Other keyword arguments of :class:`BaseAugTransform`. + """ + + def __init__(self, + thr: Union[int, float, None] = None, + prob: float = 0.5, + **kwargs): + super().__init__(prob=prob, random_negative_prob=0., **kwargs) + assert (thr is None) ^ (self.magnitude_range is None), \ + 'Please specify only one of `thr` and `magnitude_range`.' + + self.thr = thr + + def transform(self, results): + """Apply transform to results.""" + if self.random_disable(): + return results + + if self.thr is not None: + thr = self.thr + else: + thr = self.random_magnitude() + + img = results['img'] + img_solarized = mmcv.solarize(img, thr=thr) + results['img'] = img_solarized.astype(img.dtype) + + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(thr={self.thr}, ' + repr_str += f'prob={self.prob}{self.extra_repr()}))' + return repr_str + + +@TRANSFORMS.register_module() +class SolarizeAdd(BaseAugTransform): + """SolarizeAdd images (add a certain value to pixels below a threshold). + + Args: + magnitude (int | float | None): The value to be added to pixels below + the thr. If None, generate from ``magnitude_range``, see + :class:`BaseAugTransform`. Defaults to None. + thr (int | float): The threshold below which the pixels value will be + adjusted. + prob (float): The probability for solarizing therefore should be in + range [0, 1]. Defaults to 0.5. + **kwargs: Other keyword arguments of :class:`BaseAugTransform`. + """ + + def __init__(self, + magnitude: Union[int, float, None] = None, + thr: Union[int, float] = 128, + prob: float = 0.5, + **kwargs): + super().__init__(prob=prob, random_negative_prob=0., **kwargs) + assert (magnitude is None) ^ (self.magnitude_range is None), \ + 'Please specify only one of `magnitude` and `magnitude_range`.' + + self.magnitude = magnitude + + assert isinstance(thr, (int, float)), 'The thr type must '\ + f'be int or float, but got {type(thr)} instead.' + self.thr = thr + + def transform(self, results): + """Apply transform to results.""" + if self.random_disable(): + return results + + if self.magnitude is not None: + magnitude = self.magnitude + else: + magnitude = self.random_magnitude() + + img = results['img'] + img_solarized = np.where(img < self.thr, + np.minimum(img + magnitude, 255), img) + results['img'] = img_solarized.astype(img.dtype) + + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(magnitude={self.magnitude}, ' + repr_str += f'thr={self.thr}, ' + repr_str += f'prob={self.prob}{self.extra_repr()})' + return repr_str + + +@TRANSFORMS.register_module() +class Posterize(BaseAugTransform): + """Posterize images (reduce the number of bits for each color channel). + + Args: + bits (int, optional): Number of bits for each pixel in the output img, + which should be less or equal to 8. If None, generate from + ``magnitude_range``, see :class:`BaseAugTransform`. + Defaults to None. + prob (float): The probability for posterizing therefore should be in + range [0, 1]. Defaults to 0.5. + **kwargs: Other keyword arguments of :class:`BaseAugTransform`. 
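# Numeric sketch of the SolarizeAdd rule used in transform() above: pixels
# below `thr` get `magnitude` added (capped at 255), the rest stay unchanged.
# An int32 array is used here purely to avoid uint8 wrap-around in the sketch.
import numpy as np

img = np.array([[10, 120, 200]], dtype=np.int32)
thr, magnitude = 128, 100
out = np.where(img < thr, np.minimum(img + magnitude, 255), img)
print(out)    # [[110 220 200]] -> only the values below 128 were shifted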
+ """ + + def __init__(self, + bits: Optional[int] = None, + prob: float = 0.5, + **kwargs): + super().__init__(prob=prob, random_negative_prob=0., **kwargs) + assert (bits is None) ^ (self.magnitude_range is None), \ + 'Please specify only one of `bits` and `magnitude_range`.' + + if bits is not None: + assert bits <= 8, \ + f'The bits must be less than 8, got {bits} instead.' + self.bits = bits + + def transform(self, results): + """Apply transform to results.""" + if self.random_disable(): + return results + + if self.bits is not None: + bits = self.bits + else: + bits = self.random_magnitude() + + # To align timm version, we need to round up to integer here. + bits = ceil(bits) + + img = results['img'] + img_posterized = mmcv.posterize(img, bits=bits) + results['img'] = img_posterized.astype(img.dtype) + + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(bits={self.bits}, ' + repr_str += f'prob={self.prob}{self.extra_repr()})' + return repr_str + + +@TRANSFORMS.register_module() +class Contrast(BaseAugTransform): + """Adjust images contrast. + + Args: + magnitude (int | float | None): The magnitude used for adjusting + contrast. A positive magnitude would enhance the contrast and + a negative magnitude would make the image grayer. A magnitude=0 + gives the origin img. If None, generate from ``magnitude_range``, + see :class:`BaseAugTransform`. Defaults to None. + prob (float): The probability for performing contrast adjusting + therefore should be in range [0, 1]. Defaults to 0.5. + random_negative_prob (float): The probability that turns the magnitude + negative, which should be in range [0,1]. Defaults to 0.5. + """ + + def __init__(self, + magnitude: Union[int, float, None] = None, + prob: float = 0.5, + random_negative_prob: float = 0.5, + **kwargs): + super().__init__( + prob=prob, random_negative_prob=random_negative_prob, **kwargs) + assert (magnitude is None) ^ (self.magnitude_range is None), \ + 'Please specify only one of `magnitude` and `magnitude_range`.' + + self.magnitude = magnitude + + def transform(self, results): + """Apply transform to results.""" + if self.random_disable(): + return results + + if self.magnitude is not None: + magnitude = self.random_negative(self.magnitude) + else: + magnitude = self.random_negative(self.random_magnitude()) + + img = results['img'] + img_contrasted = mmcv.adjust_contrast(img, factor=1 + magnitude) + results['img'] = img_contrasted.astype(img.dtype) + + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(magnitude={self.magnitude}, ' + repr_str += f'prob={self.prob}, ' + repr_str += f'random_negative_prob={self.random_negative_prob}' + repr_str += f'{self.extra_repr()})' + return repr_str + + +@TRANSFORMS.register_module() +class ColorTransform(BaseAugTransform): + """Adjust images color balance. + + Args: + magnitude (int | float | None): The magnitude used for color transform. + A positive magnitude would enhance the color and a negative + magnitude would make the image grayer. A magnitude=0 gives the + origin img. If None, generate from ``magnitude_range``, see + :class:`BaseAugTransform`. Defaults to None. + prob (float): The probability for performing ColorTransform therefore + should be in range [0, 1]. Defaults to 0.5. + random_negative_prob (float): The probability that turns the magnitude + negative, which should be in range [0,1]. Defaults to 0.5. + **kwargs: Other keyword arguments of :class:`BaseAugTransform`. 
+ """ + + def __init__(self, + magnitude: Union[int, float, None] = None, + prob: float = 0.5, + random_negative_prob: float = 0.5, + **kwargs): + super().__init__( + prob=prob, random_negative_prob=random_negative_prob, **kwargs) + assert (magnitude is None) ^ (self.magnitude_range is None), \ + 'Please specify only one of `magnitude` and `magnitude_range`.' + + self.magnitude = magnitude + + def transform(self, results): + """Apply transform to results.""" + if self.random_disable(): + return results + + if self.magnitude is not None: + magnitude = self.random_negative(self.magnitude) + else: + magnitude = self.random_negative(self.random_magnitude()) + + img = results['img'] + img_color_adjusted = mmcv.adjust_color(img, alpha=1 + magnitude) + results['img'] = img_color_adjusted.astype(img.dtype) + + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(magnitude={self.magnitude}, ' + repr_str += f'prob={self.prob}, ' + repr_str += f'random_negative_prob={self.random_negative_prob}' + repr_str += f'{self.extra_repr()})' + return repr_str + + +@TRANSFORMS.register_module() +class Brightness(BaseAugTransform): + """Adjust images brightness. + + Args: + magnitude (int | float | None): The magnitude used for adjusting + brightness. A positive magnitude would enhance the brightness and a + negative magnitude would make the image darker. A magnitude=0 gives + the origin img. If None, generate from ``magnitude_range``, see + :class:`BaseAugTransform`. Defaults to None. + prob (float): The probability for performing brightness adjusting + therefore should be in range [0, 1]. Defaults to 0.5. + random_negative_prob (float): The probability that turns the magnitude + negative, which should be in range [0,1]. Defaults to 0.5. + **kwargs: Other keyword arguments of :class:`BaseAugTransform`. + """ + + def __init__(self, + magnitude: Union[int, float, None] = None, + prob: float = 0.5, + random_negative_prob: float = 0.5, + **kwargs): + super().__init__( + prob=prob, random_negative_prob=random_negative_prob, **kwargs) + assert (magnitude is None) ^ (self.magnitude_range is None), \ + 'Please specify only one of `magnitude` and `magnitude_range`.' + + self.magnitude = magnitude + + def transform(self, results): + """Apply transform to results.""" + if self.random_disable(): + return results + + if self.magnitude is not None: + magnitude = self.random_negative(self.magnitude) + else: + magnitude = self.random_negative(self.random_magnitude()) + + img = results['img'] + img_brightened = mmcv.adjust_brightness(img, factor=1 + magnitude) + results['img'] = img_brightened.astype(img.dtype) + + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(magnitude={self.magnitude}, ' + repr_str += f'prob={self.prob}, ' + repr_str += f'random_negative_prob={self.random_negative_prob}' + repr_str += f'{self.extra_repr()})' + return repr_str + + +@TRANSFORMS.register_module() +class Sharpness(BaseAugTransform): + """Adjust images sharpness. + + Args: + magnitude (int | float | None): The magnitude used for adjusting + sharpness. A positive magnitude would enhance the sharpness and a + negative magnitude would make the image bulr. A magnitude=0 gives + the origin img. If None, generate from ``magnitude_range``, see + :class:`BaseAugTransform`. Defaults to None. + prob (float): The probability for performing sharpness adjusting + therefore should be in range [0, 1]. Defaults to 0.5. 
+ random_negative_prob (float): The probability that turns the magnitude + negative, which should be in range [0,1]. Defaults to 0.5. + **kwargs: Other keyword arguments of :class:`BaseAugTransform`. + """ + + def __init__(self, + magnitude: Union[int, float, None] = None, + prob: float = 0.5, + random_negative_prob: float = 0.5, + **kwargs): + super().__init__( + prob=prob, random_negative_prob=random_negative_prob, **kwargs) + assert (magnitude is None) ^ (self.magnitude_range is None), \ + 'Please specify only one of `magnitude` and `magnitude_range`.' + + self.magnitude = magnitude + + def transform(self, results): + """Apply transform to results.""" + if self.random_disable(): + return results + + if self.magnitude is not None: + magnitude = self.random_negative(self.magnitude) + else: + magnitude = self.random_negative(self.random_magnitude()) + + img = results['img'] + img_sharpened = mmcv.adjust_sharpness(img, factor=1 + magnitude) + results['img'] = img_sharpened.astype(img.dtype) + + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(magnitude={self.magnitude}, ' + repr_str += f'prob={self.prob}, ' + repr_str += f'random_negative_prob={self.random_negative_prob}' + repr_str += f'{self.extra_repr()})' + return repr_str + + +@TRANSFORMS.register_module() +class Cutout(BaseAugTransform): + """Cutout images. + + Args: + shape (int | tuple(int) | None): Expected cutout shape (h, w). + If given as a single value, the value will be used for both h and + w. If None, generate from ``magnitude_range``, see + :class:`BaseAugTransform`. Defaults to None. + pad_val (int, Sequence[int]): Pixel pad_val value for constant fill. + If it is a sequence, it must have the same length with the image + channels. Defaults to 128. + prob (float): The probability for performing cutout therefore should + be in range [0, 1]. Defaults to 0.5. + **kwargs: Other keyword arguments of :class:`BaseAugTransform`. + """ + + def __init__(self, + shape: Union[int, Tuple[int], None] = None, + pad_val: Union[int, Sequence[int]] = 128, + prob: float = 0.5, + **kwargs): + super().__init__(prob=prob, random_negative_prob=0., **kwargs) + assert (shape is None) ^ (self.magnitude_range is None), \ + 'Please specify only one of `shape` and `magnitude_range`.' + + self.shape = shape + if isinstance(pad_val, Sequence): + self.pad_val = tuple(pad_val) + else: + self.pad_val = pad_val + + def transform(self, results): + """Apply transform to results.""" + if self.random_disable(): + return results + + if self.shape is not None: + shape = self.shape + else: + shape = int(self.random_magnitude()) + + img = results['img'] + img_cutout = mmcv.cutout(img, shape, pad_val=self.pad_val) + results['img'] = img_cutout.astype(img.dtype) + + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(shape={self.shape}, ' + repr_str += f'pad_val={self.pad_val}, ' + repr_str += f'prob={self.prob}{self.extra_repr()})' + return repr_str + + +@TRANSFORMS.register_module() +class GaussianBlur(BaseAugTransform): + """Gaussian blur images. + + Args: + radius (int, float, optional): The blur radius. If None, generate from + ``magnitude_range``, see :class:`BaseAugTransform`. + Defaults to None. + prob (float): The probability for posterizing therefore should be in + range [0, 1]. Defaults to 0.5. + **kwargs: Other keyword arguments of :class:`BaseAugTransform`. 
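# Usage sketch for the Cutout transform defined above, assuming mmpretrain and
# mmcv are installed; with a fixed `shape`, a 64x64 patch at a random location
# is filled with `pad_val`.
import numpy as np
from mmpretrain.registry import TRANSFORMS

cutout = TRANSFORMS.build(dict(type='Cutout', shape=64, pad_val=128, prob=1.0))
results = {'img': np.random.randint(0, 256, (224, 224, 3), dtype=np.uint8)}
results = cutout(results)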
+ """ + + def __init__(self, + radius: Union[int, float, None] = None, + prob: float = 0.5, + **kwargs): + super().__init__(prob=prob, random_negative_prob=0., **kwargs) + assert (radius is None) ^ (self.magnitude_range is None), \ + 'Please specify only one of `radius` and `magnitude_range`.' + + self.radius = radius + + def transform(self, results): + """Apply transform to results.""" + if self.random_disable(): + return results + + if self.radius is not None: + radius = self.radius + else: + radius = self.random_magnitude() + + img = results['img'] + pil_img = Image.fromarray(img) + pil_img.filter(ImageFilter.GaussianBlur(radius=radius)) + results['img'] = np.array(pil_img, dtype=img.dtype) + + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(radius={self.radius}, ' + repr_str += f'prob={self.prob}{self.extra_repr()})' + return repr_str + + +# yapf: disable +# flake8: noqa +AUTOAUG_POLICIES = { + # Policy for ImageNet, refers to + # https://github.com/DeepVoltaire/AutoAugment/blame/master/autoaugment.py + 'imagenet': [ + [dict(type='Posterize', bits=4, prob=0.4), dict(type='Rotate', angle=30., prob=0.6)], + [dict(type='Solarize', thr=256 / 9 * 4, prob=0.6), dict(type='AutoContrast', prob=0.6)], + [dict(type='Equalize', prob=0.8), dict(type='Equalize', prob=0.6)], + [dict(type='Posterize', bits=5, prob=0.6), dict(type='Posterize', bits=5, prob=0.6)], + [dict(type='Equalize', prob=0.4), dict(type='Solarize', thr=256 / 9 * 5, prob=0.2)], + [dict(type='Equalize', prob=0.4), dict(type='Rotate', angle=30 / 9 * 8, prob=0.8)], + [dict(type='Solarize', thr=256 / 9 * 6, prob=0.6), dict(type='Equalize', prob=0.6)], + [dict(type='Posterize', bits=6, prob=0.8), dict(type='Equalize', prob=1.)], + [dict(type='Rotate', angle=10., prob=0.2), dict(type='Solarize', thr=256 / 9, prob=0.6)], + [dict(type='Equalize', prob=0.6), dict(type='Posterize', bits=5, prob=0.4)], + [dict(type='Rotate', angle=30 / 9 * 8, prob=0.8), dict(type='ColorTransform', magnitude=0., prob=0.4)], + [dict(type='Rotate', angle=30., prob=0.4), dict(type='Equalize', prob=0.6)], + [dict(type='Equalize', prob=0.0), dict(type='Equalize', prob=0.8)], + [dict(type='Invert', prob=0.6), dict(type='Equalize', prob=1.)], + [dict(type='ColorTransform', magnitude=0.4, prob=0.6), dict(type='Contrast', magnitude=0.8, prob=1.)], + [dict(type='Rotate', angle=30 / 9 * 8, prob=0.8), dict(type='ColorTransform', magnitude=0.2, prob=1.)], + [dict(type='ColorTransform', magnitude=0.8, prob=0.8), dict(type='Solarize', thr=256 / 9 * 2, prob=0.8)], + [dict(type='Sharpness', magnitude=0.7, prob=0.4), dict(type='Invert', prob=0.6)], + [dict(type='Shear', magnitude=0.3 / 9 * 5, prob=0.6, direction='horizontal'), dict(type='Equalize', prob=1.)], + [dict(type='ColorTransform', magnitude=0., prob=0.4), dict(type='Equalize', prob=0.6)], + [dict(type='Equalize', prob=0.4), dict(type='Solarize', thr=256 / 9 * 5, prob=0.2)], + [dict(type='Solarize', thr=256 / 9 * 4, prob=0.6), dict(type='AutoContrast', prob=0.6)], + [dict(type='Invert', prob=0.6), dict(type='Equalize', prob=1.)], + [dict(type='ColorTransform', magnitude=0.4, prob=0.6), dict(type='Contrast', magnitude=0.8, prob=1.)], + [dict(type='Equalize', prob=0.8), dict(type='Equalize', prob=0.6)], + ], +} + +RANDAUG_POLICIES = { + # Refers to `_RAND_INCREASING_TRANSFORMS` in pytorch-image-models + 'timm_increasing': [ + dict(type='AutoContrast'), + dict(type='Equalize'), + dict(type='Invert'), + dict(type='Rotate', magnitude_range=(0, 30)), + dict(type='Posterize', 
magnitude_range=(4, 0)), + dict(type='Solarize', magnitude_range=(256, 0)), + dict(type='SolarizeAdd', magnitude_range=(0, 110)), + dict(type='ColorTransform', magnitude_range=(0, 0.9)), + dict(type='Contrast', magnitude_range=(0, 0.9)), + dict(type='Brightness', magnitude_range=(0, 0.9)), + dict(type='Sharpness', magnitude_range=(0, 0.9)), + dict(type='Shear', magnitude_range=(0, 0.3), direction='horizontal'), + dict(type='Shear', magnitude_range=(0, 0.3), direction='vertical'), + dict(type='Translate', magnitude_range=(0, 0.45), direction='horizontal'), + dict(type='Translate', magnitude_range=(0, 0.45), direction='vertical'), + ], + 'simple_increasing': [ + dict(type='AutoContrast'), + dict(type='Equalize'), + dict(type='Rotate', magnitude_range=(0, 30)), + dict(type='Shear', magnitude_range=(0, 0.3), direction='horizontal'), + dict(type='Shear', magnitude_range=(0, 0.3), direction='vertical'), + ], +} diff --git a/mmpretrain/datasets/transforms/formatting.py b/mmpretrain/datasets/transforms/formatting.py new file mode 100644 index 0000000000000000000000000000000000000000..e4d331636a883ce602e419e0867aea7b513b4d87 --- /dev/null +++ b/mmpretrain/datasets/transforms/formatting.py @@ -0,0 +1,353 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from collections import defaultdict +from collections.abc import Sequence + +import cv2 +import numpy as np +import torch +import torchvision.transforms.functional as F +from mmcv.transforms import BaseTransform +from mmengine.utils import is_str +from PIL import Image + +from mmpretrain.registry import TRANSFORMS +from mmpretrain.structures import DataSample, MultiTaskDataSample + + +def to_tensor(data): + """Convert objects of various python types to :obj:`torch.Tensor`. + + Supported types are: :class:`numpy.ndarray`, :class:`torch.Tensor`, + :class:`Sequence`, :class:`int` and :class:`float`. + """ + if isinstance(data, torch.Tensor): + return data + elif isinstance(data, np.ndarray): + return torch.from_numpy(data) + elif isinstance(data, Sequence) and not is_str(data): + return torch.tensor(data) + elif isinstance(data, int): + return torch.LongTensor([data]) + elif isinstance(data, float): + return torch.FloatTensor([data]) + else: + raise TypeError( + f'Type {type(data)} cannot be converted to tensor.' + 'Supported types are: `numpy.ndarray`, `torch.Tensor`, ' + '`Sequence`, `int` and `float`') + + +@TRANSFORMS.register_module() +class PackInputs(BaseTransform): + """Pack the inputs data. + + **Required Keys:** + + - ``input_key`` + - ``*algorithm_keys`` + - ``*meta_keys`` + + **Deleted Keys:** + + All other keys in the dict. + + **Added Keys:** + + - inputs (:obj:`torch.Tensor`): The forward data of models. + - data_samples (:obj:`~mmpretrain.structures.DataSample`): The + annotation info of the sample. + + Args: + input_key (str): The key of element to feed into the model forwarding. + Defaults to 'img'. + algorithm_keys (Sequence[str]): The keys of custom elements to be used + in the algorithm. Defaults to an empty tuple. + meta_keys (Sequence[str]): The keys of meta information to be saved in + the data sample. Defaults to :attr:`PackInputs.DEFAULT_META_KEYS`. + + .. admonition:: Default algorithm keys + + Besides the specified ``algorithm_keys``, we will set some default keys + into the output data sample and do some formatting. Therefore, you + don't need to set these keys in the ``algorithm_keys``. + + - ``gt_label``: The ground-truth label. The value will be converted + into a 1-D tensor. + - ``gt_score``: The ground-truth score. 
The value will be converted + into a 1-D tensor. + - ``mask``: The mask for some self-supervise tasks. The value will + be converted into a tensor. + + .. admonition:: Default meta keys + + - ``sample_idx``: The id of the image sample. + - ``img_path``: The path to the image file. + - ``ori_shape``: The original shape of the image as a tuple (H, W). + - ``img_shape``: The shape of the image after the pipeline as a + tuple (H, W). + - ``scale_factor``: The scale factor between the resized image and + the original image. + - ``flip``: A boolean indicating if image flip transform was used. + - ``flip_direction``: The flipping direction. + """ + + DEFAULT_META_KEYS = ('sample_idx', 'img_path', 'ori_shape', 'img_shape', + 'scale_factor', 'flip', 'flip_direction') + + def __init__(self, + input_key='img', + algorithm_keys=(), + meta_keys=DEFAULT_META_KEYS): + self.input_key = input_key + self.algorithm_keys = algorithm_keys + self.meta_keys = meta_keys + + @staticmethod + def format_input(input_): + if isinstance(input_, list): + return [PackInputs.format_input(item) for item in input_] + elif isinstance(input_, np.ndarray): + if input_.ndim == 2: # For grayscale image. + input_ = np.expand_dims(input_, -1) + if input_.ndim == 3 and not input_.flags.c_contiguous: + input_ = np.ascontiguousarray(input_.transpose(2, 0, 1)) + input_ = to_tensor(input_) + elif input_.ndim == 3: + # convert to tensor first to accelerate, see + # https://github.com/open-mmlab/mmdetection/pull/9533 + input_ = to_tensor(input_).permute(2, 0, 1).contiguous() + else: + # convert input with other shape to tensor without permute, + # like video input (num_crops, C, T, H, W). + input_ = to_tensor(input_) + elif isinstance(input_, Image.Image): + input_ = F.pil_to_tensor(input_) + elif not isinstance(input_, torch.Tensor): + raise TypeError(f'Unsupported input type {type(input_)}.') + + return input_ + + def transform(self, results: dict) -> dict: + """Method to pack the input data.""" + + packed_results = dict() + if self.input_key in results: + input_ = results[self.input_key] + packed_results['inputs'] = self.format_input(input_) + + data_sample = DataSample() + + # Set default keys + if 'gt_label' in results: + data_sample.set_gt_label(results['gt_label']) + if 'gt_score' in results: + data_sample.set_gt_score(results['gt_score']) + if 'mask' in results: + data_sample.set_mask(results['mask']) + + # Set custom algorithm keys + for key in self.algorithm_keys: + if key in results: + data_sample.set_field(results[key], key) + + # Set meta keys + for key in self.meta_keys: + if key in results: + data_sample.set_field(results[key], key, field_type='metainfo') + + packed_results['data_samples'] = data_sample + return packed_results + + def __repr__(self) -> str: + repr_str = self.__class__.__name__ + repr_str += f"(input_key='{self.input_key}', " + repr_str += f'algorithm_keys={self.algorithm_keys}, ' + repr_str += f'meta_keys={self.meta_keys})' + return repr_str + + +@TRANSFORMS.register_module() +class PackMultiTaskInputs(BaseTransform): + """Convert all image labels of multi-task dataset to a dict of tensor. 
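# Sketch of what the PackInputs transform defined above produces for a typical
# classification sample, assuming mmpretrain is importable; shapes and the
# label value are illustrative.
import numpy as np
from mmpretrain.registry import TRANSFORMS

pack = TRANSFORMS.build(dict(type='PackInputs'))
results = {
    'img': np.random.randint(0, 256, (224, 224, 3), dtype=np.uint8),
    'gt_label': 3,
    'img_path': 'demo.jpg',
    'ori_shape': (224, 224),
    'img_shape': (224, 224),
}
packed = pack(results)
print(packed['inputs'].shape)            # torch.Size([3, 224, 224]), CHW tensor
print(packed['data_samples'].gt_label)   # tensor([3])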
+ + Args: + multi_task_fields (Sequence[str]): + input_key (str): + task_handlers (dict): + """ + + def __init__(self, + multi_task_fields, + input_key='img', + task_handlers=dict()): + self.multi_task_fields = multi_task_fields + self.input_key = input_key + self.task_handlers = defaultdict(PackInputs) + for task_name, task_handler in task_handlers.items(): + self.task_handlers[task_name] = TRANSFORMS.build(task_handler) + + def transform(self, results: dict) -> dict: + """Method to pack the input data. + + result = {'img_path': 'a.png', 'gt_label': {'task1': 1, 'task3': 3}, + 'img': array([[[ 0, 0, 0]) + """ + packed_results = dict() + results = results.copy() + + if self.input_key in results: + input_ = results[self.input_key] + packed_results['inputs'] = PackInputs.format_input(input_) + + task_results = defaultdict(dict) + for field in self.multi_task_fields: + if field in results: + value = results.pop(field) + for k, v in value.items(): + task_results[k].update({field: v}) + + data_sample = MultiTaskDataSample() + for task_name, task_result in task_results.items(): + task_handler = self.task_handlers[task_name] + task_pack_result = task_handler({**results, **task_result}) + data_sample.set_field(task_pack_result['data_samples'], task_name) + + packed_results['data_samples'] = data_sample + return packed_results + + def __repr__(self): + repr = self.__class__.__name__ + task_handlers = ', '.join( + f"'{name}': {handler.__class__.__name__}" + for name, handler in self.task_handlers.items()) + repr += f'(multi_task_fields={self.multi_task_fields}, ' + repr += f"input_key='{self.input_key}', " + repr += f'task_handlers={{{task_handlers}}})' + return repr + + +@TRANSFORMS.register_module() +class Transpose(BaseTransform): + """Transpose numpy array. + + **Required Keys:** + + - ``*keys`` + + **Modified Keys:** + + - ``*keys`` + + Args: + keys (List[str]): The fields to convert to tensor. + order (List[int]): The output dimensions order. + """ + + def __init__(self, keys, order): + self.keys = keys + self.order = order + + def transform(self, results): + """Method to transpose array.""" + for key in self.keys: + results[key] = results[key].transpose(self.order) + return results + + def __repr__(self): + return self.__class__.__name__ + \ + f'(keys={self.keys}, order={self.order})' + + +@TRANSFORMS.register_module(('NumpyToPIL', 'ToPIL')) +class NumpyToPIL(BaseTransform): + """Convert the image from OpenCV format to :obj:`PIL.Image.Image`. + + **Required Keys:** + + - ``img`` + + **Modified Keys:** + + - ``img`` + + Args: + to_rgb (bool): Whether to convert img to rgb. Defaults to True. + """ + + def __init__(self, to_rgb: bool = False) -> None: + self.to_rgb = to_rgb + + def transform(self, results: dict) -> dict: + """Method to convert images to :obj:`PIL.Image.Image`.""" + img = results['img'] + img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) if self.to_rgb else img + + results['img'] = Image.fromarray(img) + return results + + def __repr__(self) -> str: + return self.__class__.__name__ + f'(to_rgb={self.to_rgb})' + + +@TRANSFORMS.register_module(('PILToNumpy', 'ToNumpy')) +class PILToNumpy(BaseTransform): + """Convert img to :obj:`numpy.ndarray`. + + **Required Keys:** + + - ``img`` + + **Modified Keys:** + + - ``img`` + + Args: + to_bgr (bool): Whether to convert img to rgb. Defaults to True. + dtype (str, optional): The dtype of the converted numpy array. + Defaults to None. 
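# Sketch of the PackMultiTaskInputs transform defined above on the kind of
# multi-task sample shown in its docstring: the per-task entries of each field
# in `multi_task_fields` are split out and packed by a per-task PackInputs
# handler (illustrative values, assuming mmpretrain is importable).
import numpy as np
from mmpretrain.registry import TRANSFORMS

pack = TRANSFORMS.build(
    dict(type='PackMultiTaskInputs', multi_task_fields=['gt_label']))
results = {
    'img': np.random.randint(0, 256, (224, 224, 3), dtype=np.uint8),
    'gt_label': {'task1': 1, 'task3': 3},
}
packed = pack(results)
print(packed['inputs'].shape)    # torch.Size([3, 224, 224])
print(packed['data_samples'])    # MultiTaskDataSample holding task1/task3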
+ """ + + def __init__(self, to_bgr: bool = False, dtype=None) -> None: + self.to_bgr = to_bgr + self.dtype = dtype + + def transform(self, results: dict) -> dict: + """Method to convert img to :obj:`numpy.ndarray`.""" + img = np.array(results['img'], dtype=self.dtype) + img = cv2.cvtColor(img, cv2.COLOR_RGB2BGR) if self.to_bgr else img + + results['img'] = img + return results + + def __repr__(self) -> str: + return self.__class__.__name__ + \ + f'(to_bgr={self.to_bgr}, dtype={self.dtype})' + + +@TRANSFORMS.register_module() +class Collect(BaseTransform): + """Collect and only reserve the specified fields. + + **Required Keys:** + + - ``*keys`` + + **Deleted Keys:** + + All keys except those in the argument ``*keys``. + + Args: + keys (Sequence[str]): The keys of the fields to be collected. + """ + + def __init__(self, keys): + self.keys = keys + + def transform(self, results): + data = {} + for key in self.keys: + data[key] = results[key] + return data + + def __repr__(self): + return self.__class__.__name__ + f'(keys={self.keys})' diff --git a/mmpretrain/datasets/transforms/processing.py b/mmpretrain/datasets/transforms/processing.py new file mode 100644 index 0000000000000000000000000000000000000000..ad753c16fbb241a0a92eb6b9260a6cb8d928aa51 --- /dev/null +++ b/mmpretrain/datasets/transforms/processing.py @@ -0,0 +1,1762 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import inspect +import math +import numbers +import re +import string +import traceback +from enum import EnumMeta +from numbers import Number +from typing import Dict, List, Optional, Sequence, Tuple, Union + +import mmcv +import mmengine +import numpy as np +import torchvision +from mmcv.transforms import BaseTransform +from mmcv.transforms.utils import cache_randomness +from torchvision.transforms.transforms import InterpolationMode + +from mmpretrain.registry import TRANSFORMS + +try: + import albumentations +except ImportError: + albumentations = None + + +def _str_to_torch_dtype(t: str): + """mapping str format dtype to torch.dtype.""" + import torch # noqa: F401,F403 + return eval(f'torch.{t}') + + +def _interpolation_modes_from_str(t: str): + """mapping str format to Interpolation.""" + t = t.lower() + inverse_modes_mapping = { + 'nearest': InterpolationMode.NEAREST, + 'bilinear': InterpolationMode.BILINEAR, + 'bicubic': InterpolationMode.BICUBIC, + 'box': InterpolationMode.BOX, + 'hammimg': InterpolationMode.HAMMING, + 'lanczos': InterpolationMode.LANCZOS, + } + return inverse_modes_mapping[t] + + +def _warpper_vision_transform_cls(vision_transform_cls, new_name): + """build a transform warpper class for specific torchvison.transform to + handle the different input type between torchvison.transforms with + mmcls.datasets.transforms.""" + + def new_init(self, *args, **kwargs): + if 'interpolation' in kwargs and isinstance(kwargs['interpolation'], + str): + kwargs['interpolation'] = _interpolation_modes_from_str( + kwargs['interpolation']) + if 'dtype' in kwargs and isinstance(kwargs['dtype'], str): + kwargs['dtype'] = _str_to_torch_dtype(kwargs['dtype']) + + try: + self.t = vision_transform_cls(*args, **kwargs) + except TypeError as e: + traceback.print_exc() + raise TypeError( + f'Error when init the {vision_transform_cls}, please ' + f'check the argmemnts of {args} and {kwargs}. 
\n{e}') + + def new_call(self, input): + try: + input['img'] = self.t(input['img']) + except Exception as e: + traceback.print_exc() + raise Exception('Error when processing of transform(`torhcvison/' + f'{vision_transform_cls.__name__}`). \n{e}') + return input + + def new_str(self): + return str(self.t) + + new_transforms_cls = type( + new_name, (), + dict(__init__=new_init, __call__=new_call, __str__=new_str)) + return new_transforms_cls + + +def register_vision_transforms() -> List[str]: + """Register transforms in ``torchvision.transforms`` to the ``TRANSFORMS`` + registry. + + Returns: + List[str]: A list of registered transforms' name. + """ + vision_transforms = [] + for module_name in dir(torchvision.transforms): + if not re.match('[A-Z]', module_name): + # must startswith a capital letter + continue + _transform = getattr(torchvision.transforms, module_name) + if inspect.isclass(_transform) and callable( + _transform) and not isinstance(_transform, (EnumMeta)): + new_cls = _warpper_vision_transform_cls( + _transform, f'TorchVison{module_name}') + TRANSFORMS.register_module( + module=new_cls, name=f'torchvision/{module_name}') + vision_transforms.append(f'torchvision/{module_name}') + return vision_transforms + + +# register all the transforms in torchvision by using a transform wrapper +VISION_TRANSFORMS = register_vision_transforms() + + +@TRANSFORMS.register_module() +class RandomCrop(BaseTransform): + """Crop the given Image at a random location. + + **Required Keys:** + + - img + + **Modified Keys:** + + - img + - img_shape + + Args: + crop_size (int | Sequence): Desired output size of the crop. If + crop_size is an int instead of sequence like (h, w), a square crop + (crop_size, crop_size) is made. + padding (int | Sequence, optional): Optional padding on each border + of the image. If a sequence of length 4 is provided, it is used to + pad left, top, right, bottom borders respectively. If a sequence + of length 2 is provided, it is used to pad left/right, top/bottom + borders, respectively. Default: None, which means no padding. + pad_if_needed (bool): It will pad the image if smaller than the + desired size to avoid raising an exception. Since cropping is done + after padding, the padding seems to be done at a random offset. + Default: False. + pad_val (Number | Sequence[Number]): Pixel pad_val value for constant + fill. If a tuple of length 3, it is used to pad_val R, G, B + channels respectively. Default: 0. + padding_mode (str): Type of padding. Defaults to "constant". Should + be one of the following: + + - ``constant``: Pads with a constant value, this value is specified + with pad_val. + - ``edge``: pads with the last value at the edge of the image. + - ``reflect``: Pads with reflection of image without repeating the + last value on the edge. For example, padding [1, 2, 3, 4] + with 2 elements on both sides in reflect mode will result + in [3, 2, 1, 2, 3, 4, 3, 2]. + - ``symmetric``: Pads with reflection of image repeating the last + value on the edge. For example, padding [1, 2, 3, 4] with + 2 elements on both sides in symmetric mode will result in + [2, 1, 1, 2, 3, 4, 4, 3]. 
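# Sketch of using a wrapped torchvision transform through the registry names
# created by register_vision_transforms() above. torchvision transforms expect
# PIL images (or tensors), so the array is converted with NumpyToPIL first;
# assumes torchvision is installed, and the name follows the
# 'torchvision/<ClassName>' pattern used at registration time.
import numpy as np
from mmpretrain.registry import TRANSFORMS

to_pil = TRANSFORMS.build(dict(type='NumpyToPIL', to_rgb=True))
tv_crop = TRANSFORMS.build(
    dict(type='torchvision/RandomResizedCrop', size=176,
         interpolation='bilinear'))

results = {'img': np.random.randint(0, 256, (256, 256, 3), dtype=np.uint8)}
results = tv_crop(to_pil(results))
print(results['img'].size)    # (176, 176) PIL image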
+ """ + + def __init__(self, + crop_size: Union[Sequence, int], + padding: Optional[Union[Sequence, int]] = None, + pad_if_needed: bool = False, + pad_val: Union[Number, Sequence[Number]] = 0, + padding_mode: str = 'constant'): + if isinstance(crop_size, Sequence): + assert len(crop_size) == 2 + assert crop_size[0] > 0 and crop_size[1] > 0 + self.crop_size = crop_size + else: + assert crop_size > 0 + self.crop_size = (crop_size, crop_size) + # check padding mode + assert padding_mode in ['constant', 'edge', 'reflect', 'symmetric'] + self.padding = padding + self.pad_if_needed = pad_if_needed + self.pad_val = pad_val + self.padding_mode = padding_mode + + @cache_randomness + def rand_crop_params(self, img: np.ndarray): + """Get parameters for ``crop`` for a random crop. + + Args: + img (ndarray): Image to be cropped. + + Returns: + tuple: Params (offset_h, offset_w, target_h, target_w) to be + passed to ``crop`` for random crop. + """ + h, w = img.shape[:2] + target_h, target_w = self.crop_size + if w == target_w and h == target_h: + return 0, 0, h, w + elif w < target_w or h < target_h: + target_w = min(w, target_w) + target_h = min(w, target_h) + + offset_h = np.random.randint(0, h - target_h + 1) + offset_w = np.random.randint(0, w - target_w + 1) + + return offset_h, offset_w, target_h, target_w + + def transform(self, results: dict) -> dict: + """Transform function to randomly crop images. + + Args: + results (dict): Result dict from loading pipeline. + + Returns: + dict: Randomly cropped results, 'img_shape' + key in result dict is updated according to crop size. + """ + img = results['img'] + if self.padding is not None: + img = mmcv.impad(img, padding=self.padding, pad_val=self.pad_val) + + # pad img if needed + if self.pad_if_needed: + h_pad = math.ceil(max(0, self.crop_size[0] - img.shape[0]) / 2) + w_pad = math.ceil(max(0, self.crop_size[1] - img.shape[1]) / 2) + + img = mmcv.impad( + img, + padding=(w_pad, h_pad, w_pad, h_pad), + pad_val=self.pad_val, + padding_mode=self.padding_mode) + + offset_h, offset_w, target_h, target_w = self.rand_crop_params(img) + img = mmcv.imcrop( + img, + np.array([ + offset_w, + offset_h, + offset_w + target_w - 1, + offset_h + target_h - 1, + ])) + results['img'] = img + results['img_shape'] = img.shape + + return results + + def __repr__(self): + """Print the basic information of the transform. + + Returns: + str: Formatted string. + """ + repr_str = self.__class__.__name__ + f'(crop_size={self.crop_size}' + repr_str += f', padding={self.padding}' + repr_str += f', pad_if_needed={self.pad_if_needed}' + repr_str += f', pad_val={self.pad_val}' + repr_str += f', padding_mode={self.padding_mode})' + return repr_str + + +@TRANSFORMS.register_module() +class RandomResizedCrop(BaseTransform): + """Crop the given image to random scale and aspect ratio. + + A crop of random size (default: of 0.08 to 1.0) of the original size and a + random aspect ratio (default: of 3/4 to 4/3) of the original aspect ratio + is made. This crop is finally resized to given size. + + **Required Keys:** + + - img + + **Modified Keys:** + + - img + - img_shape + + Args: + scale (sequence | int): Desired output scale of the crop. If size is an + int instead of sequence like (h, w), a square crop (size, size) is + made. + crop_ratio_range (tuple): Range of the random size of the cropped + image compared to the original image. Defaults to (0.08, 1.0). + aspect_ratio_range (tuple): Range of the random aspect ratio of the + cropped image compared to the original image. 
+ Defaults to (3. / 4., 4. / 3.). + max_attempts (int): Maximum number of attempts before falling back to + Central Crop. Defaults to 10. + interpolation (str): Interpolation method, accepted values are + 'nearest', 'bilinear', 'bicubic', 'area', 'lanczos'. Defaults to + 'bilinear'. + backend (str): The image resize backend type, accepted values are + 'cv2' and 'pillow'. Defaults to 'cv2'. + """ + + def __init__(self, + scale: Union[Sequence, int], + crop_ratio_range: Tuple[float, float] = (0.08, 1.0), + aspect_ratio_range: Tuple[float, float] = (3. / 4., 4. / 3.), + max_attempts: int = 10, + interpolation: str = 'bilinear', + backend: str = 'cv2') -> None: + if isinstance(scale, Sequence): + assert len(scale) == 2 + assert scale[0] > 0 and scale[1] > 0 + self.scale = scale + else: + assert scale > 0 + self.scale = (scale, scale) + if (crop_ratio_range[0] > crop_ratio_range[1]) or ( + aspect_ratio_range[0] > aspect_ratio_range[1]): + raise ValueError( + 'range should be of kind (min, max). ' + f'But received crop_ratio_range {crop_ratio_range} ' + f'and aspect_ratio_range {aspect_ratio_range}.') + assert isinstance(max_attempts, int) and max_attempts >= 0, \ + 'max_attempts mush be int and no less than 0.' + assert interpolation in ('nearest', 'bilinear', 'bicubic', 'area', + 'lanczos') + + self.crop_ratio_range = crop_ratio_range + self.aspect_ratio_range = aspect_ratio_range + self.max_attempts = max_attempts + self.interpolation = interpolation + self.backend = backend + + @cache_randomness + def rand_crop_params(self, img: np.ndarray) -> Tuple[int, int, int, int]: + """Get parameters for ``crop`` for a random sized crop. + + Args: + img (ndarray): Image to be cropped. + + Returns: + tuple: Params (offset_h, offset_w, target_h, target_w) to be + passed to `crop` for a random sized crop. + """ + h, w = img.shape[:2] + area = h * w + + for _ in range(self.max_attempts): + target_area = np.random.uniform(*self.crop_ratio_range) * area + log_ratio = (math.log(self.aspect_ratio_range[0]), + math.log(self.aspect_ratio_range[1])) + aspect_ratio = math.exp(np.random.uniform(*log_ratio)) + target_w = int(round(math.sqrt(target_area * aspect_ratio))) + target_h = int(round(math.sqrt(target_area / aspect_ratio))) + + if 0 < target_w <= w and 0 < target_h <= h: + offset_h = np.random.randint(0, h - target_h + 1) + offset_w = np.random.randint(0, w - target_w + 1) + + return offset_h, offset_w, target_h, target_w + + # Fallback to central crop + in_ratio = float(w) / float(h) + if in_ratio < min(self.aspect_ratio_range): + target_w = w + target_h = int(round(target_w / min(self.aspect_ratio_range))) + elif in_ratio > max(self.aspect_ratio_range): + target_h = h + target_w = int(round(target_h * max(self.aspect_ratio_range))) + else: # whole image + target_w = w + target_h = h + offset_h = (h - target_h) // 2 + offset_w = (w - target_w) // 2 + return offset_h, offset_w, target_h, target_w + + def transform(self, results: dict) -> dict: + """Transform function to randomly resized crop images. + + Args: + results (dict): Result dict from loading pipeline. + + Returns: + dict: Randomly resized cropped results, 'img_shape' + key in result dict is updated according to crop size. 
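# Usage sketch for RandomResizedCrop as defined above: a random area/aspect
# crop is taken and resized to `scale`, falling back to a central crop after
# `max_attempts` failed samples (illustrative values, mmpretrain installed).
import numpy as np
from mmpretrain.registry import TRANSFORMS

rrc = TRANSFORMS.build(
    dict(type='RandomResizedCrop', scale=224,
         crop_ratio_range=(0.08, 1.0), aspect_ratio_range=(3 / 4, 4 / 3)))
results = {'img': np.random.randint(0, 256, (480, 640, 3), dtype=np.uint8)}
results = rrc(results)
print(results['img'].shape)    # (224, 224, 3)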
+ """ + img = results['img'] + offset_h, offset_w, target_h, target_w = self.rand_crop_params(img) + img = mmcv.imcrop( + img, + bboxes=np.array([ + offset_w, offset_h, offset_w + target_w - 1, + offset_h + target_h - 1 + ])) + img = mmcv.imresize( + img, + tuple(self.scale[::-1]), + interpolation=self.interpolation, + backend=self.backend) + results['img'] = img + results['img_shape'] = img.shape + + return results + + def __repr__(self): + """Print the basic information of the transform. + + Returns: + str: Formatted string. + """ + repr_str = self.__class__.__name__ + f'(scale={self.scale}' + repr_str += ', crop_ratio_range=' + repr_str += f'{tuple(round(s, 4) for s in self.crop_ratio_range)}' + repr_str += ', aspect_ratio_range=' + repr_str += f'{tuple(round(r, 4) for r in self.aspect_ratio_range)}' + repr_str += f', max_attempts={self.max_attempts}' + repr_str += f', interpolation={self.interpolation}' + repr_str += f', backend={self.backend})' + return repr_str + + +@TRANSFORMS.register_module() +class EfficientNetRandomCrop(RandomResizedCrop): + """EfficientNet style RandomResizedCrop. + + **Required Keys:** + + - img + + **Modified Keys:** + + - img + - img_shape + + Args: + scale (int): Desired output scale of the crop. Only int size is + accepted, a square crop (size, size) is made. + min_covered (Number): Minimum ratio of the cropped area to the original + area. Defaults to 0.1. + crop_padding (int): The crop padding parameter in efficientnet style + center crop. Defaults to 32. + crop_ratio_range (tuple): Range of the random size of the cropped + image compared to the original image. Defaults to (0.08, 1.0). + aspect_ratio_range (tuple): Range of the random aspect ratio of the + cropped image compared to the original image. + Defaults to (3. / 4., 4. / 3.). + max_attempts (int): Maximum number of attempts before falling back to + Central Crop. Defaults to 10. + interpolation (str): Interpolation method, accepted values are + 'nearest', 'bilinear', 'bicubic', 'area', 'lanczos'. Defaults to + 'bicubic'. + backend (str): The image resize backend type, accepted values are + 'cv2' and 'pillow'. Defaults to 'cv2'. + """ + + def __init__(self, + scale: int, + min_covered: float = 0.1, + crop_padding: int = 32, + interpolation: str = 'bicubic', + **kwarg): + assert isinstance(scale, int) + super().__init__(scale, interpolation=interpolation, **kwarg) + assert min_covered >= 0, 'min_covered should be no less than 0.' + assert crop_padding >= 0, 'crop_padding should be no less than 0.' + + self.min_covered = min_covered + self.crop_padding = crop_padding + + # https://github.com/kakaobrain/fast-autoaugment/blob/master/FastAutoAugment/data.py # noqa + @cache_randomness + def rand_crop_params(self, img: np.ndarray) -> Tuple[int, int, int, int]: + """Get parameters for ``crop`` for a random sized crop. + + Args: + img (ndarray): Image to be cropped. + + Returns: + tuple: Params (offset_h, offset_w, target_h, target_w) to be + passed to `crop` for a random sized crop. 
+ """ + h, w = img.shape[:2] + area = h * w + min_target_area = self.crop_ratio_range[0] * area + max_target_area = self.crop_ratio_range[1] * area + + for _ in range(self.max_attempts): + aspect_ratio = np.random.uniform(*self.aspect_ratio_range) + min_target_h = int( + round(math.sqrt(min_target_area / aspect_ratio))) + max_target_h = int( + round(math.sqrt(max_target_area / aspect_ratio))) + + if max_target_h * aspect_ratio > w: + max_target_h = int((w + 0.5 - 1e-7) / aspect_ratio) + if max_target_h * aspect_ratio > w: + max_target_h -= 1 + + max_target_h = min(max_target_h, h) + min_target_h = min(max_target_h, min_target_h) + + # slightly differs from tf implementation + target_h = int( + round(np.random.uniform(min_target_h, max_target_h))) + target_w = int(round(target_h * aspect_ratio)) + target_area = target_h * target_w + + # slight differs from tf. In tf, if target_area > max_target_area, + # area will be recalculated + if (target_area < min_target_area or target_area > max_target_area + or target_w > w or target_h > h + or target_area < self.min_covered * area): + continue + + offset_h = np.random.randint(0, h - target_h + 1) + offset_w = np.random.randint(0, w - target_w + 1) + + return offset_h, offset_w, target_h, target_w + + # Fallback to central crop + img_short = min(h, w) + crop_size = self.scale[0] / (self.scale[0] + + self.crop_padding) * img_short + + offset_h = max(0, int(round((h - crop_size) / 2.))) + offset_w = max(0, int(round((w - crop_size) / 2.))) + return offset_h, offset_w, crop_size, crop_size + + def __repr__(self): + """Print the basic information of the transform. + + Returns: + str: Formatted string. + """ + repr_str = super().__repr__()[:-1] + repr_str += f', min_covered={self.min_covered}' + repr_str += f', crop_padding={self.crop_padding})' + return repr_str + + +@TRANSFORMS.register_module() +class RandomErasing(BaseTransform): + """Randomly selects a rectangle region in an image and erase pixels. + + **Required Keys:** + + - img + + **Modified Keys:** + + - img + + Args: + erase_prob (float): Probability that image will be randomly erased. + Default: 0.5 + min_area_ratio (float): Minimum erased area / input image area + Default: 0.02 + max_area_ratio (float): Maximum erased area / input image area + Default: 0.4 + aspect_range (sequence | float): Aspect ratio range of erased area. + if float, it will be converted to (aspect_ratio, 1/aspect_ratio) + Default: (3/10, 10/3) + mode (str): Fill method in erased area, can be: + + - const (default): All pixels are assign with the same value. + - rand: each pixel is assigned with a random value in [0, 255] + + fill_color (sequence | Number): Base color filled in erased area. + Defaults to (128, 128, 128). + fill_std (sequence | Number, optional): If set and ``mode`` is 'rand', + fill erased area with random color from normal distribution + (mean=fill_color, std=fill_std); If not set, fill erased area with + random color from uniform distribution (0~255). Defaults to None. + + Note: + See `Random Erasing Data Augmentation + `_ + + This paper provided 4 modes: RE-R, RE-M, RE-0, RE-255, and use RE-M as + default. 
The config of these 4 modes are: + + - RE-R: RandomErasing(mode='rand') + - RE-M: RandomErasing(mode='const', fill_color=(123.67, 116.3, 103.5)) + - RE-0: RandomErasing(mode='const', fill_color=0) + - RE-255: RandomErasing(mode='const', fill_color=255) + """ + + def __init__(self, + erase_prob=0.5, + min_area_ratio=0.02, + max_area_ratio=0.4, + aspect_range=(3 / 10, 10 / 3), + mode='const', + fill_color=(128, 128, 128), + fill_std=None): + assert isinstance(erase_prob, float) and 0. <= erase_prob <= 1. + assert isinstance(min_area_ratio, float) and 0. <= min_area_ratio <= 1. + assert isinstance(max_area_ratio, float) and 0. <= max_area_ratio <= 1. + assert min_area_ratio <= max_area_ratio, \ + 'min_area_ratio should be smaller than max_area_ratio' + if isinstance(aspect_range, float): + aspect_range = min(aspect_range, 1 / aspect_range) + aspect_range = (aspect_range, 1 / aspect_range) + assert isinstance(aspect_range, Sequence) and len(aspect_range) == 2 \ + and all(isinstance(x, float) for x in aspect_range), \ + 'aspect_range should be a float or Sequence with two float.' + assert all(x > 0 for x in aspect_range), \ + 'aspect_range should be positive.' + assert aspect_range[0] <= aspect_range[1], \ + 'In aspect_range (min, max), min should be smaller than max.' + assert mode in ['const', 'rand'], \ + 'Please select `mode` from ["const", "rand"].' + if isinstance(fill_color, Number): + fill_color = [fill_color] * 3 + assert isinstance(fill_color, Sequence) and len(fill_color) == 3 \ + and all(isinstance(x, Number) for x in fill_color), \ + 'fill_color should be a float or Sequence with three int.' + if fill_std is not None: + if isinstance(fill_std, Number): + fill_std = [fill_std] * 3 + assert isinstance(fill_std, Sequence) and len(fill_std) == 3 \ + and all(isinstance(x, Number) for x in fill_std), \ + 'fill_std should be a float or Sequence with three int.' + + self.erase_prob = erase_prob + self.min_area_ratio = min_area_ratio + self.max_area_ratio = max_area_ratio + self.aspect_range = aspect_range + self.mode = mode + self.fill_color = fill_color + self.fill_std = fill_std + + def _fill_pixels(self, img, top, left, h, w): + """Fill pixels to the patch of image.""" + if self.mode == 'const': + patch = np.empty((h, w, 3), dtype=np.uint8) + patch[:, :] = np.array(self.fill_color, dtype=np.uint8) + elif self.fill_std is None: + # Uniform distribution + patch = np.random.uniform(0, 256, (h, w, 3)).astype(np.uint8) + else: + # Normal distribution + patch = np.random.normal(self.fill_color, self.fill_std, (h, w, 3)) + patch = np.clip(patch.astype(np.int32), 0, 255).astype(np.uint8) + + img[top:top + h, left:left + w] = patch + return img + + @cache_randomness + def random_disable(self): + """Randomly disable the transform.""" + return np.random.rand() > self.erase_prob + + @cache_randomness + def random_patch(self, img_h, img_w): + """Randomly generate patch the erase.""" + # convert the aspect ratio to log space to equally handle width and + # height. 
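# Usage sketch for RandomErasing in the RE-M setting listed in the docstring
# above (constant fill with the per-channel means given there); illustrative
# only, assuming mmpretrain is installed.
import numpy as np
from mmpretrain.registry import TRANSFORMS

erase = TRANSFORMS.build(
    dict(type='RandomErasing', erase_prob=0.5, mode='const',
         fill_color=(123.67, 116.3, 103.5)))
results = {'img': np.random.randint(0, 256, (224, 224, 3), dtype=np.uint8)}
results = erase(results)    # a random rectangle is filled in ~50% of calls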
+ log_aspect_range = np.log( + np.array(self.aspect_range, dtype=np.float32)) + aspect_ratio = np.exp(np.random.uniform(*log_aspect_range)) + area = img_h * img_w + area *= np.random.uniform(self.min_area_ratio, self.max_area_ratio) + + h = min(int(round(np.sqrt(area * aspect_ratio))), img_h) + w = min(int(round(np.sqrt(area / aspect_ratio))), img_w) + top = np.random.randint(0, img_h - h) if img_h > h else 0 + left = np.random.randint(0, img_w - w) if img_w > w else 0 + return top, left, h, w + + def transform(self, results): + """ + Args: + results (dict): Results dict from pipeline + + Returns: + dict: Results after the transformation. + """ + if self.random_disable(): + return results + + img = results['img'] + img_h, img_w = img.shape[:2] + + img = self._fill_pixels(img, *self.random_patch(img_h, img_w)) + + results['img'] = img + + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(erase_prob={self.erase_prob}, ' + repr_str += f'min_area_ratio={self.min_area_ratio}, ' + repr_str += f'max_area_ratio={self.max_area_ratio}, ' + repr_str += f'aspect_range={self.aspect_range}, ' + repr_str += f'mode={self.mode}, ' + repr_str += f'fill_color={self.fill_color}, ' + repr_str += f'fill_std={self.fill_std})' + return repr_str + + +@TRANSFORMS.register_module() +class EfficientNetCenterCrop(BaseTransform): + r"""EfficientNet style center crop. + + **Required Keys:** + + - img + + **Modified Keys:** + + - img + - img_shape + + Args: + crop_size (int): Expected size after cropping with the format + of (h, w). + crop_padding (int): The crop padding parameter in efficientnet style + center crop. Defaults to 32. + interpolation (str): Interpolation method, accepted values are + 'nearest', 'bilinear', 'bicubic', 'area', 'lanczos'. Only valid if + ``efficientnet_style`` is True. Defaults to 'bicubic'. + backend (str): The image resize backend type, accepted values are + `cv2` and `pillow`. Only valid if efficientnet style is True. + Defaults to `cv2`. + Notes: + - If the image is smaller than the crop size, return the original + image. + - The pipeline will be to first + to perform the center crop with the ``crop_size_`` as: + + .. math:: + + \text{crop_size_} = \frac{\text{crop_size}}{\text{crop_size} + + \text{crop_padding}} \times \text{short_edge} + + And then the pipeline resizes the img to the input crop size. + """ + + def __init__(self, + crop_size: int, + crop_padding: int = 32, + interpolation: str = 'bicubic', + backend: str = 'cv2'): + assert isinstance(crop_size, int) + assert crop_size > 0 + assert crop_padding >= 0 + assert interpolation in ('nearest', 'bilinear', 'bicubic', 'area', + 'lanczos') + + self.crop_size = crop_size + self.crop_padding = crop_padding + self.interpolation = interpolation + self.backend = backend + + def transform(self, results: dict) -> dict: + """Transform function to randomly resized crop images. + + Args: + results (dict): Result dict from loading pipeline. + + Returns: + dict: EfficientNet style center cropped results, 'img_shape' + key in result dict is updated according to crop size. 
+ """ + img = results['img'] + h, w = img.shape[:2] + + # https://github.com/tensorflow/tpu/blob/master/models/official/efficientnet/preprocessing.py#L118 # noqa + img_short = min(h, w) + crop_size = self.crop_size / (self.crop_size + + self.crop_padding) * img_short + + offset_h = max(0, int(round((h - crop_size) / 2.))) + offset_w = max(0, int(round((w - crop_size) / 2.))) + + # crop the image + img = mmcv.imcrop( + img, + bboxes=np.array([ + offset_w, offset_h, offset_w + crop_size - 1, + offset_h + crop_size - 1 + ])) + # resize image + img = mmcv.imresize( + img, (self.crop_size, self.crop_size), + interpolation=self.interpolation, + backend=self.backend) + results['img'] = img + results['img_shape'] = img.shape + + return results + + def __repr__(self): + """Print the basic information of the transform. + + Returns: + str: Formatted string. + """ + repr_str = self.__class__.__name__ + f'(crop_size={self.crop_size}' + repr_str += f', crop_padding={self.crop_padding}' + repr_str += f', interpolation={self.interpolation}' + repr_str += f', backend={self.backend})' + return repr_str + + +@TRANSFORMS.register_module() +class ResizeEdge(BaseTransform): + """Resize images along the specified edge. + + **Required Keys:** + + - img + + **Modified Keys:** + + - img + - img_shape + + **Added Keys:** + + - scale + - scale_factor + + Args: + scale (int): The edge scale to resizing. + edge (str): The edge to resize. Defaults to 'short'. + backend (str): Image resize backend, choices are 'cv2' and 'pillow'. + These two backends generates slightly different results. + Defaults to 'cv2'. + interpolation (str): Interpolation method, accepted values are + "nearest", "bilinear", "bicubic", "area", "lanczos" for 'cv2' + backend, "nearest", "bilinear" for 'pillow' backend. + Defaults to 'bilinear'. + """ + + def __init__(self, + scale: int, + edge: str = 'short', + backend: str = 'cv2', + interpolation: str = 'bilinear') -> None: + allow_edges = ['short', 'long', 'width', 'height'] + assert edge in allow_edges, \ + f'Invalid edge "{edge}", please specify from {allow_edges}.' + self.edge = edge + self.scale = scale + self.backend = backend + self.interpolation = interpolation + + def _resize_img(self, results: dict) -> None: + """Resize images with ``results['scale']``.""" + + img, w_scale, h_scale = mmcv.imresize( + results['img'], + results['scale'], + interpolation=self.interpolation, + return_scale=True, + backend=self.backend) + results['img'] = img + results['img_shape'] = img.shape[:2] + results['scale'] = img.shape[:2][::-1] + results['scale_factor'] = (w_scale, h_scale) + + def transform(self, results: Dict) -> Dict: + """Transform function to resize images. + + Args: + results (dict): Result dict from loading pipeline. + + Returns: + dict: Resized results, 'img', 'scale', 'scale_factor', + 'img_shape' keys are updated in result dict. + """ + assert 'img' in results, 'No `img` field in the input.' + + h, w = results['img'].shape[:2] + if any([ + # conditions to resize the width + self.edge == 'short' and w < h, + self.edge == 'long' and w > h, + self.edge == 'width', + ]): + width = self.scale + height = int(self.scale * h / w) + else: + height = self.scale + width = int(self.scale * w / h) + results['scale'] = (width, height) + + self._resize_img(results) + return results + + def __repr__(self): + """Print the basic information of the transform. + + Returns: + str: Formatted string. 
+ """ + repr_str = self.__class__.__name__ + repr_str += f'(scale={self.scale}, ' + repr_str += f'edge={self.edge}, ' + repr_str += f'backend={self.backend}, ' + repr_str += f'interpolation={self.interpolation})' + return repr_str + + +@TRANSFORMS.register_module() +class ColorJitter(BaseTransform): + """Randomly change the brightness, contrast and saturation of an image. + + Modified from + https://github.com/pytorch/vision/blob/main/torchvision/transforms/transforms.py + Licensed under the BSD 3-Clause License. + + **Required Keys:** + + - img + + **Modified Keys:** + + - img + + Args: + brightness (float | Sequence[float] (min, max)): How much to jitter + brightness. brightness_factor is chosen uniformly from + ``[max(0, 1 - brightness), 1 + brightness]`` or the given + ``[min, max]``. Should be non negative numbers. Defaults to 0. + contrast (float | Sequence[float] (min, max)): How much to jitter + contrast. contrast_factor is chosen uniformly from + ``[max(0, 1 - contrast), 1 + contrast]`` or the given + ``[min, max]``. Should be non negative numbers. Defaults to 0. + saturation (float | Sequence[float] (min, max)): How much to jitter + saturation. saturation_factor is chosen uniformly from + ``[max(0, 1 - saturation), 1 + saturation]`` or the given + ``[min, max]``. Should be non negative numbers. Defaults to 0. + hue (float | Sequence[float] (min, max)): How much to jitter hue. + hue_factor is chosen uniformly from ``[-hue, hue]`` (0 <= hue + <= 0.5) or the given ``[min, max]`` (-0.5 <= min <= max <= 0.5). + Defaults to 0. + backend (str): The backend to operate the image. Defaults to 'pillow' + """ + + def __init__(self, + brightness: Union[float, Sequence[float]] = 0., + contrast: Union[float, Sequence[float]] = 0., + saturation: Union[float, Sequence[float]] = 0., + hue: Union[float, Sequence[float]] = 0., + backend='pillow'): + self.brightness = self._set_range(brightness, 'brightness') + self.contrast = self._set_range(contrast, 'contrast') + self.saturation = self._set_range(saturation, 'saturation') + self.hue = self._set_range(hue, 'hue', center=0, bound=(-0.5, 0.5)) + self.backend = backend + + def _set_range(self, value, name, center=1, bound=(0, float('inf'))): + """Set the range of magnitudes.""" + if isinstance(value, numbers.Number): + if value < 0: + raise ValueError( + f'If {name} is a single number, it must be non negative.') + value = (center - float(value), center + float(value)) + + if isinstance(value, (tuple, list)) and len(value) == 2: + if not bound[0] <= value[0] <= value[1] <= bound[1]: + value = np.clip(value, bound[0], bound[1]) + from mmengine.logging import MMLogger + logger = MMLogger.get_current_instance() + logger.warning(f'ColorJitter {name} values exceed the bound ' + f'{bound}, clipped to the bound.') + else: + raise TypeError(f'{name} should be a single number ' + 'or a list/tuple with length 2.') + + # if value is 0 or (1., 1.) for brightness/contrast/saturation + # or (0., 0.) 
for hue, do nothing + if value[0] == value[1] == center: + value = None + else: + value = tuple(value) + + return value + + @cache_randomness + def _rand_params(self): + """Get random parameters including magnitudes and indices of + transforms.""" + trans_inds = np.random.permutation(4) + b, c, s, h = (None, ) * 4 + + if self.brightness is not None: + b = np.random.uniform(self.brightness[0], self.brightness[1]) + if self.contrast is not None: + c = np.random.uniform(self.contrast[0], self.contrast[1]) + if self.saturation is not None: + s = np.random.uniform(self.saturation[0], self.saturation[1]) + if self.hue is not None: + h = np.random.uniform(self.hue[0], self.hue[1]) + + return trans_inds, b, c, s, h + + def transform(self, results: Dict) -> Dict: + """Transform function to resize images. + + Args: + results (dict): Result dict from loading pipeline. + + Returns: + dict: ColorJitter results, 'img' key is updated in result dict. + """ + img = results['img'] + trans_inds, brightness, contrast, saturation, hue = self._rand_params() + + for index in trans_inds: + if index == 0 and brightness is not None: + img = mmcv.adjust_brightness( + img, brightness, backend=self.backend) + elif index == 1 and contrast is not None: + img = mmcv.adjust_contrast(img, contrast, backend=self.backend) + elif index == 2 and saturation is not None: + img = mmcv.adjust_color( + img, alpha=saturation, backend=self.backend) + elif index == 3 and hue is not None: + img = mmcv.adjust_hue(img, hue, backend=self.backend) + + results['img'] = img + return results + + def __repr__(self): + """Print the basic information of the transform. + + Returns: + str: Formatted string. + """ + repr_str = self.__class__.__name__ + repr_str += f'(brightness={self.brightness}, ' + repr_str += f'contrast={self.contrast}, ' + repr_str += f'saturation={self.saturation}, ' + repr_str += f'hue={self.hue})' + return repr_str + + +@TRANSFORMS.register_module() +class Lighting(BaseTransform): + """Adjust images lighting using AlexNet-style PCA jitter. + + **Required Keys:** + + - img + + **Modified Keys:** + + - img + + Args: + eigval (Sequence[float]): the eigenvalue of the convariance matrix + of pixel values, respectively. + eigvec (list[list]): the eigenvector of the convariance matrix of + pixel values, respectively. + alphastd (float): The standard deviation for distribution of alpha. + Defaults to 0.1. + to_rgb (bool): Whether to convert img to rgb. Defaults to False. + """ + + def __init__(self, + eigval: Sequence[float], + eigvec: Sequence[float], + alphastd: float = 0.1, + to_rgb: bool = False): + assert isinstance(eigval, Sequence), \ + f'eigval must be Sequence, got {type(eigval)} instead.' + assert isinstance(eigvec, Sequence), \ + f'eigvec must be Sequence, got {type(eigvec)} instead.' + for vec in eigvec: + assert isinstance(vec, Sequence) and len(vec) == len(eigvec[0]), \ + 'eigvec must contains lists with equal length.' + assert isinstance(alphastd, float), 'alphastd should be of type ' \ + f'float or int, got {type(alphastd)} instead.' + + self.eigval = np.array(eigval) + self.eigvec = np.array(eigvec) + self.alphastd = alphastd + self.to_rgb = to_rgb + + def transform(self, results: Dict) -> Dict: + """Transform function to resize images. + + Args: + results (dict): Result dict from loading pipeline. + + Returns: + dict: Lightinged results, 'img' key is updated in result dict. + """ + assert 'img' in results, 'No `img` field in the input.' 
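A usage sketch for ``ColorJitter`` defined above (illustrative values). With ``brightness=0.4`` the brightness factor is drawn from ``[0.6, 1.4]``, and with ``hue=0.1`` the hue factor from ``[-0.1, 0.1]``.

import numpy as np

from mmpretrain.datasets import ColorJitter  # assumed public import path

img = np.random.randint(0, 256, (64, 64, 3), dtype=np.uint8)
jitter = ColorJitter(brightness=0.4, contrast=0.4, saturation=0.4, hue=0.1)
out = jitter.transform({'img': img})['img']
print(out.shape)  # (64, 64, 3); the four adjustments are applied in random order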
+ + img = results['img'] + img_lighting = mmcv.adjust_lighting( + img, + self.eigval, + self.eigvec, + alphastd=self.alphastd, + to_rgb=self.to_rgb) + results['img'] = img_lighting.astype(img.dtype) + return results + + def __repr__(self): + """Print the basic information of the transform. + + Returns: + str: Formatted string. + """ + repr_str = self.__class__.__name__ + repr_str += f'(eigval={self.eigval.tolist()}, ' + repr_str += f'eigvec={self.eigvec.tolist()}, ' + repr_str += f'alphastd={self.alphastd}, ' + repr_str += f'to_rgb={self.to_rgb})' + return repr_str + + +# 'Albu' is used in previous versions of mmpretrain, here is for compatibility +# users can use both 'Albumentations' and 'Albu'. +@TRANSFORMS.register_module(['Albumentations', 'Albu']) +class Albumentations(BaseTransform): + """Wrapper to use augmentation from albumentations library. + + **Required Keys:** + + - img + + **Modified Keys:** + + - img + - img_shape + + Adds custom transformations from albumentations library. + More details can be found in + `Albumentations `_. + An example of ``transforms`` is as followed: + + .. code-block:: + + [ + dict( + type='ShiftScaleRotate', + shift_limit=0.0625, + scale_limit=0.0, + rotate_limit=0, + interpolation=1, + p=0.5), + dict( + type='RandomBrightnessContrast', + brightness_limit=[0.1, 0.3], + contrast_limit=[0.1, 0.3], + p=0.2), + dict(type='ChannelShuffle', p=0.1), + dict( + type='OneOf', + transforms=[ + dict(type='Blur', blur_limit=3, p=1.0), + dict(type='MedianBlur', blur_limit=3, p=1.0) + ], + p=0.1), + ] + + Args: + transforms (List[Dict]): List of albumentations transform configs. + keymap (Optional[Dict]): Mapping of mmpretrain to albumentations + fields, in format {'input key':'albumentation-style key'}. + Defaults to None. + + Example: + >>> import mmcv + >>> from mmpretrain.datasets import Albumentations + >>> transforms = [ + ... dict( + ... type='ShiftScaleRotate', + ... shift_limit=0.0625, + ... scale_limit=0.0, + ... rotate_limit=0, + ... interpolation=1, + ... p=0.5), + ... dict( + ... type='RandomBrightnessContrast', + ... brightness_limit=[0.1, 0.3], + ... contrast_limit=[0.1, 0.3], + ... p=0.2), + ... dict(type='ChannelShuffle', p=0.1), + ... dict( + ... type='OneOf', + ... transforms=[ + ... dict(type='Blur', blur_limit=3, p=1.0), + ... dict(type='MedianBlur', blur_limit=3, p=1.0) + ... ], + ... p=0.1), + ... ] + >>> albu = Albumentations(transforms) + >>> data = {'img': mmcv.imread('./demo/demo.JPEG')} + >>> data = albu(data) + >>> print(data['img'].shape) + (375, 500, 3) + """ + + def __init__(self, transforms: List[Dict], keymap: Optional[Dict] = None): + if albumentations is None: + raise RuntimeError('albumentations is not installed') + else: + from albumentations import Compose as albu_Compose + + assert isinstance(transforms, list), 'transforms must be a list.' + if keymap is not None: + assert isinstance(keymap, dict), 'keymap must be None or a dict. ' + + self.transforms = transforms + + self.aug = albu_Compose( + [self.albu_builder(t) for t in self.transforms]) + + if not keymap: + self.keymap_to_albu = dict(img='image') + else: + self.keymap_to_albu = keymap + self.keymap_back = {v: k for k, v in self.keymap_to_albu.items()} + + def albu_builder(self, cfg: Dict): + """Import a module from albumentations. + + It inherits some of :func:`build_from_cfg` logic. + Args: + cfg (dict): Config dict. It should at least contain the key "type". + Returns: + obj: The constructed object. 
+ """ + + assert isinstance(cfg, dict) and 'type' in cfg, 'each item in ' \ + "transforms must be a dict with keyword 'type'." + args = cfg.copy() + + obj_type = args.pop('type') + if mmengine.is_str(obj_type): + obj_cls = getattr(albumentations, obj_type) + elif inspect.isclass(obj_type): + obj_cls = obj_type + else: + raise TypeError( + f'type must be a str or valid type, but got {type(obj_type)}') + + if 'transforms' in args: + args['transforms'] = [ + self.albu_builder(transform) + for transform in args['transforms'] + ] + + return obj_cls(**args) + + @staticmethod + def mapper(d, keymap): + """Dictionary mapper. + + Renames keys according to keymap provided. + Args: + d (dict): old dict + keymap (dict): {'old_key':'new_key'} + Returns: + dict: new dict. + """ + + updated_dict = {} + for k, v in zip(d.keys(), d.values()): + new_k = keymap.get(k, k) + updated_dict[new_k] = d[k] + return updated_dict + + def transform(self, results: Dict) -> Dict: + """Transform function to perform albumentations transforms. + + Args: + results (dict): Result dict from loading pipeline. + + Returns: + dict: Transformed results, 'img' and 'img_shape' keys are + updated in result dict. + """ + assert 'img' in results, 'No `img` field in the input.' + + # dict to albumentations format + results = self.mapper(results, self.keymap_to_albu) + results = self.aug(**results) + + # back to the original format + results = self.mapper(results, self.keymap_back) + results['img_shape'] = results['img'].shape[:2] + + return results + + def __repr__(self): + """Print the basic information of the transform. + + Returns: + str: Formatted string. + """ + repr_str = self.__class__.__name__ + repr_str += f'(transforms={repr(self.transforms)})' + return repr_str + + +@TRANSFORMS.register_module() +class SimMIMMaskGenerator(BaseTransform): + """Generate random block mask for each Image. + + **Added Keys**: + + - mask + + This module is used in SimMIM to generate masks. + + Args: + input_size (int): Size of input image. Defaults to 192. + mask_patch_size (int): Size of each block mask. Defaults to 32. + model_patch_size (int): Patch size of each token. Defaults to 4. + mask_ratio (float): The mask ratio of image. Defaults to 0.6. + """ + + def __init__(self, + input_size: int = 192, + mask_patch_size: int = 32, + model_patch_size: int = 4, + mask_ratio: float = 0.6): + self.input_size = input_size + self.mask_patch_size = mask_patch_size + self.model_patch_size = model_patch_size + self.mask_ratio = mask_ratio + + assert self.input_size % self.mask_patch_size == 0 + assert self.mask_patch_size % self.model_patch_size == 0 + + self.rand_size = self.input_size // self.mask_patch_size + self.scale = self.mask_patch_size // self.model_patch_size + + self.token_count = self.rand_size**2 + self.mask_count = int(np.ceil(self.token_count * self.mask_ratio)) + + def transform(self, results: dict) -> dict: + """Method to generate random block mask for each Image in SimMIM. + + Args: + results (dict): Result dict from previous pipeline. + + Returns: + dict: Result dict with added key ``mask``. 
+ """ + mask_idx = np.random.permutation(self.token_count)[:self.mask_count] + mask = np.zeros(self.token_count, dtype=int) + mask[mask_idx] = 1 + + mask = mask.reshape((self.rand_size, self.rand_size)) + mask = mask.repeat(self.scale, axis=0).repeat(self.scale, axis=1) + + results.update({'mask': mask}) + + return results + + def __repr__(self) -> str: + repr_str = self.__class__.__name__ + repr_str += f'(input_size={self.input_size}, ' + repr_str += f'mask_patch_size={self.mask_patch_size}, ' + repr_str += f'model_patch_size={self.model_patch_size}, ' + repr_str += f'mask_ratio={self.mask_ratio})' + return repr_str + + +@TRANSFORMS.register_module() +class BEiTMaskGenerator(BaseTransform): + """Generate mask for image. + + **Added Keys**: + + - mask + + This module is borrowed from + https://github.com/microsoft/unilm/tree/master/beit + + Args: + input_size (int): The size of input image. + num_masking_patches (int): The number of patches to be masked. + min_num_patches (int): The minimum number of patches to be masked + in the process of generating mask. Defaults to 4. + max_num_patches (int, optional): The maximum number of patches to be + masked in the process of generating mask. Defaults to None. + min_aspect (float): The minimum aspect ratio of mask blocks. Defaults + to 0.3. + min_aspect (float, optional): The minimum aspect ratio of mask blocks. + Defaults to None. + """ + + def __init__(self, + input_size: int, + num_masking_patches: int, + min_num_patches: int = 4, + max_num_patches: Optional[int] = None, + min_aspect: float = 0.3, + max_aspect: Optional[float] = None) -> None: + if not isinstance(input_size, tuple): + input_size = (input_size, ) * 2 + self.height, self.width = input_size + + self.num_patches = self.height * self.width + + self.num_masking_patches = num_masking_patches + self.min_num_patches = min_num_patches + self.max_num_patches = num_masking_patches if max_num_patches is None \ + else max_num_patches + + max_aspect = max_aspect or 1 / min_aspect + self.log_aspect_ratio = (math.log(min_aspect), math.log(max_aspect)) + + def _mask(self, mask: np.ndarray, max_mask_patches: int) -> int: + """Generate mask recursively. + + Args: + mask (np.ndarray): The mask to be generated. + max_mask_patches (int): The maximum number of patches to be masked. + + Returns: + int: The number of patches masked. + """ + delta = 0 + for _ in range(10): + target_area = np.random.uniform(self.min_num_patches, + max_mask_patches) + aspect_ratio = math.exp(np.random.uniform(*self.log_aspect_ratio)) + h = int(round(math.sqrt(target_area * aspect_ratio))) + w = int(round(math.sqrt(target_area / aspect_ratio))) + if w < self.width and h < self.height: + top = np.random.randint(0, self.height - h) + left = np.random.randint(0, self.width - w) + + num_masked = mask[top:top + h, left:left + w].sum() + # Overlap + if 0 < h * w - num_masked <= max_mask_patches: + for i in range(top, top + h): + for j in range(left, left + w): + if mask[i, j] == 0: + mask[i, j] = 1 + delta += 1 + if delta > 0: + break + return delta + + def transform(self, results: dict) -> dict: + """Method to generate random block mask for each Image in BEiT. + + Args: + results (dict): Result dict from previous pipeline. + + Returns: + dict: Result dict with added key ``mask``. 
+ """ + mask = np.zeros(shape=(self.height, self.width), dtype=int) + + mask_count = 0 + while mask_count != self.num_masking_patches: + max_mask_patches = self.num_masking_patches - mask_count + max_mask_patches = min(max_mask_patches, self.max_num_patches) + + delta = self._mask(mask, max_mask_patches) + mask_count += delta + results.update({'mask': mask}) + + return results + + def __repr__(self) -> str: + repr_str = self.__class__.__name__ + repr_str += f'(height={self.height}, ' + repr_str += f'width={self.width}, ' + repr_str += f'num_patches={self.num_patches}, ' + repr_str += f'num_masking_patches={self.num_masking_patches}, ' + repr_str += f'min_num_patches={self.min_num_patches}, ' + repr_str += f'max_num_patches={self.max_num_patches}, ' + repr_str += f'log_aspect_ratio={self.log_aspect_ratio})' + return repr_str + + +@TRANSFORMS.register_module() +class RandomResizedCropAndInterpolationWithTwoPic(BaseTransform): + """Crop the given PIL Image to random size and aspect ratio with random + interpolation. + + **Required Keys**: + + - img + + **Modified Keys**: + + - img + + **Added Keys**: + + - target_img + + This module is borrowed from + https://github.com/microsoft/unilm/tree/master/beit. + + A crop of random size (default: of 0.08 to 1.0) of the original size and a + random aspect ratio (default: of 3/4 to 4/3) of the original aspect ratio + is made. This crop is finally resized to given size. This is popularly used + to train the Inception networks. This module first crops the image and + resizes the crop to two different sizes. + + Args: + size (Union[tuple, int]): Expected output size of each edge of the + first image. + second_size (Union[tuple, int], optional): Expected output size of each + edge of the second image. + scale (tuple[float, float]): Range of size of the origin size cropped. + Defaults to (0.08, 1.0). + ratio (tuple[float, float]): Range of aspect ratio of the origin aspect + ratio cropped. Defaults to (3./4., 4./3.). + interpolation (str): The interpolation for the first image. Defaults + to ``bilinear``. + second_interpolation (str): The interpolation for the second image. + Defaults to ``lanczos``. + """ + + def __init__(self, + size: Union[tuple, int], + second_size=None, + scale=(0.08, 1.0), + ratio=(3. / 4., 4. / 3.), + interpolation='bilinear', + second_interpolation='lanczos') -> None: + if isinstance(size, tuple): + self.size = size + else: + self.size = (size, size) + if second_size is not None: + if isinstance(second_size, tuple): + self.second_size = second_size + else: + self.second_size = (second_size, second_size) + else: + self.second_size = None + if (scale[0] > scale[1]) or (ratio[0] > ratio[1]): + ('range should be of kind (min, max)') + + if interpolation == 'random': + self.interpolation = ('bilinear', 'bicubic') + else: + self.interpolation = interpolation + self.second_interpolation = second_interpolation + self.scale = scale + self.ratio = ratio + + @staticmethod + def get_params(img: np.ndarray, scale: tuple, + ratio: tuple) -> Sequence[int]: + """Get parameters for ``crop`` for a random sized crop. + + Args: + img (np.ndarray): Image to be cropped. + scale (tuple): range of size of the origin size cropped + ratio (tuple): range of aspect ratio of the origin aspect + ratio cropped + + Returns: + tuple: params (i, j, h, w) to be passed to ``crop`` for a random + sized crop. 
+ """ + img_h, img_w = img.shape[:2] + area = img_h * img_w + + for _ in range(10): + target_area = np.random.uniform(*scale) * area + log_ratio = (math.log(ratio[0]), math.log(ratio[1])) + aspect_ratio = math.exp(np.random.uniform(*log_ratio)) + + w = int(round(math.sqrt(target_area * aspect_ratio))) + h = int(round(math.sqrt(target_area / aspect_ratio))) + + if w < img_w and h < img_h: + i = np.random.randint(0, img_h - h) + j = np.random.randint(0, img_w - w) + return i, j, h, w + + # Fallback to central crop + in_ratio = img_w / img_h + if in_ratio < min(ratio): + w = img_w + h = int(round(w / min(ratio))) + elif in_ratio > max(ratio): + h = img_h + w = int(round(h * max(ratio))) + else: # whole image + w = img_w + h = img_h + i = (img_h - h) // 2 + j = (img_w - w) // 2 + return i, j, h, w + + def transform(self, results: dict) -> dict: + """Crop the given image and resize it to two different sizes. + + This module crops the given image randomly and resize the crop to two + different sizes. This is popularly used in BEiT-style masked image + modeling, where an off-the-shelf model is used to provide the target. + + Args: + results (dict): Results from previous pipeline. + + Returns: + dict: Results after applying this transformation. + """ + img = results['img'] + i, j, h, w = self.get_params(img, self.scale, self.ratio) + if isinstance(self.interpolation, (tuple, list)): + interpolation = np.random.choice(self.interpolation) + else: + interpolation = self.interpolation + if self.second_size is None: + img = img[i:i + h, j:j + w] + img = mmcv.imresize(img, self.size, interpolation=interpolation) + results.update({'img': img}) + else: + img = img[i:i + h, j:j + w] + img_sample = mmcv.imresize( + img, self.size, interpolation=interpolation) + img_target = mmcv.imresize( + img, self.second_size, interpolation=self.second_interpolation) + results.update({'img': [img_sample, img_target]}) + return results + + def __repr__(self) -> str: + repr_str = self.__class__.__name__ + repr_str += f'(size={self.size}, ' + repr_str += f'second_size={self.second_size}, ' + repr_str += f'interpolation={self.interpolation}, ' + repr_str += f'second_interpolation={self.second_interpolation}, ' + repr_str += f'scale={self.scale}, ' + repr_str += f'ratio={self.ratio})' + return repr_str + + +@TRANSFORMS.register_module() +class CleanCaption(BaseTransform): + """Clean caption text. + + Remove some useless punctuation for the caption task. + + **Required Keys:** + + - ``*keys`` + + **Modified Keys:** + + - ``*keys`` + + Args: + keys (Sequence[str], optional): The keys of text to be cleaned. + Defaults to 'gt_caption'. + remove_chars (str): The characters to be removed. Defaults to + :py:attr:`string.punctuation`. + lowercase (bool): Whether to convert the text to lowercase. + Defaults to True. + remove_dup_space (bool): Whether to remove duplicated whitespaces. + Defaults to True. + strip (bool): Whether to remove leading and trailing whitespaces. + Defaults to True. 
+ """ + + def __init__( + self, + keys='gt_caption', + remove_chars=string.punctuation, + lowercase=True, + remove_dup_space=True, + strip=True, + ): + if isinstance(keys, str): + keys = [keys] + self.keys = keys + self.transtab = str.maketrans({ch: None for ch in remove_chars}) + self.lowercase = lowercase + self.remove_dup_space = remove_dup_space + self.strip = strip + + def _clean(self, text): + """Perform text cleaning before tokenizer.""" + + if self.strip: + text = text.strip() + + text = text.translate(self.transtab) + + if self.remove_dup_space: + text = re.sub(r'\s{2,}', ' ', text) + + if self.lowercase: + text = text.lower() + + return text + + def clean(self, text): + """Perform text cleaning before tokenizer.""" + if isinstance(text, (list, tuple)): + return [self._clean(item) for item in text] + elif isinstance(text, str): + return self._clean(text) + else: + raise TypeError('text must be a string or a list of strings') + + def transform(self, results: dict) -> dict: + """Method to clean the input text data.""" + for key in self.keys: + results[key] = self.clean(results[key]) + return results + + +@TRANSFORMS.register_module() +class OFAAddObjects(BaseTransform): + + def transform(self, results: dict) -> dict: + if 'objects' not in results: + raise ValueError( + 'Some OFA fine-tuned models requires `objects` field in the ' + 'dataset, which is generated by VinVL. Or please use ' + 'zero-shot configs. See ' + 'https://github.com/OFA-Sys/OFA/issues/189') + + if 'question' in results: + prompt = '{} object: {}'.format( + results['question'], + ' '.join(results['objects']), + ) + results['decoder_prompt'] = prompt + results['question'] = prompt + + +@TRANSFORMS.register_module() +class RandomTranslatePad(BaseTransform): + + def __init__(self, size=640, aug_translate=False): + self.size = size + self.aug_translate = aug_translate + + @cache_randomness + def rand_translate_params(self, dh, dw): + top = np.random.randint(0, dh) + left = np.random.randint(0, dw) + return top, left + + def transform(self, results: dict) -> dict: + img = results['img'] + h, w = img.shape[:-1] + dw = self.size - w + dh = self.size - h + if self.aug_translate: + top, left = self.rand_translate_params(dh, dw) + else: + top = round(dh / 2.0 - 0.1) + left = round(dw / 2.0 - 0.1) + + out_img = np.zeros((self.size, self.size, 3), dtype=np.float32) + out_img[top:top + h, left:left + w, :] = img + results['img'] = out_img + results['img_shape'] = (self.size, self.size) + + # translate box + if 'gt_bboxes' in results.keys(): + for i in range(len(results['gt_bboxes'])): + box = results['gt_bboxes'][i] + box[0], box[2] = box[0] + left, box[2] + left + box[1], box[3] = box[1] + top, box[3] + top + results['gt_bboxes'][i] = box + + return results diff --git a/mmpretrain/datasets/transforms/wrappers.py b/mmpretrain/datasets/transforms/wrappers.py new file mode 100644 index 0000000000000000000000000000000000000000..c0dfd730b4db0dc80ed315b79658cfbf683e4035 --- /dev/null +++ b/mmpretrain/datasets/transforms/wrappers.py @@ -0,0 +1,144 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import copy +from typing import Callable, List, Union + +from mmcv.transforms import BaseTransform, Compose + +from mmpretrain.registry import TRANSFORMS + +# Define type of transform or transform config +Transform = Union[dict, Callable[[dict], dict]] + + +@TRANSFORMS.register_module() +class MultiView(BaseTransform): + """A transform wrapper for multiple views of an image. 
+ + Args: + transforms (list[dict | callable], optional): Sequence of transform + object or config dict to be wrapped. + mapping (dict): A dict that defines the input key mapping. + The keys corresponds to the inner key (i.e., kwargs of the + ``transform`` method), and should be string type. The values + corresponds to the outer keys (i.e., the keys of the + data/results), and should have a type of string, list or dict. + None means not applying input mapping. Default: None. + allow_nonexist_keys (bool): If False, the outer keys in the mapping + must exist in the input data, or an exception will be raised. + Default: False. + + Examples: + >>> # Example 1: MultiViews 1 pipeline with 2 views + >>> pipeline = [ + >>> dict(type='MultiView', + >>> num_views=2, + >>> transforms=[ + >>> [ + >>> dict(type='Resize', scale=224))], + >>> ]) + >>> ] + >>> # Example 2: MultiViews 2 pipelines, the first with 2 views, + >>> # the second with 6 views + >>> pipeline = [ + >>> dict(type='MultiView', + >>> num_views=[2, 6], + >>> transforms=[ + >>> [ + >>> dict(type='Resize', scale=224)], + >>> [ + >>> dict(type='Resize', scale=224), + >>> dict(type='RandomSolarize')], + >>> ]) + >>> ] + """ + + def __init__(self, transforms: List[List[Transform]], + num_views: Union[int, List[int]]) -> None: + + if isinstance(num_views, int): + num_views = [num_views] + assert isinstance(num_views, List) + assert len(num_views) == len(transforms) + self.num_views = num_views + + self.pipelines = [] + for trans in transforms: + pipeline = Compose(trans) + self.pipelines.append(pipeline) + + self.transforms = [] + for i in range(len(num_views)): + self.transforms.extend([self.pipelines[i]] * num_views[i]) + + def transform(self, results: dict) -> dict: + """Apply transformation to inputs. + + Args: + results (dict): Result dict from previous pipelines. + + Returns: + dict: Transformed results. + """ + multi_views_outputs = dict(img=[]) + for trans in self.transforms: + inputs = copy.deepcopy(results) + outputs = trans(inputs) + + multi_views_outputs['img'].append(outputs['img']) + results.update(multi_views_outputs) + return results + + def __repr__(self) -> str: + repr_str = self.__class__.__name__ + '(' + for i, p in enumerate(self.pipelines): + repr_str += f'\nPipeline {i + 1} with {self.num_views[i]} views:\n' + repr_str += str(p) + repr_str += ')' + return repr_str + + +@TRANSFORMS.register_module() +class ApplyToList(BaseTransform): + """A transform wrapper to apply the wrapped transforms to a list of items. + For example, to load and resize a list of images. + + Args: + transforms (list[dict | callable]): Sequence of transform config dict + to be wrapped. + scatter_key (str): The key to scatter data dict. If the field is a + list, scatter the list to multiple data dicts to do transformation. + collate_keys (List[str]): The keys to collate from multiple data dicts. + The fields in ``collate_keys`` will be composed into a list after + transformation, and the other fields will be adopted from the + first data dict. 
+ """ + + def __init__(self, transforms, scatter_key, collate_keys): + super().__init__() + + self.transforms = Compose([TRANSFORMS.build(t) for t in transforms]) + self.scatter_key = scatter_key + self.collate_keys = set(collate_keys) + self.collate_keys.add(self.scatter_key) + + def transform(self, results: dict): + scatter_field = results.get(self.scatter_key) + + if isinstance(scatter_field, list): + scattered_results = [] + for item in scatter_field: + single_results = copy.deepcopy(results) + single_results[self.scatter_key] = item + scattered_results.append(self.transforms(single_results)) + + final_output = scattered_results[0] + + # merge output list to single output + for key in scattered_results[0].keys(): + if key in self.collate_keys: + final_output[key] = [ + single[key] for single in scattered_results + ] + return final_output + else: + return self.transforms(results) diff --git a/mmpretrain/datasets/utils.py b/mmpretrain/datasets/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..fcb60e432c374c1a904700a7348f706fa0e523eb --- /dev/null +++ b/mmpretrain/datasets/utils.py @@ -0,0 +1,243 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import gzip +import hashlib +import os +import os.path +import shutil +import tarfile +import tempfile +import urllib.error +import urllib.request +import zipfile + +from mmengine.fileio import LocalBackend, get_file_backend + +__all__ = [ + 'rm_suffix', 'check_integrity', 'download_and_extract_archive', + 'open_maybe_compressed_file' +] + + +def rm_suffix(s, suffix=None): + if suffix is None: + return s[:s.rfind('.')] + else: + return s[:s.rfind(suffix)] + + +def calculate_md5(fpath: str, chunk_size: int = 1024 * 1024): + md5 = hashlib.md5() + backend = get_file_backend(fpath, enable_singleton=True) + if isinstance(backend, LocalBackend): + # Enable chunk update for local file. + with open(fpath, 'rb') as f: + for chunk in iter(lambda: f.read(chunk_size), b''): + md5.update(chunk) + else: + md5.update(backend.get(fpath)) + return md5.hexdigest() + + +def check_md5(fpath, md5, **kwargs): + return md5 == calculate_md5(fpath, **kwargs) + + +def check_integrity(fpath, md5=None): + if not os.path.isfile(fpath): + return False + if md5 is None: + return True + return check_md5(fpath, md5) + + +def download_url_to_file(url, dst, hash_prefix=None, progress=True): + """Download object at the given URL to a local path. + + Modified from + https://pytorch.org/docs/stable/hub.html#torch.hub.download_url_to_file + + Args: + url (str): URL of the object to download + dst (str): Full path where object will be saved, + e.g. ``/tmp/temporary_file`` + hash_prefix (string, optional): If not None, the SHA256 downloaded + file should start with ``hash_prefix``. Defaults to None. + progress (bool): whether or not to display a progress bar to stderr. + Defaults to True + """ + file_size = None + req = urllib.request.Request(url) + u = urllib.request.urlopen(req) + meta = u.info() + if hasattr(meta, 'getheaders'): + content_length = meta.getheaders('Content-Length') + else: + content_length = meta.get_all('Content-Length') + if content_length is not None and len(content_length) > 0: + file_size = int(content_length[0]) + + # We deliberately save it in a temp file and move it after download is + # complete. This prevents a local file being overridden by a broken + # download. 
+ dst = os.path.expanduser(dst) + dst_dir = os.path.dirname(dst) + f = tempfile.NamedTemporaryFile(delete=False, dir=dst_dir) + + import rich.progress + columns = [ + rich.progress.DownloadColumn(), + rich.progress.BarColumn(bar_width=None), + rich.progress.TimeRemainingColumn(), + ] + try: + if hash_prefix is not None: + sha256 = hashlib.sha256() + with rich.progress.Progress(*columns) as pbar: + task = pbar.add_task('download', total=file_size, visible=progress) + while True: + buffer = u.read(8192) + if len(buffer) == 0: + break + f.write(buffer) + if hash_prefix is not None: + sha256.update(buffer) + pbar.update(task, advance=len(buffer)) + + f.close() + if hash_prefix is not None: + digest = sha256.hexdigest() + if digest[:len(hash_prefix)] != hash_prefix: + raise RuntimeError( + 'invalid hash value (expected "{}", got "{}")'.format( + hash_prefix, digest)) + shutil.move(f.name, dst) + finally: + f.close() + if os.path.exists(f.name): + os.remove(f.name) + + +def download_url(url, root, filename=None, md5=None): + """Download a file from a url and place it in root. + + Args: + url (str): URL to download file from. + root (str): Directory to place downloaded file in. + filename (str | None): Name to save the file under. + If filename is None, use the basename of the URL. + md5 (str | None): MD5 checksum of the download. + If md5 is None, download without md5 check. + """ + root = os.path.expanduser(root) + if not filename: + filename = os.path.basename(url) + fpath = os.path.join(root, filename) + + os.makedirs(root, exist_ok=True) + + if check_integrity(fpath, md5): + print(f'Using downloaded and verified file: {fpath}') + else: + try: + print(f'Downloading {url} to {fpath}') + download_url_to_file(url, fpath) + except (urllib.error.URLError, IOError) as e: + if url[:5] == 'https': + url = url.replace('https:', 'http:') + print('Failed download. Trying https -> http instead.' 
+ f' Downloading {url} to {fpath}') + download_url_to_file(url, fpath) + else: + raise e + # check integrity of downloaded file + if not check_integrity(fpath, md5): + raise RuntimeError('File not found or corrupted.') + + +def _is_tarxz(filename): + return filename.endswith('.tar.xz') + + +def _is_tar(filename): + return filename.endswith('.tar') + + +def _is_targz(filename): + return filename.endswith('.tar.gz') + + +def _is_tgz(filename): + return filename.endswith('.tgz') + + +def _is_gzip(filename): + return filename.endswith('.gz') and not filename.endswith('.tar.gz') + + +def _is_zip(filename): + return filename.endswith('.zip') + + +def extract_archive(from_path, to_path=None, remove_finished=False): + if to_path is None: + to_path = os.path.dirname(from_path) + + if _is_tar(from_path): + with tarfile.open(from_path, 'r') as tar: + tar.extractall(path=to_path) + elif _is_targz(from_path) or _is_tgz(from_path): + with tarfile.open(from_path, 'r:gz') as tar: + tar.extractall(path=to_path) + elif _is_tarxz(from_path): + with tarfile.open(from_path, 'r:xz') as tar: + tar.extractall(path=to_path) + elif _is_gzip(from_path): + to_path = os.path.join( + to_path, + os.path.splitext(os.path.basename(from_path))[0]) + with open(to_path, 'wb') as out_f, gzip.GzipFile(from_path) as zip_f: + out_f.write(zip_f.read()) + elif _is_zip(from_path): + with zipfile.ZipFile(from_path, 'r') as z: + z.extractall(to_path) + else: + raise ValueError(f'Extraction of {from_path} not supported') + + if remove_finished: + os.remove(from_path) + + +def download_and_extract_archive(url, + download_root, + extract_root=None, + filename=None, + md5=None, + remove_finished=False): + download_root = os.path.expanduser(download_root) + if extract_root is None: + extract_root = download_root + if not filename: + filename = os.path.basename(url) + + download_url(url, download_root, filename, md5) + + archive = os.path.join(download_root, filename) + print(f'Extracting {archive} to {extract_root}') + extract_archive(archive, extract_root, remove_finished) + + +def open_maybe_compressed_file(path: str): + """Return a file object that possibly decompresses 'path' on the fly. + + Decompression occurs when argument `path` is a string and ends with '.gz' + or '.xz'. + """ + if not isinstance(path, str): + return path + if path.endswith('.gz'): + import gzip + return gzip.open(path, 'rb') + if path.endswith('.xz'): + import lzma + return lzma.open(path, 'rb') + return open(path, 'rb') diff --git a/mmpretrain/datasets/vg_vqa.py b/mmpretrain/datasets/vg_vqa.py new file mode 100644 index 0000000000000000000000000000000000000000..2d83884c804086c060bcfe27e833bff28dc28e9e --- /dev/null +++ b/mmpretrain/datasets/vg_vqa.py @@ -0,0 +1,77 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import List + +from mmengine.fileio import load + +from mmpretrain.registry import DATASETS +from .base_dataset import BaseDataset + + +@DATASETS.register_module() +class VGVQA(BaseDataset): + """Visual Genome VQA dataset.""" + + def load_data_list(self) -> List[dict]: + """Load data list. + + Compare to BaseDataset, the only difference is that coco_vqa annotation + file is already a list of data. There is no 'metainfo'. + """ + + raw_data_list = load(self.ann_file) + if not isinstance(raw_data_list, list): + raise TypeError( + f'The VQA annotations loaded from annotation file ' + f'should be a dict, but got {type(raw_data_list)}!') + + # load and parse data_infos. 
+ data_list = [] + for raw_data_info in raw_data_list: + # parse raw data information to target format + data_info = self.parse_data_info(raw_data_info) + if isinstance(data_info, dict): + # For VQA tasks, each `data_info` looks like: + # { + # "question_id": 986769, + # "question": "How many people are there?", + # "answer": "two", + # "image": "image/1.jpg", + # "dataset": "vg" + # } + + # change 'image' key to 'img_path' + # TODO: This process will be removed, after the annotation file + # is preprocess. + data_info['img_path'] = data_info['image'] + del data_info['image'] + + if 'answer' in data_info: + # add answer_weight & answer_count, delete duplicate answer + if data_info['dataset'] == 'vqa': + answer_weight = {} + for answer in data_info['answer']: + if answer in answer_weight.keys(): + answer_weight[answer] += 1 / len( + data_info['answer']) + else: + answer_weight[answer] = 1 / len( + data_info['answer']) + + data_info['answer'] = list(answer_weight.keys()) + data_info['answer_weight'] = list( + answer_weight.values()) + data_info['answer_count'] = len(answer_weight) + + elif data_info['dataset'] == 'vg': + data_info['answers'] = [data_info['answer']] + data_info['answer_weight'] = [0.2] + data_info['answer_count'] = 1 + + data_list.append(data_info) + + else: + raise TypeError( + f'Each VQA data element loaded from annotation file ' + f'should be a dict, but got {type(data_info)}!') + + return data_list diff --git a/mmpretrain/datasets/visual_genome.py b/mmpretrain/datasets/visual_genome.py new file mode 100644 index 0000000000000000000000000000000000000000..8c33b86c4f81d0be0f2830618ad100196b461dcf --- /dev/null +++ b/mmpretrain/datasets/visual_genome.py @@ -0,0 +1,95 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import re +from itertools import chain +from typing import List + +import mmengine +from mmengine.dataset import BaseDataset + +from mmpretrain.registry import DATASETS + + +@DATASETS.register_module() +class VisualGenomeQA(BaseDataset): + """Visual Genome Question Answering dataset. + + dataset structure: :: + + data_root + ├── image + │   ├── 1.jpg + │   ├── 2.jpg + │   └── ... + └── question_answers.json + + Args: + data_root (str): The root directory for ``data_prefix``, ``ann_file`` + and ``question_file``. + data_prefix (str): The directory of images. Defaults to ``"image"``. + ann_file (str, optional): Annotation file path for training and + validation. Defaults to ``"question_answers.json"``. + **kwargs: Other keyword arguments in :class:`BaseDataset`. + """ + + def __init__(self, + data_root: str, + data_prefix: str = 'image', + ann_file: str = 'question_answers.json', + **kwarg): + super().__init__( + data_root=data_root, + data_prefix=dict(img_path=data_prefix), + ann_file=ann_file, + **kwarg, + ) + + def _create_image_index(self): + img_prefix = self.data_prefix['img_path'] + + files = mmengine.list_dir_or_file(img_prefix, list_dir=False) + image_index = {} + for file in files: + image_id = re.findall(r'\d+', file) + if len(image_id) > 0: + image_id = int(image_id[-1]) + image_index[image_id] = mmengine.join_path(img_prefix, file) + + return image_index + + def load_data_list(self) -> List[dict]: + """Load data list.""" + annotations = mmengine.load(self.ann_file) + + # The original Visual Genome annotation file and question file includes + # only image id but no image file paths. 
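The ``VGVQA`` answer handling above can be checked by hand; for an illustrative 'vqa' sample with answers ``['two', 'two', 'three']`` the de-duplicated answers and weights are:

# Stand-alone replica of the answer_weight loop above (illustrative data).
answers = ['two', 'two', 'three']
answer_weight = {}
for answer in answers:
    answer_weight[answer] = answer_weight.get(answer, 0) + 1 / len(answers)

print(list(answer_weight))           # ['two', 'three']
print(list(answer_weight.values()))  # [0.666..., 0.333...]
print(len(answer_weight))            # answer_count == 2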
+        self.image_index = self._create_image_index()
+
+        data_list = []
+        for qas in chain.from_iterable(ann['qas'] for ann in annotations):
+            # ann example
+            # {
+            #     'id': 1,
+            #     'qas': [
+            #         {
+            #             'a_objects': [],
+            #             'question': 'What color is the clock?',
+            #             'image_id': 1,
+            #             'qa_id': 986768,
+            #             'answer': 'Two.',
+            #             'q_objects': [],
+            #         }
+            #         ...
+            #     ]
+            # }
+
+            data_info = {
+                'img_path': self.image_index[qas['image_id']],
+                'question': qas['question'],
+                'question_id': qas['qa_id'],
+                'image_id': qas['image_id'],
+                'gt_answer': [qas['answer']],
+            }
+
+            data_list.append(data_info)
+
+        return data_list
diff --git a/mmpretrain/datasets/voc.py b/mmpretrain/datasets/voc.py
new file mode 100644
index 0000000000000000000000000000000000000000..fa8475c5e24cde40f8ffefd1a6ac1d89e36c29bb
--- /dev/null
+++ b/mmpretrain/datasets/voc.py
@@ -0,0 +1,145 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import xml.etree.ElementTree as ET
+from typing import List, Optional, Union
+
+from mmengine import get_file_backend, list_from_file
+
+from mmpretrain.registry import DATASETS
+from .base_dataset import expanduser
+from .categories import VOC2007_CATEGORIES
+from .multi_label import MultiLabelDataset
+
+
+@DATASETS.register_module()
+class VOC(MultiLabelDataset):
+    """`Pascal VOC <http://host.robots.ox.ac.uk/pascal/VOC/>`_ Dataset.
+
+    After decompression, the dataset directory structure is as follows:
+
+    VOC dataset directory: ::
+
+        VOC2007 (data_root)/
+        ├── JPEGImages (data_prefix['img_path'])
+        │   ├── xxx.jpg
+        │   ├── xxy.jpg
+        │   └── ...
+        ├── Annotations (data_prefix['ann_path'])
+        │   ├── xxx.xml
+        │   ├── xxy.xml
+        │   └── ...
+        └── ImageSets (directory containing various image set files)
+
+    VOC annotations contain an extra ``difficult`` flag. We use
+    `gt_label_difficult` to record the difficult labels of each sample, and
+    the corresponding evaluation should take this field into account when
+    calculating metrics. By default, difficult labels are treated as
+    negative.
+
+    Args:
+        data_root (str): The root directory for VOC dataset.
+        image_set_path (str): The path of the image set file, which lists
+            the image ids of the sub dataset. This path is relative to
+            ``data_root``.
+        data_prefix (dict): Prefix for data and annotation, keyword
+            'img_path' and 'ann_path' can be set. Defaults to be
+            ``dict(img_path='JPEGImages', ann_path='Annotations')``.
+        test_mode (bool): ``test_mode=True`` means in the test phase.
+            It determines whether to use the training set or the test set.
+        metainfo (dict, optional): Meta information for dataset, such as
+            categories information. Defaults to None.
+        **kwargs: Other keyword arguments in :class:`BaseDataset`.
+    """  # noqa: E501
+
+    METAINFO = {'classes': VOC2007_CATEGORIES}
+
+    def __init__(self,
+                 data_root: str,
+                 image_set_path: str,
+                 data_prefix: Union[str, dict] = dict(
+                     img_path='JPEGImages', ann_path='Annotations'),
+                 test_mode: bool = False,
+                 metainfo: Optional[dict] = None,
+                 **kwargs):
+        if isinstance(data_prefix, str):
+            data_prefix = dict(img_path=expanduser(data_prefix))
+        assert isinstance(data_prefix, dict) and 'img_path' in data_prefix, \
+            '`data_prefix` must be a dict with key img_path'
+
+        if test_mode is False:
+            assert 'ann_path' in data_prefix and data_prefix[
+                'ann_path'] is not None, \
+                '"ann_path" must be set in `data_prefix` if `test_mode` is' \
+                ' False.'
+ + self.data_root = data_root + self.backend = get_file_backend(data_root, enable_singleton=True) + self.image_set_path = self.backend.join_path(data_root, image_set_path) + + super().__init__( + ann_file='', + metainfo=metainfo, + data_root=data_root, + data_prefix=data_prefix, + test_mode=test_mode, + **kwargs) + + @property + def ann_prefix(self): + """The prefix of images.""" + if 'ann_path' in self.data_prefix: + return self.data_prefix['ann_path'] + else: + return None + + def _get_labels_from_xml(self, img_id): + """Get gt_labels and labels_difficult from xml file.""" + xml_path = self.backend.join_path(self.ann_prefix, f'{img_id}.xml') + content = self.backend.get(xml_path) + root = ET.fromstring(content) + + labels, labels_difficult = set(), set() + for obj in root.findall('object'): + label_name = obj.find('name').text + # in case customized dataset has wrong labels + # or CLASSES has been override. + if label_name not in self.CLASSES: + continue + label = self.class_to_idx[label_name] + difficult = int(obj.find('difficult').text) + if difficult: + labels_difficult.add(label) + else: + labels.add(label) + + return list(labels), list(labels_difficult) + + def load_data_list(self): + """Load images and ground truth labels.""" + data_list = [] + img_ids = list_from_file(self.image_set_path) + + for img_id in img_ids: + img_path = self.backend.join_path(self.img_prefix, f'{img_id}.jpg') + + labels, labels_difficult = None, None + if self.ann_prefix is not None: + labels, labels_difficult = self._get_labels_from_xml(img_id) + + info = dict( + img_path=img_path, + gt_label=labels, + gt_label_difficult=labels_difficult) + data_list.append(info) + + return data_list + + def extra_repr(self) -> List[str]: + """The extra repr information of the dataset.""" + body = [ + f'Prefix of dataset: \t{self.data_root}', + f'Path of image set: \t{self.image_set_path}', + f'Prefix of images: \t{self.img_prefix}', + f'Prefix of annotations: \t{self.ann_prefix}' + ] + + return body diff --git a/mmpretrain/engine/__init__.py b/mmpretrain/engine/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..7785da7b25950b7f13770e30ba5a5082dd5f8655 --- /dev/null +++ b/mmpretrain/engine/__init__.py @@ -0,0 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from .hooks import * # noqa: F401, F403 +from .optimizers import * # noqa: F401, F403 +from .runners import * # noqa: F401, F403 diff --git a/mmpretrain/engine/hooks/__init__.py b/mmpretrain/engine/hooks/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..bc9e22be7e96d636f202066f2e00e7699b730619 --- /dev/null +++ b/mmpretrain/engine/hooks/__init__.py @@ -0,0 +1,19 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
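A minimal construction sketch for the ``VOC`` dataset above. All paths are placeholders and must point at a locally prepared VOC2007 copy laid out as in the class docstring.

from mmpretrain.datasets import VOC  # assumed public import path

dataset = VOC(
    data_root='data/VOC2007',                      # placeholder path
    image_set_path='ImageSets/Main/trainval.txt',  # placeholder path
    data_prefix=dict(img_path='JPEGImages', ann_path='Annotations'))
info = dataset[0]
print(info['gt_label'], info['gt_label_difficult'])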
+from .class_num_check_hook import ClassNumCheckHook +from .densecl_hook import DenseCLHook +from .ema_hook import EMAHook +from .margin_head_hooks import SetAdaptiveMarginsHook +from .precise_bn_hook import PreciseBNHook +from .retriever_hooks import PrepareProtoBeforeValLoopHook +from .simsiam_hook import SimSiamHook +from .swav_hook import SwAVHook +from .switch_recipe_hook import SwitchRecipeHook +from .visualization_hook import VisualizationHook +from .warmup_param_hook import WarmupParamHook + +__all__ = [ + 'ClassNumCheckHook', 'PreciseBNHook', 'VisualizationHook', + 'SwitchRecipeHook', 'PrepareProtoBeforeValLoopHook', + 'SetAdaptiveMarginsHook', 'EMAHook', 'SimSiamHook', 'DenseCLHook', + 'SwAVHook', 'WarmupParamHook' +] diff --git a/mmpretrain/engine/hooks/class_num_check_hook.py b/mmpretrain/engine/hooks/class_num_check_hook.py new file mode 100644 index 0000000000000000000000000000000000000000..38170d6604810c575aa5c2c9435c0b75cfa761b2 --- /dev/null +++ b/mmpretrain/engine/hooks/class_num_check_hook.py @@ -0,0 +1,63 @@ +# Copyright (c) OpenMMLab. All rights reserved +from mmengine.hooks import Hook +from mmengine.utils import is_seq_of + +from mmpretrain.registry import HOOKS + + +@HOOKS.register_module() +class ClassNumCheckHook(Hook): + """Class Number Check HOOK.""" + + def _check_head(self, runner, dataset): + """Check whether the `num_classes` in head matches the length of + `CLASSES` in `dataset`. + + Args: + runner (obj:`Runner`): runner object. + dataset (obj: `BaseDataset`): the dataset to check. + """ + model = runner.model + if dataset.CLASSES is None: + runner.logger.warning( + f'Please set class information in `metainfo` ' + f'in the {dataset.__class__.__name__} and' + f'check if it is consistent with the `num_classes` ' + f'of head') + else: + assert is_seq_of(dataset.CLASSES, str), \ + (f'Class information in `metainfo` in ' + f'{dataset.__class__.__name__} should be a tuple of str.') + for _, module in model.named_modules(): + if hasattr(module, 'num_classes'): + assert module.num_classes == len(dataset.CLASSES), \ + (f'The `num_classes` ({module.num_classes}) in ' + f'{module.__class__.__name__} of ' + f'{model.__class__.__name__} does not matches ' + f'the length of class information in `metainfo` ' + f'{len(dataset.CLASSES)}) in ' + f'{dataset.__class__.__name__}') + + def before_train(self, runner): + """Check whether the training dataset is compatible with head. + + Args: + runner (obj: `IterBasedRunner`): Iter based Runner. + """ + self._check_head(runner, runner.train_dataloader.dataset) + + def before_val(self, runner): + """Check whether the validation dataset is compatible with head. + + Args: + runner (obj:`IterBasedRunner`): Iter based Runner. + """ + self._check_head(runner, runner.val_dataloader.dataset) + + def before_test(self, runner): + """Check whether the test dataset is compatible with head. + + Args: + runner (obj:`IterBasedRunner`): Iter based Runner. + """ + self._check_head(runner, runner.test_dataloader.dataset) diff --git a/mmpretrain/engine/hooks/densecl_hook.py b/mmpretrain/engine/hooks/densecl_hook.py new file mode 100644 index 0000000000000000000000000000000000000000..8c7e17d3419cbc2a540d3aecd81e223eed670df2 --- /dev/null +++ b/mmpretrain/engine/hooks/densecl_hook.py @@ -0,0 +1,42 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
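The hooks exported above are enabled through the ``custom_hooks`` field of a training config; for instance (a config fragment, not part of the patch):

# Config fragment: verify that head.num_classes matches the dataset classes.
custom_hooks = [dict(type='ClassNumCheckHook')]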
+from typing import Optional, Sequence + +from mmengine.hooks import Hook + +from mmpretrain.registry import HOOKS +from mmpretrain.utils import get_ori_model + + +@HOOKS.register_module() +class DenseCLHook(Hook): + """Hook for DenseCL. + + This hook includes ``loss_lambda`` warmup in DenseCL. + Borrowed from the authors' code: ``_. + + Args: + start_iters (int): The number of warmup iterations to set + ``loss_lambda=0``. Defaults to 1000. + """ + + def __init__(self, start_iters: int = 1000) -> None: + self.start_iters = start_iters + + def before_train(self, runner) -> None: + """Obtain ``loss_lambda`` from algorithm.""" + assert hasattr(get_ori_model(runner.model), 'loss_lambda'), \ + "The runner must have attribute \"loss_lambda\" in DenseCL." + self.loss_lambda = get_ori_model(runner.model).loss_lambda + + def before_train_iter(self, + runner, + batch_idx: int, + data_batch: Optional[Sequence[dict]] = None) -> None: + """Adjust ``loss_lambda`` every train iter.""" + assert hasattr(get_ori_model(runner.model), 'loss_lambda'), \ + "The runner must have attribute \"loss_lambda\" in DenseCL." + cur_iter = runner.iter + if cur_iter >= self.start_iters: + get_ori_model(runner.model).loss_lambda = self.loss_lambda + else: + get_ori_model(runner.model).loss_lambda = 0. diff --git a/mmpretrain/engine/hooks/ema_hook.py b/mmpretrain/engine/hooks/ema_hook.py new file mode 100644 index 0000000000000000000000000000000000000000..284d211b628c411f0eb712d1c558dc6aa2eb8996 --- /dev/null +++ b/mmpretrain/engine/hooks/ema_hook.py @@ -0,0 +1,216 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import copy +import itertools +import warnings +from typing import Dict, Optional + +from mmengine.hooks import EMAHook as BaseEMAHook +from mmengine.logging import MMLogger +from mmengine.runner import Runner + +from mmpretrain.registry import HOOKS + + +@HOOKS.register_module() +class EMAHook(BaseEMAHook): + """A Hook to apply Exponential Moving Average (EMA) on the model during + training. + + Comparing with :class:`mmengine.hooks.EMAHook`, this hook accepts + ``evaluate_on_ema`` and ``evaluate_on_origin`` arguments. By default, the + ``evaluate_on_ema`` is enabled, and if you want to do validation and + testing on both original and EMA models, please set both arguments + ``True``. + + Note: + - EMAHook takes priority over CheckpointHook. + - The original model parameters are actually saved in ema field after + train. + - ``begin_iter`` and ``begin_epoch`` cannot be set at the same time. + + Args: + ema_type (str): The type of EMA strategy to use. You can find the + supported strategies in :mod:`mmengine.model.averaged_model`. + Defaults to 'ExponentialMovingAverage'. + strict_load (bool): Whether to strictly enforce that the keys of + ``state_dict`` in checkpoint match the keys returned by + ``self.module.state_dict``. Defaults to False. + Changed in v0.3.0. + begin_iter (int): The number of iteration to enable ``EMAHook``. + Defaults to 0. + begin_epoch (int): The number of epoch to enable ``EMAHook``. + Defaults to 0. + evaluate_on_ema (bool): Whether to evaluate (validate and test) + on EMA model during val-loop and test-loop. Defaults to True. + evaluate_on_origin (bool): Whether to evaluate (validate and test) + on the original model during val-loop and test-loop. + Defaults to False. 
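A config sketch for ``DenseCLHook`` (a fragment using the default warmup length; not part of the patch):

# Keep loss_lambda at 0 for the first 1000 iterations, then restore it.
custom_hooks = [dict(type='DenseCLHook', start_iters=1000)]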
+ **kwargs: Keyword arguments passed to subclasses of + :obj:`BaseAveragedModel` + """ + + priority = 'NORMAL' + + def __init__(self, + ema_type: str = 'ExponentialMovingAverage', + strict_load: bool = False, + begin_iter: int = 0, + begin_epoch: int = 0, + evaluate_on_ema: bool = True, + evaluate_on_origin: bool = False, + **kwargs): + super().__init__( + ema_type=ema_type, + strict_load=strict_load, + begin_iter=begin_iter, + begin_epoch=begin_epoch, + **kwargs) + + if not evaluate_on_ema and not evaluate_on_origin: + warnings.warn( + 'Automatically set `evaluate_on_origin=True` since the ' + '`evaluate_on_ema` is disabled. If you want to disable ' + 'all validation, please modify the `val_interval` of ' + 'the `train_cfg`.', UserWarning) + evaluate_on_origin = True + + self.evaluate_on_ema = evaluate_on_ema + self.evaluate_on_origin = evaluate_on_origin + self.load_ema_from_ckpt = False + + def before_train(self, runner) -> None: + super().before_train(runner) + if not runner._resume and self.load_ema_from_ckpt: + # If loaded EMA state dict but not want to resume training + # overwrite the EMA state dict with the source model. + MMLogger.get_current_instance().info( + 'Load from a checkpoint with EMA parameters but not ' + 'resume training. Initialize the model parameters with ' + 'EMA parameters') + for p_ema, p_src in zip(self._ema_params, self._src_params): + p_src.data.copy_(p_ema.data) + + def before_val_epoch(self, runner) -> None: + """We load parameter values from ema model to source model before + validation. + + Args: + runner (Runner): The runner of the training process. + """ + if self.evaluate_on_ema: + # Swap when evaluate on ema + self._swap_ema_parameters() + + def after_val_epoch(self, + runner, + metrics: Optional[Dict[str, float]] = None) -> None: + """We recover source model's parameter from ema model after validation. + + Args: + runner (Runner): The runner of the validation process. + metrics (Dict[str, float], optional): Evaluation results of all + metrics on validation dataset. The keys are the names of the + metrics, and the values are corresponding results. + """ + if self.evaluate_on_ema: + # Swap when evaluate on ema + self._swap_ema_parameters() + + if self.evaluate_on_ema and self.evaluate_on_origin: + # Re-evaluate if evaluate on both ema and origin. + val_loop = runner.val_loop + + runner.model.eval() + for idx, data_batch in enumerate(val_loop.dataloader): + val_loop.run_iter(idx, data_batch) + + # compute metrics + origin_metrics = val_loop.evaluator.evaluate( + len(val_loop.dataloader.dataset)) + + for k, v in origin_metrics.items(): + runner.message_hub.update_scalar(f'val/{k}_origin', v) + + def before_test_epoch(self, runner) -> None: + """We load parameter values from ema model to source model before test. + + Args: + runner (Runner): The runner of the training process. + """ + if self.evaluate_on_ema: + # Swap when evaluate on ema + self._swap_ema_parameters() + MMLogger.get_current_instance().info('Start testing on EMA model.') + else: + MMLogger.get_current_instance().info( + 'Start testing on the original model.') + + def after_test_epoch(self, + runner: Runner, + metrics: Optional[Dict[str, float]] = None) -> None: + """We recover source model's parameter from ema model after test. + + Args: + runner (Runner): The runner of the testing process. + metrics (Dict[str, float], optional): Evaluation results of all + metrics on test dataset. The keys are the names of the + metrics, and the values are corresponding results. 
+ """ + if self.evaluate_on_ema: + # Swap when evaluate on ema + self._swap_ema_parameters() + + if self.evaluate_on_ema and self.evaluate_on_origin: + # Re-evaluate if evaluate on both ema and origin. + MMLogger.get_current_instance().info( + 'Start testing on the original model.') + test_loop = runner.test_loop + + runner.model.eval() + for idx, data_batch in enumerate(test_loop.dataloader): + test_loop.run_iter(idx, data_batch) + + # compute metrics + origin_metrics = test_loop.evaluator.evaluate( + len(test_loop.dataloader.dataset)) + + for k, v in origin_metrics.items(): + runner.message_hub.update_scalar(f'test/{k}_origin', v) + + def after_load_checkpoint(self, runner, checkpoint: dict) -> None: + """Resume ema parameters from checkpoint. + + Args: + runner (Runner): The runner of the testing process. + """ + from mmengine.runner.checkpoint import load_state_dict + if 'ema_state_dict' in checkpoint: + # The original model parameters are actually saved in ema + # field swap the weights back to resume ema state. + self._swap_ema_state_dict(checkpoint) + self.ema_model.load_state_dict( + checkpoint['ema_state_dict'], strict=self.strict_load) + self.load_ema_from_ckpt = True + + # Support load checkpoint without ema state dict. + else: + load_state_dict( + self.ema_model.module, + copy.deepcopy(checkpoint['state_dict']), + strict=self.strict_load) + + @property + def _src_params(self): + if self.ema_model.update_buffers: + return itertools.chain(self.src_model.parameters(), + self.src_model.buffers()) + else: + return self.src_model.parameters() + + @property + def _ema_params(self): + if self.ema_model.update_buffers: + return itertools.chain(self.ema_model.module.parameters(), + self.ema_model.module.buffers()) + else: + return self.ema_model.module.parameters() diff --git a/mmpretrain/engine/hooks/margin_head_hooks.py b/mmpretrain/engine/hooks/margin_head_hooks.py new file mode 100644 index 0000000000000000000000000000000000000000..fbeae7a347453153ff4ab3bef958acb549623f6f --- /dev/null +++ b/mmpretrain/engine/hooks/margin_head_hooks.py @@ -0,0 +1,61 @@ +# Copyright (c) OpenMMLab. All rights reserved +import numpy as np +from mmengine.hooks import Hook +from mmengine.model import is_model_wrapper + +from mmpretrain.models.heads import ArcFaceClsHead +from mmpretrain.registry import HOOKS + + +@HOOKS.register_module() +class SetAdaptiveMarginsHook(Hook): + r"""Set adaptive-margins in ArcFaceClsHead based on the power of + category-wise count. + + A PyTorch implementation of paper `Google Landmark Recognition 2020 + Competition Third Place Solution `_. + The margins will be + :math:`\text{f}(n) = (marginMax - marginMin) · norm(n^p) + marginMin`. + The `n` indicates the number of occurrences of a category. + + Args: + margin_min (float): Lower bound of margins. Defaults to 0.05. + margin_max (float): Upper bound of margins. Defaults to 0.5. + power (float): The power of category freqercy. Defaults to -0.25. + """ + + def __init__(self, margin_min=0.05, margin_max=0.5, power=-0.25) -> None: + self.margin_min = margin_min + self.margin_max = margin_max + self.margin_range = margin_max - margin_min + self.p = power + + def before_train(self, runner): + """change the margins in ArcFaceClsHead. + + Args: + runner (obj: `Runner`): Runner. 
+ """ + model = runner.model + if is_model_wrapper(model): + model = model.module + + if (hasattr(model, 'head') + and not isinstance(model.head, ArcFaceClsHead)): + raise ValueError( + 'Hook ``SetFreqPowAdvMarginsHook`` could only be used ' + f'for ``ArcFaceClsHead``, but get {type(model.head)}') + + # generate margins base on the dataset. + gt_labels = runner.train_dataloader.dataset.get_gt_labels() + label_count = np.bincount(gt_labels) + label_count[label_count == 0] = 1 # At least one occurrence + pow_freq = np.power(label_count, self.p) + + min_f, max_f = pow_freq.min(), pow_freq.max() + normized_pow_freq = (pow_freq - min_f) / (max_f - min_f) + margins = normized_pow_freq * self.margin_range + self.margin_min + + assert len(margins) == runner.model.head.num_classes + + model.head.set_margins(margins) diff --git a/mmpretrain/engine/hooks/precise_bn_hook.py b/mmpretrain/engine/hooks/precise_bn_hook.py new file mode 100644 index 0000000000000000000000000000000000000000..4fb0e4c419e4ed2af23574769815aaecbcd629c0 --- /dev/null +++ b/mmpretrain/engine/hooks/precise_bn_hook.py @@ -0,0 +1,223 @@ +# Copyright (c) OpenMMLab. All rights reserved. +# Adapted from https://github.com/facebookresearch/pycls/blob/f8cd962737e33ce9e19b3083a33551da95c2d9c0/pycls/core/net.py # noqa: E501 +# Original licence: Copyright (c) 2019 Facebook, Inc under the Apache License 2.0 # noqa: E501 + +import itertools +import logging +from typing import List, Optional, Sequence, Union + +import mmengine +import torch +import torch.nn as nn +from mmengine.hooks import Hook +from mmengine.logging import print_log +from mmengine.model import is_model_wrapper +from mmengine.runner import EpochBasedTrainLoop, IterBasedTrainLoop, Runner +from mmengine.utils import ProgressBar +from torch.functional import Tensor +from torch.nn import GroupNorm +from torch.nn.modules.batchnorm import _BatchNorm +from torch.nn.modules.instancenorm import _InstanceNorm +from torch.utils.data import DataLoader + +from mmpretrain.registry import HOOKS + +DATA_BATCH = Optional[Sequence[dict]] + + +def scaled_all_reduce(tensors: List[Tensor], num_gpus: int) -> List[Tensor]: + """Performs the scaled all_reduce operation on the provided tensors. + + The input tensors are modified in-place. Currently supports only the sum + reduction operator. The reduced values are scaled by the inverse size of + the process group. + + Args: + tensors (List[torch.Tensor]): The tensors to process. + num_gpus (int): The number of gpus to use + Returns: + List[torch.Tensor]: The processed tensors. + """ + # There is no need for reduction in the single-proc case + if num_gpus == 1: + return tensors + # Queue the reductions + reductions = [] + for tensor in tensors: + reduction = torch.distributed.all_reduce(tensor, async_op=True) + reductions.append(reduction) + # Wait for reductions to finish + for reduction in reductions: + reduction.wait() + # Scale the results + for tensor in tensors: + tensor.mul_(1.0 / num_gpus) + return tensors + + +@torch.no_grad() +def update_bn_stats( + model: nn.Module, + loader: DataLoader, + num_samples: int = 8192, + logger: Optional[Union[logging.Logger, str]] = None) -> None: + """Computes precise BN stats on training data. + + Args: + model (nn.module): The model whose bn stats will be recomputed. + loader (DataLoader): PyTorch dataloader._dataloader + num_samples (int): The number of samples to update the bn stats. + Defaults to 8192. 
+ logger (logging.Logger or str, optional): If the type of logger is + ``logging.Logger``, we directly use logger to log messages. + Some special loggers are: + - "silent": No message will be printed. + - "current": Use latest created logger to log message. + - other str: Instance name of logger. The corresponding logger + will log message if it has been created, otherwise will raise a + `ValueError`. + - None: The `print()` method will be used to print log messages. + """ + if is_model_wrapper(model): + model = model.module + + # get dist info + rank, world_size = mmengine.dist.get_dist_info() + # Compute the number of mini-batches to use, if the size of dataloader is + # less than num_iters, use all the samples in dataloader. + num_iter = num_samples // (loader.batch_size * world_size) + num_iter = min(num_iter, len(loader)) + # Retrieve the BN layers + bn_layers = [ + m for m in model.modules() + if m.training and isinstance(m, (_BatchNorm)) + ] + if len(bn_layers) == 0: + print_log('No BN found in model', logger=logger, level=logging.WARNING) + return + print_log( + f'{len(bn_layers)} BN found, run {num_iter} iters...', logger=logger) + + # Finds all the other norm layers with training=True. + other_norm_layers = [ + m for m in model.modules() + if m.training and isinstance(m, (_InstanceNorm, GroupNorm)) + ] + if len(other_norm_layers) > 0: + print_log( + 'IN/GN stats will not be updated in PreciseHook.', + logger=logger, + level=logging.INFO) + + # Initialize BN stats storage for computing + # mean(mean(batch)) and mean(var(batch)) + running_means = [torch.zeros_like(bn.running_mean) for bn in bn_layers] + running_vars = [torch.zeros_like(bn.running_var) for bn in bn_layers] + # Remember momentum values + momentums = [bn.momentum for bn in bn_layers] + # Set momentum to 1.0 to compute BN stats that reflect the current batch + for bn in bn_layers: + bn.momentum = 1.0 + # Average the BN stats for each BN layer over the batches + if rank == 0: + prog_bar = ProgressBar(num_iter) + + for data in itertools.islice(loader, num_iter): + data = model.data_preprocessor(data, False) + model(**data) + + for i, bn in enumerate(bn_layers): + running_means[i] += bn.running_mean / num_iter + running_vars[i] += bn.running_var / num_iter + if rank == 0: + prog_bar.update() + + # Sync BN stats across GPUs (no reduction if 1 GPU used) + running_means = scaled_all_reduce(running_means, world_size) + running_vars = scaled_all_reduce(running_vars, world_size) + # Set BN stats and restore original momentum values + for i, bn in enumerate(bn_layers): + bn.running_mean = running_means[i] + bn.running_var = running_vars[i] + bn.momentum = momentums[i] + + +@HOOKS.register_module() +class PreciseBNHook(Hook): + """Precise BN hook. + + Recompute and update the batch norm stats to make them more precise. During + training both BN stats and the weight are changing after every iteration, + so the running average can not precisely reflect the actual stats of the + current model. + + With this hook, the BN stats are recomputed with fixed weights, to make the + running average more precise. Specifically, it computes the true average of + per-batch mean/variance instead of the running average. See Sec. 3 of the + paper `Rethinking Batch in BatchNorm ` + for details. + + This hook will update BN stats, so it should be executed before + ``CheckpointHook`` and ``EMAHook``, generally set its priority to + "ABOVE_NORMAL". + + Args: + num_samples (int): The number of samples to update the bn stats. + Defaults to 8192. 
+ interval (int): Perform precise bn interval. If the train loop is + `EpochBasedTrainLoop` or `by_epoch=True`, its unit is 'epoch'; if the + train loop is `IterBasedTrainLoop` or `by_epoch=False`, its unit is + 'iter'. Defaults to 1. + """ + + def __init__(self, num_samples: int = 8192, interval: int = 1) -> None: + assert interval > 0 and num_samples > 0, "'interval' and " \ + "'num_samples' must be bigger than 0." + + self.interval = interval + self.num_samples = num_samples + + def _perform_precise_bn(self, runner: Runner) -> None: + """perform precise bn.""" + print_log( + f'Running Precise BN for {self.num_samples} samples...', + logger=runner.logger) + update_bn_stats( + runner.model, + runner.train_loop.dataloader, + self.num_samples, + logger=runner.logger) + print_log('Finish Precise BN, BN stats updated.', logger=runner.logger) + + def after_train_epoch(self, runner: Runner) -> None: + """Calculate prcise BN and broadcast BN stats across GPUs. + + Args: + runner (obj:`Runner`): The runner of the training process. + """ + # if use `EpochBasedTrainLoop``, do perform precise every + # `self.interval` epochs. + if isinstance(runner.train_loop, + EpochBasedTrainLoop) and self.every_n_epochs( + runner, self.interval): + self._perform_precise_bn(runner) + + def after_train_iter(self, + runner, + batch_idx: int, + data_batch: DATA_BATCH = None, + outputs: Optional[dict] = None) -> None: + """Calculate prcise BN and broadcast BN stats across GPUs. + + Args: + runner (obj:`Runner`): The runner of the training process. + batch_idx (int): The index of the current batch in the train loop. + data_batch (Sequence[dict], optional): Data from dataloader. + Defaults to None. + """ + # if use `IterBasedTrainLoop``, do perform precise every + # `self.interval` iters. + if isinstance(runner.train_loop, + IterBasedTrainLoop) and self.every_n_train_iters( + runner, self.interval): + self._perform_precise_bn(runner) diff --git a/mmpretrain/engine/hooks/retriever_hooks.py b/mmpretrain/engine/hooks/retriever_hooks.py new file mode 100644 index 0000000000000000000000000000000000000000..6bd7c7aaff3175491b1ea1508e33b07b7c2ea8d4 --- /dev/null +++ b/mmpretrain/engine/hooks/retriever_hooks.py @@ -0,0 +1,32 @@ +# Copyright (c) OpenMMLab. All rights reserved +import warnings + +from mmengine.hooks import Hook +from mmengine.model import is_model_wrapper + +from mmpretrain.models import BaseRetriever +from mmpretrain.registry import HOOKS + + +@HOOKS.register_module() +class PrepareProtoBeforeValLoopHook(Hook): + """The hook to prepare the prototype in retrievers. + + Since the encoders of the retriever changes during training, the prototype + changes accordingly. So the `prototype_vecs` needs to be regenerated before + validation loop. + """ + + def before_val(self, runner) -> None: + model = runner.model + if is_model_wrapper(model): + model = model.module + + if isinstance(model, BaseRetriever): + if hasattr(model, 'prepare_prototype'): + model.prepare_prototype() + else: + warnings.warn( + 'Only the `mmpretrain.models.retrievers.BaseRetriever` ' + 'can execute `PrepareRetrieverPrototypeHook`, but got ' + f'`{type(model)}`') diff --git a/mmpretrain/engine/hooks/simsiam_hook.py b/mmpretrain/engine/hooks/simsiam_hook.py new file mode 100644 index 0000000000000000000000000000000000000000..fabc4faca02bb78b92c39de68fa8a18e56d544f5 --- /dev/null +++ b/mmpretrain/engine/hooks/simsiam_hook.py @@ -0,0 +1,48 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
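+#
+# A hedged usage sketch: ``SimSiamHook`` only touches optimizer param groups
+# that carry a truthy ``fix_lr`` key (typically set for the predictor's
+# parameters when building the optimizer), and is enabled with something like
+# ``custom_hooks = [dict(type='SimSiamHook', fix_pred_lr=True, lr=0.05)]``,
+# where the ``lr`` value is illustrative only.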
+from typing import Optional, Sequence + +from mmengine.hooks import Hook + +from mmpretrain.registry import HOOKS + + +@HOOKS.register_module() +class SimSiamHook(Hook): + """Hook for SimSiam. + + This hook is for SimSiam to fix learning rate of predictor. + + Args: + fix_pred_lr (bool): whether to fix the lr of predictor or not. + lr (float): the value of fixed lr. + adjust_by_epoch (bool, optional): whether to set lr by epoch or iter. + Defaults to True. + """ + + def __init__(self, + fix_pred_lr: bool, + lr: float, + adjust_by_epoch: Optional[bool] = True) -> None: + self.fix_pred_lr = fix_pred_lr + self.lr = lr + self.adjust_by_epoch = adjust_by_epoch + + def before_train_iter(self, + runner, + batch_idx: int, + data_batch: Optional[Sequence[dict]] = None) -> None: + """fix lr of predictor by iter.""" + if self.adjust_by_epoch: + return + else: + if self.fix_pred_lr: + for param_group in runner.optim_wrapper.optimizer.param_groups: + if 'fix_lr' in param_group and param_group['fix_lr']: + param_group['lr'] = self.lr + + def before_train_epoch(self, runner) -> None: + """fix lr of predictor by epoch.""" + if self.fix_pred_lr: + for param_group in runner.optim_wrapper.optimizer.param_groups: + if 'fix_lr' in param_group and param_group['fix_lr']: + param_group['lr'] = self.lr diff --git a/mmpretrain/engine/hooks/swav_hook.py b/mmpretrain/engine/hooks/swav_hook.py new file mode 100644 index 0000000000000000000000000000000000000000..71c82ad1e0c47114cccdd90f26a3d6c086e36d18 --- /dev/null +++ b/mmpretrain/engine/hooks/swav_hook.py @@ -0,0 +1,116 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import os.path as osp +from typing import Dict, List, Optional, Sequence + +import torch +from mmengine.dist import get_rank, get_world_size, is_distributed +from mmengine.hooks import Hook +from mmengine.logging import MMLogger + +from mmpretrain.registry import HOOKS +from mmpretrain.utils import get_ori_model + + +@HOOKS.register_module() +class SwAVHook(Hook): + """Hook for SwAV. + + This hook builds the queue in SwAV according to ``epoch_queue_starts``. + The queue will be saved in ``runner.work_dir`` or loaded at start epoch + if the path folder has queues saved before. + + Args: + batch_size (int): the batch size per GPU for computing. + epoch_queue_starts (int, optional): from this epoch, starts to use the + queue. Defaults to 15. + crops_for_assign (list[int], optional): list of crops id used for + computing assignments. Defaults to [0, 1]. + feat_dim (int, optional): feature dimension of output vector. + Defaults to 128. + queue_length (int, optional): length of the queue (0 for no queue). + Defaults to 0. + interval (int, optional): the interval to save the queue. + Defaults to 1. + frozen_layers_cfg (dict, optional): Dict to config frozen layers. + The key-value pair is layer name and its frozen iters. If frozen, + the layers don't need gradient. Defaults to dict(). 
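+
+    Example:
+        A hedged config sketch (all values are illustrative, not tuned):
+
+        .. code:: python
+
+            custom_hooks = [
+                dict(
+                    type='SwAVHook',
+                    batch_size=32,
+                    epoch_queue_starts=15,
+                    queue_length=3840)
+            ]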
+ """ + + def __init__( + self, + batch_size: int, + epoch_queue_starts: Optional[int] = 15, + crops_for_assign: Optional[List[int]] = [0, 1], + feat_dim: Optional[int] = 128, + queue_length: Optional[int] = 0, + interval: Optional[int] = 1, + frozen_layers_cfg: Optional[Dict] = dict() + ) -> None: + self.batch_size = batch_size * get_world_size() + self.epoch_queue_starts = epoch_queue_starts + self.crops_for_assign = crops_for_assign + self.feat_dim = feat_dim + self.queue_length = queue_length + self.interval = interval + self.frozen_layers_cfg = frozen_layers_cfg + self.requires_grad = True + self.queue = None + + def before_run(self, runner) -> None: + """Check whether the queues exist locally or not.""" + if is_distributed(): + self.queue_path = osp.join(runner.work_dir, + 'queue' + str(get_rank()) + '.pth') + else: + self.queue_path = osp.join(runner.work_dir, 'queue.pth') + + # load the queues if queues exist locally + if osp.isfile(self.queue_path): + self.queue = torch.load(self.queue_path)['queue'] + get_ori_model(runner.model).head.loss_module.queue = self.queue + MMLogger.get_current_instance().info( + f'Load queue from file: {self.queue_path}') + + # the queue needs to be divisible by the batch size + self.queue_length -= self.queue_length % self.batch_size + + def before_train_iter(self, + runner, + batch_idx: int, + data_batch: Optional[Sequence[dict]] = None) -> None: + """Freeze layers before specific iters according to the config.""" + for layer, frozen_iters in self.frozen_layers_cfg.items(): + if runner.iter < frozen_iters and self.requires_grad: + self.requires_grad = False + for name, p in get_ori_model(runner.model).named_parameters(): + if layer in name: + p.requires_grad = False + elif runner.iter >= frozen_iters and not self.requires_grad: + self.requires_grad = True + for name, p in get_ori_model(runner.model).named_parameters(): + if layer in name: + p.requires_grad = True + + def before_train_epoch(self, runner) -> None: + """Check the queues' state.""" + # optionally starts a queue + if self.queue_length > 0 \ + and runner.epoch >= self.epoch_queue_starts \ + and self.queue is None: + self.queue = torch.zeros( + len(self.crops_for_assign), + self.queue_length // runner.world_size, + self.feat_dim, + ).cuda() + + # set the boolean type of use_the_queue + get_ori_model(runner.model).head.loss_module.queue = self.queue + get_ori_model(runner.model).head.loss_module.use_queue = False + + def after_train_epoch(self, runner) -> None: + """Save the queues locally.""" + self.queue = get_ori_model(runner.model).head.loss_module.queue + + if self.queue is not None and self.every_n_epochs( + runner, self.interval): + torch.save({'queue': self.queue}, self.queue_path) diff --git a/mmpretrain/engine/hooks/switch_recipe_hook.py b/mmpretrain/engine/hooks/switch_recipe_hook.py new file mode 100644 index 0000000000000000000000000000000000000000..914b9572eb22d2cd2f54c519273c86baf2e0894d --- /dev/null +++ b/mmpretrain/engine/hooks/switch_recipe_hook.py @@ -0,0 +1,169 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+from collections import OrderedDict +from copy import deepcopy + +from mmcv.transforms import Compose +from mmengine.hooks import Hook +from mmengine.model import is_model_wrapper + +from mmpretrain.models.utils import RandomBatchAugment +from mmpretrain.registry import HOOKS, MODEL_WRAPPERS, MODELS + + +@HOOKS.register_module() +class SwitchRecipeHook(Hook): + """switch recipe during the training loop, including train pipeline, batch + augments and loss currently. + + Args: + schedule (list): Every item of the schedule list should be a dict, and + the dict should have ``action_epoch`` and some of + ``train_pipeline``, ``train_augments`` and ``loss`` keys: + + - ``action_epoch`` (int): switch training recipe at which epoch. + - ``train_pipeline`` (list, optional): The new data pipeline of the + train dataset. If not specified, keep the original settings. + - ``batch_augments`` (dict | None, optional): The new batch + augmentations of during training. See :mod:`Batch Augmentations + ` for more details. + If None, disable batch augmentations. If not specified, keep the + original settings. + - ``loss`` (dict, optional): The new loss module config. If not + specified, keep the original settings. + + Example: + To use this hook in config files. + + .. code:: python + + custom_hooks = [ + dict( + type='SwitchRecipeHook', + schedule=[ + dict( + action_epoch=30, + train_pipeline=pipeline_after_30e, + batch_augments=batch_augments_after_30e, + loss=loss_after_30e, + ), + dict( + action_epoch=60, + # Disable batch augmentations after 60e + # and keep other settings. + batch_augments=None, + ), + ] + ) + ] + """ + priority = 'NORMAL' + + def __init__(self, schedule): + recipes = {} + for recipe in schedule: + assert 'action_epoch' in recipe, \ + 'Please set `action_epoch` in every item ' \ + 'of the `schedule` in the SwitchRecipeHook.' + recipe = deepcopy(recipe) + if 'train_pipeline' in recipe: + recipe['train_pipeline'] = Compose(recipe['train_pipeline']) + if 'batch_augments' in recipe: + batch_augments = recipe['batch_augments'] + if isinstance(batch_augments, dict): + batch_augments = RandomBatchAugment(**batch_augments) + recipe['batch_augments'] = batch_augments + if 'loss' in recipe: + loss = recipe['loss'] + if isinstance(loss, dict): + loss = MODELS.build(loss) + recipe['loss'] = loss + + action_epoch = recipe.pop('action_epoch') + assert action_epoch not in recipes, \ + f'The `action_epoch` {action_epoch} is repeated ' \ + 'in the SwitchRecipeHook.' + recipes[action_epoch] = recipe + self.schedule = OrderedDict(sorted(recipes.items())) + + def before_train(self, runner) -> None: + """before run setting. If resume form a checkpoint, do all switch + before the current epoch. + + Args: + runner (Runner): The runner of the training, validation or testing + process. 
+ """ + if runner._resume: + for action_epoch, recipe in self.schedule.items(): + if action_epoch >= runner.epoch + 1: + break + self._do_switch(runner, recipe, + f' (resume recipe of epoch {action_epoch})') + + def before_train_epoch(self, runner): + """do before train epoch.""" + recipe = self.schedule.get(runner.epoch + 1, None) + if recipe is not None: + self._do_switch(runner, recipe, f' at epoch {runner.epoch + 1}') + + def _do_switch(self, runner, recipe, extra_info=''): + """do the switch aug process.""" + if 'batch_augments' in recipe: + self._switch_batch_augments(runner, recipe['batch_augments']) + runner.logger.info(f'Switch batch augments{extra_info}.') + + if 'train_pipeline' in recipe: + self._switch_train_pipeline(runner, recipe['train_pipeline']) + runner.logger.info(f'Switch train pipeline{extra_info}.') + + if 'loss' in recipe: + self._switch_loss(runner, recipe['loss']) + runner.logger.info(f'Switch loss{extra_info}.') + + @staticmethod + def _switch_batch_augments(runner, batch_augments): + """switch the train augments.""" + model = runner.model + if is_model_wrapper(model): + model = model.module + + model.data_preprocessor.batch_augments = batch_augments + + @staticmethod + def _switch_train_pipeline(runner, train_pipeline): + """switch the train loader dataset pipeline.""" + + def switch_pipeline(dataset, pipeline): + if hasattr(dataset, 'pipeline'): + # for usual dataset + dataset.pipeline = pipeline + elif hasattr(dataset, 'datasets'): + # for concat dataset wrapper + for ds in dataset.datasets: + switch_pipeline(ds, pipeline) + elif hasattr(dataset, 'dataset'): + # for other dataset wrappers + switch_pipeline(dataset.dataset, pipeline) + else: + raise RuntimeError( + 'Cannot access the `pipeline` of the dataset.') + + train_loader = runner.train_loop.dataloader + switch_pipeline(train_loader.dataset, train_pipeline) + + # To restart the iterator of dataloader when `persistent_workers=True` + train_loader._iterator = None + + @staticmethod + def _switch_loss(runner, loss_module): + """switch the loss module.""" + model = runner.model + if is_model_wrapper(model, MODEL_WRAPPERS): + model = model.module + + if hasattr(model, 'loss_module'): + model.loss_module = loss_module + elif hasattr(model, 'head') and hasattr(model.head, 'loss_module'): + model.head.loss_module = loss_module + else: + raise RuntimeError('Cannot access the `loss_module` of the model.') diff --git a/mmpretrain/engine/hooks/visualization_hook.py b/mmpretrain/engine/hooks/visualization_hook.py new file mode 100644 index 0000000000000000000000000000000000000000..64d2230a79db971bef78d77bcf80c40365bddb15 --- /dev/null +++ b/mmpretrain/engine/hooks/visualization_hook.py @@ -0,0 +1,126 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import math +import os.path as osp +from typing import Optional, Sequence + +from mmengine.fileio import join_path +from mmengine.hooks import Hook +from mmengine.runner import EpochBasedTrainLoop, Runner +from mmengine.visualization import Visualizer + +from mmpretrain.registry import HOOKS +from mmpretrain.structures import DataSample + + +@HOOKS.register_module() +class VisualizationHook(Hook): + """Classification Visualization Hook. Used to visualize validation and + testing prediction results. + + - If ``out_dir`` is specified, all storage backends are ignored + and save the image to the ``out_dir``. + - If ``show`` is True, plot the result image in a window, please + confirm you are able to access the graphical interface. 
+ + Args: + enable (bool): Whether to enable this hook. Defaults to False. + interval (int): The interval of samples to visualize. Defaults to 5000. + show (bool): Whether to display the drawn image. Defaults to False. + out_dir (str, optional): directory where painted images will be saved + in the testing process. If None, handle with the backends of the + visualizer. Defaults to None. + **kwargs: other keyword arguments of + :meth:`mmpretrain.visualization.UniversalVisualizer.visualize_cls`. + """ + + def __init__(self, + enable=False, + interval: int = 5000, + show: bool = False, + out_dir: Optional[str] = None, + **kwargs): + self._visualizer: Visualizer = Visualizer.get_current_instance() + + self.enable = enable + self.interval = interval + self.show = show + self.out_dir = out_dir + + self.draw_args = {**kwargs, 'show': show} + + def _draw_samples(self, + batch_idx: int, + data_batch: dict, + data_samples: Sequence[DataSample], + step: int = 0) -> None: + """Visualize every ``self.interval`` samples from a data batch. + + Args: + batch_idx (int): The index of the current batch in the val loop. + data_batch (dict): Data from dataloader. + outputs (Sequence[:obj:`DataSample`]): Outputs from model. + step (int): Global step value to record. Defaults to 0. + """ + if self.enable is False: + return + + batch_size = len(data_samples) + images = data_batch['inputs'] + start_idx = batch_size * batch_idx + end_idx = start_idx + batch_size + + # The first index divisible by the interval, after the start index + first_sample_id = math.ceil(start_idx / self.interval) * self.interval + + for sample_id in range(first_sample_id, end_idx, self.interval): + image = images[sample_id - start_idx] + image = image.permute(1, 2, 0).cpu().numpy().astype('uint8') + + data_sample = data_samples[sample_id - start_idx] + if 'img_path' in data_sample: + # osp.basename works on different platforms even file clients. + sample_name = osp.basename(data_sample.get('img_path')) + else: + sample_name = str(sample_id) + + draw_args = self.draw_args + if self.out_dir is not None: + draw_args['out_file'] = join_path(self.out_dir, + f'{sample_name}_{step}.png') + + self._visualizer.visualize_cls( + image=image, + data_sample=data_sample, + step=step, + name=sample_name, + **self.draw_args, + ) + + def after_val_iter(self, runner: Runner, batch_idx: int, data_batch: dict, + outputs: Sequence[DataSample]) -> None: + """Visualize every ``self.interval`` samples during validation. + + Args: + runner (:obj:`Runner`): The runner of the validation process. + batch_idx (int): The index of the current batch in the val loop. + data_batch (dict): Data from dataloader. + outputs (Sequence[:obj:`DataSample`]): Outputs from model. + """ + if isinstance(runner.train_loop, EpochBasedTrainLoop): + step = runner.epoch + else: + step = runner.iter + + self._draw_samples(batch_idx, data_batch, outputs, step=step) + + def after_test_iter(self, runner: Runner, batch_idx: int, data_batch: dict, + outputs: Sequence[DataSample]) -> None: + """Visualize every ``self.interval`` samples during test. + + Args: + runner (:obj:`Runner`): The runner of the testing process. + batch_idx (int): The index of the current batch in the test loop. + data_batch (dict): Data from dataloader. + outputs (Sequence[:obj:`DetDataSample`]): Outputs from model. 
+ """ + self._draw_samples(batch_idx, data_batch, outputs, step=0) diff --git a/mmpretrain/engine/hooks/warmup_param_hook.py b/mmpretrain/engine/hooks/warmup_param_hook.py new file mode 100644 index 0000000000000000000000000000000000000000..b45d8918dbbcb9cf5d12c252621908f0b6c1f251 --- /dev/null +++ b/mmpretrain/engine/hooks/warmup_param_hook.py @@ -0,0 +1,66 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import operator as op +from typing import Any, Optional, Union + +from mmengine.hooks import Hook + +from mmpretrain.registry import HOOKS +from mmpretrain.utils import get_ori_model + + +@HOOKS.register_module() +class WarmupParamHook(Hook): + """This is a hook used for changing the parameters other than optimizations + that need to warmup inside the module. + + This hook can extend with more detailed warmup rule if necessary. + + Args: + param_name (str): The parameter name that needs to be altered. + module_name (str): Module name that belongs to the model. Such as + `head`, `head.loss`, etc. + warmup_epochs (int): The warmup epochs for this parameter. + """ + + def __init__( + self, + param_name: str, + module_name: str, + warmup_epochs: int, + ) -> None: + self.param_name = param_name + self.warmup_epochs = warmup_epochs + # getter for module which saves the changed parameter + self.module_getter = op.attrgetter(module_name) + + def get_param(self, runner) -> Any: + """Get the parameter.""" + try: + module = self.module_getter(get_ori_model(runner.model)) + return getattr(module, self.param_name) + except AttributeError as e: + raise AttributeError(f'{e}. Please check hook settings.') + + def set_param(self, runner, value) -> None: + """Set the parameter.""" + try: + module = self.module_getter(get_ori_model(runner.model)) + setattr(module, self.param_name, value) + except AttributeError as e: + raise AttributeError(f'{e}. Please check hook settings.') + + def before_train(self, runner) -> None: + """Get the original value before train.""" + self.ori_val = self.get_param(runner) + + def before_train_iter( + self, + runner, + batch_idx: int, + data_batch: Optional[Union[dict, tuple, list]] = None) -> None: + """Set the warmup value before each train iter.""" + cur_iter = runner.iter + iters_per_epoch = runner.max_iters / runner.max_epochs + new_val = self.ori_val * min( + 1, cur_iter / (self.warmup_epochs * iters_per_epoch)) + self.set_param(runner, new_val) diff --git a/mmpretrain/engine/optimizers/__init__.py b/mmpretrain/engine/optimizers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..bd53a37630b2a0dfbb69b1020518b9ec4ff03715 --- /dev/null +++ b/mmpretrain/engine/optimizers/__init__.py @@ -0,0 +1,8 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from .adan_t import Adan +from .lamb import Lamb +from .lars import LARS +from .layer_decay_optim_wrapper_constructor import \ + LearningRateDecayOptimWrapperConstructor + +__all__ = ['Lamb', 'Adan', 'LARS', 'LearningRateDecayOptimWrapperConstructor'] diff --git a/mmpretrain/engine/optimizers/adan_t.py b/mmpretrain/engine/optimizers/adan_t.py new file mode 100644 index 0000000000000000000000000000000000000000..571a71b6fe561fb33053af2fd6d2161a775918e4 --- /dev/null +++ b/mmpretrain/engine/optimizers/adan_t.py @@ -0,0 +1,312 @@ +# Copyright 2022 Garena Online Private Limited +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import math +from typing import List + +import torch +from torch import Tensor +from torch.optim.optimizer import Optimizer + +from mmpretrain.registry import OPTIMIZERS + + +@OPTIMIZERS.register_module() +class Adan(Optimizer): + """Implements a pytorch variant of Adan. + + Adan was proposed in + Adan : Adaptive Nesterov Momentum Algorithm for Faster Optimizing Deep Models. # noqa + https://arxiv.org/abs/2208.06677 + Arguments: + params (iterable): iterable of parameters to optimize + or dicts defining parameter groups. + lr (float, optional): learning rate. (default: 1e-3) + betas (Tuple[float, float, flot], optional): coefficients used + for computing running averages of gradient. + (default: (0.98, 0.92, 0.99)) + eps (float, optional): term added to the denominator to improve + numerical stability. (default: 1e-8) + weight_decay (float, optional): decoupled weight decay + (L2 penalty) (default: 0) + max_grad_norm (float, optional): value used to clip + global grad norm (default: 0.0 no clip) + no_prox (bool): how to perform the decoupled weight decay + (default: False) + foreach (bool): if True would use torch._foreach implementation. + It's faster but uses slightly more memory. + """ + + def __init__(self, + params, + lr=1e-3, + betas=(0.98, 0.92, 0.99), + eps=1e-8, + weight_decay=0.0, + max_grad_norm=0.0, + no_prox=False, + foreach: bool = True): + if not 0.0 <= max_grad_norm: + raise ValueError('Invalid Max grad norm: {}'.format(max_grad_norm)) + if not 0.0 <= lr: + raise ValueError('Invalid learning rate: {}'.format(lr)) + if not 0.0 <= eps: + raise ValueError('Invalid epsilon value: {}'.format(eps)) + if not 0.0 <= betas[0] < 1.0: + raise ValueError('Invalid beta parameter at index 0: {}'.format( + betas[0])) + if not 0.0 <= betas[1] < 1.0: + raise ValueError('Invalid beta parameter at index 1: {}'.format( + betas[1])) + if not 0.0 <= betas[2] < 1.0: + raise ValueError('Invalid beta parameter at index 2: {}'.format( + betas[2])) + defaults = dict( + lr=lr, + betas=betas, + eps=eps, + weight_decay=weight_decay, + max_grad_norm=max_grad_norm, + no_prox=no_prox, + foreach=foreach) + super().__init__(params, defaults) + + def __setstate__(self, state): + super(Adan, self).__setstate__(state) + for group in self.param_groups: + group.setdefault('no_prox', False) + + @torch.no_grad() + def restart_opt(self): + for group in self.param_groups: + group['step'] = 0 + for p in group['params']: + if p.requires_grad: + state = self.state[p] + # State initialization + + # Exponential moving average of gradient values + state['exp_avg'] = torch.zeros_like(p) + # Exponential moving average of squared gradient values + state['exp_avg_sq'] = torch.zeros_like(p) + # Exponential moving average of gradient difference + state['exp_avg_diff'] = torch.zeros_like(p) + + @torch.no_grad() + def step(self): + """Performs a single optimization step.""" + if self.defaults['max_grad_norm'] > 0: + device = self.param_groups[0]['params'][0].device + global_grad_norm = torch.zeros(1, device=device) + + max_grad_norm = torch.tensor( + self.defaults['max_grad_norm'], device=device) + for group in 
self.param_groups: + + for p in group['params']: + if p.grad is not None: + grad = p.grad + global_grad_norm.add_(grad.pow(2).sum()) + + global_grad_norm = torch.sqrt(global_grad_norm) + group['eps'] + + clip_global_grad_norm = \ + torch.clamp(max_grad_norm / global_grad_norm, max=1.0) + else: + clip_global_grad_norm = 1.0 + + for group in self.param_groups: + params_with_grad = [] + grads = [] + exp_avgs = [] + exp_avg_sqs = [] + exp_avg_diffs = [] + pre_grads = [] + + beta1, beta2, beta3 = group['betas'] + # assume same step across group now to simplify things + # per parameter step can be easily support + # by making it tensor, or pass list into kernel + if 'step' in group: + group['step'] += 1 + else: + group['step'] = 1 + + bias_correction1 = 1.0 - beta1**group['step'] + bias_correction2 = 1.0 - beta2**group['step'] + bias_correction3 = 1.0 - beta3**group['step'] + + for p in group['params']: + if p.grad is None: + continue + params_with_grad.append(p) + grads.append(p.grad) + + state = self.state[p] + if len(state) == 0: + state['exp_avg'] = torch.zeros_like(p) + state['exp_avg_sq'] = torch.zeros_like(p) + state['exp_avg_diff'] = torch.zeros_like(p) + + if 'pre_grad' not in state or group['step'] == 1: + # at first step grad wouldn't be clipped + # by `clip_global_grad_norm` + # this is only to simplify implementation + state['pre_grad'] = p.grad + + exp_avgs.append(state['exp_avg']) + exp_avg_sqs.append(state['exp_avg_sq']) + exp_avg_diffs.append(state['exp_avg_diff']) + pre_grads.append(state['pre_grad']) + + kwargs = dict( + params=params_with_grad, + grads=grads, + exp_avgs=exp_avgs, + exp_avg_sqs=exp_avg_sqs, + exp_avg_diffs=exp_avg_diffs, + pre_grads=pre_grads, + beta1=beta1, + beta2=beta2, + beta3=beta3, + bias_correction1=bias_correction1, + bias_correction2=bias_correction2, + bias_correction3_sqrt=math.sqrt(bias_correction3), + lr=group['lr'], + weight_decay=group['weight_decay'], + eps=group['eps'], + no_prox=group['no_prox'], + clip_global_grad_norm=clip_global_grad_norm, + ) + if group['foreach']: + copy_grads = _multi_tensor_adan(**kwargs) + else: + copy_grads = _single_tensor_adan(**kwargs) + + for p, copy_grad in zip(params_with_grad, copy_grads): + self.state[p]['pre_grad'] = copy_grad + + +def _single_tensor_adan( + params: List[Tensor], + grads: List[Tensor], + exp_avgs: List[Tensor], + exp_avg_sqs: List[Tensor], + exp_avg_diffs: List[Tensor], + pre_grads: List[Tensor], + *, + beta1: float, + beta2: float, + beta3: float, + bias_correction1: float, + bias_correction2: float, + bias_correction3_sqrt: float, + lr: float, + weight_decay: float, + eps: float, + no_prox: bool, + clip_global_grad_norm: Tensor, +): + copy_grads = [] + for i, param in enumerate(params): + grad = grads[i] + exp_avg = exp_avgs[i] + exp_avg_sq = exp_avg_sqs[i] + exp_avg_diff = exp_avg_diffs[i] + pre_grad = pre_grads[i] + + grad = grad.mul_(clip_global_grad_norm) + copy_grads.append(grad.clone()) + + diff = grad - pre_grad + update = grad + beta2 * diff + + exp_avg.mul_(beta1).add_(grad, alpha=1 - beta1) # m_t + exp_avg_diff.mul_(beta2).add_(diff, alpha=1 - beta2) # diff_t + exp_avg_sq.mul_(beta3).addcmul_(update, update, value=1 - beta3) # n_t + + denom = (exp_avg_sq.sqrt() / bias_correction3_sqrt).add_(eps) + update = exp_avg / bias_correction1 + update.add_(beta2 * exp_avg_diff / bias_correction2).div_(denom) + + if no_prox: + param.mul_(1 - lr * weight_decay) + param.add_(update, alpha=-lr) + else: + param.add_(update, alpha=-lr) + param.div_(1 + lr * weight_decay) + return copy_grads + + 
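+# ``Adan.step`` dispatches to ``_multi_tensor_adan`` below when the optimizer
+# is constructed with ``foreach=True`` (fused ``torch._foreach_*`` ops, faster
+# at the cost of a bit more memory) and to ``_single_tensor_adan`` above
+# otherwise; the two paths match up to floating point rounding.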
+def _multi_tensor_adan( + params: List[Tensor], + grads: List[Tensor], + exp_avgs: List[Tensor], + exp_avg_sqs: List[Tensor], + exp_avg_diffs: List[Tensor], + pre_grads: List[Tensor], + *, + beta1: float, + beta2: float, + beta3: float, + bias_correction1: float, + bias_correction2: float, + bias_correction3_sqrt: float, + lr: float, + weight_decay: float, + eps: float, + no_prox: bool, + clip_global_grad_norm: Tensor, +): + if clip_global_grad_norm < 1.0: + torch._foreach_mul_(grads, clip_global_grad_norm.item()) + copy_grads = [g.clone() for g in grads] + + diff = torch._foreach_sub(grads, pre_grads) + # NOTE: line below while looking identical gives different result, + # due to float precision errors. + # using mul+add produces identical results to single-tensor, + # using add+alpha doesn't + # update = torch._foreach_add(grads, torch._foreach_mul(diff, beta2)) + update = torch._foreach_add(grads, diff, alpha=beta2) + + torch._foreach_mul_(exp_avgs, beta1) + torch._foreach_add_(exp_avgs, grads, alpha=1 - beta1) # m_t + + torch._foreach_mul_(exp_avg_diffs, beta2) + torch._foreach_add_(exp_avg_diffs, diff, alpha=1 - beta2) # diff_t + + torch._foreach_mul_(exp_avg_sqs, beta3) + torch._foreach_addcmul_( + exp_avg_sqs, update, update, value=1 - beta3) # n_t + + denom = torch._foreach_sqrt(exp_avg_sqs) + torch._foreach_div_(denom, bias_correction3_sqrt) + torch._foreach_add_(denom, eps) + + update = torch._foreach_div(exp_avgs, bias_correction1) + # NOTE: same issue as above. + # beta2 * diff / bias_correction2 != diff * (beta2 / bias_correction2) # noqa + # using faster version by default. uncomment for tests to pass + # torch._foreach_add_(update, torch._foreach_div(torch._foreach_mul(exp_avg_diffs, beta2), bias_correction2)) # noqa + torch._foreach_add_( + update, torch._foreach_mul(exp_avg_diffs, beta2 / bias_correction2)) + torch._foreach_div_(update, denom) + + if no_prox: + torch._foreach_mul_(params, 1 - lr * weight_decay) + else: + torch._foreach_add_(params, update, alpha=-lr) + torch._foreach_div_(params, 1 + lr * weight_decay) + return copy_grads diff --git a/mmpretrain/engine/optimizers/lamb.py b/mmpretrain/engine/optimizers/lamb.py new file mode 100644 index 0000000000000000000000000000000000000000..0b44a1c168e03fa7f569388beec206fe68c64749 --- /dev/null +++ b/mmpretrain/engine/optimizers/lamb.py @@ -0,0 +1,228 @@ +"""PyTorch Lamb optimizer w/ behaviour similar to NVIDIA FusedLamb. + +This optimizer code was adapted from the following (starting with latest) +* https://github.com/HabanaAI/Model-References/blob/ +2b435114fe8e31f159b1d3063b8280ae37af7423/PyTorch/nlp/bert/pretraining/lamb.py +* https://github.com/NVIDIA/DeepLearningExamples/blob/master/PyTorch/ +LanguageModeling/Transformer-XL/pytorch/lamb.py +* https://github.com/cybertronai/pytorch-lamb + +Use FusedLamb if you can (GPU). The reason for including this variant of Lamb +is to have a version that is +similar in behaviour to APEX FusedLamb if you aren't using NVIDIA GPUs or +cannot install/use APEX. + +In addition to some cleanup, this Lamb impl has been modified to support +PyTorch XLA and has been tested on TPU. + +Original copyrights for above sources are below. + +Modifications Copyright 2021 Ross Wightman +""" +# Copyright (c) 2021, Habana Labs Ltd. All rights reserved. + +# Copyright (c) 2019-2020, NVIDIA CORPORATION. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# MIT License +# +# Copyright (c) 2019 cybertronai +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +import math + +import torch +from torch.optim import Optimizer + +from mmpretrain.registry import OPTIMIZERS + + +@OPTIMIZERS.register_module() +class Lamb(Optimizer): + """A pure pytorch variant of FuseLAMB (NvLamb variant) optimizer. + + This class is copied from `timm`_. The LAMB was proposed in `Large Batch + Optimization for Deep Learning - Training BERT in 76 minutes`_. + + .. _timm: + https://github.com/rwightman/pytorch-image-models/blob/master/timm/optim/lamb.py + .. _Large Batch Optimization for Deep Learning - Training BERT in 76 minutes: + https://arxiv.org/abs/1904.00962 + + Arguments: + params (iterable): iterable of parameters to optimize or dicts defining + parameter groups. + lr (float, optional): learning rate. (default: 1e-3) + betas (Tuple[float, float], optional): coefficients used for computing + running averages of gradient and its norm. (default: (0.9, 0.999)) + eps (float, optional): term added to the denominator to improve + numerical stability. (default: 1e-8) + weight_decay (float, optional): weight decay (L2 penalty) (default: 0) + grad_averaging (bool, optional): whether apply (1-beta2) to grad when + calculating running averages of gradient. 
(default: True) + max_grad_norm (float, optional): value used to clip global grad norm + (default: 1.0) + trust_clip (bool): enable LAMBC trust ratio clipping (default: False) + always_adapt (boolean, optional): Apply adaptive learning rate to 0.0 + weight decay parameter (default: False) + """ # noqa: E501 + + def __init__(self, + params, + lr=1e-3, + bias_correction=True, + betas=(0.9, 0.999), + eps=1e-6, + weight_decay=0.01, + grad_averaging=True, + max_grad_norm=1.0, + trust_clip=False, + always_adapt=False): + defaults = dict( + lr=lr, + bias_correction=bias_correction, + betas=betas, + eps=eps, + weight_decay=weight_decay, + grad_averaging=grad_averaging, + max_grad_norm=max_grad_norm, + trust_clip=trust_clip, + always_adapt=always_adapt) + super().__init__(params, defaults) + + @torch.no_grad() + def step(self, closure=None): + """Performs a single optimization step. + + Arguments: + closure (callable, optional): A closure that reevaluates the model + and returns the loss. + """ + loss = None + if closure is not None: + with torch.enable_grad(): + loss = closure() + + device = self.param_groups[0]['params'][0].device + one_tensor = torch.tensor( + 1.0, device=device + ) # because torch.where doesn't handle scalars correctly + global_grad_norm = torch.zeros(1, device=device) + for group in self.param_groups: + for p in group['params']: + if p.grad is None: + continue + grad = p.grad + if grad.is_sparse: + raise RuntimeError( + 'Lamb does not support sparse gradients, consider ' + 'SparseAdam instead.') + global_grad_norm.add_(grad.pow(2).sum()) + + global_grad_norm = torch.sqrt(global_grad_norm) + # FIXME it'd be nice to remove explicit tensor conversion of scalars + # when torch.where promotes + # scalar types properly https://github.com/pytorch/pytorch/issues/9190 + max_grad_norm = torch.tensor( + self.defaults['max_grad_norm'], device=device) + clip_global_grad_norm = torch.where(global_grad_norm > max_grad_norm, + global_grad_norm / max_grad_norm, + one_tensor) + + for group in self.param_groups: + bias_correction = 1 if group['bias_correction'] else 0 + beta1, beta2 = group['betas'] + grad_averaging = 1 if group['grad_averaging'] else 0 + beta3 = 1 - beta1 if grad_averaging else 1.0 + + # assume same step across group now to simplify things + # per parameter step can be easily support by making it tensor, or + # pass list into kernel + if 'step' in group: + group['step'] += 1 + else: + group['step'] = 1 + + if bias_correction: + bias_correction1 = 1 - beta1**group['step'] + bias_correction2 = 1 - beta2**group['step'] + else: + bias_correction1, bias_correction2 = 1.0, 1.0 + + for p in group['params']: + if p.grad is None: + continue + grad = p.grad.div_(clip_global_grad_norm) + state = self.state[p] + + # State initialization + if len(state) == 0: + # Exponential moving average of gradient valuesa + state['exp_avg'] = torch.zeros_like(p) + # Exponential moving average of squared gradient values + state['exp_avg_sq'] = torch.zeros_like(p) + + exp_avg, exp_avg_sq = state['exp_avg'], state['exp_avg_sq'] + + # Decay the first and second moment running average coefficient + exp_avg.mul_(beta1).add_(grad, alpha=beta3) # m_t + exp_avg_sq.mul_(beta2).addcmul_( + grad, grad, value=1 - beta2) # v_t + + denom = (exp_avg_sq.sqrt() / math.sqrt(bias_correction2)).add_( + group['eps']) + update = (exp_avg / bias_correction1).div_(denom) + + weight_decay = group['weight_decay'] + if weight_decay != 0: + update.add_(p, alpha=weight_decay) + + if weight_decay != 0 or group['always_adapt']: + # 
Layer-wise LR adaptation. By default, skip adaptation on + # parameters that are + # excluded from weight decay, unless always_adapt == True, + # then always enabled. + w_norm = p.norm(2.0) + g_norm = update.norm(2.0) + # FIXME nested where required since logical and/or not + # working in PT XLA + trust_ratio = torch.where( + w_norm > 0, + torch.where(g_norm > 0, w_norm / g_norm, one_tensor), + one_tensor, + ) + if group['trust_clip']: + # LAMBC trust clipping, upper bound fixed at one + trust_ratio = torch.minimum(trust_ratio, one_tensor) + update.mul_(trust_ratio) + + p.add_(update, alpha=-group['lr']) + + return loss diff --git a/mmpretrain/engine/optimizers/lars.py b/mmpretrain/engine/optimizers/lars.py new file mode 100644 index 0000000000000000000000000000000000000000..5e388878374e3d1e7408861a5f1830b00df5664b --- /dev/null +++ b/mmpretrain/engine/optimizers/lars.py @@ -0,0 +1,130 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Iterable + +import torch +from torch.optim.optimizer import Optimizer + +from mmpretrain.registry import OPTIMIZERS + + +@OPTIMIZERS.register_module() +class LARS(Optimizer): + """Implements layer-wise adaptive rate scaling for SGD. + + Based on Algorithm 1 of the following paper by You, Gitman, and Ginsburg. + `Large Batch Training of Convolutional Networks: + `_. + + Args: + params (Iterable): Iterable of parameters to optimize or dicts defining + parameter groups. + lr (float): Base learning rate. + momentum (float): Momentum factor. Defaults to 0. + weight_decay (float): Weight decay (L2 penalty). Defaults to 0. + dampening (float): Dampening for momentum. Defaults to 0. + eta (float): LARS coefficient. Defaults to 0.001. + nesterov (bool): Enables Nesterov momentum. Defaults to False. + eps (float): A small number to avoid dviding zero. Defaults to 1e-8. + + Example: + >>> optimizer = LARS(model.parameters(), lr=0.1, momentum=0.9, + >>> weight_decay=1e-4, eta=1e-3) + >>> optimizer.zero_grad() + >>> loss_fn(model(input), target).backward() + >>> optimizer.step() + """ + + def __init__(self, + params: Iterable, + lr: float, + momentum: float = 0, + weight_decay: float = 0, + dampening: float = 0, + eta: float = 0.001, + nesterov: bool = False, + eps: float = 1e-8) -> None: + if not isinstance(lr, float) and lr < 0.0: + raise ValueError(f'Invalid learning rate: {lr}') + if momentum < 0.0: + raise ValueError(f'Invalid momentum value: {momentum}') + if weight_decay < 0.0: + raise ValueError(f'Invalid weight_decay value: {weight_decay}') + if eta < 0.0: + raise ValueError(f'Invalid LARS coefficient value: {eta}') + + defaults = dict( + lr=lr, + momentum=momentum, + dampening=dampening, + weight_decay=weight_decay, + nesterov=nesterov, + eta=eta) + if nesterov and (momentum <= 0 or dampening != 0): + raise ValueError( + 'Nesterov momentum requires a momentum and zero dampening') + + self.eps = eps + super().__init__(params, defaults) + + def __setstate__(self, state) -> None: + super().__setstate__(state) + for group in self.param_groups: + group.setdefault('nesterov', False) + + @torch.no_grad() + def step(self, closure=None) -> torch.Tensor: + """Performs a single optimization step. + + Args: + closure (callable, optional): A closure that reevaluates the model + and returns the loss. 
+ """ + loss = None + if closure is not None: + with torch.enable_grad(): + loss = closure() + + for group in self.param_groups: + weight_decay = group['weight_decay'] + momentum = group['momentum'] + dampening = group['dampening'] + eta = group['eta'] + nesterov = group['nesterov'] + lr = group['lr'] + lars_exclude = group.get('lars_exclude', False) + + for p in group['params']: + if p.grad is None: + continue + + d_p = p.grad + + if lars_exclude: + local_lr = 1. + else: + weight_norm = torch.norm(p).item() + grad_norm = torch.norm(d_p).item() + if weight_norm != 0 and grad_norm != 0: + # Compute local learning rate for this layer + local_lr = eta * weight_norm / \ + (grad_norm + weight_decay * weight_norm + self.eps) + else: + local_lr = 1. + + actual_lr = local_lr * lr + d_p = d_p.add(p, alpha=weight_decay).mul(actual_lr) + if momentum != 0: + param_state = self.state[p] + if 'momentum_buffer' not in param_state: + buf = param_state['momentum_buffer'] = \ + torch.clone(d_p).detach() + else: + buf = param_state['momentum_buffer'] + buf.mul_(momentum).add_(d_p, alpha=1 - dampening) + if nesterov: + d_p = d_p.add(buf, alpha=momentum) + else: + d_p = buf + p.add_(-d_p) + + return loss diff --git a/mmpretrain/engine/optimizers/layer_decay_optim_wrapper_constructor.py b/mmpretrain/engine/optimizers/layer_decay_optim_wrapper_constructor.py new file mode 100644 index 0000000000000000000000000000000000000000..09c6abc54a9f49cc789bf91d2bf74b0ec68902c4 --- /dev/null +++ b/mmpretrain/engine/optimizers/layer_decay_optim_wrapper_constructor.py @@ -0,0 +1,166 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from collections import defaultdict +from typing import Callable, List, Optional + +from mmengine.logging import MMLogger +from mmengine.optim import DefaultOptimWrapperConstructor +from mmengine.utils.dl_utils.parrots_wrapper import _BatchNorm, _InstanceNorm +from torch import nn +from torch.nn import GroupNorm, LayerNorm + +from mmpretrain.registry import OPTIM_WRAPPER_CONSTRUCTORS + + +@OPTIM_WRAPPER_CONSTRUCTORS.register_module() +class LearningRateDecayOptimWrapperConstructor(DefaultOptimWrapperConstructor): + """Different learning rates are set for different layers of backbone. + + By default, each parameter share the same optimizer settings, and we + provide an argument ``paramwise_cfg`` to specify parameter-wise settings. + It is a dict and may contain the following fields: + + - ``layer_decay_rate`` (float): The learning rate of a parameter will + multiply it by multiple times according to the layer depth of the + parameter. Usually, it's less than 1, so that the earlier layers will + have a lower learning rate. Defaults to 1. + - ``bias_decay_mult`` (float): It will be multiplied to the weight + decay for all bias parameters (except for those in normalization layers). + - ``norm_decay_mult`` (float): It will be multiplied to the weight + decay for all weight and bias parameters of normalization layers. + - ``flat_decay_mult`` (float): It will be multiplied to the weight + decay for all one-dimensional parameters + - ``custom_keys`` (dict): Specified parameters-wise settings by keys. If + one of the keys in ``custom_keys`` is a substring of the name of one + parameter, then the setting of the parameter will be specified by + ``custom_keys[key]`` and other setting like ``bias_decay_mult`` will be + ignored. It should be a dict and may contain fields ``decay_mult``. + (The ``lr_mult`` is disabled in this constructor). 
+ + Example: + + In the config file, you can use this constructor as below: + + .. code:: python + + optim_wrapper = dict( + optimizer=dict( + type='AdamW', + lr=4e-3, + weight_decay=0.05, + eps=1e-8, + betas=(0.9, 0.999)), + constructor='LearningRateDecayOptimWrapperConstructor', + paramwise_cfg=dict( + layer_decay_rate=0.75, # layer-wise lr decay factor + norm_decay_mult=0., + flat_decay_mult=0., + custom_keys={ + '.cls_token': dict(decay_mult=0.0), + '.pos_embed': dict(decay_mult=0.0) + })) + """ + + def add_params(self, + params: List[dict], + module: nn.Module, + prefix: str = '', + get_layer_depth: Optional[Callable] = None, + **kwargs) -> None: + """Add all parameters of module to the params list. + + The parameters of the given module will be added to the list of param + groups, with specific rules defined by paramwise_cfg. + + Args: + params (List[dict]): A list of param groups, it will be modified + in place. + module (nn.Module): The module to be added. + optimizer_cfg (dict): The configuration of optimizer. + prefix (str): The prefix of the module. + """ + # get param-wise options + custom_keys = self.paramwise_cfg.get('custom_keys', {}) + # first sort with alphabet order and then sort with reversed len of str + sorted_keys = sorted(sorted(custom_keys.keys()), key=len, reverse=True) + logger = MMLogger.get_current_instance() + + # The model should have `get_layer_depth` method + if get_layer_depth is None and not hasattr(module, 'get_layer_depth'): + raise NotImplementedError('The layer-wise learning rate decay need' + f' the model {type(module)} has' + ' `get_layer_depth` method.') + else: + get_layer_depth = get_layer_depth or module.get_layer_depth + + bias_decay_mult = self.paramwise_cfg.get('bias_decay_mult', None) + norm_decay_mult = self.paramwise_cfg.get('norm_decay_mult', None) + flat_decay_mult = self.paramwise_cfg.get('flat_decay_mult', None) + decay_rate = self.paramwise_cfg.get('layer_decay_rate', 1.0) + + # special rules for norm layers and depth-wise conv layers + is_norm = isinstance(module, + (_BatchNorm, _InstanceNorm, GroupNorm, LayerNorm)) + + for name, param in module.named_parameters(recurse=False): + param_group = {'params': [param]} + param_name = prefix + name + if not param.requires_grad: + continue + + if self.base_wd is not None: + base_wd = self.base_wd + custom_key = next( + filter(lambda k: k in param_name, sorted_keys), None) + # custom parameters decay + if custom_key is not None: + custom_cfg = custom_keys[custom_key].copy() + decay_mult = custom_cfg.pop('decay_mult', 1.) + + param_group['weight_decay'] = base_wd * decay_mult + # add custom settings to param_group + param_group.update(custom_cfg) + # norm decay + elif is_norm and norm_decay_mult is not None: + param_group['weight_decay'] = base_wd * norm_decay_mult + # bias decay + elif name == 'bias' and bias_decay_mult is not None: + param_group['weight_decay'] = base_wd * bias_decay_mult + # flatten parameters decay + elif param.ndim == 1 and flat_decay_mult is not None: + param_group['weight_decay'] = base_wd * flat_decay_mult + else: + param_group['weight_decay'] = base_wd + + layer_id, max_id = get_layer_depth(param_name) + scale = decay_rate**(max_id - layer_id - 1) + param_group['lr'] = self.base_lr * scale + param_group['lr_scale'] = scale + param_group['layer_id'] = layer_id + param_group['param_name'] = param_name + + params.append(param_group) + + for child_name, child_mod in module.named_children(): + child_prefix = f'{prefix}{child_name}.' 
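+            # Recurse into submodules so that every parameter is added with
+            # its fully qualified (prefixed) name and layer depth.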
+            self.add_params(
+                params,
+                child_mod,
+                prefix=child_prefix,
+                get_layer_depth=get_layer_depth,
+            )
+
+        if prefix == '':
+            layer_params = defaultdict(list)
+            for param in params:
+                layer_params[param['layer_id']].append(param)
+            for layer_id, layer_params in layer_params.items():
+                lr_scale = layer_params[0]['lr_scale']
+                lr = layer_params[0]['lr']
+                msg = [
+                    f'layer {layer_id} params '
+                    f'(lr={lr:.3g}, lr_scale={lr_scale:.3g}):'
+                ]
+                for param in layer_params:
+                    msg.append(f'\t{param["param_name"]}: '
+                               f'weight_decay={param["weight_decay"]:.3g}')
+                logger.debug('\n'.join(msg))
diff --git a/mmpretrain/engine/runners/__init__.py b/mmpretrain/engine/runners/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..23206e1ea7c83fa1d547c677b3fe5203f8c5485f
--- /dev/null
+++ b/mmpretrain/engine/runners/__init__.py
@@ -0,0 +1,4 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+from .retrieval_loop import RetrievalTestLoop, RetrievalValLoop
+
+__all__ = ['RetrievalTestLoop', 'RetrievalValLoop']
diff --git a/mmpretrain/engine/runners/retrieval_loop.py b/mmpretrain/engine/runners/retrieval_loop.py
new file mode 100644
index 0000000000000000000000000000000000000000..d15387eddeb9075c23949f95a77ed59006bb9a38
--- /dev/null
+++ b/mmpretrain/engine/runners/retrieval_loop.py
@@ -0,0 +1,168 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+
+import torch
+from mmengine.model import is_model_wrapper
+from mmengine.runner import TestLoop, ValLoop, autocast
+
+from mmpretrain.registry import LOOPS
+
+
+@LOOPS.register_module()
+class RetrievalValLoop(ValLoop):
+    """Loop for multimodal retrieval validation.
+
+    Args:
+        runner (Runner): A reference of runner.
+        dataloader (Dataloader or dict): A dataloader object or a dict to
+            build a dataloader.
+        evaluator (Evaluator or dict or list): Used for computing metrics.
+        fp16 (bool): Whether to enable fp16 validation. Defaults to
+            False.
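+
+    Example:
+        A minimal, illustrative snippet of the loop part of a runner
+        config (the dataloader and evaluator fields are assumed to be
+        configured elsewhere):
+
+        .. code:: python
+
+            val_cfg = dict(type='RetrievalValLoop', fp16=False)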
+ """ + + def run(self) -> dict: + """Launch val.""" + self.runner.call_hook('before_val') + self.runner.call_hook('before_val_epoch') + self.runner.model.eval() + + feats_local = [] + data_samples_local = [] + + for idx, data_batch in enumerate(self.dataloader): + with torch.no_grad(): + self.runner.call_hook( + 'before_val_iter', batch_idx=idx, data_batch=data_batch) + # predictions should be sequence of BaseDataElement + with autocast(enabled=self.fp16): + if is_model_wrapper(self.runner.model): + data_preprocessor = self.runner.model.module.data_preprocessor # noqa: E501 + else: + data_preprocessor = self.runner.model.data_preprocessor + + # get features for retrieval instead of data samples + data_batch = data_preprocessor(data_batch, False) + feats = self.runner.model._run_forward( + data_batch, mode='tensor') + feats_local.append(feats) + data_samples_local.extend(data_batch['data_samples']) + self.runner.call_hook( + 'after_val_iter', + batch_idx=idx, + data_batch=data_batch, + outputs=feats) + + # concatenate different features + feats_local = { + k: torch.cat([dic[k] for dic in feats_local]) + for k in feats_local[0] + } + + # get predictions + if is_model_wrapper(self.runner.model): + predict_all_fn = self.runner.model.module.predict_all + else: + predict_all_fn = self.runner.model.predict_all + + img_size = self.dataloader.dataset.img_size + text_size = self.dataloader.dataset.text_size + with torch.no_grad(): + i2t_data_samples, t2i_data_samples = predict_all_fn( + feats_local, + data_samples_local, + num_images=img_size, + num_texts=text_size, + ) + + # process in evaluator and compute metrics + self.evaluator.process(i2t_data_samples, None) + i2t_metrics = self.evaluator.evaluate(img_size) + i2t_metrics = {f'i2t/{k}': v for k, v in i2t_metrics.items()} + self.evaluator.process(t2i_data_samples, None) + t2i_metrics = self.evaluator.evaluate(text_size) + t2i_metrics = {f't2i/{k}': v for k, v in t2i_metrics.items()} + metrics = {**i2t_metrics, **t2i_metrics} + + self.runner.call_hook('after_val_epoch', metrics=metrics) + self.runner.call_hook('after_val') + return metrics + + +@LOOPS.register_module() +class RetrievalTestLoop(TestLoop): + """Loop for multimodal retrieval test. + + Args: + runner (Runner): A reference of runner. + dataloader (Dataloader or dict): A dataloader object or a dict to + build a dataloader. + evaluator (Evaluator or dict or list): Used for computing metrics. + fp16 (bool): Whether to enable fp16 testing. Defaults to + False. 
+ """ + + def run(self) -> dict: + """Launch test.""" + self.runner.call_hook('before_test') + self.runner.call_hook('before_test_epoch') + self.runner.model.eval() + + feats_local = [] + data_samples_local = [] + + for idx, data_batch in enumerate(self.dataloader): + with torch.no_grad(): + self.runner.call_hook( + 'before_test_iter', batch_idx=idx, data_batch=data_batch) + # predictions should be sequence of BaseDataElement + with autocast(enabled=self.fp16): + if is_model_wrapper(self.runner.model): + data_preprocessor = self.runner.model.module.data_preprocessor # noqa: E501 + else: + data_preprocessor = self.runner.model.data_preprocessor + # get features for retrieval instead of data samples + data_batch = data_preprocessor(data_batch, False) + feats = self.runner.model._run_forward( + data_batch, mode='tensor') + feats_local.append(feats) + data_samples_local.extend(data_batch['data_samples']) + self.runner.call_hook( + 'after_test_iter', + batch_idx=idx, + data_batch=data_batch, + outputs=feats) + + # concatenate different features + feats_local = { + k: torch.cat([dic[k] for dic in feats_local]) + for k in feats_local[0] + } + + # get predictions + if is_model_wrapper(self.runner.model): + predict_all_fn = self.runner.model.module.predict_all + else: + predict_all_fn = self.runner.model.predict_all + + img_size = self.dataloader.dataset.img_size + text_size = self.dataloader.dataset.text_size + with torch.no_grad(): + i2t_data_samples, t2i_data_samples = predict_all_fn( + feats_local, + data_samples_local, + num_images=img_size, + num_texts=text_size, + ) + + # process in evaluator and compute metrics + self.evaluator.process(i2t_data_samples, None) + i2t_metrics = self.evaluator.evaluate(img_size) + i2t_metrics = {f'i2t/{k}': v for k, v in i2t_metrics.items()} + self.evaluator.process(t2i_data_samples, None) + t2i_metrics = self.evaluator.evaluate(text_size) + t2i_metrics = {f't2i/{k}': v for k, v in t2i_metrics.items()} + metrics = {**i2t_metrics, **t2i_metrics} + + self.runner.call_hook('after_test_epoch', metrics=metrics) + self.runner.call_hook('after_test') + return metrics diff --git a/mmpretrain/evaluation/__init__.py b/mmpretrain/evaluation/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..f70dc226d30f7b8e4ee5a44ca163ad1ae04eabf5 --- /dev/null +++ b/mmpretrain/evaluation/__init__.py @@ -0,0 +1,3 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from .functional import * # noqa: F401,F403 +from .metrics import * # noqa: F401,F403 diff --git a/mmpretrain/evaluation/functional/__init__.py b/mmpretrain/evaluation/functional/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..ef101fec61e72abc0eb90266d453b5b22331378d --- /dev/null +++ b/mmpretrain/evaluation/functional/__init__.py @@ -0,0 +1 @@ +# Copyright (c) OpenMMLab. All rights reserved. diff --git a/mmpretrain/evaluation/metrics/__init__.py b/mmpretrain/evaluation/metrics/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..186cdd9f2635b4258c45d1e64ca4cec9bb70fab4 --- /dev/null +++ b/mmpretrain/evaluation/metrics/__init__.py @@ -0,0 +1,17 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+from .caption import COCOCaption +from .multi_label import AveragePrecision, MultiLabelMetric +from .multi_task import MultiTasksMetric +from .retrieval import RetrievalRecall +from .scienceqa import ScienceQAMetric +from .single_label import Accuracy, ConfusionMatrix, SingleLabelMetric +from .visual_grounding_eval import VisualGroundingMetric +from .voc_multi_label import VOCAveragePrecision, VOCMultiLabelMetric +from .vqa import ReportVQA, VQAAcc + +__all__ = [ + 'Accuracy', 'SingleLabelMetric', 'MultiLabelMetric', 'AveragePrecision', + 'MultiTasksMetric', 'VOCAveragePrecision', 'VOCMultiLabelMetric', + 'ConfusionMatrix', 'RetrievalRecall', 'VQAAcc', 'ReportVQA', 'COCOCaption', + 'VisualGroundingMetric', 'ScienceQAMetric' +] diff --git a/mmpretrain/evaluation/metrics/caption.py b/mmpretrain/evaluation/metrics/caption.py new file mode 100644 index 0000000000000000000000000000000000000000..c4bffabfa97a9c6faec7ecc0ffb6d9ba2f435b97 --- /dev/null +++ b/mmpretrain/evaluation/metrics/caption.py @@ -0,0 +1,136 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import json +import os +import tempfile +from typing import List, Optional + +from mmengine.evaluator import BaseMetric +from mmengine.utils import track_iter_progress + +from mmpretrain.registry import METRICS +from mmpretrain.utils import require + +try: + from pycocoevalcap.eval import COCOEvalCap + from pycocotools.coco import COCO +except ImportError: + COCOEvalCap = None + COCO = None + + +@METRICS.register_module() +class COCOCaption(BaseMetric): + """Coco Caption evaluation wrapper. + + Save the generated captions and transform into coco format. + Calling COCO API for caption metrics. + + Args: + ann_file (str): the path for the COCO format caption ground truth + json file, load for evaluations. + collect_device (str): Device name used for collecting results from + different ranks during distributed training. Must be 'cpu' or + 'gpu'. Defaults to 'cpu'. + prefix (str, optional): The prefix that will be added in the metric + names to disambiguate homonymous metrics of different evaluators. + If prefix is not provided in the argument, self.default_prefix + will be used instead. Should be modified according to the + `retrieval_type` for unambiguous results. Defaults to TR. + """ + + @require('pycocoevalcap') + def __init__(self, + ann_file: str, + collect_device: str = 'cpu', + prefix: Optional[str] = None): + super().__init__(collect_device=collect_device, prefix=prefix) + self.ann_file = ann_file + + def process(self, data_batch, data_samples): + """Process one batch of data samples. + + The processed results should be stored in ``self.results``, which will + be used to computed the metrics when all batches have been processed. + + Args: + data_batch: A batch of data from the dataloader. + data_samples (Sequence[dict]): A batch of outputs from the model. + """ + + for data_sample in data_samples: + result = dict() + + result['caption'] = data_sample.get('pred_caption') + result['image_id'] = int(data_sample.get('image_id')) + + # Save the result to `self.results`. + self.results.append(result) + + def compute_metrics(self, results: List): + """Compute the metrics from processed results. + + Args: + results (dict): The processed results of each batch. + + Returns: + Dict: The computed metrics. The keys are the names of the metrics, + and the values are corresponding results. + """ + # NOTICE: don't access `self.results` from the method. 
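+        # The collected captions are dumped to a temporary COCO-format
+        # result json and scored against `self.ann_file` with the official
+        # pycocoevalcap toolkit (BLEU, METEOR, ROUGE_L, CIDEr, ...).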
+ + with tempfile.TemporaryDirectory() as temp_dir: + + eval_result_file = save_result( + result=results, + result_dir=temp_dir, + filename='m4-caption_pred', + remove_duplicate='image_id', + ) + + coco_val = coco_caption_eval(eval_result_file, self.ann_file) + + return coco_val + + +def save_result(result, result_dir, filename, remove_duplicate=''): + """Saving predictions as json file for evaluation.""" + + # combine results from all processes + result_new = [] + + if remove_duplicate: + result_new = [] + id_list = [] + for res in track_iter_progress(result): + if res[remove_duplicate] not in id_list: + id_list.append(res[remove_duplicate]) + result_new.append(res) + result = result_new + + final_result_file_url = os.path.join(result_dir, '%s.json' % filename) + print(f'result file saved to {final_result_file_url}') + json.dump(result, open(final_result_file_url, 'w')) + + return final_result_file_url + + +def coco_caption_eval(results_file, ann_file): + """Evaluation between gt json and prediction json files.""" + # create coco object and coco_result object + coco = COCO(ann_file) + coco_result = coco.loadRes(results_file) + + # create coco_eval object by taking coco and coco_result + coco_eval = COCOEvalCap(coco, coco_result) + + # make sure the image ids are the same + coco_eval.params['image_id'] = coco_result.getImgIds() + + # This will take some times at the first run + coco_eval.evaluate() + + # print output evaluation scores + for metric, score in coco_eval.eval.items(): + print(f'{metric}: {score:.3f}') + + return coco_eval.eval diff --git a/mmpretrain/evaluation/metrics/multi_label.py b/mmpretrain/evaluation/metrics/multi_label.py new file mode 100644 index 0000000000000000000000000000000000000000..bd91aac4449c845fbed514ed5f800bd971236ade --- /dev/null +++ b/mmpretrain/evaluation/metrics/multi_label.py @@ -0,0 +1,599 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import List, Optional, Sequence, Union + +import numpy as np +import torch +from mmengine.evaluator import BaseMetric +from mmengine.logging import MMLogger + +from mmpretrain.registry import METRICS +from mmpretrain.structures import label_to_onehot +from .single_label import _precision_recall_f1_support, to_tensor + + +@METRICS.register_module() +class MultiLabelMetric(BaseMetric): + r"""A collection of precision, recall, f1-score and support for + multi-label tasks. + + The collection of metrics is for single-label multi-class classification. + And all these metrics are based on the confusion matrix of every category: + + .. image:: ../../_static/image/confusion-matrix.png + :width: 60% + :align: center + + All metrics can be formulated use variables above: + + **Precision** is the fraction of correct predictions in all predictions: + + .. math:: + \text{Precision} = \frac{TP}{TP+FP} + + **Recall** is the fraction of correct predictions in all targets: + + .. math:: + \text{Recall} = \frac{TP}{TP+FN} + + **F1-score** is the harmonic mean of the precision and recall: + + .. math:: + \text{F1-score} = \frac{2\times\text{Recall}\times\text{Precision}}{\text{Recall}+\text{Precision}} + + **Support** is the number of samples: + + .. math:: + \text{Support} = TP + TN + FN + FP + + Args: + thr (float, optional): Predictions with scores under the threshold + are considered as negative. If None, the ``topk`` predictions will + be considered as positive. If the ``topk`` is also None, use + ``thr=0.5`` as default. Defaults to None. 
+ topk (int, optional): Predictions with the k-th highest scores are + considered as positive. If None, use ``thr`` to determine positive + predictions. If both ``thr`` and ``topk`` are not None, use + ``thr``. Defaults to None. + items (Sequence[str]): The detailed metric items to evaluate, select + from "precision", "recall", "f1-score" and "support". + Defaults to ``('precision', 'recall', 'f1-score')``. + average (str | None): How to calculate the final metrics from the + confusion matrix of every category. It supports three modes: + + - `"macro"`: Calculate metrics for each category, and calculate + the mean value over all categories. + - `"micro"`: Average the confusion matrix over all categories and + calculate metrics on the mean confusion matrix. + - `None`: Calculate metrics of every category and output directly. + + Defaults to "macro". + collect_device (str): Device name used for collecting results from + different ranks during distributed training. Must be 'cpu' or + 'gpu'. Defaults to 'cpu'. + prefix (str, optional): The prefix that will be added in the metric + names to disambiguate homonymous metrics of different evaluators. + If prefix is not provided in the argument, self.default_prefix + will be used instead. Defaults to None. + + Examples: + >>> import torch + >>> from mmpretrain.evaluation import MultiLabelMetric + >>> # ------ The Basic Usage for category indices labels ------- + >>> y_pred = [[0], [1], [0, 1], [3]] + >>> y_true = [[0, 3], [0, 2], [1], [3]] + >>> # Output precision, recall, f1-score and support + >>> MultiLabelMetric.calculate( + ... y_pred, y_true, pred_indices=True, target_indices=True, num_classes=4) + (tensor(50.), tensor(50.), tensor(45.8333), tensor(6)) + >>> # ----------- The Basic Usage for one-hot labels ----------- + >>> y_pred = torch.tensor([[1, 1, 0, 0], + ... [1, 1, 0, 0], + ... [0, 0, 1, 0], + ... [0, 1, 0, 0], + ... [0, 1, 0, 0]]) + >>> y_true = torch.Tensor([[1, 1, 0, 0], + ... [0, 0, 1, 0], + ... [1, 1, 1, 0], + ... [1, 0, 0, 0], + ... [1, 0, 0, 0]]) + >>> MultiLabelMetric.calculate(y_pred, y_true) + (tensor(43.7500), tensor(31.2500), tensor(33.3333), tensor(8)) + >>> # --------- The Basic Usage for one-hot pred scores --------- + >>> y_pred = torch.rand(y_true.size()) + >>> y_pred + tensor([[0.4575, 0.7335, 0.3934, 0.2572], + [0.1318, 0.1004, 0.8248, 0.6448], + [0.8349, 0.6294, 0.7896, 0.2061], + [0.4037, 0.7308, 0.6713, 0.8374], + [0.3779, 0.4836, 0.0313, 0.0067]]) + >>> # Calculate with different threshold. + >>> MultiLabelMetric.calculate(y_pred, y_true, thr=0.1) + (tensor(42.5000), tensor(75.), tensor(53.1746), tensor(8)) + >>> # Calculate with topk. + >>> MultiLabelMetric.calculate(y_pred, y_true, topk=1) + (tensor(62.5000), tensor(31.2500), tensor(39.1667), tensor(8)) + >>> + >>> # ------------------- Use with Evalutor ------------------- + >>> from mmpretrain.structures import DataSample + >>> from mmengine.evaluator import Evaluator + >>> data_sampels = [ + ... DataSample().set_pred_score(pred).set_gt_score(gt) + ... 
for pred, gt in zip(torch.rand(1000, 5), torch.randint(0, 2, (1000, 5)))] + >>> evaluator = Evaluator(metrics=MultiLabelMetric(thr=0.5)) + >>> evaluator.process(data_sampels) + >>> evaluator.evaluate(1000) + { + 'multi-label/precision': 50.72898037055408, + 'multi-label/recall': 50.06836461357571, + 'multi-label/f1-score': 50.384466955258475 + } + >>> # Evaluate on each class by using topk strategy + >>> evaluator = Evaluator(metrics=MultiLabelMetric(topk=1, average=None)) + >>> evaluator.process(data_sampels) + >>> evaluator.evaluate(1000) + { + 'multi-label/precision_top1_classwise': [48.22, 50.54, 50.99, 44.18, 52.5], + 'multi-label/recall_top1_classwise': [18.92, 19.22, 19.92, 20.0, 20.27], + 'multi-label/f1-score_top1_classwise': [27.18, 27.85, 28.65, 27.54, 29.25] + } + """ # noqa: E501 + default_prefix: Optional[str] = 'multi-label' + + def __init__(self, + thr: Optional[float] = None, + topk: Optional[int] = None, + items: Sequence[str] = ('precision', 'recall', 'f1-score'), + average: Optional[str] = 'macro', + collect_device: str = 'cpu', + prefix: Optional[str] = None) -> None: + + logger = MMLogger.get_current_instance() + if thr is None and topk is None: + thr = 0.5 + logger.warning('Neither thr nor k is given, set thr as 0.5 by ' + 'default.') + elif thr is not None and topk is not None: + logger.warning('Both thr and topk are given, ' + 'use threshold in favor of top-k.') + + self.thr = thr + self.topk = topk + self.average = average + + for item in items: + assert item in ['precision', 'recall', 'f1-score', 'support'], \ + f'The metric {item} is not supported by `SingleLabelMetric`,' \ + ' please choose from "precision", "recall", "f1-score" and ' \ + '"support".' + self.items = tuple(items) + + super().__init__(collect_device=collect_device, prefix=prefix) + + def process(self, data_batch, data_samples: Sequence[dict]): + """Process one batch of data samples. + + The processed results should be stored in ``self.results``, which will + be used to computed the metrics when all batches have been processed. + + Args: + data_batch: A batch of data from the dataloader. + data_samples (Sequence[dict]): A batch of outputs from the model. + """ + for data_sample in data_samples: + result = dict() + + result['pred_score'] = data_sample['pred_score'].clone() + num_classes = result['pred_score'].size()[-1] + + if 'gt_score' in data_sample: + result['gt_score'] = data_sample['gt_score'].clone() + else: + result['gt_score'] = label_to_onehot(data_sample['gt_label'], + num_classes) + + # Save the result to `self.results`. + self.results.append(result) + + def compute_metrics(self, results: List): + """Compute the metrics from processed results. + + Args: + results (list): The processed results of each batch. + + Returns: + Dict: The computed metrics. The keys are the names of the metrics, + and the values are corresponding results. + """ + # NOTICE: don't access `self.results` from the method. `self.results` + # are a list of results from multiple batch, while the input `results` + # are the collected results. 
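+        # Stack the per-sample targets and prediction scores into
+        # (N, num_classes) tensors; `calculate` then binarizes the scores
+        # with `thr` (or keeps the top-k entries) and derives precision /
+        # recall / f1-score from the resulting confusion matrix.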
+ metrics = {} + + target = torch.stack([res['gt_score'] for res in results]) + pred = torch.stack([res['pred_score'] for res in results]) + + metric_res = self.calculate( + pred, + target, + pred_indices=False, + target_indices=False, + average=self.average, + thr=self.thr, + topk=self.topk) + + def pack_results(precision, recall, f1_score, support): + single_metrics = {} + if 'precision' in self.items: + single_metrics['precision'] = precision + if 'recall' in self.items: + single_metrics['recall'] = recall + if 'f1-score' in self.items: + single_metrics['f1-score'] = f1_score + if 'support' in self.items: + single_metrics['support'] = support + return single_metrics + + if self.thr: + suffix = '' if self.thr == 0.5 else f'_thr-{self.thr:.2f}' + for k, v in pack_results(*metric_res).items(): + metrics[k + suffix] = v + else: + for k, v in pack_results(*metric_res).items(): + metrics[k + f'_top{self.topk}'] = v + + result_metrics = dict() + for k, v in metrics.items(): + if self.average is None: + result_metrics[k + '_classwise'] = v.detach().cpu().tolist() + elif self.average == 'macro': + result_metrics[k] = v.item() + else: + result_metrics[k + f'_{self.average}'] = v.item() + return result_metrics + + @staticmethod + def calculate( + pred: Union[torch.Tensor, np.ndarray, Sequence], + target: Union[torch.Tensor, np.ndarray, Sequence], + pred_indices: bool = False, + target_indices: bool = False, + average: Optional[str] = 'macro', + thr: Optional[float] = None, + topk: Optional[int] = None, + num_classes: Optional[int] = None + ) -> Union[torch.Tensor, List[torch.Tensor]]: + """Calculate the precision, recall, f1-score. + + Args: + pred (torch.Tensor | np.ndarray | Sequence): The prediction + results. A :obj:`torch.Tensor` or :obj:`np.ndarray` with + shape ``(N, num_classes)`` or a sequence of index/onehot + format labels. + target (torch.Tensor | np.ndarray | Sequence): The prediction + results. A :obj:`torch.Tensor` or :obj:`np.ndarray` with + shape ``(N, num_classes)`` or a sequence of index/onehot + format labels. + pred_indices (bool): Whether the ``pred`` is a sequence of + category index labels. If True, ``num_classes`` must be set. + Defaults to False. + target_indices (bool): Whether the ``target`` is a sequence of + category index labels. If True, ``num_classes`` must be set. + Defaults to False. + average (str | None): How to calculate the final metrics from + the confusion matrix of every category. It supports three + modes: + + - `"macro"`: Calculate metrics for each category, and calculate + the mean value over all categories. + - `"micro"`: Average the confusion matrix over all categories + and calculate metrics on the mean confusion matrix. + - `None`: Calculate metrics of every category and output + directly. + + Defaults to "macro". + thr (float, optional): Predictions with scores under the thresholds + are considered as negative. Defaults to None. + topk (int, optional): Predictions with the k-th highest scores are + considered as positive. Defaults to None. + num_classes (Optional, int): The number of classes. If the ``pred`` + is indices instead of onehot, this argument is required. + Defaults to None. + + Returns: + Tuple: The tuple contains precision, recall and f1-score. + And the type of each item is: + + - torch.Tensor: A tensor for each metric. The shape is (1, ) if + ``average`` is not None, and (C, ) if ``average`` is None. + + Notes: + If both ``thr`` and ``topk`` are set, use ``thr` to determine + positive predictions. 
If neither is set, use ``thr=0.5`` as + default. + """ + average_options = ['micro', 'macro', None] + assert average in average_options, 'Invalid `average` argument, ' \ + f'please specicy from {average_options}.' + + def _format_label(label, is_indices): + """format various label to torch.Tensor.""" + if isinstance(label, np.ndarray): + assert label.ndim == 2, 'The shape `pred` and `target` ' \ + 'array must be (N, num_classes).' + label = torch.from_numpy(label) + elif isinstance(label, torch.Tensor): + assert label.ndim == 2, 'The shape `pred` and `target` ' \ + 'tensor must be (N, num_classes).' + elif isinstance(label, Sequence): + if is_indices: + assert num_classes is not None, 'For index-type labels, ' \ + 'please specify `num_classes`.' + label = torch.stack([ + label_to_onehot(indices, num_classes) + for indices in label + ]) + else: + label = torch.stack( + [to_tensor(onehot) for onehot in label]) + else: + raise TypeError( + 'The `pred` and `target` must be type of torch.tensor or ' + f'np.ndarray or sequence but get {type(label)}.') + return label + + pred = _format_label(pred, pred_indices) + target = _format_label(target, target_indices).long() + + assert pred.shape == target.shape, \ + f"The size of pred ({pred.shape}) doesn't match "\ + f'the target ({target.shape}).' + + if num_classes is not None: + assert pred.size(1) == num_classes, \ + f'The shape of `pred` ({pred.shape}) '\ + f"doesn't match the num_classes ({num_classes})." + num_classes = pred.size(1) + + thr = 0.5 if (thr is None and topk is None) else thr + + if thr is not None: + # a label is predicted positive if larger than thr + pos_inds = (pred >= thr).long() + else: + # top-k labels will be predicted positive for any example + _, topk_indices = pred.topk(topk) + pos_inds = torch.zeros_like(pred).scatter_(1, topk_indices, 1) + pos_inds = pos_inds.long() + + return _precision_recall_f1_support(pos_inds, target, average) + + +def _average_precision(pred: torch.Tensor, + target: torch.Tensor) -> torch.Tensor: + r"""Calculate the average precision for a single class. + + AP summarizes a precision-recall curve as the weighted mean of maximum + precisions obtained for any r'>r, where r is the recall: + + .. math:: + \text{AP} = \sum_n (R_n - R_{n-1}) P_n + + Note that no approximation is involved since the curve is piecewise + constant. + + Args: + pred (torch.Tensor): The model prediction with shape + ``(N, num_classes)``. + target (torch.Tensor): The target of predictions with shape + ``(N, num_classes)``. + + Returns: + torch.Tensor: average precision result. + """ + assert pred.shape == target.shape, \ + f"The size of pred ({pred.shape}) doesn't match "\ + f'the target ({target.shape}).' + + # a small value for division by zero errors + eps = torch.finfo(torch.float32).eps + + # get rid of -1 target such as difficult sample + # that is not wanted in evaluation results. 
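+    # (The masking below drops those entries; the remaining predictions are
+    # then sorted by score, and AP is accumulated as sum_n (R_n - R_{n-1}) P_n
+    # from cumulative true-positive counts.)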
+ valid_index = target > -1 + pred = pred[valid_index] + target = target[valid_index] + + # sort examples + sorted_pred_inds = torch.argsort(pred, dim=0, descending=True) + sorted_target = target[sorted_pred_inds] + + # get indexes when gt_true is positive + pos_inds = sorted_target == 1 + + # Calculate cumulative tp case numbers + tps = torch.cumsum(pos_inds, 0) + total_pos = tps[-1].item() # the last of tensor may change later + + # Calculate cumulative tp&fp(pred_poss) case numbers + pred_pos_nums = torch.arange(1, len(sorted_target) + 1).to(pred.device) + pred_pos_nums[pred_pos_nums < eps] = eps + + tps[torch.logical_not(pos_inds)] = 0 + precision = tps / pred_pos_nums.float() + ap = torch.sum(precision, 0) / max(total_pos, eps) + return ap + + +@METRICS.register_module() +class AveragePrecision(BaseMetric): + r"""Calculate the average precision with respect of classes. + + AveragePrecision (AP) summarizes a precision-recall curve as the weighted + mean of maximum precisions obtained for any r'>r, where r is the recall: + + .. math:: + \text{AP} = \sum_n (R_n - R_{n-1}) P_n + + Note that no approximation is involved since the curve is piecewise + constant. + + Args: + average (str | None): How to calculate the final metrics from + every category. It supports two modes: + + - `"macro"`: Calculate metrics for each category, and calculate + the mean value over all categories. The result of this mode + is also called **mAP**. + - `None`: Calculate metrics of every category and output directly. + + Defaults to "macro". + collect_device (str): Device name used for collecting results from + different ranks during distributed training. Must be 'cpu' or + 'gpu'. Defaults to 'cpu'. + prefix (str, optional): The prefix that will be added in the metric + names to disambiguate homonymous metrics of different evaluators. + If prefix is not provided in the argument, self.default_prefix + will be used instead. Defaults to None. + + References + ---------- + 1. `Wikipedia entry for the Average precision + `_ + + Examples: + >>> import torch + >>> from mmpretrain.evaluation import AveragePrecision + >>> # --------- The Basic Usage for one-hot pred scores --------- + >>> y_pred = torch.Tensor([[0.9, 0.8, 0.3, 0.2], + ... [0.1, 0.2, 0.2, 0.1], + ... [0.7, 0.5, 0.9, 0.3], + ... [0.8, 0.1, 0.1, 0.2]]) + >>> y_true = torch.Tensor([[1, 1, 0, 0], + ... [0, 1, 0, 0], + ... [0, 0, 1, 0], + ... [1, 0, 0, 0]]) + >>> AveragePrecision.calculate(y_pred, y_true) + tensor(70.833) + >>> # ------------------- Use with Evalutor ------------------- + >>> from mmpretrain.structures import DataSample + >>> from mmengine.evaluator import Evaluator + >>> data_samples = [ + ... DataSample().set_pred_score(i).set_gt_score(j) + ... for i, j in zip(y_pred, y_true) + ... ] + >>> evaluator = Evaluator(metrics=AveragePrecision()) + >>> evaluator.process(data_samples) + >>> evaluator.evaluate(5) + {'multi-label/mAP': 70.83333587646484} + >>> # Evaluate on each class + >>> evaluator = Evaluator(metrics=AveragePrecision(average=None)) + >>> evaluator.process(data_samples) + >>> evaluator.evaluate(5) + {'multi-label/AP_classwise': [100., 83.33, 100., 0.]} + """ + default_prefix: Optional[str] = 'multi-label' + + def __init__(self, + average: Optional[str] = 'macro', + collect_device: str = 'cpu', + prefix: Optional[str] = None) -> None: + super().__init__(collect_device=collect_device, prefix=prefix) + self.average = average + + def process(self, data_batch, data_samples: Sequence[dict]): + """Process one batch of data samples. 
+ + The processed results should be stored in ``self.results``, which will + be used to computed the metrics when all batches have been processed. + + Args: + data_batch: A batch of data from the dataloader. + data_samples (Sequence[dict]): A batch of outputs from the model. + """ + + for data_sample in data_samples: + result = dict() + + result['pred_score'] = data_sample['pred_score'].clone() + num_classes = result['pred_score'].size()[-1] + + if 'gt_score' in data_sample: + result['gt_score'] = data_sample['gt_score'].clone() + else: + result['gt_score'] = label_to_onehot(data_sample['gt_label'], + num_classes) + + # Save the result to `self.results`. + self.results.append(result) + + def compute_metrics(self, results: List): + """Compute the metrics from processed results. + + Args: + results (list): The processed results of each batch. + + Returns: + Dict: The computed metrics. The keys are the names of the metrics, + and the values are corresponding results. + """ + # NOTICE: don't access `self.results` from the method. `self.results` + # are a list of results from multiple batch, while the input `results` + # are the collected results. + + # concat + target = torch.stack([res['gt_score'] for res in results]) + pred = torch.stack([res['pred_score'] for res in results]) + + ap = self.calculate(pred, target, self.average) + + result_metrics = dict() + + if self.average is None: + result_metrics['AP_classwise'] = ap.detach().cpu().tolist() + else: + result_metrics['mAP'] = ap.item() + + return result_metrics + + @staticmethod + def calculate(pred: Union[torch.Tensor, np.ndarray], + target: Union[torch.Tensor, np.ndarray], + average: Optional[str] = 'macro') -> torch.Tensor: + r"""Calculate the average precision for a single class. + + Args: + pred (torch.Tensor | np.ndarray): The model predictions with + shape ``(N, num_classes)``. + target (torch.Tensor | np.ndarray): The target of predictions + with shape ``(N, num_classes)``. + average (str | None): The average method. It supports two modes: + + - `"macro"`: Calculate metrics for each category, and calculate + the mean value over all categories. The result of this mode + is also called mAP. + - `None`: Calculate metrics of every category and output + directly. + + Defaults to "macro". + + Returns: + torch.Tensor: the average precision of all classes. + """ + average_options = ['macro', None] + assert average in average_options, 'Invalid `average` argument, ' \ + f'please specicy from {average_options}.' + + pred = to_tensor(pred) + target = to_tensor(target) + assert pred.ndim == 2 and pred.shape == target.shape, \ + 'Both `pred` and `target` should have shape `(N, num_classes)`.' + + num_classes = pred.shape[1] + ap = pred.new_zeros(num_classes) + for k in range(num_classes): + ap[k] = _average_precision(pred[:, k], target[:, k]) + if average == 'macro': + return ap.mean() * 100.0 + else: + return ap * 100 diff --git a/mmpretrain/evaluation/metrics/multi_task.py b/mmpretrain/evaluation/metrics/multi_task.py new file mode 100644 index 0000000000000000000000000000000000000000..0e6af7680192883308df5f24b65ec38c9bb65ce6 --- /dev/null +++ b/mmpretrain/evaluation/metrics/multi_task.py @@ -0,0 +1,120 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+from typing import Dict, Sequence + +from mmengine.evaluator import BaseMetric + +from mmpretrain.registry import METRICS + + +@METRICS.register_module() +class MultiTasksMetric(BaseMetric): + """Metrics for MultiTask + Args: + task_metrics(dict): a dictionary in the keys are the names of the tasks + and the values is a list of the metric corresponds to this task + Examples: + >>> import torch + >>> from mmpretrain.evaluation import MultiTasksMetric + # -------------------- The Basic Usage -------------------- + >>>task_metrics = { + 'task0': [dict(type='Accuracy', topk=(1, ))], + 'task1': [dict(type='Accuracy', topk=(1, 3))] + } + >>>pred = [{ + 'pred_task': { + 'task0': torch.tensor([0.7, 0.0, 0.3]), + 'task1': torch.tensor([0.5, 0.2, 0.3]) + }, + 'gt_task': { + 'task0': torch.tensor(0), + 'task1': torch.tensor(2) + } + }, { + 'pred_task': { + 'task0': torch.tensor([0.0, 0.0, 1.0]), + 'task1': torch.tensor([0.0, 0.0, 1.0]) + }, + 'gt_task': { + 'task0': torch.tensor(2), + 'task1': torch.tensor(2) + } + }] + >>>metric = MultiTasksMetric(task_metrics) + >>>metric.process(None, pred) + >>>results = metric.evaluate(2) + results = { + 'task0_accuracy/top1': 100.0, + 'task1_accuracy/top1': 50.0, + 'task1_accuracy/top3': 100.0 + } + """ + + def __init__(self, + task_metrics: Dict, + collect_device: str = 'cpu') -> None: + self.task_metrics = task_metrics + super().__init__(collect_device=collect_device) + + self._metrics = {} + for task_name in self.task_metrics.keys(): + self._metrics[task_name] = [] + for metric in self.task_metrics[task_name]: + self._metrics[task_name].append(METRICS.build(metric)) + + def process(self, data_batch, data_samples: Sequence[dict]): + """Process one batch of data samples. + + The processed results should be stored in ``self.results``, which will + be used to computed the metrics when all batches have been processed. + Args: + data_batch: A batch of data from the dataloader. + data_samples (Sequence[dict]): A batch of outputs from the model. + """ + for task_name in self.task_metrics.keys(): + filtered_data_samples = [] + for data_sample in data_samples: + eval_mask = data_sample[task_name]['eval_mask'] + if eval_mask: + filtered_data_samples.append(data_sample[task_name]) + for metric in self._metrics[task_name]: + metric.process(data_batch, filtered_data_samples) + + def compute_metrics(self, results: list) -> dict: + raise NotImplementedError( + 'compute metrics should not be used here directly') + + def evaluate(self, size): + """Evaluate the model performance of the whole dataset after processing + all batches. + + Args: + size (int): Length of the entire validation dataset. When batch + size > 1, the dataloader may pad some data samples to make + sure all ranks have the same length of dataset slice. The + ``collect_results`` function will drop the padded data based on + this size. + Returns: + dict: Evaluation metrics dict on the val dataset. The keys are + "{task_name}_{metric_name}" , and the values + are corresponding results. 
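+
+            A task metric that collected no samples (everything was filtered
+            out by its ``eval_mask``) is reported as ``0`` under the key
+            ``{task_name}_{MetricClassName}``.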
+ """ + metrics = {} + for task_name in self._metrics: + for metric in self._metrics[task_name]: + name = metric.__class__.__name__ + if name == 'MultiTasksMetric' or metric.results: + results = metric.evaluate(size) + else: + results = {metric.__class__.__name__: 0} + for key in results: + name = f'{task_name}_{key}' + if name in results: + """Inspired from https://github.com/open- + mmlab/mmengine/ bl ob/ed20a9cba52ceb371f7c825131636b9e2 + 747172e/mmengine/evalua tor/evaluator.py#L84-L87.""" + raise ValueError( + 'There are multiple metric results with the same' + f'metric name {name}. Please make sure all metrics' + 'have different prefixes.') + metrics[name] = results[key] + return metrics diff --git a/mmpretrain/evaluation/metrics/retrieval.py b/mmpretrain/evaluation/metrics/retrieval.py new file mode 100644 index 0000000000000000000000000000000000000000..3269faebdaedf301b9c8ba2e51c4ef277fdfb05e --- /dev/null +++ b/mmpretrain/evaluation/metrics/retrieval.py @@ -0,0 +1,232 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import List, Optional, Sequence, Union + +import mmengine +import numpy as np +import torch +from mmengine.evaluator import BaseMetric +from mmengine.utils import is_seq_of + +from mmpretrain.registry import METRICS +from mmpretrain.structures import label_to_onehot +from .single_label import to_tensor + + +@METRICS.register_module() +class RetrievalRecall(BaseMetric): + r"""Recall evaluation metric for image retrieval. + + Args: + topk (int | Sequence[int]): If the ground truth label matches one of + the best **k** predictions, the sample will be regard as a positive + prediction. If the parameter is a tuple, all of top-k recall will + be calculated and outputted together. Defaults to 1. + collect_device (str): Device name used for collecting results from + different ranks during distributed training. Must be 'cpu' or + 'gpu'. Defaults to 'cpu'. + prefix (str, optional): The prefix that will be added in the metric + names to disambiguate homonymous metrics of different evaluators. + If prefix is not provided in the argument, self.default_prefix + will be used instead. Defaults to None. + + Examples: + Use in the code: + + >>> import torch + >>> from mmpretrain.evaluation import RetrievalRecall + >>> # -------------------- The Basic Usage -------------------- + >>> y_pred = [[0], [1], [2], [3]] + >>> y_true = [[0, 1], [2], [1], [0, 3]] + >>> RetrievalRecall.calculate( + >>> y_pred, y_true, topk=1, pred_indices=True, target_indices=True) + [tensor([50.])] + >>> # Calculate the recall@1 and recall@5 for non-indices input. + >>> y_score = torch.rand((1000, 10)) + >>> import torch.nn.functional as F + >>> y_true = F.one_hot(torch.arange(0, 1000) % 10, num_classes=10) + >>> RetrievalRecall.calculate(y_score, y_true, topk=(1, 5)) + [tensor(9.3000), tensor(48.4000)] + >>> + >>> # ------------------- Use with Evalutor ------------------- + >>> from mmpretrain.structures import DataSample + >>> from mmengine.evaluator import Evaluator + >>> data_samples = [ + ... DataSample().set_gt_label([0, 1]).set_pred_score( + ... torch.rand(10)) + ... for i in range(1000) + ... ] + >>> evaluator = Evaluator(metrics=RetrievalRecall(topk=(1, 5))) + >>> evaluator.process(data_samples) + >>> evaluator.evaluate(1000) + {'retrieval/Recall@1': 20.700000762939453, + 'retrieval/Recall@5': 78.5999984741211} + + Use in OpenMMLab configs: + + .. 
code:: python + + val/test_evaluator = dict(type='RetrievalRecall', topk=(1, 5)) + """ + default_prefix: Optional[str] = 'retrieval' + + def __init__(self, + topk: Union[int, Sequence[int]], + collect_device: str = 'cpu', + prefix: Optional[str] = None) -> None: + topk = (topk, ) if isinstance(topk, int) else topk + + for k in topk: + if k <= 0: + raise ValueError('`topk` must be a ingter larger than 0 ' + 'or seq of ingter larger than 0.') + + self.topk = topk + super().__init__(collect_device=collect_device, prefix=prefix) + + def process(self, data_batch: Sequence[dict], + data_samples: Sequence[dict]): + """Process one batch of data and predictions. + + The processed results should be stored in ``self.results``, which will + be used to computed the metrics when all batches have been processed. + + Args: + data_batch (Sequence[dict]): A batch of data from the dataloader. + predictions (Sequence[dict]): A batch of outputs from the model. + """ + for data_sample in data_samples: + pred_score = data_sample['pred_score'].clone() + gt_label = data_sample['gt_label'] + + if 'gt_score' in data_sample: + target = data_sample.get('gt_score').clone() + else: + num_classes = pred_score.size()[-1] + target = label_to_onehot(gt_label, num_classes) + + # Because the retrieval output logit vector will be much larger + # compared to the normal classification, to save resources, the + # evaluation results are computed each batch here and then reduce + # all results at the end. + result = RetrievalRecall.calculate( + pred_score.unsqueeze(0), target.unsqueeze(0), topk=self.topk) + self.results.append(result) + + def compute_metrics(self, results: List): + """Compute the metrics from processed results. + + Args: + results (list): The processed results of each batch. + + Returns: + Dict: The computed metrics. The keys are the names of the metrics, + and the values are corresponding results. + """ + result_metrics = dict() + for i, k in enumerate(self.topk): + recall_at_k = sum([r[i].item() for r in results]) / len(results) + result_metrics[f'Recall@{k}'] = recall_at_k + + return result_metrics + + @staticmethod + def calculate(pred: Union[np.ndarray, torch.Tensor], + target: Union[np.ndarray, torch.Tensor], + topk: Union[int, Sequence[int]], + pred_indices: (bool) = False, + target_indices: (bool) = False) -> float: + """Calculate the average recall. + + Args: + pred (torch.Tensor | np.ndarray | Sequence): The prediction + results. A :obj:`torch.Tensor` or :obj:`np.ndarray` with + shape ``(N, M)`` or a sequence of index/onehot + format labels. + target (torch.Tensor | np.ndarray | Sequence): The prediction + results. A :obj:`torch.Tensor` or :obj:`np.ndarray` with + shape ``(N, M)`` or a sequence of index/onehot + format labels. + topk (int, Sequence[int]): Predictions with the k-th highest + scores are considered as positive. + pred_indices (bool): Whether the ``pred`` is a sequence of + category index labels. Defaults to False. + target_indices (bool): Whether the ``target`` is a sequence of + category index labels. Defaults to False. + + Returns: + List[float]: the average recalls. 
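+                The list holds one entry per value in ``topk``, e.g.
+                ``topk=(1, 5)`` gives ``[recall@1, recall@5]``.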
+ """ + topk = (topk, ) if isinstance(topk, int) else topk + for k in topk: + if k <= 0: + raise ValueError('`topk` must be a ingter larger than 0 ' + 'or seq of ingter larger than 0.') + + max_keep = max(topk) + pred = _format_pred(pred, max_keep, pred_indices) + target = _format_target(target, target_indices) + + assert len(pred) == len(target), ( + f'Length of `pred`({len(pred)}) and `target` ({len(target)}) ' + f'must be the same.') + + num_samples = len(pred) + results = [] + for k in topk: + recalls = torch.zeros(num_samples) + for i, (sample_pred, + sample_target) in enumerate(zip(pred, target)): + sample_pred = np.array(to_tensor(sample_pred).cpu()) + sample_target = np.array(to_tensor(sample_target).cpu()) + recalls[i] = int(np.in1d(sample_pred[:k], sample_target).max()) + results.append(recalls.mean() * 100) + return results + + +def _format_pred(label, topk=None, is_indices=False): + """format various label to List[indices].""" + if is_indices: + assert isinstance(label, Sequence), \ + '`pred` must be Sequence of indices when' \ + f' `pred_indices` set to True, but get {type(label)}' + for i, sample_pred in enumerate(label): + assert is_seq_of(sample_pred, int) or isinstance( + sample_pred, (np.ndarray, torch.Tensor)), \ + '`pred` should be Sequence of indices when `pred_indices`' \ + f'set to True. but pred[{i}] is {sample_pred}' + if topk: + label[i] = sample_pred[:min(topk, len(sample_pred))] + return label + if isinstance(label, np.ndarray): + label = torch.from_numpy(label) + elif not isinstance(label, torch.Tensor): + raise TypeError(f'The pred must be type of torch.tensor, ' + f'np.ndarray or Sequence but get {type(label)}.') + topk = topk if topk else label.size()[-1] + _, indices = label.topk(topk) + return indices + + +def _format_target(label, is_indices=False): + """format various label to List[indices].""" + if is_indices: + assert isinstance(label, Sequence), \ + '`target` must be Sequence of indices when' \ + f' `target_indices` set to True, but get {type(label)}' + for i, sample_gt in enumerate(label): + assert is_seq_of(sample_gt, int) or isinstance( + sample_gt, (np.ndarray, torch.Tensor)), \ + '`target` should be Sequence of indices when ' \ + f'`target_indices` set to True. but target[{i}] is {sample_gt}' + return label + + if isinstance(label, np.ndarray): + label = torch.from_numpy(label) + elif isinstance(label, Sequence) and not mmengine.is_str(label): + label = torch.tensor(label) + elif not isinstance(label, torch.Tensor): + raise TypeError(f'The pred must be type of torch.tensor, ' + f'np.ndarray or Sequence but get {type(label)}.') + + indices = [sample_gt.nonzero().squeeze(-1) for sample_gt in label] + return indices diff --git a/mmpretrain/evaluation/metrics/scienceqa.py b/mmpretrain/evaluation/metrics/scienceqa.py new file mode 100644 index 0000000000000000000000000000000000000000..e41f4e68cfa42f847365b49fddb61dbc28c828c0 --- /dev/null +++ b/mmpretrain/evaluation/metrics/scienceqa.py @@ -0,0 +1,170 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import random +from typing import List, Optional + +from mmengine.evaluator import BaseMetric + +from mmpretrain.registry import METRICS + + +def get_pred_idx(prediction: str, choices: List[str], + options: List[str]) -> int: # noqa + """Get the index (e.g. 2) from the prediction (e.g. 
'C') + + Args: + prediction (str): The prediction from the model, + from ['A', 'B', 'C', 'D', 'E'] + choices (List(str)): The choices for the question, + from ['A', 'B', 'C', 'D', 'E'] + options (List(str)): The options for the question, + from ['A', 'B', 'C', 'D', 'E'] + + Returns: + int: The index of the prediction, from [0, 1, 2, 3, 4] + """ + if prediction in options[:len(choices)]: + return options.index(prediction) + else: + return random.choice(range(len(choices))) + + +@METRICS.register_module() +class ScienceQAMetric(BaseMetric): + """Evaluation Metric for ScienceQA. + + Args: + options (List(str)): Options for each question. Defaults to + ["A", "B", "C", "D", "E"]. + collect_device (str): Device name used for collecting results from + different ranks during distributed training. Must be 'cpu' or + 'gpu'. Defaults to 'cpu'. + prefix (str, optional): The prefix that will be added in the metric + names to disambiguate homonymous metrics of different evaluators. + If prefix is not provided in the argument, self.default_prefix + will be used instead. Should be modified according to the + `retrieval_type` for unambiguous results. Defaults to TR. + """ + + def __init__(self, + options: List[str] = ['A', 'B', 'C', 'D', 'E'], + collect_device: str = 'cpu', + prefix: Optional[str] = None) -> None: + super().__init__(collect_device=collect_device, prefix=prefix) + self.options = options + + def process(self, data_batch, data_samples) -> None: + """Process one batch of data samples. + + data_samples should contain the following keys: + 1. pred_answer (str): The prediction from the model, + from ['A', 'B', 'C', 'D', 'E'] + 2. choices (List(str)): The choices for the question, + from ['A', 'B', 'C', 'D', 'E'] + 3. grade (int): The grade for the question, from grade1 to grade12 + 4. subject (str): The subject for the question, from + ['natural science', 'social science', 'language science'] + 5. answer (str): The answer for the question, from + ['A', 'B', 'C', 'D', 'E'] + 6. hint (str): The hint for the question + 7. has_image (bool): Whether or not the question has image + + + The processed results should be stored in ``self.results``, which will + be used to computed the metrics when all batches have been processed. + + Args: + data_batch: A batch of data from the dataloader. + data_samples (Sequence[dict]): A batch of outputs from the model. + """ + for data_sample in data_samples: + result = dict() + choices = data_sample.get('choices') + result['prediction'] = get_pred_idx( + data_sample.get('pred_answer'), choices, self.options) + result['grade'] = data_sample.get('grade') + result['subject'] = data_sample.get('subject') + result['answer'] = data_sample.get('gt_answer') + hint = data_sample.get('hint') + has_image = data_sample.get('has_image', False) + result[ + 'no_context'] = True if not has_image and hint is None else False # noqa + result['has_text'] = True if hint is not None else False + result['has_image'] = has_image + + # Save the result to `self.results`. + self.results.append(result) + + def compute_metrics(self, results: List) -> dict: + """Compute the metrics from processed results. + + Args: + results (dict): The processed results of each batch. + + Returns: + Dict: The computed metrics. The keys are the names of the metrics, + and the values are corresponding results. + """ + # NOTICE: don't access `self.results` from the method. 
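+        # Overall accuracy is reported together with per-bucket accuracies
+        # (subject, context type, grade band); a bucket only appears in the
+        # output if it received at least one sample.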
+ metrics = dict() + + all_acc = [] + acc_natural = [] + acc_social = [] + acc_language = [] + acc_has_text = [] + acc_has_image = [] + acc_no_context = [] + acc_grade_1_6 = [] + acc_grade_7_12 = [] + + for result in results: + correct = result['prediction'] == result['answer'] + all_acc.append(correct) + # different subjects + if result['subject'] == 'natural science': + acc_natural.append(correct) + elif result['subject'] == 'social science': + acc_social.append(correct) + elif result['subject'] == 'language science': + acc_language.append(correct) + + # different context + if result['has_text']: + acc_has_text.append(correct) + elif result['has_image']: + acc_has_image.append(correct) + elif result['no_context']: + acc_no_context.append(correct) + + # different grade + if result['grade'] in [ + 'grade1', 'grade2', 'grade3', 'grade4', 'grade5', 'grade6' + ]: + acc_grade_1_6.append(correct) + elif result['grade'] in [ + 'grade7', 'grade8', 'grade9', 'grade10', 'grade11', + 'grade12' + ]: + acc_grade_7_12.append(correct) + + metrics['all_acc'] = sum(all_acc) / len(all_acc) + if len(acc_natural) > 0: + metrics['acc_natural'] = sum(acc_natural) / len(acc_natural) + if len(acc_social) > 0: + metrics['acc_social'] = sum(acc_social) / len(acc_social) + if len(acc_language) > 0: + metrics['acc_language'] = sum(acc_language) / len(acc_language) + if len(acc_has_text) > 0: + metrics['acc_has_text'] = sum(acc_has_text) / len(acc_has_text) + if len(acc_has_image) > 0: + metrics['acc_has_image'] = sum(acc_has_image) / len(acc_has_image) + if len(acc_no_context) > 0: + metrics['acc_no_context'] = sum(acc_no_context) / len( + acc_no_context) + if len(acc_grade_1_6) > 0: + metrics['acc_grade_1_6'] = sum(acc_grade_1_6) / len(acc_grade_1_6) + if len(acc_grade_7_12) > 0: + metrics['acc_grade_7_12'] = sum(acc_grade_7_12) / len( + acc_grade_7_12) + + return metrics diff --git a/mmpretrain/evaluation/metrics/single_label.py b/mmpretrain/evaluation/metrics/single_label.py new file mode 100644 index 0000000000000000000000000000000000000000..f9329b9567e698a4e3ebdb7d77f0f8404b81ad4c --- /dev/null +++ b/mmpretrain/evaluation/metrics/single_label.py @@ -0,0 +1,776 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from itertools import product +from typing import List, Optional, Sequence, Union + +import mmengine +import numpy as np +import torch +import torch.nn.functional as F +from mmengine.evaluator import BaseMetric + +from mmpretrain.registry import METRICS + + +def to_tensor(value): + """Convert value to torch.Tensor.""" + if isinstance(value, np.ndarray): + value = torch.from_numpy(value) + elif isinstance(value, Sequence) and not mmengine.is_str(value): + value = torch.tensor(value) + elif not isinstance(value, torch.Tensor): + raise TypeError(f'{type(value)} is not an available argument.') + return value + + +def _precision_recall_f1_support(pred_positive, gt_positive, average): + """calculate base classification task metrics, such as precision, recall, + f1_score, support.""" + average_options = ['micro', 'macro', None] + assert average in average_options, 'Invalid `average` argument, ' \ + f'please specify from {average_options}.' + + # ignore -1 target such as difficult sample that is not wanted + # in evaluation results. 
+ # only for calculate multi-label without affecting single-label behavior + ignored_index = gt_positive == -1 + pred_positive[ignored_index] = 0 + gt_positive[ignored_index] = 0 + + class_correct = (pred_positive & gt_positive) + if average == 'micro': + tp_sum = class_correct.sum() + pred_sum = pred_positive.sum() + gt_sum = gt_positive.sum() + else: + tp_sum = class_correct.sum(0) + pred_sum = pred_positive.sum(0) + gt_sum = gt_positive.sum(0) + + precision = tp_sum / torch.clamp(pred_sum, min=1).float() * 100 + recall = tp_sum / torch.clamp(gt_sum, min=1).float() * 100 + f1_score = 2 * precision * recall / torch.clamp( + precision + recall, min=torch.finfo(torch.float32).eps) + if average in ['macro', 'micro']: + precision = precision.mean(0) + recall = recall.mean(0) + f1_score = f1_score.mean(0) + support = gt_sum.sum(0) + else: + support = gt_sum + return precision, recall, f1_score, support + + +@METRICS.register_module() +class Accuracy(BaseMetric): + r"""Accuracy evaluation metric. + + For either binary classification or multi-class classification, the + accuracy is the fraction of correct predictions in all predictions: + + .. math:: + + \text{Accuracy} = \frac{N_{\text{correct}}}{N_{\text{all}}} + + Args: + topk (int | Sequence[int]): If the ground truth label matches one of + the best **k** predictions, the sample will be regard as a positive + prediction. If the parameter is a tuple, all of top-k accuracy will + be calculated and outputted together. Defaults to 1. + thrs (Sequence[float | None] | float | None): If a float, predictions + with score lower than the threshold will be regard as the negative + prediction. If None, not apply threshold. If the parameter is a + tuple, accuracy based on all thresholds will be calculated and + outputted together. Defaults to 0. + collect_device (str): Device name used for collecting results from + different ranks during distributed training. Must be 'cpu' or + 'gpu'. Defaults to 'cpu'. + prefix (str, optional): The prefix that will be added in the metric + names to disambiguate homonymous metrics of different evaluators. + If prefix is not provided in the argument, self.default_prefix + will be used instead. Defaults to None. + + Examples: + >>> import torch + >>> from mmpretrain.evaluation import Accuracy + >>> # -------------------- The Basic Usage -------------------- + >>> y_pred = [0, 2, 1, 3] + >>> y_true = [0, 1, 2, 3] + >>> Accuracy.calculate(y_pred, y_true) + tensor([50.]) + >>> # Calculate the top1 and top5 accuracy. + >>> y_score = torch.rand((1000, 10)) + >>> y_true = torch.zeros((1000, )) + >>> Accuracy.calculate(y_score, y_true, topk=(1, 5)) + [[tensor([9.9000])], [tensor([51.5000])]] + >>> + >>> # ------------------- Use with Evalutor ------------------- + >>> from mmpretrain.structures import DataSample + >>> from mmengine.evaluator import Evaluator + >>> data_samples = [ + ... DataSample().set_gt_label(0).set_pred_score(torch.rand(10)) + ... for i in range(1000) + ... 
] + >>> evaluator = Evaluator(metrics=Accuracy(topk=(1, 5))) + >>> evaluator.process(data_samples) + >>> evaluator.evaluate(1000) + { + 'accuracy/top1': 9.300000190734863, + 'accuracy/top5': 51.20000076293945 + } + """ + default_prefix: Optional[str] = 'accuracy' + + def __init__(self, + topk: Union[int, Sequence[int]] = (1, ), + thrs: Union[float, Sequence[Union[float, None]], None] = 0., + collect_device: str = 'cpu', + prefix: Optional[str] = None) -> None: + super().__init__(collect_device=collect_device, prefix=prefix) + + if isinstance(topk, int): + self.topk = (topk, ) + else: + self.topk = tuple(topk) + + if isinstance(thrs, float) or thrs is None: + self.thrs = (thrs, ) + else: + self.thrs = tuple(thrs) + + def process(self, data_batch, data_samples: Sequence[dict]): + """Process one batch of data samples. + + The processed results should be stored in ``self.results``, which will + be used to computed the metrics when all batches have been processed. + + Args: + data_batch: A batch of data from the dataloader. + data_samples (Sequence[dict]): A batch of outputs from the model. + """ + + for data_sample in data_samples: + result = dict() + if 'pred_score' in data_sample: + result['pred_score'] = data_sample['pred_score'].cpu() + else: + result['pred_label'] = data_sample['pred_label'].cpu() + result['gt_label'] = data_sample['gt_label'].cpu() + # Save the result to `self.results`. + self.results.append(result) + + def compute_metrics(self, results: List): + """Compute the metrics from processed results. + + Args: + results (dict): The processed results of each batch. + + Returns: + Dict: The computed metrics. The keys are the names of the metrics, + and the values are corresponding results. + """ + # NOTICE: don't access `self.results` from the method. + metrics = {} + + # concat + target = torch.cat([res['gt_label'] for res in results]) + if 'pred_score' in results[0]: + pred = torch.stack([res['pred_score'] for res in results]) + + try: + acc = self.calculate(pred, target, self.topk, self.thrs) + except ValueError as e: + # If the topk is invalid. + raise ValueError( + str(e) + ' Please check the `val_evaluator` and ' + '`test_evaluator` fields in your config file.') + + multi_thrs = len(self.thrs) > 1 + for i, k in enumerate(self.topk): + for j, thr in enumerate(self.thrs): + name = f'top{k}' + if multi_thrs: + name += '_no-thr' if thr is None else f'_thr-{thr:.2f}' + metrics[name] = acc[i][j].item() + else: + # If only label in the `pred_label`. + pred = torch.cat([res['pred_label'] for res in results]) + acc = self.calculate(pred, target, self.topk, self.thrs) + metrics['top1'] = acc.item() + + return metrics + + @staticmethod + def calculate( + pred: Union[torch.Tensor, np.ndarray, Sequence], + target: Union[torch.Tensor, np.ndarray, Sequence], + topk: Sequence[int] = (1, ), + thrs: Sequence[Union[float, None]] = (0., ), + ) -> Union[torch.Tensor, List[List[torch.Tensor]]]: + """Calculate the accuracy. + + Args: + pred (torch.Tensor | np.ndarray | Sequence): The prediction + results. It can be labels (N, ), or scores of every + class (N, C). + target (torch.Tensor | np.ndarray | Sequence): The target of + each prediction with shape (N, ). + thrs (Sequence[float | None]): Predictions with scores under + the thresholds are considered negative. It's only used + when ``pred`` is scores. None means no thresholds. + Defaults to (0., ). + thrs (Sequence[float]): Predictions with scores under + the thresholds are considered negative. It's only used + when ``pred`` is scores. 
Defaults to (0., ). + + Returns: + torch.Tensor | List[List[torch.Tensor]]: Accuracy. + + - torch.Tensor: If the ``pred`` is a sequence of label instead of + score (number of dimensions is 1). Only return a top-1 accuracy + tensor, and ignore the argument ``topk` and ``thrs``. + - List[List[torch.Tensor]]: If the ``pred`` is a sequence of score + (number of dimensions is 2). Return the accuracy on each ``topk`` + and ``thrs``. And the first dim is ``topk``, the second dim is + ``thrs``. + """ + + pred = to_tensor(pred) + target = to_tensor(target).to(torch.int64) + num = pred.size(0) + assert pred.size(0) == target.size(0), \ + f"The size of pred ({pred.size(0)}) doesn't match "\ + f'the target ({target.size(0)}).' + + if pred.ndim == 1: + # For pred label, ignore topk and acc + pred_label = pred.int() + correct = pred.eq(target).float().sum(0, keepdim=True) + acc = correct.mul_(100. / num) + return acc + else: + # For pred score, calculate on all topk and thresholds. + pred = pred.float() + maxk = max(topk) + + if maxk > pred.size(1): + raise ValueError( + f'Top-{maxk} accuracy is unavailable since the number of ' + f'categories is {pred.size(1)}.') + + pred_score, pred_label = pred.topk(maxk, dim=1) + pred_label = pred_label.t() + correct = pred_label.eq(target.view(1, -1).expand_as(pred_label)) + results = [] + for k in topk: + results.append([]) + for thr in thrs: + # Only prediction values larger than thr are counted + # as correct + _correct = correct + if thr is not None: + _correct = _correct & (pred_score.t() > thr) + correct_k = _correct[:k].reshape(-1).float().sum( + 0, keepdim=True) + acc = correct_k.mul_(100. / num) + results[-1].append(acc) + return results + + +@METRICS.register_module() +class SingleLabelMetric(BaseMetric): + r"""A collection of precision, recall, f1-score and support for + single-label tasks. + + The collection of metrics is for single-label multi-class classification. + And all these metrics are based on the confusion matrix of every category: + + .. image:: ../../_static/image/confusion-matrix.png + :width: 60% + :align: center + + All metrics can be formulated use variables above: + + **Precision** is the fraction of correct predictions in all predictions: + + .. math:: + \text{Precision} = \frac{TP}{TP+FP} + + **Recall** is the fraction of correct predictions in all targets: + + .. math:: + \text{Recall} = \frac{TP}{TP+FN} + + **F1-score** is the harmonic mean of the precision and recall: + + .. math:: + \text{F1-score} = \frac{2\times\text{Recall}\times\text{Precision}}{\text{Recall}+\text{Precision}} + + **Support** is the number of samples: + + .. math:: + \text{Support} = TP + TN + FN + FP + + Args: + thrs (Sequence[float | None] | float | None): If a float, predictions + with score lower than the threshold will be regard as the negative + prediction. If None, only the top-1 prediction will be regard as + the positive prediction. If the parameter is a tuple, accuracy + based on all thresholds will be calculated and outputted together. + Defaults to 0. + items (Sequence[str]): The detailed metric items to evaluate, select + from "precision", "recall", "f1-score" and "support". + Defaults to ``('precision', 'recall', 'f1-score')``. + average (str | None): How to calculate the final metrics from the + confusion matrix of every category. It supports three modes: + + - `"macro"`: Calculate metrics for each category, and calculate + the mean value over all categories. 
+ - `"micro"`: Average the confusion matrix over all categories and + calculate metrics on the mean confusion matrix. + - `None`: Calculate metrics of every category and output directly. + + Defaults to "macro". + num_classes (int, optional): The number of classes. Defaults to None. + collect_device (str): Device name used for collecting results from + different ranks during distributed training. Must be 'cpu' or + 'gpu'. Defaults to 'cpu'. + prefix (str, optional): The prefix that will be added in the metric + names to disambiguate homonymous metrics of different evaluators. + If prefix is not provided in the argument, self.default_prefix + will be used instead. Defaults to None. + + Examples: + >>> import torch + >>> from mmpretrain.evaluation import SingleLabelMetric + >>> # -------------------- The Basic Usage -------------------- + >>> y_pred = [0, 1, 1, 3] + >>> y_true = [0, 2, 1, 3] + >>> # Output precision, recall, f1-score and support. + >>> SingleLabelMetric.calculate(y_pred, y_true, num_classes=4) + (tensor(62.5000), tensor(75.), tensor(66.6667), tensor(4)) + >>> # Calculate with different thresholds. + >>> y_score = torch.rand((1000, 10)) + >>> y_true = torch.zeros((1000, )) + >>> SingleLabelMetric.calculate(y_score, y_true, thrs=(0., 0.9)) + [(tensor(10.), tensor(0.9500), tensor(1.7352), tensor(1000)), + (tensor(10.), tensor(0.5500), tensor(1.0427), tensor(1000))] + >>> + >>> # ------------------- Use with Evalutor ------------------- + >>> from mmpretrain.structures import DataSample + >>> from mmengine.evaluator import Evaluator + >>> data_samples = [ + ... DataSample().set_gt_label(i%5).set_pred_score(torch.rand(5)) + ... for i in range(1000) + ... ] + >>> evaluator = Evaluator(metrics=SingleLabelMetric()) + >>> evaluator.process(data_samples) + >>> evaluator.evaluate(1000) + {'single-label/precision': 19.650691986083984, + 'single-label/recall': 19.600000381469727, + 'single-label/f1-score': 19.619548797607422} + >>> # Evaluate on each class + >>> evaluator = Evaluator(metrics=SingleLabelMetric(average=None)) + >>> evaluator.process(data_samples) + >>> evaluator.evaluate(1000) + { + 'single-label/precision_classwise': [21.1, 18.7, 17.8, 19.4, 16.1], + 'single-label/recall_classwise': [18.5, 18.5, 17.0, 20.0, 18.0], + 'single-label/f1-score_classwise': [19.7, 18.6, 17.1, 19.7, 17.0] + } + """ # noqa: E501 + default_prefix: Optional[str] = 'single-label' + + def __init__(self, + thrs: Union[float, Sequence[Union[float, None]], None] = 0., + items: Sequence[str] = ('precision', 'recall', 'f1-score'), + average: Optional[str] = 'macro', + num_classes: Optional[int] = None, + collect_device: str = 'cpu', + prefix: Optional[str] = None) -> None: + super().__init__(collect_device=collect_device, prefix=prefix) + + if isinstance(thrs, float) or thrs is None: + self.thrs = (thrs, ) + else: + self.thrs = tuple(thrs) + + for item in items: + assert item in ['precision', 'recall', 'f1-score', 'support'], \ + f'The metric {item} is not supported by `SingleLabelMetric`,' \ + ' please specify from "precision", "recall", "f1-score" and ' \ + '"support".' + self.items = tuple(items) + self.average = average + self.num_classes = num_classes + + def process(self, data_batch, data_samples: Sequence[dict]): + """Process one batch of data samples. + + The processed results should be stored in ``self.results``, which will + be used to computed the metrics when all batches have been processed. + + Args: + data_batch: A batch of data from the dataloader. 
+ data_samples (Sequence[dict]): A batch of outputs from the model. + """ + + for data_sample in data_samples: + result = dict() + if 'pred_score' in data_sample: + result['pred_score'] = data_sample['pred_score'].cpu() + else: + num_classes = self.num_classes or data_sample.get( + 'num_classes') + assert num_classes is not None, \ + 'The `num_classes` must be specified if no `pred_score`.' + result['pred_label'] = data_sample['pred_label'].cpu() + result['num_classes'] = num_classes + result['gt_label'] = data_sample['gt_label'].cpu() + # Save the result to `self.results`. + self.results.append(result) + + def compute_metrics(self, results: List): + """Compute the metrics from processed results. + + Args: + results (list): The processed results of each batch. + + Returns: + Dict: The computed metrics. The keys are the names of the metrics, + and the values are corresponding results. + """ + # NOTICE: don't access `self.results` from the method. `self.results` + # are a list of results from multiple batch, while the input `results` + # are the collected results. + metrics = {} + + def pack_results(precision, recall, f1_score, support): + single_metrics = {} + if 'precision' in self.items: + single_metrics['precision'] = precision + if 'recall' in self.items: + single_metrics['recall'] = recall + if 'f1-score' in self.items: + single_metrics['f1-score'] = f1_score + if 'support' in self.items: + single_metrics['support'] = support + return single_metrics + + # concat + target = torch.cat([res['gt_label'] for res in results]) + if 'pred_score' in results[0]: + pred = torch.stack([res['pred_score'] for res in results]) + metrics_list = self.calculate( + pred, target, thrs=self.thrs, average=self.average) + + multi_thrs = len(self.thrs) > 1 + for i, thr in enumerate(self.thrs): + if multi_thrs: + suffix = '_no-thr' if thr is None else f'_thr-{thr:.2f}' + else: + suffix = '' + + for k, v in pack_results(*metrics_list[i]).items(): + metrics[k + suffix] = v + else: + # If only label in the `pred_label`. + pred = torch.cat([res['pred_label'] for res in results]) + res = self.calculate( + pred, + target, + average=self.average, + num_classes=results[0]['num_classes']) + metrics = pack_results(*res) + + result_metrics = dict() + for k, v in metrics.items(): + + if self.average is None: + result_metrics[k + '_classwise'] = v.cpu().detach().tolist() + elif self.average == 'micro': + result_metrics[k + f'_{self.average}'] = v.item() + else: + result_metrics[k] = v.item() + + return result_metrics + + @staticmethod + def calculate( + pred: Union[torch.Tensor, np.ndarray, Sequence], + target: Union[torch.Tensor, np.ndarray, Sequence], + thrs: Sequence[Union[float, None]] = (0., ), + average: Optional[str] = 'macro', + num_classes: Optional[int] = None, + ) -> Union[torch.Tensor, List[torch.Tensor]]: + """Calculate the precision, recall, f1-score and support. + + Args: + pred (torch.Tensor | np.ndarray | Sequence): The prediction + results. It can be labels (N, ), or scores of every + class (N, C). + target (torch.Tensor | np.ndarray | Sequence): The target of + each prediction with shape (N, ). + thrs (Sequence[float | None]): Predictions with scores under + the thresholds are considered negative. It's only used + when ``pred`` is scores. None means no thresholds. + Defaults to (0., ). + average (str | None): How to calculate the final metrics from + the confusion matrix of every category. 
It supports three + modes: + + - `"macro"`: Calculate metrics for each category, and calculate + the mean value over all categories. + - `"micro"`: Average the confusion matrix over all categories + and calculate metrics on the mean confusion matrix. + - `None`: Calculate metrics of every category and output + directly. + + Defaults to "macro". + num_classes (Optional, int): The number of classes. If the ``pred`` + is label instead of scores, this argument is required. + Defaults to None. + + Returns: + Tuple: The tuple contains precision, recall and f1-score. + And the type of each item is: + + - torch.Tensor: If the ``pred`` is a sequence of label instead of + score (number of dimensions is 1). Only returns a tensor for + each metric. The shape is (1, ) if ``classwise`` is False, and + (C, ) if ``classwise`` is True. + - List[torch.Tensor]: If the ``pred`` is a sequence of score + (number of dimensions is 2). Return the metrics on each ``thrs``. + The shape of tensor is (1, ) if ``classwise`` is False, and (C, ) + if ``classwise`` is True. + """ + average_options = ['micro', 'macro', None] + assert average in average_options, 'Invalid `average` argument, ' \ + f'please specify from {average_options}.' + + pred = to_tensor(pred) + target = to_tensor(target).to(torch.int64) + assert pred.size(0) == target.size(0), \ + f"The size of pred ({pred.size(0)}) doesn't match "\ + f'the target ({target.size(0)}).' + + if pred.ndim == 1: + assert num_classes is not None, \ + 'Please specify the `num_classes` if the `pred` is labels ' \ + 'intead of scores.' + gt_positive = F.one_hot(target.flatten(), num_classes) + pred_positive = F.one_hot(pred.to(torch.int64), num_classes) + return _precision_recall_f1_support(pred_positive, gt_positive, + average) + else: + # For pred score, calculate on all thresholds. + num_classes = pred.size(1) + pred_score, pred_label = torch.topk(pred, k=1) + pred_score = pred_score.flatten() + pred_label = pred_label.flatten() + + gt_positive = F.one_hot(target.flatten(), num_classes) + + results = [] + for thr in thrs: + pred_positive = F.one_hot(pred_label, num_classes) + if thr is not None: + pred_positive[pred_score <= thr] = 0 + results.append( + _precision_recall_f1_support(pred_positive, gt_positive, + average)) + + return results + + +@METRICS.register_module() +class ConfusionMatrix(BaseMetric): + r"""A metric to calculate confusion matrix for single-label tasks. + + Args: + num_classes (int, optional): The number of classes. Defaults to None. + collect_device (str): Device name used for collecting results from + different ranks during distributed training. Must be 'cpu' or + 'gpu'. Defaults to 'cpu'. + prefix (str, optional): The prefix that will be added in the metric + names to disambiguate homonymous metrics of different evaluators. + If prefix is not provided in the argument, self.default_prefix + will be used instead. Defaults to None. + + Examples: + + 1. The basic usage. + + >>> import torch + >>> from mmpretrain.evaluation import ConfusionMatrix + >>> y_pred = [0, 1, 1, 3] + >>> y_true = [0, 2, 1, 3] + >>> ConfusionMatrix.calculate(y_pred, y_true, num_classes=4) + tensor([[1, 0, 0, 0], + [0, 1, 0, 0], + [0, 1, 0, 0], + [0, 0, 0, 1]]) + >>> # plot the confusion matrix + >>> import matplotlib.pyplot as plt + >>> y_score = torch.rand((1000, 10)) + >>> y_true = torch.randint(10, (1000, )) + >>> matrix = ConfusionMatrix.calculate(y_score, y_true) + >>> ConfusionMatrix().plot(matrix) + >>> plt.show() + + 2. In the config file + + .. 
code:: python + + val_evaluator = dict(type='ConfusionMatrix') + test_evaluator = dict(type='ConfusionMatrix') + """ # noqa: E501 + default_prefix = 'confusion_matrix' + + def __init__(self, + num_classes: Optional[int] = None, + collect_device: str = 'cpu', + prefix: Optional[str] = None) -> None: + super().__init__(collect_device, prefix) + + self.num_classes = num_classes + + def process(self, data_batch, data_samples: Sequence[dict]) -> None: + for data_sample in data_samples: + if 'pred_score' in data_sample: + pred_score = data_sample['pred_score'] + pred_label = pred_score.argmax(dim=0, keepdim=True) + self.num_classes = pred_score.size(0) + else: + pred_label = data_sample['pred_label'] + + self.results.append({ + 'pred_label': pred_label, + 'gt_label': data_sample['gt_label'], + }) + + def compute_metrics(self, results: list) -> dict: + pred_labels = [] + gt_labels = [] + for result in results: + pred_labels.append(result['pred_label']) + gt_labels.append(result['gt_label']) + confusion_matrix = ConfusionMatrix.calculate( + torch.cat(pred_labels), + torch.cat(gt_labels), + num_classes=self.num_classes) + return {'result': confusion_matrix} + + @staticmethod + def calculate(pred, target, num_classes=None) -> dict: + """Calculate the confusion matrix for single-label task. + + Args: + pred (torch.Tensor | np.ndarray | Sequence): The prediction + results. It can be labels (N, ), or scores of every + class (N, C). + target (torch.Tensor | np.ndarray | Sequence): The target of + each prediction with shape (N, ). + num_classes (Optional, int): The number of classes. If the ``pred`` + is label instead of scores, this argument is required. + Defaults to None. + + Returns: + torch.Tensor: The confusion matrix. + """ + pred = to_tensor(pred) + target_label = to_tensor(target).int() + + assert pred.size(0) == target_label.size(0), \ + f"The size of pred ({pred.size(0)}) doesn't match "\ + f'the target ({target_label.size(0)}).' + assert target_label.ndim == 1 + + if pred.ndim == 1: + assert num_classes is not None, \ + 'Please specify the `num_classes` if the `pred` is labels ' \ + 'intead of scores.' + pred_label = pred + else: + num_classes = num_classes or pred.size(1) + pred_label = torch.argmax(pred, dim=1).flatten() + + with torch.no_grad(): + indices = num_classes * target_label + pred_label + matrix = torch.bincount(indices, minlength=num_classes**2) + matrix = matrix.reshape(num_classes, num_classes) + + return matrix + + @staticmethod + def plot(confusion_matrix: torch.Tensor, + include_values: bool = False, + cmap: str = 'viridis', + classes: Optional[List[str]] = None, + colorbar: bool = True, + show: bool = True): + """Draw a confusion matrix by matplotlib. + + Modified from `Scikit-Learn + `_ + + Args: + confusion_matrix (torch.Tensor): The confusion matrix to draw. + include_values (bool): Whether to draw the values in the figure. + Defaults to False. + cmap (str): The color map to use. Defaults to use "viridis". + classes (list[str], optional): The names of categories. + Defaults to None, which means to use index number. + colorbar (bool): Whether to show the colorbar. Defaults to True. + show (bool): Whether to show the figure immediately. + Defaults to True. 
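+
+        Example:
+            A minimal sketch of drawing and saving a matrix without blocking
+            the process (the class names below are only placeholders):
+
+            >>> from mmpretrain.evaluation import ConfusionMatrix
+            >>> matrix = ConfusionMatrix.calculate(
+            ...     [0, 1, 1, 3], [0, 2, 1, 3], num_classes=4)
+            >>> fig = ConfusionMatrix.plot(
+            ...     matrix, include_values=True,
+            ...     classes=['a', 'b', 'c', 'd'], show=False)
+            >>> fig.savefig('confusion_matrix.png')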
+ """ # noqa: E501 + import matplotlib.pyplot as plt + + fig, ax = plt.subplots(figsize=(10, 10)) + + num_classes = confusion_matrix.size(0) + + im_ = ax.imshow(confusion_matrix, interpolation='nearest', cmap=cmap) + text_ = None + cmap_min, cmap_max = im_.cmap(0), im_.cmap(1.0) + + if include_values: + text_ = np.empty_like(confusion_matrix, dtype=object) + + # print text with appropriate color depending on background + thresh = (confusion_matrix.max() + confusion_matrix.min()) / 2.0 + + for i, j in product(range(num_classes), range(num_classes)): + color = cmap_max if confusion_matrix[i, + j] < thresh else cmap_min + + text_cm = format(confusion_matrix[i, j], '.2g') + text_d = format(confusion_matrix[i, j], 'd') + if len(text_d) < len(text_cm): + text_cm = text_d + + text_[i, j] = ax.text( + j, i, text_cm, ha='center', va='center', color=color) + + display_labels = classes or np.arange(num_classes) + + if colorbar: + fig.colorbar(im_, ax=ax) + ax.set( + xticks=np.arange(num_classes), + yticks=np.arange(num_classes), + xticklabels=display_labels, + yticklabels=display_labels, + ylabel='True label', + xlabel='Predicted label', + ) + ax.invert_yaxis() + ax.xaxis.tick_top() + + ax.set_ylim((num_classes - 0.5, -0.5)) + # Automatically rotate the x labels. + fig.autofmt_xdate(ha='center') + + if show: + plt.show() + return fig diff --git a/mmpretrain/evaluation/metrics/visual_grounding_eval.py b/mmpretrain/evaluation/metrics/visual_grounding_eval.py new file mode 100644 index 0000000000000000000000000000000000000000..ad16e5adf4660496b3a984087294ed9c0fee6537 --- /dev/null +++ b/mmpretrain/evaluation/metrics/visual_grounding_eval.py @@ -0,0 +1,85 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import List + +import torch +import torchvision.ops.boxes as boxes +from mmengine.evaluator import BaseMetric + +from mmpretrain.registry import METRICS + + +def aligned_box_iou(boxes1: torch.Tensor, boxes2: torch.Tensor): + area1 = boxes.box_area(boxes1) + area2 = boxes.box_area(boxes2) + + lt = torch.max(boxes1[:, :2], boxes2[:, :2]) # (B, 2) + rb = torch.min(boxes1[:, 2:], boxes2[:, 2:]) # (B, 2) + + wh = boxes._upcast(rb - lt).clamp(min=0) # (B, 2) + inter = wh[:, 0] * wh[:, 1] # (B, ) + + union = area1 + area2 - inter + iou = inter / union + return iou + + +@METRICS.register_module() +class VisualGroundingMetric(BaseMetric): + """Visual Grounding evaluator. + + Calculate the box mIOU and box grounding accuracy for visual grounding + model. + + Args: + collect_device (str): Device name used for collecting results from + different ranks during distributed training. Must be 'cpu' or + 'gpu'. Defaults to 'cpu'. + prefix (str, optional): The prefix that will be added in the metric + names to disambiguate homonymous metrics of different evaluators. + If prefix is not provided in the argument, self.default_prefix + will be used instead. Should be modified according to the + `retrieval_type` for unambiguous results. Defaults to TR. + """ + default_prefix = 'visual-grounding' + + def process(self, data_batch, data_samples): + """Process one batch of data samples. + + The processed results should be stored in ``self.results``, which will + be used to computed the metrics when all batches have been processed. + + Args: + data_batch: A batch of data from the dataloader. + data_samples (Sequence[dict]): A batch of outputs from the model. 
+ """ + for preds in data_samples: + + pred_box = preds['pred_bboxes'].squeeze() + box_gt = torch.Tensor(preds['gt_bboxes']).squeeze() + + result = { + 'box': pred_box.to('cpu').squeeze(), + 'box_target': box_gt.squeeze(), + } + + self.results.append(result) + + def compute_metrics(self, results: List): + """Compute the metrics from processed results. + + Args: + results (dict): The processed results of each batch. + + Returns: + Dict: The computed metrics. The keys are the names of the metrics, + and the values are corresponding results. + """ + pred_boxes = torch.stack([each['box'] for each in results]) + gt_boxes = torch.stack([each['box_target'] for each in results]) + iou = aligned_box_iou(pred_boxes, gt_boxes) + accu_num = torch.sum(iou >= 0.5) + + miou = torch.mean(iou) + acc = accu_num / len(gt_boxes) + coco_val = {'miou': miou, 'acc': acc} + return coco_val diff --git a/mmpretrain/evaluation/metrics/voc_multi_label.py b/mmpretrain/evaluation/metrics/voc_multi_label.py new file mode 100644 index 0000000000000000000000000000000000000000..1034852722796271c7ade9d75c3442cce8f1d0d1 --- /dev/null +++ b/mmpretrain/evaluation/metrics/voc_multi_label.py @@ -0,0 +1,98 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Optional, Sequence + +from mmpretrain.registry import METRICS +from mmpretrain.structures import label_to_onehot +from .multi_label import AveragePrecision, MultiLabelMetric + + +class VOCMetricMixin: + """A mixin class for VOC dataset metrics, VOC annotations have extra + `difficult` attribute for each object, therefore, extra option is needed + for calculating VOC metrics. + + Args: + difficult_as_postive (Optional[bool]): Whether to map the difficult + labels as positive in one-hot ground truth for evaluation. If it + set to True, map difficult gt labels to positive ones(1), If it + set to False, map difficult gt labels to negative ones(0). + Defaults to None, the difficult labels will be set to '-1'. + """ + + def __init__(self, + *arg, + difficult_as_positive: Optional[bool] = None, + **kwarg): + self.difficult_as_positive = difficult_as_positive + super().__init__(*arg, **kwarg) + + def process(self, data_batch, data_samples: Sequence[dict]): + """Process one batch of data samples. + + The processed results should be stored in ``self.results``, which will + be used to computed the metrics when all batches have been processed. + + Args: + data_batch: A batch of data from the dataloader. + data_samples (Sequence[dict]): A batch of outputs from the model. + """ + for data_sample in data_samples: + result = dict() + gt_label = data_sample['gt_label'] + gt_label_difficult = data_sample['gt_label_difficult'] + + result['pred_score'] = data_sample['pred_score'].clone() + num_classes = result['pred_score'].size()[-1] + + if 'gt_score' in data_sample: + result['gt_score'] = data_sample['gt_score'].clone() + else: + result['gt_score'] = label_to_onehot(gt_label, num_classes) + + # VOC annotation labels all the objects in a single image + # therefore, some categories are appeared both in + # difficult objects and non-difficult objects. + # Here we reckon those labels which are only exists in difficult + # objects as difficult labels. 
+ difficult_label = set(gt_label_difficult) - ( + set(gt_label_difficult) & set(gt_label.tolist())) + + # set difficult label for better eval + if self.difficult_as_positive is None: + result['gt_score'][[*difficult_label]] = -1 + elif self.difficult_as_positive: + result['gt_score'][[*difficult_label]] = 1 + + # Save the result to `self.results`. + self.results.append(result) + + +@METRICS.register_module() +class VOCMultiLabelMetric(VOCMetricMixin, MultiLabelMetric): + """A collection of metrics for multi-label multi-class classification task + based on confusion matrix for VOC dataset. + + It includes precision, recall, f1-score and support. + + Args: + difficult_as_postive (Optional[bool]): Whether to map the difficult + labels as positive in one-hot ground truth for evaluation. If it + set to True, map difficult gt labels to positive ones(1), If it + set to False, map difficult gt labels to negative ones(0). + Defaults to None, the difficult labels will be set to '-1'. + **kwarg: Refers to `MultiLabelMetric` for detailed docstrings. + """ + + +@METRICS.register_module() +class VOCAveragePrecision(VOCMetricMixin, AveragePrecision): + """Calculate the average precision with respect of classes for VOC dataset. + + Args: + difficult_as_postive (Optional[bool]): Whether to map the difficult + labels as positive in one-hot ground truth for evaluation. If it + set to True, map difficult gt labels to positive ones(1), If it + set to False, map difficult gt labels to negative ones(0). + Defaults to None, the difficult labels will be set to '-1'. + **kwarg: Refers to `AveragePrecision` for detailed docstrings. + """ diff --git a/mmpretrain/evaluation/metrics/vqa.py b/mmpretrain/evaluation/metrics/vqa.py new file mode 100644 index 0000000000000000000000000000000000000000..fd77ba9bc23e013c41ac095810740bdb71d33fb3 --- /dev/null +++ b/mmpretrain/evaluation/metrics/vqa.py @@ -0,0 +1,315 @@ +# Copyright (c) OpenMMLab. All rights reserved. +# Partly adopted from https://github.com/GT-Vision-Lab/VQA +# Copyright (c) 2014, Aishwarya Agrawal +from typing import List, Optional + +import mmengine +from mmengine.evaluator import BaseMetric +from mmengine.logging import MMLogger + +from mmpretrain.registry import METRICS + + +def _process_punctuation(inText): + import re + outText = inText + punct = [ + ';', r'/', '[', ']', '"', '{', '}', '(', ')', '=', '+', '\\', '_', '-', + '>', '<', '@', '`', ',', '?', '!' 
+ ] + commaStrip = re.compile('(\d)(,)(\d)') # noqa: W605 + periodStrip = re.compile('(?!<=\d)(\.)(?!\d)') # noqa: W605 + for p in punct: + if (p + ' ' in inText or ' ' + p in inText) or (re.search( + commaStrip, inText) is not None): + outText = outText.replace(p, '') + else: + outText = outText.replace(p, ' ') + outText = periodStrip.sub('', outText, re.UNICODE) + return outText + + +def _process_digit_article(inText): + outText = [] + tempText = inText.lower().split() + articles = ['a', 'an', 'the'] + manualMap = { + 'none': '0', + 'zero': '0', + 'one': '1', + 'two': '2', + 'three': '3', + 'four': '4', + 'five': '5', + 'six': '6', + 'seven': '7', + 'eight': '8', + 'nine': '9', + 'ten': '10', + } + contractions = { + 'aint': "ain't", + 'arent': "aren't", + 'cant': "can't", + 'couldve': "could've", + 'couldnt': "couldn't", + "couldn'tve": "couldn't've", + "couldnt've": "couldn't've", + 'didnt': "didn't", + 'doesnt': "doesn't", + 'dont': "don't", + 'hadnt': "hadn't", + "hadnt've": "hadn't've", + "hadn'tve": "hadn't've", + 'hasnt': "hasn't", + 'havent': "haven't", + 'hed': "he'd", + "hed've": "he'd've", + "he'dve": "he'd've", + 'hes': "he's", + 'howd': "how'd", + 'howll': "how'll", + 'hows': "how's", + "Id've": "I'd've", + "I'dve": "I'd've", + 'Im': "I'm", + 'Ive': "I've", + 'isnt': "isn't", + 'itd': "it'd", + "itd've": "it'd've", + "it'dve": "it'd've", + 'itll': "it'll", + "let's": "let's", + 'maam': "ma'am", + 'mightnt': "mightn't", + "mightnt've": "mightn't've", + "mightn'tve": "mightn't've", + 'mightve': "might've", + 'mustnt': "mustn't", + 'mustve': "must've", + 'neednt': "needn't", + 'notve': "not've", + 'oclock': "o'clock", + 'oughtnt': "oughtn't", + "ow's'at": "'ow's'at", + "'ows'at": "'ow's'at", + "'ow'sat": "'ow's'at", + 'shant': "shan't", + "shed've": "she'd've", + "she'dve": "she'd've", + "she's": "she's", + 'shouldve': "should've", + 'shouldnt': "shouldn't", + "shouldnt've": "shouldn't've", + "shouldn'tve": "shouldn't've", + "somebody'd": 'somebodyd', + "somebodyd've": "somebody'd've", + "somebody'dve": "somebody'd've", + 'somebodyll': "somebody'll", + 'somebodys': "somebody's", + 'someoned': "someone'd", + "someoned've": "someone'd've", + "someone'dve": "someone'd've", + 'someonell': "someone'll", + 'someones': "someone's", + 'somethingd': "something'd", + "somethingd've": "something'd've", + "something'dve": "something'd've", + 'somethingll': "something'll", + 'thats': "that's", + 'thered': "there'd", + "thered've": "there'd've", + "there'dve": "there'd've", + 'therere': "there're", + 'theres': "there's", + 'theyd': "they'd", + "theyd've": "they'd've", + "they'dve": "they'd've", + 'theyll': "they'll", + 'theyre': "they're", + 'theyve': "they've", + 'twas': "'twas", + 'wasnt': "wasn't", + "wed've": "we'd've", + "we'dve": "we'd've", + 'weve': "we've", + 'werent': "weren't", + 'whatll': "what'll", + 'whatre': "what're", + 'whats': "what's", + 'whatve': "what've", + 'whens': "when's", + 'whered': "where'd", + 'wheres': "where's", + 'whereve': "where've", + 'whod': "who'd", + "whod've": "who'd've", + "who'dve": "who'd've", + 'wholl': "who'll", + 'whos': "who's", + 'whove': "who've", + 'whyll': "why'll", + 'whyre': "why're", + 'whys': "why's", + 'wont': "won't", + 'wouldve': "would've", + 'wouldnt': "wouldn't", + "wouldnt've": "wouldn't've", + "wouldn'tve": "wouldn't've", + 'yall': "y'all", + "yall'll": "y'all'll", + "y'allll": "y'all'll", + "yall'd've": "y'all'd've", + "y'alld've": "y'all'd've", + "y'all'dve": "y'all'd've", + 'youd': "you'd", + "youd've": "you'd've", + "you'dve": 
"you'd've", + 'youll': "you'll", + 'youre': "you're", + 'youve': "you've", + } + for word in tempText: + word = manualMap.setdefault(word, word) + if word not in articles: + outText.append(word) + for wordId, word in enumerate(outText): + if word in contractions: + outText[wordId] = contractions[word] + outText = ' '.join(outText) + return outText + + +@METRICS.register_module() +class VQAAcc(BaseMetric): + '''VQA Acc metric. + Args: + + collect_device (str): Device name used for collecting results from + different ranks during distributed training. Must be 'cpu' or + 'gpu'. Defaults to 'cpu'. + prefix (str, optional): The prefix that will be added in the metric + names to disambiguate homonymous metrics of different evaluators. + If prefix is not provided in the argument, self.default_prefix + will be used instead. Should be modified according to the + `retrieval_type` for unambiguous results. Defaults to TR. + ''' + default_prefix = 'VQA' + + def __init__(self, + full_score_weight: float = 0.3, + collect_device: str = 'cpu', + prefix: Optional[str] = None): + super().__init__(collect_device=collect_device, prefix=prefix) + self.full_score_weight = full_score_weight + + def process(self, data_batch, data_samples): + """Process one batch of data samples. + + The processed results should be stored in ``self.results``, which will + be used to computed the metrics when all batches have been processed. + + Args: + data_batch: A batch of data from the dataloader. + data_samples (Sequence[dict]): A batch of outputs from the model. + """ + for sample in data_samples: + gt_answer = sample.get('gt_answer') + gt_answer_weight = sample.get('gt_answer_weight') + if isinstance(gt_answer, str): + gt_answer = [gt_answer] + if gt_answer_weight is None: + gt_answer_weight = [1. / (len(gt_answer))] * len(gt_answer) + + result = { + 'pred_answer': sample.get('pred_answer'), + 'gt_answer': gt_answer, + 'gt_answer_weight': gt_answer_weight, + } + + self.results.append(result) + + def compute_metrics(self, results: List): + """Compute the metrics from processed results. + + Args: + results (dict): The processed results of each batch. + + Returns: + Dict: The computed metrics. The keys are the names of the metrics, + and the values are corresponding results. + """ + acc = [] + for result in results: + pred_answer = self._process_answer(result['pred_answer']) + gt_answer = [ + self._process_answer(answer) for answer in result['gt_answer'] + ] + answer_weight = result['gt_answer_weight'] + + weight_sum = 0 + for i, gt in enumerate(gt_answer): + if gt == pred_answer: + weight_sum += answer_weight[i] + vqa_acc = min(1.0, weight_sum / self.full_score_weight) + acc.append(vqa_acc) + + accuracy = sum(acc) / len(acc) * 100 + + metrics = {'acc': accuracy} + return metrics + + def _process_answer(self, answer): + answer = answer.replace('\n', ' ') + answer = answer.replace('\t', ' ') + answer = answer.strip() + answer = _process_punctuation(answer) + answer = _process_digit_article(answer) + return answer + + +@METRICS.register_module() +class ReportVQA(BaseMetric): + """Dump VQA result to the standard json format for VQA evaluation. + + Args: + file_path (str): The file path to save the result file. + collect_device (str): Device name used for collecting results from + different ranks during distributed training. Must be 'cpu' or + 'gpu'. Defaults to 'cpu'. + prefix (str, optional): The prefix that will be added in the metric + names to disambiguate homonymous metrics of different evaluators. 
+ If prefix is not provided in the argument, self.default_prefix + will be used instead. Should be modified according to the + `retrieval_type` for unambiguous results. Defaults to TR. + """ + default_prefix = 'VQA' + + def __init__(self, + file_path: str, + collect_device: str = 'cpu', + prefix: Optional[str] = None): + super().__init__(collect_device=collect_device, prefix=prefix) + if not file_path.endswith('.json'): + raise ValueError('The output file must be a json file.') + self.file_path = file_path + + def process(self, data_batch, data_samples) -> None: + """transfer tensors in predictions to CPU.""" + for sample in data_samples: + question_id = sample['question_id'] + pred_answer = sample['pred_answer'] + + result = { + 'question_id': int(question_id), + 'answer': pred_answer, + } + + self.results.append(result) + + def compute_metrics(self, results: List): + """Dump the result to json file.""" + mmengine.dump(results, self.file_path) + logger = MMLogger.get_current_instance() + logger.info(f'Results has been saved to {self.file_path}.') + return {} diff --git a/mmpretrain/models/__init__.py b/mmpretrain/models/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..ba05735b26a96cf532486f6f31d0d93bf6d30781 --- /dev/null +++ b/mmpretrain/models/__init__.py @@ -0,0 +1,19 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from .backbones import * # noqa: F401,F403 +from .builder import (BACKBONES, CLASSIFIERS, HEADS, LOSSES, NECKS, + build_backbone, build_classifier, build_head, build_loss, + build_neck) +from .classifiers import * # noqa: F401,F403 +from .heads import * # noqa: F401,F403 +from .losses import * # noqa: F401,F403 +from .multimodal import * # noqa: F401,F403 +from .necks import * # noqa: F401,F403 +from .retrievers import * # noqa: F401,F403 +from .selfsup import * # noqa: F401,F403 +from .tta import * # noqa: F401,F403 +from .utils import * # noqa: F401,F403 + +__all__ = [ + 'BACKBONES', 'HEADS', 'NECKS', 'LOSSES', 'CLASSIFIERS', 'build_backbone', + 'build_head', 'build_neck', 'build_loss', 'build_classifier' +] diff --git a/mmpretrain/models/backbones/__init__.py b/mmpretrain/models/backbones/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..d9830f12c0b7dc4d2bcc6b55f167b351313e1090 --- /dev/null +++ b/mmpretrain/models/backbones/__init__.py @@ -0,0 +1,123 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+from .alexnet import AlexNet +from .beit import BEiTViT +from .conformer import Conformer +from .convmixer import ConvMixer +from .convnext import ConvNeXt +from .cspnet import CSPDarkNet, CSPNet, CSPResNet, CSPResNeXt +from .davit import DaViT +from .deit import DistilledVisionTransformer +from .deit3 import DeiT3 +from .densenet import DenseNet +from .edgenext import EdgeNeXt +from .efficientformer import EfficientFormer +from .efficientnet import EfficientNet +from .efficientnet_v2 import EfficientNetV2 +from .hornet import HorNet +from .hrnet import HRNet +from .inception_v3 import InceptionV3 +from .lenet import LeNet5 +from .levit import LeViT +from .mixmim import MixMIMTransformer +from .mlp_mixer import MlpMixer +from .mobilenet_v2 import MobileNetV2 +from .mobilenet_v3 import MobileNetV3 +from .mobileone import MobileOne +from .mobilevit import MobileViT +from .mvit import MViT +from .poolformer import PoolFormer +from .regnet import RegNet +from .replknet import RepLKNet +from .repmlp import RepMLPNet +from .repvgg import RepVGG +from .res2net import Res2Net +from .resnest import ResNeSt +from .resnet import ResNet, ResNetV1c, ResNetV1d +from .resnet_cifar import ResNet_CIFAR +from .resnext import ResNeXt +from .revvit import RevVisionTransformer +from .riformer import RIFormer +from .seresnet import SEResNet +from .seresnext import SEResNeXt +from .shufflenet_v1 import ShuffleNetV1 +from .shufflenet_v2 import ShuffleNetV2 +from .swin_transformer import SwinTransformer +from .swin_transformer_v2 import SwinTransformerV2 +from .t2t_vit import T2T_ViT +from .timm_backbone import TIMMBackbone +from .tinyvit import TinyViT +from .tnt import TNT +from .twins import PCPVT, SVT +from .van import VAN +from .vgg import VGG +from .vig import PyramidVig, Vig +from .vision_transformer import VisionTransformer +from .vit_eva02 import ViTEVA02 +from .vit_sam import ViTSAM +from .xcit import XCiT + +__all__ = [ + 'LeNet5', + 'AlexNet', + 'VGG', + 'RegNet', + 'ResNet', + 'ResNeXt', + 'ResNetV1d', + 'ResNeSt', + 'ResNet_CIFAR', + 'SEResNet', + 'SEResNeXt', + 'ShuffleNetV1', + 'ShuffleNetV2', + 'MobileNetV2', + 'MobileNetV3', + 'VisionTransformer', + 'SwinTransformer', + 'TNT', + 'TIMMBackbone', + 'T2T_ViT', + 'Res2Net', + 'RepVGG', + 'Conformer', + 'MlpMixer', + 'DistilledVisionTransformer', + 'PCPVT', + 'SVT', + 'EfficientNet', + 'EfficientNetV2', + 'ConvNeXt', + 'HRNet', + 'ResNetV1c', + 'ConvMixer', + 'EdgeNeXt', + 'CSPDarkNet', + 'CSPResNet', + 'CSPResNeXt', + 'CSPNet', + 'RepLKNet', + 'RepMLPNet', + 'PoolFormer', + 'RIFormer', + 'DenseNet', + 'VAN', + 'InceptionV3', + 'MobileOne', + 'EfficientFormer', + 'SwinTransformerV2', + 'MViT', + 'DeiT3', + 'HorNet', + 'MobileViT', + 'DaViT', + 'BEiTViT', + 'RevVisionTransformer', + 'MixMIMTransformer', + 'TinyViT', + 'LeViT', + 'Vig', + 'PyramidVig', + 'XCiT', + 'ViTSAM', + 'ViTEVA02', +] diff --git a/mmpretrain/models/backbones/alexnet.py b/mmpretrain/models/backbones/alexnet.py new file mode 100644 index 0000000000000000000000000000000000000000..f7c2891fdd2c878e243331f572f6e3e562232d46 --- /dev/null +++ b/mmpretrain/models/backbones/alexnet.py @@ -0,0 +1,56 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch.nn as nn + +from mmpretrain.registry import MODELS +from .base_backbone import BaseBackbone + + +@MODELS.register_module() +class AlexNet(BaseBackbone): + """`AlexNet `_ backbone. + + The input for AlexNet is a 224x224 RGB image. + + Args: + num_classes (int): number of classes for classification. 
+ The default value is -1, which uses the backbone as + a feature extractor without the top classifier. + """ + + def __init__(self, num_classes=-1): + super(AlexNet, self).__init__() + self.num_classes = num_classes + self.features = nn.Sequential( + nn.Conv2d(3, 64, kernel_size=11, stride=4, padding=2), + nn.ReLU(inplace=True), + nn.MaxPool2d(kernel_size=3, stride=2), + nn.Conv2d(64, 192, kernel_size=5, padding=2), + nn.ReLU(inplace=True), + nn.MaxPool2d(kernel_size=3, stride=2), + nn.Conv2d(192, 384, kernel_size=3, padding=1), + nn.ReLU(inplace=True), + nn.Conv2d(384, 256, kernel_size=3, padding=1), + nn.ReLU(inplace=True), + nn.Conv2d(256, 256, kernel_size=3, padding=1), + nn.ReLU(inplace=True), + nn.MaxPool2d(kernel_size=3, stride=2), + ) + if self.num_classes > 0: + self.classifier = nn.Sequential( + nn.Dropout(), + nn.Linear(256 * 6 * 6, 4096), + nn.ReLU(inplace=True), + nn.Dropout(), + nn.Linear(4096, 4096), + nn.ReLU(inplace=True), + nn.Linear(4096, num_classes), + ) + + def forward(self, x): + + x = self.features(x) + if self.num_classes > 0: + x = x.view(x.size(0), 256 * 6 * 6) + x = self.classifier(x) + + return (x, ) diff --git a/mmpretrain/models/backbones/base_backbone.py b/mmpretrain/models/backbones/base_backbone.py new file mode 100644 index 0000000000000000000000000000000000000000..751aa956ba2ad178ea9e40875b6e610ee7bbbcd3 --- /dev/null +++ b/mmpretrain/models/backbones/base_backbone.py @@ -0,0 +1,33 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from abc import ABCMeta, abstractmethod + +from mmengine.model import BaseModule + + +class BaseBackbone(BaseModule, metaclass=ABCMeta): + """Base backbone. + + This class defines the basic functions of a backbone. Any backbone that + inherits this class should at least define its own `forward` function. + """ + + def __init__(self, init_cfg=None): + super(BaseBackbone, self).__init__(init_cfg) + + @abstractmethod + def forward(self, x): + """Forward computation. + + Args: + x (tensor | tuple[tensor]): x could be a Torch.tensor or a tuple of + Torch.tensor, containing input data for forward computation. + """ + pass + + def train(self, mode=True): + """Set module status before forward computation. + + Args: + mode (bool): Whether it is train_mode or test_mode + """ + super(BaseBackbone, self).train(mode) diff --git a/mmpretrain/models/backbones/beit.py b/mmpretrain/models/backbones/beit.py new file mode 100644 index 0000000000000000000000000000000000000000..8f64ae2029b8be47d938fdef25aed9c0058ef307 --- /dev/null +++ b/mmpretrain/models/backbones/beit.py @@ -0,0 +1,522 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import List, Optional, Sequence, Tuple, Union + +import numpy as np +import torch +import torch.nn as nn +from mmcv.cnn.bricks.drop import build_dropout +from mmcv.cnn.bricks.transformer import FFN, PatchEmbed +from mmengine.model import BaseModule, ModuleList + +from mmpretrain.registry import MODELS +from ..utils import (BEiTAttention, build_norm_layer, resize_pos_embed, + resize_relative_position_bias_table, to_2tuple) +from .vision_transformer import TransformerEncoderLayer, VisionTransformer + + +class RelativePositionBias(BaseModule): + """Relative Position Bias. + + This module is copied from + https://github.com/microsoft/unilm/blob/master/beit/modeling_finetune.py#L209. + + Args: + window_size (Sequence[int]): The window size of the relative + position bias. + num_heads (int): The number of head in multi-head attention. 
+ with_cls_token (bool): To indicate the backbone has cls_token or not. + Defaults to True. + """ + + def __init__( + self, + window_size: Sequence[int], + num_heads: int, + with_cls_token: bool = True, + ) -> None: + super().__init__() + self.window_size = window_size + if with_cls_token: + num_extra_tokens = 3 + else: + num_extra_tokens = 0 + # cls to token & token to cls & cls to cls + self.num_relative_distance = (2 * window_size[0] - 1) * ( + 2 * window_size[1] - 1) + num_extra_tokens + self.relative_position_bias_table = nn.Parameter( + torch.zeros(self.num_relative_distance, + num_heads)) # 2*Wh-1 * 2*Ww-1, nH + + # get pair-wise relative position index for each + # token inside the window + coords_h = torch.arange(window_size[0]) + coords_w = torch.arange(window_size[1]) + coords = torch.stack(torch.meshgrid([coords_h, coords_w])) # 2, Wh, Ww + coords_flatten = torch.flatten(coords, 1) # 2, Wh*Ww + relative_coords = coords_flatten[:, :, None] -\ + coords_flatten[:, None, :] # 2, Wh*Ww, Wh*Ww + relative_coords = relative_coords.permute( + 1, 2, 0).contiguous() # Wh*Ww, Wh*Ww, 2 + relative_coords[:, :, 0] += window_size[0] - 1 # shift to start from 0 + relative_coords[:, :, 1] += window_size[1] - 1 + relative_coords[:, :, 0] *= 2 * window_size[1] - 1 + if with_cls_token: + relative_position_index = torch.zeros( + size=(window_size[0] * window_size[1] + 1, ) * 2, + dtype=relative_coords.dtype) + relative_position_index[1:, 1:] = relative_coords.sum( + -1) # Wh*Ww, Wh*Ww + relative_position_index[0, 0:] = self.num_relative_distance - 3 + relative_position_index[0:, 0] = self.num_relative_distance - 2 + relative_position_index[0, 0] = self.num_relative_distance - 1 + else: + relative_position_index = torch.zeros( + size=(window_size[0] * window_size[1], ) * 2, + dtype=relative_coords.dtype) + relative_position_index = relative_coords.sum(-1) # Wh*Ww, Wh*Ww + + self.register_buffer('relative_position_index', + relative_position_index) + + def forward(self) -> torch.Tensor: + # Wh*Ww,Wh*Ww,nH + relative_position_bias = self.relative_position_bias_table[ + self.relative_position_index.view(-1)].view( + self.window_size[0] * self.window_size[1] + 1, + self.window_size[0] * self.window_size[1] + 1, -1) + return relative_position_bias.permute( + 2, 0, 1).contiguous() # nH, Wh*Ww, Wh*Ww + + +class BEiTTransformerEncoderLayer(TransformerEncoderLayer): + """Implements one encoder layer in BEiT. + + Comparing with conventional ``TransformerEncoderLayer``, this module + adds weights to the shortcut connection. In addition, ``BEiTAttention`` + is used to replace the original ``MultiheadAttention`` in + ``TransformerEncoderLayer``. + + Args: + embed_dims (int): The feature dimension. + num_heads (int): Parallel attention heads. + feedforward_channels (int): The hidden dimension for FFNs. + layer_scale_init_value (float): The initialization value for + the learnable scaling of attention and FFN. 1 means no scaling. + drop_rate (float): Probability of an element to be zeroed + after the feed forward layer. Defaults to 0. + window_size (tuple[int]): The height and width of the window. + Defaults to None. + use_rel_pos_bias (bool): Whether to use unique relative position bias, + if False, use shared relative position bias defined in backbone. + attn_drop_rate (float): The drop out rate for attention layer. + Defaults to 0.0. + drop_path_rate (float): Stochastic depth rate. Default 0.0. + num_fcs (int): The number of fully-connected layers for FFNs. + Defaults to 2. 
+ bias (bool | str): The option to add leanable bias for q, k, v. If bias + is True, it will add leanable bias. If bias is 'qv_bias', it will + only add leanable bias for q, v. If bias is False, it will not add + bias for q, k, v. Default to 'qv_bias'. + act_cfg (dict): The activation config for FFNs. + Defaults to ``dict(type='GELU')``. + norm_cfg (dict): Config dict for normalization layer. + Defaults to dict(type='LN'). + attn_cfg (dict): The configuration for the attention layer. + Defaults to an empty dict. + ffn_cfg (dict): The configuration for the ffn layer. + Defaults to ``dict(add_identity=False)``. + init_cfg (dict or List[dict], optional): Initialization config dict. + Defaults to None. + """ + + def __init__(self, + embed_dims: int, + num_heads: int, + feedforward_channels: int, + layer_scale_init_value: float, + window_size: Tuple[int, int], + use_rel_pos_bias: bool, + drop_rate: float = 0., + attn_drop_rate: float = 0., + drop_path_rate: float = 0., + num_fcs: int = 2, + bias: Union[str, bool] = 'qv_bias', + act_cfg: dict = dict(type='GELU'), + norm_cfg: dict = dict(type='LN'), + attn_cfg: dict = dict(), + ffn_cfg: dict = dict(add_identity=False), + init_cfg: Optional[Union[dict, List[dict]]] = None) -> None: + super().__init__( + embed_dims=embed_dims, + num_heads=num_heads, + feedforward_channels=feedforward_channels, + attn_drop_rate=attn_drop_rate, + drop_path_rate=0., + drop_rate=0., + num_fcs=num_fcs, + act_cfg=act_cfg, + norm_cfg=norm_cfg, + init_cfg=init_cfg) + + attn_cfg = { + 'window_size': window_size, + 'use_rel_pos_bias': use_rel_pos_bias, + 'qk_scale': None, + 'embed_dims': embed_dims, + 'num_heads': num_heads, + 'attn_drop': attn_drop_rate, + 'proj_drop': drop_rate, + 'bias': bias, + **attn_cfg, + } + self.attn = BEiTAttention(**attn_cfg) + + ffn_cfg = { + 'embed_dims': embed_dims, + 'feedforward_channels': feedforward_channels, + 'num_fcs': num_fcs, + 'ffn_drop': drop_rate, + 'dropout_layer': dict(type='DropPath', drop_prob=drop_path_rate), + 'act_cfg': act_cfg, + **ffn_cfg, + } + self.ffn = FFN(**ffn_cfg) + + # NOTE: drop path for stochastic depth, we shall see if + # this is better than dropout here + dropout_layer = dict(type='DropPath', drop_prob=drop_path_rate) + self.drop_path = build_dropout( + dropout_layer) if dropout_layer else nn.Identity() + + if layer_scale_init_value > 0: + self.gamma_1 = nn.Parameter( + layer_scale_init_value * torch.ones((embed_dims)), + requires_grad=True) + self.gamma_2 = nn.Parameter( + layer_scale_init_value * torch.ones((embed_dims)), + requires_grad=True) + else: + self.gamma_1, self.gamma_2 = None, None + + def forward(self, x: torch.Tensor, + rel_pos_bias: torch.Tensor) -> torch.Tensor: + if self.gamma_1 is None: + x = x + self.drop_path( + self.attn(self.ln1(x), rel_pos_bias=rel_pos_bias)) + x = x + self.drop_path(self.ffn(self.ln2(x))) + else: + x = x + self.drop_path(self.gamma_1 * self.attn( + self.ln1(x), rel_pos_bias=rel_pos_bias)) + x = x + self.drop_path(self.gamma_2 * self.ffn(self.ln2(x))) + return x + + +@MODELS.register_module() +class BEiTViT(VisionTransformer): + """Backbone for BEiT. + + A PyTorch implement of : `BEiT: BERT Pre-Training of Image Transformers + `_ + A PyTorch implement of : `BEiT v2: Masked Image Modeling with + Vector-Quantized Visual Tokenizers `_ + + Args: + arch (str | dict): BEiT architecture. If use string, choose from + 'base', 'large'. If use dict, it should have below keys: + + - **embed_dims** (int): The dimensions of embedding. 
+ - **num_layers** (int): The number of transformer encoder layers. + - **num_heads** (int): The number of heads in attention modules. + - **feedforward_channels** (int): The hidden dimensions in + feedforward modules. + + Defaults to 'base'. + img_size (int | tuple): The expected input image shape. Because we + support dynamic input shape, just set the argument to the most + common input image shape. Defaults to 224. + patch_size (int | tuple): The patch size in patch embedding. + Defaults to 16. + in_channels (int): The num of input channels. Defaults to 3. + out_indices (Sequence | int): Output from which stages. + Defaults to -1, means the last stage. + drop_rate (float): Probability of an element to be zeroed. + Defaults to 0. + drop_path_rate (float): stochastic depth rate. Defaults to 0. + bias (bool | str): The option to add leanable bias for q, k, v. If bias + is True, it will add leanable bias. If bias is 'qv_bias', it will + only add leanable bias for q, v. If bias is False, it will not add + bias for q, k, v. Default to 'qv_bias'. + norm_cfg (dict): Config dict for normalization layer. + Defaults to ``dict(type='LN')``. + final_norm (bool): Whether to add a additional layer to normalize + final feature map. Defaults to True. + out_type (str): The type of output features. Please choose from + + - ``"cls_token"``: The class token tensor with shape (B, C). + - ``"featmap"``: The feature map tensor from the patch tokens + with shape (B, C, H, W). + - ``"avg_featmap"``: The global averaged feature map tensor + with shape (B, C). + - ``"raw"``: The raw feature tensor includes patch tokens and + class tokens with shape (B, L, C). + + Defaults to ``"avg_featmap"``. + with_cls_token (bool): Whether concatenating class token into image + tokens as transformer input. Defaults to True. + frozen_stages (int): Stages to be frozen (stop grad and set eval mode). + -1 means not freezing any parameters. Defaults to -1. + use_abs_pos_emb (bool): Use position embedding like vanilla ViT. + Defaults to False. + use_rel_pos_bias (bool): Use relative position embedding in each + transformer encoder layer. Defaults to True. + use_shared_rel_pos_bias (bool): Use shared relative position embedding, + all transformer encoder layers share the same relative position + embedding. Defaults to False. + layer_scale_init_value (float): The initialization value for + the learnable scaling of attention and FFN. Defaults to 0.1. + interpolate_mode (str): Select the interpolate mode for position + embeding vector resize. Defaults to "bicubic". + patch_cfg (dict): Configs of patch embeding. Defaults to an empty dict. + layer_cfgs (Sequence | dict): Configs of each transformer layer in + encoder. Defaults to an empty dict. + init_cfg (dict, optional): Initialization config dict. + Defaults to None. 
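+
+    Examples:
+        A minimal usage sketch (the printed shape assumes the default
+        ``arch='base'``, whose embedding dimension is 768, and the default
+        ``out_type='avg_featmap'``):
+
+        >>> import torch
+        >>> from mmpretrain.models import BEiTViT
+        >>> model = BEiTViT(arch='base', img_size=224, patch_size=16)
+        >>> inputs = torch.rand(1, 3, 224, 224)
+        >>> outputs = model(inputs)
+        >>> print(outputs[0].shape)
+        torch.Size([1, 768])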
+ """ + + def __init__(self, + arch='base', + img_size=224, + patch_size=16, + in_channels=3, + out_indices=-1, + drop_rate=0, + drop_path_rate=0, + bias='qv_bias', + norm_cfg=dict(type='LN', eps=1e-6), + final_norm=False, + out_type='avg_featmap', + with_cls_token=True, + frozen_stages=-1, + use_abs_pos_emb=False, + use_rel_pos_bias=True, + use_shared_rel_pos_bias=False, + layer_scale_init_value=0.1, + interpolate_mode='bicubic', + patch_cfg=dict(), + layer_cfgs=dict(), + init_cfg=None): + super(VisionTransformer, self).__init__(init_cfg) + + if isinstance(arch, str): + arch = arch.lower() + assert arch in set(self.arch_zoo), \ + f'Arch {arch} is not in default archs {set(self.arch_zoo)}' + self.arch_settings = self.arch_zoo[arch] + else: + essential_keys = { + 'embed_dims', 'num_layers', 'num_heads', 'feedforward_channels' + } + assert isinstance(arch, dict) and essential_keys <= set(arch), \ + f'Custom arch needs a dict with keys {essential_keys}' + self.arch_settings = arch + + self.embed_dims = self.arch_settings['embed_dims'] + self.num_layers = self.arch_settings['num_layers'] + self.img_size = to_2tuple(img_size) + + # Set patch embedding + _patch_cfg = dict( + in_channels=in_channels, + input_size=img_size, + embed_dims=self.embed_dims, + conv_type='Conv2d', + kernel_size=patch_size, + stride=patch_size, + ) + _patch_cfg.update(patch_cfg) + self.patch_embed = PatchEmbed(**_patch_cfg) + self.patch_resolution = self.patch_embed.init_out_size + num_patches = self.patch_resolution[0] * self.patch_resolution[1] + + # Set out type + if out_type not in self.OUT_TYPES: + raise ValueError(f'Unsupported `out_type` {out_type}, please ' + f'choose from {self.OUT_TYPES}') + self.out_type = out_type + + # Set cls token + if with_cls_token: + self.cls_token = nn.Parameter(torch.zeros(1, 1, self.embed_dims)) + self.num_extra_tokens = 1 + elif out_type != 'cls_token': + self.cls_token = None + self.num_extra_tokens = 0 + else: + raise ValueError( + 'with_cls_token must be True when `out_type="cls_token"`.') + + # Set position embedding + self.interpolate_mode = interpolate_mode + if use_abs_pos_emb: + self.pos_embed = nn.Parameter( + torch.zeros(1, num_patches + self.num_extra_tokens, + self.embed_dims)) + self._register_load_state_dict_pre_hook(self._prepare_pos_embed) + else: + self.pos_embed = None + self.drop_after_pos = nn.Dropout(p=drop_rate) + + assert not (use_rel_pos_bias and use_shared_rel_pos_bias), ( + '`use_rel_pos_bias` and `use_shared_rel_pos_bias` cannot be set ' + 'to True at the same time') + self.use_rel_pos_bias = use_rel_pos_bias + + if use_shared_rel_pos_bias: + self.rel_pos_bias = RelativePositionBias( + window_size=self.patch_resolution, + num_heads=self.arch_settings['num_heads']) + else: + self.rel_pos_bias = None + self._register_load_state_dict_pre_hook( + self._prepare_relative_position_bias_table) + + if isinstance(out_indices, int): + out_indices = [out_indices] + assert isinstance(out_indices, Sequence), \ + f'"out_indices" must by a sequence or int, ' \ + f'get {type(out_indices)} instead.' 
+ for i, index in enumerate(out_indices): + if index < 0: + out_indices[i] = self.num_layers + index + assert 0 <= out_indices[i] <= self.num_layers, \ + f'Invalid out_indices {index}' + self.out_indices = out_indices + + # stochastic depth decay rule + dpr = np.linspace(0, drop_path_rate, self.num_layers) + + self.layers = ModuleList() + if isinstance(layer_cfgs, dict): + layer_cfgs = [layer_cfgs] * self.num_layers + for i in range(self.num_layers): + _layer_cfg = dict( + embed_dims=self.embed_dims, + num_heads=self.arch_settings['num_heads'], + feedforward_channels=self. + arch_settings['feedforward_channels'], + layer_scale_init_value=layer_scale_init_value, + window_size=self.patch_resolution, + use_rel_pos_bias=use_rel_pos_bias, + drop_rate=drop_rate, + drop_path_rate=dpr[i], + bias=bias, + norm_cfg=norm_cfg) + _layer_cfg.update(layer_cfgs[i]) + self.layers.append(BEiTTransformerEncoderLayer(**_layer_cfg)) + + self.frozen_stages = frozen_stages + self.final_norm = final_norm + if final_norm: + self.ln1 = build_norm_layer(norm_cfg, self.embed_dims) + + if out_type == 'avg_featmap': + self.ln2 = build_norm_layer(norm_cfg, self.embed_dims) + + # freeze stages only when self.frozen_stages > 0 + if self.frozen_stages > 0: + self._freeze_stages() + + def forward(self, x): + B = x.shape[0] + x, patch_resolution = self.patch_embed(x) + + if self.cls_token is not None: + # stole cls_tokens impl from Phil Wang, thanks + cls_token = self.cls_token.expand(B, -1, -1) + x = torch.cat((cls_token, x), dim=1) + + if self.pos_embed is not None: + x = x + resize_pos_embed( + self.pos_embed, + self.patch_resolution, + patch_resolution, + mode=self.interpolate_mode, + num_extra_tokens=self.num_extra_tokens) + x = self.drop_after_pos(x) + + rel_pos_bias = self.rel_pos_bias() \ + if self.rel_pos_bias is not None else None + + outs = [] + for i, layer in enumerate(self.layers): + x = layer(x, rel_pos_bias) + + if i == len(self.layers) - 1 and self.final_norm: + x = self.ln1(x) + + if i in self.out_indices: + outs.append(self._format_output(x, patch_resolution)) + + return tuple(outs) + + def _format_output(self, x, hw): + if self.out_type == 'raw': + return x + if self.out_type == 'cls_token': + return x[:, 0] + + patch_token = x[:, self.num_extra_tokens:] + if self.out_type == 'featmap': + B = x.size(0) + # (B, N, C) -> (B, H, W, C) -> (B, C, H, W) + return patch_token.reshape(B, *hw, -1).permute(0, 3, 1, 2) + if self.out_type == 'avg_featmap': + return self.ln2(patch_token.mean(dim=1)) + + def _prepare_relative_position_bias_table(self, state_dict, prefix, *args, + **kwargs): + from mmengine.logging import MMLogger + logger = MMLogger.get_current_instance() + + if self.use_rel_pos_bias and 'rel_pos_bias.relative_position_bias_table' in state_dict: # noqa:E501 + logger.info('Expand the shared relative position embedding to ' + 'each transformer block.') + rel_pos_bias = state_dict[ + 'rel_pos_bias.relative_position_bias_table'] + for i in range(self.num_layers): + state_dict[ + f'layers.{i}.attn.relative_position_bias_table'] = \ + rel_pos_bias.clone() + state_dict.pop('rel_pos_bias.relative_position_bias_table') + state_dict.pop('rel_pos_bias.relative_position_index') + + state_dict_model = self.state_dict() + all_keys = list(state_dict_model.keys()) + for key in all_keys: + if 'relative_position_bias_table' in key: + ckpt_key = prefix + key + if ckpt_key not in state_dict: + continue + rel_pos_bias_pretrained = state_dict[ckpt_key] + rel_pos_bias_current = state_dict_model[key] + L1, nH1 = 
rel_pos_bias_pretrained.size() + L2, nH2 = rel_pos_bias_current.size() + src_size = int((L1 - 3)**0.5) + dst_size = int((L2 - 3)**0.5) + if L1 != L2: + extra_tokens = rel_pos_bias_pretrained[-3:, :] + rel_pos_bias = rel_pos_bias_pretrained[:-3, :] + + new_rel_pos_bias = resize_relative_position_bias_table( + src_size, dst_size, rel_pos_bias, nH1) + new_rel_pos_bias = torch.cat( + (new_rel_pos_bias, extra_tokens), dim=0) + logger.info('Resize the relative_position_bias_table from ' + f'{state_dict[ckpt_key].shape} to ' + f'{new_rel_pos_bias.shape}') + state_dict[ckpt_key] = new_rel_pos_bias + + # The index buffer need to be re-generated. + index_buffer = ckpt_key.replace('bias_table', 'index') + if index_buffer in state_dict: + del state_dict[index_buffer] diff --git a/mmpretrain/models/backbones/conformer.py b/mmpretrain/models/backbones/conformer.py new file mode 100644 index 0000000000000000000000000000000000000000..eda72b0595b6923a7f1f563ae7186ca533f85023 --- /dev/null +++ b/mmpretrain/models/backbones/conformer.py @@ -0,0 +1,621 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Sequence + +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import build_activation_layer, build_norm_layer +from mmcv.cnn.bricks.drop import DropPath +from mmcv.cnn.bricks.transformer import AdaptivePadding +from mmengine.model import BaseModule +from mmengine.model.weight_init import trunc_normal_ + +from mmpretrain.registry import MODELS +from .base_backbone import BaseBackbone +from .vision_transformer import TransformerEncoderLayer + + +class ConvBlock(BaseModule): + """Basic convluation block used in Conformer. + + This block includes three convluation modules, and supports three new + functions: + 1. Returns the output of both the final layers and the second convluation + module. + 2. Fuses the input of the second convluation module with an extra input + feature map. + 3. Supports to add an extra convluation module to the identity connection. + + Args: + in_channels (int): The number of input channels. + out_channels (int): The number of output channels. + stride (int): The stride of the second convluation module. + Defaults to 1. + groups (int): The groups of the second convluation module. + Defaults to 1. + drop_path_rate (float): The rate of the DropPath layer. Defaults to 0. + with_residual_conv (bool): Whether to add an extra convluation module + to the identity connection. Defaults to False. + norm_cfg (dict): The config of normalization layers. + Defaults to ``dict(type='BN', eps=1e-6)``. + act_cfg (dict): The config of activative functions. + Defaults to ``dict(type='ReLU', inplace=True))``. + init_cfg (dict, optional): The extra config to initialize the module. + Defaults to None. 
+ """ + + def __init__(self, + in_channels, + out_channels, + stride=1, + groups=1, + drop_path_rate=0., + with_residual_conv=False, + norm_cfg=dict(type='BN', eps=1e-6), + act_cfg=dict(type='ReLU', inplace=True), + init_cfg=None): + super(ConvBlock, self).__init__(init_cfg=init_cfg) + + expansion = 4 + mid_channels = out_channels // expansion + + self.conv1 = nn.Conv2d( + in_channels, + mid_channels, + kernel_size=1, + stride=1, + padding=0, + bias=False) + self.bn1 = build_norm_layer(norm_cfg, mid_channels)[1] + self.act1 = build_activation_layer(act_cfg) + + self.conv2 = nn.Conv2d( + mid_channels, + mid_channels, + kernel_size=3, + stride=stride, + groups=groups, + padding=1, + bias=False) + self.bn2 = build_norm_layer(norm_cfg, mid_channels)[1] + self.act2 = build_activation_layer(act_cfg) + + self.conv3 = nn.Conv2d( + mid_channels, + out_channels, + kernel_size=1, + stride=1, + padding=0, + bias=False) + self.bn3 = build_norm_layer(norm_cfg, out_channels)[1] + self.act3 = build_activation_layer(act_cfg) + + if with_residual_conv: + self.residual_conv = nn.Conv2d( + in_channels, + out_channels, + kernel_size=1, + stride=stride, + padding=0, + bias=False) + self.residual_bn = build_norm_layer(norm_cfg, out_channels)[1] + + self.with_residual_conv = with_residual_conv + self.drop_path = DropPath( + drop_path_rate) if drop_path_rate > 0. else nn.Identity() + + def zero_init_last_bn(self): + nn.init.zeros_(self.bn3.weight) + + def forward(self, x, fusion_features=None, out_conv2=True): + identity = x + + x = self.conv1(x) + x = self.bn1(x) + x = self.act1(x) + + x = self.conv2(x) if fusion_features is None else self.conv2( + x + fusion_features) + x = self.bn2(x) + x2 = self.act2(x) + + x = self.conv3(x2) + x = self.bn3(x) + + if self.drop_path is not None: + x = self.drop_path(x) + + if self.with_residual_conv: + identity = self.residual_conv(identity) + identity = self.residual_bn(identity) + + x += identity + x = self.act3(x) + + if out_conv2: + return x, x2 + else: + return x + + +class FCUDown(BaseModule): + """CNN feature maps -> Transformer patch embeddings.""" + + def __init__(self, + in_channels, + out_channels, + down_stride, + with_cls_token=True, + norm_cfg=dict(type='LN', eps=1e-6), + act_cfg=dict(type='GELU'), + init_cfg=None): + super(FCUDown, self).__init__(init_cfg=init_cfg) + self.down_stride = down_stride + self.with_cls_token = with_cls_token + + self.conv_project = nn.Conv2d( + in_channels, out_channels, kernel_size=1, stride=1, padding=0) + self.sample_pooling = nn.AvgPool2d( + kernel_size=down_stride, stride=down_stride) + + self.ln = build_norm_layer(norm_cfg, out_channels)[1] + self.act = build_activation_layer(act_cfg) + + def forward(self, x, x_t): + x = self.conv_project(x) # [N, C, H, W] + + x = self.sample_pooling(x).flatten(2).transpose(1, 2) + x = self.ln(x) + x = self.act(x) + + if self.with_cls_token: + x = torch.cat([x_t[:, 0][:, None, :], x], dim=1) + + return x + + +class FCUUp(BaseModule): + """Transformer patch embeddings -> CNN feature maps.""" + + def __init__(self, + in_channels, + out_channels, + up_stride, + with_cls_token=True, + norm_cfg=dict(type='BN', eps=1e-6), + act_cfg=dict(type='ReLU', inplace=True), + init_cfg=None): + super(FCUUp, self).__init__(init_cfg=init_cfg) + + self.up_stride = up_stride + self.with_cls_token = with_cls_token + + self.conv_project = nn.Conv2d( + in_channels, out_channels, kernel_size=1, stride=1, padding=0) + self.bn = build_norm_layer(norm_cfg, out_channels)[1] + self.act = build_activation_layer(act_cfg) + + 
def forward(self, x, H, W): + B, _, C = x.shape + # [N, 197, 384] -> [N, 196, 384] -> [N, 384, 196] -> [N, 384, 14, 14] + if self.with_cls_token: + x_r = x[:, 1:].transpose(1, 2).reshape(B, C, H, W) + else: + x_r = x.transpose(1, 2).reshape(B, C, H, W) + + x_r = self.act(self.bn(self.conv_project(x_r))) + + return F.interpolate( + x_r, size=(H * self.up_stride, W * self.up_stride)) + + +class ConvTransBlock(BaseModule): + """Basic module for Conformer. + + This module is a fusion of CNN block transformer encoder block. + + Args: + in_channels (int): The number of input channels in conv blocks. + out_channels (int): The number of output channels in conv blocks. + embed_dims (int): The embedding dimension in transformer blocks. + conv_stride (int): The stride of conv2d layers. Defaults to 1. + groups (int): The groups of conv blocks. Defaults to 1. + with_residual_conv (bool): Whether to add a conv-bn layer to the + identity connect in the conv block. Defaults to False. + down_stride (int): The stride of the downsample pooling layer. + Defaults to 4. + num_heads (int): The number of heads in transformer attention layers. + Defaults to 12. + mlp_ratio (float): The expansion ratio in transformer FFN module. + Defaults to 4. + qkv_bias (bool): Enable bias for qkv if True. Defaults to False. + with_cls_token (bool): Whether use class token or not. + Defaults to True. + drop_rate (float): The dropout rate of the output projection and + FFN in the transformer block. Defaults to 0. + attn_drop_rate (float): The dropout rate after the attention + calculation in the transformer block. Defaults to 0. + drop_path_rate (bloat): The drop path rate in both the conv block + and the transformer block. Defaults to 0. + last_fusion (bool): Whether this block is the last stage. If so, + downsample the fusion feature map. + init_cfg (dict, optional): The extra config to initialize the module. + Defaults to None. 
+ """ + + def __init__(self, + in_channels, + out_channels, + embed_dims, + conv_stride=1, + groups=1, + with_residual_conv=False, + down_stride=4, + num_heads=12, + mlp_ratio=4., + qkv_bias=False, + with_cls_token=True, + drop_rate=0., + attn_drop_rate=0., + drop_path_rate=0., + last_fusion=False, + init_cfg=None): + super(ConvTransBlock, self).__init__(init_cfg=init_cfg) + expansion = 4 + self.cnn_block = ConvBlock( + in_channels=in_channels, + out_channels=out_channels, + with_residual_conv=with_residual_conv, + stride=conv_stride, + groups=groups) + + if last_fusion: + self.fusion_block = ConvBlock( + in_channels=out_channels, + out_channels=out_channels, + stride=2, + with_residual_conv=True, + groups=groups, + drop_path_rate=drop_path_rate) + else: + self.fusion_block = ConvBlock( + in_channels=out_channels, + out_channels=out_channels, + groups=groups, + drop_path_rate=drop_path_rate) + + self.squeeze_block = FCUDown( + in_channels=out_channels // expansion, + out_channels=embed_dims, + down_stride=down_stride, + with_cls_token=with_cls_token) + + self.expand_block = FCUUp( + in_channels=embed_dims, + out_channels=out_channels // expansion, + up_stride=down_stride, + with_cls_token=with_cls_token) + + self.trans_block = TransformerEncoderLayer( + embed_dims=embed_dims, + num_heads=num_heads, + feedforward_channels=int(embed_dims * mlp_ratio), + drop_rate=drop_rate, + drop_path_rate=drop_path_rate, + attn_drop_rate=attn_drop_rate, + qkv_bias=qkv_bias, + norm_cfg=dict(type='LN', eps=1e-6)) + + self.down_stride = down_stride + self.embed_dim = embed_dims + self.last_fusion = last_fusion + + def forward(self, cnn_input, trans_input): + x, x_conv2 = self.cnn_block(cnn_input, out_conv2=True) + + _, _, H, W = x_conv2.shape + + # Convert the feature map of conv2 to transformer embedding + # and concat with class token. + conv2_embedding = self.squeeze_block(x_conv2, trans_input) + + trans_output = self.trans_block(conv2_embedding + trans_input) + + # Convert the transformer output embedding to feature map + trans_features = self.expand_block(trans_output, H // self.down_stride, + W // self.down_stride) + x = self.fusion_block( + x, fusion_features=trans_features, out_conv2=False) + + return x, trans_output + + +@MODELS.register_module() +class Conformer(BaseBackbone): + """Conformer backbone. + + A PyTorch implementation of : `Conformer: Local Features Coupling Global + Representations for Visual Recognition `_ + + Args: + arch (str | dict): Conformer architecture. Defaults to 'tiny'. + patch_size (int): The patch size. Defaults to 16. + base_channels (int): The base number of channels in CNN network. + Defaults to 64. + mlp_ratio (float): The expansion ratio of FFN network in transformer + block. Defaults to 4. + with_cls_token (bool): Whether use class token or not. + Defaults to True. + drop_path_rate (float): stochastic depth rate. Defaults to 0. + out_indices (Sequence | int): Output from which stages. + Defaults to -1, means the last stage. + init_cfg (dict, optional): Initialization config dict. + Defaults to None. 
+ """ + arch_zoo = { + **dict.fromkeys(['t', 'tiny'], + {'embed_dims': 384, + 'channel_ratio': 1, + 'num_heads': 6, + 'depths': 12 + }), + **dict.fromkeys(['s', 'small'], + {'embed_dims': 384, + 'channel_ratio': 4, + 'num_heads': 6, + 'depths': 12 + }), + **dict.fromkeys(['b', 'base'], + {'embed_dims': 576, + 'channel_ratio': 6, + 'num_heads': 9, + 'depths': 12 + }), + } # yapf: disable + + _version = 1 + + def __init__(self, + arch='tiny', + patch_size=16, + base_channels=64, + mlp_ratio=4., + qkv_bias=True, + with_cls_token=True, + drop_path_rate=0., + norm_eval=True, + frozen_stages=0, + out_indices=-1, + init_cfg=None): + + super().__init__(init_cfg=init_cfg) + + if isinstance(arch, str): + arch = arch.lower() + assert arch in set(self.arch_zoo), \ + f'Arch {arch} is not in default archs {set(self.arch_zoo)}' + self.arch_settings = self.arch_zoo[arch] + else: + essential_keys = { + 'embed_dims', 'depths', 'num_heads', 'channel_ratio' + } + assert isinstance(arch, dict) and set(arch) == essential_keys, \ + f'Custom arch needs a dict with keys {essential_keys}' + self.arch_settings = arch + + self.num_features = self.embed_dims = self.arch_settings['embed_dims'] + self.depths = self.arch_settings['depths'] + self.num_heads = self.arch_settings['num_heads'] + self.channel_ratio = self.arch_settings['channel_ratio'] + + if isinstance(out_indices, int): + out_indices = [out_indices] + assert isinstance(out_indices, Sequence), \ + f'"out_indices" must by a sequence or int, ' \ + f'get {type(out_indices)} instead.' + for i, index in enumerate(out_indices): + if index < 0: + out_indices[i] = self.depths + index + 1 + assert out_indices[i] >= 0, f'Invalid out_indices {index}' + self.out_indices = out_indices + + self.norm_eval = norm_eval + self.frozen_stages = frozen_stages + + self.with_cls_token = with_cls_token + if self.with_cls_token: + self.cls_token = nn.Parameter(torch.zeros(1, 1, self.embed_dims)) + + # stochastic depth decay rule + self.trans_dpr = [ + x.item() for x in torch.linspace(0, drop_path_rate, self.depths) + ] + + # Stem stage: get the feature maps by conv block + self.conv1 = nn.Conv2d( + 3, 64, kernel_size=7, stride=2, padding=3, + bias=False) # 1 / 2 [112, 112] + self.bn1 = nn.BatchNorm2d(64) + self.act1 = nn.ReLU(inplace=True) + self.maxpool = nn.MaxPool2d( + kernel_size=3, stride=2, padding=1) # 1 / 4 [56, 56] + + assert patch_size % 16 == 0, 'The patch size of Conformer must ' \ + 'be divisible by 16.' 
+ trans_down_stride = patch_size // 4 + + # To solve the issue #680 + # Auto pad the feature map to be divisible by trans_down_stride + self.auto_pad = AdaptivePadding(trans_down_stride, trans_down_stride) + + # 1 stage + stage1_channels = int(base_channels * self.channel_ratio) + self.conv_1 = ConvBlock( + in_channels=64, + out_channels=stage1_channels, + with_residual_conv=True, + stride=1) + self.trans_patch_conv = nn.Conv2d( + 64, + self.embed_dims, + kernel_size=trans_down_stride, + stride=trans_down_stride, + padding=0) + + self.trans_1 = TransformerEncoderLayer( + embed_dims=self.embed_dims, + num_heads=self.num_heads, + feedforward_channels=int(self.embed_dims * mlp_ratio), + drop_path_rate=self.trans_dpr[0], + qkv_bias=qkv_bias, + norm_cfg=dict(type='LN', eps=1e-6)) + + # 2~4 stage + init_stage = 2 + fin_stage = self.depths // 3 + 1 + for i in range(init_stage, fin_stage): + self.add_module( + f'conv_trans_{i}', + ConvTransBlock( + in_channels=stage1_channels, + out_channels=stage1_channels, + embed_dims=self.embed_dims, + conv_stride=1, + with_residual_conv=False, + down_stride=trans_down_stride, + num_heads=self.num_heads, + mlp_ratio=mlp_ratio, + qkv_bias=qkv_bias, + drop_path_rate=self.trans_dpr[i - 1], + with_cls_token=self.with_cls_token)) + + stage2_channels = int(base_channels * self.channel_ratio * 2) + # 5~8 stage + init_stage = fin_stage # 5 + fin_stage = fin_stage + self.depths // 3 # 9 + for i in range(init_stage, fin_stage): + if i == init_stage: + conv_stride = 2 + in_channels = stage1_channels + else: + conv_stride = 1 + in_channels = stage2_channels + + with_residual_conv = True if i == init_stage else False + self.add_module( + f'conv_trans_{i}', + ConvTransBlock( + in_channels=in_channels, + out_channels=stage2_channels, + embed_dims=self.embed_dims, + conv_stride=conv_stride, + with_residual_conv=with_residual_conv, + down_stride=trans_down_stride // 2, + num_heads=self.num_heads, + mlp_ratio=mlp_ratio, + qkv_bias=qkv_bias, + drop_path_rate=self.trans_dpr[i - 1], + with_cls_token=self.with_cls_token)) + + stage3_channels = int(base_channels * self.channel_ratio * 2 * 2) + # 9~12 stage + init_stage = fin_stage # 9 + fin_stage = fin_stage + self.depths // 3 # 13 + for i in range(init_stage, fin_stage): + if i == init_stage: + conv_stride = 2 + in_channels = stage2_channels + with_residual_conv = True + else: + conv_stride = 1 + in_channels = stage3_channels + with_residual_conv = False + + last_fusion = (i == self.depths) + + self.add_module( + f'conv_trans_{i}', + ConvTransBlock( + in_channels=in_channels, + out_channels=stage3_channels, + embed_dims=self.embed_dims, + conv_stride=conv_stride, + with_residual_conv=with_residual_conv, + down_stride=trans_down_stride // 4, + num_heads=self.num_heads, + mlp_ratio=mlp_ratio, + qkv_bias=qkv_bias, + drop_path_rate=self.trans_dpr[i - 1], + with_cls_token=self.with_cls_token, + last_fusion=last_fusion)) + self.fin_stage = fin_stage + + self.pooling = nn.AdaptiveAvgPool2d(1) + self.trans_norm = nn.LayerNorm(self.embed_dims) + + if self.with_cls_token: + trunc_normal_(self.cls_token, std=.02) + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + trunc_normal_(m.weight, std=.02) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.LayerNorm): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + elif isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_( + m.weight, mode='fan_out', nonlinearity='relu') + elif isinstance(m, nn.BatchNorm2d): + 
nn.init.constant_(m.weight, 1.) + nn.init.constant_(m.bias, 0.) + + if hasattr(m, 'zero_init_last_bn'): + m.zero_init_last_bn() + + def init_weights(self): + super(Conformer, self).init_weights() + + if (isinstance(self.init_cfg, dict) + and self.init_cfg['type'] == 'Pretrained'): + # Suppress default init if use pretrained model. + return + self.apply(self._init_weights) + + def forward(self, x): + output = [] + B = x.shape[0] + if self.with_cls_token: + cls_tokens = self.cls_token.expand(B, -1, -1) + + # stem + x_base = self.maxpool(self.act1(self.bn1(self.conv1(x)))) + x_base = self.auto_pad(x_base) + + # 1 stage [N, 64, 56, 56] -> [N, 128, 56, 56] + x = self.conv_1(x_base, out_conv2=False) + x_t = self.trans_patch_conv(x_base).flatten(2).transpose(1, 2) + if self.with_cls_token: + x_t = torch.cat([cls_tokens, x_t], dim=1) + x_t = self.trans_1(x_t) + + # 2 ~ final + for i in range(2, self.fin_stage): + stage = getattr(self, f'conv_trans_{i}') + x, x_t = stage(x, x_t) + if i in self.out_indices: + if self.with_cls_token: + output.append([ + self.pooling(x).flatten(1), + self.trans_norm(x_t)[:, 0] + ]) + else: + # if no class token, use the mean patch token + # as the transformer feature. + output.append([ + self.pooling(x).flatten(1), + self.trans_norm(x_t).mean(dim=1) + ]) + + return tuple(output) diff --git a/mmpretrain/models/backbones/convmixer.py b/mmpretrain/models/backbones/convmixer.py new file mode 100644 index 0000000000000000000000000000000000000000..480050d5ce1aa29f190dbc24ec1413573d541cb1 --- /dev/null +++ b/mmpretrain/models/backbones/convmixer.py @@ -0,0 +1,176 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Sequence + +import torch +import torch.nn as nn +from mmcv.cnn.bricks import (Conv2dAdaptivePadding, build_activation_layer, + build_norm_layer) +from mmengine.utils import digit_version + +from mmpretrain.registry import MODELS +from .base_backbone import BaseBackbone + + +class Residual(nn.Module): + + def __init__(self, fn): + super().__init__() + self.fn = fn + + def forward(self, x): + return self.fn(x) + x + + +@MODELS.register_module() +class ConvMixer(BaseBackbone): + """ConvMixer. . + + A PyTorch implementation of : `Patches Are All You Need? + `_ + + Modified from the `official repo + `_ + and `timm + `_. + + Args: + arch (str | dict): The model's architecture. If string, it should be + one of architecture in ``ConvMixer.arch_settings``. And if dict, it + should include the following two keys: + + - embed_dims (int): The dimensions of patch embedding. + - depth (int): Number of repetitions of ConvMixer Layer. + - patch_size (int): The patch size. + - kernel_size (int): The kernel size of depthwise conv layers. + + Defaults to '768/32'. + in_channels (int): Number of input image channels. Defaults to 3. + patch_size (int): The size of one patch in the patch embed layer. + Defaults to 7. + norm_cfg (dict): The config dict for norm layers. + Defaults to ``dict(type='BN')``. + act_cfg (dict): The config dict for activation after each convolution. + Defaults to ``dict(type='GELU')``. + out_indices (Sequence | int): Output from which stages. + Defaults to -1, means the last stage. + frozen_stages (int): Stages to be frozen (all param fixed). + Defaults to 0, which means not freezing any parameters. + init_cfg (dict, optional): Initialization config dict. 
+ """ + arch_settings = { + '768/32': { + 'embed_dims': 768, + 'depth': 32, + 'patch_size': 7, + 'kernel_size': 7 + }, + '1024/20': { + 'embed_dims': 1024, + 'depth': 20, + 'patch_size': 14, + 'kernel_size': 9 + }, + '1536/20': { + 'embed_dims': 1536, + 'depth': 20, + 'patch_size': 7, + 'kernel_size': 9 + }, + } + + def __init__(self, + arch='768/32', + in_channels=3, + norm_cfg=dict(type='BN'), + act_cfg=dict(type='GELU'), + out_indices=-1, + frozen_stages=0, + init_cfg=None): + super().__init__(init_cfg=init_cfg) + + if isinstance(arch, str): + assert arch in self.arch_settings, \ + f'Unavailable arch, please choose from ' \ + f'({set(self.arch_settings)}) or pass a dict.' + arch = self.arch_settings[arch] + elif isinstance(arch, dict): + essential_keys = { + 'embed_dims', 'depth', 'patch_size', 'kernel_size' + } + assert isinstance(arch, dict) and essential_keys <= set(arch), \ + f'Custom arch needs a dict with keys {essential_keys}' + + self.embed_dims = arch['embed_dims'] + self.depth = arch['depth'] + self.patch_size = arch['patch_size'] + self.kernel_size = arch['kernel_size'] + self.act = build_activation_layer(act_cfg) + + # check out indices and frozen stages + if isinstance(out_indices, int): + out_indices = [out_indices] + assert isinstance(out_indices, Sequence), \ + f'"out_indices" must by a sequence or int, ' \ + f'get {type(out_indices)} instead.' + for i, index in enumerate(out_indices): + if index < 0: + out_indices[i] = self.depth + index + assert out_indices[i] >= 0, f'Invalid out_indices {index}' + self.out_indices = out_indices + self.frozen_stages = frozen_stages + + # Set stem layers + self.stem = nn.Sequential( + nn.Conv2d( + in_channels, + self.embed_dims, + kernel_size=self.patch_size, + stride=self.patch_size), self.act, + build_norm_layer(norm_cfg, self.embed_dims)[1]) + + # Set conv2d according to torch version + convfunc = nn.Conv2d + if digit_version(torch.__version__) < digit_version('1.9.0'): + convfunc = Conv2dAdaptivePadding + + # Repetitions of ConvMixer Layer + self.stages = nn.Sequential(*[ + nn.Sequential( + Residual( + nn.Sequential( + convfunc( + self.embed_dims, + self.embed_dims, + self.kernel_size, + groups=self.embed_dims, + padding='same'), self.act, + build_norm_layer(norm_cfg, self.embed_dims)[1])), + nn.Conv2d(self.embed_dims, self.embed_dims, kernel_size=1), + self.act, + build_norm_layer(norm_cfg, self.embed_dims)[1]) + for _ in range(self.depth) + ]) + + self._freeze_stages() + + def forward(self, x): + x = self.stem(x) + outs = [] + for i, stage in enumerate(self.stages): + x = stage(x) + if i in self.out_indices: + outs.append(x) + + # x = self.pooling(x).flatten(1) + return tuple(outs) + + def train(self, mode=True): + super(ConvMixer, self).train(mode) + self._freeze_stages() + + def _freeze_stages(self): + for i in range(self.frozen_stages): + stage = self.stages[i] + stage.eval() + for param in stage.parameters(): + param.requires_grad = False diff --git a/mmpretrain/models/backbones/convnext.py b/mmpretrain/models/backbones/convnext.py new file mode 100644 index 0000000000000000000000000000000000000000..f9c29cf2eb128bea57ab992ec8ff4841cd05679f --- /dev/null +++ b/mmpretrain/models/backbones/convnext.py @@ -0,0 +1,368 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+from functools import partial +from itertools import chain +from typing import Sequence + +import torch +import torch.nn as nn +import torch.utils.checkpoint as cp +from mmcv.cnn.bricks import DropPath +from mmengine.model import BaseModule, ModuleList, Sequential + +from mmpretrain.registry import MODELS +from ..utils import GRN, build_norm_layer +from .base_backbone import BaseBackbone + + +class ConvNeXtBlock(BaseModule): + """ConvNeXt Block. + + Args: + in_channels (int): The number of input channels. + dw_conv_cfg (dict): Config of depthwise convolution. + Defaults to ``dict(kernel_size=7, padding=3)``. + norm_cfg (dict): The config dict for norm layers. + Defaults to ``dict(type='LN2d', eps=1e-6)``. + act_cfg (dict): The config dict for activation between pointwise + convolution. Defaults to ``dict(type='GELU')``. + mlp_ratio (float): The expansion ratio in both pointwise convolution. + Defaults to 4. + linear_pw_conv (bool): Whether to use linear layer to do pointwise + convolution. More details can be found in the note. + Defaults to True. + drop_path_rate (float): Stochastic depth rate. Defaults to 0. + layer_scale_init_value (float): Init value for Layer Scale. + Defaults to 1e-6. + + Note: + There are two equivalent implementations: + + 1. DwConv -> LayerNorm -> 1x1 Conv -> GELU -> 1x1 Conv; + all outputs are in (N, C, H, W). + 2. DwConv -> LayerNorm -> Permute to (N, H, W, C) -> Linear -> GELU + -> Linear; Permute back + + As default, we use the second to align with the official repository. + And it may be slightly faster. + """ + + def __init__(self, + in_channels, + dw_conv_cfg=dict(kernel_size=7, padding=3), + norm_cfg=dict(type='LN2d', eps=1e-6), + act_cfg=dict(type='GELU'), + mlp_ratio=4., + linear_pw_conv=True, + drop_path_rate=0., + layer_scale_init_value=1e-6, + use_grn=False, + with_cp=False): + super().__init__() + self.with_cp = with_cp + + self.depthwise_conv = nn.Conv2d( + in_channels, in_channels, groups=in_channels, **dw_conv_cfg) + + self.linear_pw_conv = linear_pw_conv + self.norm = build_norm_layer(norm_cfg, in_channels) + + mid_channels = int(mlp_ratio * in_channels) + if self.linear_pw_conv: + # Use linear layer to do pointwise conv. + pw_conv = nn.Linear + else: + pw_conv = partial(nn.Conv2d, kernel_size=1) + + self.pointwise_conv1 = pw_conv(in_channels, mid_channels) + self.act = MODELS.build(act_cfg) + self.pointwise_conv2 = pw_conv(mid_channels, in_channels) + + if use_grn: + self.grn = GRN(mid_channels) + else: + self.grn = None + + self.gamma = nn.Parameter( + layer_scale_init_value * torch.ones((in_channels)), + requires_grad=True) if layer_scale_init_value > 0 else None + + self.drop_path = DropPath( + drop_path_rate) if drop_path_rate > 0. 
else nn.Identity() + + def forward(self, x): + + def _inner_forward(x): + shortcut = x + x = self.depthwise_conv(x) + + if self.linear_pw_conv: + x = x.permute(0, 2, 3, 1) # (N, C, H, W) -> (N, H, W, C) + x = self.norm(x, data_format='channel_last') + x = self.pointwise_conv1(x) + x = self.act(x) + if self.grn is not None: + x = self.grn(x, data_format='channel_last') + x = self.pointwise_conv2(x) + x = x.permute(0, 3, 1, 2) # (N, H, W, C) -> (N, C, H, W) + else: + x = self.norm(x, data_format='channel_first') + x = self.pointwise_conv1(x) + x = self.act(x) + + if self.grn is not None: + x = self.grn(x, data_format='channel_first') + x = self.pointwise_conv2(x) + + if self.gamma is not None: + x = x.mul(self.gamma.view(1, -1, 1, 1)) + + x = shortcut + self.drop_path(x) + return x + + if self.with_cp and x.requires_grad: + x = cp.checkpoint(_inner_forward, x) + else: + x = _inner_forward(x) + return x + + +@MODELS.register_module() +class ConvNeXt(BaseBackbone): + """ConvNeXt v1&v2 backbone. + + A PyTorch implementation of `A ConvNet for the 2020s + `_ and + `ConvNeXt V2: Co-designing and Scaling ConvNets with Masked Autoencoders + `_ + + Modified from the `official repo + `_ + and `timm + `_. + + To use ConvNeXt v2, please set ``use_grn=True`` and ``layer_scale_init_value=0.``. + + Args: + arch (str | dict): The model's architecture. If string, it should be + one of architecture in ``ConvNeXt.arch_settings``. And if dict, it + should include the following two keys: + + - depths (list[int]): Number of blocks at each stage. + - channels (list[int]): The number of channels at each stage. + + Defaults to 'tiny'. + in_channels (int): Number of input image channels. Defaults to 3. + stem_patch_size (int): The size of one patch in the stem layer. + Defaults to 4. + norm_cfg (dict): The config dict for norm layers. + Defaults to ``dict(type='LN2d', eps=1e-6)``. + act_cfg (dict): The config dict for activation between pointwise + convolution. Defaults to ``dict(type='GELU')``. + linear_pw_conv (bool): Whether to use linear layer to do pointwise + convolution. Defaults to True. + use_grn (bool): Whether to add Global Response Normalization in the + blocks. Defaults to False. + drop_path_rate (float): Stochastic depth rate. Defaults to 0. + layer_scale_init_value (float): Init value for Layer Scale. + Defaults to 1e-6. + out_indices (Sequence | int): Output from which stages. + Defaults to -1, means the last stage. + frozen_stages (int): Stages to be frozen (all param fixed). + Defaults to 0, which means not freezing any parameters. + gap_before_final_norm (bool): Whether to globally average the feature + map before the final norm layer. In the official repo, it's only + used in classification task. Defaults to True. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. Defaults to False. 
+ init_cfg (dict, optional): Initialization config dict + """ # noqa: E501 + arch_settings = { + 'atto': { + 'depths': [2, 2, 6, 2], + 'channels': [40, 80, 160, 320] + }, + 'femto': { + 'depths': [2, 2, 6, 2], + 'channels': [48, 96, 192, 384] + }, + 'pico': { + 'depths': [2, 2, 6, 2], + 'channels': [64, 128, 256, 512] + }, + 'nano': { + 'depths': [2, 2, 8, 2], + 'channels': [80, 160, 320, 640] + }, + 'tiny': { + 'depths': [3, 3, 9, 3], + 'channels': [96, 192, 384, 768] + }, + 'small': { + 'depths': [3, 3, 27, 3], + 'channels': [96, 192, 384, 768] + }, + 'base': { + 'depths': [3, 3, 27, 3], + 'channels': [128, 256, 512, 1024] + }, + 'large': { + 'depths': [3, 3, 27, 3], + 'channels': [192, 384, 768, 1536] + }, + 'xlarge': { + 'depths': [3, 3, 27, 3], + 'channels': [256, 512, 1024, 2048] + }, + 'huge': { + 'depths': [3, 3, 27, 3], + 'channels': [352, 704, 1408, 2816] + } + } + + def __init__(self, + arch='tiny', + in_channels=3, + stem_patch_size=4, + norm_cfg=dict(type='LN2d', eps=1e-6), + act_cfg=dict(type='GELU'), + linear_pw_conv=True, + use_grn=False, + drop_path_rate=0., + layer_scale_init_value=1e-6, + out_indices=-1, + frozen_stages=0, + gap_before_final_norm=True, + with_cp=False, + init_cfg=[ + dict( + type='TruncNormal', + layer=['Conv2d', 'Linear'], + std=.02, + bias=0.), + dict( + type='Constant', layer=['LayerNorm'], val=1., + bias=0.), + ]): + super().__init__(init_cfg=init_cfg) + + if isinstance(arch, str): + assert arch in self.arch_settings, \ + f'Unavailable arch, please choose from ' \ + f'({set(self.arch_settings)}) or pass a dict.' + arch = self.arch_settings[arch] + elif isinstance(arch, dict): + assert 'depths' in arch and 'channels' in arch, \ + f'The arch dict must have "depths" and "channels", ' \ + f'but got {list(arch.keys())}.' + + self.depths = arch['depths'] + self.channels = arch['channels'] + assert (isinstance(self.depths, Sequence) + and isinstance(self.channels, Sequence) + and len(self.depths) == len(self.channels)), \ + f'The "depths" ({self.depths}) and "channels" ({self.channels}) ' \ + 'should be both sequence with the same length.' + + self.num_stages = len(self.depths) + + if isinstance(out_indices, int): + out_indices = [out_indices] + assert isinstance(out_indices, Sequence), \ + f'"out_indices" must by a sequence or int, ' \ + f'get {type(out_indices)} instead.' + for i, index in enumerate(out_indices): + if index < 0: + out_indices[i] = 4 + index + assert out_indices[i] >= 0, f'Invalid out_indices {index}' + self.out_indices = out_indices + + self.frozen_stages = frozen_stages + self.gap_before_final_norm = gap_before_final_norm + + # stochastic depth decay rule + dpr = [ + x.item() + for x in torch.linspace(0, drop_path_rate, sum(self.depths)) + ] + block_idx = 0 + + # 4 downsample layers between stages, including the stem layer. 
+ self.downsample_layers = ModuleList() + stem = nn.Sequential( + nn.Conv2d( + in_channels, + self.channels[0], + kernel_size=stem_patch_size, + stride=stem_patch_size), + build_norm_layer(norm_cfg, self.channels[0]), + ) + self.downsample_layers.append(stem) + + # 4 feature resolution stages, each consisting of multiple residual + # blocks + self.stages = nn.ModuleList() + + for i in range(self.num_stages): + depth = self.depths[i] + channels = self.channels[i] + + if i >= 1: + downsample_layer = nn.Sequential( + build_norm_layer(norm_cfg, self.channels[i - 1]), + nn.Conv2d( + self.channels[i - 1], + channels, + kernel_size=2, + stride=2), + ) + self.downsample_layers.append(downsample_layer) + + stage = Sequential(*[ + ConvNeXtBlock( + in_channels=channels, + drop_path_rate=dpr[block_idx + j], + norm_cfg=norm_cfg, + act_cfg=act_cfg, + linear_pw_conv=linear_pw_conv, + layer_scale_init_value=layer_scale_init_value, + use_grn=use_grn, + with_cp=with_cp) for j in range(depth) + ]) + block_idx += depth + + self.stages.append(stage) + + if i in self.out_indices: + norm_layer = build_norm_layer(norm_cfg, channels) + self.add_module(f'norm{i}', norm_layer) + + self._freeze_stages() + + def forward(self, x): + outs = [] + for i, stage in enumerate(self.stages): + x = self.downsample_layers[i](x) + x = stage(x) + if i in self.out_indices: + norm_layer = getattr(self, f'norm{i}') + if self.gap_before_final_norm: + gap = x.mean([-2, -1], keepdim=True) + outs.append(norm_layer(gap).flatten(1)) + else: + outs.append(norm_layer(x)) + + return tuple(outs) + + def _freeze_stages(self): + for i in range(self.frozen_stages): + downsample_layer = self.downsample_layers[i] + stage = self.stages[i] + downsample_layer.eval() + stage.eval() + for param in chain(downsample_layer.parameters(), + stage.parameters()): + param.requires_grad = False + + def train(self, mode=True): + super(ConvNeXt, self).train(mode) + self._freeze_stages() diff --git a/mmpretrain/models/backbones/cspnet.py b/mmpretrain/models/backbones/cspnet.py new file mode 100644 index 0000000000000000000000000000000000000000..7492e97702c28861dcce2808207a35e67f32f752 --- /dev/null +++ b/mmpretrain/models/backbones/cspnet.py @@ -0,0 +1,679 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import math +from typing import Sequence + +import torch +import torch.nn as nn +from mmcv.cnn import ConvModule, DepthwiseSeparableConvModule +from mmcv.cnn.bricks import DropPath +from mmengine.model import BaseModule, Sequential +from torch.nn.modules.batchnorm import _BatchNorm + +from mmpretrain.registry import MODELS +from ..utils import to_ntuple +from .resnet import Bottleneck as ResNetBottleneck +from .resnext import Bottleneck as ResNeXtBottleneck + +eps = 1.0e-5 + + +class DarknetBottleneck(BaseModule): + """The basic bottleneck block used in Darknet. Each DarknetBottleneck + consists of two ConvModules and the input is added to the final output. + Each ConvModule is composed of Conv, BN, and LeakyReLU. The first convLayer + has filter size of 1x1 and the second one has the filter size of 3x3. + + Args: + in_channels (int): The input channels of this Module. + out_channels (int): The output channels of this Module. + expansion (int): The ratio of ``out_channels/mid_channels`` where + ``mid_channels`` is the input/output channels of conv2. + Defaults to 4. + add_identity (bool): Whether to add identity to the out. + Defaults to True. + use_depthwise (bool): Whether to use depthwise separable convolution. + Defaults to False. 
+ conv_cfg (dict): Config dict for convolution layer. Defaults to None, + which means using conv2d. + drop_path_rate (float): The ratio of the drop path layer. Default: 0. + norm_cfg (dict): Config dict for normalization layer. + Defaults to ``dict(type='BN', eps=1e-5)``. + act_cfg (dict): Config dict for activation layer. + Defaults to ``dict(type='Swish')``. + """ + + def __init__(self, + in_channels, + out_channels, + expansion=2, + add_identity=True, + use_depthwise=False, + conv_cfg=None, + drop_path_rate=0, + norm_cfg=dict(type='BN', eps=1e-5), + act_cfg=dict(type='LeakyReLU', inplace=True), + init_cfg=None): + super().__init__(init_cfg) + hidden_channels = int(out_channels / expansion) + conv = DepthwiseSeparableConvModule if use_depthwise else ConvModule + self.conv1 = ConvModule( + in_channels, + hidden_channels, + 1, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg) + self.conv2 = conv( + hidden_channels, + out_channels, + 3, + stride=1, + padding=1, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg) + self.add_identity = \ + add_identity and in_channels == out_channels + + self.drop_path = DropPath(drop_prob=drop_path_rate + ) if drop_path_rate > eps else nn.Identity() + + def forward(self, x): + identity = x + out = self.conv1(x) + out = self.conv2(out) + out = self.drop_path(out) + + if self.add_identity: + return out + identity + else: + return out + + +class CSPStage(BaseModule): + """Cross Stage Partial Stage. + + .. code:: text + + Downsample Convolution (optional) + | + | + Expand Convolution + | + | + Split to xa, xb + | \ + | \ + | blocks(xb) + | / + | / transition + | / + Concat xa, blocks(xb) + | + Transition Convolution + + Args: + block_fn (nn.module): The basic block function in the Stage. + in_channels (int): The input channels of the CSP layer. + out_channels (int): The output channels of the CSP layer. + has_downsampler (bool): Whether to add a downsampler in the stage. + Default: False. + down_growth (bool): Whether to expand the channels in the + downsampler layer of the stage. Default: False. + expand_ratio (float): The expand ratio to adjust the number of + channels of the expand conv layer. Default: 0.5 + bottle_ratio (float): Ratio to adjust the number of channels of the + hidden layer. Default: 0.5 + block_dpr (float): The ratio of the drop path layer in the + blocks of the stage. Default: 0. + num_blocks (int): Number of blocks. Default: 1 + conv_cfg (dict, optional): Config dict for convolution layer. + Default: None, which means using conv2d. + norm_cfg (dict): Config dict for normalization layer. + Default: dict(type='BN') + act_cfg (dict): Config dict for activation layer. 
+ Default: dict(type='LeakyReLU', inplace=True) + """ + + def __init__(self, + block_fn, + in_channels, + out_channels, + has_downsampler=True, + down_growth=False, + expand_ratio=0.5, + bottle_ratio=2, + num_blocks=1, + block_dpr=0, + block_args={}, + conv_cfg=None, + norm_cfg=dict(type='BN', eps=1e-5), + act_cfg=dict(type='LeakyReLU', inplace=True), + init_cfg=None): + super().__init__(init_cfg) + # grow downsample channels to output channels + down_channels = out_channels if down_growth else in_channels + block_dpr = to_ntuple(num_blocks)(block_dpr) + + if has_downsampler: + self.downsample_conv = ConvModule( + in_channels=in_channels, + out_channels=down_channels, + kernel_size=3, + stride=2, + padding=1, + groups=32 if block_fn is ResNeXtBottleneck else 1, + norm_cfg=norm_cfg, + act_cfg=act_cfg) + else: + self.downsample_conv = nn.Identity() + + exp_channels = int(down_channels * expand_ratio) + self.expand_conv = ConvModule( + in_channels=down_channels, + out_channels=exp_channels, + kernel_size=1, + norm_cfg=norm_cfg, + act_cfg=act_cfg if block_fn is DarknetBottleneck else None) + + assert exp_channels % 2 == 0, \ + 'The channel number before blocks must be divisible by 2.' + block_channels = exp_channels // 2 + blocks = [] + for i in range(num_blocks): + block_cfg = dict( + in_channels=block_channels, + out_channels=block_channels, + expansion=bottle_ratio, + drop_path_rate=block_dpr[i], + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg, + **block_args) + blocks.append(block_fn(**block_cfg)) + self.blocks = Sequential(*blocks) + self.atfer_blocks_conv = ConvModule( + block_channels, + block_channels, + 1, + norm_cfg=norm_cfg, + act_cfg=act_cfg) + + self.final_conv = ConvModule( + 2 * block_channels, + out_channels, + 1, + norm_cfg=norm_cfg, + act_cfg=act_cfg) + + def forward(self, x): + x = self.downsample_conv(x) + x = self.expand_conv(x) + + split = x.shape[1] // 2 + xa, xb = x[:, :split], x[:, split:] + + xb = self.blocks(xb) + xb = self.atfer_blocks_conv(xb).contiguous() + + x_final = torch.cat((xa, xb), dim=1) + return self.final_conv(x_final) + + +class CSPNet(BaseModule): + """The abstract CSP Network class. + + A Pytorch implementation of `CSPNet: A New Backbone that can Enhance + Learning Capability of CNN `_ + + This class is an abstract class because the Cross Stage Partial Network + (CSPNet) is a kind of universal network structure, and you + network block to implement networks like CSPResNet, CSPResNeXt and + CSPDarkNet. + + Args: + arch (dict): The architecture of the CSPNet. + It should have the following keys: + + - block_fn (Callable): A function or class to return a block + module, and it should accept at least ``in_channels``, + ``out_channels``, ``expansion``, ``drop_path_rate``, ``norm_cfg`` + and ``act_cfg``. + - in_channels (Tuple[int]): The number of input channels of each + stage. + - out_channels (Tuple[int]): The number of output channels of each + stage. + - num_blocks (Tuple[int]): The number of blocks in each stage. + - expansion_ratio (float | Tuple[float]): The expansion ratio in + the expand convolution of each stage. Defaults to 0.5. + - bottle_ratio (float | Tuple[float]): The expansion ratio of + blocks in each stage. Defaults to 2. + - has_downsampler (bool | Tuple[bool]): Whether to add a + downsample convolution in each stage. Defaults to True + - down_growth (bool | Tuple[bool]): Whether to expand the channels + in the downsampler layer of each stage. Defaults to False. 
+ - block_args (dict | Tuple[dict], optional): The extra arguments to + the blocks in each stage. Defaults to None. + + stem_fn (Callable): A function or class to return a stem module. + And it should accept ``in_channels``. + in_channels (int): Number of input image channels. Defaults to 3. + out_indices (int | Sequence[int]): Output from which stages. + Defaults to -1, which means the last stage. + frozen_stages (int): Stages to be frozen (stop grad and set eval mode). + -1 means not freezing any parameters. Defaults to -1. + conv_cfg (dict, optional): The config dict for conv layers in blocks. + Defaults to None, which means use Conv2d. + norm_cfg (dict): The config dict for norm layers. + Defaults to ``dict(type='BN', eps=1e-5)``. + act_cfg (dict): The config dict for activation functions. + Defaults to ``dict(type='LeakyReLU', inplace=True)``. + norm_eval (bool): Whether to set norm layers to eval mode, namely, + freeze running stats (mean and var). Note: Effect on Batch Norm + and its variants only. Defaults to False. + init_cfg (dict, optional): The initialization settings. + Defaults to ``dict(type='Kaiming', layer='Conv2d'))``. + + Example: + >>> from functools import partial + >>> import torch + >>> import torch.nn as nn + >>> from mmpretrain.models import CSPNet + >>> from mmpretrain.models.backbones.resnet import Bottleneck + >>> + >>> # A simple example to build CSPNet. + >>> arch = dict( + ... block_fn=Bottleneck, + ... in_channels=[32, 64], + ... out_channels=[64, 128], + ... num_blocks=[3, 4] + ... ) + >>> stem_fn = partial(nn.Conv2d, out_channels=32, kernel_size=3) + >>> model = CSPNet(arch=arch, stem_fn=stem_fn, out_indices=(0, 1)) + >>> inputs = torch.rand(1, 3, 224, 224) + >>> outs = model(inputs) + >>> for out in outs: + ... print(out.shape) + ... + (1, 64, 111, 111) + (1, 128, 56, 56) + """ + + def __init__(self, + arch, + stem_fn, + in_channels=3, + out_indices=-1, + frozen_stages=-1, + drop_path_rate=0., + conv_cfg=None, + norm_cfg=dict(type='BN', eps=1e-5), + act_cfg=dict(type='LeakyReLU', inplace=True), + norm_eval=False, + init_cfg=dict(type='Kaiming', layer='Conv2d')): + super().__init__(init_cfg=init_cfg) + self.arch = self.expand_arch(arch) + self.num_stages = len(self.arch['in_channels']) + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.act_cfg = act_cfg + self.norm_eval = norm_eval + if frozen_stages not in range(-1, self.num_stages): + raise ValueError('frozen_stages must be in range(-1, ' + f'{self.num_stages}). But received ' + f'{frozen_stages}') + self.frozen_stages = frozen_stages + + self.stem = stem_fn(in_channels) + + stages = [] + depths = self.arch['num_blocks'] + dpr = torch.linspace(0, drop_path_rate, sum(depths)).split(depths) + + for i in range(self.num_stages): + stage_cfg = {k: v[i] for k, v in self.arch.items()} + csp_stage = CSPStage( + **stage_cfg, + block_dpr=dpr[i].tolist(), + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg, + init_cfg=init_cfg) + stages.append(csp_stage) + self.stages = Sequential(*stages) + + if isinstance(out_indices, int): + out_indices = [out_indices] + assert isinstance(out_indices, Sequence), \ + f'"out_indices" must by a sequence or int, ' \ + f'get {type(out_indices)} instead.' + out_indices = list(out_indices) + for i, index in enumerate(out_indices): + if index < 0: + out_indices[i] = len(self.stages) + index + assert 0 <= out_indices[i] <= len(self.stages), \ + f'Invalid out_indices {index}.' 
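The channel split that gives CSPStage (defined above) its "cross stage partial" behaviour, shown in isolation with an example tensor and the block stack replaced by a dummy op: only half of the expanded channels pass through the residual blocks, the other half bypasses them, and both halves are concatenated before the transition convolution.

import torch

x = torch.randn(2, 128, 28, 28)         # pretend output of expand_conv (example size)
split = x.shape[1] // 2
xa, xb = x[:, :split], x[:, split:]     # (2, 64, 28, 28) each
xb = xb * 2.0                           # stand-in for `self.blocks(xb)` and atfer_blocks_conv
x_final = torch.cat((xa, xb), dim=1)    # (2, 128, 28, 28), input to final_conv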
+ self.out_indices = out_indices + + @staticmethod + def expand_arch(arch): + num_stages = len(arch['in_channels']) + + def to_tuple(x, name=''): + if isinstance(x, (list, tuple)): + assert len(x) == num_stages, \ + f'The length of {name} ({len(x)}) does not ' \ + f'equals to the number of stages ({num_stages})' + return tuple(x) + else: + return (x, ) * num_stages + + full_arch = {k: to_tuple(v, k) for k, v in arch.items()} + if 'block_args' not in full_arch: + full_arch['block_args'] = to_tuple({}) + return full_arch + + def _freeze_stages(self): + if self.frozen_stages >= 0: + self.stem.eval() + for param in self.stem.parameters(): + param.requires_grad = False + + for i in range(self.frozen_stages + 1): + m = self.stages[i] + m.eval() + for param in m.parameters(): + param.requires_grad = False + + def train(self, mode=True): + super(CSPNet, self).train(mode) + self._freeze_stages() + if mode and self.norm_eval: + for m in self.modules(): + if isinstance(m, _BatchNorm): + m.eval() + + def forward(self, x): + outs = [] + + x = self.stem(x) + for i, stage in enumerate(self.stages): + x = stage(x) + if i in self.out_indices: + outs.append(x) + return tuple(outs) + + +@MODELS.register_module() +class CSPDarkNet(CSPNet): + """CSP-Darknet backbone used in YOLOv4. + + Args: + depth (int): Depth of CSP-Darknet. Default: 53. + in_channels (int): Number of input image channels. Default: 3. + out_indices (Sequence[int]): Output from which stages. + Default: (3, ). + frozen_stages (int): Stages to be frozen (stop grad and set eval + mode). -1 means not freezing any parameters. Default: -1. + conv_cfg (dict): Config dict for convolution layer. Default: None. + norm_cfg (dict): Dictionary to construct and config norm layer. + Default: dict(type='BN', requires_grad=True). + act_cfg (dict): Config dict for activation layer. + Default: dict(type='LeakyReLU', negative_slope=0.1). + norm_eval (bool): Whether to set norm layers to eval mode, namely, + freeze running stats (mean and var). Note: Effect on Batch Norm + and its variants only. + init_cfg (dict or list[dict], optional): Initialization config dict. + Default: None. + + Example: + >>> from mmpretrain.models import CSPDarkNet + >>> import torch + >>> model = CSPDarkNet(depth=53, out_indices=(0, 1, 2, 3, 4)) + >>> model.eval() + >>> inputs = torch.rand(1, 3, 416, 416) + >>> level_outputs = model(inputs) + >>> for level_out in level_outputs: + ... print(tuple(level_out.shape)) + ... + (1, 64, 208, 208) + (1, 128, 104, 104) + (1, 256, 52, 52) + (1, 512, 26, 26) + (1, 1024, 13, 13) + """ + arch_settings = { + 53: + dict( + block_fn=DarknetBottleneck, + in_channels=(32, 64, 128, 256, 512), + out_channels=(64, 128, 256, 512, 1024), + num_blocks=(1, 2, 8, 8, 4), + expand_ratio=(2, 1, 1, 1, 1), + bottle_ratio=(2, 1, 1, 1, 1), + has_downsampler=True, + down_growth=True, + ), + } + + def __init__(self, + depth, + in_channels=3, + out_indices=(4, ), + frozen_stages=-1, + conv_cfg=None, + norm_cfg=dict(type='BN', eps=1e-5), + act_cfg=dict(type='LeakyReLU', inplace=True), + norm_eval=False, + init_cfg=dict( + type='Kaiming', + layer='Conv2d', + a=math.sqrt(5), + distribution='uniform', + mode='fan_in', + nonlinearity='leaky_relu')): + + assert depth in self.arch_settings, 'depth must be one of ' \ + f'{list(self.arch_settings.keys())}, but get {depth}.' 
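To make the per-stage configuration concrete, a small sketch of what `CSPNet.expand_arch` (above) does before the stages are built; the toy arch below is made up purely for illustration, and the import path follows the docstring example above. Scalar entries are broadcast to one value per stage and a missing `block_args` becomes empty dicts, so stage `i` can then be configured with `{k: v[i] for k, v in self.arch.items()}`.

from mmpretrain.models import CSPNet   # import path as in the docstring example above

toy_arch = dict(
    block_fn=None,                 # placeholder; a real arch passes a block class
    in_channels=(32, 64),
    out_channels=(64, 128),
    num_blocks=(1, 2),
    has_downsampler=True,          # scalar: applies to every stage
)
expanded = CSPNet.expand_arch(toy_arch)
assert expanded['has_downsampler'] == (True, True)
assert expanded['block_args'] == ({}, {})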
+ + super().__init__( + arch=self.arch_settings[depth], + stem_fn=self._make_stem_layer, + in_channels=in_channels, + out_indices=out_indices, + frozen_stages=frozen_stages, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg, + norm_eval=norm_eval, + init_cfg=init_cfg) + + def _make_stem_layer(self, in_channels): + """using a stride=1 conv as the stem in CSPDarknet.""" + # `stem_channels` equals to the `in_channels` in the first stage. + stem_channels = self.arch['in_channels'][0] + stem = ConvModule( + in_channels=in_channels, + out_channels=stem_channels, + kernel_size=3, + padding=1, + norm_cfg=self.norm_cfg, + act_cfg=self.act_cfg) + return stem + + +@MODELS.register_module() +class CSPResNet(CSPNet): + """CSP-ResNet backbone. + + Args: + depth (int): Depth of CSP-ResNet. Default: 50. + out_indices (Sequence[int]): Output from which stages. + Default: (4, ). + frozen_stages (int): Stages to be frozen (stop grad and set eval + mode). -1 means not freezing any parameters. Default: -1. + conv_cfg (dict): Config dict for convolution layer. Default: None. + norm_cfg (dict): Dictionary to construct and config norm layer. + Default: dict(type='BN', requires_grad=True). + act_cfg (dict): Config dict for activation layer. + Default: dict(type='LeakyReLU', negative_slope=0.1). + norm_eval (bool): Whether to set norm layers to eval mode, namely, + freeze running stats (mean and var). Note: Effect on Batch Norm + and its variants only. + init_cfg (dict or list[dict], optional): Initialization config dict. + Default: None. + Example: + >>> from mmpretrain.models import CSPResNet + >>> import torch + >>> model = CSPResNet(depth=50, out_indices=(0, 1, 2, 3)) + >>> model.eval() + >>> inputs = torch.rand(1, 3, 416, 416) + >>> level_outputs = model(inputs) + >>> for level_out in level_outputs: + ... print(tuple(level_out.shape)) + ... + (1, 128, 104, 104) + (1, 256, 52, 52) + (1, 512, 26, 26) + (1, 1024, 13, 13) + """ + arch_settings = { + 50: + dict( + block_fn=ResNetBottleneck, + in_channels=(64, 128, 256, 512), + out_channels=(128, 256, 512, 1024), + num_blocks=(3, 3, 5, 2), + expand_ratio=4, + bottle_ratio=2, + has_downsampler=(False, True, True, True), + down_growth=False), + } + + def __init__(self, + depth, + in_channels=3, + out_indices=(3, ), + frozen_stages=-1, + deep_stem=False, + conv_cfg=None, + norm_cfg=dict(type='BN', eps=1e-5), + act_cfg=dict(type='LeakyReLU', inplace=True), + norm_eval=False, + init_cfg=dict(type='Kaiming', layer='Conv2d')): + assert depth in self.arch_settings, 'depth must be one of ' \ + f'{list(self.arch_settings.keys())}, but get {depth}.' + self.deep_stem = deep_stem + + super().__init__( + arch=self.arch_settings[depth], + stem_fn=self._make_stem_layer, + in_channels=in_channels, + out_indices=out_indices, + frozen_stages=frozen_stages, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg, + norm_eval=norm_eval, + init_cfg=init_cfg) + + def _make_stem_layer(self, in_channels): + # `stem_channels` equals to the `in_channels` in the first stage. 
+ stem_channels = self.arch['in_channels'][0] + if self.deep_stem: + stem = nn.Sequential( + ConvModule( + in_channels, + stem_channels // 2, + kernel_size=3, + stride=2, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + act_cfg=self.act_cfg), + ConvModule( + stem_channels // 2, + stem_channels // 2, + kernel_size=3, + stride=1, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + act_cfg=self.act_cfg), + ConvModule( + stem_channels // 2, + stem_channels, + kernel_size=3, + stride=1, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + act_cfg=self.act_cfg)) + else: + stem = nn.Sequential( + ConvModule( + in_channels, + stem_channels, + kernel_size=7, + stride=2, + padding=3, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + act_cfg=self.act_cfg), + nn.MaxPool2d(kernel_size=3, stride=2, padding=1)) + return stem + + +@MODELS.register_module() +class CSPResNeXt(CSPResNet): + """CSP-ResNeXt backbone. + + Args: + depth (int): Depth of CSP-ResNeXt. Default: 50. + out_indices (Sequence[int]): Output from which stages. + Default: (4, ). + frozen_stages (int): Stages to be frozen (stop grad and set eval + mode). -1 means not freezing any parameters. Default: -1. + conv_cfg (dict): Config dict for convolution layer. Default: None. + norm_cfg (dict): Dictionary to construct and config norm layer. + Default: dict(type='BN', requires_grad=True). + act_cfg (dict): Config dict for activation layer. + Default: dict(type='LeakyReLU', negative_slope=0.1). + norm_eval (bool): Whether to set norm layers to eval mode, namely, + freeze running stats (mean and var). Note: Effect on Batch Norm + and its variants only. + init_cfg (dict or list[dict], optional): Initialization config dict. + Default: None. + Example: + >>> from mmpretrain.models import CSPResNeXt + >>> import torch + >>> model = CSPResNeXt(depth=50, out_indices=(0, 1, 2, 3)) + >>> model.eval() + >>> inputs = torch.rand(1, 3, 224, 224) + >>> level_outputs = model(inputs) + >>> for level_out in level_outputs: + ... print(tuple(level_out.shape)) + ... + (1, 256, 56, 56) + (1, 512, 28, 28) + (1, 1024, 14, 14) + (1, 2048, 7, 7) + """ + arch_settings = { + 50: + dict( + block_fn=ResNeXtBottleneck, + in_channels=(64, 256, 512, 1024), + out_channels=(256, 512, 1024, 2048), + num_blocks=(3, 3, 5, 2), + expand_ratio=(4, 2, 2, 2), + bottle_ratio=4, + has_downsampler=(False, True, True, True), + down_growth=False, + # the base_channels is changed from 64 to 32 in CSPNet + block_args=dict(base_channels=32), + ), + } + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) diff --git a/mmpretrain/models/backbones/davit.py b/mmpretrain/models/backbones/davit.py new file mode 100644 index 0000000000000000000000000000000000000000..cf25e2ed7137fb403e38801b50b355c4306331d6 --- /dev/null +++ b/mmpretrain/models/backbones/davit.py @@ -0,0 +1,834 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
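Before moving on, a quick shape check of the CSPResNet stem defined just above (non-`deep_stem` branch; the BN and activation inside ConvModule are omitted here since they do not change shapes): the stride-2 7x7 conv plus the stride-2 max-pool reduce resolution by 4 before the first stage, which is where the 104x104 maps in the docstring example come from for a 416x416 input.

import torch
import torch.nn as nn

stem = nn.Sequential(
    nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3),   # 416 -> 208
    nn.MaxPool2d(kernel_size=3, stride=2, padding=1),        # 208 -> 104
)
print(stem(torch.rand(1, 3, 416, 416)).shape)                # (1, 64, 104, 104)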
+from copy import deepcopy +from typing import Sequence, Tuple + +import torch +import torch.nn as nn +import torch.utils.checkpoint as cp +from mmcv.cnn import build_conv_layer, build_norm_layer +from mmcv.cnn.bricks import Conv2d +from mmcv.cnn.bricks.transformer import FFN, AdaptivePadding, PatchEmbed +from mmengine.model import BaseModule, ModuleList +from mmengine.utils import to_2tuple +from mmengine.utils.dl_utils.parrots_wrapper import _BatchNorm + +from mmpretrain.models.backbones.base_backbone import BaseBackbone +from mmpretrain.registry import MODELS +from ..utils import ShiftWindowMSA + + +class DaViTWindowMSA(BaseModule): + """Window based multi-head self-attention (W-MSA) module for DaViT. + + The differences between DaViTWindowMSA & WindowMSA: + 1. Without relative position bias. + + Args: + embed_dims (int): Number of input channels. + window_size (tuple[int]): The height and width of the window. + num_heads (int): Number of attention heads. + qkv_bias (bool, optional): If True, add a learnable bias to q, k, v. + Defaults to True. + qk_scale (float, optional): Override default qk scale of + ``head_dim ** -0.5`` if set. Defaults to None. + attn_drop (float, optional): Dropout ratio of attention weight. + Defaults to 0. + proj_drop (float, optional): Dropout ratio of output. Defaults to 0. + init_cfg (dict, optional): The extra config for initialization. + Defaults to None. + """ + + def __init__(self, + embed_dims, + window_size, + num_heads, + qkv_bias=True, + qk_scale=None, + attn_drop=0., + proj_drop=0., + init_cfg=None): + + super().__init__(init_cfg) + self.embed_dims = embed_dims + self.window_size = window_size # Wh, Ww + self.num_heads = num_heads + head_embed_dims = embed_dims // num_heads + self.scale = qk_scale or head_embed_dims**-0.5 + + self.qkv = nn.Linear(embed_dims, embed_dims * 3, bias=qkv_bias) + self.attn_drop = nn.Dropout(attn_drop) + self.proj = nn.Linear(embed_dims, embed_dims) + self.proj_drop = nn.Dropout(proj_drop) + + self.softmax = nn.Softmax(dim=-1) + + def forward(self, x, mask=None): + """ + Args: + + x (tensor): input features with shape of (num_windows*B, N, C) + mask (tensor, Optional): mask with shape of (num_windows, Wh*Ww, + Wh*Ww), value should be between (-inf, 0]. + """ + B_, N, C = x.shape + qkv = self.qkv(x).reshape(B_, N, 3, self.num_heads, + C // self.num_heads).permute(2, 0, 3, 1, 4) + q, k, v = qkv[0], qkv[1], qkv[ + 2] # make torchscript happy (cannot use tensor as tuple) + + q = q * self.scale + attn = (q @ k.transpose(-2, -1)) + + if mask is not None: + nW = mask.shape[0] + attn = attn.view(B_ // nW, nW, self.num_heads, N, + N) + mask.unsqueeze(1).unsqueeze(0) + attn = attn.view(-1, self.num_heads, N, N) + attn = self.softmax(attn) + else: + attn = self.softmax(attn) + + attn = self.attn_drop(attn) + + x = (attn @ v).transpose(1, 2).reshape(B_, N, C) + x = self.proj(x) + x = self.proj_drop(x) + return x + + @staticmethod + def double_step_seq(step1, len1, step2, len2): + seq1 = torch.arange(0, step1 * len1, step1) + seq2 = torch.arange(0, step2 * len2, step2) + return (seq1[:, None] + seq2[None, :]).reshape(1, -1) + + +class ConvPosEnc(BaseModule): + """DaViT conv pos encode block. + + Args: + embed_dims (int): Number of input channels. + kernel_size (int): The kernel size of the first convolution. + Defaults to 3. + init_cfg (dict, optional): The extra config for initialization. + Defaults to None. 
+    """ + + def __init__(self, embed_dims, kernel_size=3, init_cfg=None): + super(ConvPosEnc, self).__init__(init_cfg) + self.proj = Conv2d( + embed_dims, + embed_dims, + kernel_size, + stride=1, + padding=kernel_size // 2, + groups=embed_dims) + + def forward(self, x, size: Tuple[int, int]): + B, N, C = x.shape + H, W = size + assert N == H * W + + feat = x.transpose(1, 2).view(B, C, H, W) + feat = self.proj(feat) + feat = feat.flatten(2).transpose(1, 2) + x = x + feat + return x + + +class DaViTDownSample(BaseModule): + """DaViT downsample block. + + Args: + in_channels (int): The number of input channels. + out_channels (int): The number of output channels. + conv_type (str): The type of convolution + to generate patch embedding. Default: "Conv2d". + kernel_size (int): The kernel size of the first convolution. + Defaults to 2. + stride (int): The stride of the second convolution module. + Defaults to 2. + padding (int | tuple | string): The padding length of + embedding conv. When it is a string, it means the mode + of adaptive padding, support "same" and "corner" now. + Defaults to "corner". + dilation (int): Dilation of the convolution layers. Defaults to 1. + bias (bool): Bias of embed conv. Default: True. + norm_cfg (dict, optional): Config dict for normalization layer. + Defaults to ``dict(type='LN')``. + init_cfg (dict, optional): The extra config for initialization. + Defaults to None. + """ + + def __init__(self, + in_channels, + out_channels, + conv_type='Conv2d', + kernel_size=2, + stride=2, + padding='same', + dilation=1, + bias=True, + norm_cfg=None, + init_cfg=None): + super().__init__(init_cfg=init_cfg) + self.out_channels = out_channels + if stride is None: + stride = kernel_size + + kernel_size = to_2tuple(kernel_size) + stride = to_2tuple(stride) + dilation = to_2tuple(dilation) + + if isinstance(padding, str): + self.adaptive_padding = AdaptivePadding( + kernel_size=kernel_size, + stride=stride, + dilation=dilation, + padding=padding) + # disable the padding of conv + padding = 0 + else: + self.adaptive_padding = None + padding = to_2tuple(padding) + + self.projection = build_conv_layer( + dict(type=conv_type), + in_channels=in_channels, + out_channels=out_channels, + kernel_size=kernel_size, + stride=stride, + padding=padding, + dilation=dilation, + bias=bias) + + if norm_cfg is not None: + self.norm = build_norm_layer(norm_cfg, in_channels)[1] + else: + self.norm = None + + def forward(self, x, input_size): + if self.adaptive_padding: + x = self.adaptive_padding(x) + H, W = input_size + B, L, C = x.shape + assert L == H * W, 'input feature has wrong size' + + x = self.norm(x) + x = x.reshape(B, H, W, C).permute(0, 3, 1, 2).contiguous() + + x = self.projection(x) + output_size = (x.size(2), x.size(3)) + x = x.flatten(2).transpose(1, 2) + return x, output_size + + +class ChannelAttention(BaseModule): + """DaViT channel attention. + + Args: + embed_dims (int): Number of input channels. + num_heads (int): Number of attention heads. + qkv_bias (bool): enable bias for qkv if True. Defaults to False. + init_cfg (dict, optional): The extra config for initialization. + Defaults to None.
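+
+    Example:
+        A minimal shape-check sketch (illustrative only; ``embed_dims`` must
+        be divisible by ``num_heads``):
+
+        >>> import torch
+        >>> attn = ChannelAttention(embed_dims=64, num_heads=8)
+        >>> tuple(attn(torch.rand(2, 49, 64)).shape)
+        (2, 49, 64)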
+ """ + + def __init__(self, embed_dims, num_heads=8, qkv_bias=False, init_cfg=None): + super().__init__(init_cfg) + self.embed_dims = embed_dims + self.num_heads = num_heads + self.head_dims = embed_dims // num_heads + self.scale = self.head_dims**-0.5 + + self.qkv = nn.Linear(embed_dims, embed_dims * 3, bias=qkv_bias) + self.proj = nn.Linear(embed_dims, embed_dims) + + def forward(self, x): + B, N, _ = x.shape + + qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, + self.head_dims).permute(2, 0, 3, 1, 4) + q, k, v = qkv[0], qkv[1], qkv[2] + + k = k * self.scale + attention = k.transpose(-1, -2) @ v + attention = attention.softmax(dim=-1) + + x = (attention @ q.transpose(-1, -2)).transpose(-1, -2) + x = x.transpose(1, 2).reshape(B, N, self.embed_dims) + x = self.proj(x) + return x + + +class ChannelBlock(BaseModule): + """DaViT channel attention block. + + Args: + embed_dims (int): Number of input channels. + num_heads (int): Number of attention heads. + window_size (int): The height and width of the window. Defaults to 7. + ffn_ratio (float): The expansion ratio of feedforward network hidden + layer channels. Defaults to 4. + qkv_bias (bool): enable bias for qkv if True. Defaults to True. + drop_path (float): The drop path rate after attention and ffn. + Defaults to 0. + ffn_cfgs (dict): The extra config of FFN. Defaults to empty dict. + norm_cfg (dict): The config of norm layers. + Defaults to ``dict(type='LN')``. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. Defaults to False. + init_cfg (dict, optional): The extra config for initialization. + Defaults to None. + """ + + def __init__(self, + embed_dims, + num_heads, + ffn_ratio=4., + qkv_bias=False, + drop_path=0., + ffn_cfgs=dict(), + norm_cfg=dict(type='LN'), + with_cp=False, + init_cfg=None): + super().__init__(init_cfg) + self.with_cp = with_cp + + self.cpe1 = ConvPosEnc(embed_dims=embed_dims, kernel_size=3) + self.norm1 = build_norm_layer(norm_cfg, embed_dims)[1] + self.attn = ChannelAttention( + embed_dims, num_heads=num_heads, qkv_bias=qkv_bias) + self.cpe2 = ConvPosEnc(embed_dims=embed_dims, kernel_size=3) + + _ffn_cfgs = { + 'embed_dims': embed_dims, + 'feedforward_channels': int(embed_dims * ffn_ratio), + 'num_fcs': 2, + 'ffn_drop': 0, + 'dropout_layer': dict(type='DropPath', drop_prob=drop_path), + 'act_cfg': dict(type='GELU'), + **ffn_cfgs + } + self.norm2 = build_norm_layer(norm_cfg, embed_dims)[1] + self.ffn = FFN(**_ffn_cfgs) + + def forward(self, x, hw_shape): + + def _inner_forward(x): + x = self.cpe1(x, hw_shape) + identity = x + x = self.norm1(x) + x = self.attn(x) + x = x + identity + + x = self.cpe2(x, hw_shape) + identity = x + x = self.norm2(x) + x = self.ffn(x, identity=identity) + + return x + + if self.with_cp and x.requires_grad: + x = cp.checkpoint(_inner_forward, x) + else: + x = _inner_forward(x) + + return x + + +class SpatialBlock(BaseModule): + """DaViT spatial attention block. + + Args: + embed_dims (int): Number of input channels. + num_heads (int): Number of attention heads. + window_size (int): The height and width of the window. Defaults to 7. + ffn_ratio (float): The expansion ratio of feedforward network hidden + layer channels. Defaults to 4. + qkv_bias (bool): enable bias for qkv if True. Defaults to True. + drop_path (float): The drop path rate after attention and ffn. + Defaults to 0. + pad_small_map (bool): If True, pad the small feature map to the window + size, which is common used in detection and segmentation. 
If False, + avoid shifting window and shrink the window size to the size of + feature map, which is common used in classification. + Defaults to False. + attn_cfgs (dict): The extra config of Shift Window-MSA. + Defaults to empty dict. + ffn_cfgs (dict): The extra config of FFN. Defaults to empty dict. + norm_cfg (dict): The config of norm layers. + Defaults to ``dict(type='LN')``. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. Defaults to False. + init_cfg (dict, optional): The extra config for initialization. + Defaults to None. + """ + + def __init__(self, + embed_dims, + num_heads, + window_size=7, + ffn_ratio=4., + qkv_bias=True, + drop_path=0., + pad_small_map=False, + attn_cfgs=dict(), + ffn_cfgs=dict(), + norm_cfg=dict(type='LN'), + with_cp=False, + init_cfg=None): + + super(SpatialBlock, self).__init__(init_cfg) + self.with_cp = with_cp + + self.cpe1 = ConvPosEnc(embed_dims=embed_dims, kernel_size=3) + self.norm1 = build_norm_layer(norm_cfg, embed_dims)[1] + _attn_cfgs = { + 'embed_dims': embed_dims, + 'num_heads': num_heads, + 'shift_size': 0, + 'window_size': window_size, + 'dropout_layer': dict(type='DropPath', drop_prob=drop_path), + 'qkv_bias': qkv_bias, + 'pad_small_map': pad_small_map, + 'window_msa': DaViTWindowMSA, + **attn_cfgs + } + self.attn = ShiftWindowMSA(**_attn_cfgs) + self.cpe2 = ConvPosEnc(embed_dims=embed_dims, kernel_size=3) + + _ffn_cfgs = { + 'embed_dims': embed_dims, + 'feedforward_channels': int(embed_dims * ffn_ratio), + 'num_fcs': 2, + 'ffn_drop': 0, + 'dropout_layer': dict(type='DropPath', drop_prob=drop_path), + 'act_cfg': dict(type='GELU'), + **ffn_cfgs + } + self.norm2 = build_norm_layer(norm_cfg, embed_dims)[1] + self.ffn = FFN(**_ffn_cfgs) + + def forward(self, x, hw_shape): + + def _inner_forward(x): + x = self.cpe1(x, hw_shape) + identity = x + x = self.norm1(x) + x = self.attn(x, hw_shape) + x = x + identity + + x = self.cpe2(x, hw_shape) + identity = x + x = self.norm2(x) + x = self.ffn(x, identity=identity) + + return x + + if self.with_cp and x.requires_grad: + x = cp.checkpoint(_inner_forward, x) + else: + x = _inner_forward(x) + + return x + + +class DaViTBlock(BaseModule): + """DaViT block. + + Args: + embed_dims (int): Number of input channels. + num_heads (int): Number of attention heads. + window_size (int): The height and width of the window. Defaults to 7. + ffn_ratio (float): The expansion ratio of feedforward network hidden + layer channels. Defaults to 4. + qkv_bias (bool): enable bias for qkv if True. Defaults to True. + drop_path (float): The drop path rate after attention and ffn. + Defaults to 0. + pad_small_map (bool): If True, pad the small feature map to the window + size, which is common used in detection and segmentation. If False, + avoid shifting window and shrink the window size to the size of + feature map, which is common used in classification. + Defaults to False. + attn_cfgs (dict): The extra config of Shift Window-MSA. + Defaults to empty dict. + ffn_cfgs (dict): The extra config of FFN. Defaults to empty dict. + norm_cfg (dict): The config of norm layers. + Defaults to ``dict(type='LN')``. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. Defaults to False. + init_cfg (dict, optional): The extra config for initialization. + Defaults to None. 
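+
+    Example:
+        A minimal shape-check sketch (illustrative only; the token length
+        must equal ``hw_shape[0] * hw_shape[1]``):
+
+        >>> import torch
+        >>> block = DaViTBlock(embed_dims=96, num_heads=3)
+        >>> x = torch.rand(1, 56 * 56, 96)
+        >>> tuple(block(x, hw_shape=(56, 56)).shape)
+        (1, 3136, 96)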
+ """ + + def __init__(self, + embed_dims, + num_heads, + window_size=7, + ffn_ratio=4., + qkv_bias=True, + drop_path=0., + pad_small_map=False, + attn_cfgs=dict(), + ffn_cfgs=dict(), + norm_cfg=dict(type='LN'), + with_cp=False, + init_cfg=None): + + super(DaViTBlock, self).__init__(init_cfg) + self.spatial_block = SpatialBlock( + embed_dims, + num_heads, + window_size=window_size, + ffn_ratio=ffn_ratio, + qkv_bias=qkv_bias, + drop_path=drop_path, + pad_small_map=pad_small_map, + attn_cfgs=attn_cfgs, + ffn_cfgs=ffn_cfgs, + norm_cfg=norm_cfg, + with_cp=with_cp) + self.channel_block = ChannelBlock( + embed_dims, + num_heads, + ffn_ratio=ffn_ratio, + qkv_bias=qkv_bias, + drop_path=drop_path, + ffn_cfgs=ffn_cfgs, + norm_cfg=norm_cfg, + with_cp=False) + + def forward(self, x, hw_shape): + x = self.spatial_block(x, hw_shape) + x = self.channel_block(x, hw_shape) + + return x + + +class DaViTBlockSequence(BaseModule): + """Module with successive DaViT blocks and downsample layer. + + Args: + embed_dims (int): Number of input channels. + depth (int): Number of successive DaViT blocks. + num_heads (int): Number of attention heads. + window_size (int): The height and width of the window. Defaults to 7. + ffn_ratio (float): The expansion ratio of feedforward network hidden + layer channels. Defaults to 4. + qkv_bias (bool): enable bias for qkv if True. Defaults to True. + downsample (bool): Downsample the output of blocks by patch merging. + Defaults to False. + downsample_cfg (dict): The extra config of the patch merging layer. + Defaults to empty dict. + drop_paths (Sequence[float] | float): The drop path rate in each block. + Defaults to 0. + block_cfgs (Sequence[dict] | dict): The extra config of each block. + Defaults to empty dicts. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. Defaults to False. + pad_small_map (bool): If True, pad the small feature map to the window + size, which is common used in detection and segmentation. If False, + avoid shifting window and shrink the window size to the size of + feature map, which is common used in classification. + Defaults to False. + init_cfg (dict, optional): The extra config for initialization. + Defaults to None. 
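+
+    Example:
+        A minimal sketch (illustrative only) of how the optional downsample
+        halves the resolution and doubles the channels:
+
+        >>> import torch
+        >>> stage = DaViTBlockSequence(
+        ...     embed_dims=96, depth=1, num_heads=3, downsample=True)
+        >>> x = torch.rand(1, 56 * 56, 96)
+        >>> x, out_shape = stage(x, in_shape=(56, 56))
+        >>> tuple(x.shape), out_shape
+        ((1, 784, 192), (28, 28))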
+ """ + + def __init__(self, + embed_dims, + depth, + num_heads, + window_size=7, + ffn_ratio=4., + qkv_bias=True, + downsample=False, + downsample_cfg=dict(), + drop_paths=0., + block_cfgs=dict(), + with_cp=False, + pad_small_map=False, + init_cfg=None): + super().__init__(init_cfg) + + if not isinstance(drop_paths, Sequence): + drop_paths = [drop_paths] * depth + + if not isinstance(block_cfgs, Sequence): + block_cfgs = [deepcopy(block_cfgs) for _ in range(depth)] + + self.embed_dims = embed_dims + self.blocks = ModuleList() + for i in range(depth): + _block_cfg = { + 'embed_dims': embed_dims, + 'num_heads': num_heads, + 'window_size': window_size, + 'ffn_ratio': ffn_ratio, + 'qkv_bias': qkv_bias, + 'drop_path': drop_paths[i], + 'with_cp': with_cp, + 'pad_small_map': pad_small_map, + **block_cfgs[i] + } + block = DaViTBlock(**_block_cfg) + self.blocks.append(block) + + if downsample: + _downsample_cfg = { + 'in_channels': embed_dims, + 'out_channels': 2 * embed_dims, + 'norm_cfg': dict(type='LN'), + **downsample_cfg + } + self.downsample = DaViTDownSample(**_downsample_cfg) + else: + self.downsample = None + + def forward(self, x, in_shape, do_downsample=True): + for block in self.blocks: + x = block(x, in_shape) + + if self.downsample is not None and do_downsample: + x, out_shape = self.downsample(x, in_shape) + else: + out_shape = in_shape + return x, out_shape + + @property + def out_channels(self): + if self.downsample: + return self.downsample.out_channels + else: + return self.embed_dims + + +@MODELS.register_module() +class DaViT(BaseBackbone): + """DaViT. + + A PyTorch implement of : `DaViT: Dual Attention Vision Transformers + `_ + + Inspiration from + https://github.com/dingmyu/davit + + Args: + arch (str | dict): DaViT architecture. If use string, choose from + 'tiny', 'small', 'base' and 'large', 'huge', 'giant'. If use dict, + it should have below keys: + + - **embed_dims** (int): The dimensions of embedding. + - **depths** (List[int]): The number of blocks in each stage. + - **num_heads** (List[int]): The number of heads in attention + modules of each stage. + + Defaults to 't'. + patch_size (int | tuple): The patch size in patch embedding. + Defaults to 4. + in_channels (int): The num of input channels. Defaults to 3. + window_size (int): The height and width of the window. Defaults to 7. + ffn_ratio (float): The expansion ratio of feedforward network hidden + layer channels. Defaults to 4. + qkv_bias (bool): Whether to add bias for qkv in attention modules. + Defaults to True. + drop_path_rate (float): Stochastic depth rate. Defaults to 0.1. + out_after_downsample (bool): Whether to output the feature map of a + stage after the following downsample layer. Defaults to False. + pad_small_map (bool): If True, pad the small feature map to the window + size, which is common used in detection and segmentation. If False, + avoid shifting window and shrink the window size to the size of + feature map, which is common used in classification. + Defaults to False. + norm_cfg (dict): Config dict for normalization layer for all output + features. Defaults to ``dict(type='LN')`` + stage_cfgs (Sequence[dict] | dict): Extra config dict for each + stage. Defaults to an empty dict. + frozen_stages (int): Stages to be frozen (stop grad and set eval mode). + -1 means not freezing any parameters. Defaults to -1. + norm_eval (bool): Whether to set norm layers to eval mode, namely, + freeze running stats (mean and var). Note: Effect on Batch Norm + and its variants only. Defaults to False. 
+ out_indices (Sequence | int): Output from which stages. + Defaults to -1, means the last stage. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. Defaults to False. + init_cfg (dict, optional): The Config for initialization. + Defaults to None. + """ + arch_zoo = { + **dict.fromkeys(['t', 'tiny'], { + 'embed_dims': 96, + 'depths': [1, 1, 3, 1], + 'num_heads': [3, 6, 12, 24] + }), + **dict.fromkeys(['s', 'small'], { + 'embed_dims': 96, + 'depths': [1, 1, 9, 1], + 'num_heads': [3, 6, 12, 24] + }), + **dict.fromkeys(['b', 'base'], { + 'embed_dims': 128, + 'depths': [1, 1, 9, 1], + 'num_heads': [4, 8, 16, 32] + }), + **dict.fromkeys( + ['l', 'large'], { + 'embed_dims': 192, + 'depths': [1, 1, 9, 1], + 'num_heads': [6, 12, 24, 48] + }), + **dict.fromkeys( + ['h', 'huge'], { + 'embed_dims': 256, + 'depths': [1, 1, 9, 1], + 'num_heads': [8, 16, 32, 64] + }), + **dict.fromkeys( + ['g', 'giant'], { + 'embed_dims': 384, + 'depths': [1, 1, 12, 3], + 'num_heads': [12, 24, 48, 96] + }), + } + + def __init__(self, + arch='t', + patch_size=4, + in_channels=3, + window_size=7, + ffn_ratio=4., + qkv_bias=True, + drop_path_rate=0.1, + out_after_downsample=False, + pad_small_map=False, + norm_cfg=dict(type='LN'), + stage_cfgs=dict(), + frozen_stages=-1, + norm_eval=False, + out_indices=(3, ), + with_cp=False, + init_cfg=None): + super().__init__(init_cfg) + + if isinstance(arch, str): + arch = arch.lower() + assert arch in set(self.arch_zoo), \ + f'Arch {arch} is not in default archs {set(self.arch_zoo)}' + self.arch_settings = self.arch_zoo[arch] + else: + essential_keys = {'embed_dims', 'depths', 'num_heads'} + assert isinstance(arch, dict) and essential_keys <= set(arch), \ + f'Custom arch needs a dict with keys {essential_keys}' + self.arch_settings = arch + + self.embed_dims = self.arch_settings['embed_dims'] + self.depths = self.arch_settings['depths'] + self.num_heads = self.arch_settings['num_heads'] + self.num_layers = len(self.depths) + self.out_indices = out_indices + self.out_after_downsample = out_after_downsample + self.frozen_stages = frozen_stages + self.norm_eval = norm_eval + + # stochastic depth decay rule + total_depth = sum(self.depths) + dpr = [ + x.item() for x in torch.linspace(0, drop_path_rate, total_depth) + ] # stochastic depth decay rule + + _patch_cfg = dict( + in_channels=in_channels, + embed_dims=self.embed_dims, + conv_type='Conv2d', + kernel_size=7, + stride=patch_size, + padding='same', + norm_cfg=dict(type='LN'), + ) + self.patch_embed = PatchEmbed(**_patch_cfg) + + self.stages = ModuleList() + embed_dims = [self.embed_dims] + for i, (depth, + num_heads) in enumerate(zip(self.depths, self.num_heads)): + if isinstance(stage_cfgs, Sequence): + stage_cfg = stage_cfgs[i] + else: + stage_cfg = deepcopy(stage_cfgs) + downsample = True if i < self.num_layers - 1 else False + _stage_cfg = { + 'embed_dims': embed_dims[-1], + 'depth': depth, + 'num_heads': num_heads, + 'window_size': window_size, + 'ffn_ratio': ffn_ratio, + 'qkv_bias': qkv_bias, + 'downsample': downsample, + 'drop_paths': dpr[:depth], + 'with_cp': with_cp, + 'pad_small_map': pad_small_map, + **stage_cfg + } + + stage = DaViTBlockSequence(**_stage_cfg) + self.stages.append(stage) + + dpr = dpr[depth:] + embed_dims.append(stage.out_channels) + + self.num_features = embed_dims[:-1] + + # add a norm layer for each output + for i in out_indices: + if norm_cfg is not None: + norm_layer = build_norm_layer(norm_cfg, + self.num_features[i])[1] + else: + 
norm_layer = nn.Identity() + + self.add_module(f'norm{i}', norm_layer) + + def train(self, mode=True): + super().train(mode) + self._freeze_stages() + if mode and self.norm_eval: + for m in self.modules(): + # trick: eval have effect on BatchNorm only + if isinstance(m, _BatchNorm): + m.eval() + + def _freeze_stages(self): + if self.frozen_stages >= 0: + self.patch_embed.eval() + for param in self.patch_embed.parameters(): + param.requires_grad = False + + for i in range(0, self.frozen_stages + 1): + m = self.stages[i] + m.eval() + for param in m.parameters(): + param.requires_grad = False + for i in self.out_indices: + if i <= self.frozen_stages: + for param in getattr(self, f'norm{i}').parameters(): + param.requires_grad = False + + def forward(self, x): + x, hw_shape = self.patch_embed(x) + + outs = [] + for i, stage in enumerate(self.stages): + x, hw_shape = stage( + x, hw_shape, do_downsample=self.out_after_downsample) + if i in self.out_indices: + norm_layer = getattr(self, f'norm{i}') + out = norm_layer(x) + out = out.view(-1, *hw_shape, + self.num_features[i]).permute(0, 3, 1, + 2).contiguous() + outs.append(out) + if stage.downsample is not None and not self.out_after_downsample: + x, hw_shape = stage.downsample(x, hw_shape) + + return tuple(outs) diff --git a/mmpretrain/models/backbones/deit.py b/mmpretrain/models/backbones/deit.py new file mode 100644 index 0000000000000000000000000000000000000000..9ae340829bece31536d0c0ac119ffe635bce82e0 --- /dev/null +++ b/mmpretrain/models/backbones/deit.py @@ -0,0 +1,116 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +import torch.nn as nn +from mmengine.model.weight_init import trunc_normal_ + +from mmpretrain.registry import MODELS +from .vision_transformer import VisionTransformer + + +@MODELS.register_module() +class DistilledVisionTransformer(VisionTransformer): + """Distilled Vision Transformer. + + A PyTorch implement of : `Training data-efficient image transformers & + distillation through attention `_ + + Args: + arch (str | dict): Vision Transformer architecture. If use string, + choose from 'small', 'base', 'large', 'deit-tiny', 'deit-small' + and 'deit-base'. If use dict, it should have below keys: + + - **embed_dims** (int): The dimensions of embedding. + - **num_layers** (int): The number of transformer encoder layers. + - **num_heads** (int): The number of heads in attention modules. + - **feedforward_channels** (int): The hidden dimensions in + feedforward modules. + + Defaults to 'deit-base'. + img_size (int | tuple): The expected input image shape. Because we + support dynamic input shape, just set the argument to the most + common input image shape. Defaults to 224. + patch_size (int | tuple): The patch size in patch embedding. + Defaults to 16. + in_channels (int): The num of input channels. Defaults to 3. + out_indices (Sequence | int): Output from which stages. + Defaults to -1, means the last stage. + drop_rate (float): Probability of an element to be zeroed. + Defaults to 0. + drop_path_rate (float): stochastic depth rate. Defaults to 0. + qkv_bias (bool): Whether to add bias for qkv in attention modules. + Defaults to True. + norm_cfg (dict): Config dict for normalization layer. + Defaults to ``dict(type='LN')``. + final_norm (bool): Whether to add a additional layer to normalize + final feature map. Defaults to True. + out_type (str): The type of output features. Please choose from + + - ``"cls_token"``: A tuple with the class token and the + distillation token. 
The shapes of both tensor are (B, C). + - ``"featmap"``: The feature map tensor from the patch tokens + with shape (B, C, H, W). + - ``"avg_featmap"``: The global averaged feature map tensor + with shape (B, C). + - ``"raw"``: The raw feature tensor includes patch tokens and + class tokens with shape (B, L, C). + + Defaults to ``"cls_token"``. + interpolate_mode (str): Select the interpolate mode for position + embeding vector resize. Defaults to "bicubic". + patch_cfg (dict): Configs of patch embeding. Defaults to an empty dict. + layer_cfgs (Sequence | dict): Configs of each transformer layer in + encoder. Defaults to an empty dict. + init_cfg (dict, optional): Initialization config dict. + Defaults to None. + """ + num_extra_tokens = 2 # class token and distillation token + + def __init__(self, arch='deit-base', *args, **kwargs): + super(DistilledVisionTransformer, self).__init__( + arch=arch, + with_cls_token=True, + *args, + **kwargs, + ) + self.dist_token = nn.Parameter(torch.zeros(1, 1, self.embed_dims)) + + def forward(self, x): + B = x.shape[0] + x, patch_resolution = self.patch_embed(x) + + # stole cls_tokens impl from Phil Wang, thanks + cls_tokens = self.cls_token.expand(B, -1, -1) + dist_token = self.dist_token.expand(B, -1, -1) + x = torch.cat((cls_tokens, dist_token, x), dim=1) + x = x + self.resize_pos_embed( + self.pos_embed, + self.patch_resolution, + patch_resolution, + mode=self.interpolate_mode, + num_extra_tokens=self.num_extra_tokens) + x = self.drop_after_pos(x) + + outs = [] + for i, layer in enumerate(self.layers): + x = layer(x) + + if i == len(self.layers) - 1 and self.final_norm: + x = self.ln1(x) + + if i in self.out_indices: + outs.append(self._format_output(x, patch_resolution)) + + return tuple(outs) + + def _format_output(self, x, hw): + if self.out_type == 'cls_token': + return x[:, 0], x[:, 1] + + return super()._format_output(x, hw) + + def init_weights(self): + super(DistilledVisionTransformer, self).init_weights() + + if not (isinstance(self.init_cfg, dict) + and self.init_cfg['type'] == 'Pretrained'): + trunc_normal_(self.dist_token, std=0.02) diff --git a/mmpretrain/models/backbones/deit3.py b/mmpretrain/models/backbones/deit3.py new file mode 100644 index 0000000000000000000000000000000000000000..9be3627915fc5e55a8e8f6c4b419bb708e8575b0 --- /dev/null +++ b/mmpretrain/models/backbones/deit3.py @@ -0,0 +1,454 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Sequence + +import numpy as np +import torch +from mmcv.cnn import Linear, build_activation_layer +from mmcv.cnn.bricks.drop import build_dropout +from mmcv.cnn.bricks.transformer import PatchEmbed +from mmengine.model import BaseModule, ModuleList, Sequential +from mmengine.utils import deprecated_api_warning +from torch import nn + +from mmpretrain.registry import MODELS +from ..utils import (LayerScale, MultiheadAttention, build_norm_layer, + resize_pos_embed, to_2tuple) +from .vision_transformer import VisionTransformer + + +class DeiT3FFN(BaseModule): + """FFN for DeiT3. + + The differences between DeiT3FFN & FFN: + 1. Use LayerScale. + + Args: + embed_dims (int): The feature dimension. Same as + `MultiheadAttention`. Defaults: 256. + feedforward_channels (int): The hidden dimension of FFNs. + Defaults: 1024. + num_fcs (int, optional): The number of fully-connected layers in + FFNs. Default: 2. + act_cfg (dict, optional): The activation config for FFNs. + Default: dict(type='ReLU') + ffn_drop (float, optional): Probability of an element to be + zeroed in FFN. 
Default 0.0. + add_identity (bool, optional): Whether to add the + identity connection. Default: `True`. + dropout_layer (obj:`ConfigDict`): The dropout_layer used + when adding the shortcut. + use_layer_scale (bool): Whether to use layer_scale in + DeiT3FFN. Defaults to True. + init_cfg (obj:`mmcv.ConfigDict`): The Config for initialization. + Default: None. + """ + + @deprecated_api_warning( + { + 'dropout': 'ffn_drop', + 'add_residual': 'add_identity' + }, + cls_name='FFN') + def __init__(self, + embed_dims=256, + feedforward_channels=1024, + num_fcs=2, + act_cfg=dict(type='ReLU', inplace=True), + ffn_drop=0., + dropout_layer=None, + add_identity=True, + use_layer_scale=True, + init_cfg=None, + **kwargs): + super().__init__(init_cfg) + assert num_fcs >= 2, 'num_fcs should be no less ' \ + f'than 2. got {num_fcs}.' + self.embed_dims = embed_dims + self.feedforward_channels = feedforward_channels + self.num_fcs = num_fcs + self.act_cfg = act_cfg + self.activate = build_activation_layer(act_cfg) + + layers = [] + in_channels = embed_dims + for _ in range(num_fcs - 1): + layers.append( + Sequential( + Linear(in_channels, feedforward_channels), self.activate, + nn.Dropout(ffn_drop))) + in_channels = feedforward_channels + layers.append(Linear(feedforward_channels, embed_dims)) + layers.append(nn.Dropout(ffn_drop)) + self.layers = Sequential(*layers) + self.dropout_layer = build_dropout( + dropout_layer) if dropout_layer else torch.nn.Identity() + self.add_identity = add_identity + + if use_layer_scale: + self.gamma2 = LayerScale(embed_dims) + else: + self.gamma2 = nn.Identity() + + @deprecated_api_warning({'residual': 'identity'}, cls_name='FFN') + def forward(self, x, identity=None): + """Forward function for `FFN`. + + The function would add x to the output tensor if residue is None. + """ + out = self.layers(x) + out = self.gamma2(out) + if not self.add_identity: + return self.dropout_layer(out) + if identity is None: + identity = x + return identity + self.dropout_layer(out) + + +class DeiT3TransformerEncoderLayer(BaseModule): + """Implements one encoder layer in DeiT3. + + The differences between DeiT3TransformerEncoderLayer & + TransformerEncoderLayer: + 1. Use LayerScale. + + Args: + embed_dims (int): The feature dimension + num_heads (int): Parallel attention heads + feedforward_channels (int): The hidden dimension for FFNs + drop_rate (float): Probability of an element to be zeroed + after the feed forward layer. Defaults to 0. + attn_drop_rate (float): The drop out rate for attention output weights. + Defaults to 0. + drop_path_rate (float): Stochastic depth rate. Defaults to 0. + num_fcs (int): The number of fully-connected layers for FFNs. + Defaults to 2. + qkv_bias (bool): enable bias for qkv if True. Defaults to True. + use_layer_scale (bool): Whether to use layer_scale in + DeiT3TransformerEncoderLayer. Defaults to True. + act_cfg (dict): The activation config for FFNs. + Defaluts to ``dict(type='GELU')``. + norm_cfg (dict): Config dict for normalization layer. + Defaults to ``dict(type='LN')``. + init_cfg (dict, optional): Initialization config dict. + Defaults to None. 
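+
+    Example:
+        A minimal shape-check sketch (illustrative only, using the 'small'
+        dimensions from ``DeiT3.arch_zoo`` below):
+
+        >>> import torch
+        >>> layer = DeiT3TransformerEncoderLayer(
+        ...     embed_dims=384, num_heads=6, feedforward_channels=1536)
+        >>> tuple(layer(torch.rand(1, 197, 384)).shape)
+        (1, 197, 384)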
+ """ + + def __init__(self, + embed_dims, + num_heads, + feedforward_channels, + drop_rate=0., + attn_drop_rate=0., + drop_path_rate=0., + num_fcs=2, + qkv_bias=True, + use_layer_scale=True, + act_cfg=dict(type='GELU'), + norm_cfg=dict(type='LN'), + init_cfg=None): + super(DeiT3TransformerEncoderLayer, self).__init__(init_cfg=init_cfg) + + self.embed_dims = embed_dims + + self.ln1 = build_norm_layer(norm_cfg, self.embed_dims) + + self.attn = MultiheadAttention( + embed_dims=embed_dims, + num_heads=num_heads, + attn_drop=attn_drop_rate, + proj_drop=drop_rate, + dropout_layer=dict(type='DropPath', drop_prob=drop_path_rate), + qkv_bias=qkv_bias, + use_layer_scale=use_layer_scale) + + self.ln2 = build_norm_layer(norm_cfg, self.embed_dims) + + self.ffn = DeiT3FFN( + embed_dims=embed_dims, + feedforward_channels=feedforward_channels, + num_fcs=num_fcs, + ffn_drop=drop_rate, + dropout_layer=dict(type='DropPath', drop_prob=drop_path_rate), + act_cfg=act_cfg, + use_layer_scale=use_layer_scale) + + def init_weights(self): + super(DeiT3TransformerEncoderLayer, self).init_weights() + for m in self.ffn.modules(): + if isinstance(m, nn.Linear): + nn.init.xavier_uniform_(m.weight) + nn.init.normal_(m.bias, std=1e-6) + + def forward(self, x): + x = x + self.attn(self.ln1(x)) + x = self.ffn(self.ln1(x), identity=x) + return x + + +@MODELS.register_module() +class DeiT3(VisionTransformer): + """DeiT3 backbone. + + A PyTorch implement of : `DeiT III: Revenge of the ViT + `_ + + The differences between DeiT3 & VisionTransformer: + + 1. Use LayerScale. + 2. Concat cls token after adding pos_embed. + + Args: + arch (str | dict): DeiT3 architecture. If use string, + choose from 'small', 'base', 'medium', 'large' and 'huge'. + If use dict, it should have below keys: + + - **embed_dims** (int): The dimensions of embedding. + - **num_layers** (int): The number of transformer encoder layers. + - **num_heads** (int): The number of heads in attention modules. + - **feedforward_channels** (int): The hidden dimensions in + feedforward modules. + + Defaults to 'base'. + img_size (int | tuple): The expected input image shape. Because we + support dynamic input shape, just set the argument to the most + common input image shape. Defaults to 224. + patch_size (int | tuple): The patch size in patch embedding. + Defaults to 16. + in_channels (int): The num of input channels. Defaults to 3. + out_indices (Sequence | int): Output from which stages. + Defaults to -1, means the last stage. + drop_rate (float): Probability of an element to be zeroed. + Defaults to 0. + drop_path_rate (float): stochastic depth rate. Defaults to 0. + qkv_bias (bool): Whether to add bias for qkv in attention modules. + Defaults to True. + norm_cfg (dict): Config dict for normalization layer. + Defaults to ``dict(type='LN')``. + final_norm (bool): Whether to add a additional layer to normalize + final feature map. Defaults to True. + out_type (str): The type of output features. Please choose from + + - ``"cls_token"``: The class token tensor with shape (B, C). + - ``"featmap"``: The feature map tensor from the patch tokens + with shape (B, C, H, W). + - ``"avg_featmap"``: The global averaged feature map tensor + with shape (B, C). + - ``"raw"``: The raw feature tensor includes patch tokens and + class tokens with shape (B, L, C). + + Defaults to ``"cls_token"``. + with_cls_token (bool): Whether concatenating class token into image + tokens as transformer input. Defaults to True. + use_layer_scale (bool): Whether to use layer_scale in DeiT3. 
+ Defaults to True. + interpolate_mode (str): Select the interpolate mode for position + embeding vector resize. Defaults to "bicubic". + patch_cfg (dict): Configs of patch embeding. Defaults to an empty dict. + layer_cfgs (Sequence | dict): Configs of each transformer layer in + encoder. Defaults to an empty dict. + init_cfg (dict, optional): Initialization config dict. + Defaults to None. + """ + arch_zoo = { + **dict.fromkeys( + ['s', 'small'], { + 'embed_dims': 384, + 'num_layers': 12, + 'num_heads': 6, + 'feedforward_channels': 1536, + }), + **dict.fromkeys( + ['m', 'medium'], { + 'embed_dims': 512, + 'num_layers': 12, + 'num_heads': 8, + 'feedforward_channels': 2048, + }), + **dict.fromkeys( + ['b', 'base'], { + 'embed_dims': 768, + 'num_layers': 12, + 'num_heads': 12, + 'feedforward_channels': 3072 + }), + **dict.fromkeys( + ['l', 'large'], { + 'embed_dims': 1024, + 'num_layers': 24, + 'num_heads': 16, + 'feedforward_channels': 4096 + }), + **dict.fromkeys( + ['h', 'huge'], { + 'embed_dims': 1280, + 'num_layers': 32, + 'num_heads': 16, + 'feedforward_channels': 5120 + }), + } + num_extra_tokens = 1 # class token + + def __init__(self, + arch='base', + img_size=224, + patch_size=16, + in_channels=3, + out_indices=-1, + drop_rate=0., + drop_path_rate=0., + qkv_bias=True, + norm_cfg=dict(type='LN', eps=1e-6), + final_norm=True, + out_type='cls_token', + with_cls_token=True, + use_layer_scale=True, + interpolate_mode='bicubic', + patch_cfg=dict(), + layer_cfgs=dict(), + init_cfg=None): + super(VisionTransformer, self).__init__(init_cfg) + + if isinstance(arch, str): + arch = arch.lower() + assert arch in set(self.arch_zoo), \ + f'Arch {arch} is not in default archs {set(self.arch_zoo)}' + self.arch_settings = self.arch_zoo[arch] + else: + essential_keys = { + 'embed_dims', 'num_layers', 'num_heads', 'feedforward_channels' + } + assert isinstance(arch, dict) and essential_keys <= set(arch), \ + f'Custom arch needs a dict with keys {essential_keys}' + self.arch_settings = arch + + self.embed_dims = self.arch_settings['embed_dims'] + self.num_layers = self.arch_settings['num_layers'] + self.img_size = to_2tuple(img_size) + + # Set patch embedding + _patch_cfg = dict( + in_channels=in_channels, + input_size=img_size, + embed_dims=self.embed_dims, + conv_type='Conv2d', + kernel_size=patch_size, + stride=patch_size, + ) + _patch_cfg.update(patch_cfg) + self.patch_embed = PatchEmbed(**_patch_cfg) + self.patch_resolution = self.patch_embed.init_out_size + num_patches = self.patch_resolution[0] * self.patch_resolution[1] + + # Set out type + if out_type not in self.OUT_TYPES: + raise ValueError(f'Unsupported `out_type` {out_type}, please ' + f'choose from {self.OUT_TYPES}') + self.out_type = out_type + + # Set cls token + if with_cls_token: + self.cls_token = nn.Parameter(torch.zeros(1, 1, self.embed_dims)) + elif out_type != 'cls_token': + self.cls_token = None + self.num_extra_tokens = 0 + else: + raise ValueError( + 'with_cls_token must be True when `out_type="cls_token"`.') + + # Set position embedding + self.interpolate_mode = interpolate_mode + self.pos_embed = nn.Parameter( + torch.zeros(1, num_patches, self.embed_dims)) + self._register_load_state_dict_pre_hook(self._prepare_pos_embed) + + self.drop_after_pos = nn.Dropout(p=drop_rate) + + if isinstance(out_indices, int): + out_indices = [out_indices] + assert isinstance(out_indices, Sequence), \ + f'"out_indices" must by a sequence or int, ' \ + f'get {type(out_indices)} instead.' 
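+        # Convert negative indices (e.g. ``-1`` for the last layer) into
+        # absolute layer indices before validating and storing them.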
+ for i, index in enumerate(out_indices): + if index < 0: + out_indices[i] = self.num_layers + index + assert 0 <= out_indices[i] <= self.num_layers, \ + f'Invalid out_indices {index}' + self.out_indices = out_indices + + # stochastic depth decay rule + dpr = np.linspace(0, drop_path_rate, self.num_layers) + + self.layers = ModuleList() + if isinstance(layer_cfgs, dict): + layer_cfgs = [layer_cfgs] * self.num_layers + for i in range(self.num_layers): + _layer_cfg = dict( + embed_dims=self.embed_dims, + num_heads=self.arch_settings['num_heads'], + feedforward_channels=self. + arch_settings['feedforward_channels'], + drop_rate=drop_rate, + drop_path_rate=dpr[i], + qkv_bias=qkv_bias, + norm_cfg=norm_cfg, + use_layer_scale=use_layer_scale) + _layer_cfg.update(layer_cfgs[i]) + self.layers.append(DeiT3TransformerEncoderLayer(**_layer_cfg)) + + self.final_norm = final_norm + if final_norm: + self.ln1 = build_norm_layer(norm_cfg, self.embed_dims) + + def forward(self, x): + B = x.shape[0] + x, patch_resolution = self.patch_embed(x) + + x = x + resize_pos_embed( + self.pos_embed, + self.patch_resolution, + patch_resolution, + mode=self.interpolate_mode, + num_extra_tokens=0) + x = self.drop_after_pos(x) + + if self.cls_token is not None: + # stole cls_tokens impl from Phil Wang, thanks + cls_tokens = self.cls_token.expand(B, -1, -1) + x = torch.cat((cls_tokens, x), dim=1) + + outs = [] + for i, layer in enumerate(self.layers): + x = layer(x) + + if i == len(self.layers) - 1 and self.final_norm: + x = self.ln1(x) + + if i in self.out_indices: + outs.append(self._format_output(x, patch_resolution)) + + return tuple(outs) + + def _prepare_pos_embed(self, state_dict, prefix, *args, **kwargs): + name = prefix + 'pos_embed' + if name not in state_dict.keys(): + return + + ckpt_pos_embed_shape = state_dict[name].shape + if self.pos_embed.shape != ckpt_pos_embed_shape: + from mmengine.logging import MMLogger + logger = MMLogger.get_current_instance() + logger.info( + f'Resize the pos_embed shape from {ckpt_pos_embed_shape} ' + f'to {self.pos_embed.shape}.') + + ckpt_pos_embed_shape = to_2tuple( + int(np.sqrt(ckpt_pos_embed_shape[1]))) + pos_embed_shape = self.patch_embed.init_out_size + + state_dict[name] = resize_pos_embed( + state_dict[name], + ckpt_pos_embed_shape, + pos_embed_shape, + self.interpolate_mode, + num_extra_tokens=0, # The cls token adding is after pos_embed + ) diff --git a/mmpretrain/models/backbones/densenet.py b/mmpretrain/models/backbones/densenet.py new file mode 100644 index 0000000000000000000000000000000000000000..c9f05302f9b84cd38c7c03701fc21ffd109c1620 --- /dev/null +++ b/mmpretrain/models/backbones/densenet.py @@ -0,0 +1,332 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+import math +from itertools import chain +from typing import Sequence + +import torch +import torch.nn as nn +import torch.nn.functional as F +import torch.utils.checkpoint as cp +from mmcv.cnn.bricks import build_activation_layer, build_norm_layer +from torch.jit.annotations import List + +from mmpretrain.registry import MODELS +from .base_backbone import BaseBackbone + + +class DenseLayer(BaseBackbone): + """DenseBlock layers.""" + + def __init__(self, + in_channels, + growth_rate, + bn_size, + norm_cfg=dict(type='BN'), + act_cfg=dict(type='ReLU'), + drop_rate=0., + memory_efficient=False): + super(DenseLayer, self).__init__() + + self.norm1 = build_norm_layer(norm_cfg, in_channels)[1] + self.conv1 = nn.Conv2d( + in_channels, + bn_size * growth_rate, + kernel_size=1, + stride=1, + bias=False) + self.act = build_activation_layer(act_cfg) + self.norm2 = build_norm_layer(norm_cfg, bn_size * growth_rate)[1] + self.conv2 = nn.Conv2d( + bn_size * growth_rate, + growth_rate, + kernel_size=3, + stride=1, + padding=1, + bias=False) + self.drop_rate = float(drop_rate) + self.memory_efficient = memory_efficient + + def bottleneck_fn(self, xs): + # type: (List[torch.Tensor]) -> torch.Tensor + concated_features = torch.cat(xs, 1) + bottleneck_output = self.conv1( + self.act(self.norm1(concated_features))) # noqa: T484 + return bottleneck_output + + # todo: rewrite when torchscript supports any + def any_requires_grad(self, x): + # type: (List[torch.Tensor]) -> bool + for tensor in x: + if tensor.requires_grad: + return True + return False + + # This decorator indicates to the compiler that a function or method + # should be ignored and replaced with the raising of an exception. + # Here this function is incompatible with torchscript. + @torch.jit.unused # noqa: T484 + def call_checkpoint_bottleneck(self, x): + # type: (List[torch.Tensor]) -> torch.Tensor + def closure(*xs): + return self.bottleneck_fn(xs) + + # Here use torch.utils.checkpoint to rerun a forward-pass during + # backward in bottleneck to save memories. 
+ return cp.checkpoint(closure, *x) + + def forward(self, x): # noqa: F811 + # type: (List[torch.Tensor]) -> torch.Tensor + # assert input features is a list of Tensor + assert isinstance(x, list) + + if self.memory_efficient and self.any_requires_grad(x): + if torch.jit.is_scripting(): + raise Exception('Memory Efficient not supported in JIT') + bottleneck_output = self.call_checkpoint_bottleneck(x) + else: + bottleneck_output = self.bottleneck_fn(x) + + new_features = self.conv2(self.act(self.norm2(bottleneck_output))) + if self.drop_rate > 0: + new_features = F.dropout( + new_features, p=self.drop_rate, training=self.training) + return new_features + + +class DenseBlock(nn.Module): + """DenseNet Blocks.""" + + def __init__(self, + num_layers, + in_channels, + bn_size, + growth_rate, + norm_cfg=dict(type='BN'), + act_cfg=dict(type='ReLU'), + drop_rate=0., + memory_efficient=False): + super(DenseBlock, self).__init__() + self.block = nn.ModuleList([ + DenseLayer( + in_channels + i * growth_rate, + growth_rate=growth_rate, + bn_size=bn_size, + norm_cfg=norm_cfg, + act_cfg=act_cfg, + drop_rate=drop_rate, + memory_efficient=memory_efficient) for i in range(num_layers) + ]) + + def forward(self, init_features): + features = [init_features] + for layer in self.block: + new_features = layer(features) + features.append(new_features) + return torch.cat(features, 1) + + +class DenseTransition(nn.Sequential): + """DenseNet Transition Layers.""" + + def __init__(self, + in_channels, + out_channels, + norm_cfg=dict(type='BN'), + act_cfg=dict(type='ReLU')): + super(DenseTransition, self).__init__() + self.add_module('norm', build_norm_layer(norm_cfg, in_channels)[1]) + self.add_module('act', build_activation_layer(act_cfg)) + self.add_module( + 'conv', + nn.Conv2d( + in_channels, out_channels, kernel_size=1, stride=1, + bias=False)) + self.add_module('pool', nn.AvgPool2d(kernel_size=2, stride=2)) + + +@MODELS.register_module() +class DenseNet(BaseBackbone): + """DenseNet. + + A PyTorch implementation of : `Densely Connected Convolutional Networks + `_ + + Modified from the `official repo + `_ + and `pytorch + `_. + + Args: + arch (str | dict): The model's architecture. If string, it should be + one of architecture in ``DenseNet.arch_settings``. And if dict, it + should include the following two keys: + + - growth_rate (int): Each layer of DenseBlock produce `k` feature + maps. Here refers `k` as the growth rate of the network. + - depths (list[int]): Number of repeated layers in each DenseBlock. + - init_channels (int): The output channels of stem layers. + + Defaults to '121'. + in_channels (int): Number of input image channels. Defaults to 3. + bn_size (int): Refers to channel expansion parameter of 1x1 + convolution layer. Defaults to 4. + drop_rate (float): Drop rate of Dropout Layer. Defaults to 0. + compression_factor (float): The reduction rate of transition layers. + Defaults to 0.5. + memory_efficient (bool): If True, uses checkpointing. Much more memory + efficient, but slower. Defaults to False. + See `"paper" `_. + norm_cfg (dict): The config dict for norm layers. + Defaults to ``dict(type='BN')``. + act_cfg (dict): The config dict for activation after each convolution. + Defaults to ``dict(type='ReLU')``. + out_indices (Sequence | int): Output from which stages. + Defaults to -1, means the last stage. + frozen_stages (int): Stages to be frozen (all param fixed). + Defaults to 0, which means not freezing any parameters. + init_cfg (dict, optional): Initialization config dict. 
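+
+    Example:
+        A minimal sketch (illustrative only; shapes assume a 224x224 input
+        and the default '121' setting, whose last stage ends with 1024
+        channels at 1/32 resolution):
+
+        >>> import torch
+        >>> from mmpretrain.models import DenseNet
+        >>> model = DenseNet(arch='121').eval()
+        >>> outs = model(torch.rand(1, 3, 224, 224))
+        >>> tuple(outs[-1].shape)
+        (1, 1024, 7, 7)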
+ """ + arch_settings = { + '121': { + 'growth_rate': 32, + 'depths': [6, 12, 24, 16], + 'init_channels': 64, + }, + '169': { + 'growth_rate': 32, + 'depths': [6, 12, 32, 32], + 'init_channels': 64, + }, + '201': { + 'growth_rate': 32, + 'depths': [6, 12, 48, 32], + 'init_channels': 64, + }, + '161': { + 'growth_rate': 48, + 'depths': [6, 12, 36, 24], + 'init_channels': 96, + }, + } + + def __init__(self, + arch='121', + in_channels=3, + bn_size=4, + drop_rate=0, + compression_factor=0.5, + memory_efficient=False, + norm_cfg=dict(type='BN'), + act_cfg=dict(type='ReLU'), + out_indices=-1, + frozen_stages=0, + init_cfg=None): + super().__init__(init_cfg=init_cfg) + + if isinstance(arch, str): + assert arch in self.arch_settings, \ + f'Unavailable arch, please choose from ' \ + f'({set(self.arch_settings)}) or pass a dict.' + arch = self.arch_settings[arch] + elif isinstance(arch, dict): + essential_keys = {'growth_rate', 'depths', 'init_channels'} + assert isinstance(arch, dict) and essential_keys <= set(arch), \ + f'Custom arch needs a dict with keys {essential_keys}' + + self.growth_rate = arch['growth_rate'] + self.depths = arch['depths'] + self.init_channels = arch['init_channels'] + self.act = build_activation_layer(act_cfg) + + self.num_stages = len(self.depths) + + # check out indices and frozen stages + if isinstance(out_indices, int): + out_indices = [out_indices] + assert isinstance(out_indices, Sequence), \ + f'"out_indices" must by a sequence or int, ' \ + f'get {type(out_indices)} instead.' + for i, index in enumerate(out_indices): + if index < 0: + out_indices[i] = self.num_stages + index + assert out_indices[i] >= 0, f'Invalid out_indices {index}' + self.out_indices = out_indices + self.frozen_stages = frozen_stages + + # Set stem layers + self.stem = nn.Sequential( + nn.Conv2d( + in_channels, + self.init_channels, + kernel_size=7, + stride=2, + padding=3, + bias=False), + build_norm_layer(norm_cfg, self.init_channels)[1], self.act, + nn.MaxPool2d(kernel_size=3, stride=2, padding=1)) + + # Repetitions of DenseNet Blocks + self.stages = nn.ModuleList() + self.transitions = nn.ModuleList() + + channels = self.init_channels + for i in range(self.num_stages): + depth = self.depths[i] + + stage = DenseBlock( + num_layers=depth, + in_channels=channels, + bn_size=bn_size, + growth_rate=self.growth_rate, + norm_cfg=norm_cfg, + act_cfg=act_cfg, + drop_rate=drop_rate, + memory_efficient=memory_efficient) + self.stages.append(stage) + channels += depth * self.growth_rate + + if i != self.num_stages - 1: + transition = DenseTransition( + in_channels=channels, + out_channels=math.floor(channels * compression_factor), + norm_cfg=norm_cfg, + act_cfg=act_cfg, + ) + channels = math.floor(channels * compression_factor) + else: + # Final layers after dense block is just bn with act. + # Unlike the paper, the original repo also put this in + # transition layer, whereas torchvision take this out. + # We reckon this as transition layer here. 
+ transition = nn.Sequential( + build_norm_layer(norm_cfg, channels)[1], + self.act, + ) + self.transitions.append(transition) + + self._freeze_stages() + + def forward(self, x): + x = self.stem(x) + outs = [] + for i in range(self.num_stages): + x = self.stages[i](x) + x = self.transitions[i](x) + if i in self.out_indices: + outs.append(x) + + return tuple(outs) + + def _freeze_stages(self): + for i in range(self.frozen_stages): + downsample_layer = self.transitions[i] + stage = self.stages[i] + downsample_layer.eval() + stage.eval() + for param in chain(downsample_layer.parameters(), + stage.parameters()): + param.requires_grad = False + + def train(self, mode=True): + super(DenseNet, self).train(mode) + self._freeze_stages() diff --git a/mmpretrain/models/backbones/edgenext.py b/mmpretrain/models/backbones/edgenext.py new file mode 100644 index 0000000000000000000000000000000000000000..ad4e768e7561eb49da3603f4394faaebed7c9251 --- /dev/null +++ b/mmpretrain/models/backbones/edgenext.py @@ -0,0 +1,398 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import math +from itertools import chain +from typing import Sequence + +import torch +import torch.nn as nn +from mmcv.cnn.bricks import DropPath +from mmengine.model import BaseModule, ModuleList, Sequential + +from mmpretrain.registry import MODELS +from ..utils import (ChannelMultiheadAttention, PositionEncodingFourier, + build_norm_layer) +from .base_backbone import BaseBackbone +from .convnext import ConvNeXtBlock + + +class SDTAEncoder(BaseModule): + """A PyTorch implementation of split depth-wise transpose attention (SDTA) + encoder. + + Inspiration from + https://github.com/mmaaz60/EdgeNeXt + Args: + in_channel (int): Number of input channels. + drop_path_rate (float): Stochastic depth dropout rate. + Defaults to 0. + layer_scale_init_value (float): Initial value of layer scale. + Defaults to 1e-6. + mlp_ratio (int): Number of channels ratio in the MLP. + Defaults to 4. + use_pos_emb (bool): Whether to use position encoding. + Defaults to True. + num_heads (int): Number of heads in the multihead attention. + Defaults to 8. + qkv_bias (bool): Whether to use bias in the multihead attention. + Defaults to True. + attn_drop (float): Dropout rate of the attention. + Defaults to 0. + proj_drop (float): Dropout rate of the projection. + Defaults to 0. + layer_scale_init_value (float): Initial value of layer scale. + Defaults to 1e-6. + norm_cfg (dict): Dictionary to construct normalization layer. + Defaults to ``dict(type='LN')``. + act_cfg (dict): Dictionary to construct activation layer. + Defaults to ``dict(type='GELU')``. + scales (int): Number of scales. Default to 1. 
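+
+    Example:
+        A minimal shape-check sketch (illustrative only; ``in_channel`` must
+        be divisible by ``num_heads``):
+
+        >>> import torch
+        >>> encoder = SDTAEncoder(in_channel=48, num_heads=8, scales=2)
+        >>> tuple(encoder(torch.rand(1, 48, 14, 14)).shape)
+        (1, 48, 14, 14)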
+ """ + + def __init__(self, + in_channel, + drop_path_rate=0., + layer_scale_init_value=1e-6, + mlp_ratio=4, + use_pos_emb=True, + num_heads=8, + qkv_bias=True, + attn_drop=0., + proj_drop=0., + norm_cfg=dict(type='LN'), + act_cfg=dict(type='GELU'), + scales=1, + init_cfg=None): + super(SDTAEncoder, self).__init__(init_cfg=init_cfg) + conv_channels = max( + int(math.ceil(in_channel / scales)), + int(math.floor(in_channel // scales))) + self.conv_channels = conv_channels + self.num_convs = scales if scales == 1 else scales - 1 + + self.conv_modules = ModuleList() + for i in range(self.num_convs): + self.conv_modules.append( + nn.Conv2d( + conv_channels, + conv_channels, + kernel_size=3, + padding=1, + groups=conv_channels)) + + self.pos_embed = PositionEncodingFourier( + embed_dims=in_channel) if use_pos_emb else None + + self.norm_csa = build_norm_layer(norm_cfg, in_channel) + self.gamma_csa = nn.Parameter( + layer_scale_init_value * torch.ones(in_channel), + requires_grad=True) if layer_scale_init_value > 0 else None + self.csa = ChannelMultiheadAttention( + embed_dims=in_channel, + num_heads=num_heads, + qkv_bias=qkv_bias, + attn_drop=attn_drop, + proj_drop=proj_drop) + + self.norm = build_norm_layer(norm_cfg, in_channel) + self.pointwise_conv1 = nn.Linear(in_channel, mlp_ratio * in_channel) + self.act = MODELS.build(act_cfg) + self.pointwise_conv2 = nn.Linear(mlp_ratio * in_channel, in_channel) + self.gamma = nn.Parameter( + layer_scale_init_value * torch.ones(in_channel), + requires_grad=True) if layer_scale_init_value > 0 else None + self.drop_path = DropPath( + drop_path_rate) if drop_path_rate > 0. else nn.Identity() + + def forward(self, x): + shortcut = x + spx = torch.split(x, self.conv_channels, dim=1) + for i in range(self.num_convs): + if i == 0: + sp = spx[i] + else: + sp = sp + spx[i] + sp = self.conv_modules[i](sp) + if i == 0: + out = sp + else: + out = torch.cat((out, sp), 1) + + x = torch.cat((out, spx[self.num_convs]), 1) + + # Channel Self-attention + B, C, H, W = x.shape + x = x.reshape(B, C, H * W).permute(0, 2, 1) + if self.pos_embed: + pos_encoding = self.pos_embed((B, H, W)) + pos_encoding = pos_encoding.reshape(B, -1, + x.shape[1]).permute(0, 2, 1) + x += pos_encoding + + x = x + self.drop_path(self.gamma_csa * self.csa(self.norm_csa(x))) + x = x.reshape(B, H, W, C) + + # Inverted Bottleneck + x = self.norm(x) + x = self.pointwise_conv1(x) + x = self.act(x) + x = self.pointwise_conv2(x) + + if self.gamma is not None: + x = self.gamma * x + x = x.permute(0, 3, 1, 2) # (B, H, W, C) -> (B, C, H, W) + + x = shortcut + self.drop_path(x) + + return x + + +@MODELS.register_module() +class EdgeNeXt(BaseBackbone): + """EdgeNeXt. + + A PyTorch implementation of: `EdgeNeXt: Efficiently Amalgamated + CNN-Transformer Architecture for Mobile Vision Applications + `_ + + Inspiration from + https://github.com/mmaaz60/EdgeNeXt + + Args: + arch (str | dict): The model's architecture. If string, it should be + one of architectures in ``EdgeNeXt.arch_settings``. + And if dict, it should include the following keys: + + - channels (list[int]): The number of channels at each stage. + - depths (list[int]): The number of blocks at each stage. + - num_heads (list[int]): The number of heads at each stage. + + Defaults to 'xxsmall'. + in_channels (int): The number of input channels. + Defaults to 3. + global_blocks (list[int]): The number of global blocks. + Defaults to [0, 1, 1, 1]. + global_block_type (list[str]): The type of global blocks. + Defaults to ['None', 'SDTA', 'SDTA', 'SDTA']. 
+ drop_path_rate (float): Stochastic depth dropout rate. + Defaults to 0. + layer_scale_init_value (float): Initial value of layer scale. + Defaults to 1e-6. + linear_pw_conv (bool): Whether to use linear layer to do pointwise + convolution. Defaults to False. + mlp_ratio (int): The number of channel ratio in MLP layers. + Defaults to 4. + conv_kernel_size (list[int]): The kernel size of convolutional layers + at each stage. Defaults to [3, 5, 7, 9]. + use_pos_embd_csa (list[bool]): Whether to use positional embedding in + Channel Self-Attention. Defaults to [False, True, False, False]. + use_pos_emebd_global (bool): Whether to use positional embedding for + whole network. Defaults to False. + d2_scales (list[int]): The number of channel groups used for SDTA at + each stage. Defaults to [2, 2, 3, 4]. + norm_cfg (dict): The config of normalization layer. + Defaults to ``dict(type='LN2d', eps=1e-6)``. + out_indices (Sequence | int): Output from which stages. + Defaults to -1, means the last stage. + frozen_stages (int): Stages to be frozen (all param fixed). + Defaults to 0, which means not freezing any parameters. + gap_before_final_norm (bool): Whether to globally average the feature + map before the final norm layer. Defaults to True. + act_cfg (dict): The config of activation layer. + Defaults to ``dict(type='GELU')``. + init_cfg (dict, optional): Config for initialization. + Defaults to None. + """ + arch_settings = { + 'xxsmall': { # parameters: 1.3M + 'channels': [24, 48, 88, 168], + 'depths': [2, 2, 6, 2], + 'num_heads': [4, 4, 4, 4] + }, + 'xsmall': { # parameters: 2.3M + 'channels': [32, 64, 100, 192], + 'depths': [3, 3, 9, 3], + 'num_heads': [4, 4, 4, 4] + }, + 'small': { # parameters: 5.6M + 'channels': [48, 96, 160, 304], + 'depths': [3, 3, 9, 3], + 'num_heads': [8, 8, 8, 8] + }, + 'base': { # parameters: 18.51M + 'channels': [80, 160, 288, 584], + 'depths': [3, 3, 9, 3], + 'num_heads': [8, 8, 8, 8] + }, + } + + def __init__(self, + arch='xxsmall', + in_channels=3, + global_blocks=[0, 1, 1, 1], + global_block_type=['None', 'SDTA', 'SDTA', 'SDTA'], + drop_path_rate=0., + layer_scale_init_value=1e-6, + linear_pw_conv=True, + mlp_ratio=4, + conv_kernel_sizes=[3, 5, 7, 9], + use_pos_embd_csa=[False, True, False, False], + use_pos_embd_global=False, + d2_scales=[2, 2, 3, 4], + norm_cfg=dict(type='LN2d', eps=1e-6), + out_indices=-1, + frozen_stages=0, + gap_before_final_norm=True, + act_cfg=dict(type='GELU'), + init_cfg=None): + super(EdgeNeXt, self).__init__(init_cfg=init_cfg) + + if isinstance(arch, str): + arch = arch.lower() + assert arch in self.arch_settings, \ + f'Arch {arch} is not in default archs ' \ + f'{set(self.arch_settings)}' + self.arch_settings = self.arch_settings[arch] + elif isinstance(arch, dict): + essential_keys = {'channels', 'depths', 'num_heads'} + assert isinstance(arch, dict) and set(arch) == essential_keys, \ + f'Custom arch needs a dict with keys {essential_keys}' + self.arch_settings = arch + + self.channels = self.arch_settings['channels'] + self.depths = self.arch_settings['depths'] + self.num_heads = self.arch_settings['num_heads'] + self.num_layers = len(self.depths) + self.use_pos_embd_global = use_pos_embd_global + + for g in global_block_type: + assert g in ['None', + 'SDTA'], f'Global block type {g} is not supported' + + self.num_stages = len(self.depths) + + if isinstance(out_indices, int): + out_indices = [out_indices] + assert isinstance(out_indices, Sequence), \ + f'"out_indices" must by a sequence or int, ' \ + f'get {type(out_indices)} 
instead.' + for i, index in enumerate(out_indices): + if index < 0: + out_indices[i] = 4 + index + assert out_indices[i] >= 0, f'Invalid out_indices {index}' + self.out_indices = out_indices + + self.frozen_stages = frozen_stages + self.gap_before_final_norm = gap_before_final_norm + + if self.use_pos_embd_global: + self.pos_embed = PositionEncodingFourier( + embed_dims=self.channels[0]) + else: + self.pos_embed = None + + # stochastic depth decay rule + dpr = [ + x.item() + for x in torch.linspace(0, drop_path_rate, sum(self.depths)) + ] + + self.downsample_layers = ModuleList() + stem = nn.Sequential( + nn.Conv2d(in_channels, self.channels[0], kernel_size=4, stride=4), + build_norm_layer(norm_cfg, self.channels[0]), + ) + self.downsample_layers.append(stem) + + self.stages = ModuleList() + block_idx = 0 + for i in range(self.num_stages): + depth = self.depths[i] + channels = self.channels[i] + + if i >= 1: + downsample_layer = nn.Sequential( + build_norm_layer(norm_cfg, self.channels[i - 1]), + nn.Conv2d( + self.channels[i - 1], + channels, + kernel_size=2, + stride=2, + )) + self.downsample_layers.append(downsample_layer) + + stage_blocks = [] + for j in range(depth): + if j > depth - global_blocks[i] - 1: + stage_blocks.append( + SDTAEncoder( + in_channel=channels, + drop_path_rate=dpr[block_idx + j], + mlp_ratio=mlp_ratio, + scales=d2_scales[i], + use_pos_emb=use_pos_embd_csa[i], + num_heads=self.num_heads[i], + )) + else: + dw_conv_cfg = dict( + kernel_size=conv_kernel_sizes[i], + padding=conv_kernel_sizes[i] // 2, + ) + stage_blocks.append( + ConvNeXtBlock( + in_channels=channels, + dw_conv_cfg=dw_conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg, + linear_pw_conv=linear_pw_conv, + drop_path_rate=dpr[block_idx + j], + layer_scale_init_value=layer_scale_init_value, + )) + block_idx += depth + + stage_blocks = Sequential(*stage_blocks) + self.stages.append(stage_blocks) + + if i in self.out_indices: + out_norm_cfg = dict(type='LN') if self.gap_before_final_norm \ + else norm_cfg + norm_layer = build_norm_layer(out_norm_cfg, channels) + self.add_module(f'norm{i}', norm_layer) + + def init_weights(self) -> None: + # TODO: need to be implemented in the future + return super().init_weights() + + def forward(self, x): + outs = [] + for i, stage in enumerate(self.stages): + x = self.downsample_layers[i](x) + x = stage(x) + if self.pos_embed and i == 0: + B, _, H, W = x.shape + x += self.pos_embed((B, H, W)) + + if i in self.out_indices: + norm_layer = getattr(self, f'norm{i}') + if self.gap_before_final_norm: + gap = x.mean([-2, -1], keepdim=True) + outs.append(norm_layer(gap.flatten(1))) + else: + # The output of LayerNorm2d may be discontiguous, which + # may cause some problem in the downstream tasks + outs.append(norm_layer(x).contiguous()) + + return tuple(outs) + + def _freeze_stages(self): + for i in range(self.frozen_stages): + downsample_layer = self.downsample_layers[i] + stage = self.stages[i] + downsample_layer.eval() + stage.eval() + for param in chain(downsample_layer.parameters(), + stage.parameters()): + param.requires_grad = False + + def train(self, mode=True): + super(EdgeNeXt, self).train(mode) + self._freeze_stages() diff --git a/mmpretrain/models/backbones/efficientformer.py b/mmpretrain/models/backbones/efficientformer.py new file mode 100644 index 0000000000000000000000000000000000000000..c2525c8faaa745ff5404e91004421f2360dd1c41 --- /dev/null +++ b/mmpretrain/models/backbones/efficientformer.py @@ -0,0 +1,606 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
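# Editorial usage sketch for the ``EdgeNeXt`` backbone defined in the previous
# file (not part of the original diff); the input size and the output shape
# are assumptions based on its default arguments (``out_indices=-1``,
# ``gap_before_final_norm=True``):
#
#   import torch
#   from mmpretrain.models import EdgeNeXt
#
#   model = EdgeNeXt(arch='xxsmall')
#   model.eval()
#   feats = model(torch.rand(1, 3, 224, 224))
#   # feats is a tuple with a single (1, 168) tensor: the globally pooled
#   # feature of the last stage ('xxsmall' ends with 168 channels).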
+import itertools +from typing import Optional, Sequence + +import torch +import torch.nn as nn +from mmcv.cnn.bricks import (ConvModule, DropPath, build_activation_layer, + build_norm_layer) +from mmengine.model import BaseModule, ModuleList, Sequential + +from mmpretrain.registry import MODELS +from ..utils import LayerScale +from .base_backbone import BaseBackbone +from .poolformer import Pooling + + +class AttentionWithBias(BaseModule): + """Multi-head Attention Module with attention_bias. + + Args: + embed_dims (int): The embedding dimension. + num_heads (int): Parallel attention heads. Defaults to 8. + key_dim (int): The dimension of q, k. Defaults to 32. + attn_ratio (float): The dimension of v equals to + ``key_dim * attn_ratio``. Defaults to 4. + resolution (int): The height and width of attention_bias. + Defaults to 7. + init_cfg (dict, optional): The Config for initialization. + Defaults to None. + """ + + def __init__(self, + embed_dims, + num_heads=8, + key_dim=32, + attn_ratio=4., + resolution=7, + init_cfg=None): + super().__init__(init_cfg=init_cfg) + self.num_heads = num_heads + self.scale = key_dim**-0.5 + self.attn_ratio = attn_ratio + self.key_dim = key_dim + self.nh_kd = key_dim * num_heads + self.d = int(attn_ratio * key_dim) + self.dh = int(attn_ratio * key_dim) * num_heads + h = self.dh + self.nh_kd * 2 + self.qkv = nn.Linear(embed_dims, h) + self.proj = nn.Linear(self.dh, embed_dims) + + points = list(itertools.product(range(resolution), range(resolution))) + N = len(points) + attention_offsets = {} + idxs = [] + for p1 in points: + for p2 in points: + offset = (abs(p1[0] - p2[0]), abs(p1[1] - p2[1])) + if offset not in attention_offsets: + attention_offsets[offset] = len(attention_offsets) + idxs.append(attention_offsets[offset]) + self.attention_biases = nn.Parameter( + torch.zeros(num_heads, len(attention_offsets))) + self.register_buffer('attention_bias_idxs', + torch.LongTensor(idxs).view(N, N)) + + @torch.no_grad() + def train(self, mode=True): + """change the mode of model.""" + super().train(mode) + if mode and hasattr(self, 'ab'): + del self.ab + else: + self.ab = self.attention_biases[:, self.attention_bias_idxs] + + def forward(self, x): + """forward function. + + Args: + x (tensor): input features with shape of (B, N, C) + """ + B, N, _ = x.shape + qkv = self.qkv(x) + qkv = qkv.reshape(B, N, self.num_heads, -1).permute(0, 2, 1, 3) + q, k, v = qkv.split([self.key_dim, self.key_dim, self.d], dim=-1) + + attn = ((q @ k.transpose(-2, -1)) * self.scale + + (self.attention_biases[:, self.attention_bias_idxs] + if self.training else self.ab)) + attn = attn.softmax(dim=-1) + x = (attn @ v).transpose(1, 2).reshape(B, N, self.dh) + x = self.proj(x) + return x + + +class Flat(nn.Module): + """Flat the input from (B, C, H, W) to (B, H*W, C).""" + + def __init__(self, ): + super().__init__() + + def forward(self, x: torch.Tensor): + x = x.flatten(2).transpose(1, 2) + return x + + +class LinearMlp(BaseModule): + """Mlp implemented with linear. + + The shape of input and output tensor are (B, N, C). + + Args: + in_features (int): Dimension of input features. + hidden_features (int): Dimension of hidden features. + out_features (int): Dimension of output features. + norm_cfg (dict): Config dict for normalization layer. + Defaults to ``dict(type='BN')``. + act_cfg (dict): The config dict for activation between pointwise + convolution. Defaults to ``dict(type='GELU')``. + drop (float): Dropout rate. Defaults to 0.0. 
+ init_cfg (obj:`mmcv.ConfigDict`): The Config for initialization. + Default: None. + """ + + def __init__(self, + in_features: int, + hidden_features: Optional[int] = None, + out_features: Optional[int] = None, + act_cfg=dict(type='GELU'), + drop=0., + init_cfg=None): + super().__init__(init_cfg=init_cfg) + out_features = out_features or in_features + hidden_features = hidden_features or in_features + + self.fc1 = nn.Linear(in_features, hidden_features) + self.act = build_activation_layer(act_cfg) + self.drop1 = nn.Dropout(drop) + self.fc2 = nn.Linear(hidden_features, out_features) + self.drop2 = nn.Dropout(drop) + + def forward(self, x): + """ + Args: + x (torch.Tensor): input tensor with shape (B, N, C). + + Returns: + torch.Tensor: output tensor with shape (B, N, C). + """ + x = self.drop1(self.act(self.fc1(x))) + x = self.drop2(self.fc2(x)) + return x + + +class ConvMlp(BaseModule): + """Mlp implemented with 1*1 convolutions. + + Args: + in_features (int): Dimension of input features. + hidden_features (int): Dimension of hidden features. + out_features (int): Dimension of output features. + norm_cfg (dict): Config dict for normalization layer. + Defaults to ``dict(type='BN')``. + act_cfg (dict): The config dict for activation between pointwise + convolution. Defaults to ``dict(type='GELU')``. + drop (float): Dropout rate. Defaults to 0.0. + init_cfg (obj:`mmcv.ConfigDict`): The Config for initialization. + Default: None. + """ + + def __init__(self, + in_features, + hidden_features=None, + out_features=None, + norm_cfg=dict(type='BN'), + act_cfg=dict(type='GELU'), + drop=0., + init_cfg=None): + super().__init__(init_cfg=init_cfg) + out_features = out_features or in_features + hidden_features = hidden_features or in_features + self.fc1 = nn.Conv2d(in_features, hidden_features, 1) + self.act = build_activation_layer(act_cfg) + self.fc2 = nn.Conv2d(hidden_features, out_features, 1) + self.norm1 = build_norm_layer(norm_cfg, hidden_features)[1] + self.norm2 = build_norm_layer(norm_cfg, out_features)[1] + + self.drop = nn.Dropout(drop) + + def forward(self, x): + """ + Args: + x (torch.Tensor): input tensor with shape (B, C, H, W). + + Returns: + torch.Tensor: output tensor with shape (B, C, H, W). + """ + + x = self.act(self.norm1(self.fc1(x))) + x = self.drop(x) + x = self.norm2(self.fc2(x)) + x = self.drop(x) + return x + + +class Meta3D(BaseModule): + """Meta Former block using 3 dimensions inputs, ``torch.Tensor`` with shape + (B, N, C).""" + + def __init__(self, + dim, + mlp_ratio=4., + norm_cfg=dict(type='LN'), + act_cfg=dict(type='GELU'), + drop=0., + drop_path=0., + use_layer_scale=True, + init_cfg=None): + super().__init__(init_cfg=init_cfg) + self.norm1 = build_norm_layer(norm_cfg, dim)[1] + self.token_mixer = AttentionWithBias(dim) + self.norm2 = build_norm_layer(norm_cfg, dim)[1] + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = LinearMlp( + in_features=dim, + hidden_features=mlp_hidden_dim, + act_cfg=act_cfg, + drop=drop) + + self.drop_path = DropPath(drop_path) if drop_path > 0. 
\ + else nn.Identity() + if use_layer_scale: + self.ls1 = LayerScale(dim) + self.ls2 = LayerScale(dim) + else: + self.ls1, self.ls2 = nn.Identity(), nn.Identity() + + def forward(self, x): + x = x + self.drop_path(self.ls1(self.token_mixer(self.norm1(x)))) + x = x + self.drop_path(self.ls2(self.mlp(self.norm2(x)))) + return x + + +class Meta4D(BaseModule): + """Meta Former block using 4 dimensions inputs, ``torch.Tensor`` with shape + (B, C, H, W).""" + + def __init__(self, + dim, + pool_size=3, + mlp_ratio=4., + act_cfg=dict(type='GELU'), + drop=0., + drop_path=0., + use_layer_scale=True, + init_cfg=None): + super().__init__(init_cfg=init_cfg) + + self.token_mixer = Pooling(pool_size=pool_size) + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = ConvMlp( + in_features=dim, + hidden_features=mlp_hidden_dim, + act_cfg=act_cfg, + drop=drop) + + self.drop_path = DropPath(drop_path) if drop_path > 0. \ + else nn.Identity() + if use_layer_scale: + self.ls1 = LayerScale(dim, data_format='channels_first') + self.ls2 = LayerScale(dim, data_format='channels_first') + else: + self.ls1, self.ls2 = nn.Identity(), nn.Identity() + + def forward(self, x): + x = x + self.drop_path(self.ls1(self.token_mixer(x))) + x = x + self.drop_path(self.ls2(self.mlp(x))) + return x + + +def basic_blocks(in_channels, + out_channels, + index, + layers, + pool_size=3, + mlp_ratio=4., + act_cfg=dict(type='GELU'), + drop_rate=.0, + drop_path_rate=0., + use_layer_scale=True, + vit_num=1, + has_downsamper=False): + """generate EfficientFormer blocks for a stage.""" + blocks = [] + if has_downsamper: + blocks.append( + ConvModule( + in_channels=in_channels, + out_channels=out_channels, + kernel_size=3, + stride=2, + padding=1, + bias=True, + norm_cfg=dict(type='BN'), + act_cfg=None)) + if index == 3 and vit_num == layers[index]: + blocks.append(Flat()) + for block_idx in range(layers[index]): + block_dpr = drop_path_rate * (block_idx + sum(layers[:index])) / ( + sum(layers) - 1) + if index == 3 and layers[index] - block_idx <= vit_num: + blocks.append( + Meta3D( + out_channels, + mlp_ratio=mlp_ratio, + act_cfg=act_cfg, + drop=drop_rate, + drop_path=block_dpr, + use_layer_scale=use_layer_scale, + )) + else: + blocks.append( + Meta4D( + out_channels, + pool_size=pool_size, + act_cfg=act_cfg, + drop=drop_rate, + drop_path=block_dpr, + use_layer_scale=use_layer_scale)) + if index == 3 and layers[index] - block_idx - 1 == vit_num: + blocks.append(Flat()) + blocks = nn.Sequential(*blocks) + return blocks + + +@MODELS.register_module() +class EfficientFormer(BaseBackbone): + """EfficientFormer. + + A PyTorch implementation of EfficientFormer introduced by: + `EfficientFormer: Vision Transformers at MobileNet Speed `_ + + Modified from the `official repo + `. + + Args: + arch (str | dict): The model's architecture. If string, it should be + one of architecture in ``EfficientFormer.arch_settings``. And if dict, + it should include the following 4 keys: + + - layers (list[int]): Number of blocks at each stage. + - embed_dims (list[int]): The number of channels at each stage. + - downsamples (list[int]): Has downsample or not in the four stages. + - vit_num (int): The num of vit blocks in the last stage. + + Defaults to 'l1'. + + in_channels (int): The num of input channels. Defaults to 3. + pool_size (int): The pooling size of ``Meta4D`` blocks. Defaults to 3. + mlp_ratios (int): The dimension ratio of multi-head attention mechanism + in ``Meta4D`` blocks. Defaults to 3. 
+ reshape_last_feat (bool): Whether to reshape the feature map from + (B, N, C) to (B, C, H, W) in the last stage, when the ``vit-num`` + in ``arch`` is not 0. Defaults to False. Usually set to True + in downstream tasks. + out_indices (Sequence[int]): Output from which stages. + Defaults to -1. + frozen_stages (int): Stages to be frozen (stop grad and set eval mode). + -1 means not freezing any parameters. Defaults to -1. + act_cfg (dict): The config dict for activation between pointwise + convolution. Defaults to ``dict(type='GELU')``. + drop_rate (float): Dropout rate. Defaults to 0. + drop_path_rate (float): Stochastic depth rate. Defaults to 0. + use_layer_scale (bool): Whether to use use_layer_scale in MetaFormer + block. Defaults to True. + init_cfg (dict, optional): Initialization config dict. + Defaults to None. + + Example: + >>> from mmpretrain.models import EfficientFormer + >>> import torch + >>> inputs = torch.rand((1, 3, 224, 224)) + >>> # build EfficientFormer backbone for classification task + >>> model = EfficientFormer(arch="l1") + >>> model.eval() + >>> level_outputs = model(inputs) + >>> for level_out in level_outputs: + ... print(tuple(level_out.shape)) + (1, 448, 49) + >>> # build EfficientFormer backbone for downstream task + >>> model = EfficientFormer( + >>> arch="l3", + >>> out_indices=(0, 1, 2, 3), + >>> reshape_last_feat=True) + >>> model.eval() + >>> level_outputs = model(inputs) + >>> for level_out in level_outputs: + ... print(tuple(level_out.shape)) + (1, 64, 56, 56) + (1, 128, 28, 28) + (1, 320, 14, 14) + (1, 512, 7, 7) + """ # noqa: E501 + + # --layers: [x,x,x,x], numbers of layers for the four stages + # --embed_dims: [x,x,x,x], embedding dims for the four stages + # --downsamples: [x,x,x,x], has downsample or not in the four stages + # --vit_num:(int), the num of vit blocks in the last stage + arch_settings = { + 'l1': { + 'layers': [3, 2, 6, 4], + 'embed_dims': [48, 96, 224, 448], + 'downsamples': [False, True, True, True], + 'vit_num': 1, + }, + 'l3': { + 'layers': [4, 4, 12, 6], + 'embed_dims': [64, 128, 320, 512], + 'downsamples': [False, True, True, True], + 'vit_num': 4, + }, + 'l7': { + 'layers': [6, 6, 18, 8], + 'embed_dims': [96, 192, 384, 768], + 'downsamples': [False, True, True, True], + 'vit_num': 8, + }, + } + + def __init__(self, + arch='l1', + in_channels=3, + pool_size=3, + mlp_ratios=4, + reshape_last_feat=False, + out_indices=-1, + frozen_stages=-1, + act_cfg=dict(type='GELU'), + drop_rate=0., + drop_path_rate=0., + use_layer_scale=True, + init_cfg=None): + + super().__init__(init_cfg=init_cfg) + self.num_extra_tokens = 0 # no cls_token, no dist_token + + if isinstance(arch, str): + assert arch in self.arch_settings, \ + f'Unavailable arch, please choose from ' \ + f'({set(self.arch_settings)}) or pass a dict.' + arch = self.arch_settings[arch] + elif isinstance(arch, dict): + default_keys = set(self.arch_settings['l1'].keys()) + assert set(arch.keys()) == default_keys, \ + f'The arch dict must have {default_keys}, ' \ + f'but got {list(arch.keys())}.' + + self.layers = arch['layers'] + self.embed_dims = arch['embed_dims'] + self.downsamples = arch['downsamples'] + assert isinstance(self.layers, list) and isinstance( + self.embed_dims, list) and isinstance(self.downsamples, list) + assert len(self.layers) == len(self.embed_dims) == len( + self.downsamples) + + self.vit_num = arch['vit_num'] + self.reshape_last_feat = reshape_last_feat + + assert self.vit_num >= 0, "'vit_num' must be an integer " \ + 'greater than or equal to 0.' 
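        # Editorial worked example (not part of the original code): for
        # arch='l1' the fields above resolve to
        #   self.layers      = [3, 2, 6, 4]
        #   self.embed_dims  = [48, 96, 224, 448]
        #   self.downsamples = [False, True, True, True]
        #   self.vit_num     = 1
        # so the surrounding checks simply require 0 <= vit_num <= layers[-1].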
+ assert self.vit_num <= self.layers[-1], ( + "'vit_num' must be an integer smaller than layer number") + + self._make_stem(in_channels, self.embed_dims[0]) + + # set the main block in network + network = [] + for i in range(len(self.layers)): + if i != 0: + in_channels = self.embed_dims[i - 1] + else: + in_channels = self.embed_dims[i] + out_channels = self.embed_dims[i] + stage = basic_blocks( + in_channels, + out_channels, + i, + self.layers, + pool_size=pool_size, + mlp_ratio=mlp_ratios, + act_cfg=act_cfg, + drop_rate=drop_rate, + drop_path_rate=drop_path_rate, + vit_num=self.vit_num, + use_layer_scale=use_layer_scale, + has_downsamper=self.downsamples[i]) + network.append(stage) + + self.network = ModuleList(network) + + if isinstance(out_indices, int): + out_indices = [out_indices] + assert isinstance(out_indices, Sequence), \ + f'"out_indices" must by a sequence or int, ' \ + f'get {type(out_indices)} instead.' + for i, index in enumerate(out_indices): + if index < 0: + out_indices[i] = 4 + index + assert out_indices[i] >= 0, f'Invalid out_indices {index}' + + self.out_indices = out_indices + for i_layer in self.out_indices: + if not self.reshape_last_feat and \ + i_layer == 3 and self.vit_num > 0: + layer = build_norm_layer( + dict(type='LN'), self.embed_dims[i_layer])[1] + else: + # use GN with 1 group as channel-first LN2D + layer = build_norm_layer( + dict(type='GN', num_groups=1), self.embed_dims[i_layer])[1] + + layer_name = f'norm{i_layer}' + self.add_module(layer_name, layer) + + self.frozen_stages = frozen_stages + self._freeze_stages() + + def _make_stem(self, in_channels: int, stem_channels: int): + """make 2-ConvBNReLu stem layer.""" + self.patch_embed = Sequential( + ConvModule( + in_channels, + stem_channels // 2, + kernel_size=3, + stride=2, + padding=1, + bias=True, + conv_cfg=None, + norm_cfg=dict(type='BN'), + inplace=True), + ConvModule( + stem_channels // 2, + stem_channels, + kernel_size=3, + stride=2, + padding=1, + bias=True, + conv_cfg=None, + norm_cfg=dict(type='BN'), + inplace=True)) + + def forward_tokens(self, x): + outs = [] + for idx, block in enumerate(self.network): + if idx == len(self.network) - 1: + N, _, H, W = x.shape + if self.downsamples[idx]: + H, W = H // 2, W // 2 + x = block(x) + if idx in self.out_indices: + norm_layer = getattr(self, f'norm{idx}') + + if idx == len(self.network) - 1 and x.dim() == 3: + # when ``vit-num`` > 0 and in the last stage, + # if `self.reshape_last_feat`` is True, reshape the + # features to `BCHW` format before the final normalization. + # if `self.reshape_last_feat`` is False, do + # normalization directly and permute the features to `BCN`. + if self.reshape_last_feat: + x = x.permute((0, 2, 1)).reshape(N, -1, H, W) + x_out = norm_layer(x) + else: + x_out = norm_layer(x).permute((0, 2, 1)) + else: + x_out = norm_layer(x) + + outs.append(x_out.contiguous()) + return tuple(outs) + + def forward(self, x): + # input embedding + x = self.patch_embed(x) + # through stages + x = self.forward_tokens(x) + return x + + def _freeze_stages(self): + if self.frozen_stages >= 0: + self.patch_embed.eval() + for param in self.patch_embed.parameters(): + param.requires_grad = False + + for i in range(self.frozen_stages): + # Include both block and downsample layer. 
+ module = self.network[i] + module.eval() + for param in module.parameters(): + param.requires_grad = False + if i in self.out_indices: + norm_layer = getattr(self, f'norm{i}') + norm_layer.eval() + for param in norm_layer.parameters(): + param.requires_grad = False + + def train(self, mode=True): + super(EfficientFormer, self).train(mode) + self._freeze_stages() diff --git a/mmpretrain/models/backbones/efficientnet.py b/mmpretrain/models/backbones/efficientnet.py new file mode 100644 index 0000000000000000000000000000000000000000..9ec7ee81186610f7adb8af92325471d794509ddc --- /dev/null +++ b/mmpretrain/models/backbones/efficientnet.py @@ -0,0 +1,410 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import copy +import math +from functools import partial + +import torch +import torch.nn as nn +import torch.utils.checkpoint as cp +from mmcv.cnn.bricks import ConvModule, DropPath +from mmengine.model import BaseModule, Sequential + +from mmpretrain.models.backbones.base_backbone import BaseBackbone +from mmpretrain.models.utils import InvertedResidual, SELayer, make_divisible +from mmpretrain.registry import MODELS + + +class EdgeResidual(BaseModule): + """Edge Residual Block. + + Args: + in_channels (int): The input channels of this module. + out_channels (int): The output channels of this module. + mid_channels (int): The input channels of the second convolution. + kernel_size (int): The kernel size of the first convolution. + Defaults to 3. + stride (int): The stride of the first convolution. Defaults to 1. + se_cfg (dict, optional): Config dict for se layer. Defaults to None, + which means no se layer. + with_residual (bool): Use residual connection. Defaults to True. + conv_cfg (dict, optional): Config dict for convolution layer. + Defaults to None, which means using conv2d. + norm_cfg (dict): Config dict for normalization layer. + Defaults to ``dict(type='BN')``. + act_cfg (dict): Config dict for activation layer. + Defaults to ``dict(type='ReLU')``. + drop_path_rate (float): stochastic depth rate. Defaults to 0. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. Defaults to False. + init_cfg (dict | list[dict], optional): Initialization config dict. 
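    Example:
        The snippet below is an editorial sketch, not part of the original
        implementation; the output shape simply follows from ``stride=2``
        and ``out_channels=48``.

        >>> import torch
        >>> block = EdgeResidual(
        ...     in_channels=32, out_channels=48, mid_channels=96, stride=2)
        >>> out = block(torch.rand(1, 32, 56, 56))
        >>> print(tuple(out.shape))
        (1, 48, 28, 28)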
+ """ + + def __init__(self, + in_channels, + out_channels, + mid_channels, + kernel_size=3, + stride=1, + se_cfg=None, + with_residual=True, + conv_cfg=None, + norm_cfg=dict(type='BN'), + act_cfg=dict(type='ReLU'), + drop_path_rate=0., + with_cp=False, + init_cfg=None): + super(EdgeResidual, self).__init__(init_cfg=init_cfg) + assert stride in [1, 2] + self.with_cp = with_cp + self.drop_path = DropPath( + drop_path_rate) if drop_path_rate > 0 else nn.Identity() + self.with_se = se_cfg is not None + self.with_residual = ( + stride == 1 and in_channels == out_channels and with_residual) + + if self.with_se: + assert isinstance(se_cfg, dict) + + self.conv1 = ConvModule( + in_channels=in_channels, + out_channels=mid_channels, + kernel_size=kernel_size, + stride=stride, + padding=kernel_size // 2, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg) + + if self.with_se: + self.se = SELayer(**se_cfg) + + self.conv2 = ConvModule( + in_channels=mid_channels, + out_channels=out_channels, + kernel_size=1, + stride=1, + padding=0, + conv_cfg=None, + norm_cfg=norm_cfg, + act_cfg=None) + + def forward(self, x): + + def _inner_forward(x): + out = x + out = self.conv1(out) + + if self.with_se: + out = self.se(out) + + out = self.conv2(out) + + if self.with_residual: + return x + self.drop_path(out) + else: + return out + + if self.with_cp and x.requires_grad: + out = cp.checkpoint(_inner_forward, x) + else: + out = _inner_forward(x) + + return out + + +def model_scaling(layer_setting, arch_setting): + """Scaling operation to the layer's parameters according to the + arch_setting.""" + # scale width + new_layer_setting = copy.deepcopy(layer_setting) + for layer_cfg in new_layer_setting: + for block_cfg in layer_cfg: + block_cfg[1] = make_divisible(block_cfg[1] * arch_setting[0], 8) + + # scale depth + split_layer_setting = [new_layer_setting[0]] + for layer_cfg in new_layer_setting[1:-1]: + tmp_index = [0] + for i in range(len(layer_cfg) - 1): + if layer_cfg[i + 1][1] != layer_cfg[i][1]: + tmp_index.append(i + 1) + tmp_index.append(len(layer_cfg)) + for i in range(len(tmp_index) - 1): + split_layer_setting.append(layer_cfg[tmp_index[i]:tmp_index[i + + 1]]) + split_layer_setting.append(new_layer_setting[-1]) + + num_of_layers = [len(layer_cfg) for layer_cfg in split_layer_setting[1:-1]] + new_layers = [ + int(math.ceil(arch_setting[1] * num)) for num in num_of_layers + ] + + merge_layer_setting = [split_layer_setting[0]] + for i, layer_cfg in enumerate(split_layer_setting[1:-1]): + if new_layers[i] <= num_of_layers[i]: + tmp_layer_cfg = layer_cfg[:new_layers[i]] + else: + tmp_layer_cfg = copy.deepcopy(layer_cfg) + [layer_cfg[-1]] * ( + new_layers[i] - num_of_layers[i]) + if tmp_layer_cfg[0][3] == 1 and i != 0: + merge_layer_setting[-1] += tmp_layer_cfg.copy() + else: + merge_layer_setting.append(tmp_layer_cfg.copy()) + merge_layer_setting.append(split_layer_setting[-1]) + + return merge_layer_setting + + +@MODELS.register_module() +class EfficientNet(BaseBackbone): + """EfficientNet backbone. + + Args: + arch (str): Architecture of efficientnet. Defaults to b0. + out_indices (Sequence[int]): Output from which stages. + Defaults to (6, ). + frozen_stages (int): Stages to be frozen (all param fixed). + Defaults to 0, which means not freezing any parameters. + conv_cfg (dict): Config dict for convolution layer. + Defaults to None, which means using conv2d. + norm_cfg (dict): Config dict for normalization layer. + Defaults to dict(type='BN'). + act_cfg (dict): Config dict for activation layer. 
+ Defaults to dict(type='Swish'). + norm_eval (bool): Whether to set norm layers to eval mode, namely, + freeze running stats (mean and var). Note: Effect on Batch Norm + and its variants only. Defaults to False. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. Defaults to False. + """ + + # Parameters to build layers. + # 'b' represents the architecture of normal EfficientNet family includes + # 'b0', 'b1', 'b2', 'b3', 'b4', 'b5', 'b6', 'b7', 'b8'. + # 'e' represents the architecture of EfficientNet-EdgeTPU including 'es', + # 'em', 'el'. + # 6 parameters are needed to construct a layer, From left to right: + # - kernel_size: The kernel size of the block + # - out_channel: The number of out_channels of the block + # - se_ratio: The sequeeze ratio of SELayer. + # - stride: The stride of the block + # - expand_ratio: The expand_ratio of the mid_channels + # - block_type: -1: Not a block, 0: InvertedResidual, 1: EdgeResidual + layer_settings = { + 'b': [[[3, 32, 0, 2, 0, -1]], + [[3, 16, 4, 1, 1, 0]], + [[3, 24, 4, 2, 6, 0], + [3, 24, 4, 1, 6, 0]], + [[5, 40, 4, 2, 6, 0], + [5, 40, 4, 1, 6, 0]], + [[3, 80, 4, 2, 6, 0], + [3, 80, 4, 1, 6, 0], + [3, 80, 4, 1, 6, 0], + [5, 112, 4, 1, 6, 0], + [5, 112, 4, 1, 6, 0], + [5, 112, 4, 1, 6, 0]], + [[5, 192, 4, 2, 6, 0], + [5, 192, 4, 1, 6, 0], + [5, 192, 4, 1, 6, 0], + [5, 192, 4, 1, 6, 0], + [3, 320, 4, 1, 6, 0]], + [[1, 1280, 0, 1, 0, -1]] + ], + 'e': [[[3, 32, 0, 2, 0, -1]], + [[3, 24, 0, 1, 3, 1]], + [[3, 32, 0, 2, 8, 1], + [3, 32, 0, 1, 8, 1]], + [[3, 48, 0, 2, 8, 1], + [3, 48, 0, 1, 8, 1], + [3, 48, 0, 1, 8, 1], + [3, 48, 0, 1, 8, 1]], + [[5, 96, 0, 2, 8, 0], + [5, 96, 0, 1, 8, 0], + [5, 96, 0, 1, 8, 0], + [5, 96, 0, 1, 8, 0], + [5, 96, 0, 1, 8, 0], + [5, 144, 0, 1, 8, 0], + [5, 144, 0, 1, 8, 0], + [5, 144, 0, 1, 8, 0], + [5, 144, 0, 1, 8, 0]], + [[5, 192, 0, 2, 8, 0], + [5, 192, 0, 1, 8, 0]], + [[1, 1280, 0, 1, 0, -1]] + ] + } # yapf: disable + + # Parameters to build different kinds of architecture. + # From left to right: scaling factor for width, scaling factor for depth, + # resolution. + arch_settings = { + 'b0': (1.0, 1.0, 224), + 'b1': (1.0, 1.1, 240), + 'b2': (1.1, 1.2, 260), + 'b3': (1.2, 1.4, 300), + 'b4': (1.4, 1.8, 380), + 'b5': (1.6, 2.2, 456), + 'b6': (1.8, 2.6, 528), + 'b7': (2.0, 3.1, 600), + 'b8': (2.2, 3.6, 672), + 'l2': (4.3, 5.3, 800), + 'es': (1.0, 1.0, 224), + 'em': (1.0, 1.1, 240), + 'el': (1.2, 1.4, 300) + } + + def __init__(self, + arch='b0', + drop_path_rate=0., + out_indices=(6, ), + frozen_stages=0, + conv_cfg=dict(type='Conv2dAdaptivePadding'), + norm_cfg=dict(type='BN', eps=1e-3), + act_cfg=dict(type='Swish'), + norm_eval=False, + with_cp=False, + init_cfg=[ + dict(type='Kaiming', layer='Conv2d'), + dict( + type='Constant', + layer=['_BatchNorm', 'GroupNorm'], + val=1) + ]): + super(EfficientNet, self).__init__(init_cfg) + assert arch in self.arch_settings, \ + f'"{arch}" is not one of the arch_settings ' \ + f'({", ".join(self.arch_settings.keys())})' + self.arch_setting = self.arch_settings[arch] + # layer_settings of arch='l2' is 'b' + self.layer_setting = self.layer_settings['b' if arch == + 'l2' else arch[:1]] + for index in out_indices: + if index not in range(0, len(self.layer_setting)): + raise ValueError('the item in out_indices must in ' + f'range(0, {len(self.layer_setting)}). 
' + f'But received {index}') + + if frozen_stages not in range(len(self.layer_setting) + 1): + raise ValueError('frozen_stages must be in range(0, ' + f'{len(self.layer_setting) + 1}). ' + f'But received {frozen_stages}') + self.drop_path_rate = drop_path_rate + self.out_indices = out_indices + self.frozen_stages = frozen_stages + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.act_cfg = act_cfg + self.norm_eval = norm_eval + self.with_cp = with_cp + + self.layer_setting = model_scaling(self.layer_setting, + self.arch_setting) + block_cfg_0 = self.layer_setting[0][0] + block_cfg_last = self.layer_setting[-1][0] + self.in_channels = make_divisible(block_cfg_0[1], 8) + self.out_channels = block_cfg_last[1] + self.layers = nn.ModuleList() + self.layers.append( + ConvModule( + in_channels=3, + out_channels=self.in_channels, + kernel_size=block_cfg_0[0], + stride=block_cfg_0[3], + padding=block_cfg_0[0] // 2, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + act_cfg=self.act_cfg)) + self.make_layer() + self.layers.append( + ConvModule( + in_channels=self.in_channels, + out_channels=self.out_channels, + kernel_size=block_cfg_last[0], + stride=block_cfg_last[3], + padding=block_cfg_last[0] // 2, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + act_cfg=self.act_cfg)) + + def make_layer(self): + # Without the first and the final conv block. + layer_setting = self.layer_setting[1:-1] + + total_num_blocks = sum([len(x) for x in layer_setting]) + block_idx = 0 + dpr = [ + x.item() + for x in torch.linspace(0, self.drop_path_rate, total_num_blocks) + ] # stochastic depth decay rule + + for layer_cfg in layer_setting: + layer = [] + for i, block_cfg in enumerate(layer_cfg): + (kernel_size, out_channels, se_ratio, stride, expand_ratio, + block_type) = block_cfg + + mid_channels = int(self.in_channels * expand_ratio) + out_channels = make_divisible(out_channels, 8) + if se_ratio <= 0: + se_cfg = None + else: + se_cfg = dict( + channels=mid_channels, + ratio=expand_ratio * se_ratio, + divisor=1, + act_cfg=(self.act_cfg, dict(type='Sigmoid'))) + if block_type == 1: # edge tpu + if i > 0 and expand_ratio == 3: + with_residual = False + expand_ratio = 4 + else: + with_residual = True + mid_channels = int(self.in_channels * expand_ratio) + if se_cfg is not None: + se_cfg = dict( + channels=mid_channels, + ratio=se_ratio * expand_ratio, + divisor=1, + act_cfg=(self.act_cfg, dict(type='Sigmoid'))) + block = partial(EdgeResidual, with_residual=with_residual) + else: + block = InvertedResidual + layer.append( + block( + in_channels=self.in_channels, + out_channels=out_channels, + mid_channels=mid_channels, + kernel_size=kernel_size, + stride=stride, + se_cfg=se_cfg, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + act_cfg=self.act_cfg, + drop_path_rate=dpr[block_idx], + with_cp=self.with_cp)) + self.in_channels = out_channels + block_idx += 1 + self.layers.append(Sequential(*layer)) + + def forward(self, x): + outs = [] + for i, layer in enumerate(self.layers): + x = layer(x) + if i in self.out_indices: + outs.append(x) + + return tuple(outs) + + def _freeze_stages(self): + for i in range(self.frozen_stages): + m = self.layers[i] + m.eval() + for param in m.parameters(): + param.requires_grad = False + + def train(self, mode=True): + super(EfficientNet, self).train(mode) + self._freeze_stages() + if mode and self.norm_eval: + for m in self.modules(): + if isinstance(m, nn.BatchNorm2d): + m.eval() diff --git a/mmpretrain/models/backbones/efficientnet_v2.py 
b/mmpretrain/models/backbones/efficientnet_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..fec002a4dac46f756f00ed8f596b37028ba18c37 --- /dev/null +++ b/mmpretrain/models/backbones/efficientnet_v2.py @@ -0,0 +1,343 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Sequence, Tuple + +import torch +import torch.nn as nn +from mmcv.cnn.bricks import ConvModule, DropPath +from mmengine.model import Sequential +from torch import Tensor + +from mmpretrain.registry import MODELS +from ..utils import InvertedResidual as MBConv +from .base_backbone import BaseBackbone +from .efficientnet import EdgeResidual as FusedMBConv + + +class EnhancedConvModule(ConvModule): + """ConvModule with short-cut and droppath. + + Args: + in_channels (int): Number of channels in the input feature map. + Same as that in ``nn._ConvNd``. + out_channels (int): Number of channels produced by the convolution. + Same as that in ``nn._ConvNd``. + kernel_size (int | tuple[int]): Size of the convolving kernel. + Same as that in ``nn._ConvNd``. + stride (int | tuple[int]): Stride of the convolution. + Same as that in ``nn._ConvNd``. + has_skip (bool): Whether there is short-cut. Defaults to False. + drop_path_rate (float): Stochastic depth rate. Default 0.0. + padding (int | tuple[int]): Zero-padding added to both sides of + the input. Same as that in ``nn._ConvNd``. + dilation (int | tuple[int]): Spacing between kernel elements. + Same as that in ``nn._ConvNd``. + groups (int): Number of blocked connections from input channels to + output channels. Same as that in ``nn._ConvNd``. + bias (bool | str): If specified as `auto`, it will be decided by the + norm_cfg. Bias will be set as True if `norm_cfg` is None, otherwise + False. Default: "auto". + conv_cfg (dict): Config dict for convolution layer. Default: None, + which means using conv2d. + norm_cfg (dict): Config dict for normalization layer. Default: None. + act_cfg (dict): Config dict for activation layer. + Default: dict(type='ReLU'). + inplace (bool): Whether to use inplace mode for activation. + Default: True. + with_spectral_norm (bool): Whether use spectral norm in conv module. + Default: False. + padding_mode (str): If the `padding_mode` has not been supported by + current `Conv2d` in PyTorch, we will use our own padding layer + instead. Currently, we support ['zeros', 'circular'] with official + implementation and ['reflect'] with our own implementation. + Default: 'zeros'. + order (tuple[str]): The order of conv/norm/activation layers. It is a + sequence of "conv", "norm" and "act". Common examples are + ("conv", "norm", "act") and ("act", "conv", "norm"). + Default: ('conv', 'norm', 'act'). + """ + + def __init__(self, *args, has_skip=False, drop_path_rate=0, **kwargs): + super().__init__(*args, **kwargs) + self.has_skip = has_skip + if self.has_skip and (self.in_channels != self.out_channels + or self.stride != (1, 1)): + raise ValueError('the stride must be 1 and the `in_channels` and' + ' `out_channels` must be the same , when ' + '`has_skip` is True in `EnhancedConvModule` .') + self.drop_path = DropPath( + drop_path_rate) if drop_path_rate else nn.Identity() + + def forward(self, x: torch.Tensor, **kwargs) -> torch.Tensor: + short_cut = x + x = super().forward(x, **kwargs) + if self.has_skip: + x = self.drop_path(x) + short_cut + return x + + +@MODELS.register_module() +class EfficientNetV2(BaseBackbone): + """EfficientNetV2 backbone. 
+ + A PyTorch implementation of EfficientNetV2 introduced by: + `EfficientNetV2: Smaller Models and Faster Training + `_ + + Args: + arch (str): Architecture of efficientnetv2. Defaults to s. + in_channels (int): Number of input image channels. Defaults to 3. + drop_path_rate (float): The ratio of the stochastic depth. + Defaults to 0.0. + out_indices (Sequence[int]): Output from which stages. + Defaults to (-1, ). + frozen_stages (int): Stages to be frozen (all param fixed). + Defaults to 0, which means not freezing any parameters. + conv_cfg (dict): Config dict for convolution layer. + Defaults to None, which means using conv2d. + norm_cfg (dict): Config dict for normalization layer. + Defaults to dict(type='BN'). + act_cfg (dict): Config dict for activation layer. + Defaults to dict(type='Swish'). + norm_eval (bool): Whether to set norm layers to eval mode, namely, + freeze running stats (mean and var). Note: Effect on Batch Norm + and its variants only. Defaults to False. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. Defaults to False. + """ + + # Parameters to build layers. From left to right: + # - repeat (int): The repeat number of the block in the layer + # - kernel_size (int): The kernel size of the layer + # - stride (int): The stride of the first block of the layer + # - expand_ratio (int, float): The expand_ratio of the mid_channels + # - in_channel (int): The number of in_channels of the layer + # - out_channel (int): The number of out_channels of the layer + # - se_ratio (float): The sequeeze ratio of SELayer. + # - block_type (int): -2: ConvModule, -1: EnhancedConvModule, + # 0: FusedMBConv, 1: MBConv + arch_settings = { + **dict.fromkeys(['small', 's'], [[2, 3, 1, 1, 24, 24, 0.0, -1], + [4, 3, 2, 4, 24, 48, 0.0, 0], + [4, 3, 2, 4, 48, 64, 0.0, 0], + [6, 3, 2, 4, 64, 128, 0.25, 1], + [9, 3, 1, 6, 128, 160, 0.25, 1], + [15, 3, 2, 6, 160, 256, 0.25, 1], + [1, 1, 1, 1, 256, 1280, 0.0, -2]]), + **dict.fromkeys(['m', 'medium'], [[3, 3, 1, 1, 24, 24, 0.0, -1], + [5, 3, 2, 4, 24, 48, 0.0, 0], + [5, 3, 2, 4, 48, 80, 0.0, 0], + [7, 3, 2, 4, 80, 160, 0.25, 1], + [14, 3, 1, 6, 160, 176, 0.25, 1], + [18, 3, 2, 6, 176, 304, 0.25, 1], + [5, 3, 1, 6, 304, 512, 0.25, 1], + [1, 1, 1, 1, 512, 1280, 0.0, -2]]), + **dict.fromkeys(['l', 'large'], [[4, 3, 1, 1, 32, 32, 0.0, -1], + [7, 3, 2, 4, 32, 64, 0.0, 0], + [7, 3, 2, 4, 64, 96, 0.0, 0], + [10, 3, 2, 4, 96, 192, 0.25, 1], + [19, 3, 1, 6, 192, 224, 0.25, 1], + [25, 3, 2, 6, 224, 384, 0.25, 1], + [7, 3, 1, 6, 384, 640, 0.25, 1], + [1, 1, 1, 1, 640, 1280, 0.0, -2]]), + **dict.fromkeys(['xl'], [[4, 3, 1, 1, 32, 32, 0.0, -1], + [8, 3, 2, 4, 32, 64, 0.0, 0], + [8, 3, 2, 4, 64, 96, 0.0, 0], + [16, 3, 2, 4, 96, 192, 0.25, 1], + [24, 3, 1, 6, 192, 256, 0.25, 1], + [32, 3, 2, 6, 256, 512, 0.25, 1], + [8, 3, 1, 6, 512, 640, 0.25, 1], + [1, 1, 1, 1, 640, 1280, 0.0, -2]]), + **dict.fromkeys(['b0'], [[1, 3, 1, 1, 32, 16, 0.0, -1], + [2, 3, 2, 4, 16, 32, 0.0, 0], + [2, 3, 2, 4, 32, 48, 0.0, 0], + [3, 3, 2, 4, 48, 96, 0.25, 1], + [5, 3, 1, 6, 96, 112, 0.25, 1], + [8, 3, 2, 6, 112, 192, 0.25, 1], + [1, 1, 1, 1, 192, 1280, 0.0, -2]]), + **dict.fromkeys(['b1'], [[2, 3, 1, 1, 32, 16, 0.0, -1], + [3, 3, 2, 4, 16, 32, 0.0, 0], + [3, 3, 2, 4, 32, 48, 0.0, 0], + [4, 3, 2, 4, 48, 96, 0.25, 1], + [6, 3, 1, 6, 96, 112, 0.25, 1], + [9, 3, 2, 6, 112, 192, 0.25, 1], + [1, 1, 1, 1, 192, 1280, 0.0, -2]]), + **dict.fromkeys(['b2'], [[2, 3, 1, 1, 32, 16, 0.0, -1], + [3, 3, 2, 4, 16, 32, 0.0, 0], + [3, 
3, 2, 4, 32, 56, 0.0, 0], + [4, 3, 2, 4, 56, 104, 0.25, 1], + [6, 3, 1, 6, 104, 120, 0.25, 1], + [10, 3, 2, 6, 120, 208, 0.25, 1], + [1, 1, 1, 1, 208, 1408, 0.0, -2]]), + **dict.fromkeys(['b3'], [[2, 3, 1, 1, 40, 16, 0.0, -1], + [3, 3, 2, 4, 16, 40, 0.0, 0], + [3, 3, 2, 4, 40, 56, 0.0, 0], + [5, 3, 2, 4, 56, 112, 0.25, 1], + [7, 3, 1, 6, 112, 136, 0.25, 1], + [12, 3, 2, 6, 136, 232, 0.25, 1], + [1, 1, 1, 1, 232, 1536, 0.0, -2]]) + } + + def __init__(self, + arch: str = 's', + in_channels: int = 3, + drop_path_rate: float = 0., + out_indices: Sequence[int] = (-1, ), + frozen_stages: int = 0, + conv_cfg=dict(type='Conv2dAdaptivePadding'), + norm_cfg=dict(type='BN', eps=1e-3, momentum=0.1), + act_cfg=dict(type='Swish'), + norm_eval: bool = False, + with_cp: bool = False, + init_cfg=[ + dict(type='Kaiming', layer='Conv2d'), + dict( + type='Constant', + layer=['_BatchNorm', 'GroupNorm'], + val=1) + ]): + super(EfficientNetV2, self).__init__(init_cfg) + assert arch in self.arch_settings, \ + f'"{arch}" is not one of the arch_settings ' \ + f'({", ".join(self.arch_settings.keys())})' + self.arch = self.arch_settings[arch] + if frozen_stages not in range(len(self.arch) + 1): + raise ValueError('frozen_stages must be in range(0, ' + f'{len(self.arch)}), but get {frozen_stages}') + self.drop_path_rate = drop_path_rate + self.frozen_stages = frozen_stages + self.norm_eval = norm_eval + self.with_cp = with_cp + + self.layers = nn.ModuleList() + assert self.arch[-1][-1] == -2, \ + f'the last block_type of `arch_setting` must be -2 ,' \ + f'but get `{self.arch[-1][-1]}`' + self.in_channels = in_channels + self.out_channels = self.arch[-1][5] + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.act_cfg = act_cfg + + self.make_layers() + + # there len(slef.arch) + 2 layers in the backbone + # including: the first + len(self.arch) layers + the last + if isinstance(out_indices, int): + out_indices = [out_indices] + assert isinstance(out_indices, Sequence), \ + f'"out_indices" must by a sequence or int, ' \ + f'get {type(out_indices)} instead.' + out_indices = list(out_indices) + for i, index in enumerate(out_indices): + if index < 0: + out_indices[i] = len(self.layers) + index + assert 0 <= out_indices[i] <= len(self.layers), \ + f'Invalid out_indices {index}.' 
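        # Editorial worked example (not part of the original code): for
        # arch='s', ``make_layers`` builds 8 entries in ``self.layers``
        # (stem conv + 6 stages + final 1x1 conv), so the default
        # ``out_indices=(-1, )`` normalizes to ``(7, )``, i.e. the final conv.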
+ self.out_indices = out_indices + + def make_layers(self, ): + # make the first layer + self.layers.append( + ConvModule( + in_channels=self.in_channels, + out_channels=self.arch[0][4], + kernel_size=3, + stride=2, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + act_cfg=self.act_cfg)) + + in_channels = self.arch[0][4] + layer_setting = self.arch[:-1] + + total_num_blocks = sum([x[0] for x in layer_setting]) + block_idx = 0 + dpr = [ + x.item() + for x in torch.linspace(0, self.drop_path_rate, total_num_blocks) + ] # stochastic depth decay rule + + for layer_cfg in layer_setting: + layer = [] + (repeat, kernel_size, stride, expand_ratio, _, out_channels, + se_ratio, block_type) = layer_cfg + for i in range(repeat): + stride = stride if i == 0 else 1 + if block_type == -1: + has_skip = stride == 1 and in_channels == out_channels + droppath_rate = dpr[block_idx] if has_skip else 0.0 + layer.append( + EnhancedConvModule( + in_channels=in_channels, + out_channels=out_channels, + kernel_size=kernel_size, + has_skip=has_skip, + drop_path_rate=droppath_rate, + stride=stride, + padding=1, + conv_cfg=None, + norm_cfg=self.norm_cfg, + act_cfg=self.act_cfg)) + in_channels = out_channels + else: + mid_channels = int(in_channels * expand_ratio) + se_cfg = None + if block_type != 0 and se_ratio > 0: + se_cfg = dict( + channels=mid_channels, + ratio=expand_ratio * (1.0 / se_ratio), + divisor=1, + act_cfg=(self.act_cfg, dict(type='Sigmoid'))) + block = FusedMBConv if block_type == 0 else MBConv + conv_cfg = self.conv_cfg if stride == 2 else None + layer.append( + block( + in_channels=in_channels, + out_channels=out_channels, + mid_channels=mid_channels, + kernel_size=kernel_size, + stride=stride, + se_cfg=se_cfg, + conv_cfg=conv_cfg, + norm_cfg=self.norm_cfg, + act_cfg=self.act_cfg, + drop_path_rate=dpr[block_idx], + with_cp=self.with_cp)) + in_channels = out_channels + block_idx += 1 + self.layers.append(Sequential(*layer)) + + # make the last layer + self.layers.append( + ConvModule( + in_channels=in_channels, + out_channels=self.out_channels, + kernel_size=self.arch[-1][1], + stride=self.arch[-1][2], + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + act_cfg=self.act_cfg)) + + def forward(self, x: Tensor) -> Tuple[Tensor]: + outs = [] + for i, layer in enumerate(self.layers): + x = layer(x) + if i in self.out_indices: + outs.append(x) + + return tuple(outs) + + def _freeze_stages(self): + for i in range(self.frozen_stages): + m = self.layers[i] + m.eval() + for param in m.parameters(): + param.requires_grad = False + + def train(self, mode=True): + super(EfficientNetV2, self).train(mode) + self._freeze_stages() + if mode and self.norm_eval: + for m in self.modules(): + if isinstance(m, nn.BatchNorm2d): + m.eval() diff --git a/mmpretrain/models/backbones/hornet.py b/mmpretrain/models/backbones/hornet.py new file mode 100644 index 0000000000000000000000000000000000000000..460f2dc57975712b5eae8308e2fca9c38b89a3e2 --- /dev/null +++ b/mmpretrain/models/backbones/hornet.py @@ -0,0 +1,500 @@ +# Copyright (c) OpenMMLab. All rights reserved. +# Adapted from official impl at https://github.com/raoyongming/HorNet. 
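# Editorial usage sketch for the ``EfficientNetV2`` backbone defined in the
# previous file (not part of the original diff); the input size and the output
# shape are assumptions based on its default arguments (``arch='s'``,
# ``out_indices=(-1, )``):
#
#   import torch
#   from mmpretrain.models import EfficientNetV2
#
#   model = EfficientNetV2(arch='s')
#   model.eval()
#   feats = model(torch.rand(1, 3, 224, 224))
#   # feats is a tuple with a single (1, 1280, 7, 7) tensor: the final 1x1
#   # conv output after an overall stride of 32 (224 / 32 = 7).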
+try: + import torch.fft + fft = True +except ImportError: + fft = None + +import copy +from functools import partial +from typing import Sequence + +import torch +import torch.nn as nn +import torch.nn.functional as F +import torch.utils.checkpoint as checkpoint +from mmcv.cnn.bricks import DropPath + +from mmpretrain.models.backbones.base_backbone import BaseBackbone +from mmpretrain.registry import MODELS +from ..utils import LayerScale + + +def get_dwconv(dim, kernel_size, bias=True): + """Build a depth-wise convolution.""" + return nn.Conv2d( + dim, + dim, + kernel_size=kernel_size, + padding=(kernel_size - 1) // 2, + bias=bias, + groups=dim) + + +class HorNetLayerNorm(nn.Module): + """An implementation of LayerNorm of HorNet. + + The differences between HorNetLayerNorm & torch LayerNorm: + 1. Supports two data formats channels_last or channels_first. + Args: + normalized_shape (int or list or torch.Size): input shape from an + expected input of size. + eps (float): a value added to the denominator for numerical stability. + Defaults to 1e-6. + data_format (str): The ordering of the dimensions in the inputs. + channels_last corresponds to inputs with shape (batch_size, height, + width, channels) while channels_first corresponds to inputs with + shape (batch_size, channels, height, width). + Defaults to 'channels_last'. + """ + + def __init__(self, + normalized_shape, + eps=1e-6, + data_format='channels_last'): + super().__init__() + self.weight = nn.Parameter(torch.ones(normalized_shape)) + self.bias = nn.Parameter(torch.zeros(normalized_shape)) + self.eps = eps + self.data_format = data_format + if self.data_format not in ['channels_last', 'channels_first']: + raise ValueError( + 'data_format must be channels_last or channels_first') + self.normalized_shape = (normalized_shape, ) + + def forward(self, x): + if self.data_format == 'channels_last': + return F.layer_norm(x, self.normalized_shape, self.weight, + self.bias, self.eps) + elif self.data_format == 'channels_first': + u = x.mean(1, keepdim=True) + s = (x - u).pow(2).mean(1, keepdim=True) + x = (x - u) / torch.sqrt(s + self.eps) + x = self.weight[:, None, None] * x + self.bias[:, None, None] + return x + + +class GlobalLocalFilter(nn.Module): + """A GlobalLocalFilter of HorNet. + + Args: + dim (int): Number of input channels. + h (int): Height of complex_weight. + Defaults to 14. + w (int): Width of complex_weight. + Defaults to 8.
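    Example:
        An editorial sketch, not part of the original implementation; the
        module is shape-preserving, and a 14x14 input matches the default
        ``complex_weight`` size (``h=14``, ``w=14 // 2 + 1=8``) so no
        interpolation of the weight is needed.

        >>> import torch
        >>> gf = GlobalLocalFilter(dim=64)
        >>> out = gf(torch.rand(2, 64, 14, 14))
        >>> print(tuple(out.shape))
        (2, 64, 14, 14)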
+ """ + + def __init__(self, dim, h=14, w=8): + super().__init__() + self.dw = nn.Conv2d( + dim // 2, + dim // 2, + kernel_size=3, + padding=1, + bias=False, + groups=dim // 2) + self.complex_weight = nn.Parameter( + torch.randn(dim // 2, h, w, 2, dtype=torch.float32) * 0.02) + self.pre_norm = HorNetLayerNorm( + dim, eps=1e-6, data_format='channels_first') + self.post_norm = HorNetLayerNorm( + dim, eps=1e-6, data_format='channels_first') + + def forward(self, x): + x = self.pre_norm(x) + x1, x2 = torch.chunk(x, 2, dim=1) + x1 = self.dw(x1) + + x2 = x2.to(torch.float32) + B, C, a, b = x2.shape + x2 = torch.fft.rfft2(x2, dim=(2, 3), norm='ortho') + + weight = self.complex_weight + if not weight.shape[1:3] == x2.shape[2:4]: + weight = F.interpolate( + weight.permute(3, 0, 1, 2), + size=x2.shape[2:4], + mode='bilinear', + align_corners=True).permute(1, 2, 3, 0) + + weight = torch.view_as_complex(weight.contiguous()) + + x2 = x2 * weight + x2 = torch.fft.irfft2(x2, s=(a, b), dim=(2, 3), norm='ortho') + + x = torch.cat([x1.unsqueeze(2), x2.unsqueeze(2)], + dim=2).reshape(B, 2 * C, a, b) + x = self.post_norm(x) + return x + + +class gnConv(nn.Module): + """A gnConv of HorNet. + + Args: + dim (int): Number of input channels. + order (int): Order of gnConv. + Defaults to 5. + dw_cfg (dict): The Config for dw conv. + Defaults to ``dict(type='DW', kernel_size=7)``. + scale (float): Scaling parameter of gflayer outputs. + Defaults to 1.0. + """ + + def __init__(self, + dim, + order=5, + dw_cfg=dict(type='DW', kernel_size=7), + scale=1.0): + super().__init__() + self.order = order + self.dims = [dim // 2**i for i in range(order)] + self.dims.reverse() + self.proj_in = nn.Conv2d(dim, 2 * dim, 1) + + cfg = copy.deepcopy(dw_cfg) + dw_type = cfg.pop('type') + assert dw_type in ['DW', 'GF'],\ + 'dw_type should be `DW` or `GF`' + if dw_type == 'DW': + self.dwconv = get_dwconv(sum(self.dims), **cfg) + elif dw_type == 'GF': + self.dwconv = GlobalLocalFilter(sum(self.dims), **cfg) + + self.proj_out = nn.Conv2d(dim, dim, 1) + + self.projs = nn.ModuleList([ + nn.Conv2d(self.dims[i], self.dims[i + 1], 1) + for i in range(order - 1) + ]) + + self.scale = scale + + def forward(self, x): + x = self.proj_in(x) + y, x = torch.split(x, (self.dims[0], sum(self.dims)), dim=1) + + x = self.dwconv(x) * self.scale + + dw_list = torch.split(x, self.dims, dim=1) + x = y * dw_list[0] + + for i in range(self.order - 1): + x = self.projs[i](x) * dw_list[i + 1] + + x = self.proj_out(x) + + return x + + +class HorNetBlock(nn.Module): + """A block of HorNet. + + Args: + dim (int): Number of input channels. + order (int): Order of gnConv. + Defaults to 5. + dw_cfg (dict): The Config for dw conv. + Defaults to ``dict(type='DW', kernel_size=7)``. + scale (float): Scaling parameter of gflayer outputs. + Defaults to 1.0. + drop_path_rate (float): Stochastic depth rate. Defaults to 0. + use_layer_scale (bool): Whether to use use_layer_scale in HorNet + block. Defaults to True. 
+ """ + + def __init__(self, + dim, + order=5, + dw_cfg=dict(type='DW', kernel_size=7), + scale=1.0, + drop_path_rate=0., + use_layer_scale=True): + super().__init__() + self.out_channels = dim + + self.norm1 = HorNetLayerNorm( + dim, eps=1e-6, data_format='channels_first') + self.gnconv = gnConv(dim, order, dw_cfg, scale) + self.norm2 = HorNetLayerNorm(dim, eps=1e-6) + self.pwconv1 = nn.Linear(dim, 4 * dim) + self.act = nn.GELU() + self.pwconv2 = nn.Linear(4 * dim, dim) + + if use_layer_scale: + self.gamma1 = LayerScale(dim, data_format='channels_first') + self.gamma2 = LayerScale(dim) + else: + self.gamma1, self.gamma2 = nn.Identity(), nn.Identity() + + self.drop_path = DropPath( + drop_path_rate) if drop_path_rate > 0. else nn.Identity() + + def forward(self, x): + x = x + self.drop_path(self.gamma1(self.gnconv(self.norm1(x)))) + + input = x + x = x.permute(0, 2, 3, 1) # (N, C, H, W) -> (N, H, W, C) + x = self.norm2(x) + x = self.pwconv1(x) + x = self.act(x) + x = self.pwconv2(x) + x = self.gamma2(x) + x = x.permute(0, 3, 1, 2) # (N, H, W, C) -> (N, C, H, W) + + x = input + self.drop_path(x) + return x + + +@MODELS.register_module() +class HorNet(BaseBackbone): + """HorNet backbone. + + A PyTorch implementation of paper `HorNet: Efficient High-Order Spatial + Interactions with Recursive Gated Convolutions + `_ . + Inspiration from https://github.com/raoyongming/HorNet + + Args: + arch (str | dict): HorNet architecture. + + If use string, choose from 'tiny', 'small', 'base' and 'large'. + If use dict, it should have below keys: + + - **base_dim** (int): The base dimensions of embedding. + - **depths** (List[int]): The number of blocks in each stage. + - **orders** (List[int]): The number of order of gnConv in each + stage. + - **dw_cfg** (List[dict]): The Config for dw conv. + + Defaults to 'tiny'. + in_channels (int): Number of input image channels. Defaults to 3. + drop_path_rate (float): Stochastic depth rate. Defaults to 0. + scale (float): Scaling parameter of gflayer outputs. Defaults to 1/3. + use_layer_scale (bool): Whether to use use_layer_scale in HorNet + block. Defaults to True. + out_indices (Sequence[int]): Output from which stages. + Default: ``(3, )``. + frozen_stages (int): Stages to be frozen (stop grad and set eval mode). + -1 means not freezing any parameters. Defaults to -1. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. Defaults to False. + gap_before_final_norm (bool): Whether to globally average the feature + map before the final norm layer. In the official repo, it's only + used in classification task. Defaults to True. + init_cfg (dict, optional): The Config for initialization. + Defaults to None. 
+ """ + arch_zoo = { + **dict.fromkeys(['t', 'tiny'], + {'base_dim': 64, + 'depths': [2, 3, 18, 2], + 'orders': [2, 3, 4, 5], + 'dw_cfg': [dict(type='DW', kernel_size=7)] * 4}), + **dict.fromkeys(['t-gf', 'tiny-gf'], + {'base_dim': 64, + 'depths': [2, 3, 18, 2], + 'orders': [2, 3, 4, 5], + 'dw_cfg': [ + dict(type='DW', kernel_size=7), + dict(type='DW', kernel_size=7), + dict(type='GF', h=14, w=8), + dict(type='GF', h=7, w=4)]}), + **dict.fromkeys(['s', 'small'], + {'base_dim': 96, + 'depths': [2, 3, 18, 2], + 'orders': [2, 3, 4, 5], + 'dw_cfg': [dict(type='DW', kernel_size=7)] * 4}), + **dict.fromkeys(['s-gf', 'small-gf'], + {'base_dim': 96, + 'depths': [2, 3, 18, 2], + 'orders': [2, 3, 4, 5], + 'dw_cfg': [ + dict(type='DW', kernel_size=7), + dict(type='DW', kernel_size=7), + dict(type='GF', h=14, w=8), + dict(type='GF', h=7, w=4)]}), + **dict.fromkeys(['b', 'base'], + {'base_dim': 128, + 'depths': [2, 3, 18, 2], + 'orders': [2, 3, 4, 5], + 'dw_cfg': [dict(type='DW', kernel_size=7)] * 4}), + **dict.fromkeys(['b-gf', 'base-gf'], + {'base_dim': 128, + 'depths': [2, 3, 18, 2], + 'orders': [2, 3, 4, 5], + 'dw_cfg': [ + dict(type='DW', kernel_size=7), + dict(type='DW', kernel_size=7), + dict(type='GF', h=14, w=8), + dict(type='GF', h=7, w=4)]}), + **dict.fromkeys(['b-gf384', 'base-gf384'], + {'base_dim': 128, + 'depths': [2, 3, 18, 2], + 'orders': [2, 3, 4, 5], + 'dw_cfg': [ + dict(type='DW', kernel_size=7), + dict(type='DW', kernel_size=7), + dict(type='GF', h=24, w=12), + dict(type='GF', h=13, w=7)]}), + **dict.fromkeys(['l', 'large'], + {'base_dim': 192, + 'depths': [2, 3, 18, 2], + 'orders': [2, 3, 4, 5], + 'dw_cfg': [dict(type='DW', kernel_size=7)] * 4}), + **dict.fromkeys(['l-gf', 'large-gf'], + {'base_dim': 192, + 'depths': [2, 3, 18, 2], + 'orders': [2, 3, 4, 5], + 'dw_cfg': [ + dict(type='DW', kernel_size=7), + dict(type='DW', kernel_size=7), + dict(type='GF', h=14, w=8), + dict(type='GF', h=7, w=4)]}), + **dict.fromkeys(['l-gf384', 'large-gf384'], + {'base_dim': 192, + 'depths': [2, 3, 18, 2], + 'orders': [2, 3, 4, 5], + 'dw_cfg': [ + dict(type='DW', kernel_size=7), + dict(type='DW', kernel_size=7), + dict(type='GF', h=24, w=12), + dict(type='GF', h=13, w=7)]}), + } # yapf: disable + + def __init__(self, + arch='tiny', + in_channels=3, + drop_path_rate=0., + scale=1 / 3, + use_layer_scale=True, + out_indices=(3, ), + frozen_stages=-1, + with_cp=False, + gap_before_final_norm=True, + init_cfg=None): + super().__init__(init_cfg=init_cfg) + if fft is None: + raise RuntimeError( + 'Failed to import torch.fft. 
Please install "torch>=1.7".') + + if isinstance(arch, str): + arch = arch.lower() + assert arch in set(self.arch_zoo), \ + f'Arch {arch} is not in default archs {set(self.arch_zoo)}' + self.arch_settings = self.arch_zoo[arch] + else: + essential_keys = {'base_dim', 'depths', 'orders', 'dw_cfg'} + assert isinstance(arch, dict) and set(arch) == essential_keys, \ + f'Custom arch needs a dict with keys {essential_keys}' + self.arch_settings = arch + + self.scale = scale + self.out_indices = out_indices + self.frozen_stages = frozen_stages + self.with_cp = with_cp + self.gap_before_final_norm = gap_before_final_norm + + base_dim = self.arch_settings['base_dim'] + dims = list(map(lambda x: 2**x * base_dim, range(4))) + + self.downsample_layers = nn.ModuleList() + stem = nn.Sequential( + nn.Conv2d(in_channels, dims[0], kernel_size=4, stride=4), + HorNetLayerNorm(dims[0], eps=1e-6, data_format='channels_first')) + self.downsample_layers.append(stem) + for i in range(3): + downsample_layer = nn.Sequential( + HorNetLayerNorm( + dims[i], eps=1e-6, data_format='channels_first'), + nn.Conv2d(dims[i], dims[i + 1], kernel_size=2, stride=2), + ) + self.downsample_layers.append(downsample_layer) + + total_depth = sum(self.arch_settings['depths']) + dpr = [ + x.item() for x in torch.linspace(0, drop_path_rate, total_depth) + ] # stochastic depth decay rule + + cur_block_idx = 0 + self.stages = nn.ModuleList() + for i in range(4): + stage = nn.Sequential(*[ + HorNetBlock( + dim=dims[i], + order=self.arch_settings['orders'][i], + dw_cfg=self.arch_settings['dw_cfg'][i], + scale=self.scale, + drop_path_rate=dpr[cur_block_idx + j], + use_layer_scale=use_layer_scale) + for j in range(self.arch_settings['depths'][i]) + ]) + self.stages.append(stage) + cur_block_idx += self.arch_settings['depths'][i] + + if isinstance(out_indices, int): + out_indices = [out_indices] + assert isinstance(out_indices, Sequence), \ + f'"out_indices" must by a sequence or int, ' \ + f'get {type(out_indices)} instead.' + out_indices = list(out_indices) + for i, index in enumerate(out_indices): + if index < 0: + out_indices[i] = len(self.stages) + index + assert 0 <= out_indices[i] <= len(self.stages), \ + f'Invalid out_indices {index}.' 
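        # Editorial worked example (not part of the original code): ``dims``
        # above doubles ``base_dim`` per stage, e.g. [64, 128, 256, 512] for
        # arch='tiny', which is why the default ``out_indices=(3, )`` yields
        # 512-channel features from the last stage.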
+ self.out_indices = out_indices + + norm_layer = partial( + HorNetLayerNorm, eps=1e-6, data_format='channels_first') + for i_layer in out_indices: + layer = norm_layer(dims[i_layer]) + layer_name = f'norm{i_layer}' + self.add_module(layer_name, layer) + + def train(self, mode=True): + super(HorNet, self).train(mode) + self._freeze_stages() + + def _freeze_stages(self): + for i in range(0, self.frozen_stages + 1): + # freeze patch embed + m = self.downsample_layers[i] + m.eval() + for param in m.parameters(): + param.requires_grad = False + + # freeze blocks + m = self.stages[i] + m.eval() + for param in m.parameters(): + param.requires_grad = False + + if i in self.out_indices: + # freeze norm + m = getattr(self, f'norm{i + 1}') + m.eval() + for param in m.parameters(): + param.requires_grad = False + + def forward(self, x): + outs = [] + for i in range(4): + x = self.downsample_layers[i](x) + if self.with_cp: + x = checkpoint.checkpoint_sequential(self.stages[i], + len(self.stages[i]), x) + else: + x = self.stages[i](x) + if i in self.out_indices: + norm_layer = getattr(self, f'norm{i}') + if self.gap_before_final_norm: + gap = x.mean([-2, -1], keepdim=True) + outs.append(norm_layer(gap).flatten(1)) + else: + # The output of LayerNorm2d may be discontiguous, which + # may cause some problem in the downstream tasks + outs.append(norm_layer(x).contiguous()) + return tuple(outs) diff --git a/mmpretrain/models/backbones/hrnet.py b/mmpretrain/models/backbones/hrnet.py new file mode 100644 index 0000000000000000000000000000000000000000..99afa908531326f05ff1c977f0146a528683af43 --- /dev/null +++ b/mmpretrain/models/backbones/hrnet.py @@ -0,0 +1,563 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch.nn as nn +from mmcv.cnn import build_conv_layer, build_norm_layer +from mmengine.model import BaseModule, ModuleList, Sequential +from torch.nn.modules.batchnorm import _BatchNorm + +from mmpretrain.registry import MODELS +from .resnet import BasicBlock, Bottleneck, ResLayer, get_expansion + + +class HRModule(BaseModule): + """High-Resolution Module for HRNet. + + In this module, every branch has 4 BasicBlocks/Bottlenecks. Fusion/Exchange + is in this module. + + Args: + num_branches (int): The number of branches. + block (``BaseModule``): Convolution block module. + num_blocks (tuple): The number of blocks in each branch. + The length must be equal to ``num_branches``. + num_channels (tuple): The number of base channels in each branch. + The length must be equal to ``num_branches``. + multiscale_output (bool): Whether to output multi-level features + produced by multiple branches. If False, only the first level + feature will be output. Defaults to True. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. Defaults to False. + conv_cfg (dict, optional): Dictionary to construct and config conv + layer. Defaults to None. + norm_cfg (dict): Dictionary to construct and config norm layer. + Defaults to ``dict(type='BN')``. + block_init_cfg (dict, optional): The initialization configs of every + blocks. Defaults to None. + init_cfg (dict or list[dict], optional): Initialization config dict. + Defaults to None. 
+ """ + + def __init__(self, + num_branches, + block, + num_blocks, + in_channels, + num_channels, + multiscale_output=True, + with_cp=False, + conv_cfg=None, + norm_cfg=dict(type='BN'), + block_init_cfg=None, + init_cfg=None): + super(HRModule, self).__init__(init_cfg) + self.block_init_cfg = block_init_cfg + self._check_branches(num_branches, num_blocks, in_channels, + num_channels) + + self.in_channels = in_channels + self.num_branches = num_branches + + self.multiscale_output = multiscale_output + self.norm_cfg = norm_cfg + self.conv_cfg = conv_cfg + self.with_cp = with_cp + self.branches = self._make_branches(num_branches, block, num_blocks, + num_channels) + self.fuse_layers = self._make_fuse_layers() + self.relu = nn.ReLU(inplace=False) + + def _check_branches(self, num_branches, num_blocks, in_channels, + num_channels): + if num_branches != len(num_blocks): + error_msg = f'NUM_BRANCHES({num_branches}) ' \ + f'!= NUM_BLOCKS({len(num_blocks)})' + raise ValueError(error_msg) + + if num_branches != len(num_channels): + error_msg = f'NUM_BRANCHES({num_branches}) ' \ + f'!= NUM_CHANNELS({len(num_channels)})' + raise ValueError(error_msg) + + if num_branches != len(in_channels): + error_msg = f'NUM_BRANCHES({num_branches}) ' \ + f'!= NUM_INCHANNELS({len(in_channels)})' + raise ValueError(error_msg) + + def _make_branches(self, num_branches, block, num_blocks, num_channels): + branches = [] + + for i in range(num_branches): + out_channels = num_channels[i] * get_expansion(block) + branches.append( + ResLayer( + block=block, + num_blocks=num_blocks[i], + in_channels=self.in_channels[i], + out_channels=out_channels, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + with_cp=self.with_cp, + init_cfg=self.block_init_cfg, + )) + + return ModuleList(branches) + + def _make_fuse_layers(self): + if self.num_branches == 1: + return None + + num_branches = self.num_branches + in_channels = self.in_channels + fuse_layers = [] + num_out_branches = num_branches if self.multiscale_output else 1 + for i in range(num_out_branches): + fuse_layer = [] + for j in range(num_branches): + if j > i: + # Upsample the feature maps of smaller scales. + fuse_layer.append( + nn.Sequential( + build_conv_layer( + self.conv_cfg, + in_channels[j], + in_channels[i], + kernel_size=1, + stride=1, + padding=0, + bias=False), + build_norm_layer(self.norm_cfg, in_channels[i])[1], + nn.Upsample( + scale_factor=2**(j - i), mode='nearest'))) + elif j == i: + # Keep the feature map with the same scale. + fuse_layer.append(None) + else: + # Downsample the feature maps of larger scales. + conv_downsamples = [] + for k in range(i - j): + # Use stacked convolution layers to downsample. 
+ if k == i - j - 1: + conv_downsamples.append( + nn.Sequential( + build_conv_layer( + self.conv_cfg, + in_channels[j], + in_channels[i], + kernel_size=3, + stride=2, + padding=1, + bias=False), + build_norm_layer(self.norm_cfg, + in_channels[i])[1])) + else: + conv_downsamples.append( + nn.Sequential( + build_conv_layer( + self.conv_cfg, + in_channels[j], + in_channels[j], + kernel_size=3, + stride=2, + padding=1, + bias=False), + build_norm_layer(self.norm_cfg, + in_channels[j])[1], + nn.ReLU(inplace=False))) + fuse_layer.append(nn.Sequential(*conv_downsamples)) + fuse_layers.append(nn.ModuleList(fuse_layer)) + + return nn.ModuleList(fuse_layers) + + def forward(self, x): + """Forward function.""" + if self.num_branches == 1: + return [self.branches[0](x[0])] + + for i in range(self.num_branches): + x[i] = self.branches[i](x[i]) + + x_fuse = [] + for i in range(len(self.fuse_layers)): + y = 0 + for j in range(self.num_branches): + if i == j: + y += x[j] + else: + y += self.fuse_layers[i][j](x[j]) + x_fuse.append(self.relu(y)) + return x_fuse + + +@MODELS.register_module() +class HRNet(BaseModule): + """HRNet backbone. + + `High-Resolution Representations for Labeling Pixels and Regions + `_. + + Args: + arch (str): The preset HRNet architecture, includes 'w18', 'w30', + 'w32', 'w40', 'w44', 'w48', 'w64'. It will only be used if + extra is ``None``. Defaults to 'w32'. + extra (dict, optional): Detailed configuration for each stage of HRNet. + There must be 4 stages, the configuration for each stage must have + 5 keys: + + - num_modules (int): The number of HRModule in this stage. + - num_branches (int): The number of branches in the HRModule. + - block (str): The type of convolution block. Please choose between + 'BOTTLENECK' and 'BASIC'. + - num_blocks (tuple): The number of blocks in each branch. + The length must be equal to num_branches. + - num_channels (tuple): The number of base channels in each branch. + The length must be equal to num_branches. + + Defaults to None. + in_channels (int): Number of input image channels. Defaults to 3. + conv_cfg (dict, optional): Dictionary to construct and config conv + layer. Defaults to None. + norm_cfg (dict): Dictionary to construct and config norm layer. + Defaults to ``dict(type='BN')``. + norm_eval (bool): Whether to set norm layers to eval mode, namely, + freeze running stats (mean and var). Note: Effect on Batch Norm + and its variants only. Defaults to False. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. Defaults to False. + zero_init_residual (bool): Whether to use zero init for last norm layer + in resblocks to let them behave as identity. Defaults to False. + multiscale_output (bool): Whether to output multi-level features + produced by multiple branches. If False, only the first level + feature will be output. Defaults to True. + init_cfg (dict or list[dict], optional): Initialization config dict. + Defaults to None. 
+ + Example: + >>> import torch + >>> from mmpretrain.models import HRNet + >>> extra = dict( + >>> stage1=dict( + >>> num_modules=1, + >>> num_branches=1, + >>> block='BOTTLENECK', + >>> num_blocks=(4, ), + >>> num_channels=(64, )), + >>> stage2=dict( + >>> num_modules=1, + >>> num_branches=2, + >>> block='BASIC', + >>> num_blocks=(4, 4), + >>> num_channels=(32, 64)), + >>> stage3=dict( + >>> num_modules=4, + >>> num_branches=3, + >>> block='BASIC', + >>> num_blocks=(4, 4, 4), + >>> num_channels=(32, 64, 128)), + >>> stage4=dict( + >>> num_modules=3, + >>> num_branches=4, + >>> block='BASIC', + >>> num_blocks=(4, 4, 4, 4), + >>> num_channels=(32, 64, 128, 256))) + >>> self = HRNet(extra, in_channels=1) + >>> self.eval() + >>> inputs = torch.rand(1, 1, 32, 32) + >>> level_outputs = self.forward(inputs) + >>> for level_out in level_outputs: + ... print(tuple(level_out.shape)) + (1, 32, 8, 8) + (1, 64, 4, 4) + (1, 128, 2, 2) + (1, 256, 1, 1) + """ + + blocks_dict = {'BASIC': BasicBlock, 'BOTTLENECK': Bottleneck} + arch_zoo = { + # num_modules, num_branches, block, num_blocks, num_channels + 'w18': [[1, 1, 'BOTTLENECK', (4, ), (64, )], + [1, 2, 'BASIC', (4, 4), (18, 36)], + [4, 3, 'BASIC', (4, 4, 4), (18, 36, 72)], + [3, 4, 'BASIC', (4, 4, 4, 4), (18, 36, 72, 144)]], + 'w30': [[1, 1, 'BOTTLENECK', (4, ), (64, )], + [1, 2, 'BASIC', (4, 4), (30, 60)], + [4, 3, 'BASIC', (4, 4, 4), (30, 60, 120)], + [3, 4, 'BASIC', (4, 4, 4, 4), (30, 60, 120, 240)]], + 'w32': [[1, 1, 'BOTTLENECK', (4, ), (64, )], + [1, 2, 'BASIC', (4, 4), (32, 64)], + [4, 3, 'BASIC', (4, 4, 4), (32, 64, 128)], + [3, 4, 'BASIC', (4, 4, 4, 4), (32, 64, 128, 256)]], + 'w40': [[1, 1, 'BOTTLENECK', (4, ), (64, )], + [1, 2, 'BASIC', (4, 4), (40, 80)], + [4, 3, 'BASIC', (4, 4, 4), (40, 80, 160)], + [3, 4, 'BASIC', (4, 4, 4, 4), (40, 80, 160, 320)]], + 'w44': [[1, 1, 'BOTTLENECK', (4, ), (64, )], + [1, 2, 'BASIC', (4, 4), (44, 88)], + [4, 3, 'BASIC', (4, 4, 4), (44, 88, 176)], + [3, 4, 'BASIC', (4, 4, 4, 4), (44, 88, 176, 352)]], + 'w48': [[1, 1, 'BOTTLENECK', (4, ), (64, )], + [1, 2, 'BASIC', (4, 4), (48, 96)], + [4, 3, 'BASIC', (4, 4, 4), (48, 96, 192)], + [3, 4, 'BASIC', (4, 4, 4, 4), (48, 96, 192, 384)]], + 'w64': [[1, 1, 'BOTTLENECK', (4, ), (64, )], + [1, 2, 'BASIC', (4, 4), (64, 128)], + [4, 3, 'BASIC', (4, 4, 4), (64, 128, 256)], + [3, 4, 'BASIC', (4, 4, 4, 4), (64, 128, 256, 512)]], + } # yapf:disable + + def __init__(self, + arch='w32', + extra=None, + in_channels=3, + conv_cfg=None, + norm_cfg=dict(type='BN'), + norm_eval=False, + with_cp=False, + zero_init_residual=False, + multiscale_output=True, + init_cfg=[ + dict(type='Kaiming', layer='Conv2d'), + dict( + type='Constant', + val=1, + layer=['_BatchNorm', 'GroupNorm']) + ]): + super(HRNet, self).__init__(init_cfg) + + extra = self.parse_arch(arch, extra) + + # Assert configurations of 4 stages are in extra + for i in range(1, 5): + assert f'stage{i}' in extra, f'Missing stage{i} config in "extra".' 
+ # Assert whether the length of `num_blocks` and `num_channels` are + # equal to `num_branches` + cfg = extra[f'stage{i}'] + assert len(cfg['num_blocks']) == cfg['num_branches'] and \ + len(cfg['num_channels']) == cfg['num_branches'] + + self.extra = extra + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.norm_eval = norm_eval + self.with_cp = with_cp + self.zero_init_residual = zero_init_residual + + # -------------------- stem net -------------------- + self.conv1 = build_conv_layer( + self.conv_cfg, + in_channels, + out_channels=64, + kernel_size=3, + stride=2, + padding=1, + bias=False) + + self.norm1_name, norm1 = build_norm_layer(self.norm_cfg, 64, postfix=1) + self.add_module(self.norm1_name, norm1) + + self.conv2 = build_conv_layer( + self.conv_cfg, + in_channels=64, + out_channels=64, + kernel_size=3, + stride=2, + padding=1, + bias=False) + + self.norm2_name, norm2 = build_norm_layer(self.norm_cfg, 64, postfix=2) + self.add_module(self.norm2_name, norm2) + self.relu = nn.ReLU(inplace=True) + + # -------------------- stage 1 -------------------- + self.stage1_cfg = self.extra['stage1'] + base_channels = self.stage1_cfg['num_channels'] + block_type = self.stage1_cfg['block'] + num_blocks = self.stage1_cfg['num_blocks'] + + block = self.blocks_dict[block_type] + num_channels = [ + channel * get_expansion(block) for channel in base_channels + ] + # To align with the original code, use layer1 instead of stage1 here. + self.layer1 = ResLayer( + block, + in_channels=64, + out_channels=num_channels[0], + num_blocks=num_blocks[0]) + pre_num_channels = num_channels + + # -------------------- stage 2~4 -------------------- + for i in range(2, 5): + stage_cfg = self.extra[f'stage{i}'] + base_channels = stage_cfg['num_channels'] + block = self.blocks_dict[stage_cfg['block']] + multiscale_output_ = multiscale_output if i == 4 else True + + num_channels = [ + channel * get_expansion(block) for channel in base_channels + ] + # The transition layer from layer1 to stage2 + transition = self._make_transition_layer(pre_num_channels, + num_channels) + self.add_module(f'transition{i-1}', transition) + stage = self._make_stage( + stage_cfg, num_channels, multiscale_output=multiscale_output_) + self.add_module(f'stage{i}', stage) + + pre_num_channels = num_channels + + @property + def norm1(self): + """nn.Module: the normalization layer named "norm1" """ + return getattr(self, self.norm1_name) + + @property + def norm2(self): + """nn.Module: the normalization layer named "norm2" """ + return getattr(self, self.norm2_name) + + def _make_transition_layer(self, num_channels_pre_layer, + num_channels_cur_layer): + num_branches_cur = len(num_channels_cur_layer) + num_branches_pre = len(num_channels_pre_layer) + + transition_layers = [] + for i in range(num_branches_cur): + if i < num_branches_pre: + # For existing scale branches, + # add conv block when the channels are not the same. + if num_channels_cur_layer[i] != num_channels_pre_layer[i]: + transition_layers.append( + nn.Sequential( + build_conv_layer( + self.conv_cfg, + num_channels_pre_layer[i], + num_channels_cur_layer[i], + kernel_size=3, + stride=1, + padding=1, + bias=False), + build_norm_layer(self.norm_cfg, + num_channels_cur_layer[i])[1], + nn.ReLU(inplace=True))) + else: + transition_layers.append(nn.Identity()) + else: + # For new scale branches, add stacked downsample conv blocks. + # For example, num_branches_pre = 2, for the 4th branch, add + # stacked two downsample conv blocks. 
+ conv_downsamples = [] + for j in range(i + 1 - num_branches_pre): + in_channels = num_channels_pre_layer[-1] + out_channels = num_channels_cur_layer[i] \ + if j == i - num_branches_pre else in_channels + conv_downsamples.append( + nn.Sequential( + build_conv_layer( + self.conv_cfg, + in_channels, + out_channels, + kernel_size=3, + stride=2, + padding=1, + bias=False), + build_norm_layer(self.norm_cfg, out_channels)[1], + nn.ReLU(inplace=True))) + transition_layers.append(nn.Sequential(*conv_downsamples)) + + return nn.ModuleList(transition_layers) + + def _make_stage(self, layer_config, in_channels, multiscale_output=True): + num_modules = layer_config['num_modules'] + num_branches = layer_config['num_branches'] + num_blocks = layer_config['num_blocks'] + num_channels = layer_config['num_channels'] + block = self.blocks_dict[layer_config['block']] + + hr_modules = [] + block_init_cfg = None + if self.zero_init_residual: + if block is BasicBlock: + block_init_cfg = dict( + type='Constant', val=0, override=dict(name='norm2')) + elif block is Bottleneck: + block_init_cfg = dict( + type='Constant', val=0, override=dict(name='norm3')) + + for i in range(num_modules): + # multi_scale_output is only used for the last module + if not multiscale_output and i == num_modules - 1: + reset_multiscale_output = False + else: + reset_multiscale_output = True + + hr_modules.append( + HRModule( + num_branches, + block, + num_blocks, + in_channels, + num_channels, + reset_multiscale_output, + with_cp=self.with_cp, + norm_cfg=self.norm_cfg, + conv_cfg=self.conv_cfg, + block_init_cfg=block_init_cfg)) + + return Sequential(*hr_modules) + + def forward(self, x): + """Forward function.""" + x = self.conv1(x) + x = self.norm1(x) + x = self.relu(x) + x = self.conv2(x) + x = self.norm2(x) + x = self.relu(x) + x = self.layer1(x) + + x_list = [x] + + for i in range(2, 5): + # Apply transition + transition = getattr(self, f'transition{i-1}') + inputs = [] + for j, layer in enumerate(transition): + if j < len(x_list): + inputs.append(layer(x_list[j])) + else: + inputs.append(layer(x_list[-1])) + # Forward HRModule + stage = getattr(self, f'stage{i}') + x_list = stage(inputs) + + return tuple(x_list) + + def train(self, mode=True): + """Convert the model into training mode will keeping the normalization + layer freezed.""" + super(HRNet, self).train(mode) + if mode and self.norm_eval: + for m in self.modules(): + # trick: eval have effect on BatchNorm only + if isinstance(m, _BatchNorm): + m.eval() + + def parse_arch(self, arch, extra=None): + if extra is not None: + return extra + + assert arch in self.arch_zoo, \ + ('Invalid arch, please choose arch from ' + f'{list(self.arch_zoo.keys())}, or specify `extra` ' + 'argument directly.') + + extra = dict() + for i, stage_setting in enumerate(self.arch_zoo[arch], start=1): + extra[f'stage{i}'] = dict( + num_modules=stage_setting[0], + num_branches=stage_setting[1], + block=stage_setting[2], + num_blocks=stage_setting[3], + num_channels=stage_setting[4], + ) + + return extra diff --git a/mmpretrain/models/backbones/inception_v3.py b/mmpretrain/models/backbones/inception_v3.py new file mode 100644 index 0000000000000000000000000000000000000000..1d6c04b9fba4b50fce31539d14874dc7a47a539a --- /dev/null +++ b/mmpretrain/models/backbones/inception_v3.py @@ -0,0 +1,501 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
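The HRNet backbone above can be built either from a preset `arch` string (expanded by `parse_arch` into the four-stage `extra` dict) or from an explicit `extra` config, as in the class docstring example. A minimal usage sketch, assuming `torch` and `mmpretrain` are importable; the printed shapes follow from the 'w32' branch widths and the stride-4 stem:

import torch

from mmpretrain.models import HRNet

# 'w32' is expanded by HRNet.parse_arch into the stage1~stage4 config above,
# so no explicit `extra` dict is needed.
model = HRNet(arch='w32', in_channels=3)
model.eval()

inputs = torch.rand(1, 3, 224, 224)
with torch.no_grad():
    level_outputs = model(inputs)

for level_out in level_outputs:
    print(tuple(level_out.shape))
# Expected, given the w32 branch widths and the stride-4 stem:
# (1, 32, 56, 56)
# (1, 64, 28, 28)
# (1, 128, 14, 14)
# (1, 256, 7, 7)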
+from typing import Optional, Tuple + +import torch +import torch.nn as nn +from mmcv.cnn import build_conv_layer +from mmengine.model import BaseModule + +from mmpretrain.registry import MODELS +from .base_backbone import BaseBackbone + + +class BasicConv2d(BaseModule): + """A basic convolution block including convolution, batch norm and ReLU. + + Args: + in_channels (int): The number of input channels. + out_channels (int): The number of output channels. + conv_cfg (dict, optional): The config of convolution layer. + Defaults to None, which means to use ``nn.Conv2d``. + init_cfg (dict, optional): The config of initialization. + Defaults to None. + **kwargs: Other keyword arguments of the convolution layer. + """ + + def __init__(self, + in_channels: int, + out_channels: int, + conv_cfg: Optional[dict] = None, + init_cfg: Optional[dict] = None, + **kwargs) -> None: + super().__init__(init_cfg=init_cfg) + self.conv = build_conv_layer( + conv_cfg, in_channels, out_channels, bias=False, **kwargs) + self.bn = nn.BatchNorm2d(out_channels, eps=0.001) + self.relu = nn.ReLU(inplace=True) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + """Forward function.""" + x = self.conv(x) + x = self.bn(x) + return self.relu(x) + + +class InceptionA(BaseModule): + """Type-A Inception block. + + Args: + in_channels (int): The number of input channels. + pool_features (int): The number of channels in pooling branch. + conv_cfg (dict, optional): The convolution layer config in the + :class:`BasicConv2d` block. Defaults to None. + init_cfg (dict, optional): The config of initialization. + Defaults to None. + """ + + def __init__(self, + in_channels: int, + pool_features: int, + conv_cfg: Optional[dict] = None, + init_cfg: Optional[dict] = None): + super().__init__(init_cfg=init_cfg) + self.branch1x1 = BasicConv2d( + in_channels, 64, kernel_size=1, conv_cfg=conv_cfg) + + self.branch5x5_1 = BasicConv2d( + in_channels, 48, kernel_size=1, conv_cfg=conv_cfg) + self.branch5x5_2 = BasicConv2d( + 48, 64, kernel_size=5, padding=2, conv_cfg=conv_cfg) + + self.branch3x3dbl_1 = BasicConv2d( + in_channels, 64, kernel_size=1, conv_cfg=conv_cfg) + self.branch3x3dbl_2 = BasicConv2d( + 64, 96, kernel_size=3, padding=1, conv_cfg=conv_cfg) + self.branch3x3dbl_3 = BasicConv2d( + 96, 96, kernel_size=3, padding=1, conv_cfg=conv_cfg) + + self.branch_pool_downsample = nn.AvgPool2d( + kernel_size=3, stride=1, padding=1) + self.branch_pool = BasicConv2d( + in_channels, pool_features, kernel_size=1, conv_cfg=conv_cfg) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + """Forward function.""" + branch1x1 = self.branch1x1(x) + + branch5x5 = self.branch5x5_1(x) + branch5x5 = self.branch5x5_2(branch5x5) + + branch3x3dbl = self.branch3x3dbl_1(x) + branch3x3dbl = self.branch3x3dbl_2(branch3x3dbl) + branch3x3dbl = self.branch3x3dbl_3(branch3x3dbl) + + branch_pool = self.branch_pool_downsample(x) + branch_pool = self.branch_pool(branch_pool) + + outputs = [branch1x1, branch5x5, branch3x3dbl, branch_pool] + return torch.cat(outputs, 1) + + +class InceptionB(BaseModule): + """Type-B Inception block. + + Args: + in_channels (int): The number of input channels. + conv_cfg (dict, optional): The convolution layer config in the + :class:`BasicConv2d` block. Defaults to None. + init_cfg (dict, optional): The config of initialization. + Defaults to None. 
+ """ + + def __init__(self, + in_channels: int, + conv_cfg: Optional[dict] = None, + init_cfg: Optional[dict] = None): + super().__init__(init_cfg=init_cfg) + self.branch3x3 = BasicConv2d( + in_channels, 384, kernel_size=3, stride=2, conv_cfg=conv_cfg) + + self.branch3x3dbl_1 = BasicConv2d( + in_channels, 64, kernel_size=1, conv_cfg=conv_cfg) + self.branch3x3dbl_2 = BasicConv2d( + 64, 96, kernel_size=3, padding=1, conv_cfg=conv_cfg) + self.branch3x3dbl_3 = BasicConv2d( + 96, 96, kernel_size=3, stride=2, conv_cfg=conv_cfg) + + self.branch_pool = nn.MaxPool2d(kernel_size=3, stride=2) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + """Forward function.""" + branch3x3 = self.branch3x3(x) + + branch3x3dbl = self.branch3x3dbl_1(x) + branch3x3dbl = self.branch3x3dbl_2(branch3x3dbl) + branch3x3dbl = self.branch3x3dbl_3(branch3x3dbl) + + branch_pool = self.branch_pool(x) + + outputs = [branch3x3, branch3x3dbl, branch_pool] + return torch.cat(outputs, 1) + + +class InceptionC(BaseModule): + """Type-C Inception block. + + Args: + in_channels (int): The number of input channels. + channels_7x7 (int): The number of channels in 7x7 convolution branch. + conv_cfg (dict, optional): The convolution layer config in the + :class:`BasicConv2d` block. Defaults to None. + init_cfg (dict, optional): The config of initialization. + Defaults to None. + """ + + def __init__(self, + in_channels: int, + channels_7x7: int, + conv_cfg: Optional[dict] = None, + init_cfg=None): + super().__init__(init_cfg=init_cfg) + self.branch1x1 = BasicConv2d( + in_channels, 192, kernel_size=1, conv_cfg=conv_cfg) + + c7 = channels_7x7 + self.branch7x7_1 = BasicConv2d( + in_channels, c7, kernel_size=1, conv_cfg=conv_cfg) + self.branch7x7_2 = BasicConv2d( + c7, c7, kernel_size=(1, 7), padding=(0, 3), conv_cfg=conv_cfg) + self.branch7x7_3 = BasicConv2d( + c7, 192, kernel_size=(7, 1), padding=(3, 0), conv_cfg=conv_cfg) + + self.branch7x7dbl_1 = BasicConv2d( + in_channels, c7, kernel_size=1, conv_cfg=conv_cfg) + self.branch7x7dbl_2 = BasicConv2d( + c7, c7, kernel_size=(7, 1), padding=(3, 0), conv_cfg=conv_cfg) + self.branch7x7dbl_3 = BasicConv2d( + c7, c7, kernel_size=(1, 7), padding=(0, 3), conv_cfg=conv_cfg) + self.branch7x7dbl_4 = BasicConv2d( + c7, c7, kernel_size=(7, 1), padding=(3, 0), conv_cfg=conv_cfg) + self.branch7x7dbl_5 = BasicConv2d( + c7, 192, kernel_size=(1, 7), padding=(0, 3), conv_cfg=conv_cfg) + + self.branch_pool_downsample = nn.AvgPool2d( + kernel_size=3, stride=1, padding=1) + self.branch_pool = BasicConv2d( + in_channels, 192, kernel_size=1, conv_cfg=conv_cfg) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + """Forward function.""" + branch1x1 = self.branch1x1(x) + + branch7x7 = self.branch7x7_1(x) + branch7x7 = self.branch7x7_2(branch7x7) + branch7x7 = self.branch7x7_3(branch7x7) + + branch7x7dbl = self.branch7x7dbl_1(x) + branch7x7dbl = self.branch7x7dbl_2(branch7x7dbl) + branch7x7dbl = self.branch7x7dbl_3(branch7x7dbl) + branch7x7dbl = self.branch7x7dbl_4(branch7x7dbl) + branch7x7dbl = self.branch7x7dbl_5(branch7x7dbl) + + branch_pool = self.branch_pool_downsample(x) + branch_pool = self.branch_pool(branch_pool) + + outputs = [branch1x1, branch7x7, branch7x7dbl, branch_pool] + return torch.cat(outputs, 1) + + +class InceptionD(BaseModule): + """Type-D Inception block. + + Args: + in_channels (int): The number of input channels. + conv_cfg (dict, optional): The convolution layer config in the + :class:`BasicConv2d` block. Defaults to None. 
+ init_cfg (dict, optional): The config of initialization. + Defaults to None. + """ + + def __init__(self, + in_channels: int, + conv_cfg: Optional[dict] = None, + init_cfg: Optional[dict] = None): + super().__init__(init_cfg=init_cfg) + self.branch3x3_1 = BasicConv2d( + in_channels, 192, kernel_size=1, conv_cfg=conv_cfg) + self.branch3x3_2 = BasicConv2d( + 192, 320, kernel_size=3, stride=2, conv_cfg=conv_cfg) + + self.branch7x7x3_1 = BasicConv2d( + in_channels, 192, kernel_size=1, conv_cfg=conv_cfg) + self.branch7x7x3_2 = BasicConv2d( + 192, 192, kernel_size=(1, 7), padding=(0, 3), conv_cfg=conv_cfg) + self.branch7x7x3_3 = BasicConv2d( + 192, 192, kernel_size=(7, 1), padding=(3, 0), conv_cfg=conv_cfg) + self.branch7x7x3_4 = BasicConv2d( + 192, 192, kernel_size=3, stride=2, conv_cfg=conv_cfg) + + self.branch_pool = nn.MaxPool2d(kernel_size=3, stride=2) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + """Forward function.""" + branch3x3 = self.branch3x3_1(x) + branch3x3 = self.branch3x3_2(branch3x3) + + branch7x7x3 = self.branch7x7x3_1(x) + branch7x7x3 = self.branch7x7x3_2(branch7x7x3) + branch7x7x3 = self.branch7x7x3_3(branch7x7x3) + branch7x7x3 = self.branch7x7x3_4(branch7x7x3) + + branch_pool = self.branch_pool(x) + outputs = [branch3x3, branch7x7x3, branch_pool] + return torch.cat(outputs, 1) + + +class InceptionE(BaseModule): + """Type-E Inception block. + + Args: + in_channels (int): The number of input channels. + conv_cfg (dict, optional): The convolution layer config in the + :class:`BasicConv2d` block. Defaults to None. + init_cfg (dict, optional): The config of initialization. + Defaults to None. + """ + + def __init__(self, + in_channels: int, + conv_cfg: Optional[dict] = None, + init_cfg=None): + super().__init__(init_cfg=init_cfg) + self.branch1x1 = BasicConv2d( + in_channels, 320, kernel_size=1, conv_cfg=conv_cfg) + + self.branch3x3_1 = BasicConv2d( + in_channels, 384, kernel_size=1, conv_cfg=conv_cfg) + self.branch3x3_2a = BasicConv2d( + 384, 384, kernel_size=(1, 3), padding=(0, 1), conv_cfg=conv_cfg) + self.branch3x3_2b = BasicConv2d( + 384, 384, kernel_size=(3, 1), padding=(1, 0), conv_cfg=conv_cfg) + + self.branch3x3dbl_1 = BasicConv2d( + in_channels, 448, kernel_size=1, conv_cfg=conv_cfg) + self.branch3x3dbl_2 = BasicConv2d( + 448, 384, kernel_size=3, padding=1, conv_cfg=conv_cfg) + self.branch3x3dbl_3a = BasicConv2d( + 384, 384, kernel_size=(1, 3), padding=(0, 1), conv_cfg=conv_cfg) + self.branch3x3dbl_3b = BasicConv2d( + 384, 384, kernel_size=(3, 1), padding=(1, 0), conv_cfg=conv_cfg) + + self.branch_pool_downsample = nn.AvgPool2d( + kernel_size=3, stride=1, padding=1) + self.branch_pool = BasicConv2d( + in_channels, 192, kernel_size=1, conv_cfg=conv_cfg) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + """Forward function.""" + branch1x1 = self.branch1x1(x) + + branch3x3 = self.branch3x3_1(x) + branch3x3 = [ + self.branch3x3_2a(branch3x3), + self.branch3x3_2b(branch3x3), + ] + branch3x3 = torch.cat(branch3x3, 1) + + branch3x3dbl = self.branch3x3dbl_1(x) + branch3x3dbl = self.branch3x3dbl_2(branch3x3dbl) + branch3x3dbl = [ + self.branch3x3dbl_3a(branch3x3dbl), + self.branch3x3dbl_3b(branch3x3dbl), + ] + branch3x3dbl = torch.cat(branch3x3dbl, 1) + + branch_pool = self.branch_pool_downsample(x) + branch_pool = self.branch_pool(branch_pool) + + outputs = [branch1x1, branch3x3, branch3x3dbl, branch_pool] + return torch.cat(outputs, 1) + + +class InceptionAux(BaseModule): + """The Inception block for the auxiliary classification branch. 
+ + Args: + in_channels (int): The number of input channels. + num_classes (int): The number of categroies. + conv_cfg (dict, optional): The convolution layer config in the + :class:`BasicConv2d` block. Defaults to None. + init_cfg (dict, optional): The config of initialization. + Defaults to use trunc normal with ``std=0.01`` for Conv2d layers + and use trunc normal with ``std=0.001`` for Linear layers.. + """ + + def __init__(self, + in_channels: int, + num_classes: int, + conv_cfg: Optional[dict] = None, + init_cfg: Optional[dict] = [ + dict(type='TruncNormal', layer='Conv2d', std=0.01), + dict(type='TruncNormal', layer='Linear', std=0.001) + ]): + super().__init__(init_cfg=init_cfg) + self.downsample = nn.AvgPool2d(kernel_size=5, stride=3) + self.conv0 = BasicConv2d( + in_channels, 128, kernel_size=1, conv_cfg=conv_cfg) + self.conv1 = BasicConv2d(128, 768, kernel_size=5, conv_cfg=conv_cfg) + self.gap = nn.AdaptiveAvgPool2d((1, 1)) + self.fc = nn.Linear(768, num_classes) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + """Forward function.""" + # N x 768 x 17 x 17 + x = self.downsample(x) + # N x 768 x 5 x 5 + x = self.conv0(x) + # N x 128 x 5 x 5 + x = self.conv1(x) + # N x 768 x 1 x 1 + # Adaptive average pooling + x = self.gap(x) + # N x 768 x 1 x 1 + x = torch.flatten(x, 1) + # N x 768 + x = self.fc(x) + # N x 1000 + return x + + +@MODELS.register_module() +class InceptionV3(BaseBackbone): + """Inception V3 backbone. + + A PyTorch implementation of `Rethinking the Inception Architecture for + Computer Vision `_ + + This implementation is modified from + https://github.com/pytorch/vision/blob/main/torchvision/models/inception.py. + Licensed under the BSD 3-Clause License. + + Args: + num_classes (int): The number of categroies. Defaults to 1000. + aux_logits (bool): Whether to enable the auxiliary branch. If False, + the auxiliary logits output will be None. Defaults to False. + dropout (float): Dropout rate. Defaults to 0.5. + init_cfg (dict, optional): The config of initialization. Defaults + to use trunc normal with ``std=0.1`` for all Conv2d and Linear + layers and constant with ``val=1`` for all BatchNorm2d layers. + + Example: + >>> import torch + >>> from mmpretrain.models import build_backbone + >>> + >>> inputs = torch.rand(2, 3, 299, 299) + >>> cfg = dict(type='InceptionV3', num_classes=100) + >>> backbone = build_backbone(cfg) + >>> aux_out, out = backbone(inputs) + >>> # The auxiliary branch is disabled by default. 
+ >>> assert aux_out is None + >>> print(out.shape) + torch.Size([2, 100]) + >>> cfg = dict(type='InceptionV3', num_classes=100, aux_logits=True) + >>> backbone = build_backbone(cfg) + >>> aux_out, out = backbone(inputs) + >>> print(aux_out.shape, out.shape) + torch.Size([2, 100]) torch.Size([2, 100]) + """ + + def __init__( + self, + num_classes: int = 1000, + aux_logits: bool = False, + dropout: float = 0.5, + init_cfg: Optional[dict] = [ + dict(type='TruncNormal', layer=['Conv2d', 'Linear'], std=0.1), + dict(type='Constant', layer='BatchNorm2d', val=1) + ], + ) -> None: + super().__init__(init_cfg=init_cfg) + + self.aux_logits = aux_logits + self.Conv2d_1a_3x3 = BasicConv2d(3, 32, kernel_size=3, stride=2) + self.Conv2d_2a_3x3 = BasicConv2d(32, 32, kernel_size=3) + self.Conv2d_2b_3x3 = BasicConv2d(32, 64, kernel_size=3, padding=1) + self.maxpool1 = nn.MaxPool2d(kernel_size=3, stride=2) + self.Conv2d_3b_1x1 = BasicConv2d(64, 80, kernel_size=1) + self.Conv2d_4a_3x3 = BasicConv2d(80, 192, kernel_size=3) + self.maxpool2 = nn.MaxPool2d(kernel_size=3, stride=2) + self.Mixed_5b = InceptionA(192, pool_features=32) + self.Mixed_5c = InceptionA(256, pool_features=64) + self.Mixed_5d = InceptionA(288, pool_features=64) + self.Mixed_6a = InceptionB(288) + self.Mixed_6b = InceptionC(768, channels_7x7=128) + self.Mixed_6c = InceptionC(768, channels_7x7=160) + self.Mixed_6d = InceptionC(768, channels_7x7=160) + self.Mixed_6e = InceptionC(768, channels_7x7=192) + self.AuxLogits: Optional[nn.Module] = None + if aux_logits: + self.AuxLogits = InceptionAux(768, num_classes) + self.Mixed_7a = InceptionD(768) + self.Mixed_7b = InceptionE(1280) + self.Mixed_7c = InceptionE(2048) + self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) + self.dropout = nn.Dropout(p=dropout) + self.fc = nn.Linear(2048, num_classes) + + def forward( + self, + x: torch.Tensor) -> Tuple[Optional[torch.Tensor], torch.Tensor]: + """Forward function.""" + # N x 3 x 299 x 299 + x = self.Conv2d_1a_3x3(x) + # N x 32 x 149 x 149 + x = self.Conv2d_2a_3x3(x) + # N x 32 x 147 x 147 + x = self.Conv2d_2b_3x3(x) + # N x 64 x 147 x 147 + x = self.maxpool1(x) + # N x 64 x 73 x 73 + x = self.Conv2d_3b_1x1(x) + # N x 80 x 73 x 73 + x = self.Conv2d_4a_3x3(x) + # N x 192 x 71 x 71 + x = self.maxpool2(x) + # N x 192 x 35 x 35 + x = self.Mixed_5b(x) + # N x 256 x 35 x 35 + x = self.Mixed_5c(x) + # N x 288 x 35 x 35 + x = self.Mixed_5d(x) + # N x 288 x 35 x 35 + x = self.Mixed_6a(x) + # N x 768 x 17 x 17 + x = self.Mixed_6b(x) + # N x 768 x 17 x 17 + x = self.Mixed_6c(x) + # N x 768 x 17 x 17 + x = self.Mixed_6d(x) + # N x 768 x 17 x 17 + x = self.Mixed_6e(x) + # N x 768 x 17 x 17 + aux: Optional[torch.Tensor] = None + if self.aux_logits and self.training: + aux = self.AuxLogits(x) + # N x 768 x 17 x 17 + x = self.Mixed_7a(x) + # N x 1280 x 8 x 8 + x = self.Mixed_7b(x) + # N x 2048 x 8 x 8 + x = self.Mixed_7c(x) + # N x 2048 x 8 x 8 + # Adaptive average pooling + x = self.avgpool(x) + # N x 2048 x 1 x 1 + x = self.dropout(x) + # N x 2048 x 1 x 1 + x = torch.flatten(x, 1) + # N x 2048 + x = self.fc(x) + # N x 1000 (num_classes) + return aux, x diff --git a/mmpretrain/models/backbones/lenet.py b/mmpretrain/models/backbones/lenet.py new file mode 100644 index 0000000000000000000000000000000000000000..8e423c0b15a60660714617e47fd68857b3a6d1e0 --- /dev/null +++ b/mmpretrain/models/backbones/lenet.py @@ -0,0 +1,42 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
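The InceptionV3 backbone above always returns an `(aux_logits, logits)` tuple; the auxiliary branch is evaluated only when `aux_logits=True` and the module is in training mode, otherwise the first element is None. A short sketch mirroring the docstring example, assuming `mmpretrain` is importable:

import torch

from mmpretrain.models import build_backbone

inputs = torch.rand(2, 3, 299, 299)
backbone = build_backbone(
    dict(type='InceptionV3', num_classes=100, aux_logits=True))

backbone.train()  # the auxiliary head only runs in training mode
aux_out, out = backbone(inputs)
print(aux_out.shape, out.shape)  # torch.Size([2, 100]) torch.Size([2, 100])

backbone.eval()  # in eval mode the auxiliary branch is skipped
aux_out, out = backbone(inputs)
print(aux_out)  # None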
+import torch.nn as nn + +from mmpretrain.registry import MODELS +from .base_backbone import BaseBackbone + + +@MODELS.register_module() +class LeNet5(BaseBackbone): + """`LeNet5 `_ backbone. + + The input for LeNet-5 is a 32×32 grayscale image. + + Args: + num_classes (int): number of classes for classification. + The default value is -1, which uses the backbone as + a feature extractor without the top classifier. + """ + + def __init__(self, num_classes=-1): + super(LeNet5, self).__init__() + self.num_classes = num_classes + self.features = nn.Sequential( + nn.Conv2d(1, 6, kernel_size=5, stride=1), nn.Tanh(), + nn.AvgPool2d(kernel_size=2), + nn.Conv2d(6, 16, kernel_size=5, stride=1), nn.Tanh(), + nn.AvgPool2d(kernel_size=2), + nn.Conv2d(16, 120, kernel_size=5, stride=1), nn.Tanh()) + if self.num_classes > 0: + self.classifier = nn.Sequential( + nn.Linear(120, 84), + nn.Tanh(), + nn.Linear(84, num_classes), + ) + + def forward(self, x): + + x = self.features(x) + if self.num_classes > 0: + x = self.classifier(x.squeeze()) + + return (x, ) diff --git a/mmpretrain/models/backbones/levit.py b/mmpretrain/models/backbones/levit.py new file mode 100644 index 0000000000000000000000000000000000000000..5f7aa324e28b1725fb9e67110a26ea2d5c2831bd --- /dev/null +++ b/mmpretrain/models/backbones/levit.py @@ -0,0 +1,522 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import itertools + +import torch +import torch.nn as nn +from mmcv.cnn import build_activation_layer, fuse_conv_bn +from mmcv.cnn.bricks import DropPath +from mmengine.model import BaseModule, ModuleList, Sequential + +from mmpretrain.models.backbones.base_backbone import BaseBackbone +from mmpretrain.registry import MODELS +from ..utils import build_norm_layer + + +class HybridBackbone(BaseModule): + + def __init__( + self, + embed_dim, + kernel_size=3, + stride=2, + pad=1, + dilation=1, + groups=1, + act_cfg=dict(type='HSwish'), + conv_cfg=None, + norm_cfg=dict(type='BN'), + init_cfg=None, + ): + super(HybridBackbone, self).__init__(init_cfg=init_cfg) + + self.input_channels = [ + 3, embed_dim // 8, embed_dim // 4, embed_dim // 2 + ] + self.output_channels = [ + embed_dim // 8, embed_dim // 4, embed_dim // 2, embed_dim + ] + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + + self.patch_embed = Sequential() + + for i in range(len(self.input_channels)): + conv_bn = ConvolutionBatchNorm( + self.input_channels[i], + self.output_channels[i], + kernel_size=kernel_size, + stride=stride, + pad=pad, + dilation=dilation, + groups=groups, + norm_cfg=norm_cfg, + ) + self.patch_embed.add_module('%d' % (2 * i), conv_bn) + if i < len(self.input_channels) - 1: + self.patch_embed.add_module('%d' % (i * 2 + 1), + build_activation_layer(act_cfg)) + + def forward(self, x): + x = self.patch_embed(x) + return x + + +class ConvolutionBatchNorm(BaseModule): + + def __init__( + self, + in_channel, + out_channel, + kernel_size=3, + stride=2, + pad=1, + dilation=1, + groups=1, + norm_cfg=dict(type='BN'), + ): + super(ConvolutionBatchNorm, self).__init__() + self.conv = nn.Conv2d( + in_channel, + out_channel, + kernel_size=kernel_size, + stride=stride, + padding=pad, + dilation=dilation, + groups=groups, + bias=False) + self.bn = build_norm_layer(norm_cfg, out_channel) + + def forward(self, x): + x = self.conv(x) + x = self.bn(x) + return x + + @torch.no_grad() + def fuse(self): + return fuse_conv_bn(self).conv + + +class LinearBatchNorm(BaseModule): + + def __init__(self, in_feature, out_feature, norm_cfg=dict(type='BN1d')): + super(LinearBatchNorm, 
self).__init__() + self.linear = nn.Linear(in_feature, out_feature, bias=False) + self.bn = build_norm_layer(norm_cfg, out_feature) + + def forward(self, x): + x = self.linear(x) + x = self.bn(x.flatten(0, 1)).reshape_as(x) + return x + + @torch.no_grad() + def fuse(self): + w = self.bn.weight / (self.bn.running_var + self.bn.eps)**0.5 + w = self.linear.weight * w[:, None] + b = self.bn.bias - self.bn.running_mean * self.bn.weight / \ + (self.bn.running_var + self.bn.eps) ** 0.5 + + factory_kwargs = { + 'device': self.linear.weight.device, + 'dtype': self.linear.weight.dtype + } + bias = nn.Parameter( + torch.empty(self.linear.out_features, **factory_kwargs)) + self.linear.register_parameter('bias', bias) + self.linear.weight.data.copy_(w) + self.linear.bias.data.copy_(b) + return self.linear + + +class Residual(BaseModule): + + def __init__(self, block, drop_path_rate=0.): + super(Residual, self).__init__() + self.block = block + if drop_path_rate > 0: + self.drop_path = DropPath(drop_path_rate) + else: + self.drop_path = nn.Identity() + + def forward(self, x): + x = x + self.drop_path(self.block(x)) + return x + + +class Attention(BaseModule): + + def __init__( + self, + dim, + key_dim, + num_heads=8, + attn_ratio=4, + act_cfg=dict(type='HSwish'), + resolution=14, + ): + super(Attention, self).__init__() + self.num_heads = num_heads + self.scale = key_dim**-0.5 + self.key_dim = key_dim + self.nh_kd = nh_kd = key_dim * num_heads + self.d = int(attn_ratio * key_dim) + self.dh = int(attn_ratio * key_dim) * num_heads + self.attn_ratio = attn_ratio + h = self.dh + nh_kd * 2 + self.qkv = LinearBatchNorm(dim, h) + self.proj = nn.Sequential( + build_activation_layer(act_cfg), LinearBatchNorm(self.dh, dim)) + + points = list(itertools.product(range(resolution), range(resolution))) + N = len(points) + attention_offsets = {} + idxs = [] + for p1 in points: + for p2 in points: + offset = (abs(p1[0] - p2[0]), abs(p1[1] - p2[1])) + if offset not in attention_offsets: + attention_offsets[offset] = len(attention_offsets) + idxs.append(attention_offsets[offset]) + self.attention_biases = torch.nn.Parameter( + torch.zeros(num_heads, len(attention_offsets))) + self.register_buffer('attention_bias_idxs', + torch.LongTensor(idxs).view(N, N)) + + @torch.no_grad() + def train(self, mode=True): + """change the mode of model.""" + super(Attention, self).train(mode) + if mode and hasattr(self, 'ab'): + del self.ab + else: + self.ab = self.attention_biases[:, self.attention_bias_idxs] + + def forward(self, x): # x (B,N,C) + B, N, C = x.shape # 2 196 128 + qkv = self.qkv(x) # 2 196 128 + q, k, v = qkv.view(B, N, self.num_heads, -1).split( + [self.key_dim, self.key_dim, self.d], + dim=3) # q 2 196 4 16 ; k 2 196 4 16; v 2 196 4 32 + q = q.permute(0, 2, 1, 3) # 2 4 196 16 + k = k.permute(0, 2, 1, 3) + v = v.permute(0, 2, 1, 3) + + attn = ((q @ k.transpose(-2, -1)) * + self.scale # 2 4 196 16 * 2 4 16 196 -> 2 4 196 196 + + (self.attention_biases[:, self.attention_bias_idxs] + if self.training else self.ab)) + attn = attn.softmax(dim=-1) # 2 4 196 196 -> 2 4 196 196 + x = (attn @ v).transpose(1, 2).reshape( + B, N, + self.dh) # 2 4 196 196 * 2 4 196 32 -> 2 4 196 32 -> 2 196 128 + x = self.proj(x) + return x + + +class MLP(nn.Sequential): + + def __init__(self, embed_dim, mlp_ratio, act_cfg=dict(type='HSwish')): + super(MLP, self).__init__() + h = embed_dim * mlp_ratio + self.linear1 = LinearBatchNorm(embed_dim, h) + self.activation = build_activation_layer(act_cfg) + self.linear2 = LinearBatchNorm(h, embed_dim) + + 
def forward(self, x): + x = self.linear1(x) + x = self.activation(x) + x = self.linear2(x) + return x + + +class Subsample(BaseModule): + + def __init__(self, stride, resolution): + super(Subsample, self).__init__() + self.stride = stride + self.resolution = resolution + + def forward(self, x): + B, _, C = x.shape + # B, N, C -> B, H, W, C + x = x.view(B, self.resolution, self.resolution, C) + x = x[:, ::self.stride, ::self.stride] + x = x.reshape(B, -1, C) # B, H', W', C -> B, N', C + return x + + +class AttentionSubsample(nn.Sequential): + + def __init__(self, + in_dim, + out_dim, + key_dim, + num_heads=8, + attn_ratio=2, + act_cfg=dict(type='HSwish'), + stride=2, + resolution=14): + super(AttentionSubsample, self).__init__() + self.num_heads = num_heads + self.scale = key_dim**-0.5 + self.key_dim = key_dim + self.nh_kd = nh_kd = key_dim * num_heads + self.d = int(attn_ratio * key_dim) + self.dh = int(attn_ratio * key_dim) * self.num_heads + self.attn_ratio = attn_ratio + self.sub_resolution = (resolution - 1) // stride + 1 + h = self.dh + nh_kd + self.kv = LinearBatchNorm(in_dim, h) + + self.q = nn.Sequential( + Subsample(stride, resolution), LinearBatchNorm(in_dim, nh_kd)) + self.proj = nn.Sequential( + build_activation_layer(act_cfg), LinearBatchNorm(self.dh, out_dim)) + + self.stride = stride + self.resolution = resolution + points = list(itertools.product(range(resolution), range(resolution))) + sub_points = list( + itertools.product( + range(self.sub_resolution), range(self.sub_resolution))) + N = len(points) + N_sub = len(sub_points) + attention_offsets = {} + idxs = [] + for p1 in sub_points: + for p2 in points: + size = 1 + offset = (abs(p1[0] * stride - p2[0] + (size - 1) / 2), + abs(p1[1] * stride - p2[1] + (size - 1) / 2)) + if offset not in attention_offsets: + attention_offsets[offset] = len(attention_offsets) + idxs.append(attention_offsets[offset]) + self.attention_biases = torch.nn.Parameter( + torch.zeros(num_heads, len(attention_offsets))) + self.register_buffer('attention_bias_idxs', + torch.LongTensor(idxs).view(N_sub, N)) + + @torch.no_grad() + def train(self, mode=True): + super(AttentionSubsample, self).train(mode) + if mode and hasattr(self, 'ab'): + del self.ab + else: + self.ab = self.attention_biases[:, self.attention_bias_idxs] + + def forward(self, x): + B, N, C = x.shape + k, v = self.kv(x).view(B, N, self.num_heads, + -1).split([self.key_dim, self.d], dim=3) + k = k.permute(0, 2, 1, 3) # BHNC + v = v.permute(0, 2, 1, 3) # BHNC + q = self.q(x).view(B, self.sub_resolution**2, self.num_heads, + self.key_dim).permute(0, 2, 1, 3) + + attn = (q @ k.transpose(-2, -1)) * self.scale + \ + (self.attention_biases[:, self.attention_bias_idxs] + if self.training else self.ab) + attn = attn.softmax(dim=-1) + + x = (attn @ v).transpose(1, 2).reshape(B, -1, self.dh) + x = self.proj(x) + return x + + +@MODELS.register_module() +class LeViT(BaseBackbone): + """LeViT backbone. + + A PyTorch implementation of `LeViT: A Vision Transformer in ConvNet's + Clothing for Faster Inference `_ + + Modified from the official implementation: + https://github.com/facebookresearch/LeViT + + Args: + arch (str | dict): LeViT architecture. + + If use string, choose from '128s', '128', '192', '256' and '384'. + If use dict, it should have below keys: + + - **embed_dims** (List[int]): The embed dimensions of each stage. + - **key_dims** (List[int]): The embed dimensions of the key in the + attention layers of each stage. + - **num_heads** (List[int]): The number of heads in each stage. 
+ - **depths** (List[int]): The number of blocks in each stage. + + img_size (int): Input image size + patch_size (int | tuple): The patch size. Deault to 16 + attn_ratio (int): Ratio of hidden dimensions of the value in attention + layers. Defaults to 2. + mlp_ratio (int): Ratio of hidden dimensions in MLP layers. + Defaults to 2. + act_cfg (dict): The config of activation functions. + Defaults to ``dict(type='HSwish')``. + hybrid_backbone (callable): A callable object to build the patch embed + module. Defaults to use :class:`HybridBackbone`. + out_indices (Sequence | int): Output from which stages. + Defaults to -1, means the last stage. + deploy (bool): Whether to switch the model structure to + deployment mode. Defaults to False. + init_cfg (dict or list[dict], optional): Initialization config dict. + Defaults to None. + """ + arch_zoo = { + '128s': { + 'embed_dims': [128, 256, 384], + 'num_heads': [4, 6, 8], + 'depths': [2, 3, 4], + 'key_dims': [16, 16, 16], + }, + '128': { + 'embed_dims': [128, 256, 384], + 'num_heads': [4, 8, 12], + 'depths': [4, 4, 4], + 'key_dims': [16, 16, 16], + }, + '192': { + 'embed_dims': [192, 288, 384], + 'num_heads': [3, 5, 6], + 'depths': [4, 4, 4], + 'key_dims': [32, 32, 32], + }, + '256': { + 'embed_dims': [256, 384, 512], + 'num_heads': [4, 6, 8], + 'depths': [4, 4, 4], + 'key_dims': [32, 32, 32], + }, + '384': { + 'embed_dims': [384, 512, 768], + 'num_heads': [6, 9, 12], + 'depths': [4, 4, 4], + 'key_dims': [32, 32, 32], + }, + } + + def __init__(self, + arch, + img_size=224, + patch_size=16, + attn_ratio=2, + mlp_ratio=2, + act_cfg=dict(type='HSwish'), + hybrid_backbone=HybridBackbone, + out_indices=-1, + deploy=False, + drop_path_rate=0, + init_cfg=None): + super(LeViT, self).__init__(init_cfg=init_cfg) + + if isinstance(arch, str): + arch = arch.lower() + assert arch in set(self.arch_zoo), \ + f'Arch {arch} is not in default archs {set(self.arch_zoo)}' + self.arch = self.arch_zoo[arch] + elif isinstance(arch, dict): + essential_keys = {'embed_dim', 'num_heads', 'depth', 'key_dim'} + assert isinstance(arch, dict) and set(arch) == essential_keys, \ + f'Custom arch needs a dict with keys {essential_keys}' + self.arch = arch + else: + raise TypeError('Expect "arch" to be either a string ' + f'or a dict, got {type(arch)}') + + self.embed_dims = self.arch['embed_dims'] + self.num_heads = self.arch['num_heads'] + self.key_dims = self.arch['key_dims'] + self.depths = self.arch['depths'] + self.num_stages = len(self.embed_dims) + self.drop_path_rate = drop_path_rate + + self.patch_embed = hybrid_backbone(self.embed_dims[0]) + + self.resolutions = [] + resolution = img_size // patch_size + self.stages = ModuleList() + for i, (embed_dims, key_dims, depth, num_heads) in enumerate( + zip(self.embed_dims, self.key_dims, self.depths, + self.num_heads)): + blocks = [] + if i > 0: + downsample = AttentionSubsample( + in_dim=self.embed_dims[i - 1], + out_dim=embed_dims, + key_dim=key_dims, + num_heads=self.embed_dims[i - 1] // key_dims, + attn_ratio=4, + act_cfg=act_cfg, + stride=2, + resolution=resolution) + blocks.append(downsample) + resolution = downsample.sub_resolution + if mlp_ratio > 0: # mlp_ratio + blocks.append( + Residual( + MLP(embed_dims, mlp_ratio, act_cfg=act_cfg), + self.drop_path_rate)) + self.resolutions.append(resolution) + for _ in range(depth): + blocks.append( + Residual( + Attention( + embed_dims, + key_dims, + num_heads, + attn_ratio=attn_ratio, + act_cfg=act_cfg, + resolution=resolution, + ), self.drop_path_rate)) + if mlp_ratio > 0: + 
blocks.append( + Residual( + MLP(embed_dims, mlp_ratio, act_cfg=act_cfg), + self.drop_path_rate)) + + self.stages.append(Sequential(*blocks)) + + if isinstance(out_indices, int): + out_indices = [out_indices] + elif isinstance(out_indices, tuple): + out_indices = list(out_indices) + elif not isinstance(out_indices, list): + raise TypeError('"out_indices" must by a list, tuple or int, ' + f'get {type(out_indices)} instead.') + for i, index in enumerate(out_indices): + if index < 0: + out_indices[i] = self.num_stages + index + assert 0 <= out_indices[i] < self.num_stages, \ + f'Invalid out_indices {index}.' + self.out_indices = out_indices + + self.deploy = False + if deploy: + self.switch_to_deploy() + + def switch_to_deploy(self): + if self.deploy: + return + fuse_parameters(self) + self.deploy = True + + def forward(self, x): + x = self.patch_embed(x) + x = x.flatten(2).transpose(1, 2) # B, C, H, W -> B, L, C + outs = [] + for i, stage in enumerate(self.stages): + x = stage(x) + B, _, C = x.shape + if i in self.out_indices: + out = x.reshape(B, self.resolutions[i], self.resolutions[i], C) + out = out.permute(0, 3, 1, 2).contiguous() + outs.append(out) + + return tuple(outs) + + +def fuse_parameters(module): + for child_name, child in module.named_children(): + if hasattr(child, 'fuse'): + setattr(module, child_name, child.fuse()) + else: + fuse_parameters(child) diff --git a/mmpretrain/models/backbones/mixmim.py b/mmpretrain/models/backbones/mixmim.py new file mode 100644 index 0000000000000000000000000000000000000000..2c67aa0c3a45c5c85adbacb94ae90dc170b2d0bb --- /dev/null +++ b/mmpretrain/models/backbones/mixmim.py @@ -0,0 +1,533 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import List, Optional, Union + +import torch +from mmcv.cnn import build_norm_layer +from mmcv.cnn.bricks.drop import DropPath +from mmcv.cnn.bricks.transformer import PatchEmbed, PatchMerging +from mmengine.model import BaseModule +from torch import nn +from torch.utils.checkpoint import checkpoint + +from mmpretrain.registry import MODELS +from ..utils import WindowMSA, to_2tuple +from .base_backbone import BaseBackbone +from .vision_transformer import TransformerEncoderLayer + + +class MixMIMWindowAttention(WindowMSA): + """MixMIM Window Attention. + + Compared with WindowMSA, we add some modifications + in ``forward`` to meet the requirement of MixMIM during + pretraining. + + Implements one windown attention in MixMIM. + Args: + embed_dims (int): The feature dimension. + window_size (list): The height and width of the window. + num_heads (int): The number of head in attention. + qkv_bias (bool): Whether to add bias for qkv in attention modules. + Defaults to True. + qk_scale (float, optional): Override default qk scale of + ``head_dim ** -0.5`` if set. Defaults to None. + attn_drop_rate (float): attention drop rate. + Defaults to 0. + proj_drop_rate (float): Probability of an element to be zeroed. + Defaults to 0. + init_cfg (dict, optional): Initialization config dict. + Defaults to None. 
+ """ + + def __init__(self, + embed_dims, + window_size, + num_heads, + qkv_bias=True, + qk_scale=None, + attn_drop_rate=0., + proj_drop_rate=0., + init_cfg=None): + + super().__init__( + embed_dims=embed_dims, + window_size=window_size, + num_heads=num_heads, + qkv_bias=qkv_bias, + qk_scale=qk_scale, + attn_drop=attn_drop_rate, + proj_drop=proj_drop_rate, + init_cfg=init_cfg) + + def forward(self, x, mask=None): + + B_, N, C = x.shape + qkv = self.qkv(x).reshape(B_, N, 3, self.num_heads, + C // self.num_heads).permute(2, 0, 3, 1, 4) + q, k, v = qkv[0], qkv[1], qkv[ + 2] # make torchscript happy (cannot use tensor as tuple) + + q = q * self.scale + attn = (q @ k.transpose(-2, -1)) + + relative_position_bias = self.relative_position_bias_table[ + self.relative_position_index.view(-1)].view( + self.window_size[0] * self.window_size[1], + self.window_size[0] * self.window_size[1], + -1) # Wh*Ww,Wh*Ww,nH + relative_position_bias = relative_position_bias.permute( + 2, 0, 1).contiguous() # nH, Wh*Ww, Wh*Ww + attn = attn + relative_position_bias.unsqueeze(0) + + if mask is not None: + mask = mask.reshape(B_, 1, 1, N) + mask_new = mask * mask.transpose( + 2, 3) + (1 - mask) * (1 - mask).transpose(2, 3) + mask_new = 1 - mask_new + + if mask_new.dtype == torch.float16: + attn = attn - 65500 * mask_new + else: + attn = attn - 1e30 * mask_new + + attn = self.softmax(attn) + else: + attn = self.softmax(attn) + + attn = self.attn_drop(attn) + + x = (attn @ v).transpose(1, 2).reshape(B_, N, C) + x = self.proj(x) + x = self.proj_drop(x) + return x + + +class MixMIMBlock(TransformerEncoderLayer): + """MixMIM Block. Implements one block in MixMIM. + + Args: + embed_dims (int): The feature dimension. + input_resolution (tuple): Input resolution of this layer. + num_heads (int): The number of head in attention, + window_size (list): The height and width of the window. + mlp_ratio (int): The MLP ration in FFN. + num_fcs (int): The number of linear layers in a block. + qkv_bias (bool): Whether to add bias for qkv in attention modules. + Defaults to True. + proj_drop_rate (float): Probability of an element to be zeroed. + Defaults to 0. + attn_drop_rate (float): attention drop rate. + Defaults to 0. + drop_path_rate (float): stochastic depth rate. + Defaults to 0. + norm_cfg (dict): Config dict for normalization layer. + Defaults to ``dict(type='LN')``. + init_cfg (dict, optional): Initialization config dict. + Defaults to None. 
+ """ + + def __init__(self, + embed_dims, + input_resolution, + num_heads, + window_size=7, + mlp_ratio=4., + num_fcs=2, + qkv_bias=True, + proj_drop_rate=0., + attn_drop_rate=0., + drop_path_rate=0., + act_cfg=dict(type='GELU'), + norm_cfg=dict(type='LN'), + init_cfg: Optional[Union[List[dict], dict]] = None) -> None: + + super().__init__( + embed_dims=embed_dims, + num_heads=num_heads, + feedforward_channels=int(mlp_ratio * embed_dims), + drop_rate=proj_drop_rate, + attn_drop_rate=attn_drop_rate, + drop_path_rate=drop_path_rate, + num_fcs=num_fcs, + qkv_bias=qkv_bias, + act_cfg=act_cfg, + norm_cfg=norm_cfg, + init_cfg=init_cfg) + + self.embed_dims = embed_dims + self.input_resolution = input_resolution + self.num_heads = num_heads + self.window_size = window_size + self.mlp_ratio = mlp_ratio + + if min(self.input_resolution) <= self.window_size: + self.window_size = min(self.input_resolution) + + self.attn = MixMIMWindowAttention( + embed_dims=embed_dims, + window_size=to_2tuple(self.window_size), + num_heads=num_heads, + qkv_bias=qkv_bias, + attn_drop_rate=attn_drop_rate, + proj_drop_rate=proj_drop_rate) + + self.drop_path = DropPath( + drop_path_rate) if drop_path_rate > 0. else nn.Identity() + + @staticmethod + def window_reverse(windows, H, W, window_size): + B = int(windows.shape[0] / (H * W / window_size / window_size)) + x = windows.view(B, H // window_size, W // window_size, window_size, + window_size, -1) + x = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(B, H, W, -1) + return x + + @staticmethod + def window_partition(x, window_size): + B, H, W, C = x.shape + x = x.view(B, H // window_size, window_size, W // window_size, + window_size, C) + windows = x.permute(0, 1, 3, 2, 4, 5).contiguous() + windows = windows.view(-1, window_size, window_size, C) + return windows + + def forward(self, x, attn_mask=None): + H, W = self.input_resolution + B, L, C = x.shape + + shortcut = x + x = self.ln1(x) + x = x.view(B, H, W, C) + + # partition windows + x_windows = self.window_partition( + x, self.window_size) # nW*B, window_size, window_size, C + x_windows = x_windows.view(-1, self.window_size * self.window_size, + C) # nW*B, window_size*window_size, C + if attn_mask is not None: + attn_mask = attn_mask.repeat(B, 1, 1) # B, N, 1 + attn_mask = attn_mask.view(B, H, W, 1) + attn_mask = self.window_partition(attn_mask, self.window_size) + attn_mask = attn_mask.view(-1, self.window_size * self.window_size, + 1) + + # W-MSA/SW-MSA + attn_windows = self.attn( + x_windows, mask=attn_mask) # nW*B, window_size*window_size, C + + # merge windows + attn_windows = attn_windows.view(-1, self.window_size, + self.window_size, C) + x = self.window_reverse(attn_windows, H, W, + self.window_size) # B H' W' C + + x = x.view(B, H * W, C) + + x = shortcut + self.drop_path(x) + + x = self.ffn(self.norm2(x), identity=x) # ffn contains DropPath + + return x + + +class MixMIMLayer(BaseModule): + """Implements one MixMIM layer, which may contains several MixMIM blocks. + + Args: + embed_dims (int): The feature dimension. + input_resolution (tuple): Input resolution of this layer. + depth (int): The number of blocks in this layer. + num_heads (int): The number of head in attention, + window_size (list): The height and width of the window. + mlp_ratio (int): The MLP ration in FFN. + qkv_bias (bool): Whether to add bias for qkv in attention modules. + Defaults to True. + proj_drop_rate (float): Probability of an element to be zeroed. + Defaults to 0. + attn_drop_rate (float): attention drop rate. + Defaults to 0. 
+        drop_path_rate (float): stochastic depth rate.
+            Defaults to 0.
+        norm_cfg (dict): Config dict for normalization layer.
+            Defaults to ``dict(type='LN')``.
+        downsample (class, optional): Downsample the output of blocks by
+            patch merging. Defaults to None.
+        use_checkpoint (bool): Whether to use checkpointing to
+            reduce GPU memory cost.
+        init_cfg (dict, optional): Initialization config dict.
+            Defaults to None.
+    """
+
+    def __init__(self,
+                 embed_dims: int,
+                 input_resolution: int,
+                 depth: int,
+                 num_heads: int,
+                 window_size: int,
+                 mlp_ratio=4.,
+                 qkv_bias=True,
+                 proj_drop_rate=0.,
+                 attn_drop_rate=0.,
+                 drop_path_rate=[0.],
+                 norm_cfg=dict(type='LN'),
+                 downsample=None,
+                 use_checkpoint=False,
+                 init_cfg: Optional[Union[List[dict], dict]] = None) -> None:
+        super().__init__(init_cfg=init_cfg)
+        self.embed_dims = embed_dims
+        self.input_resolution = input_resolution
+        self.depth = depth
+        self.use_checkpoint = use_checkpoint
+
+        # build blocks
+        self.blocks = nn.ModuleList()
+        for i in range(depth):
+            self.blocks.append(
+                MixMIMBlock(
+                    embed_dims=embed_dims,
+                    input_resolution=input_resolution,
+                    num_heads=num_heads,
+                    window_size=window_size,
+                    mlp_ratio=mlp_ratio,
+                    qkv_bias=qkv_bias,
+                    proj_drop_rate=proj_drop_rate,
+                    attn_drop_rate=attn_drop_rate,
+                    drop_path_rate=drop_path_rate[i],
+                    norm_cfg=norm_cfg))
+        # patch merging layer
+        if downsample is not None:
+            self.downsample = downsample(
+                in_channels=embed_dims,
+                out_channels=2 * embed_dims,
+                norm_cfg=norm_cfg)
+        else:
+            self.downsample = None
+
+    def forward(self, x, attn_mask=None):
+        for blk in self.blocks:
+            if self.use_checkpoint:
+                x = checkpoint(blk, x, attn_mask)
+            else:
+                x = blk(x, attn_mask=attn_mask)
+        if self.downsample is not None:
+            x, _ = self.downsample(x, self.input_resolution)
+        return x
+
+    def extra_repr(self) -> str:
+        return f'dim={self.embed_dims}, \
+            input_resolution={self.input_resolution}, depth={self.depth}'
+
+
+@MODELS.register_module()
+class MixMIMTransformer(BaseBackbone):
+    """MixMIM backbone.
+
+    A PyTorch implementation of: `MixMIM: Mixed and Masked Image
+    Modeling for Efficient Visual Representation Learning
+    `_
+
+    Args:
+        arch (str | dict): MixMIM architecture. If use string,
+            choose from 'base', 'large' and 'huge'.
+            If use dict, it should have below keys:
+
+            - **embed_dims** (int): The dimensions of embedding.
+            - **depths** (int): The number of transformer encoder layers.
+            - **num_heads** (int): The number of heads in attention modules.
+
+            Defaults to 'base'.
+        mlp_ratio (int): The mlp ratio in FFN. Defaults to 4.
+        img_size (int | tuple): The expected input image shape. Because we
+            support dynamic input shape, just set the argument to the most
+            common input image shape. Defaults to 224.
+        patch_size (int | tuple): The patch size in patch embedding.
+            Defaults to 4.
+        in_channels (int): The num of input channels. Defaults to 3.
+        window_size (list): The height and width of the window.
+        qkv_bias (bool): Whether to add bias for qkv in attention modules.
+            Defaults to True.
+        patch_cfg (dict): Extra config dict for patch embedding.
+            Defaults to an empty dict.
+        norm_cfg (dict): Config dict for normalization layer.
+            Defaults to ``dict(type='LN')``.
+        drop_rate (float): Probability of an element to be zeroed.
+            Defaults to 0.
+        drop_path_rate (float): stochastic depth rate. Defaults to 0.
+        attn_drop_rate (float): attention drop rate. Defaults to 0.
+        use_checkpoint (bool): Whether to use checkpointing to
+            reduce GPU memory cost.
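For orientation, a small back-of-the-envelope sketch (assuming a 224x224 input, the stride-4 patch embedding and the 'base' settings listed further down) of how the stages built from ``MixMIMLayer`` evolve; every stage except the last ends in ``PatchMerging``, which halves the token grid and doubles the channels:

embed_dims, depths = 128, [2, 2, 18, 2]     # 'base' settings
reso = 224 // 4                             # 56x56 tokens after the stride-4 patch embed
for i, depth in enumerate(depths):
    print(f'stage {i}: {depth} blocks, {reso}x{reso} tokens, C={embed_dims * 2 ** i}')
    if i < len(depths) - 1:                 # PatchMerging after all but the last stage
        reso //= 2
# stage 0: 2 blocks, 56x56 tokens, C=128
# stage 1: 2 blocks, 28x28 tokens, C=256
# stage 2: 18 blocks, 14x14 tokens, C=512
# stage 3: 2 blocks, 7x7 tokens, C=1024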
+ init_cfg (dict, optional): Initialization config dict. + Defaults to None. + """ + arch_zoo = { + **dict.fromkeys( + ['b', 'base'], { + 'embed_dims': 128, + 'depths': [2, 2, 18, 2], + 'num_heads': [4, 8, 16, 32] + }), + **dict.fromkeys( + ['l', 'large'], { + 'embed_dims': 192, + 'depths': [2, 2, 18, 2], + 'num_heads': [6, 12, 24, 48] + }), + **dict.fromkeys( + ['h', 'huge'], { + 'embed_dims': 352, + 'depths': [2, 2, 18, 2], + 'num_heads': [11, 22, 44, 88] + }), + } + + def __init__( + self, + arch='base', + mlp_ratio=4, + img_size=224, + patch_size=4, + in_channels=3, + window_size=[14, 14, 14, 7], + qkv_bias=True, + patch_cfg=dict(), + norm_cfg=dict(type='LN'), + drop_rate=0.0, + drop_path_rate=0.0, + attn_drop_rate=0.0, + use_checkpoint=False, + init_cfg: Optional[dict] = None, + ) -> None: + super(MixMIMTransformer, self).__init__(init_cfg=init_cfg) + + if isinstance(arch, str): + arch = arch.lower() + assert arch in set(self.arch_zoo), \ + f'Arch {arch} is not in default archs {set(self.arch_zoo)}' + self.arch_settings = self.arch_zoo[arch] + else: + essential_keys = {'embed_dims', 'depths', 'num_heads'} + assert isinstance(arch, dict) and essential_keys <= set(arch), \ + f'Custom arch needs a dict with keys {essential_keys}' + self.arch_settings = arch + + self.embed_dims = self.arch_settings['embed_dims'] + self.depths = self.arch_settings['depths'] + self.num_heads = self.arch_settings['num_heads'] + + self.encoder_stride = 32 + + self.num_layers = len(self.depths) + self.qkv_bias = qkv_bias + self.drop_rate = drop_rate + self.attn_drop_rate = attn_drop_rate + self.use_checkpoint = use_checkpoint + self.mlp_ratio = mlp_ratio + self.window_size = window_size + + _patch_cfg = dict( + in_channels=in_channels, + input_size=img_size, + embed_dims=self.embed_dims, + conv_type='Conv2d', + kernel_size=patch_size, + stride=patch_size, + norm_cfg=dict(type='LN'), + ) + _patch_cfg.update(patch_cfg) + self.patch_embed = PatchEmbed(**_patch_cfg) + self.patch_resolution = self.patch_embed.init_out_size + + self.dpr = [ + x.item() + for x in torch.linspace(0, drop_path_rate, sum(self.depths)) + ] + self.layers = nn.ModuleList() + for i_layer in range(self.num_layers): + self.layers.append( + MixMIMLayer( + embed_dims=int(self.embed_dims * 2**i_layer), + input_resolution=(self.patch_resolution[0] // (2**i_layer), + self.patch_resolution[1] // + (2**i_layer)), + depth=self.depths[i_layer], + num_heads=self.num_heads[i_layer], + window_size=self.window_size[i_layer], + mlp_ratio=self.mlp_ratio, + qkv_bias=self.qkv_bias, + proj_drop_rate=self.drop_rate, + attn_drop_rate=self.attn_drop_rate, + drop_path_rate=self.dpr[sum(self.depths[:i_layer] + ):sum(self.depths[:i_layer + + 1])], + norm_cfg=norm_cfg, + downsample=PatchMerging if + (i_layer < self.num_layers - 1) else None, + use_checkpoint=self.use_checkpoint)) + + self.num_features = int(self.embed_dims * 2**(self.num_layers - 1)) + self.drop_after_pos = nn.Dropout(p=self.drop_rate) + + self.avgpool = nn.AdaptiveAvgPool1d(1) + self.num_patches = self.patch_resolution[0] * self.patch_resolution[1] + self.absolute_pos_embed = nn.Parameter( + torch.zeros(1, self.num_patches, self.embed_dims), + requires_grad=False) + + _, self.norm = build_norm_layer(norm_cfg, self.num_features) + + def forward(self, x: torch.Tensor): + x, _ = self.patch_embed(x) + + x = x + self.absolute_pos_embed + x = self.drop_after_pos(x) + + for layer in self.layers: + x = layer(x, attn_mask=None) + + x = self.norm(x) + x = self.avgpool(x.transpose(1, 2)) # B C 1 + x = 
torch.flatten(x, 1) + + return (x, ) + + def get_layer_depth(self, param_name: str, prefix: str = ''): + """Get the layer-wise depth of a parameter. + + Args: + param_name (str): The name of the parameter. + prefix (str): The prefix for the parameter. + Defaults to an empty string. + + Returns: + Tuple[int, int]: The layer-wise depth and the num of layers. + + Note: + The first depth is the stem module (``layer_depth=0``), and the + last depth is the subsequent module (``layer_depth=num_layers-1``) + """ + num_layers = sum(self.depths) + 2 + + if not param_name.startswith(prefix): + # For subsequent module like neck and head + if param_name.startswith('neck'): + return num_layers - 2, num_layers + else: + return num_layers - 1, num_layers + + param_name = param_name[len(prefix):] + + stem_layers = ('patch_embed', 'absolute_pos_embed', 'pos_embed') + if any(stem in param_name for stem in stem_layers): + layer_depth = 0 + elif param_name.startswith('layers'): + layer_id = int(param_name.split('.')[1]) + block_id = param_name.split('.')[3] + + if block_id in ('downsample', 'reduction', 'norm'): + layer_depth = sum(self.depths[:layer_id + 1]) + else: + layer_depth = sum(self.depths[:layer_id]) + int(block_id) + 1 + else: + layer_depth = num_layers - 2 + + return layer_depth, num_layers diff --git a/mmpretrain/models/backbones/mlp_mixer.py b/mmpretrain/models/backbones/mlp_mixer.py new file mode 100644 index 0000000000000000000000000000000000000000..af714fea48ae296be7d4b2b5a7e124ad94b048e9 --- /dev/null +++ b/mmpretrain/models/backbones/mlp_mixer.py @@ -0,0 +1,263 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Sequence + +import torch.nn as nn +from mmcv.cnn import build_norm_layer +from mmcv.cnn.bricks.transformer import FFN, PatchEmbed +from mmengine.model import BaseModule, ModuleList + +from mmpretrain.registry import MODELS +from ..utils import to_2tuple +from .base_backbone import BaseBackbone + + +class MixerBlock(BaseModule): + """Mlp-Mixer basic block. + + Basic module of `MLP-Mixer: An all-MLP Architecture for Vision + `_ + + Args: + num_tokens (int): The number of patched tokens + embed_dims (int): The feature dimension + tokens_mlp_dims (int): The hidden dimension for tokens FFNs + channels_mlp_dims (int): The hidden dimension for channels FFNs + drop_rate (float): Probability of an element to be zeroed + after the feed forward layer. Defaults to 0. + drop_path_rate (float): Stochastic depth rate. Defaults to 0. + num_fcs (int): The number of fully-connected layers for FFNs. + Defaults to 2. + act_cfg (dict): The activation config for FFNs. + Defaluts to ``dict(type='GELU')``. + norm_cfg (dict): Config dict for normalization layer. + Defaults to ``dict(type='LN')``. + init_cfg (dict, optional): Initialization config dict. + Defaults to None. 
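Before the implementation below, a minimal shape sketch of the two mixing steps a ``MixerBlock`` performs; plain ``nn.Linear`` layers stand in for the FFNs, and norms, dropout and DropPath are omitted:

import torch
import torch.nn as nn

B, num_tokens, embed_dims = 2, 196, 768          # 224x224 input, 16x16 patches, 'base'
tokens_mlp_dims, channels_mlp_dims = 384, 3072

token_mix = nn.Sequential(nn.Linear(num_tokens, tokens_mlp_dims), nn.GELU(),
                          nn.Linear(tokens_mlp_dims, num_tokens))
channel_mix = nn.Sequential(nn.Linear(embed_dims, channels_mlp_dims), nn.GELU(),
                            nn.Linear(channels_mlp_dims, embed_dims))

x = torch.randn(B, num_tokens, embed_dims)
x = x + token_mix(x.transpose(1, 2)).transpose(1, 2)   # mix information across tokens
x = x + channel_mix(x)                                 # mix information across channels
assert x.shape == (B, num_tokens, embed_dims)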
+ """ + + def __init__(self, + num_tokens, + embed_dims, + tokens_mlp_dims, + channels_mlp_dims, + drop_rate=0., + drop_path_rate=0., + num_fcs=2, + act_cfg=dict(type='GELU'), + norm_cfg=dict(type='LN'), + init_cfg=None): + super(MixerBlock, self).__init__(init_cfg=init_cfg) + + self.norm1_name, norm1 = build_norm_layer( + norm_cfg, embed_dims, postfix=1) + self.add_module(self.norm1_name, norm1) + self.token_mix = FFN( + embed_dims=num_tokens, + feedforward_channels=tokens_mlp_dims, + num_fcs=num_fcs, + ffn_drop=drop_rate, + dropout_layer=dict(type='DropPath', drop_prob=drop_path_rate), + act_cfg=act_cfg, + add_identity=False) + + self.norm2_name, norm2 = build_norm_layer( + norm_cfg, embed_dims, postfix=2) + self.add_module(self.norm2_name, norm2) + self.channel_mix = FFN( + embed_dims=embed_dims, + feedforward_channels=channels_mlp_dims, + num_fcs=num_fcs, + ffn_drop=drop_rate, + dropout_layer=dict(type='DropPath', drop_prob=drop_path_rate), + act_cfg=act_cfg) + + @property + def norm1(self): + return getattr(self, self.norm1_name) + + @property + def norm2(self): + return getattr(self, self.norm2_name) + + def init_weights(self): + super(MixerBlock, self).init_weights() + for m in self.token_mix.modules(): + if isinstance(m, nn.Linear): + nn.init.xavier_uniform_(m.weight) + nn.init.normal_(m.bias, std=1e-6) + for m in self.channel_mix.modules(): + if isinstance(m, nn.Linear): + nn.init.xavier_uniform_(m.weight) + nn.init.normal_(m.bias, std=1e-6) + + def forward(self, x): + out = self.norm1(x).transpose(1, 2) + x = x + self.token_mix(out).transpose(1, 2) + x = self.channel_mix(self.norm2(x), identity=x) + return x + + +@MODELS.register_module() +class MlpMixer(BaseBackbone): + """Mlp-Mixer backbone. + + Pytorch implementation of `MLP-Mixer: An all-MLP Architecture for Vision + `_ + + Args: + arch (str | dict): MLP Mixer architecture. If use string, choose from + 'small', 'base' and 'large'. If use dict, it should have below + keys: + + - **embed_dims** (int): The dimensions of embedding. + - **num_layers** (int): The number of MLP blocks. + - **tokens_mlp_dims** (int): The hidden dimensions for tokens FFNs. + - **channels_mlp_dims** (int): The The hidden dimensions for + channels FFNs. + + Defaults to 'base'. + img_size (int | tuple): The input image shape. Defaults to 224. + patch_size (int | tuple): The patch size in patch embedding. + Defaults to 16. + out_indices (Sequence | int): Output from which layer. + Defaults to -1, means the last layer. + drop_rate (float): Probability of an element to be zeroed. + Defaults to 0. + drop_path_rate (float): stochastic depth rate. Defaults to 0. + norm_cfg (dict): Config dict for normalization layer. + Defaults to ``dict(type='LN')``. + act_cfg (dict): The activation config for FFNs. Default GELU. + patch_cfg (dict): Configs of patch embeding. Defaults to an empty dict. + layer_cfgs (Sequence | dict): Configs of each mixer block layer. + Defaults to an empty dict. + init_cfg (dict, optional): Initialization config dict. + Defaults to None. 
+ """ + + arch_zoo = { + **dict.fromkeys( + ['s', 'small'], { + 'embed_dims': 512, + 'num_layers': 8, + 'tokens_mlp_dims': 256, + 'channels_mlp_dims': 2048, + }), + **dict.fromkeys( + ['b', 'base'], { + 'embed_dims': 768, + 'num_layers': 12, + 'tokens_mlp_dims': 384, + 'channels_mlp_dims': 3072, + }), + **dict.fromkeys( + ['l', 'large'], { + 'embed_dims': 1024, + 'num_layers': 24, + 'tokens_mlp_dims': 512, + 'channels_mlp_dims': 4096, + }), + } + + def __init__(self, + arch='base', + img_size=224, + patch_size=16, + out_indices=-1, + drop_rate=0., + drop_path_rate=0., + norm_cfg=dict(type='LN'), + act_cfg=dict(type='GELU'), + patch_cfg=dict(), + layer_cfgs=dict(), + init_cfg=None): + super(MlpMixer, self).__init__(init_cfg) + + if isinstance(arch, str): + arch = arch.lower() + assert arch in set(self.arch_zoo), \ + f'Arch {arch} is not in default archs {set(self.arch_zoo)}' + self.arch_settings = self.arch_zoo[arch] + else: + essential_keys = { + 'embed_dims', 'num_layers', 'tokens_mlp_dims', + 'channels_mlp_dims' + } + assert isinstance(arch, dict) and set(arch) == essential_keys, \ + f'Custom arch needs a dict with keys {essential_keys}' + self.arch_settings = arch + + self.embed_dims = self.arch_settings['embed_dims'] + self.num_layers = self.arch_settings['num_layers'] + self.tokens_mlp_dims = self.arch_settings['tokens_mlp_dims'] + self.channels_mlp_dims = self.arch_settings['channels_mlp_dims'] + + self.img_size = to_2tuple(img_size) + + _patch_cfg = dict( + input_size=img_size, + embed_dims=self.embed_dims, + conv_type='Conv2d', + kernel_size=patch_size, + stride=patch_size, + ) + _patch_cfg.update(patch_cfg) + self.patch_embed = PatchEmbed(**_patch_cfg) + self.patch_resolution = self.patch_embed.init_out_size + num_patches = self.patch_resolution[0] * self.patch_resolution[1] + + if isinstance(out_indices, int): + out_indices = [out_indices] + assert isinstance(out_indices, Sequence), \ + f'"out_indices" must be a sequence or int, ' \ + f'get {type(out_indices)} instead.' + for i, index in enumerate(out_indices): + if index < 0: + out_indices[i] = self.num_layers + index + assert out_indices[i] >= 0, f'Invalid out_indices {index}' + else: + assert index >= self.num_layers, f'Invalid out_indices {index}' + self.out_indices = out_indices + + self.layers = ModuleList() + if isinstance(layer_cfgs, dict): + layer_cfgs = [layer_cfgs] * self.num_layers + for i in range(self.num_layers): + _layer_cfg = dict( + num_tokens=num_patches, + embed_dims=self.embed_dims, + tokens_mlp_dims=self.tokens_mlp_dims, + channels_mlp_dims=self.channels_mlp_dims, + drop_rate=drop_rate, + drop_path_rate=drop_path_rate, + act_cfg=act_cfg, + norm_cfg=norm_cfg, + ) + _layer_cfg.update(layer_cfgs[i]) + self.layers.append(MixerBlock(**_layer_cfg)) + + self.norm1_name, norm1 = build_norm_layer( + norm_cfg, self.embed_dims, postfix=1) + self.add_module(self.norm1_name, norm1) + + @property + def norm1(self): + return getattr(self, self.norm1_name) + + def forward(self, x): + assert x.shape[2:] == self.img_size, \ + "The MLP-Mixer doesn't support dynamic input shape. 
" \ + f'Please input images with shape {self.img_size}' + x, _ = self.patch_embed(x) + + outs = [] + for i, layer in enumerate(self.layers): + x = layer(x) + + if i == len(self.layers) - 1: + x = self.norm1(x) + + if i in self.out_indices: + out = x.transpose(1, 2) + outs.append(out) + + return tuple(outs) diff --git a/mmpretrain/models/backbones/mobilenet_v2.py b/mmpretrain/models/backbones/mobilenet_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..bca1418a13c4ed81c4666e7f53b0417c36b2e99b --- /dev/null +++ b/mmpretrain/models/backbones/mobilenet_v2.py @@ -0,0 +1,264 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch.nn as nn +import torch.utils.checkpoint as cp +from mmcv.cnn import ConvModule +from mmengine.model import BaseModule +from torch.nn.modules.batchnorm import _BatchNorm + +from mmpretrain.models.utils import make_divisible +from mmpretrain.registry import MODELS +from .base_backbone import BaseBackbone + + +class InvertedResidual(BaseModule): + """InvertedResidual block for MobileNetV2. + + Args: + in_channels (int): The input channels of the InvertedResidual block. + out_channels (int): The output channels of the InvertedResidual block. + stride (int): Stride of the middle (first) 3x3 convolution. + expand_ratio (int): adjusts number of channels of the hidden layer + in InvertedResidual by this amount. + conv_cfg (dict, optional): Config dict for convolution layer. + Default: None, which means using conv2d. + norm_cfg (dict): Config dict for normalization layer. + Default: dict(type='BN'). + act_cfg (dict): Config dict for activation layer. + Default: dict(type='ReLU6'). + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. Default: False. + + Returns: + Tensor: The output tensor + """ + + def __init__(self, + in_channels, + out_channels, + stride, + expand_ratio, + conv_cfg=None, + norm_cfg=dict(type='BN'), + act_cfg=dict(type='ReLU6'), + with_cp=False, + init_cfg=None): + super(InvertedResidual, self).__init__(init_cfg) + self.stride = stride + assert stride in [1, 2], f'stride must in [1, 2]. ' \ + f'But received {stride}.' + self.with_cp = with_cp + self.use_res_connect = self.stride == 1 and in_channels == out_channels + hidden_dim = int(round(in_channels * expand_ratio)) + + layers = [] + if expand_ratio != 1: + layers.append( + ConvModule( + in_channels=in_channels, + out_channels=hidden_dim, + kernel_size=1, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg)) + layers.extend([ + ConvModule( + in_channels=hidden_dim, + out_channels=hidden_dim, + kernel_size=3, + stride=stride, + padding=1, + groups=hidden_dim, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg), + ConvModule( + in_channels=hidden_dim, + out_channels=out_channels, + kernel_size=1, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=None) + ]) + self.conv = nn.Sequential(*layers) + + def forward(self, x): + + def _inner_forward(x): + if self.use_res_connect: + return x + self.conv(x) + else: + return self.conv(x) + + if self.with_cp and x.requires_grad: + out = cp.checkpoint(_inner_forward, x) + else: + out = _inner_forward(x) + + return out + + +@MODELS.register_module() +class MobileNetV2(BaseBackbone): + """MobileNetV2 backbone. + + Args: + widen_factor (float): Width multiplier, multiply number of + channels in each layer by this amount. Default: 1.0. + out_indices (None or Sequence[int]): Output from which stages. + Default: (7, ). 
+ frozen_stages (int): Stages to be frozen (all param fixed). + Default: -1, which means not freezing any parameters. + conv_cfg (dict, optional): Config dict for convolution layer. + Default: None, which means using conv2d. + norm_cfg (dict): Config dict for normalization layer. + Default: dict(type='BN'). + act_cfg (dict): Config dict for activation layer. + Default: dict(type='ReLU6'). + norm_eval (bool): Whether to set norm layers to eval mode, namely, + freeze running stats (mean and var). Note: Effect on Batch Norm + and its variants only. Default: False. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. Default: False. + """ + + # Parameters to build layers. 4 parameters are needed to construct a + # layer, from left to right: expand_ratio, channel, num_blocks, stride. + arch_settings = [[1, 16, 1, 1], [6, 24, 2, 2], [6, 32, 3, 2], + [6, 64, 4, 2], [6, 96, 3, 1], [6, 160, 3, 2], + [6, 320, 1, 1]] + + def __init__(self, + widen_factor=1., + out_indices=(7, ), + frozen_stages=-1, + conv_cfg=None, + norm_cfg=dict(type='BN'), + act_cfg=dict(type='ReLU6'), + norm_eval=False, + with_cp=False, + init_cfg=[ + dict(type='Kaiming', layer=['Conv2d']), + dict( + type='Constant', + val=1, + layer=['_BatchNorm', 'GroupNorm']) + ]): + super(MobileNetV2, self).__init__(init_cfg) + self.widen_factor = widen_factor + self.out_indices = out_indices + for index in out_indices: + if index not in range(0, 8): + raise ValueError('the item in out_indices must in ' + f'range(0, 8). But received {index}') + + if frozen_stages not in range(-1, 8): + raise ValueError('frozen_stages must be in range(-1, 8). ' + f'But received {frozen_stages}') + self.out_indices = out_indices + self.frozen_stages = frozen_stages + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.act_cfg = act_cfg + self.norm_eval = norm_eval + self.with_cp = with_cp + + self.in_channels = make_divisible(32 * widen_factor, 8) + + self.conv1 = ConvModule( + in_channels=3, + out_channels=self.in_channels, + kernel_size=3, + stride=2, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + act_cfg=self.act_cfg) + + self.layers = [] + + for i, layer_cfg in enumerate(self.arch_settings): + expand_ratio, channel, num_blocks, stride = layer_cfg + out_channels = make_divisible(channel * widen_factor, 8) + inverted_res_layer = self.make_layer( + out_channels=out_channels, + num_blocks=num_blocks, + stride=stride, + expand_ratio=expand_ratio) + layer_name = f'layer{i + 1}' + self.add_module(layer_name, inverted_res_layer) + self.layers.append(layer_name) + + if widen_factor > 1.0: + self.out_channel = int(1280 * widen_factor) + else: + self.out_channel = 1280 + + layer = ConvModule( + in_channels=self.in_channels, + out_channels=self.out_channel, + kernel_size=1, + stride=1, + padding=0, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + act_cfg=self.act_cfg) + self.add_module('conv2', layer) + self.layers.append('conv2') + + def make_layer(self, out_channels, num_blocks, stride, expand_ratio): + """Stack InvertedResidual blocks to build a layer for MobileNetV2. + + Args: + out_channels (int): out_channels of block. + num_blocks (int): number of blocks. + stride (int): stride of the first block. Default: 1 + expand_ratio (int): Expand the number of channels of the + hidden layer in InvertedResidual by this ratio. Default: 6. 
+ """ + layers = [] + for i in range(num_blocks): + if i >= 1: + stride = 1 + layers.append( + InvertedResidual( + self.in_channels, + out_channels, + stride, + expand_ratio=expand_ratio, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + act_cfg=self.act_cfg, + with_cp=self.with_cp)) + self.in_channels = out_channels + + return nn.Sequential(*layers) + + def forward(self, x): + x = self.conv1(x) + + outs = [] + for i, layer_name in enumerate(self.layers): + layer = getattr(self, layer_name) + x = layer(x) + if i in self.out_indices: + outs.append(x) + + return tuple(outs) + + def _freeze_stages(self): + if self.frozen_stages >= 0: + for param in self.conv1.parameters(): + param.requires_grad = False + for i in range(1, self.frozen_stages + 1): + layer = getattr(self, f'layer{i}') + layer.eval() + for param in layer.parameters(): + param.requires_grad = False + + def train(self, mode=True): + super(MobileNetV2, self).train(mode) + self._freeze_stages() + if mode and self.norm_eval: + for m in self.modules(): + if isinstance(m, _BatchNorm): + m.eval() diff --git a/mmpretrain/models/backbones/mobilenet_v3.py b/mmpretrain/models/backbones/mobilenet_v3.py new file mode 100644 index 0000000000000000000000000000000000000000..577dba94040dec5ecda9388819b8b5205f307dce --- /dev/null +++ b/mmpretrain/models/backbones/mobilenet_v3.py @@ -0,0 +1,217 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from mmcv.cnn import ConvModule +from torch.nn.modules.batchnorm import _BatchNorm + +from mmpretrain.registry import MODELS +from ..utils import InvertedResidual +from .base_backbone import BaseBackbone + + +@MODELS.register_module() +class MobileNetV3(BaseBackbone): + """MobileNetV3 backbone. + + Args: + arch (str): Architecture of mobilnetv3, from {small, large}. + Default: small. + conv_cfg (dict, optional): Config dict for convolution layer. + Default: None, which means using conv2d. + norm_cfg (dict): Config dict for normalization layer. + Default: dict(type='BN'). + out_indices (None or Sequence[int]): Output from which stages. + Default: None, which means output tensors from final stage. + frozen_stages (int): Stages to be frozen (all param fixed). + Default: -1, which means not freezing any parameters. + norm_eval (bool): Whether to set norm layers to eval mode, namely, + freeze running stats (mean and var). Note: Effect on Batch Norm + and its variants only. Default: False. + with_cp (bool): Use checkpoint or not. Using checkpoint will save + some memory while slowing down the training speed. + Default: False. 
+ """ + # Parameters to build each block: + # [kernel size, mid channels, out channels, with_se, act type, stride] + arch_settings = { + 'small': [[3, 16, 16, True, 'ReLU', 2], + [3, 72, 24, False, 'ReLU', 2], + [3, 88, 24, False, 'ReLU', 1], + [5, 96, 40, True, 'HSwish', 2], + [5, 240, 40, True, 'HSwish', 1], + [5, 240, 40, True, 'HSwish', 1], + [5, 120, 48, True, 'HSwish', 1], + [5, 144, 48, True, 'HSwish', 1], + [5, 288, 96, True, 'HSwish', 2], + [5, 576, 96, True, 'HSwish', 1], + [5, 576, 96, True, 'HSwish', 1]], + 'small_075': [[3, 16, 16, True, 'ReLU', 2], + [3, 72, 24, False, 'ReLU', 2], + [3, 88, 24, False, 'ReLU', 1], + [5, 96, 32, True, 'HSwish', 2], + [5, 192, 32, True, 'HSwish', 1], + [5, 192, 32, True, 'HSwish', 1], + [5, 96, 40, True, 'HSwish', 1], + [5, 120, 40, True, 'HSwish', 1], + [5, 240, 72, True, 'HSwish', 2], + [5, 432, 72, True, 'HSwish', 1], + [5, 432, 72, True, 'HSwish', 1]], + 'small_050': [[3, 16, 8, True, 'ReLU', 2], + [3, 40, 16, False, 'ReLU', 2], + [3, 56, 16, False, 'ReLU', 1], + [5, 64, 24, True, 'HSwish', 2], + [5, 144, 24, True, 'HSwish', 1], + [5, 144, 24, True, 'HSwish', 1], + [5, 72, 24, True, 'HSwish', 1], + [5, 72, 24, True, 'HSwish', 1], + [5, 144, 48, True, 'HSwish', 2], + [5, 288, 48, True, 'HSwish', 1], + [5, 288, 48, True, 'HSwish', 1]], + 'large': [[3, 16, 16, False, 'ReLU', 1], + [3, 64, 24, False, 'ReLU', 2], + [3, 72, 24, False, 'ReLU', 1], + [5, 72, 40, True, 'ReLU', 2], + [5, 120, 40, True, 'ReLU', 1], + [5, 120, 40, True, 'ReLU', 1], + [3, 240, 80, False, 'HSwish', 2], + [3, 200, 80, False, 'HSwish', 1], + [3, 184, 80, False, 'HSwish', 1], + [3, 184, 80, False, 'HSwish', 1], + [3, 480, 112, True, 'HSwish', 1], + [3, 672, 112, True, 'HSwish', 1], + [5, 672, 160, True, 'HSwish', 2], + [5, 960, 160, True, 'HSwish', 1], + [5, 960, 160, True, 'HSwish', 1]] + } # yapf: disable + + def __init__(self, + arch='small', + conv_cfg=None, + norm_cfg=dict(type='BN', eps=0.001, momentum=0.01), + out_indices=None, + frozen_stages=-1, + norm_eval=False, + with_cp=False, + init_cfg=[ + dict( + type='Kaiming', + layer=['Conv2d'], + nonlinearity='leaky_relu'), + dict(type='Normal', layer=['Linear'], std=0.01), + dict(type='Constant', layer=['BatchNorm2d'], val=1) + ]): + super(MobileNetV3, self).__init__(init_cfg) + assert arch in self.arch_settings + if out_indices is None: + out_indices = (12, ) if 'small' in arch else (16, ) + for order, index in enumerate(out_indices): + if index not in range(0, len(self.arch_settings[arch]) + 2): + raise ValueError( + 'the item in out_indices must in ' + f'range(0, {len(self.arch_settings[arch]) + 2}). ' + f'But received {index}') + + if frozen_stages not in range(-1, len(self.arch_settings[arch]) + 2): + raise ValueError('frozen_stages must be in range(-1, ' + f'{len(self.arch_settings[arch]) + 2}). 
' + f'But received {frozen_stages}') + self.arch = arch + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.out_indices = out_indices + self.frozen_stages = frozen_stages + self.norm_eval = norm_eval + self.with_cp = with_cp + + self.layers = self._make_layer() + self.feat_dim = self.arch_settings[arch][-1][1] + + def _make_layer(self): + layers = [] + layer_setting = self.arch_settings[self.arch] + in_channels = 16 + + layer = ConvModule( + in_channels=3, + out_channels=in_channels, + kernel_size=3, + stride=2, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + act_cfg=dict(type='HSwish')) + self.add_module('layer0', layer) + layers.append('layer0') + + for i, params in enumerate(layer_setting): + (kernel_size, mid_channels, out_channels, with_se, act, + stride) = params + if with_se: + se_cfg = dict( + channels=mid_channels, + ratio=4, + act_cfg=(dict(type='ReLU'), + dict( + type='HSigmoid', + bias=3, + divisor=6, + min_value=0, + max_value=1))) + else: + se_cfg = None + + layer = InvertedResidual( + in_channels=in_channels, + out_channels=out_channels, + mid_channels=mid_channels, + kernel_size=kernel_size, + stride=stride, + se_cfg=se_cfg, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + act_cfg=dict(type=act), + with_cp=self.with_cp) + in_channels = out_channels + layer_name = 'layer{}'.format(i + 1) + self.add_module(layer_name, layer) + layers.append(layer_name) + + # Build the last layer before pooling + # TODO: No dilation + layer = ConvModule( + in_channels=in_channels, + out_channels=mid_channels, + kernel_size=1, + stride=1, + padding=0, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + act_cfg=dict(type='HSwish')) + layer_name = 'layer{}'.format(len(layer_setting) + 1) + self.add_module(layer_name, layer) + layers.append(layer_name) + + return layers + + def forward(self, x): + outs = [] + for i, layer_name in enumerate(self.layers): + layer = getattr(self, layer_name) + x = layer(x) + if i in self.out_indices: + outs.append(x) + + return tuple(outs) + + def _freeze_stages(self): + for i in range(0, self.frozen_stages + 1): + layer = getattr(self, f'layer{i}') + layer.eval() + for param in layer.parameters(): + param.requires_grad = False + + def train(self, mode=True): + super(MobileNetV3, self).train(mode) + self._freeze_stages() + if mode and self.norm_eval: + for m in self.modules(): + if isinstance(m, _BatchNorm): + m.eval() diff --git a/mmpretrain/models/backbones/mobileone.py b/mmpretrain/models/backbones/mobileone.py new file mode 100644 index 0000000000000000000000000000000000000000..1111441af82d43a49d15ecbb5dc0778fc9f87596 --- /dev/null +++ b/mmpretrain/models/backbones/mobileone.py @@ -0,0 +1,515 @@ +# Copyright (c) OpenMMLab. All rights reserved. +# Modified from official impl https://github.com/apple/ml-mobileone/blob/main/mobileone.py # noqa: E501 +from typing import Optional, Sequence + +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import build_activation_layer, build_conv_layer, build_norm_layer +from mmengine.model import BaseModule, ModuleList, Sequential +from torch.nn.modules.batchnorm import _BatchNorm + +from mmpretrain.registry import MODELS +from ..utils.se_layer import SELayer +from .base_backbone import BaseBackbone + + +class MobileOneBlock(BaseModule): + """MobileOne block for MobileOne backbone. + + Args: + in_channels (int): The input channels of the block. + out_channels (int): The output channels of the block. + kernel_size (int): The kernel size of the convs in the block. 
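The SE branches configured in ``_make_layer`` above use an ``HSigmoid`` with ``bias=3``, ``divisor=6`` clamped to [0, 1]; numerically that should correspond to the usual hard-sigmoid gate, sketched below for illustration (not part of the patch):

import torch

def hard_sigmoid(x, bias=3.0, divisor=6.0, min_value=0.0, max_value=1.0):
    # mirrors the configured gate: clamp((x + bias) / divisor, min_value, max_value)
    return torch.clamp((x + bias) / divisor, min_value, max_value)

print(hard_sigmoid(torch.tensor([-4.0, -3.0, 0.0, 3.0, 4.0])))
# tensor([0.0000, 0.0000, 0.5000, 1.0000, 1.0000])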
If the + kernel size is large than 1, there will be a ``branch_scale`` in + the block. + num_convs (int): Number of the convolution branches in the block. + stride (int): Stride of convolution layers. Defaults to 1. + padding (int): Padding of the convolution layers. Defaults to 1. + dilation (int): Dilation of the convolution layers. Defaults to 1. + groups (int): Groups of the convolution layers. Defaults to 1. + se_cfg (None or dict): The configuration of the se module. + Defaults to None. + norm_cfg (dict): Configuration to construct and config norm layer. + Defaults to ``dict(type='BN')``. + act_cfg (dict): Config dict for activation layer. + Defaults to ``dict(type='ReLU')``. + deploy (bool): Whether the model structure is in the deployment mode. + Defaults to False. + init_cfg (dict or list[dict], optional): Initialization config dict. + Defaults to None. + """ + + def __init__(self, + in_channels: int, + out_channels: int, + kernel_size: int, + num_convs: int, + stride: int = 1, + padding: int = 1, + dilation: int = 1, + groups: int = 1, + se_cfg: Optional[dict] = None, + conv_cfg: Optional[dict] = None, + norm_cfg: Optional[dict] = dict(type='BN'), + act_cfg: Optional[dict] = dict(type='ReLU'), + deploy: bool = False, + init_cfg: Optional[dict] = None): + super(MobileOneBlock, self).__init__(init_cfg) + + assert se_cfg is None or isinstance(se_cfg, dict) + if se_cfg is not None: + self.se = SELayer(channels=out_channels, **se_cfg) + else: + self.se = nn.Identity() + + self.in_channels = in_channels + self.out_channels = out_channels + self.kernel_size = kernel_size + self.num_conv_branches = num_convs + self.stride = stride + self.padding = padding + self.se_cfg = se_cfg + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.act_cfg = act_cfg + self.deploy = deploy + self.groups = groups + self.dilation = dilation + + if deploy: + self.branch_reparam = build_conv_layer( + conv_cfg, + in_channels=in_channels, + out_channels=out_channels, + kernel_size=kernel_size, + groups=self.groups, + stride=stride, + padding=padding, + dilation=dilation, + bias=True) + else: + # judge if input shape and output shape are the same. + # If true, add a normalized identity shortcut. 
+ if out_channels == in_channels and stride == 1: + self.branch_norm = build_norm_layer(norm_cfg, in_channels)[1] + else: + self.branch_norm = None + + self.branch_scale = None + if kernel_size > 1: + self.branch_scale = self.create_conv_bn(kernel_size=1) + + self.branch_conv_list = ModuleList() + for _ in range(num_convs): + self.branch_conv_list.append( + self.create_conv_bn( + kernel_size=kernel_size, + padding=padding, + dilation=dilation)) + + self.act = build_activation_layer(act_cfg) + + def create_conv_bn(self, kernel_size, dilation=1, padding=0): + """cearte a (conv + bn) Sequential layer.""" + conv_bn = Sequential() + conv_bn.add_module( + 'conv', + build_conv_layer( + self.conv_cfg, + in_channels=self.in_channels, + out_channels=self.out_channels, + kernel_size=kernel_size, + groups=self.groups, + stride=self.stride, + dilation=dilation, + padding=padding, + bias=False)) + conv_bn.add_module( + 'norm', + build_norm_layer(self.norm_cfg, num_features=self.out_channels)[1]) + + return conv_bn + + def forward(self, x): + + def _inner_forward(inputs): + if self.deploy: + return self.branch_reparam(inputs) + + inner_out = 0 + if self.branch_norm is not None: + inner_out = self.branch_norm(inputs) + + if self.branch_scale is not None: + inner_out += self.branch_scale(inputs) + + for branch_conv in self.branch_conv_list: + inner_out += branch_conv(inputs) + + return inner_out + + return self.act(self.se(_inner_forward(x))) + + def switch_to_deploy(self): + """Switch the model structure from training mode to deployment mode.""" + if self.deploy: + return + assert self.norm_cfg['type'] == 'BN', \ + "Switch is not allowed when norm_cfg['type'] != 'BN'." + + reparam_weight, reparam_bias = self.reparameterize() + self.branch_reparam = build_conv_layer( + self.conv_cfg, + self.in_channels, + self.out_channels, + kernel_size=self.kernel_size, + stride=self.stride, + padding=self.padding, + dilation=self.dilation, + groups=self.groups, + bias=True) + self.branch_reparam.weight.data = reparam_weight + self.branch_reparam.bias.data = reparam_bias + + for param in self.parameters(): + param.detach_() + delattr(self, 'branch_conv_list') + if hasattr(self, 'branch_scale'): + delattr(self, 'branch_scale') + delattr(self, 'branch_norm') + + self.deploy = True + + def reparameterize(self): + """Fuse all the parameters of all branches. + + Returns: + tuple[torch.Tensor, torch.Tensor]: Parameters after fusion of all + branches. the first element is the weights and the second is + the bias. + """ + weight_conv, bias_conv = 0, 0 + for branch_conv in self.branch_conv_list: + weight, bias = self._fuse_conv_bn(branch_conv) + weight_conv += weight + bias_conv += bias + + weight_scale, bias_scale = 0, 0 + if self.branch_scale is not None: + weight_scale, bias_scale = self._fuse_conv_bn(self.branch_scale) + # Pad scale branch kernel to match conv branch kernel size. + pad = self.kernel_size // 2 + weight_scale = F.pad(weight_scale, [pad, pad, pad, pad]) + + weight_norm, bias_norm = 0, 0 + if self.branch_norm: + tmp_conv_bn = self._norm_to_conv(self.branch_norm) + weight_norm, bias_norm = self._fuse_conv_bn(tmp_conv_bn) + + return (weight_conv + weight_scale + weight_norm, + bias_conv + bias_scale + bias_norm) + + def _fuse_conv_bn(self, branch): + """Fuse the parameters in a branch with a conv and bn. + + Args: + branch (mmcv.runner.Sequential): A branch with conv and bn. + + Returns: + tuple[torch.Tensor, torch.Tensor]: The parameters obtained after + fusing the parameters of conv and bn in one branch. 
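A standalone numerical sketch (not part of the patch) of the conv+BN folding formula that ``_fuse_conv_bn`` below relies on; folding an eval-mode BatchNorm into the preceding conv weights should reproduce the two-op result up to float tolerance:

import torch
import torch.nn as nn
import torch.nn.functional as F

conv = nn.Conv2d(4, 8, kernel_size=3, padding=1, bias=False)
bn = nn.BatchNorm2d(8).eval()
bn.running_mean.uniform_(-1, 1)
bn.running_var.uniform_(0.5, 1.5)
bn.weight.data.uniform_(0.5, 1.5)
bn.bias.data.uniform_(-1, 1)

std = (bn.running_var + bn.eps).sqrt()
fused_w = (bn.weight / std).reshape(-1, 1, 1, 1) * conv.weight
fused_b = bn.bias - bn.running_mean * bn.weight / std

x = torch.randn(2, 4, 16, 16)
ref = bn(conv(x))                                # conv followed by eval-mode BN
fused = F.conv2d(x, fused_w, fused_b, padding=1)  # single fused conv
assert torch.allclose(ref, fused, atol=1e-5)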
+ The first element is the weight and the second is the bias. + """ + if branch is None: + return 0, 0 + kernel = branch.conv.weight + running_mean = branch.norm.running_mean + running_var = branch.norm.running_var + gamma = branch.norm.weight + beta = branch.norm.bias + eps = branch.norm.eps + + std = (running_var + eps).sqrt() + fused_weight = (gamma / std).reshape(-1, 1, 1, 1) * kernel + fused_bias = beta - running_mean * gamma / std + + return fused_weight, fused_bias + + def _norm_to_conv(self, branch_nrom): + """Convert a norm layer to a conv-bn sequence towards + ``self.kernel_size``. + + Args: + branch (nn.BatchNorm2d): A branch only with bn in the block. + + Returns: + (mmcv.runner.Sequential): a sequential with conv and bn. + """ + input_dim = self.in_channels // self.groups + conv_weight = torch.zeros( + (self.in_channels, input_dim, self.kernel_size, self.kernel_size), + dtype=branch_nrom.weight.dtype) + + for i in range(self.in_channels): + conv_weight[i, i % input_dim, self.kernel_size // 2, + self.kernel_size // 2] = 1 + conv_weight = conv_weight.to(branch_nrom.weight.device) + + tmp_conv = self.create_conv_bn(kernel_size=self.kernel_size) + tmp_conv.conv.weight.data = conv_weight + tmp_conv.norm = branch_nrom + return tmp_conv + + +@MODELS.register_module() +class MobileOne(BaseBackbone): + """MobileOne backbone. + + A PyTorch impl of : `An Improved One millisecond Mobile Backbone + `_ + + Args: + arch (str | dict): MobileOne architecture. If use string, choose + from 's0', 's1', 's2', 's3' and 's4'. If use dict, it should + have below keys: + + - num_blocks (Sequence[int]): Number of blocks in each stage. + - width_factor (Sequence[float]): Width factor in each stage. + - num_conv_branches (Sequence[int]): Number of conv branches + in each stage. + - num_se_blocks (Sequence[int]): Number of SE layers in each + stage, all the SE layers are placed in the subsequent order + in each stage. + + Defaults to 's0'. + in_channels (int): Number of input image channels. Default: 3. + out_indices (Sequence[int] | int): Output from which stages. + Defaults to ``(3, )``. + frozen_stages (int): Stages to be frozen (all param fixed). -1 means + not freezing any parameters. Defaults to -1. + conv_cfg (dict | None): The config dict for conv layers. + Defaults to None. + norm_cfg (dict): The config dict for norm layers. + Defaults to ``dict(type='BN')``. + act_cfg (dict): Config dict for activation layer. + Defaults to ``dict(type='ReLU')``. + deploy (bool): Whether to switch the model structure to deployment + mode. Defaults to False. + norm_eval (bool): Whether to set norm layers to eval mode, namely, + freeze running stats (mean and var). Note: Effect on Batch Norm + and its variants only. Defaults to False. + init_cfg (dict or list[dict], optional): Initialization config dict. + + Example: + >>> from mmpretrain.models import MobileOne + >>> import torch + >>> x = torch.rand(1, 3, 224, 224) + >>> model = MobileOne("s0", out_indices=(0, 1, 2, 3)) + >>> model.eval() + >>> outputs = model(x) + >>> for out in outputs: + ... 
print(tuple(out.shape)) + (1, 48, 56, 56) + (1, 128, 28, 28) + (1, 256, 14, 14) + (1, 1024, 7, 7) + """ + + arch_zoo = { + 's0': + dict( + num_blocks=[2, 8, 10, 1], + width_factor=[0.75, 1.0, 1.0, 2.0], + num_conv_branches=[4, 4, 4, 4], + num_se_blocks=[0, 0, 0, 0]), + 's1': + dict( + num_blocks=[2, 8, 10, 1], + width_factor=[1.5, 1.5, 2.0, 2.5], + num_conv_branches=[1, 1, 1, 1], + num_se_blocks=[0, 0, 0, 0]), + 's2': + dict( + num_blocks=[2, 8, 10, 1], + width_factor=[1.5, 2.0, 2.5, 4.0], + num_conv_branches=[1, 1, 1, 1], + num_se_blocks=[0, 0, 0, 0]), + 's3': + dict( + num_blocks=[2, 8, 10, 1], + width_factor=[2.0, 2.5, 3.0, 4.0], + num_conv_branches=[1, 1, 1, 1], + num_se_blocks=[0, 0, 0, 0]), + 's4': + dict( + num_blocks=[2, 8, 10, 1], + width_factor=[3.0, 3.5, 3.5, 4.0], + num_conv_branches=[1, 1, 1, 1], + num_se_blocks=[0, 0, 5, 1]) + } + + def __init__(self, + arch, + in_channels=3, + out_indices=(3, ), + frozen_stages=-1, + conv_cfg=None, + norm_cfg=dict(type='BN'), + act_cfg=dict(type='ReLU'), + se_cfg=dict(ratio=16), + deploy=False, + norm_eval=False, + init_cfg=[ + dict(type='Kaiming', layer=['Conv2d']), + dict(type='Constant', val=1, layer=['_BatchNorm']) + ]): + super(MobileOne, self).__init__(init_cfg) + + if isinstance(arch, str): + assert arch in self.arch_zoo, f'"arch": "{arch}"' \ + f' is not one of the {list(self.arch_zoo.keys())}' + arch = self.arch_zoo[arch] + elif not isinstance(arch, dict): + raise TypeError('Expect "arch" to be either a string ' + f'or a dict, got {type(arch)}') + + self.arch = arch + for k, value in self.arch.items(): + assert isinstance(value, list) and len(value) == 4, \ + f'the value of {k} in arch must be list with 4 items.' + + self.in_channels = in_channels + self.deploy = deploy + self.frozen_stages = frozen_stages + self.norm_eval = norm_eval + + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.se_cfg = se_cfg + self.act_cfg = act_cfg + + base_channels = [64, 128, 256, 512] + channels = min(64, + int(base_channels[0] * self.arch['width_factor'][0])) + self.stage0 = MobileOneBlock( + self.in_channels, + channels, + stride=2, + kernel_size=3, + num_convs=1, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg, + deploy=deploy) + + self.in_planes = channels + self.stages = [] + for i, num_blocks in enumerate(self.arch['num_blocks']): + planes = int(base_channels[i] * self.arch['width_factor'][i]) + + stage = self._make_stage(planes, num_blocks, + arch['num_se_blocks'][i], + arch['num_conv_branches'][i]) + + stage_name = f'stage{i + 1}' + self.add_module(stage_name, stage) + self.stages.append(stage_name) + + if isinstance(out_indices, int): + out_indices = [out_indices] + assert isinstance(out_indices, Sequence), \ + f'"out_indices" must by a sequence or int, ' \ + f'get {type(out_indices)} instead.' + out_indices = list(out_indices) + for i, index in enumerate(out_indices): + if index < 0: + out_indices[i] = len(self.stages) + index + assert 0 <= out_indices[i] <= len(self.stages), \ + f'Invalid out_indices {index}.' 
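A re-parameterization sanity sketch for the whole backbone (hedged: it uses ``switch_to_deploy`` defined further below and allows a small float tolerance); in eval mode the deployed single-conv blocks should reproduce the multi-branch training structure:

import torch
from mmpretrain.models import MobileOne

model = MobileOne('s0')
model.eval()
x = torch.rand(1, 3, 224, 224)
with torch.no_grad():
    before = model(x)
model.switch_to_deploy()       # collapse every MobileOneBlock to one conv per branch point
with torch.no_grad():
    after = model(x)
for a, b in zip(before, after):
    assert torch.allclose(a, b, atol=1e-4)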
+ self.out_indices = out_indices + + def _make_stage(self, planes, num_blocks, num_se, num_conv_branches): + strides = [2] + [1] * (num_blocks - 1) + if num_se > num_blocks: + raise ValueError('Number of SE blocks cannot ' + 'exceed number of layers.') + blocks = [] + for i in range(num_blocks): + use_se = False + if i >= (num_blocks - num_se): + use_se = True + + blocks.append( + # Depthwise conv + MobileOneBlock( + in_channels=self.in_planes, + out_channels=self.in_planes, + kernel_size=3, + num_convs=num_conv_branches, + stride=strides[i], + padding=1, + groups=self.in_planes, + se_cfg=self.se_cfg if use_se else None, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + act_cfg=self.act_cfg, + deploy=self.deploy)) + + blocks.append( + # Pointwise conv + MobileOneBlock( + in_channels=self.in_planes, + out_channels=planes, + kernel_size=1, + num_convs=num_conv_branches, + stride=1, + padding=0, + se_cfg=self.se_cfg if use_se else None, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + act_cfg=self.act_cfg, + deploy=self.deploy)) + + self.in_planes = planes + + return Sequential(*blocks) + + def forward(self, x): + x = self.stage0(x) + outs = [] + for i, stage_name in enumerate(self.stages): + stage = getattr(self, stage_name) + x = stage(x) + if i in self.out_indices: + outs.append(x) + + return tuple(outs) + + def _freeze_stages(self): + if self.frozen_stages >= 0: + self.stage0.eval() + for param in self.stage0.parameters(): + param.requires_grad = False + for i in range(self.frozen_stages): + stage = getattr(self, f'stage{i+1}') + stage.eval() + for param in stage.parameters(): + param.requires_grad = False + + def train(self, mode=True): + """switch the mobile to train mode or not.""" + super(MobileOne, self).train(mode) + self._freeze_stages() + if mode and self.norm_eval: + for m in self.modules(): + if isinstance(m, _BatchNorm): + m.eval() + + def switch_to_deploy(self): + """switch the model to deploy mode, which has smaller amount of + parameters and calculations.""" + for m in self.modules(): + if isinstance(m, MobileOneBlock): + m.switch_to_deploy() + self.deploy = True diff --git a/mmpretrain/models/backbones/mobilevit.py b/mmpretrain/models/backbones/mobilevit.py new file mode 100644 index 0000000000000000000000000000000000000000..9e4043fe46049a4d1bddecc6b7b3768236318e82 --- /dev/null +++ b/mmpretrain/models/backbones/mobilevit.py @@ -0,0 +1,431 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import math +from typing import Callable, Optional, Sequence + +import torch +import torch.nn.functional as F +from mmcv.cnn import ConvModule, build_norm_layer +from torch import nn + +from mmpretrain.registry import MODELS +from .base_backbone import BaseBackbone +from .mobilenet_v2 import InvertedResidual +from .vision_transformer import TransformerEncoderLayer + + +class MobileVitBlock(nn.Module): + """MobileViT block. + + According to the paper, the MobileViT block has a local representation. + a transformer-as-convolution layer which consists of a global + representation with unfolding and folding, and a final fusion layer. + + Args: + in_channels (int): Number of input image channels. + transformer_dim (int): Number of transformer channels. + ffn_dim (int): Number of ffn channels in transformer block. + out_channels (int): Number of channels in output. + conv_ksize (int): Conv kernel size in local representation + and fusion. Defaults to 3. + conv_cfg (dict, optional): Config dict for convolution layer. + Defaults to None, which means using conv2d. 
+ norm_cfg (dict, optional): Config dict for normalization layer. + Defaults to dict(type='BN'). + act_cfg (dict, optional): Config dict for activation layer. + Defaults to dict(type='Swish'). + num_transformer_blocks (int): Number of transformer blocks in + a MobileViT block. Defaults to 2. + patch_size (int): Patch size for unfolding and folding. + Defaults to 2. + num_heads (int): Number of heads in global representation. + Defaults to 4. + drop_rate (float): Probability of an element to be zeroed + after the feed forward layer. Defaults to 0. + attn_drop_rate (float): The drop out rate for attention output weights. + Defaults to 0. + drop_path_rate (float): Stochastic depth rate. Defaults to 0. + no_fusion (bool): Whether to remove the fusion layer. + Defaults to False. + transformer_norm_cfg (dict, optional): Config dict for normalization + layer in transformer. Defaults to dict(type='LN'). + """ + + def __init__( + self, + in_channels: int, + transformer_dim: int, + ffn_dim: int, + out_channels: int, + conv_ksize: int = 3, + conv_cfg: Optional[dict] = None, + norm_cfg: Optional[dict] = dict(type='BN'), + act_cfg: Optional[dict] = dict(type='Swish'), + num_transformer_blocks: int = 2, + patch_size: int = 2, + num_heads: int = 4, + drop_rate: float = 0., + attn_drop_rate: float = 0., + drop_path_rate: float = 0., + no_fusion: bool = False, + transformer_norm_cfg: Callable = dict(type='LN'), + ): + super(MobileVitBlock, self).__init__() + + self.local_rep = nn.Sequential( + ConvModule( + in_channels=in_channels, + out_channels=in_channels, + kernel_size=conv_ksize, + padding=int((conv_ksize - 1) / 2), + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg), + ConvModule( + in_channels=in_channels, + out_channels=transformer_dim, + kernel_size=1, + bias=False, + conv_cfg=conv_cfg, + norm_cfg=None, + act_cfg=None), + ) + + global_rep = [ + TransformerEncoderLayer( + embed_dims=transformer_dim, + num_heads=num_heads, + feedforward_channels=ffn_dim, + drop_rate=drop_rate, + attn_drop_rate=attn_drop_rate, + drop_path_rate=drop_path_rate, + qkv_bias=True, + act_cfg=dict(type='Swish'), + norm_cfg=transformer_norm_cfg) + for _ in range(num_transformer_blocks) + ] + global_rep.append( + build_norm_layer(transformer_norm_cfg, transformer_dim)[1]) + self.global_rep = nn.Sequential(*global_rep) + + self.conv_proj = ConvModule( + in_channels=transformer_dim, + out_channels=out_channels, + kernel_size=1, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg) + + if no_fusion: + self.conv_fusion = None + else: + self.conv_fusion = ConvModule( + in_channels=in_channels + out_channels, + out_channels=out_channels, + kernel_size=conv_ksize, + padding=int((conv_ksize - 1) / 2), + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg) + + self.patch_size = (patch_size, patch_size) + self.patch_area = self.patch_size[0] * self.patch_size[1] + + def forward(self, x: torch.Tensor) -> torch.Tensor: + shortcut = x + + # Local representation + x = self.local_rep(x) + + # Unfold (feature map -> patches) + patch_h, patch_w = self.patch_size + B, C, H, W = x.shape + new_h, new_w = math.ceil(H / patch_h) * patch_h, math.ceil( + W / patch_w) * patch_w + num_patch_h, num_patch_w = new_h // patch_h, new_w // patch_w # n_h, n_w # noqa + num_patches = num_patch_h * num_patch_w # N + interpolate = False + if new_h != H or new_w != W: + # Note: Padding can be done, but then it needs to be handled in attention function. 
# noqa + x = F.interpolate( + x, size=(new_h, new_w), mode='bilinear', align_corners=False) + interpolate = True + + # [B, C, H, W] --> [B * C * n_h, n_w, p_h, p_w] + x = x.reshape(B * C * num_patch_h, patch_h, num_patch_w, + patch_w).transpose(1, 2) + # [B * C * n_h, n_w, p_h, p_w] --> [BP, N, C] where P = p_h * p_w and N = n_h * n_w # noqa + x = x.reshape(B, C, num_patches, + self.patch_area).transpose(1, 3).reshape( + B * self.patch_area, num_patches, -1) + + # Global representations + x = self.global_rep(x) + + # Fold (patch -> feature map) + # [B, P, N, C] --> [B*C*n_h, n_w, p_h, p_w] + x = x.contiguous().view(B, self.patch_area, num_patches, -1) + x = x.transpose(1, 3).reshape(B * C * num_patch_h, num_patch_w, + patch_h, patch_w) + # [B*C*n_h, n_w, p_h, p_w] --> [B*C*n_h, p_h, n_w, p_w] --> [B, C, H, W] # noqa + x = x.transpose(1, 2).reshape(B, C, num_patch_h * patch_h, + num_patch_w * patch_w) + if interpolate: + x = F.interpolate( + x, size=(H, W), mode='bilinear', align_corners=False) + + x = self.conv_proj(x) + if self.conv_fusion is not None: + x = self.conv_fusion(torch.cat((shortcut, x), dim=1)) + return x + + +@MODELS.register_module() +class MobileViT(BaseBackbone): + """MobileViT backbone. + + A PyTorch implementation of : `MobileViT: Light-weight, General-purpose, + and Mobile-friendly Vision Transformer `_ + + Modified from the `official repo + `_ + and `timm + `_. + + Args: + arch (str | List[list]): Architecture of MobileViT. + + - If a string, choose from "small", "x_small" and "xx_small". + + - If a list, every item should be also a list, and the first item + of the sub-list can be chosen from "moblienetv2" and "mobilevit", + which indicates the type of this layer sequence. If "mobilenetv2", + the other items are the arguments of :attr:`~MobileViT.make_mobilenetv2_layer` + (except ``in_channels``) and if "mobilevit", the other items are + the arguments of :attr:`~MobileViT.make_mobilevit_layer` + (except ``in_channels``). + + Defaults to "small". + in_channels (int): Number of input image channels. Defaults to 3. + stem_channels (int): Channels of stem layer. Defaults to 16. + last_exp_factor (int): Channels expand factor of last layer. + Defaults to 4. + out_indices (Sequence[int]): Output from which stages. + Defaults to (4, ). + frozen_stages (int): Stages to be frozen (all param fixed). + Defaults to -1, which means not freezing any parameters. + conv_cfg (dict, optional): Config dict for convolution layer. + Defaults to None, which means using conv2d. + norm_cfg (dict, optional): Config dict for normalization layer. + Defaults to dict(type='BN'). + act_cfg (dict, optional): Config dict for activation layer. + Defaults to dict(type='Swish'). + init_cfg (dict, optional): Initialization config dict. + """ # noqa + + # Parameters to build layers. The first param is the type of layer. + # For `mobilenetv2` layer, the rest params from left to right are: + # out channels, stride, num of blocks, expand_ratio. + # For `mobilevit` layer, the rest params from left to right are: + # out channels, stride, transformer_channels, ffn channels, + # num of transformer blocks, expand_ratio. 
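The unfold/fold reshuffling in ``MobileVitBlock.forward`` above is the easiest place to introduce a subtle bug; this toy sketch (not part of the patch) replays the same reshape/transpose chain on a small tensor and checks that the round trip is exact:

import torch

B, C, H, W = 2, 8, 6, 6
p_h = p_w = 2
n_h, n_w = H // p_h, W // p_w
N, P = n_h * n_w, p_h * p_w

x = torch.randn(B, C, H, W)

# unfold: (B, C, H, W) -> (B * P, N, C)
t = x.reshape(B * C * n_h, p_h, n_w, p_w).transpose(1, 2)
t = t.reshape(B, C, N, P).transpose(1, 3).reshape(B * P, N, C)

# fold: (B * P, N, C) -> (B, C, H, W)
y = t.contiguous().view(B, P, N, -1).transpose(1, 3)
y = y.reshape(B * C * n_h, n_w, p_h, p_w).transpose(1, 2)
y = y.reshape(B, C, H, W)

assert torch.equal(x, y)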
+ arch_settings = { + 'small': [ + ['mobilenetv2', 32, 1, 1, 4], + ['mobilenetv2', 64, 2, 3, 4], + ['mobilevit', 96, 2, 144, 288, 2, 4], + ['mobilevit', 128, 2, 192, 384, 4, 4], + ['mobilevit', 160, 2, 240, 480, 3, 4], + ], + 'x_small': [ + ['mobilenetv2', 32, 1, 1, 4], + ['mobilenetv2', 48, 2, 3, 4], + ['mobilevit', 64, 2, 96, 192, 2, 4], + ['mobilevit', 80, 2, 120, 240, 4, 4], + ['mobilevit', 96, 2, 144, 288, 3, 4], + ], + 'xx_small': [ + ['mobilenetv2', 16, 1, 1, 2], + ['mobilenetv2', 24, 2, 3, 2], + ['mobilevit', 48, 2, 64, 128, 2, 2], + ['mobilevit', 64, 2, 80, 160, 4, 2], + ['mobilevit', 80, 2, 96, 192, 3, 2], + ] + } + + def __init__(self, + arch='small', + in_channels=3, + stem_channels=16, + last_exp_factor=4, + out_indices=(4, ), + frozen_stages=-1, + conv_cfg=None, + norm_cfg=dict(type='BN'), + act_cfg=dict(type='Swish'), + init_cfg=[ + dict(type='Kaiming', layer=['Conv2d']), + dict( + type='Constant', + val=1, + layer=['_BatchNorm', 'GroupNorm']) + ]): + super(MobileViT, self).__init__(init_cfg) + if isinstance(arch, str): + arch = arch.lower() + assert arch in self.arch_settings, \ + f'Unavailable arch, please choose from ' \ + f'({set(self.arch_settings)}) or pass a list.' + arch = self.arch_settings[arch] + + self.arch = arch + self.num_stages = len(arch) + + # check out indices and frozen stages + if isinstance(out_indices, int): + out_indices = [out_indices] + assert isinstance(out_indices, Sequence), \ + f'"out_indices" must by a sequence or int, ' \ + f'get {type(out_indices)} instead.' + for i, index in enumerate(out_indices): + if index < 0: + out_indices[i] = self.num_stages + index + assert out_indices[i] >= 0, f'Invalid out_indices {index}' + self.out_indices = out_indices + + if frozen_stages not in range(-1, self.num_stages): + raise ValueError('frozen_stages must be in range(-1, ' + f'{self.num_stages}). ' + f'But received {frozen_stages}') + self.frozen_stages = frozen_stages + + _make_layer_func = { + 'mobilenetv2': self.make_mobilenetv2_layer, + 'mobilevit': self.make_mobilevit_layer, + } + + self.stem = ConvModule( + in_channels=in_channels, + out_channels=stem_channels, + kernel_size=3, + stride=2, + padding=1, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg) + + in_channels = stem_channels + layers = [] + for i, layer_settings in enumerate(arch): + layer_type, settings = layer_settings[0], layer_settings[1:] + layer, out_channels = _make_layer_func[layer_type](in_channels, + *settings) + layers.append(layer) + in_channels = out_channels + self.layers = nn.Sequential(*layers) + + self.conv_1x1_exp = ConvModule( + in_channels=in_channels, + out_channels=last_exp_factor * in_channels, + kernel_size=1, + stride=1, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg) + + @staticmethod + def make_mobilevit_layer(in_channels, + out_channels, + stride, + transformer_dim, + ffn_dim, + num_transformer_blocks, + expand_ratio=4): + """Build mobilevit layer, which consists of one InvertedResidual and + one MobileVitBlock. + + Args: + in_channels (int): The input channels. + out_channels (int): The output channels. + stride (int): The stride of the first 3x3 convolution in the + ``InvertedResidual`` layers. + transformer_dim (int): The channels of the transformer layers. + ffn_dim (int): The mid-channels of the feedforward network in + transformer layers. + num_transformer_blocks (int): The number of transformer blocks. + expand_ratio (int): adjusts number of channels of the hidden layer + in ``InvertedResidual`` by this amount. Defaults to 4. 
+ """ + layer = [] + layer.append( + InvertedResidual( + in_channels=in_channels, + out_channels=out_channels, + stride=stride, + expand_ratio=expand_ratio, + act_cfg=dict(type='Swish'), + )) + layer.append( + MobileVitBlock( + in_channels=out_channels, + transformer_dim=transformer_dim, + ffn_dim=ffn_dim, + out_channels=out_channels, + num_transformer_blocks=num_transformer_blocks, + )) + return nn.Sequential(*layer), out_channels + + @staticmethod + def make_mobilenetv2_layer(in_channels, + out_channels, + stride, + num_blocks, + expand_ratio=4): + """Build mobilenetv2 layer, which consists of several InvertedResidual + layers. + + Args: + in_channels (int): The input channels. + out_channels (int): The output channels. + stride (int): The stride of the first 3x3 convolution in the + ``InvertedResidual`` layers. + num_blocks (int): The number of ``InvertedResidual`` blocks. + expand_ratio (int): adjusts number of channels of the hidden layer + in ``InvertedResidual`` by this amount. Defaults to 4. + """ + layer = [] + for i in range(num_blocks): + stride = stride if i == 0 else 1 + + layer.append( + InvertedResidual( + in_channels=in_channels, + out_channels=out_channels, + stride=stride, + expand_ratio=expand_ratio, + act_cfg=dict(type='Swish'), + )) + in_channels = out_channels + return nn.Sequential(*layer), out_channels + + def _freeze_stages(self): + for i in range(0, self.frozen_stages): + layer = self.layers[i] + layer.eval() + for param in layer.parameters(): + param.requires_grad = False + + def train(self, mode=True): + super(MobileViT, self).train(mode) + self._freeze_stages() + + def forward(self, x): + x = self.stem(x) + outs = [] + for i, layer in enumerate(self.layers): + x = layer(x) + if i == len(self.layers) - 1: + x = self.conv_1x1_exp(x) + if i in self.out_indices: + outs.append(x) + + return tuple(outs) diff --git a/mmpretrain/models/backbones/mvit.py b/mmpretrain/models/backbones/mvit.py new file mode 100644 index 0000000000000000000000000000000000000000..68aee97ddf3077ca58e488f38e9d9422b171d691 --- /dev/null +++ b/mmpretrain/models/backbones/mvit.py @@ -0,0 +1,700 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Optional, Sequence + +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import build_activation_layer, build_norm_layer +from mmcv.cnn.bricks import DropPath +from mmcv.cnn.bricks.transformer import PatchEmbed +from mmengine.model import BaseModule, ModuleList +from mmengine.model.weight_init import trunc_normal_ +from mmengine.utils import to_2tuple + +from ..builder import BACKBONES +from ..utils import resize_pos_embed +from .base_backbone import BaseBackbone + + +def resize_decomposed_rel_pos(rel_pos, q_size, k_size): + """Get relative positional embeddings according to the relative positions + of query and key sizes. + + Args: + q_size (int): size of query q. + k_size (int): size of key k. + rel_pos (Tensor): relative position embeddings (L, C). + + Returns: + Extracted positional embeddings according to relative positions. + """ + max_rel_dist = int(2 * max(q_size, k_size) - 1) + # Interpolate rel pos if needed. + if rel_pos.shape[0] != max_rel_dist: + # Interpolate rel pos. 
+ resized = F.interpolate( + # (L, C) -> (1, C, L) + rel_pos.transpose(0, 1).unsqueeze(0), + size=max_rel_dist, + mode='linear', + ) + # (1, C, L) -> (L, C) + resized = resized.squeeze(0).transpose(0, 1) + else: + resized = rel_pos + + # Scale the coords with short length if shapes for q and k are different. + q_h_ratio = max(k_size / q_size, 1.0) + k_h_ratio = max(q_size / k_size, 1.0) + q_coords = torch.arange(q_size)[:, None] * q_h_ratio + k_coords = torch.arange(k_size)[None, :] * k_h_ratio + relative_coords = (q_coords - k_coords) + (k_size - 1) * k_h_ratio + + return resized[relative_coords.long()] + + +def add_decomposed_rel_pos(attn, + q, + q_shape, + k_shape, + rel_pos_h, + rel_pos_w, + has_cls_token=False): + """Spatial Relative Positional Embeddings.""" + sp_idx = 1 if has_cls_token else 0 + B, num_heads, _, C = q.shape + q_h, q_w = q_shape + k_h, k_w = k_shape + + Rh = resize_decomposed_rel_pos(rel_pos_h, q_h, k_h) + Rw = resize_decomposed_rel_pos(rel_pos_w, q_w, k_w) + + r_q = q[:, :, sp_idx:].reshape(B, num_heads, q_h, q_w, C) + rel_h = torch.einsum('byhwc,hkc->byhwk', r_q, Rh) + rel_w = torch.einsum('byhwc,wkc->byhwk', r_q, Rw) + rel_pos_embed = rel_h[:, :, :, :, :, None] + rel_w[:, :, :, :, None, :] + + attn_map = attn[:, :, sp_idx:, sp_idx:].view(B, -1, q_h, q_w, k_h, k_w) + attn_map += rel_pos_embed + attn[:, :, sp_idx:, sp_idx:] = attn_map.view(B, -1, q_h * q_w, k_h * k_w) + + return attn + + +class MLP(BaseModule): + """Two-layer multilayer perceptron. + + Comparing with :class:`mmcv.cnn.bricks.transformer.FFN`, this class allows + different input and output channel numbers. + + Args: + in_channels (int): The number of input channels. + hidden_channels (int, optional): The number of hidden layer channels. + If None, same as the ``in_channels``. Defaults to None. + out_channels (int, optional): The number of output channels. If None, + same as the ``in_channels``. Defaults to None. + act_cfg (dict): The config of activation function. + Defaults to ``dict(type='GELU')``. + init_cfg (dict, optional): The config of weight initialization. + Defaults to None. + """ + + def __init__(self, + in_channels, + hidden_channels=None, + out_channels=None, + act_cfg=dict(type='GELU'), + init_cfg=None): + super().__init__(init_cfg=init_cfg) + out_channels = out_channels or in_channels + hidden_channels = hidden_channels or in_channels + self.fc1 = nn.Linear(in_channels, hidden_channels) + self.act = build_activation_layer(act_cfg) + self.fc2 = nn.Linear(hidden_channels, out_channels) + + def forward(self, x): + x = self.fc1(x) + x = self.act(x) + x = self.fc2(x) + return x + + +def attention_pool(x: torch.Tensor, + pool: nn.Module, + in_size: tuple, + norm: Optional[nn.Module] = None): + """Pooling the feature tokens. + + Args: + x (torch.Tensor): The input tensor, should be with shape + ``(B, num_heads, L, C)`` or ``(B, L, C)``. + pool (nn.Module): The pooling module. + in_size (Tuple[int]): The shape of the input feature map. + norm (nn.Module, optional): The normalization module. + Defaults to None. 
+ """ + ndim = x.ndim + if ndim == 4: + B, num_heads, L, C = x.shape + elif ndim == 3: + num_heads = 1 + B, L, C = x.shape + else: + raise RuntimeError(f'Unsupported input dimension {x.shape}') + + H, W = in_size + assert L == H * W + + # (B, num_heads, H*W, C) -> (B*num_heads, C, H, W) + x = x.reshape(B * num_heads, H, W, C).permute(0, 3, 1, 2).contiguous() + x = pool(x) + out_size = x.shape[-2:] + + # (B*num_heads, C, H', W') -> (B, num_heads, H'*W', C) + x = x.reshape(B, num_heads, C, -1).transpose(2, 3) + + if norm is not None: + x = norm(x) + + if ndim == 3: + x = x.squeeze(1) + + return x, out_size + + +class MultiScaleAttention(BaseModule): + """Multiscale Multi-head Attention block. + + Args: + in_dims (int): Number of input channels. + out_dims (int): Number of output channels. + num_heads (int): Number of attention heads. + qkv_bias (bool): If True, add a learnable bias to query, key and + value. Defaults to True. + norm_cfg (dict): The config of normalization layers. + Defaults to ``dict(type='LN')``. + pool_kernel (tuple): kernel size for qkv pooling layers. + Defaults to (3, 3). + stride_q (int): stride size for q pooling layer. Defaults to 1. + stride_kv (int): stride size for kv pooling layer. Defaults to 1. + rel_pos_spatial (bool): Whether to enable the spatial relative + position embedding. Defaults to True. + residual_pooling (bool): Whether to enable the residual connection + after attention pooling. Defaults to True. + input_size (Tuple[int], optional): The input resolution, necessary + if enable the ``rel_pos_spatial``. Defaults to None. + rel_pos_zero_init (bool): If True, zero initialize relative + positional parameters. Defaults to False. + init_cfg (dict, optional): The config of weight initialization. + Defaults to None. + """ + + def __init__(self, + in_dims, + out_dims, + num_heads, + qkv_bias=True, + norm_cfg=dict(type='LN'), + pool_kernel=(3, 3), + stride_q=1, + stride_kv=1, + rel_pos_spatial=False, + residual_pooling=True, + input_size=None, + rel_pos_zero_init=False, + init_cfg=None): + super().__init__(init_cfg=init_cfg) + self.num_heads = num_heads + self.in_dims = in_dims + self.out_dims = out_dims + + head_dim = out_dims // num_heads + self.scale = head_dim**-0.5 + + self.qkv = nn.Linear(in_dims, out_dims * 3, bias=qkv_bias) + self.proj = nn.Linear(out_dims, out_dims) + + # qkv pooling + pool_padding = [k // 2 for k in pool_kernel] + pool_dims = out_dims // num_heads + + def build_pooling(stride): + pool = nn.Conv2d( + pool_dims, + pool_dims, + pool_kernel, + stride=stride, + padding=pool_padding, + groups=pool_dims, + bias=False, + ) + norm = build_norm_layer(norm_cfg, pool_dims)[1] + return pool, norm + + self.pool_q, self.norm_q = build_pooling(stride_q) + self.pool_k, self.norm_k = build_pooling(stride_kv) + self.pool_v, self.norm_v = build_pooling(stride_kv) + + self.residual_pooling = residual_pooling + + self.rel_pos_spatial = rel_pos_spatial + self.rel_pos_zero_init = rel_pos_zero_init + if self.rel_pos_spatial: + # initialize relative positional embeddings + assert input_size[0] == input_size[1] + + size = input_size[0] + rel_dim = 2 * max(size // stride_q, size // stride_kv) - 1 + self.rel_pos_h = nn.Parameter(torch.zeros(rel_dim, head_dim)) + self.rel_pos_w = nn.Parameter(torch.zeros(rel_dim, head_dim)) + + def init_weights(self): + """Weight initialization.""" + super().init_weights() + + if (isinstance(self.init_cfg, dict) + and self.init_cfg['type'] == 'Pretrained'): + # Suppress rel_pos_zero_init if use pretrained model. 
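The reshaping performed by attention_pool above can be checked in isolation. A small sketch, illustrative only and not part of the patch, assuming attention_pool is imported from this mvit module; it pools a 14x14 token grid with a stride-2 depthwise convolution:

    import torch
    import torch.nn as nn

    pool = nn.Conv2d(64, 64, kernel_size=3, stride=2, padding=1, groups=64, bias=False)
    tokens = torch.rand(2, 8, 14 * 14, 64)       # (B, num_heads, H*W, C)
    out, out_size = attention_pool(tokens, pool, in_size=(14, 14))
    print(out.shape, out_size)                   # (2, 8, 49, 64) and (7, 7)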
+ return + + if not self.rel_pos_zero_init: + trunc_normal_(self.rel_pos_h, std=0.02) + trunc_normal_(self.rel_pos_w, std=0.02) + + def forward(self, x, in_size): + """Forward the MultiScaleAttention.""" + B, N, _ = x.shape # (B, H*W, C) + + # qkv: (B, H*W, 3, num_heads, C) + qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, -1) + # q, k, v: (B, num_heads, H*W, C) + q, k, v = qkv.permute(2, 0, 3, 1, 4).unbind(0) + + q, q_shape = attention_pool(q, self.pool_q, in_size, norm=self.norm_q) + k, k_shape = attention_pool(k, self.pool_k, in_size, norm=self.norm_k) + v, v_shape = attention_pool(v, self.pool_v, in_size, norm=self.norm_v) + + attn = (q * self.scale) @ k.transpose(-2, -1) + if self.rel_pos_spatial: + attn = add_decomposed_rel_pos(attn, q, q_shape, k_shape, + self.rel_pos_h, self.rel_pos_w) + + attn = attn.softmax(dim=-1) + x = attn @ v + + if self.residual_pooling: + x = x + q + + # (B, num_heads, H'*W', C'//num_heads) -> (B, H'*W', C') + x = x.transpose(1, 2).reshape(B, -1, self.out_dims) + x = self.proj(x) + + return x, q_shape + + +class MultiScaleBlock(BaseModule): + """Multiscale Transformer blocks. + + Args: + in_dims (int): Number of input channels. + out_dims (int): Number of output channels. + num_heads (int): Number of attention heads. + mlp_ratio (float): Ratio of hidden dimensions in MLP layers. + Defaults to 4.0. + qkv_bias (bool): If True, add a learnable bias to query, key and + value. Defaults to True. + drop_path (float): Stochastic depth rate. Defaults to 0. + norm_cfg (dict): The config of normalization layers. + Defaults to ``dict(type='LN')``. + act_cfg (dict): The config of activation function. + Defaults to ``dict(type='GELU')``. + qkv_pool_kernel (tuple): kernel size for qkv pooling layers. + Defaults to (3, 3). + stride_q (int): stride size for q pooling layer. Defaults to 1. + stride_kv (int): stride size for kv pooling layer. Defaults to 1. + rel_pos_spatial (bool): Whether to enable the spatial relative + position embedding. Defaults to True. + residual_pooling (bool): Whether to enable the residual connection + after attention pooling. Defaults to True. + dim_mul_in_attention (bool): Whether to multiply the ``embed_dims`` in + attention layers. If False, multiply it in MLP layers. + Defaults to True. + input_size (Tuple[int], optional): The input resolution, necessary + if enable the ``rel_pos_spatial``. Defaults to None. + rel_pos_zero_init (bool): If True, zero initialize relative + positional parameters. Defaults to False. + init_cfg (dict, optional): The config of weight initialization. + Defaults to None. 
+ """ + + def __init__( + self, + in_dims, + out_dims, + num_heads, + mlp_ratio=4.0, + qkv_bias=True, + drop_path=0.0, + norm_cfg=dict(type='LN'), + act_cfg=dict(type='GELU'), + qkv_pool_kernel=(3, 3), + stride_q=1, + stride_kv=1, + rel_pos_spatial=True, + residual_pooling=True, + dim_mul_in_attention=True, + input_size=None, + rel_pos_zero_init=False, + init_cfg=None, + ): + super().__init__(init_cfg=init_cfg) + self.in_dims = in_dims + self.out_dims = out_dims + self.norm1 = build_norm_layer(norm_cfg, in_dims)[1] + self.dim_mul_in_attention = dim_mul_in_attention + + attn_dims = out_dims if dim_mul_in_attention else in_dims + self.attn = MultiScaleAttention( + in_dims, + attn_dims, + num_heads=num_heads, + qkv_bias=qkv_bias, + norm_cfg=norm_cfg, + pool_kernel=qkv_pool_kernel, + stride_q=stride_q, + stride_kv=stride_kv, + rel_pos_spatial=rel_pos_spatial, + residual_pooling=residual_pooling, + input_size=input_size, + rel_pos_zero_init=rel_pos_zero_init) + self.drop_path = DropPath( + drop_path) if drop_path > 0.0 else nn.Identity() + + self.norm2 = build_norm_layer(norm_cfg, attn_dims)[1] + + self.mlp = MLP( + in_channels=attn_dims, + hidden_channels=int(attn_dims * mlp_ratio), + out_channels=out_dims, + act_cfg=act_cfg) + + if in_dims != out_dims: + self.proj = nn.Linear(in_dims, out_dims) + else: + self.proj = None + + if stride_q > 1: + kernel_skip = stride_q + 1 + padding_skip = int(kernel_skip // 2) + self.pool_skip = nn.MaxPool2d( + kernel_skip, stride_q, padding_skip, ceil_mode=False) + + if input_size is not None: + input_size = to_2tuple(input_size) + out_size = [size // stride_q for size in input_size] + self.init_out_size = out_size + else: + self.init_out_size = None + else: + self.pool_skip = None + self.init_out_size = input_size + + def forward(self, x, in_size): + x_norm = self.norm1(x) + x_attn, out_size = self.attn(x_norm, in_size) + + if self.dim_mul_in_attention and self.proj is not None: + skip = self.proj(x_norm) + else: + skip = x + + if self.pool_skip is not None: + skip, _ = attention_pool(skip, self.pool_skip, in_size) + + x = skip + self.drop_path(x_attn) + x_norm = self.norm2(x) + x_mlp = self.mlp(x_norm) + + if not self.dim_mul_in_attention and self.proj is not None: + skip = self.proj(x_norm) + else: + skip = x + + x = skip + self.drop_path(x_mlp) + + return x, out_size + + +@BACKBONES.register_module() +class MViT(BaseBackbone): + """Multi-scale ViT v2. + + A PyTorch implement of : `MViTv2: Improved Multiscale Vision Transformers + for Classification and Detection `_ + + Inspiration from `the official implementation + `_ and `the detectron2 + implementation `_ + + Args: + arch (str | dict): MViT architecture. If use string, choose + from 'tiny', 'small', 'base' and 'large'. If use dict, it should + have below keys: + + - **embed_dims** (int): The dimensions of embedding. + - **num_layers** (int): The number of layers. + - **num_heads** (int): The number of heads in attention + modules of the initial layer. + - **downscale_indices** (List[int]): The layer indices to downscale + the feature map. + + Defaults to 'base'. + img_size (int): The expected input image shape. Defaults to 224. + in_channels (int): The num of input channels. Defaults to 3. + out_scales (int | Sequence[int]): The output scale indices. + They should not exceed the length of ``downscale_indices``. + Defaults to -1, which means the last scale. + drop_path_rate (float): Stochastic depth rate. Defaults to 0.1. 
+ use_abs_pos_embed (bool): If True, add absolute position embedding to + the patch embedding. Defaults to False. + interpolate_mode (str): Select the interpolate mode for absolute + position embedding vector resize. Defaults to "bicubic". + pool_kernel (tuple): kernel size for qkv pooling layers. + Defaults to (3, 3). + dim_mul (int): The magnification for ``embed_dims`` in the downscale + layers. Defaults to 2. + head_mul (int): The magnification for ``num_heads`` in the downscale + layers. Defaults to 2. + adaptive_kv_stride (int): The stride size for kv pooling in the initial + layer. Defaults to 4. + rel_pos_spatial (bool): Whether to enable the spatial relative position + embedding. Defaults to True. + residual_pooling (bool): Whether to enable the residual connection + after attention pooling. Defaults to True. + dim_mul_in_attention (bool): Whether to multiply the ``embed_dims`` in + attention layers. If False, multiply it in MLP layers. + Defaults to True. + rel_pos_zero_init (bool): If True, zero initialize relative + positional parameters. Defaults to False. + mlp_ratio (float): Ratio of hidden dimensions in MLP layers. + Defaults to 4.0. + qkv_bias (bool): enable bias for qkv if True. Defaults to True. + norm_cfg (dict): Config dict for normalization layer for all output + features. Defaults to ``dict(type='LN', eps=1e-6)``. + patch_cfg (dict): Config dict for the patch embedding layer. + Defaults to ``dict(kernel_size=7, stride=4, padding=3)``. + init_cfg (dict, optional): The Config for initialization. + Defaults to None. + + Examples: + >>> import torch + >>> from mmpretrain.models import build_backbone + >>> + >>> cfg = dict(type='MViT', arch='tiny', out_scales=[0, 1, 2, 3]) + >>> model = build_backbone(cfg) + >>> inputs = torch.rand(1, 3, 224, 224) + >>> outputs = model(inputs) + >>> for i, output in enumerate(outputs): + >>> print(f'scale{i}: {output.shape}') + scale0: torch.Size([1, 96, 56, 56]) + scale1: torch.Size([1, 192, 28, 28]) + scale2: torch.Size([1, 384, 14, 14]) + scale3: torch.Size([1, 768, 7, 7]) + """ + arch_zoo = { + 'tiny': { + 'embed_dims': 96, + 'num_layers': 10, + 'num_heads': 1, + 'downscale_indices': [1, 3, 8] + }, + 'small': { + 'embed_dims': 96, + 'num_layers': 16, + 'num_heads': 1, + 'downscale_indices': [1, 3, 14] + }, + 'base': { + 'embed_dims': 96, + 'num_layers': 24, + 'num_heads': 1, + 'downscale_indices': [2, 5, 21] + }, + 'large': { + 'embed_dims': 144, + 'num_layers': 48, + 'num_heads': 2, + 'downscale_indices': [2, 8, 44] + }, + } + num_extra_tokens = 0 + + def __init__(self, + arch='base', + img_size=224, + in_channels=3, + out_scales=-1, + drop_path_rate=0., + use_abs_pos_embed=False, + interpolate_mode='bicubic', + pool_kernel=(3, 3), + dim_mul=2, + head_mul=2, + adaptive_kv_stride=4, + rel_pos_spatial=True, + residual_pooling=True, + dim_mul_in_attention=True, + rel_pos_zero_init=False, + mlp_ratio=4., + qkv_bias=True, + norm_cfg=dict(type='LN', eps=1e-6), + patch_cfg=dict(kernel_size=7, stride=4, padding=3), + init_cfg=None): + super().__init__(init_cfg) + + if isinstance(arch, str): + arch = arch.lower() + assert arch in set(self.arch_zoo), \ + f'Arch {arch} is not in default archs {set(self.arch_zoo)}' + self.arch_settings = self.arch_zoo[arch] + else: + essential_keys = { + 'embed_dims', 'num_layers', 'num_heads', 'downscale_indices' + } + assert isinstance(arch, dict) and essential_keys <= set(arch), \ + f'Custom arch needs a dict with keys {essential_keys}' + self.arch_settings = arch + + self.embed_dims = 
self.arch_settings['embed_dims'] + self.num_layers = self.arch_settings['num_layers'] + self.num_heads = self.arch_settings['num_heads'] + self.downscale_indices = self.arch_settings['downscale_indices'] + self.num_scales = len(self.downscale_indices) + 1 + self.stage_indices = { + index - 1: i + for i, index in enumerate(self.downscale_indices) + } + self.stage_indices[self.num_layers - 1] = self.num_scales - 1 + self.use_abs_pos_embed = use_abs_pos_embed + self.interpolate_mode = interpolate_mode + + if isinstance(out_scales, int): + out_scales = [out_scales] + assert isinstance(out_scales, Sequence), \ + f'"out_scales" must by a sequence or int, ' \ + f'get {type(out_scales)} instead.' + for i, index in enumerate(out_scales): + if index < 0: + out_scales[i] = self.num_scales + index + assert 0 <= out_scales[i] <= self.num_scales, \ + f'Invalid out_scales {index}' + self.out_scales = sorted(list(out_scales)) + + # Set patch embedding + _patch_cfg = dict( + in_channels=in_channels, + input_size=img_size, + embed_dims=self.embed_dims, + conv_type='Conv2d', + ) + _patch_cfg.update(patch_cfg) + self.patch_embed = PatchEmbed(**_patch_cfg) + self.patch_resolution = self.patch_embed.init_out_size + + # Set absolute position embedding + if self.use_abs_pos_embed: + num_patches = self.patch_resolution[0] * self.patch_resolution[1] + self.pos_embed = nn.Parameter( + torch.zeros(1, num_patches, self.embed_dims)) + + # stochastic depth decay rule + dpr = np.linspace(0, drop_path_rate, self.num_layers) + + self.blocks = ModuleList() + out_dims_list = [self.embed_dims] + num_heads = self.num_heads + stride_kv = adaptive_kv_stride + input_size = self.patch_resolution + for i in range(self.num_layers): + if i in self.downscale_indices: + num_heads *= head_mul + stride_q = 2 + stride_kv = max(stride_kv // 2, 1) + else: + stride_q = 1 + + # Set output embed_dims + if dim_mul_in_attention and i in self.downscale_indices: + # multiply embed_dims in downscale layers. + out_dims = out_dims_list[-1] * dim_mul + elif not dim_mul_in_attention and i + 1 in self.downscale_indices: + # multiply embed_dims before downscale layers. + out_dims = out_dims_list[-1] * dim_mul + else: + out_dims = out_dims_list[-1] + + attention_block = MultiScaleBlock( + in_dims=out_dims_list[-1], + out_dims=out_dims, + num_heads=num_heads, + mlp_ratio=mlp_ratio, + qkv_bias=qkv_bias, + drop_path=dpr[i], + norm_cfg=norm_cfg, + qkv_pool_kernel=pool_kernel, + stride_q=stride_q, + stride_kv=stride_kv, + rel_pos_spatial=rel_pos_spatial, + residual_pooling=residual_pooling, + dim_mul_in_attention=dim_mul_in_attention, + input_size=input_size, + rel_pos_zero_init=rel_pos_zero_init) + self.blocks.append(attention_block) + + input_size = attention_block.init_out_size + out_dims_list.append(out_dims) + + if i in self.stage_indices: + stage_index = self.stage_indices[i] + if stage_index in self.out_scales: + norm_layer = build_norm_layer(norm_cfg, out_dims)[1] + self.add_module(f'norm{stage_index}', norm_layer) + + def init_weights(self): + super().init_weights() + + if (isinstance(self.init_cfg, dict) + and self.init_cfg['type'] == 'Pretrained'): + # Suppress default init if use pretrained model. 
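For orientation, the schedule built by the block-construction loop above can be traced by hand for the 'tiny' arch. A short sketch, illustrative only; the numbers come from arch_zoo['tiny'], the default patch stride of 4, and the class-level example:

    embed_dims, num_heads, resolution = 96, 1, 56   # 224 input, patch stride 4
    for i in range(10):                             # num_layers = 10
        if i in (1, 3, 8):                          # downscale_indices
            embed_dims *= 2                         # dim_mul
            num_heads *= 2                          # head_mul
            resolution //= 2                        # stride_q = 2
    print(embed_dims, num_heads, resolution)        # 768 8 7, i.e. scale3 is (768, 7, 7)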
+ return + + if self.use_abs_pos_embed: + trunc_normal_(self.pos_embed, std=0.02) + + def forward(self, x): + """Forward the MViT.""" + B = x.shape[0] + x, patch_resolution = self.patch_embed(x) + + if self.use_abs_pos_embed: + x = x + resize_pos_embed( + self.pos_embed, + self.patch_resolution, + patch_resolution, + mode=self.interpolate_mode, + num_extra_tokens=self.num_extra_tokens) + + outs = [] + for i, block in enumerate(self.blocks): + x, patch_resolution = block(x, patch_resolution) + + if i in self.stage_indices: + stage_index = self.stage_indices[i] + if stage_index in self.out_scales: + B, _, C = x.shape + x = getattr(self, f'norm{stage_index}')(x) + out = x.transpose(1, 2).reshape(B, C, *patch_resolution) + outs.append(out.contiguous()) + + return tuple(outs) diff --git a/mmpretrain/models/backbones/poolformer.py b/mmpretrain/models/backbones/poolformer.py new file mode 100644 index 0000000000000000000000000000000000000000..e2ad67043dbeb0ce6969c2770853342b30df2a74 --- /dev/null +++ b/mmpretrain/models/backbones/poolformer.py @@ -0,0 +1,416 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Sequence + +import torch +import torch.nn as nn +from mmcv.cnn.bricks import DropPath, build_activation_layer, build_norm_layer +from mmengine.model import BaseModule + +from mmpretrain.registry import MODELS +from .base_backbone import BaseBackbone + + +class PatchEmbed(nn.Module): + """Patch Embedding module implemented by a layer of convolution. + + Input: tensor in shape [B, C, H, W] + Output: tensor in shape [B, C, H/stride, W/stride] + Args: + patch_size (int): Patch size of the patch embedding. Defaults to 16. + stride (int): Stride of the patch embedding. Defaults to 16. + padding (int): Padding of the patch embedding. Defaults to 0. + in_chans (int): Input channels. Defaults to 3. + embed_dim (int): Output dimension of the patch embedding. + Defaults to 768. + norm_layer (module): Normalization module. Defaults to None (not use). + """ + + def __init__(self, + patch_size=16, + stride=16, + padding=0, + in_chans=3, + embed_dim=768, + norm_layer=None): + super().__init__() + self.proj = nn.Conv2d( + in_chans, + embed_dim, + kernel_size=patch_size, + stride=stride, + padding=padding) + self.norm = norm_layer(embed_dim) if norm_layer else nn.Identity() + + def forward(self, x): + x = self.proj(x) + x = self.norm(x) + return x + + +class Pooling(nn.Module): + """Pooling module. + + Args: + pool_size (int): Pooling size. Defaults to 3. + """ + + def __init__(self, pool_size=3): + super().__init__() + self.pool = nn.AvgPool2d( + pool_size, + stride=1, + padding=pool_size // 2, + count_include_pad=False) + + def forward(self, x): + return self.pool(x) - x + + +class Mlp(nn.Module): + """Mlp implemented by with 1*1 convolutions. + + Input: Tensor with shape [B, C, H, W]. + Output: Tensor with shape [B, C, H, W]. + Args: + in_features (int): Dimension of input features. + hidden_features (int): Dimension of hidden features. + out_features (int): Dimension of output features. + act_cfg (dict): The config dict for activation between pointwise + convolution. Defaults to ``dict(type='GELU')``. + drop (float): Dropout rate. Defaults to 0.0. 
+ """ + + def __init__(self, + in_features, + hidden_features=None, + out_features=None, + act_cfg=dict(type='GELU'), + drop=0.): + super().__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + self.fc1 = nn.Conv2d(in_features, hidden_features, 1) + self.act = build_activation_layer(act_cfg) + self.fc2 = nn.Conv2d(hidden_features, out_features, 1) + self.drop = nn.Dropout(drop) + + def forward(self, x): + x = self.fc1(x) + x = self.act(x) + x = self.drop(x) + x = self.fc2(x) + x = self.drop(x) + return x + + +class PoolFormerBlock(BaseModule): + """PoolFormer Block. + + Args: + dim (int): Embedding dim. + pool_size (int): Pooling size. Defaults to 3. + mlp_ratio (float): Mlp expansion ratio. Defaults to 4. + norm_cfg (dict): The config dict for norm layers. + Defaults to ``dict(type='GN', num_groups=1)``. + act_cfg (dict): The config dict for activation between pointwise + convolution. Defaults to ``dict(type='GELU')``. + drop (float): Dropout rate. Defaults to 0. + drop_path (float): Stochastic depth rate. Defaults to 0. + layer_scale_init_value (float): Init value for Layer Scale. + Defaults to 1e-5. + """ + + def __init__(self, + dim, + pool_size=3, + mlp_ratio=4., + norm_cfg=dict(type='GN', num_groups=1), + act_cfg=dict(type='GELU'), + drop=0., + drop_path=0., + layer_scale_init_value=1e-5): + + super().__init__() + + self.norm1 = build_norm_layer(norm_cfg, dim)[1] + self.token_mixer = Pooling(pool_size=pool_size) + self.norm2 = build_norm_layer(norm_cfg, dim)[1] + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = Mlp( + in_features=dim, + hidden_features=mlp_hidden_dim, + act_cfg=act_cfg, + drop=drop) + + # The following two techniques are useful to train deep PoolFormers. + self.drop_path = DropPath(drop_path) if drop_path > 0. \ + else nn.Identity() + self.layer_scale_1 = nn.Parameter( + layer_scale_init_value * torch.ones((dim)), requires_grad=True) + self.layer_scale_2 = nn.Parameter( + layer_scale_init_value * torch.ones((dim)), requires_grad=True) + + def forward(self, x): + x = x + self.drop_path( + self.layer_scale_1.unsqueeze(-1).unsqueeze(-1) * + self.token_mixer(self.norm1(x))) + x = x + self.drop_path( + self.layer_scale_2.unsqueeze(-1).unsqueeze(-1) * + self.mlp(self.norm2(x))) + return x + + +def basic_blocks(dim, + index, + layers, + pool_size=3, + mlp_ratio=4., + norm_cfg=dict(type='GN', num_groups=1), + act_cfg=dict(type='GELU'), + drop_rate=.0, + drop_path_rate=0., + layer_scale_init_value=1e-5): + """ + generate PoolFormer blocks for a stage + return: PoolFormer blocks + """ + blocks = [] + for block_idx in range(layers[index]): + block_dpr = drop_path_rate * (block_idx + sum(layers[:index])) / ( + sum(layers) - 1) + blocks.append( + PoolFormerBlock( + dim, + pool_size=pool_size, + mlp_ratio=mlp_ratio, + norm_cfg=norm_cfg, + act_cfg=act_cfg, + drop=drop_rate, + drop_path=block_dpr, + layer_scale_init_value=layer_scale_init_value, + )) + blocks = nn.Sequential(*blocks) + + return blocks + + +@MODELS.register_module() +class PoolFormer(BaseBackbone): + """PoolFormer. + + A PyTorch implementation of PoolFormer introduced by: + `MetaFormer is Actually What You Need for Vision `_ + + Modified from the `official repo + `. + + Args: + arch (str | dict): The model's architecture. If string, it should be + one of architecture in ``PoolFormer.arch_settings``. And if dict, it + should include the following two keys: + + - layers (list[int]): Number of blocks at each stage. 
+ - embed_dims (list[int]): The number of channels at each stage. + - mlp_ratios (list[int]): Expansion ratio of MLPs. + - layer_scale_init_value (float): Init value for Layer Scale. + + Defaults to 'S12'. + + norm_cfg (dict): The config dict for norm layers. + Defaults to ``dict(type='LN2d', eps=1e-6)``. + act_cfg (dict): The config dict for activation between pointwise + convolution. Defaults to ``dict(type='GELU')``. + in_patch_size (int): The patch size of input image patch embedding. + Defaults to 7. + in_stride (int): The stride of input image patch embedding. + Defaults to 4. + in_pad (int): The padding of input image patch embedding. + Defaults to 2. + down_patch_size (int): The patch size of downsampling patch embedding. + Defaults to 3. + down_stride (int): The stride of downsampling patch embedding. + Defaults to 2. + down_pad (int): The padding of downsampling patch embedding. + Defaults to 1. + drop_rate (float): Dropout rate. Defaults to 0. + drop_path_rate (float): Stochastic depth rate. Defaults to 0. + out_indices (Sequence | int): Output from which network position. + Index 0-6 respectively corresponds to + [stage1, downsampling, stage2, downsampling, stage3, downsampling, stage4] + Defaults to -1, means the last stage. + frozen_stages (int): Stages to be frozen (all param fixed). + Defaults to 0, which means not freezing any parameters. + init_cfg (dict, optional): Initialization config dict + """ # noqa: E501 + + # --layers: [x,x,x,x], numbers of layers for the four stages + # --embed_dims, --mlp_ratios: + # embedding dims and mlp ratios for the four stages + # --downsamples: flags to apply downsampling or not in four blocks + arch_settings = { + 's12': { + 'layers': [2, 2, 6, 2], + 'embed_dims': [64, 128, 320, 512], + 'mlp_ratios': [4, 4, 4, 4], + 'layer_scale_init_value': 1e-5, + }, + 's24': { + 'layers': [4, 4, 12, 4], + 'embed_dims': [64, 128, 320, 512], + 'mlp_ratios': [4, 4, 4, 4], + 'layer_scale_init_value': 1e-5, + }, + 's36': { + 'layers': [6, 6, 18, 6], + 'embed_dims': [64, 128, 320, 512], + 'mlp_ratios': [4, 4, 4, 4], + 'layer_scale_init_value': 1e-6, + }, + 'm36': { + 'layers': [6, 6, 18, 6], + 'embed_dims': [96, 192, 384, 768], + 'mlp_ratios': [4, 4, 4, 4], + 'layer_scale_init_value': 1e-6, + }, + 'm48': { + 'layers': [8, 8, 24, 8], + 'embed_dims': [96, 192, 384, 768], + 'mlp_ratios': [4, 4, 4, 4], + 'layer_scale_init_value': 1e-6, + }, + } + + def __init__(self, + arch='s12', + pool_size=3, + norm_cfg=dict(type='GN', num_groups=1), + act_cfg=dict(type='GELU'), + in_patch_size=7, + in_stride=4, + in_pad=2, + down_patch_size=3, + down_stride=2, + down_pad=1, + drop_rate=0., + drop_path_rate=0., + out_indices=-1, + frozen_stages=0, + init_cfg=None): + + super().__init__(init_cfg=init_cfg) + + if isinstance(arch, str): + assert arch in self.arch_settings, \ + f'Unavailable arch, please choose from ' \ + f'({set(self.arch_settings)}) or pass a dict.' + arch = self.arch_settings[arch] + elif isinstance(arch, dict): + assert 'layers' in arch and 'embed_dims' in arch, \ + f'The arch dict must have "layers" and "embed_dims", ' \ + f'but got {list(arch.keys())}.' 
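The arch-dict branch validated above can be exercised directly. An illustrative sketch only, not part of the patch; the import path is assumed and the values mirror the s12 preset, with mlp_ratios and layer_scale_init_value falling back to their defaults:

    import torch
    from mmpretrain.models.backbones import PoolFormer  # import path assumed

    model = PoolFormer(
        arch=dict(layers=[2, 2, 6, 2], embed_dims=[64, 128, 320, 512]),
        out_indices=(0, 2, 4, 6))  # the four stages sit at even network indices
    feats = model(torch.rand(1, 3, 224, 224))
    print([tuple(f.shape) for f in feats])
    # Expected roughly: (1, 64, 56, 56), (1, 128, 28, 28),
    # (1, 320, 14, 14), (1, 512, 7, 7).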
+ + layers = arch['layers'] + embed_dims = arch['embed_dims'] + mlp_ratios = arch['mlp_ratios'] \ + if 'mlp_ratios' in arch else [4, 4, 4, 4] + layer_scale_init_value = arch['layer_scale_init_value'] \ + if 'layer_scale_init_value' in arch else 1e-5 + + self.patch_embed = PatchEmbed( + patch_size=in_patch_size, + stride=in_stride, + padding=in_pad, + in_chans=3, + embed_dim=embed_dims[0]) + + # set the main block in network + network = [] + for i in range(len(layers)): + stage = basic_blocks( + embed_dims[i], + i, + layers, + pool_size=pool_size, + mlp_ratio=mlp_ratios[i], + norm_cfg=norm_cfg, + act_cfg=act_cfg, + drop_rate=drop_rate, + drop_path_rate=drop_path_rate, + layer_scale_init_value=layer_scale_init_value) + network.append(stage) + if i >= len(layers) - 1: + break + if embed_dims[i] != embed_dims[i + 1]: + # downsampling between two stages + network.append( + PatchEmbed( + patch_size=down_patch_size, + stride=down_stride, + padding=down_pad, + in_chans=embed_dims[i], + embed_dim=embed_dims[i + 1])) + + self.network = nn.ModuleList(network) + + if isinstance(out_indices, int): + out_indices = [out_indices] + assert isinstance(out_indices, Sequence), \ + f'"out_indices" must by a sequence or int, ' \ + f'get {type(out_indices)} instead.' + for i, index in enumerate(out_indices): + if index < 0: + out_indices[i] = 7 + index + assert out_indices[i] >= 0, f'Invalid out_indices {index}' + self.out_indices = out_indices + if self.out_indices: + for i_layer in self.out_indices: + layer = build_norm_layer(norm_cfg, + embed_dims[(i_layer + 1) // 2])[1] + layer_name = f'norm{i_layer}' + self.add_module(layer_name, layer) + + self.frozen_stages = frozen_stages + self._freeze_stages() + + def forward_embeddings(self, x): + x = self.patch_embed(x) + return x + + def forward_tokens(self, x): + outs = [] + for idx, block in enumerate(self.network): + x = block(x) + if idx in self.out_indices: + norm_layer = getattr(self, f'norm{idx}') + x_out = norm_layer(x) + outs.append(x_out) + return tuple(outs) + + def forward(self, x): + # input embedding + x = self.forward_embeddings(x) + # through backbone + x = self.forward_tokens(x) + return x + + def _freeze_stages(self): + if self.frozen_stages >= 0: + self.patch_embed.eval() + for param in self.patch_embed.parameters(): + param.requires_grad = False + + for i in range(self.frozen_stages): + # Include both block and downsample layer. + module = self.network[i] + module.eval() + for param in module.parameters(): + param.requires_grad = False + if i in self.out_indices: + norm_layer = getattr(self, f'norm{i}') + norm_layer.eval() + for param in norm_layer.parameters(): + param.requires_grad = False + + def train(self, mode=True): + super(PoolFormer, self).train(mode) + self._freeze_stages() diff --git a/mmpretrain/models/backbones/regnet.py b/mmpretrain/models/backbones/regnet.py new file mode 100644 index 0000000000000000000000000000000000000000..85dbdef0bfeb607ecddff1d68d1cf405b61bea65 --- /dev/null +++ b/mmpretrain/models/backbones/regnet.py @@ -0,0 +1,312 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import numpy as np +import torch.nn as nn +from mmcv.cnn import build_conv_layer, build_norm_layer + +from mmpretrain.registry import MODELS +from .resnet import ResNet +from .resnext import Bottleneck + + +@MODELS.register_module() +class RegNet(ResNet): + """RegNet backbone. + + More details can be found in `paper `_ . + + Args: + arch (dict): The parameter of RegNets. 
+ - w0 (int): initial width + - wa (float): slope of width + - wm (float): quantization parameter to quantize the width + - depth (int): depth of the backbone + - group_w (int): width of group + - bot_mul (float): bottleneck ratio, i.e. expansion of bottleneck. + strides (Sequence[int]): Strides of the first block of each stage. + base_channels (int): Base channels after stem layer. + in_channels (int): Number of input image channels. Default: 3. + dilations (Sequence[int]): Dilation of each stage. + out_indices (Sequence[int]): Output from which stages. + style (str): `pytorch` or `caffe`. If set to "pytorch", the stride-two + layer is the 3x3 conv layer, otherwise the stride-two layer is + the first 1x1 conv layer. Default: "pytorch". + frozen_stages (int): Stages to be frozen (all param fixed). -1 means + not freezing any parameters. Default: -1. + norm_cfg (dict): dictionary to construct and config norm layer. + Default: dict(type='BN', requires_grad=True). + norm_eval (bool): Whether to set norm layers to eval mode, namely, + freeze running stats (mean and var). Note: Effect on Batch Norm + and its variants only. Default: False. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. Default: False. + zero_init_residual (bool): whether to use zero init for last norm layer + in resblocks to let them behave as identity. Default: True. + + Example: + >>> from mmpretrain.models import RegNet + >>> import torch + >>> self = RegNet( + arch=dict( + w0=88, + wa=26.31, + wm=2.25, + group_w=48, + depth=25, + bot_mul=1.0)) + >>> self.eval() + >>> inputs = torch.rand(1, 3, 32, 32) + >>> level_outputs = self.forward(inputs) + >>> for level_out in level_outputs: + ... print(tuple(level_out.shape)) + (1, 96, 8, 8) + (1, 192, 4, 4) + (1, 432, 2, 2) + (1, 1008, 1, 1) + """ + arch_settings = { + 'regnetx_400mf': + dict(w0=24, wa=24.48, wm=2.54, group_w=16, depth=22, bot_mul=1.0), + 'regnetx_800mf': + dict(w0=56, wa=35.73, wm=2.28, group_w=16, depth=16, bot_mul=1.0), + 'regnetx_1.6gf': + dict(w0=80, wa=34.01, wm=2.25, group_w=24, depth=18, bot_mul=1.0), + 'regnetx_3.2gf': + dict(w0=88, wa=26.31, wm=2.25, group_w=48, depth=25, bot_mul=1.0), + 'regnetx_4.0gf': + dict(w0=96, wa=38.65, wm=2.43, group_w=40, depth=23, bot_mul=1.0), + 'regnetx_6.4gf': + dict(w0=184, wa=60.83, wm=2.07, group_w=56, depth=17, bot_mul=1.0), + 'regnetx_8.0gf': + dict(w0=80, wa=49.56, wm=2.88, group_w=120, depth=23, bot_mul=1.0), + 'regnetx_12gf': + dict(w0=168, wa=73.36, wm=2.37, group_w=112, depth=19, bot_mul=1.0), + } + + def __init__(self, + arch, + in_channels=3, + stem_channels=32, + base_channels=32, + strides=(2, 2, 2, 2), + dilations=(1, 1, 1, 1), + out_indices=(3, ), + style='pytorch', + deep_stem=False, + avg_down=False, + frozen_stages=-1, + conv_cfg=None, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=False, + with_cp=False, + zero_init_residual=True, + init_cfg=None): + super(ResNet, self).__init__(init_cfg) + + # Generate RegNet parameters first + if isinstance(arch, str): + assert arch in self.arch_settings, \ + f'"arch": "{arch}" is not one of the' \ + ' arch_settings' + arch = self.arch_settings[arch] + elif not isinstance(arch, dict): + raise TypeError('Expect "arch" to be either a string ' + f'or a dict, got {type(arch)}') + + widths, num_stages = self.generate_regnet( + arch['w0'], + arch['wa'], + arch['wm'], + arch['depth'], + ) + # Convert to per stage format + stage_widths, stage_blocks = self.get_stages_from_blocks(widths) + # 
Generate group widths and bot muls + group_widths = [arch['group_w'] for _ in range(num_stages)] + self.bottleneck_ratio = [arch['bot_mul'] for _ in range(num_stages)] + # Adjust the compatibility of stage_widths and group_widths + stage_widths, group_widths = self.adjust_width_group( + stage_widths, self.bottleneck_ratio, group_widths) + + # Group params by stage + self.stage_widths = stage_widths + self.group_widths = group_widths + self.depth = sum(stage_blocks) + self.stem_channels = stem_channels + self.base_channels = base_channels + self.num_stages = num_stages + assert num_stages >= 1 and num_stages <= 4 + self.strides = strides + self.dilations = dilations + assert len(strides) == len(dilations) == num_stages + self.out_indices = out_indices + assert max(out_indices) < num_stages + self.style = style + self.deep_stem = deep_stem + if self.deep_stem: + raise NotImplementedError( + 'deep_stem has not been implemented for RegNet') + self.avg_down = avg_down + self.frozen_stages = frozen_stages + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.with_cp = with_cp + self.norm_eval = norm_eval + self.zero_init_residual = zero_init_residual + self.stage_blocks = stage_blocks[:num_stages] + + self._make_stem_layer(in_channels, stem_channels) + + _in_channels = stem_channels + self.res_layers = [] + for i, num_blocks in enumerate(self.stage_blocks): + stride = self.strides[i] + dilation = self.dilations[i] + group_width = self.group_widths[i] + width = int(round(self.stage_widths[i] * self.bottleneck_ratio[i])) + stage_groups = width // group_width + + res_layer = self.make_res_layer( + block=Bottleneck, + num_blocks=num_blocks, + in_channels=_in_channels, + out_channels=self.stage_widths[i], + expansion=1, + stride=stride, + dilation=dilation, + style=self.style, + avg_down=self.avg_down, + with_cp=self.with_cp, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + base_channels=self.stage_widths[i], + groups=stage_groups, + width_per_group=group_width) + _in_channels = self.stage_widths[i] + layer_name = f'layer{i + 1}' + self.add_module(layer_name, res_layer) + self.res_layers.append(layer_name) + + self._freeze_stages() + + self.feat_dim = stage_widths[-1] + + def _make_stem_layer(self, in_channels, base_channels): + self.conv1 = build_conv_layer( + self.conv_cfg, + in_channels, + base_channels, + kernel_size=3, + stride=2, + padding=1, + bias=False) + self.norm1_name, norm1 = build_norm_layer( + self.norm_cfg, base_channels, postfix=1) + self.add_module(self.norm1_name, norm1) + self.relu = nn.ReLU(inplace=True) + + def generate_regnet(self, + initial_width, + width_slope, + width_parameter, + depth, + divisor=8): + """Generates per block width from RegNet parameters. + + Args: + initial_width ([int]): Initial width of the backbone + width_slope ([float]): Slope of the quantized linear function + width_parameter ([int]): Parameter used to quantize the width. + depth ([int]): Depth of the backbone. + divisor (int): The divisor of channels. Defaults to 8. + + Returns: + tuple: tuple containing: + - list: Widths of each stage. + - int: The number of stages. 
+ """ + assert width_slope >= 0 + assert initial_width > 0 + assert width_parameter > 1 + assert initial_width % divisor == 0 + widths_cont = np.arange(depth) * width_slope + initial_width + ks = np.round( + np.log(widths_cont / initial_width) / np.log(width_parameter)) + widths = initial_width * np.power(width_parameter, ks) + widths = np.round(np.divide(widths, divisor)) * divisor + num_stages = len(np.unique(widths)) + widths, widths_cont = widths.astype(int).tolist(), widths_cont.tolist() + return widths, num_stages + + @staticmethod + def quantize_float(number, divisor): + """Converts a float to closest non-zero int divisible by divior. + + Args: + number (int): Original number to be quantized. + divisor (int): Divisor used to quantize the number. + + Returns: + int: quantized number that is divisible by devisor. + """ + return int(round(number / divisor) * divisor) + + def adjust_width_group(self, widths, bottleneck_ratio, groups): + """Adjusts the compatibility of widths and groups. + + Args: + widths (list[int]): Width of each stage. + bottleneck_ratio (float): Bottleneck ratio. + groups (int): number of groups in each stage + + Returns: + tuple(list): The adjusted widths and groups of each stage. + """ + bottleneck_width = [ + int(w * b) for w, b in zip(widths, bottleneck_ratio) + ] + groups = [min(g, w_bot) for g, w_bot in zip(groups, bottleneck_width)] + bottleneck_width = [ + self.quantize_float(w_bot, g) + for w_bot, g in zip(bottleneck_width, groups) + ] + widths = [ + int(w_bot / b) + for w_bot, b in zip(bottleneck_width, bottleneck_ratio) + ] + return widths, groups + + def get_stages_from_blocks(self, widths): + """Gets widths/stage_blocks of network at each stage. + + Args: + widths (list[int]): Width in each stage. + + Returns: + tuple(list): width and depth of each stage + """ + width_diff = [ + width != width_prev + for width, width_prev in zip(widths + [0], [0] + widths) + ] + stage_widths = [ + width for width, diff in zip(widths, width_diff[:-1]) if diff + ] + stage_blocks = np.diff([ + depth for depth, diff in zip(range(len(width_diff)), width_diff) + if diff + ]).tolist() + return stage_widths, stage_blocks + + def forward(self, x): + x = self.conv1(x) + x = self.norm1(x) + x = self.relu(x) + + outs = [] + for i, layer_name in enumerate(self.res_layers): + res_layer = getattr(self, layer_name) + x = res_layer(x) + if i in self.out_indices: + outs.append(x) + + return tuple(outs) diff --git a/mmpretrain/models/backbones/replknet.py b/mmpretrain/models/backbones/replknet.py new file mode 100644 index 0000000000000000000000000000000000000000..4dce4154fbe1d95806eec118b69ff70f0d74c1c6 --- /dev/null +++ b/mmpretrain/models/backbones/replknet.py @@ -0,0 +1,668 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +import torch.nn as nn +import torch.utils.checkpoint as checkpoint +from mmcv.cnn import build_activation_layer, build_norm_layer +from mmcv.cnn.bricks import DropPath +from mmengine.model import BaseModule +from mmengine.utils.dl_utils.parrots_wrapper import _BatchNorm + +from mmpretrain.registry import MODELS +from .base_backbone import BaseBackbone + + +def conv_bn(in_channels, + out_channels, + kernel_size, + stride, + padding, + groups, + dilation=1, + norm_cfg=dict(type='BN')): + """Construct a sequential conv and bn. + + Args: + in_channels (int): Dimension of input features. + out_channels (int): Dimension of output features. + kernel_size (int): kernel_size of the convolution. + stride (int): stride of the convolution. 
+ padding (int): stride of the convolution. + groups (int): groups of the convolution. + dilation (int): dilation of the convolution. Default to 1. + norm_cfg (dict): dictionary to construct and config norm layer. + Default to ``dict(type='BN', requires_grad=True)``. + + Returns: + nn.Sequential(): A conv layer and a batch norm layer. + """ + if padding is None: + padding = kernel_size // 2 + result = nn.Sequential() + result.add_module( + 'conv', + nn.Conv2d( + in_channels=in_channels, + out_channels=out_channels, + kernel_size=kernel_size, + stride=stride, + padding=padding, + dilation=dilation, + groups=groups, + bias=False)) + result.add_module('bn', build_norm_layer(norm_cfg, out_channels)[1]) + return result + + +def conv_bn_relu(in_channels, + out_channels, + kernel_size, + stride, + padding, + groups, + dilation=1): + """Construct a sequential conv, bn and relu. + + Args: + in_channels (int): Dimension of input features. + out_channels (int): Dimension of output features. + kernel_size (int): kernel_size of the convolution. + stride (int): stride of the convolution. + padding (int): stride of the convolution. + groups (int): groups of the convolution. + dilation (int): dilation of the convolution. Default to 1. + + Returns: + nn.Sequential(): A conv layer, batch norm layer and a relu function. + """ + + if padding is None: + padding = kernel_size // 2 + result = conv_bn( + in_channels=in_channels, + out_channels=out_channels, + kernel_size=kernel_size, + stride=stride, + padding=padding, + groups=groups, + dilation=dilation) + result.add_module('nonlinear', nn.ReLU()) + return result + + +def fuse_bn(conv, bn): + """Fuse the parameters in a branch with a conv and bn. + + Args: + conv (nn.Conv2d): The convolution module to fuse. + bn (nn.BatchNorm2d): The batch normalization to fuse. + + Returns: + tuple[torch.Tensor, torch.Tensor]: The parameters obtained after + fusing the parameters of conv and bn in one branch. + The first element is the weight and the second is the bias. + """ + kernel = conv.weight + running_mean = bn.running_mean + running_var = bn.running_var + gamma = bn.weight + beta = bn.bias + eps = bn.eps + std = (running_var + eps).sqrt() + t = (gamma / std).reshape(-1, 1, 1, 1) + return kernel * t, beta - running_mean * gamma / std + + +class ReparamLargeKernelConv(BaseModule): + """Super large kernel implemented by with large convolutions. + + Input: Tensor with shape [B, C, H, W]. + Output: Tensor with shape [B, C, H, W]. + + Args: + in_channels (int): Dimension of input features. + out_channels (int): Dimension of output features. + kernel_size (int): kernel_size of the large convolution. + stride (int): stride of the large convolution. + groups (int): groups of the large convolution. + small_kernel (int): kernel_size of the small convolution. + small_kernel_merged (bool): Whether to switch the model structure to + deployment mode (merge the small kernel to the large kernel). + Default to False. + init_cfg (dict or list[dict], optional): Initialization config dict. + Defaults to None + """ + + def __init__(self, + in_channels, + out_channels, + kernel_size, + stride, + groups, + small_kernel, + small_kernel_merged=False, + init_cfg=None): + super(ReparamLargeKernelConv, self).__init__(init_cfg) + self.kernel_size = kernel_size + self.small_kernel = small_kernel + self.small_kernel_merged = small_kernel_merged + # We assume the conv does not change the feature map size, + # so padding = k//2. 
+ # Otherwise, you may configure padding as you wish, + # and change the padding of small_conv accordingly. + padding = kernel_size // 2 + if small_kernel_merged: + self.lkb_reparam = nn.Conv2d( + in_channels=in_channels, + out_channels=out_channels, + kernel_size=kernel_size, + stride=stride, + padding=padding, + dilation=1, + groups=groups, + bias=True) + else: + self.lkb_origin = conv_bn( + in_channels=in_channels, + out_channels=out_channels, + kernel_size=kernel_size, + stride=stride, + padding=padding, + dilation=1, + groups=groups) + if small_kernel is not None: + assert small_kernel <= kernel_size + self.small_conv = conv_bn( + in_channels=in_channels, + out_channels=out_channels, + kernel_size=small_kernel, + stride=stride, + padding=small_kernel // 2, + groups=groups, + dilation=1) + + def forward(self, inputs): + if hasattr(self, 'lkb_reparam'): + out = self.lkb_reparam(inputs) + else: + out = self.lkb_origin(inputs) + if hasattr(self, 'small_conv'): + out += self.small_conv(inputs) + return out + + def get_equivalent_kernel_bias(self): + eq_k, eq_b = fuse_bn(self.lkb_origin.conv, self.lkb_origin.bn) + if hasattr(self, 'small_conv'): + small_k, small_b = fuse_bn(self.small_conv.conv, + self.small_conv.bn) + eq_b += small_b + # add to the central part + eq_k += nn.functional.pad( + small_k, [(self.kernel_size - self.small_kernel) // 2] * 4) + return eq_k, eq_b + + def merge_kernel(self): + """Switch the model structure from training mode to deployment mode.""" + if self.small_kernel_merged: + return + eq_k, eq_b = self.get_equivalent_kernel_bias() + self.lkb_reparam = nn.Conv2d( + in_channels=self.lkb_origin.conv.in_channels, + out_channels=self.lkb_origin.conv.out_channels, + kernel_size=self.lkb_origin.conv.kernel_size, + stride=self.lkb_origin.conv.stride, + padding=self.lkb_origin.conv.padding, + dilation=self.lkb_origin.conv.dilation, + groups=self.lkb_origin.conv.groups, + bias=True) + + self.lkb_reparam.weight.data = eq_k + self.lkb_reparam.bias.data = eq_b + self.__delattr__('lkb_origin') + if hasattr(self, 'small_conv'): + self.__delattr__('small_conv') + + self.small_kernel_merged = True + + +class ConvFFN(BaseModule): + """Mlp implemented by with 1*1 convolutions. + + Input: Tensor with shape [B, C, H, W]. + Output: Tensor with shape [B, C, H, W]. + + Args: + in_channels (int): Dimension of input features. + internal_channels (int): Dimension of hidden features. + out_channels (int): Dimension of output features. + drop_path (float): Stochastic depth rate. Defaults to 0. + norm_cfg (dict): dictionary to construct and config norm layer. + Default to ``dict(type='BN', requires_grad=True)``. + act_cfg (dict): The config dict for activation between pointwise + convolution. Defaults to ``dict(type='GELU')``. + init_cfg (dict or list[dict], optional): Initialization config dict. + Defaults to None. + """ + + def __init__(self, + in_channels, + internal_channels, + out_channels, + drop_path, + norm_cfg=dict(type='BN'), + act_cfg=dict(type='GELU'), + init_cfg=None): + super(ConvFFN, self).__init__(init_cfg) + self.drop_path = DropPath( + drop_prob=drop_path) if drop_path > 0. 
else nn.Identity() + self.preffn_bn = build_norm_layer(norm_cfg, in_channels)[1] + self.pw1 = conv_bn( + in_channels=in_channels, + out_channels=internal_channels, + kernel_size=1, + stride=1, + padding=0, + groups=1) + self.pw2 = conv_bn( + in_channels=internal_channels, + out_channels=out_channels, + kernel_size=1, + stride=1, + padding=0, + groups=1) + self.nonlinear = build_activation_layer(act_cfg) + + def forward(self, x): + out = self.preffn_bn(x) + out = self.pw1(out) + out = self.nonlinear(out) + out = self.pw2(out) + return x + self.drop_path(out) + + +class RepLKBlock(BaseModule): + """RepLKBlock for RepLKNet backbone. + + Args: + in_channels (int): The input channels of the block. + dw_channels (int): The intermediate channels of the block, + i.e., input channels of the large kernel convolution. + block_lk_size (int): size of the super large kernel. Defaults: 31. + small_kernel (int): size of the parallel small kernel. Defaults: 5. + drop_path (float): Stochastic depth rate. Defaults: 0. + small_kernel_merged (bool): Whether to switch the model structure to + deployment mode (merge the small kernel to the large kernel). + Default to False. + norm_cfg (dict): dictionary to construct and config norm layer. + Default to ``dict(type='BN', requires_grad=True)``. + act_cfg (dict): Config dict for activation layer. + Default to ``dict(type='ReLU')``. + init_cfg (dict or list[dict], optional): Initialization config dict. + Default to None + """ + + def __init__(self, + in_channels, + dw_channels, + block_lk_size, + small_kernel, + drop_path, + small_kernel_merged=False, + norm_cfg=dict(type='BN'), + act_cfg=dict(type='ReLU'), + init_cfg=None): + super(RepLKBlock, self).__init__(init_cfg) + self.pw1 = conv_bn_relu(in_channels, dw_channels, 1, 1, 0, groups=1) + self.pw2 = conv_bn(dw_channels, in_channels, 1, 1, 0, groups=1) + self.large_kernel = ReparamLargeKernelConv( + in_channels=dw_channels, + out_channels=dw_channels, + kernel_size=block_lk_size, + stride=1, + groups=dw_channels, + small_kernel=small_kernel, + small_kernel_merged=small_kernel_merged) + self.lk_nonlinear = build_activation_layer(act_cfg) + self.prelkb_bn = build_norm_layer(norm_cfg, in_channels)[1] + self.drop_path = DropPath( + drop_prob=drop_path) if drop_path > 0. else nn.Identity() + # print('drop path:', self.drop_path) + + def forward(self, x): + out = self.prelkb_bn(x) + out = self.pw1(out) + out = self.large_kernel(out) + out = self.lk_nonlinear(out) + out = self.pw2(out) + return x + self.drop_path(out) + + +class RepLKNetStage(BaseModule): + """ + generate RepLKNet blocks for a stage + return: RepLKNet blocks + + Args: + channels (int): The input channels of the stage. + num_blocks (int): The number of blocks of the stage. + stage_lk_size (int): size of the super large kernel. Defaults: 31. + drop_path (float): Stochastic depth rate. Defaults: 0. + small_kernel (int): size of the parallel small kernel. Defaults: 5. + dw_ratio (float): The intermediate channels + expansion ratio of the block. Defaults: 1. + ffn_ratio (float): Mlp expansion ratio. Defaults to 4. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. Default to False. + small_kernel_merged (bool): Whether to switch the model structure to + deployment mode (merge the small kernel to the large kernel). + Default to False. + norm_intermediate_features (bool): Construct and config norm layer + or not. 
+ Using True will normalize the intermediate features for + downstream dense prediction tasks. + norm_cfg (dict): dictionary to construct and config norm layer. + Default to ``dict(type='BN', requires_grad=True)``. + init_cfg (dict or list[dict], optional): Initialization config dict. + Default to None + """ + + def __init__( + self, + channels, + num_blocks, + stage_lk_size, + drop_path, + small_kernel, + dw_ratio=1, + ffn_ratio=4, + with_cp=False, # train with torch.utils.checkpoint to save memory + small_kernel_merged=False, + norm_intermediate_features=False, + norm_cfg=dict(type='BN'), + init_cfg=None): + super(RepLKNetStage, self).__init__(init_cfg) + self.with_cp = with_cp + blks = [] + for i in range(num_blocks): + block_drop_path = drop_path[i] if isinstance(drop_path, + list) else drop_path + # Assume all RepLK Blocks within a stage share the same lk_size. + # You may tune it on your own model. + replk_block = RepLKBlock( + in_channels=channels, + dw_channels=int(channels * dw_ratio), + block_lk_size=stage_lk_size, + small_kernel=small_kernel, + drop_path=block_drop_path, + small_kernel_merged=small_kernel_merged) + convffn_block = ConvFFN( + in_channels=channels, + internal_channels=int(channels * ffn_ratio), + out_channels=channels, + drop_path=block_drop_path) + blks.append(replk_block) + blks.append(convffn_block) + self.blocks = nn.ModuleList(blks) + if norm_intermediate_features: + self.norm = build_norm_layer(norm_cfg, channels)[1] + else: + self.norm = nn.Identity() + + def forward(self, x): + for blk in self.blocks: + if self.with_cp: + x = checkpoint.checkpoint(blk, x) # Save training memory + else: + x = blk(x) + return x + + +@MODELS.register_module() +class RepLKNet(BaseBackbone): + """RepLKNet backbone. + + A PyTorch impl of : + `Scaling Up Your Kernels to 31x31: Revisiting Large Kernel Design in CNNs + `_ + + Args: + arch (str | dict): The parameter of RepLKNet. + If it's a dict, it should contain the following keys: + + - large_kernel_sizes (Sequence[int]): + Large kernel size in each stage. + - layers (Sequence[int]): Number of blocks in each stage. + - channels (Sequence[int]): Number of channels in each stage. + - small_kernel (int): size of the parallel small kernel. + - dw_ratio (float): The intermediate channels + expansion ratio of the block. + in_channels (int): Number of input image channels. Default to 3. + ffn_ratio (float): Mlp expansion ratio. Defaults to 4. + out_indices (Sequence[int]): Output from which stages. + Default to (3, ). + strides (Sequence[int]): Strides of the first block of each stage. + Default to (2, 2, 2, 2). + dilations (Sequence[int]): Dilation of each stage. + Default to (1, 1, 1, 1). + frozen_stages (int): Stages to be frozen + (all param fixed). -1 means not freezing any parameters. + Default to -1. + conv_cfg (dict | None): The config dict for conv layers. + Default to None. + norm_cfg (dict): The config dict for norm layers. + Default to ``dict(type='BN')``. + act_cfg (dict): Config dict for activation layer. + Default to ``dict(type='ReLU')``. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. Default to False. + deploy (bool): Whether to switch the model structure to deployment + mode. Default to False. + norm_intermediate_features (bool): Construct and + config norm layer or not. + Using True will normalize the intermediate features + for downstream dense prediction tasks. 
+ norm_eval (bool): Whether to set norm layers to eval mode, namely, + freeze running stats (mean and var). Note: Effect on Batch Norm + and its variants only. Default to False. + init_cfg (dict or list[dict], optional): Initialization config dict. + """ + + arch_settings = { + '31B': + dict( + large_kernel_sizes=[31, 29, 27, 13], + layers=[2, 2, 18, 2], + channels=[128, 256, 512, 1024], + small_kernel=5, + dw_ratio=1), + '31L': + dict( + large_kernel_sizes=[31, 29, 27, 13], + layers=[2, 2, 18, 2], + channels=[192, 384, 768, 1536], + small_kernel=5, + dw_ratio=1), + 'XL': + dict( + large_kernel_sizes=[27, 27, 27, 13], + layers=[2, 2, 18, 2], + channels=[256, 512, 1024, 2048], + small_kernel=None, + dw_ratio=1.5), + } + + def __init__(self, + arch, + in_channels=3, + ffn_ratio=4, + out_indices=(3, ), + strides=(2, 2, 2, 2), + dilations=(1, 1, 1, 1), + frozen_stages=-1, + conv_cfg=None, + norm_cfg=dict(type='BN'), + act_cfg=dict(type='ReLU'), + with_cp=False, + drop_path_rate=0.3, + small_kernel_merged=False, + norm_intermediate_features=False, + norm_eval=False, + init_cfg=[ + dict(type='Kaiming', layer=['Conv2d']), + dict( + type='Constant', + val=1, + layer=['_BatchNorm', 'GroupNorm']) + ]): + super(RepLKNet, self).__init__(init_cfg) + + if isinstance(arch, str): + assert arch in self.arch_settings, \ + f'"arch": "{arch}" is not one of the arch_settings' + arch = self.arch_settings[arch] + elif not isinstance(arch, dict): + raise TypeError('Expect "arch" to be either a string ' + f'or a dict, got {type(arch)}') + + assert len(arch['layers']) == len( + arch['channels']) == len(strides) == len(dilations) + assert max(out_indices) < len(arch['layers']) + + self.arch = arch + self.in_channels = in_channels + self.out_indices = out_indices + self.strides = strides + self.dilations = dilations + self.frozen_stages = frozen_stages + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.act_cfg = act_cfg + self.with_cp = with_cp + self.drop_path_rate = drop_path_rate + self.small_kernel_merged = small_kernel_merged + self.norm_eval = norm_eval + self.norm_intermediate_features = norm_intermediate_features + + self.out_indices = out_indices + + base_width = self.arch['channels'][0] + self.norm_intermediate_features = norm_intermediate_features + self.num_stages = len(self.arch['layers']) + self.stem = nn.ModuleList([ + conv_bn_relu( + in_channels=in_channels, + out_channels=base_width, + kernel_size=3, + stride=2, + padding=1, + groups=1), + conv_bn_relu( + in_channels=base_width, + out_channels=base_width, + kernel_size=3, + stride=1, + padding=1, + groups=base_width), + conv_bn_relu( + in_channels=base_width, + out_channels=base_width, + kernel_size=1, + stride=1, + padding=0, + groups=1), + conv_bn_relu( + in_channels=base_width, + out_channels=base_width, + kernel_size=3, + stride=2, + padding=1, + groups=base_width) + ]) + # stochastic depth. We set block-wise drop-path rate. + # The higher level blocks are more likely to be dropped. + # This implementation follows Swin. 
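+        # For example, with drop_path_rate=0.3 and layers=[2, 2, 18, 2]
+        # (24 blocks in total), block i receives drop probability 0.3 * i / 23,
+        # so blocks in deeper stages are skipped more often during training.
+        # Each RepLKBlock and the ConvFFN that follows it share the same rate.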
+ dpr = [ + x.item() for x in torch.linspace(0, drop_path_rate, + sum(self.arch['layers'])) + ] + self.stages = nn.ModuleList() + self.transitions = nn.ModuleList() + for stage_idx in range(self.num_stages): + layer = RepLKNetStage( + channels=self.arch['channels'][stage_idx], + num_blocks=self.arch['layers'][stage_idx], + stage_lk_size=self.arch['large_kernel_sizes'][stage_idx], + drop_path=dpr[sum(self.arch['layers'][:stage_idx] + ):sum(self.arch['layers'][:stage_idx + 1])], + small_kernel=self.arch['small_kernel'], + dw_ratio=self.arch['dw_ratio'], + ffn_ratio=ffn_ratio, + with_cp=with_cp, + small_kernel_merged=small_kernel_merged, + norm_intermediate_features=(stage_idx in out_indices)) + self.stages.append(layer) + if stage_idx < len(self.arch['layers']) - 1: + transition = nn.Sequential( + conv_bn_relu( + self.arch['channels'][stage_idx], + self.arch['channels'][stage_idx + 1], + 1, + 1, + 0, + groups=1), + conv_bn_relu( + self.arch['channels'][stage_idx + 1], + self.arch['channels'][stage_idx + 1], + 3, + stride=2, + padding=1, + groups=self.arch['channels'][stage_idx + 1])) + self.transitions.append(transition) + + def forward_features(self, x): + x = self.stem[0](x) + for stem_layer in self.stem[1:]: + if self.with_cp: + x = checkpoint.checkpoint(stem_layer, x) # save memory + else: + x = stem_layer(x) + + # Need the intermediate feature maps + outs = [] + for stage_idx in range(self.num_stages): + x = self.stages[stage_idx](x) + if stage_idx in self.out_indices: + outs.append(self.stages[stage_idx].norm(x)) + # For RepLKNet-XL normalize the features + # before feeding them into the heads + if stage_idx < self.num_stages - 1: + x = self.transitions[stage_idx](x) + return outs + + def forward(self, x): + x = self.forward_features(x) + return tuple(x) + + def _freeze_stages(self): + if self.frozen_stages >= 0: + self.stem.eval() + for param in self.stem.parameters(): + param.requires_grad = False + for i in range(self.frozen_stages): + stage = self.stages[i] + stage.eval() + for param in stage.parameters(): + param.requires_grad = False + + def train(self, mode=True): + super(RepLKNet, self).train(mode) + self._freeze_stages() + if mode and self.norm_eval: + for m in self.modules(): + if isinstance(m, _BatchNorm): + m.eval() + + def switch_to_deploy(self): + for m in self.modules(): + if hasattr(m, 'merge_kernel'): + m.merge_kernel() + self.small_kernel_merged = True diff --git a/mmpretrain/models/backbones/repmlp.py b/mmpretrain/models/backbones/repmlp.py new file mode 100644 index 0000000000000000000000000000000000000000..f7c06c4875710b33c57f2794c437034d93169b30 --- /dev/null +++ b/mmpretrain/models/backbones/repmlp.py @@ -0,0 +1,578 @@ +# Copyright (c) OpenMMLab. All rights reserved. +# Adapted from official impl at https://github.com/DingXiaoH/RepMLP. 
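Before the RepMLP implementation below, a minimal usage sketch for the RepLKNet backbone defined above (illustrative only, not part of this diff; it assumes RepLKNet is exported from mmpretrain.models.backbones as in upstream mmpretrain):

import torch
from mmpretrain.models.backbones import RepLKNet

# Build the multi-branch (training-time) model and run a forward pass.
model = RepLKNet(arch='31B', out_indices=(0, 1, 2, 3))
model.eval()
with torch.no_grad():
    feats = model(torch.rand(1, 3, 224, 224))
for feat in feats:
    print(tuple(feat.shape))  # feature maps at strides 4, 8, 16 and 32

# Merge each parallel small kernel into its large kernel for deployment.
model.switch_to_deploy()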
+import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import (ConvModule, build_activation_layer, build_conv_layer, + build_norm_layer) +from mmcv.cnn.bricks.transformer import PatchEmbed as _PatchEmbed +from mmengine.model import BaseModule, ModuleList, Sequential + +from mmpretrain.models.utils import SELayer, to_2tuple +from mmpretrain.registry import MODELS + + +def fuse_bn(conv_or_fc, bn): + """fuse conv and bn.""" + std = (bn.running_var + bn.eps).sqrt() + tmp_weight = bn.weight / std + tmp_weight = tmp_weight.reshape(-1, 1, 1, 1) + + if len(tmp_weight) == conv_or_fc.weight.size(0): + return (conv_or_fc.weight * tmp_weight, + bn.bias - bn.running_mean * bn.weight / std) + else: + # in RepMLPBlock, dim0 of fc3 weights and fc3_bn weights + # are different. + repeat_times = conv_or_fc.weight.size(0) // len(tmp_weight) + repeated = tmp_weight.repeat_interleave(repeat_times, 0) + fused_weight = conv_or_fc.weight * repeated + bias = bn.bias - bn.running_mean * bn.weight / std + fused_bias = (bias).repeat_interleave(repeat_times, 0) + return (fused_weight, fused_bias) + + +class PatchEmbed(_PatchEmbed): + """Image to Patch Embedding. + + Compared with default Patch Embedding(in ViT), Patch Embedding of RepMLP + have ReLu and do not convert output tensor into shape (N, L, C). + + Args: + in_channels (int): The num of input channels. Default: 3 + embed_dims (int): The dimensions of embedding. Default: 768 + conv_type (str): The type of convolution + to generate patch embedding. Default: "Conv2d". + kernel_size (int): The kernel_size of embedding conv. Default: 16. + stride (int): The slide stride of embedding conv. + Default: 16. + padding (int | tuple | string): The padding length of + embedding conv. When it is a string, it means the mode + of adaptive padding, support "same" and "corner" now. + Default: "corner". + dilation (int): The dilation rate of embedding conv. Default: 1. + bias (bool): Bias of embed conv. Default: True. + norm_cfg (dict, optional): Config dict for normalization layer. + Default: None. + input_size (int | tuple | None): The size of input, which will be + used to calculate the out size. Only works when `dynamic_size` + is False. Default: None. + init_cfg (`mmcv.ConfigDict`, optional): The Config for initialization. + Default: None. + """ + + def __init__(self, *args, **kwargs): + super(PatchEmbed, self).__init__(*args, **kwargs) + self.relu = nn.ReLU() + + def forward(self, x): + """ + Args: + x (Tensor): Has shape (B, C, H, W). In most case, C is 3. + Returns: + tuple: Contains merged results and its spatial shape. + - x (Tensor): The output tensor. + - out_size (tuple[int]): Spatial shape of x, arrange as + (out_h, out_w). + """ + + if self.adaptive_padding: + x = self.adaptive_padding(x) + + x = self.projection(x) + if self.norm is not None: + x = self.norm(x) + x = self.relu(x) + out_size = (x.shape[2], x.shape[3]) + return x, out_size + + +class GlobalPerceptron(SELayer): + """GlobalPerceptron implemented by using ``mmpretrain.modes.SELayer``. + + Args: + input_channels (int): The number of input (and output) channels + in the GlobalPerceptron. + ratio (int): Squeeze ratio in GlobalPerceptron, the intermediate + channel will be ``make_divisible(channels // ratio, divisor)``. 
+ """ + + def __init__(self, input_channels: int, ratio: int, **kwargs) -> None: + super(GlobalPerceptron, self).__init__( + channels=input_channels, + ratio=ratio, + return_weight=True, + act_cfg=(dict(type='ReLU'), dict(type='Sigmoid')), + **kwargs) + + +class RepMLPBlock(BaseModule): + """Basic RepMLPNet, consists of PartitionPerceptron and GlobalPerceptron. + + Args: + channels (int): The number of input and the output channels of the + block. + path_h (int): The height of patches. + path_w (int): The weidth of patches. + reparam_conv_kernels (Squeue(int) | None): The conv kernels in the + GlobalPerceptron. Default: None. + globalperceptron_ratio (int): The reducation ratio in the + GlobalPerceptron. Default: 4. + num_sharesets (int): The number of sharesets in the + PartitionPerceptron. Default 1. + conv_cfg (dict, optional): Config dict for convolution layer. + Default: None, which means using conv2d. + norm_cfg (dict): dictionary to construct and config norm layer. + Default: dict(type='BN', requires_grad=True). + deploy (bool): Whether to switch the model structure to + deployment mode. Default: False. + init_cfg (dict or list[dict], optional): Initialization config dict. + Default: None + """ + + def __init__(self, + channels, + path_h, + path_w, + reparam_conv_kernels=None, + globalperceptron_ratio=4, + num_sharesets=1, + conv_cfg=None, + norm_cfg=dict(type='BN', requires_grad=True), + deploy=False, + init_cfg=None): + super().__init__(init_cfg=init_cfg) + + self.deploy = deploy + self.channels = channels + self.num_sharesets = num_sharesets + self.path_h, self.path_w = path_h, path_w + # the input channel of fc3 + self._path_vec_channles = path_h * path_w * num_sharesets + + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + + self.gp = GlobalPerceptron( + input_channels=channels, ratio=globalperceptron_ratio) + + # using a conv layer to implement a fc layer + self.fc3 = build_conv_layer( + conv_cfg, + in_channels=self._path_vec_channles, + out_channels=self._path_vec_channles, + kernel_size=1, + stride=1, + padding=0, + bias=deploy, + groups=num_sharesets) + if deploy: + self.fc3_bn = nn.Identity() + else: + norm_layer = build_norm_layer(norm_cfg, num_sharesets)[1] + self.add_module('fc3_bn', norm_layer) + + self.reparam_conv_kernels = reparam_conv_kernels + if not deploy and reparam_conv_kernels is not None: + for k in reparam_conv_kernels: + conv_branch = ConvModule( + in_channels=num_sharesets, + out_channels=num_sharesets, + kernel_size=k, + stride=1, + padding=k // 2, + norm_cfg=dict(type='BN', requires_grad=True), + groups=num_sharesets, + act_cfg=None) + self.__setattr__('repconv{}'.format(k), conv_branch) + + def partition(self, x, h_parts, w_parts): + # convert (N, C, H, W) to (N, h_parts, w_parts, C, path_h, path_w) + x = x.reshape(-1, self.channels, h_parts, self.path_h, w_parts, + self.path_w) + x = x.permute(0, 2, 4, 1, 3, 5) + return x + + def partition_affine(self, x, h_parts, w_parts): + """perform Partition Perceptron.""" + fc_inputs = x.reshape(-1, self._path_vec_channles, 1, 1) + out = self.fc3(fc_inputs) + out = out.reshape(-1, self.num_sharesets, self.path_h, self.path_w) + out = self.fc3_bn(out) + out = out.reshape(-1, h_parts, w_parts, self.num_sharesets, + self.path_h, self.path_w) + return out + + def forward(self, inputs): + # Global Perceptron + global_vec = self.gp(inputs) + + origin_shape = inputs.size() + h_parts = origin_shape[2] // self.path_h + w_parts = origin_shape[3] // self.path_w + + partitions = self.partition(inputs, h_parts, w_parts) 
+ + # Channel Perceptron + fc3_out = self.partition_affine(partitions, h_parts, w_parts) + + # perform Local Perceptron + if self.reparam_conv_kernels is not None and not self.deploy: + conv_inputs = partitions.reshape(-1, self.num_sharesets, + self.path_h, self.path_w) + conv_out = 0 + for k in self.reparam_conv_kernels: + conv_branch = self.__getattr__('repconv{}'.format(k)) + conv_out += conv_branch(conv_inputs) + conv_out = conv_out.reshape(-1, h_parts, w_parts, + self.num_sharesets, self.path_h, + self.path_w) + fc3_out += conv_out + + # N, h_parts, w_parts, num_sharesets, out_h, out_w + fc3_out = fc3_out.permute(0, 3, 1, 4, 2, 5) + out = fc3_out.reshape(*origin_shape) + out = out * global_vec + return out + + def get_equivalent_fc3(self): + """get the equivalent fc3 weight and bias.""" + fc_weight, fc_bias = fuse_bn(self.fc3, self.fc3_bn) + if self.reparam_conv_kernels is not None: + largest_k = max(self.reparam_conv_kernels) + largest_branch = self.__getattr__('repconv{}'.format(largest_k)) + total_kernel, total_bias = fuse_bn(largest_branch.conv, + largest_branch.bn) + for k in self.reparam_conv_kernels: + if k != largest_k: + k_branch = self.__getattr__('repconv{}'.format(k)) + kernel, bias = fuse_bn(k_branch.conv, k_branch.bn) + total_kernel += F.pad(kernel, [(largest_k - k) // 2] * 4) + total_bias += bias + rep_weight, rep_bias = self._convert_conv_to_fc( + total_kernel, total_bias) + final_fc3_weight = rep_weight.reshape_as(fc_weight) + fc_weight + final_fc3_bias = rep_bias + fc_bias + else: + final_fc3_weight = fc_weight + final_fc3_bias = fc_bias + return final_fc3_weight, final_fc3_bias + + def local_inject(self): + """inject the Local Perceptron into Partition Perceptron.""" + self.deploy = True + # Locality Injection + fc3_weight, fc3_bias = self.get_equivalent_fc3() + # Remove Local Perceptron + if self.reparam_conv_kernels is not None: + for k in self.reparam_conv_kernels: + self.__delattr__('repconv{}'.format(k)) + self.__delattr__('fc3') + self.__delattr__('fc3_bn') + self.fc3 = build_conv_layer( + self.conv_cfg, + self._path_vec_channles, + self._path_vec_channles, + 1, + 1, + 0, + bias=True, + groups=self.num_sharesets) + self.fc3_bn = nn.Identity() + self.fc3.weight.data = fc3_weight + self.fc3.bias.data = fc3_bias + + def _convert_conv_to_fc(self, conv_kernel, conv_bias): + """convert conv_k1 to fc, which is still a conv_k2, and the k2 > k1.""" + in_channels = torch.eye(self.path_h * self.path_w).repeat( + 1, self.num_sharesets).reshape(self.path_h * self.path_w, + self.num_sharesets, self.path_h, + self.path_w).to(conv_kernel.device) + fc_k = F.conv2d( + in_channels, + conv_kernel, + padding=(conv_kernel.size(2) // 2, conv_kernel.size(3) // 2), + groups=self.num_sharesets) + fc_k = fc_k.reshape(self.path_w * self.path_w, self.num_sharesets * + self.path_h * self.path_w).t() + fc_bias = conv_bias.repeat_interleave(self.path_h * self.path_w) + return fc_k, fc_bias + + +class RepMLPNetUnit(BaseModule): + """A basic unit in RepMLPNet : [REPMLPBlock + BN + ConvFFN + BN]. + + Args: + channels (int): The number of input and the output channels of the + unit. + path_h (int): The height of patches. + path_w (int): The weidth of patches. + reparam_conv_kernels (Squeue(int) | None): The conv kernels in the + GlobalPerceptron. Default: None. + globalperceptron_ratio (int): The reducation ratio in the + GlobalPerceptron. Default: 4. + num_sharesets (int): The number of sharesets in the + PartitionPerceptron. Default 1. 
+ conv_cfg (dict, optional): Config dict for convolution layer. + Default: None, which means using conv2d. + norm_cfg (dict): dictionary to construct and config norm layer. + Default: dict(type='BN', requires_grad=True). + act_cfg (dict): Config dict for activation layer. + Default: dict(type='ReLU'). + deploy (bool): Whether to switch the model structure to + deployment mode. Default: False. + init_cfg (dict or list[dict], optional): Initialization config dict. + Default: None + """ + + def __init__(self, + channels, + path_h, + path_w, + reparam_conv_kernels, + globalperceptron_ratio, + norm_cfg=dict(type='BN', requires_grad=True), + ffn_expand=4, + num_sharesets=1, + deploy=False, + init_cfg=None): + super().__init__(init_cfg=init_cfg) + self.repmlp_block = RepMLPBlock( + channels=channels, + path_h=path_h, + path_w=path_w, + reparam_conv_kernels=reparam_conv_kernels, + globalperceptron_ratio=globalperceptron_ratio, + num_sharesets=num_sharesets, + deploy=deploy) + self.ffn_block = ConvFFN(channels, channels * ffn_expand) + norm1 = build_norm_layer(norm_cfg, channels)[1] + self.add_module('norm1', norm1) + norm2 = build_norm_layer(norm_cfg, channels)[1] + self.add_module('norm2', norm2) + + def forward(self, x): + y = x + self.repmlp_block(self.norm1(x)) + out = y + self.ffn_block(self.norm2(y)) + return out + + +class ConvFFN(nn.Module): + """ConvFFN implemented by using point-wise convs.""" + + def __init__(self, + in_channels, + hidden_channels=None, + out_channels=None, + norm_cfg=dict(type='BN', requires_grad=True), + act_cfg=dict(type='GELU')): + super().__init__() + out_features = out_channels or in_channels + hidden_features = hidden_channels or in_channels + self.ffn_fc1 = ConvModule( + in_channels=in_channels, + out_channels=hidden_features, + kernel_size=1, + stride=1, + padding=0, + norm_cfg=norm_cfg, + act_cfg=None) + self.ffn_fc2 = ConvModule( + in_channels=hidden_features, + out_channels=out_features, + kernel_size=1, + stride=1, + padding=0, + norm_cfg=norm_cfg, + act_cfg=None) + self.act = build_activation_layer(act_cfg) + + def forward(self, x): + x = self.ffn_fc1(x) + x = self.act(x) + x = self.ffn_fc2(x) + return x + + +@MODELS.register_module() +class RepMLPNet(BaseModule): + """RepMLPNet backbone. + + A PyTorch impl of : `RepMLP: Re-parameterizing Convolutions into + Fully-connected Layers for Image Recognition + `_ + + Args: + arch (str | dict): RepMLP architecture. If use string, choose + from 'base' and 'b'. If use dict, it should have below keys: + + - channels (List[int]): Number of blocks in each stage. + - depths (List[int]): The number of blocks in each branch. + - sharesets_nums (List[int]): RepVGG Block that declares + the need to apply group convolution. + + img_size (int | tuple): The size of input image. Defaults: 224. + in_channels (int): Number of input image channels. Default: 3. + patch_size (int | tuple): The patch size in patch embedding. + Defaults to 4. + out_indices (Sequence[int]): Output from which stages. + Default: ``(3, )``. + reparam_conv_kernels (Squeue(int) | None): The conv kernels in the + GlobalPerceptron. Default: None. + globalperceptron_ratio (int): The reducation ratio in the + GlobalPerceptron. Default: 4. + num_sharesets (int): The number of sharesets in the + PartitionPerceptron. Default 1. + conv_cfg (dict | None): The config dict for conv layers. Default: None. + norm_cfg (dict): The config dict for norm layers. + Default: dict(type='BN', requires_grad=True). + patch_cfg (dict): Extra config dict for patch embedding. 
+ Defaults to an empty dict. + final_norm (bool): Whether to add a additional layer to normalize + final feature map. Defaults to True. + act_cfg (dict): Config dict for activation layer. + Default: dict(type='ReLU'). + deploy (bool): Whether to switch the model structure to deployment + mode. Default: False. + init_cfg (dict or list[dict], optional): Initialization config dict. + """ + arch_zoo = { + **dict.fromkeys(['b', 'base'], + {'channels': [96, 192, 384, 768], + 'depths': [2, 2, 12, 2], + 'sharesets_nums': [1, 4, 32, 128]}), + } # yapf: disable + + num_extra_tokens = 0 # there is no cls-token in RepMLP + + def __init__(self, + arch, + img_size=224, + in_channels=3, + patch_size=4, + out_indices=(3, ), + reparam_conv_kernels=(3, ), + globalperceptron_ratio=4, + conv_cfg=None, + norm_cfg=dict(type='BN', requires_grad=True), + patch_cfg=dict(), + final_norm=True, + deploy=False, + init_cfg=None): + super(RepMLPNet, self).__init__(init_cfg=init_cfg) + if isinstance(arch, str): + arch = arch.lower() + assert arch in set(self.arch_zoo), \ + f'Arch {arch} is not in default archs {set(self.arch_zoo)}' + self.arch_settings = self.arch_zoo[arch] + else: + essential_keys = {'channels', 'depths', 'sharesets_nums'} + assert isinstance(arch, dict) and set(arch) == essential_keys, \ + f'Custom arch needs a dict with keys {essential_keys}.' + self.arch_settings = arch + + self.img_size = to_2tuple(img_size) + self.patch_size = to_2tuple(patch_size) + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + + self.num_stage = len(self.arch_settings['channels']) + for value in self.arch_settings.values(): + assert isinstance(value, list) and len(value) == self.num_stage, ( + 'Length of setting item in arch dict must be type of list and' + ' have the same length.') + + self.channels = self.arch_settings['channels'] + self.depths = self.arch_settings['depths'] + self.sharesets_nums = self.arch_settings['sharesets_nums'] + + _patch_cfg = dict( + in_channels=in_channels, + input_size=self.img_size, + embed_dims=self.channels[0], + conv_type='Conv2d', + kernel_size=self.patch_size, + stride=self.patch_size, + norm_cfg=self.norm_cfg, + bias=False) + _patch_cfg.update(patch_cfg) + self.patch_embed = PatchEmbed(**_patch_cfg) + self.patch_resolution = self.patch_embed.init_out_size + + self.patch_hs = [ + self.patch_resolution[0] // 2**i for i in range(self.num_stage) + ] + self.patch_ws = [ + self.patch_resolution[1] // 2**i for i in range(self.num_stage) + ] + + self.stages = ModuleList() + self.downsample_layers = ModuleList() + for stage_idx in range(self.num_stage): + # make stage layers + _stage_cfg = dict( + channels=self.channels[stage_idx], + path_h=self.patch_hs[stage_idx], + path_w=self.patch_ws[stage_idx], + reparam_conv_kernels=reparam_conv_kernels, + globalperceptron_ratio=globalperceptron_ratio, + norm_cfg=self.norm_cfg, + ffn_expand=4, + num_sharesets=self.sharesets_nums[stage_idx], + deploy=deploy) + stage_blocks = [ + RepMLPNetUnit(**_stage_cfg) + for _ in range(self.depths[stage_idx]) + ] + self.stages.append(Sequential(*stage_blocks)) + + # make downsample layers + if stage_idx < self.num_stage - 1: + self.downsample_layers.append( + ConvModule( + in_channels=self.channels[stage_idx], + out_channels=self.channels[stage_idx + 1], + kernel_size=2, + stride=2, + padding=0, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + inplace=True)) + + self.out_indice = out_indices + + if final_norm: + norm_layer = build_norm_layer(norm_cfg, self.channels[-1])[1] + else: + norm_layer = nn.Identity() + 
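+        # Note: forward() applies `final_norm` only to the last-stage feature
+        # map; features from earlier out_indices are returned as-is.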
self.add_module('final_norm', norm_layer) + + def forward(self, x): + assert x.shape[2:] == self.img_size, \ + "The Rep-MLP doesn't support dynamic input shape. " \ + f'Please input images with shape {self.img_size}' + + outs = [] + + x, _ = self.patch_embed(x) + for i, stage in enumerate(self.stages): + x = stage(x) + + # downsample after each stage except last stage + if i < len(self.stages) - 1: + downsample = self.downsample_layers[i] + x = downsample(x) + + if i in self.out_indice: + if self.final_norm and i == len(self.stages) - 1: + out = self.final_norm(x) + else: + out = x + outs.append(out) + + return tuple(outs) + + def switch_to_deploy(self): + for m in self.modules(): + if hasattr(m, 'local_inject'): + m.local_inject() diff --git a/mmpretrain/models/backbones/repvgg.py b/mmpretrain/models/backbones/repvgg.py new file mode 100644 index 0000000000000000000000000000000000000000..67c9d147546eb2839a44749040a1a787ee5ce0ea --- /dev/null +++ b/mmpretrain/models/backbones/repvgg.py @@ -0,0 +1,622 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +import torch.nn.functional as F +import torch.utils.checkpoint as cp +from mmcv.cnn import (ConvModule, build_activation_layer, build_conv_layer, + build_norm_layer) +from mmengine.model import BaseModule, Sequential +from mmengine.utils.dl_utils.parrots_wrapper import _BatchNorm +from torch import nn + +from mmpretrain.registry import MODELS +from ..utils.se_layer import SELayer +from .base_backbone import BaseBackbone + + +class RepVGGBlock(BaseModule): + """RepVGG block for RepVGG backbone. + + Args: + in_channels (int): The input channels of the block. + out_channels (int): The output channels of the block. + stride (int): Stride of the 3x3 and 1x1 convolution layer. Default: 1. + padding (int): Padding of the 3x3 convolution layer. + dilation (int): Dilation of the 3x3 convolution layer. + groups (int): Groups of the 3x3 and 1x1 convolution layer. Default: 1. + padding_mode (str): Padding mode of the 3x3 convolution layer. + Default: 'zeros'. + se_cfg (None or dict): The configuration of the se module. + Default: None. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. Default: False. + conv_cfg (dict, optional): Config dict for convolution layer. + Default: None, which means using conv2d. + norm_cfg (dict): dictionary to construct and config norm layer. + Default: dict(type='BN', requires_grad=True). + act_cfg (dict): Config dict for activation layer. + Default: dict(type='ReLU'). + deploy (bool): Whether to switch the model structure to + deployment mode. Default: False. + init_cfg (dict or list[dict], optional): Initialization config dict. 
+ Default: None + """ + + def __init__(self, + in_channels, + out_channels, + stride=1, + padding=1, + dilation=1, + groups=1, + padding_mode='zeros', + se_cfg=None, + with_cp=False, + conv_cfg=None, + norm_cfg=dict(type='BN'), + act_cfg=dict(type='ReLU'), + deploy=False, + init_cfg=None): + super(RepVGGBlock, self).__init__(init_cfg) + + assert se_cfg is None or isinstance(se_cfg, dict) + + self.in_channels = in_channels + self.out_channels = out_channels + self.stride = stride + self.padding = padding + self.dilation = dilation + self.groups = groups + self.se_cfg = se_cfg + self.with_cp = with_cp + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.act_cfg = act_cfg + self.deploy = deploy + + if deploy: + self.branch_reparam = build_conv_layer( + conv_cfg, + in_channels=in_channels, + out_channels=out_channels, + kernel_size=3, + stride=stride, + padding=padding, + dilation=dilation, + groups=groups, + bias=True, + padding_mode=padding_mode) + else: + # judge if input shape and output shape are the same. + # If true, add a normalized identity shortcut. + if out_channels == in_channels and stride == 1 and \ + padding == dilation: + self.branch_norm = build_norm_layer(norm_cfg, in_channels)[1] + else: + self.branch_norm = None + + self.branch_3x3 = self.create_conv_bn( + kernel_size=3, + dilation=dilation, + padding=padding, + ) + self.branch_1x1 = self.create_conv_bn(kernel_size=1) + + if se_cfg is not None: + self.se_layer = SELayer(channels=out_channels, **se_cfg) + else: + self.se_layer = None + + self.act = build_activation_layer(act_cfg) + + def create_conv_bn(self, kernel_size, dilation=1, padding=0): + conv_bn = Sequential() + conv_bn.add_module( + 'conv', + build_conv_layer( + self.conv_cfg, + in_channels=self.in_channels, + out_channels=self.out_channels, + kernel_size=kernel_size, + stride=self.stride, + dilation=dilation, + padding=padding, + groups=self.groups, + bias=False)) + conv_bn.add_module( + 'norm', + build_norm_layer(self.norm_cfg, num_features=self.out_channels)[1]) + + return conv_bn + + def forward(self, x): + + def _inner_forward(inputs): + if self.deploy: + return self.branch_reparam(inputs) + + if self.branch_norm is None: + branch_norm_out = 0 + else: + branch_norm_out = self.branch_norm(inputs) + + inner_out = self.branch_3x3(inputs) + self.branch_1x1( + inputs) + branch_norm_out + + if self.se_cfg is not None: + inner_out = self.se_layer(inner_out) + + return inner_out + + if self.with_cp and x.requires_grad: + out = cp.checkpoint(_inner_forward, x) + else: + out = _inner_forward(x) + + out = self.act(out) + + return out + + def switch_to_deploy(self): + """Switch the model structure from training mode to deployment mode.""" + if self.deploy: + return + assert self.norm_cfg['type'] == 'BN', \ + "Switch is not allowed when norm_cfg['type'] != 'BN'." + + reparam_weight, reparam_bias = self.reparameterize() + self.branch_reparam = build_conv_layer( + self.conv_cfg, + self.in_channels, + self.out_channels, + kernel_size=3, + stride=self.stride, + padding=self.padding, + dilation=self.dilation, + groups=self.groups, + bias=True) + self.branch_reparam.weight.data = reparam_weight + self.branch_reparam.bias.data = reparam_bias + + for param in self.parameters(): + param.detach_() + delattr(self, 'branch_3x3') + delattr(self, 'branch_1x1') + delattr(self, 'branch_norm') + + self.deploy = True + + def reparameterize(self): + """Fuse all the parameters of all branches. + + Returns: + tuple[torch.Tensor, torch.Tensor]: Parameters after fusion of all + branches. 
the first element is the weights and the second is + the bias. + """ + weight_3x3, bias_3x3 = self._fuse_conv_bn(self.branch_3x3) + weight_1x1, bias_1x1 = self._fuse_conv_bn(self.branch_1x1) + # pad a conv1x1 weight to a conv3x3 weight + weight_1x1 = F.pad(weight_1x1, [1, 1, 1, 1], value=0) + + weight_norm, bias_norm = 0, 0 + if self.branch_norm: + tmp_conv_bn = self._norm_to_conv3x3(self.branch_norm) + weight_norm, bias_norm = self._fuse_conv_bn(tmp_conv_bn) + + return (weight_3x3 + weight_1x1 + weight_norm, + bias_3x3 + bias_1x1 + bias_norm) + + def _fuse_conv_bn(self, branch): + """Fuse the parameters in a branch with a conv and bn. + + Args: + branch (mmcv.runner.Sequential): A branch with conv and bn. + + Returns: + tuple[torch.Tensor, torch.Tensor]: The parameters obtained after + fusing the parameters of conv and bn in one branch. + The first element is the weight and the second is the bias. + """ + if branch is None: + return 0, 0 + conv_weight = branch.conv.weight + running_mean = branch.norm.running_mean + running_var = branch.norm.running_var + gamma = branch.norm.weight + beta = branch.norm.bias + eps = branch.norm.eps + + std = (running_var + eps).sqrt() + fused_weight = (gamma / std).reshape(-1, 1, 1, 1) * conv_weight + fused_bias = -running_mean * gamma / std + beta + + return fused_weight, fused_bias + + def _norm_to_conv3x3(self, branch_nrom): + """Convert a norm layer to a conv3x3-bn sequence. + + Args: + branch (nn.BatchNorm2d): A branch only with bn in the block. + + Returns: + tmp_conv3x3 (mmcv.runner.Sequential): a sequential with conv3x3 and + bn. + """ + input_dim = self.in_channels // self.groups + conv_weight = torch.zeros((self.in_channels, input_dim, 3, 3), + dtype=branch_nrom.weight.dtype) + + for i in range(self.in_channels): + conv_weight[i, i % input_dim, 1, 1] = 1 + conv_weight = conv_weight.to(branch_nrom.weight.device) + + tmp_conv3x3 = self.create_conv_bn(kernel_size=3) + tmp_conv3x3.conv.weight.data = conv_weight + tmp_conv3x3.norm = branch_nrom + return tmp_conv3x3 + + +class MTSPPF(BaseModule): + """MTSPPF block for YOLOX-PAI RepVGG backbone. + + Args: + in_channels (int): The input channels of the block. + out_channels (int): The output channels of the block. + norm_cfg (dict): dictionary to construct and config norm layer. + Default: dict(type='BN'). + act_cfg (dict): Config dict for activation layer. + Default: dict(type='ReLU'). + kernel_size (int): Kernel size of pooling. Default: 5. + """ + + def __init__(self, + in_channels, + out_channels, + norm_cfg=dict(type='BN'), + act_cfg=dict(type='ReLU'), + kernel_size=5): + super().__init__() + hidden_features = in_channels // 2 # hidden channels + self.conv1 = ConvModule( + in_channels, + hidden_features, + 1, + stride=1, + norm_cfg=norm_cfg, + act_cfg=act_cfg) + self.conv2 = ConvModule( + hidden_features * 4, + out_channels, + 1, + stride=1, + norm_cfg=norm_cfg, + act_cfg=act_cfg) + self.maxpool = nn.MaxPool2d( + kernel_size=kernel_size, stride=1, padding=kernel_size // 2) + + def forward(self, x): + x = self.conv1(x) + y1 = self.maxpool(x) + y2 = self.maxpool(y1) + return self.conv2(torch.cat([x, y1, y2, self.maxpool(y2)], 1)) + + +@MODELS.register_module() +class RepVGG(BaseBackbone): + """RepVGG backbone. + + A PyTorch impl of : `RepVGG: Making VGG-style ConvNets Great Again + `_ + + Args: + arch (str | dict): RepVGG architecture. If use string, choose from + 'A0', 'A1`', 'A2', 'B0', 'B1', 'B1g2', 'B1g4', 'B2', 'B2g2', + 'B2g4', 'B3', 'B3g2', 'B3g4' or 'D2se'. 
If use dict, it should + have below keys: + + - **num_blocks** (Sequence[int]): Number of blocks in each stage. + - **width_factor** (Sequence[float]): Width deflator in each stage. + - **group_layer_map** (dict | None): RepVGG Block that declares + the need to apply group convolution. + - **se_cfg** (dict | None): SE Layer config. + - **stem_channels** (int, optional): The stem channels, the final + stem channels will be + ``min(stem_channels, base_channels*width_factor[0])``. + If not set here, 64 is used by default in the code. + + in_channels (int): Number of input image channels. Defaults to 3. + base_channels (int): Base channels of RepVGG backbone, work with + width_factor together. Defaults to 64. + out_indices (Sequence[int]): Output from which stages. + Defaults to ``(3, )``. + strides (Sequence[int]): Strides of the first block of each stage. + Defaults to ``(2, 2, 2, 2)``. + dilations (Sequence[int]): Dilation of each stage. + Defaults to ``(1, 1, 1, 1)``. + frozen_stages (int): Stages to be frozen (all param fixed). -1 means + not freezing any parameters. Defaults to -1. + conv_cfg (dict | None): The config dict for conv layers. + Defaults to None. + norm_cfg (dict): The config dict for norm layers. + Defaults to ``dict(type='BN')``. + act_cfg (dict): Config dict for activation layer. + Defaults to ``dict(type='ReLU')``. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. Defaults to False. + deploy (bool): Whether to switch the model structure to deployment + mode. Defaults to False. + norm_eval (bool): Whether to set norm layers to eval mode, namely, + freeze running stats (mean and var). Note: Effect on Batch Norm + and its variants only. Defaults to False. + add_ppf (bool): Whether to use the MTSPPF block. Defaults to False. + init_cfg (dict or list[dict], optional): Initialization config dict. + Defaults to None. 
+ """ + + groupwise_layers = [2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26] + g2_layer_map = {layer: 2 for layer in groupwise_layers} + g4_layer_map = {layer: 4 for layer in groupwise_layers} + + arch_settings = { + 'A0': + dict( + num_blocks=[2, 4, 14, 1], + width_factor=[0.75, 0.75, 0.75, 2.5], + group_layer_map=None, + se_cfg=None), + 'A1': + dict( + num_blocks=[2, 4, 14, 1], + width_factor=[1, 1, 1, 2.5], + group_layer_map=None, + se_cfg=None), + 'A2': + dict( + num_blocks=[2, 4, 14, 1], + width_factor=[1.5, 1.5, 1.5, 2.75], + group_layer_map=None, + se_cfg=None), + 'B0': + dict( + num_blocks=[4, 6, 16, 1], + width_factor=[1, 1, 1, 2.5], + group_layer_map=None, + se_cfg=None, + stem_channels=64), + 'B1': + dict( + num_blocks=[4, 6, 16, 1], + width_factor=[2, 2, 2, 4], + group_layer_map=None, + se_cfg=None), + 'B1g2': + dict( + num_blocks=[4, 6, 16, 1], + width_factor=[2, 2, 2, 4], + group_layer_map=g2_layer_map, + se_cfg=None), + 'B1g4': + dict( + num_blocks=[4, 6, 16, 1], + width_factor=[2, 2, 2, 4], + group_layer_map=g4_layer_map, + se_cfg=None), + 'B2': + dict( + num_blocks=[4, 6, 16, 1], + width_factor=[2.5, 2.5, 2.5, 5], + group_layer_map=None, + se_cfg=None), + 'B2g2': + dict( + num_blocks=[4, 6, 16, 1], + width_factor=[2.5, 2.5, 2.5, 5], + group_layer_map=g2_layer_map, + se_cfg=None), + 'B2g4': + dict( + num_blocks=[4, 6, 16, 1], + width_factor=[2.5, 2.5, 2.5, 5], + group_layer_map=g4_layer_map, + se_cfg=None), + 'B3': + dict( + num_blocks=[4, 6, 16, 1], + width_factor=[3, 3, 3, 5], + group_layer_map=None, + se_cfg=None), + 'B3g2': + dict( + num_blocks=[4, 6, 16, 1], + width_factor=[3, 3, 3, 5], + group_layer_map=g2_layer_map, + se_cfg=None), + 'B3g4': + dict( + num_blocks=[4, 6, 16, 1], + width_factor=[3, 3, 3, 5], + group_layer_map=g4_layer_map, + se_cfg=None), + 'D2se': + dict( + num_blocks=[8, 14, 24, 1], + width_factor=[2.5, 2.5, 2.5, 5], + group_layer_map=None, + se_cfg=dict(ratio=16, divisor=1)), + 'yolox-pai-small': + dict( + num_blocks=[3, 5, 7, 3], + width_factor=[1, 1, 1, 1], + group_layer_map=None, + se_cfg=None, + stem_channels=32), + } + + def __init__(self, + arch, + in_channels=3, + base_channels=64, + out_indices=(3, ), + strides=(2, 2, 2, 2), + dilations=(1, 1, 1, 1), + frozen_stages=-1, + conv_cfg=None, + norm_cfg=dict(type='BN'), + act_cfg=dict(type='ReLU'), + with_cp=False, + deploy=False, + norm_eval=False, + add_ppf=False, + init_cfg=[ + dict(type='Kaiming', layer=['Conv2d']), + dict( + type='Constant', + val=1, + layer=['_BatchNorm', 'GroupNorm']) + ]): + super(RepVGG, self).__init__(init_cfg) + + if isinstance(arch, str): + assert arch in self.arch_settings, \ + f'"arch": "{arch}" is not one of the arch_settings' + arch = self.arch_settings[arch] + elif not isinstance(arch, dict): + raise TypeError('Expect "arch" to be either a string ' + f'or a dict, got {type(arch)}') + + assert len(arch['num_blocks']) == len( + arch['width_factor']) == len(strides) == len(dilations) + assert max(out_indices) < len(arch['num_blocks']) + if arch['group_layer_map'] is not None: + assert max(arch['group_layer_map'].keys()) <= sum( + arch['num_blocks']) + + if arch['se_cfg'] is not None: + assert isinstance(arch['se_cfg'], dict) + + self.base_channels = base_channels + self.arch = arch + self.in_channels = in_channels + self.out_indices = out_indices + self.strides = strides + self.dilations = dilations + self.deploy = deploy + self.frozen_stages = frozen_stages + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.act_cfg = act_cfg + self.with_cp = with_cp + 
self.norm_eval = norm_eval + + # defaults to 64 to prevert BC-breaking if stem_channels + # not in arch dict; + # the stem channels should not be larger than that of stage1. + channels = min( + arch.get('stem_channels', 64), + int(self.base_channels * self.arch['width_factor'][0])) + self.stem = RepVGGBlock( + self.in_channels, + channels, + stride=2, + se_cfg=arch['se_cfg'], + with_cp=with_cp, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg, + deploy=deploy) + + next_create_block_idx = 1 + self.stages = [] + for i in range(len(arch['num_blocks'])): + num_blocks = self.arch['num_blocks'][i] + stride = self.strides[i] + dilation = self.dilations[i] + out_channels = int(self.base_channels * 2**i * + self.arch['width_factor'][i]) + + stage, next_create_block_idx = self._make_stage( + channels, out_channels, num_blocks, stride, dilation, + next_create_block_idx, init_cfg) + stage_name = f'stage_{i + 1}' + self.add_module(stage_name, stage) + self.stages.append(stage_name) + + channels = out_channels + + if add_ppf: + self.ppf = MTSPPF( + out_channels, + out_channels, + norm_cfg=norm_cfg, + act_cfg=act_cfg, + kernel_size=5) + else: + self.ppf = nn.Identity() + + def _make_stage(self, in_channels, out_channels, num_blocks, stride, + dilation, next_create_block_idx, init_cfg): + strides = [stride] + [1] * (num_blocks - 1) + dilations = [dilation] * num_blocks + + blocks = [] + for i in range(num_blocks): + groups = self.arch['group_layer_map'].get( + next_create_block_idx, + 1) if self.arch['group_layer_map'] is not None else 1 + blocks.append( + RepVGGBlock( + in_channels, + out_channels, + stride=strides[i], + padding=dilations[i], + dilation=dilations[i], + groups=groups, + se_cfg=self.arch['se_cfg'], + with_cp=self.with_cp, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + act_cfg=self.act_cfg, + deploy=self.deploy, + init_cfg=init_cfg)) + in_channels = out_channels + next_create_block_idx += 1 + + return Sequential(*blocks), next_create_block_idx + + def forward(self, x): + x = self.stem(x) + outs = [] + for i, stage_name in enumerate(self.stages): + stage = getattr(self, stage_name) + x = stage(x) + if i + 1 == len(self.stages): + x = self.ppf(x) + if i in self.out_indices: + outs.append(x) + + return tuple(outs) + + def _freeze_stages(self): + if self.frozen_stages >= 0: + self.stem.eval() + for param in self.stem.parameters(): + param.requires_grad = False + for i in range(self.frozen_stages): + stage = getattr(self, f'stage_{i+1}') + stage.eval() + for param in stage.parameters(): + param.requires_grad = False + + def train(self, mode=True): + super(RepVGG, self).train(mode) + self._freeze_stages() + if mode and self.norm_eval: + for m in self.modules(): + if isinstance(m, _BatchNorm): + m.eval() + + def switch_to_deploy(self): + for m in self.modules(): + if isinstance(m, RepVGGBlock): + m.switch_to_deploy() + self.deploy = True diff --git a/mmpretrain/models/backbones/res2net.py b/mmpretrain/models/backbones/res2net.py new file mode 100644 index 0000000000000000000000000000000000000000..6e9bb6df37a2d2c9d19e613faa50ce0103aff357 --- /dev/null +++ b/mmpretrain/models/backbones/res2net.py @@ -0,0 +1,317 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
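Before the Res2Net implementation below, a short sketch of the structural re-parameterization offered by the RepVGG backbone above (illustrative only, not part of this diff; it assumes RepVGG is exported from mmpretrain.models.backbones as in upstream mmpretrain, and the two outputs match only up to floating-point tolerance in eval mode):

import torch
from mmpretrain.models.backbones import RepVGG

model = RepVGG(arch='A0', out_indices=(3, ))
model.eval()  # BN must use running statistics for the fusion to be exact
x = torch.rand(1, 3, 224, 224)
with torch.no_grad():
    y_multi_branch = model(x)[0]
    # Fuse the 3x3, 1x1 and identity branches of every block into one conv.
    model.switch_to_deploy()
    y_single_branch = model(x)[0]
print(torch.allclose(y_multi_branch, y_single_branch, atol=1e-5))  # expected: True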
+import math + +import torch +import torch.nn as nn +import torch.utils.checkpoint as cp +from mmcv.cnn import build_conv_layer, build_norm_layer +from mmengine.model import ModuleList, Sequential + +from mmpretrain.registry import MODELS +from .resnet import Bottleneck as _Bottleneck +from .resnet import ResNet + + +class Bottle2neck(_Bottleneck): + expansion = 4 + + def __init__(self, + in_channels, + out_channels, + scales=4, + base_width=26, + base_channels=64, + stage_type='normal', + **kwargs): + """Bottle2neck block for Res2Net.""" + super(Bottle2neck, self).__init__(in_channels, out_channels, **kwargs) + assert scales > 1, 'Res2Net degenerates to ResNet when scales = 1.' + + mid_channels = out_channels // self.expansion + width = int(math.floor(mid_channels * (base_width / base_channels))) + + self.norm1_name, norm1 = build_norm_layer( + self.norm_cfg, width * scales, postfix=1) + self.norm3_name, norm3 = build_norm_layer( + self.norm_cfg, self.out_channels, postfix=3) + + self.conv1 = build_conv_layer( + self.conv_cfg, + self.in_channels, + width * scales, + kernel_size=1, + stride=self.conv1_stride, + bias=False) + self.add_module(self.norm1_name, norm1) + + if stage_type == 'stage': + self.pool = nn.AvgPool2d( + kernel_size=3, stride=self.conv2_stride, padding=1) + + self.convs = ModuleList() + self.bns = ModuleList() + for i in range(scales - 1): + self.convs.append( + build_conv_layer( + self.conv_cfg, + width, + width, + kernel_size=3, + stride=self.conv2_stride, + padding=self.dilation, + dilation=self.dilation, + bias=False)) + self.bns.append( + build_norm_layer(self.norm_cfg, width, postfix=i + 1)[1]) + + self.conv3 = build_conv_layer( + self.conv_cfg, + width * scales, + self.out_channels, + kernel_size=1, + bias=False) + self.add_module(self.norm3_name, norm3) + + self.stage_type = stage_type + self.scales = scales + self.width = width + delattr(self, 'conv2') + delattr(self, self.norm2_name) + + def forward(self, x): + """Forward function.""" + + def _inner_forward(x): + identity = x + + out = self.conv1(x) + out = self.norm1(out) + out = self.relu(out) + + spx = torch.split(out, self.width, 1) + sp = self.convs[0](spx[0].contiguous()) + sp = self.relu(self.bns[0](sp)) + out = sp + for i in range(1, self.scales - 1): + if self.stage_type == 'stage': + sp = spx[i] + else: + sp = sp + spx[i] + sp = self.convs[i](sp.contiguous()) + sp = self.relu(self.bns[i](sp)) + out = torch.cat((out, sp), 1) + + if self.stage_type == 'normal' and self.scales != 1: + out = torch.cat((out, spx[self.scales - 1]), 1) + elif self.stage_type == 'stage' and self.scales != 1: + out = torch.cat((out, self.pool(spx[self.scales - 1])), 1) + + out = self.conv3(out) + out = self.norm3(out) + + if self.downsample is not None: + identity = self.downsample(x) + + out += identity + + return out + + if self.with_cp and x.requires_grad: + out = cp.checkpoint(_inner_forward, x) + else: + out = _inner_forward(x) + + out = self.relu(out) + + return out + + +class Res2Layer(Sequential): + """Res2Layer to build Res2Net style backbone. + + Args: + block (nn.Module): block used to build ResLayer. + inplanes (int): inplanes of block. + planes (int): planes of block. + num_blocks (int): number of blocks. + stride (int): stride of the first block. Default: 1 + avg_down (bool): Use AvgPool instead of stride conv when + downsampling in the bottle2neck. Defaults to True. + conv_cfg (dict): dictionary to construct and config conv layer. + Default: None + norm_cfg (dict): dictionary to construct and config norm layer. 
+ Default: dict(type='BN') + scales (int): Scales used in Res2Net. Default: 4 + base_width (int): Basic width of each scale. Default: 26 + drop_path_rate (float or np.ndarray): stochastic depth rate. + Default: 0. + """ + + def __init__(self, + block, + in_channels, + out_channels, + num_blocks, + stride=1, + avg_down=True, + conv_cfg=None, + norm_cfg=dict(type='BN'), + scales=4, + base_width=26, + drop_path_rate=0.0, + **kwargs): + self.block = block + + if isinstance(drop_path_rate, float): + drop_path_rate = [drop_path_rate] * num_blocks + + assert len(drop_path_rate + ) == num_blocks, 'Please check the length of drop_path_rate' + + downsample = None + if stride != 1 or in_channels != out_channels: + if avg_down: + downsample = nn.Sequential( + nn.AvgPool2d( + kernel_size=stride, + stride=stride, + ceil_mode=True, + count_include_pad=False), + build_conv_layer( + conv_cfg, + in_channels, + out_channels, + kernel_size=1, + stride=1, + bias=False), + build_norm_layer(norm_cfg, out_channels)[1], + ) + else: + downsample = nn.Sequential( + build_conv_layer( + conv_cfg, + in_channels, + out_channels, + kernel_size=1, + stride=stride, + bias=False), + build_norm_layer(norm_cfg, out_channels)[1], + ) + + layers = [] + layers.append( + block( + in_channels=in_channels, + out_channels=out_channels, + stride=stride, + downsample=downsample, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + scales=scales, + base_width=base_width, + stage_type='stage', + drop_path_rate=drop_path_rate[0], + **kwargs)) + in_channels = out_channels + for i in range(1, num_blocks): + layers.append( + block( + in_channels=in_channels, + out_channels=out_channels, + stride=1, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + scales=scales, + base_width=base_width, + drop_path_rate=drop_path_rate[i], + **kwargs)) + super(Res2Layer, self).__init__(*layers) + + +@MODELS.register_module() +class Res2Net(ResNet): + """Res2Net backbone. + + A PyTorch implement of : `Res2Net: A New Multi-scale Backbone + Architecture `_ + + Args: + depth (int): Depth of Res2Net, choose from {50, 101, 152}. + scales (int): Scales used in Res2Net. Defaults to 4. + base_width (int): Basic width of each scale. Defaults to 26. + in_channels (int): Number of input image channels. Defaults to 3. + num_stages (int): Number of Res2Net stages. Defaults to 4. + strides (Sequence[int]): Strides of the first block of each stage. + Defaults to ``(1, 2, 2, 2)``. + dilations (Sequence[int]): Dilation of each stage. + Defaults to ``(1, 1, 1, 1)``. + out_indices (Sequence[int]): Output from which stages. + Defaults to ``(3, )``. + style (str): "pytorch" or "caffe". If set to "pytorch", the stride-two + layer is the 3x3 conv layer, otherwise the stride-two layer is + the first 1x1 conv layer. Defaults to "pytorch". + deep_stem (bool): Replace 7x7 conv in input stem with 3 3x3 conv. + Defaults to True. + avg_down (bool): Use AvgPool instead of stride conv when + downsampling in the bottle2neck. Defaults to True. + frozen_stages (int): Stages to be frozen (stop grad and set eval mode). + -1 means not freezing any parameters. Defaults to -1. + norm_cfg (dict): Dictionary to construct and config norm layer. + Defaults to ``dict(type='BN', requires_grad=True)``. + norm_eval (bool): Whether to set norm layers to eval mode, namely, + freeze running stats (mean and var). Note: Effect on Batch Norm + and its variants only. Defaults to False. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. Defaults to False. 
+ zero_init_residual (bool): Whether to use zero init for last norm layer + in resblocks to let them behave as identity. Defaults to True. + init_cfg (dict or list[dict], optional): Initialization config dict. + Defaults to None. + + Example: + >>> from mmpretrain.models import Res2Net + >>> import torch + >>> model = Res2Net(depth=50, + ... scales=4, + ... base_width=26, + ... out_indices=(0, 1, 2, 3)) + >>> model.eval() + >>> inputs = torch.rand(1, 3, 32, 32) + >>> level_outputs = model.forward(inputs) + >>> for level_out in level_outputs: + ... print(tuple(level_out.shape)) + (1, 256, 8, 8) + (1, 512, 4, 4) + (1, 1024, 2, 2) + (1, 2048, 1, 1) + """ + + arch_settings = { + 50: (Bottle2neck, (3, 4, 6, 3)), + 101: (Bottle2neck, (3, 4, 23, 3)), + 152: (Bottle2neck, (3, 8, 36, 3)) + } + + def __init__(self, + scales=4, + base_width=26, + style='pytorch', + deep_stem=True, + avg_down=True, + init_cfg=None, + **kwargs): + self.scales = scales + self.base_width = base_width + super(Res2Net, self).__init__( + style=style, + deep_stem=deep_stem, + avg_down=avg_down, + init_cfg=init_cfg, + **kwargs) + + def make_res_layer(self, **kwargs): + return Res2Layer( + scales=self.scales, + base_width=self.base_width, + base_channels=self.base_channels, + **kwargs) diff --git a/mmpretrain/models/backbones/resnest.py b/mmpretrain/models/backbones/resnest.py new file mode 100644 index 0000000000000000000000000000000000000000..4bb438f042d606946fd7b69d73568f28563e0efa --- /dev/null +++ b/mmpretrain/models/backbones/resnest.py @@ -0,0 +1,339 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +import torch.nn as nn +import torch.nn.functional as F +import torch.utils.checkpoint as cp +from mmcv.cnn import build_conv_layer, build_norm_layer + +from mmpretrain.registry import MODELS +from .resnet import Bottleneck as _Bottleneck +from .resnet import ResLayer, ResNetV1d + + +class RSoftmax(nn.Module): + """Radix Softmax module in ``SplitAttentionConv2d``. + + Args: + radix (int): Radix of input. + groups (int): Groups of input. + """ + + def __init__(self, radix, groups): + super().__init__() + self.radix = radix + self.groups = groups + + def forward(self, x): + batch = x.size(0) + if self.radix > 1: + x = x.view(batch, self.groups, self.radix, -1).transpose(1, 2) + x = F.softmax(x, dim=1) + x = x.reshape(batch, -1) + else: + x = torch.sigmoid(x) + return x + + +class SplitAttentionConv2d(nn.Module): + """Split-Attention Conv2d. + + Args: + in_channels (int): Same as nn.Conv2d. + out_channels (int): Same as nn.Conv2d. + kernel_size (int | tuple[int]): Same as nn.Conv2d. + stride (int | tuple[int]): Same as nn.Conv2d. + padding (int | tuple[int]): Same as nn.Conv2d. + dilation (int | tuple[int]): Same as nn.Conv2d. + groups (int): Same as nn.Conv2d. + radix (int): Radix of SpltAtConv2d. Default: 2 + reduction_factor (int): Reduction factor of SplitAttentionConv2d. + Default: 4. + conv_cfg (dict, optional): Config dict for convolution layer. + Default: None, which means using conv2d. + norm_cfg (dict, optional): Config dict for normalization layer. + Default: None. 
+ """ + + def __init__(self, + in_channels, + channels, + kernel_size, + stride=1, + padding=0, + dilation=1, + groups=1, + radix=2, + reduction_factor=4, + conv_cfg=None, + norm_cfg=dict(type='BN')): + super(SplitAttentionConv2d, self).__init__() + inter_channels = max(in_channels * radix // reduction_factor, 32) + self.radix = radix + self.groups = groups + self.channels = channels + self.conv = build_conv_layer( + conv_cfg, + in_channels, + channels * radix, + kernel_size, + stride=stride, + padding=padding, + dilation=dilation, + groups=groups * radix, + bias=False) + self.norm0_name, norm0 = build_norm_layer( + norm_cfg, channels * radix, postfix=0) + self.add_module(self.norm0_name, norm0) + self.relu = nn.ReLU(inplace=True) + self.fc1 = build_conv_layer( + None, channels, inter_channels, 1, groups=self.groups) + self.norm1_name, norm1 = build_norm_layer( + norm_cfg, inter_channels, postfix=1) + self.add_module(self.norm1_name, norm1) + self.fc2 = build_conv_layer( + None, inter_channels, channels * radix, 1, groups=self.groups) + self.rsoftmax = RSoftmax(radix, groups) + + @property + def norm0(self): + return getattr(self, self.norm0_name) + + @property + def norm1(self): + return getattr(self, self.norm1_name) + + def forward(self, x): + x = self.conv(x) + x = self.norm0(x) + x = self.relu(x) + + batch, rchannel = x.shape[:2] + if self.radix > 1: + splits = x.view(batch, self.radix, -1, *x.shape[2:]) + gap = splits.sum(dim=1) + else: + gap = x + gap = F.adaptive_avg_pool2d(gap, 1) + gap = self.fc1(gap) + + gap = self.norm1(gap) + gap = self.relu(gap) + + atten = self.fc2(gap) + atten = self.rsoftmax(atten).view(batch, -1, 1, 1) + + if self.radix > 1: + attens = atten.view(batch, self.radix, -1, *atten.shape[2:]) + out = torch.sum(attens * splits, dim=1) + else: + out = atten * x + return out.contiguous() + + +class Bottleneck(_Bottleneck): + """Bottleneck block for ResNeSt. + + Args: + in_channels (int): Input channels of this block. + out_channels (int): Output channels of this block. + groups (int): Groups of conv2. + width_per_group (int): Width per group of conv2. 64x4d indicates + ``groups=64, width_per_group=4`` and 32x8d indicates + ``groups=32, width_per_group=8``. + radix (int): Radix of SpltAtConv2d. Default: 2 + reduction_factor (int): Reduction factor of SplitAttentionConv2d. + Default: 4. + avg_down_stride (bool): Whether to use average pool for stride in + Bottleneck. Default: True. + stride (int): stride of the block. Default: 1 + dilation (int): dilation of convolution. Default: 1 + downsample (nn.Module, optional): downsample operation on identity + branch. Default: None + style (str): `pytorch` or `caffe`. If set to "pytorch", the stride-two + layer is the 3x3 conv layer, otherwise the stride-two layer is + the first 1x1 conv layer. + conv_cfg (dict, optional): dictionary to construct and config conv + layer. Default: None + norm_cfg (dict): dictionary to construct and config norm layer. + Default: dict(type='BN') + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. 
+ """ + + def __init__(self, + in_channels, + out_channels, + groups=1, + width_per_group=4, + base_channels=64, + radix=2, + reduction_factor=4, + avg_down_stride=True, + **kwargs): + super(Bottleneck, self).__init__(in_channels, out_channels, **kwargs) + + self.groups = groups + self.width_per_group = width_per_group + + # For ResNet bottleneck, middle channels are determined by expansion + # and out_channels, but for ResNeXt bottleneck, it is determined by + # groups and width_per_group and the stage it is located in. + if groups != 1: + assert self.mid_channels % base_channels == 0 + self.mid_channels = ( + groups * width_per_group * self.mid_channels // base_channels) + + self.avg_down_stride = avg_down_stride and self.conv2_stride > 1 + + self.norm1_name, norm1 = build_norm_layer( + self.norm_cfg, self.mid_channels, postfix=1) + self.norm3_name, norm3 = build_norm_layer( + self.norm_cfg, self.out_channels, postfix=3) + + self.conv1 = build_conv_layer( + self.conv_cfg, + self.in_channels, + self.mid_channels, + kernel_size=1, + stride=self.conv1_stride, + bias=False) + self.add_module(self.norm1_name, norm1) + self.conv2 = SplitAttentionConv2d( + self.mid_channels, + self.mid_channels, + kernel_size=3, + stride=1 if self.avg_down_stride else self.conv2_stride, + padding=self.dilation, + dilation=self.dilation, + groups=groups, + radix=radix, + reduction_factor=reduction_factor, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg) + delattr(self, self.norm2_name) + + if self.avg_down_stride: + self.avd_layer = nn.AvgPool2d(3, self.conv2_stride, padding=1) + + self.conv3 = build_conv_layer( + self.conv_cfg, + self.mid_channels, + self.out_channels, + kernel_size=1, + bias=False) + self.add_module(self.norm3_name, norm3) + + def forward(self, x): + + def _inner_forward(x): + identity = x + + out = self.conv1(x) + out = self.norm1(out) + out = self.relu(out) + + out = self.conv2(out) + + if self.avg_down_stride: + out = self.avd_layer(out) + + out = self.conv3(out) + out = self.norm3(out) + + if self.downsample is not None: + identity = self.downsample(x) + + out += identity + + return out + + if self.with_cp and x.requires_grad: + out = cp.checkpoint(_inner_forward, x) + else: + out = _inner_forward(x) + + out = self.relu(out) + + return out + + +@MODELS.register_module() +class ResNeSt(ResNetV1d): + """ResNeSt backbone. + + Please refer to the `paper `__ for + details. + + Args: + depth (int): Network depth, from {50, 101, 152, 200}. + groups (int): Groups of conv2 in Bottleneck. Default: 32. + width_per_group (int): Width per group of conv2 in Bottleneck. + Default: 4. + radix (int): Radix of SpltAtConv2d. Default: 2 + reduction_factor (int): Reduction factor of SplitAttentionConv2d. + Default: 4. + avg_down_stride (bool): Whether to use average pool for stride in + Bottleneck. Default: True. + in_channels (int): Number of input image channels. Default: 3. + stem_channels (int): Output channels of the stem layer. Default: 64. + num_stages (int): Stages of the network. Default: 4. + strides (Sequence[int]): Strides of the first block of each stage. + Default: ``(1, 2, 2, 2)``. + dilations (Sequence[int]): Dilation of each stage. + Default: ``(1, 1, 1, 1)``. + out_indices (Sequence[int]): Output from which stages. If only one + stage is specified, a single tensor (feature map) is returned, + otherwise multiple stages are specified, a tuple of tensors will + be returned. Default: ``(3, )``. + style (str): `pytorch` or `caffe`. 
If set to "pytorch", the stride-two + layer is the 3x3 conv layer, otherwise the stride-two layer is + the first 1x1 conv layer. + deep_stem (bool): Replace 7x7 conv in input stem with 3 3x3 conv. + Default: False. + avg_down (bool): Use AvgPool instead of stride conv when + downsampling in the bottleneck. Default: False. + frozen_stages (int): Stages to be frozen (stop grad and set eval mode). + -1 means not freezing any parameters. Default: -1. + conv_cfg (dict | None): The config dict for conv layers. Default: None. + norm_cfg (dict): The config dict for norm layers. + norm_eval (bool): Whether to set norm layers to eval mode, namely, + freeze running stats (mean and var). Note: Effect on Batch Norm + and its variants only. Default: False. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. Default: False. + zero_init_residual (bool): Whether to use zero init for last norm layer + in resblocks to let them behave as identity. Default: True. + """ + + arch_settings = { + 50: (Bottleneck, (3, 4, 6, 3)), + 101: (Bottleneck, (3, 4, 23, 3)), + 152: (Bottleneck, (3, 8, 36, 3)), + 200: (Bottleneck, (3, 24, 36, 3)), + 269: (Bottleneck, (3, 30, 48, 8)) + } + + def __init__(self, + depth, + groups=1, + width_per_group=4, + radix=2, + reduction_factor=4, + avg_down_stride=True, + **kwargs): + self.groups = groups + self.width_per_group = width_per_group + self.radix = radix + self.reduction_factor = reduction_factor + self.avg_down_stride = avg_down_stride + super(ResNeSt, self).__init__(depth=depth, **kwargs) + + def make_res_layer(self, **kwargs): + return ResLayer( + groups=self.groups, + width_per_group=self.width_per_group, + base_channels=self.base_channels, + radix=self.radix, + reduction_factor=self.reduction_factor, + avg_down_stride=self.avg_down_stride, + **kwargs) diff --git a/mmpretrain/models/backbones/resnet.py b/mmpretrain/models/backbones/resnet.py new file mode 100644 index 0000000000000000000000000000000000000000..e4df601db560ecb9c613c97d64eda6f45ee8be38 --- /dev/null +++ b/mmpretrain/models/backbones/resnet.py @@ -0,0 +1,708 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +import torch.nn as nn +import torch.utils.checkpoint as cp +from mmcv.cnn import (ConvModule, build_activation_layer, build_conv_layer, + build_norm_layer) +from mmcv.cnn.bricks import DropPath +from mmengine.model import BaseModule +from mmengine.model.weight_init import constant_init +from mmengine.utils.dl_utils.parrots_wrapper import _BatchNorm + +from mmpretrain.registry import MODELS +from .base_backbone import BaseBackbone + +eps = 1.0e-5 + + +class BasicBlock(BaseModule): + """BasicBlock for ResNet. + + Args: + in_channels (int): Input channels of this block. + out_channels (int): Output channels of this block. + expansion (int): The ratio of ``out_channels/mid_channels`` where + ``mid_channels`` is the output channels of conv1. This is a + reserved argument in BasicBlock and should always be 1. Default: 1. + stride (int): stride of the block. Default: 1 + dilation (int): dilation of convolution. Default: 1 + downsample (nn.Module, optional): downsample operation on identity + branch. Default: None. + style (str): `pytorch` or `caffe`. It is unused and reserved for + unified API with Bottleneck. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. + conv_cfg (dict, optional): dictionary to construct and config conv + layer. 
Default: None + norm_cfg (dict): dictionary to construct and config norm layer. + Default: dict(type='BN') + """ + + def __init__(self, + in_channels, + out_channels, + expansion=1, + stride=1, + dilation=1, + downsample=None, + style='pytorch', + with_cp=False, + conv_cfg=None, + norm_cfg=dict(type='BN'), + drop_path_rate=0.0, + act_cfg=dict(type='ReLU', inplace=True), + init_cfg=None): + super(BasicBlock, self).__init__(init_cfg=init_cfg) + self.in_channels = in_channels + self.out_channels = out_channels + self.expansion = expansion + assert self.expansion == 1 + assert out_channels % expansion == 0 + self.mid_channels = out_channels // expansion + self.stride = stride + self.dilation = dilation + self.style = style + self.with_cp = with_cp + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + + self.norm1_name, norm1 = build_norm_layer( + norm_cfg, self.mid_channels, postfix=1) + self.norm2_name, norm2 = build_norm_layer( + norm_cfg, out_channels, postfix=2) + + self.conv1 = build_conv_layer( + conv_cfg, + in_channels, + self.mid_channels, + 3, + stride=stride, + padding=dilation, + dilation=dilation, + bias=False) + self.add_module(self.norm1_name, norm1) + self.conv2 = build_conv_layer( + conv_cfg, + self.mid_channels, + out_channels, + 3, + padding=1, + bias=False) + self.add_module(self.norm2_name, norm2) + + self.relu = build_activation_layer(act_cfg) + self.downsample = downsample + self.drop_path = DropPath(drop_prob=drop_path_rate + ) if drop_path_rate > eps else nn.Identity() + + @property + def norm1(self): + return getattr(self, self.norm1_name) + + @property + def norm2(self): + return getattr(self, self.norm2_name) + + def forward(self, x): + + def _inner_forward(x): + identity = x + + out = self.conv1(x) + out = self.norm1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.norm2(out) + + if self.downsample is not None: + identity = self.downsample(x) + + out = self.drop_path(out) + + out += identity + + return out + + if self.with_cp and x.requires_grad: + out = cp.checkpoint(_inner_forward, x) + else: + out = _inner_forward(x) + + out = self.relu(out) + + return out + + +class Bottleneck(BaseModule): + """Bottleneck block for ResNet. + + Args: + in_channels (int): Input channels of this block. + out_channels (int): Output channels of this block. + expansion (int): The ratio of ``out_channels/mid_channels`` where + ``mid_channels`` is the input/output channels of conv2. Default: 4. + stride (int): stride of the block. Default: 1 + dilation (int): dilation of convolution. Default: 1 + downsample (nn.Module, optional): downsample operation on identity + branch. Default: None. + style (str): ``"pytorch"`` or ``"caffe"``. If set to "pytorch", the + stride-two layer is the 3x3 conv layer, otherwise the stride-two + layer is the first 1x1 conv layer. Default: "pytorch". + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. + conv_cfg (dict, optional): dictionary to construct and config conv + layer. Default: None + norm_cfg (dict): dictionary to construct and config norm layer. 
+ Default: dict(type='BN') + """ + + def __init__(self, + in_channels, + out_channels, + expansion=4, + stride=1, + dilation=1, + downsample=None, + style='pytorch', + with_cp=False, + conv_cfg=None, + norm_cfg=dict(type='BN'), + act_cfg=dict(type='ReLU', inplace=True), + drop_path_rate=0.0, + init_cfg=None): + super(Bottleneck, self).__init__(init_cfg=init_cfg) + assert style in ['pytorch', 'caffe'] + + self.in_channels = in_channels + self.out_channels = out_channels + self.expansion = expansion + assert out_channels % expansion == 0 + self.mid_channels = out_channels // expansion + self.stride = stride + self.dilation = dilation + self.style = style + self.with_cp = with_cp + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + + if self.style == 'pytorch': + self.conv1_stride = 1 + self.conv2_stride = stride + else: + self.conv1_stride = stride + self.conv2_stride = 1 + + self.norm1_name, norm1 = build_norm_layer( + norm_cfg, self.mid_channels, postfix=1) + self.norm2_name, norm2 = build_norm_layer( + norm_cfg, self.mid_channels, postfix=2) + self.norm3_name, norm3 = build_norm_layer( + norm_cfg, out_channels, postfix=3) + + self.conv1 = build_conv_layer( + conv_cfg, + in_channels, + self.mid_channels, + kernel_size=1, + stride=self.conv1_stride, + bias=False) + self.add_module(self.norm1_name, norm1) + self.conv2 = build_conv_layer( + conv_cfg, + self.mid_channels, + self.mid_channels, + kernel_size=3, + stride=self.conv2_stride, + padding=dilation, + dilation=dilation, + bias=False) + + self.add_module(self.norm2_name, norm2) + self.conv3 = build_conv_layer( + conv_cfg, + self.mid_channels, + out_channels, + kernel_size=1, + bias=False) + self.add_module(self.norm3_name, norm3) + + self.relu = build_activation_layer(act_cfg) + self.downsample = downsample + self.drop_path = DropPath(drop_prob=drop_path_rate + ) if drop_path_rate > eps else nn.Identity() + + @property + def norm1(self): + return getattr(self, self.norm1_name) + + @property + def norm2(self): + return getattr(self, self.norm2_name) + + @property + def norm3(self): + return getattr(self, self.norm3_name) + + def forward(self, x): + + def _inner_forward(x): + identity = x + + out = self.conv1(x) + out = self.norm1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.norm2(out) + out = self.relu(out) + + out = self.conv3(out) + out = self.norm3(out) + + if self.downsample is not None: + identity = self.downsample(x) + + out = self.drop_path(out) + + out += identity + + return out + + if self.with_cp and x.requires_grad: + out = cp.checkpoint(_inner_forward, x) + else: + out = _inner_forward(x) + + out = self.relu(out) + + return out + + +def get_expansion(block, expansion=None): + """Get the expansion of a residual block. + + The block expansion will be obtained by the following order: + + 1. If ``expansion`` is given, just return it. + 2. If ``block`` has the attribute ``expansion``, then return + ``block.expansion``. + 3. Return the default value according the the block type: + 1 for ``BasicBlock`` and 4 for ``Bottleneck``. + + Args: + block (class): The block class. + expansion (int | None): The given expansion ratio. + + Returns: + int: The expansion of the block. 
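A quick illustration of the resolution order just described, under the assumption that the file added in this diff is importable as `mmpretrain.models.backbones.resnet`:

```python
from mmpretrain.models.backbones.resnet import (BasicBlock, Bottleneck,
                                                get_expansion)

print(get_expansion(BasicBlock))     # 1 - type-based default for BasicBlock
print(get_expansion(Bottleneck))     # 4 - type-based default for Bottleneck
print(get_expansion(Bottleneck, 2))  # 2 - an explicit value always wins
```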
+ """ + if isinstance(expansion, int): + assert expansion > 0 + elif expansion is None: + if hasattr(block, 'expansion'): + expansion = block.expansion + elif issubclass(block, BasicBlock): + expansion = 1 + elif issubclass(block, Bottleneck): + expansion = 4 + else: + raise TypeError(f'expansion is not specified for {block.__name__}') + else: + raise TypeError('expansion must be an integer or None') + + return expansion + + +class ResLayer(nn.Sequential): + """ResLayer to build ResNet style backbone. + + Args: + block (nn.Module): Residual block used to build ResLayer. + num_blocks (int): Number of blocks. + in_channels (int): Input channels of this block. + out_channels (int): Output channels of this block. + expansion (int, optional): The expansion for BasicBlock/Bottleneck. + If not specified, it will firstly be obtained via + ``block.expansion``. If the block has no attribute "expansion", + the following default values will be used: 1 for BasicBlock and + 4 for Bottleneck. Default: None. + stride (int): stride of the first block. Default: 1. + avg_down (bool): Use AvgPool instead of stride conv when + downsampling in the bottleneck. Default: False + conv_cfg (dict, optional): dictionary to construct and config conv + layer. Default: None + norm_cfg (dict): dictionary to construct and config norm layer. + Default: dict(type='BN') + drop_path_rate (float or list): stochastic depth rate. + Default: 0. + """ + + def __init__(self, + block, + num_blocks, + in_channels, + out_channels, + expansion=None, + stride=1, + avg_down=False, + conv_cfg=None, + norm_cfg=dict(type='BN'), + drop_path_rate=0.0, + **kwargs): + self.block = block + self.expansion = get_expansion(block, expansion) + + if isinstance(drop_path_rate, float): + drop_path_rate = [drop_path_rate] * num_blocks + + assert len(drop_path_rate + ) == num_blocks, 'Please check the length of drop_path_rate' + + downsample = None + if stride != 1 or in_channels != out_channels: + downsample = [] + conv_stride = stride + if avg_down and stride != 1: + conv_stride = 1 + downsample.append( + nn.AvgPool2d( + kernel_size=stride, + stride=stride, + ceil_mode=True, + count_include_pad=False)) + downsample.extend([ + build_conv_layer( + conv_cfg, + in_channels, + out_channels, + kernel_size=1, + stride=conv_stride, + bias=False), + build_norm_layer(norm_cfg, out_channels)[1] + ]) + downsample = nn.Sequential(*downsample) + + layers = [] + layers.append( + block( + in_channels=in_channels, + out_channels=out_channels, + expansion=self.expansion, + stride=stride, + downsample=downsample, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + drop_path_rate=drop_path_rate[0], + **kwargs)) + in_channels = out_channels + for i in range(1, num_blocks): + layers.append( + block( + in_channels=in_channels, + out_channels=out_channels, + expansion=self.expansion, + stride=1, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + drop_path_rate=drop_path_rate[i], + **kwargs)) + super(ResLayer, self).__init__(*layers) + + +@MODELS.register_module() +class ResNet(BaseBackbone): + """ResNet backbone. + + Please refer to the `paper `__ for + details. + + Args: + depth (int): Network depth, from {18, 34, 50, 101, 152}. + in_channels (int): Number of input image channels. Default: 3. + stem_channels (int): Output channels of the stem layer. Default: 64. + base_channels (int): Middle channels of the first stage. Default: 64. + num_stages (int): Stages of the network. Default: 4. + strides (Sequence[int]): Strides of the first block of each stage. + Default: ``(1, 2, 2, 2)``. 
+ dilations (Sequence[int]): Dilation of each stage. + Default: ``(1, 1, 1, 1)``. + out_indices (Sequence[int]): Output from which stages. + Default: ``(3, )``. + style (str): `pytorch` or `caffe`. If set to "pytorch", the stride-two + layer is the 3x3 conv layer, otherwise the stride-two layer is + the first 1x1 conv layer. + deep_stem (bool): Replace 7x7 conv in input stem with 3 3x3 conv. + Default: False. + avg_down (bool): Use AvgPool instead of stride conv when + downsampling in the bottleneck. Default: False. + frozen_stages (int): Stages to be frozen (stop grad and set eval mode). + -1 means not freezing any parameters. Default: -1. + conv_cfg (dict | None): The config dict for conv layers. Default: None. + norm_cfg (dict): The config dict for norm layers. + norm_eval (bool): Whether to set norm layers to eval mode, namely, + freeze running stats (mean and var). Note: Effect on Batch Norm + and its variants only. Default: False. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. Default: False. + zero_init_residual (bool): Whether to use zero init for last norm layer + in resblocks to let them behave as identity. Default: True. + + Example: + >>> from mmpretrain.models import ResNet + >>> import torch + >>> self = ResNet(depth=18) + >>> self.eval() + >>> inputs = torch.rand(1, 3, 32, 32) + >>> level_outputs = self.forward(inputs) + >>> for level_out in level_outputs: + ... print(tuple(level_out.shape)) + (1, 64, 8, 8) + (1, 128, 4, 4) + (1, 256, 2, 2) + (1, 512, 1, 1) + """ + + arch_settings = { + 18: (BasicBlock, (2, 2, 2, 2)), + 34: (BasicBlock, (3, 4, 6, 3)), + 50: (Bottleneck, (3, 4, 6, 3)), + 101: (Bottleneck, (3, 4, 23, 3)), + 152: (Bottleneck, (3, 8, 36, 3)) + } + + def __init__(self, + depth, + in_channels=3, + stem_channels=64, + base_channels=64, + expansion=None, + num_stages=4, + strides=(1, 2, 2, 2), + dilations=(1, 1, 1, 1), + out_indices=(3, ), + style='pytorch', + deep_stem=False, + avg_down=False, + frozen_stages=-1, + conv_cfg=None, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=False, + with_cp=False, + zero_init_residual=True, + init_cfg=[ + dict(type='Kaiming', layer=['Conv2d']), + dict( + type='Constant', + val=1, + layer=['_BatchNorm', 'GroupNorm']) + ], + drop_path_rate=0.0): + super(ResNet, self).__init__(init_cfg) + if depth not in self.arch_settings: + raise KeyError(f'invalid depth {depth} for resnet') + self.depth = depth + self.stem_channels = stem_channels + self.base_channels = base_channels + self.num_stages = num_stages + assert num_stages >= 1 and num_stages <= 4 + self.strides = strides + self.dilations = dilations + assert len(strides) == len(dilations) == num_stages + self.out_indices = out_indices + assert max(out_indices) < num_stages + self.style = style + self.deep_stem = deep_stem + self.avg_down = avg_down + self.frozen_stages = frozen_stages + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.with_cp = with_cp + self.norm_eval = norm_eval + self.zero_init_residual = zero_init_residual + self.block, stage_blocks = self.arch_settings[depth] + self.stage_blocks = stage_blocks[:num_stages] + self.expansion = get_expansion(self.block, expansion) + + self._make_stem_layer(in_channels, stem_channels) + + self.res_layers = [] + _in_channels = stem_channels + _out_channels = base_channels * self.expansion + + # stochastic depth decay rule + total_depth = sum(stage_blocks) + dpr = [ + x.item() for x in torch.linspace(0, drop_path_rate, total_depth) + ] + + 
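The stochastic-depth decay rule above spreads a single `drop_path_rate` linearly over every residual block; the per-stage slices are then consumed in the loop that follows. A small standalone sketch with illustrative numbers:

```python
import torch

stage_blocks = [3, 4, 6, 3]            # ResNet-50-style layout (illustrative)
drop_path_rate = 0.1
dpr = [x.item() for x in torch.linspace(0, drop_path_rate, sum(stage_blocks))]
for num_blocks in stage_blocks:
    stage_rates, dpr = dpr[:num_blocks], dpr[num_blocks:]
    print([round(r, 3) for r in stage_rates])
# [0.0, 0.007, 0.013] ... [0.087, 0.093, 0.1]: the very first block gets no
# drop path, the last block of the last stage gets the full rate.
```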
for i, num_blocks in enumerate(self.stage_blocks): + stride = strides[i] + dilation = dilations[i] + res_layer = self.make_res_layer( + block=self.block, + num_blocks=num_blocks, + in_channels=_in_channels, + out_channels=_out_channels, + expansion=self.expansion, + stride=stride, + dilation=dilation, + style=self.style, + avg_down=self.avg_down, + with_cp=with_cp, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + drop_path_rate=dpr[:num_blocks]) + _in_channels = _out_channels + _out_channels *= 2 + dpr = dpr[num_blocks:] + layer_name = f'layer{i + 1}' + self.add_module(layer_name, res_layer) + self.res_layers.append(layer_name) + + self._freeze_stages() + + self.feat_dim = res_layer[-1].out_channels + + def make_res_layer(self, **kwargs): + return ResLayer(**kwargs) + + @property + def norm1(self): + return getattr(self, self.norm1_name) + + def _make_stem_layer(self, in_channels, stem_channels): + if self.deep_stem: + self.stem = nn.Sequential( + ConvModule( + in_channels, + stem_channels // 2, + kernel_size=3, + stride=2, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + inplace=True), + ConvModule( + stem_channels // 2, + stem_channels // 2, + kernel_size=3, + stride=1, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + inplace=True), + ConvModule( + stem_channels // 2, + stem_channels, + kernel_size=3, + stride=1, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + inplace=True)) + else: + self.conv1 = build_conv_layer( + self.conv_cfg, + in_channels, + stem_channels, + kernel_size=7, + stride=2, + padding=3, + bias=False) + self.norm1_name, norm1 = build_norm_layer( + self.norm_cfg, stem_channels, postfix=1) + self.add_module(self.norm1_name, norm1) + self.relu = nn.ReLU(inplace=True) + self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) + + def _freeze_stages(self): + if self.frozen_stages >= 0: + if self.deep_stem: + self.stem.eval() + for param in self.stem.parameters(): + param.requires_grad = False + else: + self.norm1.eval() + for m in [self.conv1, self.norm1]: + for param in m.parameters(): + param.requires_grad = False + + for i in range(1, self.frozen_stages + 1): + m = getattr(self, f'layer{i}') + m.eval() + for param in m.parameters(): + param.requires_grad = False + + def init_weights(self): + super(ResNet, self).init_weights() + + if (isinstance(self.init_cfg, dict) + and self.init_cfg['type'] == 'Pretrained'): + # Suppress zero_init_residual if use pretrained model. + return + + if self.zero_init_residual: + for m in self.modules(): + if isinstance(m, Bottleneck): + constant_init(m.norm3, 0) + elif isinstance(m, BasicBlock): + constant_init(m.norm2, 0) + + def forward(self, x): + if self.deep_stem: + x = self.stem(x) + else: + x = self.conv1(x) + x = self.norm1(x) + x = self.relu(x) + x = self.maxpool(x) + outs = [] + for i, layer_name in enumerate(self.res_layers): + res_layer = getattr(self, layer_name) + x = res_layer(x) + if i in self.out_indices: + outs.append(x) + return tuple(outs) + + def train(self, mode=True): + super(ResNet, self).train(mode) + self._freeze_stages() + if mode and self.norm_eval: + for m in self.modules(): + # trick: eval have effect on BatchNorm only + if isinstance(m, _BatchNorm): + m.eval() + + +@MODELS.register_module() +class ResNetV1c(ResNet): + """ResNetV1c backbone. + + This variant is described in `Bag of Tricks. + `_. + + Compared with default ResNet(ResNetV1b), ResNetV1c replaces the 7x7 conv + in the input stem with three 3x3 convs. 
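A quick sanity check of the freezing logic implemented above (a usage sketch, not library code; it assumes mmpretrain is installed and importable, as in the class docstring example):

```python
import torch
from mmpretrain.models import ResNet

net = ResNet(depth=18, frozen_stages=1, norm_eval=True)
net.train()  # _freeze_stages() and norm_eval are re-applied on every train()

print(all(not p.requires_grad for p in net.conv1.parameters()))   # True (stem)
print(all(not p.requires_grad for p in net.layer1.parameters()))  # True
print(all(p.requires_grad for p in net.layer2.parameters()))      # True

# norm_eval keeps every BatchNorm in eval mode even while training, so the
# running statistics are frozen as well.
bn = torch.nn.modules.batchnorm._BatchNorm
print(all(not m.training for m in net.modules() if isinstance(m, bn)))  # True
```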
+ """ + + def __init__(self, **kwargs): + super(ResNetV1c, self).__init__( + deep_stem=True, avg_down=False, **kwargs) + + +@MODELS.register_module() +class ResNetV1d(ResNet): + """ResNetV1d backbone. + + This variant is described in `Bag of Tricks. + `_. + + Compared with default ResNet(ResNetV1b), ResNetV1d replaces the 7x7 conv in + the input stem with three 3x3 convs. And in the downsampling block, a 2x2 + avg_pool with stride 2 is added before conv, whose stride is changed to 1. + """ + + def __init__(self, **kwargs): + super(ResNetV1d, self).__init__( + deep_stem=True, avg_down=True, **kwargs) diff --git a/mmpretrain/models/backbones/resnet_cifar.py b/mmpretrain/models/backbones/resnet_cifar.py new file mode 100644 index 0000000000000000000000000000000000000000..9f17f92fd76a690ea90977b38ab2ea00345ba903 --- /dev/null +++ b/mmpretrain/models/backbones/resnet_cifar.py @@ -0,0 +1,81 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch.nn as nn +from mmcv.cnn import build_conv_layer, build_norm_layer + +from mmpretrain.registry import MODELS +from .resnet import ResNet + + +@MODELS.register_module() +class ResNet_CIFAR(ResNet): + """ResNet backbone for CIFAR. + + Compared to standard ResNet, it uses `kernel_size=3` and `stride=1` in + conv1, and does not apply MaxPoolinng after stem. It has been proven to + be more efficient than standard ResNet in other public codebase, e.g., + `https://github.com/kuangliu/pytorch-cifar/blob/master/models/resnet.py`. + + Args: + depth (int): Network depth, from {18, 34, 50, 101, 152}. + in_channels (int): Number of input image channels. Default: 3. + stem_channels (int): Output channels of the stem layer. Default: 64. + base_channels (int): Middle channels of the first stage. Default: 64. + num_stages (int): Stages of the network. Default: 4. + strides (Sequence[int]): Strides of the first block of each stage. + Default: ``(1, 2, 2, 2)``. + dilations (Sequence[int]): Dilation of each stage. + Default: ``(1, 1, 1, 1)``. + out_indices (Sequence[int]): Output from which stages. If only one + stage is specified, a single tensor (feature map) is returned, + otherwise multiple stages are specified, a tuple of tensors will + be returned. Default: ``(3, )``. + style (str): `pytorch` or `caffe`. If set to "pytorch", the stride-two + layer is the 3x3 conv layer, otherwise the stride-two layer is + the first 1x1 conv layer. + deep_stem (bool): This network has specific designed stem, thus it is + asserted to be False. + avg_down (bool): Use AvgPool instead of stride conv when + downsampling in the bottleneck. Default: False. + frozen_stages (int): Stages to be frozen (stop grad and set eval mode). + -1 means not freezing any parameters. Default: -1. + conv_cfg (dict | None): The config dict for conv layers. Default: None. + norm_cfg (dict): The config dict for norm layers. + norm_eval (bool): Whether to set norm layers to eval mode, namely, + freeze running stats (mean and var). Note: Effect on Batch Norm + and its variants only. Default: False. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. Default: False. + zero_init_residual (bool): Whether to use zero init for last norm layer + in resblocks to let them behave as identity. Default: True. 
+ """ + + def __init__(self, depth, deep_stem=False, **kwargs): + super(ResNet_CIFAR, self).__init__( + depth, deep_stem=deep_stem, **kwargs) + assert not self.deep_stem, 'ResNet_CIFAR do not support deep_stem' + + def _make_stem_layer(self, in_channels, base_channels): + self.conv1 = build_conv_layer( + self.conv_cfg, + in_channels, + base_channels, + kernel_size=3, + stride=1, + padding=1, + bias=False) + self.norm1_name, norm1 = build_norm_layer( + self.norm_cfg, base_channels, postfix=1) + self.add_module(self.norm1_name, norm1) + self.relu = nn.ReLU(inplace=True) + + def forward(self, x): + x = self.conv1(x) + x = self.norm1(x) + x = self.relu(x) + outs = [] + for i, layer_name in enumerate(self.res_layers): + res_layer = getattr(self, layer_name) + x = res_layer(x) + if i in self.out_indices: + outs.append(x) + return tuple(outs) diff --git a/mmpretrain/models/backbones/resnext.py b/mmpretrain/models/backbones/resnext.py new file mode 100644 index 0000000000000000000000000000000000000000..8858b7d3dffdcb20677e091fba4f5a1084d086a3 --- /dev/null +++ b/mmpretrain/models/backbones/resnext.py @@ -0,0 +1,148 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from mmcv.cnn import build_conv_layer, build_norm_layer + +from mmpretrain.registry import MODELS +from .resnet import Bottleneck as _Bottleneck +from .resnet import ResLayer, ResNet + + +class Bottleneck(_Bottleneck): + """Bottleneck block for ResNeXt. + + Args: + in_channels (int): Input channels of this block. + out_channels (int): Output channels of this block. + groups (int): Groups of conv2. + width_per_group (int): Width per group of conv2. 64x4d indicates + ``groups=64, width_per_group=4`` and 32x8d indicates + ``groups=32, width_per_group=8``. + stride (int): stride of the block. Default: 1 + dilation (int): dilation of convolution. Default: 1 + downsample (nn.Module, optional): downsample operation on identity + branch. Default: None + style (str): `pytorch` or `caffe`. If set to "pytorch", the stride-two + layer is the 3x3 conv layer, otherwise the stride-two layer is + the first 1x1 conv layer. + conv_cfg (dict, optional): dictionary to construct and config conv + layer. Default: None + norm_cfg (dict): dictionary to construct and config norm layer. + Default: dict(type='BN') + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. + """ + + def __init__(self, + in_channels, + out_channels, + base_channels=64, + groups=32, + width_per_group=4, + **kwargs): + super(Bottleneck, self).__init__(in_channels, out_channels, **kwargs) + self.groups = groups + self.width_per_group = width_per_group + + # For ResNet bottleneck, middle channels are determined by expansion + # and out_channels, but for ResNeXt bottleneck, it is determined by + # groups and width_per_group and the stage it is located in. 
+ if groups != 1: + assert self.mid_channels % base_channels == 0 + self.mid_channels = ( + groups * width_per_group * self.mid_channels // base_channels) + + self.norm1_name, norm1 = build_norm_layer( + self.norm_cfg, self.mid_channels, postfix=1) + self.norm2_name, norm2 = build_norm_layer( + self.norm_cfg, self.mid_channels, postfix=2) + self.norm3_name, norm3 = build_norm_layer( + self.norm_cfg, self.out_channels, postfix=3) + + self.conv1 = build_conv_layer( + self.conv_cfg, + self.in_channels, + self.mid_channels, + kernel_size=1, + stride=self.conv1_stride, + bias=False) + self.add_module(self.norm1_name, norm1) + self.conv2 = build_conv_layer( + self.conv_cfg, + self.mid_channels, + self.mid_channels, + kernel_size=3, + stride=self.conv2_stride, + padding=self.dilation, + dilation=self.dilation, + groups=groups, + bias=False) + + self.add_module(self.norm2_name, norm2) + self.conv3 = build_conv_layer( + self.conv_cfg, + self.mid_channels, + self.out_channels, + kernel_size=1, + bias=False) + self.add_module(self.norm3_name, norm3) + + +@MODELS.register_module() +class ResNeXt(ResNet): + """ResNeXt backbone. + + Please refer to the `paper `__ for + details. + + Args: + depth (int): Network depth, from {50, 101, 152}. + groups (int): Groups of conv2 in Bottleneck. Default: 32. + width_per_group (int): Width per group of conv2 in Bottleneck. + Default: 4. + in_channels (int): Number of input image channels. Default: 3. + stem_channels (int): Output channels of the stem layer. Default: 64. + num_stages (int): Stages of the network. Default: 4. + strides (Sequence[int]): Strides of the first block of each stage. + Default: ``(1, 2, 2, 2)``. + dilations (Sequence[int]): Dilation of each stage. + Default: ``(1, 1, 1, 1)``. + out_indices (Sequence[int]): Output from which stages. If only one + stage is specified, a single tensor (feature map) is returned, + otherwise multiple stages are specified, a tuple of tensors will + be returned. Default: ``(3, )``. + style (str): `pytorch` or `caffe`. If set to "pytorch", the stride-two + layer is the 3x3 conv layer, otherwise the stride-two layer is + the first 1x1 conv layer. + deep_stem (bool): Replace 7x7 conv in input stem with 3 3x3 conv. + Default: False. + avg_down (bool): Use AvgPool instead of stride conv when + downsampling in the bottleneck. Default: False. + frozen_stages (int): Stages to be frozen (stop grad and set eval mode). + -1 means not freezing any parameters. Default: -1. + conv_cfg (dict | None): The config dict for conv layers. Default: None. + norm_cfg (dict): The config dict for norm layers. + norm_eval (bool): Whether to set norm layers to eval mode, namely, + freeze running stats (mean and var). Note: Effect on Batch Norm + and its variants only. Default: False. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. Default: False. + zero_init_residual (bool): Whether to use zero init for last norm layer + in resblocks to let them behave as identity. Default: True. 
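The width rescaling mentioned in the Bottleneck comment above reduces to simple arithmetic; for a ResNeXt-50 32x4d first stage it works out as follows (illustrative numbers only):

```python
groups, width_per_group, base_channels = 32, 4, 64
out_channels, expansion = 256, 4

mid_channels = out_channels // expansion                           # 64 in ResNet
resnext_width = groups * width_per_group * mid_channels // base_channels
print(mid_channels, resnext_width)                                 # 64 128
```

Deeper stages double `mid_channels`, so the grouped bottleneck width doubles with them (256 in the second stage, 512 in the third, and so on).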
+ """ + + arch_settings = { + 50: (Bottleneck, (3, 4, 6, 3)), + 101: (Bottleneck, (3, 4, 23, 3)), + 152: (Bottleneck, (3, 8, 36, 3)) + } + + def __init__(self, depth, groups=32, width_per_group=4, **kwargs): + self.groups = groups + self.width_per_group = width_per_group + super(ResNeXt, self).__init__(depth, **kwargs) + + def make_res_layer(self, **kwargs): + return ResLayer( + groups=self.groups, + width_per_group=self.width_per_group, + base_channels=self.base_channels, + **kwargs) diff --git a/mmpretrain/models/backbones/revvit.py b/mmpretrain/models/backbones/revvit.py new file mode 100644 index 0000000000000000000000000000000000000000..f2e6c28c943c83d0580634ac04450ee7ffc5f478 --- /dev/null +++ b/mmpretrain/models/backbones/revvit.py @@ -0,0 +1,671 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import sys + +import numpy as np +import torch +from mmcv.cnn.bricks.drop import build_dropout +from mmcv.cnn.bricks.transformer import FFN, PatchEmbed +from mmengine.model import BaseModule, ModuleList +from mmengine.model.weight_init import trunc_normal_ +from torch import nn +from torch.autograd import Function as Function + +from mmpretrain.models.backbones.base_backbone import BaseBackbone +from mmpretrain.registry import MODELS +from ..utils import (MultiheadAttention, build_norm_layer, resize_pos_embed, + to_2tuple) + + +class RevBackProp(Function): + """Custom Backpropagation function to allow (A) flushing memory in forward + and (B) activation recomputation reversibly in backward for gradient + calculation. + + Inspired by + https://github.com/RobinBruegger/RevTorch/blob/master/revtorch/revtorch.py + """ + + @staticmethod + def forward( + ctx, + x, + layers, + buffer_layers, # List of layer ids for int activation to buffer + ): + """Reversible Forward pass. + + Any intermediate activations from `buffer_layers` are cached in ctx for + forward pass. This is not necessary for standard usecases. Each + reversible layer implements its own forward pass logic. + """ + buffer_layers.sort() + x1, x2 = torch.chunk(x, 2, dim=-1) + intermediate = [] + + for layer in layers: + x1, x2 = layer(x1, x2) + if layer.layer_id in buffer_layers: + intermediate.extend([x1.detach(), x2.detach()]) + + if len(buffer_layers) == 0: + all_tensors = [x1.detach(), x2.detach()] + else: + intermediate = [torch.LongTensor(buffer_layers), *intermediate] + all_tensors = [x1.detach(), x2.detach(), *intermediate] + + ctx.save_for_backward(*all_tensors) + ctx.layers = layers + + return torch.cat([x1, x2], dim=-1) + + @staticmethod + def backward(ctx, dx): + """Reversible Backward pass. + + Any intermediate activations from `buffer_layers` are recovered from + ctx. Each layer implements its own loic for backward pass (both + activation recomputation and grad calculation). 
+ """ + d_x1, d_x2 = torch.chunk(dx, 2, dim=-1) + # retrieve params from ctx for backward + x1, x2, *int_tensors = ctx.saved_tensors + # no buffering + if len(int_tensors) != 0: + buffer_layers = int_tensors[0].tolist() + else: + buffer_layers = [] + + layers = ctx.layers + + for _, layer in enumerate(layers[::-1]): + if layer.layer_id in buffer_layers: + x1, x2, d_x1, d_x2 = layer.backward_pass( + y1=int_tensors[buffer_layers.index(layer.layer_id) * 2 + + 1], + y2=int_tensors[buffer_layers.index(layer.layer_id) * 2 + + 2], + d_y1=d_x1, + d_y2=d_x2, + ) + else: + x1, x2, d_x1, d_x2 = layer.backward_pass( + y1=x1, + y2=x2, + d_y1=d_x1, + d_y2=d_x2, + ) + + dx = torch.cat([d_x1, d_x2], dim=-1) + + del int_tensors + del d_x1, d_x2, x1, x2 + + return dx, None, None + + +class RevTransformerEncoderLayer(BaseModule): + """Reversible Transformer Encoder Layer. + + This module is a building block of Reversible Transformer Encoder, + which support backpropagation without storing activations. + The residual connection is not applied to the FFN layer. + + Args: + embed_dims (int): The feature dimension. + num_heads (int): Parallel attention heads. + feedforward_channels (int): The hidden dimension for FFNs. + drop_rate (float): Probability of an element to be zeroed. + Default: 0.0 + attn_drop_rate (float): The drop out rate for attention layer. + Default: 0.0 + drop_path_rate (float): stochastic depth rate. + Default 0.0 + num_fcs (int): The number of linear in FFN + Default: 2 + qkv_bias (bool): enable bias for qkv if True. + Default: True + act_cfg (dict): The activation config for FFNs. + Default: dict(type='GELU') + norm_cfg (dict): Config dict for normalization layer. + Default: dict(type='LN') + layer_id (int): The layer id of current layer. Used in RevBackProp. + Default: 0 + init_cfg (dict or list[dict], optional): Initialization config dict. 
+ """ + + def __init__(self, + embed_dims: int, + num_heads: int, + feedforward_channels: int, + drop_rate: float = 0., + attn_drop_rate: float = 0., + drop_path_rate: float = 0., + num_fcs: int = 2, + qkv_bias: bool = True, + act_cfg: dict = dict(type='GELU'), + norm_cfg: dict = dict(type='LN'), + layer_id: int = 0, + init_cfg=None): + super(RevTransformerEncoderLayer, self).__init__(init_cfg=init_cfg) + + self.drop_path_cfg = dict(type='DropPath', drop_prob=drop_path_rate) + self.embed_dims = embed_dims + + self.ln1 = build_norm_layer(norm_cfg, self.embed_dims) + + self.attn = MultiheadAttention( + embed_dims=embed_dims, + num_heads=num_heads, + attn_drop=attn_drop_rate, + proj_drop=drop_rate, + qkv_bias=qkv_bias) + + self.ln2 = build_norm_layer(norm_cfg, self.embed_dims) + + self.ffn = FFN( + embed_dims=embed_dims, + feedforward_channels=feedforward_channels, + num_fcs=num_fcs, + ffn_drop=drop_rate, + act_cfg=act_cfg, + add_identity=False) + + self.layer_id = layer_id + self.seeds = {} + + def init_weights(self): + super(RevTransformerEncoderLayer, self).init_weights() + for m in self.ffn.modules(): + if isinstance(m, nn.Linear): + nn.init.xavier_uniform_(m.weight) + nn.init.normal_(m.bias, std=1e-6) + + def seed_cuda(self, key): + """Fix seeds to allow for stochastic elements such as dropout to be + reproduced exactly in activation recomputation in the backward pass.""" + # randomize seeds + # use cuda generator if available + if (hasattr(torch.cuda, 'default_generators') + and len(torch.cuda.default_generators) > 0): + # GPU + device_idx = torch.cuda.current_device() + seed = torch.cuda.default_generators[device_idx].seed() + else: + # CPU + seed = int(torch.seed() % sys.maxsize) + + self.seeds[key] = seed + torch.manual_seed(self.seeds[key]) + + def forward(self, x1, x2): + """ + Implementation of Reversible TransformerEncoderLayer + + ` + x = x + self.attn(self.ln1(x)) + x = self.ffn(self.ln2(x), identity=x) + ` + """ + self.seed_cuda('attn') + # attention output + f_x2 = self.attn(self.ln1(x2)) + # apply droppath on attention output + self.seed_cuda('droppath') + f_x2_dropped = build_dropout(self.drop_path_cfg)(f_x2) + y1 = x1 + f_x2_dropped + + # free memory + if self.training: + del x1 + + # ffn output + self.seed_cuda('ffn') + g_y1 = self.ffn(self.ln2(y1)) + # apply droppath on ffn output + torch.manual_seed(self.seeds['droppath']) + g_y1_dropped = build_dropout(self.drop_path_cfg)(g_y1) + # final output + y2 = x2 + g_y1_dropped + + # free memory + if self.training: + del x2 + + return y1, y2 + + def backward_pass(self, y1, y2, d_y1, d_y2): + """Activation re-compute with the following equation. 
+ + x2 = y2 - g(y1), g = FFN + x1 = y1 - f(x2), f = MSHA + """ + + # temporarily record intermediate activation for G + # and use them for gradient calculation of G + with torch.enable_grad(): + y1.requires_grad = True + + torch.manual_seed(self.seeds['ffn']) + g_y1 = self.ffn(self.ln2(y1)) + + torch.manual_seed(self.seeds['droppath']) + g_y1 = build_dropout(self.drop_path_cfg)(g_y1) + + g_y1.backward(d_y2, retain_graph=True) + + # activate recomputation is by design and not part of + # the computation graph in forward pass + with torch.no_grad(): + x2 = y2 - g_y1 + del g_y1 + + d_y1 = d_y1 + y1.grad + y1.grad = None + + # record F activation and calculate gradients on F + with torch.enable_grad(): + x2.requires_grad = True + + torch.manual_seed(self.seeds['attn']) + f_x2 = self.attn(self.ln1(x2)) + + torch.manual_seed(self.seeds['droppath']) + f_x2 = build_dropout(self.drop_path_cfg)(f_x2) + + f_x2.backward(d_y1, retain_graph=True) + + # propagate reverse computed activations at the + # start of the previous block + with torch.no_grad(): + x1 = y1 - f_x2 + del f_x2, y1 + + d_y2 = d_y2 + x2.grad + + x2.grad = None + x2 = x2.detach() + + return x1, x2, d_y1, d_y2 + + +class TwoStreamFusion(nn.Module): + """A general constructor for neural modules fusing two equal sized tensors + in forward. + + Args: + mode (str): The mode of fusion. Options are 'add', 'max', 'min', + 'avg', 'concat'. + """ + + def __init__(self, mode: str): + super().__init__() + self.mode = mode + + if mode == 'add': + self.fuse_fn = lambda x: torch.stack(x).sum(dim=0) + elif mode == 'max': + self.fuse_fn = lambda x: torch.stack(x).max(dim=0).values + elif mode == 'min': + self.fuse_fn = lambda x: torch.stack(x).min(dim=0).values + elif mode == 'avg': + self.fuse_fn = lambda x: torch.stack(x).mean(dim=0) + elif mode == 'concat': + self.fuse_fn = lambda x: torch.cat(x, dim=-1) + else: + raise NotImplementedError + + def forward(self, x): + # split the tensor into two halves in the channel dimension + x = torch.chunk(x, 2, dim=2) + return self.fuse_fn(x) + + +@MODELS.register_module() +class RevVisionTransformer(BaseBackbone): + """Reversible Vision Transformer. + + A PyTorch implementation of : `Reversible Vision Transformers + `_ # noqa: E501 + + Args: + arch (str | dict): Vision Transformer architecture. If use string, + choose from 'small', 'base', 'large', 'deit-tiny', 'deit-small' + and 'deit-base'. If use dict, it should have below keys: + + - **embed_dims** (int): The dimensions of embedding. + - **num_layers** (int): The number of transformer encoder layers. + - **num_heads** (int): The number of heads in attention modules. + - **feedforward_channels** (int): The hidden dimensions in + feedforward modules. + + Defaults to 'base'. + img_size (int | tuple): The expected input image shape. Because we + support dynamic input shape, just set the argument to the most + common input image shape. Defaults to 224. + patch_size (int | tuple): The patch size in patch embedding. + Defaults to 16. + in_channels (int): The num of input channels. Defaults to 3. + drop_rate (float): Probability of an element to be zeroed. + Defaults to 0. + drop_path_rate (float): stochastic depth rate. Defaults to 0. + qkv_bias (bool): Whether to add bias for qkv in attention modules. + Defaults to True. + norm_cfg (dict): Config dict for normalization layer. + Defaults to ``dict(type='LN')``. + final_norm (bool): Whether to add a additional layer to normalize + final feature map. Defaults to True. + out_type (str): The type of output features. 
Please choose from + + - ``"cls_token"``: The class token tensor with shape (B, C). + - ``"featmap"``: The feature map tensor from the patch tokens + with shape (B, C, H, W). + - ``"avg_featmap"``: The global averaged feature map tensor + with shape (B, C). + - ``"raw"``: The raw feature tensor includes patch tokens and + class tokens with shape (B, L, C). + + Defaults to ``"avg_featmap"``. + with_cls_token (bool): Whether concatenating class token into image + tokens as transformer input. Defaults to False. + frozen_stages (int): Stages to be frozen (stop grad and set eval mode). + -1 means not freezing any parameters. Defaults to -1. + interpolate_mode (str): Select the interpolate mode for position + embeding vector resize. Defaults to "bicubic". + patch_cfg (dict): Configs of patch embeding. Defaults to an empty dict. + layer_cfgs (Sequence | dict): Configs of each transformer layer in + encoder. Defaults to an empty dict. + fusion_mode (str): The fusion mode of transformer layers. + Defaults to 'concat'. + no_custom_backward (bool): Whether to use custom backward. + Defaults to False. + init_cfg (dict, optional): Initialization config dict. + Defaults to None. + """ + arch_zoo = { + **dict.fromkeys( + ['s', 'small'], { + 'embed_dims': 768, + 'num_layers': 8, + 'num_heads': 8, + 'feedforward_channels': 768 * 3, + }), + **dict.fromkeys( + ['b', 'base'], { + 'embed_dims': 768, + 'num_layers': 12, + 'num_heads': 12, + 'feedforward_channels': 3072 + }), + **dict.fromkeys( + ['l', 'large'], { + 'embed_dims': 1024, + 'num_layers': 24, + 'num_heads': 16, + 'feedforward_channels': 4096 + }), + **dict.fromkeys( + ['h', 'huge'], + { + # The same as the implementation in MAE + # + 'embed_dims': 1280, + 'num_layers': 32, + 'num_heads': 16, + 'feedforward_channels': 5120 + }), + **dict.fromkeys( + ['deit-t', 'deit-tiny'], { + 'embed_dims': 192, + 'num_layers': 12, + 'num_heads': 3, + 'feedforward_channels': 192 * 4 + }), + **dict.fromkeys( + ['deit-s', 'deit-small'], { + 'embed_dims': 384, + 'num_layers': 12, + 'num_heads': 6, + 'feedforward_channels': 384 * 4 + }), + **dict.fromkeys( + ['deit-b', 'deit-base'], { + 'embed_dims': 768, + 'num_layers': 12, + 'num_heads': 12, + 'feedforward_channels': 768 * 4 + }), + } + num_extra_tokens = 0 # The official RevViT doesn't have class token + OUT_TYPES = {'raw', 'cls_token', 'featmap', 'avg_featmap'} + + def __init__(self, + arch='base', + img_size=224, + patch_size=16, + in_channels=3, + drop_rate=0., + drop_path_rate=0., + qkv_bias=True, + norm_cfg=dict(type='LN', eps=1e-6), + final_norm=True, + out_type='avg_featmap', + with_cls_token=False, + frozen_stages=-1, + interpolate_mode='bicubic', + patch_cfg=dict(), + layer_cfgs=dict(), + fusion_mode='concat', + no_custom_backward=False, + init_cfg=None): + super(RevVisionTransformer, self).__init__(init_cfg) + + if isinstance(arch, str): + arch = arch.lower() + assert arch in set(self.arch_zoo), \ + f'Arch {arch} is not in default archs {set(self.arch_zoo)}' + self.arch_settings = self.arch_zoo[arch] + else: + essential_keys = { + 'embed_dims', 'num_layers', 'num_heads', 'feedforward_channels' + } + assert isinstance(arch, dict) and essential_keys <= set(arch), \ + f'Custom arch needs a dict with keys {essential_keys}' + self.arch_settings = arch + + self.embed_dims = self.arch_settings['embed_dims'] + self.num_layers = self.arch_settings['num_layers'] + self.img_size = to_2tuple(img_size) + self.no_custom_backward = no_custom_backward + + # Set patch embedding + _patch_cfg = dict( + 
in_channels=in_channels, + input_size=img_size, + embed_dims=self.embed_dims, + conv_type='Conv2d', + kernel_size=patch_size, + stride=patch_size, + ) + _patch_cfg.update(patch_cfg) + self.patch_embed = PatchEmbed(**_patch_cfg) + self.patch_resolution = self.patch_embed.init_out_size + num_patches = self.patch_resolution[0] * self.patch_resolution[1] + + # Set out type + if out_type not in self.OUT_TYPES: + raise ValueError(f'Unsupported `out_type` {out_type}, please ' + f'choose from {self.OUT_TYPES}') + self.out_type = out_type + + # Set cls token + if with_cls_token: + self.cls_token = nn.Parameter(torch.zeros(1, 1, self.embed_dims)) + self.num_extra_tokens = 1 + elif out_type != 'cls_token': + self.cls_token = None + self.num_extra_tokens = 0 + else: + raise ValueError( + 'with_cls_token must be True when `out_type="cls_token"`.') + + # Set position embedding + self.interpolate_mode = interpolate_mode + self.pos_embed = nn.Parameter( + torch.zeros(1, num_patches + self.num_extra_tokens, + self.embed_dims)) + self._register_load_state_dict_pre_hook(self._prepare_pos_embed) + + self.drop_after_pos = nn.Dropout(p=drop_rate) + + # stochastic depth decay rule + dpr = np.linspace(0, drop_path_rate, self.num_layers) + + self.layers = ModuleList() + if isinstance(layer_cfgs, dict): + layer_cfgs = [layer_cfgs] * self.num_layers + for i in range(self.num_layers): + _layer_cfg = dict( + embed_dims=self.embed_dims, + num_heads=self.arch_settings['num_heads'], + feedforward_channels=self. + arch_settings['feedforward_channels'], + drop_rate=drop_rate, + drop_path_rate=dpr[i], + qkv_bias=qkv_bias, + layer_id=i, + norm_cfg=norm_cfg) + _layer_cfg.update(layer_cfgs[i]) + self.layers.append(RevTransformerEncoderLayer(**_layer_cfg)) + + # fusion operation for the final output + self.fusion_layer = TwoStreamFusion(mode=fusion_mode) + + self.frozen_stages = frozen_stages + self.final_norm = final_norm + if final_norm: + self.ln1 = build_norm_layer(norm_cfg, self.embed_dims * 2) + + # freeze stages only when self.frozen_stages > 0 + if self.frozen_stages > 0: + self._freeze_stages() + + def init_weights(self): + super(RevVisionTransformer, self).init_weights() + if not (isinstance(self.init_cfg, dict) + and self.init_cfg['type'] == 'Pretrained'): + trunc_normal_(self.pos_embed, std=0.02) + + def _prepare_pos_embed(self, state_dict, prefix, *args, **kwargs): + name = prefix + 'pos_embed' + if name not in state_dict.keys(): + return + + ckpt_pos_embed_shape = state_dict[name].shape + if self.pos_embed.shape != ckpt_pos_embed_shape: + from mmengine.logging import MMLogger + logger = MMLogger.get_current_instance() + logger.info( + f'Resize the pos_embed shape from {ckpt_pos_embed_shape} ' + f'to {self.pos_embed.shape}.') + + ckpt_pos_embed_shape = to_2tuple( + int(np.sqrt(ckpt_pos_embed_shape[1] - self.num_extra_tokens))) + pos_embed_shape = self.patch_embed.init_out_size + + state_dict[name] = resize_pos_embed(state_dict[name], + ckpt_pos_embed_shape, + pos_embed_shape, + self.interpolate_mode, + self.num_extra_tokens) + + @staticmethod + def resize_pos_embed(*args, **kwargs): + """Interface for backward-compatibility.""" + return resize_pos_embed(*args, **kwargs) + + def _freeze_stages(self): + # freeze position embedding + self.pos_embed.requires_grad = False + # set dropout to eval model + self.drop_after_pos.eval() + # freeze patch embedding + self.patch_embed.eval() + for param in self.patch_embed.parameters(): + param.requires_grad = False + # freeze cls_token + if self.cls_token is not None: + 
self.cls_token.requires_grad = False + # freeze layers + for i in range(1, self.frozen_stages + 1): + m = self.layers[i - 1] + m.eval() + for param in m.parameters(): + param.requires_grad = False + # freeze the last layer norm + if self.frozen_stages == len(self.layers) and self.final_norm: + self.ln1.eval() + for param in self.ln1.parameters(): + param.requires_grad = False + + def forward(self, x): + B = x.shape[0] + x, patch_resolution = self.patch_embed(x) + + if self.cls_token is not None: + cls_token = self.cls_token.expand(B, -1, -1) + x = torch.cat((cls_token, x), dim=1) + + x = x + resize_pos_embed( + self.pos_embed, + self.patch_resolution, + patch_resolution, + mode=self.interpolate_mode, + num_extra_tokens=self.num_extra_tokens) + x = self.drop_after_pos(x) + + x = torch.cat([x, x], dim=-1) + + # forward with different conditions + if not self.training or self.no_custom_backward: + # in eval/inference model + executing_fn = RevVisionTransformer._forward_vanilla_bp + else: + # use custom backward when self.training=True. + executing_fn = RevBackProp.apply + + x = executing_fn(x, self.layers, []) + + if self.final_norm: + x = self.ln1(x) + x = self.fusion_layer(x) + + return (self._format_output(x, patch_resolution), ) + + @staticmethod + def _forward_vanilla_bp(hidden_state, layers, buffer=[]): + """Using reversible layers without reversible backpropagation. + + Debugging purpose only. Activated with self.no_custom_backward + """ + # split into ffn state(ffn_out) and attention output(attn_out) + ffn_out, attn_out = torch.chunk(hidden_state, 2, dim=-1) + del hidden_state + + for _, layer in enumerate(layers): + attn_out, ffn_out = layer(attn_out, ffn_out) + + return torch.cat([attn_out, ffn_out], dim=-1) + + def _format_output(self, x, hw): + if self.out_type == 'raw': + return x + if self.out_type == 'cls_token': + return x[:, 0] + + patch_token = x[:, self.num_extra_tokens:] + if self.out_type == 'featmap': + B = x.size(0) + # (B, N, C) -> (B, H, W, C) -> (B, C, H, W) + return patch_token.reshape(B, *hw, -1).permute(0, 3, 1, 2) + if self.out_type == 'avg_featmap': + return patch_token.mean(dim=1) diff --git a/mmpretrain/models/backbones/riformer.py b/mmpretrain/models/backbones/riformer.py new file mode 100644 index 0000000000000000000000000000000000000000..ad7cb4d37c2ac6f1479fd3c533c456f3b0a0c45e --- /dev/null +++ b/mmpretrain/models/backbones/riformer.py @@ -0,0 +1,390 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Sequence + +import torch +import torch.nn as nn +from mmcv.cnn.bricks import DropPath, build_norm_layer +from mmengine.model import BaseModule + +from mmpretrain.registry import MODELS +from .base_backbone import BaseBackbone +from .poolformer import Mlp, PatchEmbed + + +class Affine(nn.Module): + """Affine Transformation module. + + Args: + in_features (int): Input dimension. + """ + + def __init__(self, in_features): + super().__init__() + self.affine = nn.Conv2d( + in_features, + in_features, + kernel_size=1, + stride=1, + padding=0, + groups=in_features, + bias=True) + + def forward(self, x): + return self.affine(x) - x + + +class RIFormerBlock(BaseModule): + """RIFormer Block. + + Args: + dim (int): Embedding dim. + mlp_ratio (float): Mlp expansion ratio. Defaults to 4. + norm_cfg (dict): The config dict for norm layers. + Defaults to ``dict(type='GN', num_groups=1)``. + act_cfg (dict): The config dict for activation between pointwise + convolution. Defaults to ``dict(type='GELU')``. + drop (float): Dropout rate. Defaults to 0. 
+ drop_path (float): Stochastic depth rate. Defaults to 0. + layer_scale_init_value (float): Init value for Layer Scale. + Defaults to 1e-5. + deploy (bool): Whether to switch the model structure to + deployment mode. Default: False. + """ + + def __init__(self, + dim, + mlp_ratio=4., + norm_cfg=dict(type='GN', num_groups=1), + act_cfg=dict(type='GELU'), + drop=0., + drop_path=0., + layer_scale_init_value=1e-5, + deploy=False): + + super().__init__() + + if deploy: + self.norm_reparam = build_norm_layer(norm_cfg, dim)[1] + else: + self.norm1 = build_norm_layer(norm_cfg, dim)[1] + self.token_mixer = Affine(in_features=dim) + self.norm2 = build_norm_layer(norm_cfg, dim)[1] + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = Mlp( + in_features=dim, + hidden_features=mlp_hidden_dim, + act_cfg=act_cfg, + drop=drop) + + # The following two techniques are useful to train deep RIFormers. + self.drop_path = DropPath(drop_path) if drop_path > 0. \ + else nn.Identity() + self.layer_scale_1 = nn.Parameter( + layer_scale_init_value * torch.ones((dim)), requires_grad=True) + self.layer_scale_2 = nn.Parameter( + layer_scale_init_value * torch.ones((dim)), requires_grad=True) + self.norm_cfg = norm_cfg + self.dim = dim + self.deploy = deploy + + def forward(self, x): + if hasattr(self, 'norm_reparam'): + x = x + self.drop_path( + self.layer_scale_1.unsqueeze(-1).unsqueeze(-1) * + self.norm_reparam(x)) + x = x + self.drop_path( + self.layer_scale_2.unsqueeze(-1).unsqueeze(-1) * + self.mlp(self.norm2(x))) + else: + x = x + self.drop_path( + self.layer_scale_1.unsqueeze(-1).unsqueeze(-1) * + self.token_mixer(self.norm1(x))) + x = x + self.drop_path( + self.layer_scale_2.unsqueeze(-1).unsqueeze(-1) * + self.mlp(self.norm2(x))) + return x + + def fuse_affine(self, norm, token_mixer): + gamma_affn = token_mixer.affine.weight.reshape(-1) + gamma_affn = gamma_affn - torch.ones_like(gamma_affn) + beta_affn = token_mixer.affine.bias + gamma_ln = norm.weight + beta_ln = norm.bias + return (gamma_ln * gamma_affn), (beta_ln * gamma_affn + beta_affn) + + def get_equivalent_scale_bias(self): + eq_s, eq_b = self.fuse_affine(self.norm1, self.token_mixer) + return eq_s, eq_b + + def switch_to_deploy(self): + if self.deploy: + return + eq_s, eq_b = self.get_equivalent_scale_bias() + self.norm_reparam = build_norm_layer(self.norm_cfg, self.dim)[1] + self.norm_reparam.weight.data = eq_s + self.norm_reparam.bias.data = eq_b + self.__delattr__('norm1') + if hasattr(self, 'token_mixer'): + self.__delattr__('token_mixer') + self.deploy = True + + +def basic_blocks(dim, + index, + layers, + mlp_ratio=4., + norm_cfg=dict(type='GN', num_groups=1), + act_cfg=dict(type='GELU'), + drop_rate=.0, + drop_path_rate=0., + layer_scale_init_value=1e-5, + deploy=False): + """generate RIFormer blocks for a stage.""" + blocks = [] + for block_idx in range(layers[index]): + block_dpr = drop_path_rate * (block_idx + sum(layers[:index])) / ( + sum(layers) - 1) + blocks.append( + RIFormerBlock( + dim, + mlp_ratio=mlp_ratio, + norm_cfg=norm_cfg, + act_cfg=act_cfg, + drop=drop_rate, + drop_path=block_dpr, + layer_scale_init_value=layer_scale_init_value, + deploy=deploy, + )) + blocks = nn.Sequential(*blocks) + + return blocks + + +@MODELS.register_module() +class RIFormer(BaseBackbone): + """RIFormer. + + A PyTorch implementation of RIFormer introduced by: + `RIFormer: Keep Your Vision Backbone Effective But Removing Token Mixer `_ + + Args: + arch (str | dict): The model's architecture. 
If string, it should be + one of architecture in ``RIFormer.arch_settings``. And if dict, it + should include the following two keys: + + - layers (list[int]): Number of blocks at each stage. + - embed_dims (list[int]): The number of channels at each stage. + - mlp_ratios (list[int]): Expansion ratio of MLPs. + - layer_scale_init_value (float): Init value for Layer Scale. + + Defaults to 'S12'. + + norm_cfg (dict): The config dict for norm layers. + Defaults to ``dict(type='LN2d', eps=1e-6)``. + act_cfg (dict): The config dict for activation between pointwise + convolution. Defaults to ``dict(type='GELU')``. + in_patch_size (int): The patch size of/? input image patch embedding. + Defaults to 7. + in_stride (int): The stride of input image patch embedding. + Defaults to 4. + in_pad (int): The padding of input image patch embedding. + Defaults to 2. + down_patch_size (int): The patch size of downsampling patch embedding. + Defaults to 3. + down_stride (int): The stride of downsampling patch embedding. + Defaults to 2. + down_pad (int): The padding of downsampling patch embedding. + Defaults to 1. + drop_rate (float): Dropout rate. Defaults to 0. + drop_path_rate (float): Stochastic depth rate. Defaults to 0. + out_indices (Sequence | int): Output from which network position. + Index 0-6 respectively corresponds to + [stage1, downsampling, stage2, downsampling, stage3, downsampling, stage4] + Defaults to -1, means the last stage. + frozen_stages (int): Stages to be frozen (all param fixed). + Defaults to -1, which means not freezing any parameters. + deploy (bool): Whether to switch the model structure to + deployment mode. Default: False. + init_cfg (dict, optional): Initialization config dict + """ # noqa: E501 + + # --layers: [x,x,x,x], numbers of layers for the four stages + # --embed_dims, --mlp_ratios: + # embedding dims and mlp ratios for the four stages + # --downsamples: flags to apply downsampling or not in four blocks + arch_settings = { + 's12': { + 'layers': [2, 2, 6, 2], + 'embed_dims': [64, 128, 320, 512], + 'mlp_ratios': [4, 4, 4, 4], + 'layer_scale_init_value': 1e-5, + }, + 's24': { + 'layers': [4, 4, 12, 4], + 'embed_dims': [64, 128, 320, 512], + 'mlp_ratios': [4, 4, 4, 4], + 'layer_scale_init_value': 1e-5, + }, + 's36': { + 'layers': [6, 6, 18, 6], + 'embed_dims': [64, 128, 320, 512], + 'mlp_ratios': [4, 4, 4, 4], + 'layer_scale_init_value': 1e-6, + }, + 'm36': { + 'layers': [6, 6, 18, 6], + 'embed_dims': [96, 192, 384, 768], + 'mlp_ratios': [4, 4, 4, 4], + 'layer_scale_init_value': 1e-6, + }, + 'm48': { + 'layers': [8, 8, 24, 8], + 'embed_dims': [96, 192, 384, 768], + 'mlp_ratios': [4, 4, 4, 4], + 'layer_scale_init_value': 1e-6, + }, + } + + def __init__(self, + arch='s12', + in_channels=3, + norm_cfg=dict(type='GN', num_groups=1), + act_cfg=dict(type='GELU'), + in_patch_size=7, + in_stride=4, + in_pad=2, + down_patch_size=3, + down_stride=2, + down_pad=1, + drop_rate=0., + drop_path_rate=0., + out_indices=-1, + frozen_stages=-1, + init_cfg=None, + deploy=False): + + super().__init__(init_cfg=init_cfg) + + if isinstance(arch, str): + assert arch in self.arch_settings, \ + f'Unavailable arch, please choose from ' \ + f'({set(self.arch_settings)}) or pass a dict.' + arch = self.arch_settings[arch] + elif isinstance(arch, dict): + assert 'layers' in arch and 'embed_dims' in arch, \ + f'The arch dict must have "layers" and "embed_dims", ' \ + f'but got {list(arch.keys())}.' 
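The `fuse_affine`/`switch_to_deploy` re-parameterisation defined earlier folds the Affine token mixer and its preceding norm into a single norm layer. The sketch below checks that algebra on a toy example, under the assumption of a `GroupNorm(1, C)` norm and random tensor values (an illustration, not library code):

```python
import torch
import torch.nn as nn

C = 8
x = torch.rand(2, C, 7, 7)
norm = nn.GroupNorm(1, C)
nn.init.normal_(norm.weight)
nn.init.normal_(norm.bias)
mixer = nn.Conv2d(C, C, 1, groups=C, bias=True)     # the Affine module's conv

# training-time path: Affine(norm(x)) = conv(norm(x)) - norm(x)
ref = mixer(norm(x)) - norm(x)

# deploy-time path: fold (w - 1) and the mixer bias into the norm's affine
with torch.no_grad():
    w = mixer.weight.reshape(-1)
    fused = nn.GroupNorm(1, C)
    fused.weight.copy_(norm.weight * (w - 1))
    fused.bias.copy_(norm.bias * (w - 1) + mixer.bias)
print(torch.allclose(ref, fused(x), atol=1e-6))      # True
```

This is why `switch_to_deploy` only keeps `norm_reparam` and deletes `norm1` and `token_mixer` after copying the fused scale and bias.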
+ + layers = arch['layers'] + embed_dims = arch['embed_dims'] + mlp_ratios = arch['mlp_ratios'] \ + if 'mlp_ratios' in arch else [4, 4, 4, 4] + layer_scale_init_value = arch['layer_scale_init_value'] \ + if 'layer_scale_init_value' in arch else 1e-5 + + self.patch_embed = PatchEmbed( + patch_size=in_patch_size, + stride=in_stride, + padding=in_pad, + in_chans=in_channels, + embed_dim=embed_dims[0]) + + # set the main block in network + network = [] + for i in range(len(layers)): + stage = basic_blocks( + embed_dims[i], + i, + layers, + mlp_ratio=mlp_ratios[i], + norm_cfg=norm_cfg, + act_cfg=act_cfg, + drop_rate=drop_rate, + drop_path_rate=drop_path_rate, + layer_scale_init_value=layer_scale_init_value, + deploy=deploy) + network.append(stage) + if i >= len(layers) - 1: + break + if embed_dims[i] != embed_dims[i + 1]: + # downsampling between two stages + network.append( + PatchEmbed( + patch_size=down_patch_size, + stride=down_stride, + padding=down_pad, + in_chans=embed_dims[i], + embed_dim=embed_dims[i + 1])) + + self.network = nn.ModuleList(network) + + if isinstance(out_indices, int): + out_indices = [out_indices] + assert isinstance(out_indices, Sequence), \ + f'"out_indices" must by a sequence or int, ' \ + f'get {type(out_indices)} instead.' + for i, index in enumerate(out_indices): + if index < 0: + out_indices[i] = 7 + index + assert out_indices[i] >= 0, f'Invalid out_indices {index}' + self.out_indices = out_indices + if self.out_indices: + for i_layer in self.out_indices: + layer = build_norm_layer(norm_cfg, + embed_dims[(i_layer + 1) // 2])[1] + layer_name = f'norm{i_layer}' + self.add_module(layer_name, layer) + + self.frozen_stages = frozen_stages + self._freeze_stages() + self.deploy = deploy + + def forward_embeddings(self, x): + x = self.patch_embed(x) + return x + + def forward_tokens(self, x): + outs = [] + for idx, block in enumerate(self.network): + x = block(x) + if idx in self.out_indices: + norm_layer = getattr(self, f'norm{idx}') + x_out = norm_layer(x) + outs.append(x_out) + return tuple(outs) + + def forward(self, x): + # input embedding + x = self.forward_embeddings(x) + # through backbone + x = self.forward_tokens(x) + return x + + def _freeze_stages(self): + if self.frozen_stages >= 0: + self.patch_embed.eval() + for param in self.patch_embed.parameters(): + param.requires_grad = False + + for i in range(0, self.frozen_stages + 1): + # Include both block and downsample layer. + module = self.network[i] + module.eval() + for param in module.parameters(): + param.requires_grad = False + if i in self.out_indices: + norm_layer = getattr(self, f'norm{i}') + norm_layer.eval() + for param in norm_layer.parameters(): + param.requires_grad = False + + def train(self, mode=True): + super(RIFormer, self).train(mode) + self._freeze_stages() + return self + + def switch_to_deploy(self): + for m in self.modules(): + if isinstance(m, RIFormerBlock): + m.switch_to_deploy() + self.deploy = True diff --git a/mmpretrain/models/backbones/seresnet.py b/mmpretrain/models/backbones/seresnet.py new file mode 100644 index 0000000000000000000000000000000000000000..4437c17fa06d62f57ac18a31967a35b4f44f190f --- /dev/null +++ b/mmpretrain/models/backbones/seresnet.py @@ -0,0 +1,125 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch.utils.checkpoint as cp + +from mmpretrain.registry import MODELS +from ..utils.se_layer import SELayer +from .resnet import Bottleneck, ResLayer, ResNet + + +class SEBottleneck(Bottleneck): + """SEBottleneck block for SEResNet. 
+ + Args: + in_channels (int): The input channels of the SEBottleneck block. + out_channels (int): The output channel of the SEBottleneck block. + se_ratio (int): Squeeze ratio in SELayer. Default: 16 + """ + + def __init__(self, in_channels, out_channels, se_ratio=16, **kwargs): + super(SEBottleneck, self).__init__(in_channels, out_channels, **kwargs) + self.se_layer = SELayer(out_channels, ratio=se_ratio) + + def forward(self, x): + + def _inner_forward(x): + identity = x + + out = self.conv1(x) + out = self.norm1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.norm2(out) + out = self.relu(out) + + out = self.conv3(out) + out = self.norm3(out) + + out = self.se_layer(out) + + if self.downsample is not None: + identity = self.downsample(x) + + out += identity + + return out + + if self.with_cp and x.requires_grad: + out = cp.checkpoint(_inner_forward, x) + else: + out = _inner_forward(x) + + out = self.relu(out) + + return out + + +@MODELS.register_module() +class SEResNet(ResNet): + """SEResNet backbone. + + Please refer to the `paper `__ for + details. + + Args: + depth (int): Network depth, from {50, 101, 152}. + se_ratio (int): Squeeze ratio in SELayer. Default: 16. + in_channels (int): Number of input image channels. Default: 3. + stem_channels (int): Output channels of the stem layer. Default: 64. + num_stages (int): Stages of the network. Default: 4. + strides (Sequence[int]): Strides of the first block of each stage. + Default: ``(1, 2, 2, 2)``. + dilations (Sequence[int]): Dilation of each stage. + Default: ``(1, 1, 1, 1)``. + out_indices (Sequence[int]): Output from which stages. If only one + stage is specified, a single tensor (feature map) is returned, + otherwise multiple stages are specified, a tuple of tensors will + be returned. Default: ``(3, )``. + style (str): `pytorch` or `caffe`. If set to "pytorch", the stride-two + layer is the 3x3 conv layer, otherwise the stride-two layer is + the first 1x1 conv layer. + deep_stem (bool): Replace 7x7 conv in input stem with 3 3x3 conv. + Default: False. + avg_down (bool): Use AvgPool instead of stride conv when + downsampling in the bottleneck. Default: False. + frozen_stages (int): Stages to be frozen (stop grad and set eval mode). + -1 means not freezing any parameters. Default: -1. + conv_cfg (dict | None): The config dict for conv layers. Default: None. + norm_cfg (dict): The config dict for norm layers. + norm_eval (bool): Whether to set norm layers to eval mode, namely, + freeze running stats (mean and var). Note: Effect on Batch Norm + and its variants only. Default: False. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. Default: False. + zero_init_residual (bool): Whether to use zero init for last norm layer + in resblocks to let them behave as identity. Default: True. + + Example: + >>> from mmpretrain.models import SEResNet + >>> import torch + >>> self = SEResNet(depth=50) + >>> self.eval() + >>> inputs = torch.rand(1, 3, 224, 224) + >>> level_outputs = self.forward(inputs) + >>> for level_out in level_outputs: + ... 
print(tuple(level_out.shape)) + (1, 64, 56, 56) + (1, 128, 28, 28) + (1, 256, 14, 14) + (1, 512, 7, 7) + """ + + arch_settings = { + 50: (SEBottleneck, (3, 4, 6, 3)), + 101: (SEBottleneck, (3, 4, 23, 3)), + 152: (SEBottleneck, (3, 8, 36, 3)) + } + + def __init__(self, depth, se_ratio=16, **kwargs): + if depth not in self.arch_settings: + raise KeyError(f'invalid depth {depth} for SEResNet') + self.se_ratio = se_ratio + super(SEResNet, self).__init__(depth, **kwargs) + + def make_res_layer(self, **kwargs): + return ResLayer(se_ratio=self.se_ratio, **kwargs) diff --git a/mmpretrain/models/backbones/seresnext.py b/mmpretrain/models/backbones/seresnext.py new file mode 100644 index 0000000000000000000000000000000000000000..6a2838074225930795d6d8ad70ba067b6ad4c2da --- /dev/null +++ b/mmpretrain/models/backbones/seresnext.py @@ -0,0 +1,155 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from mmcv.cnn import build_conv_layer, build_norm_layer + +from mmpretrain.registry import MODELS +from .resnet import ResLayer +from .seresnet import SEBottleneck as _SEBottleneck +from .seresnet import SEResNet + + +class SEBottleneck(_SEBottleneck): + """SEBottleneck block for SEResNeXt. + + Args: + in_channels (int): Input channels of this block. + out_channels (int): Output channels of this block. + base_channels (int): Middle channels of the first stage. Default: 64. + groups (int): Groups of conv2. + width_per_group (int): Width per group of conv2. 64x4d indicates + ``groups=64, width_per_group=4`` and 32x8d indicates + ``groups=32, width_per_group=8``. + stride (int): stride of the block. Default: 1 + dilation (int): dilation of convolution. Default: 1 + downsample (nn.Module, optional): downsample operation on identity + branch. Default: None + se_ratio (int): Squeeze ratio in SELayer. Default: 16 + style (str): `pytorch` or `caffe`. If set to "pytorch", the stride-two + layer is the 3x3 conv layer, otherwise the stride-two layer is + the first 1x1 conv layer. + conv_cfg (dict, optional): dictionary to construct and config conv + layer. Default: None + norm_cfg (dict): dictionary to construct and config norm layer. + Default: dict(type='BN') + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. + """ + + def __init__(self, + in_channels, + out_channels, + base_channels=64, + groups=32, + width_per_group=4, + se_ratio=16, + **kwargs): + super(SEBottleneck, self).__init__(in_channels, out_channels, se_ratio, + **kwargs) + self.groups = groups + self.width_per_group = width_per_group + + # We follow the same rational of ResNext to compute mid_channels. + # For SEResNet bottleneck, middle channels are determined by expansion + # and out_channels, but for SEResNeXt bottleneck, it is determined by + # groups and width_per_group and the stage it is located in. 
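+        # Worked example (illustrative, not in the original code): for a
+        # 32x4d SEResNeXt-50 block with out_channels=256, the inherited
+        # bottleneck sets mid_channels to 64 (assuming the usual expansion
+        # of 4), and the branch below rescales it to
+        # 32 * 4 * 64 // 64 = 128 grouped channels.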
+ if groups != 1: + assert self.mid_channels % base_channels == 0 + self.mid_channels = ( + groups * width_per_group * self.mid_channels // base_channels) + + self.norm1_name, norm1 = build_norm_layer( + self.norm_cfg, self.mid_channels, postfix=1) + self.norm2_name, norm2 = build_norm_layer( + self.norm_cfg, self.mid_channels, postfix=2) + self.norm3_name, norm3 = build_norm_layer( + self.norm_cfg, self.out_channels, postfix=3) + + self.conv1 = build_conv_layer( + self.conv_cfg, + self.in_channels, + self.mid_channels, + kernel_size=1, + stride=self.conv1_stride, + bias=False) + self.add_module(self.norm1_name, norm1) + self.conv2 = build_conv_layer( + self.conv_cfg, + self.mid_channels, + self.mid_channels, + kernel_size=3, + stride=self.conv2_stride, + padding=self.dilation, + dilation=self.dilation, + groups=groups, + bias=False) + + self.add_module(self.norm2_name, norm2) + self.conv3 = build_conv_layer( + self.conv_cfg, + self.mid_channels, + self.out_channels, + kernel_size=1, + bias=False) + self.add_module(self.norm3_name, norm3) + + +@MODELS.register_module() +class SEResNeXt(SEResNet): + """SEResNeXt backbone. + + Please refer to the `paper `__ for + details. + + Args: + depth (int): Network depth, from {50, 101, 152}. + groups (int): Groups of conv2 in Bottleneck. Default: 32. + width_per_group (int): Width per group of conv2 in Bottleneck. + Default: 4. + se_ratio (int): Squeeze ratio in SELayer. Default: 16. + in_channels (int): Number of input image channels. Default: 3. + stem_channels (int): Output channels of the stem layer. Default: 64. + num_stages (int): Stages of the network. Default: 4. + strides (Sequence[int]): Strides of the first block of each stage. + Default: ``(1, 2, 2, 2)``. + dilations (Sequence[int]): Dilation of each stage. + Default: ``(1, 1, 1, 1)``. + out_indices (Sequence[int]): Output from which stages. If only one + stage is specified, a single tensor (feature map) is returned, + otherwise multiple stages are specified, a tuple of tensors will + be returned. Default: ``(3, )``. + style (str): `pytorch` or `caffe`. If set to "pytorch", the stride-two + layer is the 3x3 conv layer, otherwise the stride-two layer is + the first 1x1 conv layer. + deep_stem (bool): Replace 7x7 conv in input stem with 3 3x3 conv. + Default: False. + avg_down (bool): Use AvgPool instead of stride conv when + downsampling in the bottleneck. Default: False. + frozen_stages (int): Stages to be frozen (stop grad and set eval mode). + -1 means not freezing any parameters. Default: -1. + conv_cfg (dict | None): The config dict for conv layers. Default: None. + norm_cfg (dict): The config dict for norm layers. + norm_eval (bool): Whether to set norm layers to eval mode, namely, + freeze running stats (mean and var). Note: Effect on Batch Norm + and its variants only. Default: False. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. Default: False. + zero_init_residual (bool): Whether to use zero init for last norm layer + in resblocks to let them behave as identity. Default: True. 
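+
+    Example:
+        >>> # Illustrative usage sketch (not from the original docstring); the
+        >>> # printed shape assumes the default ``out_indices=(3, )``.
+        >>> from mmpretrain.models import SEResNeXt
+        >>> import torch
+        >>> self = SEResNeXt(depth=50, groups=32, width_per_group=4)
+        >>> self.eval()
+        >>> inputs = torch.rand(1, 3, 224, 224)
+        >>> level_outputs = self.forward(inputs)
+        >>> for level_out in level_outputs:
+        ...     print(tuple(level_out.shape))
+        (1, 2048, 7, 7)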
+ """ + + arch_settings = { + 50: (SEBottleneck, (3, 4, 6, 3)), + 101: (SEBottleneck, (3, 4, 23, 3)), + 152: (SEBottleneck, (3, 8, 36, 3)) + } + + def __init__(self, depth, groups=32, width_per_group=4, **kwargs): + self.groups = groups + self.width_per_group = width_per_group + super(SEResNeXt, self).__init__(depth, **kwargs) + + def make_res_layer(self, **kwargs): + return ResLayer( + groups=self.groups, + width_per_group=self.width_per_group, + base_channels=self.base_channels, + **kwargs) diff --git a/mmpretrain/models/backbones/shufflenet_v1.py b/mmpretrain/models/backbones/shufflenet_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..2cc3617f93b82fa5e37fa2bb5b47d93e6bd9a58f --- /dev/null +++ b/mmpretrain/models/backbones/shufflenet_v1.py @@ -0,0 +1,321 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +import torch.nn as nn +import torch.utils.checkpoint as cp +from mmcv.cnn import ConvModule, build_activation_layer +from mmengine.model import BaseModule +from mmengine.model.weight_init import constant_init, normal_init +from torch.nn.modules.batchnorm import _BatchNorm + +from mmpretrain.models.utils import channel_shuffle, make_divisible +from mmpretrain.registry import MODELS +from .base_backbone import BaseBackbone + + +class ShuffleUnit(BaseModule): + """ShuffleUnit block. + + ShuffleNet unit with pointwise group convolution (GConv) and channel + shuffle. + + Args: + in_channels (int): The input channels of the ShuffleUnit. + out_channels (int): The output channels of the ShuffleUnit. + groups (int): The number of groups to be used in grouped 1x1 + convolutions in each ShuffleUnit. Default: 3 + first_block (bool): Whether it is the first ShuffleUnit of a + sequential ShuffleUnits. Default: True, which means not using the + grouped 1x1 convolution. + combine (str): The ways to combine the input and output + branches. Default: 'add'. + conv_cfg (dict, optional): Config dict for convolution layer. + Default: None, which means using conv2d. + norm_cfg (dict): Config dict for normalization layer. + Default: dict(type='BN'). + act_cfg (dict): Config dict for activation layer. + Default: dict(type='ReLU'). + with_cp (bool): Use checkpoint or not. Using checkpoint + will save some memory while slowing down the training speed. + Default: False. + + Returns: + Tensor: The output tensor. + """ + + def __init__(self, + in_channels, + out_channels, + groups=3, + first_block=True, + combine='add', + conv_cfg=None, + norm_cfg=dict(type='BN'), + act_cfg=dict(type='ReLU'), + with_cp=False): + super(ShuffleUnit, self).__init__() + self.in_channels = in_channels + self.out_channels = out_channels + self.first_block = first_block + self.combine = combine + self.groups = groups + self.bottleneck_channels = self.out_channels // 4 + self.with_cp = with_cp + + if self.combine == 'add': + self.depthwise_stride = 1 + self._combine_func = self._add + assert in_channels == out_channels, ( + 'in_channels must be equal to out_channels when combine ' + 'is add') + elif self.combine == 'concat': + self.depthwise_stride = 2 + self._combine_func = self._concat + self.out_channels -= self.in_channels + self.avgpool = nn.AvgPool2d(kernel_size=3, stride=2, padding=1) + else: + raise ValueError(f'Cannot combine tensors with {self.combine}. 
' + 'Only "add" and "concat" are supported') + + self.first_1x1_groups = 1 if first_block else self.groups + self.g_conv_1x1_compress = ConvModule( + in_channels=self.in_channels, + out_channels=self.bottleneck_channels, + kernel_size=1, + groups=self.first_1x1_groups, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg) + + self.depthwise_conv3x3_bn = ConvModule( + in_channels=self.bottleneck_channels, + out_channels=self.bottleneck_channels, + kernel_size=3, + stride=self.depthwise_stride, + padding=1, + groups=self.bottleneck_channels, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=None) + + self.g_conv_1x1_expand = ConvModule( + in_channels=self.bottleneck_channels, + out_channels=self.out_channels, + kernel_size=1, + groups=self.groups, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=None) + + self.act = build_activation_layer(act_cfg) + + @staticmethod + def _add(x, out): + # residual connection + return x + out + + @staticmethod + def _concat(x, out): + # concatenate along channel axis + return torch.cat((x, out), 1) + + def forward(self, x): + + def _inner_forward(x): + residual = x + + out = self.g_conv_1x1_compress(x) + out = self.depthwise_conv3x3_bn(out) + + if self.groups > 1: + out = channel_shuffle(out, self.groups) + + out = self.g_conv_1x1_expand(out) + + if self.combine == 'concat': + residual = self.avgpool(residual) + out = self.act(out) + out = self._combine_func(residual, out) + else: + out = self._combine_func(residual, out) + out = self.act(out) + return out + + if self.with_cp and x.requires_grad: + out = cp.checkpoint(_inner_forward, x) + else: + out = _inner_forward(x) + + return out + + +@MODELS.register_module() +class ShuffleNetV1(BaseBackbone): + """ShuffleNetV1 backbone. + + Args: + groups (int): The number of groups to be used in grouped 1x1 + convolutions in each ShuffleUnit. Default: 3. + widen_factor (float): Width multiplier - adjusts the number + of channels in each layer by this amount. Default: 1.0. + out_indices (Sequence[int]): Output from which stages. + Default: (2, ) + frozen_stages (int): Stages to be frozen (all param fixed). + Default: -1, which means not freezing any parameters. + conv_cfg (dict, optional): Config dict for convolution layer. + Default: None, which means using conv2d. + norm_cfg (dict): Config dict for normalization layer. + Default: dict(type='BN'). + act_cfg (dict): Config dict for activation layer. + Default: dict(type='ReLU'). + norm_eval (bool): Whether to set norm layers to eval mode, namely, + freeze running stats (mean and var). Note: Effect on Batch Norm + and its variants only. Default: False. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. Default: False. + """ + + def __init__(self, + groups=3, + widen_factor=1.0, + out_indices=(2, ), + frozen_stages=-1, + conv_cfg=None, + norm_cfg=dict(type='BN'), + act_cfg=dict(type='ReLU'), + norm_eval=False, + with_cp=False, + init_cfg=None): + super(ShuffleNetV1, self).__init__(init_cfg) + self.init_cfg = init_cfg + self.stage_blocks = [4, 8, 4] + self.groups = groups + + for index in out_indices: + if index not in range(0, 3): + raise ValueError('the item in out_indices must in ' + f'range(0, 3). But received {index}') + + if frozen_stages not in range(-1, 3): + raise ValueError('frozen_stages must be in range(-1, 3). 
' + f'But received {frozen_stages}') + self.out_indices = out_indices + self.frozen_stages = frozen_stages + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.act_cfg = act_cfg + self.norm_eval = norm_eval + self.with_cp = with_cp + + if groups == 1: + channels = (144, 288, 576) + elif groups == 2: + channels = (200, 400, 800) + elif groups == 3: + channels = (240, 480, 960) + elif groups == 4: + channels = (272, 544, 1088) + elif groups == 8: + channels = (384, 768, 1536) + else: + raise ValueError(f'{groups} groups is not supported for 1x1 ' + 'Grouped Convolutions') + + channels = [make_divisible(ch * widen_factor, 8) for ch in channels] + + self.in_channels = int(24 * widen_factor) + + self.conv1 = ConvModule( + in_channels=3, + out_channels=self.in_channels, + kernel_size=3, + stride=2, + padding=1, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg) + self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) + + self.layers = nn.ModuleList() + for i, num_blocks in enumerate(self.stage_blocks): + first_block = True if i == 0 else False + layer = self.make_layer(channels[i], num_blocks, first_block) + self.layers.append(layer) + + def _freeze_stages(self): + if self.frozen_stages >= 0: + for param in self.conv1.parameters(): + param.requires_grad = False + for i in range(self.frozen_stages): + layer = self.layers[i] + layer.eval() + for param in layer.parameters(): + param.requires_grad = False + + def init_weights(self): + super(ShuffleNetV1, self).init_weights() + + if (isinstance(self.init_cfg, dict) + and self.init_cfg['type'] == 'Pretrained'): + # Suppress default init if use pretrained model. + return + + for name, m in self.named_modules(): + if isinstance(m, nn.Conv2d): + if 'conv1' in name: + normal_init(m, mean=0, std=0.01) + else: + normal_init(m, mean=0, std=1.0 / m.weight.shape[1]) + elif isinstance(m, (_BatchNorm, nn.GroupNorm)): + constant_init(m, val=1, bias=0.0001) + if isinstance(m, _BatchNorm): + if m.running_mean is not None: + nn.init.constant_(m.running_mean, 0) + + def make_layer(self, out_channels, num_blocks, first_block=False): + """Stack ShuffleUnit blocks to make a layer. + + Args: + out_channels (int): out_channels of the block. + num_blocks (int): Number of blocks. + first_block (bool): Whether is the first ShuffleUnit of a + sequential ShuffleUnits. Default: False, which means using + the grouped 1x1 convolution. 
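+
+        Returns:
+            nn.Sequential: The stacked ShuffleUnit blocks of this stage.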
+ """ + layers = [] + for i in range(num_blocks): + first_block = first_block if i == 0 else False + combine_mode = 'concat' if i == 0 else 'add' + layers.append( + ShuffleUnit( + self.in_channels, + out_channels, + groups=self.groups, + first_block=first_block, + combine=combine_mode, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + act_cfg=self.act_cfg, + with_cp=self.with_cp)) + self.in_channels = out_channels + + return nn.Sequential(*layers) + + def forward(self, x): + x = self.conv1(x) + x = self.maxpool(x) + + outs = [] + for i, layer in enumerate(self.layers): + x = layer(x) + if i in self.out_indices: + outs.append(x) + + return tuple(outs) + + def train(self, mode=True): + super(ShuffleNetV1, self).train(mode) + self._freeze_stages() + if mode and self.norm_eval: + for m in self.modules(): + if isinstance(m, _BatchNorm): + m.eval() diff --git a/mmpretrain/models/backbones/shufflenet_v2.py b/mmpretrain/models/backbones/shufflenet_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..02f9c749a814b0b4ee4e04dd6afacda078ae6f39 --- /dev/null +++ b/mmpretrain/models/backbones/shufflenet_v2.py @@ -0,0 +1,305 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +import torch.nn as nn +import torch.utils.checkpoint as cp +from mmcv.cnn import ConvModule +from mmengine.model import BaseModule +from mmengine.model.weight_init import constant_init, normal_init +from torch.nn.modules.batchnorm import _BatchNorm + +from mmpretrain.models.utils import channel_shuffle +from mmpretrain.registry import MODELS +from .base_backbone import BaseBackbone + + +class InvertedResidual(BaseModule): + """InvertedResidual block for ShuffleNetV2 backbone. + + Args: + in_channels (int): The input channels of the block. + out_channels (int): The output channels of the block. + stride (int): Stride of the 3x3 convolution layer. Default: 1 + conv_cfg (dict, optional): Config dict for convolution layer. + Default: None, which means using conv2d. + norm_cfg (dict): Config dict for normalization layer. + Default: dict(type='BN'). + act_cfg (dict): Config dict for activation layer. + Default: dict(type='ReLU'). + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. Default: False. + + Returns: + Tensor: The output tensor. 
+ """ + + def __init__(self, + in_channels, + out_channels, + stride=1, + conv_cfg=None, + norm_cfg=dict(type='BN'), + act_cfg=dict(type='ReLU'), + with_cp=False, + init_cfg=None): + super(InvertedResidual, self).__init__(init_cfg) + self.stride = stride + self.with_cp = with_cp + + branch_features = out_channels // 2 + if self.stride == 1: + assert in_channels == branch_features * 2, ( + f'in_channels ({in_channels}) should equal to ' + f'branch_features * 2 ({branch_features * 2}) ' + 'when stride is 1') + + if in_channels != branch_features * 2: + assert self.stride != 1, ( + f'stride ({self.stride}) should not equal 1 when ' + f'in_channels != branch_features * 2') + + if self.stride > 1: + self.branch1 = nn.Sequential( + ConvModule( + in_channels, + in_channels, + kernel_size=3, + stride=self.stride, + padding=1, + groups=in_channels, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=None), + ConvModule( + in_channels, + branch_features, + kernel_size=1, + stride=1, + padding=0, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg), + ) + + self.branch2 = nn.Sequential( + ConvModule( + in_channels if (self.stride > 1) else branch_features, + branch_features, + kernel_size=1, + stride=1, + padding=0, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg), + ConvModule( + branch_features, + branch_features, + kernel_size=3, + stride=self.stride, + padding=1, + groups=branch_features, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=None), + ConvModule( + branch_features, + branch_features, + kernel_size=1, + stride=1, + padding=0, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg)) + + def forward(self, x): + + def _inner_forward(x): + if self.stride > 1: + out = torch.cat((self.branch1(x), self.branch2(x)), dim=1) + else: + # Channel Split operation. using these lines of code to replace + # ``chunk(x, 2, dim=1)`` can make it easier to deploy a + # shufflenetv2 model by using mmdeploy. + channels = x.shape[1] + c = channels // 2 + channels % 2 + x1 = x[:, :c, :, :] + x2 = x[:, c:, :, :] + + out = torch.cat((x1, self.branch2(x2)), dim=1) + + out = channel_shuffle(out, 2) + + return out + + if self.with_cp and x.requires_grad: + out = cp.checkpoint(_inner_forward, x) + else: + out = _inner_forward(x) + + return out + + +@MODELS.register_module() +class ShuffleNetV2(BaseBackbone): + """ShuffleNetV2 backbone. + + Args: + widen_factor (float): Width multiplier - adjusts the number of + channels in each layer by this amount. Default: 1.0. + out_indices (Sequence[int]): Output from which stages. + Default: (0, 1, 2, 3). + frozen_stages (int): Stages to be frozen (all param fixed). + Default: -1, which means not freezing any parameters. + conv_cfg (dict, optional): Config dict for convolution layer. + Default: None, which means using conv2d. + norm_cfg (dict): Config dict for normalization layer. + Default: dict(type='BN'). + act_cfg (dict): Config dict for activation layer. + Default: dict(type='ReLU'). + norm_eval (bool): Whether to set norm layers to eval mode, namely, + freeze running stats (mean and var). Note: Effect on Batch Norm + and its variants only. Default: False. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. Default: False. 
+ """ + + def __init__(self, + widen_factor=1.0, + out_indices=(3, ), + frozen_stages=-1, + conv_cfg=None, + norm_cfg=dict(type='BN'), + act_cfg=dict(type='ReLU'), + norm_eval=False, + with_cp=False, + init_cfg=None): + super(ShuffleNetV2, self).__init__(init_cfg) + self.stage_blocks = [4, 8, 4] + for index in out_indices: + if index not in range(0, 4): + raise ValueError('the item in out_indices must in ' + f'range(0, 4). But received {index}') + + if frozen_stages not in range(-1, 4): + raise ValueError('frozen_stages must be in range(-1, 4). ' + f'But received {frozen_stages}') + self.out_indices = out_indices + self.frozen_stages = frozen_stages + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.act_cfg = act_cfg + self.norm_eval = norm_eval + self.with_cp = with_cp + + if widen_factor == 0.5: + channels = [48, 96, 192, 1024] + elif widen_factor == 1.0: + channels = [116, 232, 464, 1024] + elif widen_factor == 1.5: + channels = [176, 352, 704, 1024] + elif widen_factor == 2.0: + channels = [244, 488, 976, 2048] + else: + raise ValueError('widen_factor must be in [0.5, 1.0, 1.5, 2.0]. ' + f'But received {widen_factor}') + + self.in_channels = 24 + self.conv1 = ConvModule( + in_channels=3, + out_channels=self.in_channels, + kernel_size=3, + stride=2, + padding=1, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg) + + self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) + + self.layers = nn.ModuleList() + for i, num_blocks in enumerate(self.stage_blocks): + layer = self._make_layer(channels[i], num_blocks) + self.layers.append(layer) + + output_channels = channels[-1] + self.layers.append( + ConvModule( + in_channels=self.in_channels, + out_channels=output_channels, + kernel_size=1, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg)) + + def _make_layer(self, out_channels, num_blocks): + """Stack blocks to make a layer. + + Args: + out_channels (int): out_channels of the block. + num_blocks (int): number of blocks. + """ + layers = [] + for i in range(num_blocks): + stride = 2 if i == 0 else 1 + layers.append( + InvertedResidual( + in_channels=self.in_channels, + out_channels=out_channels, + stride=stride, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + act_cfg=self.act_cfg, + with_cp=self.with_cp)) + self.in_channels = out_channels + + return nn.Sequential(*layers) + + def _freeze_stages(self): + if self.frozen_stages >= 0: + for param in self.conv1.parameters(): + param.requires_grad = False + + for i in range(self.frozen_stages): + m = self.layers[i] + m.eval() + for param in m.parameters(): + param.requires_grad = False + + def init_weights(self): + super(ShuffleNetV2, self).init_weights() + + if (isinstance(self.init_cfg, dict) + and self.init_cfg['type'] == 'Pretrained'): + # Suppress default init if use pretrained model. 
+ return + + for name, m in self.named_modules(): + if isinstance(m, nn.Conv2d): + if 'conv1' in name: + normal_init(m, mean=0, std=0.01) + else: + normal_init(m, mean=0, std=1.0 / m.weight.shape[1]) + elif isinstance(m, (_BatchNorm, nn.GroupNorm)): + constant_init(m.weight, val=1, bias=0.0001) + if isinstance(m, _BatchNorm): + if m.running_mean is not None: + nn.init.constant_(m.running_mean, 0) + + def forward(self, x): + x = self.conv1(x) + x = self.maxpool(x) + + outs = [] + for i, layer in enumerate(self.layers): + x = layer(x) + if i in self.out_indices: + outs.append(x) + + return tuple(outs) + + def train(self, mode=True): + super(ShuffleNetV2, self).train(mode) + self._freeze_stages() + if mode and self.norm_eval: + for m in self.modules(): + if isinstance(m, nn.BatchNorm2d): + m.eval() diff --git a/mmpretrain/models/backbones/swin_transformer.py b/mmpretrain/models/backbones/swin_transformer.py new file mode 100644 index 0000000000000000000000000000000000000000..559fd5e9150f78a9801fcb9070e114b4e96113c5 --- /dev/null +++ b/mmpretrain/models/backbones/swin_transformer.py @@ -0,0 +1,585 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from copy import deepcopy +from typing import Sequence + +import numpy as np +import torch +import torch.nn as nn +import torch.utils.checkpoint as cp +from mmcv.cnn import build_norm_layer +from mmcv.cnn.bricks.transformer import FFN, PatchEmbed, PatchMerging +from mmengine.model import BaseModule, ModuleList +from mmengine.model.weight_init import trunc_normal_ +from mmengine.utils.dl_utils.parrots_wrapper import _BatchNorm + +from mmpretrain.registry import MODELS +from ..utils import (ShiftWindowMSA, resize_pos_embed, + resize_relative_position_bias_table, to_2tuple) +from .base_backbone import BaseBackbone + + +class SwinBlock(BaseModule): + """Swin Transformer block. + + Args: + embed_dims (int): Number of input channels. + num_heads (int): Number of attention heads. + window_size (int): The height and width of the window. Defaults to 7. + shift (bool): Shift the attention window or not. Defaults to False. + ffn_ratio (float): The expansion ratio of feedforward network hidden + layer channels. Defaults to 4. + drop_path (float): The drop path rate after attention and ffn. + Defaults to 0. + pad_small_map (bool): If True, pad the small feature map to the window + size, which is common used in detection and segmentation. If False, + avoid shifting window and shrink the window size to the size of + feature map, which is common used in classification. + Defaults to False. + attn_cfgs (dict): The extra config of Shift Window-MSA. + Defaults to empty dict. + ffn_cfgs (dict): The extra config of FFN. Defaults to empty dict. + norm_cfg (dict): The config of norm layers. + Defaults to ``dict(type='LN')``. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. Defaults to False. + init_cfg (dict, optional): The extra config for initialization. + Defaults to None. 
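+
+    Example:
+        >>> # Minimal illustrative sketch (not from the original docstring);
+        >>> # the block keeps the ``(B, L, C)`` token shape unchanged.
+        >>> import torch
+        >>> block = SwinBlock(embed_dims=96, num_heads=3)
+        >>> tokens = torch.rand(1, 56 * 56, 96)
+        >>> out = block(tokens, hw_shape=(56, 56))
+        >>> print(out.shape)
+        torch.Size([1, 3136, 96])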
+ """ + + def __init__(self, + embed_dims, + num_heads, + window_size=7, + shift=False, + ffn_ratio=4., + drop_path=0., + pad_small_map=False, + attn_cfgs=dict(), + ffn_cfgs=dict(), + norm_cfg=dict(type='LN'), + with_cp=False, + init_cfg=None): + + super(SwinBlock, self).__init__(init_cfg) + self.with_cp = with_cp + + _attn_cfgs = { + 'embed_dims': embed_dims, + 'num_heads': num_heads, + 'shift_size': window_size // 2 if shift else 0, + 'window_size': window_size, + 'dropout_layer': dict(type='DropPath', drop_prob=drop_path), + 'pad_small_map': pad_small_map, + **attn_cfgs + } + self.norm1 = build_norm_layer(norm_cfg, embed_dims)[1] + self.attn = ShiftWindowMSA(**_attn_cfgs) + + _ffn_cfgs = { + 'embed_dims': embed_dims, + 'feedforward_channels': int(embed_dims * ffn_ratio), + 'num_fcs': 2, + 'ffn_drop': 0, + 'dropout_layer': dict(type='DropPath', drop_prob=drop_path), + 'act_cfg': dict(type='GELU'), + **ffn_cfgs + } + self.norm2 = build_norm_layer(norm_cfg, embed_dims)[1] + self.ffn = FFN(**_ffn_cfgs) + + def forward(self, x, hw_shape): + + def _inner_forward(x): + identity = x + x = self.norm1(x) + x = self.attn(x, hw_shape) + x = x + identity + + identity = x + x = self.norm2(x) + x = self.ffn(x, identity=identity) + + return x + + if self.with_cp and x.requires_grad: + x = cp.checkpoint(_inner_forward, x) + else: + x = _inner_forward(x) + + return x + + +class SwinBlockSequence(BaseModule): + """Module with successive Swin Transformer blocks and downsample layer. + + Args: + embed_dims (int): Number of input channels. + depth (int): Number of successive swin transformer blocks. + num_heads (int): Number of attention heads. + window_size (int): The height and width of the window. Defaults to 7. + downsample (bool): Downsample the output of blocks by patch merging. + Defaults to False. + downsample_cfg (dict): The extra config of the patch merging layer. + Defaults to empty dict. + drop_paths (Sequence[float] | float): The drop path rate in each block. + Defaults to 0. + block_cfgs (Sequence[dict] | dict): The extra config of each block. + Defaults to empty dicts. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. Defaults to False. + pad_small_map (bool): If True, pad the small feature map to the window + size, which is common used in detection and segmentation. If False, + avoid shifting window and shrink the window size to the size of + feature map, which is common used in classification. + Defaults to False. + init_cfg (dict, optional): The extra config for initialization. + Defaults to None. 
+ """ + + def __init__(self, + embed_dims, + depth, + num_heads, + window_size=7, + downsample=False, + downsample_cfg=dict(), + drop_paths=0., + block_cfgs=dict(), + with_cp=False, + pad_small_map=False, + init_cfg=None): + super().__init__(init_cfg) + + if not isinstance(drop_paths, Sequence): + drop_paths = [drop_paths] * depth + + if not isinstance(block_cfgs, Sequence): + block_cfgs = [deepcopy(block_cfgs) for _ in range(depth)] + + self.embed_dims = embed_dims + self.blocks = ModuleList() + for i in range(depth): + _block_cfg = { + 'embed_dims': embed_dims, + 'num_heads': num_heads, + 'window_size': window_size, + 'shift': False if i % 2 == 0 else True, + 'drop_path': drop_paths[i], + 'with_cp': with_cp, + 'pad_small_map': pad_small_map, + **block_cfgs[i] + } + block = SwinBlock(**_block_cfg) + self.blocks.append(block) + + if downsample: + _downsample_cfg = { + 'in_channels': embed_dims, + 'out_channels': 2 * embed_dims, + 'norm_cfg': dict(type='LN'), + **downsample_cfg + } + self.downsample = PatchMerging(**_downsample_cfg) + else: + self.downsample = None + + def forward(self, x, in_shape, do_downsample=True): + for block in self.blocks: + x = block(x, in_shape) + + if self.downsample is not None and do_downsample: + x, out_shape = self.downsample(x, in_shape) + else: + out_shape = in_shape + return x, out_shape + + @property + def out_channels(self): + if self.downsample: + return self.downsample.out_channels + else: + return self.embed_dims + + +@MODELS.register_module() +class SwinTransformer(BaseBackbone): + """Swin Transformer. + + A PyTorch implement of : `Swin Transformer: + Hierarchical Vision Transformer using Shifted Windows + `_ + + Inspiration from + https://github.com/microsoft/Swin-Transformer + + Args: + arch (str | dict): Swin Transformer architecture. If use string, choose + from 'tiny', 'small', 'base' and 'large'. If use dict, it should + have below keys: + + - **embed_dims** (int): The dimensions of embedding. + - **depths** (List[int]): The number of blocks in each stage. + - **num_heads** (List[int]): The number of heads in attention + modules of each stage. + + Defaults to 'tiny'. + img_size (int | tuple): The expected input image shape. Because we + support dynamic input shape, just set the argument to the most + common input image shape. Defaults to 224. + patch_size (int | tuple): The patch size in patch embedding. + Defaults to 4. + in_channels (int): The num of input channels. Defaults to 3. + window_size (int): The height and width of the window. Defaults to 7. + drop_rate (float): Dropout rate after embedding. Defaults to 0. + drop_path_rate (float): Stochastic depth rate. Defaults to 0.1. + out_after_downsample (bool): Whether to output the feature map of a + stage after the following downsample layer. Defaults to False. + use_abs_pos_embed (bool): If True, add absolute position embedding to + the patch embedding. Defaults to False. + interpolate_mode (str): Select the interpolate mode for absolute + position embeding vector resize. Defaults to "bicubic". + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. Defaults to False. + frozen_stages (int): Stages to be frozen (stop grad and set eval mode). + -1 means not freezing any parameters. Defaults to -1. + norm_eval (bool): Whether to set norm layers to eval mode, namely, + freeze running stats (mean and var). Note: Effect on Batch Norm + and its variants only. Defaults to False. 
+ pad_small_map (bool): If True, pad the small feature map to the window + size, which is common used in detection and segmentation. If False, + avoid shifting window and shrink the window size to the size of + feature map, which is common used in classification. + Defaults to False. + norm_cfg (dict): Config dict for normalization layer for all output + features. Defaults to ``dict(type='LN')`` + stage_cfgs (Sequence[dict] | dict): Extra config dict for each + stage. Defaults to an empty dict. + patch_cfg (dict): Extra config dict for patch embedding. + Defaults to an empty dict. + init_cfg (dict, optional): The Config for initialization. + Defaults to None. + + Examples: + >>> from mmpretrain.models import SwinTransformer + >>> import torch + >>> extra_config = dict( + >>> arch='tiny', + >>> stage_cfgs=dict(downsample_cfg={'kernel_size': 3, + >>> 'expansion_ratio': 3})) + >>> self = SwinTransformer(**extra_config) + >>> inputs = torch.rand(1, 3, 224, 224) + >>> output = self.forward(inputs) + >>> print(output.shape) + (1, 2592, 4) + """ + arch_zoo = { + **dict.fromkeys(['t', 'tiny'], + {'embed_dims': 96, + 'depths': [2, 2, 6, 2], + 'num_heads': [3, 6, 12, 24]}), + **dict.fromkeys(['s', 'small'], + {'embed_dims': 96, + 'depths': [2, 2, 18, 2], + 'num_heads': [3, 6, 12, 24]}), + **dict.fromkeys(['b', 'base'], + {'embed_dims': 128, + 'depths': [2, 2, 18, 2], + 'num_heads': [4, 8, 16, 32]}), + **dict.fromkeys(['l', 'large'], + {'embed_dims': 192, + 'depths': [2, 2, 18, 2], + 'num_heads': [6, 12, 24, 48]}), + } # yapf: disable + + _version = 3 + num_extra_tokens = 0 + + def __init__(self, + arch='tiny', + img_size=224, + patch_size=4, + in_channels=3, + window_size=7, + drop_rate=0., + drop_path_rate=0.1, + out_indices=(3, ), + out_after_downsample=False, + use_abs_pos_embed=False, + interpolate_mode='bicubic', + with_cp=False, + frozen_stages=-1, + norm_eval=False, + pad_small_map=False, + norm_cfg=dict(type='LN'), + stage_cfgs=dict(), + patch_cfg=dict(), + init_cfg=None): + super(SwinTransformer, self).__init__(init_cfg=init_cfg) + + if isinstance(arch, str): + arch = arch.lower() + assert arch in set(self.arch_zoo), \ + f'Arch {arch} is not in default archs {set(self.arch_zoo)}' + self.arch_settings = self.arch_zoo[arch] + else: + essential_keys = {'embed_dims', 'depths', 'num_heads'} + assert isinstance(arch, dict) and set(arch) == essential_keys, \ + f'Custom arch needs a dict with keys {essential_keys}' + self.arch_settings = arch + + self.embed_dims = self.arch_settings['embed_dims'] + self.depths = self.arch_settings['depths'] + self.num_heads = self.arch_settings['num_heads'] + self.num_layers = len(self.depths) + self.out_indices = out_indices + self.out_after_downsample = out_after_downsample + self.use_abs_pos_embed = use_abs_pos_embed + self.interpolate_mode = interpolate_mode + self.frozen_stages = frozen_stages + + _patch_cfg = dict( + in_channels=in_channels, + input_size=img_size, + embed_dims=self.embed_dims, + conv_type='Conv2d', + kernel_size=patch_size, + stride=patch_size, + norm_cfg=dict(type='LN'), + ) + _patch_cfg.update(patch_cfg) + self.patch_embed = PatchEmbed(**_patch_cfg) + self.patch_resolution = self.patch_embed.init_out_size + + if self.use_abs_pos_embed: + num_patches = self.patch_resolution[0] * self.patch_resolution[1] + self.absolute_pos_embed = nn.Parameter( + torch.zeros(1, num_patches, self.embed_dims)) + self._register_load_state_dict_pre_hook( + self._prepare_abs_pos_embed) + + self._register_load_state_dict_pre_hook( + 
self._prepare_relative_position_bias_table) + + self.drop_after_pos = nn.Dropout(p=drop_rate) + self.norm_eval = norm_eval + + # stochastic depth + total_depth = sum(self.depths) + dpr = [ + x.item() for x in torch.linspace(0, drop_path_rate, total_depth) + ] # stochastic depth decay rule + + self.stages = ModuleList() + embed_dims = [self.embed_dims] + for i, (depth, + num_heads) in enumerate(zip(self.depths, self.num_heads)): + if isinstance(stage_cfgs, Sequence): + stage_cfg = stage_cfgs[i] + else: + stage_cfg = deepcopy(stage_cfgs) + downsample = True if i < self.num_layers - 1 else False + _stage_cfg = { + 'embed_dims': embed_dims[-1], + 'depth': depth, + 'num_heads': num_heads, + 'window_size': window_size, + 'downsample': downsample, + 'drop_paths': dpr[:depth], + 'with_cp': with_cp, + 'pad_small_map': pad_small_map, + **stage_cfg + } + + stage = SwinBlockSequence(**_stage_cfg) + self.stages.append(stage) + + dpr = dpr[depth:] + embed_dims.append(stage.out_channels) + + if self.out_after_downsample: + self.num_features = embed_dims[1:] + else: + self.num_features = embed_dims[:-1] + + for i in out_indices: + if norm_cfg is not None: + norm_layer = build_norm_layer(norm_cfg, + self.num_features[i])[1] + else: + norm_layer = nn.Identity() + + self.add_module(f'norm{i}', norm_layer) + + def init_weights(self): + super(SwinTransformer, self).init_weights() + + if (isinstance(self.init_cfg, dict) + and self.init_cfg['type'] == 'Pretrained'): + # Suppress default init if use pretrained model. + return + + if self.use_abs_pos_embed: + trunc_normal_(self.absolute_pos_embed, std=0.02) + + def forward(self, x): + x, hw_shape = self.patch_embed(x) + if self.use_abs_pos_embed: + x = x + resize_pos_embed( + self.absolute_pos_embed, self.patch_resolution, hw_shape, + self.interpolate_mode, self.num_extra_tokens) + x = self.drop_after_pos(x) + + outs = [] + for i, stage in enumerate(self.stages): + x, hw_shape = stage( + x, hw_shape, do_downsample=self.out_after_downsample) + if i in self.out_indices: + norm_layer = getattr(self, f'norm{i}') + out = norm_layer(x) + out = out.view(-1, *hw_shape, + self.num_features[i]).permute(0, 3, 1, + 2).contiguous() + outs.append(out) + if stage.downsample is not None and not self.out_after_downsample: + x, hw_shape = stage.downsample(x, hw_shape) + + return tuple(outs) + + def _load_from_state_dict(self, state_dict, prefix, local_metadata, *args, + **kwargs): + """load checkpoints.""" + # Names of some parameters in has been changed. 
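+        # For example (illustrative), a version-1 checkpoint key such as
+        # ``backbone.norm.weight`` is remapped below to
+        # ``backbone.norm3.weight`` for a model with four stages.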
+ version = local_metadata.get('version', None) + if (version is None + or version < 2) and self.__class__ is SwinTransformer: + final_stage_num = len(self.stages) - 1 + state_dict_keys = list(state_dict.keys()) + for k in state_dict_keys: + if k.startswith('norm.') or k.startswith('backbone.norm.'): + convert_key = k.replace('norm.', f'norm{final_stage_num}.') + state_dict[convert_key] = state_dict[k] + del state_dict[k] + if (version is None + or version < 3) and self.__class__ is SwinTransformer: + state_dict_keys = list(state_dict.keys()) + for k in state_dict_keys: + if 'attn_mask' in k: + del state_dict[k] + + super()._load_from_state_dict(state_dict, prefix, local_metadata, + *args, **kwargs) + + def _freeze_stages(self): + if self.frozen_stages >= 0: + self.patch_embed.eval() + for param in self.patch_embed.parameters(): + param.requires_grad = False + + for i in range(0, self.frozen_stages + 1): + m = self.stages[i] + m.eval() + for param in m.parameters(): + param.requires_grad = False + for i in self.out_indices: + if i <= self.frozen_stages: + for param in getattr(self, f'norm{i}').parameters(): + param.requires_grad = False + + def train(self, mode=True): + super(SwinTransformer, self).train(mode) + self._freeze_stages() + if mode and self.norm_eval: + for m in self.modules(): + # trick: eval have effect on BatchNorm only + if isinstance(m, _BatchNorm): + m.eval() + + def _prepare_abs_pos_embed(self, state_dict, prefix, *args, **kwargs): + name = prefix + 'absolute_pos_embed' + if name not in state_dict.keys(): + return + + ckpt_pos_embed_shape = state_dict[name].shape + if self.absolute_pos_embed.shape != ckpt_pos_embed_shape: + from mmengine.logging import MMLogger + logger = MMLogger.get_current_instance() + logger.info( + 'Resize the absolute_pos_embed shape from ' + f'{ckpt_pos_embed_shape} to {self.absolute_pos_embed.shape}.') + + ckpt_pos_embed_shape = to_2tuple( + int(np.sqrt(ckpt_pos_embed_shape[1] - self.num_extra_tokens))) + pos_embed_shape = self.patch_embed.init_out_size + + state_dict[name] = resize_pos_embed(state_dict[name], + ckpt_pos_embed_shape, + pos_embed_shape, + self.interpolate_mode, + self.num_extra_tokens) + + def _prepare_relative_position_bias_table(self, state_dict, prefix, *args, + **kwargs): + state_dict_model = self.state_dict() + all_keys = list(state_dict_model.keys()) + for key in all_keys: + if 'relative_position_bias_table' in key: + ckpt_key = prefix + key + if ckpt_key not in state_dict: + continue + relative_position_bias_table_pretrained = state_dict[ckpt_key] + relative_position_bias_table_current = state_dict_model[key] + L1, nH1 = relative_position_bias_table_pretrained.size() + L2, nH2 = relative_position_bias_table_current.size() + if L1 != L2: + src_size = int(L1**0.5) + dst_size = int(L2**0.5) + new_rel_pos_bias = resize_relative_position_bias_table( + src_size, dst_size, + relative_position_bias_table_pretrained, nH1) + from mmengine.logging import MMLogger + logger = MMLogger.get_current_instance() + logger.info('Resize the relative_position_bias_table from ' + f'{state_dict[ckpt_key].shape} to ' + f'{new_rel_pos_bias.shape}') + state_dict[ckpt_key] = new_rel_pos_bias + + # The index buffer need to be re-generated. + index_buffer = ckpt_key.replace('bias_table', 'index') + del state_dict[index_buffer] + + def get_layer_depth(self, param_name: str, prefix: str = ''): + """Get the layer-wise depth of a parameter. + + Args: + param_name (str): The name of the parameter. + prefix (str): The prefix for the parameter. 
+ Defaults to an empty string. + + Returns: + Tuple[int, int]: The layer-wise depth and the num of layers. + + Note: + The first depth is the stem module (``layer_depth=0``), and the + last depth is the subsequent module (``layer_depth=num_layers-1``) + """ + num_layers = sum(self.depths) + 2 + + if not param_name.startswith(prefix): + # For subsequent module like head + return num_layers - 1, num_layers + + param_name = param_name[len(prefix):] + + if param_name.startswith('patch_embed'): + layer_depth = 0 + elif param_name.startswith('stages'): + stage_id = int(param_name.split('.')[1]) + block_id = param_name.split('.')[3] + if block_id in ('reduction', 'norm'): + layer_depth = sum(self.depths[:stage_id + 1]) + else: + layer_depth = sum(self.depths[:stage_id]) + int(block_id) + 1 + else: + layer_depth = num_layers - 1 + + return layer_depth, num_layers diff --git a/mmpretrain/models/backbones/swin_transformer_v2.py b/mmpretrain/models/backbones/swin_transformer_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..142505a808ae3fc631d54e1a56ae483db242da31 --- /dev/null +++ b/mmpretrain/models/backbones/swin_transformer_v2.py @@ -0,0 +1,567 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from copy import deepcopy +from typing import Sequence + +import numpy as np +import torch +import torch.nn as nn +import torch.utils.checkpoint as cp +from mmcv.cnn import build_norm_layer +from mmcv.cnn.bricks.transformer import FFN, PatchEmbed +from mmengine.model import BaseModule, ModuleList +from mmengine.model.weight_init import trunc_normal_ +from mmengine.utils.dl_utils.parrots_wrapper import _BatchNorm + +from ..builder import MODELS +from ..utils import (PatchMerging, ShiftWindowMSA, WindowMSAV2, + resize_pos_embed, to_2tuple) +from .base_backbone import BaseBackbone + + +class SwinBlockV2(BaseModule): + """Swin Transformer V2 block. Use post normalization. + + Args: + embed_dims (int): Number of input channels. + num_heads (int): Number of attention heads. + window_size (int): The height and width of the window. Defaults to 7. + shift (bool): Shift the attention window or not. Defaults to False. + extra_norm (bool): Whether add extra norm at the end of main branch. + ffn_ratio (float): The expansion ratio of feedforward network hidden + layer channels. Defaults to 4. + drop_path (float): The drop path rate after attention and ffn. + Defaults to 0. + pad_small_map (bool): If True, pad the small feature map to the window + size, which is common used in detection and segmentation. If False, + avoid shifting window and shrink the window size to the size of + feature map, which is common used in classification. + Defaults to False. + attn_cfgs (dict): The extra config of Shift Window-MSA. + Defaults to empty dict. + ffn_cfgs (dict): The extra config of FFN. Defaults to empty dict. + norm_cfg (dict): The config of norm layers. + Defaults to ``dict(type='LN')``. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. Defaults to False. + pretrained_window_size (int): Window size in pretrained. + init_cfg (dict, optional): The extra config for initialization. + Defaults to None. 
+ """ + + def __init__(self, + embed_dims, + num_heads, + window_size=8, + shift=False, + extra_norm=False, + ffn_ratio=4., + drop_path=0., + pad_small_map=False, + attn_cfgs=dict(), + ffn_cfgs=dict(), + norm_cfg=dict(type='LN'), + with_cp=False, + pretrained_window_size=0, + init_cfg=None): + + super(SwinBlockV2, self).__init__(init_cfg) + self.with_cp = with_cp + self.extra_norm = extra_norm + + _attn_cfgs = { + 'embed_dims': embed_dims, + 'num_heads': num_heads, + 'shift_size': window_size // 2 if shift else 0, + 'window_size': window_size, + 'dropout_layer': dict(type='DropPath', drop_prob=drop_path), + 'pad_small_map': pad_small_map, + **attn_cfgs + } + # use V2 attention implementation + _attn_cfgs.update( + window_msa=WindowMSAV2, + pretrained_window_size=to_2tuple(pretrained_window_size)) + self.attn = ShiftWindowMSA(**_attn_cfgs) + self.norm1 = build_norm_layer(norm_cfg, embed_dims)[1] + + _ffn_cfgs = { + 'embed_dims': embed_dims, + 'feedforward_channels': int(embed_dims * ffn_ratio), + 'num_fcs': 2, + 'ffn_drop': 0, + 'dropout_layer': dict(type='DropPath', drop_prob=drop_path), + 'act_cfg': dict(type='GELU'), + 'add_identity': False, + **ffn_cfgs + } + self.ffn = FFN(**_ffn_cfgs) + self.norm2 = build_norm_layer(norm_cfg, embed_dims)[1] + + # add extra norm for every n blocks in huge and giant model + if self.extra_norm: + self.norm3 = build_norm_layer(norm_cfg, embed_dims)[1] + + def forward(self, x, hw_shape): + + def _inner_forward(x): + # Use post normalization + identity = x + x = self.attn(x, hw_shape) + x = self.norm1(x) + x = x + identity + + identity = x + x = self.ffn(x) + x = self.norm2(x) + x = x + identity + + if self.extra_norm: + x = self.norm3(x) + + return x + + if self.with_cp and x.requires_grad: + x = cp.checkpoint(_inner_forward, x) + else: + x = _inner_forward(x) + + return x + + +class SwinBlockV2Sequence(BaseModule): + """Module with successive Swin Transformer blocks and downsample layer. + + Args: + embed_dims (int): Number of input channels. + depth (int): Number of successive swin transformer blocks. + num_heads (int): Number of attention heads. + window_size (int): The height and width of the window. Defaults to 7. + downsample (bool): Downsample the output of blocks by patch merging. + Defaults to False. + downsample_cfg (dict): The extra config of the patch merging layer. + Defaults to empty dict. + drop_paths (Sequence[float] | float): The drop path rate in each block. + Defaults to 0. + block_cfgs (Sequence[dict] | dict): The extra config of each block. + Defaults to empty dicts. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. Defaults to False. + pad_small_map (bool): If True, pad the small feature map to the window + size, which is common used in detection and segmentation. If False, + avoid shifting window and shrink the window size to the size of + feature map, which is common used in classification. + Defaults to False. + extra_norm_every_n_blocks (int): Add extra norm at the end of main + branch every n blocks. Defaults to 0, which means no needs for + extra norm layer. + pretrained_window_size (int): Window size in pretrained. + init_cfg (dict, optional): The extra config for initialization. + Defaults to None. 
+ """ + + def __init__(self, + embed_dims, + depth, + num_heads, + window_size=8, + downsample=False, + downsample_cfg=dict(), + drop_paths=0., + block_cfgs=dict(), + with_cp=False, + pad_small_map=False, + extra_norm_every_n_blocks=0, + pretrained_window_size=0, + init_cfg=None): + super().__init__(init_cfg) + + if not isinstance(drop_paths, Sequence): + drop_paths = [drop_paths] * depth + + if not isinstance(block_cfgs, Sequence): + block_cfgs = [deepcopy(block_cfgs) for _ in range(depth)] + + if downsample: + self.out_channels = 2 * embed_dims + _downsample_cfg = { + 'in_channels': embed_dims, + 'out_channels': self.out_channels, + 'norm_cfg': dict(type='LN'), + **downsample_cfg + } + self.downsample = PatchMerging(**_downsample_cfg) + else: + self.out_channels = embed_dims + self.downsample = None + + self.blocks = ModuleList() + for i in range(depth): + extra_norm = True if extra_norm_every_n_blocks and \ + (i + 1) % extra_norm_every_n_blocks == 0 else False + _block_cfg = { + 'embed_dims': self.out_channels, + 'num_heads': num_heads, + 'window_size': window_size, + 'shift': False if i % 2 == 0 else True, + 'extra_norm': extra_norm, + 'drop_path': drop_paths[i], + 'with_cp': with_cp, + 'pad_small_map': pad_small_map, + 'pretrained_window_size': pretrained_window_size, + **block_cfgs[i] + } + block = SwinBlockV2(**_block_cfg) + self.blocks.append(block) + + def forward(self, x, in_shape): + if self.downsample: + x, out_shape = self.downsample(x, in_shape) + else: + out_shape = in_shape + + for block in self.blocks: + x = block(x, out_shape) + + return x, out_shape + + +@MODELS.register_module() +class SwinTransformerV2(BaseBackbone): + """Swin Transformer V2. + + A PyTorch implement of : `Swin Transformer V2: + Scaling Up Capacity and Resolution + `_ + + Inspiration from + https://github.com/microsoft/Swin-Transformer + + Args: + arch (str | dict): Swin Transformer architecture. If use string, choose + from 'tiny', 'small', 'base' and 'large'. If use dict, it should + have below keys: + + - **embed_dims** (int): The dimensions of embedding. + - **depths** (List[int]): The number of blocks in each stage. + - **num_heads** (List[int]): The number of heads in attention + modules of each stage. + - **extra_norm_every_n_blocks** (int): Add extra norm at the end + of main branch every n blocks. + + Defaults to 'tiny'. + img_size (int | tuple): The expected input image shape. Because we + support dynamic input shape, just set the argument to the most + common input image shape. Defaults to 224. + patch_size (int | tuple): The patch size in patch embedding. + Defaults to 4. + in_channels (int): The num of input channels. Defaults to 3. + window_size (int | Sequence): The height and width of the window. + Defaults to 7. + drop_rate (float): Dropout rate after embedding. Defaults to 0. + drop_path_rate (float): Stochastic depth rate. Defaults to 0.1. + use_abs_pos_embed (bool): If True, add absolute position embedding to + the patch embedding. Defaults to False. + interpolate_mode (str): Select the interpolate mode for absolute + position embeding vector resize. Defaults to "bicubic". + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. Defaults to False. + frozen_stages (int): Stages to be frozen (stop grad and set eval mode). + -1 means not freezing any parameters. Defaults to -1. + norm_eval (bool): Whether to set norm layers to eval mode, namely, + freeze running stats (mean and var). 
Note: Effect on Batch Norm + and its variants only. Defaults to False. + pad_small_map (bool): If True, pad the small feature map to the window + size, which is common used in detection and segmentation. If False, + avoid shifting window and shrink the window size to the size of + feature map, which is common used in classification. + Defaults to False. + norm_cfg (dict): Config dict for normalization layer for all output + features. Defaults to ``dict(type='LN')`` + stage_cfgs (Sequence[dict] | dict): Extra config dict for each + stage. Defaults to an empty dict. + patch_cfg (dict): Extra config dict for patch embedding. + Defaults to an empty dict. + pretrained_window_sizes (tuple(int)): Pretrained window sizes of + each layer. + init_cfg (dict, optional): The Config for initialization. + Defaults to None. + + Examples: + >>> from mmpretrain.models import SwinTransformerV2 + >>> import torch + >>> extra_config = dict( + >>> arch='tiny', + >>> stage_cfgs=dict(downsample_cfg={'kernel_size': 3, + >>> 'padding': 'same'})) + >>> self = SwinTransformerV2(**extra_config) + >>> inputs = torch.rand(1, 3, 224, 224) + >>> output = self.forward(inputs) + >>> print(output.shape) + (1, 2592, 4) + """ + arch_zoo = { + **dict.fromkeys(['t', 'tiny'], + {'embed_dims': 96, + 'depths': [2, 2, 6, 2], + 'num_heads': [3, 6, 12, 24], + 'extra_norm_every_n_blocks': 0}), + **dict.fromkeys(['s', 'small'], + {'embed_dims': 96, + 'depths': [2, 2, 18, 2], + 'num_heads': [3, 6, 12, 24], + 'extra_norm_every_n_blocks': 0}), + **dict.fromkeys(['b', 'base'], + {'embed_dims': 128, + 'depths': [2, 2, 18, 2], + 'num_heads': [4, 8, 16, 32], + 'extra_norm_every_n_blocks': 0}), + **dict.fromkeys(['l', 'large'], + {'embed_dims': 192, + 'depths': [2, 2, 18, 2], + 'num_heads': [6, 12, 24, 48], + 'extra_norm_every_n_blocks': 0}), + # head count not certain for huge, and is employed for another + # parallel study about self-supervised learning. 
+ **dict.fromkeys(['h', 'huge'], + {'embed_dims': 352, + 'depths': [2, 2, 18, 2], + 'num_heads': [8, 16, 32, 64], + 'extra_norm_every_n_blocks': 6}), + **dict.fromkeys(['g', 'giant'], + {'embed_dims': 512, + 'depths': [2, 2, 42, 4], + 'num_heads': [16, 32, 64, 128], + 'extra_norm_every_n_blocks': 6}), + } # yapf: disable + + _version = 1 + num_extra_tokens = 0 + + def __init__(self, + arch='tiny', + img_size=256, + patch_size=4, + in_channels=3, + window_size=8, + drop_rate=0., + drop_path_rate=0.1, + out_indices=(3, ), + use_abs_pos_embed=False, + interpolate_mode='bicubic', + with_cp=False, + frozen_stages=-1, + norm_eval=False, + pad_small_map=False, + norm_cfg=dict(type='LN'), + stage_cfgs=dict(), + patch_cfg=dict(), + pretrained_window_sizes=[0, 0, 0, 0], + init_cfg=None): + super(SwinTransformerV2, self).__init__(init_cfg=init_cfg) + + if isinstance(arch, str): + arch = arch.lower() + assert arch in set(self.arch_zoo), \ + f'Arch {arch} is not in default archs {set(self.arch_zoo)}' + self.arch_settings = self.arch_zoo[arch] + else: + essential_keys = { + 'embed_dims', 'depths', 'num_heads', + 'extra_norm_every_n_blocks' + } + assert isinstance(arch, dict) and set(arch) == essential_keys, \ + f'Custom arch needs a dict with keys {essential_keys}' + self.arch_settings = arch + + self.embed_dims = self.arch_settings['embed_dims'] + self.depths = self.arch_settings['depths'] + self.num_heads = self.arch_settings['num_heads'] + self.extra_norm_every_n_blocks = self.arch_settings[ + 'extra_norm_every_n_blocks'] + self.num_layers = len(self.depths) + self.out_indices = out_indices + self.use_abs_pos_embed = use_abs_pos_embed + self.interpolate_mode = interpolate_mode + self.frozen_stages = frozen_stages + + if isinstance(window_size, int): + self.window_sizes = [window_size for _ in range(self.num_layers)] + elif isinstance(window_size, Sequence): + assert len(window_size) == self.num_layers, \ + f'Length of window_sizes {len(window_size)} is not equal to '\ + f'length of stages {self.num_layers}.' 
+ self.window_sizes = window_size + else: + raise TypeError('window_size should be a Sequence or int.') + + _patch_cfg = dict( + in_channels=in_channels, + input_size=img_size, + embed_dims=self.embed_dims, + conv_type='Conv2d', + kernel_size=patch_size, + stride=patch_size, + norm_cfg=dict(type='LN'), + ) + _patch_cfg.update(patch_cfg) + self.patch_embed = PatchEmbed(**_patch_cfg) + self.patch_resolution = self.patch_embed.init_out_size + + if self.use_abs_pos_embed: + num_patches = self.patch_resolution[0] * self.patch_resolution[1] + self.absolute_pos_embed = nn.Parameter( + torch.zeros(1, num_patches, self.embed_dims)) + self._register_load_state_dict_pre_hook( + self._prepare_abs_pos_embed) + + self._register_load_state_dict_pre_hook(self._delete_reinit_params) + + self.drop_after_pos = nn.Dropout(p=drop_rate) + self.norm_eval = norm_eval + + # stochastic depth + total_depth = sum(self.depths) + dpr = [ + x.item() for x in torch.linspace(0, drop_path_rate, total_depth) + ] # stochastic depth decay rule + + self.stages = ModuleList() + embed_dims = [self.embed_dims] + for i, (depth, + num_heads) in enumerate(zip(self.depths, self.num_heads)): + if isinstance(stage_cfgs, Sequence): + stage_cfg = stage_cfgs[i] + else: + stage_cfg = deepcopy(stage_cfgs) + downsample = True if i > 0 else False + _stage_cfg = { + 'embed_dims': embed_dims[-1], + 'depth': depth, + 'num_heads': num_heads, + 'window_size': self.window_sizes[i], + 'downsample': downsample, + 'drop_paths': dpr[:depth], + 'with_cp': with_cp, + 'pad_small_map': pad_small_map, + 'extra_norm_every_n_blocks': self.extra_norm_every_n_blocks, + 'pretrained_window_size': pretrained_window_sizes[i], + 'downsample_cfg': dict(use_post_norm=True), + **stage_cfg + } + + stage = SwinBlockV2Sequence(**_stage_cfg) + self.stages.append(stage) + + dpr = dpr[depth:] + embed_dims.append(stage.out_channels) + + for i in out_indices: + if norm_cfg is not None: + norm_layer = build_norm_layer(norm_cfg, embed_dims[i + 1])[1] + else: + norm_layer = nn.Identity() + + self.add_module(f'norm{i}', norm_layer) + + def init_weights(self): + super(SwinTransformerV2, self).init_weights() + + if (isinstance(self.init_cfg, dict) + and self.init_cfg['type'] == 'Pretrained'): + # Suppress default init if use pretrained model. 
+ return + + if self.use_abs_pos_embed: + trunc_normal_(self.absolute_pos_embed, std=0.02) + + def forward(self, x): + x, hw_shape = self.patch_embed(x) + + if self.use_abs_pos_embed: + x = x + resize_pos_embed( + self.absolute_pos_embed, self.patch_resolution, hw_shape, + self.interpolate_mode, self.num_extra_tokens) + x = self.drop_after_pos(x) + + outs = [] + for i, stage in enumerate(self.stages): + x, hw_shape = stage(x, hw_shape) + if i in self.out_indices: + norm_layer = getattr(self, f'norm{i}') + out = norm_layer(x) + out = out.view(-1, *hw_shape, + stage.out_channels).permute(0, 3, 1, + 2).contiguous() + outs.append(out) + + return tuple(outs) + + def _freeze_stages(self): + if self.frozen_stages >= 0: + self.patch_embed.eval() + for param in self.patch_embed.parameters(): + param.requires_grad = False + + for i in range(0, self.frozen_stages + 1): + m = self.stages[i] + m.eval() + for param in m.parameters(): + param.requires_grad = False + for i in self.out_indices: + if i <= self.frozen_stages: + for param in getattr(self, f'norm{i}').parameters(): + param.requires_grad = False + + def train(self, mode=True): + super(SwinTransformerV2, self).train(mode) + self._freeze_stages() + if mode and self.norm_eval: + for m in self.modules(): + # trick: eval have effect on BatchNorm only + if isinstance(m, _BatchNorm): + m.eval() + + def _prepare_abs_pos_embed(self, state_dict, prefix, *args, **kwargs): + name = prefix + 'absolute_pos_embed' + if name not in state_dict.keys(): + return + + ckpt_pos_embed_shape = state_dict[name].shape + if self.absolute_pos_embed.shape != ckpt_pos_embed_shape: + from mmengine.logging import MMLogger + logger = MMLogger.get_current_instance() + logger.info( + 'Resize the absolute_pos_embed shape from ' + f'{ckpt_pos_embed_shape} to {self.absolute_pos_embed.shape}.') + + ckpt_pos_embed_shape = to_2tuple( + int(np.sqrt(ckpt_pos_embed_shape[1] - self.num_extra_tokens))) + pos_embed_shape = self.patch_embed.init_out_size + + state_dict[name] = resize_pos_embed(state_dict[name], + ckpt_pos_embed_shape, + pos_embed_shape, + self.interpolate_mode, + self.num_extra_tokens) + + def _delete_reinit_params(self, state_dict, prefix, *args, **kwargs): + # delete relative_position_index since we always re-init it + from mmengine.logging import MMLogger + logger = MMLogger.get_current_instance() + logger.info( + 'Delete `relative_position_index` and `relative_coords_table` ' + 'since we always re-init these params according to the ' + '`window_size`, which might cause unwanted but unworried ' + 'warnings when loading checkpoint.') + relative_position_index_keys = [ + k for k in state_dict.keys() if 'relative_position_index' in k + ] + for k in relative_position_index_keys: + del state_dict[k] + + # delete relative_coords_table since we always re-init it + relative_position_index_keys = [ + k for k in state_dict.keys() if 'relative_coords_table' in k + ] + for k in relative_position_index_keys: + del state_dict[k] diff --git a/mmpretrain/models/backbones/t2t_vit.py b/mmpretrain/models/backbones/t2t_vit.py new file mode 100644 index 0000000000000000000000000000000000000000..288ef0dc2575c8063fa67c330d49782cd2b6ba00 --- /dev/null +++ b/mmpretrain/models/backbones/t2t_vit.py @@ -0,0 +1,447 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
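# --- Editor's note: illustrative sketch, not part of this diff ---------------
# Rough usage check for the SwinTransformerV2 backbone defined in
# swin_transformer_v2.py above, assuming mmpretrain and its dependencies are
# importable. With arch='tiny' (stage channels 96/192/384/768), the default
# img_size=256, patch_size=4 and out_indices=(3,), the last stage should yield
# a single (1, 768, 8, 8) feature map:
import torch
from mmpretrain.models import SwinTransformerV2

model = SwinTransformerV2(arch='tiny', img_size=256, window_size=8)
model.eval()
with torch.no_grad():
    feats = model(torch.rand(1, 3, 256, 256))
print([f.shape for f in feats])   # expected: [torch.Size([1, 768, 8, 8])]
# -----------------------------------------------------------------------------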
+from copy import deepcopy +from typing import Sequence + +import numpy as np +import torch +import torch.nn as nn +from mmcv.cnn.bricks.transformer import FFN +from mmengine.model import BaseModule, ModuleList +from mmengine.model.weight_init import trunc_normal_ + +from mmpretrain.registry import MODELS +from ..utils import (MultiheadAttention, build_norm_layer, resize_pos_embed, + to_2tuple) +from .base_backbone import BaseBackbone + + +class T2TTransformerLayer(BaseModule): + """Transformer Layer for T2T_ViT. + + Comparing with :obj:`TransformerEncoderLayer` in ViT, it supports + different ``input_dims`` and ``embed_dims``. + + Args: + embed_dims (int): The feature dimension. + num_heads (int): Parallel attention heads. + feedforward_channels (int): The hidden dimension for FFNs + input_dims (int, optional): The input token dimension. + Defaults to None. + drop_rate (float): Probability of an element to be zeroed + after the feed forward layer. Defaults to 0. + attn_drop_rate (float): The drop out rate for attention output weights. + Defaults to 0. + drop_path_rate (float): Stochastic depth rate. Defaults to 0. + num_fcs (int): The number of fully-connected layers for FFNs. + Defaults to 2. + qkv_bias (bool): enable bias for qkv if True. Defaults to True. + qk_scale (float, optional): Override default qk scale of + ``(input_dims // num_heads) ** -0.5`` if set. Defaults to None. + act_cfg (dict): The activation config for FFNs. + Defaluts to ``dict(type='GELU')``. + norm_cfg (dict): Config dict for normalization layer. + Defaults to ``dict(type='LN')``. + init_cfg (dict, optional): Initialization config dict. + Defaults to None. + + Notes: + In general, ``qk_scale`` should be ``head_dims ** -0.5``, i.e. + ``(embed_dims // num_heads) ** -0.5``. However, in the official + code, it uses ``(input_dims // num_heads) ** -0.5``, so here we + keep the same with the official implementation. + """ + + def __init__(self, + embed_dims, + num_heads, + feedforward_channels, + input_dims=None, + drop_rate=0., + attn_drop_rate=0., + drop_path_rate=0., + num_fcs=2, + qkv_bias=False, + qk_scale=None, + act_cfg=dict(type='GELU'), + norm_cfg=dict(type='LN'), + init_cfg=None): + super(T2TTransformerLayer, self).__init__(init_cfg=init_cfg) + + self.v_shortcut = True if input_dims is not None else False + input_dims = input_dims or embed_dims + + self.ln1 = build_norm_layer(norm_cfg, input_dims) + + self.attn = MultiheadAttention( + input_dims=input_dims, + embed_dims=embed_dims, + num_heads=num_heads, + attn_drop=attn_drop_rate, + proj_drop=drop_rate, + dropout_layer=dict(type='DropPath', drop_prob=drop_path_rate), + qkv_bias=qkv_bias, + qk_scale=qk_scale or (input_dims // num_heads)**-0.5, + v_shortcut=self.v_shortcut) + + self.ln2 = build_norm_layer(norm_cfg, embed_dims) + + self.ffn = FFN( + embed_dims=embed_dims, + feedforward_channels=feedforward_channels, + num_fcs=num_fcs, + ffn_drop=drop_rate, + dropout_layer=dict(type='DropPath', drop_prob=drop_path_rate), + act_cfg=act_cfg) + + def forward(self, x): + if self.v_shortcut: + x = self.attn(self.ln1(x)) + else: + x = x + self.attn(self.ln1(x)) + x = self.ffn(self.ln2(x), identity=x) + return x + + +class T2TModule(BaseModule): + """Tokens-to-Token module. + + "Tokens-to-Token module" (T2T Module) can model the local structure + information of images and reduce the length of tokens progressively. 
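# --- Editor's note: illustrative sketch, not part of this diff ---------------
# The Notes section of T2TTransformerLayer above keeps the official T2T scaling
# of (input_dims // num_heads) ** -0.5 instead of the usual head-dim scaling.
# Quick numeric comparison for the first soft-split attention used by the T2T
# module below (input_dims = 3 * 7 * 7 = 147, embed_dims = token_dims = 64,
# num_heads = 1); the two choices differ by roughly 1.5x:
official_scale = (147 // 1) ** -0.5   # ~0.0825, what this implementation uses
head_dim_scale = (64 // 1) ** -0.5    # 0.125, the conventional qk scale
print(official_scale, head_dim_scale)
# -----------------------------------------------------------------------------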
+ + Args: + img_size (int): Input image size + in_channels (int): Number of input channels + embed_dims (int): Embedding dimension + token_dims (int): Tokens dimension in T2TModuleAttention. + use_performer (bool): If True, use Performer version self-attention to + adopt regular self-attention. Defaults to False. + init_cfg (dict, optional): The extra config for initialization. + Default: None. + + Notes: + Usually, ``token_dim`` is set as a small value (32 or 64) to reduce + MACs + """ + + def __init__( + self, + img_size=224, + in_channels=3, + embed_dims=384, + token_dims=64, + use_performer=False, + init_cfg=None, + ): + super(T2TModule, self).__init__(init_cfg) + + self.embed_dims = embed_dims + + self.soft_split0 = nn.Unfold( + kernel_size=(7, 7), stride=(4, 4), padding=(2, 2)) + self.soft_split1 = nn.Unfold( + kernel_size=(3, 3), stride=(2, 2), padding=(1, 1)) + self.soft_split2 = nn.Unfold( + kernel_size=(3, 3), stride=(2, 2), padding=(1, 1)) + + if not use_performer: + self.attention1 = T2TTransformerLayer( + input_dims=in_channels * 7 * 7, + embed_dims=token_dims, + num_heads=1, + feedforward_channels=token_dims) + + self.attention2 = T2TTransformerLayer( + input_dims=token_dims * 3 * 3, + embed_dims=token_dims, + num_heads=1, + feedforward_channels=token_dims) + + self.project = nn.Linear(token_dims * 3 * 3, embed_dims) + else: + raise NotImplementedError("Performer hasn't been implemented.") + + # there are 3 soft split, stride are 4,2,2 separately + out_side = img_size // (4 * 2 * 2) + self.init_out_size = [out_side, out_side] + self.num_patches = out_side**2 + + @staticmethod + def _get_unfold_size(unfold: nn.Unfold, input_size): + h, w = input_size + kernel_size = to_2tuple(unfold.kernel_size) + stride = to_2tuple(unfold.stride) + padding = to_2tuple(unfold.padding) + dilation = to_2tuple(unfold.dilation) + + h_out = (h + 2 * padding[0] - dilation[0] * + (kernel_size[0] - 1) - 1) // stride[0] + 1 + w_out = (w + 2 * padding[1] - dilation[1] * + (kernel_size[1] - 1) - 1) // stride[1] + 1 + return (h_out, w_out) + + def forward(self, x): + # step0: soft split + hw_shape = self._get_unfold_size(self.soft_split0, x.shape[2:]) + x = self.soft_split0(x).transpose(1, 2) + + for step in [1, 2]: + # re-structurization/reconstruction + attn = getattr(self, f'attention{step}') + x = attn(x).transpose(1, 2) + B, C, _ = x.shape + x = x.reshape(B, C, hw_shape[0], hw_shape[1]) + + # soft split + soft_split = getattr(self, f'soft_split{step}') + hw_shape = self._get_unfold_size(soft_split, hw_shape) + x = soft_split(x).transpose(1, 2) + + # final tokens + x = self.project(x) + return x, hw_shape + + +def get_sinusoid_encoding(n_position, embed_dims): + """Generate sinusoid encoding table. + + Sinusoid encoding is a kind of relative position encoding method came from + `Attention Is All You Need`_. + + Args: + n_position (int): The length of the input token. + embed_dims (int): The position embedding dimension. + + Returns: + :obj:`torch.FloatTensor`: The sinusoid encoding table. 
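# --- Editor's note: illustrative sketch, not part of this diff ---------------
# Token-count arithmetic for T2TModule above with the default 224x224 input:
# the three soft splits use strides 4, 2 and 2, and their padding is chosen so
# each side simply divides by the stride, giving 224 -> 56 -> 28 -> 14 and a
# final init_out_size of [14, 14] (196 tokens):
for side, stride in [(224, 4), (56, 2), (28, 2)]:
    out_side = side // stride
    print(f'{side}x{side} -> {out_side}x{out_side} ({out_side ** 2} tokens)')
# -----------------------------------------------------------------------------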
+ """ + + def get_position_angle_vec(position): + return [ + position / np.power(10000, 2 * (i // 2) / embed_dims) + for i in range(embed_dims) + ] + + sinusoid_table = np.array( + [get_position_angle_vec(pos) for pos in range(n_position)]) + sinusoid_table[:, 0::2] = np.sin(sinusoid_table[:, 0::2]) # dim 2i + sinusoid_table[:, 1::2] = np.cos(sinusoid_table[:, 1::2]) # dim 2i+1 + + return torch.FloatTensor(sinusoid_table).unsqueeze(0) + + +@MODELS.register_module() +class T2T_ViT(BaseBackbone): + """Tokens-to-Token Vision Transformer (T2T-ViT) + + A PyTorch implementation of `Tokens-to-Token ViT: Training Vision + Transformers from Scratch on ImageNet `_ + + Args: + img_size (int | tuple): The expected input image shape. Because we + support dynamic input shape, just set the argument to the most + common input image shape. Defaults to 224. + in_channels (int): Number of input channels. + embed_dims (int): Embedding dimension. + num_layers (int): Num of transformer layers in encoder. + Defaults to 14. + out_indices (Sequence | int): Output from which stages. + Defaults to -1, means the last stage. + drop_rate (float): Dropout rate after position embedding. + Defaults to 0. + drop_path_rate (float): stochastic depth rate. Defaults to 0. + norm_cfg (dict): Config dict for normalization layer. Defaults to + ``dict(type='LN')``. + final_norm (bool): Whether to add a additional layer to normalize + final feature map. Defaults to True. + out_type (str): The type of output features. Please choose from + + - ``"cls_token"``: The class token tensor with shape (B, C). + - ``"featmap"``: The feature map tensor from the patch tokens + with shape (B, C, H, W). + - ``"avg_featmap"``: The global averaged feature map tensor + with shape (B, C). + - ``"raw"``: The raw feature tensor includes patch tokens and + class tokens with shape (B, L, C). + + Defaults to ``"cls_token"``. + with_cls_token (bool): Whether concatenating class token into image + tokens as transformer input. Defaults to True. + interpolate_mode (str): Select the interpolate mode for position + embeding vector resize. Defaults to "bicubic". + t2t_cfg (dict): Extra config of Tokens-to-Token module. + Defaults to an empty dict. + layer_cfgs (Sequence | dict): Configs of each transformer layer in + encoder. Defaults to an empty dict. + init_cfg (dict, optional): The Config for initialization. + Defaults to None. 
+ """ + OUT_TYPES = {'raw', 'cls_token', 'featmap', 'avg_featmap'} + + def __init__(self, + img_size=224, + in_channels=3, + embed_dims=384, + num_layers=14, + out_indices=-1, + drop_rate=0., + drop_path_rate=0., + norm_cfg=dict(type='LN'), + final_norm=True, + out_type='cls_token', + with_cls_token=True, + interpolate_mode='bicubic', + t2t_cfg=dict(), + layer_cfgs=dict(), + init_cfg=None): + super().__init__(init_cfg) + + # Token-to-Token Module + self.tokens_to_token = T2TModule( + img_size=img_size, + in_channels=in_channels, + embed_dims=embed_dims, + **t2t_cfg) + self.patch_resolution = self.tokens_to_token.init_out_size + num_patches = self.patch_resolution[0] * self.patch_resolution[1] + + # Set out type + if out_type not in self.OUT_TYPES: + raise ValueError(f'Unsupported `out_type` {out_type}, please ' + f'choose from {self.OUT_TYPES}') + self.out_type = out_type + + # Set cls token + if with_cls_token: + self.cls_token = nn.Parameter(torch.zeros(1, 1, embed_dims)) + self.num_extra_tokens = 1 + elif out_type != 'cls_token': + self.cls_token = None + self.num_extra_tokens = 0 + else: + raise ValueError( + 'with_cls_token must be True when `out_type="cls_token"`.') + + # Set position embedding + self.interpolate_mode = interpolate_mode + sinusoid_table = get_sinusoid_encoding( + num_patches + self.num_extra_tokens, embed_dims) + self.register_buffer('pos_embed', sinusoid_table) + self._register_load_state_dict_pre_hook(self._prepare_pos_embed) + + self.drop_after_pos = nn.Dropout(p=drop_rate) + + if isinstance(out_indices, int): + out_indices = [out_indices] + assert isinstance(out_indices, Sequence), \ + f'"out_indices" must be a sequence or int, ' \ + f'get {type(out_indices)} instead.' + for i, index in enumerate(out_indices): + if index < 0: + out_indices[i] = num_layers + index + assert 0 <= out_indices[i] <= num_layers, \ + f'Invalid out_indices {index}' + self.out_indices = out_indices + + # stochastic depth decay rule + dpr = [x for x in np.linspace(0, drop_path_rate, num_layers)] + + self.encoder = ModuleList() + for i in range(num_layers): + if isinstance(layer_cfgs, Sequence): + layer_cfg = layer_cfgs[i] + else: + layer_cfg = deepcopy(layer_cfgs) + layer_cfg = { + 'embed_dims': embed_dims, + 'num_heads': 6, + 'feedforward_channels': 3 * embed_dims, + 'drop_path_rate': dpr[i], + 'qkv_bias': False, + 'norm_cfg': norm_cfg, + **layer_cfg + } + + layer = T2TTransformerLayer(**layer_cfg) + self.encoder.append(layer) + + self.final_norm = final_norm + if final_norm: + self.norm = build_norm_layer(norm_cfg, embed_dims) + else: + self.norm = nn.Identity() + + def init_weights(self): + super().init_weights() + + if (isinstance(self.init_cfg, dict) + and self.init_cfg['type'] == 'Pretrained'): + # Suppress custom init if use pretrained model. 
+ return + + trunc_normal_(self.cls_token, std=.02) + + def _prepare_pos_embed(self, state_dict, prefix, *args, **kwargs): + name = prefix + 'pos_embed' + if name not in state_dict.keys(): + return + + ckpt_pos_embed_shape = state_dict[name].shape + if self.pos_embed.shape != ckpt_pos_embed_shape: + from mmengine.logging import MMLogger + logger = MMLogger.get_current_instance() + logger.info( + f'Resize the pos_embed shape from {ckpt_pos_embed_shape} ' + f'to {self.pos_embed.shape}.') + + ckpt_pos_embed_shape = to_2tuple( + int(np.sqrt(ckpt_pos_embed_shape[1] - self.num_extra_tokens))) + pos_embed_shape = self.tokens_to_token.init_out_size + + state_dict[name] = resize_pos_embed(state_dict[name], + ckpt_pos_embed_shape, + pos_embed_shape, + self.interpolate_mode, + self.num_extra_tokens) + + def forward(self, x): + B = x.shape[0] + x, patch_resolution = self.tokens_to_token(x) + + if self.cls_token is not None: + # stole cls_tokens impl from Phil Wang, thanks + cls_token = self.cls_token.expand(B, -1, -1) + x = torch.cat((cls_token, x), dim=1) + + x = x + resize_pos_embed( + self.pos_embed, + self.patch_resolution, + patch_resolution, + mode=self.interpolate_mode, + num_extra_tokens=self.num_extra_tokens) + x = self.drop_after_pos(x) + + outs = [] + for i, layer in enumerate(self.encoder): + x = layer(x) + + if i == len(self.encoder) - 1 and self.final_norm: + x = self.norm(x) + + if i in self.out_indices: + outs.append(self._format_output(x, patch_resolution)) + + return tuple(outs) + + def _format_output(self, x, hw): + if self.out_type == 'raw': + return x + if self.out_type == 'cls_token': + return x[:, 0] + + patch_token = x[:, self.num_extra_tokens:] + if self.out_type == 'featmap': + B = x.size(0) + # (B, N, C) -> (B, H, W, C) -> (B, C, H, W) + return patch_token.reshape(B, *hw, -1).permute(0, 3, 1, 2) + if self.out_type == 'avg_featmap': + return patch_token.mean(dim=1) diff --git a/mmpretrain/models/backbones/timm_backbone.py b/mmpretrain/models/backbones/timm_backbone.py new file mode 100644 index 0000000000000000000000000000000000000000..51ecbdbb077be0643026de2ec91c0169263a41f7 --- /dev/null +++ b/mmpretrain/models/backbones/timm_backbone.py @@ -0,0 +1,111 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import warnings + +from mmengine.logging import MMLogger + +from mmpretrain.registry import MODELS +from mmpretrain.utils import require +from .base_backbone import BaseBackbone + + +def print_timm_feature_info(feature_info): + """Print feature_info of timm backbone to help development and debug. + + Args: + feature_info (list[dict] | timm.models.features.FeatureInfo | None): + feature_info of timm backbone. + """ + logger = MMLogger.get_current_instance() + if feature_info is None: + logger.warning('This backbone does not have feature_info') + elif isinstance(feature_info, list): + for feat_idx, each_info in enumerate(feature_info): + logger.info(f'backbone feature_info[{feat_idx}]: {each_info}') + else: + try: + logger.info(f'backbone out_indices: {feature_info.out_indices}') + logger.info(f'backbone out_channels: {feature_info.channels()}') + logger.info(f'backbone out_strides: {feature_info.reduction()}') + except AttributeError: + logger.warning('Unexpected format of backbone feature_info') + + +@MODELS.register_module() +class TIMMBackbone(BaseBackbone): + """Wrapper to use backbones from timm library. + + More details can be found in + `timm `_. + See especially the document for `feature extraction + `_. 
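# --- Editor's note: illustrative sketch, not part of this diff ---------------
# Rough usage check for the T2T_ViT backbone defined in t2t_vit.py above,
# assuming mmpretrain and its dependencies are importable. With the defaults
# (embed_dims=384, 14 layers, out_type='cls_token'), a 224x224 input should
# yield a single (1, 384) class-token feature:
import torch
from mmpretrain.models import T2T_ViT

model = T2T_ViT()
model.eval()
with torch.no_grad():
    feats = model(torch.rand(1, 3, 224, 224))
print([f.shape for f in feats])   # expected: [torch.Size([1, 384])]
# -----------------------------------------------------------------------------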
+ + Args: + model_name (str): Name of timm model to instantiate. + features_only (bool): Whether to extract feature pyramid (multi-scale + feature maps from the deepest layer at each stride). For Vision + Transformer models that do not support this argument, + set this False. Defaults to False. + pretrained (bool): Whether to load pretrained weights. + Defaults to False. + checkpoint_path (str): Path of checkpoint to load at the last of + ``timm.create_model``. Defaults to empty string, which means + not loading. + in_channels (int): Number of input image channels. Defaults to 3. + init_cfg (dict or list[dict], optional): Initialization config dict of + OpenMMLab projects. Defaults to None. + **kwargs: Other timm & model specific arguments. + """ + + @require('timm') + def __init__(self, + model_name, + features_only=False, + pretrained=False, + checkpoint_path='', + in_channels=3, + init_cfg=None, + **kwargs): + import timm + + if not isinstance(pretrained, bool): + raise TypeError('pretrained must be bool, not str for model path') + if features_only and checkpoint_path: + warnings.warn( + 'Using both features_only and checkpoint_path will cause error' + ' in timm. See ' + 'https://github.com/rwightman/pytorch-image-models/issues/488') + + super(TIMMBackbone, self).__init__(init_cfg) + if 'norm_layer' in kwargs: + norm_class = MODELS.get(kwargs['norm_layer']) + + def build_norm(*args, **kwargs): + return norm_class(*args, **kwargs) + + kwargs['norm_layer'] = build_norm + self.timm_model = timm.create_model( + model_name=model_name, + features_only=features_only, + pretrained=pretrained, + in_chans=in_channels, + checkpoint_path=checkpoint_path, + **kwargs) + + # reset classifier + if hasattr(self.timm_model, 'reset_classifier'): + self.timm_model.reset_classifier(0, '') + + # Hack to use pretrained weights from timm + if pretrained or checkpoint_path: + self._is_init = True + + feature_info = getattr(self.timm_model, 'feature_info', None) + print_timm_feature_info(feature_info) + + def forward(self, x): + features = self.timm_model(x) + if isinstance(features, (list, tuple)): + features = tuple(features) + else: + features = (features, ) + return features diff --git a/mmpretrain/models/backbones/tinyvit.py b/mmpretrain/models/backbones/tinyvit.py new file mode 100644 index 0000000000000000000000000000000000000000..5279832184343a6e8ff4b253891de1b990192775 --- /dev/null +++ b/mmpretrain/models/backbones/tinyvit.py @@ -0,0 +1,769 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Sequence, Tuple + +import torch +import torch.nn as nn +import torch.utils.checkpoint as checkpoint +from mmcv.cnn.bricks import DropPath, build_activation_layer, build_norm_layer +from mmengine.model import BaseModule, ModuleList, Sequential +from torch.nn import functional as F + +from mmpretrain.registry import MODELS +from ..utils import LeAttention +from .base_backbone import BaseBackbone + + +class ConvBN2d(Sequential): + """An implementation of Conv2d + BatchNorm2d with support of fusion. + + Modified from + https://github.com/microsoft/Cream/blob/main/TinyViT/models/tiny_vit.py + + Args: + in_channels (int): The number of input channels. + out_channels (int): The number of output channels. + kernel_size (int): The size of the convolution kernel. + Default: 1. + stride (int): The stride of the convolution. + Default: 1. + padding (int): The padding of the convolution. + Default: 0. + dilation (int): The dilation of the convolution. + Default: 1. 
+ groups (int): The number of groups in the convolution. + Default: 1. + bn_weight_init (float): The initial value of the weight of + the nn.BatchNorm2d layer. Default: 1.0. + init_cfg (dict): The initialization config of the module. + Default: None. + """ + + def __init__(self, + in_channels, + out_channels, + kernel_size=1, + stride=1, + padding=0, + dilation=1, + groups=1, + bn_weight_init=1.0, + init_cfg=None): + super().__init__(init_cfg=init_cfg) + self.add_module( + 'conv2d', + nn.Conv2d( + in_channels=in_channels, + out_channels=out_channels, + kernel_size=kernel_size, + stride=stride, + padding=padding, + dilation=dilation, + groups=groups, + bias=False)) + bn2d = nn.BatchNorm2d(num_features=out_channels) + # bn initialization + torch.nn.init.constant_(bn2d.weight, bn_weight_init) + torch.nn.init.constant_(bn2d.bias, 0) + self.add_module('bn2d', bn2d) + + @torch.no_grad() + def fuse(self): + conv2d, bn2d = self._modules.values() + w = bn2d.weight / (bn2d.running_var + bn2d.eps)**0.5 + w = conv2d.weight * w[:, None, None, None] + b = bn2d.bias - bn2d.running_mean * bn2d.weight / \ + (bn2d.running_var + bn2d.eps)**0.5 + + m = nn.Conv2d( + in_channels=w.size(1) * self.c.groups, + out_channels=w.size(0), + kernel_size=w.shape[2:], + stride=self.conv2d.stride, + padding=self.conv2d.padding, + dilation=self.conv2d.dilation, + groups=self.conv2d.groups) + m.weight.data.copy_(w) + m.bias.data.copy_(b) + return m + + +class PatchEmbed(BaseModule): + """Patch Embedding for Vision Transformer. + + Adapted from + https://github.com/microsoft/Cream/blob/main/TinyViT/models/tiny_vit.py + + Different from `mmcv.cnn.bricks.transformer.PatchEmbed`, this module use + Conv2d and BatchNorm2d to implement PatchEmbedding, and output shape is + (N, C, H, W). + + Args: + in_channels (int): The number of input channels. + embed_dim (int): The embedding dimension. + resolution (Tuple[int, int]): The resolution of the input feature. + act_cfg (dict): The activation config of the module. + Default: dict(type='GELU'). + """ + + def __init__(self, + in_channels, + embed_dim, + resolution, + act_cfg=dict(type='GELU')): + super().__init__() + img_size: Tuple[int, int] = resolution + self.patches_resolution = (img_size[0] // 4, img_size[1] // 4) + self.num_patches = self.patches_resolution[0] * \ + self.patches_resolution[1] + self.in_channels = in_channels + self.embed_dim = embed_dim + self.seq = nn.Sequential( + ConvBN2d( + in_channels, + embed_dim // 2, + kernel_size=3, + stride=2, + padding=1), + build_activation_layer(act_cfg), + ConvBN2d( + embed_dim // 2, embed_dim, kernel_size=3, stride=2, padding=1), + ) + + def forward(self, x): + return self.seq(x) + + +class PatchMerging(nn.Module): + """Patch Merging for TinyViT. + + Adapted from + https://github.com/microsoft/Cream/blob/main/TinyViT/models/tiny_vit.py + + Different from `mmpretrain.models.utils.PatchMerging`, this module use + Conv2d and BatchNorm2d to implement PatchMerging. + + Args: + in_channels (int): The number of input channels. + resolution (Tuple[int, int]): The resolution of the input feature. + out_channels (int): The number of output channels. + act_cfg (dict): The activation config of the module. + Default: dict(type='GELU'). 
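# --- Editor's note: illustrative sketch, not part of this diff ---------------
# The algebra behind ConvBN2d.fuse() above: a BatchNorm2d in eval mode can be
# folded into the preceding bias-free Conv2d by rescaling the weights and
# deriving a bias from the running statistics. (Side note: fuse() references
# `self.c.groups`, which appears to be a leftover from the original TinyViT
# code where the conv submodule was named `c`; here it is registered as
# `conv2d`.) Standalone sketch with plain torch modules:
import torch
import torch.nn as nn

conv = nn.Conv2d(8, 16, kernel_size=3, padding=1, bias=False)
bn = nn.BatchNorm2d(16)
bn.eval()   # fusion relies on the frozen running statistics
fused = nn.Conv2d(8, 16, kernel_size=3, padding=1, bias=True)

with torch.no_grad():
    scale = bn.weight / torch.sqrt(bn.running_var + bn.eps)
    fused.weight.copy_(conv.weight * scale[:, None, None, None])
    fused.bias.copy_(bn.bias - bn.running_mean * scale)
    x = torch.rand(1, 8, 32, 32)
    assert torch.allclose(bn(conv(x)), fused(x), atol=1e-5)
# -----------------------------------------------------------------------------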
+ """ + + def __init__(self, + resolution, + in_channels, + out_channels, + act_cfg=dict(type='GELU')): + super().__init__() + + self.img_size = resolution + + self.act = build_activation_layer(act_cfg) + self.conv1 = ConvBN2d(in_channels, out_channels, kernel_size=1) + self.conv2 = ConvBN2d( + out_channels, + out_channels, + kernel_size=3, + stride=2, + padding=1, + groups=out_channels) + self.conv3 = ConvBN2d(out_channels, out_channels, kernel_size=1) + self.out_resolution = (resolution[0] // 2, resolution[1] // 2) + + def forward(self, x): + if len(x.shape) == 3: + H, W = self.img_size + B = x.shape[0] + x = x.view(B, H, W, -1).permute(0, 3, 1, 2) + x = self.conv1(x) + x = self.act(x) + x = self.conv2(x) + x = self.act(x) + x = self.conv3(x) + + x = x.flatten(2).transpose(1, 2) + return x + + +class MBConvBlock(nn.Module): + """Mobile Inverted Residual Bottleneck Block for TinyViT. Adapted from + https://github.com/microsoft/Cream/blob/main/TinyViT/models/tiny_vit.py. + + Args: + in_channels (int): The number of input channels. + out_channels (int): The number of output channels. + expand_ratio (int): The expand ratio of the hidden channels. + drop_rate (float): The drop rate of the block. + act_cfg (dict): The activation config of the module. + Default: dict(type='GELU'). + """ + + def __init__(self, + in_channels, + out_channels, + expand_ratio, + drop_path, + act_cfg=dict(type='GELU')): + super().__init__() + self.in_channels = in_channels + hidden_channels = int(in_channels * expand_ratio) + + # linear + self.conv1 = ConvBN2d(in_channels, hidden_channels, kernel_size=1) + self.act = build_activation_layer(act_cfg) + # depthwise conv + self.conv2 = ConvBN2d( + in_channels=hidden_channels, + out_channels=hidden_channels, + kernel_size=3, + stride=1, + padding=1, + groups=hidden_channels) + # linear + self.conv3 = ConvBN2d( + hidden_channels, out_channels, kernel_size=1, bn_weight_init=0.0) + + self.drop_path = DropPath( + drop_path) if drop_path > 0. else nn.Identity() + + def forward(self, x): + shortcut = x + + x = self.conv1(x) + x = self.act(x) + + x = self.conv2(x) + x = self.act(x) + + x = self.conv3(x) + + x = self.drop_path(x) + + x += shortcut + x = self.act(x) + + return x + + +class ConvStage(BaseModule): + """Convolution Stage for TinyViT. + + Adapted from + https://github.com/microsoft/Cream/blob/main/TinyViT/models/tiny_vit.py + + Args: + in_channels (int): The number of input channels. + resolution (Tuple[int, int]): The resolution of the input feature. + depth (int): The number of blocks in the stage. + act_cfg (dict): The activation config of the module. + drop_path (float): The drop path of the block. + downsample (None | nn.Module): The downsample operation. + Default: None. + use_checkpoint (bool): Whether to use checkpointing to save memory. + out_channels (int): The number of output channels. + conv_expand_ratio (int): The expand ratio of the hidden channels. + Default: 4. + init_cfg (dict | list[dict], optional): Initialization config dict. + Default: None. 
+ """ + + def __init__(self, + in_channels, + resolution, + depth, + act_cfg, + drop_path=0., + downsample=None, + use_checkpoint=False, + out_channels=None, + conv_expand_ratio=4., + init_cfg=None): + super().__init__(init_cfg=init_cfg) + + self.use_checkpoint = use_checkpoint + # build blocks + self.blocks = ModuleList([ + MBConvBlock( + in_channels=in_channels, + out_channels=in_channels, + expand_ratio=conv_expand_ratio, + drop_path=drop_path[i] + if isinstance(drop_path, list) else drop_path) + for i in range(depth) + ]) + + # patch merging layer + if downsample is not None: + self.downsample = downsample( + resolution=resolution, + in_channels=in_channels, + out_channels=out_channels, + act_cfg=act_cfg) + self.resolution = self.downsample.out_resolution + else: + self.downsample = None + self.resolution = resolution + + def forward(self, x): + for block in self.blocks: + if self.use_checkpoint: + x = checkpoint.checkpoint(block, x) + else: + x = block(x) + + if self.downsample is not None: + x = self.downsample(x) + return x + + +class MLP(BaseModule): + """MLP module for TinyViT. + + Args: + in_channels (int): The number of input channels. + hidden_channels (int, optional): The number of hidden channels. + Default: None. + out_channels (int, optional): The number of output channels. + Default: None. + act_cfg (dict): The activation config of the module. + Default: dict(type='GELU'). + drop (float): Probability of an element to be zeroed. + Default: 0. + init_cfg (dict | list[dict], optional): Initialization config dict. + Default: None. + """ + + def __init__(self, + in_channels, + hidden_channels=None, + out_channels=None, + act_cfg=dict(type='GELU'), + drop=0., + init_cfg=None): + super().__init__(init_cfg=init_cfg) + out_channels = out_channels or in_channels + hidden_channels = hidden_channels or in_channels + self.norm = nn.LayerNorm(in_channels) + self.fc1 = nn.Linear(in_channels, hidden_channels) + self.fc2 = nn.Linear(hidden_channels, out_channels) + self.act = build_activation_layer(act_cfg) + self.drop = nn.Dropout(drop) + + def forward(self, x): + x = self.norm(x) + + x = self.fc1(x) + x = self.act(x) + x = self.drop(x) + x = self.fc2(x) + x = self.drop(x) + return x + + +class TinyViTBlock(BaseModule): + """TinViT Block. + + Args: + in_channels (int): The number of input channels. + resolution (Tuple[int, int]): The resolution of the input feature. + num_heads (int): The number of heads in the multi-head attention. + window_size (int): The size of the window. + Default: 7. + mlp_ratio (float): The ratio of mlp hidden dim to embedding dim. + Default: 4. + drop (float): Probability of an element to be zeroed. + Default: 0. + drop_path (float): The drop path of the block. + Default: 0. + local_conv_size (int): The size of the local convolution. + Default: 3. + act_cfg (dict): The activation config of the module. + Default: dict(type='GELU'). + """ + + def __init__(self, + in_channels, + resolution, + num_heads, + window_size=7, + mlp_ratio=4., + drop=0., + drop_path=0., + local_conv_size=3, + act_cfg=dict(type='GELU')): + super().__init__() + self.in_channels = in_channels + self.img_size = resolution + self.num_heads = num_heads + assert window_size > 0, 'window_size must be greater than 0' + self.window_size = window_size + self.mlp_ratio = mlp_ratio + + self.drop_path = DropPath( + drop_path) if drop_path > 0. 
else nn.Identity() + + assert in_channels % num_heads == 0, \ + 'dim must be divisible by num_heads' + head_dim = in_channels // num_heads + + window_resolution = (window_size, window_size) + self.attn = LeAttention( + in_channels, + head_dim, + num_heads, + attn_ratio=1, + resolution=window_resolution) + + mlp_hidden_dim = int(in_channels * mlp_ratio) + self.mlp = MLP( + in_channels=in_channels, + hidden_channels=mlp_hidden_dim, + act_cfg=act_cfg, + drop=drop) + + self.local_conv = ConvBN2d( + in_channels=in_channels, + out_channels=in_channels, + kernel_size=local_conv_size, + stride=1, + padding=local_conv_size // 2, + groups=in_channels) + + def forward(self, x): + H, W = self.img_size + B, L, C = x.shape + assert L == H * W, 'input feature has wrong size' + res_x = x + if H == self.window_size and W == self.window_size: + x = self.attn(x) + else: + x = x.view(B, H, W, C) + pad_b = (self.window_size - + H % self.window_size) % self.window_size + pad_r = (self.window_size - + W % self.window_size) % self.window_size + padding = pad_b > 0 or pad_r > 0 + + if padding: + x = F.pad(x, (0, 0, 0, pad_r, 0, pad_b)) + + pH, pW = H + pad_b, W + pad_r + nH = pH // self.window_size + nW = pW // self.window_size + # window partition + x = x.view(B, nH, self.window_size, nW, self.window_size, + C).transpose(2, 3).reshape( + B * nH * nW, self.window_size * self.window_size, C) + x = self.attn(x) + # window reverse + x = x.view(B, nH, nW, self.window_size, self.window_size, + C).transpose(2, 3).reshape(B, pH, pW, C) + + if padding: + x = x[:, :H, :W].contiguous() + + x = x.view(B, L, C) + + x = res_x + self.drop_path(x) + + x = x.transpose(1, 2).reshape(B, C, H, W) + x = self.local_conv(x) + x = x.view(B, C, L).transpose(1, 2) + + x = x + self.drop_path(self.mlp(x)) + return x + + +class BasicStage(BaseModule): + """Basic Stage for TinyViT. + + Args: + in_channels (int): The number of input channels. + resolution (Tuple[int, int]): The resolution of the input feature. + depth (int): The number of blocks in the stage. + num_heads (int): The number of heads in the multi-head attention. + window_size (int): The size of the window. + mlp_ratio (float): The ratio of mlp hidden dim to embedding dim. + Default: 4. + drop (float): Probability of an element to be zeroed. + Default: 0. + drop_path (float): The drop path of the block. + Default: 0. + downsample (None | nn.Module): The downsample operation. + Default: None. + use_checkpoint (bool): Whether to use checkpointing to save memory. + Default: False. + act_cfg (dict): The activation config of the module. + Default: dict(type='GELU'). + init_cfg (dict | list[dict], optional): Initialization config dict. + Default: None. 
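# --- Editor's note: illustrative sketch, not part of this diff ---------------
# Window-padding arithmetic used by TinyViTBlock.forward above: the feature map
# is padded on the bottom/right until H and W are multiples of the window size,
# then split into (pH // window) * (pW // window) windows. With the default
# 224x224 input, the transformer stages run at resolutions 28, 14 and 7 with
# window sizes 7, 14 and 7:
def n_windows(H, W, window_size):
    pad_b = (window_size - H % window_size) % window_size
    pad_r = (window_size - W % window_size) % window_size
    return ((H + pad_b) // window_size) * ((W + pad_r) // window_size)

print(n_windows(28, 28, 7))    # 16 windows
print(n_windows(14, 14, 14))   # 1, i.e. the H == W == window_size fast path
print(n_windows(7, 7, 7))      # 1
# -----------------------------------------------------------------------------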
+ """ + + def __init__(self, + in_channels, + resolution, + depth, + num_heads, + window_size, + mlp_ratio=4., + drop=0., + drop_path=0., + downsample=None, + use_checkpoint=False, + local_conv_size=3, + out_channels=None, + act_cfg=dict(type='GELU'), + init_cfg=None): + super().__init__(init_cfg=init_cfg) + self.use_checkpoint = use_checkpoint + # build blocks + self.blocks = ModuleList([ + TinyViTBlock( + in_channels=in_channels, + resolution=resolution, + num_heads=num_heads, + window_size=window_size, + mlp_ratio=mlp_ratio, + drop=drop, + local_conv_size=local_conv_size, + act_cfg=act_cfg, + drop_path=drop_path[i] + if isinstance(drop_path, list) else drop_path) + for i in range(depth) + ]) + + # build patch merging layer + if downsample is not None: + self.downsample = downsample( + resolution=resolution, + in_channels=in_channels, + out_channels=out_channels, + act_cfg=act_cfg) + self.resolution = self.downsample.out_resolution + else: + self.downsample = None + self.resolution = resolution + + def forward(self, x): + for block in self.blocks: + if self.use_checkpoint: + x = checkpoint.checkpoint(block, x) + else: + x = block(x) + + if self.downsample is not None: + x = self.downsample(x) + return x + + +@MODELS.register_module() +class TinyViT(BaseBackbone): + """TinyViT. + A PyTorch implementation of : `TinyViT: Fast Pretraining Distillation + for Small Vision Transformers`_ + + Inspiration from + https://github.com/microsoft/Cream/blob/main/TinyViT + + Args: + arch (str | dict): The architecture of TinyViT. + Default: '5m'. + img_size (tuple | int): The resolution of the input image. + Default: (224, 224) + window_size (list): The size of the window. + Default: [7, 7, 14, 7] + in_channels (int): The number of input channels. + Default: 3. + depths (list[int]): The depth of each stage. + Default: [2, 2, 6, 2]. + mlp_ratio (list[int]): The ratio of mlp hidden dim to embedding dim. + Default: 4. + drop_rate (float): Probability of an element to be zeroed. + Default: 0. + drop_path_rate (float): The drop path of the block. + Default: 0.1. + use_checkpoint (bool): Whether to use checkpointing to save memory. + Default: False. + mbconv_expand_ratio (int): The expand ratio of the mbconv. + Default: 4.0 + local_conv_size (int): The size of the local conv. + Default: 3. + layer_lr_decay (float): The layer lr decay. + Default: 1.0 + out_indices (int | list[int]): Output from which stages. + Default: -1 + frozen_stages (int | list[int]): Stages to be frozen (all param fixed). + Default: -0 + gap_before_final_nrom (bool): Whether to add a gap before the final + norm. Default: True. + act_cfg (dict): The activation config of the module. + Default: dict(type='GELU'). + norm_cfg (dict): Config dict for normalization layer. + Default: dict(type='LN'). + init_cfg (dict | list[dict], optional): Initialization config dict. + Default: None. 
+ """ + arch_settings = { + '5m': { + 'channels': [64, 128, 160, 320], + 'num_heads': [2, 4, 5, 10], + 'depths': [2, 2, 6, 2], + }, + '11m': { + 'channels': [64, 128, 256, 448], + 'num_heads': [2, 4, 8, 14], + 'depths': [2, 2, 6, 2], + }, + '21m': { + 'channels': [96, 192, 384, 576], + 'num_heads': [3, 6, 12, 18], + 'depths': [2, 2, 6, 2], + }, + } + + def __init__(self, + arch='5m', + img_size=(224, 224), + window_size=[7, 7, 14, 7], + in_channels=3, + mlp_ratio=4., + drop_rate=0., + drop_path_rate=0.1, + use_checkpoint=False, + mbconv_expand_ratio=4.0, + local_conv_size=3, + layer_lr_decay=1.0, + out_indices=-1, + frozen_stages=0, + gap_before_final_norm=True, + act_cfg=dict(type='GELU'), + norm_cfg=dict(type='LN'), + init_cfg=None): + super().__init__(init_cfg=init_cfg) + + if isinstance(arch, str): + assert arch in self.arch_settings, \ + f'Unavaiable arch, please choose from ' \ + f'({set(self.arch_settings)} or pass a dict.' + arch = self.arch_settings[arch] + elif isinstance(arch, dict): + assert 'channels' in arch and 'num_heads' in arch and \ + 'depths' in arch, 'The arch dict must have' \ + f'"channels", "num_heads", "window_sizes" ' \ + f'keys, but got {arch.keys()}' + + self.channels = arch['channels'] + self.num_heads = arch['num_heads'] + self.widow_sizes = window_size + self.img_size = img_size + self.depths = arch['depths'] + + self.num_stages = len(self.channels) + + if isinstance(out_indices, int): + out_indices = [out_indices] + assert isinstance(out_indices, Sequence), \ + f'"out_indices" must by a sequence or int, ' \ + f'get {type(out_indices)} instead.' + for i, index in enumerate(out_indices): + if index < 0: + out_indices[i] = 4 + index + assert out_indices[i] >= 0, f'Invalid out_indices {index}' + self.out_indices = out_indices + + self.frozen_stages = frozen_stages + self.gap_before_final_norm = gap_before_final_norm + self.layer_lr_decay = layer_lr_decay + + self.patch_embed = PatchEmbed( + in_channels=in_channels, + embed_dim=self.channels[0], + resolution=self.img_size, + act_cfg=dict(type='GELU')) + patches_resolution = self.patch_embed.patches_resolution + + # stochastic depth decay rule + dpr = [ + x.item() + for x in torch.linspace(0, drop_path_rate, sum(self.depths)) + ] + + # build stages + self.stages = ModuleList() + for i in range(self.num_stages): + depth = self.depths[i] + channel = self.channels[i] + curr_resolution = (patches_resolution[0] // (2**i), + patches_resolution[1] // (2**i)) + drop_path = dpr[sum(self.depths[:i]):sum(self.depths[:i + 1])] + downsample = PatchMerging if (i < self.num_stages - 1) else None + out_channels = self.channels[min(i + 1, self.num_stages - 1)] + if i >= 1: + stage = BasicStage( + in_channels=channel, + resolution=curr_resolution, + depth=depth, + num_heads=self.num_heads[i], + window_size=self.widow_sizes[i], + mlp_ratio=mlp_ratio, + drop=drop_rate, + drop_path=drop_path, + downsample=downsample, + use_checkpoint=use_checkpoint, + local_conv_size=local_conv_size, + out_channels=out_channels, + act_cfg=act_cfg) + else: + stage = ConvStage( + in_channels=channel, + resolution=curr_resolution, + depth=depth, + act_cfg=act_cfg, + drop_path=drop_path, + downsample=downsample, + use_checkpoint=use_checkpoint, + out_channels=out_channels, + conv_expand_ratio=mbconv_expand_ratio) + self.stages.append(stage) + + # add output norm + if i in self.out_indices: + norm_layer = build_norm_layer(norm_cfg, out_channels)[1] + self.add_module(f'norm{i}', norm_layer) + + def set_layer_lr_decay(self, layer_lr_decay): + # TODO: add 
layer_lr_decay + pass + + def forward(self, x): + outs = [] + x = self.patch_embed(x) + + for i, stage in enumerate(self.stages): + x = stage(x) + if i in self.out_indices: + norm_layer = getattr(self, f'norm{i}') + if self.gap_before_final_norm: + gap = x.mean(1) + outs.append(norm_layer(gap)) + else: + out = norm_layer(x) + # convert the (B,L,C) format into (B,C,H,W) format + # which would be better for the downstream tasks. + B, L, C = out.shape + out = out.view(B, *stage.resolution, C) + outs.append(out.permute(0, 3, 1, 2)) + + return tuple(outs) + + def _freeze_stages(self): + for i in range(self.frozen_stages): + stage = self.stages[i] + stage.eval() + for param in stage.parameters(): + param.requires_grad = False + + def train(self, mode=True): + super(TinyViT, self).train(mode) + self._freeze_stages() diff --git a/mmpretrain/models/backbones/tnt.py b/mmpretrain/models/backbones/tnt.py new file mode 100644 index 0000000000000000000000000000000000000000..e1b241c1f6bc398157793748b7a457f0836daedb --- /dev/null +++ b/mmpretrain/models/backbones/tnt.py @@ -0,0 +1,368 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import math + +import torch +import torch.nn as nn +from mmcv.cnn import build_norm_layer +from mmcv.cnn.bricks.transformer import FFN, MultiheadAttention +from mmengine.model import BaseModule, ModuleList +from mmengine.model.weight_init import trunc_normal_ + +from mmpretrain.registry import MODELS +from ..utils import to_2tuple +from .base_backbone import BaseBackbone + + +class TransformerBlock(BaseModule): + """Implement a transformer block in TnTLayer. + + Args: + embed_dims (int): The feature dimension + num_heads (int): Parallel attention heads + ffn_ratio (int): A ratio to calculate the hidden_dims in ffn layer. + Default: 4 + drop_rate (float): Probability of an element to be zeroed + after the feed forward layer. Default 0. + attn_drop_rate (float): The drop out rate for attention layer. + Default 0. + drop_path_rate (float): stochastic depth rate. Default 0. + num_fcs (int): The number of fully-connected layers for FFNs. Default 2 + qkv_bias (bool): Enable bias for qkv if True. Default False + act_cfg (dict): The activation config for FFNs. Defaults to GELU. + norm_cfg (dict): Config dict for normalization layer. Default + layer normalization + batch_first (bool): Key, Query and Value are shape of + (batch, n, embed_dim) or (n, batch, embed_dim). + (batch, n, embed_dim) is common case in CV. Defaults to False + init_cfg (dict, optional): Initialization config dict. 
Defaults to None + """ + + def __init__(self, + embed_dims, + num_heads, + ffn_ratio=4, + drop_rate=0., + attn_drop_rate=0., + drop_path_rate=0., + num_fcs=2, + qkv_bias=False, + act_cfg=dict(type='GELU'), + norm_cfg=dict(type='LN'), + batch_first=True, + init_cfg=None): + super(TransformerBlock, self).__init__(init_cfg=init_cfg) + + self.norm_attn = build_norm_layer(norm_cfg, embed_dims)[1] + self.attn = MultiheadAttention( + embed_dims=embed_dims, + num_heads=num_heads, + attn_drop=attn_drop_rate, + proj_drop=drop_rate, + dropout_layer=dict(type='DropPath', drop_prob=drop_path_rate), + batch_first=batch_first) + + self.norm_ffn = build_norm_layer(norm_cfg, embed_dims)[1] + self.ffn = FFN( + embed_dims=embed_dims, + feedforward_channels=embed_dims * ffn_ratio, + num_fcs=num_fcs, + ffn_drop=drop_rate, + dropout_layer=dict(type='DropPath', drop_prob=drop_path_rate), + act_cfg=act_cfg) + + if not qkv_bias: + self.attn.attn.in_proj_bias = None + + def forward(self, x): + x = self.attn(self.norm_attn(x), identity=x) + x = self.ffn(self.norm_ffn(x), identity=x) + return x + + +class TnTLayer(BaseModule): + """Implement one encoder layer in Transformer in Transformer. + + Args: + num_pixel (int): The pixel number in target patch transformed with + a linear projection in inner transformer + embed_dims_inner (int): Feature dimension in inner transformer block + embed_dims_outer (int): Feature dimension in outer transformer block + num_heads_inner (int): Parallel attention heads in inner transformer. + num_heads_outer (int): Parallel attention heads in outer transformer. + inner_block_cfg (dict): Extra config of inner transformer block. + Defaults to empty dict. + outer_block_cfg (dict): Extra config of outer transformer block. + Defaults to empty dict. + norm_cfg (dict): Config dict for normalization layer. Default + layer normalization + init_cfg (dict, optional): Initialization config dict. Defaults to None + """ + + def __init__(self, + num_pixel, + embed_dims_inner, + embed_dims_outer, + num_heads_inner, + num_heads_outer, + inner_block_cfg=dict(), + outer_block_cfg=dict(), + norm_cfg=dict(type='LN'), + init_cfg=None): + super(TnTLayer, self).__init__(init_cfg=init_cfg) + + self.inner_block = TransformerBlock( + embed_dims=embed_dims_inner, + num_heads=num_heads_inner, + **inner_block_cfg) + + self.norm_proj = build_norm_layer(norm_cfg, embed_dims_inner)[1] + self.projection = nn.Linear( + embed_dims_inner * num_pixel, embed_dims_outer, bias=True) + + self.outer_block = TransformerBlock( + embed_dims=embed_dims_outer, + num_heads=num_heads_outer, + **outer_block_cfg) + + def forward(self, pixel_embed, patch_embed): + pixel_embed = self.inner_block(pixel_embed) + + B, N, C = patch_embed.size() + patch_embed[:, 1:] = patch_embed[:, 1:] + self.projection( + self.norm_proj(pixel_embed).reshape(B, N - 1, -1)) + patch_embed = self.outer_block(patch_embed) + + return pixel_embed, patch_embed + + +class PixelEmbed(BaseModule): + """Image to Pixel Embedding. + + Args: + img_size (int | tuple): The size of input image + patch_size (int): The size of one patch + in_channels (int): The num of input channels + embed_dims_inner (int): The num of channels of the target patch + transformed with a linear projection in inner transformer + stride (int): The stride of the conv2d layer. We use a conv2d layer + and a unfold layer to implement image to pixel embedding. 
+ init_cfg (dict, optional): Initialization config dict + """ + + def __init__(self, + img_size=224, + patch_size=16, + in_channels=3, + embed_dims_inner=48, + stride=4, + init_cfg=None): + super(PixelEmbed, self).__init__(init_cfg=init_cfg) + img_size = to_2tuple(img_size) + patch_size = to_2tuple(patch_size) + # patches_resolution property necessary for resizing + # positional embedding + patches_resolution = [ + img_size[0] // patch_size[0], img_size[1] // patch_size[1] + ] + num_patches = patches_resolution[0] * patches_resolution[1] + + self.img_size = img_size + self.num_patches = num_patches + self.embed_dims_inner = embed_dims_inner + + new_patch_size = [math.ceil(ps / stride) for ps in patch_size] + self.new_patch_size = new_patch_size + + self.proj = nn.Conv2d( + in_channels, + self.embed_dims_inner, + kernel_size=7, + padding=3, + stride=stride) + self.unfold = nn.Unfold( + kernel_size=new_patch_size, stride=new_patch_size) + + def forward(self, x, pixel_pos): + B, C, H, W = x.shape + assert H == self.img_size[0] and W == self.img_size[1], \ + f"Input image size ({H}*{W}) doesn't match model " \ + f'({self.img_size[0]}*{self.img_size[1]}).' + x = self.proj(x) + x = self.unfold(x) + x = x.transpose(1, + 2).reshape(B * self.num_patches, self.embed_dims_inner, + self.new_patch_size[0], + self.new_patch_size[1]) + x = x + pixel_pos + x = x.reshape(B * self.num_patches, self.embed_dims_inner, + -1).transpose(1, 2) + return x + + +@MODELS.register_module() +class TNT(BaseBackbone): + """Transformer in Transformer. + + A PyTorch implement of: `Transformer in Transformer + `_ + + Inspiration from + https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/tnt.py + + Args: + arch (str | dict): Vision Transformer architecture + Default: 'b' + img_size (int | tuple): Input image size. Defaults to 224 + patch_size (int | tuple): The patch size. Deault to 16 + in_channels (int): Number of input channels. Defaults to 3 + ffn_ratio (int): A ratio to calculate the hidden_dims in ffn layer. + Default: 4 + qkv_bias (bool): Enable bias for qkv if True. Default False + drop_rate (float): Probability of an element to be zeroed + after the feed forward layer. Default 0. + attn_drop_rate (float): The drop out rate for attention layer. + Default 0. + drop_path_rate (float): stochastic depth rate. Default 0. + act_cfg (dict): The activation config for FFNs. Defaults to GELU. + norm_cfg (dict): Config dict for normalization layer. Default + layer normalization + first_stride (int): The stride of the conv2d layer. We use a conv2d + layer and a unfold layer to implement image to pixel embedding. + num_fcs (int): The number of fully-connected layers for FFNs. Default 2 + init_cfg (dict, optional): Initialization config dict + """ + arch_zoo = { + **dict.fromkeys( + ['s', 'small'], { + 'embed_dims_outer': 384, + 'embed_dims_inner': 24, + 'num_layers': 12, + 'num_heads_outer': 6, + 'num_heads_inner': 4 + }), + **dict.fromkeys( + ['b', 'base'], { + 'embed_dims_outer': 640, + 'embed_dims_inner': 40, + 'num_layers': 12, + 'num_heads_outer': 10, + 'num_heads_inner': 4 + }) + } + + def __init__(self, + arch='b', + img_size=224, + patch_size=16, + in_channels=3, + ffn_ratio=4, + qkv_bias=False, + drop_rate=0., + attn_drop_rate=0., + drop_path_rate=0., + act_cfg=dict(type='GELU'), + norm_cfg=dict(type='LN'), + first_stride=4, + num_fcs=2, + init_cfg=[ + dict(type='TruncNormal', layer='Linear', std=.02), + dict(type='Constant', layer='LayerNorm', val=1., bias=0.) 
+ ]): + super(TNT, self).__init__(init_cfg=init_cfg) + + if isinstance(arch, str): + arch = arch.lower() + assert arch in set(self.arch_zoo), \ + f'Arch {arch} is not in default archs {set(self.arch_zoo)}' + self.arch_settings = self.arch_zoo[arch] + else: + essential_keys = { + 'embed_dims_outer', 'embed_dims_inner', 'num_layers', + 'num_heads_inner', 'num_heads_outer' + } + assert isinstance(arch, dict) and set(arch) == essential_keys, \ + f'Custom arch needs a dict with keys {essential_keys}' + self.arch_settings = arch + + self.embed_dims_inner = self.arch_settings['embed_dims_inner'] + self.embed_dims_outer = self.arch_settings['embed_dims_outer'] + # embed_dims for consistency with other models + self.embed_dims = self.embed_dims_outer + self.num_layers = self.arch_settings['num_layers'] + self.num_heads_inner = self.arch_settings['num_heads_inner'] + self.num_heads_outer = self.arch_settings['num_heads_outer'] + + self.pixel_embed = PixelEmbed( + img_size=img_size, + patch_size=patch_size, + in_channels=in_channels, + embed_dims_inner=self.embed_dims_inner, + stride=first_stride) + num_patches = self.pixel_embed.num_patches + self.num_patches = num_patches + new_patch_size = self.pixel_embed.new_patch_size + num_pixel = new_patch_size[0] * new_patch_size[1] + + self.norm1_proj = build_norm_layer(norm_cfg, num_pixel * + self.embed_dims_inner)[1] + self.projection = nn.Linear(num_pixel * self.embed_dims_inner, + self.embed_dims_outer) + self.norm2_proj = build_norm_layer(norm_cfg, self.embed_dims_outer)[1] + + self.cls_token = nn.Parameter(torch.zeros(1, 1, self.embed_dims_outer)) + self.patch_pos = nn.Parameter( + torch.zeros(1, num_patches + 1, self.embed_dims_outer)) + self.pixel_pos = nn.Parameter( + torch.zeros(1, self.embed_dims_inner, new_patch_size[0], + new_patch_size[1])) + self.drop_after_pos = nn.Dropout(p=drop_rate) + + dpr = [ + x.item() + for x in torch.linspace(0, drop_path_rate, self.num_layers) + ] # stochastic depth decay rule + self.layers = ModuleList() + for i in range(self.num_layers): + block_cfg = dict( + ffn_ratio=ffn_ratio, + drop_rate=drop_rate, + attn_drop_rate=attn_drop_rate, + drop_path_rate=dpr[i], + num_fcs=num_fcs, + qkv_bias=qkv_bias, + norm_cfg=norm_cfg, + batch_first=True) + self.layers.append( + TnTLayer( + num_pixel=num_pixel, + embed_dims_inner=self.embed_dims_inner, + embed_dims_outer=self.embed_dims_outer, + num_heads_inner=self.num_heads_inner, + num_heads_outer=self.num_heads_outer, + inner_block_cfg=block_cfg, + outer_block_cfg=block_cfg, + norm_cfg=norm_cfg)) + + self.norm = build_norm_layer(norm_cfg, self.embed_dims_outer)[1] + + trunc_normal_(self.cls_token, std=.02) + trunc_normal_(self.patch_pos, std=.02) + trunc_normal_(self.pixel_pos, std=.02) + + def forward(self, x): + B = x.shape[0] + pixel_embed = self.pixel_embed(x, self.pixel_pos) + + patch_embed = self.norm2_proj( + self.projection( + self.norm1_proj(pixel_embed.reshape(B, self.num_patches, -1)))) + patch_embed = torch.cat( + (self.cls_token.expand(B, -1, -1), patch_embed), dim=1) + patch_embed = patch_embed + self.patch_pos + patch_embed = self.drop_after_pos(patch_embed) + + for layer in self.layers: + pixel_embed, patch_embed = layer(pixel_embed, patch_embed) + + patch_embed = self.norm(patch_embed) + return (patch_embed[:, 0], ) diff --git a/mmpretrain/models/backbones/twins.py b/mmpretrain/models/backbones/twins.py new file mode 100644 index 0000000000000000000000000000000000000000..be55c02db1daa5cb37760f2066448b3fca2cb893 --- /dev/null +++ 
b/mmpretrain/models/backbones/twins.py @@ -0,0 +1,721 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import math + +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import Conv2d, build_norm_layer +from mmcv.cnn.bricks.drop import build_dropout +from mmcv.cnn.bricks.transformer import FFN, PatchEmbed +from mmengine.model import BaseModule, ModuleList +from mmengine.model.weight_init import (constant_init, normal_init, + trunc_normal_init) +from torch.nn.modules.batchnorm import _BatchNorm + +from mmpretrain.registry import MODELS +from ..utils import ConditionalPositionEncoding, MultiheadAttention + + +class GlobalSubsampledAttention(MultiheadAttention): + """Global Sub-sampled Attention (GSA) module. + + Args: + embed_dims (int): The embedding dimension. + num_heads (int): Parallel attention heads. + input_dims (int, optional): The input dimension, and if None, + use ``embed_dims``. Defaults to None. + attn_drop (float): Dropout rate of the dropout layer after the + attention calculation of query and key. Defaults to 0. + proj_drop (float): Dropout rate of the dropout layer after the + output projection. Defaults to 0. + dropout_layer (dict): The dropout config before adding the shortcut. + Defaults to ``dict(type='Dropout', drop_prob=0.)``. + qkv_bias (bool): If True, add a learnable bias to q, k, v. + Defaults to True. + norm_cfg (dict): Config dict for normalization layer. + Default: dict(type='LN'). + qk_scale (float, optional): Override default qk scale of + ``head_dim ** -0.5`` if set. Defaults to None. + proj_bias (bool) If True, add a learnable bias to output projection. + Defaults to True. + v_shortcut (bool): Add a shortcut from value to output. It's usually + used if ``input_dims`` is different from ``embed_dims``. + Defaults to False. + sr_ratio (float): The ratio of spatial reduction in attention modules. + Defaults to 1. + init_cfg (dict, optional): The Config for initialization. + Defaults to None. + """ + + def __init__(self, + embed_dims, + num_heads, + norm_cfg=dict(type='LN'), + qkv_bias=True, + sr_ratio=1, + **kwargs): + super(GlobalSubsampledAttention, + self).__init__(embed_dims, num_heads, **kwargs) + + self.qkv_bias = qkv_bias + self.q = nn.Linear(self.input_dims, embed_dims, bias=qkv_bias) + self.kv = nn.Linear(self.input_dims, embed_dims * 2, bias=qkv_bias) + + # remove self.qkv, here split into self.q, self.kv + delattr(self, 'qkv') + + self.sr_ratio = sr_ratio + if sr_ratio > 1: + # use a conv as the spatial-reduction operation, the kernel_size + # and stride in conv are equal to the sr_ratio. + self.sr = Conv2d( + in_channels=embed_dims, + out_channels=embed_dims, + kernel_size=sr_ratio, + stride=sr_ratio) + # The ret[0] of build_norm_layer is norm name. + self.norm = build_norm_layer(norm_cfg, embed_dims)[1] + + def forward(self, x, hw_shape): + B, N, C = x.shape + H, W = hw_shape + assert H * W == N, 'The product of h and w of hw_shape must be N, ' \ + 'which is the 2nd dim number of the input Tensor x.' + + q = self.q(x).reshape(B, N, self.num_heads, + C // self.num_heads).permute(0, 2, 1, 3) + + if self.sr_ratio > 1: + x = x.permute(0, 2, 1).reshape(B, C, *hw_shape) # BNC_2_BCHW + x = self.sr(x) + x = x.reshape(B, C, -1).permute(0, 2, 1) # BCHW_2_BNC + x = self.norm(x) + + kv = self.kv(x).reshape(B, -1, 2, self.num_heads, + self.head_dims).permute(2, 0, 3, 1, 4) + k, v = kv[0], kv[1] + + attn_drop = self.attn_drop if self.training else 0. 
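+        # When sr_ratio > 1, k and v are built from the spatially reduced
+        # tokens, so their sequence length is roughly N / sr_ratio**2 while q
+        # keeps the full length N. attn_drop is forced to 0. outside training
+        # so the fused attention call below stays deterministic at inference.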
+ x = self.scaled_dot_product_attention(q, k, v, dropout_p=attn_drop) + x = x.transpose(1, 2).reshape(B, N, self.embed_dims) + + x = self.proj(x) + x = self.out_drop(self.proj_drop(x)) + + if self.v_shortcut: + x = v.squeeze(1) + x + return x + + +class GSAEncoderLayer(BaseModule): + """Implements one encoder layer with GlobalSubsampledAttention(GSA). + + Args: + embed_dims (int): The feature dimension. + num_heads (int): Parallel attention heads. + feedforward_channels (int): The hidden dimension for FFNs. + drop_rate (float): Probability of an element to be zeroed + after the feed forward layer. Default: 0.0. + attn_drop_rate (float): The drop out rate for attention layer. + Default: 0.0. + drop_path_rate (float): Stochastic depth rate. Default 0.0. + num_fcs (int): The number of fully-connected layers for FFNs. + Default: 2. + qkv_bias (bool): Enable bias for qkv if True. Default: True + act_cfg (dict): The activation config for FFNs. + Default: dict(type='GELU'). + norm_cfg (dict): Config dict for normalization layer. + Default: dict(type='LN'). + sr_ratio (float): The ratio of spatial reduction in attention modules. + Defaults to 1. + init_cfg (dict, optional): The Config for initialization. + Defaults to None. + """ + + def __init__(self, + embed_dims, + num_heads, + feedforward_channels, + drop_rate=0., + attn_drop_rate=0., + drop_path_rate=0., + num_fcs=2, + qkv_bias=True, + act_cfg=dict(type='GELU'), + norm_cfg=dict(type='LN'), + sr_ratio=1., + init_cfg=None): + super(GSAEncoderLayer, self).__init__(init_cfg=init_cfg) + + self.norm1 = build_norm_layer(norm_cfg, embed_dims, postfix=1)[1] + self.attn = GlobalSubsampledAttention( + embed_dims=embed_dims, + num_heads=num_heads, + attn_drop=attn_drop_rate, + proj_drop=drop_rate, + dropout_layer=dict(type='DropPath', drop_prob=drop_path_rate), + qkv_bias=qkv_bias, + norm_cfg=norm_cfg, + sr_ratio=sr_ratio) + + self.norm2 = build_norm_layer(norm_cfg, embed_dims, postfix=2)[1] + self.ffn = FFN( + embed_dims=embed_dims, + feedforward_channels=feedforward_channels, + num_fcs=num_fcs, + ffn_drop=drop_rate, + dropout_layer=dict(type='DropPath', drop_prob=drop_path_rate), + act_cfg=act_cfg, + add_identity=False) + + self.drop_path = build_dropout( + dict(type='DropPath', drop_prob=drop_path_rate) + ) if drop_path_rate > 0. else nn.Identity() + + def forward(self, x, hw_shape): + x = x + self.drop_path(self.attn(self.norm1(x), hw_shape)) + x = x + self.drop_path(self.ffn(self.norm2(x))) + return x + + +class LocallyGroupedSelfAttention(BaseModule): + """Locally-grouped Self Attention (LSA) module. + + Args: + embed_dims (int): Number of input channels. + num_heads (int): Number of attention heads. Default: 8 + qkv_bias (bool, optional): If True, add a learnable bias to q, k, v. + Default: False. + qk_scale (float | None, optional): Override default qk scale of + head_dim ** -0.5 if set. Default: None. + attn_drop_rate (float, optional): Dropout ratio of attention weight. + Default: 0.0 + proj_drop_rate (float, optional): Dropout ratio of output. Default: 0. + window_size(int): Window size of LSA. Default: 1. + init_cfg (dict, optional): The Config for initialization. + Defaults to None. 
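+
+    Examples:
+        >>> # shape-only sketch with arbitrary sizes: a 56x56 token grid
+        >>> # split into 7x7 local windows (no padding needed in this case)
+        >>> import torch
+        >>> attn = LocallyGroupedSelfAttention(64, num_heads=8, window_size=7)
+        >>> x = torch.rand(1, 56 * 56, 64)
+        >>> attn(x, (56, 56)).shape
+        torch.Size([1, 3136, 64])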
+ """ + + def __init__(self, + embed_dims, + num_heads=8, + qkv_bias=False, + qk_scale=None, + attn_drop_rate=0., + proj_drop_rate=0., + window_size=1, + init_cfg=None): + super(LocallyGroupedSelfAttention, self).__init__(init_cfg=init_cfg) + + assert embed_dims % num_heads == 0, \ + f'dim {embed_dims} should be divided by num_heads {num_heads}' + + self.embed_dims = embed_dims + self.num_heads = num_heads + head_dim = embed_dims // num_heads + self.scale = qk_scale or head_dim**-0.5 + + self.qkv = nn.Linear(embed_dims, embed_dims * 3, bias=qkv_bias) + self.attn_drop = nn.Dropout(attn_drop_rate) + self.proj = nn.Linear(embed_dims, embed_dims) + self.proj_drop = nn.Dropout(proj_drop_rate) + self.window_size = window_size + + def forward(self, x, hw_shape): + B, N, C = x.shape + H, W = hw_shape + x = x.view(B, H, W, C) + + # pad feature maps to multiples of Local-groups + pad_l = pad_t = 0 + pad_r = (self.window_size - W % self.window_size) % self.window_size + pad_b = (self.window_size - H % self.window_size) % self.window_size + x = F.pad(x, (0, 0, pad_l, pad_r, pad_t, pad_b)) + + # calculate attention mask for LSA + Hp, Wp = x.shape[1:-1] + _h, _w = Hp // self.window_size, Wp // self.window_size + mask = torch.zeros((1, Hp, Wp), device=x.device) + mask[:, -pad_b:, :].fill_(1) + mask[:, :, -pad_r:].fill_(1) + + # [B, _h, _w, window_size, window_size, C] + x = x.reshape(B, _h, self.window_size, _w, self.window_size, + C).transpose(2, 3) + mask = mask.reshape(1, _h, self.window_size, _w, + self.window_size).transpose(2, 3).reshape( + 1, _h * _w, + self.window_size * self.window_size) + # [1, _h*_w, window_size*window_size, window_size*window_size] + attn_mask = mask.unsqueeze(2) - mask.unsqueeze(3) + attn_mask = attn_mask.masked_fill(attn_mask != 0, + float(-1000.0)).masked_fill( + attn_mask == 0, float(0.0)) + + # [3, B, _w*_h, nhead, window_size*window_size, dim] + qkv = self.qkv(x).reshape(B, _h * _w, + self.window_size * self.window_size, 3, + self.num_heads, C // self.num_heads).permute( + 3, 0, 1, 4, 2, 5) + q, k, v = qkv[0], qkv[1], qkv[2] + # [B, _h*_w, n_head, window_size*window_size, window_size*window_size] + attn = (q @ k.transpose(-2, -1)) * self.scale + attn = attn + attn_mask.unsqueeze(2) + attn = attn.softmax(dim=-1) + attn = self.attn_drop(attn) + attn = (attn @ v).transpose(2, 3).reshape(B, _h, _w, self.window_size, + self.window_size, C) + x = attn.transpose(2, 3).reshape(B, _h * self.window_size, + _w * self.window_size, C) + if pad_r > 0 or pad_b > 0: + x = x[:, :H, :W, :].contiguous() + + x = x.reshape(B, N, C) + x = self.proj(x) + x = self.proj_drop(x) + return x + + +class LSAEncoderLayer(BaseModule): + """Implements one encoder layer with LocallyGroupedSelfAttention(LSA). + + Args: + embed_dims (int): The feature dimension. + num_heads (int): Parallel attention heads. + feedforward_channels (int): The hidden dimension for FFNs. + drop_rate (float): Probability of an element to be zeroed + after the feed forward layer. Default: 0.0. + attn_drop_rate (float, optional): Dropout ratio of attention weight. + Default: 0.0 + drop_path_rate (float): Stochastic depth rate. Default 0.0. + num_fcs (int): The number of fully-connected layers for FFNs. + Default: 2. + qkv_bias (bool): Enable bias for qkv if True. Default: True + qk_scale (float | None, optional): Override default qk scale of + head_dim ** -0.5 if set. Default: None. + act_cfg (dict): The activation config for FFNs. + Default: dict(type='GELU'). + norm_cfg (dict): Config dict for normalization layer. 
+ Default: dict(type='LN'). + window_size (int): Window size of LSA. Default: 1. + init_cfg (dict, optional): The Config for initialization. + Defaults to None. + """ + + def __init__(self, + embed_dims, + num_heads, + feedforward_channels, + drop_rate=0., + attn_drop_rate=0., + drop_path_rate=0., + num_fcs=2, + qkv_bias=True, + qk_scale=None, + act_cfg=dict(type='GELU'), + norm_cfg=dict(type='LN'), + window_size=1, + init_cfg=None): + + super(LSAEncoderLayer, self).__init__(init_cfg=init_cfg) + + self.norm1 = build_norm_layer(norm_cfg, embed_dims, postfix=1)[1] + self.attn = LocallyGroupedSelfAttention(embed_dims, num_heads, + qkv_bias, qk_scale, + attn_drop_rate, drop_rate, + window_size) + + self.norm2 = build_norm_layer(norm_cfg, embed_dims, postfix=2)[1] + self.ffn = FFN( + embed_dims=embed_dims, + feedforward_channels=feedforward_channels, + num_fcs=num_fcs, + ffn_drop=drop_rate, + dropout_layer=dict(type='DropPath', drop_prob=drop_path_rate), + act_cfg=act_cfg, + add_identity=False) + + self.drop_path = build_dropout( + dict(type='DropPath', drop_prob=drop_path_rate) + ) if drop_path_rate > 0. else nn.Identity() + + def forward(self, x, hw_shape): + x = x + self.drop_path(self.attn(self.norm1(x), hw_shape)) + x = x + self.drop_path(self.ffn(self.norm2(x))) + return x + + +@MODELS.register_module() +class PCPVT(BaseModule): + """The backbone of Twins-PCPVT. + + This backbone is the implementation of `Twins: Revisiting the Design + of Spatial Attention in Vision Transformers + `_. + + Args: + arch (dict, str): PCPVT architecture, a str value in arch zoo or a + detailed configuration dict with 7 keys, and the length of all the + values in dict should be the same: + + - depths (List[int]): The number of encoder layers in each stage. + - embed_dims (List[int]): Embedding dimension in each stage. + - patch_sizes (List[int]): The patch sizes in each stage. + - num_heads (List[int]): Numbers of attention head in each stage. + - strides (List[int]): The strides in each stage. + - mlp_ratios (List[int]): The ratios of mlp in each stage. + - sr_ratios (List[int]): The ratios of GSA-encoder layers in each + stage. + + in_channels (int): Number of input channels. Defaults to 3. + out_indices (tuple[int]): Output from which stages. + Defaults to ``(3, )``. + qkv_bias (bool): Enable bias for qkv if True. Defaults to False. + drop_rate (float): Probability of an element to be zeroed. + Defaults to 0. + attn_drop_rate (float): The drop out rate for attention layer. + Defaults to 0.0 + drop_path_rate (float): Stochastic depth rate. Defaults to 0.0. + norm_cfg (dict): Config dict for normalization layer. + Defaults to ``dict(type='LN')``. + norm_after_stage(bool, List[bool]): Add extra norm after each stage. + Defaults to False. + init_cfg (dict, optional): The Config for initialization. + Defaults to None. 
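+
+    A custom ``arch`` dict equivalent to the built-in ``'small'`` setting
+    (shown only for illustration) would look like::
+
+        dict(embed_dims=[64, 128, 320, 512],
+             depths=[3, 4, 6, 3],
+             num_heads=[1, 2, 5, 8],
+             patch_sizes=[4, 2, 2, 2],
+             strides=[4, 2, 2, 2],
+             mlp_ratios=[8, 8, 4, 4],
+             sr_ratios=[8, 4, 2, 1])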
+ + Examples: + >>> from mmpretrain.models import PCPVT + >>> import torch + >>> pcpvt_cfg = {'arch': "small", + >>> 'norm_after_stage': [False, False, False, True]} + >>> model = PCPVT(**pcpvt_cfg) + >>> x = torch.rand(1, 3, 224, 224) + >>> outputs = model(x) + >>> print(outputs[-1].shape) + torch.Size([1, 512, 7, 7]) + >>> pcpvt_cfg['norm_after_stage'] = [True, True, True, True] + >>> pcpvt_cfg['out_indices'] = (0, 1, 2, 3) + >>> model = PCPVT(**pcpvt_cfg) + >>> outputs = model(x) + >>> for feat in outputs: + >>> print(feat.shape) + torch.Size([1, 64, 56, 56]) + torch.Size([1, 128, 28, 28]) + torch.Size([1, 320, 14, 14]) + torch.Size([1, 512, 7, 7]) + """ + arch_zoo = { + **dict.fromkeys(['s', 'small'], + {'embed_dims': [64, 128, 320, 512], + 'depths': [3, 4, 6, 3], + 'num_heads': [1, 2, 5, 8], + 'patch_sizes': [4, 2, 2, 2], + 'strides': [4, 2, 2, 2], + 'mlp_ratios': [8, 8, 4, 4], + 'sr_ratios': [8, 4, 2, 1]}), + **dict.fromkeys(['b', 'base'], + {'embed_dims': [64, 128, 320, 512], + 'depths': [3, 4, 18, 3], + 'num_heads': [1, 2, 5, 8], + 'patch_sizes': [4, 2, 2, 2], + 'strides': [4, 2, 2, 2], + 'mlp_ratios': [8, 8, 4, 4], + 'sr_ratios': [8, 4, 2, 1]}), + **dict.fromkeys(['l', 'large'], + {'embed_dims': [64, 128, 320, 512], + 'depths': [3, 8, 27, 3], + 'num_heads': [1, 2, 5, 8], + 'patch_sizes': [4, 2, 2, 2], + 'strides': [4, 2, 2, 2], + 'mlp_ratios': [8, 8, 4, 4], + 'sr_ratios': [8, 4, 2, 1]}), + } # yapf: disable + + essential_keys = { + 'embed_dims', 'depths', 'num_heads', 'patch_sizes', 'strides', + 'mlp_ratios', 'sr_ratios' + } + + def __init__(self, + arch, + in_channels=3, + out_indices=(3, ), + qkv_bias=False, + drop_rate=0., + attn_drop_rate=0., + drop_path_rate=0., + norm_cfg=dict(type='LN'), + norm_after_stage=False, + init_cfg=None): + super(PCPVT, self).__init__(init_cfg=init_cfg) + if isinstance(arch, str): + arch = arch.lower() + assert arch in set(self.arch_zoo), \ + f'Arch {arch} is not in default archs {set(self.arch_zoo)}' + self.arch_settings = self.arch_zoo[arch] + else: + assert isinstance(arch, dict) and ( + set(arch) == self.essential_keys + ), f'Custom arch needs a dict with keys {self.essential_keys}.' 
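+            # every value of a custom arch dict must be a per-stage list; the
+            # list lengths are checked against num_stage further down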
+ self.arch_settings = arch + + self.depths = self.arch_settings['depths'] + self.embed_dims = self.arch_settings['embed_dims'] + self.patch_sizes = self.arch_settings['patch_sizes'] + self.strides = self.arch_settings['strides'] + self.mlp_ratios = self.arch_settings['mlp_ratios'] + self.num_heads = self.arch_settings['num_heads'] + self.sr_ratios = self.arch_settings['sr_ratios'] + + self.num_extra_tokens = 0 # there is no cls-token in Twins + self.num_stage = len(self.depths) + for key, value in self.arch_settings.items(): + assert isinstance(value, list) and len(value) == self.num_stage, ( + 'Length of setting item in arch dict must be type of list and' + ' have the same length.') + + # patch_embeds + self.patch_embeds = ModuleList() + self.position_encoding_drops = ModuleList() + self.stages = ModuleList() + + for i in range(self.num_stage): + # use in_channels of the model in the first stage + if i == 0: + stage_in_channels = in_channels + else: + stage_in_channels = self.embed_dims[i - 1] + + self.patch_embeds.append( + PatchEmbed( + in_channels=stage_in_channels, + embed_dims=self.embed_dims[i], + conv_type='Conv2d', + kernel_size=self.patch_sizes[i], + stride=self.strides[i], + padding='corner', + norm_cfg=dict(type='LN'))) + + self.position_encoding_drops.append(nn.Dropout(p=drop_rate)) + + # PEGs + self.position_encodings = ModuleList([ + ConditionalPositionEncoding(embed_dim, embed_dim) + for embed_dim in self.embed_dims + ]) + + # stochastic depth + total_depth = sum(self.depths) + self.dpr = [ + x.item() for x in torch.linspace(0, drop_path_rate, total_depth) + ] # stochastic depth decay rule + cur = 0 + + for k in range(len(self.depths)): + _block = ModuleList([ + GSAEncoderLayer( + embed_dims=self.embed_dims[k], + num_heads=self.num_heads[k], + feedforward_channels=self.mlp_ratios[k] * + self.embed_dims[k], + attn_drop_rate=attn_drop_rate, + drop_rate=drop_rate, + drop_path_rate=self.dpr[cur + i], + num_fcs=2, + qkv_bias=qkv_bias, + act_cfg=dict(type='GELU'), + norm_cfg=norm_cfg, + sr_ratio=self.sr_ratios[k]) for i in range(self.depths[k]) + ]) + self.stages.append(_block) + cur += self.depths[k] + + self.out_indices = out_indices + + assert isinstance(norm_after_stage, (bool, list)) + if isinstance(norm_after_stage, bool): + self.norm_after_stage = [norm_after_stage] * self.num_stage + else: + self.norm_after_stage = norm_after_stage + assert len(self.norm_after_stage) == self.num_stage, \ + (f'Number of norm_after_stage({len(self.norm_after_stage)}) should' + f' be equal to the number of stages({self.num_stage}).') + + for i, has_norm in enumerate(self.norm_after_stage): + assert isinstance(has_norm, bool), 'norm_after_stage should be ' \ + 'bool or List[bool].' + if has_norm and norm_cfg is not None: + norm_layer = build_norm_layer(norm_cfg, self.embed_dims[i])[1] + else: + norm_layer = nn.Identity() + + self.add_module(f'norm_after_stage{i}', norm_layer) + + def init_weights(self): + if self.init_cfg is not None: + super(PCPVT, self).init_weights() + else: + for m in self.modules(): + if isinstance(m, nn.Linear): + trunc_normal_init(m, std=.02, bias=0.) + elif isinstance(m, (_BatchNorm, nn.GroupNorm, nn.LayerNorm)): + constant_init(m, val=1.0, bias=0.) 
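+                # Conv2d layers get a fan-out (He) normal init below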
+ elif isinstance(m, nn.Conv2d): + fan_out = m.kernel_size[0] * m.kernel_size[ + 1] * m.out_channels + fan_out //= m.groups + normal_init( + m, mean=0, std=math.sqrt(2.0 / fan_out), bias=0) + + def forward(self, x): + outputs = list() + + b = x.shape[0] + + for i in range(self.num_stage): + x, hw_shape = self.patch_embeds[i](x) + h, w = hw_shape + x = self.position_encoding_drops[i](x) + for j, blk in enumerate(self.stages[i]): + x = blk(x, hw_shape) + if j == 0: + x = self.position_encodings[i](x, hw_shape) + + norm_layer = getattr(self, f'norm_after_stage{i}') + x = norm_layer(x) + x = x.reshape(b, h, w, -1).permute(0, 3, 1, 2).contiguous() + + if i in self.out_indices: + outputs.append(x) + + return tuple(outputs) + + +@MODELS.register_module() +class SVT(PCPVT): + """The backbone of Twins-SVT. + + This backbone is the implementation of `Twins: Revisiting the Design + of Spatial Attention in Vision Transformers + `_. + + Args: + arch (dict, str): SVT architecture, a str value in arch zoo or a + detailed configuration dict with 8 keys, and the length of all the + values in dict should be the same: + + - depths (List[int]): The number of encoder layers in each stage. + - embed_dims (List[int]): Embedding dimension in each stage. + - patch_sizes (List[int]): The patch sizes in each stage. + - num_heads (List[int]): Numbers of attention head in each stage. + - strides (List[int]): The strides in each stage. + - mlp_ratios (List[int]): The ratios of mlp in each stage. + - sr_ratios (List[int]): The ratios of GSA-encoder layers in each + stage. + - windiow_sizes (List[int]): The window sizes in LSA-encoder layers + in each stage. + + in_channels (int): Number of input channels. Defaults to 3. + out_indices (tuple[int]): Output from which stages. + Defaults to (3, ). + qkv_bias (bool): Enable bias for qkv if True. Defaults to False. + drop_rate (float): Dropout rate. Defaults to 0. + attn_drop_rate (float): Dropout ratio of attention weight. + Defaults to 0.0 + drop_path_rate (float): Stochastic depth rate. Defaults to 0.2. + norm_cfg (dict): Config dict for normalization layer. + Defaults to ``dict(type='LN')``. + norm_after_stage(bool, List[bool]): Add extra norm after each stage. + Defaults to False. + init_cfg (dict, optional): The Config for initialization. + Defaults to None. 
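+
+    Note that a custom ``arch`` dict for SVT needs the extra ``window_sizes``
+    key on top of the seven PCPVT keys; the built-in settings all use
+    ``window_sizes=[7, 7, 7, 7]``.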
+ + Examples: + >>> from mmpretrain.models import SVT + >>> import torch + >>> svt_cfg = {'arch': "small", + >>> 'norm_after_stage': [False, False, False, True]} + >>> model = SVT(**svt_cfg) + >>> x = torch.rand(1, 3, 224, 224) + >>> outputs = model(x) + >>> print(outputs[-1].shape) + torch.Size([1, 512, 7, 7]) + >>> svt_cfg["out_indices"] = (0, 1, 2, 3) + >>> svt_cfg["norm_after_stage"] = [True, True, True, True] + >>> model = SVT(**svt_cfg) + >>> output = model(x) + >>> for feat in output: + >>> print(feat.shape) + torch.Size([1, 64, 56, 56]) + torch.Size([1, 128, 28, 28]) + torch.Size([1, 320, 14, 14]) + torch.Size([1, 512, 7, 7]) + """ + arch_zoo = { + **dict.fromkeys(['s', 'small'], + {'embed_dims': [64, 128, 256, 512], + 'depths': [2, 2, 10, 4], + 'num_heads': [2, 4, 8, 16], + 'patch_sizes': [4, 2, 2, 2], + 'strides': [4, 2, 2, 2], + 'mlp_ratios': [4, 4, 4, 4], + 'sr_ratios': [8, 4, 2, 1], + 'window_sizes': [7, 7, 7, 7]}), + **dict.fromkeys(['b', 'base'], + {'embed_dims': [96, 192, 384, 768], + 'depths': [2, 2, 18, 2], + 'num_heads': [3, 6, 12, 24], + 'patch_sizes': [4, 2, 2, 2], + 'strides': [4, 2, 2, 2], + 'mlp_ratios': [4, 4, 4, 4], + 'sr_ratios': [8, 4, 2, 1], + 'window_sizes': [7, 7, 7, 7]}), + **dict.fromkeys(['l', 'large'], + {'embed_dims': [128, 256, 512, 1024], + 'depths': [2, 2, 18, 2], + 'num_heads': [4, 8, 16, 32], + 'patch_sizes': [4, 2, 2, 2], + 'strides': [4, 2, 2, 2], + 'mlp_ratios': [4, 4, 4, 4], + 'sr_ratios': [8, 4, 2, 1], + 'window_sizes': [7, 7, 7, 7]}), + } # yapf: disable + + essential_keys = { + 'embed_dims', 'depths', 'num_heads', 'patch_sizes', 'strides', + 'mlp_ratios', 'sr_ratios', 'window_sizes' + } + + def __init__(self, + arch, + in_channels=3, + out_indices=(3, ), + qkv_bias=False, + drop_rate=0., + attn_drop_rate=0., + drop_path_rate=0.0, + norm_cfg=dict(type='LN'), + norm_after_stage=False, + init_cfg=None): + super(SVT, self).__init__(arch, in_channels, out_indices, qkv_bias, + drop_rate, attn_drop_rate, drop_path_rate, + norm_cfg, norm_after_stage, init_cfg) + + self.window_sizes = self.arch_settings['window_sizes'] + + for k in range(self.num_stage): + for i in range(self.depths[k]): + # in even-numbered layers of each stage, replace GSA with LSA + if i % 2 == 0: + ffn_channels = self.mlp_ratios[k] * self.embed_dims[k] + self.stages[k][i] = \ + LSAEncoderLayer( + embed_dims=self.embed_dims[k], + num_heads=self.num_heads[k], + feedforward_channels=ffn_channels, + drop_rate=drop_rate, + norm_cfg=norm_cfg, + attn_drop_rate=attn_drop_rate, + drop_path_rate=self.dpr[sum(self.depths[:k])+i], + qkv_bias=qkv_bias, + window_size=self.window_sizes[k]) diff --git a/mmpretrain/models/backbones/van.py b/mmpretrain/models/backbones/van.py new file mode 100644 index 0000000000000000000000000000000000000000..c34dc3362f84ffa39151219f038f0c74ee0242e8 --- /dev/null +++ b/mmpretrain/models/backbones/van.py @@ -0,0 +1,434 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +import torch.nn as nn +from mmcv.cnn import Conv2d, build_activation_layer, build_norm_layer +from mmcv.cnn.bricks import DropPath +from mmcv.cnn.bricks.transformer import PatchEmbed +from mmengine.model import BaseModule, ModuleList +from mmengine.utils.dl_utils.parrots_wrapper import _BatchNorm + +from mmpretrain.registry import MODELS +from .base_backbone import BaseBackbone + + +class MixFFN(BaseModule): + """An implementation of MixFFN of VAN. Refer to + mmdetection/mmdet/models/backbones/pvt.py. + + The differences between MixFFN & FFN: + 1. Use 1X1 Conv to replace Linear layer. 
+ 2. Introduce 3X3 Depth-wise Conv to encode positional information. + + Args: + embed_dims (int): The feature dimension. Same as + `MultiheadAttention`. + feedforward_channels (int): The hidden dimension of FFNs. + act_cfg (dict, optional): The activation config for FFNs. + Default: dict(type='GELU'). + ffn_drop (float, optional): Probability of an element to be + zeroed in FFN. Default 0.0. + init_cfg (obj:`mmcv.ConfigDict`): The Config for initialization. + Default: None. + """ + + def __init__(self, + embed_dims, + feedforward_channels, + act_cfg=dict(type='GELU'), + ffn_drop=0., + init_cfg=None): + super(MixFFN, self).__init__(init_cfg=init_cfg) + + self.embed_dims = embed_dims + self.feedforward_channels = feedforward_channels + self.act_cfg = act_cfg + + self.fc1 = Conv2d( + in_channels=embed_dims, + out_channels=feedforward_channels, + kernel_size=1) + self.dwconv = Conv2d( + in_channels=feedforward_channels, + out_channels=feedforward_channels, + kernel_size=3, + stride=1, + padding=1, + bias=True, + groups=feedforward_channels) + self.act = build_activation_layer(act_cfg) + self.fc2 = Conv2d( + in_channels=feedforward_channels, + out_channels=embed_dims, + kernel_size=1) + self.drop = nn.Dropout(ffn_drop) + + def forward(self, x): + x = self.fc1(x) + x = self.dwconv(x) + x = self.act(x) + x = self.drop(x) + x = self.fc2(x) + x = self.drop(x) + return x + + +class LKA(BaseModule): + """Large Kernel Attention(LKA) of VAN. + + .. code:: text + DW_conv (depth-wise convolution) + | + | + DW_D_conv (depth-wise dilation convolution) + | + | + Transition Convolution (1×1 convolution) + + Args: + embed_dims (int): Number of input channels. + init_cfg (obj:`mmcv.ConfigDict`): The Config for initialization. + Default: None. + """ + + def __init__(self, embed_dims, init_cfg=None): + super(LKA, self).__init__(init_cfg=init_cfg) + + # a spatial local convolution (depth-wise convolution) + self.DW_conv = Conv2d( + in_channels=embed_dims, + out_channels=embed_dims, + kernel_size=5, + padding=2, + groups=embed_dims) + + # a spatial long-range convolution (depth-wise dilation convolution) + self.DW_D_conv = Conv2d( + in_channels=embed_dims, + out_channels=embed_dims, + kernel_size=7, + stride=1, + padding=9, + groups=embed_dims, + dilation=3) + + self.conv1 = Conv2d( + in_channels=embed_dims, out_channels=embed_dims, kernel_size=1) + + def forward(self, x): + u = x.clone() + attn = self.DW_conv(x) + attn = self.DW_D_conv(attn) + attn = self.conv1(attn) + + return u * attn + + +class SpatialAttention(BaseModule): + """Basic attention module in VANBloack. + + Args: + embed_dims (int): Number of input channels. + act_cfg (dict, optional): The activation config for FFNs. + Default: dict(type='GELU'). + init_cfg (obj:`mmcv.ConfigDict`): The Config for initialization. + Default: None. + """ + + def __init__(self, embed_dims, act_cfg=dict(type='GELU'), init_cfg=None): + super(SpatialAttention, self).__init__(init_cfg=init_cfg) + + self.proj_1 = Conv2d( + in_channels=embed_dims, out_channels=embed_dims, kernel_size=1) + self.activation = build_activation_layer(act_cfg) + self.spatial_gating_unit = LKA(embed_dims) + self.proj_2 = Conv2d( + in_channels=embed_dims, out_channels=embed_dims, kernel_size=1) + + def forward(self, x): + shorcut = x.clone() + x = self.proj_1(x) + x = self.activation(x) + x = self.spatial_gating_unit(x) + x = self.proj_2(x) + x = x + shorcut + return x + + +class VANBlock(BaseModule): + """A block of VAN. + + Args: + embed_dims (int): Number of input channels. 
+ ffn_ratio (float): The expansion ratio of feedforward network hidden + layer channels. Defaults to 4. + drop_rate (float): Dropout rate after embedding. Defaults to 0. + drop_path_rate (float): Stochastic depth rate. Defaults to 0.1. + act_cfg (dict, optional): The activation config for FFNs. + Default: dict(type='GELU'). + layer_scale_init_value (float): Init value for Layer Scale. + Defaults to 1e-2. + init_cfg (obj:`mmcv.ConfigDict`): The Config for initialization. + Default: None. + """ + + def __init__(self, + embed_dims, + ffn_ratio=4., + drop_rate=0., + drop_path_rate=0., + act_cfg=dict(type='GELU'), + norm_cfg=dict(type='BN', eps=1e-5), + layer_scale_init_value=1e-2, + init_cfg=None): + super(VANBlock, self).__init__(init_cfg=init_cfg) + self.out_channels = embed_dims + + self.norm1 = build_norm_layer(norm_cfg, embed_dims)[1] + self.attn = SpatialAttention(embed_dims, act_cfg=act_cfg) + self.drop_path = DropPath( + drop_path_rate) if drop_path_rate > 0. else nn.Identity() + + self.norm2 = build_norm_layer(norm_cfg, embed_dims)[1] + mlp_hidden_dim = int(embed_dims * ffn_ratio) + self.mlp = MixFFN( + embed_dims=embed_dims, + feedforward_channels=mlp_hidden_dim, + act_cfg=act_cfg, + ffn_drop=drop_rate) + self.layer_scale_1 = nn.Parameter( + layer_scale_init_value * torch.ones((embed_dims)), + requires_grad=True) if layer_scale_init_value > 0 else None + self.layer_scale_2 = nn.Parameter( + layer_scale_init_value * torch.ones((embed_dims)), + requires_grad=True) if layer_scale_init_value > 0 else None + + def forward(self, x): + identity = x + x = self.norm1(x) + x = self.attn(x) + if self.layer_scale_1 is not None: + x = self.layer_scale_1.unsqueeze(-1).unsqueeze(-1) * x + x = identity + self.drop_path(x) + + identity = x + x = self.norm2(x) + x = self.mlp(x) + if self.layer_scale_2 is not None: + x = self.layer_scale_2.unsqueeze(-1).unsqueeze(-1) * x + x = identity + self.drop_path(x) + + return x + + +class VANPatchEmbed(PatchEmbed): + """Image to Patch Embedding of VAN. + + The differences between VANPatchEmbed & PatchEmbed: + 1. Use BN. + 2. Do not use 'flatten' and 'transpose'. + """ + + def __init__(self, *args, norm_cfg=dict(type='BN'), **kwargs): + super(VANPatchEmbed, self).__init__(*args, norm_cfg=norm_cfg, **kwargs) + + def forward(self, x): + """ + Args: + x (Tensor): Has shape (B, C, H, W). In most case, C is 3. + Returns: + tuple: Contains merged results and its spatial shape. + - x (Tensor): Has shape (B, out_h * out_w, embed_dims) + - out_size (tuple[int]): Spatial shape of x, arrange as + (out_h, out_w). + """ + + if self.adaptive_padding: + x = self.adaptive_padding(x) + + x = self.projection(x) + out_size = (x.shape[2], x.shape[3]) + if self.norm is not None: + x = self.norm(x) + return x, out_size + + +@MODELS.register_module() +class VAN(BaseBackbone): + """Visual Attention Network. + + A PyTorch implement of : `Visual Attention Network + `_ + + Inspiration from + https://github.com/Visual-Attention-Network/VAN-Classification + + Args: + arch (str | dict): Visual Attention Network architecture. + If use string, choose from 'tiny', 'small', 'base' and 'large'. + If use dict, it should have below keys: + + - **embed_dims** (List[int]): The dimensions of embedding. + - **depths** (List[int]): The number of blocks in each stage. + - **ffn_ratios** (List[int]): The number of expansion ratio of + feedforward network hidden layer channels. + + Defaults to 'tiny'. + patch_sizes (List[int | tuple]): The patch size in patch embeddings. + Defaults to [7, 3, 3, 3]. 
+ in_channels (int): The num of input channels. Defaults to 3. + drop_rate (float): Dropout rate after embedding. Defaults to 0. + drop_path_rate (float): Stochastic depth rate. Defaults to 0.1. + out_indices (Sequence[int]): Output from which stages. + Default: ``(3, )``. + frozen_stages (int): Stages to be frozen (stop grad and set eval mode). + -1 means not freezing any parameters. Defaults to -1. + norm_eval (bool): Whether to set norm layers to eval mode, namely, + freeze running stats (mean and var). Note: Effect on Batch Norm + and its variants only. Defaults to False. + norm_cfg (dict): Config dict for normalization layer for all output + features. Defaults to ``dict(type='LN')`` + block_cfgs (Sequence[dict] | dict): The extra config of each block. + Defaults to empty dicts. + init_cfg (dict, optional): The Config for initialization. + Defaults to None. + + Examples: + >>> from mmpretrain.models import VAN + >>> import torch + >>> cfg = dict(arch='tiny') + >>> model = VAN(**cfg) + >>> inputs = torch.rand(1, 3, 224, 224) + >>> outputs = model(inputs) + >>> for out in outputs: + >>> print(out.size()) + (1, 256, 7, 7) + """ + arch_zoo = { + **dict.fromkeys(['t', 'tiny'], + {'embed_dims': [32, 64, 160, 256], + 'depths': [3, 3, 5, 2], + 'ffn_ratios': [8, 8, 4, 4]}), + **dict.fromkeys(['s', 'small'], + {'embed_dims': [64, 128, 320, 512], + 'depths': [2, 2, 4, 2], + 'ffn_ratios': [8, 8, 4, 4]}), + **dict.fromkeys(['b', 'base'], + {'embed_dims': [64, 128, 320, 512], + 'depths': [3, 3, 12, 3], + 'ffn_ratios': [8, 8, 4, 4]}), + **dict.fromkeys(['l', 'large'], + {'embed_dims': [64, 128, 320, 512], + 'depths': [3, 5, 27, 3], + 'ffn_ratios': [8, 8, 4, 4]}), + } # yapf: disable + + def __init__(self, + arch='tiny', + patch_sizes=[7, 3, 3, 3], + in_channels=3, + drop_rate=0., + drop_path_rate=0., + out_indices=(3, ), + frozen_stages=-1, + norm_eval=False, + norm_cfg=dict(type='LN'), + block_cfgs=dict(), + init_cfg=None): + super(VAN, self).__init__(init_cfg=init_cfg) + + if isinstance(arch, str): + arch = arch.lower() + assert arch in set(self.arch_zoo), \ + f'Arch {arch} is not in default archs {set(self.arch_zoo)}' + self.arch_settings = self.arch_zoo[arch] + else: + essential_keys = {'embed_dims', 'depths', 'ffn_ratios'} + assert isinstance(arch, dict) and set(arch) == essential_keys, \ + f'Custom arch needs a dict with keys {essential_keys}' + self.arch_settings = arch + + self.embed_dims = self.arch_settings['embed_dims'] + self.depths = self.arch_settings['depths'] + self.ffn_ratios = self.arch_settings['ffn_ratios'] + self.num_stages = len(self.depths) + self.out_indices = out_indices + self.frozen_stages = frozen_stages + self.norm_eval = norm_eval + + total_depth = sum(self.depths) + dpr = [ + x.item() for x in torch.linspace(0, drop_path_rate, total_depth) + ] # stochastic depth decay rule + + cur_block_idx = 0 + for i, depth in enumerate(self.depths): + patch_embed = VANPatchEmbed( + in_channels=in_channels if i == 0 else self.embed_dims[i - 1], + input_size=None, + embed_dims=self.embed_dims[i], + kernel_size=patch_sizes[i], + stride=patch_sizes[i] // 2 + 1, + padding=(patch_sizes[i] // 2, patch_sizes[i] // 2), + norm_cfg=dict(type='BN')) + + blocks = ModuleList([ + VANBlock( + embed_dims=self.embed_dims[i], + ffn_ratio=self.ffn_ratios[i], + drop_rate=drop_rate, + drop_path_rate=dpr[cur_block_idx + j], + **block_cfgs) for j in range(depth) + ]) + cur_block_idx += depth + norm = build_norm_layer(norm_cfg, self.embed_dims[i])[1] + + self.add_module(f'patch_embed{i + 1}', patch_embed) + 
self.add_module(f'blocks{i + 1}', blocks) + self.add_module(f'norm{i + 1}', norm) + + def train(self, mode=True): + super(VAN, self).train(mode) + self._freeze_stages() + if mode and self.norm_eval: + for m in self.modules(): + # trick: eval have effect on BatchNorm only + if isinstance(m, _BatchNorm): + m.eval() + + def _freeze_stages(self): + for i in range(0, self.frozen_stages + 1): + # freeze patch embed + m = getattr(self, f'patch_embed{i + 1}') + m.eval() + for param in m.parameters(): + param.requires_grad = False + + # freeze blocks + m = getattr(self, f'blocks{i + 1}') + m.eval() + for param in m.parameters(): + param.requires_grad = False + + # freeze norm + m = getattr(self, f'norm{i + 1}') + m.eval() + for param in m.parameters(): + param.requires_grad = False + + def forward(self, x): + outs = [] + for i in range(self.num_stages): + patch_embed = getattr(self, f'patch_embed{i + 1}') + blocks = getattr(self, f'blocks{i + 1}') + norm = getattr(self, f'norm{i + 1}') + x, hw_shape = patch_embed(x) + for block in blocks: + x = block(x) + x = x.flatten(2).transpose(1, 2) + x = norm(x) + x = x.reshape(-1, *hw_shape, + block.out_channels).permute(0, 3, 1, 2).contiguous() + if i in self.out_indices: + outs.append(x) + + return tuple(outs) diff --git a/mmpretrain/models/backbones/vgg.py b/mmpretrain/models/backbones/vgg.py new file mode 100644 index 0000000000000000000000000000000000000000..026b916256cf56cdf75d348ee07b0ceceffd9751 --- /dev/null +++ b/mmpretrain/models/backbones/vgg.py @@ -0,0 +1,183 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch.nn as nn +from mmcv.cnn import ConvModule +from mmengine.utils.dl_utils.parrots_wrapper import _BatchNorm + +from mmpretrain.registry import MODELS +from .base_backbone import BaseBackbone + + +def make_vgg_layer(in_channels, + out_channels, + num_blocks, + conv_cfg=None, + norm_cfg=None, + act_cfg=dict(type='ReLU'), + dilation=1, + with_norm=False, + ceil_mode=False): + layers = [] + for _ in range(num_blocks): + layer = ConvModule( + in_channels=in_channels, + out_channels=out_channels, + kernel_size=3, + dilation=dilation, + padding=dilation, + bias=True, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg) + layers.append(layer) + in_channels = out_channels + layers.append(nn.MaxPool2d(kernel_size=2, stride=2, ceil_mode=ceil_mode)) + + return layers + + +@MODELS.register_module() +class VGG(BaseBackbone): + """VGG backbone. + + Args: + depth (int): Depth of vgg, from {11, 13, 16, 19}. + with_norm (bool): Use BatchNorm or not. + num_classes (int): number of classes for classification. + num_stages (int): VGG stages, normally 5. + dilations (Sequence[int]): Dilation of each stage. + out_indices (Sequence[int], optional): Output from which stages. + When it is None, the default behavior depends on whether + num_classes is specified. If num_classes <= 0, the default value is + (4, ), output the last feature map before classifier. If + num_classes > 0, the default value is (5, ), output the + classification score. Default: None. + frozen_stages (int): Stages to be frozen (all param fixed). -1 means + not freezing any parameters. + norm_eval (bool): Whether to set norm layers to eval mode, namely, + freeze running stats (mean and var). Note: Effect on Batch Norm + and its variants only. Default: False. + ceil_mode (bool): Whether to use ceil_mode of MaxPool. Default: False. + with_last_pool (bool): Whether to keep the last pooling before + classifier. Default: True. + """ + + # Parameters to build layers. 
Each element specifies the number of conv in + # each stage. For example, VGG11 contains 11 layers with learnable + # parameters. 11 is computed as 11 = (1 + 1 + 2 + 2 + 2) + 3, + # where 3 indicates the last three fully-connected layers. + arch_settings = { + 11: (1, 1, 2, 2, 2), + 13: (2, 2, 2, 2, 2), + 16: (2, 2, 3, 3, 3), + 19: (2, 2, 4, 4, 4) + } + + def __init__(self, + depth, + num_classes=-1, + num_stages=5, + dilations=(1, 1, 1, 1, 1), + out_indices=None, + frozen_stages=-1, + conv_cfg=None, + norm_cfg=None, + act_cfg=dict(type='ReLU'), + norm_eval=False, + ceil_mode=False, + with_last_pool=True, + init_cfg=[ + dict(type='Kaiming', layer=['Conv2d']), + dict(type='Constant', val=1., layer=['_BatchNorm']), + dict(type='Normal', std=0.01, layer=['Linear']) + ]): + super(VGG, self).__init__(init_cfg) + if depth not in self.arch_settings: + raise KeyError(f'invalid depth {depth} for vgg') + assert num_stages >= 1 and num_stages <= 5 + stage_blocks = self.arch_settings[depth] + self.stage_blocks = stage_blocks[:num_stages] + assert len(dilations) == num_stages + + self.num_classes = num_classes + self.frozen_stages = frozen_stages + self.norm_eval = norm_eval + with_norm = norm_cfg is not None + + if out_indices is None: + out_indices = (5, ) if num_classes > 0 else (4, ) + assert max(out_indices) <= num_stages + self.out_indices = out_indices + + self.in_channels = 3 + start_idx = 0 + vgg_layers = [] + self.range_sub_modules = [] + for i, num_blocks in enumerate(self.stage_blocks): + num_modules = num_blocks + 1 + end_idx = start_idx + num_modules + dilation = dilations[i] + out_channels = 64 * 2**i if i < 4 else 512 + vgg_layer = make_vgg_layer( + self.in_channels, + out_channels, + num_blocks, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg, + dilation=dilation, + with_norm=with_norm, + ceil_mode=ceil_mode) + vgg_layers.extend(vgg_layer) + self.in_channels = out_channels + self.range_sub_modules.append([start_idx, end_idx]) + start_idx = end_idx + if not with_last_pool: + vgg_layers.pop(-1) + self.range_sub_modules[-1][1] -= 1 + self.module_name = 'features' + self.add_module(self.module_name, nn.Sequential(*vgg_layers)) + + if self.num_classes > 0: + self.classifier = nn.Sequential( + nn.Linear(512 * 7 * 7, 4096), + nn.ReLU(True), + nn.Dropout(), + nn.Linear(4096, 4096), + nn.ReLU(True), + nn.Dropout(), + nn.Linear(4096, num_classes), + ) + + def forward(self, x): + outs = [] + vgg_layers = getattr(self, self.module_name) + for i in range(len(self.stage_blocks)): + for j in range(*self.range_sub_modules[i]): + vgg_layer = vgg_layers[j] + x = vgg_layer(x) + if i in self.out_indices: + outs.append(x) + if self.num_classes > 0: + x = x.view(x.size(0), -1) + x = self.classifier(x) + outs.append(x) + + return tuple(outs) + + def _freeze_stages(self): + vgg_layers = getattr(self, self.module_name) + for i in range(self.frozen_stages): + for j in range(*self.range_sub_modules[i]): + m = vgg_layers[j] + m.eval() + for param in m.parameters(): + param.requires_grad = False + + def train(self, mode=True): + super(VGG, self).train(mode) + self._freeze_stages() + if mode and self.norm_eval: + for m in self.modules(): + # trick: eval have effect on BatchNorm only + if isinstance(m, _BatchNorm): + m.eval() diff --git a/mmpretrain/models/backbones/vig.py b/mmpretrain/models/backbones/vig.py new file mode 100644 index 0000000000000000000000000000000000000000..c1a7879bd99682c32cbd1e02079fe79e2c6a3d0a --- /dev/null +++ b/mmpretrain/models/backbones/vig.py @@ -0,0 +1,852 @@ +# Copyright 
(c) OpenMMLab. All rights reserved. +# modified from +# https://github.com/huawei-noah/Efficient-AI-Backbones/tree/master/vig_pytorch +from typing import Sequence + +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import build_activation_layer +from mmcv.cnn.bricks import DropPath +from mmengine.model import ModuleList, Sequential +from torch.nn.modules.batchnorm import _BatchNorm + +from mmpretrain.models.backbones.base_backbone import BaseBackbone +from mmpretrain.registry import MODELS +from ..utils import build_norm_layer + + +def get_2d_relative_pos_embed(embed_dim, grid_size): + """ + grid_size: int of the grid height and width + return: + pos_embed: [grid_size*grid_size, grid_size*grid_size] + """ + pos_embed = get_2d_sincos_pos_embed(embed_dim, grid_size) + relative_pos = 2 * np.matmul(pos_embed, + pos_embed.transpose()) / pos_embed.shape[1] + return relative_pos + + +def get_2d_sincos_pos_embed(embed_dim, grid_size, cls_token=False): + """ + grid_size: int of the grid height and width + return: + pos_embed: [grid_size*grid_size, embed_dim] or + [1+grid_size*grid_size, embed_dim] (w/ or w/o cls_token) + """ + grid_h = np.arange(grid_size, dtype=np.float32) + grid_w = np.arange(grid_size, dtype=np.float32) + grid = np.meshgrid(grid_w, grid_h) # here w goes first + grid = np.stack(grid, axis=0) + + grid = grid.reshape([2, 1, grid_size, grid_size]) + pos_embed = get_2d_sincos_pos_embed_from_grid(embed_dim, grid) + if cls_token: + pos_embed = np.concatenate([np.zeros([1, embed_dim]), pos_embed], + axis=0) + return pos_embed + + +def get_2d_sincos_pos_embed_from_grid(embed_dim, grid): + assert embed_dim % 2 == 0 + + # use half of dimensions to encode grid_h + emb_h = get_1d_sincos_pos_embed_from_grid(embed_dim // 2, + grid[0]) # (H*W, D/2) + emb_w = get_1d_sincos_pos_embed_from_grid(embed_dim // 2, + grid[1]) # (H*W, D/2) + + emb = np.concatenate([emb_h, emb_w], axis=1) # (H*W, D) + return emb + + +def get_1d_sincos_pos_embed_from_grid(embed_dim, pos): + """ + embed_dim: output dimension for each position + pos: a list of positions to be encoded: size (M,) + out: (M, D) + """ + assert embed_dim % 2 == 0 + omega = np.arange(embed_dim // 2, dtype=np.float32) + omega /= embed_dim / 2. + omega = 1. / 10000**omega # (D/2,) + + pos = pos.reshape(-1) # (M,) + out = np.einsum('m,d->md', pos, omega) # (M, D/2), outer product + + emb_sin = np.sin(out) # (M, D/2) + emb_cos = np.cos(out) # (M, D/2) + + emb = np.concatenate([emb_sin, emb_cos], axis=1) # (M, D) + return emb + + +def xy_pairwise_distance(x, y): + """Compute pairwise distance of a point cloud. + + Args: + x: tensor (batch_size, num_points, num_dims) + y: tensor (batch_size, num_points, num_dims) + Returns: + pairwise distance: (batch_size, num_points, num_points) + """ + with torch.no_grad(): + xy_inner = -2 * torch.matmul(x, y.transpose(2, 1)) + x_square = torch.sum(torch.mul(x, x), dim=-1, keepdim=True) + y_square = torch.sum(torch.mul(y, y), dim=-1, keepdim=True) + return x_square + xy_inner + y_square.transpose(2, 1) + + +def xy_dense_knn_matrix(x, y, k=16, relative_pos=None): + """Get KNN based on the pairwise distance. 
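+
+    The pairwise distances come from ``xy_pairwise_distance`` and the k
+    nearest neighbours are selected with ``torch.topk`` on the negated
+    distances; the whole graph construction runs under ``torch.no_grad``.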
+ + Args: + x: (batch_size, num_dims, num_points, 1) + y: (batch_size, num_dims, num_points, 1) + k: int + relative_pos:Whether to use relative_pos + Returns: + nearest neighbors: + (batch_size, num_points, k) (batch_size, num_points, k) + """ + with torch.no_grad(): + x = x.transpose(2, 1).squeeze(-1) + y = y.transpose(2, 1).squeeze(-1) + batch_size, n_points, n_dims = x.shape + dist = xy_pairwise_distance(x.detach(), y.detach()) + if relative_pos is not None: + dist += relative_pos + _, nn_idx = torch.topk(-dist, k=k) + center_idx = torch.arange( + 0, n_points, device=x.device).repeat(batch_size, k, + 1).transpose(2, 1) + return torch.stack((nn_idx, center_idx), dim=0) + + +class DenseDilated(nn.Module): + """Find dilated neighbor from neighbor list. + + edge_index: (2, batch_size, num_points, k) + """ + + def __init__(self, k=9, dilation=1, use_stochastic=False, epsilon=0.0): + super(DenseDilated, self).__init__() + self.dilation = dilation + self.use_stochastic = use_stochastic + self.epsilon = epsilon + self.k = k + + def forward(self, edge_index): + if self.use_stochastic: + if torch.rand(1) < self.epsilon and self.training: + num = self.k * self.dilation + randnum = torch.randperm(num)[:self.k] + edge_index = edge_index[:, :, :, randnum] + else: + edge_index = edge_index[:, :, :, ::self.dilation] + else: + edge_index = edge_index[:, :, :, ::self.dilation] + return edge_index + + +class DenseDilatedKnnGraph(nn.Module): + """Find the neighbors' indices based on dilated knn.""" + + def __init__(self, k=9, dilation=1, use_stochastic=False, epsilon=0.0): + super(DenseDilatedKnnGraph, self).__init__() + self.dilation = dilation + self.use_stochastic = use_stochastic + self.epsilon = epsilon + self.k = k + self._dilated = DenseDilated(k, dilation, use_stochastic, epsilon) + + def forward(self, x, y=None, relative_pos=None): + if y is not None: + x = F.normalize(x, p=2.0, dim=1) + y = F.normalize(y, p=2.0, dim=1) + + edge_index = xy_dense_knn_matrix(x, y, self.k * self.dilation, + relative_pos) + else: + x = F.normalize(x, p=2.0, dim=1) + y = x.clone() + + edge_index = xy_dense_knn_matrix(x, y, self.k * self.dilation, + relative_pos) + return self._dilated(edge_index) + + +class BasicConv(Sequential): + + def __init__(self, + channels, + act_cfg, + norm_cfg=None, + graph_conv_bias=True, + drop=0.): + m = [] + for i in range(1, len(channels)): + m.append( + nn.Conv2d( + channels[i - 1], + channels[i], + 1, + bias=graph_conv_bias, + groups=4)) + if norm_cfg is not None: + m.append(build_norm_layer(norm_cfg, channels[-1])) + if act_cfg is not None: + m.append(build_activation_layer(act_cfg)) + if drop > 0: + m.append(nn.Dropout2d(drop)) + + super(BasicConv, self).__init__(*m) + + +def batched_index_select(x, idx): + r"""fetches neighbors features from a given neighbor idx + + Args: + x (Tensor): input feature Tensor + :math: + `\mathbf{X} \in \mathbb{R}^{B \times C \times N \times 1}`. + idx (Tensor): edge_idx + :math:`\mathbf{X} \in \mathbb{R}^{B \times N \times l}`. + Returns: + Tensor: output neighbors features + :math:`\mathbf{X} \in \mathbb{R}^{B \times C \times N \times k}`. 
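+
+    Example:
+        >>> # toy sketch: gather k=2 neighbour features for N=4 vertices
+        >>> import torch
+        >>> x = torch.arange(8, dtype=torch.float32).view(1, 2, 4, 1)
+        >>> idx = torch.tensor([[[0, 1], [1, 2], [2, 3], [3, 0]]])
+        >>> batched_index_select(x, idx).shape
+        torch.Size([1, 2, 4, 2])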
+ """ + batch_size, num_dims, num_vertices_reduced = x.shape[:3] + _, num_vertices, k = idx.shape + idx_base = torch.arange( + 0, batch_size, device=idx.device).view(-1, 1, 1) * num_vertices_reduced + idx = idx + idx_base + idx = idx.contiguous().view(-1) + + x = x.transpose(2, 1) + feature = x.contiguous().view(batch_size * num_vertices_reduced, + -1)[idx, :] + feature = feature.view(batch_size, num_vertices, k, + num_dims).permute(0, 3, 1, 2).contiguous() + return feature + + +class MRConv2d(nn.Module): + """Max-Relative Graph Convolution (Paper: https://arxiv.org/abs/1904.03751) + for dense data type.""" + + def __init__(self, + in_channels, + out_channels, + act_cfg, + norm_cfg=None, + graph_conv_bias=True): + super(MRConv2d, self).__init__() + self.nn = BasicConv([in_channels * 2, out_channels], act_cfg, norm_cfg, + graph_conv_bias) + + def forward(self, x, edge_index, y=None): + x_i = batched_index_select(x, edge_index[1]) + if y is not None: + x_j = batched_index_select(y, edge_index[0]) + else: + x_j = batched_index_select(x, edge_index[0]) + x_j, _ = torch.max(x_j - x_i, -1, keepdim=True) + b, c, n, _ = x.shape + x = torch.cat([x.unsqueeze(2), x_j.unsqueeze(2)], + dim=2).reshape(b, 2 * c, n, _) + return self.nn(x) + + +class EdgeConv2d(nn.Module): + """Edge convolution layer (with activation, batch normalization) for dense + data type.""" + + def __init__(self, + in_channels, + out_channels, + act_cfg, + norm_cfg=None, + graph_conv_bias=True): + super(EdgeConv2d, self).__init__() + self.nn = BasicConv([in_channels * 2, out_channels], act_cfg, norm_cfg, + graph_conv_bias) + + def forward(self, x, edge_index, y=None): + x_i = batched_index_select(x, edge_index[1]) + if y is not None: + x_j = batched_index_select(y, edge_index[0]) + else: + x_j = batched_index_select(x, edge_index[0]) + max_value, _ = torch.max( + self.nn(torch.cat([x_i, x_j - x_i], dim=1)), -1, keepdim=True) + return max_value + + +class GraphSAGE(nn.Module): + """GraphSAGE Graph Convolution (Paper: https://arxiv.org/abs/1706.02216) + for dense data type.""" + + def __init__(self, + in_channels, + out_channels, + act_cfg, + norm_cfg=None, + graph_conv_bias=True): + super(GraphSAGE, self).__init__() + self.nn1 = BasicConv([in_channels, in_channels], act_cfg, norm_cfg, + graph_conv_bias) + self.nn2 = BasicConv([in_channels * 2, out_channels], act_cfg, + norm_cfg, graph_conv_bias) + + def forward(self, x, edge_index, y=None): + if y is not None: + x_j = batched_index_select(y, edge_index[0]) + else: + x_j = batched_index_select(x, edge_index[0]) + x_j, _ = torch.max(self.nn1(x_j), -1, keepdim=True) + return self.nn2(torch.cat([x, x_j], dim=1)) + + +class GINConv2d(nn.Module): + """GIN Graph Convolution (Paper: https://arxiv.org/abs/1810.00826) for + dense data type.""" + + def __init__(self, + in_channels, + out_channels, + act_cfg, + norm_cfg=None, + graph_conv_bias=True): + super(GINConv2d, self).__init__() + self.nn = BasicConv([in_channels, out_channels], act_cfg, norm_cfg, + graph_conv_bias) + eps_init = 0.0 + self.eps = nn.Parameter(torch.Tensor([eps_init])) + + def forward(self, x, edge_index, y=None): + if y is not None: + x_j = batched_index_select(y, edge_index[0]) + else: + x_j = batched_index_select(x, edge_index[0]) + x_j = torch.sum(x_j, -1, keepdim=True) + return self.nn((1 + self.eps) * x + x_j) + + +class GraphConv2d(nn.Module): + """Static graph convolution layer.""" + + def __init__(self, + in_channels, + out_channels, + graph_conv_type, + act_cfg, + norm_cfg=None, + graph_conv_bias=True): + 
super(GraphConv2d, self).__init__() + if graph_conv_type == 'edge': + self.gconv = EdgeConv2d(in_channels, out_channels, act_cfg, + norm_cfg, graph_conv_bias) + elif graph_conv_type == 'mr': + self.gconv = MRConv2d(in_channels, out_channels, act_cfg, norm_cfg, + graph_conv_bias) + elif graph_conv_type == 'sage': + self.gconv = GraphSAGE(in_channels, out_channels, act_cfg, + norm_cfg, graph_conv_bias) + elif graph_conv_type == 'gin': + self.gconv = GINConv2d(in_channels, out_channels, act_cfg, + norm_cfg, graph_conv_bias) + else: + raise NotImplementedError( + 'graph_conv_type:{} is not supported'.format(graph_conv_type)) + + def forward(self, x, edge_index, y=None): + return self.gconv(x, edge_index, y) + + +class DyGraphConv2d(GraphConv2d): + """Dynamic graph convolution layer.""" + + def __init__(self, + in_channels, + out_channels, + k=9, + dilation=1, + graph_conv_type='mr', + act_cfg=dict(type='GELU'), + norm_cfg=None, + graph_conv_bias=True, + use_stochastic=False, + epsilon=0.2, + r=1): + super(DyGraphConv2d, + self).__init__(in_channels, out_channels, graph_conv_type, + act_cfg, norm_cfg, graph_conv_bias) + self.k = k + self.d = dilation + self.r = r + self.dilated_knn_graph = DenseDilatedKnnGraph(k, dilation, + use_stochastic, epsilon) + + def forward(self, x, relative_pos=None): + B, C, H, W = x.shape + y = None + if self.r > 1: + y = F.avg_pool2d(x, self.r, self.r) + y = y.reshape(B, C, -1, 1).contiguous() + x = x.reshape(B, C, -1, 1).contiguous() + edge_index = self.dilated_knn_graph(x, y, relative_pos) + x = super(DyGraphConv2d, self).forward(x, edge_index, y) + return x.reshape(B, -1, H, W).contiguous() + + +class Grapher(nn.Module): + """Grapher module with graph convolution and fc layers.""" + + def __init__(self, + in_channels, + k=9, + dilation=1, + graph_conv_type='mr', + act_cfg=dict(type='GELU'), + norm_cfg=None, + graph_conv_bias=True, + use_stochastic=False, + epsilon=0.2, + r=1, + n=196, + drop_path=0.0, + relative_pos=False): + super(Grapher, self).__init__() + self.channels = in_channels + self.n = n + self.r = r + self.fc1 = Sequential( + nn.Conv2d(in_channels, in_channels, 1, stride=1, padding=0), + build_norm_layer(dict(type='BN'), in_channels), + ) + self.graph_conv = DyGraphConv2d(in_channels, in_channels * 2, k, + dilation, graph_conv_type, act_cfg, + norm_cfg, graph_conv_bias, + use_stochastic, epsilon, r) + self.fc2 = Sequential( + nn.Conv2d(in_channels * 2, in_channels, 1, stride=1, padding=0), + build_norm_layer(dict(type='BN'), in_channels), + ) + self.drop_path = DropPath( + drop_path) if drop_path > 0. 
else nn.Identity() + + self.relative_pos = None + if relative_pos: + relative_pos_tensor = torch.from_numpy( + np.float32( + get_2d_relative_pos_embed(in_channels, int( + n**0.5)))).unsqueeze(0).unsqueeze(1) + relative_pos_tensor = F.interpolate( + relative_pos_tensor, + size=(n, n // (r * r)), + mode='bicubic', + align_corners=False) + self.relative_pos = nn.Parameter( + -relative_pos_tensor.squeeze(1), requires_grad=False) + + def _get_relative_pos(self, relative_pos, H, W): + if relative_pos is None or H * W == self.n: + return relative_pos + else: + N = H * W + N_reduced = N // (self.r * self.r) + return F.interpolate( + relative_pos.unsqueeze(0), size=(N, N_reduced), + mode='bicubic').squeeze(0) + + def forward(self, x): + B, C, H, W = x.shape + relative_pos = self._get_relative_pos(self.relative_pos, H, W) + shortcut = x + x = self.fc1(x) + x = self.graph_conv(x, relative_pos) + x = self.fc2(x) + x = self.drop_path(x) + shortcut + return x + + +class FFN(nn.Module): + """"out_features = out_features or in_features\n + hidden_features = hidden_features or in_features""" + + def __init__(self, + in_features, + hidden_features=None, + out_features=None, + act_cfg=dict(type='GELU'), + drop_path=0.0): + super().__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + self.fc1 = Sequential( + nn.Conv2d(in_features, hidden_features, 1, stride=1, padding=0), + build_norm_layer(dict(type='BN'), hidden_features), + ) + self.act = build_activation_layer(act_cfg) + self.fc2 = Sequential( + nn.Conv2d(hidden_features, out_features, 1, stride=1, padding=0), + build_norm_layer(dict(type='BN'), out_features), + ) + self.drop_path = DropPath( + drop_path) if drop_path > 0. else nn.Identity() + + def forward(self, x): + shortcut = x + x = self.fc1(x) + x = self.act(x) + x = self.fc2(x) + x = self.drop_path(x) + shortcut + return x + + +@MODELS.register_module() +class Vig(BaseBackbone): + """Vision GNN backbone. + + A PyTorch implementation of `Vision GNN: An Image is Worth Graph of Nodes + `_. + + Modified from the official implementation + https://github.com/huawei-noah/Efficient-AI-Backbones/tree/master/vig_pytorch + + Args: + arch(str): Vision GNN architecture, + choose from 'tiny', 'small' and 'base'. + in_channels (int): The number of channels of input images. + Defaults to 3. + k (int): The number of KNN's k. Defaults to 9. + out_indices (Sequence | int): Output from which blocks. + Defaults to -1, means the last block. + act_cfg (dict): The config of activative functions. + Defaults to ``dict(type='GELU'))``. + norm_cfg (dict): The config of normalization layers. + Defaults to ``dict(type='BN', eps=1e-6)``. + graph_conv_bias (bool): Whether to use bias in the convolution + layers in Grapher. Defaults to True. + graph_conv_type (str): The type of graph convolution,choose + from 'edge', 'mr', 'sage' and 'gin'. Defaults to 'mr'. + epsilon (float): Probability of random arrangement in KNN. It only + works when ``use_dilation=True`` and ``use_stochastic=True``. + Defaults to 0.2. + use_dilation(bool): Whether to use dilation in KNN. Defaults to True. + use_stochastic(bool): Whether to use stochastic in KNN. + Defaults to False. + drop_path (float): stochastic depth rate. Default 0.0 + relative_pos(bool): Whether to use relative position embedding. + Defaults to False. + norm_eval (bool): Whether to set the normalization layer to eval mode. + Defaults to False. + frozen_stages (int): Blocks to be frozen (all param fixed). 
+ Defaults to 0, which means not freezing any parameters. + init_cfg (dict, optional): The initialization configs. + Defaults to None. + """ # noqa: E501 + + arch_settings = { + 'tiny': dict(num_blocks=12, channels=192), + 'small': dict(num_blocks=16, channels=320), + 'base': dict(num_blocks=16, channels=640), + } + + def __init__(self, + arch, + in_channels=3, + k=9, + out_indices=-1, + act_cfg=dict(type='GELU'), + norm_cfg=dict(type='BN'), + graph_conv_bias=True, + graph_conv_type='mr', + epsilon=0.2, + use_dilation=True, + use_stochastic=False, + drop_path=0., + relative_pos=False, + norm_eval=False, + frozen_stages=0, + init_cfg=None): + super().__init__(init_cfg=init_cfg) + arch = self.arch_settings[arch] + self.num_blocks = arch['num_blocks'] + channels = arch['channels'] + + if isinstance(out_indices, int): + out_indices = [out_indices] + elif isinstance(out_indices, tuple): + out_indices = list(out_indices) + elif not isinstance(out_indices, list): + raise TypeError('"out_indices" must by a tuple, list or int, ' + f'get {type(out_indices)} instead.') + for i, index in enumerate(out_indices): + if index < 0: + out_indices[i] = self.num_blocks + index + assert 0 <= out_indices[i] <= self.num_blocks, \ + f'Invalid out_indices {index}' + self.out_indices = out_indices + + self.stem = Sequential( + nn.Conv2d(in_channels, channels // 8, 3, stride=2, padding=1), + build_norm_layer(norm_cfg, channels // 8), + build_activation_layer(act_cfg), + nn.Conv2d(channels // 8, channels // 4, 3, stride=2, padding=1), + build_norm_layer(norm_cfg, channels // 4), + build_activation_layer(act_cfg), + nn.Conv2d(channels // 4, channels // 2, 3, stride=2, padding=1), + build_norm_layer(norm_cfg, channels // 2), + build_activation_layer(act_cfg), + nn.Conv2d(channels // 2, channels, 3, stride=2, padding=1), + build_norm_layer(norm_cfg, channels), + build_activation_layer(act_cfg), + nn.Conv2d(channels, channels, 3, stride=1, padding=1), + build_norm_layer(norm_cfg, channels), + ) + + # stochastic depth decay rule + dpr = [x.item() for x in torch.linspace(0, drop_path, self.num_blocks)] + # number of knn's k + num_knn = [ + int(x.item()) for x in torch.linspace(k, 2 * k, self.num_blocks) + ] + max_dilation = 196 // max(num_knn) + + self.pos_embed = nn.Parameter(torch.zeros(1, channels, 14, 14)) + + self.blocks = ModuleList([ + Sequential( + Grapher( + in_channels=channels, + k=num_knn[i], + dilation=min(i // 4 + + 1, max_dilation) if use_dilation else 1, + graph_conv_type=graph_conv_type, + act_cfg=act_cfg, + norm_cfg=norm_cfg, + graph_conv_bias=graph_conv_bias, + use_stochastic=use_stochastic, + epsilon=epsilon, + drop_path=dpr[i], + relative_pos=relative_pos), + FFN(in_features=channels, + hidden_features=channels * 4, + act_cfg=act_cfg, + drop_path=dpr[i])) for i in range(self.num_blocks) + ]) + + self.norm_eval = norm_eval + self.frozen_stages = frozen_stages + + def forward(self, inputs): + outs = [] + x = self.stem(inputs) + self.pos_embed + + for i, block in enumerate(self.blocks): + x = block(x) + + if i in self.out_indices: + outs.append(x) + + return tuple(outs) + + def _freeze_stages(self): + self.stem.eval() + for i in range(self.frozen_stages): + m = self.blocks[i] + m.eval() + for param in m.parameters(): + param.requires_grad = False + + def train(self, mode=True): + super(Vig, self).train(mode) + self._freeze_stages() + if mode and self.norm_eval: + for m in self.modules(): + # trick: eval have effect on BatchNorm only + if isinstance(m, _BatchNorm): + m.eval() + + +@MODELS.register_module() 
+class PyramidVig(BaseBackbone): + """Pyramid Vision GNN backbone. + + A PyTorch implementation of `Vision GNN: An Image is Worth Graph of Nodes + `_. + + Modified from the official implementation + https://github.com/huawei-noah/Efficient-AI-Backbones/tree/master/vig_pytorch + + Args: + arch (str): Vision GNN architecture, choose from 'tiny', + 'small' and 'base'. + in_channels (int): The number of channels of input images. + Defaults to 3. + k (int): The number of KNN's k. Defaults to 9. + out_indices (Sequence | int): Output from which stages. + Defaults to -1, means the last stage. + act_cfg (dict): The config of activative functions. + Defaults to ``dict(type='GELU'))``. + norm_cfg (dict): The config of normalization layers. + Defaults to ``dict(type='BN')``. + graph_conv_bias (bool): Whether to use bias in the convolution + layers in Grapher. Defaults to True. + graph_conv_type (str): The type of graph convolution,choose + from 'edge', 'mr', 'sage' and 'gin'. Defaults to 'mr'. + epsilon (float): Probability of random arrangement in KNN. It only + works when ``use_stochastic=True``. Defaults to 0.2. + use_stochastic (bool): Whether to use stochastic in KNN. + Defaults to False. + drop_path (float): stochastic depth rate. Default 0.0 + norm_eval (bool): Whether to set the normalization layer to eval mode. + Defaults to False. + frozen_stages (int): Stages to be frozen (all param fixed). + Defaults to 0, which means not freezing any parameters. + init_cfg (dict, optional): The initialization configs. + Defaults to None. + """ # noqa: E501 + arch_settings = { + 'tiny': dict(blocks=[2, 2, 6, 2], channels=[48, 96, 240, 384]), + 'small': dict(blocks=[2, 2, 6, 2], channels=[80, 160, 400, 640]), + 'medium': dict(blocks=[2, 2, 16, 2], channels=[96, 192, 384, 768]), + 'base': dict(blocks=[2, 2, 18, 2], channels=[128, 256, 512, 1024]), + } + + def __init__(self, + arch, + in_channels=3, + k=9, + out_indices=-1, + act_cfg=dict(type='GELU'), + norm_cfg=dict(type='BN'), + graph_conv_bias=True, + graph_conv_type='mr', + epsilon=0.2, + use_stochastic=False, + drop_path=0., + norm_eval=False, + frozen_stages=0, + init_cfg=None): + super().__init__(init_cfg=init_cfg) + arch = self.arch_settings[arch] + self.blocks = arch['blocks'] + self.num_blocks = sum(self.blocks) + self.num_stages = len(self.blocks) + channels = arch['channels'] + self.channels = channels + + if isinstance(out_indices, int): + out_indices = [out_indices] + assert isinstance(out_indices, Sequence), \ + f'"out_indices" must by a sequence or int, ' \ + f'get {type(out_indices)} instead.' 
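+        # Map negative indices (e.g. -1 for the last stage) onto absolute
+        # stage indices before validating them below.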
+ for i, index in enumerate(out_indices): + if index < 0: + out_indices[i] = self.num_stages + index + assert 0 <= out_indices[i] <= self.num_stages, \ + f'Invalid out_indices {index}' + self.out_indices = out_indices + + self.stem = Sequential( + nn.Conv2d(in_channels, channels[0] // 2, 3, stride=2, padding=1), + build_norm_layer(norm_cfg, channels[0] // 2), + build_activation_layer(act_cfg), + nn.Conv2d(channels[0] // 2, channels[0], 3, stride=2, padding=1), + build_norm_layer(norm_cfg, channels[0]), + build_activation_layer(act_cfg), + nn.Conv2d(channels[0], channels[0], 3, stride=1, padding=1), + build_norm_layer(norm_cfg, channels[0]), + ) + + # stochastic depth decay rule + dpr = [x.item() for x in torch.linspace(0, drop_path, self.num_blocks)] + # number of knn's k + num_knn = [ + int(x.item()) for x in torch.linspace(k, k, self.num_blocks) + ] + max_dilation = 49 // max(num_knn) + + self.pos_embed = nn.Parameter( + torch.zeros(1, channels[0], 224 // 4, 224 // 4)) + HW = 224 // 4 * 224 // 4 + reduce_ratios = [4, 2, 1, 1] + + self.stages = ModuleList() + block_idx = 0 + for stage_idx, num_blocks in enumerate(self.blocks): + mid_channels = channels[stage_idx] + reduce_ratio = reduce_ratios[stage_idx] + blocks = [] + if stage_idx > 0: + blocks.append( + Sequential( + nn.Conv2d( + self.channels[stage_idx - 1], + mid_channels, + kernel_size=3, + stride=2, + padding=1), + build_norm_layer(norm_cfg, mid_channels), + )) + HW = HW // 4 + for _ in range(num_blocks): + blocks.append( + Sequential( + Grapher( + in_channels=mid_channels, + k=num_knn[block_idx], + dilation=min(block_idx // 4 + 1, max_dilation), + graph_conv_type=graph_conv_type, + act_cfg=act_cfg, + norm_cfg=norm_cfg, + graph_conv_bias=graph_conv_bias, + use_stochastic=use_stochastic, + epsilon=epsilon, + r=reduce_ratio, + n=HW, + drop_path=dpr[block_idx], + relative_pos=True), + FFN(in_features=mid_channels, + hidden_features=mid_channels * 4, + act_cfg=act_cfg, + drop_path=dpr[block_idx]))) + block_idx += 1 + self.stages.append(Sequential(*blocks)) + + self.norm_eval = norm_eval + self.frozen_stages = frozen_stages + + def forward(self, inputs): + outs = [] + x = self.stem(inputs) + self.pos_embed + + for i, blocks in enumerate(self.stages): + x = blocks(x) + + if i in self.out_indices: + outs.append(x) + + return tuple(outs) + + def _freeze_stages(self): + self.stem.eval() + for i in range(self.frozen_stages): + m = self.stages[i] + m.eval() + for param in m.parameters(): + param.requires_grad = False + + def train(self, mode=True): + super(PyramidVig, self).train(mode) + self._freeze_stages() + if mode and self.norm_eval: + for m in self.modules(): + # trick: eval have effect on BatchNorm only + if isinstance(m, _BatchNorm): + m.eval() diff --git a/mmpretrain/models/backbones/vision_transformer.py b/mmpretrain/models/backbones/vision_transformer.py new file mode 100644 index 0000000000000000000000000000000000000000..cd0a70d377fde1a9b7535612ac5126d8abafa3fa --- /dev/null +++ b/mmpretrain/models/backbones/vision_transformer.py @@ -0,0 +1,520 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
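For reference, a minimal usage sketch of the two ViG backbones added above; it is not part of the patch, the import path is an assumption (both classes being re-exported from ``mmpretrain.models.backbones``), and the shapes are what the stem and stage layouts in the code imply for a 224x224 input.

    import torch

    from mmpretrain.models.backbones import PyramidVig, Vig

    x = torch.rand(1, 3, 224, 224)

    # Isotropic ViG: 'tiny' keeps 192 channels on a 14x14 grid for all 12 blocks.
    vig = Vig(arch='tiny', out_indices=-1)
    vig.eval()
    print(vig(x)[0].shape)  # torch.Size([1, 192, 14, 14])

    # Pyramid ViG: four stages with growing channels and shrinking resolution.
    pvig = PyramidVig(arch='tiny', out_indices=[0, 1, 2, 3])
    pvig.eval()
    print([feat.shape for feat in pvig(x)])
    # 48x56x56, 96x28x28, 240x14x14 and 384x7x7 feature maps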
+from typing import Sequence + +import numpy as np +import torch +import torch.nn as nn +from mmcv.cnn.bricks.transformer import FFN, PatchEmbed +from mmengine.model import BaseModule, ModuleList +from mmengine.model.weight_init import trunc_normal_ + +from mmpretrain.registry import MODELS +from ..utils import (MultiheadAttention, SwiGLUFFNFused, build_norm_layer, + resize_pos_embed, to_2tuple) +from .base_backbone import BaseBackbone + + +class TransformerEncoderLayer(BaseModule): + """Implements one encoder layer in Vision Transformer. + + Args: + embed_dims (int): The feature dimension + num_heads (int): Parallel attention heads + feedforward_channels (int): The hidden dimension for FFNs + layer_scale_init_value (float or torch.Tensor): Init value of layer + scale. Defaults to 0. + drop_rate (float): Probability of an element to be zeroed + after the feed forward layer. Defaults to 0. + attn_drop_rate (float): The drop out rate for attention output weights. + Defaults to 0. + drop_path_rate (float): Stochastic depth rate. Defaults to 0. + num_fcs (int): The number of fully-connected layers for FFNs. + Defaults to 2. + qkv_bias (bool): enable bias for qkv if True. Defaults to True. + ffn_type (str): Select the type of ffn layers. Defaults to 'origin'. + act_cfg (dict): The activation config for FFNs. + Defaluts to ``dict(type='GELU')``. + norm_cfg (dict): Config dict for normalization layer. + Defaults to ``dict(type='LN')``. + init_cfg (dict, optional): Initialization config dict. + Defaults to None. + """ + + def __init__(self, + embed_dims, + num_heads, + feedforward_channels, + layer_scale_init_value=0., + drop_rate=0., + attn_drop_rate=0., + drop_path_rate=0., + num_fcs=2, + qkv_bias=True, + ffn_type='origin', + act_cfg=dict(type='GELU'), + norm_cfg=dict(type='LN'), + init_cfg=None): + super(TransformerEncoderLayer, self).__init__(init_cfg=init_cfg) + + self.embed_dims = embed_dims + + self.ln1 = build_norm_layer(norm_cfg, self.embed_dims) + + self.attn = MultiheadAttention( + embed_dims=embed_dims, + num_heads=num_heads, + attn_drop=attn_drop_rate, + proj_drop=drop_rate, + dropout_layer=dict(type='DropPath', drop_prob=drop_path_rate), + qkv_bias=qkv_bias, + layer_scale_init_value=layer_scale_init_value) + + self.ln2 = build_norm_layer(norm_cfg, self.embed_dims) + + if ffn_type == 'origin': + self.ffn = FFN( + embed_dims=embed_dims, + feedforward_channels=feedforward_channels, + num_fcs=num_fcs, + ffn_drop=drop_rate, + dropout_layer=dict(type='DropPath', drop_prob=drop_path_rate), + act_cfg=act_cfg, + layer_scale_init_value=layer_scale_init_value) + elif ffn_type == 'swiglu_fused': + self.ffn = SwiGLUFFNFused( + embed_dims=embed_dims, + feedforward_channels=feedforward_channels, + layer_scale_init_value=layer_scale_init_value) + else: + raise NotImplementedError + + @property + def norm1(self): + return self.ln1 + + @property + def norm2(self): + return self.ln2 + + def init_weights(self): + super(TransformerEncoderLayer, self).init_weights() + for m in self.ffn.modules(): + if isinstance(m, nn.Linear): + nn.init.xavier_uniform_(m.weight) + nn.init.normal_(m.bias, std=1e-6) + + def forward(self, x): + x = x + self.attn(self.ln1(x)) + x = self.ffn(self.ln2(x), identity=x) + return x + + +@MODELS.register_module() +class VisionTransformer(BaseBackbone): + """Vision Transformer. + + A PyTorch implement of : `An Image is Worth 16x16 Words: Transformers + for Image Recognition at Scale `_ + + Args: + arch (str | dict): Vision Transformer architecture. 
If use string, + choose from 'small', 'base', 'large', 'deit-tiny', 'deit-small' + and 'deit-base'. If use dict, it should have below keys: + + - **embed_dims** (int): The dimensions of embedding. + - **num_layers** (int): The number of transformer encoder layers. + - **num_heads** (int): The number of heads in attention modules. + - **feedforward_channels** (int): The hidden dimensions in + feedforward modules. + + Defaults to 'base'. + img_size (int | tuple): The expected input image shape. Because we + support dynamic input shape, just set the argument to the most + common input image shape. Defaults to 224. + patch_size (int | tuple): The patch size in patch embedding. + Defaults to 16. + in_channels (int): The num of input channels. Defaults to 3. + out_indices (Sequence | int): Output from which stages. + Defaults to -1, means the last stage. + drop_rate (float): Probability of an element to be zeroed. + Defaults to 0. + drop_path_rate (float): stochastic depth rate. Defaults to 0. + qkv_bias (bool): Whether to add bias for qkv in attention modules. + Defaults to True. + norm_cfg (dict): Config dict for normalization layer. + Defaults to ``dict(type='LN')``. + final_norm (bool): Whether to add a additional layer to normalize + final feature map. Defaults to True. + out_type (str): The type of output features. Please choose from + + - ``"cls_token"``: The class token tensor with shape (B, C). + - ``"featmap"``: The feature map tensor from the patch tokens + with shape (B, C, H, W). + - ``"avg_featmap"``: The global averaged feature map tensor + with shape (B, C). + - ``"raw"``: The raw feature tensor includes patch tokens and + class tokens with shape (B, L, C). + + Defaults to ``"cls_token"``. + with_cls_token (bool): Whether concatenating class token into image + tokens as transformer input. Defaults to True. + frozen_stages (int): Stages to be frozen (stop grad and set eval mode). + -1 means not freezing any parameters. Defaults to -1. + interpolate_mode (str): Select the interpolate mode for position + embeding vector resize. Defaults to "bicubic". + layer_scale_init_value (float or torch.Tensor): Init value of layer + scale. Defaults to 0. + patch_cfg (dict): Configs of patch embeding. Defaults to an empty dict. + layer_cfgs (Sequence | dict): Configs of each transformer layer in + encoder. Defaults to an empty dict. + init_cfg (dict, optional): Initialization config dict. + Defaults to None. 
+ """ + arch_zoo = { + **dict.fromkeys( + ['s', 'small'], { + 'embed_dims': 768, + 'num_layers': 8, + 'num_heads': 8, + 'feedforward_channels': 768 * 3, + }), + **dict.fromkeys( + ['b', 'base'], { + 'embed_dims': 768, + 'num_layers': 12, + 'num_heads': 12, + 'feedforward_channels': 3072 + }), + **dict.fromkeys( + ['l', 'large'], { + 'embed_dims': 1024, + 'num_layers': 24, + 'num_heads': 16, + 'feedforward_channels': 4096 + }), + **dict.fromkeys( + ['h', 'huge'], + { + # The same as the implementation in MAE + # + 'embed_dims': 1280, + 'num_layers': 32, + 'num_heads': 16, + 'feedforward_channels': 5120 + }), + **dict.fromkeys( + ['eva-g', 'eva-giant'], + { + # The implementation in EVA + # + 'embed_dims': 1408, + 'num_layers': 40, + 'num_heads': 16, + 'feedforward_channels': 6144 + }), + **dict.fromkeys( + ['deit-t', 'deit-tiny'], { + 'embed_dims': 192, + 'num_layers': 12, + 'num_heads': 3, + 'feedforward_channels': 192 * 4 + }), + **dict.fromkeys( + ['deit-s', 'deit-small', 'dinov2-s', 'dinov2-small'], { + 'embed_dims': 384, + 'num_layers': 12, + 'num_heads': 6, + 'feedforward_channels': 384 * 4 + }), + **dict.fromkeys( + ['deit-b', 'deit-base'], { + 'embed_dims': 768, + 'num_layers': 12, + 'num_heads': 12, + 'feedforward_channels': 768 * 4 + }), + **dict.fromkeys( + ['dinov2-g', 'dinov2-giant'], { + 'embed_dims': 1536, + 'num_layers': 40, + 'num_heads': 24, + 'feedforward_channels': 6144 + }), + } + num_extra_tokens = 1 # class token + OUT_TYPES = {'raw', 'cls_token', 'featmap', 'avg_featmap'} + + def __init__(self, + arch='base', + img_size=224, + patch_size=16, + in_channels=3, + out_indices=-1, + drop_rate=0., + drop_path_rate=0., + qkv_bias=True, + norm_cfg=dict(type='LN', eps=1e-6), + final_norm=True, + out_type='cls_token', + with_cls_token=True, + frozen_stages=-1, + interpolate_mode='bicubic', + layer_scale_init_value=0., + patch_cfg=dict(), + layer_cfgs=dict(), + pre_norm=False, + init_cfg=None): + super(VisionTransformer, self).__init__(init_cfg) + + if isinstance(arch, str): + arch = arch.lower() + assert arch in set(self.arch_zoo), \ + f'Arch {arch} is not in default archs {set(self.arch_zoo)}' + self.arch_settings = self.arch_zoo[arch] + else: + essential_keys = { + 'embed_dims', 'num_layers', 'num_heads', 'feedforward_channels' + } + assert isinstance(arch, dict) and essential_keys <= set(arch), \ + f'Custom arch needs a dict with keys {essential_keys}' + self.arch_settings = arch + + self.embed_dims = self.arch_settings['embed_dims'] + self.num_layers = self.arch_settings['num_layers'] + self.img_size = to_2tuple(img_size) + + # Set patch embedding + _patch_cfg = dict( + in_channels=in_channels, + input_size=img_size, + embed_dims=self.embed_dims, + conv_type='Conv2d', + kernel_size=patch_size, + stride=patch_size, + bias=not pre_norm, # disable bias if pre_norm is used(e.g., CLIP) + ) + _patch_cfg.update(patch_cfg) + self.patch_embed = PatchEmbed(**_patch_cfg) + self.patch_resolution = self.patch_embed.init_out_size + num_patches = self.patch_resolution[0] * self.patch_resolution[1] + + # Set out type + if out_type not in self.OUT_TYPES: + raise ValueError(f'Unsupported `out_type` {out_type}, please ' + f'choose from {self.OUT_TYPES}') + self.out_type = out_type + + # Set cls token + if with_cls_token: + self.cls_token = nn.Parameter(torch.zeros(1, 1, self.embed_dims)) + elif out_type != 'cls_token': + self.cls_token = None + self.num_extra_tokens = 0 + else: + raise ValueError( + 'with_cls_token must be True when `out_type="cls_token"`.') + + # Set position embedding + 
self.interpolate_mode = interpolate_mode + self.pos_embed = nn.Parameter( + torch.zeros(1, num_patches + self.num_extra_tokens, + self.embed_dims)) + self._register_load_state_dict_pre_hook(self._prepare_pos_embed) + + self.drop_after_pos = nn.Dropout(p=drop_rate) + + if isinstance(out_indices, int): + out_indices = [out_indices] + assert isinstance(out_indices, Sequence), \ + f'"out_indices" must by a sequence or int, ' \ + f'get {type(out_indices)} instead.' + for i, index in enumerate(out_indices): + if index < 0: + out_indices[i] = self.num_layers + index + assert 0 <= out_indices[i] <= self.num_layers, \ + f'Invalid out_indices {index}' + self.out_indices = out_indices + + # stochastic depth decay rule + dpr = np.linspace(0, drop_path_rate, self.num_layers) + + self.layers = ModuleList() + if isinstance(layer_cfgs, dict): + layer_cfgs = [layer_cfgs] * self.num_layers + for i in range(self.num_layers): + _layer_cfg = dict( + embed_dims=self.embed_dims, + num_heads=self.arch_settings['num_heads'], + feedforward_channels=self. + arch_settings['feedforward_channels'], + layer_scale_init_value=layer_scale_init_value, + drop_rate=drop_rate, + drop_path_rate=dpr[i], + qkv_bias=qkv_bias, + norm_cfg=norm_cfg) + _layer_cfg.update(layer_cfgs[i]) + self.layers.append(TransformerEncoderLayer(**_layer_cfg)) + + self.frozen_stages = frozen_stages + if pre_norm: + self.pre_norm = build_norm_layer(norm_cfg, self.embed_dims) + else: + self.pre_norm = nn.Identity() + + self.final_norm = final_norm + if final_norm: + self.ln1 = build_norm_layer(norm_cfg, self.embed_dims) + if self.out_type == 'avg_featmap': + self.ln2 = build_norm_layer(norm_cfg, self.embed_dims) + + # freeze stages only when self.frozen_stages > 0 + if self.frozen_stages > 0: + self._freeze_stages() + + @property + def norm1(self): + return self.ln1 + + @property + def norm2(self): + return self.ln2 + + def init_weights(self): + super(VisionTransformer, self).init_weights() + + if not (isinstance(self.init_cfg, dict) + and self.init_cfg['type'] == 'Pretrained'): + if self.pos_embed is not None: + trunc_normal_(self.pos_embed, std=0.02) + + def _prepare_pos_embed(self, state_dict, prefix, *args, **kwargs): + name = prefix + 'pos_embed' + if name not in state_dict.keys(): + return + + ckpt_pos_embed_shape = state_dict[name].shape + if self.pos_embed.shape != ckpt_pos_embed_shape: + from mmengine.logging import MMLogger + logger = MMLogger.get_current_instance() + logger.info( + f'Resize the pos_embed shape from {ckpt_pos_embed_shape} ' + f'to {self.pos_embed.shape}.') + + ckpt_pos_embed_shape = to_2tuple( + int(np.sqrt(ckpt_pos_embed_shape[1] - self.num_extra_tokens))) + pos_embed_shape = self.patch_embed.init_out_size + + state_dict[name] = resize_pos_embed(state_dict[name], + ckpt_pos_embed_shape, + pos_embed_shape, + self.interpolate_mode, + self.num_extra_tokens) + + @staticmethod + def resize_pos_embed(*args, **kwargs): + """Interface for backward-compatibility.""" + return resize_pos_embed(*args, **kwargs) + + def _freeze_stages(self): + # freeze position embedding + if self.pos_embed is not None: + self.pos_embed.requires_grad = False + # set dropout to eval model + self.drop_after_pos.eval() + # freeze patch embedding + self.patch_embed.eval() + for param in self.patch_embed.parameters(): + param.requires_grad = False + # freeze cls_token + self.cls_token.requires_grad = False + # freeze layers + for i in range(1, self.frozen_stages + 1): + m = self.layers[i - 1] + m.eval() + for param in m.parameters(): + param.requires_grad = 
False + # freeze the last layer norm + if self.frozen_stages == len(self.layers) and self.final_norm: + self.ln1.eval() + for param in self.ln1.parameters(): + param.requires_grad = False + + def forward(self, x): + B = x.shape[0] + x, patch_resolution = self.patch_embed(x) + + if self.cls_token is not None: + # stole cls_tokens impl from Phil Wang, thanks + cls_token = self.cls_token.expand(B, -1, -1) + x = torch.cat((cls_token, x), dim=1) + + x = x + resize_pos_embed( + self.pos_embed, + self.patch_resolution, + patch_resolution, + mode=self.interpolate_mode, + num_extra_tokens=self.num_extra_tokens) + x = self.drop_after_pos(x) + + x = self.pre_norm(x) + + outs = [] + for i, layer in enumerate(self.layers): + x = layer(x) + + if i == len(self.layers) - 1 and self.final_norm: + x = self.ln1(x) + + if i in self.out_indices: + outs.append(self._format_output(x, patch_resolution)) + + return tuple(outs) + + def _format_output(self, x, hw): + if self.out_type == 'raw': + return x + if self.out_type == 'cls_token': + return x[:, 0] + + patch_token = x[:, self.num_extra_tokens:] + if self.out_type == 'featmap': + B = x.size(0) + # (B, N, C) -> (B, H, W, C) -> (B, C, H, W) + return patch_token.reshape(B, *hw, -1).permute(0, 3, 1, 2) + if self.out_type == 'avg_featmap': + return self.ln2(patch_token.mean(dim=1)) + + def get_layer_depth(self, param_name: str, prefix: str = ''): + """Get the layer-wise depth of a parameter. + + Args: + param_name (str): The name of the parameter. + prefix (str): The prefix for the parameter. + Defaults to an empty string. + + Returns: + Tuple[int, int]: The layer-wise depth and the num of layers. + + Note: + The first depth is the stem module (``layer_depth=0``), and the + last depth is the subsequent module (``layer_depth=num_layers-1``) + """ + num_layers = self.num_layers + 2 + + if not param_name.startswith(prefix): + # For subsequent module like head + return num_layers - 1, num_layers + + param_name = param_name[len(prefix):] + + if param_name in ('cls_token', 'pos_embed'): + layer_depth = 0 + elif param_name.startswith('patch_embed'): + layer_depth = 0 + elif param_name.startswith('layers'): + layer_id = int(param_name.split('.')[1]) + layer_depth = layer_id + 1 + else: + layer_depth = num_layers - 1 + + return layer_depth, num_layers diff --git a/mmpretrain/models/backbones/vit_eva02.py b/mmpretrain/models/backbones/vit_eva02.py new file mode 100644 index 0000000000000000000000000000000000000000..20ec4b247bbdbfc209c353c8e001d34d71a3990c --- /dev/null +++ b/mmpretrain/models/backbones/vit_eva02.py @@ -0,0 +1,350 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import numpy as np +import torch +import torch.nn as nn +from mmcv.cnn.bricks.drop import build_dropout +from mmengine.model import BaseModule, ModuleList + +from mmpretrain.registry import MODELS +from ..utils import (RotaryEmbeddingFast, SwiGLUFFN, build_norm_layer, + resize_pos_embed) +from .vision_transformer import VisionTransformer + + +class AttentionWithRoPE(BaseModule): + """Multi-head Attention Module with 2D sincos position embedding (RoPE). + + Args: + embed_dims (int): The embedding dimension. + num_heads (int): Parallel attention heads. + attn_drop (float): Dropout rate of the dropout layer after the + attention calculation of query and key. Defaults to 0. + proj_drop (float): Dropout rate of the dropout layer after the + output projection. Defaults to 0. + qkv_bias (bool): If True, add a learnable bias to q and v. 
Note + that we follows the official implementation where ``k_bias`` + is 0. Defaults to True. + qk_scale (float, optional): Override default qk scale of + ``head_dim ** -0.5`` if set. Defaults to None. + proj_bias (bool) If True, add a learnable bias to output projection. + Defaults to True. + rope (:obj:`torch.nn.Module`, optional): If it is an object of the + ``RotaryEmbedding``, the rotation of the token position will be + performed before the softmax. Defaults to None. + with_cls_token (bool): Whether concatenating class token into image + tokens as transformer input. Defaults to True. + init_cfg (dict, optional): The Config for initialization. + Defaults to None. + """ + + def __init__(self, + embed_dims, + num_heads, + attn_drop=0., + proj_drop=0., + qkv_bias=True, + qk_scale=None, + proj_bias=True, + rope=None, + with_cls_token=True, + init_cfg=None): + super(AttentionWithRoPE, self).__init__(init_cfg=init_cfg) + + self.embed_dims = embed_dims + self.num_heads = num_heads + self.head_dims = embed_dims // num_heads + self.scale = qk_scale or self.head_dims**-0.5 + self.qkv = nn.Linear(embed_dims, embed_dims * 3, bias=qkv_bias) + + self.attn_drop = nn.Dropout(attn_drop) + self.proj = nn.Linear(embed_dims, embed_dims, bias=proj_bias) + self.proj_drop = nn.Dropout(proj_drop) + + self.with_cls_token = with_cls_token + + self.rope = rope + + def forward(self, x, patch_resolution): + B, N, _ = x.shape + + qkv = self.qkv(x) + qkv = qkv.reshape(B, N, 3, self.num_heads, -1).permute(2, 0, 3, 1, 4) + q, k, v = qkv.unbind(dim=0) + + if self.rope: + if self.with_cls_token: + q_t = q[:, :, 1:, :] + ro_q_t = self.rope(q_t, patch_resolution) + q = torch.cat((q[:, :, :1, :], ro_q_t), -2).type_as(v) + + k_t = k[:, :, 1:, :] if self.with_cls_token else k + ro_k_t = self.rope(k_t, patch_resolution) + k = torch.cat((k[:, :, :1, :], ro_k_t), -2).type_as(v) + else: + q = self.rope(q, patch_resolution) + k = self.rope(k, patch_resolution) + + q = q * self.scale + + attn = (q @ k.transpose(-2, -1)) + attn = attn.softmax(dim=-1).type_as(x) + attn = self.attn_drop(attn) + + x = (attn @ v).transpose(1, 2).reshape(B, N, -1) + + x = self.proj(x) + x = self.proj_drop(x) + + return x + + +class EVA02EndcoderLayer(BaseModule): + """Implements one encoder EVA02EndcoderLayer in EVA02. + + Args: + embed_dims (int): The feature dimension + num_heads (int): Parallel attention heads + feedforward_channels (int): The hidden dimension of FFNs. + sub_ln (bool): Whether to add the sub layer normalization + in the attention module. Defaults to False. + attn_drop (float): Dropout rate of the dropout layer after the + attention calculation of query and key. Defaults to 0. + proj_drop (float): Dropout rate of the dropout layer after the + output projection. Defaults to 0. + qkv_bias (bool): enable bias for qkv if True. Defaults to True. + qk_scale (float, optional): Override default qk scale of + ``head_dim ** -0.5`` if set. Defaults to None. + proj_bias (bool): enable bias for projection in the attention module + if True. Defaults to True. + rope (:obj:`torch.nn.Module`, optional): RotaryEmbedding object + in the attention module. Defaults to None. + drop_rate (float): Dropout rate in the mlp module. Defaults to 0. + drop_path_rate (float): Stochastic depth rate. Defaults to 0. + norm_cfg (dict): Config dict for normalization layer. + Defaults to ``dict(type='LN')``. + init_cfg (dict, optional): Initialization config dict. + Defaults to None. 
+ """ + + def __init__(self, + embed_dims, + num_heads, + feedforward_channels, + sub_ln=False, + attn_drop=0., + proj_drop=0., + qkv_bias=False, + qk_scale=None, + proj_bias=True, + rope=None, + with_cls_token=True, + drop_rate=0., + drop_path_rate=0., + norm_cfg=dict(type='LN'), + init_cfg=None): + super(EVA02EndcoderLayer, self).__init__(init_cfg=init_cfg) + + self.norm1 = build_norm_layer(norm_cfg, embed_dims) + + self.attn = AttentionWithRoPE( + embed_dims=embed_dims, + num_heads=num_heads, + attn_drop=attn_drop, + proj_drop=proj_drop, + qkv_bias=qkv_bias, + qk_scale=qk_scale, + proj_bias=proj_bias, + rope=rope, + with_cls_token=with_cls_token) + + self.drop_path = build_dropout( + dict(type='DropPath', drop_prob=drop_path_rate)) + + self.norm2 = build_norm_layer(norm_cfg, embed_dims) + + if drop_rate > 0: + dropout_layer = dict(type='Dropout', drop_prob=drop_rate) + else: + dropout_layer = None + + if sub_ln: + ffn_norm = norm_cfg + else: + ffn_norm = None + + self.mlp = SwiGLUFFN( + embed_dims=embed_dims, + feedforward_channels=feedforward_channels, + dropout_layer=dropout_layer, + norm_cfg=ffn_norm, + add_identity=False, + ) + + def forward(self, x, patch_resolution): + inputs = x + x = self.norm1(x) + x = self.attn(x, patch_resolution) + x = self.drop_path(x) + x = inputs + x + + inputs = x + x = self.norm2(x) + x = self.mlp(x) + x = self.drop_path(x) + x = inputs + x + + return x + + +@MODELS.register_module() +class ViTEVA02(VisionTransformer): + """EVA02 Vision Transformer. + + A PyTorch implement of : `EVA-02: A Visual Representation for Neon Genesis + `_ + + Args: + arch (str | dict): Vision Transformer architecture. If use string, + choose from 'tiny', 'small', 'base', 'large'. If use dict, + it should have below keys: + + - **embed_dims** (int): The dimensions of embedding. + - **num_layers** (int): The number of transformer encoder layers. + - **num_heads** (int): The number of heads in attention modules. + - **mlp_ratio** (float): The ratio of the mlp module. + + Defaults to 'tiny'. + + sub_ln (bool): Whether to add the sub layer normalization in swiglu. + Defaults to False. + drop_rate (float): Probability of an element to be zeroed in the + mlp module. Defaults to 0. + attn_drop_rate (float): Probability of an element to be zeroed after + the softmax in the attention. Defaults to 0. + proj_drop_rate (float): Probability of an element to be zeroed after + projection in the attention. Defaults to 0. + drop_path_rate (float): stochastic depth rate. Defaults to 0. + qkv_bias (bool): Whether to add bias for qkv in attention modules. + Defaults to True. + norm_cfg (dict): Config dict for normalization layer. + Defaults to ``dict(type='LN')``. + with_cls_token (bool): Whether concatenating class token into image + tokens as transformer input. Defaults to True. + layer_cfgs (Sequence | dict): Configs of each transformer layer in + encoder. Defaults to an empty dict. + **kwargs(dict, optional): Other args for Vision Transformer. 
+ """ + arch_zoo = { + **dict.fromkeys( + ['t', 'ti', 'tiny'], { + 'embed_dims': 192, + 'num_layers': 12, + 'num_heads': 3, + 'feedforward_channels': int(192 * 4 * 2 / 3) + }), + **dict.fromkeys( + ['s', 'small'], { + 'embed_dims': 384, + 'num_layers': 12, + 'num_heads': 6, + 'feedforward_channels': int(384 * 4 * 2 / 3) + }), + **dict.fromkeys( + ['b', 'base'], { + 'embed_dims': 768, + 'num_layers': 12, + 'num_heads': 12, + 'feedforward_channels': int(768 * 4 * 2 / 3) + }), + **dict.fromkeys( + ['l', 'large'], { + 'embed_dims': 1024, + 'num_layers': 24, + 'num_heads': 16, + 'feedforward_channels': int(1024 * 4 * 2 / 3) + }) + } + num_extra_tokens = 1 # class token + OUT_TYPES = {'raw', 'cls_token', 'featmap', 'avg_featmap'} + + def __init__(self, + arch='tiny', + sub_ln=False, + drop_rate=0., + attn_drop_rate=0., + proj_drop_rate=0., + drop_path_rate=0., + qkv_bias=True, + norm_cfg=dict(type='LN'), + with_cls_token=True, + layer_cfgs=dict(), + **kwargs): + # set essential args for Vision Transformer + kwargs.update( + arch=arch, + drop_rate=drop_rate, + drop_path_rate=drop_path_rate, + norm_cfg=norm_cfg, + with_cls_token=with_cls_token) + super(ViTEVA02, self).__init__(**kwargs) + + self.num_heads = self.arch_settings['num_heads'] + + # Set RoPE + head_dim = self.embed_dims // self.num_heads + self.rope = RotaryEmbeddingFast( + embed_dims=head_dim, patch_resolution=self.patch_resolution) + + # stochastic depth decay rule + dpr = np.linspace(0, drop_path_rate, self.num_layers) + self.layers = ModuleList() + if isinstance(layer_cfgs, dict): + layer_cfgs = [layer_cfgs] * self.num_layers + for i in range(self.num_layers): + _layer_cfg = dict( + embed_dims=self.embed_dims, + num_heads=self.num_heads, + feedforward_channels=self. + arch_settings['feedforward_channels'], + sub_ln=sub_ln, + norm_cfg=norm_cfg, + proj_drop=proj_drop_rate, + attn_drop=attn_drop_rate, + drop_rate=drop_rate, + qkv_bias=qkv_bias, + rope=self.rope, + with_cls_token=with_cls_token, + drop_path_rate=dpr[i]) + _layer_cfg.update(layer_cfgs[i]) + self.layers.append(EVA02EndcoderLayer(**_layer_cfg)) + + def forward(self, x): + B = x.shape[0] + x, patch_resolution = self.patch_embed(x) + + if self.cls_token is not None: + # stole cls_tokens impl from Phil Wang, thanks + cls_tokens = self.cls_token.expand(B, -1, -1) + x = torch.cat((cls_tokens, x), dim=1) + + x = x + resize_pos_embed( + self.pos_embed, + self.patch_resolution, + patch_resolution, + mode=self.interpolate_mode, + num_extra_tokens=self.num_extra_tokens) + x = self.drop_after_pos(x) + + x = self.pre_norm(x) + + outs = [] + for i, layer in enumerate(self.layers): + x = layer(x, patch_resolution) + + if i == len(self.layers) - 1 and self.final_norm: + x = self.ln1(x) + + if i in self.out_indices: + outs.append(self._format_output(x, patch_resolution)) + + return tuple(outs) diff --git a/mmpretrain/models/backbones/vit_sam.py b/mmpretrain/models/backbones/vit_sam.py new file mode 100644 index 0000000000000000000000000000000000000000..3fbf6fd118b3619a9bbc13bbfe2c89826d78f08e --- /dev/null +++ b/mmpretrain/models/backbones/vit_sam.py @@ -0,0 +1,697 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+from typing import Optional, Sequence, Tuple + +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn.bricks.transformer import FFN, PatchEmbed +from mmengine.model import BaseModule, ModuleList +from mmengine.model.weight_init import trunc_normal_ + +from mmpretrain.registry import MODELS +from ..utils import LayerNorm2d, build_norm_layer, resize_pos_embed, to_2tuple +from .base_backbone import BaseBackbone + + +def window_partition(x: torch.Tensor, + window_size: int) -> Tuple[torch.Tensor, Tuple[int, int]]: + """Partition into non-overlapping windows with padding if needed. + + Borrowed from https://github.com/facebookresearch/segment-anything/ + + Args: + x (torch.Tensor): Input tokens with [B, H, W, C]. + window_size (int): Window size. + + Returns: + Tuple[torch.Tensor, Tuple[torch.Tensor, torch.Tensor]] + + - ``windows``: Windows after partition with + [B * num_windows, window_size, window_size, C]. + - ``(Hp, Wp)``: Padded height and width before partition + """ + B, H, W, C = x.shape + + pad_h = (window_size - H % window_size) % window_size + pad_w = (window_size - W % window_size) % window_size + if pad_h > 0 or pad_w > 0: + x = F.pad(x, (0, 0, 0, pad_w, 0, pad_h)) + Hp, Wp = H + pad_h, W + pad_w + + x = x.view(B, Hp // window_size, window_size, Wp // window_size, + window_size, C) + windows = x.permute(0, 1, 3, 2, 4, + 5).contiguous().view(-1, window_size, window_size, C) + return windows, (Hp, Wp) + + +def window_unpartition(windows: torch.Tensor, window_size: int, + pad_hw: Tuple[int, int], + hw: Tuple[int, int]) -> torch.Tensor: + """Window unpartition into original sequences and removing padding. + + Borrowed from https://github.com/facebookresearch/segment-anything/ + + Args: + x (torch.Tensor): Input tokens with + [B * num_windows, window_size, window_size, C]. + window_size (int): Window size. + pad_hw (tuple): Padded height and width (Hp, Wp). + hw (tuple): Original height and width (H, W) before padding. + + Returns: + torch.Tensor: Unpartitioned sequences with [B, H, W, C]. + """ + Hp, Wp = pad_hw + H, W = hw + B = windows.shape[0] // (Hp * Wp // window_size // window_size) + x = windows.view(B, Hp // window_size, Wp // window_size, window_size, + window_size, -1) + x = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(B, Hp, Wp, -1) + + if Hp > H or Wp > W: + x = x[:, :H, :W, :].contiguous() + return x + + +def get_rel_pos(q_size: int, k_size: int, + rel_pos: torch.Tensor) -> torch.Tensor: + """Get relative positional embeddings according to the relative positions + of query and key sizes. + + Borrowed from https://github.com/facebookresearch/segment-anything/ + + Args: + q_size (int): Size of query q. + k_size (int): Size of key k. + rel_pos (torch.Tensor): Relative position embeddings (L, C). + + Returns: + torch.Tensor: Extracted positional embeddings according to relative + positions. + """ + max_rel_dist = int(2 * max(q_size, k_size) - 1) + # Interpolate rel pos if needed. + if rel_pos.shape[0] != max_rel_dist: + # Interpolate rel pos. + rel_pos_resized = F.interpolate( + rel_pos.reshape(1, rel_pos.shape[0], -1).permute(0, 2, 1), + size=max_rel_dist, + mode='linear', + ) + rel_pos_resized = rel_pos_resized.reshape(-1, + max_rel_dist).permute(1, 0) + else: + rel_pos_resized = rel_pos + + # Scale the coords with short length if shapes for q and k are different. 
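+    # After the shift below, every (query, key) pair maps to a non-negative
+    # row index of ``rel_pos_resized``, so the lookup returns a tensor of
+    # shape (q_size, k_size, C).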
+ q_coords = torch.arange(q_size)[:, None] * max(k_size / q_size, 1.0) + k_coords = torch.arange(k_size)[None, :] * max(q_size / k_size, 1.0) + relative_coords = (q_coords - + k_coords) + (k_size - 1) * max(q_size / k_size, 1.0) + + return rel_pos_resized[relative_coords.long()] + + +def add_decomposed_rel_pos( + attn: torch.Tensor, + q: torch.Tensor, + rel_pos_h: torch.Tensor, + rel_pos_w: torch.Tensor, + q_size: Tuple[int, int], + k_size: Tuple[int, int], +) -> torch.Tensor: + """Borrowed from https://github.com/facebookresearch/segment-anything/ + + Calculate decomposed Relative Positional Embeddings from :paper:`mvitv2`. + https://github.com/facebookresearch/mvit/blob/19786631e330df9f3622e5402b4a419a263a2c80/mvit/models/attention.py + + Args: + attn (torch.Tensor): Attention map. + q (torch.Tensor): Query q in the attention layer with shape + (B, q_h * q_w, C). + rel_pos_h (torch.Tensor): Relative position embeddings (Lh, C) for + height axis. + rel_pos_w (torch.Tensor): Relative position embeddings (Lw, C) for + width axis. + q_size (tuple): Spatial sequence size of query q with (q_h, q_w). + k_size (tuple): Spatial sequence size of key k with (k_h, k_w). + + Returns: + torch.Tensor: Attention map with added relative positional embeddings. + """ + q_h, q_w = q_size + k_h, k_w = k_size + Rh = get_rel_pos(q_h, k_h, rel_pos_h) + Rw = get_rel_pos(q_w, k_w, rel_pos_w) + + B, _, dim = q.shape + r_q = q.reshape(B, q_h, q_w, dim) + rel_h = torch.einsum('bhwc,hkc->bhwk', r_q, Rh) + rel_w = torch.einsum('bhwc,wkc->bhwk', r_q, Rw) + + attn = (attn.view(B, q_h, q_w, k_h, k_w) + rel_h[:, :, :, :, None] + + rel_w[:, :, :, None, :]).view(B, q_h * q_w, k_h * k_w) + + return attn + + +class Attention(nn.Module): + """Multi-head Attention block with relative position embeddings. + + Borrowed from https://github.com/facebookresearch/segment-anything/ + + Args: + embed_dims (int): The embedding dimension. + num_heads (int): Parallel attention heads. + qkv_bias (bool): If True, add a learnable bias to q, k, v. + Defaults to True. + use_rel_pos (bool):Whether to use relative position embedding. + Defaults to False. + input_size (int, optional): Input resolution for calculating the + relative positional parameter size. Defaults to None. + """ + + def __init__( + self, + embed_dims: int, + num_heads: int = 8, + qkv_bias: bool = True, + use_rel_pos: bool = False, + input_size: Optional[Tuple[int, int]] = None, + ) -> None: + super().__init__() + self.num_heads = num_heads + head_embed_dims = embed_dims // num_heads + self.scale = head_embed_dims**-0.5 + + self.qkv = nn.Linear(embed_dims, embed_dims * 3, bias=qkv_bias) + self.proj = nn.Linear(embed_dims, embed_dims) + + self.use_rel_pos = use_rel_pos + if self.use_rel_pos: + assert (input_size is not None), \ + 'Input size must be provided if using relative position embed.' 
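+            # There are (2 * size - 1) possible offsets along each axis, and
+            # one embedding row is kept per offset and per axis (the
+            # decomposed scheme used by ``add_decomposed_rel_pos`` above).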
+ # initialize relative positional embeddings + self.rel_pos_h = nn.Parameter( + torch.zeros(2 * input_size[0] - 1, head_embed_dims)) + self.rel_pos_w = nn.Parameter( + torch.zeros(2 * input_size[1] - 1, head_embed_dims)) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + B, H, W, _ = x.shape + # qkv with shape (3, B, nHead, H * W, C) + qkv = self.qkv(x).reshape(B, H * W, 3, self.num_heads, + -1).permute(2, 0, 3, 1, 4) + # q, k, v with shape (B * nHead, H * W, C) + q, k, v = qkv.reshape(3, B * self.num_heads, H * W, -1).unbind(0) + + attn = (q * self.scale) @ k.transpose(-2, -1) + + if self.use_rel_pos: + attn = add_decomposed_rel_pos(attn, q, self.rel_pos_h, + self.rel_pos_w, (H, W), (H, W)) + + attn = attn.softmax(dim=-1) + x = (attn @ v).view(B, self.num_heads, H, W, + -1).permute(0, 2, 3, 1, 4).reshape(B, H, W, -1) + x = self.proj(x) + + return x + + +class TransformerEncoderLayer(BaseModule): + """Encoder layer with window attention in Vision Transformer. + + Args: + embed_dims (int): The feature dimension + num_heads (int): Parallel attention heads + feedforward_channels (int): The hidden dimension for FFNs + drop_rate (float): Probability of an element to be zeroed + after the feed forward layer. Defaults to 0. + drop_path_rate (float): Stochastic depth rate. Defaults to 0. + num_fcs (int): The number of fully-connected layers for FFNs. + Defaults to 2. + qkv_bias (bool): enable bias for qkv if True. Defaults to True. + act_cfg (dict): The activation config for FFNs. + Defaluts to ``dict(type='GELU')``. + norm_cfg (dict): Config dict for normalization layer. + Defaults to ``dict(type='LN')``. + use_rel_pos (bool):Whether to use relative position embedding. + Defaults to False. + window_size (int): Window size for window attention. Defaults to 0. + input_size (int, optional): Input resolution for calculating the + relative positional parameter size. Defaults to None. + init_cfg (dict, optional): Initialization config dict. + Defaults to None. 
+ """ + + def __init__(self, + embed_dims: int, + num_heads: int, + feedforward_channels: int, + drop_rate: float = 0., + drop_path_rate: float = 0., + num_fcs: int = 2, + qkv_bias: bool = True, + act_cfg: dict = dict(type='GELU'), + norm_cfg: dict = dict(type='LN'), + use_rel_pos: bool = False, + window_size: int = 0, + input_size: Optional[Tuple[int, int]] = None, + init_cfg=None): + super().__init__(init_cfg=init_cfg) + + self.embed_dims = embed_dims + self.window_size = window_size + + self.ln1 = build_norm_layer(norm_cfg, self.embed_dims) + + self.attn = Attention( + embed_dims=embed_dims, + num_heads=num_heads, + qkv_bias=qkv_bias, + use_rel_pos=use_rel_pos, + input_size=input_size if window_size == 0 else + (window_size, window_size), + ) + + self.ln2 = build_norm_layer(norm_cfg, self.embed_dims) + + self.ffn = FFN( + embed_dims=embed_dims, + feedforward_channels=feedforward_channels, + num_fcs=num_fcs, + ffn_drop=drop_rate, + dropout_layer=dict(type='DropPath', drop_prob=drop_path_rate), + act_cfg=act_cfg) + + @property + def norm1(self): + return self.ln1 + + @property + def norm2(self): + return self.ln2 + + def forward(self, x): + shortcut = x + x = self.ln1(x) + # Window partition + if self.window_size > 0: + H, W = x.shape[1], x.shape[2] + x, pad_hw = window_partition(x, self.window_size) + + x = self.attn(x) + # Reverse window partition + if self.window_size > 0: + x = window_unpartition(x, self.window_size, pad_hw, (H, W)) + x = shortcut + x + + x = self.ffn(self.ln2(x), identity=x) + return x + + +@MODELS.register_module() +class ViTSAM(BaseBackbone): + """Vision Transformer as image encoder used in SAM. + + A PyTorch implement of backbone: `Segment Anything + `_ + + Args: + arch (str | dict): Vision Transformer architecture. If use string, + choose from 'base', 'large', 'huge'. If use dict, it should have + below keys: + + - **embed_dims** (int): The dimensions of embedding. + - **num_layers** (int): The number of transformer encoder layers. + - **num_heads** (int): The number of heads in attention modules. + - **feedforward_channels** (int): The hidden dimensions in + feedforward modules. + - **global_attn_indexes** (int): The index of layers with global + attention. + + Defaults to 'base'. + img_size (int | tuple): The expected input image shape. Because we + support dynamic input shape, just set the argument to the most + common input image shape. Defaults to 224. + patch_size (int | tuple): The patch size in patch embedding. + Defaults to 16. + in_channels (int): The num of input channels. Defaults to 3. + out_channels (int): The num of output channels, if equal to 0, the + channel reduction layer is disabled. Defaults to 256. + out_indices (Sequence | int): Output from which stages. + Defaults to -1, means the last stage. + out_type (str): The type of output features. Please choose from + + - ``"raw"`` or ``"featmap"``: The feature map tensor from the + patch tokens with shape (B, C, H, W). + - ``"avg_featmap"``: The global averaged feature map tensor + with shape (B, C). + + Defaults to ``"raw"``. + drop_rate (float): Probability of an element to be zeroed. + Defaults to 0. + drop_path_rate (float): stochastic depth rate. Defaults to 0. + qkv_bias (bool): Whether to add bias for qkv in attention modules. + Defaults to True. + use_abs_pos (bool): Whether to use absolute position embedding. + Defaults to True. + use_rel_pos (bool):Whether to use relative position embedding. + Defaults to True. + window_size (int): Window size for window attention. Defaults to 14. 
+ norm_cfg (dict): Config dict for normalization layer. + Defaults to ``dict(type='LN')``. + frozen_stages (int): Stages to be frozen (stop grad and set eval mode). + -1 means not freezing any parameters. Defaults to -1. + interpolate_mode (str): Select the interpolate mode for position + embeding vector resize. Defaults to "bicubic". + patch_cfg (dict): Configs of patch embeding. Defaults to an empty dict. + layer_cfgs (Sequence | dict): Configs of each transformer layer in + encoder. Defaults to an empty dict. + init_cfg (dict, optional): Initialization config dict. + Defaults to None. + """ + arch_zoo = { + **dict.fromkeys( + ['b', 'base'], { + 'embed_dims': 768, + 'num_layers': 12, + 'num_heads': 12, + 'feedforward_channels': 3072, + 'global_attn_indexes': [2, 5, 8, 11] + }), + **dict.fromkeys( + ['l', 'large'], { + 'embed_dims': 1024, + 'num_layers': 24, + 'num_heads': 16, + 'feedforward_channels': 4096, + 'global_attn_indexes': [5, 11, 17, 23] + }), + **dict.fromkeys( + ['h', 'huge'], { + 'embed_dims': 1280, + 'num_layers': 32, + 'num_heads': 16, + 'feedforward_channels': 5120, + 'global_attn_indexes': [7, 15, 23, 31] + }), + } + OUT_TYPES = {'raw', 'featmap', 'avg_featmap'} + + def __init__(self, + arch: str = 'base', + img_size: int = 224, + patch_size: int = 16, + in_channels: int = 3, + out_channels: int = 256, + out_indices: int = -1, + out_type: str = 'raw', + drop_rate: float = 0., + drop_path_rate: float = 0., + qkv_bias: bool = True, + use_abs_pos: bool = True, + use_rel_pos: bool = True, + window_size: int = 14, + norm_cfg: dict = dict(type='LN', eps=1e-6), + frozen_stages: int = -1, + interpolate_mode: str = 'bicubic', + patch_cfg: dict = dict(), + layer_cfgs: dict = dict(), + init_cfg: Optional[dict] = None): + super().__init__(init_cfg) + + if isinstance(arch, str): + arch = arch.lower() + assert arch in set(self.arch_zoo), \ + f'Arch {arch} is not in default archs {set(self.arch_zoo)}' + self.arch_settings = self.arch_zoo[arch] + else: + essential_keys = { + 'embed_dims', 'num_layers', 'num_heads', 'feedforward_channels' + } + assert isinstance(arch, dict) and essential_keys <= set(arch), \ + f'Custom arch needs a dict with keys {essential_keys}' + self.arch_settings = arch + + self.embed_dims = self.arch_settings['embed_dims'] + self.num_layers = self.arch_settings['num_layers'] + self.global_attn_indexes = self.arch_settings['global_attn_indexes'] + self.img_size = to_2tuple(img_size) + + # Set patch embedding + _patch_cfg = dict( + in_channels=in_channels, + input_size=img_size, + embed_dims=self.embed_dims, + conv_type='Conv2d', + kernel_size=patch_size, + stride=patch_size, + ) + _patch_cfg.update(patch_cfg) + self.patch_embed = PatchEmbed(**_patch_cfg) + self.patch_resolution = self.patch_embed.init_out_size + + # Set out type + if out_type not in self.OUT_TYPES: + raise ValueError(f'Unsupported `out_type` {out_type}, please ' + f'choose from {self.OUT_TYPES}') + self.out_type = out_type + + self.use_abs_pos = use_abs_pos + self.interpolate_mode = interpolate_mode + if use_abs_pos: + # Set position embedding + self.pos_embed = nn.Parameter( + torch.zeros(1, *self.patch_resolution, self.embed_dims)) + self.drop_after_pos = nn.Dropout(p=drop_rate) + self._register_load_state_dict_pre_hook(self._prepare_pos_embed) + + if use_rel_pos: + self._register_load_state_dict_pre_hook( + self._prepare_relative_position) + + if isinstance(out_indices, int): + out_indices = [out_indices] + assert isinstance(out_indices, Sequence), \ + f'"out_indices" must by a sequence or int, ' 
\ + f'get {type(out_indices)} instead.' + for i, index in enumerate(out_indices): + if index < 0: + out_indices[i] = self.num_layers + index + assert 0 <= out_indices[i] <= self.num_layers, \ + f'Invalid out_indices {index}' + self.out_indices = out_indices + + # stochastic depth decay rule + dpr = np.linspace(0, drop_path_rate, self.num_layers) + + self.layers = ModuleList() + if isinstance(layer_cfgs, dict): + layer_cfgs = [layer_cfgs] * self.num_layers + for i in range(self.num_layers): + _layer_cfg = dict( + embed_dims=self.embed_dims, + num_heads=self.arch_settings['num_heads'], + feedforward_channels=self. + arch_settings['feedforward_channels'], + drop_rate=drop_rate, + drop_path_rate=dpr[i], + qkv_bias=qkv_bias, + window_size=window_size + if i not in self.global_attn_indexes else 0, + input_size=self.patch_resolution, + use_rel_pos=use_rel_pos, + norm_cfg=norm_cfg) + _layer_cfg.update(layer_cfgs[i]) + self.layers.append(TransformerEncoderLayer(**_layer_cfg)) + + self.out_channels = out_channels + if self.out_channels > 0: + self.channel_reduction = nn.Sequential( + nn.Conv2d( + self.embed_dims, + out_channels, + kernel_size=1, + bias=False, + ), + LayerNorm2d(out_channels, eps=1e-6), + nn.Conv2d( + out_channels, + out_channels, + kernel_size=3, + padding=1, + bias=False, + ), + LayerNorm2d(out_channels, eps=1e-6), + ) + + # freeze stages only when self.frozen_stages > 0 + self.frozen_stages = frozen_stages + if self.frozen_stages > 0: + self._freeze_stages() + + def init_weights(self): + super().init_weights() + + if not (isinstance(self.init_cfg, dict) + and self.init_cfg['type'] == 'Pretrained'): + if self.pos_embed is not None: + trunc_normal_(self.pos_embed, std=0.02) + + def _freeze_stages(self): + # freeze position embedding + if self.pos_embed is not None: + self.pos_embed.requires_grad = False + # set dropout to eval model + self.drop_after_pos.eval() + # freeze patch embedding + self.patch_embed.eval() + for param in self.patch_embed.parameters(): + param.requires_grad = False + + # freeze layers + for i in range(1, self.frozen_stages + 1): + m = self.layers[i - 1] + m.eval() + for param in m.parameters(): + param.requires_grad = False + + # freeze channel_reduction module + if self.frozen_stages == self.num_layers and self.out_channels > 0: + m = self.channel_reduction + m.eval() + for param in m.parameters(): + param.requires_grad = False + + def forward(self, x: torch.Tensor) -> Tuple[torch.Tensor]: + B = x.shape[0] + x, patch_resolution = self.patch_embed(x) + x = x.view(B, patch_resolution[0], patch_resolution[1], + self.embed_dims) + + if self.use_abs_pos: + # 'resize_pos_embed' only supports 'pos_embed' with ndim==3, but + # in ViTSAM, the 'pos_embed' has 4 dimensions (1, H, W, C), so it + # is flattened. Besides, ViTSAM doesn't have any extra token. 
+ resized_pos_embed = resize_pos_embed( + self.pos_embed.flatten(1, 2), + self.patch_resolution, + patch_resolution, + mode=self.interpolate_mode, + num_extra_tokens=0) + x = x + resized_pos_embed.view(1, *patch_resolution, + self.embed_dims) + x = self.drop_after_pos(x) + + outs = [] + for i, layer in enumerate(self.layers): + x = layer(x) + + if i in self.out_indices: + # (B, H, W, C) -> (B, C, H, W) + x = x.permute(0, 3, 1, 2) + + if self.out_channels > 0: + x = self.channel_reduction(x) + outs.append(self._format_output(x)) + + return tuple(outs) + + def _format_output(self, x) -> torch.Tensor: + if self.out_type == 'raw' or self.out_type == 'featmap': + return x + elif self.out_type == 'avg_featmap': + # (B, C, H, W) -> (B, C, N) -> (B, N, C) + x = x.flatten(2).permute(0, 2, 1) + return x.mean(dim=1) + + def _prepare_pos_embed(self, state_dict, prefix, *args, **kwargs): + name = prefix + 'pos_embed' + if name not in state_dict.keys(): + return + + ckpt_pos_embed_shape = state_dict[name].shape + if self.pos_embed.shape != ckpt_pos_embed_shape: + from mmengine.logging import MMLogger + logger = MMLogger.get_current_instance() + logger.info( + f'Resize the pos_embed shape from {ckpt_pos_embed_shape} ' + f'to {self.pos_embed.shape}.') + + ckpt_pos_embed_shape = ckpt_pos_embed_shape[1:3] + pos_embed_shape = self.patch_embed.init_out_size + + flattened_pos_embed = state_dict[name].flatten(1, 2) + resized_pos_embed = resize_pos_embed(flattened_pos_embed, + ckpt_pos_embed_shape, + pos_embed_shape, + self.interpolate_mode, 0) + state_dict[name] = resized_pos_embed.view(1, *pos_embed_shape, + self.embed_dims) + + def _prepare_relative_position(self, state_dict, prefix, *args, **kwargs): + state_dict_model = self.state_dict() + all_keys = list(state_dict_model.keys()) + for key in all_keys: + if 'rel_pos_' in key: + ckpt_key = prefix + key + if ckpt_key not in state_dict: + continue + relative_position_pretrained = state_dict[ckpt_key] + relative_position_current = state_dict_model[key] + L1, _ = relative_position_pretrained.size() + L2, _ = relative_position_current.size() + if L1 != L2: + new_rel_pos = F.interpolate( + relative_position_pretrained.reshape(1, L1, + -1).permute( + 0, 2, 1), + size=L2, + mode='linear', + ) + new_rel_pos = new_rel_pos.reshape(-1, L2).permute(1, 0) + from mmengine.logging import MMLogger + logger = MMLogger.get_current_instance() + logger.info(f'Resize the {ckpt_key} from ' + f'{state_dict[ckpt_key].shape} to ' + f'{new_rel_pos.shape}') + state_dict[ckpt_key] = new_rel_pos + + def get_layer_depth(self, param_name: str, prefix: str = ''): + """Get the layer-wise depth of a parameter. + + Args: + param_name (str): The name of the parameter. + prefix (str): The prefix for the parameter. + Defaults to an empty string. + + Returns: + Tuple[int, int]: The layer-wise depth and the num of layers. 
+ + Note: + The first depth is the stem module (``layer_depth=0``), and the + last depth is the subsequent module (``layer_depth=num_layers-1``) + """ + num_layers = self.num_layers + 2 + + if not param_name.startswith(prefix): + # For subsequent module like head + return num_layers - 1, num_layers + + param_name = param_name[len(prefix):] + + if param_name in ('cls_token', 'pos_embed'): + layer_depth = 0 + elif param_name.startswith('patch_embed'): + layer_depth = 0 + elif param_name.startswith('layers'): + layer_id = int(param_name.split('.')[1]) + layer_depth = layer_id + 1 + else: + layer_depth = num_layers - 1 + + return layer_depth, num_layers diff --git a/mmpretrain/models/backbones/xcit.py b/mmpretrain/models/backbones/xcit.py new file mode 100644 index 0000000000000000000000000000000000000000..392ebbedf457cc199b70afa1923ec0b698f7fd5b --- /dev/null +++ b/mmpretrain/models/backbones/xcit.py @@ -0,0 +1,770 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import math +from functools import partial +from typing import Optional, Sequence, Union + +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn.bricks import ConvModule, DropPath +from mmcv.cnn.bricks.transformer import FFN +from mmengine.model import BaseModule, Sequential +from mmengine.model.weight_init import trunc_normal_ +from mmengine.utils import digit_version + +from mmpretrain.registry import MODELS +from ..utils import build_norm_layer, to_2tuple +from .base_backbone import BaseBackbone + +if digit_version(torch.__version__) < digit_version('1.8.0'): + floor_div = torch.floor_divide +else: + floor_div = partial(torch.div, rounding_mode='floor') + + +class ClassAttntion(BaseModule): + """Class Attention Module. + + A PyTorch implementation of Class Attention Module introduced by: + `Going deeper with Image Transformers `_ + + taken from + https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/vision_transformer.py + with slight modifications to do CA + + Args: + dim (int): The feature dimension. + num_heads (int): Parallel attention heads. Defaults to 8. + qkv_bias (bool): enable bias for qkv if True. Defaults to False. + attn_drop (float): The drop out rate for attention output weights. + Defaults to 0. + proj_drop (float): The drop out rate for linear output weights. + Defaults to 0. + init_cfg (dict | list[dict], optional): Initialization config dict. + Defaults to None. + """ # noqa: E501 + + def __init__(self, + dim: int, + num_heads: int = 8, + qkv_bias: bool = False, + attn_drop: float = 0., + proj_drop: float = 0., + init_cfg=None): + + super(ClassAttntion, self).__init__(init_cfg=init_cfg) + self.num_heads = num_heads + head_dim = dim // num_heads + self.scale = head_dim**-0.5 + + self.q = nn.Linear(dim, dim, bias=qkv_bias) + self.k = nn.Linear(dim, dim, bias=qkv_bias) + self.v = nn.Linear(dim, dim, bias=qkv_bias) + + self.attn_drop = nn.Dropout(attn_drop) + self.proj = nn.Linear(dim, dim) + self.proj_drop = nn.Dropout(proj_drop) + + def forward(self, x): + B, N, C = x.shape + # We only need to calculate query of cls token. 
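+        # The single cls-token query attends over all N tokens, so the
+        # attention map has shape (B, num_heads, 1, N) and the output is one
+        # refined cls token of shape (B, 1, C).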
+ q = self.q(x[:, 0]).unsqueeze(1).reshape(B, 1, self.num_heads, + C // self.num_heads).permute( + 0, 2, 1, 3) + k = self.k(x).reshape(B, N, self.num_heads, + C // self.num_heads).permute(0, 2, 1, 3) + + q = q * self.scale + v = self.v(x).reshape(B, N, self.num_heads, + C // self.num_heads).permute(0, 2, 1, 3) + + attn = (q @ k.transpose(-2, -1)) + attn = attn.softmax(dim=-1) + attn = self.attn_drop(attn) + + x_cls = (attn @ v).transpose(1, 2).reshape(B, 1, C) + x_cls = self.proj(x_cls) + x_cls = self.proj_drop(x_cls) + + return x_cls + + +class PositionalEncodingFourier(BaseModule): + """Positional Encoding using a fourier kernel. + + A PyTorch implementation of Positional Encoding relying on + a fourier kernel introduced by: + `Attention is all you Need `_ + + Based on the `official XCiT code + `_ + + Args: + hidden_dim (int): The hidden feature dimension. Defaults to 32. + dim (int): The output feature dimension. Defaults to 768. + temperature (int): A control variable for position encoding. + Defaults to 10000. + init_cfg (dict | list[dict], optional): Initialization config dict. + Defaults to None. + """ + + def __init__(self, + hidden_dim: int = 32, + dim: int = 768, + temperature: int = 10000, + init_cfg=None): + super(PositionalEncodingFourier, self).__init__(init_cfg=init_cfg) + + self.token_projection = ConvModule( + in_channels=hidden_dim * 2, + out_channels=dim, + kernel_size=1, + conv_cfg=None, + norm_cfg=None, + act_cfg=None) + self.scale = 2 * math.pi + self.temperature = temperature + self.hidden_dim = hidden_dim + self.dim = dim + self.eps = 1e-6 + + def forward(self, B: int, H: int, W: int): + device = self.token_projection.conv.weight.device + y_embed = torch.arange( + 1, H + 1, device=device).unsqueeze(1).repeat(1, 1, W).float() + x_embed = torch.arange(1, W + 1, device=device).repeat(1, H, 1).float() + y_embed = y_embed / (y_embed[:, -1:, :] + self.eps) * self.scale + x_embed = x_embed / (x_embed[:, :, -1:] + self.eps) * self.scale + + dim_t = torch.arange(self.hidden_dim, device=device).float() + dim_t = floor_div(dim_t, 2) + dim_t = self.temperature**(2 * dim_t / self.hidden_dim) + + pos_x = x_embed[:, :, :, None] / dim_t + pos_y = y_embed[:, :, :, None] / dim_t + pos_x = torch.stack( + [pos_x[:, :, :, 0::2].sin(), pos_x[:, :, :, 1::2].cos()], + dim=4).flatten(3) + pos_y = torch.stack( + [pos_y[:, :, :, 0::2].sin(), pos_y[:, :, :, 1::2].cos()], + dim=4).flatten(3) + pos = torch.cat((pos_y, pos_x), dim=3).permute(0, 3, 1, 2) + pos = self.token_projection(pos) + return pos.repeat(B, 1, 1, 1) # (B, C, H, W) + + +class ConvPatchEmbed(BaseModule): + """Patch Embedding using multiple convolution layers. + + Args: + img_size (int, tuple): input image size. + Defaults to 224, means the size is 224*224. + patch_size (int): The patch size in conv patch embedding. + Defaults to 16. + in_channels (int): The input channels of this module. + Defaults to 3. + embed_dims (int): The feature dimension + norm_cfg (dict): Config dict for normalization layer. + Defaults to ``dict(type='BN')``. + act_cfg (dict): Config dict for activation layer. + Defaults to ``dict(type='GELU')``. + init_cfg (dict | list[dict], optional): Initialization config dict. + Defaults to None. 
+ """ + + def __init__(self, + img_size: Union[int, tuple] = 224, + patch_size: int = 16, + in_channels: int = 3, + embed_dims: int = 768, + norm_cfg=dict(type='BN'), + act_cfg=dict(type='GELU'), + init_cfg=None): + super(ConvPatchEmbed, self).__init__(init_cfg=init_cfg) + img_size = to_2tuple(img_size) + num_patches = (img_size[1] // patch_size) * (img_size[0] // patch_size) + self.img_size = img_size + self.patch_size = patch_size + self.num_patches = num_patches + + conv = partial( + ConvModule, + kernel_size=3, + stride=2, + padding=1, + norm_cfg=norm_cfg, + act_cfg=act_cfg, + ) + + layer = [] + if patch_size == 16: + layer.append( + conv(in_channels=in_channels, out_channels=embed_dims // 8)) + layer.append( + conv( + in_channels=embed_dims // 8, out_channels=embed_dims // 4)) + elif patch_size == 8: + layer.append( + conv(in_channels=in_channels, out_channels=embed_dims // 4)) + else: + raise ValueError('For patch embedding, the patch size must be 16 ' + f'or 8, but get patch size {self.patch_size}.') + + layer.append( + conv(in_channels=embed_dims // 4, out_channels=embed_dims // 2)) + layer.append( + conv( + in_channels=embed_dims // 2, + out_channels=embed_dims, + act_cfg=None, + )) + + self.proj = Sequential(*layer) + + def forward(self, x: torch.Tensor): + x = self.proj(x) + Hp, Wp = x.shape[2], x.shape[3] + x = x.flatten(2).transpose(1, 2) # (B, N, C) + return x, (Hp, Wp) + + +class ClassAttentionBlock(BaseModule): + """Transformer block using Class Attention. + + Args: + dim (int): The feature dimension. + num_heads (int): Parallel attention heads. + mlp_ratio (float): The hidden dimension ratio for FFN. + Defaults to 4. + qkv_bias (bool): enable bias for qkv if True. Defaults to False. + drop (float): Probability of an element to be zeroed + after the feed forward layer. Defaults to 0. + attn_drop (float): The drop out rate for attention output weights. + Defaults to 0. + drop_path (float): Stochastic depth rate. Defaults to 0. + layer_scale_init_value (float): The initial value for layer scale. + Defaults to 1. + tokens_norm (bool): Whether to normalize all tokens or just the + cls_token in the CA. Defaults to False. + norm_cfg (dict): Config dict for normalization layer. + Defaults to ``dict(type='LN', eps=1e-6)``. + act_cfg (dict): Config dict for activation layer. + Defaults to ``dict(type='GELU')``. + init_cfg (dict | list[dict], optional): Initialization config dict. + Defaults to None. + """ + + def __init__(self, + dim: int, + num_heads: int, + mlp_ratio: float = 4., + qkv_bias: bool = False, + drop=0., + attn_drop=0., + drop_path=0., + layer_scale_init_value=1., + tokens_norm=False, + norm_cfg=dict(type='LN', eps=1e-6), + act_cfg=dict(type='GELU'), + init_cfg=None): + + super(ClassAttentionBlock, self).__init__(init_cfg=init_cfg) + + self.norm1 = build_norm_layer(norm_cfg, dim) + + self.attn = ClassAttntion( + dim, + num_heads=num_heads, + qkv_bias=qkv_bias, + attn_drop=attn_drop, + proj_drop=drop, + ) + + self.drop_path = DropPath( + drop_path) if drop_path > 0. 
else nn.Identity() + + self.norm2 = build_norm_layer(norm_cfg, dim) + + self.ffn = FFN( + embed_dims=dim, + feedforward_channels=int(dim * mlp_ratio), + act_cfg=act_cfg, + ffn_drop=drop, + ) + + if layer_scale_init_value > 0: + self.gamma1 = nn.Parameter(layer_scale_init_value * + torch.ones(dim)) + self.gamma2 = nn.Parameter(layer_scale_init_value * + torch.ones(dim)) + else: + self.gamma1, self.gamma2 = 1.0, 1.0 + + # See https://github.com/rwightman/pytorch-image-models/pull/747#issuecomment-877795721 # noqa: E501 + self.tokens_norm = tokens_norm + + def forward(self, x): + x_norm1 = self.norm1(x) + x_attn = torch.cat([self.attn(x_norm1), x_norm1[:, 1:]], dim=1) + x = x + self.drop_path(self.gamma1 * x_attn) + if self.tokens_norm: + x = self.norm2(x) + else: + x = torch.cat([self.norm2(x[:, 0:1]), x[:, 1:]], dim=1) + x_res = x + cls_token = x[:, 0:1] + cls_token = self.gamma2 * self.ffn(cls_token, identity=0) + x = torch.cat([cls_token, x[:, 1:]], dim=1) + x = x_res + self.drop_path(x) + return x + + +class LPI(BaseModule): + """Local Patch Interaction module. + + A PyTorch implementation of Local Patch Interaction module + as in XCiT introduced by `XCiT: Cross-Covariance Image Transformers + `_ + + Local Patch Interaction module that allows explicit communication between + tokens in 3x3 windows to augment the implicit communication performed by + the block diagonal scatter attention. Implemented using 2 layers of + separable 3x3 convolutions with GeLU and BatchNorm2d + + Args: + in_features (int): The input channels. + out_features (int, optional): The output channels. Defaults to None. + kernel_size (int): The kernel_size in ConvModule. Defaults to 3. + norm_cfg (dict): Config dict for normalization layer. + Defaults to ``dict(type='BN')``. + act_cfg (dict): Config dict for activation layer. + Defaults to ``dict(type='GELU')``. + init_cfg (dict | list[dict], optional): Initialization config dict. + Defaults to None. + """ + + def __init__(self, + in_features: int, + out_features: Optional[int] = None, + kernel_size: int = 3, + norm_cfg=dict(type='BN'), + act_cfg=dict(type='GELU'), + init_cfg=None): + super(LPI, self).__init__(init_cfg=init_cfg) + + out_features = out_features or in_features + padding = kernel_size // 2 + + self.conv1 = ConvModule( + in_channels=in_features, + out_channels=in_features, + kernel_size=kernel_size, + padding=padding, + groups=in_features, + bias=True, + norm_cfg=norm_cfg, + act_cfg=act_cfg, + order=('conv', 'act', 'norm')) + + self.conv2 = ConvModule( + in_channels=in_features, + out_channels=out_features, + kernel_size=kernel_size, + padding=padding, + groups=out_features, + norm_cfg=None, + act_cfg=None) + + def forward(self, x: torch.Tensor, H: int, W: int) -> torch.Tensor: + B, N, C = x.shape + x = x.permute(0, 2, 1).reshape(B, C, H, W) + x = self.conv1(x) + x = self.conv2(x) + x = x.reshape(B, C, N).permute(0, 2, 1) + return x + + +class XCA(BaseModule): + r"""Cross-Covariance Attention module. + + A PyTorch implementation of Cross-Covariance Attention module + as in XCiT introduced by `XCiT: Cross-Covariance Image Transformers + `_ + + In Cross-Covariance Attention (XCA), the channels are updated using a + weighted sum. The weights are obtained from the (softmax normalized) + Cross-covariance matrix :math:`(Q^T \cdot K \in d_h \times d_h)` + + Args: + dim (int): The feature dimension. + num_heads (int): Parallel attention heads. Defaults to 8. + qkv_bias (bool): enable bias for qkv if True. Defaults to False. 
+ attn_drop (float): The drop out rate for attention output weights. + Defaults to 0. + proj_drop (float): The drop out rate for linear output weights. + Defaults to 0. + init_cfg (dict | list[dict], optional): Initialization config dict. + Defaults to None. + """ + + def __init__(self, + dim: int, + num_heads: int = 8, + qkv_bias: bool = False, + attn_drop: float = 0., + proj_drop: float = 0., + init_cfg=None): + super(XCA, self).__init__(init_cfg=init_cfg) + self.num_heads = num_heads + self.temperature = nn.Parameter(torch.ones(num_heads, 1, 1)) + self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias) + self.attn_drop = nn.Dropout(attn_drop) + self.proj = nn.Linear(dim, dim) + self.proj_drop = nn.Dropout(proj_drop) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + B, N, C = x.shape + # (qkv, B, num_heads, channels per head, N) + qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, + C // self.num_heads).permute(2, 0, 3, 4, 1) + q, k, v = qkv.unbind(0) + + # Paper section 3.2 l2-Normalization and temperature scaling + q = F.normalize(q, dim=-1) + k = F.normalize(k, dim=-1) + attn = (q @ k.transpose(-2, -1)) * self.temperature + attn = attn.softmax(dim=-1) + attn = self.attn_drop(attn) + + # (B, num_heads, C', N) -> (B, N, num_heads, C') -> (B, N C) + x = (attn @ v).permute(0, 3, 1, 2).reshape(B, N, C) + x = self.proj(x) + x = self.proj_drop(x) + return x + + +class XCABlock(BaseModule): + """Transformer block using XCA. + + Args: + dim (int): The feature dimension. + num_heads (int): Parallel attention heads. + mlp_ratio (float): The hidden dimension ratio for FFNs. + Defaults to 4. + qkv_bias (bool): enable bias for qkv if True. Defaults to False. + drop (float): Probability of an element to be zeroed + after the feed forward layer. Defaults to 0. + attn_drop (float): The drop out rate for attention output weights. + Defaults to 0. + drop_path (float): Stochastic depth rate. Defaults to 0. + layer_scale_init_value (float): The initial value for layer scale. + Defaults to 1. + bn_norm_cfg (dict): Config dict for batchnorm in LPI and + ConvPatchEmbed. Defaults to ``dict(type='BN')``. + norm_cfg (dict): Config dict for normalization layer. + Defaults to ``dict(type='LN', eps=1e-6)``. + act_cfg (dict): Config dict for activation layer. + Defaults to ``dict(type='GELU')``. + init_cfg (dict | list[dict], optional): Initialization config dict. + """ + + def __init__(self, + dim: int, + num_heads: int, + mlp_ratio: float = 4., + qkv_bias: bool = False, + drop: float = 0., + attn_drop: float = 0., + drop_path: float = 0., + layer_scale_init_value: float = 1., + bn_norm_cfg=dict(type='BN'), + norm_cfg=dict(type='LN', eps=1e-6), + act_cfg=dict(type='GELU'), + init_cfg=None): + super(XCABlock, self).__init__(init_cfg=init_cfg) + + self.norm1 = build_norm_layer(norm_cfg, dim) + self.attn = XCA( + dim, + num_heads=num_heads, + qkv_bias=qkv_bias, + attn_drop=attn_drop, + proj_drop=drop, + ) + self.drop_path = DropPath( + drop_path) if drop_path > 0. 
else nn.Identity() + + self.norm3 = build_norm_layer(norm_cfg, dim) + self.local_mp = LPI( + in_features=dim, + norm_cfg=bn_norm_cfg, + act_cfg=act_cfg, + ) + + self.norm2 = build_norm_layer(norm_cfg, dim) + self.ffn = FFN( + embed_dims=dim, + feedforward_channels=int(dim * mlp_ratio), + act_cfg=act_cfg, + ffn_drop=drop, + ) + + self.gamma1 = nn.Parameter(layer_scale_init_value * torch.ones(dim)) + self.gamma3 = nn.Parameter(layer_scale_init_value * torch.ones(dim)) + self.gamma2 = nn.Parameter(layer_scale_init_value * torch.ones(dim)) + + def forward(self, x, H: int, W: int): + x = x + self.drop_path(self.gamma1 * self.attn(self.norm1(x))) + # NOTE official code has 3 then 2, so keeping it the same to be + # consistent with loaded weights See + # https://github.com/rwightman/pytorch-image-models/pull/747#issuecomment-877795721 # noqa: E501 + x = x + self.drop_path( + self.gamma3 * self.local_mp(self.norm3(x), H, W)) + x = x + self.drop_path( + self.gamma2 * self.ffn(self.norm2(x), identity=0)) + return x + + +@MODELS.register_module() +class XCiT(BaseBackbone): + """XCiT backbone. + + A PyTorch implementation of XCiT backbone introduced by: + `XCiT: Cross-Covariance Image Transformers + `_ + + Args: + img_size (int, tuple): Input image size. Defaults to 224. + patch_size (int): Patch size. Defaults to 16. + in_channels (int): Number of input channels. Defaults to 3. + embed_dims (int): Embedding dimension. Defaults to 768. + depth (int): depth of vision transformer. Defaults to 12. + cls_attn_layers (int): Depth of Class attention layers. + Defaults to 2. + num_heads (int): Number of attention heads. Defaults to 12. + mlp_ratio (int): Ratio of mlp hidden dim to embedding dim. + Defaults to 4. + qkv_bias (bool): enable bias for qkv if True. Defaults to True. + drop_rate (float): Probability of an element to be zeroed + after the feed forward layer. Defaults to 0. + attn_drop_rate (float): The drop out rate for attention output weights. + Defaults to 0. + drop_path_rate (float): Stochastic depth rate. Defaults to 0. + use_pos_embed (bool): Whether to use positional encoding. + Defaults to True. + layer_scale_init_value (float): The initial value for layer scale. + Defaults to 1. + tokens_norm (bool): Whether to normalize all tokens or just the + cls_token in the CA. Defaults to False. + out_indices (Sequence[int]): Output from which layers. + Defaults to (-1, ). + frozen_stages (int): Layers to be frozen (all param fixed), and 0 + means to freeze the stem stage. Defaults to -1, which means + not freeze any parameters. + bn_norm_cfg (dict): Config dict for the batch norm layers in LPI and + ConvPatchEmbed. Defaults to ``dict(type='BN')``. + norm_cfg (dict): Config dict for normalization layer. + Defaults to ``dict(type='LN', eps=1e-6)``. + act_cfg (dict): Config dict for activation layer. + Defaults to ``dict(type='GELU')``. + init_cfg (dict | list[dict], optional): Initialization config dict. 
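+
+    A minimal usage sketch (assuming ``XCiT`` is re-exported from
+    ``mmpretrain.models`` like the other backbones; that export is not
+    part of this file):
+
+    Examples:
+        >>> import torch
+        >>> from mmpretrain.models import XCiT
+        >>> model = XCiT(img_size=224, patch_size=16, embed_dims=192,
+        ...              depth=12, num_heads=4)
+        >>> out = model(torch.rand(1, 3, 224, 224))
+        >>> print(out[-1].shape)  # default out_type is 'cls_token'
+        torch.Size([1, 192])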
+ """ + + def __init__(self, + img_size: Union[int, tuple] = 224, + patch_size: int = 16, + in_channels: int = 3, + embed_dims: int = 768, + depth: int = 12, + cls_attn_layers: int = 2, + num_heads: int = 12, + mlp_ratio: float = 4., + qkv_bias: bool = True, + drop_rate: float = 0., + attn_drop_rate: float = 0., + drop_path_rate: float = 0., + use_pos_embed: bool = True, + layer_scale_init_value: float = 1., + tokens_norm: bool = False, + out_type: str = 'cls_token', + out_indices: Sequence[int] = (-1, ), + final_norm: bool = True, + frozen_stages: int = -1, + bn_norm_cfg=dict(type='BN'), + norm_cfg=dict(type='LN', eps=1e-6), + act_cfg=dict(type='GELU'), + init_cfg=dict(type='TruncNormal', layer='Linear')): + super(XCiT, self).__init__(init_cfg=init_cfg) + + img_size = to_2tuple(img_size) + if (img_size[0] % patch_size != 0) or (img_size[1] % patch_size != 0): + raise ValueError(f'`patch_size` ({patch_size}) should divide ' + f'the image shape ({img_size}) evenly.') + + self.embed_dims = embed_dims + + assert out_type in ('raw', 'featmap', 'avg_featmap', 'cls_token') + self.out_type = out_type + + self.patch_embed = ConvPatchEmbed( + img_size=img_size, + patch_size=patch_size, + in_channels=in_channels, + embed_dims=embed_dims, + norm_cfg=bn_norm_cfg, + act_cfg=act_cfg, + ) + + self.cls_token = nn.Parameter(torch.zeros(1, 1, embed_dims)) + self.use_pos_embed = use_pos_embed + if use_pos_embed: + self.pos_embed = PositionalEncodingFourier(dim=embed_dims) + self.pos_drop = nn.Dropout(p=drop_rate) + + self.xca_layers = nn.ModuleList() + self.ca_layers = nn.ModuleList() + self.num_layers = depth + cls_attn_layers + + for _ in range(depth): + self.xca_layers.append( + XCABlock( + dim=embed_dims, + num_heads=num_heads, + mlp_ratio=mlp_ratio, + qkv_bias=qkv_bias, + drop=drop_rate, + attn_drop=attn_drop_rate, + drop_path=drop_path_rate, + bn_norm_cfg=bn_norm_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg, + layer_scale_init_value=layer_scale_init_value, + )) + + for _ in range(cls_attn_layers): + self.ca_layers.append( + ClassAttentionBlock( + dim=embed_dims, + num_heads=num_heads, + mlp_ratio=mlp_ratio, + qkv_bias=qkv_bias, + drop=drop_rate, + attn_drop=attn_drop_rate, + act_cfg=act_cfg, + norm_cfg=norm_cfg, + layer_scale_init_value=layer_scale_init_value, + tokens_norm=tokens_norm, + )) + + if final_norm: + self.norm = build_norm_layer(norm_cfg, embed_dims) + + # Transform out_indices + if isinstance(out_indices, int): + out_indices = [out_indices] + assert isinstance(out_indices, Sequence), \ + f'"out_indices" must by a sequence or int, ' \ + f'get {type(out_indices)} instead.' + out_indices = list(out_indices) + for i, index in enumerate(out_indices): + if index < 0: + out_indices[i] = self.num_layers + index + assert 0 <= out_indices[i] <= self.num_layers, \ + f'Invalid out_indices {index}.' 
+ self.out_indices = out_indices + + if frozen_stages > self.num_layers + 1: + raise ValueError('frozen_stages must be less than ' + f'{self.num_layers} but get {frozen_stages}') + self.frozen_stages = frozen_stages + + def init_weights(self): + super().init_weights() + + if self.init_cfg is not None and self.init_cfg['type'] == 'Pretrained': + return + + trunc_normal_(self.cls_token, std=.02) + + def _freeze_stages(self): + if self.frozen_stages < 0: + return + + # freeze position embedding + if self.use_pos_embed: + self.pos_embed.eval() + for param in self.pos_embed.parameters(): + param.requires_grad = False + # freeze patch embedding + self.patch_embed.eval() + for param in self.patch_embed.parameters(): + param.requires_grad = False + # set dropout to eval model + self.pos_drop.eval() + # freeze cls_token, only use in self.Clslayers + if self.frozen_stages > len(self.xca_layers): + self.cls_token.requires_grad = False + # freeze layers + for i in range(1, self.frozen_stages): + if i <= len(self.xca_layers): + m = self.xca_layers[i - 1] + else: + m = self.ca_layers[i - len(self.xca_layers) - 1] + m.eval() + for param in m.parameters(): + param.requires_grad = False + + # freeze the last layer norm if all_stages are frozen + if self.frozen_stages == len(self.xca_layers) + len(self.ca_layers): + self.norm.eval() + for param in self.norm.parameters(): + param.requires_grad = False + + def forward(self, x): + outs = [] + B = x.shape[0] + # x is (B, N, C). (Hp, Hw) is the patch resolution + x, (Hp, Wp) = self.patch_embed(x) + + if self.use_pos_embed: + # (B, C, Hp, Wp) -> (B, C, N) -> (B, N, C) + pos_encoding = self.pos_embed(B, Hp, Wp) + x = x + pos_encoding.reshape(B, -1, x.size(1)).permute(0, 2, 1) + x = self.pos_drop(x) + + for i, layer in enumerate(self.xca_layers): + x = layer(x, Hp, Wp) + if i in self.out_indices: + outs.append(self._format_output(x, (Hp, Wp), False)) + + x = torch.cat((self.cls_token.expand(B, -1, -1), x), dim=1) + + for i, layer in enumerate(self.ca_layers): + x = layer(x) + if i == len(self.ca_layers) - 1: + x = self.norm(x) + if i + len(self.xca_layers) in self.out_indices: + outs.append(self._format_output(x, (Hp, Wp), True)) + + return tuple(outs) + + def _format_output(self, x, hw, with_cls_token: bool): + if self.out_type == 'raw': + return x + if self.out_type == 'cls_token': + if not with_cls_token: + raise ValueError( + 'Cannot output cls_token since there is no cls_token.') + return x[:, 0] + + patch_token = x[:, 1:] if with_cls_token else x + if self.out_type == 'featmap': + B = x.size(0) + # (B, N, C) -> (B, H, W, C) -> (B, C, H, W) + return patch_token.reshape(B, *hw, -1).permute(0, 3, 1, 2) + if self.out_type == 'avg_featmap': + return patch_token.mean(dim=1) + + def train(self, mode=True): + super().train(mode) + self._freeze_stages() diff --git a/mmpretrain/models/builder.py b/mmpretrain/models/builder.py new file mode 100644 index 0000000000000000000000000000000000000000..2ea4e25c8d6db3bbf07ab94ea08c08e474ec3595 --- /dev/null +++ b/mmpretrain/models/builder.py @@ -0,0 +1,39 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
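The XCiT backbone above is registered in ``MODELS``, so the thin builder aliases defined in this file can instantiate it from a plain config dict. Below is a minimal sketch of that round trip; it assumes ``build_backbone`` and the registered ``XCiT`` are re-exported from ``mmpretrain.models`` (the re-export itself is not shown in this diff), and the config values are illustrative only.

import torch

from mmpretrain.models import build_backbone

# Hypothetical config: a small XCiT that returns a feature map instead of
# the cls token.
xcit_cfg = dict(
    type='XCiT',
    img_size=224,
    patch_size=16,
    embed_dims=192,
    depth=12,
    num_heads=4,
    out_type='featmap',
    out_indices=(-1, ),
)
backbone = build_backbone(xcit_cfg)
feats = backbone(torch.rand(2, 3, 224, 224))
# With patch_size=16 the patch grid is 14x14, so the last (and only)
# output is a (2, 192, 14, 14) tensor.
print(feats[-1].shape)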
+from mmpretrain.registry import MODELS + +BACKBONES = MODELS +NECKS = MODELS +HEADS = MODELS +LOSSES = MODELS +CLASSIFIERS = MODELS +RETRIEVER = MODELS + + +def build_backbone(cfg): + """Build backbone.""" + return BACKBONES.build(cfg) + + +def build_neck(cfg): + """Build neck.""" + return NECKS.build(cfg) + + +def build_head(cfg): + """Build head.""" + return HEADS.build(cfg) + + +def build_loss(cfg): + """Build loss.""" + return LOSSES.build(cfg) + + +def build_classifier(cfg): + """Build classifier.""" + return CLASSIFIERS.build(cfg) + + +def build_retriever(cfg): + """Build retriever.""" + return RETRIEVER.build(cfg) diff --git a/mmpretrain/models/classifiers/__init__.py b/mmpretrain/models/classifiers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..5fa276ff5a2152beb93c4d1b42e6bbf4e2cbf822 --- /dev/null +++ b/mmpretrain/models/classifiers/__init__.py @@ -0,0 +1,10 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from .base import BaseClassifier +from .hugging_face import HuggingFaceClassifier +from .image import ImageClassifier +from .timm import TimmClassifier + +__all__ = [ + 'BaseClassifier', 'ImageClassifier', 'TimmClassifier', + 'HuggingFaceClassifier' +] diff --git a/mmpretrain/models/classifiers/base.py b/mmpretrain/models/classifiers/base.py new file mode 100644 index 0000000000000000000000000000000000000000..a65fc213f4bfe271a9298b823ba38fc4ca9f57e1 --- /dev/null +++ b/mmpretrain/models/classifiers/base.py @@ -0,0 +1,108 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from abc import ABCMeta, abstractmethod +from typing import List, Optional, Sequence + +import torch +from mmengine.model import BaseModel +from mmengine.structures import BaseDataElement + + +class BaseClassifier(BaseModel, metaclass=ABCMeta): + """Base class for classifiers. + + Args: + init_cfg (dict, optional): Initialization config dict. + Defaults to None. + data_preprocessor (dict, optional): The config for preprocessing input + data. If None, it will use "BaseDataPreprocessor" as type, see + :class:`mmengine.model.BaseDataPreprocessor` for more details. + Defaults to None. + + Attributes: + init_cfg (dict): Initialization config dict. + data_preprocessor (:obj:`mmengine.model.BaseDataPreprocessor`): An + extra data pre-processing module, which processes data from + dataloader to the format accepted by :meth:`forward`. + """ + + def __init__(self, + init_cfg: Optional[dict] = None, + data_preprocessor: Optional[dict] = None): + super(BaseClassifier, self).__init__( + init_cfg=init_cfg, data_preprocessor=data_preprocessor) + + @property + def with_neck(self) -> bool: + """Whether the classifier has a neck.""" + return hasattr(self, 'neck') and self.neck is not None + + @property + def with_head(self) -> bool: + """Whether the classifier has a head.""" + return hasattr(self, 'head') and self.head is not None + + @abstractmethod + def forward(self, + inputs: torch.Tensor, + data_samples: Optional[List[BaseDataElement]] = None, + mode: str = 'tensor'): + """The unified entry for a forward process in both training and test. + + The method should accept three modes: "tensor", "predict" and "loss": + + - "tensor": Forward the whole network and return tensor or tuple of + tensor without any post-processing, same as a common nn.Module. + - "predict": Forward and return the predictions, which are fully + processed to a list of :obj:`BaseDataElement`. + - "loss": Forward and return a dict of losses according to the given + inputs and data samples. 
+ + Note that this method doesn't handle neither back propagation nor + optimizer updating, which are done in the :meth:`train_step`. + + Args: + inputs (torch.Tensor): The input tensor with shape (N, C, ...) + in general. + data_samples (List[BaseDataElement], optional): The annotation + data of every samples. It's required if ``mode="loss"``. + Defaults to None. + mode (str): Return what kind of value. Defaults to 'tensor'. + + Returns: + The return type depends on ``mode``. + + - If ``mode="tensor"``, return a tensor or a tuple of tensor. + - If ``mode="predict"``, return a list of + :obj:`mmengine.BaseDataElement`. + - If ``mode="loss"``, return a dict of tensor. + """ + pass + + def extract_feat(self, inputs: torch.Tensor): + """Extract features from the input tensor with shape (N, C, ...). + + The sub-classes are recommended to implement this method to extract + features from backbone and neck. + + Args: + inputs (Tensor): A batch of inputs. The shape of it should be + ``(num_samples, num_channels, *img_shape)``. + """ + raise NotImplementedError + + def extract_feats(self, multi_inputs: Sequence[torch.Tensor], + **kwargs) -> list: + """Extract features from a sequence of input tensor. + + Args: + multi_inputs (Sequence[torch.Tensor]): A sequence of input + tensor. It can be used in augmented inference. + **kwargs: Other keyword arguments accepted by :meth:`extract_feat`. + + Returns: + list: Features of every input tensor. + """ + assert isinstance(multi_inputs, Sequence), \ + '`extract_feats` is used for a sequence of inputs tensor. If you '\ + 'want to extract on single inputs tensor, use `extract_feat`.' + return [self.extract_feat(inputs, **kwargs) for inputs in multi_inputs] diff --git a/mmpretrain/models/classifiers/hugging_face.py b/mmpretrain/models/classifiers/hugging_face.py new file mode 100644 index 0000000000000000000000000000000000000000..26a8fda51b0d01ee54ba71665caedbb8a7bd842c --- /dev/null +++ b/mmpretrain/models/classifiers/hugging_face.py @@ -0,0 +1,222 @@ +# Copyright (c) OpenMMLab. All right reserved. +import re +from collections import OrderedDict +from typing import List, Optional + +import torch +import torch.nn as nn +import torch.nn.functional as F + +from mmpretrain.registry import MODELS +from mmpretrain.structures import DataSample +from mmpretrain.utils import require +from .base import BaseClassifier + + +@MODELS.register_module() +class HuggingFaceClassifier(BaseClassifier): + """Image classifiers for HuggingFace model. + + This class accepts all positional and keyword arguments of the API + ``from_pretrained`` (when ``pretrained=True``) and ``from_config`` (when + ``pretrained=False``) of `transformers.AutoModelForImageClassification`_ + and use it to create a model from hugging-face. + + It can load checkpoints of hugging-face directly, and the saved checkpoints + also can be directly load by hugging-face. + + Please confirm that you have installed ``transfromers`` if you want to use it. + + .. _transformers.AutoModelForImageClassification: + https://huggingface.co/docs/transformers/main/en/model_doc/auto#transformers.AutoModelForImageClassification + + Args: + model_name (str): The name of the model to use in hugging-face. + pretrained (bool): Whether to load pretrained checkpoint from + hugging-face. Defaults to False. + *args: Other positional arguments of the method + `from_pretrained` or `from_config`. + loss (dict): Config of classification loss. Defaults to + ``dict(type='CrossEntropyLoss', loss_weight=1.0)``. 
+ train_cfg (dict, optional): The training setting. The acceptable + fields are: + + - augments (List[dict]): The batch augmentation methods to use. + More details can be found in :mod:`mmpretrain.model.utils.augment`. + + Defaults to None. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. Defaults to False. + data_preprocessor (dict, optional): The config for preprocessing input + data. If None or no specified type, it will use + "ClsDataPreprocessor" as type. See :class:`ClsDataPreprocessor` for + more details. Defaults to None. + init_cfg (dict, optional): the config to control the initialization. + Defaults to None. + **kwargs: Other keyword arguments of the method + `from_pretrained` or `from_config`. + + Examples: + >>> import torch + >>> from mmpretrain.models import build_classifier + >>> cfg = dict(type='HuggingFaceClassifier', model_name='microsoft/resnet-50', pretrained=True) + >>> model = build_classifier(cfg) + >>> inputs = torch.rand(1, 3, 224, 224) + >>> out = model(inputs) + >>> print(out.shape) + torch.Size([1, 1000]) + """ # noqa: E501 + + @require('transformers') + def __init__(self, + model_name, + pretrained=False, + *model_args, + loss=dict(type='CrossEntropyLoss', loss_weight=1.0), + train_cfg: Optional[dict] = None, + with_cp: bool = False, + data_preprocessor: Optional[dict] = None, + init_cfg: Optional[dict] = None, + **kwargs): + if data_preprocessor is None: + data_preprocessor = {} + # The build process is in MMEngine, so we need to add scope here. + data_preprocessor.setdefault('type', 'mmpretrain.ClsDataPreprocessor') + + if train_cfg is not None and 'augments' in train_cfg: + # Set batch augmentations by `train_cfg` + data_preprocessor['batch_augments'] = train_cfg + + super().__init__( + init_cfg=init_cfg, data_preprocessor=data_preprocessor) + + from transformers import AutoConfig, AutoModelForImageClassification + if pretrained: + self.model = AutoModelForImageClassification.from_pretrained( + model_name, *model_args, **kwargs) + else: + config = AutoConfig.from_pretrained(model_name, *model_args, + **kwargs) + self.model = AutoModelForImageClassification.from_config(config) + + if not isinstance(loss, nn.Module): + loss = MODELS.build(loss) + self.loss_module = loss + + self.with_cp = with_cp + if self.with_cp: + self.model.gradient_checkpointing_enable() + + self._register_state_dict_hook(self._remove_state_dict_prefix) + self._register_load_state_dict_pre_hook(self._add_state_dict_prefix) + + def forward(self, inputs, data_samples=None, mode='tensor'): + if mode == 'tensor': + return self.model(inputs).logits + elif mode == 'loss': + return self.loss(inputs, data_samples) + elif mode == 'predict': + return self.predict(inputs, data_samples) + else: + raise RuntimeError(f'Invalid mode "{mode}".') + + def extract_feat(self, inputs: torch.Tensor): + raise NotImplementedError( + "The HuggingFaceClassifier doesn't support extract feature yet.") + + def loss(self, inputs: torch.Tensor, data_samples: List[DataSample], + **kwargs): + """Calculate losses from a batch of inputs and data samples. + + Args: + inputs (torch.Tensor): The input tensor with shape + (N, C, ...) in general. + data_samples (List[DataSample]): The annotation data of + every samples. + **kwargs: Other keyword arguments of the loss module. 
+ + Returns: + dict[str, Tensor]: a dictionary of loss components + """ + # The part can be traced by torch.fx + cls_score = self.model(inputs).logits + + # The part can not be traced by torch.fx + losses = self._get_loss(cls_score, data_samples, **kwargs) + return losses + + def _get_loss(self, cls_score: torch.Tensor, + data_samples: List[DataSample], **kwargs): + """Unpack data samples and compute loss.""" + # Unpack data samples and pack targets + if 'gt_score' in data_samples[0]: + # Batch augmentation may convert labels to one-hot format scores. + target = torch.stack([i.gt_score for i in data_samples]) + else: + target = torch.cat([i.gt_label for i in data_samples]) + + # compute loss + losses = dict() + loss = self.loss_module( + cls_score, target, avg_factor=cls_score.size(0), **kwargs) + losses['loss'] = loss + + return losses + + def predict(self, + inputs: torch.Tensor, + data_samples: Optional[List[DataSample]] = None): + """Predict results from a batch of inputs. + + Args: + inputs (torch.Tensor): The input tensor with shape + (N, C, ...) in general. + data_samples (List[DataSample], optional): The annotation + data of every samples. Defaults to None. + + Returns: + List[DataSample]: The prediction results. + """ + # The part can be traced by torch.fx + cls_score = self.model(inputs).logits + + # The part can not be traced by torch.fx + predictions = self._get_predictions(cls_score, data_samples) + return predictions + + def _get_predictions(self, cls_score, data_samples): + """Post-process the output of head. + + Including softmax and set ``pred_label`` of data samples. + """ + pred_scores = F.softmax(cls_score, dim=1) + pred_labels = pred_scores.argmax(dim=1, keepdim=True).detach() + + if data_samples is not None: + for data_sample, score, label in zip(data_samples, pred_scores, + pred_labels): + data_sample.set_pred_score(score).set_pred_label(label) + else: + data_samples = [] + for score, label in zip(pred_scores, pred_labels): + data_samples.append( + DataSample().set_pred_score(score).set_pred_label(label)) + + return data_samples + + @staticmethod + def _remove_state_dict_prefix(self, state_dict, prefix, local_metadata): + new_state_dict = OrderedDict() + for k, v in state_dict.items(): + new_key = re.sub(f'^{prefix}model.', prefix, k) + new_state_dict[new_key] = v + return new_state_dict + + @staticmethod + def _add_state_dict_prefix(state_dict, prefix, local_metadata, strict, + missing_keys, unexpected_keys, error_msgs): + new_prefix = prefix + 'model.' + for k in list(state_dict.keys()): + new_key = re.sub(f'^{prefix}', new_prefix, k) + state_dict[new_key] = state_dict[k] + del state_dict[k] diff --git a/mmpretrain/models/classifiers/image.py b/mmpretrain/models/classifiers/image.py new file mode 100644 index 0000000000000000000000000000000000000000..f606fe77ca71a4cdf332f57b6c3b1a02878b9f85 --- /dev/null +++ b/mmpretrain/models/classifiers/image.py @@ -0,0 +1,259 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import List, Optional + +import torch +import torch.nn as nn + +from mmpretrain.registry import MODELS +from mmpretrain.structures import DataSample +from .base import BaseClassifier + + +@MODELS.register_module() +class ImageClassifier(BaseClassifier): + """Image classifiers for supervised classification task. + + Args: + backbone (dict): The backbone module. See + :mod:`mmpretrain.models.backbones`. + neck (dict, optional): The neck module to process features from + backbone. See :mod:`mmpretrain.models.necks`. Defaults to None. 
+ head (dict, optional): The head module to do prediction and calculate + loss from processed features. See :mod:`mmpretrain.models.heads`. + Notice that if the head is not set, almost all methods cannot be + used except :meth:`extract_feat`. Defaults to None. + pretrained (str, optional): The pretrained checkpoint path, support + local path and remote path. Defaults to None. + train_cfg (dict, optional): The training setting. The acceptable + fields are: + + - augments (List[dict]): The batch augmentation methods to use. + More details can be found in + :mod:`mmpretrain.model.utils.augment`. + - probs (List[float], optional): The probability of every batch + augmentation methods. If None, choose evenly. Defaults to None. + + Defaults to None. + data_preprocessor (dict, optional): The config for preprocessing input + data. If None or no specified type, it will use + "ClsDataPreprocessor" as type. See :class:`ClsDataPreprocessor` for + more details. Defaults to None. + init_cfg (dict, optional): the config to control the initialization. + Defaults to None. + """ + + def __init__(self, + backbone: dict, + neck: Optional[dict] = None, + head: Optional[dict] = None, + pretrained: Optional[str] = None, + train_cfg: Optional[dict] = None, + data_preprocessor: Optional[dict] = None, + init_cfg: Optional[dict] = None): + if pretrained is not None: + init_cfg = dict(type='Pretrained', checkpoint=pretrained) + + data_preprocessor = data_preprocessor or {} + + if isinstance(data_preprocessor, dict): + data_preprocessor.setdefault('type', 'mmpretrain.ClsDataPreprocessor') + data_preprocessor.setdefault('batch_augments', train_cfg) + data_preprocessor = MODELS.build(data_preprocessor) + elif not isinstance(data_preprocessor, nn.Module): + raise TypeError('data_preprocessor should be a `dict` or ' + f'`nn.Module` instance, but got ' + f'{type(data_preprocessor)}') + + super(ImageClassifier, self).__init__( + init_cfg=init_cfg, data_preprocessor=data_preprocessor) + + if not isinstance(backbone, nn.Module): + backbone = MODELS.build(backbone) + if neck is not None and not isinstance(neck, nn.Module): + neck = MODELS.build(neck) + if head is not None and not isinstance(head, nn.Module): + head = MODELS.build(head) + + self.backbone = backbone + self.neck = neck + self.head = head + + def forward(self, + inputs: torch.Tensor, + data_samples: Optional[List[DataSample]] = None, + mode: str = 'tensor'): + """The unified entry for a forward process in both training and test. + + The method should accept three modes: "tensor", "predict" and "loss": + + - "tensor": Forward the whole network and return tensor(s) without any + post-processing, same as a common PyTorch Module. + - "predict": Forward and return the predictions, which are fully + processed to a list of :obj:`DataSample`. + - "loss": Forward and return a dict of losses according to the given + inputs and data samples. + + Args: + inputs (torch.Tensor): The input tensor with shape + (N, C, ...) in general. + data_samples (List[DataSample], optional): The annotation + data of every samples. It's required if ``mode="loss"``. + Defaults to None. + mode (str): Return what kind of value. Defaults to 'tensor'. + + Returns: + The return type depends on ``mode``. + + - If ``mode="tensor"``, return a tensor or a tuple of tensor. + - If ``mode="predict"``, return a list of + :obj:`mmpretrain.structures.DataSample`. + - If ``mode="loss"``, return a dict of tensor. 
+ """ + if mode == 'tensor': + feats = self.extract_feat(inputs) + return self.head(feats) if self.with_head else feats + elif mode == 'loss': + return self.loss(inputs, data_samples) + elif mode == 'predict': + return self.predict(inputs, data_samples) + else: + raise RuntimeError(f'Invalid mode "{mode}".') + + def extract_feat(self, inputs, stage='neck'): + """Extract features from the input tensor with shape (N, C, ...). + + Args: + inputs (Tensor): A batch of inputs. The shape of it should be + ``(num_samples, num_channels, *img_shape)``. + stage (str): Which stage to output the feature. Choose from: + + - "backbone": The output of backbone network. Returns a tuple + including multiple stages features. + - "neck": The output of neck module. Returns a tuple including + multiple stages features. + - "pre_logits": The feature before the final classification + linear layer. Usually returns a tensor. + + Defaults to "neck". + + Returns: + tuple | Tensor: The output of specified stage. + The output depends on detailed implementation. In general, the + output of backbone and neck is a tuple and the output of + pre_logits is a tensor. + + Examples: + 1. Backbone output + + >>> import torch + >>> from mmengine import Config + >>> from mmpretrain.models import build_classifier + >>> + >>> cfg = Config.fromfile('configs/resnet/resnet18_8xb32_in1k.py').model + >>> cfg.backbone.out_indices = (0, 1, 2, 3) # Output multi-scale feature maps + >>> model = build_classifier(cfg) + >>> outs = model.extract_feat(torch.rand(1, 3, 224, 224), stage='backbone') + >>> for out in outs: + ... print(out.shape) + torch.Size([1, 64, 56, 56]) + torch.Size([1, 128, 28, 28]) + torch.Size([1, 256, 14, 14]) + torch.Size([1, 512, 7, 7]) + + 2. Neck output + + >>> import torch + >>> from mmengine import Config + >>> from mmpretrain.models import build_classifier + >>> + >>> cfg = Config.fromfile('configs/resnet/resnet18_8xb32_in1k.py').model + >>> cfg.backbone.out_indices = (0, 1, 2, 3) # Output multi-scale feature maps + >>> model = build_classifier(cfg) + >>> + >>> outs = model.extract_feat(torch.rand(1, 3, 224, 224), stage='neck') + >>> for out in outs: + ... print(out.shape) + torch.Size([1, 64]) + torch.Size([1, 128]) + torch.Size([1, 256]) + torch.Size([1, 512]) + + 3. Pre-logits output (without the final linear classifier head) + + >>> import torch + >>> from mmengine import Config + >>> from mmpretrain.models import build_classifier + >>> + >>> cfg = Config.fromfile('configs/vision_transformer/vit-base-p16_pt-64xb64_in1k-224.py').model + >>> model = build_classifier(cfg) + >>> + >>> out = model.extract_feat(torch.rand(1, 3, 224, 224), stage='pre_logits') + >>> print(out.shape) # The hidden dims in head is 3072 + torch.Size([1, 3072]) + """ # noqa: E501 + assert stage in ['backbone', 'neck', 'pre_logits'], \ + (f'Invalid output stage "{stage}", please choose from "backbone", ' + '"neck" and "pre_logits"') + + x = self.backbone(inputs) + + if stage == 'backbone': + return x + + if self.with_neck: + x = self.neck(x) + if stage == 'neck': + return x + + assert self.with_head and hasattr(self.head, 'pre_logits'), \ + "No head or the head doesn't implement `pre_logits` method." + return self.head.pre_logits(x) + + def loss(self, inputs: torch.Tensor, + data_samples: List[DataSample]) -> dict: + """Calculate losses from a batch of inputs and data samples. + + Args: + inputs (torch.Tensor): The input tensor with shape + (N, C, ...) in general. + data_samples (List[DataSample]): The annotation data of + every samples. 
+ + Returns: + dict[str, Tensor]: a dictionary of loss components + """ + feats = self.extract_feat(inputs) + return self.head.loss(feats, data_samples) + + def predict(self, + inputs: torch.Tensor, + data_samples: Optional[List[DataSample]] = None, + **kwargs) -> List[DataSample]: + """Predict results from a batch of inputs. + + Args: + inputs (torch.Tensor): The input tensor with shape + (N, C, ...) in general. + data_samples (List[DataSample], optional): The annotation + data of every samples. Defaults to None. + **kwargs: Other keyword arguments accepted by the ``predict`` + method of :attr:`head`. + """ + feats = self.extract_feat(inputs) + return self.head.predict(feats, data_samples, **kwargs) + + def get_layer_depth(self, param_name: str): + """Get the layer-wise depth of a parameter. + + Args: + param_name (str): The name of the parameter. + + Returns: + Tuple[int, int]: The layer-wise depth and the max depth. + """ + if hasattr(self.backbone, 'get_layer_depth'): + return self.backbone.get_layer_depth(param_name, 'backbone.') + else: + raise NotImplementedError( + f"The babckone {type(self.backbone)} doesn't " + 'support `get_layer_depth` by now.') diff --git a/mmpretrain/models/classifiers/timm.py b/mmpretrain/models/classifiers/timm.py new file mode 100644 index 0000000000000000000000000000000000000000..d777b2e039d848b01fc9c6b6eaae6619bebb8938 --- /dev/null +++ b/mmpretrain/models/classifiers/timm.py @@ -0,0 +1,209 @@ +# Copyright (c) OpenMMLab. All right reserved. +import re +from collections import OrderedDict +from typing import List, Optional + +import torch +import torch.nn as nn +import torch.nn.functional as F + +from mmpretrain.registry import MODELS +from mmpretrain.structures import DataSample +from mmpretrain.utils import require +from .base import BaseClassifier + + +@MODELS.register_module() +class TimmClassifier(BaseClassifier): + """Image classifiers for pytorch-image-models (timm) model. + + This class accepts all positional and keyword arguments of the function + `timm.models.create_model `_ and use + it to create a model from pytorch-image-models. + + It can load checkpoints of timm directly, and the saved checkpoints also + can be directly load by timm. + + Please confirm that you have installed ``timm`` if you want to use it. + + Args: + *args: All positional arguments of the function + `timm.models.create_model`. + loss (dict): Config of classification loss. Defaults to + ``dict(type='CrossEntropyLoss', loss_weight=1.0)``. + train_cfg (dict, optional): The training setting. The acceptable + fields are: + + - augments (List[dict]): The batch augmentation methods to use. + More details can be found in :mod:`mmpretrain.model.utils.augment`. + + Defaults to None. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. Defaults to False. + data_preprocessor (dict, optional): The config for preprocessing input + data. If None or no specified type, it will use + "ClsDataPreprocessor" as type. See :class:`ClsDataPreprocessor` for + more details. Defaults to None. + init_cfg (dict, optional): the config to control the initialization. + Defaults to None. + **kwargs: Other keyword arguments of the function + `timm.models.create_model`. 
+ + Examples: + >>> import torch + >>> from mmpretrain.models import build_classifier + >>> cfg = dict(type='TimmClassifier', model_name='resnet50', pretrained=True) + >>> model = build_classifier(cfg) + >>> inputs = torch.rand(1, 3, 224, 224) + >>> out = model(inputs) + >>> print(out.shape) + torch.Size([1, 1000]) + """ # noqa: E501 + + @require('timm') + def __init__(self, + *args, + loss=dict(type='CrossEntropyLoss', loss_weight=1.0), + train_cfg: Optional[dict] = None, + with_cp: bool = False, + data_preprocessor: Optional[dict] = None, + init_cfg: Optional[dict] = None, + **kwargs): + if data_preprocessor is None: + data_preprocessor = {} + # The build process is in MMEngine, so we need to add scope here. + data_preprocessor.setdefault('type', 'mmpretrain.ClsDataPreprocessor') + + if train_cfg is not None and 'augments' in train_cfg: + # Set batch augmentations by `train_cfg` + data_preprocessor['batch_augments'] = train_cfg + + super().__init__( + init_cfg=init_cfg, data_preprocessor=data_preprocessor) + from timm.models import create_model + self.model = create_model(*args, **kwargs) + + if not isinstance(loss, nn.Module): + loss = MODELS.build(loss) + self.loss_module = loss + + self.with_cp = with_cp + if self.with_cp: + self.model.set_grad_checkpointing() + + self._register_state_dict_hook(self._remove_state_dict_prefix) + self._register_load_state_dict_pre_hook(self._add_state_dict_prefix) + + def forward(self, inputs, data_samples=None, mode='tensor'): + if mode == 'tensor': + return self.model(inputs) + elif mode == 'loss': + return self.loss(inputs, data_samples) + elif mode == 'predict': + return self.predict(inputs, data_samples) + else: + raise RuntimeError(f'Invalid mode "{mode}".') + + def extract_feat(self, inputs: torch.Tensor): + if hasattr(self.model, 'forward_features'): + return self.model.forward_features(inputs) + else: + raise NotImplementedError( + f"The model {type(self.model)} doesn't support extract " + "feature because it don't have `forward_features` method.") + + def loss(self, inputs: torch.Tensor, data_samples: List[DataSample], + **kwargs): + """Calculate losses from a batch of inputs and data samples. + + Args: + inputs (torch.Tensor): The input tensor with shape + (N, C, ...) in general. + data_samples (List[DataSample]): The annotation data of + every samples. + **kwargs: Other keyword arguments of the loss module. + + Returns: + dict[str, Tensor]: a dictionary of loss components + """ + # The part can be traced by torch.fx + cls_score = self.model(inputs) + + # The part can not be traced by torch.fx + losses = self._get_loss(cls_score, data_samples, **kwargs) + return losses + + def _get_loss(self, cls_score: torch.Tensor, + data_samples: List[DataSample], **kwargs): + """Unpack data samples and compute loss.""" + # Unpack data samples and pack targets + if 'gt_score' in data_samples[0]: + # Batch augmentation may convert labels to one-hot format scores. + target = torch.stack([i.gt_score for i in data_samples]) + else: + target = torch.cat([i.gt_label for i in data_samples]) + + # compute loss + losses = dict() + loss = self.loss_module(cls_score, target, **kwargs) + losses['loss'] = loss + + return losses + + def predict(self, + inputs: torch.Tensor, + data_samples: Optional[List[DataSample]] = None): + """Predict results from a batch of inputs. + + Args: + inputs (torch.Tensor): The input tensor with shape + (N, C, ...) in general. + data_samples (List[DataSample], optional): The annotation + data of every samples. Defaults to None. 
+ + Returns: + List[DataSample]: The prediction results. + """ + # The part can be traced by torch.fx + cls_score = self(inputs) + + # The part can not be traced by torch.fx + predictions = self._get_predictions(cls_score, data_samples) + return predictions + + def _get_predictions(self, cls_score, data_samples=None): + """Post-process the output of head. + + Including softmax and set ``pred_label`` of data samples. + """ + pred_scores = F.softmax(cls_score, dim=1) + pred_labels = pred_scores.argmax(dim=1, keepdim=True).detach() + + if data_samples is not None: + for data_sample, score, label in zip(data_samples, pred_scores, + pred_labels): + data_sample.set_pred_score(score).set_pred_label(label) + else: + data_samples = [] + for score, label in zip(pred_scores, pred_labels): + data_samples.append( + DataSample().set_pred_score(score).set_pred_label(label)) + + return data_samples + + @staticmethod + def _remove_state_dict_prefix(self, state_dict, prefix, local_metadata): + new_state_dict = OrderedDict() + for k, v in state_dict.items(): + new_key = re.sub(f'^{prefix}model.', prefix, k) + new_state_dict[new_key] = v + return new_state_dict + + @staticmethod + def _add_state_dict_prefix(state_dict, prefix, local_metadata, strict, + missing_keys, unexpected_keys, error_msgs): + new_prefix = prefix + 'model.' + for k in list(state_dict.keys()): + new_key = re.sub(f'^{prefix}', new_prefix, k) + state_dict[new_key] = state_dict[k] + del state_dict[k] diff --git a/mmpretrain/models/heads/__init__.py b/mmpretrain/models/heads/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..7d2c1ae0f02524d6a5538a69c1706af4ea70da02 --- /dev/null +++ b/mmpretrain/models/heads/__init__.py @@ -0,0 +1,65 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
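The two state-dict hooks that close ``TimmClassifier`` above (and their twins in ``HuggingFaceClassifier``) only rewrite key prefixes, which is what keeps the saved checkpoints loadable by plain timm or transformers. A standalone sketch of that renaming, using the same regular expressions on a toy state dict (the key names are made up for illustration):

import re
from collections import OrderedDict

prefix = ''  # the wrapper is the top-level module here

# On save, _remove_state_dict_prefix strips the internal 'model.' prefix.
saved = OrderedDict([('model.conv1.weight', 0), ('model.fc.weight', 1)])
exported = OrderedDict(
    (re.sub(f'^{prefix}model.', prefix, k), v) for k, v in saved.items())
print(list(exported))   # ['conv1.weight', 'fc.weight']

# On load, _add_state_dict_prefix puts the prefix back.
restored = OrderedDict(
    (re.sub(f'^{prefix}', prefix + 'model.', k), v)
    for k, v in exported.items())
print(list(restored))   # ['model.conv1.weight', 'model.fc.weight']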
+from .beitv1_head import BEiTV1Head +from .beitv2_head import BEiTV2Head +from .cae_head import CAEHead +from .cls_head import ClsHead +from .conformer_head import ConformerHead +from .contrastive_head import ContrastiveHead +from .deit_head import DeiTClsHead +from .efficientformer_head import EfficientFormerClsHead +from .grounding_head import GroundingHead +from .itc_head import ITCHead +from .itm_head import ITMHead +from .latent_heads import LatentCrossCorrelationHead, LatentPredictHead +from .levit_head import LeViTClsHead +from .linear_head import LinearClsHead +from .mae_head import MAEPretrainHead +from .margin_head import ArcFaceClsHead +from .mim_head import MIMHead +from .mixmim_head import MixMIMPretrainHead +from .mocov3_head import MoCoV3Head +from .multi_label_cls_head import MultiLabelClsHead +from .multi_label_csra_head import CSRAClsHead +from .multi_label_linear_head import MultiLabelLinearClsHead +from .multi_task_head import MultiTaskHead +from .seq_gen_head import SeqGenerationHead +from .simmim_head import SimMIMHead +from .stacked_head import StackedLinearClsHead +from .swav_head import SwAVHead +from .vig_head import VigClsHead +from .vision_transformer_head import VisionTransformerClsHead +from .vqa_head import VQAGenerationHead + +__all__ = [ + 'ClsHead', + 'LinearClsHead', + 'StackedLinearClsHead', + 'MultiLabelClsHead', + 'MultiLabelLinearClsHead', + 'VisionTransformerClsHead', + 'DeiTClsHead', + 'ConformerHead', + 'EfficientFormerClsHead', + 'ArcFaceClsHead', + 'CSRAClsHead', + 'MultiTaskHead', + 'LeViTClsHead', + 'VigClsHead', + 'BEiTV1Head', + 'BEiTV2Head', + 'CAEHead', + 'ContrastiveHead', + 'LatentCrossCorrelationHead', + 'LatentPredictHead', + 'MAEPretrainHead', + 'MixMIMPretrainHead', + 'SwAVHead', + 'MoCoV3Head', + 'MIMHead', + 'SimMIMHead', + 'SeqGenerationHead', + 'VQAGenerationHead', + 'ITCHead', + 'ITMHead', + 'GroundingHead', +] diff --git a/mmpretrain/models/heads/beitv1_head.py b/mmpretrain/models/heads/beitv1_head.py new file mode 100644 index 0000000000000000000000000000000000000000..df422ea71c9090d1ab084bbc93c8889a4f2f402e --- /dev/null +++ b/mmpretrain/models/heads/beitv1_head.py @@ -0,0 +1,55 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import List, Optional, Union + +import torch +import torch.nn as nn +from mmengine.model import BaseModule + +from mmpretrain.registry import MODELS + + +@MODELS.register_module() +class BEiTV1Head(BaseModule): + """Head for BEiT v1 Pre-training. + + Compute the logits and the cross entropy loss. + + Args: + embed_dims (int): The dimension of embedding. + num_embed (int): The number of classification types. + loss (dict): The config of loss. + init_cfg (dict or List[dict], optional): Initialization config dict. + Defaults to None. + """ + + def __init__( + self, + embed_dims: int, + num_embed: int, + loss: dict, + init_cfg: Optional[Union[dict, List[dict]]] = dict( + type='TruncNormal', layer='Linear', std=0.02, bias=0) + ) -> None: + super().__init__(init_cfg=init_cfg) + self.cls_head = nn.Linear(embed_dims, num_embed) + self.loss_module = MODELS.build(loss) + + def loss(self, feats: torch.Tensor, target: torch.Tensor, + mask: torch.Tensor) -> torch.Tensor: + """Generate loss. + + Args: + feats (torch.Tensor): Features from backbone. + target (torch.Tensor): Target generated by target_generator. + mask (torch.Tensor): Generated mask for pretraing. 
+ """ + mask = mask.flatten(1).to(torch.bool) + target = torch.argmax(target, dim=1).flatten(1) + target = target[mask] + + # remove cls_token + feats = feats[:, 1:] + logits = self.cls_head(feats[mask]) + + loss = self.loss_module(logits, target) + return loss diff --git a/mmpretrain/models/heads/beitv2_head.py b/mmpretrain/models/heads/beitv2_head.py new file mode 100644 index 0000000000000000000000000000000000000000..cf677a2cf7c1a3964f1ba884a0ccae83f8b70a40 --- /dev/null +++ b/mmpretrain/models/heads/beitv2_head.py @@ -0,0 +1,57 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import List, Optional, Union + +import torch +import torch.nn as nn +from mmengine.model import BaseModule + +from mmpretrain.registry import MODELS + + +@MODELS.register_module() +class BEiTV2Head(BaseModule): + """Head for BEiT v2 Pre-training. + + Compute the logits and the cross entropy loss. + + Args: + embed_dims (int): The dimension of embedding. + num_embed (int): The number of classification types. + loss (dict): The config of loss. + init_cfg (dict or List[dict], optional): Initialization config dict. + Defaults to None. + """ + + def __init__( + self, + embed_dims: int, + num_embed: int, + loss: dict, + init_cfg: Optional[Union[dict, List[dict]]] = dict( + type='TruncNormal', layer='Linear', std=0.02, bias=0) + ) -> None: + super().__init__(init_cfg=init_cfg) + self.cls_head = nn.Linear(embed_dims, num_embed) + self.loss_module = MODELS.build(loss) + + def loss(self, feats: torch.Tensor, feats_cls_pt: torch.Tensor, + target: torch.Tensor, mask: torch.Tensor) -> torch.Tensor: + """Generate loss. + + Args: + feats (torch.Tensor): Features from backbone. + feats_cls_pt (torch.Tensor) : Features from class late layers for + pretraining. + target (torch.Tensor): Target generated by target_generator. + mask (torch.Tensor): Generated mask for pretraing. + """ + mask = mask.flatten(1).to(torch.bool) + target = target[mask] + + # shared cls head + logits = self.cls_head(feats[mask]) + logits_cls_pt = self.cls_head(feats_cls_pt[mask]) + + loss_1 = self.loss_module(logits, target) + loss_2 = self.loss_module(logits_cls_pt, target) + return loss_1, loss_2 diff --git a/mmpretrain/models/heads/cae_head.py b/mmpretrain/models/heads/cae_head.py new file mode 100644 index 0000000000000000000000000000000000000000..18a07f0a79297c35a39b9b2da0d25bf1eac6e70b --- /dev/null +++ b/mmpretrain/models/heads/cae_head.py @@ -0,0 +1,69 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import List, Optional, Tuple, Union + +import torch +from mmengine.model import BaseModule + +from mmpretrain.registry import MODELS + + +@MODELS.register_module() +class CAEHead(BaseModule): + """Head for CAE Pre-training. + + Compute the align loss and the main loss. In addition, this head also + generates the prediction target generated by dalle. + + Args: + loss (dict): The config of loss. + tokenizer_path (str): The path of the tokenizer. + init_cfg (dict or List[dict], optional): Initialization config dict. + Defaults to None. + """ + + def __init__(self, + loss: dict, + init_cfg: Optional[Union[dict, List[dict]]] = None) -> None: + super().__init__(init_cfg=init_cfg) + self.loss_module = MODELS.build(loss) + + @torch.no_grad() + def _generate_target(self, logits_target: torch.Tensor) -> torch.Tensor: + """Generate the reconstruction target. + + Args: + logits_target (torch.Tensor): The logits generated by DALL-E.s + + Returns: + torch.Tensor: The logits target. 
+ """ + target = torch.argmax(logits_target, dim=1) + return target.flatten(1) + + def loss(self, logits: torch.Tensor, logits_target: torch.Tensor, + latent_pred: torch.Tensor, latent_target: torch.Tensor, + mask: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]: + """Generate loss. + + Args: + logits (torch.Tensor): Logits generated by decoder. + logits_target (img_target): Target generated by dalle for decoder + prediction. + latent_pred (torch.Tensor): Latent prediction by regressor. + latent_target (torch.Tensor): Target for latent prediction, + generated by teacher. + + Returns: + Tuple[torch.Tensor, torch.Tensor]: The tuple of loss. + - ``loss_main`` (torch.Tensor): Cross entropy loss. + - ``loss_align`` (torch.Tensor): MSE loss. + """ + + target = self._generate_target(logits_target) # target features + target = target[mask].detach() + + # loss main for decoder, loss align for regressor + loss_main, loss_align = self.loss_module(logits, target, latent_pred, + latent_target) + + return (loss_main, loss_align) diff --git a/mmpretrain/models/heads/cls_head.py b/mmpretrain/models/heads/cls_head.py new file mode 100644 index 0000000000000000000000000000000000000000..4ac4c51804122adbc92df8c8748e4109e205110f --- /dev/null +++ b/mmpretrain/models/heads/cls_head.py @@ -0,0 +1,156 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import List, Optional, Tuple, Union + +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmengine.model import BaseModule + +from mmpretrain.evaluation.metrics import Accuracy +from mmpretrain.registry import MODELS +from mmpretrain.structures import DataSample + + +@MODELS.register_module() +class ClsHead(BaseModule): + """Classification head. + + Args: + loss (dict): Config of classification loss. Defaults to + ``dict(type='CrossEntropyLoss', loss_weight=1.0)``. + topk (int | Tuple[int]): Top-k accuracy. Defaults to ``(1, )``. + cal_acc (bool): Whether to calculate accuracy during training. + If you use batch augmentations like Mixup and CutMix during + training, it is pointless to calculate accuracy. + Defaults to False. + init_cfg (dict, optional): the config to control the initialization. + Defaults to None. + """ + + def __init__(self, + loss: dict = dict(type='CrossEntropyLoss', loss_weight=1.0), + topk: Union[int, Tuple[int]] = (1, ), + cal_acc: bool = False, + init_cfg: Optional[dict] = None): + super(ClsHead, self).__init__(init_cfg=init_cfg) + + self.topk = topk + if not isinstance(loss, nn.Module): + loss = MODELS.build(loss) + self.loss_module = loss + self.cal_acc = cal_acc + + def pre_logits(self, feats: Tuple[torch.Tensor]) -> torch.Tensor: + """The process before the final classification head. + + The input ``feats`` is a tuple of tensor, and each tensor is the + feature of a backbone stage. In ``ClsHead``, we just obtain the feature + of the last stage. + """ + # The ClsHead doesn't have other module, just return after unpacking. + return feats[-1] + + def forward(self, feats: Tuple[torch.Tensor]) -> torch.Tensor: + """The forward process.""" + pre_logits = self.pre_logits(feats) + # The ClsHead doesn't have the final classification head, + # just return the unpacked inputs. + return pre_logits + + def loss(self, feats: Tuple[torch.Tensor], data_samples: List[DataSample], + **kwargs) -> dict: + """Calculate losses from the classification score. + + Args: + feats (tuple[Tensor]): The features extracted from the backbone. 
+ Multiple stage inputs are acceptable but only the last stage + will be used to classify. The shape of every item should be + ``(num_samples, num_classes)``. + data_samples (List[DataSample]): The annotation data of + every samples. + **kwargs: Other keyword arguments to forward the loss module. + + Returns: + dict[str, Tensor]: a dictionary of loss components + """ + # The part can be traced by torch.fx + cls_score = self(feats) + + # The part can not be traced by torch.fx + losses = self._get_loss(cls_score, data_samples, **kwargs) + return losses + + def _get_loss(self, cls_score: torch.Tensor, + data_samples: List[DataSample], **kwargs): + """Unpack data samples and compute loss.""" + # Unpack data samples and pack targets + if 'gt_score' in data_samples[0]: + # Batch augmentation may convert labels to one-hot format scores. + target = torch.stack([i.gt_score for i in data_samples]) + else: + target = torch.cat([i.gt_label for i in data_samples]) + + # compute loss + losses = dict() + loss = self.loss_module( + cls_score, target, avg_factor=cls_score.size(0), **kwargs) + losses['loss'] = loss + + # compute accuracy + if self.cal_acc: + assert target.ndim == 1, 'If you enable batch augmentation ' \ + 'like mixup during training, `cal_acc` is pointless.' + acc = Accuracy.calculate(cls_score, target, topk=self.topk) + losses.update( + {f'accuracy_top-{k}': a + for k, a in zip(self.topk, acc)}) + + return losses + + def predict( + self, + feats: Tuple[torch.Tensor], + data_samples: Optional[List[Optional[DataSample]]] = None + ) -> List[DataSample]: + """Inference without augmentation. + + Args: + feats (tuple[Tensor]): The features extracted from the backbone. + Multiple stage inputs are acceptable but only the last stage + will be used to classify. The shape of every item should be + ``(num_samples, num_classes)``. + data_samples (List[DataSample | None], optional): The annotation + data of every samples. If not None, set ``pred_label`` of + the input data samples. Defaults to None. + + Returns: + List[DataSample]: A list of data samples which contains the + predicted results. + """ + # The part can be traced by torch.fx + cls_score = self(feats) + + # The part can not be traced by torch.fx + predictions = self._get_predictions(cls_score, data_samples) + return predictions + + def _get_predictions(self, cls_score, data_samples): + """Post-process the output of head. + + Including softmax and set ``pred_label`` of data samples. + """ + pred_scores = F.softmax(cls_score, dim=1) + pred_labels = pred_scores.argmax(dim=1, keepdim=True).detach() + + out_data_samples = [] + if data_samples is None: + data_samples = [None for _ in range(pred_scores.size(0))] + + for data_sample, score, label in zip(data_samples, pred_scores, + pred_labels): + if data_sample is None: + data_sample = DataSample() + + data_sample.set_pred_score(score).set_pred_label(label) + out_data_samples.append(data_sample) + return out_data_samples diff --git a/mmpretrain/models/heads/conformer_head.py b/mmpretrain/models/heads/conformer_head.py new file mode 100644 index 0000000000000000000000000000000000000000..eade90d567b5cb9189f62919ad9a6a0e9c47ae23 --- /dev/null +++ b/mmpretrain/models/heads/conformer_head.py @@ -0,0 +1,122 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
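# Illustrative sketch (editorial, not part of the patched files): the two target
# formats that ClsHead._get_loss above unpacks from data samples. Index labels
# (`gt_label`) are concatenated, while batch augmentations such as Mixup store
# soft `gt_score` vectors that are stacked and fed to a soft-label cross entropy.
# The tensors are made-up stand-ins; soft-target F.cross_entropy needs torch>=1.10.
import torch
import torch.nn.functional as F

cls_score = torch.randn(4, 10)                            # (num_samples, num_classes)
gt_label = torch.tensor([1, 0, 3, 7])                     # hard targets -> torch.cat path
gt_score = F.one_hot(gt_label, 10).float() * 0.9 + 0.01   # soft targets -> torch.stack path

loss_hard = F.cross_entropy(cls_score, gt_label)
loss_soft = F.cross_entropy(cls_score, gt_score)          # class probabilities as targets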
+from typing import List, Sequence, Tuple + +import torch +import torch.nn as nn + +from mmpretrain.evaluation.metrics import Accuracy +from mmpretrain.registry import MODELS +from mmpretrain.structures import DataSample +from .cls_head import ClsHead + + +@MODELS.register_module() +class ConformerHead(ClsHead): + """Linear classifier head. + + Args: + num_classes (int): Number of categories excluding the background + category. + in_channels (Sequence[int]): Number of channels in the input + feature map. + init_cfg (dict | optional): The extra init config of layers. + Defaults to use ``dict(type='Normal', layer='Linear', std=0.01)``. + """ + + def __init__( + self, + num_classes: int, + in_channels: Sequence[int], # [conv_dim, trans_dim] + init_cfg: dict = dict(type='TruncNormal', layer='Linear', std=.02), + **kwargs): + super(ConformerHead, self).__init__(init_cfg=init_cfg, **kwargs) + + self.in_channels = in_channels + self.num_classes = num_classes + self.init_cfg = init_cfg + + if self.num_classes <= 0: + raise ValueError( + f'num_classes={num_classes} must be a positive integer') + + self.conv_cls_head = nn.Linear(self.in_channels[0], num_classes) + self.trans_cls_head = nn.Linear(self.in_channels[1], num_classes) + + def pre_logits(self, feats: Tuple[List[torch.Tensor]]) -> torch.Tensor: + """The process before the final classification head. + + The input ``feats`` is a tuple of tensor, and each tensor is the + feature of a backbone stage. In ``ConformerHead``, we just obtain the + feature of the last stage. + """ + # The ConformerHead doesn't have other module, + # just return after unpacking. + return feats[-1] + + def forward(self, feats: Tuple[List[torch.Tensor]]) -> Tuple[torch.Tensor]: + """The forward process.""" + x = self.pre_logits(feats) + # There are two outputs in the Conformer model + assert len(x) == 2 + + conv_cls_score = self.conv_cls_head(x[0]) + tran_cls_score = self.trans_cls_head(x[1]) + + return conv_cls_score, tran_cls_score + + def predict(self, + feats: Tuple[List[torch.Tensor]], + data_samples: List[DataSample] = None) -> List[DataSample]: + """Inference without augmentation. + + Args: + feats (tuple[Tensor]): The features extracted from the backbone. + Multiple stage inputs are acceptable but only the last stage + will be used to classify. The shape of every item should be + ``(num_samples, num_classes)``. + data_samples (List[DataSample], optional): The annotation + data of every samples. If not None, set ``pred_label`` of + the input data samples. Defaults to None. + + Returns: + List[DataSample]: A list of data samples which contains the + predicted results. + """ + # The part can be traced by torch.fx + conv_cls_score, tran_cls_score = self(feats) + cls_score = conv_cls_score + tran_cls_score + + # The part can not be traced by torch.fx + predictions = self._get_predictions(cls_score, data_samples) + return predictions + + def _get_loss(self, cls_score: Tuple[torch.Tensor], + data_samples: List[DataSample], **kwargs) -> dict: + """Unpack data samples and compute loss.""" + # Unpack data samples and pack targets + if 'gt_score' in data_samples[0]: + # Batch augmentation may convert labels to one-hot format scores. 
+ target = torch.stack([i.gt_score for i in data_samples]) + else: + target = torch.cat([i.gt_label for i in data_samples]) + + # compute loss + losses = dict() + loss = sum([ + self.loss_module( + score, target, avg_factor=score.size(0), **kwargs) + for score in cls_score + ]) + losses['loss'] = loss + + # compute accuracy + if self.cal_acc: + assert target.ndim == 1, 'If you enable batch augmentation ' \ + 'like mixup during training, `cal_acc` is pointless.' + acc = Accuracy.calculate( + cls_score[0] + cls_score[1], target, topk=self.topk) + losses.update( + {f'accuracy_top-{k}': a + for k, a in zip(self.topk, acc)}) + + return losses diff --git a/mmpretrain/models/heads/contrastive_head.py b/mmpretrain/models/heads/contrastive_head.py new file mode 100644 index 0000000000000000000000000000000000000000..6d1474aed59e2912ca4b5c24ce5a2430f50cb913 --- /dev/null +++ b/mmpretrain/models/heads/contrastive_head.py @@ -0,0 +1,50 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import List, Optional, Union + +import torch +from mmengine.model import BaseModule + +from mmpretrain.registry import MODELS + + +@MODELS.register_module() +class ContrastiveHead(BaseModule): + """Head for contrastive learning. + + The contrastive loss is implemented in this head and is used in SimCLR, + MoCo, DenseCL, etc. + + Args: + loss (dict): Config dict for module of loss functions. + temperature (float): The temperature hyper-parameter that + controls the concentration level of the distribution. + Defaults to 0.1. + init_cfg (dict or List[dict], optional): Initialization config dict. + Defaults to None. + """ + + def __init__(self, + loss: dict, + temperature: float = 0.1, + init_cfg: Optional[Union[dict, List[dict]]] = None) -> None: + super().__init__(init_cfg=init_cfg) + self.loss_module = MODELS.build(loss) + self.temperature = temperature + + def loss(self, pos: torch.Tensor, neg: torch.Tensor) -> torch.Tensor: + """Forward function to compute contrastive loss. + + Args: + pos (torch.Tensor): Nx1 positive similarity. + neg (torch.Tensor): Nxk negative similarity. + + Returns: + torch.Tensor: The contrastive loss. + """ + N = pos.size(0) + logits = torch.cat((pos, neg), dim=1) + logits /= self.temperature + labels = torch.zeros((N, ), dtype=torch.long).to(pos.device) + + loss = self.loss_module(logits, labels) + return loss diff --git a/mmpretrain/models/heads/deit_head.py b/mmpretrain/models/heads/deit_head.py new file mode 100644 index 0000000000000000000000000000000000000000..a96f6e152711d23646e02312218c0c85e96300e8 --- /dev/null +++ b/mmpretrain/models/heads/deit_head.py @@ -0,0 +1,72 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import warnings +from typing import List, Tuple + +import torch +import torch.nn as nn + +from mmpretrain.registry import MODELS +from .vision_transformer_head import VisionTransformerClsHead + + +@MODELS.register_module() +class DeiTClsHead(VisionTransformerClsHead): + """Distilled Vision Transformer classifier head. + + Comparing with the :class:`VisionTransformerClsHead`, this head adds an + extra linear layer to handle the dist token. The final classification score + is the average of both linear transformation results of ``cls_token`` and + ``dist_token``. + + Args: + num_classes (int): Number of categories excluding the background + category. + in_channels (int): Number of channels in the input feature map. + hidden_dim (int, optional): Number of the dimensions for hidden layer. + Defaults to None, which means no extra hidden layer. 
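# Illustrative sketch (editorial, not part of the patched files): the InfoNCE
# layout expected by ContrastiveHead.loss above. The positive similarity sits in
# column 0 of the concatenated logits, so every label is 0; sizes and the use of
# plain cross entropy as the configured loss are assumptions.
import torch
import torch.nn.functional as F

N, k, temperature = 8, 16, 0.1
pos = torch.randn(N, 1)                        # Nx1 positive similarities
neg = torch.randn(N, k)                        # Nxk negative similarities
logits = torch.cat((pos, neg), dim=1) / temperature
labels = torch.zeros(N, dtype=torch.long)      # positives always in column 0
loss = F.cross_entropy(logits, labels)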
+ act_cfg (dict): The activation config. Only available during + pre-training. Defaults to ``dict(type='Tanh')``. + init_cfg (dict): The extra initialization configs. Defaults to + ``dict(type='Constant', layer='Linear', val=0)``. + """ + + def _init_layers(self): + """"Init extra hidden linear layer to handle dist token if exists.""" + super(DeiTClsHead, self)._init_layers() + if self.hidden_dim is None: + head_dist = nn.Linear(self.in_channels, self.num_classes) + else: + head_dist = nn.Linear(self.hidden_dim, self.num_classes) + self.layers.add_module('head_dist', head_dist) + + def pre_logits(self, + feats: Tuple[List[torch.Tensor]]) -> Tuple[torch.Tensor]: + """The process before the final classification head. + + The input ``feats`` is a tuple of list of tensor, and each tensor is + the feature of a backbone stage. In ``DeiTClsHead``, we obtain the + feature of the last stage and forward in hidden layer if exists. + """ + feat = feats[-1] # Obtain feature of the last scale. + # For backward-compatibility with the previous ViT output + if len(feat) == 3: + _, cls_token, dist_token = feat + else: + cls_token, dist_token = feat + if self.hidden_dim is None: + return cls_token, dist_token + else: + cls_token = self.layers.act(self.layers.pre_logits(cls_token)) + dist_token = self.layers.act(self.layers.pre_logits(dist_token)) + return cls_token, dist_token + + def forward(self, feats: Tuple[List[torch.Tensor]]) -> torch.Tensor: + """The forward process.""" + if self.training: + warnings.warn('MMPretrain cannot train the ' + 'distilled version DeiT.') + cls_token, dist_token = self.pre_logits(feats) + # The final classification head. + cls_score = (self.layers.head(cls_token) + + self.layers.head_dist(dist_token)) / 2 + return cls_score diff --git a/mmpretrain/models/heads/efficientformer_head.py b/mmpretrain/models/heads/efficientformer_head.py new file mode 100644 index 0000000000000000000000000000000000000000..09aa05b28533028723f599881777939a48982319 --- /dev/null +++ b/mmpretrain/models/heads/efficientformer_head.py @@ -0,0 +1,89 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import List, Tuple + +import torch +import torch.nn as nn + +from mmpretrain.registry import MODELS +from mmpretrain.structures import DataSample +from .cls_head import ClsHead + + +@MODELS.register_module() +class EfficientFormerClsHead(ClsHead): + """EfficientFormer classifier head. + + Args: + num_classes (int): Number of categories excluding the background + category. + in_channels (int): Number of channels in the input feature map. + distillation (bool): Whether use a additional distilled head. + Defaults to True. + init_cfg (dict): The extra initialization configs. Defaults to + ``dict(type='Normal', layer='Linear', std=0.01)``. + """ + + def __init__(self, + num_classes, + in_channels, + distillation=True, + init_cfg=dict(type='Normal', layer='Linear', std=0.01), + *args, + **kwargs): + super(EfficientFormerClsHead, self).__init__( + init_cfg=init_cfg, *args, **kwargs) + self.in_channels = in_channels + self.num_classes = num_classes + self.dist = distillation + + if self.num_classes <= 0: + raise ValueError( + f'num_classes={num_classes} must be a positive integer') + + self.head = nn.Linear(self.in_channels, self.num_classes) + if self.dist: + self.dist_head = nn.Linear(self.in_channels, self.num_classes) + + def forward(self, feats: Tuple[torch.Tensor]) -> torch.Tensor: + """The forward process.""" + pre_logits = self.pre_logits(feats) + # The final classification head. 
+ cls_score = self.head(pre_logits) + + if self.dist: + cls_score = (cls_score + self.dist_head(pre_logits)) / 2 + return cls_score + + def pre_logits(self, feats: Tuple[torch.Tensor]) -> torch.Tensor: + """The process before the final classification head. + + The input ``feats`` is a tuple of tensor, and each tensor is the + feature of a backbone stage. In :obj`EfficientFormerClsHead`, we just + obtain the feature of the last stage. + """ + # The EfficientFormerClsHead doesn't have other module, just return + # after unpacking. + return feats[-1] + + def loss(self, feats: Tuple[torch.Tensor], data_samples: List[DataSample], + **kwargs) -> dict: + """Calculate losses from the classification score. + + Args: + feats (tuple[Tensor]): The features extracted from the backbone. + Multiple stage inputs are acceptable but only the last stage + will be used to classify. The shape of every item should be + ``(num_samples, num_classes)``. + data_samples (List[DataSample]): The annotation data of + every samples. + **kwargs: Other keyword arguments to forward the loss module. + + Returns: + dict[str, Tensor]: a dictionary of loss components + """ + if self.dist: + raise NotImplementedError( + "MMPretrain doesn't support to train" + ' the distilled version EfficientFormer.') + else: + return super().loss(feats, data_samples, **kwargs) diff --git a/mmpretrain/models/heads/grounding_head.py b/mmpretrain/models/heads/grounding_head.py new file mode 100644 index 0000000000000000000000000000000000000000..a47512ef5930dde51a7023a07c3412d759b6bd8c --- /dev/null +++ b/mmpretrain/models/heads/grounding_head.py @@ -0,0 +1,217 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Optional + +import torch +import torch.nn.functional as F +from mmengine.model import BaseModule + +from mmpretrain.models.utils.box_utils import (box_cxcywh_to_xyxy, + generalized_box_iou) +from mmpretrain.registry import MODELS, TOKENIZER + + +@MODELS.register_module() +class GroundingHead(BaseModule): + """bbox Coordination generation head for multi-modal pre-trained task, + adapted by BLIP. Normally used for visual grounding. + + Args: + loss: dict, + decoder: dict, + init_cfg (dict, optional): the config to control the initialization. + Defaults to None. 
+ """ + + def __init__( + self, + decoder: dict = None, + tokenizer: dict = None, + box_l1_loss_coeff=4.0, + box_giou_loss_coeff=2.0, + init_cfg: Optional[dict] = None, + ) -> None: + super(GroundingHead, self).__init__(init_cfg=init_cfg) + ''' init the decoder from med_config''' + self.decoder = None + if decoder: + self.decoder = MODELS.build(decoder) + self.loss_fn = torch.nn.CrossEntropyLoss( + reduction='none', ignore_index=-100) + + self.box_l1_loss_coeff = box_l1_loss_coeff + self.box_giou_loss_coeff = box_giou_loss_coeff + + if isinstance(tokenizer, dict): + self.tokenizer = TOKENIZER.build(tokenizer) + else: + self.tokenizer = tokenizer + + self.image_res = 640 + prefix_ids = torch.tensor( + self.tokenizer.convert_tokens_to_ids(['[unused339]'])) + target_ids = torch.tensor( + self.tokenizer.convert_tokens_to_ids( + [f'[unused{340+_}]' for _ in range(self.image_res + 1)])) + self.register_buffer('prefix_ids', prefix_ids) + self.register_buffer('target_ids', target_ids) + + bbox_prob_mask = torch.zeros(len(self.tokenizer)) + bbox_prob_mask[self.target_ids[0]:self.target_ids[-1] + 1] = 1 + bbox_prob_mask = (1.0 - bbox_prob_mask) * -10000.0 + self.register_buffer('bbox_prob_mask', bbox_prob_mask) + self.bin_start_idx = self.target_ids[0] + + def forward(self, text_embedding, text_embedding_mask, + encoder_hidden_states, encoder_attention_mask): + + # localize prompt token, text embedding + + merged_encode_hs = torch.cat([encoder_hidden_states, text_embedding], + 1) + merge_att_mask = torch.cat( + [encoder_attention_mask, text_embedding_mask], 1) + + loc_prompt = self.prompt.weight.T + loc_prompt = torch.repeat_interleave(loc_prompt, + merge_att_mask.shape[0], + 0).unsqueeze(1) + + loc_prompt_mask = torch.ones(loc_prompt.shape[:-1]).long().to( + loc_prompt.device) + + decoder_out = self.decoder( + inputs_embeds=loc_prompt, + attention_mask=loc_prompt_mask, + encoder_hidden_states=merged_encode_hs, + encoder_attention_mask=merge_att_mask, + output_hidden_states=True, + labels=None, + ) + decoder_hs = decoder_out.hidden_states[-1][:, 0, :] + box_pred = self.box_head(decoder_hs) + return decoder_out, decoder_hs, box_pred + + def loss(self, + text_embedding, + text_embedding_mask, + encoder_hidden_states, + encoder_attention_mask, + decoder_targets, + return_scores=False): + """Calculate losses from the extracted features. + + Args: + feats (dict): The features extracted from the backbone. + data_samples (List[BaseDataElement]): The annotation data of + every samples. 
+ + Returns: + dict[str, Tensor]: a dictionary of loss components + """ + + merged_encode_hs = torch.cat([encoder_hidden_states, text_embedding], + 1) + merge_att_mask = torch.cat( + [encoder_attention_mask, text_embedding_mask], 1) + + answer_targets = (decoder_targets * + self.image_res).long() + self.bin_start_idx + prefix_ids = torch.repeat_interleave(self.prefix_ids, + merge_att_mask.shape[0], + 0).unsqueeze(-1) + prefix_ids = torch.cat([prefix_ids, answer_targets], dim=1) + + answer_output = self.decoder( + prefix_ids, + encoder_hidden_states=merged_encode_hs, + encoder_attention_mask=merge_att_mask, + labels=None, + return_dict=True, + ) + prob_mask = self.bbox_prob_mask.view(1, 1, + self.bbox_prob_mask.shape[-1]) + prediction_scores = answer_output.logits + prob_mask + + shifted_prediction_scores = prediction_scores[:, :-1, :].contiguous() + labels = prefix_ids[:, 1:].contiguous() + vocab_size = len(self.tokenizer) + loss_seq_init = self.loss_fn( + shifted_prediction_scores.view(-1, vocab_size), labels.view(-1)) + + with torch.no_grad(): + pred_box = (torch.argmax( + prediction_scores[:, :-1, :].contiguous(), dim=-1) - + self.bin_start_idx) / self.image_res + weight_bbox = F.l1_loss( + pred_box, decoder_targets, reduction='none').clamp( + 0, 5) * self.box_l1_loss_coeff + weight_giou = (1 - torch.diag( + generalized_box_iou( + box_cxcywh_to_xyxy(pred_box), + box_cxcywh_to_xyxy(decoder_targets))) + ) * self.box_giou_loss_coeff + bs = text_embedding.shape[0] + loss_seq = loss_seq_init[:].view(bs, -1, 4) + loss_seq = loss_seq * weight_bbox + loss_seq = loss_seq * weight_giou.unsqueeze(1) + + loss_seq = loss_seq.mean() + + losses = { + 'loss_seq': loss_seq, + 'loss_seq_init': loss_seq_init.mean(), + 'loss': loss_seq, + 'box_l1': weight_bbox.mean(-1).mean().detach(), + 'box_giou': weight_giou.mean().detach() + } + + return losses + + def predict( + self, + text_embedding, + text_embedding_mask, + encoder_hidden_states, + encoder_attention_mask, + ): + """Generates the bbox coordinates at inference time.""" + + merged_encode_hs = torch.cat([encoder_hidden_states, text_embedding], + 1) + merge_att_mask = torch.cat( + [encoder_attention_mask, text_embedding_mask], 1) + + prefix_ids = torch.repeat_interleave(self.prefix_ids, + merge_att_mask.shape[0], + 0).unsqueeze(-1) + + for _ in range(4): + decoder_output = self.decoder( + prefix_ids, + encoder_hidden_states=merged_encode_hs, + encoder_attention_mask=merge_att_mask, + labels=None, + return_dict=True, + ) + prob_mask = self.bbox_prob_mask.view(1, 1, + self.bbox_prob_mask.shape[-1]) + prediction_scores = decoder_output.logits + prob_mask + + prefix_ids = torch.cat([ + prefix_ids, + torch.argmax(prediction_scores[:, -1, :], dim=-1).unsqueeze(1) + ], + dim=1) + + pred_box = self.process_bbox(prefix_ids[:, 1:]) # xywh 0-1 to xyxy 0-1 + + return pred_box + + @torch.no_grad() + def process_bbox(self, bbox): + bbox = bbox - self.bin_start_idx + bbox = torch.true_divide(bbox, self.image_res) + bbox = box_cxcywh_to_xyxy(bbox) + bbox = torch.clip(bbox, 0, 1) + assert torch.all(bbox <= 1) + return bbox diff --git a/mmpretrain/models/heads/itc_head.py b/mmpretrain/models/heads/itc_head.py new file mode 100644 index 0000000000000000000000000000000000000000..006d52c76d9317809c7bb07519f4efb18716d8bd --- /dev/null +++ b/mmpretrain/models/heads/itc_head.py @@ -0,0 +1,157 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
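# Illustrative sketch (editorial, not part of the patched files): the coordinate
# quantization used by GroundingHead above. Each normalized (cx, cy, w, h) value
# is mapped to one of image_res + 1 reserved "[unused...]" vocabulary bins, and
# process_bbox recovers it by dividing back, so the round-trip error is at most
# 1 / image_res. The box values are made up.
import torch

image_res = 640
box = torch.tensor([[0.53, 0.41, 0.27, 0.33]])   # normalized cx, cy, w, h
bins = (box * image_res).long()                  # bin offsets in 0..image_res
decoded = torch.true_divide(bins, image_res)     # what process_bbox recovers
assert (decoded - box).abs().max() <= 1.0 / image_res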
+from typing import Optional, Tuple + +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmengine.dist import all_gather +from mmengine.model import BaseModule + +from mmpretrain.registry import MODELS + + +@MODELS.register_module() +class ITCHead(BaseModule): + """Image-text matching head for multi-modal pre-trained task. Adapted by + BLIP, ALBEF. Normally used for retrieval task. + + Args: + embed_dim (int): Embed channel size for queue. + queue_size (int): Queue size for image and text. Defaults to 57600. + temperature (float): Temperature to calculate the similarity. + Defaults to 0.07. + use_distill (bool): Whether to use distill to calculate loss. + Defaults to True. + alpha (float): Weight for momentum similarity. Defaults to 0.4. + init_cfg (dict, optional): the config to control the initialization. + Defaults to None. + """ + + def __init__(self, + embed_dim: int, + queue_size: int = 57600, + temperature: float = 0.07, + use_distill: bool = True, + alpha: float = 0.4, + init_cfg: Optional[dict] = None): + super(ITCHead, self).__init__(init_cfg=init_cfg) + self.temp = nn.Parameter(temperature * torch.ones([])) + self.use_distill = use_distill + if self.use_distill: + # create the queue + self.register_buffer('image_queue', + torch.randn(embed_dim, queue_size)) + self.register_buffer('text_queue', + torch.randn(embed_dim, queue_size)) + self.register_buffer('idx_queue', torch.full((1, queue_size), + -100)) + self.register_buffer('queue_ptr', torch.zeros(1, dtype=torch.long)) + + self.image_queue = F.normalize(self.image_queue, dim=0) + self.text_queue = F.normalize(self.text_queue, dim=0) + + self.queue_size = queue_size + # This value will be warmup by `WarmupParamHook` + self.alpha = alpha + + def forward(self, feats: Tuple[torch.Tensor]) -> torch.Tensor: + """The forward process.""" + return feats[-1] + + def loss(self, feats: Tuple[torch.Tensor], data_samples, **kwargs) -> dict: + """Calculate losses from the classification score. + + Args: + feats (tuple[Tensor]): The features extracted from the backbone. + Multiple stage inputs are acceptable but only the last stage + will be used to classify. The shape of every item should be + ``(num_samples, num_classes)``. + data_samples (List[ClsDataSample]): The annotation data of + every samples. + **kwargs: Other keyword arguments to forward the loss module. 
+ + Returns: + dict[str, Tensor]: a dictionary of loss components + """ + + # The part can be traced by torch.fx + img_feats, text_feats, img_feats_m, text_feats_m = self(feats) + + img_feats_all = torch.cat( + [img_feats_m.t(), + self.image_queue.clone().detach()], dim=1) + text_feats_all = torch.cat( + [text_feats_m.t(), + self.text_queue.clone().detach()], dim=1) + + # The part can not be traced by torch.fx + losses = self._get_loss(img_feats, text_feats, img_feats_m, + text_feats_m, img_feats_all, text_feats_all, + data_samples, **kwargs) + return losses + + def _get_loss(self, img_feats, text_feats, img_feats_m, text_feats_m, + img_feats_all, text_feats_all, data_samples, **kwargs): + """Unpack data samples and compute loss.""" + + idx = torch.tensor([ds.image_id + for ds in data_samples]).to(img_feats.device) + idx = idx.view(-1, 1) + idx_all = torch.cat([idx.t(), self.idx_queue.clone().detach()], dim=1) + pos_idx = torch.eq(idx, idx_all).float() + sim_targets = pos_idx / pos_idx.sum(1, keepdim=True) + + with torch.no_grad(): + if self.use_distill: + sim_i2t_m = img_feats_m @ text_feats_all / self.temp + sim_t2i_m = text_feats_m @ img_feats_all / self.temp + + sim_i2t_targets = ( + self.alpha * F.softmax(sim_i2t_m, dim=1) + + (1 - self.alpha) * sim_targets) + sim_t2i_targets = ( + self.alpha * F.softmax(sim_t2i_m, dim=1) + + (1 - self.alpha) * sim_targets) + + sim_i2t = img_feats @ text_feats_all / self.temp + sim_t2i = text_feats @ img_feats_all / self.temp + + if self.use_distill: + loss_i2t = -torch.sum( + F.log_softmax(sim_i2t, dim=1) * sim_i2t_targets, dim=1).mean() + loss_t2i = -torch.sum( + F.log_softmax(sim_t2i, dim=1) * sim_t2i_targets, dim=1).mean() + else: + loss_i2t = -torch.sum( + F.log_softmax(sim_i2t, dim=1) * sim_targets, dim=1).mean() + loss_t2i = -torch.sum( + F.log_softmax(sim_t2i, dim=1) * sim_targets, dim=1).mean() + + # compute loss + losses = dict() + + losses['itc_loss'] = (loss_i2t + loss_t2i) / 2 + self._dequeue_and_enqueue(img_feats_m, text_feats_m, idx) + return losses + + @torch.no_grad() + def _dequeue_and_enqueue(self, image_feat, text_feat, idxs=None): + # gather keys before updating queue + image_feats = torch.cat(all_gather(image_feat)) + text_feats = torch.cat(all_gather(text_feat)) + + batch_size = image_feats.shape[0] + + ptr = int(self.queue_ptr) + assert self.queue_size % batch_size == 0 # for simplicity + + # replace the keys at ptr (dequeue and enqueue) + self.image_queue[:, ptr:ptr + batch_size] = image_feats.T + self.text_queue[:, ptr:ptr + batch_size] = text_feats.T + + if idxs is not None: + idxs = torch.cat(all_gather(idxs)) + self.idx_queue[:, ptr:ptr + batch_size] = idxs.T + + ptr = (ptr + batch_size) % self.queue_size # move pointer + self.queue_ptr[0] = ptr diff --git a/mmpretrain/models/heads/itm_head.py b/mmpretrain/models/heads/itm_head.py new file mode 100644 index 0000000000000000000000000000000000000000..c7b42f3f684e2ffefd085b39360706a339017f4c --- /dev/null +++ b/mmpretrain/models/heads/itm_head.py @@ -0,0 +1,117 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
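# Illustrative sketch (editorial, not part of the patched files): the distilled
# similarity targets built in ITCHead._get_loss above. Soft targets are a convex
# mix (weight alpha) of the momentum branch's softmax and the exact-match
# targets, so each row still sums to 1. Sizes assume a batch of 4 and a queue of
# 100 entries.
import torch
import torch.nn.functional as F

temp, alpha = 0.07, 0.4
img_feats_m = F.normalize(torch.randn(4, 256), dim=1)        # momentum image features
text_feats_all = F.normalize(torch.randn(256, 104), dim=0)   # batch + queue, stored column-wise
sim_targets = torch.eye(4, 104)                              # 1 where the image ids match

sim_i2t_m = img_feats_m @ text_feats_all / temp
sim_i2t_targets = alpha * F.softmax(sim_i2t_m, dim=1) + (1 - alpha) * sim_targets
assert torch.allclose(sim_i2t_targets.sum(1), torch.ones(4))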
+from typing import Optional, Tuple + +import torch +import torch.nn as nn +from mmengine.model import BaseModule + +from mmpretrain.evaluation import Accuracy +from mmpretrain.registry import MODELS + + +class Pooler(nn.Module): + + def __init__(self, hidden_size): + super().__init__() + self.dense = nn.Linear(hidden_size, hidden_size) + self.activation = nn.Tanh() + + def forward(self, hidden_states): + first_token_tensor = hidden_states[:, 0] + pooled_output = self.dense(first_token_tensor) + pooled_output = self.activation(pooled_output) + return pooled_output + + +@MODELS.register_module() +class ITMHead(BaseModule): + """Image-text matching head for multi-modal pre-trained task. Adapted from + BLIP and FLAVA. + + Args: + hidden_size (int): Hidden channel size of the input features. + with_pooler (bool): Whether a pooler is added. Defaults to True. + loss (dict): Config of the matching loss. Defaults to + ``dict(type='CrossEntropyLoss', loss_weight=1.0)``. + cal_acc (bool): Whether to calculate accuracy during training. + If you use batch augmentations like Mixup and CutMix during + training, it is pointless to calculate accuracy. + Defaults to False. + init_cfg (dict, optional): the config to control the initialization. + Defaults to None. + """ + + def __init__(self, + hidden_size: int, + with_pooler: bool = True, + loss: dict = dict(type='CrossEntropyLoss', loss_weight=1.0), + cal_acc: bool = False, + init_cfg: Optional[dict] = None): + super(ITMHead, self).__init__(init_cfg=init_cfg) + self.hidden_size = hidden_size + + if with_pooler: + self.pooler = Pooler(hidden_size=self.hidden_size) + else: + self.pooler = nn.Identity() + self.fc = nn.Linear(self.hidden_size, 2) + + self.loss_module = MODELS.build(loss) + self.cal_acc = cal_acc + + def forward(self, feats: Tuple[torch.Tensor]) -> torch.Tensor: + """The forward process.""" + pre_logits = self.pooler(feats[-1]) + itm_logits = self.fc(pre_logits) + return itm_logits + + def loss(self, feats: Tuple[torch.Tensor], data_samples, **kwargs) -> dict: + """Calculate losses from the classification score. + + Args: + feats (tuple[Tensor]): The features extracted from the backbone. + Multiple stage inputs are acceptable but only the last stage + will be used to classify. The shape of every item should be + ``(num_samples, num_classes)``. + data_samples (List[DataSample]): The annotation data of + every sample. + **kwargs: Other keyword arguments to forward the loss module.
+ + Returns: + dict[str, Tensor]: a dictionary of loss components + """ + + # The part can be traced by torch.fx + itm_logits = self(feats) + + # deal with query + if itm_logits.ndim == 3: + itm_logits = itm_logits.mean(dim=1) + + # The part can not be traced by torch.fx + losses = self._get_loss(itm_logits, data_samples, **kwargs) + return losses + + def _get_loss(self, itm_logits: torch.Tensor, data_samples, **kwargs): + """Unpack data samples and compute loss.""" + # Unpack data samples and pack targets + # use `itm_label` in here temporarily + target = torch.tensor([i.is_matched + for i in data_samples]).to(itm_logits.device) + + # compute loss + losses = dict() + + loss = self.loss_module( + itm_logits, target.long(), avg_factor=itm_logits.size(0), **kwargs) + losses['itm_loss'] = loss + + # compute accuracy + if self.cal_acc: + # topk is meaningless for matching task + acc = Accuracy.calculate(itm_logits, target) + # acc is warpped with two lists of topk and thrs + # which are unnecessary here + losses.update({'itm_accuracy': acc[0][0]}) + + return losses diff --git a/mmpretrain/models/heads/latent_heads.py b/mmpretrain/models/heads/latent_heads.py new file mode 100644 index 0000000000000000000000000000000000000000..a9662b5d91c8534d1a2a7834e4b9e3ec37f552c1 --- /dev/null +++ b/mmpretrain/models/heads/latent_heads.py @@ -0,0 +1,94 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import List, Optional, Tuple, Union + +import torch +import torch.nn as nn +from mmengine.dist import all_reduce, get_world_size +from mmengine.model import BaseModule + +from mmpretrain.registry import MODELS + + +@MODELS.register_module() +class LatentPredictHead(BaseModule): + """Head for latent feature prediction. + + This head builds a predictor, which can be any registered neck component. + For example, BYOL and SimSiam call this head and build NonLinearNeck. + It also implements similarity loss between two forward features. + + Args: + loss (dict): Config dict for the loss. + predictor (dict): Config dict for the predictor. + init_cfg (dict or List[dict], optional): Initialization config dict. + Defaults to None. + """ + + def __init__(self, + loss: dict, + predictor: dict, + init_cfg: Optional[Union[dict, List[dict]]] = None) -> None: + super().__init__(init_cfg=init_cfg) + self.loss_module = MODELS.build(loss) + self.predictor = MODELS.build(predictor) + + def loss(self, input: torch.Tensor, + target: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]: + """Forward head. + + Args: + input (torch.Tensor): NxC input features. + target (torch.Tensor): NxC target features. + + Returns: + torch.Tensor: The latent predict loss. + """ + pred = self.predictor([input])[0] + target = target.detach() + + loss = self.loss_module(pred, target) + + return loss + + +@MODELS.register_module() +class LatentCrossCorrelationHead(BaseModule): + """Head for latent feature cross correlation. + + Part of the code is borrowed from `script + `_. + + Args: + in_channels (int): Number of input channels. + loss (dict): Config dict for module of loss functions. + init_cfg (dict or List[dict], optional): Initialization config dict. + Defaults to None. 
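# Illustrative sketch (editorial, not part of the patched files): the asymmetry
# behind LatentPredictHead above (BYOL / SimSiam style). Only the online branch
# goes through a predictor and the target branch is detached; the 2-layer MLP
# and the negative cosine similarity below are assumptions standing in for the
# registered predictor neck and the configured loss.
import torch
import torch.nn as nn
import torch.nn.functional as F

predictor = nn.Sequential(nn.Linear(256, 4096), nn.ReLU(inplace=True),
                          nn.Linear(4096, 256))
online = torch.randn(8, 256, requires_grad=True)
target = torch.randn(8, 256).detach()             # no gradient to the target encoder

pred = predictor(online)
loss = -F.cosine_similarity(pred, target, dim=1).mean()
loss.backward()                                   # gradients reach only the online branch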
+ """ + + def __init__(self, + in_channels: int, + loss: dict, + init_cfg: Optional[Union[dict, List[dict]]] = None) -> None: + super().__init__(init_cfg=init_cfg) + self.world_size = get_world_size() + self.bn = nn.BatchNorm1d(in_channels, affine=False) + self.loss_module = MODELS.build(loss) + + def loss(self, input: torch.Tensor, target: torch.Tensor) -> torch.Tensor: + """Forward head. + + Args: + input (torch.Tensor): NxC input features. + target (torch.Tensor): NxC target features. + + Returns: + torch.Tensor: The cross correlation loss. + """ + # cross-correlation matrix + cross_correlation_matrix = self.bn(input).T @ self.bn(target) + cross_correlation_matrix.div_(input.size(0) * self.world_size) + + all_reduce(cross_correlation_matrix) + + loss = self.loss_module(cross_correlation_matrix) + return loss diff --git a/mmpretrain/models/heads/levit_head.py b/mmpretrain/models/heads/levit_head.py new file mode 100644 index 0000000000000000000000000000000000000000..a74d7ecc52caca0adca642e528f2861f9a0e5833 --- /dev/null +++ b/mmpretrain/models/heads/levit_head.py @@ -0,0 +1,81 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +import torch.nn as nn +from mmengine.model import BaseModule + +from mmpretrain.models.heads import ClsHead +from mmpretrain.registry import MODELS +from ..utils import build_norm_layer + + +class BatchNormLinear(BaseModule): + + def __init__(self, in_channels, out_channels, norm_cfg=dict(type='BN1d')): + super(BatchNormLinear, self).__init__() + self.bn = build_norm_layer(norm_cfg, in_channels) + self.linear = nn.Linear(in_channels, out_channels) + + @torch.no_grad() + def fuse(self): + w = self.bn.weight / (self.bn.running_var + self.bn.eps)**0.5 + b = self.bn.bias - self.bn.running_mean * \ + self.bn.weight / (self.bn.running_var + self.bn.eps) ** 0.5 + w = self.linear.weight * w[None, :] + b = (self.linear.weight @ b[:, None]).view(-1) + self.linear.bias + + self.linear.weight.data.copy_(w) + self.linear.bias.data.copy_(b) + return self.linear + + def forward(self, x): + x = self.bn(x) + x = self.linear(x) + return x + + +def fuse_parameters(module): + for child_name, child in module.named_children(): + if hasattr(child, 'fuse'): + setattr(module, child_name, child.fuse()) + else: + fuse_parameters(child) + + +@MODELS.register_module() +class LeViTClsHead(ClsHead): + + def __init__(self, + num_classes=1000, + distillation=True, + in_channels=None, + deploy=False, + **kwargs): + super(LeViTClsHead, self).__init__(**kwargs) + self.num_classes = num_classes + self.distillation = distillation + self.deploy = deploy + self.head = BatchNormLinear(in_channels, num_classes) + if distillation: + self.head_dist = BatchNormLinear(in_channels, num_classes) + + if self.deploy: + self.switch_to_deploy(self) + + def switch_to_deploy(self): + if self.deploy: + return + fuse_parameters(self) + self.deploy = True + + def forward(self, x): + x = self.pre_logits(x) + if self.distillation: + x = self.head(x), self.head_dist(x) # 2 16 384 -> 2 1000 + if not self.training: + x = (x[0] + x[1]) / 2 + else: + raise NotImplementedError("MMPretrain doesn't support " + 'training in distillation mode.') + else: + x = self.head(x) + return x diff --git a/mmpretrain/models/heads/linear_head.py b/mmpretrain/models/heads/linear_head.py new file mode 100644 index 0000000000000000000000000000000000000000..90b4c2b11eb0b2ba087fd438a32596cedb13cebb --- /dev/null +++ b/mmpretrain/models/heads/linear_head.py @@ -0,0 +1,63 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+from typing import Optional, Tuple + +import torch +import torch.nn as nn + +from mmpretrain.registry import MODELS +from .cls_head import ClsHead + + +@MODELS.register_module() +class LinearClsHead(ClsHead): + """Linear classifier head. + + Args: + num_classes (int): Number of categories excluding the background + category. + in_channels (int): Number of channels in the input feature map. + loss (dict): Config of classification loss. Defaults to + ``dict(type='CrossEntropyLoss', loss_weight=1.0)``. + topk (int | Tuple[int]): Top-k accuracy. Defaults to ``(1, )``. + cal_acc (bool): Whether to calculate accuracy during training. + If you use batch augmentations like Mixup and CutMix during + training, it is pointless to calculate accuracy. + Defaults to False. + init_cfg (dict, optional): the config to control the initialization. + Defaults to ``dict(type='Normal', layer='Linear', std=0.01)``. + """ + + def __init__(self, + num_classes: int, + in_channels: int, + init_cfg: Optional[dict] = dict( + type='Normal', layer='Linear', std=0.01), + **kwargs): + super(LinearClsHead, self).__init__(init_cfg=init_cfg, **kwargs) + + self.in_channels = in_channels + self.num_classes = num_classes + + if self.num_classes <= 0: + raise ValueError( + f'num_classes={num_classes} must be a positive integer') + + self.fc = nn.Linear(self.in_channels, self.num_classes) + + def pre_logits(self, feats: Tuple[torch.Tensor]) -> torch.Tensor: + """The process before the final classification head. + + The input ``feats`` is a tuple of tensor, and each tensor is the + feature of a backbone stage. In ``LinearClsHead``, we just obtain the + feature of the last stage. + """ + # The LinearClsHead doesn't have other module, just return after + # unpacking. + return feats[-1] + + def forward(self, feats: Tuple[torch.Tensor]) -> torch.Tensor: + """The forward process.""" + pre_logits = self.pre_logits(feats) + # The final classification head. + cls_score = self.fc(pre_logits) + return cls_score diff --git a/mmpretrain/models/heads/mae_head.py b/mmpretrain/models/heads/mae_head.py new file mode 100644 index 0000000000000000000000000000000000000000..1a5366d13b5f5bed0baedea06b9ff956ff5cf16b --- /dev/null +++ b/mmpretrain/models/heads/mae_head.py @@ -0,0 +1,103 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from mmengine.model import BaseModule + +from mmpretrain.registry import MODELS + + +@MODELS.register_module() +class MAEPretrainHead(BaseModule): + """Head for MAE Pre-training. + + Args: + loss (dict): Config of loss. + norm_pix_loss (bool): Whether or not normalize target. + Defaults to False. + patch_size (int): Patch size. Defaults to 16. + """ + + def __init__(self, + loss: dict, + norm_pix: bool = False, + patch_size: int = 16) -> None: + super().__init__() + self.norm_pix = norm_pix + self.patch_size = patch_size + self.loss_module = MODELS.build(loss) + + def patchify(self, imgs: torch.Tensor) -> torch.Tensor: + r"""Split images into non-overlapped patches. + + Args: + imgs (torch.Tensor): A batch of images. The shape should + be :math:`(B, 3, H, W)`. + + Returns: + torch.Tensor: Patchified images. The shape is + :math:`(B, L, \text{patch_size}^2 \times 3)`. 
+ """ + p = self.patch_size + assert imgs.shape[2] == imgs.shape[3] and imgs.shape[2] % p == 0 + + h = w = imgs.shape[2] // p + x = imgs.reshape(shape=(imgs.shape[0], 3, h, p, w, p)) + x = torch.einsum('nchpwq->nhwpqc', x) + x = x.reshape(shape=(imgs.shape[0], h * w, p**2 * 3)) + return x + + def unpatchify(self, x: torch.Tensor) -> torch.Tensor: + r"""Combine non-overlapped patches into images. + + Args: + x (torch.Tensor): The shape is + :math:`(B, L, \text{patch_size}^2 \times 3)`. + + Returns: + torch.Tensor: The shape is :math:`(B, 3, H, W)`. + """ + p = self.patch_size + h = w = int(x.shape[1]**.5) + assert h * w == x.shape[1] + + x = x.reshape(shape=(x.shape[0], h, w, p, p, 3)) + x = torch.einsum('nhwpqc->nchpwq', x) + imgs = x.reshape(shape=(x.shape[0], 3, h * p, h * p)) + return imgs + + def construct_target(self, target: torch.Tensor) -> torch.Tensor: + """Construct the reconstruction target. + + In addition to splitting images into tokens, this module will also + normalize the image according to ``norm_pix``. + + Args: + target (torch.Tensor): Image with the shape of B x 3 x H x W + + Returns: + torch.Tensor: Tokenized images with the shape of B x L x C + """ + target = self.patchify(target) + if self.norm_pix: + # normalize the target image + mean = target.mean(dim=-1, keepdim=True) + var = target.var(dim=-1, keepdim=True) + target = (target - mean) / (var + 1.e-6)**.5 + + return target + + def loss(self, pred: torch.Tensor, target: torch.Tensor, + mask: torch.Tensor) -> torch.Tensor: + """Generate loss. + + Args: + pred (torch.Tensor): The reconstructed image. + target (torch.Tensor): The target image. + mask (torch.Tensor): The mask of the target image. + + Returns: + torch.Tensor: The reconstruction loss. + """ + target = self.construct_target(target) + loss = self.loss_module(pred, target, mask) + + return loss diff --git a/mmpretrain/models/heads/margin_head.py b/mmpretrain/models/heads/margin_head.py new file mode 100644 index 0000000000000000000000000000000000000000..3a88bf8b3f4d19b233192a7578f49b750ff53ed5 --- /dev/null +++ b/mmpretrain/models/heads/margin_head.py @@ -0,0 +1,300 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import math +from typing import List, Optional, Sequence, Tuple, Union + +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmengine.fileio import list_from_file +from mmengine.runner import autocast +from mmengine.utils import is_seq_of + +from mmpretrain.models.losses import convert_to_one_hot +from mmpretrain.registry import MODELS +from mmpretrain.structures import DataSample +from .cls_head import ClsHead + + +class NormProduct(nn.Linear): + """An enhanced linear layer with k clustering centers to calculate product + between normalized input and linear weight. + + Args: + in_features (int): size of each input sample. + out_features (int): size of each output sample + k (int): The number of clustering centers. Defaults to 1. + bias (bool): Whether there is bias. If set to ``False``, the + layer will not learn an additive bias. Defaults to ``True``. + feature_norm (bool): Whether to normalize the input feature. + Defaults to ``True``. + weight_norm (bool):Whether to normalize the weight. + Defaults to ``True``. 
+ """ + + def __init__(self, + in_features: int, + out_features: int, + k=1, + bias: bool = False, + feature_norm: bool = True, + weight_norm: bool = True): + + super().__init__(in_features, out_features * k, bias=bias) + self.weight_norm = weight_norm + self.feature_norm = feature_norm + self.out_features = out_features + self.k = k + + def forward(self, input: torch.Tensor) -> torch.Tensor: + if self.feature_norm: + input = F.normalize(input) + if self.weight_norm: + weight = F.normalize(self.weight) + else: + weight = self.weight + cosine_all = F.linear(input, weight, self.bias) + + if self.k == 1: + return cosine_all + else: + cosine_all = cosine_all.view(-1, self.out_features, self.k) + cosine, _ = torch.max(cosine_all, dim=2) + return cosine + + +@MODELS.register_module() +class ArcFaceClsHead(ClsHead): + """ArcFace classifier head. + + A PyTorch implementation of paper `ArcFace: Additive Angular Margin Loss + for Deep Face Recognition `_ and + `Sub-center ArcFace: Boosting Face Recognition by Large-Scale Noisy Web + Faces `_ + + Example: + To use ArcFace in config files. + + 1. use vanilla ArcFace + + .. code:: python + + mode = dict( + backbone = xxx, + neck = xxxx, + head=dict( + type='ArcFaceClsHead', + num_classes=5000, + in_channels=1024, + loss = dict(type='CrossEntropyLoss', loss_weight=1.0), + init_cfg=None), + ) + + 2. use SubCenterArcFace with 3 sub-centers + + .. code:: python + + mode = dict( + backbone = xxx, + neck = xxxx, + head=dict( + type='ArcFaceClsHead', + num_classes=5000, + in_channels=1024, + num_subcenters=3, + loss = dict(type='CrossEntropyLoss', loss_weight=1.0), + init_cfg=None), + ) + + 3. use SubCenterArcFace With CountPowerAdaptiveMargins + + .. code:: python + + mode = dict( + backbone = xxx, + neck = xxxx, + head=dict( + type='ArcFaceClsHead', + num_classes=5000, + in_channels=1024, + num_subcenters=3, + loss = dict(type='CrossEntropyLoss', loss_weight=1.0), + init_cfg=None), + ) + + custom_hooks = [dict(type='SetAdaptiveMarginsHook')] + + + Args: + num_classes (int): Number of categories excluding the background + category. + in_channels (int): Number of channels in the input feature map. + num_subcenters (int): Number of subcenters. Defaults to 1. + scale (float): Scale factor of output logit. Defaults to 64.0. + margins (float): The penalty margin. Could be the fllowing formats: + + - float: The margin, would be same for all the categories. + - Sequence[float]: The category-based margins list. + - str: A '.txt' file path which contains a list. Each line + represents the margin of a category, and the number in the + i-th row indicates the margin of the i-th class. + + Defaults to 0.5. + easy_margin (bool): Avoid theta + m >= PI. Defaults to False. + loss (dict): Config of classification loss. Defaults to + ``dict(type='CrossEntropyLoss', loss_weight=1.0)``. + init_cfg (dict, optional): the config to control the initialization. + Defaults to None. 
+ """ + + def __init__(self, + num_classes: int, + in_channels: int, + num_subcenters: int = 1, + scale: float = 64., + margins: Optional[Union[float, Sequence[float], str]] = 0.50, + easy_margin: bool = False, + loss: dict = dict(type='CrossEntropyLoss', loss_weight=1.0), + init_cfg: Optional[dict] = None): + + super(ArcFaceClsHead, self).__init__(init_cfg=init_cfg) + if not isinstance(loss, nn.Module): + loss = MODELS.build(loss) + self.loss_module = loss + + assert num_subcenters >= 1 and num_classes >= 0 + self.in_channels = in_channels + self.num_classes = num_classes + self.num_subcenters = num_subcenters + self.scale = scale + self.easy_margin = easy_margin + + self.norm_product = NormProduct(in_channels, num_classes, + num_subcenters) + + if isinstance(margins, float): + margins = [margins] * num_classes + elif isinstance(margins, str) and margins.endswith('.txt'): + margins = [float(item) for item in list_from_file(margins)] + else: + assert is_seq_of(list(margins), (float, int)), ( + 'the attribute `margins` in ``ArcFaceClsHead`` should be a ' + ' float, a Sequence of float, or a ".txt" file path.') + + assert len(margins) == num_classes, \ + 'The length of margins must be equal with num_classes.' + + self.register_buffer( + 'margins', torch.tensor(margins).float(), persistent=False) + # To make `phi` monotonic decreasing, refers to + # https://github.com/deepinsight/insightface/issues/108 + sinm_m = torch.sin(math.pi - self.margins) * self.margins + threshold = torch.cos(math.pi - self.margins) + self.register_buffer('sinm_m', sinm_m, persistent=False) + self.register_buffer('threshold', threshold, persistent=False) + + def set_margins(self, margins: Union[Sequence[float], float]) -> None: + """set margins of arcface head. + + Args: + margins (Union[Sequence[float], float]): The marigins. + """ + if isinstance(margins, float): + margins = [margins] * self.num_classes + assert is_seq_of( + list(margins), float) and (len(margins) == self.num_classes), ( + f'margins must be Sequence[Union(float, int)], get {margins}') + + self.margins = torch.tensor( + margins, device=self.margins.device, dtype=torch.float32) + self.sinm_m = torch.sin(self.margins) * self.margins + self.threshold = -torch.cos(self.margins) + + def pre_logits(self, feats: Tuple[torch.Tensor]) -> torch.Tensor: + """The process before the final classification head. + + The input ``feats`` is a tuple of tensor, and each tensor is the + feature of a backbone stage. In ``ArcFaceHead``, we just obtain the + feature of the last stage. + """ + # The ArcFaceHead doesn't have other module, just return after + # unpacking. + return feats[-1] + + def _get_logit_with_margin(self, pre_logits, target): + """add arc margin to the cosine in target index. + + The target must be in index format. + """ + assert target.dim() == 1 or ( + target.dim() == 2 and target.shape[1] == 1), \ + 'The target must be in index format.' 
+ cosine = self.norm_product(pre_logits) + phi = torch.cos(torch.acos(cosine) + self.margins) + + if self.easy_margin: + # when cosine>0, choose phi + # when cosine<=0, choose cosine + phi = torch.where(cosine > 0, phi, cosine) + else: + # when cos>th, choose phi + # when cos<=th, choose cosine-mm + phi = torch.where(cosine > self.threshold, phi, + cosine - self.sinm_m) + + target = convert_to_one_hot(target, self.num_classes) + output = target * phi + (1 - target) * cosine + return output + + def forward(self, + feats: Tuple[torch.Tensor], + target: Optional[torch.Tensor] = None) -> torch.Tensor: + """The forward process.""" + # Disable AMP + with autocast(enabled=False): + pre_logits = self.pre_logits(feats) + + if target is None: + # when eval, logit is the cosine between W and pre_logits; + # cos(theta_yj) = (x/||x||) * (W/||W||) + logit = self.norm_product(pre_logits) + else: + # when training, add a margin to the pre_logits where target is + # True, then logit is the cosine between W and new pre_logits + logit = self._get_logit_with_margin(pre_logits, target) + + return self.scale * logit + + def loss(self, feats: Tuple[torch.Tensor], data_samples: List[DataSample], + **kwargs) -> dict: + """Calculate losses from the classification score. + + Args: + feats (tuple[Tensor]): The features extracted from the backbone. + Multiple stage inputs are acceptable but only the last stage + will be used to classify. The shape of every item should be + ``(num_samples, num_classes)``. + data_samples (List[DataSample]): The annotation data of + every samples. + **kwargs: Other keyword arguments to forward the loss module. + + Returns: + dict[str, Tensor]: a dictionary of loss components + """ + # Unpack data samples and pack targets + label_target = torch.cat([i.gt_label for i in data_samples]) + if 'gt_score' in data_samples[0]: + # Batch augmentation may convert labels to one-hot format scores. + target = torch.stack([i.gt_score for i in data_samples]) + else: + target = label_target + + # the index format target would be used + cls_score = self(feats, label_target) + + # compute loss + losses = dict() + loss = self.loss_module( + cls_score, target, avg_factor=cls_score.size(0), **kwargs) + losses['loss'] = loss + + return losses diff --git a/mmpretrain/models/heads/mim_head.py b/mmpretrain/models/heads/mim_head.py new file mode 100644 index 0000000000000000000000000000000000000000..bda90c8198986ec9b2ff2d03db3350e1f1a25823 --- /dev/null +++ b/mmpretrain/models/heads/mim_head.py @@ -0,0 +1,37 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Optional + +import torch +from mmengine.model import BaseModule + +from mmpretrain.registry import MODELS + + +@MODELS.register_module() +class MIMHead(BaseModule): + """Pre-training head for Masked Image Modeling. + + Args: + loss (dict): Config dict for module of loss functions. + """ + + def __init__(self, loss: dict) -> None: + super().__init__() + self.loss_module = MODELS.build(loss) + + def loss(self, + pred: torch.Tensor, + target: torch.Tensor, + mask: Optional[torch.Tensor] = None) -> torch.Tensor: + """Forward head. + + Args: + pred (torch.Tensor): Predictions with shape B x L x C. + target (torch.Tensor): Targets with shape B x L x C. + mask (torch.Tensor): Mask with shape B x L. + + Returns: + torch.Tensor: The loss tensor. 
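# Numeric sketch (editorial, not part of the patched files) of the ArcFace
# margin in _get_logit_with_margin above: for the ground-truth class the cosine
# logit is replaced by cos(theta + m) before scaling by s, which makes the
# positive harder; the `threshold`/`sinm_m` buffers guard the region where
# cos(theta + m) would stop decreasing monotonically. The cosines are made up.
import torch

m, s = 0.5, 64.0
cosine = torch.tensor([0.9, 0.2, -0.3])           # cosine to 3 class centres
phi = torch.cos(torch.acos(cosine) + m)           # margin-penalized cosine
target = torch.tensor([1.0, 0.0, 0.0])            # class 0 is the ground truth
logits = s * (target * phi + (1 - target) * cosine)
assert phi[0] < cosine[0]                          # true-class logit is pulled down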
+ """ + loss = self.loss_module(pred, target, mask) + return loss diff --git a/mmpretrain/models/heads/mixmim_head.py b/mmpretrain/models/heads/mixmim_head.py new file mode 100644 index 0000000000000000000000000000000000000000..a709630abb26bce1153596cec842da0912bab912 --- /dev/null +++ b/mmpretrain/models/heads/mixmim_head.py @@ -0,0 +1,49 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch + +from mmpretrain.registry import MODELS +from .mae_head import MAEPretrainHead + + +@MODELS.register_module() +class MixMIMPretrainHead(MAEPretrainHead): + """Head for MixMIM Pre-training. + + Args: + loss (dict): Config of loss. + norm_pix_loss (bool): Whether or not normalize target. + Defaults to False. + patch_size (int): Patch size. Defaults to 16. + """ + + def __init__(self, + loss: dict, + norm_pix: bool = False, + patch_size: int = 16) -> None: + super().__init__(loss=loss, norm_pix=norm_pix, patch_size=patch_size) + + def loss(self, x_rec: torch.Tensor, target: torch.Tensor, + mask: torch.Tensor) -> torch.Tensor: + """Generate loss. + + Args: + pred (torch.Tensor): The reconstructed image. + target (torch.Tensor): The target image. + mask (torch.Tensor): The mask of the target image. + + Returns: + torch.Tensor: The reconstruction loss. + """ + target = self.construct_target(target) + + B, L, C = x_rec.shape + + # unmix tokens + x1_rec = x_rec[:B // 2] + x2_rec = x_rec[B // 2:] + + unmix_x_rec = x1_rec * mask + x2_rec.flip(0) * (1 - mask) + + loss_rec = self.loss_module(unmix_x_rec, target) + + return loss_rec diff --git a/mmpretrain/models/heads/mocov3_head.py b/mmpretrain/models/heads/mocov3_head.py new file mode 100644 index 0000000000000000000000000000000000000000..c2bec2a6cc90247fab44d6d954a8a0c6ede0a812 --- /dev/null +++ b/mmpretrain/models/heads/mocov3_head.py @@ -0,0 +1,66 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +import torch.nn as nn +from mmengine.dist import all_gather, get_rank +from mmengine.model import BaseModule + +from mmpretrain.registry import MODELS + + +@MODELS.register_module() +class MoCoV3Head(BaseModule): + """Head for MoCo v3 Pre-training. + + This head builds a predictor, which can be any registered neck component. + It also implements latent contrastive loss between two forward features. + Part of the code is modified from: + ``_. + + Args: + predictor (dict): Config dict for module of predictor. + loss (dict): Config dict for module of loss functions. + temperature (float): The temperature hyper-parameter that + controls the concentration level of the distribution. + Defaults to 1.0. + """ + + def __init__(self, + predictor: dict, + loss: dict, + temperature: float = 1.0) -> None: + super().__init__() + self.predictor = MODELS.build(predictor) + self.loss_module = MODELS.build(loss) + self.temperature = temperature + + def loss(self, base_out: torch.Tensor, + momentum_out: torch.Tensor) -> torch.Tensor: + """Generate loss. + + Args: + base_out (torch.Tensor): NxC features from base_encoder. + momentum_out (torch.Tensor): NxC features from momentum_encoder. + + Returns: + torch.Tensor: The loss tensor. 
+ """ + # predictor computation + pred = self.predictor([base_out])[0] + + # normalize + pred = nn.functional.normalize(pred, dim=1) + target = nn.functional.normalize(momentum_out, dim=1) + + # get negative samples + target = torch.cat(all_gather(target), dim=0) + + # Einstein sum is more intuitive + logits = torch.einsum('nc,mc->nm', [pred, target]) / self.temperature + + # generate labels + batch_size = logits.shape[0] + labels = (torch.arange(batch_size, dtype=torch.long) + + batch_size * get_rank()).to(logits.device) + + loss = self.loss_module(logits, labels) + return loss diff --git a/mmpretrain/models/heads/multi_label_cls_head.py b/mmpretrain/models/heads/multi_label_cls_head.py new file mode 100644 index 0000000000000000000000000000000000000000..ca36bfe06e70e1e0f16a5dc4c161b186234f57ac --- /dev/null +++ b/mmpretrain/models/heads/multi_label_cls_head.py @@ -0,0 +1,155 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Dict, List, Optional, Tuple + +import torch +import torch.nn as nn +from mmengine.model import BaseModule + +from mmpretrain.registry import MODELS +from mmpretrain.structures import DataSample, label_to_onehot + + +@MODELS.register_module() +class MultiLabelClsHead(BaseModule): + """Classification head for multilabel task. + + Args: + loss (dict): Config of classification loss. Defaults to + dict(type='CrossEntropyLoss', use_sigmoid=True). + thr (float, optional): Predictions with scores under the thresholds + are considered as negative. Defaults to None. + topk (int, optional): Predictions with the k-th highest scores are + considered as positive. Defaults to None. + init_cfg (dict, optional): The extra init config of layers. + Defaults to None. + + Notes: + If both ``thr`` and ``topk`` are set, use ``thr` to determine + positive predictions. If neither is set, use ``thr=0.5`` as + default. + """ + + def __init__(self, + loss: Dict = dict(type='CrossEntropyLoss', use_sigmoid=True), + thr: Optional[float] = None, + topk: Optional[int] = None, + init_cfg: Optional[dict] = None): + super(MultiLabelClsHead, self).__init__(init_cfg=init_cfg) + + if not isinstance(loss, nn.Module): + loss = MODELS.build(loss) + self.loss_module = loss + + if thr is None and topk is None: + thr = 0.5 + + self.thr = thr + self.topk = topk + + def pre_logits(self, feats: Tuple[torch.Tensor]) -> torch.Tensor: + """The process before the final classification head. + + The input ``feats`` is a tuple of tensor, and each tensor is the + feature of a backbone stage. In ``MultiLabelClsHead``, we just obtain + the feature of the last stage. + """ + # The MultiLabelClsHead doesn't have other module, just return after + # unpacking. + return feats[-1] + + def forward(self, feats: Tuple[torch.Tensor]) -> torch.Tensor: + """The forward process.""" + pre_logits = self.pre_logits(feats) + # The MultiLabelClsHead doesn't have the final classification head, + # just return the unpacked inputs. + return pre_logits + + def loss(self, feats: Tuple[torch.Tensor], data_samples: List[DataSample], + **kwargs) -> dict: + """Calculate losses from the classification score. + + Args: + feats (tuple[Tensor]): The features extracted from the backbone. + Multiple stage inputs are acceptable but only the last stage + will be used to classify. The shape of every item should be + ``(num_samples, num_classes)``. + data_samples (List[DataSample]): The annotation data of + every samples. + **kwargs: Other keyword arguments to forward the loss module. 
+ + Returns: + dict[str, Tensor]: a dictionary of loss components + """ + # The part can be traced by torch.fx + cls_score = self(feats) + + # The part can not be traced by torch.fx + losses = self._get_loss(cls_score, data_samples, **kwargs) + return losses + + def _get_loss(self, cls_score: torch.Tensor, + data_samples: List[DataSample], **kwargs): + """Unpack data samples and compute loss.""" + num_classes = cls_score.size()[-1] + # Unpack data samples and pack targets + if 'gt_score' in data_samples[0]: + target = torch.stack([i.gt_score.float() for i in data_samples]) + else: + target = torch.stack([ + label_to_onehot(i.gt_label, num_classes) for i in data_samples + ]).float() + + # compute loss + losses = dict() + loss = self.loss_module( + cls_score, target, avg_factor=cls_score.size(0), **kwargs) + losses['loss'] = loss + + return losses + + def predict(self, + feats: Tuple[torch.Tensor], + data_samples: List[DataSample] = None) -> List[DataSample]: + """Inference without augmentation. + + Args: + feats (tuple[Tensor]): The features extracted from the backbone. + Multiple stage inputs are acceptable but only the last stage + will be used to classify. The shape of every item should be + ``(num_samples, num_classes)``. + data_samples (List[DataSample], optional): The annotation + data of every samples. If not None, set ``pred_label`` of + the input data samples. Defaults to None. + + Returns: + List[DataSample]: A list of data samples which contains the + predicted results. + """ + # The part can be traced by torch.fx + cls_score = self(feats) + + # The part can not be traced by torch.fx + predictions = self._get_predictions(cls_score, data_samples) + return predictions + + def _get_predictions(self, cls_score: torch.Tensor, + data_samples: List[DataSample]): + """Post-process the output of head. + + Including softmax and set ``pred_label`` of data samples. + """ + pred_scores = torch.sigmoid(cls_score) + + if data_samples is None: + data_samples = [DataSample() for _ in range(cls_score.size(0))] + + for data_sample, score in zip(data_samples, pred_scores): + if self.thr is not None: + # a label is predicted positive if larger than thr + label = torch.where(score >= self.thr)[0] + else: + # top-k labels will be predicted positive for any example + _, label = score.topk(self.topk) + data_sample.set_pred_score(score).set_pred_label(label) + + return data_samples diff --git a/mmpretrain/models/heads/multi_label_csra_head.py b/mmpretrain/models/heads/multi_label_csra_head.py new file mode 100644 index 0000000000000000000000000000000000000000..95a3a0e8b9d6c68c2f2c1da3c0c160c4c695cc7c --- /dev/null +++ b/mmpretrain/models/heads/multi_label_csra_head.py @@ -0,0 +1,112 @@ +# Copyright (c) OpenMMLab. All rights reserved. +# Modified from https://github.com/Kevinz-code/CSRA +from typing import Tuple + +import torch +import torch.nn as nn +from mmengine.model import BaseModule, ModuleList + +from mmpretrain.registry import MODELS +from .multi_label_cls_head import MultiLabelClsHead + + +@MODELS.register_module() +class CSRAClsHead(MultiLabelClsHead): + """Class-specific residual attention classifier head. + + Please refer to the `Residual Attention: A Simple but Effective Method for + Multi-Label Recognition (ICCV 2021) `_ + for details. + + Args: + num_classes (int): Number of categories. + in_channels (int): Number of channels in the input feature map. + num_heads (int): Number of residual at tensor heads. + loss (dict): Config of classification loss. 
+ lam (float): Lambda that combines global average and max pooling + scores. + init_cfg (dict, optional): The extra init config of layers. + Defaults to use ``dict(type='Normal', layer='Linear', std=0.01)``. + """ + temperature_settings = { # softmax temperature settings + 1: [1], + 2: [1, 99], + 4: [1, 2, 4, 99], + 6: [1, 2, 3, 4, 5, 99], + 8: [1, 2, 3, 4, 5, 6, 7, 99] + } + + def __init__(self, + num_classes: int, + in_channels: int, + num_heads: int, + lam: float, + init_cfg=dict(type='Normal', layer='Linear', std=0.01), + **kwargs): + assert num_heads in self.temperature_settings.keys( + ), 'The num of heads is not in temperature setting.' + assert lam > 0, 'Lambda should be between 0 and 1.' + super(CSRAClsHead, self).__init__(init_cfg=init_cfg, **kwargs) + self.temp_list = self.temperature_settings[num_heads] + self.csra_heads = ModuleList([ + CSRAModule(num_classes, in_channels, self.temp_list[i], lam) + for i in range(num_heads) + ]) + + def pre_logits(self, feats: Tuple[torch.Tensor]) -> torch.Tensor: + """The process before the final classification head. + + The input ``feats`` is a tuple of tensor, and each tensor is the + feature of a backbone stage. In ``CSRAClsHead``, we just obtain the + feature of the last stage. + """ + # The CSRAClsHead doesn't have other module, just return after + # unpacking. + return feats[-1] + + def forward(self, feats: Tuple[torch.Tensor]) -> torch.Tensor: + """The forward process.""" + pre_logits = self.pre_logits(feats) + logit = sum([head(pre_logits) for head in self.csra_heads]) + return logit + + +class CSRAModule(BaseModule): + """Basic module of CSRA with different temperature. + + Args: + num_classes (int): Number of categories. + in_channels (int): Number of channels in the input feature map. + T (int): Temperature setting. + lam (float): Lambda that combines global average and max pooling + scores. + init_cfg (dict | optional): The extra init config of layers. + Defaults to use dict(type='Normal', layer='Linear', std=0.01). + """ + + def __init__(self, + num_classes: int, + in_channels: int, + T: int, + lam: float, + init_cfg=None): + + super(CSRAModule, self).__init__(init_cfg=init_cfg) + self.T = T # temperature + self.lam = lam # Lambda + self.head = nn.Conv2d(in_channels, num_classes, 1, bias=False) + self.softmax = nn.Softmax(dim=2) + + def forward(self, x): + score = self.head(x) / torch.norm( + self.head.weight, dim=1, keepdim=True).transpose(0, 1) + score = score.flatten(2) + base_logit = torch.mean(score, dim=2) + + if self.T == 99: # max-pooling + att_logit = torch.max(score, dim=2)[0] + else: + score_soft = self.softmax(score * self.T) + att_logit = torch.sum(score * score_soft, dim=2) + + return base_logit + self.lam * att_logit diff --git a/mmpretrain/models/heads/multi_label_linear_head.py b/mmpretrain/models/heads/multi_label_linear_head.py new file mode 100644 index 0000000000000000000000000000000000000000..81217ec55c54f23748b7e4ce8797509abfbb2ed3 --- /dev/null +++ b/mmpretrain/models/heads/multi_label_linear_head.py @@ -0,0 +1,66 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Dict, Optional, Tuple + +import torch +import torch.nn as nn + +from mmpretrain.registry import MODELS +from .multi_label_cls_head import MultiLabelClsHead + + +@MODELS.register_module() +class MultiLabelLinearClsHead(MultiLabelClsHead): + """Linear classification head for multilabel task. + + Args: + loss (dict): Config of classification loss. Defaults to + dict(type='CrossEntropyLoss', use_sigmoid=True). 
+ thr (float, optional): Predictions with scores under the thresholds + are considered as negative. Defaults to None. + topk (int, optional): Predictions with the k-th highest scores are + considered as positive. Defaults to None. + init_cfg (dict, optional): The extra init config of layers. + Defaults to use dict(type='Normal', layer='Linear', std=0.01). + + Notes: + If both ``thr`` and ``topk`` are set, use ``thr` to determine + positive predictions. If neither is set, use ``thr=0.5`` as + default. + """ + + def __init__(self, + num_classes: int, + in_channels: int, + loss: Dict = dict(type='CrossEntropyLoss', use_sigmoid=True), + thr: Optional[float] = None, + topk: Optional[int] = None, + init_cfg: Optional[dict] = dict( + type='Normal', layer='Linear', std=0.01)): + super(MultiLabelLinearClsHead, self).__init__( + loss=loss, thr=thr, topk=topk, init_cfg=init_cfg) + + assert num_classes > 0, f'num_classes ({num_classes}) must be a ' \ + 'positive integer.' + + self.in_channels = in_channels + self.num_classes = num_classes + + self.fc = nn.Linear(self.in_channels, self.num_classes) + + def pre_logits(self, feats: Tuple[torch.Tensor]) -> torch.Tensor: + """The process before the final classification head. + + The input ``feats`` is a tuple of tensor, and each tensor is the + feature of a backbone stage. In ``MultiLabelLinearClsHead``, we just + obtain the feature of the last stage. + """ + # The obtain the MultiLabelLinearClsHead doesn't have other module, + # just return after unpacking. + return feats[-1] + + def forward(self, feats: Tuple[torch.Tensor]) -> torch.Tensor: + """The forward process.""" + pre_logits = self.pre_logits(feats) + # The final classification head. + cls_score = self.fc(pre_logits) + return cls_score diff --git a/mmpretrain/models/heads/multi_task_head.py b/mmpretrain/models/heads/multi_task_head.py new file mode 100644 index 0000000000000000000000000000000000000000..8b4645a790d8494a216d945c91496388e0629c79 --- /dev/null +++ b/mmpretrain/models/heads/multi_task_head.py @@ -0,0 +1,141 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import List, Sequence, Tuple + +import torch +import torch.nn as nn +from mmengine.model import BaseModule, ModuleDict + +from mmpretrain.registry import MODELS +from mmpretrain.structures import MultiTaskDataSample + + +def loss_convertor(loss_func, task_name): + + def wrapped(inputs, data_samples, **kwargs): + mask = torch.empty(len(data_samples), dtype=torch.bool) + task_data_samples = [] + for i, data_sample in enumerate(data_samples): + assert isinstance(data_sample, MultiTaskDataSample) + sample_mask = task_name in data_sample + mask[i] = sample_mask + if sample_mask: + task_data_samples.append(data_sample.get(task_name)) + + if len(task_data_samples) == 0: + # This makes it possible to perform loss.backward when a + # task does not have gt_labels within a batch. + loss = (inputs[0] * 0).sum() + return {'loss': loss, 'mask_size': torch.tensor(0.)} + + # Mask the inputs of the task + def mask_inputs(inputs, mask): + if isinstance(inputs, Sequence): + return type(inputs)( + [mask_inputs(input, mask) for input in inputs]) + elif isinstance(inputs, torch.Tensor): + return inputs[mask] + + masked_inputs = mask_inputs(inputs, mask) + loss_output = loss_func(masked_inputs, task_data_samples, **kwargs) + loss_output['mask_size'] = mask.sum().to(torch.float) + return loss_output + + return wrapped + + +@MODELS.register_module() +class MultiTaskHead(BaseModule): + """Multi task head. 
+ + Args: + task_heads (dict): Sub heads to use, the key will be use to rename the + loss components. + common_cfg (dict): The common settings for all heads. Defaults to an + empty dict. + init_cfg (dict, optional): The extra initialization settings. + Defaults to None. + """ + + def __init__(self, task_heads, init_cfg=None, **kwargs): + super(MultiTaskHead, self).__init__(init_cfg=init_cfg) + + assert isinstance(task_heads, dict), 'The `task_heads` argument' \ + "should be a dict, which's keys are task names and values are" \ + 'configs of head for the task.' + + self.task_heads = ModuleDict() + + for task_name, sub_head in task_heads.items(): + if not isinstance(sub_head, nn.Module): + sub_head = MODELS.build(sub_head, default_args=kwargs) + sub_head.loss = loss_convertor(sub_head.loss, task_name) + self.task_heads[task_name] = sub_head + + def forward(self, feats): + """The forward process.""" + return { + task_name: head(feats) + for task_name, head in self.task_heads.items() + } + + def loss(self, feats: Tuple[torch.Tensor], + data_samples: List[MultiTaskDataSample], **kwargs) -> dict: + """Calculate losses from the classification score. + + Args: + feats (tuple[Tensor]): The features extracted from the backbone. + data_samples (List[MultiTaskDataSample]): The annotation data of + every samples. + **kwargs: Other keyword arguments to forward the loss module. + + Returns: + dict[str, Tensor]: a dictionary of loss components, each task loss + key will be prefixed by the task_name like "task1_loss" + """ + losses = dict() + for task_name, head in self.task_heads.items(): + head_loss = head.loss(feats, data_samples, **kwargs) + for k, v in head_loss.items(): + losses[f'{task_name}_{k}'] = v + return losses + + def predict( + self, + feats: Tuple[torch.Tensor], + data_samples: List[MultiTaskDataSample] = None + ) -> List[MultiTaskDataSample]: + """Inference without augmentation. + + Args: + feats (tuple[Tensor]): The features extracted from the backbone. + data_samples (List[MultiTaskDataSample], optional): The annotation + data of every samples. If not None, set ``pred_label`` of + the input data samples. Defaults to None. + + Returns: + List[MultiTaskDataSample]: A list of data samples which contains + the predicted results. + """ + predictions_dict = dict() + + for task_name, head in self.task_heads.items(): + task_samples = head.predict(feats) + batch_size = len(task_samples) + predictions_dict[task_name] = task_samples + + if data_samples is None: + data_samples = [MultiTaskDataSample() for _ in range(batch_size)] + + for task_name, task_samples in predictions_dict.items(): + for data_sample, task_sample in zip(data_samples, task_samples): + task_sample.set_field( + task_name in data_sample.tasks, + 'eval_mask', + field_type='metainfo') + + if task_name in data_sample.tasks: + data_sample.get(task_name).update(task_sample) + else: + data_sample.set_field(task_sample, task_name) + + return data_samples diff --git a/mmpretrain/models/heads/seq_gen_head.py b/mmpretrain/models/heads/seq_gen_head.py new file mode 100644 index 0000000000000000000000000000000000000000..b2e9b10efe6e1e6a709cd870f0572f14bbd176ee --- /dev/null +++ b/mmpretrain/models/heads/seq_gen_head.py @@ -0,0 +1,188 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
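Note (illustrative, not part of the diff): the ``loss_convertor`` wrapper in ``multi_task_head.py`` above masks each sub-head's inputs so that only samples carrying that task's label contribute to its loss, and falls back to a zero-valued loss when no sample in the batch has the label, so ``loss.backward()`` still works. A minimal sketch of that masking behaviour, using made-up tensors and a placeholder loss function (only torch is required):

import torch

def masked_task_loss(scores, has_task_label, loss_fn):
    # Keep only the samples that carry this task's label; when none do,
    # return a zero loss that stays connected to the graph, mirroring
    # ``(inputs[0] * 0).sum()`` in ``loss_convertor``.
    if not has_task_label.any():
        return (scores * 0).sum()
    return loss_fn(scores[has_task_label])

scores = torch.randn(4, 3, requires_grad=True)   # hypothetical per-task scores
mask = torch.tensor([True, False, True, False])  # which samples have this task's label
loss = masked_task_loss(scores, mask, lambda x: x.mean())
loss.backward()  # gradients flow in both the masked and the empty case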
+from typing import Optional + +import torch +from mmengine.model import BaseModule + +from mmpretrain.registry import MODELS + + +@MODELS.register_module() +class SeqGenerationHead(BaseModule): + """Generation head for multi-modal pre-trained task, adopted by BLIP. + Normally used for generation task. + + Args: + decoder (dict): Decoder for blip generation head. + init_cfg (dict, optional): the config to control the initialization. + Defaults to None. + """ + + def __init__( + self, + decoder: dict, + ignore_index=-100, + loss: dict = dict(type='LabelSmoothLoss', label_smooth_val=0.1), + init_cfg: Optional[dict] = None, + ) -> None: + super(SeqGenerationHead, self).__init__(init_cfg=init_cfg) + self.decoder = MODELS.build(decoder) + self.loss_fn = MODELS.build(loss) + self.ignore_index = ignore_index + + def forward(self, input_ids: torch.Tensor, + encoder_hidden_states: torch.Tensor, + encoder_attention_mask: torch.Tensor, labels: torch.Tensor): + """Forward to get decoder output. + + Args: + input_ids (torch.Tensor): The tokenized input text tensor. + encoder_hidden_states (torch.Tensor): Hidden states from image + embeddings. + encoder_attention_mask (torch.Tensor): Image embeddings hidden + states attention mask. + labels (torch.Tensor): Decoder target for calculate loss. + + Returns: + dict[str, Tensor]: a dictionary of decoder outputs. + """ + + decoder_out = self.decoder( + input_ids=input_ids, + encoder_hidden_states=encoder_hidden_states, + encoder_attention_mask=encoder_attention_mask, + labels=labels, + return_dict=True, + ) + return decoder_out + + def loss(self, input_ids, encoder_hidden_states, encoder_attention_mask, + labels): + """Calculate losses from the extracted features. + + Args: + input_ids (torch.Tensor): The tokenized input text tensor. + encoder_hidden_states (torch.Tensor): Hidden states from image + embeddings. + encoder_attention_mask (torch.Tensor): Image embeddings hidden + states attention mask. + labels (torch.Tensor): Decoder target for calculate loss. + + Returns: + dict[str, Tensor]: a dictionary of loss components. + """ + + decoder_out = self( + input_ids=input_ids, + encoder_hidden_states=encoder_hidden_states, + encoder_attention_mask=encoder_attention_mask, + labels=labels, + ) + prediction_scores = decoder_out['logits'] + # we are doing next-token prediction; + # shift prediction scores and input ids by one + shifted_prediction_scores = prediction_scores[:, :-1, :].contiguous() + labels = labels[:, 1:].contiguous() + + vocab_size = prediction_scores.shape[-1] + + # mask ignored index + if (labels == self.ignore_index).any(): + labels = labels.view(-1).clone() + ignore_mask = (labels == self.ignore_index) + labels.masked_fill_(ignore_mask, 0) + weight = torch.logical_not(ignore_mask) + avg_factor = max(weight.sum(), 1) + else: + weight = None + avg_factor = labels.size(0) + + lm_loss = self.loss_fn( + shifted_prediction_scores.view(-1, vocab_size), + labels, + weight=weight, + avg_factor=avg_factor, + ) + losses = { + 'seq_gen_lm_loss': lm_loss, + } + + return losses + + def predict(self, + input_ids, + encoder_hidden_states, + sep_token_id, + pad_token_id, + use_nucleus_sampling=False, + num_beams=3, + max_length=20, + min_length=2, + top_p=0.9, + repetition_penalty=1.0, + **kwargs): + """Decoder prediction method. + + Args: + input_ids (torch.Tensor): The tokenized input text tensor. + encoder_hidden_states (torch.Tensor): Hidden states from image + embeddings. + sep_token_id (int): Tokenid of separation token. 
+ pad_token_id (int): Tokenid of pad token. + use_nucleus_sampling (bool): Whether to use nucleus sampling in + prediction. Defaults to False. + num_beams (int): Number of beams used in predition. + Defaults to 3. + max_length (int): Max length of generated text in predition. + Defaults to 20. + min_length (int): Min length of generated text in predition. + Defaults to 20. + top_p (float): + If < 1.0, only keep the top tokens with cumulative probability + >= top_p (nucleus filtering). Defaults to 0.9. + repetition_penalty (float): The parameter for repetition penalty. + Defaults to 1.0. + **kwarg: Other arguments that might used in generation. + + Returns: + dict[str, Tensor]: a dictionary of generation outputs. + """ + device = encoder_hidden_states.device + + # TODO: In old version of transformers + # Additional repeat interleave of hidden states should be add here. + image_atts = torch.ones( + encoder_hidden_states.size()[:-1], dtype=torch.long).to(device) + + model_kwargs = { + 'encoder_hidden_states': encoder_hidden_states, + 'encoder_attention_mask': image_atts, + } + model_kwargs.update(kwargs) + + if use_nucleus_sampling: + # nucleus sampling + outputs = self.decoder.generate( + input_ids=input_ids, + max_length=max_length, + min_length=min_length, + do_sample=True, + top_p=top_p, + num_return_sequences=1, + eos_token_id=sep_token_id, + pad_token_id=pad_token_id, + repetition_penalty=1.1, + **model_kwargs) + else: + # beam search + outputs = self.decoder.generate( + input_ids=input_ids, + max_length=max_length, + min_length=min_length, + num_beams=num_beams, + eos_token_id=sep_token_id, + pad_token_id=pad_token_id, + repetition_penalty=repetition_penalty, + **model_kwargs) + + return outputs diff --git a/mmpretrain/models/heads/simmim_head.py b/mmpretrain/models/heads/simmim_head.py new file mode 100644 index 0000000000000000000000000000000000000000..b7af984c9eb4891e9f4281daf630355cafbb6cc7 --- /dev/null +++ b/mmpretrain/models/heads/simmim_head.py @@ -0,0 +1,40 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from mmengine.model import BaseModule + +from mmpretrain.registry import MODELS + + +@MODELS.register_module() +class SimMIMHead(BaseModule): + """Head for SimMIM Pre-training. + + Args: + patch_size (int): Patch size of each token. + loss (dict): The config for loss. + """ + + def __init__(self, patch_size: int, loss: dict) -> None: + super().__init__() + self.patch_size = patch_size + self.loss_module = MODELS.build(loss) + + def loss(self, pred: torch.Tensor, target: torch.Tensor, + mask: torch.Tensor) -> torch.Tensor: + """Generate loss. + + This method will expand mask to the size of the original image. + + Args: + pred (torch.Tensor): The reconstructed image (B, C, H, W). + target (torch.Tensor): The target image (B, C, H, W). + mask (torch.Tensor): The mask of the target image. + + Returns: + torch.Tensor: The reconstruction loss. + """ + mask = mask.repeat_interleave(self.patch_size, 1).repeat_interleave( + self.patch_size, 2).unsqueeze(1).contiguous() + loss = self.loss_module(pred, target, mask) + + return loss diff --git a/mmpretrain/models/heads/stacked_head.py b/mmpretrain/models/heads/stacked_head.py new file mode 100644 index 0000000000000000000000000000000000000000..6cd819de8e8daf162bb906d5524871577754fa1f --- /dev/null +++ b/mmpretrain/models/heads/stacked_head.py @@ -0,0 +1,135 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
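Note (illustrative, not part of the diff): ``SimMIMHead.loss`` above expands the patch-level mask to pixel resolution before weighting the reconstruction loss. A small sketch of that expansion with example shapes (patch_size=4 and an 8x8 patch grid, i.e. a 32x32 image):

import torch

patch_size = 4
mask = torch.randint(0, 2, (2, 8, 8))            # (B, H/p, W/p) patch-level mask
pixel_mask = mask.repeat_interleave(patch_size, dim=1) \
                 .repeat_interleave(patch_size, dim=2) \
                 .unsqueeze(1).contiguous()      # (B, 1, H, W) pixel-level mask
assert pixel_mask.shape == (2, 1, 32, 32)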
+from typing import Dict, Optional, Sequence, Tuple + +import torch +import torch.nn as nn +from mmcv.cnn import build_activation_layer, build_norm_layer +from mmengine.model import BaseModule, ModuleList + +from mmpretrain.registry import MODELS +from .cls_head import ClsHead + + +class LinearBlock(BaseModule): + """Linear block for StackedLinearClsHead.""" + + def __init__(self, + in_channels, + out_channels, + dropout_rate=0., + norm_cfg=None, + act_cfg=None, + init_cfg=None): + super().__init__(init_cfg=init_cfg) + self.fc = nn.Linear(in_channels, out_channels) + + self.norm = None + self.act = None + self.dropout = None + + if norm_cfg is not None: + self.norm = build_norm_layer(norm_cfg, out_channels)[1] + if act_cfg is not None: + self.act = build_activation_layer(act_cfg) + if dropout_rate > 0: + self.dropout = nn.Dropout(p=dropout_rate) + + def forward(self, x): + """The forward process.""" + x = self.fc(x) + if self.norm is not None: + x = self.norm(x) + if self.act is not None: + x = self.act(x) + if self.dropout is not None: + x = self.dropout(x) + return x + + +@MODELS.register_module() +class StackedLinearClsHead(ClsHead): + """Classifier head with several hidden fc layer and a output fc layer. + + Args: + num_classes (int): Number of categories. + in_channels (int): Number of channels in the input feature map. + mid_channels (Sequence[int]): Number of channels in the hidden fc + layers. + dropout_rate (float): Dropout rate after each hidden fc layer, + except the last layer. Defaults to 0. + norm_cfg (dict, optional): Config dict of normalization layer after + each hidden fc layer, except the last layer. Defaults to None. + act_cfg (dict, optional): Config dict of activation function after each + hidden layer, except the last layer. Defaults to use "ReLU". + """ + + def __init__(self, + num_classes: int, + in_channels: int, + mid_channels: Sequence[int], + dropout_rate: float = 0., + norm_cfg: Optional[Dict] = None, + act_cfg: Optional[Dict] = dict(type='ReLU'), + **kwargs): + super(StackedLinearClsHead, self).__init__(**kwargs) + self.num_classes = num_classes + self.in_channels = in_channels + if self.num_classes <= 0: + raise ValueError( + f'num_classes={num_classes} must be a positive integer') + + assert isinstance(mid_channels, Sequence), \ + f'`mid_channels` of StackedLinearClsHead should be a sequence, ' \ + f'instead of {type(mid_channels)}' + self.mid_channels = mid_channels + + self.dropout_rate = dropout_rate + self.norm_cfg = norm_cfg + self.act_cfg = act_cfg + + self._init_layers() + + def _init_layers(self): + """"Init layers.""" + self.layers = ModuleList() + in_channels = self.in_channels + for hidden_channels in self.mid_channels: + self.layers.append( + LinearBlock( + in_channels, + hidden_channels, + dropout_rate=self.dropout_rate, + norm_cfg=self.norm_cfg, + act_cfg=self.act_cfg)) + in_channels = hidden_channels + + self.layers.append( + LinearBlock( + self.mid_channels[-1], + self.num_classes, + dropout_rate=0., + norm_cfg=None, + act_cfg=None)) + + def pre_logits(self, feats: Tuple[torch.Tensor]) -> torch.Tensor: + """The process before the final classification head. + + The input ``feats`` is a tuple of tensor, and each tensor is the + feature of a backbone stage. 
+ """ + x = feats[-1] + for layer in self.layers[:-1]: + x = layer(x) + return x + + @property + def fc(self): + """Full connected layer.""" + return self.layers[-1] + + def forward(self, feats: Tuple[torch.Tensor]) -> torch.Tensor: + """The forward process.""" + pre_logits = self.pre_logits(feats) + # The final classification head. + cls_score = self.fc(pre_logits) + return cls_score diff --git a/mmpretrain/models/heads/swav_head.py b/mmpretrain/models/heads/swav_head.py new file mode 100644 index 0000000000000000000000000000000000000000..8f3a30236e019822a166e25551f77feec8228d84 --- /dev/null +++ b/mmpretrain/models/heads/swav_head.py @@ -0,0 +1,31 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from mmengine.model import BaseModule + +from mmpretrain.registry import MODELS + + +@MODELS.register_module() +class SwAVHead(BaseModule): + """Head for SwAV Pre-training. + + Args: + loss (dict): Config dict for module of loss functions. + """ + + def __init__(self, loss: dict) -> None: + super().__init__() + self.loss_module = MODELS.build(loss) + + def loss(self, pred: torch.Tensor) -> torch.Tensor: + """Generate loss. + + Args: + pred (torch.Tensor): NxC input features. + + Returns: + torch.Tensor: The SwAV loss. + """ + loss = self.loss_module(pred) + + return loss diff --git a/mmpretrain/models/heads/vig_head.py b/mmpretrain/models/heads/vig_head.py new file mode 100644 index 0000000000000000000000000000000000000000..ecb984deb4b0b6bf162263a86771f2d3eba71cbd --- /dev/null +++ b/mmpretrain/models/heads/vig_head.py @@ -0,0 +1,65 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Tuple + +import torch +import torch.nn as nn +from mmcv.cnn import build_activation_layer + +from mmpretrain.registry import MODELS +from .cls_head import ClsHead + + +@MODELS.register_module() +class VigClsHead(ClsHead): + """The classification head for Vision GNN. + + Args: + num_classes (int): Number of categories excluding the background + category. + in_channels (int): Number of channels in the input feature map. + hidden_dim (int): The number of middle channels. Defaults to 1024. + act_cfg (dict): The config of activation function. + Defaults to ``dict(type='GELU')``. + dropout (float): The dropout rate. + loss (dict): Config of classification loss. Defaults to + ``dict(type='CrossEntropyLoss', loss_weight=1.0)``. + init_cfg (dict, optional): the config to control the initialization. + Defaults to None. + """ + + def __init__(self, + num_classes: int, + in_channels: int, + hidden_dim: int = 1024, + act_cfg: dict = dict(type='GELU'), + dropout: float = 0., + **kwargs): + super().__init__(**kwargs) + + self.fc1 = nn.Linear(in_channels, hidden_dim) + self.bn = nn.BatchNorm1d(hidden_dim) + self.act = build_activation_layer(act_cfg) + self.drop = nn.Dropout(dropout) + self.fc2 = nn.Linear(hidden_dim, num_classes) + + def pre_logits(self, feats: Tuple[torch.Tensor]) -> torch.Tensor: + """The process before the final classification head. + + The input ``feats`` is a tuple of tensor, and each tensor is the + feature of a stage_blocks stage. In ``VigClsHead``, we just obtain the + feature of the last stage. + """ + feats = feats[-1] + feats = self.fc1(feats) + feats = self.bn(feats) + feats = self.act(feats) + feats = self.drop(feats) + + return feats + + def forward(self, feats: Tuple[torch.Tensor]) -> torch.Tensor: + """The forward process.""" + pre_logits = self.pre_logits(feats) + # The final classification head. 
+ cls_score = self.fc2(pre_logits) + return cls_score diff --git a/mmpretrain/models/heads/vision_transformer_head.py b/mmpretrain/models/heads/vision_transformer_head.py new file mode 100644 index 0000000000000000000000000000000000000000..83e8fca125cd626c51abfcc87b28387f654618f9 --- /dev/null +++ b/mmpretrain/models/heads/vision_transformer_head.py @@ -0,0 +1,97 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import math +from collections import OrderedDict +from typing import List, Optional, Tuple + +import torch +import torch.nn as nn +from mmcv.cnn import build_activation_layer +from mmengine.model import Sequential +from mmengine.model.weight_init import trunc_normal_ + +from mmpretrain.registry import MODELS +from .cls_head import ClsHead + + +@MODELS.register_module() +class VisionTransformerClsHead(ClsHead): + """Vision Transformer classifier head. + + Args: + num_classes (int): Number of categories excluding the background + category. + in_channels (int): Number of channels in the input feature map. + hidden_dim (int, optional): Number of the dimensions for hidden layer. + Defaults to None, which means no extra hidden layer. + act_cfg (dict): The activation config. Only available during + pre-training. Defaults to ``dict(type='Tanh')``. + init_cfg (dict): The extra initialization configs. Defaults to + ``dict(type='Constant', layer='Linear', val=0)``. + """ + + def __init__(self, + num_classes: int, + in_channels: int, + hidden_dim: Optional[int] = None, + act_cfg: dict = dict(type='Tanh'), + init_cfg: dict = dict(type='Constant', layer='Linear', val=0), + **kwargs): + super(VisionTransformerClsHead, self).__init__( + init_cfg=init_cfg, **kwargs) + self.in_channels = in_channels + self.num_classes = num_classes + self.hidden_dim = hidden_dim + self.act_cfg = act_cfg + + if self.num_classes <= 0: + raise ValueError( + f'num_classes={num_classes} must be a positive integer') + + self._init_layers() + + def _init_layers(self): + """"Init hidden layer if exists.""" + if self.hidden_dim is None: + layers = [('head', nn.Linear(self.in_channels, self.num_classes))] + else: + layers = [ + ('pre_logits', nn.Linear(self.in_channels, self.hidden_dim)), + ('act', build_activation_layer(self.act_cfg)), + ('head', nn.Linear(self.hidden_dim, self.num_classes)), + ] + self.layers = Sequential(OrderedDict(layers)) + + def init_weights(self): + """"Init weights of hidden layer if exists.""" + super(VisionTransformerClsHead, self).init_weights() + # Modified from ClassyVision + if hasattr(self.layers, 'pre_logits'): + # Lecun norm + trunc_normal_( + self.layers.pre_logits.weight, + std=math.sqrt(1 / self.layers.pre_logits.in_features)) + nn.init.zeros_(self.layers.pre_logits.bias) + + def pre_logits(self, feats: Tuple[List[torch.Tensor]]) -> torch.Tensor: + """The process before the final classification head. + + The input ``feats`` is a tuple of list of tensor, and each tensor is + the feature of a backbone stage. In ``VisionTransformerClsHead``, we + obtain the feature of the last stage and forward in hidden layer if + exists. + """ + feat = feats[-1] # Obtain feature of the last scale. 
+ # For backward-compatibility with the previous ViT output + cls_token = feat[-1] if isinstance(feat, list) else feat + if self.hidden_dim is None: + return cls_token + else: + x = self.layers.pre_logits(cls_token) + return self.layers.act(x) + + def forward(self, feats: Tuple[List[torch.Tensor]]) -> torch.Tensor: + """The forward process.""" + pre_logits = self.pre_logits(feats) + # The final classification head. + cls_score = self.layers.head(pre_logits) + return cls_score diff --git a/mmpretrain/models/heads/vqa_head.py b/mmpretrain/models/heads/vqa_head.py new file mode 100644 index 0000000000000000000000000000000000000000..c7b5fe532874e2e8325caa3090d3be66b098ad46 --- /dev/null +++ b/mmpretrain/models/heads/vqa_head.py @@ -0,0 +1,246 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Optional, Union + +import mmengine +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmengine.model import BaseModule + +from mmpretrain.registry import MODELS + + +@MODELS.register_module() +class VQAGenerationHead(BaseModule): + """Generation head for multi-modal pre-trained task, adapted by BLIP. + Normally used for qa generation task (open-set) + + Args: + decoder (dict): Decoder for decoding answers. + inference_method (str): Inference method. One of 'rank', 'generate'. + - If 'rank', the model will return answers with the highest + probability from the answer list. + - If 'generate', the model will generate answers. + - Only for test, not for train / val. + num_beams (int): Number of beams for beam search. 1 means no beam + search. Only support when inference_method=='generate'. + Defaults to 3. + num_ans_candidates (int): Number of answer candidates, used to filter + out answers with low probability. Only support when + inference_method=='rank'. Defaults to 128. + loss (dict or nn.Module): Config of loss or module of loss. Defaults to + ``nn.CrossEntropyLoss(reduction='none', ignore_index=-100)``. + init_cfg (dict, optional): the config to control the initialization. + Defaults to None. + answer_list_path (str, optional): Path to `answer_list.json` + (json file of a answer list). Required when + inference_method=='rank'. + + + TODO: `mmcls.LabelSmoothLoss` has not support `ignore_index` param. + Now using `nn.CrossEntropyLoss`, without label_smoothing, in order to + maintain compatibility with torch < 1.10.0 + """ + + def __init__( + self, + decoder: dict, + inference_method: str = 'generate', + num_beams: int = 3, + num_ans_candidates: int = 128, + loss: Union[dict, nn.Module] = nn.CrossEntropyLoss( + reduction='none', ignore_index=-100), + init_cfg: Optional[dict] = None, + answer_list_path: Optional[str] = None, + ) -> None: + + super(VQAGenerationHead, self).__init__(init_cfg=init_cfg) + self.decoder = MODELS.build(decoder) + + if inference_method == 'generate': + assert isinstance(num_beams, int), \ + 'for VQA `generate` mode, `num_beams` must be a int.' + self.num_beams = num_beams + self.num_ans_candidates = None + self.answer_list = None + + elif inference_method == 'rank': + assert isinstance(num_ans_candidates, int), \ + 'for VQA `rank` mode, `num_ans_candidates` must be a int.' + assert isinstance(answer_list_path, str), \ + 'for VQA `rank` mode, `answer_list_path` must be set as ' \ + 'the path to `answer_list.json`.' 
+ self.num_beams = None + self.answer_list = mmengine.load(answer_list_path) + if isinstance(self.answer_list, dict): + self.answer_list = list(self.answer_list.keys()) + assert isinstance(self.answer_list, list) and all( + isinstance(item, str) for item in self.answer_list), \ + 'for VQA `rank` mode, `answer_list.json` must be a list of str' + self.num_ans_candidates = min(num_ans_candidates, + len(self.answer_list)) + + else: + raise AssertionError( + 'for VQA, `inference_method` must be "generate" or "rank", ' + 'got {}.'.format(inference_method)) + + self.inference_method = inference_method + if not isinstance(loss, nn.Module): + loss = MODELS.build(loss) + self.loss_module = loss + + def forward(self, feats: dict): + prediction_logits = self.decoder( + feats['answer_input_ids'], + attention_mask=feats['answer_attention_mask'], + encoder_hidden_states=feats['question_states'], + encoder_attention_mask=feats['question_atts'], + labels=feats['answer_targets'], + return_dict=True, + return_logits=True, # directly return logits, not computing loss + reduction='none', + ) + return prediction_logits + + def loss(self, feats: dict, data_samples=None): + """Calculate losses from the extracted features. + + Args: + feats (dict): The features extracted from the backbone. + data_samples (List[BaseDataElement]): The annotation data of + every samples. + + Returns: + dict[str, Tensor]: a dictionary of loss components + """ + shifted_prediction_scores = self(feats) + labels = feats['answer_targets'] + lm_loss = None + + # we are doing next-token prediction; + # shift prediction scores and input ids by one + labels = labels[:, 1:].contiguous() + lm_loss = self.loss_module( + shifted_prediction_scores.view(-1, + self.decoder.med_config.vocab_size), + labels.view(-1)) + lm_loss = lm_loss.view(shifted_prediction_scores.size(0), -1).sum(1) + # compute weighted loss + losses = dict() + loss = feats['answer_weight'] * lm_loss + loss = loss.sum() / feats['batch_size'] + losses['vqa_loss'] = loss + + return losses + + def predict_rank(self, feats: dict, data_samples=None): + """Predict rank in a close-set answer list.""" + question_states = feats['multimodal_embeds'] + question_atts = feats['question_atts'] + answer_candidates = feats['answer_candidates'] + assert answer_candidates is not None + + answer_ids = answer_candidates.input_ids + answer_atts = answer_candidates.attention_mask + num_ques = question_states.size(0) + start_ids = answer_ids[0, 0].repeat(num_ques, 1) # bos token + + start_output = self.decoder( + start_ids, + encoder_hidden_states=question_states, + encoder_attention_mask=question_atts, + return_dict=True, + reduction='none', + ) + logits = start_output.logits[:, 0, :] # first token's logit + + # topk_probs: top-k probability + # topk_ids: [num_question, k] + answer_first_token = answer_ids[:, 1] + prob_first_token = F.softmax( + logits, dim=1).index_select( + dim=1, index=answer_first_token) + topk_probs, topk_ids = prob_first_token.topk( + self.num_ans_candidates, dim=1) + + # answer input: [num_question*k, answer_len] + input_ids = [] + input_atts = [] + for b, topk_id in enumerate(topk_ids): + input_ids.append(answer_ids.index_select(dim=0, index=topk_id)) + input_atts.append(answer_atts.index_select(dim=0, index=topk_id)) + input_ids = torch.cat(input_ids, dim=0) + input_atts = torch.cat(input_atts, dim=0) + + targets_ids = input_ids.masked_fill(input_ids == feats['pad_token_id'], + -100) + + def tile(x, dim, n_tile): + init_dim = x.size(dim) + repeat_idx = [1] * x.dim() + 
repeat_idx[dim] = n_tile + x = x.repeat(*(repeat_idx)) + order_index = torch.LongTensor( + np.concatenate([ + init_dim * np.arange(n_tile) + i for i in range(init_dim) + ])) + return torch.index_select(x, dim, order_index.to(x.device)) + + # repeat encoder's output for top-k answers + question_states = tile(question_states, 0, self.num_ans_candidates) + question_atts = tile(question_atts, 0, self.num_ans_candidates) + + output = self.decoder( + input_ids, + attention_mask=input_atts, + encoder_hidden_states=question_states, + encoder_attention_mask=question_atts, + labels=targets_ids, + return_dict=True, + reduction='none', + ) + + log_probs_sum = -output.loss + log_probs_sum = log_probs_sum.view(num_ques, self.num_ans_candidates) + + max_topk_ids = log_probs_sum.argmax(dim=1) + max_ids = topk_ids[max_topk_ids >= 0, max_topk_ids] + + answers = [self.answer_list[max_id] for max_id in max_ids] + + return answers + + def predict_generate(self, feats: dict, data_samples=None): + """Predict answers in a generation manner.""" + device = feats['multimodal_embeds'].device + question_states = feats['multimodal_embeds'] + question_atts = torch.ones( + question_states.size()[:-1], dtype=torch.long).to(device) + model_kwargs = { + 'encoder_hidden_states': question_states, + 'encoder_attention_mask': question_atts + } + + bos_ids = torch.full((feats['multimodal_embeds'].shape[0], 1), + fill_value=feats['bos_token_id'], + device=device) + + outputs = self.decoder.generate( + input_ids=bos_ids, + max_length=10, + min_length=1, + num_beams=self.num_beams, + eos_token_id=feats['sep_token_id'], + pad_token_id=feats['pad_token_id'], + **model_kwargs) + + return outputs + + def predict(self, feats: dict, data_samples=None): + """Predict results from the extracted features.""" + if self.inference_method == 'generate': + return self.predict_generate(feats, data_samples) + elif self.inference_method == 'rank': + return self.predict_rank(feats, data_samples) diff --git a/mmpretrain/models/losses/__init__.py b/mmpretrain/models/losses/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b1b2ed725ef76df7e18bf9283ec84b3b12e3d2cf --- /dev/null +++ b/mmpretrain/models/losses/__init__.py @@ -0,0 +1,35 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
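Note (illustrative, not part of the diff): the ``tile`` helper defined in ``VQAGenerationHead.predict_rank`` above repeats every row of a tensor ``n_tile`` times along a given dimension so the encoder output can be paired with each of the top-k answer candidates; its effect matches ``torch.repeat_interleave`` along that dimension. A tiny example with made-up values:

import torch

x = torch.tensor([[1, 2], [3, 4]])
print(x.repeat_interleave(3, dim=0))
# tensor([[1, 2],
#         [1, 2],
#         [1, 2],
#         [3, 4],
#         [3, 4],
#         [3, 4]])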
+from .asymmetric_loss import AsymmetricLoss, asymmetric_loss +from .cae_loss import CAELoss +from .cosine_similarity_loss import CosineSimilarityLoss +from .cross_correlation_loss import CrossCorrelationLoss +from .cross_entropy_loss import (CrossEntropyLoss, binary_cross_entropy, + cross_entropy) +from .focal_loss import FocalLoss, sigmoid_focal_loss +from .label_smooth_loss import LabelSmoothLoss +from .reconstruction_loss import PixelReconstructionLoss +from .seesaw_loss import SeesawLoss +from .swav_loss import SwAVLoss +from .utils import (convert_to_one_hot, reduce_loss, weight_reduce_loss, + weighted_loss) + +__all__ = [ + 'asymmetric_loss', + 'AsymmetricLoss', + 'cross_entropy', + 'binary_cross_entropy', + 'CrossEntropyLoss', + 'reduce_loss', + 'weight_reduce_loss', + 'LabelSmoothLoss', + 'weighted_loss', + 'FocalLoss', + 'sigmoid_focal_loss', + 'convert_to_one_hot', + 'SeesawLoss', + 'CAELoss', + 'CosineSimilarityLoss', + 'CrossCorrelationLoss', + 'PixelReconstructionLoss', + 'SwAVLoss', +] diff --git a/mmpretrain/models/losses/asymmetric_loss.py b/mmpretrain/models/losses/asymmetric_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..dcc9707da8475b5e87d2b4f8a5a2cf669d7ffe2f --- /dev/null +++ b/mmpretrain/models/losses/asymmetric_loss.py @@ -0,0 +1,149 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +import torch.nn as nn + +from mmpretrain.registry import MODELS +from .utils import convert_to_one_hot, weight_reduce_loss + + +def asymmetric_loss(pred, + target, + weight=None, + gamma_pos=1.0, + gamma_neg=4.0, + clip=0.05, + reduction='mean', + avg_factor=None, + use_sigmoid=True, + eps=1e-8): + r"""asymmetric loss. + + Please refer to the `paper `__ for + details. + + Args: + pred (torch.Tensor): The prediction with shape (N, \*). + target (torch.Tensor): The ground truth label of the prediction with + shape (N, \*). + weight (torch.Tensor, optional): Sample-wise loss weight with shape + (N, ). Defaults to None. + gamma_pos (float): positive focusing parameter. Defaults to 0.0. + gamma_neg (float): Negative focusing parameter. We usually set + gamma_neg > gamma_pos. Defaults to 4.0. + clip (float, optional): Probability margin. Defaults to 0.05. + reduction (str): The method used to reduce the loss. + Options are "none", "mean" and "sum". If reduction is 'none' , loss + is same shape as pred and label. Defaults to 'mean'. + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. + use_sigmoid (bool): Whether the prediction uses sigmoid instead + of softmax. Defaults to True. + eps (float): The minimum value of the argument of logarithm. Defaults + to 1e-8. + + Returns: + torch.Tensor: Loss. + """ + assert pred.shape == \ + target.shape, 'pred and target should be in the same shape.' 
+ + if use_sigmoid: + pred_sigmoid = pred.sigmoid() + else: + pred_sigmoid = nn.functional.softmax(pred, dim=-1) + + target = target.type_as(pred) + + if clip and clip > 0: + pt = (1 - pred_sigmoid + + clip).clamp(max=1) * (1 - target) + pred_sigmoid * target + else: + pt = (1 - pred_sigmoid) * (1 - target) + pred_sigmoid * target + asymmetric_weight = (1 - pt).pow(gamma_pos * target + gamma_neg * + (1 - target)) + loss = -torch.log(pt.clamp(min=eps)) * asymmetric_weight + if weight is not None: + assert weight.dim() == 1 + weight = weight.float() + if pred.dim() > 1: + weight = weight.reshape(-1, 1) + loss = weight_reduce_loss(loss, weight, reduction, avg_factor) + return loss + + +@MODELS.register_module() +class AsymmetricLoss(nn.Module): + """asymmetric loss. + + Args: + gamma_pos (float): positive focusing parameter. + Defaults to 0.0. + gamma_neg (float): Negative focusing parameter. We + usually set gamma_neg > gamma_pos. Defaults to 4.0. + clip (float, optional): Probability margin. Defaults to 0.05. + reduction (str): The method used to reduce the loss into + a scalar. + loss_weight (float): Weight of loss. Defaults to 1.0. + use_sigmoid (bool): Whether the prediction uses sigmoid instead + of softmax. Defaults to True. + eps (float): The minimum value of the argument of logarithm. Defaults + to 1e-8. + """ + + def __init__(self, + gamma_pos=0.0, + gamma_neg=4.0, + clip=0.05, + reduction='mean', + loss_weight=1.0, + use_sigmoid=True, + eps=1e-8): + super(AsymmetricLoss, self).__init__() + self.gamma_pos = gamma_pos + self.gamma_neg = gamma_neg + self.clip = clip + self.reduction = reduction + self.loss_weight = loss_weight + self.use_sigmoid = use_sigmoid + self.eps = eps + + def forward(self, + pred, + target, + weight=None, + avg_factor=None, + reduction_override=None): + r"""asymmetric loss. + + Args: + pred (torch.Tensor): The prediction with shape (N, \*). + target (torch.Tensor): The ground truth label of the prediction + with shape (N, \*), N or (N,1). + weight (torch.Tensor, optional): Sample-wise loss weight with shape + (N, \*). Defaults to None. + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. + reduction_override (str, optional): The method used to reduce the + loss into a scalar. Options are "none", "mean" and "sum". + Defaults to None. + + Returns: + torch.Tensor: Loss. + """ + assert reduction_override in (None, 'none', 'mean', 'sum') + reduction = ( + reduction_override if reduction_override else self.reduction) + if target.dim() == 1 or (target.dim() == 2 and target.shape[1] == 1): + target = convert_to_one_hot(target.view(-1, 1), pred.shape[-1]) + loss_cls = self.loss_weight * asymmetric_loss( + pred, + target, + weight, + gamma_pos=self.gamma_pos, + gamma_neg=self.gamma_neg, + clip=self.clip, + reduction=reduction, + avg_factor=avg_factor, + use_sigmoid=self.use_sigmoid, + eps=self.eps) + return loss_cls diff --git a/mmpretrain/models/losses/cae_loss.py b/mmpretrain/models/losses/cae_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..1dc081b603361e9b06c96cf836941fa971a4b4c4 --- /dev/null +++ b/mmpretrain/models/losses/cae_loss.py @@ -0,0 +1,48 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Tuple + +import torch +from mmengine.model import BaseModule +from torch import nn + +from mmpretrain.registry import MODELS + + +@MODELS.register_module() +class CAELoss(BaseModule): + """Loss function for CAE. + + Compute the align loss and the main loss. 
+ + Args: + lambd (float): The weight for the align loss. + """ + + def __init__(self, lambd: float) -> None: + super().__init__() + self.lambd = lambd + self.loss_cross_entropy = nn.CrossEntropyLoss() + self.loss_mse = nn.MSELoss() + + def forward( + self, logits: torch.Tensor, target: torch.Tensor, + latent_pred: torch.Tensor, + latent_target: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]: + """Forward function of CAE Loss. + + Args: + logits (torch.Tensor): The outputs from the decoder. + target (torch.Tensor): The targets generated by dalle. + latent_pred (torch.Tensor): The latent prediction from the + regressor. + latent_target (torch.Tensor): The latent target from the teacher + network. + + Returns: + Tuple[torch.Tensor, torch.Tensor]: The main loss and align loss. + """ + loss_main = self.loss_cross_entropy(logits, target) + loss_align = self.loss_mse(latent_pred, + latent_target.detach()) * self.lambd + + return loss_main, loss_align diff --git a/mmpretrain/models/losses/cosine_similarity_loss.py b/mmpretrain/models/losses/cosine_similarity_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..f0a5931e24686bd560196e1e310fc283fc4c9d4d --- /dev/null +++ b/mmpretrain/models/losses/cosine_similarity_loss.py @@ -0,0 +1,55 @@ +# Copyright (c) OpenMMLab. All rights reserved. + +from typing import Optional + +import torch +from mmengine.model import BaseModule +from torch import nn + +from mmpretrain.registry import MODELS + + +@MODELS.register_module() +class CosineSimilarityLoss(BaseModule): + """Cosine similarity loss function. + + Compute the similarity between two features and optimize that similarity as + loss. + + Args: + shift_factor (float): The shift factor of cosine similarity. + Default: 0.0. + scale_factor (float): The scale factor of cosine similarity. + Default: 1.0. + """ + + def __init__(self, + shift_factor: float = 0.0, + scale_factor: float = 1.0) -> None: + super().__init__() + self.shift_factor = shift_factor + self.scale_factor = scale_factor + + def forward(self, + pred: torch.Tensor, + target: torch.Tensor, + mask: Optional[torch.Tensor] = None) -> torch.Tensor: + """Forward function of cosine similarity loss. + + Args: + pred (torch.Tensor): The predicted features. + target (torch.Tensor): The target features. + + Returns: + torch.Tensor: The cosine similarity loss. + """ + pred_norm = nn.functional.normalize(pred, dim=-1) + target_norm = nn.functional.normalize(target, dim=-1) + loss = self.shift_factor - self.scale_factor * ( + pred_norm * target_norm).sum(dim=-1) + + if mask is None: + loss = loss.mean() + else: + loss = (loss * mask).sum() / mask.sum() + return loss diff --git a/mmpretrain/models/losses/cross_correlation_loss.py b/mmpretrain/models/losses/cross_correlation_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..d26ce3ddbd7b41778cbf25147df39da256788dd1 --- /dev/null +++ b/mmpretrain/models/losses/cross_correlation_loss.py @@ -0,0 +1,44 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from mmengine.model import BaseModule + +from mmpretrain.registry import MODELS + + +@MODELS.register_module() +class CrossCorrelationLoss(BaseModule): + """Cross correlation loss function. + + Compute the on-diagnal and off-diagnal loss. + + Args: + lambd (float): The weight for the off-diag loss. 
+ """ + + def __init__(self, lambd: float = 0.0051) -> None: + super().__init__() + self.lambd = lambd + + def forward(self, cross_correlation_matrix: torch.Tensor) -> torch.Tensor: + """Forward function of cross correlation loss. + + Args: + cross_correlation_matrix (torch.Tensor): The cross correlation + matrix. + + Returns: + torch.Tensor: cross correlation loss. + """ + # loss + on_diag = torch.diagonal(cross_correlation_matrix).add_(-1).pow_( + 2).sum() + off_diag = self.off_diagonal(cross_correlation_matrix).pow_(2).sum() + loss = on_diag + self.lambd * off_diag + return loss + + def off_diagonal(self, x: torch.Tensor) -> torch.Tensor: + """Rreturn a flattened view of the off-diagonal elements of a square + matrix.""" + n, m = x.shape + assert n == m + return x.flatten()[:-1].view(n - 1, n + 1)[:, 1:].flatten() diff --git a/mmpretrain/models/losses/cross_entropy_loss.py b/mmpretrain/models/losses/cross_entropy_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..5d418beb812f8493668aeff99198555068a55435 --- /dev/null +++ b/mmpretrain/models/losses/cross_entropy_loss.py @@ -0,0 +1,209 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch.nn as nn +import torch.nn.functional as F + +from mmpretrain.registry import MODELS +from .utils import weight_reduce_loss + + +def cross_entropy(pred, + label, + weight=None, + reduction='mean', + avg_factor=None, + class_weight=None): + """Calculate the CrossEntropy loss. + + Args: + pred (torch.Tensor): The prediction with shape (N, C), C is the number + of classes. + label (torch.Tensor): The gt label of the prediction. + weight (torch.Tensor, optional): Sample-wise loss weight. + reduction (str): The method used to reduce the loss. + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. + class_weight (torch.Tensor, optional): The weight for each class with + shape (C), C is the number of classes. Default None. + + Returns: + torch.Tensor: The calculated loss + """ + # element-wise losses + loss = F.cross_entropy(pred, label, weight=class_weight, reduction='none') + + # apply weights and do the reduction + if weight is not None: + weight = weight.float() + loss = weight_reduce_loss( + loss, weight=weight, reduction=reduction, avg_factor=avg_factor) + + return loss + + +def soft_cross_entropy(pred, + label, + weight=None, + reduction='mean', + class_weight=None, + avg_factor=None): + """Calculate the Soft CrossEntropy loss. The label can be float. + + Args: + pred (torch.Tensor): The prediction with shape (N, C), C is the number + of classes. + label (torch.Tensor): The gt label of the prediction with shape (N, C). + When using "mixup", the label can be float. + weight (torch.Tensor, optional): Sample-wise loss weight. + reduction (str): The method used to reduce the loss. + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. + class_weight (torch.Tensor, optional): The weight for each class with + shape (C), C is the number of classes. Default None. 
+ + Returns: + torch.Tensor: The calculated loss + """ + # element-wise losses + loss = -label * F.log_softmax(pred, dim=-1) + if class_weight is not None: + loss *= class_weight + loss = loss.sum(dim=-1) + + # apply weights and do the reduction + if weight is not None: + weight = weight.float() + loss = weight_reduce_loss( + loss, weight=weight, reduction=reduction, avg_factor=avg_factor) + + return loss + + +def binary_cross_entropy(pred, + label, + weight=None, + reduction='mean', + avg_factor=None, + class_weight=None, + pos_weight=None): + r"""Calculate the binary CrossEntropy loss with logits. + + Args: + pred (torch.Tensor): The prediction with shape (N, \*). + label (torch.Tensor): The gt label with shape (N, \*). + weight (torch.Tensor, optional): Element-wise weight of loss with shape + (N, ). Defaults to None. + reduction (str): The method used to reduce the loss. + Options are "none", "mean" and "sum". If reduction is 'none' , loss + is same shape as pred and label. Defaults to 'mean'. + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. + class_weight (torch.Tensor, optional): The weight for each class with + shape (C), C is the number of classes. Default None. + pos_weight (torch.Tensor, optional): The positive weight for each + class with shape (C), C is the number of classes. Default None. + + Returns: + torch.Tensor: The calculated loss + """ + # Ensure that the size of class_weight is consistent with pred and label to + # avoid automatic boracast, + assert pred.dim() == label.dim() + + if class_weight is not None: + N = pred.size()[0] + class_weight = class_weight.repeat(N, 1) + loss = F.binary_cross_entropy_with_logits( + pred, + label.float(), # only accepts float type tensor + weight=class_weight, + pos_weight=pos_weight, + reduction='none') + + # apply weights and do the reduction + if weight is not None: + assert weight.dim() == 1 + weight = weight.float() + if pred.dim() > 1: + weight = weight.reshape(-1, 1) + loss = weight_reduce_loss( + loss, weight=weight, reduction=reduction, avg_factor=avg_factor) + return loss + + +@MODELS.register_module() +class CrossEntropyLoss(nn.Module): + """Cross entropy loss. + + Args: + use_sigmoid (bool): Whether the prediction uses sigmoid + of softmax. Defaults to False. + use_soft (bool): Whether to use the soft version of CrossEntropyLoss. + Defaults to False. + reduction (str): The method used to reduce the loss. + Options are "none", "mean" and "sum". Defaults to 'mean'. + loss_weight (float): Weight of the loss. Defaults to 1.0. + class_weight (List[float], optional): The weight for each class with + shape (C), C is the number of classes. Default None. + pos_weight (List[float], optional): The positive weight for each + class with shape (C), C is the number of classes. Only enabled in + BCE loss when ``use_sigmoid`` is True. Default None. 
+ """ + + def __init__(self, + use_sigmoid=False, + use_soft=False, + reduction='mean', + loss_weight=1.0, + class_weight=None, + pos_weight=None): + super(CrossEntropyLoss, self).__init__() + self.use_sigmoid = use_sigmoid + self.use_soft = use_soft + assert not ( + self.use_soft and self.use_sigmoid + ), 'use_sigmoid and use_soft could not be set simultaneously' + + self.reduction = reduction + self.loss_weight = loss_weight + self.class_weight = class_weight + self.pos_weight = pos_weight + + if self.use_sigmoid: + self.cls_criterion = binary_cross_entropy + elif self.use_soft: + self.cls_criterion = soft_cross_entropy + else: + self.cls_criterion = cross_entropy + + def forward(self, + cls_score, + label, + weight=None, + avg_factor=None, + reduction_override=None, + **kwargs): + assert reduction_override in (None, 'none', 'mean', 'sum') + reduction = ( + reduction_override if reduction_override else self.reduction) + + if self.class_weight is not None: + class_weight = cls_score.new_tensor(self.class_weight) + else: + class_weight = None + + # only BCE loss has pos_weight + if self.pos_weight is not None and self.use_sigmoid: + pos_weight = cls_score.new_tensor(self.pos_weight) + kwargs.update({'pos_weight': pos_weight}) + else: + pos_weight = None + + loss_cls = self.loss_weight * self.cls_criterion( + cls_score, + label, + weight, + class_weight=class_weight, + reduction=reduction, + avg_factor=avg_factor, + **kwargs) + return loss_cls diff --git a/mmpretrain/models/losses/focal_loss.py b/mmpretrain/models/losses/focal_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..9d2cf5035aedfd923ae388b264a7457312b274fd --- /dev/null +++ b/mmpretrain/models/losses/focal_loss.py @@ -0,0 +1,116 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch.nn as nn +import torch.nn.functional as F + +from mmpretrain.registry import MODELS +from .utils import convert_to_one_hot, weight_reduce_loss + + +def sigmoid_focal_loss(pred, + target, + weight=None, + gamma=2.0, + alpha=0.25, + reduction='mean', + avg_factor=None): + r"""Sigmoid focal loss. + + Args: + pred (torch.Tensor): The prediction with shape (N, \*). + target (torch.Tensor): The ground truth label of the prediction with + shape (N, \*). + weight (torch.Tensor, optional): Sample-wise loss weight with shape + (N, ). Defaults to None. + gamma (float): The gamma for calculating the modulating factor. + Defaults to 2.0. + alpha (float): A balanced form for Focal Loss. Defaults to 0.25. + reduction (str): The method used to reduce the loss. + Options are "none", "mean" and "sum". If reduction is 'none' , + loss is same shape as pred and label. Defaults to 'mean'. + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. + + Returns: + torch.Tensor: Loss. + """ + assert pred.shape == \ + target.shape, 'pred and target should be in the same shape.' + pred_sigmoid = pred.sigmoid() + target = target.type_as(pred) + pt = (1 - pred_sigmoid) * target + pred_sigmoid * (1 - target) + focal_weight = (alpha * target + (1 - alpha) * + (1 - target)) * pt.pow(gamma) + loss = F.binary_cross_entropy_with_logits( + pred, target, reduction='none') * focal_weight + if weight is not None: + assert weight.dim() == 1 + weight = weight.float() + if pred.dim() > 1: + weight = weight.reshape(-1, 1) + loss = weight_reduce_loss(loss, weight, reduction, avg_factor) + return loss + + +@MODELS.register_module() +class FocalLoss(nn.Module): + """Focal loss. 
+ + Args: + gamma (float): Focusing parameter in focal loss. + Defaults to 2.0. + alpha (float): The parameter in balanced form of focal + loss. Defaults to 0.25. + reduction (str): The method used to reduce the loss into + a scalar. Options are "none" and "mean". Defaults to 'mean'. + loss_weight (float): Weight of loss. Defaults to 1.0. + """ + + def __init__(self, + gamma=2.0, + alpha=0.25, + reduction='mean', + loss_weight=1.0): + + super(FocalLoss, self).__init__() + self.gamma = gamma + self.alpha = alpha + self.reduction = reduction + self.loss_weight = loss_weight + + def forward(self, + pred, + target, + weight=None, + avg_factor=None, + reduction_override=None): + r"""Sigmoid focal loss. + + Args: + pred (torch.Tensor): The prediction with shape (N, \*). + target (torch.Tensor): The ground truth label of the prediction + with shape (N, \*), N or (N,1). + weight (torch.Tensor, optional): Sample-wise loss weight with shape + (N, \*). Defaults to None. + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. + reduction_override (str, optional): The method used to reduce the + loss into a scalar. Options are "none", "mean" and "sum". + Defaults to None. + + Returns: + torch.Tensor: Loss. + """ + assert reduction_override in (None, 'none', 'mean', 'sum') + reduction = ( + reduction_override if reduction_override else self.reduction) + if target.dim() == 1 or (target.dim() == 2 and target.shape[1] == 1): + target = convert_to_one_hot(target.view(-1, 1), pred.shape[-1]) + loss_cls = self.loss_weight * sigmoid_focal_loss( + pred, + target, + weight, + gamma=self.gamma, + alpha=self.alpha, + reduction=reduction, + avg_factor=avg_factor) + return loss_cls diff --git a/mmpretrain/models/losses/label_smooth_loss.py b/mmpretrain/models/losses/label_smooth_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..f117df33b07c05ee7516f0b99d985f0d001b2d31 --- /dev/null +++ b/mmpretrain/models/losses/label_smooth_loss.py @@ -0,0 +1,177 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +import torch.nn as nn + +from mmpretrain.registry import MODELS +from .cross_entropy_loss import CrossEntropyLoss +from .utils import convert_to_one_hot + + +@MODELS.register_module() +class LabelSmoothLoss(nn.Module): + r"""Initializer for the label smoothed cross entropy loss. + + Refers to `Rethinking the Inception Architecture for Computer Vision + `_ + + This decreases gap between output scores and encourages generalization. + Labels provided to forward can be one-hot like vectors (NxC) or class + indices (Nx1). + And this accepts linear combination of one-hot like labels from mixup or + cutmix except multi-label task. + + Args: + label_smooth_val (float): The degree of label smoothing. + num_classes (int, optional): Number of classes. Defaults to None. + mode (str): Refers to notes, Options are 'original', 'classy_vision', + 'multi_label'. Defaults to 'original'. + use_sigmoid (bool, optional): Whether the prediction uses sigmoid of + softmax. Defaults to None, which means to use sigmoid in + "multi_label" mode and not use in other modes. + reduction (str): The method used to reduce the loss. + Options are "none", "mean" and "sum". Defaults to 'mean'. + loss_weight (float): Weight of the loss. Defaults to 1.0. + + Notes: + - if the mode is **"original"**, this will use the same label smooth + method as the original paper as: + + .. 
math:: + (1-\epsilon)\delta_{k, y} + \frac{\epsilon}{K} + + where :math:`\epsilon` is the ``label_smooth_val``, :math:`K` is the + ``num_classes`` and :math:`\delta_{k, y}` is Dirac delta, which + equals 1 for :math:`k=y` and 0 otherwise. + + - if the mode is **"classy_vision"**, this will use the same label + smooth method as the facebookresearch/ClassyVision repo as: + + .. math:: + \frac{\delta_{k, y} + \epsilon/K}{1+\epsilon} + + - if the mode is **"multi_label"**, this will accept labels from + multi-label task and smoothing them as: + + .. math:: + (1-2\epsilon)\delta_{k, y} + \epsilon + """ + + def __init__(self, + label_smooth_val, + num_classes=None, + use_sigmoid=None, + mode='original', + reduction='mean', + loss_weight=1.0, + class_weight=None, + pos_weight=None): + super().__init__() + self.num_classes = num_classes + self.loss_weight = loss_weight + + assert (isinstance(label_smooth_val, float) + and 0 <= label_smooth_val < 1), \ + f'LabelSmoothLoss accepts a float label_smooth_val ' \ + f'over [0, 1), but gets {label_smooth_val}' + self.label_smooth_val = label_smooth_val + + accept_reduction = {'none', 'mean', 'sum'} + assert reduction in accept_reduction, \ + f'LabelSmoothLoss supports reduction {accept_reduction}, ' \ + f'but gets {mode}.' + self.reduction = reduction + + accept_mode = {'original', 'classy_vision', 'multi_label'} + assert mode in accept_mode, \ + f'LabelSmoothLoss supports mode {accept_mode}, but gets {mode}.' + self.mode = mode + + self._eps = label_smooth_val + if mode == 'classy_vision': + self._eps = label_smooth_val / (1 + label_smooth_val) + + if mode == 'multi_label': + if not use_sigmoid: + from mmengine.logging import MMLogger + MMLogger.get_current_instance().warning( + 'For multi-label tasks, please set `use_sigmoid=True` ' + 'to use binary cross entropy.') + self.smooth_label = self.multilabel_smooth_label + use_sigmoid = True if use_sigmoid is None else use_sigmoid + else: + self.smooth_label = self.original_smooth_label + use_sigmoid = False if use_sigmoid is None else use_sigmoid + + self.ce = CrossEntropyLoss( + use_sigmoid=use_sigmoid, + use_soft=not use_sigmoid, + reduction=reduction, + class_weight=class_weight, + pos_weight=pos_weight) + + def generate_one_hot_like_label(self, label): + """This function takes one-hot or index label vectors and computes one- + hot like label vectors (float)""" + # check if targets are inputted as class integers + if label.dim() == 1 or (label.dim() == 2 and label.shape[1] == 1): + label = convert_to_one_hot(label.view(-1, 1), self.num_classes) + return label.float() + + def original_smooth_label(self, one_hot_like_label): + assert self.num_classes > 0 + smooth_label = one_hot_like_label * (1 - self._eps) + smooth_label += self._eps / self.num_classes + return smooth_label + + def multilabel_smooth_label(self, one_hot_like_label): + assert self.num_classes > 0 + smooth_label = torch.full_like(one_hot_like_label, self._eps) + smooth_label.masked_fill_(one_hot_like_label > 0, 1 - self._eps) + return smooth_label + + def forward(self, + cls_score, + label, + weight=None, + avg_factor=None, + reduction_override=None, + **kwargs): + r"""Label smooth loss. + + Args: + pred (torch.Tensor): The prediction with shape (N, \*). + label (torch.Tensor): The ground truth label of the prediction + with shape (N, \*). + weight (torch.Tensor, optional): Sample-wise loss weight with shape + (N, \*). Defaults to None. + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. 
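To make the three modes concrete, here is the smoothing applied by hand to a single one-hot label with `label_smooth_val=0.1` and four classes, mirroring `original_smooth_label` / `multilabel_smooth_label` above (standalone sketch):

```python
import torch

eps, K = 0.1, 4
one_hot = torch.tensor([0.0, 1.0, 0.0, 0.0])

original = one_hot * (1 - eps) + eps / K           # (1 - eps) * delta + eps / K
classy_vision = (one_hot + eps / K) / (1 + eps)    # same as rescaling eps to
                                                   # eps / (1 + eps) in __init__
multi_label = torch.full_like(one_hot, eps).masked_fill(one_hot > 0, 1 - eps)

print(original)        # tensor([0.0250, 0.9250, 0.0250, 0.0250])
print(classy_vision)   # tensor([0.0227, 0.9318, 0.0227, 0.0227])
print(multi_label)     # tensor([0.1000, 0.9000, 0.1000, 0.1000])
```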
+ reduction_override (str, optional): The method used to reduce the + loss into a scalar. Options are "none", "mean" and "sum". + Defaults to None. + + Returns: + torch.Tensor: Loss. + """ + if self.num_classes is not None: + assert self.num_classes == cls_score.shape[1], \ + f'num_classes should equal to cls_score.shape[1], ' \ + f'but got num_classes: {self.num_classes} and ' \ + f'cls_score.shape[1]: {cls_score.shape[1]}' + else: + self.num_classes = cls_score.shape[1] + + one_hot_like_label = self.generate_one_hot_like_label(label=label) + assert one_hot_like_label.shape == cls_score.shape, \ + f'LabelSmoothLoss requires output and target ' \ + f'to be same shape, but got output.shape: {cls_score.shape} ' \ + f'and target.shape: {one_hot_like_label.shape}' + + smoothed_label = self.smooth_label(one_hot_like_label) + return self.loss_weight * self.ce.forward( + cls_score, + smoothed_label, + weight=weight, + avg_factor=avg_factor, + reduction_override=reduction_override, + **kwargs) diff --git a/mmpretrain/models/losses/reconstruction_loss.py b/mmpretrain/models/losses/reconstruction_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..40e6bfd707b8e378f1ec656cfb443c27e8bbdbb3 --- /dev/null +++ b/mmpretrain/models/losses/reconstruction_loss.py @@ -0,0 +1,67 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Optional + +import torch +from mmengine.model import BaseModule + +from mmpretrain.registry import MODELS + + +@MODELS.register_module() +class PixelReconstructionLoss(BaseModule): + """Loss for the reconstruction of pixel in Masked Image Modeling. + + This module measures the distance between the target image and the + reconstructed image and compute the loss to optimize the model. Currently, + This module only provides L1 and L2 loss to penalize the reconstructed + error. In addition, a mask can be passed in the ``forward`` function to + only apply loss on visible region, like that in MAE. + + Args: + criterion (str): The loss the penalize the reconstructed error. + Currently, only supports L1 and L2 loss + channel (int, optional): The number of channels to average the + reconstruction loss. If not None, the reconstruction loss + will be divided by the channel. Defaults to None. + """ + + def __init__(self, criterion: str, channel: Optional[int] = None) -> None: + super().__init__() + + if criterion == 'L1': + self.penalty = torch.nn.L1Loss(reduction='none') + elif criterion == 'L2': + self.penalty = torch.nn.MSELoss(reduction='none') + else: + raise NotImplementedError(f'Currently, PixelReconstructionLoss \ + only supports L1 and L2 loss, but get {criterion}') + + self.channel = channel if channel is not None else 1 + + def forward(self, + pred: torch.Tensor, + target: torch.Tensor, + mask: Optional[torch.Tensor] = None) -> torch.Tensor: + """Forward function to compute the reconstrction loss. + + Args: + pred (torch.Tensor): The reconstructed image. + target (torch.Tensor): The target image. + mask (torch.Tensor): The mask of the target image. + + Returns: + torch.Tensor: The reconstruction loss. 
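For intuition, the masked L2 case of `PixelReconstructionLoss` (the MAE setting) boils down to a per-patch squared error averaged over the masked patches only. A standalone sketch with illustrative MAE-like shapes:

```python
import torch

# Illustrative shapes: (batch, patches, patch_dim) predictions and a binary
# mask with 1 marking the masked (reconstructed) patches.
pred = torch.randn(2, 196, 768)
target = torch.randn(2, 196, 768)
mask = (torch.rand(2, 196) > 0.25).float()

per_patch = ((pred - target) ** 2).mean(dim=-1)   # L2, averaged over patch dim
loss = (per_patch * mask).sum() / mask.sum()      # mean over masked patches only
```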
+ """ + loss = self.penalty(pred, target) + + # if the dim of the loss is 3, take the average of the loss + # along the last dim + if len(loss.shape) == 3: + loss = loss.mean(dim=-1) + + if mask is None: + loss = loss.mean() + else: + loss = (loss * mask).sum() / mask.sum() / self.channel + + return loss diff --git a/mmpretrain/models/losses/seesaw_loss.py b/mmpretrain/models/losses/seesaw_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..4aaaa451b41ea7e86b7efbfe1c0b6ce8b3756d80 --- /dev/null +++ b/mmpretrain/models/losses/seesaw_loss.py @@ -0,0 +1,173 @@ +# Copyright (c) OpenMMLab. All rights reserved. +# migrate from mmdetection with modifications +import torch +import torch.nn as nn +import torch.nn.functional as F + +from mmpretrain.registry import MODELS +from .utils import weight_reduce_loss + + +def seesaw_ce_loss(cls_score, + labels, + weight, + cum_samples, + num_classes, + p, + q, + eps, + reduction='mean', + avg_factor=None): + """Calculate the Seesaw CrossEntropy loss. + + Args: + cls_score (torch.Tensor): The prediction with shape (N, C), + C is the number of classes. + labels (torch.Tensor): The learning label of the prediction. + weight (torch.Tensor): Sample-wise loss weight. + cum_samples (torch.Tensor): Cumulative samples for each category. + num_classes (int): The number of classes. + p (float): The ``p`` in the mitigation factor. + q (float): The ``q`` in the compenstation factor. + eps (float): The minimal value of divisor to smooth + the computation of compensation factor + reduction (str, optional): The method used to reduce the loss. + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. + + Returns: + torch.Tensor: The calculated loss + """ + assert cls_score.size(-1) == num_classes + assert len(cum_samples) == num_classes + + onehot_labels = F.one_hot(labels, num_classes) + seesaw_weights = cls_score.new_ones(onehot_labels.size()) + + # mitigation factor + if p > 0: + sample_ratio_matrix = cum_samples[None, :].clamp( + min=1) / cum_samples[:, None].clamp(min=1) + index = (sample_ratio_matrix < 1.0).float() + sample_weights = sample_ratio_matrix.pow(p) * index + (1 - index + ) # M_{ij} + mitigation_factor = sample_weights[labels.long(), :] + seesaw_weights = seesaw_weights * mitigation_factor + + # compensation factor + if q > 0: + scores = F.softmax(cls_score.detach(), dim=1) + self_scores = scores[ + torch.arange(0, len(scores)).to(scores.device).long(), + labels.long()] + score_matrix = scores / self_scores[:, None].clamp(min=eps) + index = (score_matrix > 1.0).float() + compensation_factor = score_matrix.pow(q) * index + (1 - index) + seesaw_weights = seesaw_weights * compensation_factor + + cls_score = cls_score + (seesaw_weights.log() * (1 - onehot_labels)) + + loss = F.cross_entropy(cls_score, labels, weight=None, reduction='none') + + if weight is not None: + weight = weight.float() + loss = weight_reduce_loss( + loss, weight=weight, reduction=reduction, avg_factor=avg_factor) + return loss + + +@MODELS.register_module() +class SeesawLoss(nn.Module): + """Implementation of seesaw loss. + + Refers to `Seesaw Loss for Long-Tailed Instance Segmentation (CVPR 2021) + `_ + + Args: + use_sigmoid (bool): Whether the prediction uses sigmoid of softmax. + Only False is supported. Defaults to False. + p (float): The ``p`` in the mitigation factor. + Defaults to 0.8. + q (float): The ``q`` in the compenstation factor. + Defaults to 2.0. + num_classes (int): The number of classes. 
+ Defaults to 1000 for the ImageNet dataset. + eps (float): The minimal value of divisor to smooth + the computation of compensation factor, default to 1e-2. + reduction (str): The method that reduces the loss to a scalar. + Options are "none", "mean" and "sum". Defaults to "mean". + loss_weight (float): The weight of the loss. Defaults to 1.0 + """ + + def __init__(self, + use_sigmoid=False, + p=0.8, + q=2.0, + num_classes=1000, + eps=1e-2, + reduction='mean', + loss_weight=1.0): + super(SeesawLoss, self).__init__() + assert not use_sigmoid, '`use_sigmoid` is not supported' + self.use_sigmoid = False + self.p = p + self.q = q + self.num_classes = num_classes + self.eps = eps + self.reduction = reduction + self.loss_weight = loss_weight + + self.cls_criterion = seesaw_ce_loss + + # cumulative samples for each category + self.register_buffer('cum_samples', + torch.zeros(self.num_classes, dtype=torch.float)) + + def forward(self, + cls_score, + labels, + weight=None, + avg_factor=None, + reduction_override=None): + """Forward function. + + Args: + cls_score (torch.Tensor): The prediction with shape (N, C). + labels (torch.Tensor): The learning label of the prediction. + weight (torch.Tensor, optional): Sample-wise loss weight. + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. + reduction (str, optional): The method used to reduce the loss. + Options are "none", "mean" and "sum". + Returns: + torch.Tensor: The calculated loss + """ + assert reduction_override in (None, 'none', 'mean', 'sum'), \ + f'The `reduction_override` should be one of (None, "none", ' \ + f'"mean", "sum"), but get "{reduction_override}".' + assert cls_score.size(0) == labels.view(-1).size(0), \ + f'Expected `labels` shape [{cls_score.size(0)}], ' \ + f'but got {list(labels.size())}' + reduction = ( + reduction_override if reduction_override else self.reduction) + assert cls_score.size(-1) == self.num_classes, \ + f'The channel number of output ({cls_score.size(-1)}) does ' \ + f'not match the `num_classes` of seesaw loss ({self.num_classes}).' + + # accumulate the samples for each category + unique_labels = labels.unique() + for u_l in unique_labels: + inds_ = labels == u_l.item() + self.cum_samples[u_l] += inds_.sum() + + if weight is not None: + weight = weight.float() + else: + weight = labels.new_ones(labels.size(), dtype=torch.float) + + # calculate loss_cls_classes + loss_cls = self.loss_weight * self.cls_criterion( + cls_score, labels, weight, self.cum_samples, self.num_classes, + self.p, self.q, self.eps, reduction, avg_factor) + + return loss_cls diff --git a/mmpretrain/models/losses/swav_loss.py b/mmpretrain/models/losses/swav_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..c7dbb78e9bf6619cede65a874569072b863bdfa0 --- /dev/null +++ b/mmpretrain/models/losses/swav_loss.py @@ -0,0 +1,190 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import List, Optional, Union + +import numpy as np +import torch +import torch.distributed as dist +import torch.nn as nn +from mmengine.dist import all_reduce +from mmengine.model import BaseModule + +from mmpretrain.registry import MODELS + + +@torch.no_grad() +def distributed_sinkhorn(out: torch.Tensor, sinkhorn_iterations: int, + world_size: int, epsilon: float) -> torch.Tensor: + """Apply the distributed sinknorn optimization on the scores matrix to find + the assignments. 
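`SeesawLoss` keeps its class-frequency statistics as a running buffer updated online from the labels seen so far; the per-batch update inside `forward` above amounts to the following (toy batches, illustrative labels):

```python
import torch

num_classes = 5
cum_samples = torch.zeros(num_classes)

# Accumulate label counts over two toy batches, as SeesawLoss.forward does.
for labels in (torch.tensor([0, 0, 1, 3]), torch.tensor([0, 3, 3, 4])):
    for u_l in labels.unique():
        cum_samples[u_l] += (labels == u_l).sum()

print(cum_samples)   # tensor([3., 1., 0., 3., 1.])
```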
+ + This function is modified from + https://github.com/facebookresearch/swav/blob/main/main_swav.py + + Args: + out (torch.Tensor): The scores matrix + sinkhorn_iterations (int): Number of iterations in Sinkhorn-Knopp + algorithm. + world_size (int): The world size of the process group. + epsilon (float): regularization parameter for Sinkhorn-Knopp algorithm. + + Returns: + torch.Tensor: Output of sinkhorn algorithm. + """ + eps_num_stab = 1e-12 + Q = torch.exp(out / epsilon).t( + ) # Q is K-by-B for consistency with notations from our paper + B = Q.shape[1] * world_size # number of samples to assign + K = Q.shape[0] # how many prototypes + + # make the matrix sums to 1 + sum_Q = torch.sum(Q) + all_reduce(sum_Q) + Q /= sum_Q + + for it in range(sinkhorn_iterations): + # normalize each row: total weight per prototype must be 1/K + u = torch.sum(Q, dim=1, keepdim=True) + if len(torch.nonzero(u == 0)) > 0: + Q += eps_num_stab + u = torch.sum(Q, dim=1, keepdim=True, dtype=Q.dtype) + all_reduce(u) + Q /= u + Q /= K + + # normalize each column: total weight per sample must be 1/B + Q /= torch.sum(Q, dim=0, keepdim=True) + Q /= B + + Q *= B # the columns must sum to 1 so that Q is an assignment + return Q.t() + + +class MultiPrototypes(BaseModule): + """Multi-prototypes for SwAV head. + + Args: + output_dim (int): The output dim from SwAV neck. + num_prototypes (List[int]): The number of prototypes needed. + init_cfg (dict or List[dict], optional): Initialization config dict. + Defaults to None. + """ + + def __init__(self, + output_dim: int, + num_prototypes: List[int], + init_cfg: Optional[Union[List[dict], dict]] = None) -> None: + super().__init__(init_cfg=init_cfg) + assert isinstance(num_prototypes, list) + self.num_heads = len(num_prototypes) + for i, k in enumerate(num_prototypes): + self.add_module('prototypes' + str(i), + nn.Linear(output_dim, k, bias=False)) + + def forward(self, x: torch.Tensor) -> List[torch.Tensor]: + """Run forward for every prototype.""" + out = [] + for i in range(self.num_heads): + out.append(getattr(self, 'prototypes' + str(i))(x)) + return out + + +@MODELS.register_module() +class SwAVLoss(BaseModule): + """The Loss for SwAV. + + This Loss contains clustering and sinkhorn algorithms to compute Q codes. + Part of the code is borrowed from `script + `_. + The queue is built in `engine/hooks/swav_hook.py`. + + Args: + feat_dim (int): feature dimension of the prototypes. + sinkhorn_iterations (int): number of iterations in Sinkhorn-Knopp + algorithm. Defaults to 3. + epsilon (float): regularization parameter for Sinkhorn-Knopp algorithm. + Defaults to 0.05. + temperature (float): temperature parameter in training loss. + Defaults to 0.1. + crops_for_assign (List[int]): list of crops id used for computing + assignments. Defaults to [0, 1]. + num_crops (List[int]): list of number of crops. Defaults to [2]. + num_prototypes (int): number of prototypes. Defaults to 3000. + init_cfg (dict or List[dict], optional): Initialization config dict. + Defaults to None. 
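Stripped of the distributed `all_reduce` calls and the zero-row guard, the Sinkhorn-Knopp normalisation above alternates row and column rescalings until each sample's code sums to one. A single-process sketch (illustrative shapes, `world_size == 1`):

```python
import torch

@torch.no_grad()
def sinkhorn(scores, n_iters=3, epsilon=0.05):
    # Single-process version of distributed_sinkhorn: the all_reduce calls
    # vanish and the numerical-stability guard is omitted for brevity.
    Q = torch.exp(scores / epsilon).t()      # (K prototypes, B samples)
    K, B = Q.shape
    Q /= Q.sum()
    for _ in range(n_iters):
        Q /= Q.sum(dim=1, keepdim=True)      # each prototype holds weight 1/K
        Q /= K
        Q /= Q.sum(dim=0, keepdim=True)      # each sample holds weight 1/B
        Q /= B
    return (Q * B).t()                       # per-sample codes sum to 1

codes = sinkhorn(0.1 * torch.randn(8, 32))   # 8 samples, 32 prototypes
print(codes.sum(dim=1))                      # ones: each row is an assignment
```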
+ """ + + def __init__(self, + feat_dim: int, + sinkhorn_iterations: int = 3, + epsilon: float = 0.05, + temperature: float = 0.1, + crops_for_assign: List[int] = [0, 1], + num_crops: List[int] = [2], + num_prototypes: int = 3000, + init_cfg: Optional[Union[List[dict], dict]] = None): + super().__init__(init_cfg=init_cfg) + self.sinkhorn_iterations = sinkhorn_iterations + self.epsilon = epsilon + self.temperature = temperature + self.crops_for_assign = crops_for_assign + self.num_crops = num_crops + self.use_queue = False + self.queue = None + self.world_size = dist.get_world_size() if dist.is_initialized() else 1 + + # prototype layer + self.prototypes = None + if isinstance(num_prototypes, list): + self.prototypes = MultiPrototypes(feat_dim, num_prototypes) + elif num_prototypes > 0: + self.prototypes = nn.Linear(feat_dim, num_prototypes, bias=False) + assert self.prototypes is not None + + def forward(self, x: torch.Tensor) -> torch.Tensor: + """Forward function of SwAV loss. + + Args: + x (torch.Tensor): NxC input features. + Returns: + torch.Tensor: The returned loss. + """ + # normalize the prototypes + with torch.no_grad(): + w = self.prototypes.weight.data.clone() + w = nn.functional.normalize(w, dim=1, p=2) + self.prototypes.weight.copy_(w) + + embedding, output = x, self.prototypes(x) + embedding = embedding.detach() + + bs = int(embedding.size(0) / sum(self.num_crops)) + loss = 0 + for i, crop_id in enumerate(self.crops_for_assign): + with torch.no_grad(): + out = output[bs * crop_id:bs * (crop_id + 1)].detach() + # time to use the queue + if self.queue is not None: + if self.use_queue or not torch.all(self.queue[i, + -1, :] == 0): + self.use_queue = True + out = torch.cat( + (torch.mm(self.queue[i], + self.prototypes.weight.t()), out)) + # fill the queue + self.queue[i, bs:] = self.queue[i, :-bs].clone() + self.queue[i, :bs] = embedding[crop_id * bs:(crop_id + 1) * + bs] + + # get assignments (batch_size * num_prototypes) + q = distributed_sinkhorn(out, self.sinkhorn_iterations, + self.world_size, self.epsilon)[-bs:] + + # cluster assignment prediction + subloss = 0 + for v in np.delete(np.arange(np.sum(self.num_crops)), crop_id): + x = output[bs * v:bs * (v + 1)] / self.temperature + subloss -= torch.mean( + torch.sum(q * nn.functional.log_softmax(x, dim=1), dim=1)) + loss += subloss / (np.sum(self.num_crops) - 1) + loss /= len(self.crops_for_assign) + return loss diff --git a/mmpretrain/models/losses/utils.py b/mmpretrain/models/losses/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..a65b68a6590aa3fe10a023022c9c9c9bce51f935 --- /dev/null +++ b/mmpretrain/models/losses/utils.py @@ -0,0 +1,119 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import functools + +import torch +import torch.nn.functional as F + + +def reduce_loss(loss, reduction): + """Reduce loss as specified. + + Args: + loss (Tensor): Elementwise loss tensor. + reduction (str): Options are "none", "mean" and "sum". + + Return: + Tensor: Reduced loss tensor. + """ + reduction_enum = F._Reduction.get_enum(reduction) + # none: 0, elementwise_mean:1, sum: 2 + if reduction_enum == 0: + return loss + elif reduction_enum == 1: + return loss.mean() + elif reduction_enum == 2: + return loss.sum() + + +def weight_reduce_loss(loss, weight=None, reduction='mean', avg_factor=None): + """Apply element-wise weight and reduce loss. + + Args: + loss (Tensor): Element-wise loss. + weight (Tensor): Element-wise weights. + reduction (str): Same as built-in losses of PyTorch. 
+ avg_factor (float): Average factor when computing the mean of losses. + + Returns: + Tensor: Processed loss values. + """ + # if weight is specified, apply element-wise weight + if weight is not None: + loss = loss * weight + + # if avg_factor is not specified, just reduce the loss + if avg_factor is None: + loss = reduce_loss(loss, reduction) + else: + # if reduction is mean, then average the loss by avg_factor + if reduction == 'mean': + loss = loss.sum() / avg_factor + # if reduction is 'none', then do nothing, otherwise raise an error + elif reduction != 'none': + raise ValueError('avg_factor can not be used with reduction="sum"') + return loss + + +def weighted_loss(loss_func): + """Create a weighted version of a given loss function. + + To use this decorator, the loss function must have the signature like + ``loss_func(pred, target, **kwargs)``. The function only needs to compute + element-wise loss without any reduction. This decorator will add weight + and reduction arguments to the function. The decorated function will have + the signature like ``loss_func(pred, target, weight=None, reduction='mean', + avg_factor=None, **kwargs)``. + + :Example: + + >>> import torch + >>> @weighted_loss + >>> def l1_loss(pred, target): + >>> return (pred - target).abs() + + >>> pred = torch.Tensor([0, 2, 3]) + >>> target = torch.Tensor([1, 1, 1]) + >>> weight = torch.Tensor([1, 0, 1]) + + >>> l1_loss(pred, target) + tensor(1.3333) + >>> l1_loss(pred, target, weight) + tensor(1.) + >>> l1_loss(pred, target, reduction='none') + tensor([1., 1., 2.]) + >>> l1_loss(pred, target, weight, avg_factor=2) + tensor(1.5000) + """ + + @functools.wraps(loss_func) + def wrapper(pred, + target, + weight=None, + reduction='mean', + avg_factor=None, + **kwargs): + # get element-wise loss + loss = loss_func(pred, target, **kwargs) + loss = weight_reduce_loss(loss, weight, reduction, avg_factor) + return loss + + return wrapper + + +def convert_to_one_hot(targets: torch.Tensor, classes) -> torch.Tensor: + """This function converts target class indices to one-hot vectors, given + the number of classes. + + Args: + targets (Tensor): The ground truth label of the prediction + with shape (N, 1) + classes (int): the number of classes. + + Returns: + Tensor: Processed loss values. + """ + assert (torch.max(targets).item() < + classes), 'Class Index must be less than number of classes' + one_hot_targets = F.one_hot( + targets.long().squeeze(-1), num_classes=classes) + return one_hot_targets diff --git a/mmpretrain/models/multimodal/__init__.py b/mmpretrain/models/multimodal/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..5119ec42a8786f3a65b41f841070ec09cf2d627a --- /dev/null +++ b/mmpretrain/models/multimodal/__init__.py @@ -0,0 +1,18 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+from mmpretrain.utils.dependency import WITH_MULTIMODAL + +if WITH_MULTIMODAL: + from .blip import * # noqa: F401,F403 + from .blip2 import * # noqa: F401,F403 + from .chinese_clip import * # noqa: F401, F403 + from .flamingo import * # noqa: F401, F403 + from .ofa import * # noqa: F401, F403 +else: + from mmpretrain.registry import MODELS + from mmpretrain.utils.dependency import register_multimodal_placeholder + + register_multimodal_placeholder([ + 'Blip2Caption', 'Blip2Retrieval', 'Blip2VQA', 'BlipCaption', + 'BlipNLVR', 'BlipRetrieval', 'BlipGrounding', 'BlipVQA', 'Flamingo', + 'OFA', 'ChineseCLIP' + ], MODELS) diff --git a/mmpretrain/models/multimodal/blip/__init__.py b/mmpretrain/models/multimodal/blip/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..ebbc0da6e0d11c116d4575b6c981724e387e415a --- /dev/null +++ b/mmpretrain/models/multimodal/blip/__init__.py @@ -0,0 +1,12 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from .blip_caption import BlipCaption +from .blip_grounding import BlipGrounding +from .blip_nlvr import BlipNLVR +from .blip_retrieval import BlipRetrieval +from .blip_vqa import BlipVQA +from .language_model import BertLMHeadModel, XBertEncoder, XBertLMHeadDecoder + +__all__ = [ + 'BertLMHeadModel', 'BlipCaption', 'BlipGrounding', 'BlipNLVR', + 'BlipRetrieval', 'BlipVQA', 'XBertEncoder', 'XBertLMHeadDecoder' +] diff --git a/mmpretrain/models/multimodal/blip/blip_caption.py b/mmpretrain/models/multimodal/blip/blip_caption.py new file mode 100644 index 0000000000000000000000000000000000000000..9af3e2408da8c6b3a55694a1323e6434dfc609e1 --- /dev/null +++ b/mmpretrain/models/multimodal/blip/blip_caption.py @@ -0,0 +1,184 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import List, Optional + +import torch +from mmengine.model import BaseModel + +from mmpretrain.registry import MODELS, TOKENIZER +from mmpretrain.structures import DataSample + + +@MODELS.register_module() +class BlipCaption(BaseModel): + """BLIP Caption. + + Args: + vision_encoder (dict): Encoder for extracting image features. + decoder_head (dict): The decoder head module to forward and + calculate loss from processed features. + tokenizer: (Optional[dict]): The config for tokenizer. + Defaults to None. + prompt (str): Prompt used for training and eval. + Defaults to ''. + max_txt_len (int): Max text length of input text. + num_captions (int): Number of captions to be generated for each image. + data_preprocessor (Optional[dict]): The config for preprocessing input + data. If None or no specified type, it will use + "MutimodalDataPreprocessor" as type. + See :class:`MutimodalDataPreprocessor` for more details. + Defaults to None. + init_cfg (Optional[dict]): the config to control the initialization. + Defaults to None. 
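The guarded import in `multimodal/__init__.py` above lets the package expose model names such as `BlipCaption` even when the optional multimodal dependencies are missing. A generic sketch of that pattern; the dependency check and the placeholder body here are illustrative, and the real logic lives in `mmpretrain/utils/dependency.py`, which is not part of this hunk:

```python
# Illustrative guarded-import pattern, not the verbatim mmpretrain code.
try:
    import transformers  # noqa: F401  # one of the optional multimodal extras
    WITH_MULTIMODAL = True
except ImportError:
    WITH_MULTIMODAL = False

if not WITH_MULTIMODAL:
    class BlipCaption:
        """Placeholder that fails loudly only when actually instantiated."""

        def __init__(self, *args, **kwargs):
            raise ImportError(
                'BlipCaption needs the optional multimodal dependencies; '
                'install mmpretrain with its multimodal extras to use it.')
```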
+ """ + + def __init__(self, + vision_encoder: dict, + decoder_head: dict, + tokenizer: Optional[dict] = None, + prompt: str = '', + max_txt_len: int = 20, + num_captions: int = 1, + data_preprocessor: Optional[dict] = None, + init_cfg: Optional[dict] = None): + if data_preprocessor is None: + data_preprocessor = {} + if isinstance(data_preprocessor, dict): + data_preprocessor.setdefault('type', 'MultiModalDataPreprocessor') + data_preprocessor = MODELS.build(data_preprocessor) + + super(BlipCaption, self).__init__( + init_cfg=init_cfg, data_preprocessor=data_preprocessor) + + self.tokenizer = TOKENIZER.build(tokenizer) + self.visual_encoder = MODELS.build(vision_encoder) + self.seq_gen_head = MODELS.build(decoder_head) + + self.prompt = prompt + self.prompt_length = len(self.tokenizer(self.prompt).input_ids) - 1 + self.max_txt_len = max_txt_len + self.num_captions = num_captions + + def forward( + self, + images: torch.Tensor, + data_samples: Optional[List] = None, + mode: str = 'loss', + ): + """The unified entry for a forward process in both training and test. + The method should accept two modes: "predict" and "loss": + + - "predict": Forward and return the predictions, which are fully + processed to a list of :obj:`DataSample`. + - "loss": Forward and return a dict of losses according to the given + inputs and data samples. + + Note that this method doesn't handle neither back propagation nor + optimizer updating, which are done in the :meth:`train_step`. + + Args: + images (torch.Tensor): pre_processed img tensor (N, C, ...). + data_samples (List[DataSample], optional): Data samples with + additional infos. + mode (str): Return what kind of value. Defaults to 'loss'. + + Returns: + The return type depends on ``mode``. + - If ``mode="loss"``, return a dict of tensor. + """ + if mode == 'loss': + return self.loss(images, data_samples) + elif mode == 'predict': + return self.predict(images, data_samples) + else: + raise RuntimeError(f'Invalid mode "{mode}".') + + def predict(self, images, data_samples=None, **kwargs): + """Predict captions from a batch of inputs. + + Args: + images (torch.Tensor): The input images tensor with shape + (N, C, ...) in general. + data_samples (List[DataSample], optional): The annotation + data of every samples. Defaults to None. + **kwargs: Other keyword arguments accepted by the ``predict`` + method of :attr:`head`. + + Returns: + List[DataSample]: Return list of data samples. + """ + # prepare inputs for decoder generation. 
+ image_embeds = self.visual_encoder(images)[0] + image_embeds = torch.repeat_interleave(image_embeds, self.num_captions, + 0) + + prompt = [self.prompt] * image_embeds.size(0) + prompt = self.tokenizer( + prompt, padding='longest', + return_tensors='pt').to(image_embeds.device) + + prompt.input_ids[:, 0] = self.tokenizer.bos_token_id + prompt.input_ids = prompt.input_ids[:, :-1] + + decoder_out = self.seq_gen_head.predict( + input_ids=prompt.input_ids, + encoder_hidden_states=image_embeds, + sep_token_id=self.tokenizer.sep_token_id, + pad_token_id=self.tokenizer.pad_token_id, + output_attentions=True, + return_dict_in_generate=True, + ) + + decode_tokens = self.tokenizer.batch_decode( + decoder_out.sequences, skip_special_tokens=True) + + out_data_samples = [] + if data_samples is None: + data_samples = [None for _ in range(len(decode_tokens))] + + for data_sample, decode_token in zip(data_samples, decode_tokens): + if data_sample is None: + data_sample = DataSample() + data_sample.pred_caption = decode_token[len(self.prompt):] + out_data_samples.append(data_sample) + + return out_data_samples + + def loss(self, images, data_samples): + """Calculate losses from a batch of images and data samples. + + Args: + images (torch.Tensor): The input images tensor with shape + (N, C, ...) in general. + data_samples (List[ImageTextDataSample]): The annotation data of + every samples. + + Returns: + dict[str, Tensor]: a dictionary of loss components. + """ + image_embeds = self.visual_encoder(images)[0] + raw_text = [self.prompt + ds.gt_caption for ds in data_samples] + + text = self.tokenizer( + raw_text, + padding='longest', + truncation=True, + max_length=self.max_txt_len, + return_tensors='pt', + ).to(image_embeds.device) + text.input_ids[:, 0] = self.tokenizer.bos_token_id + + # prepare targets for forwarding decoder + labels = text.input_ids.masked_fill( + text.input_ids == self.tokenizer.pad_token_id, -100) + labels[:, :self.prompt_length] = -100 + # forward decoder + image_atts = torch.ones( + image_embeds.size()[:-1], dtype=torch.long).to(image_embeds.device) + + losses = self.seq_gen_head.loss( + input_ids=text.input_ids, + encoder_hidden_states=image_embeds, + encoder_attention_mask=image_atts, + labels=labels, + ) + return losses diff --git a/mmpretrain/models/multimodal/blip/blip_grounding.py b/mmpretrain/models/multimodal/blip/blip_grounding.py new file mode 100644 index 0000000000000000000000000000000000000000..cb087287220a91b3bfcd50acee244eb5dc118bac --- /dev/null +++ b/mmpretrain/models/multimodal/blip/blip_grounding.py @@ -0,0 +1,248 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import copy +from typing import List, Optional, Tuple, Union + +import numpy as np +import torch +from mmengine.model import BaseModel + +from mmpretrain.models.utils.box_utils import box_xyxy_to_cxcywh +from mmpretrain.registry import MODELS, TOKENIZER +from mmpretrain.structures.data_sample import DataSample + + +@MODELS.register_module() +class BlipGrounding(BaseModel): + """BLIP Grounding. + + Args: + visual_encoder (dict): Backbone for extracting image features. + text_encoder (dict): Backbone for extracting text features. + but we integrate the vqa text extractor + into the tokenizer part in datasets/transform/ + so we don't need text_backbone + multimodal_encoder (Optional[dict]): Backbone for extracting + multi-modal features. We apply this part as VQA fusion module. + neck (Optional[dict]): The neck module to process features from + backbone. Defaults to None. 
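In `BlipCaption.loss` above, only the caption tokens contribute to the language-modelling loss: padding positions and the prompt prefix are masked with `-100`, the ignore index of the underlying cross entropy. A toy illustration with made-up token ids:

```python
import torch

# Toy ids: [BOS, prompt, prompt, caption, caption, PAD]; values are made up.
input_ids = torch.tensor([[101, 7, 8, 42, 43, 0]])
pad_token_id, prompt_length = 0, 3            # BOS + two prompt tokens

labels = input_ids.masked_fill(input_ids == pad_token_id, -100)
labels[:, :prompt_length] = -100
print(labels)   # tensor([[-100, -100, -100,   42,   43, -100]])
```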
+ head (Optional[Union[List[dict], dict]]): The head module to calculate + loss from processed features. See :mod:`mmpretrain.models.heads`. + Notice that if the head is not set, `loss` method cannot be used. + Defaults to None. + data_preprocessor (Optional[dict]): The config for preprocessing input + data. If None or no specified type, it will use + "MutimodalDataPreprocessor" as type. + See :class:`MutimodalDataPreprocessor` for more details. + Defaults to None. + init_cfg (Optional[dict]): the config to control the initialization. + Defaults to None. + """ + + def __init__(self, + tokenizer: Optional[dict] = None, + visual_encoder: Optional[dict] = None, + text_encoder: Optional[dict] = None, + multimodal_encoder: Optional[dict] = None, + head: Optional[Union[List[dict], dict]] = None, + data_preprocessor: Optional[dict] = None, + init_cfg: Optional[dict] = None) -> None: + if data_preprocessor is None: + data_preprocessor = {} + if isinstance(data_preprocessor, dict): + data_preprocessor.setdefault('type', 'MultiModalDataPreprocessor') + data_preprocessor = MODELS.build(data_preprocessor) + + super(BlipGrounding, self).__init__( + init_cfg=init_cfg, data_preprocessor=data_preprocessor) + + self.tokenizer = TOKENIZER.build(tokenizer) + self.prompt = 'localize instance: ' + self.visual_encoder = MODELS.build(visual_encoder) + self.text_encoder = MODELS.build(text_encoder) + self.multimodal_encoder = MODELS.build(multimodal_encoder) + head.setdefault('tokenizer', self.tokenizer) + self.grounding_head = MODELS.build(head) + + def forward( + self, + images: torch.Tensor, + data_samples: Optional[List[DataSample]] = None, + mode: str = 'loss', + ): + """The unified entry for a forward process in both training and test. + The method should accept only one mode "loss": + + - "loss": Forward and return a dict of losses according to the given + inputs and data samples. + + Note that this method doesn't handle neither back propagation nor + optimizer updating, which are done in the :meth:`train_step`. + + Args: + inputs (torch.Tensor, tuple): The input tensor with shape + (N, C, ...) in general. + data_samples (List[VQADataSample], optional): The annotation + data of every samples. It's required if ``mode="loss"``. + Defaults to None. + mode (str): Return what kind of value. Defaults to 'loss'. + + Returns: + The return type depends on ``mode``. + - If ``mode="loss"``, return a dict of tensor. + """ + + if mode == 'loss': + return self.loss(images, data_samples) + elif mode == 'predict': + return self.predict(images, data_samples) + else: + raise RuntimeError(f'Invalid mode "{mode}".') + + def extract_feat(self, images: torch.Tensor) -> torch.Tensor: + """Extract features from the input tensor with shape (N, C, ...). + + Args: + inputs (Tensor): A batch of inputs. The shape of it should be + ``(num_samples, num_channels, *img_shape)``. + Returns: + image_embeds (Tensor): The output features. + """ + image_embeds = self.visual_encoder(images)[0] + return image_embeds + + def loss( + self, + images: torch.Tensor, + data_samples=None, + ) -> Union[torch.Tensor, Tuple[torch.Tensor]]: + """generate train_loss from the input tensor and data_samples. + + Args: + inputs (Tensor): A batch of inputs. The shape of it should be + ``(num_samples, num_channels, *img_shape)``. + data_samples (List[VQADataSample], optional): The annotation + data of every samples.. + + Returns: + Dict[torch.Tensor]: The losses features. 
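The grounding target built in the `loss` body that follows is a box normalised by the image size and converted from corner to centre format. A standalone sketch of that preprocessing; the inline `box_xyxy_to_cxcywh` here is only a stand-in for the helper imported from `mmpretrain.models.utils.box_utils`:

```python
import torch

def box_xyxy_to_cxcywh(box: torch.Tensor) -> torch.Tensor:
    # Stand-in for mmpretrain.models.utils.box_utils.box_xyxy_to_cxcywh.
    x0, y0, x1, y1 = box.unbind(-1)
    return torch.stack([(x0 + x1) / 2, (y0 + y1) / 2, x1 - x0, y1 - y0], dim=-1)

h, w = 480, 640                                   # illustrative image size
box = torch.tensor([64.0, 48.0, 320.0, 240.0])    # xyxy in pixels
box[[0, 2]] /= w                                  # normalise x by image width
box[[1, 3]] /= h                                  # normalise y by image height
print(box_xyxy_to_cxcywh(box))                    # tensor([0.3, 0.3, 0.4, 0.4])
```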
+ """ + + # extract image feature + image_embeds = self.extract_feat(images) + image_atts = image_embeds.new_ones( + image_embeds.size()[:-1], dtype=torch.long) + + raw_text = [] + box_targets = [] + for ds in data_samples: + + raw_text.append(ds.text) + box_t = copy.deepcopy(ds.box) * 1.0 + box_t[1] /= ds.img_shape[0] + box_t[3] /= ds.img_shape[0] + box_t[0] /= ds.img_shape[1] + box_t[2] /= ds.img_shape[1] + + box_targets.append(box_t) + + box_targets = image_embeds.new_tensor(np.stack(box_targets)) + box_targets = box_xyxy_to_cxcywh(box_targets) # xywh 0-1 + + text = self.tokenizer( + raw_text, + padding='longest', + truncation=True, + max_length=128, + return_tensors='pt', + ).to(image_embeds.device) + + text_embeds = self.text_encoder( + text.input_ids, + attention_mask=text.attention_mask, + mode='text', + return_dict=True) # bz, seq_len, hid + + # multimodal fusion + multimodal_embeds = self.multimodal_encoder( + encoder_embeds=text_embeds.last_hidden_state, + attention_mask=text.attention_mask, + encoder_hidden_states=image_embeds, + encoder_attention_mask=image_atts, + return_dict=True, + ) + + # put answer from data_samples into tensor form + losses = self.grounding_head.loss( + text_embedding=multimodal_embeds.last_hidden_state, + text_embedding_mask=text.attention_mask, + encoder_hidden_states=image_embeds, + encoder_attention_mask=image_atts, + decoder_targets=box_targets, + ) + + return losses + + def predict(self, images, data_samples=None): + """""" + + # extract image feature + image_embeds = self.extract_feat(images) + image_atts = image_embeds.new_ones( + image_embeds.size()[:-1], dtype=torch.long) + + raw_text = [] + for ds in data_samples: + raw_text.append(ds.text) + + text = self.tokenizer( + raw_text, + padding='longest', + truncation=True, + max_length=128, + return_tensors='pt', + ).to(image_embeds.device) + + text_embeds = self.text_encoder( + text.input_ids, + attention_mask=text.attention_mask, + mode='text', + return_dict=True) # bz, seq_len, hid + + # multimodal fusion + multimodal_embeds = self.multimodal_encoder( + encoder_embeds=text_embeds.last_hidden_state, + attention_mask=text.attention_mask, + encoder_hidden_states=image_embeds, + encoder_attention_mask=image_atts, + return_dict=True, + ) + + # put answer from data_samples into tensor form + output_boxes = self.grounding_head.predict( + text_embedding=multimodal_embeds.last_hidden_state, + text_embedding_mask=text.attention_mask, + encoder_hidden_states=image_embeds, + encoder_attention_mask=image_atts, + ) # xyxy 0-1 + + out_data_samples = [] + for bbox, data_sample, img in zip(output_boxes, data_samples, images): + if data_sample is None: + data_sample = DataSample() + + img_size = img.shape[-2:] + scale_factor = data_sample.get('scale_factor', (1, 1)) + bbox[0::2] = bbox[0::2] * img_size[1] / scale_factor[0] + bbox[1::2] = bbox[1::2] * img_size[0] / scale_factor[1] + bbox = bbox[None, :] + data_sample.pred_bboxes = bbox + + if 'gt_bboxes' in data_sample: + gt_bboxes = torch.Tensor(data_sample.get('gt_bboxes')) + gt_bboxes[:, 0::2] /= scale_factor[0] + gt_bboxes[:, 1::2] /= scale_factor[1] + data_sample.gt_bboxes = gt_bboxes + + out_data_samples.append(data_sample) + + return out_data_samples diff --git a/mmpretrain/models/multimodal/blip/blip_nlvr.py b/mmpretrain/models/multimodal/blip/blip_nlvr.py new file mode 100644 index 0000000000000000000000000000000000000000..f96e3cce237fd3b064c74264e8f907a8bd3a47ca --- /dev/null +++ b/mmpretrain/models/multimodal/blip/blip_nlvr.py @@ -0,0 +1,205 @@ +# 
Copyright (c) OpenMMLab. All rights reserved. +from typing import List, Optional + +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmengine.model import BaseModel + +from mmpretrain.registry import MODELS, TOKENIZER + + +@MODELS.register_module() +class BlipNLVR(BaseModel): + """BLIP NLVR. + + Args: + vision_backbone (dict): Backbone for extracting image features. + text_backbone (dict): Backbone for extracting text features. + but we integrate the vqa text extractor into the tokenizer part in + datasets/transform/ so we don't need text_backbone + multimodal_backbone (Optional[dict]): Backbone for extracting + multi-modal features. We apply this part as VQA fusion module. + neck (Optional[dict]): The neck module to process features from + backbone. Defaults to None. + head (Optional[dict]): The head module to calculate + loss from processed features. See :mod:`mmmultimodal.models.heads`. + Notice that if the head is not set, `loss` method cannot be used. + Defaults to None. + tokenizer: (Optional[dict]): The config for tokenizer + data_preprocessor (Optional[dict]): The config for preprocessing input + data. If None or no specified type, it will use + "MutimodalDataPreprocessor" as type. + See :class:`MutimodalDataPreprocessor` for more details. + Defaults to None. + init_cfg (Optional[dict]): the config to control the initialization. + Defaults to None. + """ + + def __init__(self, + vision_backbone: dict, + multimodal_backbone: dict, + tokenizer: Optional[dict] = None, + max_txt_len: int = 35, + data_preprocessor: Optional[dict] = None, + init_cfg: Optional[dict] = None): + if data_preprocessor is None: + data_preprocessor = {} + if isinstance(data_preprocessor, dict): + data_preprocessor.setdefault('type', 'MultiModalDataPreprocessor') + data_preprocessor = MODELS.build(data_preprocessor) + + super().__init__( + init_cfg=init_cfg, data_preprocessor=data_preprocessor) + if tokenizer is not None: + self.tokenizer = TOKENIZER.build(tokenizer) + self.vision_backbone = MODELS.build(vision_backbone) + self.multimodal_backbone = MODELS.build(multimodal_backbone) + self.max_txt_len = max_txt_len + + # For simplity, directly use head definition here. + # If more complex head is designed, move this and loss to a new + # head module. + hidden_size = self.multimodal_backbone.config.hidden_size + self.head = nn.Sequential( + nn.Linear(hidden_size, hidden_size), + nn.ReLU(), + nn.Linear(hidden_size, 2), + ) + + @property + def device(self): + return next(self.parameters()).device + + def preprocess_text(self, data_samples): + + sample_item = data_samples[0] + + if sample_item is not None and 'text' in sample_item: + texts = [sample.get('text') for sample in data_samples] + else: + return None + + # perform tokenize first if satisfied conditions + texts = self.tokenizer( + texts, + padding='longest', + truncation=True, + max_length=self.max_txt_len, + return_tensors='pt', + ).to(self.device) + + return texts + + def forward( + self, + images: dict, + data_samples: Optional[List] = None, + mode: str = 'tensor', + ): + """The unified entry for a forward process in both training and test. + The method should accept only one mode "loss": + + - "loss": Forward and return a dict of losses according to the given + images and data samples. + + Note that this method doesn't handle neither back propagation nor + optimizer updating, which are done in the :meth:`train_step`. + + Args: + images (dict of torch.Tensor): + img: pre_processed img tensor (N, C, ...). 
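NLVR pairs arrive stacked as `(B, T, C, H, W)` with `T = 2` images per statement; the reshape at the top of `forward` (just below) reorders them so that a later `torch.split` recovers the two views in order. A quick shape check:

```python
import torch

B, T, C, H, W = 4, 2, 3, 224, 224          # T = 2 images per NLVR statement
images = torch.randn(B, T, C, H, W)

flat = images.permute(1, 0, 2, 3, 4).flatten(0, 1)   # (T*B, C, H, W)
assert torch.equal(flat[:B], images[:, 0])  # first half: every sample's image 0
assert torch.equal(flat[B:], images[:, 1])  # second half: every sample's image 1
```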
+ text: tokenized text (N, L) + data_samples (List[CaptionDataSample], optional): + The annotation data of every samples. + 'image': raw image data + 'text' tokenized text + mode (str): Return what kind of value. Defaults to 'tensor'. + + Returns: + The return type depends on ``mode``. + - If ``mode="loss"``, return a dict of tensor. + """ + # B, T, C, H, W to T*B, C, H, W + images = images.permute(1, 0, 2, 3, 4).flatten(0, 1) + + if mode == 'loss': + return self.loss(images, data_samples) + elif mode == 'predict': + return self.predict(images, data_samples) + else: + raise RuntimeError(f'Invalid mode "{mode}".') + + def predict(self, images, data_samples=None): + """Predict caption.""" + # prepare inputs for decoder generation. + image_embeds = self.vision_backbone(images)[0] + texts = self.preprocess_text(data_samples) + image_atts = torch.ones( + image_embeds.size()[:-1], dtype=torch.long).to(self.device) + + image0_embeds, image1_embeds = torch.split(image_embeds, + texts.input_ids.size(0)) + + # multimodal fusion + multimodal_embeds = self.multimodal_backbone( + texts.input_ids, + attention_mask=texts.attention_mask, + encoder_hidden_states=[image0_embeds, image1_embeds], + encoder_attention_mask=[ + image_atts[:image0_embeds.size(0)], + image_atts[image0_embeds.size(0):], + ], + return_dict=True, + ) + + # get prediction + outputs = self.head(multimodal_embeds.last_hidden_state[:, 0, :]) + + pred_scores = F.softmax(outputs, dim=1) + + for pred_score, data_sample in zip(pred_scores, data_samples): + data_sample.set_pred_score(pred_score) + data_sample.set_pred_label(pred_score.argmax(dim=0)) + + return data_samples + + def loss(self, images, data_samples): + """Calculate losses from a batch of inputs and data samples. + + Args: + images (torch.Tensor): The input tensor with shape + (N, C, ...) in general. + data_samples (List[ImageTextDataSample]): The annotation data of + every samples. + + Returns: + dict[str, Tensor]: a dictionary of loss components. + """ + # prepare inputs for decoder generation. + image_embeds = self.vision_backbone(images)[0] + texts = self.preprocess_text(data_samples) + image_atts = torch.ones( + image_embeds.size()[:-1], dtype=torch.long).to(self.device) + image0_embeds, image1_embeds = torch.split(image_embeds, + texts.input_ids.size(0)) + + # multimodal fusion + multimodal_embeds = self.multimodal_backbone( + texts.input_ids, + attention_mask=texts.attention_mask, + encoder_hidden_states=[image0_embeds, image1_embeds], + encoder_attention_mask=[ + image_atts[:image0_embeds.size(0)], + image_atts[image0_embeds.size(0):], + ], + return_dict=True, + ) + + # get prediction + outputs = self.head(multimodal_embeds.last_hidden_state[:, 0, :]) + + targets = torch.tensor([i.gt_label + for i in data_samples]).to(outputs.device) + loss = F.cross_entropy(outputs, targets) + return {'loss': loss} diff --git a/mmpretrain/models/multimodal/blip/blip_retrieval.py b/mmpretrain/models/multimodal/blip/blip_retrieval.py new file mode 100644 index 0000000000000000000000000000000000000000..8983e63e20832fa2e9b36e39134b6fe748baab61 --- /dev/null +++ b/mmpretrain/models/multimodal/blip/blip_retrieval.py @@ -0,0 +1,716 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+from collections import ChainMap +from copy import deepcopy +from typing import Dict, List, Optional, Tuple, Union + +import mmengine.dist as dist +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmengine.model import BaseModel +from torch import distributed as torch_dist + +from mmpretrain.registry import MODELS, TOKENIZER +from mmpretrain.structures import DataSample +from mmpretrain.utils import track_on_main_process + + +def all_gather_concat(data: torch.Tensor) -> torch.Tensor: + """Gather tensors with different first-dimension size and concat to one + tenosr. + + Note: + Only the first dimension should be different. + + Args: + data (Tensor): Tensor to be gathered. + + Returns: + torch.Tensor: The concatenated tenosr. + """ + if dist.get_world_size() == 1: + return data + + data_size = torch.tensor(data.size(0), device=data.device) + sizes_list = dist.all_gather(data_size) + + max_length = max(sizes_list) + size_diff = max_length.item() - data_size.item() + if size_diff: + padding = torch.zeros( + size_diff, *data.size()[1:], device=data.device, dtype=data.dtype) + data = torch.cat((data, padding)) + + gather_list = dist.all_gather(data) + + all_data = [] + for tensor, size in zip(gather_list, sizes_list): + + all_data.append(tensor[:size]) + + return torch.concat(all_data) + + +@MODELS.register_module() +class BlipRetrieval(BaseModel): + """BLIP Retriever. + + Args: + vision_backbone (dict): Backbone for extracting image features. + text_backbone (dict): Backbone for extracting text features. + multimodal_backbone (Optional[dict]): Backbone for extracting + multi-modal features. + vision_neck (Optional[dict]): The neck module to process image features + from vision backbone. Defaults to None. + text_neck (Optional[dict]): The neck module to process text features + from text backbone. Defaults to None. + head (Optional[Union[List[dict], dict]]): The head module to calculate + loss from processed single modality features. + See :mod:`mmmultimodal.models.heads`. + Notice that if the head is not set, `loss` method cannot be used. + Defaults to None. + multimodal_head (Optional[Union[List[dict], dict]]): The multi-modal + head module to calculate loss from processed multimodal features. + See :mod:`mmmultimodal.models.heads`. + Notice that if the head is not set, `loss` method cannot be used. + Defaults to None. + momentum (float): Momentum used for momentum contrast. + Defaults to .995. + negative_all_rank (bool): Whether to sample negative data from all + ranks for image text matching in training. Defaults to True. + temperature (float): Temperature parameter that controls the + concentration level of the distribution. Defaults to 0.07. + fast_match (bool): If False, select topk similarity as candidates and + compute the matching score. If True, return the similarity as the + matching score directly. Defaults to False. + topk (int): Select topk similarity as candidates for compute matching + scores. Notice that this is not the topk in evaluation. + Defaults to 256. + data_preprocessor (Optional[dict]): The config for preprocessing input + data. If None or no specified type, it will use + "MutimodalDataPreprocessor" as type. + See :class:`MutimodalDataPreprocessor` for more details. + Defaults to None. + init_cfg (Optional[dict]): the config to control the initialization. + Defaults to None. 
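`all_gather_concat` above works around the requirement that `all_gather` exchanges equally-shaped tensors: shorter tensors are zero-padded to the longest first dimension and the padding is trimmed after gathering. The same bookkeeping, simulated for two ranks without a process group:

```python
import torch

# Two "ranks" with different first dimensions, simulated in one process.
rank_data = [torch.arange(6.0).view(3, 2), torch.arange(4.0).view(2, 2)]
sizes = [t.size(0) for t in rank_data]
max_len = max(sizes)

padded = [torch.cat([t, t.new_zeros(max_len - t.size(0), *t.shape[1:])])
          for t in rank_data]                        # what each rank would send
gathered = torch.cat([p[:n] for p, n in zip(padded, sizes)])
print(gathered.shape)                                # torch.Size([5, 2]), no padding left
```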
+ """ + + def __init__(self, + vision_backbone: dict, + text_backbone: dict, + multimodal_backbone: Optional[dict] = None, + vision_neck: Optional[dict] = None, + text_neck: Optional[dict] = None, + head: Optional[Union[List[dict], dict]] = None, + multimodal_head: Optional[Union[List[dict], dict]] = None, + tokenizer: Optional[dict] = None, + momentum: float = .995, + negative_all_rank: bool = True, + temperature: float = 0.07, + fast_match: bool = False, + topk: int = 256, + max_txt_len: int = 20, + data_preprocessor: Optional[dict] = None, + init_cfg: Optional[dict] = None): + if data_preprocessor is None: + data_preprocessor = {} + if isinstance(data_preprocessor, dict): + data_preprocessor.setdefault('type', 'MultiModalDataPreprocessor') + data_preprocessor = MODELS.build(data_preprocessor) + + super().__init__( + init_cfg=init_cfg, data_preprocessor=data_preprocessor) + + self.vision_backbone = MODELS.build(vision_backbone) + self.text_backbone = MODELS.build(text_backbone) + + if multimodal_backbone is not None: + self.multimodal_backbone = MODELS.build(multimodal_backbone) + + if vision_neck is not None: + self.vision_neck = MODELS.build(vision_neck) + + if text_neck is not None: + self.text_neck = MODELS.build(text_neck) + + if head is not None: + self.head = MODELS.build(head) + + if multimodal_head is not None: + self.multimodal_head = MODELS.build(multimodal_head) + + if tokenizer is not None: + self.tokenizer = TOKENIZER.build(tokenizer) + + self.momentum = momentum + self.negative_all_rank = negative_all_rank + self.temp = nn.Parameter(temperature * torch.ones([])) + # Shares the same para + self.head.temp = self.temp + + # create the momentum encoder + self.vision_backbone_m = deepcopy(self.vision_backbone) + self.text_backbone_m = deepcopy(self.text_backbone) + + self.vision_neck_m = deepcopy(self.vision_neck) + self.text_neck_m = deepcopy(self.text_neck) + + self.model_pairs = [ + [self.vision_backbone, self.vision_backbone_m], + [self.text_backbone, self.text_backbone_m], + [self.vision_neck, self.vision_neck_m], + [self.text_neck, self.text_neck_m], + ] + self.copy_params() + + # multimodal backone shares weights with text backbone in BLIP + # No need to set up + + # Notice that this topk is used for select k candidate to compute + # image-text score, but not the final metric topk in evaluation. + self.fast_match = fast_match + self.topk = topk + + self.max_txt_len = max_txt_len + + @property + def device(self): + return next(self.parameters()).device + + def preprocess_text(self, data_samples): + sample_item = data_samples[0] + + if sample_item is not None and 'text' in sample_item: + if isinstance(sample_item.get('text'), (list, tuple)): + texts = [] + for sample in data_samples: + texts.extend(sample.get('text')) + elif isinstance(sample_item.get('text'), str): + texts = [sample.get('text') for sample in data_samples] + else: + raise TypeError('text must be a string or a list of strings') + else: + return None + + # perform tokenize first if satisfied conditions + texts = self.tokenizer( + texts, + padding='max_length', + truncation=True, + max_length=self.max_txt_len, + return_tensors='pt', + ).to(self.device) + + return texts + + def forward(self, + images: torch.tensor = None, + data_samples: Optional[List[DataSample]] = None, + mode: str = 'tensor') -> Union[Tuple, dict]: + """The unified entry for a forward process in both training and test. 
+ The method should accept two modes: "tensor", and "loss": + + - "tensor": Forward the whole network and return tensor without any + post-processing, same as a common nn.Module. + - "loss": Forward and return a dict of losses according to the given + inputs and data samples. + + Note that this method doesn't handle neither back propagation nor + optimizer updating, which are done in the :meth:`train_step`. + + For unified "predict" mode in other mm repos. It is noticed that + image-text retrieval cannot perform batch prediction since it will go + through all the samples. A standard process of retrieval evaluation is + to extract and collect all feats, and then predict all samples. + Therefore the `predict` mode here is remained as a trigger + to inform use to choose the right configurations. + + Args: + images (torch.Tensor): The input inputs tensor of shape + (N, C, ...) in general. + data_samples (List[DataSample], optional): The annotation + data of every samples. It's required if ``mode="loss"``. + Defaults to None. + mode (str): Return what kind of value. Defaults to 'tensor'. + + Returns: + The return type depends on ``mode``. + - If ``mode="tensor"``, return a tuple. + - If ``mode="loss"``, return a dict of tensor. + """ + if mode == 'tensor': + return self.extract_feat(images, data_samples) + elif mode == 'loss': + return self.loss(images, data_samples) + elif mode == 'predict': + return self.predict(images, data_samples) + else: + raise RuntimeError(f'Invalid mode "{mode}".') + + def extract_feat( + self, + images: torch.Tensor = None, + data_samples: List[DataSample] = None, + return_texts=True, + return_embeds=None, + ) -> Dict[str, torch.Tensor]: + """Extract features from the input dict. + + Args: + images (tensor, optional): The images to extract features. + Defaults to None. + data_samples (list, optional): The data samples containing texts + to extract features. Defaults to None. + return_texts (bool): Whether to return the tokenized text and the + corresponding attention masks. Defaults to True. + return_embeds (bool): Whether to return the text embedding and + image embedding. Defaults to None, which means to use + ``self.fast_match``. + + Returns: + Tuple[torch.Tensor]: The output features. + If multimodal_backbone is not exist, tuple of torch.Tensor + will be returned. + """ + if data_samples is not None: + texts = self.preprocess_text(data_samples) + else: + texts = None + + assert images is not None or texts is not None, \ + 'At least single modality should be passed as inputs.' + + results = {} + if texts is not None and return_texts: + results.update({ + 'text_ids': texts.input_ids, + 'text_attn_mask': texts.attention_mask, + }) + + if return_embeds is None: + return_embeds = not self.fast_match + + # extract image features + if images is not None: + output = self._extract_feat(images, modality='images') + results['image_feat'] = output['image_feat'] + if return_embeds: + results['image_embeds'] = output['image_embeds'] + + # extract text features + if texts is not None: + output = self._extract_feat(texts, modality='texts') + results['text_feat'] = output['text_feat'] + if return_embeds: + results['text_embeds'] = output['text_embeds'] + + return results + + def _extract_feat(self, inputs: Union[torch.Tensor, dict], + modality: str) -> Tuple[torch.Tensor]: + """Extract features from the single modality. + + Args: + inputs (Union[torch.Tensor, dict]): A batch of inputs. + For image, a tensor of shape (N, C, ...) in general. 
+ For text, a dict of tokenized text inputs. + modality (str): Modality feature to be extracted. Only two + options are supported. + + - ``images``: Only extract image features, mostly used for + inference. + - ``texts``: Only extract text features, mostly used for + inference. + + Returns: + Tuple[torch.Tensor]: The output features. + """ + + if modality == 'images': + # extract image features + image_embeds = self.vision_backbone(inputs)[0] + image_feat = F.normalize( + self.vision_neck(image_embeds[:, 0, :]), dim=-1) + return {'image_embeds': image_embeds, 'image_feat': image_feat} + elif modality == 'texts': + # extract text features + text_output = self.text_backbone( + inputs.input_ids, + attention_mask=inputs.attention_mask, + token_type_ids=None, + return_dict=True, + mode='text', + ) + text_embeds = text_output.last_hidden_state + text_feat = F.normalize( + self.text_neck(text_embeds[:, 0, :]), dim=-1) + return {'text_embeds': text_embeds, 'text_feat': text_feat} + else: + raise RuntimeError(f'Invalid modality "{modality}".') + + def loss( + self, + images: torch.Tensor, + data_samples: Optional[List[DataSample]] = None, + ) -> Dict[str, torch.tensor]: + """Calculate losses from a batch of inputs and data samples. + + Args: + inputs (dict): A batch of inputs. The input tensor with of + at least one modality. For image, the value is a tensor + of shape (N, C, ...) in general. + For text, the value is a dict of tokenized text inputs. + data_samples (Optional[List[DataSample]]): + The annotation data of every samples. Defaults to None. + + Returns: + Dict[str, torch.tensor]: a dictionary of loss components of + both head and multimodal head. + """ + output = self.extract_feat(images, data_samples, return_embeds=True) + + text_ids = output['text_ids'] + text_attn_mask = output['text_attn_mask'] + image_embeds = output['image_embeds'] + image_feat = output['image_feat'] + text_feat = output['text_feat'] + + image_atts = torch.ones( + image_embeds.size()[:-1], dtype=torch.long).to(self.device) + + # get momentum features + with torch.no_grad(): + self._momentum_update() + image_embeds_m = self.vision_backbone_m(images)[0] + image_feat_m = F.normalize( + self.vision_neck_m(image_embeds_m[:, 0, :]), dim=-1) + + text_output_m = self.text_backbone_m( + text_ids, + attention_mask=text_attn_mask, + token_type_ids=None, + return_dict=True, + mode='text', + ) + text_embeds_m = text_output_m.last_hidden_state + text_feat_m = F.normalize( + self.text_neck_m(text_embeds_m[:, 0, :]), dim=-1) + + loss = self.head.loss( + ([image_feat, text_feat, image_feat_m, text_feat_m], ), + data_samples) + + # prepare for itm + encoder_input_ids = text_ids.clone() + encoder_input_ids[:, + 0] = self.tokenizer.additional_special_tokens_ids[0] + output_pos = self.text_backbone( + encoder_input_ids, + attention_mask=text_attn_mask, + encoder_hidden_states=image_embeds, + encoder_attention_mask=image_atts, + return_dict=True, + ) + + idx = torch.tensor([i.image_id for i in data_samples]).view(-1, 1) + bs = idx.size(0) + idxs = torch.cat(dist.all_gather(idx)) + if self.negative_all_rank: + # compute sample similarity + with torch.no_grad(): + mask = torch.eq(idx, idxs.t()).to(self.device) + + image_feat_world = torch.cat(dist.all_gather(image_feat)) + text_feat_world = torch.cat(dist.all_gather(text_feat)) + + sim_i2t = image_feat @ text_feat_world.t() / self.temp + sim_t2i = text_feat @ image_feat_world.t() / self.temp + + weights_i2t = F.softmax(sim_i2t, dim=1) + weights_i2t.masked_fill_(mask, 0) + + weights_t2i = 
F.softmax(sim_t2i, dim=1) + weights_t2i.masked_fill_(mask, 0) + + world_size = dist.get_world_size() + if world_size == 1: + image_embeds_world = image_embeds + else: + image_embeds_world = torch.cat( + torch_dist.nn.all_gather(image_embeds)) + + # select a negative image (from all ranks) for each text + image_embeds_neg = [] + for b in range(bs): + neg_idx = torch.multinomial(weights_t2i[b], 1).item() + image_embeds_neg.append(image_embeds_world[neg_idx]) + image_embeds_neg = torch.stack(image_embeds_neg, dim=0) + + # select a negative text (from all ranks) for each image + input_ids_world = torch.cat(dist.all_gather(encoder_input_ids)) + att_mask_world = torch.cat(dist.all_gather(text_attn_mask)) + + text_ids_neg = [] + text_atts_neg = [] + for b in range(bs): + neg_idx = torch.multinomial(weights_i2t[b], 1).item() + text_ids_neg.append(input_ids_world[neg_idx]) + text_atts_neg.append(att_mask_world[neg_idx]) + + text_ids_neg = torch.stack(text_ids_neg, dim=0) + text_atts_neg = torch.stack(text_atts_neg, dim=0) + + text_ids_all = torch.cat([encoder_input_ids, text_ids_neg], dim=0) + text_atts_all = torch.cat([text_attn_mask, text_atts_neg], dim=0) + + image_embeds_all = torch.cat([image_embeds_neg, image_embeds], dim=0) + image_atts_all = torch.cat([image_atts, image_atts], dim=0) + + output_neg = self.text_backbone( + text_ids_all, + attention_mask=text_atts_all, + encoder_hidden_states=image_embeds_all, + encoder_attention_mask=image_atts_all, + return_dict=True, + ) + + vl_embeddings = torch.cat( + [ + output_pos.last_hidden_state[:, 0, :], + output_neg.last_hidden_state[:, 0, :], + ], + dim=0, + ) + + # create false data samples + data_samples.extend( + [DataSample(is_matched=False) for _ in range(2 * bs)]) + loss_multimodal = self.multimodal_head.loss((vl_embeddings, ), + data_samples) + + return dict(ChainMap(loss, loss_multimodal)) + + def predict(self, images, data_samples, cal_i2t=True, cal_t2i=True): + feats = self.extract_feat(images, data_samples) + + return self.predict_all( + feats, data_samples, cal_i2t=cal_i2t, cal_t2i=cal_t2i) + + def predict_all(self, + feats, + data_samples, + num_images=None, + num_texts=None, + cal_i2t=True, + cal_t2i=True): + text_ids = feats['text_ids'] + text_ids[:, 0] = self.tokenizer.additional_special_tokens_ids[0] + text_attn_mask = feats['text_attn_mask'] + image_embeds = feats.get('image_embeds', None) + image_feat = feats['image_feat'] + text_feat = feats['text_feat'] + + num_images = num_images or image_feat.size(0) + num_texts = num_texts or text_feat.size(0) + + if not self.fast_match: + image_embeds_all = all_gather_concat(image_embeds)[:num_images] + else: + image_embeds_all = None + image_feat_all = all_gather_concat(image_feat)[:num_images] + text_feat_all = all_gather_concat(text_feat)[:num_texts] + text_ids_all = all_gather_concat(text_ids)[:num_texts] + text_attn_mask_all = all_gather_concat(text_attn_mask)[:num_texts] + + results = [] + if cal_i2t: + result_i2t = self.compute_score_matrix_i2t( + image_feat, + image_embeds, + text_feat_all, + text_ids_all, + text_attn_mask_all, + ) + results.append( + self._get_predictions(result_i2t, data_samples, mode='i2t')) + if cal_t2i: + result_t2i = self.compute_score_matrix_t2i( + image_feat_all, + image_embeds_all, + text_feat, + text_ids, + text_attn_mask, + ) + results.append( + self._get_predictions(result_t2i, data_samples, mode='t2i')) + return tuple(results) + + def compute_score_matrix_i2t(self, img_feats, img_embeds, text_feats, + text_ids, text_atts): + """Compare the score 
matrix for image-to-text retrieval. Every image + should compare to all the text features. + + Args: + img_feats (torch.Tensor): The input img feats tensor with shape + (M, C). M stands for numbers of samples on a single GPU. + img_embeds (torch.Tensor): The input img embeds tensor with shape + (M, C). M stands for numbers of samples on a single GPU. + text_feats (torch.Tensor): The input text feats tensor with shape + (N, C). N stands for numbers of all samples on all GPUs. + text_ids (torch.Tensor): The input tensor with shape (N, C). + text_atts (torch.Tensor): The input tensor with shape (N, C). + + Returns: + torch.Tensor: Score matrix of image-to-text retrieval. + """ + + # compute i2t sim matrix + sim_matrix_i2t = img_feats @ text_feats.t() + if self.fast_match: + return sim_matrix_i2t + + score_matrix_i2t = torch.full((img_feats.size(0), text_feats.size(0)), + -100.0).to(self.device) + for i in track_on_main_process( + range(img_feats.size(0)), 'Compute I2T scores...'): + sims = sim_matrix_i2t[i] + topk_sim, topk_idx = sims.topk(k=self.topk, dim=0) + + encoder_output = img_embeds[i].repeat(self.topk, 1, 1) + encoder_att = torch.ones( + encoder_output.size()[:-1], dtype=torch.long).to(self.device) + output = self.text_backbone( + text_ids[topk_idx], + attention_mask=text_atts[topk_idx], + encoder_hidden_states=encoder_output, + encoder_attention_mask=encoder_att, + return_dict=True, + ) + score = self.multimodal_head( + (output.last_hidden_state[:, 0, :], ))[:, 1] + score_matrix_i2t[i, topk_idx] = score + topk_sim + + return score_matrix_i2t + + def compute_score_matrix_t2i(self, img_feats, img_embeds, text_feats, + text_ids, text_atts): + """Compare the score matrix for text-to-image retrieval. Every text + should compare to all the image features. + + Args: + img_feats (torch.Tensor): The input img feats tensor with shape + (M, C). M stands for numbers of samples on a single GPU. + img_embeds (torch.Tensor): The input img embeds tensor with shape + (M, C). M stands for numbers of samples on a single GPU. + text_feats (torch.Tensor): The input text feats tensor with shape + (N, C). N stands for numbers of all samples on all GPUs. + text_ids (torch.Tensor): The input tensor with shape (M, C). + text_atts (torch.Tensor): The input tensor with shape (M, C). + + Returns: + torch.Tensor: Score matrix of text-to-image retrieval. + """ + + # compute t2i sim matrix + sim_matrix_t2i = text_feats @ img_feats.t() + if self.fast_match: + return sim_matrix_t2i + + score_matrix_t2i = torch.full((text_feats.size(0), img_feats.size(0)), + -100.0).to(self.device) + for i in track_on_main_process( + range(text_feats.size(0)), 'Compute T2I scores...'): + sims = sim_matrix_t2i[i] + topk_sim, topk_idx = sims.topk(k=self.topk, dim=0) + + encoder_output = img_embeds[topk_idx] + encoder_att = torch.ones( + encoder_output.size()[:-1], dtype=torch.long).to(self.device) + output = self.text_backbone( + text_ids[i].repeat(self.topk, 1), + attention_mask=text_atts[i].repeat(self.topk, 1), + encoder_hidden_states=encoder_output, + encoder_attention_mask=encoder_att, + return_dict=True, + ) + score = self.multimodal_head( + (output.last_hidden_state[:, 0, :], ))[:, 1] + score_matrix_t2i[i, topk_idx] = score + topk_sim + + return score_matrix_t2i + + def _get_predictions(self, + result: torch.Tensor, + data_samples: List[DataSample], + mode: str = 'i2t'): + """Post-process the output of retriever. + + Args: + result (torch.Tensor): Score matrix of single retrieve, + either from image or text. 
+ data_samples (List[DataSample], optional): The annotation + data of every samples. + mode (str): Retrieve mode, either `i2t` for image to text, or `t2i` + text to image. Defaults to `i2t`. + + Returns: + List[DataSample]: the raw data_samples with + the predicted results. + """ + + # create data sample if not exists + if data_samples is None: + data_samples = [DataSample() for _ in range(result.size(0))] + elif mode == 't2i': + # Process data samples to align with the num of texts. + new_data_samples = [] + for sample in data_samples: + if isinstance(sample.text, (list, tuple)): + texts = sample.text + else: + texts = [sample.text] + for i, text in enumerate(texts): + new_sample = DataSample(text=text) + if 'gt_image_id' in sample: + new_sample.gt_label = sample.gt_image_id[i] + new_data_samples.append(new_sample) + assert len(new_data_samples) == result.size(0) + data_samples = new_data_samples + elif mode == 'i2t': + for sample in data_samples: + if 'gt_text_id' in sample: + sample.gt_label = sample.gt_text_id + else: + raise ValueError(f'Type {mode} is not supported.') + + for data_sample, score in zip(data_samples, result): + idx = score.argmax(keepdim=True).detach() + + data_sample.set_pred_score(score) + data_sample.set_pred_label(idx) + return data_samples + + # TODO: add temperaily + @torch.no_grad() + def copy_params(self): + for model_pair in self.model_pairs: + for param, param_m in zip(model_pair[0].parameters(), + model_pair[1].parameters()): + param_m.data.copy_(param.data) # initialize + param_m.requires_grad = False # not update by gradient + + @torch.no_grad() + def _momentum_update(self): + for model_pair in self.model_pairs: + for (name, + param), (name_m, + param_m) in zip(model_pair[0].named_parameters(), + model_pair[1].named_parameters()): + # hack to behave the same + if any([i in name for i in ['8', '9', '10', '11'] + ]) and 'layers' in name and any( + [i in name for i in ['attn', 'ffn']]): + param_m.data = param.data + else: + param_m.data = param_m.data * self.momentum + \ + param.data * (1.0 - self.momentum) diff --git a/mmpretrain/models/multimodal/blip/blip_vqa.py b/mmpretrain/models/multimodal/blip/blip_vqa.py new file mode 100644 index 0000000000000000000000000000000000000000..d0f4e5861b5c92be302cc48eaa7a37264be63f93 --- /dev/null +++ b/mmpretrain/models/multimodal/blip/blip_vqa.py @@ -0,0 +1,265 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import List, Optional, Tuple, Union + +import torch +from mmengine.model import BaseModel + +from mmpretrain.registry import MODELS, TOKENIZER +from mmpretrain.structures import DataSample + + +@MODELS.register_module() +class BlipVQA(BaseModel): + """BLIP VQA. + + Args: + tokenizer: (dict): The config for tokenizer. + vision_backbone (dict): Encoder for extracting image features. + multimodal_backbone (dict): Backbone for extracting + multi-modal features. We apply this part as VQA fusion module. + head (dict): The head module to calculate + loss from processed features. + data_preprocessor (Optional[dict]): The config for preprocessing input + data. If None or no specified type, it will use + `MutimodalDataPreprocessor` as type. + See :class:`MutimodalDataPreprocessor` for more details. + Defaults to None. + init_cfg (Optional[dict]): the config to control the initialization. + Defaults to None. 
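+
+    Example:
+        A sketch of the two supported call modes (an assumption for
+        illustration: ``model`` is an already-built ``BlipVQA`` and every
+        sample carries a ``question`` and, for training, ``gt_answer``)::
+
+            >>> losses = model(images, data_samples, mode='loss')
+            >>> results = model(images, data_samples, mode='predict')
+            >>> results[0].pred_answer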
+ """ + + def __init__(self, + tokenizer: dict, + vision_backbone: dict, + multimodal_backbone: dict, + head: dict, + data_preprocessor: Optional[dict] = None, + init_cfg: Optional[dict] = None): + + if data_preprocessor is None: + data_preprocessor = {} + data_preprocessor.setdefault('type', 'MultiModalDataPreprocessor') + data_preprocessor = MODELS.build(data_preprocessor) + + super(BlipVQA, self).__init__( + init_cfg=init_cfg, data_preprocessor=data_preprocessor) + + self.tokenizer = TOKENIZER.build(tokenizer) + self.vision_backbone = MODELS.build(vision_backbone) + self.multimodal_backbone = MODELS.build(multimodal_backbone) + self.vqa_head = MODELS.build(head) + + @property + def device(self): + return next(self.parameters()).device + + def forward( + self, + images: torch.Tensor, + data_samples: Optional[List[DataSample]] = None, + mode: str = 'loss', + ): + """The unified entry for a forward process in both training and test. + + - "loss": For training. Forward and return a dict of losses according + to the given inputs and data samples. Note that this method doesn't + handle neither back propagation nor optimizer updating, which are + done in the :meth:`train_step`. + - "predict": For testing. Forward and return a list of data_sample that + contains pred_answer for each question. + + Args: + images (Tensor): A batch of images. The shape of it should be + (B, C, H, W) for images and (B, T, C, H, W) for videos. + data_samples (List[DataSample], optional): The annotation data of + every samples. Required when ``mode="loss"``. Defaults to None. + mode (str): Return what kind of value. Defaults to 'loss'. + + Returns: + The return type depends on ``mode``. + - If ``mode="loss"``, return a dict of tensor. + - If ``mode="predict"``, return a list of `DataSample` + """ + + if mode == 'loss': + return self.loss(images, data_samples) + elif mode == 'predict': + return self.predict(images, data_samples) + else: + raise RuntimeError(f'Invalid mode "{mode}".') + + def extract_feat(self, images: torch.Tensor) -> torch.Tensor: + """Extract features from the input tensor with shape (N, C, ..). + + Args: + images (Tensor): A batch of images. The shape of it should be + (B, C, H, W) for images and (B, T, C, H, W) for videos. + + Returns: + visual_embeds (Tensor): The output features. + """ + # extract visual feature + if images.ndim == 4: + visual_embeds = self.vision_backbone(images)[0] + elif images.ndim == 5: + # [batch, T, C, H, W] -> [batch * T, C, H, W] + bs = images.size(0) + images = images.reshape(-1, *images.shape[2:]) + visual_embeds = self.vision_backbone(images)[0] + # [batch * num_segs, L, dim] -> [batch, num_segs * L, dim] + visual_embeds = visual_embeds.reshape(bs, -1, + *visual_embeds.shape[2:]) + else: + raise ValueError( + f'Images with {images.ndim} dims is not supported.') + return visual_embeds + + def loss( + self, + images: torch.Tensor, + data_samples: Optional[List[DataSample]] = None, + ) -> Union[torch.Tensor, Tuple[torch.Tensor]]: + """generate train_loss from the input tensor and data_samples. + + Args: + images (Tensor): A batch of images. The shape of it should be + (B, C, H, W) for images and (B, T, C, H, W) for videos. + data_samples (List[DataSample], optional): The annotation + data of every samples. + + Returns: + Dict[torch.Tensor]: The losses features. 
+ """ + visual_embeds = self.extract_feat(images) + image_atts = torch.ones( + visual_embeds.size()[:-1], dtype=torch.long).to(self.device) + + questions = [] + for sample in data_samples: + questions.append(sample.get('question')) + questions = self.tokenizer( + questions, padding='longest', return_tensors='pt').to(self.device) + + questions.input_ids[:, 0] = \ + self.tokenizer.additional_special_tokens_ids[0] + + # multimodal fusion + multimodal_embeds = self.multimodal_backbone( + questions.input_ids, + attention_mask=questions.attention_mask, + encoder_hidden_states=visual_embeds, + encoder_attention_mask=image_atts, + return_dict=True, + ) + + # put answer from data_samples into tensor form + answer_raw_text = [] + for sample in data_samples: + answer_raw_text.extend(sample.gt_answer) + answer = self.tokenizer( + answer_raw_text, padding='longest', + return_tensors='pt').to(self.device) + answer_targets = answer.input_ids.masked_fill( + answer.input_ids == self.tokenizer.pad_token_id, -100) + for sample in data_samples: + # follow BLIP setting, set answer_weight to 0.2 for VG dataset. + if not hasattr(sample, 'gt_answer_weight'): + sample.gt_answer_weight = torch.tensor([0.2]) + else: + sample.gt_answer_weight = torch.tensor(sample.gt_answer_weight) + answer_weight = torch.cat( + [sample.gt_answer_weight for sample in data_samples], + dim=0).to(self.device) + answer_count = torch.tensor( + [len(sample.gt_answer) for sample in data_samples]).to(self.device) + + question_states, question_atts = [], [] + for b, n in enumerate(answer_count): + question_states += [multimodal_embeds.last_hidden_state[b]] * n + question_atts += [questions.attention_mask[b]] * n + + question_states = torch.stack(question_states, dim=0).to(self.device) + question_atts = torch.stack(question_atts, dim=0).to(self.device) + + head_feats = dict( + answer_input_ids=answer.input_ids, + answer_attention_mask=answer.attention_mask, + answer_weight=answer_weight, + answer_targets=answer_targets, + question_states=question_states, + question_atts=question_atts, + batch_size=len(data_samples), + ) + + losses = self.vqa_head.loss(head_feats) + + return losses + + def predict( + self, + images: torch.Tensor, + data_samples: Optional[List[DataSample]] = None, + ): + """update data_samples that contain pred_answer for each question. + + Args: + images (Tensor): A batch of images. The shape of it should be + (B, C, H, W) for images and (B, T, C, H, W) for videos. + data_samples (List[DataSample], optional): The annotation + data of every samples. + + Returns: + Dict[torch.Tensor]: The losses features. 
+ """ + visual_embeds = self.extract_feat(images) + image_atts = torch.ones( + visual_embeds.size()[:-1], dtype=torch.long).to(self.device) + + questions = [] + for sample in data_samples: + questions.append(sample.get('question')) + questions = self.tokenizer( + questions, padding='longest', return_tensors='pt').to(self.device) + + questions.input_ids[:, 0] = \ + self.tokenizer.additional_special_tokens_ids[0] + + # multimodal fusion + multimodal_embeds = self.multimodal_backbone( + questions.input_ids, + attention_mask=questions.attention_mask, + encoder_hidden_states=visual_embeds, + encoder_attention_mask=image_atts, + return_dict=True, + ) + + if self.vqa_head.inference_method == 'rank': + answer_candidates = self.tokenizer( + self.vqa_head.answer_list, + padding='longest', + return_tensors='pt').to(self.device) + answer_candidates.input_ids[:, 0] = self.tokenizer.bos_token_id + elif self.vqa_head.inference_method == 'generate': + answer_candidates = None + + head_feats = dict( + multimodal_embeds=multimodal_embeds.last_hidden_state, + question_atts=questions.attention_mask, + answer_candidates=answer_candidates, + bos_token_id=self.tokenizer.bos_token_id, + sep_token_id=self.tokenizer.sep_token_id, + pad_token_id=self.tokenizer.pad_token_id, + ) + + if self.vqa_head.inference_method == 'rank': + answers = self.vqa_head.predict(head_feats) + for answer, data_sample in zip(answers, data_samples): + data_sample.pred_answer = answer + + elif self.vqa_head.inference_method == 'generate': + outputs = self.vqa_head.predict(head_feats) + for output, data_sample in zip(outputs, data_samples): + data_sample.pred_answer = self.tokenizer.decode( + output, skip_special_tokens=True) + + return data_samples diff --git a/mmpretrain/models/multimodal/blip/language_model.py b/mmpretrain/models/multimodal/blip/language_model.py new file mode 100644 index 0000000000000000000000000000000000000000..48605a95f60550e970f893f55c4a43e03efb74df --- /dev/null +++ b/mmpretrain/models/multimodal/blip/language_model.py @@ -0,0 +1,1320 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+ +# flake8: noqa + +import math +from typing import Tuple + +import torch +import torch.nn as nn +from torch import Tensor, device + +try: + from transformers.activations import ACT2FN + from transformers.modeling_outputs import ( + BaseModelOutputWithPastAndCrossAttentions, + BaseModelOutputWithPoolingAndCrossAttentions, + CausalLMOutputWithCrossAttentions) + from transformers.modeling_utils import (PreTrainedModel, + apply_chunking_to_forward, + find_pruneable_heads_and_indices, + prune_linear_layer) + from transformers.models.bert.configuration_bert import BertConfig +except: + ACT2FN = None + BaseModelOutputWithPastAndCrossAttentions = None + BaseModelOutputWithPoolingAndCrossAttentions = None + CausalLMOutputWithCrossAttentions = None + PreTrainedModel = None + apply_chunking_to_forward = None + find_pruneable_heads_and_indices = None + prune_linear_layer = None + BertConfig = None + +from mmpretrain.registry import MODELS + + +class BertEmbeddings(nn.Module): + """Construct the embeddings from word and position embeddings.""" + + def __init__(self, config): + super().__init__() + self.word_embeddings = nn.Embedding( + config.vocab_size, + config.hidden_size, + padding_idx=config.pad_token_id) + self.position_embeddings = nn.Embedding(config.max_position_embeddings, + config.hidden_size) + + if config.add_type_embeddings: + self.token_type_embeddings = nn.Embedding(config.type_vocab_size, + config.hidden_size) + + # self.LayerNorm is not snake-cased to stick with TensorFlow model variable name and be able to load + # any TensorFlow checkpoint file + self.LayerNorm = nn.LayerNorm( + config.hidden_size, eps=config.layer_norm_eps) + self.dropout = nn.Dropout(config.hidden_dropout_prob) + + # position_ids (1, len position emb) is contiguous in memory and exported when serialized + self.register_buffer( + 'position_ids', + torch.arange(config.max_position_embeddings).expand((1, -1))) + self.position_embedding_type = getattr(config, + 'position_embedding_type', + 'absolute') + + self.config = config + + def forward( + self, + input_ids=None, + token_type_ids=None, + position_ids=None, + inputs_embeds=None, + past_key_values_length=0, + ): + if input_ids is not None: + input_shape = input_ids.size() + else: + input_shape = inputs_embeds.size()[:-1] + + seq_length = input_shape[1] + + if position_ids is None: + position_ids = self.position_ids[:, past_key_values_length: + seq_length + + past_key_values_length] + + if inputs_embeds is None: + inputs_embeds = self.word_embeddings(input_ids) + + if token_type_ids is not None: + token_type_embeddings = self.token_type_embeddings(token_type_ids) + + embeddings = inputs_embeds + token_type_embeddings + else: + embeddings = inputs_embeds + + if self.position_embedding_type == 'absolute': + position_embeddings = self.position_embeddings(position_ids) + embeddings += position_embeddings + embeddings = self.LayerNorm(embeddings) + embeddings = self.dropout(embeddings) + return embeddings + + +class BertPooler(nn.Module): + + def __init__(self, config): + super().__init__() + self.dense = nn.Linear(config.hidden_size, config.hidden_size) + self.activation = nn.Tanh() + + def forward(self, hidden_states): + # We "pool" the model by simply taking the hidden state corresponding + # to the first token. 
+ first_token_tensor = hidden_states[:, 0] + pooled_output = self.dense(first_token_tensor) + pooled_output = self.activation(pooled_output) + return pooled_output + + +class BertPreTrainedModel(PreTrainedModel): + """An abstract class to handle weights initialization and a simple + interface for downloading and loading pretrained models.""" + + config_class = BertConfig + base_model_prefix = 'bert' + _keys_to_ignore_on_load_missing = [r'position_ids'] + + def _init_weights(self, module): + """Initialize the weights.""" + if isinstance(module, (nn.Linear, nn.Embedding)): + # Slightly different from the TF version which uses truncated_normal for initialization + # cf https://github.com/pytorch/pytorch/pull/5617 + module.weight.data.normal_( + mean=0.0, std=self.config.initializer_range) + elif isinstance(module, nn.LayerNorm): + module.bias.data.zero_() + module.weight.data.fill_(1.0) + if isinstance(module, nn.Linear) and module.bias is not None: + module.bias.data.zero_() + + +class BertSelfAttention(nn.Module): + + def __init__(self, config, is_cross_attention): + super().__init__() + self.config = config + if config.hidden_size % config.num_attention_heads != 0 and not hasattr( + config, 'embedding_size'): + raise ValueError( + 'The hidden size (%d) is not a multiple of the number of attention ' + 'heads (%d)' % + (config.hidden_size, config.num_attention_heads)) + + self.num_attention_heads = config.num_attention_heads + self.attention_head_size = int(config.hidden_size / + config.num_attention_heads) + self.all_head_size = self.num_attention_heads * self.attention_head_size + + self.query = nn.Linear(config.hidden_size, self.all_head_size) + if is_cross_attention: + self.key = nn.Linear(config.encoder_width, self.all_head_size) + self.value = nn.Linear(config.encoder_width, self.all_head_size) + else: + self.key = nn.Linear(config.hidden_size, self.all_head_size) + self.value = nn.Linear(config.hidden_size, self.all_head_size) + + self.dropout = nn.Dropout(config.attention_probs_dropout_prob) + self.position_embedding_type = getattr(config, + 'position_embedding_type', + 'absolute') + if (self.position_embedding_type == 'relative_key' + or self.position_embedding_type == 'relative_key_query'): + self.max_position_embeddings = config.max_position_embeddings + self.distance_embedding = nn.Embedding( + 2 * config.max_position_embeddings - 1, + self.attention_head_size) + self.save_attention = False + + def save_attn_gradients(self, attn_gradients): + self.attn_gradients = attn_gradients + + def get_attn_gradients(self): + return self.attn_gradients + + def save_attention_map(self, attention_map): + self.attention_map = attention_map + + def get_attention_map(self): + return self.attention_map + + def transpose_for_scores(self, x): + new_x_shape = x.size()[:-1] + ( + self.num_attention_heads, + self.attention_head_size, + ) + x = x.view(*new_x_shape) + return x.permute(0, 2, 1, 3) + + def forward( + self, + hidden_states, + attention_mask=None, + head_mask=None, + encoder_hidden_states=None, + encoder_attention_mask=None, + past_key_value=None, + output_attentions=False, + ): + mixed_query_layer = self.query(hidden_states) + + # If this is instantiated as a cross-attention module, the keys + # and values come from an encoder; the attention mask needs to be + # such that the encoder's padding tokens are not attended to. 
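+        # Three cases are handled below:
+        #   * cross-attention: keys/values are projected from
+        #     ``encoder_hidden_states`` (e.g. image features);
+        #   * cached decoding: new keys/values are appended to
+        #     ``past_key_value`` along the sequence dimension;
+        #   * plain self-attention: keys/values come from ``hidden_states``.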
+ is_cross_attention = encoder_hidden_states is not None + + if is_cross_attention: + key_layer = self.transpose_for_scores( + self.key(encoder_hidden_states)) + value_layer = self.transpose_for_scores( + self.value(encoder_hidden_states)) + attention_mask = encoder_attention_mask + elif past_key_value is not None: + key_layer = self.transpose_for_scores(self.key(hidden_states)) + value_layer = self.transpose_for_scores(self.value(hidden_states)) + key_layer = torch.cat([past_key_value[0], key_layer], dim=2) + value_layer = torch.cat([past_key_value[1], value_layer], dim=2) + else: + key_layer = self.transpose_for_scores(self.key(hidden_states)) + value_layer = self.transpose_for_scores(self.value(hidden_states)) + + query_layer = self.transpose_for_scores(mixed_query_layer) + + past_key_value = (key_layer, value_layer) + + # Take the dot product between "query" and "key" to get the raw attention scores. + attention_scores = torch.matmul(query_layer, + key_layer.transpose(-1, -2)) + + if (self.position_embedding_type == 'relative_key' + or self.position_embedding_type == 'relative_key_query'): + seq_length = hidden_states.size()[1] + position_ids_l = torch.arange( + seq_length, dtype=torch.long, + device=hidden_states.device).view(-1, 1) + position_ids_r = torch.arange( + seq_length, dtype=torch.long, + device=hidden_states.device).view(1, -1) + distance = position_ids_l - position_ids_r + positional_embedding = self.distance_embedding( + distance + self.max_position_embeddings - 1) + positional_embedding = positional_embedding.to( + dtype=query_layer.dtype) # fp16 compatibility + + if self.position_embedding_type == 'relative_key': + relative_position_scores = torch.einsum( + 'bhld,lrd->bhlr', query_layer, positional_embedding) + attention_scores = attention_scores + relative_position_scores + elif self.position_embedding_type == 'relative_key_query': + relative_position_scores_query = torch.einsum( + 'bhld,lrd->bhlr', query_layer, positional_embedding) + relative_position_scores_key = torch.einsum( + 'bhrd,lrd->bhlr', key_layer, positional_embedding) + attention_scores = ( + attention_scores + relative_position_scores_query + + relative_position_scores_key) + + attention_scores = attention_scores / math.sqrt( + self.attention_head_size) + if attention_mask is not None: + # Apply the attention mask is (precomputed for all layers in BertModel forward() function) + attention_scores = attention_scores + attention_mask + + # Normalize the attention scores to probabilities. + attention_probs = nn.Softmax(dim=-1)(attention_scores) + + if is_cross_attention and self.save_attention: + self.save_attention_map(attention_probs) + attention_probs.register_hook(self.save_attn_gradients) + + # This is actually dropping out entire tokens to attend to, which might + # seem a bit unusual, but is taken from the original Transformer paper. 
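+        # Note: ``save_attention_map`` above caches the un-dropped
+        # ``attention_probs``; only the dropped copy below is used to
+        # aggregate the value vectors.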
+ attention_probs_dropped = self.dropout(attention_probs) + + # Mask heads if we want to + if head_mask is not None: + attention_probs_dropped = attention_probs_dropped * head_mask + + context_layer = torch.matmul(attention_probs_dropped, value_layer) + + context_layer = context_layer.permute(0, 2, 1, 3).contiguous() + new_context_layer_shape = context_layer.size()[:-2] + ( + self.all_head_size, ) + context_layer = context_layer.view(*new_context_layer_shape) + + outputs = ((context_layer, attention_probs) if output_attentions else + (context_layer, )) + + outputs = outputs + (past_key_value, ) + return outputs + + +class BertSelfOutput(nn.Module): + + def __init__(self, config, twin=False, merge=False): + super().__init__() + self.LayerNorm = nn.LayerNorm( + config.hidden_size, eps=config.layer_norm_eps) + self.dropout = nn.Dropout(config.hidden_dropout_prob) + if twin: + self.dense0 = nn.Linear(config.hidden_size, config.hidden_size) + self.dense1 = nn.Linear(config.hidden_size, config.hidden_size) + else: + self.dense = nn.Linear(config.hidden_size, config.hidden_size) + if merge: + self.act = ACT2FN[config.hidden_act] + self.merge_layer = nn.Linear(config.hidden_size * 2, + config.hidden_size) + self.merge = True + else: + self.merge = False + + def forward(self, hidden_states, input_tensor): + if type(hidden_states) == list: + hidden_states0 = self.dense0(hidden_states[0]) + hidden_states1 = self.dense1(hidden_states[1]) + if self.merge: + hidden_states = self.merge_layer( + torch.cat([hidden_states0, hidden_states1], dim=-1)) + else: + hidden_states = (hidden_states0 + hidden_states1) / 2 + else: + hidden_states = self.dense(hidden_states) + hidden_states = self.dropout(hidden_states) + hidden_states = self.LayerNorm(hidden_states + input_tensor) + return hidden_states + + +class BertAttention(nn.Module): + + def __init__(self, config, is_cross_attention=False, layer_num=-1): + super().__init__() + is_nlvr = is_cross_attention and getattr(config, 'nlvr', False) + if is_nlvr: + self.self0 = BertSelfAttention(config, is_nlvr) + self.self1 = BertSelfAttention(config, is_nlvr) + else: + self.self = BertSelfAttention(config, is_cross_attention) + self.output = BertSelfOutput( + config, + twin=is_nlvr, + merge=(is_nlvr and layer_num >= 6), + ) + self.pruned_heads = set() + + def prune_heads(self, heads): + if len(heads) == 0: + return + heads, index = find_pruneable_heads_and_indices( + heads, + self.self.num_attention_heads, + self.self.attention_head_size, + self.pruned_heads, + ) + + # Prune linear layers + self.self.query = prune_linear_layer(self.self.query, index) + self.self.key = prune_linear_layer(self.self.key, index) + self.self.value = prune_linear_layer(self.self.value, index) + self.output.dense = prune_linear_layer(self.output.dense, index, dim=1) + + # Update hyper params and store pruned heads + self.self.num_attention_heads = self.self.num_attention_heads - len( + heads) + self.self.all_head_size = ( + self.self.attention_head_size * self.self.num_attention_heads) + self.pruned_heads = self.pruned_heads.union(heads) + + def forward( + self, + hidden_states, + attention_mask=None, + head_mask=None, + encoder_hidden_states=None, + encoder_attention_mask=None, + past_key_value=None, + output_attentions=False, + ): + if type(encoder_hidden_states) == list: + self_outputs0 = self.self0( + hidden_states, + attention_mask, + head_mask, + encoder_hidden_states[0], + encoder_attention_mask[0], + past_key_value, + output_attentions, + ) + self_outputs1 = self.self1( + 
hidden_states, + attention_mask, + head_mask, + encoder_hidden_states[1], + encoder_attention_mask[1], + past_key_value, + output_attentions, + ) + attention_output = self.output( + [self_outputs0[0], self_outputs1[0]], hidden_states) + + outputs = (attention_output, ) + self_outputs0[ + 1:] # add attentions if we output them + else: + self_outputs = self.self( + hidden_states, + attention_mask, + head_mask, + encoder_hidden_states, + encoder_attention_mask, + past_key_value, + output_attentions, + ) + attention_output = self.output(self_outputs[0], hidden_states) + outputs = (attention_output, + ) + self_outputs[1:] # add attentions if we output them + return outputs + + +class BertIntermediate(nn.Module): + + def __init__(self, config): + super().__init__() + self.dense = nn.Linear(config.hidden_size, config.intermediate_size) + if isinstance(config.hidden_act, str): + self.intermediate_act_fn = ACT2FN[config.hidden_act] + else: + self.intermediate_act_fn = config.hidden_act + + def forward(self, hidden_states): + hidden_states = self.dense(hidden_states) + hidden_states = self.intermediate_act_fn(hidden_states) + return hidden_states + + +class BertOutput(nn.Module): + + def __init__(self, config): + super().__init__() + self.dense = nn.Linear(config.intermediate_size, config.hidden_size) + self.LayerNorm = nn.LayerNorm( + config.hidden_size, eps=config.layer_norm_eps) + self.dropout = nn.Dropout(config.hidden_dropout_prob) + + def forward(self, hidden_states, input_tensor): + hidden_states = self.dense(hidden_states) + hidden_states = self.dropout(hidden_states) + hidden_states = self.LayerNorm(hidden_states + input_tensor) + return hidden_states + + +class BertLayer(nn.Module): + + def __init__(self, config, layer_num): + super().__init__() + self.config = config + self.chunk_size_feed_forward = config.chunk_size_feed_forward + self.seq_len_dim = 1 + self.attention = BertAttention(config) + self.layer_num = layer_num + + # compatibility for ALBEF and BLIP + try: + # ALBEF & ALPRO + fusion_layer = self.config.fusion_layer + add_cross_attention = ( + fusion_layer <= layer_num and self.config.add_cross_attention) + + self.fusion_layer = fusion_layer + except AttributeError: + # BLIP + self.fusion_layer = self.config.num_hidden_layers + add_cross_attention = self.config.add_cross_attention + + # if self.config.add_cross_attention: + if self.config.add_cross_attention: + self.crossattention = BertAttention( + config, + is_cross_attention=self.config.add_cross_attention, + layer_num=layer_num, + ) + self.intermediate = BertIntermediate(config) + self.output = BertOutput(config) + + def forward( + self, + hidden_states, + attention_mask=None, + head_mask=None, + encoder_hidden_states=None, + encoder_attention_mask=None, + past_key_value=None, + output_attentions=False, + mode=None, + ): + # decoder uni-directional self-attention cached key/values tuple is at positions 1,2 + self_attn_past_key_value = ( + past_key_value[:2] if past_key_value is not None else None) + self_attention_outputs = self.attention( + hidden_states, + attention_mask, + head_mask, + output_attentions=output_attentions, + past_key_value=self_attn_past_key_value, + ) + attention_output = self_attention_outputs[0] + + outputs = self_attention_outputs[1:-1] + present_key_value = self_attention_outputs[-1] + + # TODO line 482 in albef/models/xbert.py + # compatibility for ALBEF and BLIP + if mode in ['multimodal', 'fusion'] and hasattr( + self, 'crossattention'): + assert ( + encoder_hidden_states is not None + ), 
'encoder_hidden_states must be given for cross-attention layers' + + cross_attention_outputs = self.crossattention( + attention_output, + attention_mask, + head_mask, + encoder_hidden_states, + encoder_attention_mask, + output_attentions=output_attentions, + ) + attention_output = cross_attention_outputs[0] + outputs = (outputs + cross_attention_outputs[1:-1] + ) # add cross attentions if we output attention weights + layer_output = apply_chunking_to_forward( + self.feed_forward_chunk, + self.chunk_size_feed_forward, + self.seq_len_dim, + attention_output, + ) + outputs = (layer_output, ) + outputs + + outputs = outputs + (present_key_value, ) + + return outputs + + def feed_forward_chunk(self, attention_output): + intermediate_output = self.intermediate(attention_output) + layer_output = self.output(intermediate_output, attention_output) + return layer_output + + +class BertEncoder(nn.Module): + + def __init__(self, config): + super().__init__() + self.config = config + self.layer = nn.ModuleList( + [BertLayer(config, i) for i in range(config.num_hidden_layers)]) + self.gradient_checkpointing = False + + def forward( + self, + hidden_states, + attention_mask=None, + head_mask=None, + encoder_hidden_states=None, + encoder_attention_mask=None, + past_key_values=None, + use_cache=None, + output_attentions=False, + output_hidden_states=False, + return_dict=True, + mode='multimodal', + ): + all_hidden_states = () if output_hidden_states else None + all_self_attentions = () if output_attentions else None + all_cross_attentions = (() if output_attentions + and self.config.add_cross_attention else None) + + next_decoder_cache = () if use_cache else None + + try: + # ALBEF + fusion_layer = self.config.fusion_layer + except AttributeError: + # BLIP + fusion_layer = self.config.num_hidden_layers + + if mode == 'text': + start_layer = 0 + # output_layer = self.config.fusion_layer + output_layer = fusion_layer + + elif mode == 'fusion': + # start_layer = self.config.fusion_layer + start_layer = fusion_layer + output_layer = self.config.num_hidden_layers + + elif mode == 'multimodal': + start_layer = 0 + output_layer = self.config.num_hidden_layers + + # compatibility for ALBEF and BLIP + # for i in range(self.config.num_hidden_layers): + for i in range(start_layer, output_layer): + layer_module = self.layer[i] + if output_hidden_states: + all_hidden_states = all_hidden_states + (hidden_states, ) + + layer_head_mask = head_mask[i] if head_mask is not None else None + past_key_value = past_key_values[ + i] if past_key_values is not None else None + + # TODO pay attention to this. + if self.gradient_checkpointing and self.training: + + if use_cache: + # TODO: logger here + # logger.warn( + # "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..." 
+ # ) + use_cache = False + + def create_custom_forward(module): + + def custom_forward(*inputs): + return module(*inputs, past_key_value, + output_attentions) + + return custom_forward + + layer_outputs = torch.utils.checkpoint.checkpoint( + create_custom_forward(layer_module), + hidden_states, + attention_mask, + layer_head_mask, + encoder_hidden_states, + encoder_attention_mask, + mode=mode, + ) + else: + layer_outputs = layer_module( + hidden_states, + attention_mask, + layer_head_mask, + encoder_hidden_states, + encoder_attention_mask, + past_key_value, + output_attentions, + mode=mode, + ) + + hidden_states = layer_outputs[0] + if use_cache: + next_decoder_cache += (layer_outputs[-1], ) + if output_attentions: + all_self_attentions = all_self_attentions + ( + layer_outputs[1], ) + + if output_hidden_states: + all_hidden_states = all_hidden_states + (hidden_states, ) + + if not return_dict: + return tuple(v for v in [ + hidden_states, + next_decoder_cache, + all_hidden_states, + all_self_attentions, + all_cross_attentions, + ] if v is not None) + return BaseModelOutputWithPastAndCrossAttentions( + last_hidden_state=hidden_states, + past_key_values=next_decoder_cache, + hidden_states=all_hidden_states, + attentions=all_self_attentions, + cross_attentions=all_cross_attentions, + ) + + +class BertPredictionHeadTransform(nn.Module): + + def __init__(self, config): + super().__init__() + self.dense = nn.Linear(config.hidden_size, config.hidden_size) + if isinstance(config.hidden_act, str): + self.transform_act_fn = ACT2FN[config.hidden_act] + else: + self.transform_act_fn = config.hidden_act + self.LayerNorm = nn.LayerNorm( + config.hidden_size, eps=config.layer_norm_eps) + + def forward(self, hidden_states): + hidden_states = self.dense(hidden_states) + hidden_states = self.transform_act_fn(hidden_states) + hidden_states = self.LayerNorm(hidden_states) + return hidden_states + + +class BertLMPredictionHead(nn.Module): + + def __init__(self, config): + super().__init__() + self.transform = BertPredictionHeadTransform(config) + + # The output weights are the same as the input embeddings, but there is + # an output-only bias for each token. + self.decoder = nn.Linear( + config.hidden_size, config.vocab_size, bias=False) + + self.bias = nn.Parameter(torch.zeros(config.vocab_size)) + + # Need a link between the two variables so that the bias is correctly resized with `resize_token_embeddings` + self.decoder.bias = self.bias + + def forward(self, hidden_states): + hidden_states = self.transform(hidden_states) + hidden_states = self.decoder(hidden_states) + return hidden_states + + +class BertOnlyMLMHead(nn.Module): + + def __init__(self, config): + super().__init__() + self.predictions = BertLMPredictionHead(config) + + def forward(self, sequence_output): + prediction_scores = self.predictions(sequence_output) + return prediction_scores + + +@MODELS.register_module() +class BertModel(BertPreTrainedModel): + """The model can behave as an encoder (with only self-attention) as well as + a decoder, in which case a layer of cross-attention is added between the + self-attention layers, following the architecture described in `Attention + is all you need `__ by Ashish Vaswani, + Noam Shazeer, Niki Parmar, Jakob Uszkoreit, Llion Jones, Aidan N. + + Gomez, Lukasz Kaiser and Illia Polosukhin. argument and + :obj:`add_cross_attention` set to :obj:`True`; an + :obj:`encoder_hidden_states` is then expected as an input to the forward + pass. 
+ """ + + def __init__(self, config, add_pooling_layer=True): + if not isinstance(config, BertConfig): + config = BertConfig.from_dict(config) + + super().__init__(config) + self.config = config + + self.embeddings = BertEmbeddings(config) + + self.encoder = BertEncoder(config) + + self.pooler = BertPooler(config) if add_pooling_layer else None + + self.init_weights() + + def get_input_embeddings(self): + return self.embeddings.word_embeddings + + def set_input_embeddings(self, value): + self.embeddings.word_embeddings = value + + def _prune_heads(self, heads_to_prune): + """Prunes heads of the model. + + heads_to_prune: dict of {layer_num: list of heads to prune in this layer} See base + class PreTrainedModel + """ + for layer, heads in heads_to_prune.items(): + self.encoder.layer[layer].attention.prune_heads(heads) + + def get_extended_attention_mask( + self, + attention_mask: Tensor, + input_shape: Tuple[int], + device: device, + is_decoder: bool, + ) -> Tensor: + """Makes broadcastable attention and causal masks so that future and + masked tokens are ignored. + + Arguments: + attention_mask (:obj:`torch.Tensor`): + Mask with ones indicating tokens to attend to, zeros for tokens to ignore. + input_shape (:obj:`Tuple[int]`): + The shape of the input to the model. + device: (:obj:`torch.device`): + The device of the input to the model. + + Returns: + :obj:`torch.Tensor` The extended attention mask, with a the same dtype as :obj:`attention_mask.dtype`. + """ + # We can provide a self-attention mask of dimensions [batch_size, from_seq_length, to_seq_length] + # ourselves in which case we just need to make it broadcastable to all heads. + if attention_mask.dim() == 3: + extended_attention_mask = attention_mask[:, None, :, :] + elif attention_mask.dim() == 2: + # Provided a padding mask of dimensions [batch_size, seq_length] + # - if the model is a decoder, apply a causal mask in addition to the padding mask + # - if the model is an encoder, make the mask broadcastable to [batch_size, num_heads, seq_length, seq_length] + if is_decoder: + batch_size, seq_length = input_shape + + seq_ids = torch.arange(seq_length, device=device) + causal_mask = ( + seq_ids[None, None, :].repeat(batch_size, seq_length, 1) <= + seq_ids[None, :, None]) + # in case past_key_values are used we need to add a prefix ones mask to the causal mask + # causal and attention masks must have same type with pytorch version < 1.3 + causal_mask = causal_mask.to(attention_mask.dtype) + + if causal_mask.shape[1] < attention_mask.shape[1]: + prefix_seq_len = attention_mask.shape[ + 1] - causal_mask.shape[1] + causal_mask = torch.cat( + [ + torch.ones( + (batch_size, seq_length, prefix_seq_len), + device=device, + dtype=causal_mask.dtype, + ), + causal_mask, + ], + axis=-1, + ) + + extended_attention_mask = ( + causal_mask[:, None, :, :] * + attention_mask[:, None, None, :]) + else: + extended_attention_mask = attention_mask[:, None, None, :] + else: + raise ValueError( + 'Wrong shape for input_ids (shape {}) or attention_mask (shape {})' + .format(input_shape, attention_mask.shape)) + + # Since attention_mask is 1.0 for positions we want to attend and 0.0 for + # masked positions, this operation will create a tensor which is 0.0 for + # positions we want to attend and -10000.0 for masked positions. + # Since we are adding it to the raw scores before the softmax, this is + # effectively the same as removing these entirely. 
+ extended_attention_mask = extended_attention_mask.to( + dtype=self.dtype) # fp16 compatibility + extended_attention_mask = (1.0 - extended_attention_mask) * -10000.0 + return extended_attention_mask + + def forward( + self, + input_ids=None, + attention_mask=None, + token_type_ids=None, + position_ids=None, + head_mask=None, + inputs_embeds=None, + encoder_embeds=None, + encoder_hidden_states=None, + encoder_attention_mask=None, + past_key_values=None, + use_cache=None, + output_attentions=None, + output_hidden_states=None, + return_dict=None, + is_decoder=False, + mode='multimodal', + ): + r""" + encoder_hidden_states (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`): + Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention if + the model is configured as a decoder. + encoder_attention_mask (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`): + Mask to avoid performing attention on the padding token indices of the encoder input. This mask is used in + the cross-attention if the model is configured as a decoder. Mask values selected in ``[0, 1]``: + - 1 for tokens that are **not masked**, + - 0 for tokens that are **masked**. + past_key_values (:obj:`tuple(tuple(torch.FloatTensor))` of length :obj:`config.n_layers` with each tuple having 4 tensors of shape :obj:`(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`): + Contains precomputed key and value hidden states of the attention blocks. Can be used to speed up decoding. + If :obj:`past_key_values` are used, the user can optionally input only the last :obj:`decoder_input_ids` + (those that don't have their past key value states given to this model) of shape :obj:`(batch_size, 1)` + instead of all :obj:`decoder_input_ids` of shape :obj:`(batch_size, sequence_length)`. + use_cache (:obj:`bool`, `optional`): + If set to :obj:`True`, :obj:`past_key_values` key value states are returned and can be used to speed up + decoding (see :obj:`past_key_values`). 
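+
+        Example:
+            A sketch of the two call patterns used elsewhere in this file
+            (assumptions for illustration: ``model`` is a built ``BertModel``
+            with cross-attention enabled, ``tokens`` is a tokenizer output and
+            ``visual_embeds`` the flattened image features)::
+
+                >>> image_atts = torch.ones(
+                ...     visual_embeds.size()[:-1], dtype=torch.long)
+                >>> # text-only encoding, runs layers up to the fusion layer
+                >>> out = model(tokens.input_ids,
+                ...             attention_mask=tokens.attention_mask,
+                ...             return_dict=True, mode='text')
+                >>> # fused encoding with cross-attention to the image
+                >>> out = model(tokens.input_ids,
+                ...             attention_mask=tokens.attention_mask,
+                ...             encoder_hidden_states=visual_embeds,
+                ...             encoder_attention_mask=image_atts,
+                ...             return_dict=True, mode='multimodal')
+                >>> out.last_hidden_state.shape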
+ """ + output_attentions = ( + output_attentions if output_attentions is not None else + self.config.output_attentions) + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else + self.config.output_hidden_states) + return_dict = ( + return_dict + if return_dict is not None else self.config.use_return_dict) + + if is_decoder: + use_cache = use_cache if use_cache is not None else self.config.use_cache + else: + use_cache = False + + if input_ids is not None and inputs_embeds is not None: + raise ValueError( + 'You cannot specify both input_ids and inputs_embeds at the same time' + ) + elif input_ids is not None: + input_shape = input_ids.size() + batch_size, seq_length = input_shape + device = input_ids.device + elif inputs_embeds is not None: + input_shape = inputs_embeds.size()[:-1] + batch_size, seq_length = input_shape + device = inputs_embeds.device + elif encoder_embeds is not None: + input_shape = encoder_embeds.size()[:-1] + batch_size, seq_length = input_shape + device = encoder_embeds.device + else: + raise ValueError( + 'You have to specify either input_ids or inputs_embeds or encoder_embeds' + ) + + # past_key_values_length + past_key_values_length = ( + past_key_values[0][0].shape[2] + if past_key_values is not None else 0) + + if attention_mask is None: + attention_mask = torch.ones( + ((batch_size, seq_length + past_key_values_length)), + device=device) + + # We can provide a self-attention mask of dimensions [batch_size, from_seq_length, to_seq_length] + # ourselves in which case we just need to make it broadcastable to all heads. + extended_attention_mask: torch.Tensor = self.get_extended_attention_mask( + attention_mask, input_shape, device, is_decoder) + + # If a 2D or 3D attention mask is provided for the cross-attention + # we need to make broadcastable to [batch_size, num_heads, seq_length, seq_length] + if encoder_hidden_states is not None: + if type(encoder_hidden_states) == list: + encoder_batch_size, encoder_sequence_length, _ = encoder_hidden_states[ + 0].size() + else: + ( + encoder_batch_size, + encoder_sequence_length, + _, + ) = encoder_hidden_states.size() + encoder_hidden_shape = (encoder_batch_size, + encoder_sequence_length) + + if type(encoder_attention_mask) == list: + encoder_extended_attention_mask = [ + self.invert_attention_mask(mask) + for mask in encoder_attention_mask + ] + elif encoder_attention_mask is None: + encoder_attention_mask = torch.ones( + encoder_hidden_shape, device=device) + encoder_extended_attention_mask = self.invert_attention_mask( + encoder_attention_mask) + else: + encoder_extended_attention_mask = self.invert_attention_mask( + encoder_attention_mask) + else: + encoder_extended_attention_mask = None + + # Prepare head mask if needed + # 1.0 in head_mask indicate we keep the head + # attention_probs has shape bsz x n_heads x N x N + # input head_mask has shape [num_heads] or [num_hidden_layers x num_heads] + # and head_mask is converted to shape [num_hidden_layers x batch x num_heads x seq_length x seq_length] + head_mask = self.get_head_mask(head_mask, + self.config.num_hidden_layers) + + if encoder_embeds is None: + embedding_output = self.embeddings( + input_ids=input_ids, + position_ids=position_ids, + token_type_ids=token_type_ids, + inputs_embeds=inputs_embeds, + past_key_values_length=past_key_values_length, + ) + else: + embedding_output = encoder_embeds + + encoder_outputs = self.encoder( + embedding_output, + attention_mask=extended_attention_mask, + head_mask=head_mask, + 
encoder_hidden_states=encoder_hidden_states, + encoder_attention_mask=encoder_extended_attention_mask, + past_key_values=past_key_values, + use_cache=use_cache, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + mode=mode, + ) + sequence_output = encoder_outputs[0] + pooled_output = ( + self.pooler(sequence_output) if self.pooler is not None else None) + + if not return_dict: + return (sequence_output, pooled_output) + encoder_outputs[1:] + + return BaseModelOutputWithPoolingAndCrossAttentions( + last_hidden_state=sequence_output, + pooler_output=pooled_output, + past_key_values=encoder_outputs.past_key_values, + hidden_states=encoder_outputs.hidden_states, + attentions=encoder_outputs.attentions, + cross_attentions=encoder_outputs.cross_attentions, + ) + + +class BaseEncoder(nn.Module): + """Base class for primitive encoders, such as ViT, TimeSformer, etc.""" + + def __init__(self): + super().__init__() + + def forward_features(self, samples, **kwargs): + raise NotImplementedError + + @property + def device(self): + return list(self.parameters())[0].device + + +@MODELS.register_module() +class XBertEncoder(BertModel, BaseEncoder): + + def __init__(self, med_config, from_pretrained=False): + + med_config = BertConfig.from_dict(med_config) + super().__init__(config=med_config, add_pooling_layer=False) + + def forward_automask(self, tokenized_text, visual_embeds, **kwargs): + image_atts = torch.ones( + visual_embeds.size()[:-1], dtype=torch.long).to(self.device) + + text = tokenized_text + text_output = super().forward( + text.input_ids, + attention_mask=text.attention_mask, + encoder_hidden_states=visual_embeds, + encoder_attention_mask=image_atts, + return_dict=True, + ) + + return text_output + + def forward_text(self, tokenized_text, **kwargs): + text = tokenized_text + token_type_ids = kwargs.get('token_type_ids', None) + + text_output = super().forward( + text.input_ids, + attention_mask=text.attention_mask, + token_type_ids=token_type_ids, + return_dict=True, + mode='text', + ) + + return text_output + + +@MODELS.register_module() +class Linear(torch.nn.Linear): + """Wrapper for linear function.""" + + +@MODELS.register_module() +class BertLMHeadModel(BertPreTrainedModel): + + _keys_to_ignore_on_load_unexpected = [r'pooler'] + _keys_to_ignore_on_load_missing = [ + r'position_ids', r'predictions.decoder.bias' + ] + + def __init__(self, config): + super().__init__(config) + + self.bert = BertModel(config, add_pooling_layer=False) + self.cls = BertOnlyMLMHead(config) + + self.init_weights() + + def get_output_embeddings(self): + return self.cls.predictions.decoder + + def set_output_embeddings(self, new_embeddings): + self.cls.predictions.decoder = new_embeddings + + def forward( + self, + input_ids=None, + attention_mask=None, + position_ids=None, + head_mask=None, + inputs_embeds=None, + encoder_hidden_states=None, + encoder_attention_mask=None, + labels=None, + past_key_values=None, + use_cache=None, + output_attentions=None, + output_hidden_states=None, + return_dict=None, + return_logits=False, + is_decoder=True, + reduction='mean', + mode='multimodal', + ): + r""" + encoder_hidden_states (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`): + Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention if + the model is configured as a decoder. 
+ encoder_attention_mask (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`): + Mask to avoid performing attention on the padding token indices of the encoder input. This mask is used in + the cross-attention if the model is configured as a decoder. Mask values selected in ``[0, 1]``: + - 1 for tokens that are **not masked**, + - 0 for tokens that are **masked**. + labels (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`): + Labels for computing the left-to-right language modeling loss (next word prediction). Indices should be in + ``[-100, 0, ..., config.vocab_size]`` (see ``input_ids`` docstring). Tokens with indices set to ``-100`` are + ignored (masked), the loss is only computed for the tokens with labels in ``[0, ..., config.vocab_size]`` + past_key_values (:obj:`tuple(tuple(torch.FloatTensor))` of length :obj:`config.n_layers` with each tuple having 4 tensors of shape :obj:`(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`): + Contains precomputed key and value hidden states of the attention blocks. Can be used to speed up decoding. + If :obj:`past_key_values` are used, the user can optionally input only the last :obj:`decoder_input_ids` + (those that don't have their past key value states given to this model) of shape :obj:`(batch_size, 1)` + instead of all :obj:`decoder_input_ids` of shape :obj:`(batch_size, sequence_length)`. + use_cache (:obj:`bool`, `optional`): + If set to :obj:`True`, :obj:`past_key_values` key value states are returned and can be used to speed up + decoding (see :obj:`past_key_values`). + Returns: + Example:: + >>> from transformers import BertTokenizer, BertLMHeadModel, BertConfig + >>> import torch + >>> tokenizer = BertTokenizer.from_pretrained('bert-base-cased') + >>> config = BertConfig.from_pretrained("bert-base-cased") + >>> model = BertLMHeadModel.from_pretrained('bert-base-cased', config=config) + >>> inputs = tokenizer("Hello, my dog is cute", return_tensors="pt") + >>> outputs = model(**inputs) + >>> prediction_logits = outputs.logits + """ + return_dict = ( + return_dict + if return_dict is not None else self.config.use_return_dict) + if labels is not None: + use_cache = False + + outputs = self.bert( + input_ids, + attention_mask=attention_mask, + position_ids=position_ids, + head_mask=head_mask, + inputs_embeds=inputs_embeds, + encoder_hidden_states=encoder_hidden_states, + encoder_attention_mask=encoder_attention_mask, + past_key_values=past_key_values, + use_cache=use_cache, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + is_decoder=is_decoder, + mode=mode, + ) + + sequence_output = outputs[0] + prediction_scores = self.cls(sequence_output) + + if return_logits: + return prediction_scores[:, :-1, :].contiguous() + + lm_loss = None + if labels is not None: + # we are doing next-token prediction; shift prediction scores and input ids by one + shifted_prediction_scores = prediction_scores[:, : + -1, :].contiguous() + labels = labels[:, 1:].contiguous() + loss_fct = torch.nn.CrossEntropyLoss( + reduction=reduction, label_smoothing=0.1) + lm_loss = loss_fct( + shifted_prediction_scores.view(-1, self.config.vocab_size), + labels.view(-1)) + if reduction == 'none': + lm_loss = lm_loss.view(prediction_scores.size(0), -1).sum(1) + + if not return_dict: + output = (prediction_scores, ) + outputs[2:] + return ((lm_loss, ) + output) if lm_loss is not None else output + + return
CausalLMOutputWithCrossAttentions( + loss=lm_loss, + logits=prediction_scores, + past_key_values=outputs.past_key_values, + hidden_states=outputs.hidden_states, + attentions=outputs.attentions, + cross_attentions=outputs.cross_attentions, + ) + + def prepare_inputs_for_generation(self, + input_ids, + past=None, + attention_mask=None, + **model_kwargs): + input_shape = input_ids.shape + # if model is used as a decoder in encoder-decoder model, + # the decoder attention mask is created on the fly + if attention_mask is None: + attention_mask = input_ids.new_ones(input_shape) + + # cut decoder_input_ids if past is used + if past is not None: + input_ids = input_ids[:, -1:] + + return { + 'input_ids': + input_ids, + 'attention_mask': + attention_mask, + 'past_key_values': + past, + 'encoder_hidden_states': + model_kwargs.get('encoder_hidden_states', None), + 'encoder_attention_mask': + model_kwargs.get('encoder_attention_mask', None), + 'is_decoder': + True, + } + + def _reorder_cache(self, past, beam_idx): + reordered_past = () + for layer_past in past: + reordered_past += (tuple( + past_state.index_select(0, beam_idx) + for past_state in layer_past), ) + return reordered_past + + +@MODELS.register_module() +class XBertLMHeadDecoder(BertLMHeadModel): + """This class decouples the decoder forward logic from the VL model. + + In this way, different VL models can share this decoder as long as they + feed encoder_embeds as required. + """ + + def __init__(self, med_config): + self.med_config = BertConfig.from_dict(med_config) + super(XBertLMHeadDecoder, self).__init__(config=self.med_config) + + def generate_from_encoder(self, + tokenized_prompt, + visual_embeds, + sep_token_id, + pad_token_id, + use_nucleus_sampling=False, + num_beams=3, + max_length=30, + min_length=10, + top_p=0.9, + repetition_penalty=1.0, + **kwargs): + + if not use_nucleus_sampling: + num_beams = num_beams + visual_embeds = visual_embeds.repeat_interleave(num_beams, dim=0) + + image_atts = torch.ones( + visual_embeds.size()[:-1], dtype=torch.long).to(self.device) + + model_kwargs = { + 'encoder_hidden_states': visual_embeds, + 'encoder_attention_mask': image_atts, + } + + if use_nucleus_sampling: + # nucleus sampling + outputs = self.generate( + input_ids=tokenized_prompt.input_ids, + max_length=max_length, + min_length=min_length, + do_sample=True, + top_p=top_p, + num_return_sequences=1, + eos_token_id=sep_token_id, + pad_token_id=pad_token_id, + repetition_penalty=1.1, + **model_kwargs) + else: + # beam search + outputs = self.generate( + input_ids=tokenized_prompt.input_ids, + max_length=max_length, + min_length=min_length, + num_beams=num_beams, + eos_token_id=sep_token_id, + pad_token_id=pad_token_id, + repetition_penalty=repetition_penalty, + **model_kwargs) + + return outputs diff --git a/mmpretrain/models/multimodal/blip2/Qformer.py b/mmpretrain/models/multimodal/blip2/Qformer.py new file mode 100644 index 0000000000000000000000000000000000000000..2b85f9ee66020fb86282a89840cc2556a5dec06f --- /dev/null +++ b/mmpretrain/models/multimodal/blip2/Qformer.py @@ -0,0 +1,772 @@ +# flake8: noqa +""" + * Copyright (c) 2023, salesforce.com, inc. 
+""" +from typing import Tuple + +import torch +import torch.utils.checkpoint +from torch import Tensor, device, nn +from torch.nn import CrossEntropyLoss +from transformers.activations import ACT2FN +from transformers.modeling_outputs import ( + BaseModelOutputWithPastAndCrossAttentions, + BaseModelOutputWithPoolingAndCrossAttentions, + CausalLMOutputWithCrossAttentions) +from transformers.modeling_utils import apply_chunking_to_forward +from transformers.models.bert.configuration_bert import BertConfig +from transformers.utils import logging + +from mmpretrain.registry import MODELS +from ..blip.language_model import (BertAttention, BertIntermediate, + BertOnlyMLMHead, BertOutput, BertPooler, + BertPreTrainedModel) + +logger = logging.get_logger(__name__) + + +class BertEmbeddings(nn.Module): + """Construct the embeddings from word and position embeddings.""" + + def __init__(self, config): + super().__init__() + self.word_embeddings = nn.Embedding( + config.vocab_size, + config.hidden_size, + padding_idx=config.pad_token_id) + self.position_embeddings = nn.Embedding(config.max_position_embeddings, + config.hidden_size) + + # self.LayerNorm is not snake-cased to stick with TensorFlow model variable name and be able to load + # any TensorFlow checkpoint file + self.LayerNorm = nn.LayerNorm( + config.hidden_size, eps=config.layer_norm_eps) + self.dropout = nn.Dropout(config.hidden_dropout_prob) + + # position_ids (1, len position emb) is contiguous in memory and exported when serialized + self.register_buffer( + 'position_ids', + torch.arange(config.max_position_embeddings).expand((1, -1))) + self.position_embedding_type = getattr(config, + 'position_embedding_type', + 'absolute') + + self.config = config + + def forward( + self, + input_ids=None, + position_ids=None, + query_embeds=None, + past_key_values_length=0, + ): + if input_ids is not None: + seq_length = input_ids.size()[1] + else: + seq_length = 0 + + if position_ids is None: + position_ids = self.position_ids[:, past_key_values_length: + seq_length + + past_key_values_length].clone() + + if input_ids is not None: + embeddings = self.word_embeddings(input_ids) + if self.position_embedding_type == 'absolute': + position_embeddings = self.position_embeddings(position_ids) + embeddings = embeddings + position_embeddings + + if query_embeds is not None: + embeddings = torch.cat((query_embeds, embeddings), dim=1) + else: + embeddings = query_embeds + + embeddings = self.LayerNorm(embeddings) + embeddings = self.dropout(embeddings) + return embeddings + + +class BertLayer(nn.Module): + + def __init__(self, config, layer_num): + super().__init__() + self.config = config + self.chunk_size_feed_forward = config.chunk_size_feed_forward + self.seq_len_dim = 1 + self.attention = BertAttention(config) + self.layer_num = layer_num + if (self.config.add_cross_attention + and layer_num % self.config.cross_attention_freq == 0): + self.crossattention = BertAttention( + config, is_cross_attention=self.config.add_cross_attention) + self.has_cross_attention = True + else: + self.has_cross_attention = False + self.intermediate = BertIntermediate(config) + self.output = BertOutput(config) + + self.intermediate_query = BertIntermediate(config) + self.output_query = BertOutput(config) + + def forward( + self, + hidden_states, + attention_mask=None, + head_mask=None, + encoder_hidden_states=None, + encoder_attention_mask=None, + past_key_value=None, + output_attentions=False, + query_length=0, + ): + # decoder uni-directional self-attention cached 
key/values tuple is at positions 1,2 + self_attn_past_key_value = ( + past_key_value[:2] if past_key_value is not None else None) + self_attention_outputs = self.attention( + hidden_states, + attention_mask, + head_mask, + output_attentions=output_attentions, + past_key_value=self_attn_past_key_value, + ) + attention_output = self_attention_outputs[0] + outputs = self_attention_outputs[1:-1] + + present_key_value = self_attention_outputs[-1] + + if query_length > 0: + query_attention_output = attention_output[:, :query_length, :] + + if self.has_cross_attention: + assert ( + encoder_hidden_states is not None + ), 'encoder_hidden_states must be given for cross-attention layers' + cross_attention_outputs = self.crossattention( + query_attention_output, + attention_mask, + head_mask, + encoder_hidden_states, + encoder_attention_mask, + output_attentions=output_attentions, + ) + query_attention_output = cross_attention_outputs[0] + outputs = ( + outputs + cross_attention_outputs[1:-1] + ) # add cross attentions if we output attention weights + + layer_output = apply_chunking_to_forward( + self.feed_forward_chunk_query, + self.chunk_size_feed_forward, + self.seq_len_dim, + query_attention_output, + ) + if attention_output.shape[1] > query_length: + layer_output_text = apply_chunking_to_forward( + self.feed_forward_chunk, + self.chunk_size_feed_forward, + self.seq_len_dim, + attention_output[:, query_length:, :], + ) + layer_output = torch.cat([layer_output, layer_output_text], + dim=1) + else: + layer_output = apply_chunking_to_forward( + self.feed_forward_chunk, + self.chunk_size_feed_forward, + self.seq_len_dim, + attention_output, + ) + outputs = (layer_output, ) + outputs + + outputs = outputs + (present_key_value, ) + + return outputs + + def feed_forward_chunk(self, attention_output): + intermediate_output = self.intermediate(attention_output) + layer_output = self.output(intermediate_output, attention_output) + return layer_output + + def feed_forward_chunk_query(self, attention_output): + intermediate_output = self.intermediate_query(attention_output) + layer_output = self.output_query(intermediate_output, attention_output) + return layer_output + + +class BertEncoder(nn.Module): + + def __init__(self, config): + super().__init__() + self.config = config + self.layer = nn.ModuleList( + [BertLayer(config, i) for i in range(config.num_hidden_layers)]) + + def forward( + self, + hidden_states, + attention_mask=None, + head_mask=None, + encoder_hidden_states=None, + encoder_attention_mask=None, + past_key_values=None, + use_cache=None, + output_attentions=False, + output_hidden_states=False, + return_dict=True, + query_length=0, + ): + all_hidden_states = () if output_hidden_states else None + all_self_attentions = () if output_attentions else None + all_cross_attentions = (() if output_attentions + and self.config.add_cross_attention else None) + + next_decoder_cache = () if use_cache else None + + for i in range(self.config.num_hidden_layers): + layer_module = self.layer[i] + if output_hidden_states: + all_hidden_states = all_hidden_states + (hidden_states, ) + + layer_head_mask = head_mask[i] if head_mask is not None else None + past_key_value = past_key_values[ + i] if past_key_values is not None else None + + if getattr(self.config, 'gradient_checkpointing', + False) and self.training: + + if use_cache: + logger.warn( + '`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`...' 
+ ) + use_cache = False + + def create_custom_forward(module): + + def custom_forward(*inputs): + return module(*inputs, past_key_value, + output_attentions, query_length) + + return custom_forward + + layer_outputs = torch.utils.checkpoint.checkpoint( + create_custom_forward(layer_module), + hidden_states, + attention_mask, + layer_head_mask, + encoder_hidden_states, + encoder_attention_mask, + ) + else: + layer_outputs = layer_module( + hidden_states, + attention_mask, + layer_head_mask, + encoder_hidden_states, + encoder_attention_mask, + past_key_value, + output_attentions, + query_length, + ) + + hidden_states = layer_outputs[0] + if use_cache: + next_decoder_cache += (layer_outputs[-1], ) + if output_attentions: + all_self_attentions = all_self_attentions + ( + layer_outputs[1], ) + all_cross_attentions = all_cross_attentions + ( + layer_outputs[2], ) + + if output_hidden_states: + all_hidden_states = all_hidden_states + (hidden_states, ) + + if not return_dict: + return tuple(v for v in [ + hidden_states, + next_decoder_cache, + all_hidden_states, + all_self_attentions, + all_cross_attentions, + ] if v is not None) + return BaseModelOutputWithPastAndCrossAttentions( + last_hidden_state=hidden_states, + past_key_values=next_decoder_cache, + hidden_states=all_hidden_states, + attentions=all_self_attentions, + cross_attentions=all_cross_attentions, + ) + + +class BertModel(BertPreTrainedModel): + """The model can behave as an encoder (with only self-attention) as well as + a decoder, in which case a layer of cross-attention is added between the + self-attention layers, following the architecture described in `Attention + is all you need <https://arxiv.org/abs/1706.03762>`__ by Ashish Vaswani, + Noam Shazeer, Niki Parmar, Jakob Uszkoreit, Llion Jones, Aidan N. Gomez, + Lukasz Kaiser and Illia Polosukhin. + + To behave as a decoder the model needs to be initialized with the + :obj:`is_decoder` argument of the configuration set to :obj:`True`; to be + used in a Seq2Seq model, it additionally needs the + :obj:`add_cross_attention` argument set to :obj:`True`; an + :obj:`encoder_hidden_states` is then expected as an input to the forward + pass. + """ + + def __init__(self, config, add_pooling_layer=False): + super().__init__(config) + self.config = config + + self.embeddings = BertEmbeddings(config) + + self.encoder = BertEncoder(config) + + self.pooler = BertPooler(config) if add_pooling_layer else None + + self.init_weights() + + def get_input_embeddings(self): + return self.embeddings.word_embeddings + + def set_input_embeddings(self, value): + self.embeddings.word_embeddings = value + + def _prune_heads(self, heads_to_prune): + """Prunes heads of the model. + + heads_to_prune: dict of {layer_num: list of heads to prune in this layer} See base + class PreTrainedModel + """ + for layer, heads in heads_to_prune.items(): + self.encoder.layer[layer].attention.prune_heads(heads) + + def get_extended_attention_mask( + self, + attention_mask: Tensor, + input_shape: Tuple[int], + device: device, + is_decoder: bool, + has_query: bool = False, + ) -> Tensor: + """Makes broadcastable attention and causal masks so that future and + masked tokens are ignored. + + Arguments: + attention_mask (:obj:`torch.Tensor`): + Mask with ones indicating tokens to attend to, zeros for tokens to ignore. + input_shape (:obj:`Tuple[int]`): + The shape of the input to the model. + device (:obj:`torch.device`): + The device of the input to the model. + + Returns: + :obj:`torch.Tensor` The extended attention mask, with the same dtype as :obj:`attention_mask.dtype`.
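+
+        Example (a minimal numeric sketch, not part of the original
+            docstring; it assumes the plain encoder path with
+            ``is_decoder=False``)::
+            >>> mask = torch.tensor([[1, 1, 0]])  # [batch_size, seq_length]
+            >>> ext = self.get_extended_attention_mask(
+            ...     mask, (1, 3), mask.device, is_decoder=False)
+            >>> ext.shape  # broadcastable to [batch, heads, seq, seq]
+            torch.Size([1, 1, 1, 3])
+            >>> # the two kept positions become 0.0 and the padded one
+            >>> # -10000.0, so adding ``ext`` to the raw attention scores
+            >>> # effectively removes the padding token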
+ """ + # We can provide a self-attention mask of dimensions [batch_size, from_seq_length, to_seq_length] + # ourselves in which case we just need to make it broadcastable to all heads. + if attention_mask.dim() == 3: + extended_attention_mask = attention_mask[:, None, :, :] + elif attention_mask.dim() == 2: + # Provided a padding mask of dimensions [batch_size, seq_length] + # - if the model is a decoder, apply a causal mask in addition to the padding mask + # - if the model is an encoder, make the mask broadcastable to [batch_size, num_heads, seq_length, seq_length] + if is_decoder: + batch_size, seq_length = input_shape + + seq_ids = torch.arange(seq_length, device=device) + causal_mask = ( + seq_ids[None, None, :].repeat(batch_size, seq_length, 1) <= + seq_ids[None, :, None]) + + # add a prefix ones mask to the causal mask + # causal and attention masks must have same type with pytorch version < 1.3 + causal_mask = causal_mask.to(attention_mask.dtype) + + if causal_mask.shape[1] < attention_mask.shape[1]: + prefix_seq_len = attention_mask.shape[ + 1] - causal_mask.shape[1] + if has_query: # UniLM style attention mask + causal_mask = torch.cat( + [ + torch.zeros( + (batch_size, prefix_seq_len, seq_length), + device=device, + dtype=causal_mask.dtype, + ), + causal_mask, + ], + axis=1, + ) + causal_mask = torch.cat( + [ + torch.ones( + (batch_size, causal_mask.shape[1], + prefix_seq_len), + device=device, + dtype=causal_mask.dtype, + ), + causal_mask, + ], + axis=-1, + ) + extended_attention_mask = ( + causal_mask[:, None, :, :] * + attention_mask[:, None, None, :]) + else: + extended_attention_mask = attention_mask[:, None, None, :] + else: + raise ValueError( + 'Wrong shape for input_ids (shape {}) or attention_mask (shape {})' + .format(input_shape, attention_mask.shape)) + + # Since attention_mask is 1.0 for positions we want to attend and 0.0 for + # masked positions, this operation will create a tensor which is 0.0 for + # positions we want to attend and -10000.0 for masked positions. + # Since we are adding it to the raw scores before the softmax, this is + # effectively the same as removing these entirely. + extended_attention_mask = extended_attention_mask.to( + dtype=self.dtype) # fp16 compatibility + extended_attention_mask = (1.0 - extended_attention_mask) * -10000.0 + return extended_attention_mask + + def forward( + self, + input_ids=None, + attention_mask=None, + position_ids=None, + head_mask=None, + query_embeds=None, + encoder_hidden_states=None, + encoder_attention_mask=None, + past_key_values=None, + use_cache=None, + output_attentions=None, + output_hidden_states=None, + return_dict=None, + is_decoder=False, + ): + r""" + encoder_hidden_states (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`): + Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention if + the model is configured as a decoder. + encoder_attention_mask (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`): + Mask to avoid performing attention on the padding token indices of the encoder input. This mask is used in + the cross-attention if the model is configured as a decoder. Mask values selected in ``[0, 1]``: + - 1 for tokens that are **not masked**, + - 0 for tokens that are **masked**. 
+ past_key_values (:obj:`tuple(tuple(torch.FloatTensor))` of length :obj:`config.n_layers` with each tuple having 4 tensors of shape :obj:`(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`): + Contains precomputed key and value hidden states of the attention blocks. Can be used to speed up decoding. + If :obj:`past_key_values` are used, the user can optionally input only the last :obj:`decoder_input_ids` + (those that don't have their past key value states given to this model) of shape :obj:`(batch_size, 1)` + instead of all :obj:`decoder_input_ids` of shape :obj:`(batch_size, sequence_length)`. + use_cache (:obj:`bool`, `optional`): + If set to :obj:`True`, :obj:`past_key_values` key value states are returned and can be used to speed up + decoding (see :obj:`past_key_values`). + """ + output_attentions = ( + output_attentions if output_attentions is not None else + self.config.output_attentions) + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else + self.config.output_hidden_states) + return_dict = ( + return_dict + if return_dict is not None else self.config.use_return_dict) + + # use_cache = use_cache if use_cache is not None else self.config.use_cache + if input_ids is None: + assert ( + query_embeds is not None + ), 'You have to specify query_embeds when input_ids is None' + + # past_key_values_length + past_key_values_length = ( + past_key_values[0][0].shape[2] - + self.config.query_length if past_key_values is not None else 0) + + query_length = query_embeds.shape[1] if query_embeds is not None else 0 + + embedding_output = self.embeddings( + input_ids=input_ids, + position_ids=position_ids, + query_embeds=query_embeds, + past_key_values_length=past_key_values_length, + ) + + input_shape = embedding_output.size()[:-1] + batch_size, seq_length = input_shape + device = embedding_output.device + + if attention_mask is None: + attention_mask = torch.ones( + ((batch_size, seq_length + past_key_values_length)), + device=device) + + # We can provide a self-attention mask of dimensions [batch_size, from_seq_length, to_seq_length] + # ourselves in which case we just need to make it broadcastable to all heads. 
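+        # Two cases below: on the decoder path a causal mask (optionally
+        # prefixed with ones for the query tokens, UniLM style) is built from
+        # ``input_ids``, while on the encoder path the padding mask is only
+        # broadcast across heads and query positions.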
+ if is_decoder: + extended_attention_mask = self.get_extended_attention_mask( + attention_mask, + input_ids.shape, + device, + is_decoder, + has_query=(query_embeds is not None), + ) + else: + extended_attention_mask = self.get_extended_attention_mask( + attention_mask, input_shape, device, is_decoder) + + # If a 2D or 3D attention mask is provided for the cross-attention + # we need to make broadcastable to [batch_size, num_heads, seq_length, seq_length] + if encoder_hidden_states is not None: + if type(encoder_hidden_states) == list: + encoder_batch_size, encoder_sequence_length, _ = encoder_hidden_states[ + 0].size() + else: + ( + encoder_batch_size, + encoder_sequence_length, + _, + ) = encoder_hidden_states.size() + encoder_hidden_shape = (encoder_batch_size, + encoder_sequence_length) + + if type(encoder_attention_mask) == list: + encoder_extended_attention_mask = [ + self.invert_attention_mask(mask) + for mask in encoder_attention_mask + ] + elif encoder_attention_mask is None: + encoder_attention_mask = torch.ones( + encoder_hidden_shape, device=device) + encoder_extended_attention_mask = self.invert_attention_mask( + encoder_attention_mask) + else: + encoder_extended_attention_mask = self.invert_attention_mask( + encoder_attention_mask) + else: + encoder_extended_attention_mask = None + + # Prepare head mask if needed + # 1.0 in head_mask indicate we keep the head + # attention_probs has shape bsz x n_heads x N x N + # input head_mask has shape [num_heads] or [num_hidden_layers x num_heads] + # and head_mask is converted to shape [num_hidden_layers x batch x num_heads x seq_length x seq_length] + head_mask = self.get_head_mask(head_mask, + self.config.num_hidden_layers) + + encoder_outputs = self.encoder( + embedding_output, + attention_mask=extended_attention_mask, + head_mask=head_mask, + encoder_hidden_states=encoder_hidden_states, + encoder_attention_mask=encoder_extended_attention_mask, + past_key_values=past_key_values, + use_cache=use_cache, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + query_length=query_length, + ) + sequence_output = encoder_outputs[0] + pooled_output = ( + self.pooler(sequence_output) if self.pooler is not None else None) + + if not return_dict: + return (sequence_output, pooled_output) + encoder_outputs[1:] + + return BaseModelOutputWithPoolingAndCrossAttentions( + last_hidden_state=sequence_output, + pooler_output=pooled_output, + past_key_values=encoder_outputs.past_key_values, + hidden_states=encoder_outputs.hidden_states, + attentions=encoder_outputs.attentions, + cross_attentions=encoder_outputs.cross_attentions, + ) + + +class BertLMHeadModel(BertPreTrainedModel): + + _keys_to_ignore_on_load_unexpected = [r'pooler'] + _keys_to_ignore_on_load_missing = [ + r'position_ids', r'predictions.decoder.bias' + ] + + def __init__(self, config): + super().__init__(config) + + self.bert = BertModel(config, add_pooling_layer=False) + self.cls = BertOnlyMLMHead(config) + + self.init_weights() + + def get_output_embeddings(self): + return self.cls.predictions.decoder + + def set_output_embeddings(self, new_embeddings): + self.cls.predictions.decoder = new_embeddings + + def forward( + self, + input_ids=None, + attention_mask=None, + position_ids=None, + head_mask=None, + query_embeds=None, + encoder_hidden_states=None, + encoder_attention_mask=None, + labels=None, + past_key_values=None, + use_cache=True, + output_attentions=None, + output_hidden_states=None, + return_dict=None, + 
return_logits=False, + is_decoder=True, + reduction='mean', + ): + r""" + encoder_hidden_states (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`): + Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention if + the model is configured as a decoder. + encoder_attention_mask (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`): + Mask to avoid performing attention on the padding token indices of the encoder input. This mask is used in + the cross-attention if the model is configured as a decoder. Mask values selected in ``[0, 1]``: + - 1 for tokens that are **not masked**, + - 0 for tokens that are **masked**. + labels (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`): + Labels for computing the left-to-right language modeling loss (next word prediction). Indices should be in + ``[-100, 0, ..., config.vocab_size]`` (see ``input_ids`` docstring). Tokens with indices set to ``-100`` are + ignored (masked), the loss is only computed for the tokens with labels in ``[0, ..., config.vocab_size]`` + past_key_values (:obj:`tuple(tuple(torch.FloatTensor))` of length :obj:`config.n_layers` with each tuple having 4 + tensors of shape :obj:`(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`): + Contains precomputed key and value hidden states of the attention blocks. Can be used to speed up decoding. + If :obj:`past_key_values` are used, the user can optionally input only the last :obj:`decoder_input_ids` + (those that don't have their past key value states given to this model) of shape :obj:`(batch_size, 1)` + instead of all :obj:`decoder_input_ids` of shape :obj:`(batch_size, sequence_length)`. + use_cache (:obj:`bool`, `optional`): + If set to :obj:`True`, :obj:`past_key_values` key value states are returned and can be used to speed up + decoding (see :obj:`past_key_values`).
+ Returns: + Example:: + >>> from transformers import BertTokenizer, BertLMHeadModel, BertConfig + >>> import torch + >>> tokenizer = BertTokenizer.from_pretrained('bert-base-cased') + >>> config = BertConfig.from_pretrained("bert-base-cased") + >>> model = BertLMHeadModel.from_pretrained('bert-base-cased', config=config) + >>> inputs = tokenizer("Hello, my dog is cute", return_tensors="pt") + >>> outputs = model(**inputs) + >>> prediction_logits = outputs.logits + """ + return_dict = ( + return_dict + if return_dict is not None else self.config.use_return_dict) + if labels is not None: + use_cache = False + if past_key_values is not None: + query_embeds = None + + outputs = self.bert( + input_ids, + attention_mask=attention_mask, + position_ids=position_ids, + head_mask=head_mask, + query_embeds=query_embeds, + encoder_hidden_states=encoder_hidden_states, + encoder_attention_mask=encoder_attention_mask, + past_key_values=past_key_values, + use_cache=use_cache, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + is_decoder=is_decoder, + ) + + sequence_output = outputs[0] + if query_embeds is not None: + sequence_output = outputs[0][:, query_embeds.shape[1]:, :] + prediction_scores = self.cls(sequence_output) + + if return_logits: + return prediction_scores[:, :-1, :].contiguous() + + lm_loss = None + if labels is not None: + # we are doing next-token prediction; shift prediction scores and input ids by one + shifted_prediction_scores = prediction_scores[:, : + -1, :].contiguous() + labels = labels[:, 1:].contiguous() + loss_fct = CrossEntropyLoss( + reduction=reduction, label_smoothing=0.1) + lm_loss = loss_fct( + shifted_prediction_scores.view(-1, self.config.vocab_size), + labels.view(-1), + ) + if reduction == 'none': + lm_loss = lm_loss.view(prediction_scores.size(0), -1).sum(1) + + if not return_dict: + output = (prediction_scores, ) + outputs[2:] + return ((lm_loss, ) + output) if lm_loss is not None else output + + return CausalLMOutputWithCrossAttentions( + loss=lm_loss, + logits=prediction_scores, + past_key_values=outputs.past_key_values, + hidden_states=outputs.hidden_states, + attentions=outputs.attentions, + cross_attentions=outputs.cross_attentions, + ) + + def prepare_inputs_for_generation(self, + input_ids, + query_embeds, + past=None, + attention_mask=None, + **model_kwargs): + # if model is used as a decoder in encoder-decoder model, the decoder attention mask is created on the fly + if attention_mask is None: + attention_mask = input_ids.new_ones(input_ids.shape) + query_mask = input_ids.new_ones(query_embeds.shape[:-1]) + attention_mask = torch.cat([query_mask, attention_mask], dim=-1) + + # cut decoder_input_ids if past is used + if past is not None: + input_ids = input_ids[:, -1:] + + return { + 'input_ids': + input_ids, + 'query_embeds': + query_embeds, + 'attention_mask': + attention_mask, + 'past_key_values': + past, + 'encoder_hidden_states': + model_kwargs.get('encoder_hidden_states', None), + 'encoder_attention_mask': + model_kwargs.get('encoder_attention_mask', None), + 'is_decoder': + True, + } + + def _reorder_cache(self, past, beam_idx): + reordered_past = () + for layer_past in past: + reordered_past += (tuple( + past_state.index_select(0, beam_idx) + for past_state in layer_past), ) + return reordered_past + + +@MODELS.register_module() +class Qformer(BertLMHeadModel): + + def __init__(self, model_style: str, vision_model_width: int, + add_cross_attention: bool, cross_attention_freq: int, + 
num_query_token: int) -> None: + + config = BertConfig.from_pretrained(model_style) + config.add_cross_attention = add_cross_attention + config.encoder_width = vision_model_width + config.cross_attention_freq = cross_attention_freq + config.query_length = num_query_token + super().__init__(config) diff --git a/mmpretrain/models/multimodal/blip2/__init__.py b/mmpretrain/models/multimodal/blip2/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b5695f236caf74493fc6e851edbf2a4a05146b5f --- /dev/null +++ b/mmpretrain/models/multimodal/blip2/__init__.py @@ -0,0 +1,10 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from .blip2_caption import Blip2Caption +from .blip2_opt_vqa import Blip2VQA +from .blip2_retriever import Blip2Retrieval +from .modeling_opt import OPTForCausalLM +from .Qformer import Qformer + +__all__ = [ + 'Blip2Caption', 'Blip2Retrieval', 'Blip2VQA', 'OPTForCausalLM', 'Qformer' +] diff --git a/mmpretrain/models/multimodal/blip2/blip2_caption.py b/mmpretrain/models/multimodal/blip2/blip2_caption.py new file mode 100644 index 0000000000000000000000000000000000000000..7b409b07acbb84c7e3f15d49ca7a3636beee6004 --- /dev/null +++ b/mmpretrain/models/multimodal/blip2/blip2_caption.py @@ -0,0 +1,217 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import List, Optional + +import torch +from mmengine.model import BaseModel +from torch import nn + +from mmpretrain.registry import MODELS, TOKENIZER +from mmpretrain.structures import DataSample + + +@MODELS.register_module() +class Blip2Caption(BaseModel): + """BLIP2 Caption. + + Module for BLIP2 Caption task. + + Args: + vision_backbone (dict): The config dict for vision backbone. + text_backbone (dict): The config dict for text backbone. + multimodal_backbone (dict): The config dict for multimodal backbone. + vision_neck (dict): The config dict for vision neck. + tokenizer: (Optional[dict]): The config for tokenizer. + Defaults to None. + prompt (str): Prompt used for training and eval. + Defaults to ''. + max_txt_len (int): Max text length of input text. + num_captions (int): Number of captions to be generated for each image. + data_preprocessor (Optional[dict]): The config for preprocessing input + data. If None or no specified type, it will use + "MultiModalDataPreprocessor" as type. + See :class:`MultiModalDataPreprocessor` for more details. + Defaults to None. + init_cfg (Optional[dict]): the config to control the initialization. + Defaults to None. 
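+
+    Examples (an illustrative sketch; ``cfg`` is assumed to be a complete
+        Blip2Caption config and ``images`` a preprocessed image batch, both
+        built elsewhere):
+        >>> from mmpretrain.registry import MODELS
+        >>> model = MODELS.build(cfg)
+        >>> data_samples = model(images, mode='predict')
+        >>> data_samples[0].pred_caption  # the generated caption string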
+ """ + _no_split_modules = ['BEiTViT', 'OPTDecoderLayer', 'BertLayer'] + + def __init__(self, + vision_backbone: dict, + text_backbone: dict, + multimodal_backbone: dict, + vision_neck: dict, + tokenizer: Optional[dict] = None, + prompt: str = '', + max_txt_len: int = 20, + num_captions: int = 1, + data_preprocessor: Optional[dict] = None, + init_cfg: Optional[dict] = None) -> None: + if data_preprocessor is None: + data_preprocessor = {} + if isinstance(data_preprocessor, dict): + data_preprocessor.setdefault('type', 'MultiModalDataPreprocessor') + data_preprocessor = MODELS.build(data_preprocessor) + + super().__init__( + init_cfg=init_cfg, data_preprocessor=data_preprocessor) + + self.tokenizer = TOKENIZER.build(tokenizer) + self.eos_token_id = self.tokenizer( + '\n', add_special_tokens=False).input_ids[0] + + self.vision_backbone = MODELS.build(vision_backbone) + self.ln_vision_backbone = nn.LayerNorm(self.vision_backbone.embed_dims) + + self.vision_neck = MODELS.build(vision_neck) + + self.text_backbone = MODELS.build(text_backbone) + + self.multimodal_backbone = MODELS.build(multimodal_backbone) + self.multimodal_backbone.cls = None + self.multimodal_backbone.bert.embeddings.word_embeddings = None + self.multimodal_backbone.bert.embeddings.position_embeddings = None + for layer in self.multimodal_backbone.bert.encoder.layer: + layer.output = None + layer.intermediate = None + + self.prompt = prompt + self.max_txt_len = max_txt_len + self.num_captions = num_captions + prompt_tokens = self.tokenizer(prompt, return_tensors='pt') + self.prompt_length = prompt_tokens.attention_mask.sum(1) + + self.query_tokens = nn.Parameter( + torch.zeros(1, self.multimodal_backbone.bert.config.query_length, + self.multimodal_backbone.bert.config.hidden_size)) + self.query_tokens.data.normal_( + mean=0.0, + std=self.multimodal_backbone.bert.config.initializer_range) + + # freeze the text backbone + for _, param in self.text_backbone.named_parameters(): + param.requires_grad = False + + if hasattr(self, 'register_load_state_dict_post_hook'): + self.register_load_state_dict_post_hook(self._ignore_llm_keys_hook) + + def forward( + self, + images: torch.Tensor, + data_samples: Optional[List] = None, + mode: str = 'loss', + ) -> List[DataSample]: + """The unified entry for a forward process in both training and test. + The method should accept two modes: "predict" and "loss": + + - "predict": Forward and return the predictions, which are fully + processed to a list of :obj:`DataSample`. + - "loss": Forward and return a dict of losses according to the given + inputs and data samples. + + Note that this method doesn't handle neither back propagation nor + optimizer updating, which are done in the :meth:`train_step`. + + Args: + images (torch.Tensor): pre_processed img tensor (N, C, ...). + data_samples (List[DataSample], optional): + mode (str): Return what kind of value. Defaults to 'loss'. + + Returns: + The return type depends on ``mode``. + - If ``mode="loss"``, return a dict of tensor. + """ + if mode == 'loss': + return self.loss(images, data_samples) + elif mode == 'predict': + return self.predict(images, data_samples) + else: + raise RuntimeError(f'Invalid mode "{mode}".') + + def predict(self, + images: torch.Tensor, + data_samples: Optional[list] = None, + **kwargs) -> List[DataSample]: + """Predict captions from a batch of inputs. + + Args: + images (torch.Tensor): The input tensor with shape + (N, C, ...) in general. 
+ data_samples (List[DataSample], optional): The annotation + data of every samples. Defaults to None. + **kwargs: Other keyword arguments accepted by the ``predict`` + method of :attr:`head`. + + Returns: + List[DataSample]: Return list of data samples. + """ + + # extract image features from + image_embeds = self.ln_vision_backbone(self.vision_backbone(images)[0]) + image_atts = torch.ones( + image_embeds.size()[:-1], + dtype=torch.long, + ).to(images.device) + + # distill image features to query tokens + query_tokens = self.query_tokens.expand(image_embeds.size(0), -1, -1) + query_outputs = self.multimodal_backbone.bert( + query_embeds=query_tokens, + encoder_hidden_states=image_embeds, + encoder_attention_mask=image_atts, + return_dict=True, + ) + inputs_opt = self.vision_neck([query_outputs.last_hidden_state]) + attns_opt = torch.ones( + inputs_opt.size()[:-1], dtype=torch.long).to(images.device) + + prompt = [self.prompt] * image_embeds.size(0) + + opt_tokens = self.tokenizer( + prompt, return_tensors='pt').to(images.device) + input_ids = opt_tokens.input_ids + attention_mask = torch.cat([attns_opt, opt_tokens.attention_mask], + dim=1) + + query_embeds = inputs_opt + + outputs = self.text_backbone.generate( + input_ids=input_ids, + query_embeds=query_embeds, + attention_mask=attention_mask, + do_sample=False, + top_p=0.9, + temperature=1., + num_beams=5, + max_new_tokens=self.max_txt_len, + min_length=1, + eos_token_id=self.eos_token_id, + repetition_penalty=1.0, + length_penalty=1.0, + num_return_sequences=self.num_captions, + ) + + output_text = self.tokenizer.batch_decode( + outputs[:, self.prompt_length:], skip_special_tokens=True) + output_text = [text.strip() for text in output_text] + + out_data_samples = [] + if data_samples is None: + data_samples = [None for _ in range(len(output_text))] + + for data_sample, decode_token in zip(data_samples, output_text): + if data_sample is None: + data_sample = DataSample() + data_sample.pred_caption = decode_token + out_data_samples.append(data_sample) + + return out_data_samples + + @staticmethod + def _ignore_llm_keys_hook(module, incompatible_keys): + """Avoid warning missing keys of the LLM model.""" + import re + llm_pattern = '^text_backbone' + for key in list(incompatible_keys.missing_keys): + if re.match(llm_pattern, key): + incompatible_keys.missing_keys.remove(key) diff --git a/mmpretrain/models/multimodal/blip2/blip2_opt_vqa.py b/mmpretrain/models/multimodal/blip2/blip2_opt_vqa.py new file mode 100644 index 0000000000000000000000000000000000000000..20e439fa826725a80462557faab8ae25a8e5660e --- /dev/null +++ b/mmpretrain/models/multimodal/blip2/blip2_opt_vqa.py @@ -0,0 +1,92 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import List, Optional + +import torch + +from mmpretrain.registry import MODELS +from mmpretrain.structures import DataSample +from .blip2_caption import Blip2Caption + + +@MODELS.register_module() +class Blip2VQA(Blip2Caption): + """BLIP2 VQA. + + Module for BLIP2 VQA task. For more details about the initialization + params, please refer to :class:`Blip2Caption`. + """ + + def predict(self, + images: torch.Tensor, + data_samples: Optional[list] = None, + **kwargs) -> List[DataSample]: + """Predict captions from a batch of inputs. + + Args: + images (torch.Tensor): The input tensor with shape + (N, C, ...) in general. + data_samples (List[DataSample], optional): The annotation + data of every samples. Defaults to None. 
+ **kwargs: Other keyword arguments accepted by the ``predict`` + method of :attr:`head`. + + Returns: + List[DataSample]: Return list of data samples. + """ + questions = [d.question for d in data_samples] + + # extract image features from + image_embeds = self.ln_vision_backbone(self.vision_backbone(images)[0]) + image_atts = torch.ones( + image_embeds.size()[:-1], + dtype=torch.long, + ).to(images.device) + + # distill image features to query tokens + query_tokens = self.query_tokens.expand(image_embeds.size(0), -1, -1) + query_outputs = self.multimodal_backbone.bert( + query_embeds=query_tokens, + encoder_hidden_states=image_embeds, + encoder_attention_mask=image_atts, + return_dict=True, + ) + inputs_opt = self.vision_neck([query_outputs.last_hidden_state]) + attns_opt = torch.ones( + inputs_opt.size()[:-1], dtype=torch.long).to(images.device) + + prompt = [self.prompt.format(q) for q in questions] + + # use left padding + self.tokenizer.padding_side = 'left' + + opt_tokens = self.tokenizer( + prompt, return_tensors='pt', padding='longest').to(images.device) + input_ids = opt_tokens.input_ids + attention_mask = torch.cat([attns_opt, opt_tokens.attention_mask], + dim=1) + + inputs_embeds = self.text_backbone.model.decoder.embed_tokens( + input_ids) + inputs_embeds = torch.cat([inputs_opt, inputs_embeds], dim=1) + + outputs = self.text_backbone.generate( + inputs_embeds=inputs_embeds, + attention_mask=attention_mask, + do_sample=False, + num_beams=5, + max_new_tokens=self.max_txt_len, + min_length=1, + eos_token_id=self.eos_token_id, + length_penalty=-1.0, + ) + + output_text = self.tokenizer.batch_decode( + outputs, skip_special_tokens=True) + output_text = [text.strip() for text in output_text] + + out_data_samples = [] + for data_sample, decode_token in zip(data_samples, output_text): + data_sample.pred_answer = decode_token + out_data_samples.append(data_sample) + + return out_data_samples diff --git a/mmpretrain/models/multimodal/blip2/blip2_retriever.py b/mmpretrain/models/multimodal/blip2/blip2_retriever.py new file mode 100644 index 0000000000000000000000000000000000000000..e626404a4cde5798151a0fa9589716470ed928a9 --- /dev/null +++ b/mmpretrain/models/multimodal/blip2/blip2_retriever.py @@ -0,0 +1,505 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Dict, List, Optional, Tuple, Union + +import mmengine.dist as dist +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmengine.utils import track_iter_progress + +from mmpretrain.registry import MODELS, TOKENIZER +from mmpretrain.structures import DataSample +from ..blip.blip_retrieval import BlipRetrieval, all_gather_concat + + +@MODELS.register_module() +class Blip2Retrieval(BlipRetrieval): + """BLIP2 Retriever. + + Args: + vision_backbone (dict): Backbone for extracting image features. + text_backbone (dict): Backbone for extracting text features. + multimodal_backbone (Optional[dict]): Backbone for extracting + multi-modal features. + vision_neck (Optional[dict]): The neck module to process image features + from vision backbone. Defaults to None. + text_neck (Optional[dict]): The neck module to process text features + from text backbone. Defaults to None. + head (Optional[Union[List[dict], dict]]): The head module to calculate + loss from processed single modality features. + See :mod:`mmmultimodal.models.heads`. + Notice that if the head is not set, `loss` method cannot be used. + Defaults to None. 
+ multimodal_head (Optional[Union[List[dict], dict]]): The multi-modal + head module to calculate loss from processed multimodal features. + See :mod:`mmmultimodal.models.heads`. + Notice that if the head is not set, `loss` method cannot be used. + Defaults to None. + tokenizer (Optional[dict]): The config for tokenizer. Defaults to None. + temperature (float): Temperature parameter that controls the + concentration level of the distribution. Defaults to 0.07. + fast_match (bool): If False, select topk similarity as candidates and + compute the matching score. If True, return the similarity as the + matching score directly. Defaults to False. + topk (int): Select topk similarity as candidates for compute matching + scores. Notice that this is not the topk in evaluation. + Defaults to 256. + data_preprocessor (Optional[dict]): The config for preprocessing input + data. If None or no specified type, it will use + "MultiModalDataPreprocessor" as type. + See :class:`MultiModalDataPreprocessor` for more details. + Defaults to None. + init_cfg (Optional[dict]): the config to control the initialization. + Defaults to None. + """ + + def __init__(self, + vision_backbone: dict, + text_backbone: Optional[dict] = None, + multimodal_backbone: Optional[dict] = None, + vision_neck: Optional[dict] = None, + text_neck: Optional[dict] = None, + head: Optional[Union[List[dict], dict]] = None, + multimodal_head: Optional[Union[List[dict], dict]] = None, + tokenizer: Optional[dict] = None, + temperature: float = 0.07, + fast_match: bool = False, + topk: int = 256, + data_preprocessor: Optional[dict] = None, + init_cfg: Optional[dict] = None) -> None: + if data_preprocessor is None: + data_preprocessor = {} + if isinstance(data_preprocessor, dict): + data_preprocessor.setdefault('type', 'MultiModalDataPreprocessor') + data_preprocessor = MODELS.build(data_preprocessor) + + # Skip BlipRetrieval init + super(BlipRetrieval, self).__init__( + init_cfg=init_cfg, data_preprocessor=data_preprocessor) + + self.vision_backbone = MODELS.build(vision_backbone) + self.ln_vision_backbone = nn.LayerNorm(self.vision_backbone.embed_dims) + self.tokenizer = TOKENIZER.build(tokenizer) + + if text_backbone is not None: + self.text_backbone = MODELS.build(text_backbone) + + if multimodal_backbone is not None: + self.multimodal_backbone = MODELS.build(multimodal_backbone) + self.multimodal_backbone.resize_token_embeddings( + len(self.tokenizer)) + self.query_tokens = nn.Parameter( + torch.zeros(1, self.multimodal_backbone.bert.config.query_length, + self.multimodal_backbone.bert.config.hidden_size)) + self.query_tokens.data.normal_( + mean=0.0, + std=self.multimodal_backbone.bert.config.initializer_range) + + if vision_neck is not None: + self.vision_neck = MODELS.build(vision_neck) + + if text_neck is not None: + self.text_neck = MODELS.build(text_neck) + + if head is not None: + self.head = MODELS.build(head) + + if multimodal_head is not None: + self.multimodal_head = MODELS.build(multimodal_head) + + self.temp = nn.Parameter(temperature * torch.ones([])) + + # Notice that this topk is used for select k candidate to compute + # image-text score, but not the final metric topk in evaluation. + self.fast_match = fast_match + self.topk = topk + + def _extract_feat(self, inputs: Union[torch.Tensor, dict], + modality: str) -> Tuple[torch.Tensor]: + """Extract features from the single modality. + Args: + inputs (Union[torch.Tensor, dict]): A batch of inputs. + For image, a tensor of shape (N, C, ...) in general. 
+ For text, a dict of tokenized text inputs. + modality (str): Modality feature to be extracted. Only two + options are supported. + + - ``images``: Only extract image features, mostly used for + inference. + - ``texts``: Only extract text features, mostly used for + inference. + Returns: + Tuple[torch.Tensor]: The output features. + """ + if modality == 'images': + # extract image features + # TODO: + # Add layernorm inside backbone and handle the concat outside + image_embeds = self.ln_vision_backbone( + self.vision_backbone(inputs)[0]) + image_atts = torch.ones( + image_embeds.size()[:-1], dtype=torch.long).to(self.device) + + query_tokens = self.query_tokens.expand(image_embeds.shape[0], -1, + -1) + query_output = self.multimodal_backbone.bert( + query_embeds=query_tokens, + encoder_hidden_states=image_embeds, + encoder_attention_mask=image_atts, + use_cache=True, + return_dict=True, + ) + image_feat = F.normalize( + self.vision_neck([query_output.last_hidden_state]), dim=-1) + return { + 'image_embeds': image_embeds, + 'image_feat': image_feat, + 'query_output': query_output + } + elif modality == 'texts': + # extract text features + text_output = self.multimodal_backbone.bert( + inputs.input_ids, + attention_mask=inputs.attention_mask, + return_dict=True, + ) + text_embeds = text_output.last_hidden_state + text_feat = F.normalize( + self.text_neck([text_embeds[:, 0, :]]), dim=-1) + return {'text_embeds': text_embeds, 'text_feat': text_feat} + else: + raise RuntimeError(f'Invalid modality "{modality}".') + + def loss( + self, + images: torch.Tensor, + data_samples: Optional[List[DataSample]] = None, + ) -> Dict[str, torch.tensor]: + """Calculate losses from a batch of inputs and data samples. + + Args: + inputs (dict): A batch of inputs. The input tensor with of + at least one modality. For image, the value is a tensor + of shape (N, C, ...) in general. + For text, the value is a dict of tokenized text inputs. + data_samples (Optional[List[DataSample]]): + The annotation data of every samples. Defaults to None. + + Returns: + Dict[str, torch.tensor]: a dictionary of loss components of + both head and multimodal head. 
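+            The dict gathers the three objectives computed below: the
+            image-text contrastive loss (``itc_loss``), the image-text
+            matching loss produced by ``multimodal_head`` and the
+            language-modeling loss (``lm_loss``).
+
+        Example of the contrastive targets built in this method (a numeric
+            sketch, not part of the original docstring; it assumes
+            ``rank == 1`` and a local batch size of 4, so the positive pair
+            of the i-th local sample sits at global index ``rank * bs + i``)::
+            >>> torch.linspace(1 * 4, 1 * 4 + 4 - 1, 4, dtype=int)
+            tensor([4, 5, 6, 7])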
+ """ + output = self.extract_feat(images, data_samples) + + text_ids = output['text_ids'] + text_attn_mask = output['text_attn_mask'] + image_embeds = output['image_embeds'] + image_feat = output['image_feat'] + text_feat = output['text_feat'] + query_output = output['query_output'] + + # ITC Loss + # B*world_size, num_query, D + image_feat_all = torch.cat(dist.all_gather(image_feat)) + # B*world_size, D + text_feat_all = torch.cat(dist.all_gather(text_feat)) + + # B, B*world_size, num_query + sim_q2t = torch.matmul( + image_feat.unsqueeze(1), text_feat_all.unsqueeze(-1)).squeeze() + + # image to text similarity + sim_i2t, _ = sim_q2t.max(-1) + sim_i2t = sim_i2t / self.temp + + # B, B*world_size, num_query + sim_t2q = torch.matmul( + text_feat.unsqueeze(1).unsqueeze(1), + image_feat_all.permute(0, 2, 1)).squeeze() + + # text-image similarity + sim_t2i, _ = sim_t2q.max(-1) + sim_t2i = sim_t2i / self.temp + + rank = dist.get_rank() + bs = images.size(0) + targets = torch.linspace( + rank * bs, rank * bs + bs - 1, bs, dtype=int).to(self.device) + + itc_loss = (F.cross_entropy(sim_i2t, targets, label_smoothing=0.1) + + F.cross_entropy(sim_t2i, targets, label_smoothing=0.1)) / 2 + + # prepare for itm + text_input_ids_world = torch.cat(dist.all_gather(text_ids)) + text_attention_mask_world = torch.cat(dist.all_gather(text_attn_mask)) + image_embeds_world = torch.cat(dist.all_gather(image_embeds)) + with torch.no_grad(): + weights_t2i = F.softmax(sim_t2i, dim=1) + 1e-4 + weights_t2i[:, rank * bs:rank * bs + bs].fill_diagonal_(0) + weights_i2t = F.softmax(sim_i2t, dim=1) + 1e-4 + weights_i2t[:, rank * bs:rank * bs + bs].fill_diagonal_(0) + + # select a negative image for each text + image_embeds_neg = [] + for b in range(bs): + neg_idx = torch.multinomial(weights_t2i[b], 1).item() + image_embeds_neg.append(image_embeds_world[neg_idx]) + image_embeds_neg = torch.stack(image_embeds_neg, dim=0) + + # select a negative text for each image + text_ids_neg = [] + text_atts_neg = [] + for b in range(bs): + neg_idx = torch.multinomial(weights_i2t[b], 1).item() + text_ids_neg.append(text_input_ids_world[neg_idx]) + text_atts_neg.append(text_attention_mask_world[neg_idx]) + + text_ids_neg = torch.stack(text_ids_neg, dim=0) + text_atts_neg = torch.stack(text_atts_neg, dim=0) + + text_ids_all = torch.cat([text_ids, text_ids, text_ids_neg], + dim=0) # pos, pos, neg + text_atts_all = torch.cat( + [text_attn_mask, text_attn_mask, text_atts_neg], + dim=0, + ) + + query_tokens_itm = self.query_tokens.expand(text_ids_all.shape[0], -1, + -1) + query_atts_itm = torch.ones( + query_tokens_itm.size()[:-1], dtype=torch.long).to(self.device) + attention_mask_all = torch.cat([query_atts_itm, text_atts_all], dim=1) + + image_embeds_all = torch.cat( + [image_embeds, image_embeds_neg, image_embeds], + dim=0) # pos, neg, pos + image_atts_all = torch.ones( + image_embeds_all.size()[:-1], dtype=torch.long).to(self.device) + + output_itm = self.multimodal_backbone.bert( + text_ids_all, + query_embeds=query_tokens_itm, + attention_mask=attention_mask_all, + encoder_hidden_states=image_embeds_all, + encoder_attention_mask=image_atts_all, + return_dict=True, + ) + + vl_embeddings = output_itm.last_hidden_state[:, :query_tokens_itm. 
+ size(1), :] + + # create false data samples + data_samples.extend( + [DataSample(is_matched=False) for _ in range(2 * bs)]) + loss_multimodal = self.multimodal_head.loss((vl_embeddings, ), + data_samples) + + # LM loss + decoder_input_ids = text_ids.clone() + decoder_input_ids[:, 0] = self.tokenizer.bos_token_id + labels = decoder_input_ids.masked_fill( + decoder_input_ids == self.tokenizer.pad_token_id, -100) + + query_tokens = self.query_tokens.expand(image_embeds.shape[0], -1, -1) + query_atts = torch.ones( + query_tokens.size()[:-1], dtype=torch.long).to(self.device) + attention_mask = torch.cat([query_atts, text_attn_mask], dim=1) + lm_output = self.multimodal_backbone( + decoder_input_ids, + attention_mask=attention_mask, + past_key_values=query_output.past_key_values, + return_dict=True, + labels=labels, + ) + + return dict( + itc_loss=itc_loss, **loss_multimodal, lm_loss=lm_output.loss) + + def predict_all(self, + feats: Dict[str, torch.Tensor], + data_samples: List[DataSample], + num_images: int = None, + num_texts: int = None, + cal_i2t: bool = True, + cal_t2i: bool = True) -> Tuple[torch.Tensor, torch.Tensor]: + """Compute similarity matrix between images and texts across all ranks. + + Args: + feats (Dict[str, torch.Tensor]): Features from the current rank. + data_samples (List[DataSample]): Data samples from the current + rank. + num_images (int, optional): Number of images to use. + Defaults to None. + num_texts (int, optional): Number of texts to use. + Defaults to None. + cal_i2t (bool, optional): Whether to compute image-to-text + similarity. Defaults to True. + cal_t2i (bool, optional): Whether to compute text-to-image + similarity. Defaults to True. + + Returns: + Tuple[torch.Tensor, torch.Tensor]: Image-to-text and text-to-image + similarity matrices. + """ + text_ids = feats['text_ids'] + text_attn_mask = feats['text_attn_mask'] + image_embeds = feats.get('image_embeds', None) + image_feat = feats['image_feat'] + text_feat = feats['text_feat'] + + num_images = num_images or image_feat.size(0) + num_texts = num_texts or text_feat.size(0) + + if not self.fast_match: + image_embeds_all = all_gather_concat(image_embeds)[:num_images] + else: + image_embeds_all = None + image_feat_all = all_gather_concat(image_feat)[:num_images] + text_feat_all = all_gather_concat(text_feat)[:num_texts] + text_ids_all = all_gather_concat(text_ids)[:num_texts] + text_attn_mask_all = all_gather_concat(text_attn_mask)[:num_texts] + + results = [] + if cal_i2t: + result_i2t = self.compute_score_matrix_i2t( + image_feat, + image_embeds, + text_feat_all, + text_ids_all, + text_attn_mask_all, + ) + results.append( + self._get_predictions(result_i2t, data_samples, mode='i2t')) + if cal_t2i: + result_t2i = self.compute_score_matrix_t2i( + image_feat_all, + image_embeds_all, + text_feat, + text_ids, + text_attn_mask, + ) + results.append( + self._get_predictions(result_t2i, data_samples, mode='t2i')) + return tuple(results) + + def compute_score_matrix_i2t(self, img_feats: torch.Tensor, + img_embeds: List[torch.Tensor], + text_feats: torch.Tensor, + text_ids: torch.Tensor, + text_atts: torch.Tensor) -> torch.Tensor: + """Compare the score matrix for image-to-text retrieval. Every image + should compare to all the text features. + + Args: + img_feats (torch.Tensor): The input tensor with shape (M, C). + M stands for numbers of samples on a single GPU. + img_embeds (List[torch.Tensor]): Image features from each layer of + the vision backbone. 
+ text_feats (torch.Tensor): The input tensor with shape (N, C). + N stands for numbers of all samples on all GPUs. + text_ids (torch.Tensor): The input tensor with shape (N, C). + text_atts (torch.Tensor): The input tensor with shape (N, C). + + Returns: + torch.Tensor: Score matrix of image-to-text retrieval. + """ + + # compute i2t sim matrix + # TODO: check correctness + sim_matrix_i2t, _ = (img_feats @ text_feats.t()).max(1) + if self.fast_match: + return sim_matrix_i2t + + score_matrix_i2t = torch.full((img_feats.size(0), text_feats.size(0)), + -100.0).to(self.device) + + for i in track_iter_progress(range(img_feats.size(0))): + sims = sim_matrix_i2t[i] + topk_sim, topk_idx = sims.topk(k=self.topk, dim=0) + # get repeated image embeddings + encoder_output = img_embeds[i].repeat(self.topk, 1, 1) + encoder_att = torch.ones( + encoder_output.size()[:-1], dtype=torch.long).to(self.device) + # query embeds and attention masks + query_tokens = self.query_tokens.expand(encoder_output.shape[0], + -1, -1) + query_atts = torch.ones( + query_tokens.size()[:-1], dtype=torch.long).to(self.device) + attention_mask = torch.cat([query_atts, text_atts[topk_idx]], + dim=1) + output = self.multimodal_backbone.bert( + text_ids[topk_idx], + query_embeds=query_tokens, + attention_mask=attention_mask, + encoder_hidden_states=encoder_output, + encoder_attention_mask=encoder_att, + return_dict=True, + ) + score = self.multimodal_head( + (output.last_hidden_state[:, :query_tokens.size(1), :], + ))[:, :, 1].mean(dim=1) + score_matrix_i2t[i, topk_idx] = score + topk_sim + + return score_matrix_i2t + + def compute_score_matrix_t2i(self, img_feats: torch.Tensor, + img_embeds: List[torch.Tensor], + text_feats: torch.Tensor, + text_ids: torch.Tensor, + text_atts: torch.Tensor) -> torch.Tensor: + """Compare the score matrix for text-to-image retrieval. + + Every text should compare to all the image features. + + Args: + img_feats (torch.Tensor): The input tensor with shape (N, C). + N stands for numbers of all samples on all GPUs. + img_embeds (List[torch.Tensor]): Image features from each layer of + the vision backbone. + text_feats (torch.Tensor): The input tensor with shape (M, C). + M stands for numbers of samples on a single GPU. + text_ids (torch.Tensor): The input tensor with shape (M, C). + text_atts (torch.Tensor): The input tensor with shape (M, C). + + Returns: + torch.Tensor: Score matrix of text-to-image retrieval. 
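+                The matrix has shape (M, N): entry ``[i, j]`` scores the i-th
+                local text against the j-th image. When ``fast_match`` is
+                enabled, only the coarse feature-similarity matrix is returned
+                and the ITM-based re-ranking below is skipped.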
+ """ + + # compute t2i sim matrix + # TODO: check correctness + sim_matrix_i2t, _ = (img_feats @ text_feats.t()).max(1) + sim_matrix_t2i = sim_matrix_i2t.t() + if self.fast_match: + return sim_matrix_i2t + + score_matrix_t2i = torch.full((text_feats.size(0), img_feats.size(0)), + -100.0).to(self.device) + + for i in track_iter_progress(range(text_feats.size(0))): + sims = sim_matrix_t2i[i] + topk_sim, topk_idx = sims.topk(k=self.topk, dim=0) + # get topk image embeddings + encoder_output = img_embeds[topk_idx] + encoder_att = torch.ones( + encoder_output.size()[:-1], dtype=torch.long).to(self.device) + # get query embeds and attention masks + query_tokens = self.query_tokens.expand(encoder_output.shape[0], + -1, -1) + query_atts = torch.ones( + query_tokens.size()[:-1], dtype=torch.long).to(self.device) + attention_mask = torch.cat( + [query_atts, text_atts[i].repeat(self.topk, 1)], dim=1) + output = self.multimodal_backbone.bert( + text_ids[i].repeat(self.topk, 1), + query_embeds=query_tokens, + attention_mask=attention_mask, + encoder_hidden_states=encoder_output, + encoder_attention_mask=encoder_att, + return_dict=True, + ) + score = self.multimodal_head( + (output.last_hidden_state[:, :query_tokens.size(1), :], + ))[:, :, 1].mean(dim=1) + score_matrix_t2i[i, topk_idx] = score + topk_sim + + return score_matrix_t2i diff --git a/mmpretrain/models/multimodal/blip2/modeling_opt.py b/mmpretrain/models/multimodal/blip2/modeling_opt.py new file mode 100644 index 0000000000000000000000000000000000000000..7cde0d76a2079a610bd71ed034c0c88940244e76 --- /dev/null +++ b/mmpretrain/models/multimodal/blip2/modeling_opt.py @@ -0,0 +1,1083 @@ +# flake8: noqa +# Copyright 2022 The Fairseq Authors and The HuggingFace Inc. team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""PyTorch OPT model.""" +import random +from typing import List, Optional, Tuple, Union + +import torch +import torch.utils.checkpoint +from torch import nn +from torch.nn import CrossEntropyLoss +from transformers.activations import ACT2FN +from transformers.modeling_outputs import (BaseModelOutputWithPast, + CausalLMOutputWithPast) +from transformers.modeling_utils import PreTrainedModel +from transformers.models.opt.configuration_opt import OPTConfig +from transformers.utils import (add_code_sample_docstrings, + add_start_docstrings, + add_start_docstrings_to_model_forward, logging, + replace_return_docstrings) + +from mmpretrain.models.utils import register_hf_model + +logger = logging.get_logger(__name__) + +_CHECKPOINT_FOR_DOC = 'facebook/opt-350m' +_CONFIG_FOR_DOC = 'OPTConfig' +_TOKENIZER_FOR_DOC = 'GPT2Tokenizer' + +# Base model docstring +_EXPECTED_OUTPUT_SHAPE = [1, 8, 1024] + +OPT_PRETRAINED_MODEL_ARCHIVE_LIST = [ + 'facebook/opt-125m', + 'facebook/opt-350m', + 'facebook/opt-1.3b', + 'facebook/opt-2.7b', + 'facebook/opt-6.7b', + 'facebook/opt-13b', + 'facebook/opt-30b', + # See all OPT models at https://huggingface.co/models?filter=opt +] + + +def _make_causal_mask(input_ids_shape: torch.Size, + dtype: torch.dtype, + past_key_values_length: int = 0): + """Make causal mask used for bi-directional self-attention.""" + bsz, tgt_len = input_ids_shape + mask = torch.full((tgt_len, tgt_len), torch.tensor(torch.finfo(dtype).min)) + mask_cond = torch.arange(mask.size(-1)) + mask.masked_fill_(mask_cond < (mask_cond + 1).view(mask.size(-1), 1), 0) + mask = mask.to(dtype) + + if past_key_values_length > 0: + mask = torch.cat( + [torch.zeros(tgt_len, past_key_values_length, dtype=dtype), mask], + dim=-1) + return mask[None, None, :, :].expand(bsz, 1, tgt_len, + tgt_len + past_key_values_length) + + +def _expand_mask(mask: torch.Tensor, + dtype: torch.dtype, + tgt_len: Optional[int] = None): + """Expands attention_mask from `[bsz, seq_len]` to `[bsz, 1, tgt_seq_len, + src_seq_len]`.""" + bsz, src_len = mask.size() + tgt_len = tgt_len if tgt_len is not None else src_len + + expanded_mask = mask[:, None, None, :].expand(bsz, 1, tgt_len, + src_len).to(dtype) + + inverted_mask = 1.0 - expanded_mask + + return inverted_mask.masked_fill( + inverted_mask.to(torch.bool), + torch.finfo(dtype).min) + + +class OPTLearnedPositionalEmbedding(nn.Embedding): + """This module learns positional embeddings up to a fixed maximum size.""" + + def __init__(self, num_embeddings: int, embedding_dim: int): + # OPT is set up so that if padding_idx is specified then offset the embedding ids by 2 + # and adjust num_embeddings appropriately. 
Other models don't have this hack + self.offset = 2 + super().__init__(num_embeddings + self.offset, embedding_dim) + + def forward(self, + attention_mask: torch.LongTensor, + past_key_values_length: int = 0): + """`input_ids_shape` is expected to be [bsz x seqlen].""" + attention_mask = attention_mask.long() + + # create positions depending on attention_mask + positions = ( + torch.cumsum(attention_mask, dim=1).type_as(attention_mask) * + attention_mask).long() - 1 + + # cut positions if `past_key_values_length` is > 0 + positions = positions[:, past_key_values_length:] + + return super().forward(positions + self.offset) + + +class OPTAttention(nn.Module): + """Multi-headed attention from 'Attention Is All You Need' paper.""" + + def __init__( + self, + embed_dim: int, + num_heads: int, + dropout: float = 0.0, + is_decoder: bool = False, + bias: bool = True, + ): + super().__init__() + self.embed_dim = embed_dim + self.num_heads = num_heads + self.dropout = dropout + self.head_dim = embed_dim // num_heads + + if (self.head_dim * num_heads) != self.embed_dim: + raise ValueError( + f'embed_dim must be divisible by num_heads (got `embed_dim`: {self.embed_dim}' + f' and `num_heads`: {num_heads}).') + self.scaling = self.head_dim**-0.5 + self.is_decoder = is_decoder + + self.k_proj = nn.Linear(embed_dim, embed_dim, bias=bias) + self.v_proj = nn.Linear(embed_dim, embed_dim, bias=bias) + self.q_proj = nn.Linear(embed_dim, embed_dim, bias=bias) + self.out_proj = nn.Linear(embed_dim, embed_dim, bias=bias) + + def _shape(self, tensor: torch.Tensor, seq_len: int, bsz: int): + return (tensor.view(bsz, seq_len, self.num_heads, + self.head_dim).transpose(1, 2).contiguous()) + + def forward( + self, + hidden_states: torch.Tensor, + key_value_states: Optional[torch.Tensor] = None, + past_key_value: Optional[Tuple[torch.Tensor]] = None, + attention_mask: Optional[torch.Tensor] = None, + layer_head_mask: Optional[torch.Tensor] = None, + output_attentions: bool = False, + ) -> Tuple[torch.Tensor, Optional[torch.Tensor], + Optional[Tuple[torch.Tensor]]]: + """Input shape: Batch x Time x Channel.""" + + # if key_value_states are provided this layer is used as a cross-attention layer + # for the decoder + is_cross_attention = key_value_states is not None + + bsz, tgt_len, _ = hidden_states.size() + + # get query proj + query_states = self.q_proj(hidden_states) * self.scaling + # get key, value proj + if is_cross_attention and past_key_value is not None: + # reuse k,v, cross_attentions + key_states = past_key_value[0] + value_states = past_key_value[1] + elif is_cross_attention: + # cross_attentions + key_states = self._shape(self.k_proj(key_value_states), -1, bsz) + value_states = self._shape(self.v_proj(key_value_states), -1, bsz) + elif past_key_value is not None: + # reuse k, v, self_attention + key_states = self._shape(self.k_proj(hidden_states), -1, bsz) + value_states = self._shape(self.v_proj(hidden_states), -1, bsz) + key_states = torch.cat([past_key_value[0], key_states], dim=2) + value_states = torch.cat([past_key_value[1], value_states], dim=2) + else: + # self_attention + key_states = self._shape(self.k_proj(hidden_states), -1, bsz) + value_states = self._shape(self.v_proj(hidden_states), -1, bsz) + + if self.is_decoder: + # if cross_attention save Tuple(torch.Tensor, torch.Tensor) of all cross attention key/value_states. 
+ # Further calls to cross_attention layer can then reuse all cross-attention + # key/value_states (first "if" case) + # if uni-directional self-attention (decoder) save Tuple(torch.Tensor, torch.Tensor) of + # all previous decoder key/value_states. Further calls to uni-directional self-attention + # can concat previous decoder key/value_states to current projected key/value_states (third "elif" case) + # if encoder bi-directional self-attention `past_key_value` is always `None` + past_key_value = (key_states, value_states) + + proj_shape = (bsz * self.num_heads, -1, self.head_dim) + query_states = self._shape(query_states, tgt_len, + bsz).view(*proj_shape) + key_states = key_states.view(*proj_shape) + value_states = value_states.view(*proj_shape) + + src_len = key_states.size(1) + attn_weights = torch.bmm(query_states, key_states.transpose(1, 2)) + + if attn_weights.size() != (bsz * self.num_heads, tgt_len, src_len): + raise ValueError( + f'Attention weights should be of size {(bsz * self.num_heads, tgt_len, src_len)}, but is' + f' {attn_weights.size()}') + + if attention_mask is not None: + if attention_mask.size() != (bsz, 1, tgt_len, src_len): + raise ValueError( + f'Attention mask should be of size {(bsz, 1, tgt_len, src_len)}, but is {attention_mask.size()}' + ) + attn_weights = ( + attn_weights.view(bsz, self.num_heads, tgt_len, src_len) + + attention_mask) + attn_weights = torch.max( + attn_weights, + torch.tensor(torch.finfo(attn_weights.dtype).min)) + attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, + src_len) + + # upcast to fp32 if the weights are in fp16. Please see https://github.com/huggingface/transformers/pull/17437 + if attn_weights.dtype == torch.float16: + attn_weights = nn.functional.softmax( + attn_weights, dim=-1, dtype=torch.float32).to(torch.float16) + else: + attn_weights = nn.functional.softmax(attn_weights, dim=-1) + + if layer_head_mask is not None: + if layer_head_mask.size() != (self.num_heads, ): + raise ValueError( + f'Head mask for a single layer should be of size {(self.num_heads,)}, but is' + f' {layer_head_mask.size()}') + attn_weights = layer_head_mask.view( + 1, -1, 1, 1) * attn_weights.view(bsz, self.num_heads, tgt_len, + src_len) + attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, + src_len) + + if output_attentions: + # this operation is a bit awkward, but it's required to + # make sure that attn_weights keeps its gradient. + # In order to do so, attn_weights have to be reshaped + # twice and have to be reused in the following + attn_weights_reshaped = attn_weights.view(bsz, self.num_heads, + tgt_len, src_len) + attn_weights = attn_weights_reshaped.view(bsz * self.num_heads, + tgt_len, src_len) + else: + attn_weights_reshaped = None + + attn_probs = nn.functional.dropout( + attn_weights, p=self.dropout, training=self.training) + + attn_output = torch.bmm(attn_probs, value_states) + + if attn_output.size() != (bsz * self.num_heads, tgt_len, + self.head_dim): + raise ValueError( + f'`attn_output` should be of size {(bsz, self.num_heads, tgt_len, self.head_dim)}, but is' + f' {attn_output.size()}') + + attn_output = attn_output.view(bsz, self.num_heads, tgt_len, + self.head_dim) + attn_output = attn_output.transpose(1, 2) + + # Use the `embed_dim` from the config (stored in the class) rather than `hidden_state` because `attn_output` can be + # partitioned aross GPUs when using tensor-parallelism. 
+ attn_output = attn_output.reshape(bsz, tgt_len, self.embed_dim) + + attn_output = self.out_proj(attn_output) + + return attn_output, attn_weights_reshaped, past_key_value + + +class OPTDecoderLayer(nn.Module): + + def __init__(self, config: OPTConfig): + super().__init__() + self.embed_dim = config.hidden_size + self.self_attn = OPTAttention( + embed_dim=self.embed_dim, + num_heads=config.num_attention_heads, + dropout=config.attention_dropout, + is_decoder=True, + ) + self.do_layer_norm_before = config.do_layer_norm_before + self.dropout = config.dropout + self.activation_fn = ACT2FN[config.activation_function] + + self.self_attn_layer_norm = nn.LayerNorm(self.embed_dim) + self.fc1 = nn.Linear(self.embed_dim, config.ffn_dim) + self.fc2 = nn.Linear(config.ffn_dim, self.embed_dim) + self.final_layer_norm = nn.LayerNorm(self.embed_dim) + + def forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.Tensor] = None, + layer_head_mask: Optional[torch.Tensor] = None, + output_attentions: Optional[bool] = False, + use_cache: Optional[bool] = False, + past_key_value: Optional[Tuple[torch.Tensor]] = None, + ) -> Tuple[torch.FloatTensor, Optional[Tuple[torch.FloatTensor, + torch.FloatTensor]]]: + """ + Args: + hidden_states (`torch.FloatTensor`): input to the layer of shape `(batch, seq_len, embed_dim)` + attention_mask (`torch.FloatTensor`, *optional*): attention mask of size + `(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values. + layer_head_mask (`torch.FloatTensor`, *optional*): mask for attention heads in a given layer of size + `(encoder_attention_heads,)`. + output_attentions (`bool`, *optional*): + Whether or not to return the attentions tensors of all attention layers. See `attentions` under + returned tensors for more detail. + use_cache (`bool`, *optional*): + If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding + (see `past_key_values`). 
+ past_key_value (`Tuple(torch.FloatTensor)`, *optional*): cached past key and value projection states + """ + + residual = hidden_states + + # 125m, 1.7B, ..., 175B applies layer norm BEFORE attention + if self.do_layer_norm_before: + hidden_states = self.self_attn_layer_norm(hidden_states) + + # Self Attention + hidden_states, self_attn_weights, present_key_value = self.self_attn( + hidden_states=hidden_states, + past_key_value=past_key_value, + attention_mask=attention_mask, + layer_head_mask=layer_head_mask, + output_attentions=output_attentions, + ) + hidden_states = nn.functional.dropout( + hidden_states, p=self.dropout, training=self.training) + hidden_states = residual + hidden_states + + # 350m applies layer norm AFTER attention + if not self.do_layer_norm_before: + hidden_states = self.self_attn_layer_norm(hidden_states) + + # Fully Connected + hidden_states_shape = hidden_states.shape + hidden_states = hidden_states.reshape(-1, hidden_states.size(-1)) + residual = hidden_states + + # 125m, 1.7B, ..., 175B applies layer norm BEFORE attention + if self.do_layer_norm_before: + hidden_states = self.final_layer_norm(hidden_states) + + hidden_states = self.fc1(hidden_states) + hidden_states = self.activation_fn(hidden_states) + + hidden_states = self.fc2(hidden_states) + hidden_states = nn.functional.dropout( + hidden_states, p=self.dropout, training=self.training) + + hidden_states = (residual + hidden_states).view(hidden_states_shape) + + # 350m applies layer norm AFTER attention + if not self.do_layer_norm_before: + hidden_states = self.final_layer_norm(hidden_states) + + outputs = (hidden_states, ) + + if output_attentions: + outputs += (self_attn_weights, ) + + if use_cache: + outputs += (present_key_value, ) + + return outputs + + +OPT_START_DOCSTRING = r""" + This model inherits from [`PreTrainedModel`]. Check the superclass documentation for the generic methods the + library implements for all its model (such as downloading or saving, resizing the input embeddings, pruning heads + etc.) + + This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass. + Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage + and behavior. + + Parameters: + config ([`OPTConfig`]): + Model configuration class with all the parameters of the model. Initializing with a config file does not + load the weights associated with the model, only the configuration. Check out the + [`~PreTrainedModel.from_pretrained`] method to load the model weights. 
+""" + + +@add_start_docstrings( + 'The bare OPT Model outputting raw hidden-states without any specific head on top.', + OPT_START_DOCSTRING, +) +class OPTPreTrainedModel(PreTrainedModel): + + config_class = OPTConfig + base_model_prefix = 'model' + supports_gradient_checkpointing = True + _no_split_modules = ['OPTDecoderLayer'] + _keys_to_ignore_on_load_unexpected = [r'decoder\.version'] + + def _init_weights(self, module): + std = self.config.init_std + if isinstance(module, nn.Linear): + module.weight.data.normal_(mean=0.0, std=std) + if module.bias is not None: + module.bias.data.zero_() + elif isinstance(module, nn.Embedding): + module.weight.data.normal_(mean=0.0, std=std) + if module.padding_idx is not None: + module.weight.data[module.padding_idx].zero_() + + def _set_gradient_checkpointing(self, module, value=False): + if isinstance(module, (OPTDecoder)): + module.gradient_checkpointing = value + + +OPT_INPUTS_DOCSTRING = r""" + Args: + input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): + Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide + it. + + Indices can be obtained using [`GPT2Tokenizer`]. See [`PreTrainedTokenizer.encode`] and + [`PreTrainedTokenizer.__call__`] for details. + + [What are input IDs?](../glossary#input-ids) + attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*): + Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: + + - 1 for tokens that are **not masked**, + - 0 for tokens that are **masked**. + + [What are attention masks?](../glossary#attention-mask) + + Indices can be obtained using [`OPTTokenizer`]. See [`PreTrainedTokenizer.encode`] and + [`PreTrainedTokenizer.__call__`] for details. + + If `past_key_values` is used, optionally only the last `decoder_input_ids` have to be input (see + `past_key_values`). + + If you want to change padding behavior, you should read [`modeling_opt._prepare_decoder_attention_mask`] + and modify to your needs. See diagram 1 in [the paper](https://arxiv.org/abs/1910.13461) for more + information on the default strategy. + head_mask (`torch.Tensor` of shape `(encoder_layers, encoder_attention_heads)`, *optional*): + Mask to nullify selected heads of the attention modules in the encoder. Mask values selected in `[0, 1]`: + + - 1 indicates the head is **not masked**, + - 0 indicates the head is **masked**. + + past_key_values (`tuple(tuple(torch.FloatTensor))`, *optional*, returned when `use_cache=True` is passed or when `config.use_cache=True`): + Tuple of `tuple(torch.FloatTensor)` of length `config.n_layers`, with each tuple having 2 tensors of shape + `(batch_size, num_heads, sequence_length, embed_size_per_head)`) and 2 additional tensors of shape + `(batch_size, num_heads, encoder_sequence_length, embed_size_per_head)`. + + Contains pre-computed hidden-states (key and values in the self-attention blocks and in the cross-attention + blocks) that can be used (see `past_key_values` input) to speed up sequential decoding. + + If `past_key_values` are used, the user can optionally input only the last `decoder_input_ids` (those that + don't have their past key value states given to this model) of shape `(batch_size, 1)` instead of all + `decoder_input_ids` of shape `(batch_size, sequence_length)`. 
+ inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*): + Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This + is useful if you want more control over how to convert `input_ids` indices into associated vectors than the + model's internal embedding lookup matrix. + use_cache (`bool`, *optional*): + If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding (see + `past_key_values`). + output_attentions (`bool`, *optional*): + Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned + tensors for more detail. + output_hidden_states (`bool`, *optional*): + Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for + more detail. + return_dict (`bool`, *optional*): + Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. +""" + + +class OPTDecoder(OPTPreTrainedModel): + """Transformer decoder consisting of *config.num_hidden_layers* layers. + Each layer is a [`OPTDecoderLayer`] + + Args: + config: OPTConfig + """ + + def __init__(self, config: OPTConfig): + super().__init__(config) + self.dropout = config.dropout + self.layerdrop = config.layerdrop + self.padding_idx = config.pad_token_id + self.max_target_positions = config.max_position_embeddings + self.vocab_size = config.vocab_size + + self.embed_tokens = nn.Embedding(config.vocab_size, + config.word_embed_proj_dim, + self.padding_idx) + self.embed_positions = OPTLearnedPositionalEmbedding( + config.max_position_embeddings, config.hidden_size) + + if config.word_embed_proj_dim != config.hidden_size: + self.project_out = nn.Linear( + config.hidden_size, config.word_embed_proj_dim, bias=False) + else: + self.project_out = None + + if config.word_embed_proj_dim != config.hidden_size: + self.project_in = nn.Linear( + config.word_embed_proj_dim, config.hidden_size, bias=False) + else: + self.project_in = None + + # Note that the only purpose of `config._remove_final_layer_norm` is to keep backward compatibility + # with checkpoints that have been fine-tuned before transformers v4.20.1 + # see https://github.com/facebookresearch/metaseq/pull/164 + if config.do_layer_norm_before and not config._remove_final_layer_norm: + self.final_layer_norm = nn.LayerNorm(config.hidden_size) + else: + self.final_layer_norm = None + + self.layers = nn.ModuleList( + [OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) + + self.gradient_checkpointing = False + # Initialize weights and apply final processing + self.post_init() + + def get_input_embeddings(self): + return self.embed_tokens + + def set_input_embeddings(self, value): + self.embed_tokens = value + + # Copied from transformers.models.bart.modeling_bart.BartDecoder._prepare_decoder_attention_mask + def _prepare_decoder_attention_mask(self, attention_mask, input_shape, + inputs_embeds, past_key_values_length): + # create causal mask + # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len] + combined_attention_mask = None + if input_shape[-1] > 1: + combined_attention_mask = _make_causal_mask( + input_shape, + inputs_embeds.dtype, + past_key_values_length=past_key_values_length, + ).to(inputs_embeds.device) + + if attention_mask is not None: + # [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len] + expanded_attn_mask = _expand_mask( + attention_mask, inputs_embeds.dtype, + tgt_len=input_shape[-1]).to(inputs_embeds.device) + 
combined_attention_mask = ( + expanded_attn_mask if combined_attention_mask is None else + expanded_attn_mask + combined_attention_mask) + + return combined_attention_mask + + def forward( + self, + input_ids: torch.LongTensor = None, + attention_mask: Optional[torch.Tensor] = None, + head_mask: Optional[torch.Tensor] = None, + past_key_values: Optional[List[torch.FloatTensor]] = None, + inputs_embeds: Optional[torch.FloatTensor] = None, + query_embeds: Optional[torch.FloatTensor] = None, + use_cache: Optional[bool] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + ) -> Union[Tuple, BaseModelOutputWithPast]: + r""" + Args: + input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): + Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you + provide it. + + Indices can be obtained using [`OPTTokenizer`]. See [`PreTrainedTokenizer.encode`] and + [`PreTrainedTokenizer.__call__`] for details. + + [What are input IDs?](../glossary#input-ids) + attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*): + Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: + + - 1 for tokens that are **not masked**, + - 0 for tokens that are **masked**. + + [What are attention masks?](../glossary#attention-mask) + head_mask (`torch.Tensor` of shape `(num_hidden_layers, num_attention_heads)`, *optional*): + Mask to nullify selected heads of the attention modules. Mask values selected in `[0, 1]`: + + - 1 indicates the head is **not masked**, + - 0 indicates the head is **masked**. + + past_key_values (`tuple(tuple(torch.FloatTensor))`, *optional*, returned when `use_cache=True` is passed or when `config.use_cache=True`): + Tuple of `tuple(torch.FloatTensor)` of length `config.n_layers`, with each tuple having 2 tensors of + shape `(batch_size, num_heads, sequence_length, embed_size_per_head)`) and 2 additional tensors of + + Contains pre-computed hidden-states (key and values in the self-attention blocks and in the + cross-attention blocks) that can be used (see `past_key_values` input) to speed up sequential decoding. + + If `past_key_values` are used, the user can optionally input only the last `decoder_input_ids` (those + that don't have their past key value states given to this model) of shape `(batch_size, 1)` instead of + all `decoder_input_ids` of shape `(batch_size, sequence_length)`. + + inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*): + Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. + This is useful if you want more control over how to convert `input_ids` indices into associated vectors + than the model's internal embedding lookup matrix. + output_attentions (`bool`, *optional*): + Whether or not to return the attentions tensors of all attention layers. See `attentions` under + returned tensors for more detail. + output_hidden_states (`bool`, *optional*): + Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors + for more detail. + return_dict (`bool`, *optional*): + Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. 
+ """ + output_attentions = ( + output_attentions if output_attentions is not None else + self.config.output_attentions) + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else + self.config.output_hidden_states) + use_cache = use_cache if use_cache is not None else self.config.use_cache + + return_dict = ( + return_dict + if return_dict is not None else self.config.use_return_dict) + + # retrieve input_ids and inputs_embeds + if input_ids is not None and inputs_embeds is not None: + raise ValueError( + 'You cannot specify both decoder_input_ids and decoder_inputs_embeds at the same time' + ) + elif input_ids is not None: + batch_size, seq_length = input_ids.shape + elif inputs_embeds is not None: + batch_size, seq_length, _ = inputs_embeds.shape + else: + raise ValueError( + 'You have to specify either decoder_input_ids or decoder_inputs_embeds' + ) + + seq_length_with_past = seq_length + past_key_values_length = 0 + + if past_key_values is not None: + past_key_values_length = past_key_values[0][0].shape[2] + seq_length_with_past = seq_length_with_past + past_key_values_length + + if inputs_embeds is None: + inputs_embeds = self.embed_tokens(input_ids) + + if query_embeds is not None: + inputs_embeds = torch.cat([query_embeds, inputs_embeds], dim=1) + input_shape = inputs_embeds.size()[:-1] + else: + input_shape = (batch_size, seq_length) + + # embed positions + if attention_mask is None: + attention_mask = torch.ones( + inputs_embeds.shape[:2], + dtype=torch.bool, + device=inputs_embeds.device) + pos_embeds = self.embed_positions(attention_mask, + past_key_values_length) + + # embed positions + if attention_mask is None: + attention_mask = torch.ones((batch_size, seq_length_with_past), + dtype=torch.bool, + device=inputs_embeds.device) + + attention_mask = self._prepare_decoder_attention_mask( + attention_mask, input_shape, inputs_embeds, past_key_values_length) + + if self.project_in is not None: + inputs_embeds = self.project_in(inputs_embeds) + + hidden_states = inputs_embeds + pos_embeds + + # decoder layers + all_hidden_states = () if output_hidden_states else None + all_self_attns = () if output_attentions else None + next_decoder_cache = () if use_cache else None + + # check if head_mask has a correct number of layers specified if desired + for attn_mask, mask_name in zip([head_mask], ['head_mask']): + if attn_mask is not None: + if attn_mask.size()[0] != (len(self.layers)): + raise ValueError( + f'The `{mask_name}` should be specified for {len(self.layers)} layers, but it is for' + f' {head_mask.size()[0]}.') + + for idx, decoder_layer in enumerate(self.layers): + # add LayerDrop (see https://arxiv.org/abs/1909.11556 for description) + if output_hidden_states: + all_hidden_states += (hidden_states, ) + + dropout_probability = random.uniform(0, 1) + if self.training and (dropout_probability < self.layerdrop): + continue + + past_key_value = ( + past_key_values[idx] if past_key_values is not None else None) + + if self.gradient_checkpointing and self.training: + + if use_cache: + logger.warning( + '`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`...' 
+ ) + use_cache = False + + def create_custom_forward(module): + + def custom_forward(*inputs): + # None for past_key_value + return module(*inputs, output_attentions, None) + + return custom_forward + + layer_outputs = torch.utils.checkpoint.checkpoint( + create_custom_forward(decoder_layer), + hidden_states, + attention_mask, + head_mask[idx] if head_mask is not None else None, + None, + ) + else: + + layer_outputs = decoder_layer( + hidden_states, + attention_mask=attention_mask, + layer_head_mask=(head_mask[idx] + if head_mask is not None else None), + past_key_value=past_key_value, + output_attentions=output_attentions, + use_cache=use_cache, + ) + + hidden_states = layer_outputs[0] + + if use_cache: + next_decoder_cache += ( + layer_outputs[2 if output_attentions else 1], ) + + if output_attentions: + all_self_attns += (layer_outputs[1], ) + + if self.final_layer_norm is not None: + hidden_states = self.final_layer_norm(hidden_states) + + if self.project_out is not None: + hidden_states = self.project_out(hidden_states) + + # add hidden states from the last decoder layer + if output_hidden_states: + all_hidden_states += (hidden_states, ) + + next_cache = next_decoder_cache if use_cache else None + if not return_dict: + return tuple( + v for v in + [hidden_states, next_cache, all_hidden_states, all_self_attns] + if v is not None) + return BaseModelOutputWithPast( + last_hidden_state=hidden_states, + past_key_values=next_cache, + hidden_states=all_hidden_states, + attentions=all_self_attns, + ) + + +@add_start_docstrings( + 'The bare OPT Model outputting raw hidden-states without any specific head on top.', + OPT_START_DOCSTRING, +) +class OPTModel(OPTPreTrainedModel): + + def __init__(self, config: OPTConfig): + super().__init__(config) + self.decoder = OPTDecoder(config) + # Initialize weights and apply final processing + self.post_init() + + def get_input_embeddings(self): + return self.decoder.embed_tokens + + def set_input_embeddings(self, value): + self.decoder.embed_tokens = value + + def get_decoder(self): + return self.decoder + + @add_start_docstrings_to_model_forward(OPT_INPUTS_DOCSTRING) + @add_code_sample_docstrings( + processor_class=_TOKENIZER_FOR_DOC, + checkpoint=_CHECKPOINT_FOR_DOC, + output_type=BaseModelOutputWithPast, + config_class=_CONFIG_FOR_DOC, + expected_output=_EXPECTED_OUTPUT_SHAPE, + ) + def forward( + self, + input_ids: torch.LongTensor = None, + attention_mask: Optional[torch.Tensor] = None, + head_mask: Optional[torch.Tensor] = None, + past_key_values: Optional[List[torch.FloatTensor]] = None, + inputs_embeds: Optional[torch.FloatTensor] = None, + query_embeds: Optional[torch.FloatTensor] = None, + use_cache: Optional[bool] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + ) -> Union[Tuple, BaseModelOutputWithPast]: + + output_attentions = ( + output_attentions if output_attentions is not None else + self.config.output_attentions) + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else + self.config.output_hidden_states) + use_cache = use_cache if use_cache is not None else self.config.use_cache + return_dict = ( + return_dict + if return_dict is not None else self.config.use_return_dict) + + # decoder outputs consists of (dec_features, past_key_value, dec_hidden, dec_attn) + decoder_outputs = self.decoder( + input_ids=input_ids, + attention_mask=attention_mask, + head_mask=head_mask, + past_key_values=past_key_values, + 
inputs_embeds=inputs_embeds, + query_embeds=query_embeds, + use_cache=use_cache, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + ) + + if not return_dict: + return decoder_outputs + + return BaseModelOutputWithPast( + last_hidden_state=decoder_outputs.last_hidden_state, + past_key_values=decoder_outputs.past_key_values, + hidden_states=decoder_outputs.hidden_states, + attentions=decoder_outputs.attentions, + ) + + +@register_hf_model() +class OPTForCausalLM(OPTPreTrainedModel): + _keys_to_ignore_on_load_missing = [r'lm_head.weight'] + + def __init__(self, config): + super().__init__(config) + self.model = OPTModel(config) + + # the lm_head weight is automatically tied to the embed tokens weight + self.lm_head = nn.Linear( + config.word_embed_proj_dim, config.vocab_size, bias=False) + + # Initialize weights and apply final processing + self.post_init() + + def get_input_embeddings(self): + return self.model.decoder.embed_tokens + + def set_input_embeddings(self, value): + self.model.decoder.embed_tokens = value + + def get_output_embeddings(self): + return self.lm_head + + def set_output_embeddings(self, new_embeddings): + self.lm_head = new_embeddings + + def set_decoder(self, decoder): + self.model.decoder = decoder + + def get_decoder(self): + return self.model.decoder + + @replace_return_docstrings( + output_type=CausalLMOutputWithPast, config_class=_CONFIG_FOR_DOC) + def forward( + self, + input_ids: torch.LongTensor = None, + attention_mask: Optional[torch.Tensor] = None, + head_mask: Optional[torch.Tensor] = None, + past_key_values: Optional[List[torch.FloatTensor]] = None, + inputs_embeds: Optional[torch.FloatTensor] = None, + query_embeds: Optional[torch.FloatTensor] = None, + labels: Optional[torch.LongTensor] = None, + use_cache: Optional[bool] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + reduction: Optional[str] = 'mean', + ) -> Union[Tuple, CausalLMOutputWithPast]: + r""" + Args: + input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): + Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you + provide it. + + Indices can be obtained using [`OPTTokenizer`]. See [`PreTrainedTokenizer.encode`] and + [`PreTrainedTokenizer.__call__`] for details. + + [What are input IDs?](../glossary#input-ids) + attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*): + Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`: + + - 1 for tokens that are **not masked**, + - 0 for tokens that are **masked**. + + [What are attention masks?](../glossary#attention-mask) + head_mask (`torch.Tensor` of shape `(num_hidden_layers, num_attention_heads)`, *optional*): + Mask to nullify selected heads of the attention modules. Mask values selected in `[0, 1]`: + + - 1 indicates the head is **not masked**, + - 0 indicates the head is **masked**. + + past_key_values (`tuple(tuple(torch.FloatTensor))`, *optional*, returned when `use_cache=True` is passed or when `config.use_cache=True`): + Tuple of `tuple(torch.FloatTensor)` of length `config.n_layers`, with each tuple having 2 tensors of + shape `(batch_size, num_heads, sequence_length, embed_size_per_head)`) and 2 additional tensors of + shape `(batch_size, num_heads, encoder_sequence_length, embed_size_per_head)`. 
The two additional + tensors are only required when the model is used as a decoder in a Sequence to Sequence model. + + Contains pre-computed hidden-states (key and values in the self-attention blocks and in the + cross-attention blocks) that can be used (see `past_key_values` input) to speed up sequential decoding. + + If `past_key_values` are used, the user can optionally input only the last `decoder_input_ids` (those + that don't have their past key value states given to this model) of shape `(batch_size, 1)` instead of + all `decoder_input_ids` of shape `(batch_size, sequence_length)`. + inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*): + Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. + This is useful if you want more control over how to convert `input_ids` indices into associated vectors + than the model's internal embedding lookup matrix. + labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*): + Labels for computing the masked language modeling loss. Indices should either be in `[0, ..., + config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored + (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`. + use_cache (`bool`, *optional*): + If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding + (see `past_key_values`). + output_attentions (`bool`, *optional*): + Whether or not to return the attentions tensors of all attention layers. See `attentions` under + returned tensors for more detail. + output_hidden_states (`bool`, *optional*): + Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors + for more detail. + return_dict (`bool`, *optional*): + Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple. + + Returns: + + Example: + + ```python + >>> from transformers import GPT2Tokenizer, OPTForCausalLM + + >>> model = OPTForCausalLM.from_pretrained("facebook/opt-350m") + >>> tokenizer = GPT2Tokenizer.from_pretrained("facebook/opt-350m") + + >>> prompt = "Hey, are you consciours? Can you talk to me?" + >>> inputs = tokenizer(prompt, return_tensors="pt") + + >>> # Generate + >>> generate_ids = model.generate(inputs.input_ids, max_length=30) + >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0] + "Hey, are you consciours? Can you talk to me?\nI'm not consciours, but I can talk to you." 
+ ```""" + + output_attentions = ( + output_attentions if output_attentions is not None else + self.config.output_attentions) + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else + self.config.output_hidden_states) + return_dict = ( + return_dict + if return_dict is not None else self.config.use_return_dict) + + # decoder outputs consists of (dec_features, layer_state, dec_hidden, dec_attn) + outputs = self.model.decoder( + input_ids=input_ids, + attention_mask=attention_mask, + head_mask=head_mask, + past_key_values=past_key_values, + inputs_embeds=inputs_embeds, + query_embeds=query_embeds, + use_cache=use_cache, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + ) + + logits = self.lm_head(outputs[0]).contiguous() + + loss = None + if labels is not None: + logits = logits[:, -labels.size(1):, :] + + # Shift so that tokens < n predict n + shift_logits = logits[..., :-1, :].contiguous() + shift_labels = labels[..., 1:].contiguous() + # Flatten the tokens + loss_fct = CrossEntropyLoss(reduction=reduction) + loss = loss_fct( + shift_logits.view(-1, self.config.vocab_size), + shift_labels.view(-1)) + if reduction == 'none': + loss = loss.view(shift_logits.size(0), -1).sum(1) + + if not return_dict: + output = (logits, ) + outputs[1:] + return (loss, ) + output if loss is not None else output + + return CausalLMOutputWithPast( + loss=loss, + logits=logits, + past_key_values=outputs.past_key_values, + hidden_states=outputs.hidden_states, + attentions=outputs.attentions, + ) + + def prepare_inputs_for_generation( + self, + input_ids=None, + inputs_embeds=None, + query_embeds=None, + past_key_values=None, + attention_mask=None, + use_cache=None, + **kwargs, + ): + # if model is used as a decoder in encoder-decoder model, the decoder attention mask is created on the fly + if attention_mask is None: + if input_ids is not None: + attention_mask = input_ids.new_ones(input_ids.shape) + if past_key_values: + input_ids = input_ids[:, -1:] + query_embeds = None + # first step, decoder_cached_states are empty + # if `inputs_embeds` are passed, we only want to use them in the 1st generation step + if inputs_embeds is not None and past_key_values is None: + model_inputs = {'inputs_embeds': inputs_embeds} + else: + model_inputs = {'input_ids': input_ids} + + model_inputs.update({ + 'query_embeds': query_embeds, + 'attention_mask': attention_mask, + 'past_key_values': past_key_values, + 'use_cache': use_cache, + }) + return model_inputs + + @staticmethod + def _reorder_cache(past, beam_idx): + reordered_past = () + for layer_past in past: + reordered_past += (tuple( + past_state.index_select(0, beam_idx) + for past_state in layer_past), ) + return reordered_past diff --git a/mmpretrain/models/multimodal/chinese_clip/__init__.py b/mmpretrain/models/multimodal/chinese_clip/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..460e9e6a6be748113df029ad76bc0934ab7704d3 --- /dev/null +++ b/mmpretrain/models/multimodal/chinese_clip/__init__.py @@ -0,0 +1,5 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+from .bert import BertModelCN +from .chinese_clip import ChineseCLIP, ModifiedResNet + +__all__ = ['ChineseCLIP', 'ModifiedResNet', 'BertModelCN'] diff --git a/mmpretrain/models/multimodal/chinese_clip/bert.py b/mmpretrain/models/multimodal/chinese_clip/bert.py new file mode 100644 index 0000000000000000000000000000000000000000..4e8dc7322a9aaddb0f5e02f8b70597ba08a8b925 --- /dev/null +++ b/mmpretrain/models/multimodal/chinese_clip/bert.py @@ -0,0 +1,263 @@ +# Copyright (c) OpenMMLab. All rights reserved. +# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. +# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved. + +# flake8: noqa +import math + +import torch +from torch import nn +from torch.utils.checkpoint import checkpoint + +try: + from transformers.models.bert.configuration_bert import BertConfig +except: + BertConfig = None + +from mmpretrain.registry import MODELS +from ..blip.language_model import BertAttention, BertIntermediate, BertOutput + + +def gelu(x): + """Original Implementation of the gelu activation function in Google Bert + repo when initially created. + + For information: OpenAI GPT's gelu is slightly different (and gives + slightly different results): + 0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3)))) + Also see https://arxiv.org/abs/1606.08415 + """ # noqa + return x * 0.5 * (1.0 + torch.erf(x / math.sqrt(2.0))) + + +def gelu_new(x): + """Implementation of the gelu activation function currently in Google Bert + repo (identical to OpenAI GPT) https://arxiv.org/abs/1606.08415.""" + return 0.5 * x * (1 + torch.tanh( + math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3)))) + + +def swish(x): + return x * torch.sigmoid(x) + + +ACT2FN = { + 'gelu': gelu, + 'relu': torch.nn.functional.relu, + 'swish': swish, + 'gelu_new': gelu_new +} + + +class BertEmbeddings(nn.Module): + """Construct the embeddings from word, position and token_type + embeddings.""" + + def __init__(self, config): + super(BertEmbeddings, self).__init__() + self.word_embeddings = nn.Embedding( + config.vocab_size, config.hidden_size, padding_idx=0) + self.position_embeddings = nn.Embedding(config.max_position_embeddings, + config.hidden_size) + self.token_type_embeddings = nn.Embedding(config.type_vocab_size, + config.hidden_size) + + # self.LayerNorm is not snake-cased to stick with TensorFlow model + # variable name and be able to load any TensorFlow checkpoint file + self.LayerNorm = nn.LayerNorm( + config.hidden_size, eps=config.layer_norm_eps) + self.dropout = nn.Dropout(config.hidden_dropout_prob) + + def forward(self, input_ids, token_type_ids=None, position_ids=None): + seq_length = input_ids.size(1) + if position_ids is None: + position_ids = torch.arange( + seq_length, dtype=torch.long, device=input_ids.device) + position_ids = position_ids.unsqueeze(0).expand_as(input_ids) + if token_type_ids is None: + token_type_ids = torch.zeros_like(input_ids) + + words_embeddings = self.word_embeddings(input_ids) + position_embeddings = self.position_embeddings(position_ids) + token_type_embeddings = self.token_type_embeddings(token_type_ids) + + embeddings = words_embeddings + position_embeddings \ + + token_type_embeddings + embeddings = self.LayerNorm(embeddings) + embeddings = self.dropout(embeddings) + return embeddings + + +class BertLayer(nn.Module): + + def __init__(self, config): + super(BertLayer, self).__init__() + self.attention = BertAttention(config) + self.intermediate = BertIntermediate(config) + self.output = 
BertOutput(config) + + def forward(self, hidden_states, attention_mask=None, head_mask=None): + attention_outputs = self.attention(hidden_states, attention_mask, + head_mask) + attention_output = attention_outputs[0] + intermediate_output = self.intermediate(attention_output) + layer_output = self.output(intermediate_output, attention_output) + outputs = (layer_output, ) + attention_outputs[ + 1:] # add attentions if we output them + if len(outputs) == 1: + return outputs[0] + return outputs + + +class BertEncoder(nn.Module): + + def __init__(self, config): + super(BertEncoder, self).__init__() + self.output_attentions = config.output_attentions + self.output_hidden_states = config.output_hidden_states + self.grad_checkpointing = False + self.layer = nn.ModuleList( + [BertLayer(config) for _ in range(config.num_hidden_layers)]) + + def forward(self, hidden_states, attention_mask=None, head_mask=None): + all_hidden_states = () + all_attentions = () + for i, layer_module in enumerate(self.layer): + if self.output_hidden_states: + all_hidden_states = all_hidden_states + (hidden_states, ) + + if self.grad_checkpointing and not torch.jit.is_scripting(): + layer_outputs = checkpoint(layer_module, hidden_states, + attention_mask, head_mask[i]) + else: + layer_outputs = layer_module(hidden_states, attention_mask, + head_mask[i]) + if not isinstance(layer_outputs, tuple): + layer_outputs = (layer_outputs, ) + hidden_states = layer_outputs[0] + + if self.output_attentions: + all_attentions = all_attentions + (layer_outputs[1], ) + + # Add last layer + if self.output_hidden_states: + all_hidden_states = all_hidden_states + (hidden_states, ) + + outputs = (hidden_states, ) + if self.output_hidden_states: + outputs = outputs + (all_hidden_states, ) + if self.output_attentions: + outputs = outputs + (all_attentions, ) + # last-layer hidden state, (all hidden states), (all attentions) + return outputs + + +class BertPreTrainedModel(nn.Module): + base_model_prefix = 'bert' + + def __init__(self, config): + super(BertPreTrainedModel, self).__init__() + self.config = config + + def _init_weights(self, module): + """Initialize the weights.""" + if isinstance(module, (nn.Linear, nn.Embedding)): + # Slightly different from the TF version + # which uses truncated_normal for initialization + # cf https://github.com/pytorch/pytorch/pull/5617 + module.weight.data.normal_( + mean=0.0, std=self.config.initializer_range) + elif isinstance(module, nn.LayerNorm): + module.bias.data.zero_() + module.weight.data.fill_(1.0) + if isinstance(module, nn.Linear) and module.bias is not None: + module.bias.data.zero_() + + +@MODELS.register_module() +class BertModelCN(BertPreTrainedModel): + """The BERT model implementation for Chinese CLIP.""" + + def __init__(self, config): + config = BertConfig.from_dict(config) + super(BertModelCN, self).__init__(config) + + self.embeddings = BertEmbeddings(config) + self.encoder = BertEncoder(config) + + self.apply(self._init_weights) + + @torch.jit.ignore + def set_grad_checkpointing(self, enable=True): + if enable: + assert not self.config.output_attentions, \ + 'Grad checkpointing is currently conflict with ' \ + 'output_attentions for BertEncoder, ' \ + 'please set it to False in BertConfig' + + self.encoder.grad_checkpointing = enable + + def forward(self, + input_ids, + attention_mask=None, + token_type_ids=None, + position_ids=None, + head_mask=None): + if attention_mask is None: + attention_mask = torch.ones_like(input_ids) + if token_type_ids is None: + token_type_ids = 
torch.zeros_like(input_ids) + + # We create a 3D attention mask from a 2D tensor mask. + # Sizes are [batch_size, 1, 1, to_seq_length] + # So we can broadcast to [batch_size, num_heads, from_seq_length, to_seq_length] + # this attention mask is more simple than the triangular masking of causal attention + # used in OpenAI GPT, we just need to prepare the broadcast dimension here. + extended_attention_mask = attention_mask.unsqueeze(1).unsqueeze(2) + + # Since attention_mask is 1.0 for positions we want to attend and 0.0 for + # masked positions, this operation will create a tensor which is 0.0 for + # positions we want to attend and -10000.0 for masked positions. + # Since we are adding it to the raw scores before the softmax, this is + # effectively the same as removing these entirely. + extended_attention_mask = extended_attention_mask.to( + dtype=next(self.parameters()).dtype) # fp16 compatibility + extended_attention_mask = (1.0 - extended_attention_mask) * -10000.0 + + # Prepare head mask if needed + # 1.0 in head_mask indicate we keep the head + # attention_probs has shape bsz x n_heads x N x N + # input head_mask has shape [num_heads] or [num_hidden_layers x num_heads] + # and head_mask is converted to shape [num_hidden_layers x batch x num_heads x seq_length x seq_length] + if head_mask is not None: + if head_mask.dim() == 1: + head_mask = head_mask.unsqueeze(0).unsqueeze(0).unsqueeze( + -1).unsqueeze(-1) + head_mask = head_mask.expand(self.config.num_hidden_layers, -1, + -1, -1, -1) + elif head_mask.dim() == 2: + head_mask = head_mask.unsqueeze(1).unsqueeze(-1).unsqueeze( + -1) # We can specify head_mask for each layer + head_mask = head_mask.to(dtype=next(self.parameters( + )).dtype) # switch to fload if need + fp16 compatibility + else: + head_mask = [None] * self.config.num_hidden_layers + + embedding_output = self.embeddings( + input_ids, + position_ids=position_ids, + token_type_ids=token_type_ids) + encoder_outputs = self.encoder( + embedding_output, extended_attention_mask, head_mask=head_mask) + sequence_output = encoder_outputs[0] + # pooled_output = self.pooler(sequence_output) + pooled_output = None + + # add hidden_states and attentions if they are here + outputs = ( + sequence_output, + pooled_output, + ) + encoder_outputs[1:] + + # sequence_output, pooled_output, (hidden_states), (attentions) + return outputs diff --git a/mmpretrain/models/multimodal/chinese_clip/chinese_clip.py b/mmpretrain/models/multimodal/chinese_clip/chinese_clip.py new file mode 100644 index 0000000000000000000000000000000000000000..40af5643602685be4d0e37331609bdecae184de9 --- /dev/null +++ b/mmpretrain/models/multimodal/chinese_clip/chinese_clip.py @@ -0,0 +1,446 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
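+# This module bundles the pieces needed for Chinese CLIP in mmpretrain: the
+# ``ModifiedResNet`` vision tower (with its ``Bottleneck`` and
+# ``AttentionPool2d`` building blocks) and the ``ChineseCLIP`` wrapper model
+# used for zero-shot classification with text prototypes.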
+from collections import OrderedDict +from typing import List, Optional, Tuple, Union + +import numpy as np +import torch +import torch.nn.functional as F +from mmengine.model import BaseModel, BaseModule +from torch import nn + +from mmpretrain.datasets.categories import CIFAR100_CATEGORIES_CN +from mmpretrain.registry import MODELS, TOKENIZER +from mmpretrain.structures import DataSample +from mmpretrain.utils import track_on_main_process +from .utils import OPENAI_PROMPT + +PROTOTYPE_MAP = {'cifar100': CIFAR100_CATEGORIES_CN} +PROMPT_MAP = {'openai': OPENAI_PROMPT} + + +class Bottleneck(nn.Module): + expansion = 4 + + def __init__(self, inplanes, planes, stride=1): + super().__init__() + + self.conv1 = nn.Conv2d(inplanes, planes, 1, bias=False) + self.bn1 = nn.BatchNorm2d(planes) + + self.conv2 = nn.Conv2d(planes, planes, 3, padding=1, bias=False) + self.bn2 = nn.BatchNorm2d(planes) + + self.avgpool = nn.AvgPool2d(stride) if stride > 1 else nn.Identity() + + self.conv3 = nn.Conv2d(planes, planes * self.expansion, 1, bias=False) + self.bn3 = nn.BatchNorm2d(planes * self.expansion) + + self.relu = nn.ReLU(inplace=True) + self.downsample = None + self.stride = stride + + if stride > 1 or inplanes != planes * Bottleneck.expansion: + self.downsample = nn.Sequential( + OrderedDict([('-1', nn.AvgPool2d(stride)), + ('0', + nn.Conv2d( + inplanes, + planes * self.expansion, + 1, + stride=1, + bias=False)), + ('1', nn.BatchNorm2d(planes * self.expansion))])) + + def forward(self, x: torch.Tensor): + identity = x + + out = self.relu(self.bn1(self.conv1(x))) + out = self.relu(self.bn2(self.conv2(out))) + out = self.avgpool(out) + out = self.bn3(self.conv3(out)) + + if self.downsample is not None: + identity = self.downsample(x) + + out += identity + out = self.relu(out) + return out + + +class AttentionPool2d(nn.Module): + + def __init__(self, + spacial_dim: int, + embed_dim: int, + num_heads: int, + output_dim: int = None): + super().__init__() + self.positional_embedding = nn.Parameter( + torch.randn(spacial_dim**2 + 1, embed_dim) / embed_dim**0.5) + self.k_proj = nn.Linear(embed_dim, embed_dim) + self.q_proj = nn.Linear(embed_dim, embed_dim) + self.v_proj = nn.Linear(embed_dim, embed_dim) + self.c_proj = nn.Linear(embed_dim, output_dim or embed_dim) + self.num_heads = num_heads + + def forward(self, x): + x = x.reshape(x.shape[0], x.shape[1], + x.shape[2] * x.shape[3]).permute(2, 0, + 1) # NCHW -> (HW)NC + x = torch.cat([x.mean(dim=0, keepdim=True), x], dim=0) # (HW+1)NC + x = x + self.positional_embedding[:, None, :].to(x.dtype) # (HW+1)NC + x, _ = F.multi_head_attention_forward( + query=x, + key=x, + value=x, + embed_dim_to_check=x.shape[-1], + num_heads=self.num_heads, + q_proj_weight=self.q_proj.weight, + k_proj_weight=self.k_proj.weight, + v_proj_weight=self.v_proj.weight, + in_proj_weight=None, + in_proj_bias=torch.cat( + [self.q_proj.bias, self.k_proj.bias, self.v_proj.bias]), + bias_k=None, + bias_v=None, + add_zero_attn=False, + dropout_p=0, + out_proj_weight=self.c_proj.weight, + out_proj_bias=self.c_proj.bias, + use_separate_proj_weight=True, + training=self.training, + need_weights=False) + + return x[0] + + +@MODELS.register_module() +class ModifiedResNet(BaseModule): + """A modified ResNet contains the following changes: + + - Apply deep stem with an average pool instead of a max pool. 
+ - Performs anti-aliasing strided convolutions, where an avgpool is + prepended to convolutions with stride > 1 + - The final pooling layer is a QKV attention instead of an average pool + """ # noqa + + arch_settings = { + 50: (Bottleneck, (3, 4, 6, 3)), + 101: (Bottleneck, (3, 4, 23, 3)), + 152: (Bottleneck, (3, 8, 36, 3)) + } + + def __init__(self, + depth: int = 50, + base_channels: int = 64, + input_size: int = 224, + num_attn_heads: int = 32, + output_dim: int = 1024, + init_cfg: Optional[dict] = None): + super().__init__(init_cfg=init_cfg) + self.input_size = input_size + self.block, stage_blocks = self.arch_settings[depth] + + # the 3-layer stem + self.conv1 = nn.Conv2d( + 3, + base_channels // 2, + kernel_size=3, + stride=2, + padding=1, + bias=False) + self.bn1 = nn.BatchNorm2d(base_channels // 2) + self.conv2 = nn.Conv2d( + base_channels // 2, + base_channels // 2, + kernel_size=3, + padding=1, + bias=False) + self.bn2 = nn.BatchNorm2d(base_channels // 2) + self.conv3 = nn.Conv2d( + base_channels // 2, + base_channels, + kernel_size=3, + padding=1, + bias=False) + self.bn3 = nn.BatchNorm2d(base_channels) + self.avgpool = nn.AvgPool2d(2) + self.relu = nn.ReLU(inplace=True) + + # residual layers + # this is a *mutable* variable used during construction + self._inplanes = base_channels + self.layer1 = self._make_layer(base_channels, stage_blocks[0]) + self.layer2 = self._make_layer( + base_channels * 2, stage_blocks[1], stride=2) + self.layer3 = self._make_layer( + base_channels * 4, stage_blocks[2], stride=2) + self.layer4 = self._make_layer( + base_channels * 8, stage_blocks[3], stride=2) + + embed_dim = base_channels * 32 + self.attnpool = AttentionPool2d(input_size // 32, embed_dim, + num_attn_heads, output_dim) + + def _make_layer(self, planes, blocks, stride=1): + layers = [Bottleneck(self._inplanes, planes, stride)] + + self._inplanes = planes * Bottleneck.expansion + for _ in range(1, blocks): + layers.append(Bottleneck(self._inplanes, planes)) + + return nn.Sequential(*layers) + + def forward(self, x): + + def stem(x): + for conv, bn in [(self.conv1, self.bn1), (self.conv2, self.bn2), + (self.conv3, self.bn3)]: + x = self.relu(bn(conv(x))) + x = self.avgpool(x) + return x + + x = x.type(self.conv1.weight.dtype) + x = stem(x) + x = self.layer1(x) + x = self.layer2(x) + x = self.layer3(x) + x = self.layer4(x) + x = self.attnpool(x) + + return x + + +@MODELS.register_module() +class ChineseCLIP(BaseModel): + """The implementation of `ChineseCLIP `_. + + Args: + vision_backbone (dict): Config dict for vision backbone. + text_backbone (dict): Config dict for text backbone. + tokenizer (dict): Config dict for text tokenizer. + proj_dim (int): Projection dimension for similarity computation. + text_prototype (str): Text prototype, which can be a key in + `PROTOTYPE_MAP` or list of text. + text_prompt (str): The prompt for text prototype. Defaults to 'openai'. + context_length (int): The context length to use. Defaults to 52. + data_preprocessor (Union[dict, nn.Module], optional): The config for + preprocessing input data. If None or no specified type, it will use + "MultiModalDataPreprocessor" as type. + See :class:`MultiModalDataPreprocessor` for more details. + Defaults to None. + init_cfg (dict, optional): The config to control the initialization. + Defaults to None. 
+ """ + + def __init__(self, + vision_backbone: dict, + text_backbone: dict, + tokenizer: dict, + proj_dim: int, + text_prototype: Union[str, List[str]], + text_prompt: str = 'openai', + context_length: int = 52, + data_preprocessor: Optional[dict] = None, + init_cfg: Optional[dict] = None): + if data_preprocessor is None: + data_preprocessor = {} + data_preprocessor.setdefault('type', 'MultiModalDataPreprocessor') + data_preprocessor = MODELS.build(data_preprocessor) + + super().__init__( + data_preprocessor=data_preprocessor, init_cfg=init_cfg) + + self.vision_backbone = MODELS.build(vision_backbone) + self.text_backbone = MODELS.build(text_backbone) + + if not isinstance(self.vision_backbone, ModifiedResNet): + self.vision_projection = nn.Parameter( + torch.empty(self.vision_backbone.embed_dims, proj_dim)) + text_hidden_size = text_backbone['config']['hidden_size'] + self.text_projection = nn.Parameter( + torch.empty(text_hidden_size, proj_dim)) + + self.logit_scale = nn.Parameter(torch.ones([]) * np.log(1 / 0.07)) + + self.tokenizer = TOKENIZER.build(tokenizer) + self.context_length = context_length + + # for zero-shot classification + if isinstance(text_prototype, + str) and text_prototype in PROTOTYPE_MAP.keys(): + self.prototype = PROTOTYPE_MAP[text_prototype] + else: + self.prototype = text_prototype + self.text_prototype_embeds = None + + self.prompt = PROMPT_MAP[text_prompt] + + def forward( + self, + images: torch.Tensor, + data_samples: Optional[list] = None, + mode: str = 'predict', + **kwargs, + ): + """The unified entry for a forward process in both training and test. + The method accepts the following modes: + + - "predict": Forward and return a list of data samples contain the + predict results. + + Args: + images (torch.Tensor): the preprocessed image tensor of shape + ``(N, C, H, W)``. + data_samples (List[DataSample], optional): The annotation data + of every samples. Defaults to None. + mode (str): Return what kind of value. Defaults to 'predict'. + """ + if mode == 'predict': + return self.predict(images, data_samples, **kwargs) + else: + raise RuntimeError(f'Invalid mode "{mode}".') + + def extract_image_feat(self, images: torch.Tensor) -> torch.Tensor: + """The function to extract image latent features.""" + if isinstance(self.vision_backbone, ModifiedResNet): + return self.vision_backbone(images) + return self.vision_backbone(images)[-1] @ self.vision_projection + + def extract_text_feat(self, texts: torch.Tensor) -> torch.Tensor: + """The function to extract text latent features.""" + pad_index = self.tokenizer.vocab['[PAD]'] + attn_mask = texts.ne(pad_index) + # [batch_size, seq_length, hidden_size] + x = self.text_backbone(texts, attention_mask=attn_mask)[0] + return x[:, 0, :] @ self.text_projection + + def extract_feat( + self, images: torch.Tensor, + texts: torch.Tensor) -> Union[torch.Tensor, Tuple[torch.Tensor]]: + """The function to extract image and text latent features, the input + image or text can not both be None.""" + + assert images is not None or texts is not None, \ + 'text and image cannot both be None!' 
+ if images is None: + return self.extract_text_feat(texts) + elif texts is None: + return self.extract_image_feat(images) + + image_features = self.extract_image_feat(images) + text_features = self.extract_text_feat(texts) + + image_features = image_features / image_features.norm( + dim=-1, keepdim=True) + text_features = text_features / text_features.norm( + dim=-1, keepdim=True) + + return image_features, text_features + + def compute_similarity(self, images, texts): + """Extract images and texts features and compute cosine similarity.""" + image_features, text_features = self.extract_feat( + images=images, texts=texts) + + # cosine similarity as logits + logit_scale = self.logit_scale.exp() + logits_per_image = logit_scale * image_features @ text_features.t() + logits_per_text = logits_per_image.t() + + # shape (N, N) + return logits_per_image, logits_per_text + + def predict(self, + images: torch.Tensor, + data_samples: DataSample = None) -> DataSample: + """Predict the classes of the input images. + + The prediction is for zero-shot classification and the text prototypes + will be prepared in thisfunction. + + Args: + images (torch.Tensor): The input images. + data_samples (DataSample): The data samples with information from + dataset. + + Returns: + DataSample: The results of prediction. + """ + + if self.text_prototype_embeds is None: + self.prepare_text_prototype(device=images.device) + + image_features = self.extract_image_feat(images=images) + image_features /= image_features.norm(dim=-1, keepdim=True) + + # cosine similarity as logits + logits_per_image = image_features @ self.text_prototype_embeds.to( + image_features.device) * self.logit_scale.exp() + + pred_scores = F.softmax(logits_per_image, dim=1) + pred_labels = pred_scores.argmax(dim=1, keepdim=True).detach() + + out_data_samples = [] + if data_samples is None: + data_samples = [None for _ in range(pred_scores.size(0))] + + for data_sample, score, label in zip(data_samples, pred_scores, + pred_labels): + if data_sample is None: + data_sample = DataSample() + + data_sample.set_pred_score(score).set_pred_label(label) + out_data_samples.append(data_sample) + return out_data_samples + + def prepare_text_prototype(self, device) -> None: + """The function to prepare text prototypes with prompt.""" + class_embeddings = [] + for classname in track_on_main_process(self.prototype, + 'Prepare text prototype...'): + # format with class + texts = [prompt(classname) for prompt in self.prompt] + tokenized_texts = self.tokenize(texts) + class_features = self.extract_text_feat(tokenized_texts.to(device)) + class_features /= class_features.norm(dim=-1, keepdim=True) + class_feature = class_features.mean(dim=0) + class_feature /= class_feature.norm() + class_embeddings.append(class_feature) + self.text_prototype_embeds = torch.stack( + class_embeddings, dim=1).to(device) + + def tokenize(self, texts: Union[str, List[str]]) -> torch.LongTensor: + """Returns the tokenized representation of given input string(s) + + Args: + texts (Union[str, List[str]]): An input string or a list of input + strings to tokenize + context_length (int): The context length to use. Defaults to 52. + + Returns: + torch.Tensor: Resulting tokens. 
+ """ + if isinstance(texts, str): + texts = [texts] + + all_tokens = [] + for text in texts: + # adapt the text to Chinese BERT vocab + text = text.lower().replace('“', "\"").replace('”', "\"") + + # add special tokens + all_tokens.append( + [self.tokenizer.vocab['[CLS]']] + + self.tokenizer.convert_tokens_to_ids( + self.tokenizer.tokenize(text))[:self.context_length - 2] + + [self.tokenizer.vocab['[SEP]']]) + + result = torch.zeros( + len(all_tokens), self.context_length, dtype=torch.long) + + for i, tokens in enumerate(all_tokens): + assert len(tokens) <= self.context_length + result[i, :len(tokens)] = torch.tensor(tokens) + + return result diff --git a/mmpretrain/models/multimodal/chinese_clip/utils.py b/mmpretrain/models/multimodal/chinese_clip/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..6964722bd3dbb05a6a59a1dc2c57c0a6e8692c31 --- /dev/null +++ b/mmpretrain/models/multimodal/chinese_clip/utils.py @@ -0,0 +1,186 @@ +# Copyright (c) OpenMMLab. All rights reserved. +OPENAI_PROMPT = [ + lambda c: f'{c}的照片', + lambda c: f'质量差的{c}的照片', + lambda c: f'许多{c}的照片', + lambda c: f'{c}的雕塑', + lambda c: f'难以看到{c}的照片', + lambda c: f'{c}的低分辨率照片', + lambda c: f'{c}的渲染', + lambda c: f'涂鸦{c}', + lambda c: f'{c}的糟糕照片', + lambda c: f'{c}的裁剪照片', + lambda c: f'{c}的纹身', + lambda c: f'{c}的刺绣照片', + lambda c: f'很难看到{c}的照片', + lambda c: f'{c}的明亮照片', + lambda c: f'一张干净的{c}的照片', + lambda c: f'一张包含{c}的照片', + lambda c: f'{c}的深色照片', + lambda c: f'{c}的手绘画', + lambda c: f'我的{c}的照片', + lambda c: f'不自然的{c}的照片', + lambda c: f'一张酷的{c}的照片', + lambda c: f'{c}的特写照片', + lambda c: f'{c}的黑白照片', + lambda c: f'一幅{c}的画', + lambda c: f'一幅{c}的绘画', + lambda c: f'一张{c}的像素照片', + lambda c: f'{c}的雕像', + lambda c: f'一张{c}的明亮照片', + lambda c: f'{c}的裁剪照片', + lambda c: f'人造的{c}的照片', + lambda c: f'一张关于{c}的照片', + lambda c: f'损坏的{c}的jpeg照片', + lambda c: f'{c}的模糊照片', + lambda c: f'{c}的相片', + lambda c: f'一张{c}的好照片', + lambda c: f'{c}的渲染照', + lambda c: f'视频游戏中的{c}', + lambda c: f'一张{c}的照片', + lambda c: f'{c}的涂鸦', + lambda c: f'{c}的近距离照片', + lambda c: f'{c}的折纸', + lambda c: f'{c}在视频游戏中', + lambda c: f'{c}的草图', + lambda c: f'{c}的涂鸦照', + lambda c: f'{c}的折纸形状', + lambda c: f'低分辨率的{c}的照片', + lambda c: f'玩具{c}', + lambda c: f'{c}的副本', + lambda c: f'{c}的干净的照片', + lambda c: f'一张大{c}的照片', + lambda c: f'{c}的重现', + lambda c: f'一张漂亮的{c}的照片', + lambda c: f'一张奇怪的{c}的照片', + lambda c: f'模糊的{c}的照片', + lambda c: f'卡通{c}', + lambda c: f'{c}的艺术作品', + lambda c: f'{c}的素描', + lambda c: f'刺绣{c}', + lambda c: f'{c}的像素照', + lambda c: f'{c}的拍照', + lambda c: f'{c}的损坏的照片', + lambda c: f'高质量的{c}的照片', + lambda c: f'毛绒玩具{c}', + lambda c: f'漂亮的{c}的照片', + lambda c: f'小{c}的照片', + lambda c: f'照片是奇怪的{c}', + lambda c: f'漫画{c}', + lambda c: f'{c}的艺术照', + lambda c: f'{c}的图形', + lambda c: f'大{c}的照片', + lambda c: f'黑白的{c}的照片', + lambda c: f'{c}毛绒玩具', + lambda c: f'一张{c}的深色照片', + lambda c: f'{c}的摄影图', + lambda c: f'{c}的涂鸦照', + lambda c: f'玩具形状的{c}', + lambda c: f'拍了{c}的照片', + lambda c: f'酷酷的{c}的照片', + lambda c: f'照片里的小{c}', + lambda c: f'{c}的刺青', + lambda c: f'{c}的可爱的照片', + lambda c: f'一张{c}可爱的照片', + lambda c: f'{c}可爱图片', + lambda c: f'{c}酷炫图片', + lambda c: f'一张{c}的酷炫的照片', + lambda c: f'一张{c}的酷炫图片', + lambda c: f'这是{c}', + lambda c: f'{c}的好看照片', + lambda c: f'一张{c}的好看的图片', + lambda c: f'{c}的好看图片', + lambda c: f'{c}的照片。', + lambda c: f'质量差的{c}的照片。', + lambda c: f'许多{c}的照片。', + lambda c: f'{c}的雕塑。', + lambda c: f'难以看到{c}的照片。', + lambda c: f'{c}的低分辨率照片。', + lambda c: f'{c}的渲染。', + lambda c: f'涂鸦{c}。', + lambda c: f'{c}的糟糕照片。', + lambda c: f'{c}的裁剪照片。', + lambda 
c: f'{c}的纹身。', + lambda c: f'{c}的刺绣照片。', + lambda c: f'很难看到{c}的照片。', + lambda c: f'{c}的明亮照片。', + lambda c: f'一张干净的{c}的照片。', + lambda c: f'一张包含{c}的照片。', + lambda c: f'{c}的深色照片。', + lambda c: f'{c}的手绘画。', + lambda c: f'我的{c}的照片。', + lambda c: f'不自然的{c}的照片。', + lambda c: f'一张酷的{c}的照片。', + lambda c: f'{c}的特写照片。', + lambda c: f'{c}的黑白照片。', + lambda c: f'一幅{c}的画。', + lambda c: f'一幅{c}的绘画。', + lambda c: f'一张{c}的像素照片。', + lambda c: f'{c}的雕像。', + lambda c: f'一张{c}的明亮照片。', + lambda c: f'{c}的裁剪照片。', + lambda c: f'人造的{c}的照片。', + lambda c: f'一张关于{c}的照片。', + lambda c: f'损坏的{c}的jpeg照片。', + lambda c: f'{c}的模糊照片。', + lambda c: f'{c}的相片。', + lambda c: f'一张{c}的好照片。', + lambda c: f'{c}的渲染照。', + lambda c: f'视频游戏中的{c}。', + lambda c: f'一张{c}的照片。', + lambda c: f'{c}的涂鸦。', + lambda c: f'{c}的近距离照片。', + lambda c: f'{c}的折纸。', + lambda c: f'{c}在视频游戏中。', + lambda c: f'{c}的草图。', + lambda c: f'{c}的涂鸦照。', + lambda c: f'{c}的折纸形状。', + lambda c: f'低分辨率的{c}的照片。', + lambda c: f'玩具{c}。', + lambda c: f'{c}的副本。', + lambda c: f'{c}的干净的照片。', + lambda c: f'一张大{c}的照片。', + lambda c: f'{c}的重现。', + lambda c: f'一张漂亮的{c}的照片。', + lambda c: f'一张奇怪的{c}的照片。', + lambda c: f'模糊的{c}的照片。', + lambda c: f'卡通{c}。', + lambda c: f'{c}的艺术作品。', + lambda c: f'{c}的素描。', + lambda c: f'刺绣{c}。', + lambda c: f'{c}的像素照。', + lambda c: f'{c}的拍照。', + lambda c: f'{c}的损坏的照片。', + lambda c: f'高质量的{c}的照片。', + lambda c: f'毛绒玩具{c}。', + lambda c: f'漂亮的{c}的照片。', + lambda c: f'小{c}的照片。', + lambda c: f'照片是奇怪的{c}。', + lambda c: f'漫画{c}。', + lambda c: f'{c}的艺术照。', + lambda c: f'{c}的图形。', + lambda c: f'大{c}的照片。', + lambda c: f'黑白的{c}的照片。', + lambda c: f'{c}毛绒玩具。', + lambda c: f'一张{c}的深色照片。', + lambda c: f'{c}的摄影图。', + lambda c: f'{c}的涂鸦照。', + lambda c: f'玩具形状的{c}。', + lambda c: f'拍了{c}的照片。', + lambda c: f'酷酷的{c}的照片。', + lambda c: f'照片里的小{c}。', + lambda c: f'{c}的刺青。', + lambda c: f'{c}的可爱的照片。', + lambda c: f'一张{c}可爱的照片。', + lambda c: f'{c}可爱图片。', + lambda c: f'{c}酷炫图片。', + lambda c: f'一张{c}的酷炫的照片。', + lambda c: f'一张{c}的酷炫图片。', + lambda c: f'这是{c}。', + lambda c: f'{c}的好看照片。', + lambda c: f'一张{c}的好看的图片。', + lambda c: f'{c}的好看图片。', + lambda c: f'一种叫{c}的花的照片', + lambda c: f'一种叫{c}的食物的照片', + lambda c: f'{c}的卫星照片', +] diff --git a/mmpretrain/models/multimodal/flamingo/__init__.py b/mmpretrain/models/multimodal/flamingo/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e0bfd63b657f5f0f1517ad6d31bce2821cb372cd --- /dev/null +++ b/mmpretrain/models/multimodal/flamingo/__init__.py @@ -0,0 +1,5 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from .adapter import FlamingoLMAdapter +from .flamingo import Flamingo + +__all__ = ['Flamingo', 'FlamingoLMAdapter'] diff --git a/mmpretrain/models/multimodal/flamingo/adapter.py b/mmpretrain/models/multimodal/flamingo/adapter.py new file mode 100644 index 0000000000000000000000000000000000000000..69a635c2ecda4c5bb472062caa5c4c46a83de827 --- /dev/null +++ b/mmpretrain/models/multimodal/flamingo/adapter.py @@ -0,0 +1,90 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
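Each entry in the OPENAI_PROMPT list above is a callable that wraps a class name into a Chinese prompt string; ChineseCLIP.prepare_text_prototype formats every class name with every template, encodes the results, and averages the normalized text embeddings into one prototype per class. A minimal sketch of the formatting step only; the two templates are copied from the list above, and the class name '猫' ("cat") is just an illustrative input.

templates = [
    lambda c: f'{c}的照片',          # 'a photo of {c}'
    lambda c: f'一张包含{c}的照片',  # 'a photo containing {c}'
]
classname = '猫'  # hypothetical class name
texts = [prompt(classname) for prompt in templates]
print(texts)  # ['猫的照片', '一张包含猫的照片']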
+import random + +import torch.nn as nn + +from mmpretrain.registry import MODELS +from .modules import FlamingoLayer, GatedCrossAttentionBlock +from .utils import getattr_recursive, setattr_recursive + + +@MODELS.register_module() +class FlamingoLMAdapter: + """Mixin to add cross-attention layers to a language model.""" + + @classmethod + def extend_init( + cls, + base: object, + vis_hidden_size: int, + cross_attn_every_n_layers: int, + use_media_placement_augmentation: bool, + ): + """Initialize Flamingo by adding a new gated cross attn to the decoder. + + Store the media token id for computing the media locations. + + Args: + base (object): Base module could be any object that represent + a instance of language model. + vis_hidden_size: (int): Hidden size of vision embeddings. + cross_attn_every_n_layers: (int): Additional cross attn for + every n layers. + use_media_placement_augmentation: (bool): Whether to use media + placement augmentation. + """ + base.set_decoder_layers_attr_name('model.layers') + gated_cross_attn_layers = nn.ModuleList([ + GatedCrossAttentionBlock( + dim=base.config.hidden_size, dim_visual=vis_hidden_size) if + (layer_idx + 1) % cross_attn_every_n_layers == 0 else None + for layer_idx, _ in enumerate(base._get_decoder_layers()) + ]) + base._set_decoder_layers( + nn.ModuleList([ + FlamingoLayer(gated_cross_attn_layer, decoder_layer) + for gated_cross_attn_layer, decoder_layer in zip( + gated_cross_attn_layers, base._get_decoder_layers()) + ])) + base.use_media_placement_augmentation = use_media_placement_augmentation # noqa + base.initialized_flamingo = True + return base + + def set_decoder_layers_attr_name(self, decoder_layers_attr_name): + """Set decoder layers attribute name.""" + self.decoder_layers_attr_name = decoder_layers_attr_name + + def _get_decoder_layers(self): + """Get decoder layers according to attribute name.""" + return getattr_recursive(self, self.decoder_layers_attr_name) + + def _set_decoder_layers(self, value): + """Set decoder layers according to attribute name.""" + setattr_recursive(self, self.decoder_layers_attr_name, value) + + def forward(self, *input, **kwargs): + """Condition the Flamingo layers on the media locations before forward + function.""" + input_ids = kwargs['input_ids'] if 'input_ids' in kwargs else input[0] + media_locations = input_ids == self.media_token_id + attend_previous = ((random.random() < 0.5) + if self.use_media_placement_augmentation else False) + + for layer in self.get_decoder().layers: + layer.condition_media_locations(media_locations) + layer.condition_attend_previous(attend_previous) + + return super().forward( + *input, **kwargs) # Call the other parent's forward method + + def is_conditioned(self) -> bool: + """Check whether all decoder layers are already conditioned.""" + return all(layer.is_conditioned() + for layer in self._get_decoder_layers()) + + def clear_conditioned_layers(self): + """Clear all conditional layers.""" + for layer in self._get_decoder_layers(): + layer.condition_vis_x(None) + layer.condition_media_locations(None) + layer.condition_attend_previous(None) diff --git a/mmpretrain/models/multimodal/flamingo/flamingo.py b/mmpretrain/models/multimodal/flamingo/flamingo.py new file mode 100644 index 0000000000000000000000000000000000000000..abdd03328f4a22b0e4c2c37598d6e5517555994d --- /dev/null +++ b/mmpretrain/models/multimodal/flamingo/flamingo.py @@ -0,0 +1,322 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
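FlamingoLMAdapter.forward above conditions every FlamingoLayer on where the image-placeholder tokens sit in the text before delegating to the wrapped language model. A minimal sketch of that media-location computation; the token ids below are made up for illustration, whereas in the real model media_token_id comes from the tokenizer's added special token.

import torch

media_token_id = 32000  # hypothetical id of the image-placeholder special token
input_ids = torch.tensor([[101, 32000, 7, 8, 32000, 9]])
media_locations = input_ids == media_token_id
print(media_locations)
# tensor([[False,  True, False, False,  True, False]])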
+import re +from typing import List, Optional + +import torch +from mmengine.model import BaseModel + +from mmpretrain.registry import MODELS, TOKENIZER +from mmpretrain.structures import DataSample +from .modules import PerceiverResampler +from .utils import ExtendModule + + +@MODELS.register_module() +class Flamingo(BaseModel): + """The Open Flamingo model for multiple tasks. + + Args: + vision_encoder (dict): The config of the vision encoder. + lang_encoder (dict): The config of the language encoder. + tokenizer (dict): The tokenizer to encode the text. + task (int): The task to perform prediction. + zeroshot_prompt (str): Prompt used for zero-shot inference. + Defaults to 'Output:'. + shot_prompt_tmpl (str): Prompt used for few-shot inference. + Defaults to 'Output:{caption}<|endofchunk|>'. + final_prompt_tmpl (str): Final part of prompt used for inference. + Defaults to 'Output:'. + generation_cfg (dict): The extra generation config, accept the keyword + arguments of [~`transformers.GenerationConfig`]. + Defaults to an empty dict. + data_preprocessor (Optional[dict]): The config for preprocessing input + data. If None or no specified type, it will use + "MutimodalDataPreprocessor" as type. + See :class:`MutimodalDataPreprocessor` for more details. + Defaults to None. + init_cfg (dict, optional): The initialization config. Defaults to None. + """ + + support_tasks = {'caption', 'vqa'} + _no_split_modules = [ + 'TransformerEncoderLayer', 'PerceiverAttention', + 'GatedCrossAttentionBlock', 'FlamingoLayer' + ] + + def __init__( + self, + vision_encoder: dict, + lang_encoder: dict, + tokenizer: dict, + task: str = 'caption', + zeroshot_prompt: str = 'Output:', + shot_prompt_tmpl: str = 'Output:{caption}<|endofchunk|>', + final_prompt_tmpl: str = 'Output:', + generation_cfg: dict = dict(), + data_preprocessor: Optional[dict] = None, + init_cfg: Optional[dict] = None): + if data_preprocessor is None: + data_preprocessor = {} + if isinstance(data_preprocessor, dict): + data_preprocessor.setdefault('type', 'MultiModalDataPreprocessor') + data_preprocessor = MODELS.build(data_preprocessor) + + super().__init__( + init_cfg=init_cfg, data_preprocessor=data_preprocessor) + + if task not in self.support_tasks: + raise ValueError(f'Unsupported task {task}, please select ' + f'the task from {self.support_tasks}.') + self.task = task + + # init tokenizer + self.tokenizer = TOKENIZER.build(tokenizer) + # add Flamingo special tokens to the tokenizer + self.tokenizer.add_special_tokens( + {'additional_special_tokens': ['<|endofchunk|>', '']}) + self.tokenizer.bos_token_id = 1 + if self.tokenizer.pad_token is None: + # Issue: GPT models don't have a pad token, which we use to + # modify labels for the loss. 
+ self.tokenizer.add_special_tokens({'pad_token': ''}) + + # Template to format the prompt input + self.zeroshot_prompt = zeroshot_prompt + self.shot_prompt_tmpl = shot_prompt_tmpl + self.final_prompt_tmpl = final_prompt_tmpl + + # init vision encoder related modules + vision_encoder_weight = vision_encoder.pop('pretrained', None) + self.vision_encoder = MODELS.build(vision_encoder) + if vision_encoder_weight is not None: + from mmengine.runner.checkpoint import load_checkpoint + load_checkpoint( + self.vision_encoder, + vision_encoder_weight, + map_location='cpu', + revise_keys=[(r'^backbone\.', '')], + ) + + self.perceiver = PerceiverResampler(dim=self.vision_encoder.embed_dims) + + # init language encoder related modules + self.lang_encoder = ExtendModule(**lang_encoder) + self.lang_encoder.resize_token_embeddings(len(self.tokenizer)) + self.lang_encoder.media_token_id = self.tokenizer.encode('')[-1] + + # other necessary parameters + self.eoc_token_id = self.tokenizer.encode('<|endofchunk|>')[-1] + self.generation_cfg = { + 'num_beams': 1, + 'max_new_tokens': None, + 'temperature': 1.0, + 'top_k': 0, + 'top_p': 1.0, + 'no_repeat_ngram_size': 0, + 'prefix_allowed_tokens_fn': None, + 'length_penalty': 1.0, + 'num_return_sequences': 1, + 'do_sample': False, + 'early_stopping': False, + **generation_cfg, + } + + if hasattr(self, 'register_load_state_dict_post_hook'): + self.register_load_state_dict_post_hook(self._load_adapter_hook) + + def forward( + self, + images: torch.Tensor, + data_samples: Optional[List[DataSample]] = None, + mode: str = 'loss', + ): + """The unified entry for a forward process in both training and test. + The method should accept only one mode "loss": + + - "loss": Forward and return a dict of losses according to the given + inputs and data samples. + + Note that this method doesn't handle neither back propagation nor + optimizer updating, which are done in the :meth:`train_step`. + + Args: + images (torch.Tensor): The input image tensor with different ndim + according to the inputs. + data_samples (List[DataSample], optional): The annotation + data of every samples. It's required if ``mode="loss"``. + Defaults to None. + mode (str): Return what kind of value. Defaults to 'loss'. + + Returns: + The return type depends on ``mode``. + - If ``mode="loss"``, return a dict of tensor. + """ + + if mode == 'loss': + return self.loss(images, data_samples) + elif mode == 'predict': + return self.predict(images, data_samples) + else: + raise RuntimeError(f'Invalid mode "{mode}".') + + def extract_vision_feats(self, images: torch.Tensor) -> torch.Tensor: + """Extract vision features. + + Args: + images (torch.Tensor): For zero-shot, the input images tensor is + with shape (B, C, H, W), for few-shot, which is + (B, T_img, C, H, W) in general. Images in the same chunk + are collated along T_img. Video data is not supported yet. + + Returns: + torch.Tensor: Return extracted features. + """ + if images.ndim == 4: + # (B, C, H, W) -> (B, 1, C, H, W) for zero-shot. 
+ images = images.unsqueeze(1) + b, T = images.shape[:2] + # b T c h w -> (b T) c h w + images = images.view(b * T, *images.shape[-3:]) + + with torch.no_grad(): + vision_feats = self.vision_encoder(images)[-1][:, 1:] + + # (b T F) v d -> b T F v d Only support F=1 here + vision_feats = vision_feats.view(b, T, 1, *vision_feats.shape[-2:]) + + vision_feats = self.perceiver(vision_feats) # reshapes to (b, T, n, d) + return vision_feats + + def predict(self, + images: torch.Tensor, + data_samples: Optional[List[DataSample]] = None, + **generation_cfg): + """Predict generation results from a batch of inputs. + + Args: + images (torch.Tensor): For zero-shot, the input images tensor is + with shape (B, C, H, W), for few-shot, which is + (B, T_img, C, H, W) in general. Images in the same chunk + are collated along T_img. Video data is not supported yet. + data_samples (List[DataSample], optional): The annotation + data of every samples. Defaults to None. + **generation_cfg: Other keyword arguments accepted by the + ``generate`` method of :attr:`lang_encoder`. + + Returns: + List[DataSample]: Return list of data samples. + """ + # generation_cfg in prediction should be dominant + generation_cfg = {**self.generation_cfg, **generation_cfg} + num_beams = generation_cfg['num_beams'] + + if num_beams > 1: + images = images.repeat_interleave(num_beams, dim=0) + + # extra vision feats and set as language condition feats + vision_x = self.extract_vision_feats(images) + for layer in self.lang_encoder._get_decoder_layers(): + layer.condition_vis_x(vision_x) + + input_text = self.preprocess_text(data_samples, device=images.device) + + outputs = self.lang_encoder.generate( + input_text.input_ids, + attention_mask=input_text.attention_mask, + eos_token_id=self.eoc_token_id, + **generation_cfg) + + # clear conditioned layers for language models + self.lang_encoder.clear_conditioned_layers() + + # remove prefix + outputs = outputs[:, len(input_text.input_ids[0]):] + + return self.post_process(outputs, data_samples) + + def preprocess_text(self, data_samples: List[DataSample], + device: torch.device) -> List[DataSample]: + """Preprocess text in advance before fed into language model. + + Args: + data_samples (List[DataSample]): The annotation + data of every samples. Defaults to None. + device (torch.device): Device for text to put on. + + Returns: + List[DataSample]: Return list of data samples. + """ + prompts = [] + for sample in data_samples: + if 'shots' in sample: + # few-shot + shot_prompt = ''.join([ + self.shot_prompt_tmpl.format(**shot) + for shot in sample.get('shots') + ]) + else: + # zero-shot + shot_prompt = self.zeroshot_prompt + + # add final prompt + final_prompt = self.final_prompt_tmpl.format(**sample.to_dict()) + prompts.append(shot_prompt + final_prompt) + + self.tokenizer.padding_side = 'left' + input_text = self.tokenizer( + prompts, + padding='longest', + truncation=True, + return_tensors='pt', + max_length=2000, + ).to(device) + return input_text + + def post_process( + self, outputs: torch.Tensor, + data_samples: Optional[List[DataSample]]) -> List[DataSample]: + """Perform post process for outputs for different task. + + Args: + outputs (torch.Tensor): The generated outputs. + data_samples (List[DataSample], optional): The annotation + data of every samples. + + Returns: + List[DataSample]: Return list of data samples. 
+ """ + outputs = self.tokenizer.batch_decode( + outputs, skip_special_tokens=True) + + if data_samples is None: + data_samples = [DataSample() for _ in range(len(outputs))] + + for output, data_sample in zip(outputs, data_samples): + # remove text pattern + if self.task == 'caption': + data_sample.pred_caption = re.split('Output', output, + 1)[0].replace('"', '') + elif self.task == 'vqa': + data_sample.pred_answer = re.split('Question|Answer', output, + 1)[0] + + return data_samples + + @staticmethod + def _load_adapter_hook(module, incompatible_keys): + """Avoid warning missing keys except adapter keys.""" + adapter_patterns = [ + '^perceiver', + 'lang_encoder.*embed_tokens', + 'lang_encoder.*gated_cross_attn_layers', + 'lang_encoder.*rotary_emb', + ] + for key in list(incompatible_keys.missing_keys): + if not any(re.match(pattern, key) for pattern in adapter_patterns): + incompatible_keys.missing_keys.remove(key) + + for key in list(incompatible_keys.unexpected_keys): + if 'position_ids' in key: + incompatible_keys.unexpected_keys.remove(key) + if 'lang_encoder.gated_cross_attn_layers' in key: + incompatible_keys.unexpected_keys.remove(key) diff --git a/mmpretrain/models/multimodal/flamingo/modules.py b/mmpretrain/models/multimodal/flamingo/modules.py new file mode 100644 index 0000000000000000000000000000000000000000..730c61b68a8d0fb799b7985636f09b6484ef99c2 --- /dev/null +++ b/mmpretrain/models/multimodal/flamingo/modules.py @@ -0,0 +1,398 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""Taken from https://github.com/lucidrains/flamingo-pytorch.""" + +from typing import Optional + +import torch +from einops import rearrange, repeat +from torch import einsum, nn + + +def FeedForward(dim, mult: int = 4): + """Feedforward layers. + + Args: + mult (int): Layer expansion muliplier. Defaults to 4. + """ + inner_dim = int(dim * mult) + return nn.Sequential( + nn.LayerNorm(dim), + nn.Linear(dim, inner_dim, bias=False), + nn.GELU(), + nn.Linear(inner_dim, dim, bias=False), + ) + + +class PerceiverAttention(nn.Module): + """Perceiver attetion layers. + + Args: + dim (int): Input dimensions. + dim_head (int): Number of dimension heads. Defaults to 64. + heads (int): Number of heads. Defaults to 8. + """ + + def __init__(self, *, dim: int, dim_head: int = 64, heads: int = 8): + super().__init__() + self.scale = dim_head**-0.5 + self.heads = heads + inner_dim = dim_head * heads + + self.norm_media = nn.LayerNorm(dim) + self.norm_latents = nn.LayerNorm(dim) + + self.to_q = nn.Linear(dim, inner_dim, bias=False) + self.to_kv = nn.Linear(dim, inner_dim * 2, bias=False) + self.to_out = nn.Linear(inner_dim, dim, bias=False) + + def forward(self, x: torch.Tensor, latents: torch.Tensor): + """Forward function. + + Args: + x (torch.Tensor): image features of shape (b, T, n1, D). + latent (torch.Tensor): latent features of shape (b, T, n2, D). + """ + x = self.norm_media(x) + latents = self.norm_latents(latents) + + h = self.heads + + q = self.to_q(latents) + kv_input = torch.cat((x, latents), dim=-2) + k, v = self.to_kv(kv_input).chunk(2, dim=-1) + q = rearrange(q, 'b t n (h d) -> b h t n d', h=h) + k = rearrange(k, 'b t n (h d) -> b h t n d', h=h) + v = rearrange(v, 'b t n (h d) -> b h t n d', h=h) + q = q * self.scale + + # attention + sim = einsum('... i d, ... j d -> ... i j', q, k) + sim = sim - sim.amax(dim=-1, keepdim=True).detach() + attn = sim.softmax(dim=-1) + + out = einsum('... i j, ... j d -> ... 
i d', attn, v) + out = rearrange(out, 'b h t n d -> b t n (h d)', h=h) + return self.to_out(out) + + +class PerceiverResampler(nn.Module): + """Perceiver resampler layers. + + Args: + dim (int): Input dimensions. + depth (int): Depth of resampler. Defaults to 6. + dim_head (int): Number of dimension heads. Defaults to 64. + heads (int): Number of heads. Defaults to 8. + num_latents (int): Number of latents. Defaults to 64. + max_num_media (int, optional): Max number of media. + Defaults to None. + max_num_frames (int, optional): Max number of frames. + Defaults to None. + ff_mult (int): Feed forward multiplier. Defaults to 4. + """ + + def __init__( + self, + *, + dim: int, + depth: int = 6, + dim_head: int = 64, + heads: int = 8, + num_latents: int = 64, + max_num_media: Optional[int] = None, + max_num_frames: Optional[int] = None, + ff_mult: int = 4, + ): + super().__init__() + self.latents = nn.Parameter(torch.randn(num_latents, dim)) + self.frame_embs = ( + nn.Parameter(torch.randn(max_num_frames, dim)) + if max_num_frames is not None else None) + self.media_time_embs = ( + nn.Parameter(torch.randn(max_num_media, 1, dim)) + if max_num_media is not None else None) + + self.layers = nn.ModuleList([]) + for _ in range(depth): + self.layers.append( + nn.ModuleList([ + PerceiverAttention( + dim=dim, dim_head=dim_head, heads=heads), + FeedForward(dim=dim, mult=ff_mult), + ])) + + self.norm = nn.LayerNorm(dim) + + def forward(self, x: torch.Tensor): + """Forward function for perceiver sampler. + + Args: + x (torch.Tensor): image features of shape (b, T, F, v, D) + + Returns: + torch.Tensor: shape (b, T, n, D) where n is self.num_latents + """ + b, T, F, v = x.shape[:4] + + # frame and media time embeddings + if self.frame_embs is not None: + frame_embs = repeat( + self.frame_embs[:F], 'F d -> b T F v d', b=b, T=T, v=v) + x = x + frame_embs + x = rearrange(x, 'b T F v d -> b T (F v) d' + ) # flatten the frame and spatial dimensions + if self.media_time_embs is not None: + x = x + self.media_time_embs[:T] + + # blocks + latents = repeat(self.latents, 'n d -> b T n d', b=b, T=T) + for attn, ff in self.layers: + latents = attn(x, latents) + latents + latents = ff(latents) + latents + return self.norm(latents) + + +class MaskedCrossAttention(nn.Module): + """Masked cross attention layers. + + Args: + dim (int): Input text feature dimensions. + dim_visual (int): Input visual feature dimensions. + dim_head (int): Number of dimension heads. Defaults to 64. + heads (int): Number of heads. Defaults to 8. + only_attend_immediate_media (bool): Whether attend immediate media. + Defaults to True. + """ + + def __init__( + self, + *, + dim: int, + dim_visual: int, + dim_head: int = 64, + heads: int = 8, + only_attend_immediate_media: bool = True, + ): + super().__init__() + self.scale = dim_head**-0.5 + self.heads = heads + inner_dim = dim_head * heads + + self.norm = nn.LayerNorm(dim) + + self.to_q = nn.Linear(dim, inner_dim, bias=False) + self.to_kv = nn.Linear(dim_visual, inner_dim * 2, bias=False) + self.to_out = nn.Linear(inner_dim, dim, bias=False) + + # whether for text to only attend to immediate preceding image + # or all previous images + self.only_attend_immediate_media = only_attend_immediate_media + + def forward(self, + x: torch.Tensor, + media: torch.Tensor, + media_locations: Optional[torch.Tensor] = None, + attend_previous: bool = True): + """Forward function for perceiver sampler. + + Args: + x (torch.Tensor): text features of shape (B, T_txt, D_txt). 
+ media (torch.Tensor): image features of shape + (B, T_img, n, D_img) where n is the dim of the latents. + media_locations (torch.Tensor, optional): boolean mask identifying + the media tokens in x of shape (B, T_txt). Defaults to None. + attend_previous (bool): If false, ignores immediately preceding + image and starts attending when following image. + Defaults to True. + """ + _, T_img, n = media.shape[:3] + h = self.heads + + x = self.norm(x) + + q = self.to_q(x) + media = rearrange(media, 'b t n d -> b (t n) d') + + k, v = self.to_kv(media).chunk(2, dim=-1) + q = rearrange(q, 'b n (h d) -> b h n d', h=h) + k = rearrange(k, 'b n (h d) -> b h n d', h=h) + v = rearrange(v, 'b n (h d) -> b h n d', h=h) + + q = q * self.scale + + sim = einsum('... i d, ... j d -> ... i j', q, k) + + if media_locations is not None: + # at each boolean of True, increment the time counter + # (relative to media time) + text_time = media_locations.cumsum(dim=-1) + media_time = torch.arange(T_img, device=x.device) + 1 + + if not attend_previous: + text_time[~media_locations] += 1 + # make sure max is still the number of images in the sequence + text_time[text_time > repeat( + torch.count_nonzero(media_locations, dim=1), + 'b -> b i', + i=text_time.shape[1], + )] = 0 + + # text time must equal media time if only attending to most + # immediate image otherwise, as long as text time is greater than + # media time (if attending to all previous images / media) + mask_op = torch.eq if self.only_attend_immediate_media else torch.ge # noqa + + text_to_media_mask = mask_op( + rearrange(text_time, 'b i -> b 1 i 1'), + repeat(media_time, 'j -> 1 1 1 (j n)', n=n), + ) + sim = sim.masked_fill(~text_to_media_mask, + -torch.finfo(sim.dtype).max) + + sim = sim - sim.amax(dim=-1, keepdim=True).detach() + attn = sim.softmax(dim=-1) + + if media_locations is not None and self.only_attend_immediate_media: + # any text without a preceding media needs to have + # attention zeroed out + text_without_media_mask = text_time == 0 + text_without_media_mask = rearrange(text_without_media_mask, + 'b i -> b 1 i 1') + attn = attn.masked_fill(text_without_media_mask, 0.0) + + out = einsum('... i j, ... j d -> ... i d', attn, v) + out = rearrange(out, 'b h n d -> b n (h d)') + return self.to_out(out) + + +class GatedCrossAttentionBlock(nn.Module): + """Gated cross attention layers. + + Args: + dim (int): Input text feature dimensions. + dim_visual (int): Input visual feature dimensions. + dim_head (int): Number of dimension heads. Defaults to 64. + heads (int): Number of heads. Defaults to 8. + ff_mult (int): Feed forward multiplier. Defaults to 4. + only_attend_immediate_media (bool): Whether attend immediate media. + Defaults to True. + """ + + def __init__( + self, + *, + dim: int, + dim_visual: int, + dim_head: int = 64, + heads: int = 8, + ff_mult: int = 4, + only_attend_immediate_media: bool = True, + ): + super().__init__() + self.attn = MaskedCrossAttention( + dim=dim, + dim_visual=dim_visual, + dim_head=dim_head, + heads=heads, + only_attend_immediate_media=only_attend_immediate_media, + ) + self.attn_gate = nn.Parameter(torch.tensor([0.0])) + + self.ff = FeedForward(dim, mult=ff_mult) + self.ff_gate = nn.Parameter(torch.tensor([0.0])) + + def forward(self, + x: torch.Tensor, + media: torch.Tensor, + media_locations: Optional[torch.Tensor] = None, + attend_previous: bool = True): + """Forward function for perceiver sampler. + + Args: + x (torch.Tensor): text features of shape (B, T_txt, D_txt). 
+ media (torch.Tensor): image features of shape + (B, T_img, n, D_img) where n is the dim of the latents. + media_locations (torch.Tensor, optional): boolean mask identifying + the media tokens in x of shape (B, T_txt). Defaults to None. + attend_previous (bool): If false, ignores immediately preceding + image and starts attending when following image. + Defaults to True. + """ + x = ( + self.attn( + x, + media, + media_locations=media_locations, + attend_previous=attend_previous, + ) * self.attn_gate.tanh() + x) + x = self.ff(x) * self.ff_gate.tanh() + x + + return x + + +class FlamingoLayer(nn.Module): + """Faminogo layers. + + Args: + gated_cross_attn_layer (nn.Module): Gated cross attention layer. + decoder_layer (nn.Module): Decoder layer. + """ + + def __init__(self, gated_cross_attn_layer: nn.Module, + decoder_layer: nn.Module): + super().__init__() + self.gated_cross_attn_layer = gated_cross_attn_layer + self.decoder_layer = decoder_layer + self.vis_x = None + self.media_locations = None + + def is_conditioned(self) -> bool: + """Check whether the layer is conditioned.""" + return self.vis_x is not None + + def condition_vis_x(self, vis_x): + """Set condition vision features.""" + self.vis_x = vis_x + + def condition_media_locations(self, media_locations): + """Set condition media locations.""" + self.media_locations = media_locations + + def condition_attend_previous(self, attend_previous): + """Set attend previous.""" + self.attend_previous = attend_previous + + def forward( + self, + lang_x: torch.Tensor, + attention_mask: Optional[torch.Tensor] = None, + **decoder_layer_kwargs, + ): + """Forward function. + + Args: + lang_x (torch.Tensor): language inputs. + attention_mask (torch.Tensor, optional): text attention mask. + Defaults to None. + **decoder_layer_kwargs: Other decoder layer keyword arguments. + """ + if self.gated_cross_attn_layer is None: + return self.decoder_layer( + lang_x, attention_mask=attention_mask, **decoder_layer_kwargs) + + if self.vis_x is None: + raise ValueError('vis_x must be conditioned before forward pass') + + if self.media_locations is None: + raise ValueError( + 'media_locations must be conditioned before forward pass') + + lang_x = self.gated_cross_attn_layer( + lang_x, + self.vis_x, + media_locations=self.media_locations, + attend_previous=self.attend_previous, + ) + lang_x = self.decoder_layer( + lang_x, attention_mask=attention_mask, **decoder_layer_kwargs) + return lang_x diff --git a/mmpretrain/models/multimodal/flamingo/utils.py b/mmpretrain/models/multimodal/flamingo/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..1077e145a7daeeff1c769d837ec9c5aac0cf3d93 --- /dev/null +++ b/mmpretrain/models/multimodal/flamingo/utils.py @@ -0,0 +1,64 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Any, Type + +from mmpretrain.registry import MODELS + + +class ExtendModule: + """Combine the base language model with adapter. This module will create a + instance from base with extended functions in adapter. + + Args: + base (object): Base module could be any object that represent + a instance of language model or a dict that can build the + base module. + adapter: (dict): Dict to build the adapter. 
+ """ + + def __new__(cls, base: object, adapter: dict): + + if isinstance(base, dict): + base = MODELS.build(base) + + adapter_module = MODELS.get(adapter.pop('type')) + cls.extend_instance(base, adapter_module) + return adapter_module.extend_init(base, **adapter) + + @classmethod + def extend_instance(cls, base: object, mixin: Type[Any]): + """Apply mixins to a class instance after creation. + + Args: + base (object): Base module instance. + mixin: (Type[Any]): Adapter class type to mixin. + """ + base_cls = base.__class__ + base_cls_name = base.__class__.__name__ + base.__class__ = type( + base_cls_name, (mixin, base_cls), + {}) # mixin needs to go first for our forward() logic to work + + +def getattr_recursive(obj, att): + """ + Return nested attribute of obj + Example: getattr_recursive(obj, 'a.b.c') is equivalent to obj.a.b.c + """ + if att == '': + return obj + i = att.find('.') + if i < 0: + return getattr(obj, att) + else: + return getattr_recursive(getattr(obj, att[:i]), att[i + 1:]) + + +def setattr_recursive(obj, att, val): + """ + Set nested attribute of obj + Example: setattr_recursive(obj, 'a.b.c', val) + is equivalent to obj.a.b.c = val + """ + if '.' in att: + obj = getattr_recursive(obj, '.'.join(att.split('.')[:-1])) + setattr(obj, att.split('.')[-1], val) diff --git a/mmpretrain/models/multimodal/ofa/__init__.py b/mmpretrain/models/multimodal/ofa/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..bcb3f45f09b757304bfca3de2a94d217ff78d8d4 --- /dev/null +++ b/mmpretrain/models/multimodal/ofa/__init__.py @@ -0,0 +1,5 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from .ofa import OFA +from .ofa_modules import OFADecoder, OFAEncoder, OFAEncoderDecoder + +__all__ = ['OFAEncoderDecoder', 'OFA', 'OFAEncoder', 'OFADecoder'] diff --git a/mmpretrain/models/multimodal/ofa/ofa.py b/mmpretrain/models/multimodal/ofa/ofa.py new file mode 100644 index 0000000000000000000000000000000000000000..e15787a60d66ac56308b320cdd73a7703a2a29bc --- /dev/null +++ b/mmpretrain/models/multimodal/ofa/ofa.py @@ -0,0 +1,320 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+import string +from collections import defaultdict +from functools import partial +from typing import Optional, Union + +import mmengine +import torch +from mmengine.model import BaseModel + +from mmpretrain.datasets import CleanCaption +from mmpretrain.registry import MODELS, TOKENIZER +from mmpretrain.structures import DataSample +from .ofa_modules import OFAEncoderDecoder + + +class TreeNode(): + + def __init__(self): + self.child = defaultdict(TreeNode) + + +class Trie: + + def __init__(self, eos): + self.root = TreeNode() + self.eos = eos + + def insert(self, word): + cur = self.root + for c in word: + cur = cur.child[c] + + def get_next_layer(self, word): + cur = self.root + for c in word: + cur = cur.child.get(c) + if cur is None: + return [self.eos] + return list(cur.child.keys()) + + +def apply_constraint( + input_ids: torch.Tensor, + logits: torch.Tensor, + decoder_prompts: Optional[list], + num_beams: int, + constraint_trie: Trie = None, +): + if decoder_prompts is None and constraint_trie is None: + return logits + + mask = logits.new_zeros(logits[:, -1, :].size(), dtype=torch.bool) + input_ids = input_ids.view(-1, num_beams, input_ids.shape[-1]) + for batch_id, beam_sent in enumerate(input_ids): + for beam_id, sent in enumerate(beam_sent): + if decoder_prompts is None: + prompt_len = 0 + else: + prompt_len = len(decoder_prompts[batch_id]) + + if sent.size(0) - 1 < prompt_len: + allowed_tokens = [decoder_prompts[batch_id][sent.size(0) - 1]] + mask[batch_id * num_beams + beam_id, allowed_tokens] = True + elif constraint_trie is not None: + answer_tokens = [0] + sent[prompt_len + 1:].tolist() + allowed_tokens = constraint_trie.get_next_layer(answer_tokens) + mask[batch_id * num_beams + beam_id, allowed_tokens] = True + else: + mask[batch_id * num_beams + beam_id, :] = True + logits[:, -1, :].masked_fill_(~mask, float('-inf')) + return logits + + +@MODELS.register_module() +class OFA(BaseModel): + """The OFA model for multiple tasks. + + Args: + encoder_cfg (dict): The config of the encoder, accept the keyword + arguments of :class:`OFAEncoder`. + decoder_cfg (dict): The config of the decoder, accept the keyword + arguments of :class:`OFADecoder`. + vocab_size (int): The size of the vocabulary. + embedding_dim (int): The embedding dimensions of both the encoder + and the decoder. + tokenizer (dict | PreTrainedTokenizer): The tokenizer to encode + the text. + task (str): The task name, supported tasks are "caption", "vqa" and + "refcoco". + prompt (str, optional): The prompt template for the following tasks, + If None, use default prompt: + + - **caption**: ' what does the image describe?' + - **refcoco**: ' which region does the text " {} " describe?' + + Defaults to None + ans2label (str | Sequence | None): The answer to label mapping for + the vqa task. If a string, it should be a pickle or json file. + The sequence constrains the output answers. Defaults to None, + which means no constraint. + generation_cfg (dict): The extra generation config, accept the keyword + arguments of :class:`~transformers.GenerationConfig`. + Defaults to an empty dict. + data_preprocessor (dict, optional): The config for preprocessing input + data. If None or no specified type, it will use + "MultiModalDataPreprocessor" as type. See :class: + `MultiModalDataPreprocessor` for more details. Defaults to None. + init_cfg (dict, optional): The initialization config. Defaults to None. 
+ """ + support_tasks = {'caption', 'vqa', 'refcoco'} + + def __init__( + self, + encoder_cfg, + decoder_cfg, + vocab_size, + embedding_dim, + tokenizer, + task, + prompt=None, + ans2label: Union[dict, str, None] = None, + generation_cfg=dict(), + data_preprocessor: Optional[dict] = None, + init_cfg=None, + ): + if data_preprocessor is None: + data_preprocessor = {} + if isinstance(data_preprocessor, dict): + data_preprocessor.setdefault('type', 'MultiModalDataPreprocessor') + data_preprocessor = MODELS.build(data_preprocessor) + + super().__init__( + init_cfg=init_cfg, data_preprocessor=data_preprocessor) + + if isinstance(tokenizer, dict): + self.tokenizer = TOKENIZER.build(tokenizer) + else: + self.tokenizer = tokenizer + + if task not in self.support_tasks: + raise ValueError(f'Unsupported task {task}, please select ' + f'the task from {self.support_tasks}.') + + self.prompt = prompt + self.task = task + + if isinstance(ans2label, str): + self.ans2label = mmengine.load(ans2label) + else: + self.ans2label = ans2label + + if self.task == 'vqa' and self.ans2label is not None: + self.constraint_trie = Trie(eos=self.tokenizer.eos_token_id) + answers = [f' {answer}' for answer in self.ans2label] + answer_tokens = self.tokenizer(answers, padding=False) + for answer_token in answer_tokens['input_ids']: + self.constraint_trie.insert(answer_token) + else: + self.constraint_trie = None + + generation_cfg = { + 'num_beams': 5, + 'max_new_tokens': 20, + 'no_repeat_ngram_size': 3, + **generation_cfg, + } + self.model = OFAEncoderDecoder( + encoder_cfg=encoder_cfg, + decoder_cfg=decoder_cfg, + padding_idx=self.tokenizer.pad_token_id, + vocab_size=vocab_size, + embedding_dim=embedding_dim, + generation_cfg=generation_cfg, + ) + + def forward( + self, + images: torch.Tensor, + data_samples: Optional[list] = None, + mode: str = 'predict', + **kwargs, + ): + """The unified entry for a forward process in both training and test. + The method accepts the following modes: + + - "predict": Forward and return a list of data samples contain the + predict results. + + Args: + images (torch.Tensor): the preprocessed image tensor of shape + ``(N, C, H, W)``. + data_samples (List[DataSample], optional): The annotation data + of every samples. Defaults to None. + mode (str): Return what kind of value. Defaults to 'predict'. + """ + if mode == 'predict': + return self.predict(images, data_samples, **kwargs) + else: + raise RuntimeError(f'Invalid mode "{mode}".') + + def predict( + self, + images, + data_samples=None, + post_process=True, + **generation_config, + ): + text_tokens = self.preprocess_text(data_samples, images.size(0), + images.device) + + if 'images_mask' in data_samples[0]: + images_mask = torch.tensor([ + sample.get('images_mask') for sample in data_samples + ]).bool().to(images.device) + else: + images_mask = None + + num_beams = generation_config.get( + 'num_beams', getattr(self.model.generation_config, 'num_beams')) + decoder_prompts = self.get_decoder_prompts(data_samples) + constrain_fn = partial( + apply_constraint, + constraint_trie=self.constraint_trie, + decoder_prompts=decoder_prompts, + num_beams=num_beams, + ) + + outputs = self.model.generate( + input_ids=text_tokens, + images=images, + images_mask=images_mask, + constrain_fn=constrain_fn, + **generation_config, + ) + + if decoder_prompts is not None: + # Remove the prefix decoder prompt. 
+ for prompt_ids, token in zip(decoder_prompts, outputs): + token[1:len(prompt_ids) + 1] = self.tokenizer.pad_token_id + + if post_process: + return self.post_process(outputs, data_samples) + else: + return outputs + + def get_decoder_prompts(self, data_samples): + decoder_prompts = [] + if 'decoder_prompt' not in data_samples[0]: + return None + for sample in data_samples: + prompt = ' ' + sample.get('decoder_prompt') + prompt_ids = self.tokenizer(prompt, add_special_tokens=False) + prompt_ids = prompt_ids['input_ids'] + decoder_prompts.append(prompt_ids) + return decoder_prompts + + def preprocess_text(self, data_samples, batch_size, device): + if self.task == 'caption': + prompt = self.prompt or ' what does the image describe?' + prompts = [prompt] * batch_size + prompts = self.tokenizer(prompts, return_tensors='pt') + return prompts.input_ids.to(device) + elif self.task == 'vqa': + prompts = [] + for sample in data_samples: + assert 'question' in sample + prompt = ' ' + sample.get('question') + prompts.append(prompt) + prompts = self.tokenizer( + prompts, return_tensors='pt', padding=True) + return prompts.input_ids.to(device) + elif self.task == 'refcoco': + prompt_template = self.prompt or \ + ' which region does the text " {} " describe?' + prompts = [] + for sample in data_samples: + assert 'text' in sample + prompt = prompt_template.format(sample.get('text')) + prompts.append(prompt) + prompts = self.tokenizer( + prompts, return_tensors='pt', padding=True) + return prompts.input_ids.to(device) + + def post_process(self, outputs, data_samples): + + out_data_samples = [] + if data_samples is None: + data_samples = [None] * outputs.size(0) + + for data_sample, token in zip(data_samples, outputs): + if data_sample is None: + data_sample = DataSample() + + if self.task == 'caption': + text = self.tokenizer.decode(token, skip_special_tokens=True) + text = CleanCaption( + lowercase=False, + remove_chars=string.punctuation).clean(text) + data_sample.pred_caption = text + elif self.task == 'vqa': + text = self.tokenizer.decode(token, skip_special_tokens=True) + data_sample.pred_answer = text.strip() + elif self.task == 'refcoco': + bbox = token[1:5] - self.tokenizer.bin_offset + # During training, the bbox is normalized by 512. It's related + # to the `max_image_size` config in the official repo. + bbox = bbox / self.tokenizer.num_bins * 512 + scale_factor = data_sample.get('scale_factor', (1, 1)) + bbox[0::2] /= scale_factor[0] + bbox[1::2] /= scale_factor[1] + data_sample.pred_bboxes = bbox.unsqueeze(0) + if 'gt_bboxes' in data_sample: + gt_bboxes = bbox.new_tensor(data_sample.gt_bboxes) + gt_bboxes[:, 0::2] /= scale_factor[0] + gt_bboxes[:, 1::2] /= scale_factor[1] + data_sample.gt_bboxes = gt_bboxes + out_data_samples.append(data_sample) + + return out_data_samples diff --git a/mmpretrain/models/multimodal/ofa/ofa_modules.py b/mmpretrain/models/multimodal/ofa/ofa_modules.py new file mode 100644 index 0000000000000000000000000000000000000000..1c79049b617685ad9d5ab244ed09c56e70b348fd --- /dev/null +++ b/mmpretrain/models/multimodal/ofa/ofa_modules.py @@ -0,0 +1,1612 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
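The Trie defined near the top of ofa.py above is what restricts VQA decoding to a closed answer set: every tokenized answer is inserted once, and during generation get_next_layer returns the token ids allowed to follow the current prefix (falling back to EOS for an unknown prefix), which apply_constraint then turns into a -inf mask over the next-token logits. A minimal sketch with made-up token ids, assuming the Trie class above is in scope.

trie = Trie(eos=2)            # 2 stands in for the tokenizer's eos_token_id
trie.insert([0, 11, 12, 2])   # tokenized answer A
trie.insert([0, 11, 13, 2])   # tokenized answer B
print(trie.get_next_layer([0, 11]))  # [12, 13] -> only these continuations are allowed
print(trie.get_next_layer([0, 99]))  # [2]      -> unknown prefix, only EOS is allowed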
+import math +from dataclasses import dataclass +from functools import partial +from typing import List, Optional, Tuple + +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn.bricks import DropPath +from mmengine.model import BaseModule +from mmengine.utils import digit_version +from transformers.modeling_outputs import ( + BaseModelOutputWithPastAndCrossAttentions, ModelOutput, Seq2SeqLMOutput) +from transformers.modeling_utils import (GenerationConfig, GenerationMixin, + PretrainedConfig) + +from mmpretrain.registry import MODELS +from ...backbones.resnet import Bottleneck, ResNet + +if digit_version(torch.__version__) >= digit_version('1.10.0'): + torch_meshgrid = partial(torch.meshgrid, indexing='ij') +else: + torch_meshgrid = torch.meshgrid + + +def make_token_bucket_position(bucket_size, max_position=1024): + context_pos = torch.arange(max_position, dtype=torch.long)[:, None] + memory_pos = torch.arange(max_position, dtype=torch.long)[None, :] + relative_pos = context_pos - memory_pos + sign = torch.sign(relative_pos) + mid = bucket_size // 2 + abs_pos = torch.where((relative_pos < mid) & (relative_pos > -mid), + mid - 1, torch.abs(relative_pos)) + log_pos = torch.ceil( + torch.log(abs_pos / mid) / math.log( + (max_position - 1) / mid) * (mid - 1)) + mid + log_pos = log_pos.int() + bucket_pos = torch.where(abs_pos.le(mid), relative_pos, + log_pos * sign).long() + return bucket_pos + bucket_size - 1 + + +def make_image_bucket_position(bucket_size, num_relative_distance): + coords_h = torch.arange(bucket_size) + coords_w = torch.arange(bucket_size) + # (2, h, w) + coords = torch.stack(torch_meshgrid([coords_h, coords_w])) + # (2, h*w) + coords_flatten = torch.flatten(coords, 1) + # (2, h*w, h*w) + relative_coords = coords_flatten[:, :, None] - coords_flatten[:, None, :] + # (h*w, h*w, 2) + relative_coords = relative_coords.permute(1, 2, 0).contiguous() + relative_coords[:, :, 0] += bucket_size - 1 # shift to start from 0 + relative_coords[:, :, 1] += bucket_size - 1 + relative_coords[:, :, 0] *= 2 * bucket_size - 1 + relative_position_index = torch.zeros( + size=(bucket_size * bucket_size + 1, ) * 2, + dtype=relative_coords.dtype) + # (h*w, h*w) + relative_position_index[1:, 1:] = relative_coords.sum(-1) + relative_position_index[0, 0:] = num_relative_distance - 3 + relative_position_index[0:, 0] = num_relative_distance - 2 + relative_position_index[0, 0] = num_relative_distance - 1 + return relative_position_index + + +def _make_causal_mask(input_ids_shape: torch.Size, + dtype: torch.dtype, + past_key_values_length: int = 0): + """Make causal mask used for uni-directional self-attention.""" + bsz, tgt_len = input_ids_shape + mask = torch.full((tgt_len, tgt_len), float('-inf')) + mask_cond = torch.arange(mask.size(-1)) + mask.masked_fill_(mask_cond < (mask_cond + 1).view(mask.size(-1), 1), 0) + mask = mask.to(dtype) + + if past_key_values_length > 0: + mask = torch.cat( + [torch.zeros(tgt_len, past_key_values_length, dtype=dtype), mask], + dim=-1) + return mask[None, None, :, :].expand(bsz, 1, tgt_len, + tgt_len + past_key_values_length) + + +def _expand_mask(mask: torch.Tensor, + dtype: torch.dtype, + tgt_len: Optional[int] = None): + """Expands attention_mask from ``[B, L_s]`` to ``[B, 1, L_t, L_s]``. + + Where ``B`` is batch_size, `L_s`` is the source sequence length, and + ``L_t`` is the target sequence length. 
+ """ + bsz, src_len = mask.size() + tgt_len = tgt_len if tgt_len is not None else src_len + + expanded_mask = mask[:, None, None, :].expand(bsz, 1, tgt_len, + src_len).to(dtype) + return expanded_mask.masked_fill(expanded_mask.bool(), + torch.finfo(dtype).min) + + +class MultiheadAttention(BaseModule): + """Multi-head Attention Module for OFA. + + Args: + embedding_dim (int): The embedding dimension of query. + num_heads (int): Parallel attention heads. + kdim (int, optional): The embedding dimension of key. + Defaults to None, which means the same as the `embedding_dim`. + vdim (int, optional): The embedding dimension of value. + Defaults to None, which means the same as the `embedding_dim`. + attn_drop (float): Dropout rate of the dropout layer after the + attention calculation of query and key. Defaults to 0. + qkv_bias (bool): If True, add a learnable bias to q, k, v. + Defaults to True. + scale_factor (float): The scale of qk will be + ``(head_dim * scale_factor) ** -0.5``. Defaults to 1. + proj_bias (bool) If True, add a learnable bias to output projection. + Defaults to True. + init_cfg (dict, optional): The Config for initialization. + Defaults to None. + """ + + def __init__(self, + embedding_dim, + num_heads, + kdim=None, + vdim=None, + attn_drop=0., + scale_factor=1., + qkv_bias=True, + proj_bias=True, + scale_heads=False, + init_cfg=None): + super(MultiheadAttention, self).__init__(init_cfg=init_cfg) + + self.embedding_dim = embedding_dim + self.num_heads = num_heads + self.kdim = kdim or embedding_dim + self.vdim = vdim or embedding_dim + + self.head_dim = embedding_dim // num_heads + self.scale = (self.head_dim * scale_factor)**-0.5 + + self.q_proj = nn.Linear(embedding_dim, embedding_dim, bias=qkv_bias) + self.k_proj = nn.Linear(self.kdim, embedding_dim, bias=qkv_bias) + self.v_proj = nn.Linear(self.vdim, embedding_dim, bias=qkv_bias) + self.out_proj = nn.Linear(embedding_dim, embedding_dim, bias=proj_bias) + + self.attn_drop = nn.Dropout(p=attn_drop) + + if scale_heads: + self.c_attn = nn.Parameter(torch.ones(num_heads)) + else: + self.c_attn = None + + def forward( + self, + query, + key_value=None, + attn_mask=None, + attn_bias=None, + past_key_value=None, + output_attentions=False, + ): + B, _, C = query.shape + assert C == self.head_dim * self.num_heads + + is_cross_attention = key_value is not None + if key_value is None: + key_value = query + + # (B, L, C) -> (B, num_heads, L, head_dims) + q = self.q_proj(query).reshape(B, -1, self.num_heads, + self.head_dim).transpose(1, 2) + + if is_cross_attention and past_key_value is not None: + # Reuse key and value in cross_attentions + k, v = past_key_value + else: + k = self.k_proj(key_value).reshape(B, -1, self.num_heads, + self.head_dim).transpose(1, 2) + v = self.v_proj(key_value).reshape(B, -1, self.num_heads, + self.head_dim).transpose(1, 2) + if past_key_value is not None: + past_key, past_value = past_key_value + k = torch.cat([past_key, k], dim=2) + v = torch.cat([past_value, v], dim=2) + + past_key_value = (k, v) + + attn_weights = q @ k.transpose(-2, -1) * self.scale + + if attn_bias is not None: + src_len = k.size(2) + attn_weights[:, :, -src_len:] += attn_bias[:, :, -src_len:] + + if attn_mask is not None: + attn_weights += attn_mask + attn_weights = torch.softmax(attn_weights, dim=-1) + attn = self.attn_drop(attn_weights) @ v + + if self.c_attn is not None: + attn = torch.einsum('bhlc,h->bhlc', attn, self.c_attn) + + # (B, num_heads, L, head_dims) -> (B, L, C) + attn = attn.transpose(1, 2).reshape(B, -1, 
self.embedding_dim) + attn = self.out_proj(attn) + + if output_attentions: + return attn, attn_weights, past_key_value + else: + return attn, None, past_key_value + + +@MODELS.register_module(force=True) +class OFAResNet(ResNet): + """ResNet module for OFA. + + The ResNet in OFA has only three stages. + """ + arch_settings = { + 50: (Bottleneck, (3, 4, 6)), + 101: (Bottleneck, (3, 4, 23)), + 152: (Bottleneck, (3, 8, 36)), + } + + def __init__(self, depth, *args, **kwargs): + super().__init__( + depth=depth, + *args, + num_stages=3, + out_indices=(2, ), + dilations=(1, 1, 1), + strides=(1, 2, 2), + **kwargs) + + +@dataclass +class OFAEncoderOutput(ModelOutput): + """OFA encoder outputs. + + Args: + last_hidden_state (torch.tensor): The hidden-states of the output at + the last layer of the model. The shape is (B, L, C). + hidden_states (Tuple[torch.tensor]): The initial embedding and the + output of each layer. The shape of every item is (B, L, C). + attentions (Tuple[torch.tensor]): The attention weights after the + attention softmax, used to compute the weighted average in the + self-attention heads. The shape of every item is + (B, num_heads, L, L). + position_embedding (torch.tensor): The positional embeddings of the + inputs. The shape is (B, L, C). + """ + + last_hidden_state: torch.FloatTensor = None + padding_mask: torch.Tensor = None + hidden_states: Optional[Tuple[torch.FloatTensor]] = None + attentions: Optional[Tuple[torch.FloatTensor]] = None + position_embedding: Optional[torch.FloatTensor] = None + + +class OFAEncoderLayer(nn.Module): + """OFAEncoder layer block.""" + + def __init__(self, + embedding_dim, + num_heads, + dropout_rate=0., + drop_path_rate=0., + attn_drop=0., + act_drop=0., + scale_factor=2., + mlp_ratio=4., + scale_heads=True, + normformer=True, + pre_norm=True, + act_cfg=dict(type='GELU')): + super().__init__() + self.embedding_dim = embedding_dim + self.pre_norm = pre_norm + + self.attn = MultiheadAttention( + embedding_dim=embedding_dim, + num_heads=num_heads, + attn_drop=attn_drop, + scale_factor=scale_factor, + scale_heads=scale_heads, + ) + + mid_channels = int(embedding_dim * mlp_ratio) + self.fc1 = nn.Linear(embedding_dim, mid_channels) + self.fc2 = nn.Linear(mid_channels, embedding_dim) + self.act = MODELS.build(act_cfg) + self.act_drop = nn.Dropout( + act_drop) if act_drop > 0. else nn.Identity() + + # LayerNorm between attention block and ffn block. + self.attn_ln = nn.LayerNorm(embedding_dim) + self.ffn_ln = nn.LayerNorm(embedding_dim) + + # Extra LayerNorm + self.normformer = normformer + if self.normformer: + self.attn_mid_ln = nn.LayerNorm(embedding_dim) + self.ffn_mid_ln = nn.LayerNorm(mid_channels) + + self.dropout = nn.Dropout(dropout_rate) + self.drop_path = DropPath( + drop_path_rate) if drop_path_rate > 0.0 else nn.Identity() + + def forward(self, + x, + attention_mask=None, + attn_bias=None, + output_attentions=False): + """Forward the encoder layer. + + Args: + x (torch.tensor): The input to the layer of shape ``(B, L, C)``. + attention_mask (torch.Tensor, optional): The attention mask of size + ``(B, 1, L, L)``, where padding elements are indicated by very + large negative values. Defaults to None. + attn_bias (torch.tensor, optional): The bias for positional + information. Defaults to None. + output_attentions (bool): Whether to return the attentions tensors + of the attention layer. + + Returns: + List[torch.tensor]: The first element is the encoded output of + shape ``(B, L, C)``. 
And the second element is the output + attentions if ``output_attentions=True``. + """ + residual = x + + # Attention block + if self.pre_norm: + x = self.attn_ln(x) + x, attn_weights, _ = self.attn( + query=x, + attn_mask=attention_mask, + attn_bias=attn_bias, + output_attentions=output_attentions) + if self.normformer: + x = self.attn_mid_ln(x) + x = self.dropout(x) + x = residual + self.drop_path(x) + if not self.pre_norm: + x = self.attn_ln(x) + + residual = x + + # FFN block + if self.pre_norm: + x = self.ffn_ln(x) + x = self.act(self.fc1(x)) + x = self.act_drop(x) + if self.normformer: + x = self.ffn_mid_ln(x) + x = self.fc2(x) + x = self.dropout(x) + x = residual + self.drop_path(x) + if not self.pre_norm: + x = self.ffn_ln(x) + + if output_attentions: + return [x, attn_weights] + else: + return [x] + + +class OFADecoderLayer(nn.Module): + """OFADecoder layer block.""" + + def __init__(self, + embedding_dim, + num_heads, + dropout_rate=0., + drop_path_rate=0., + attn_drop=0., + act_drop=0., + scale_factor=2., + mlp_ratio=4., + encoder_embed_dim=None, + scale_heads=True, + normformer=True, + pre_norm=True, + act_cfg=dict(type='GELU')): + super().__init__() + self.embedding_dim = embedding_dim + self.pre_norm = pre_norm + + self.self_attn = MultiheadAttention( + embedding_dim=embedding_dim, + num_heads=num_heads, + attn_drop=attn_drop, + scale_factor=scale_factor, + scale_heads=scale_heads, + ) + + self.cross_attn = MultiheadAttention( + embedding_dim=embedding_dim, + kdim=encoder_embed_dim, + vdim=encoder_embed_dim, + num_heads=num_heads, + attn_drop=attn_drop, + scale_factor=scale_factor, + scale_heads=scale_heads, + ) + + mid_channels = int(embedding_dim * mlp_ratio) + self.fc1 = nn.Linear(embedding_dim, mid_channels) + self.fc2 = nn.Linear(mid_channels, embedding_dim) + self.act = MODELS.build(act_cfg) + self.act_drop = nn.Dropout( + act_drop) if act_drop > 0. else nn.Identity() + + # LayerNorm between attention block and ffn block. + self.self_attn_ln = nn.LayerNorm(embedding_dim) + self.cross_attn_ln = nn.LayerNorm(embedding_dim) + self.ffn_ln = nn.LayerNorm(embedding_dim) + + # Extra LayerNorm + self.normformer = normformer + if self.normformer: + self.self_attn_mid_ln = nn.LayerNorm(embedding_dim) + self.cross_attn_mid_ln = nn.LayerNorm(embedding_dim) + self.ffn_mid_ln = nn.LayerNorm(mid_channels) + + self.dropout = nn.Dropout(dropout_rate) + self.drop_path = DropPath( + drop_path_rate) if drop_path_rate > 0.0 else nn.Identity() + + def forward( + self, + x, + attention_mask=None, + encoder_hidden_states: Optional[torch.Tensor] = None, + encoder_attention_mask: Optional[torch.Tensor] = None, + past_key_value: Optional[List[torch.Tensor]] = None, + output_attentions: bool = False, + use_cache: bool = False, + self_attn_bias: Optional[torch.Tensor] = None, + cross_attn_bias: Optional[torch.Tensor] = None, + ): + """Forward the decoder layer. + + Args: + x (torch.tensor): The input to the layer of shape ``(B, L, C)``. + attention_mask (torch.Tensor, optional): The attention mask of size + ``(B, 1, L, L)``, where padding elements are indicated by very + large negative values. Defaults to None. + encoder_hidden_states (torch.Tensor, optional): The cross attention + input to the layer of size ``(B, L, C)``. Defaults to None. + encoder_attention_mask (torch.Tensor, optional): The cross + attention mask where padding elements are indicated by very + large negative values. Defaults to None. 
+ past_key_value (Tuple[torch.tensor], optional): The cached past key + and value projection states. Defaults to none. + output_attentions (bool): whether to return the attentions tensors + of all attention layers. Defaults to False. + use_cache (bool, optional): Whether to use cache. + Defaults to False. + self_attn_bias (torch.Tensor, optional): The self attention bias + for positional information. Defaults to None. + cross_attn_bias (torch.Tensor, optional): The cross attention bias + for positional information. Defaults to None. + + Returns: + List[torch.tensor]: The first element is the encoded output of + shape ``(B, L, C)``. The following two elements can be the output + self-attentions and cross-attentions if ``output_attentions=True``. + The following one element can be the cached past key and value + projection states. + """ + residual = x + + if past_key_value is not None: + self_past_key_value = past_key_value[:2] + cross_past_key_value = past_key_value[2:] + else: + self_past_key_value, cross_past_key_value = None, None + + # Self-Attention block + if self.pre_norm: + x = self.self_attn_ln(x) + x, self_attn_weights, present_key_value = self.self_attn( + query=x, + past_key_value=self_past_key_value, + attn_mask=attention_mask, + output_attentions=output_attentions, + attn_bias=self_attn_bias, + ) + if self.normformer: + x = self.self_attn_mid_ln(x) + x = self.dropout(x) + x = residual + self.drop_path(x) + if not self.pre_norm: + x = self.self_attn_ln(x) + + # Cross-Attention block + if encoder_hidden_states is not None: + residual = x + if self.pre_norm: + x = self.cross_attn_ln(x) + x, cross_attn_weights, cross_key_value = self.cross_attn.forward( + query=x, + key_value=encoder_hidden_states, + attn_mask=encoder_attention_mask, + past_key_value=cross_past_key_value, + output_attentions=output_attentions, + attn_bias=cross_attn_bias) + if self.normformer: + x = self.cross_attn_mid_ln(x) + x = self.dropout(x) + x = residual + self.drop_path(x) + if not self.pre_norm: + x = self.cross_attn_ln(x) + + present_key_value = present_key_value + cross_key_value + + residual = x + + # FFN block + if self.pre_norm: + x = self.ffn_ln(x) + x = self.act(self.fc1(x)) + x = self.act_drop(x) + if self.normformer: + x = self.ffn_mid_ln(x) + x = self.fc2(x) + x = self.dropout(x) + x = residual + self.drop_path(x) + if not self.pre_norm: + x = self.ffn_ln(x) + + outputs = [x] + + if output_attentions: + outputs.extend([self_attn_weights, cross_attn_weights]) + + if use_cache: + outputs.append(present_key_value) + + return outputs + + +class OFAEncoder(BaseModule): + """The encoder module of OFA. + + Args: + embed_tokens (nn.Embedding): The embedding module to embed the + input tokens. + embed_images (dict | nn.Module): The module to embed the input + images into features. The output number of channels should + be 1024. + num_layers (int): The number of encoder layers. Defaults to 6. + num_heads (int): The number of heads of attention. Defaults to 12. + dropout_rate (float): The prob of dropout for embedding and + transformer layers. Defaults to 0. + drop_path_rate (float): The prob of droppath for transformer layers. + Defaults to 0. + max_source_positions (int): The maximum length of the input tokens. + Defaults to 1024. + token_bucket_size (int): The token bucket size, it's used as the + maximum relative position index in relative position embedding + of input tokens. Defaults to 256. 
+ image_bucket_size (int): The image bucket size, it's used to generate + the image relative position embedding table. It should be larger + than the shape of image feature map. Defaults to 42. + attn_scale_factor (float): The scale factor to calculate qk scale in + attentions. Defaults to 2. + scale_embedding (bool): Whether to scale the embeddings by the square + root of the dimension. Defaults to False. + add_embedding_ln (bool): Whether to add an extra layer norm for token + embeddings. Defaults to True. + add_image_embedding_ln (bool): Whether to add an extra layer norm for + image embeddings. Defaults to True. + pre_norm (bool): Whether to do layer norm before attention and ffn + blocks in transformer layers. Defaults to True. + entangle_position_embedding (bool): Whether to add the position + embedding on the embeddings directly. Defaults to False. + init_cfg (dict, optional): The initialization config. Defaults to None. + """ + + def __init__( + self, + embed_tokens, + embed_images: dict, + num_layers=6, + num_heads=12, + dropout_rate=0., + drop_path_rate=0., + max_source_positions=1024, + token_bucket_size=256, + image_bucket_size=42, + attn_scale_factor=2., + scale_embedding=False, + add_embedding_ln=True, + add_type_embed=True, + add_image_embedding_ln=True, + pre_norm=True, + entangle_position_embedding=False, + init_cfg=None, + ): + super().__init__(init_cfg=init_cfg) + + self.num_layers = num_layers + embedding_dim = embed_tokens.embedding_dim + self.embedding_dim = embedding_dim + self.padding_idx = embed_tokens.padding_idx + self.max_source_positions = max_source_positions + self.num_heads = num_heads + + # Build embedding process components + self.embed_tokens = embed_tokens + self.embedding_scale = math.sqrt( + embedding_dim) if scale_embedding else 1.0 + + if not isinstance(embed_images, nn.Module): + self.embed_images = MODELS.build(embed_images) + else: + self.embed_images = embed_images + self.image_proj = nn.Linear(1024, embedding_dim) + + if add_embedding_ln: + self.embedding_ln = nn.LayerNorm(embedding_dim) + else: + self.embedding_ln = None + + if add_type_embed: + self.embed_type = nn.Embedding(2, embedding_dim) + else: + self.embed_type = None + + if add_image_embedding_ln: + self.image_embedding_ln = nn.LayerNorm(embedding_dim) + else: + self.image_embedding_ln = None + + self.entangle_position_embedding = entangle_position_embedding + + # Build position embedding + self.embed_positions = nn.Embedding(self.max_source_positions + 2, + embedding_dim) + self.pos_ln = nn.LayerNorm(embedding_dim) + self.embed_image_positions = nn.Embedding(image_bucket_size**2 + 1, + embedding_dim) + self.image_pos_ln = nn.LayerNorm(embedding_dim) + + self.pos_scaling = float(embedding_dim / num_heads * + attn_scale_factor)**-0.5 + self.pos_q_linear = nn.Linear(embedding_dim, embedding_dim) + self.pos_k_linear = nn.Linear(embedding_dim, embedding_dim) + + self.dropout = nn.Dropout( + dropout_rate) if dropout_rate > 0. 
else nn.Identity() + + # Register token relative position embedding table + self.token_bucket_size = token_bucket_size + token_num_rel_dis = 2 * token_bucket_size - 1 + token_rp_bucket = make_token_bucket_position(token_bucket_size, + self.max_source_positions) + self.register_buffer('token_rp_bucket', token_rp_bucket) + self.token_rel_pos_table_list = nn.ModuleList() + + # Register image relative position embedding table + self.image_bucket_size = image_bucket_size + image_num_rel_dis = (2 * image_bucket_size - + 1) * (2 * image_bucket_size - 1) + 3 + image_rp_bucket = make_image_bucket_position(image_bucket_size, + image_num_rel_dis) + self.register_buffer('image_rp_bucket', image_rp_bucket) + self.image_rel_pos_table_list = nn.ModuleList() + + # Build encoder layers + self.layers = nn.ModuleList() + dpr = [x.item() for x in torch.linspace(0, drop_path_rate, num_layers)] + for index in range(self.num_layers): + layer = OFAEncoderLayer( + embedding_dim=embedding_dim, + num_heads=num_heads, + dropout_rate=dropout_rate, + drop_path_rate=dpr[index], + scale_factor=attn_scale_factor, + pre_norm=pre_norm, + ) + self.layers.append(layer) + token_pos_table = nn.Embedding(token_num_rel_dis, self.num_heads) + image_pos_table = nn.Embedding(image_num_rel_dis, self.num_heads) + nn.init.constant_(token_pos_table.weight, 0.) + nn.init.constant_(image_pos_table.weight, 0.) + self.token_rel_pos_table_list.append(token_pos_table) + self.image_rel_pos_table_list.append(image_pos_table) + + if pre_norm: + self.final_ln = nn.LayerNorm(embedding_dim) + else: + self.final_ln = None + + main_input_name = 'input_ids' + + def forward(self, + input_ids, + images, + images_mask, + output_attentions=False, + output_hidden_states=False, + sample_patch_num=None): + padding_mask = input_ids.eq(self.padding_idx) + has_pads = padding_mask.any() + token_embedding = self.embed_tokens(input_ids) + token_embedding = self.embedding_scale * token_embedding + + # Embed the token position + src_pos_idx = torch.arange(input_ids.size(-1), device=input_ids.device) + src_pos_idx = src_pos_idx.expand(*input_ids.shape).contiguous() + pos_embedding = self.embed_positions(src_pos_idx) + + # Embed the input tokens + x = self.process_embedding( + embedding=token_embedding, + type_tokens=input_ids.new_zeros(token_embedding.shape[:2]), + pos_embedding=pos_embedding, + embedding_ln=self.embedding_ln, + ) + pos_embedding = self.pos_ln(pos_embedding) + + # Embed the input images + if images is not None: + (image_tokens, image_padding_mask, image_position_ids, + image_pos_embedding) = self.get_image_tokens( + images, + sample_patch_num, + images_mask, + ) + image_embedding = self.image_proj(image_tokens) + + image_x = self.process_embedding( + embedding=image_embedding, + type_tokens=input_ids.new_ones(image_embedding.shape[:2]), + pos_embedding=image_pos_embedding, + embedding_ln=self.image_embedding_ln, + ) + image_pos_embedding = self.image_pos_ln(image_pos_embedding) + + x = torch.cat([image_x, x], dim=1) + padding_mask = torch.cat([image_padding_mask, padding_mask], dim=1) + pos_embedding = torch.cat([image_pos_embedding, pos_embedding], + dim=1) + + # account for padding while computing the representation + if has_pads: + x = x * (1 - padding_mask.unsqueeze(-1).type_as(x)) + + # Decoupled position embedding + B, L = pos_embedding.shape[:2] + pos_q = self.pos_q_linear(pos_embedding).view( + B, L, self.num_heads, -1).transpose(1, 2) * self.pos_scaling + pos_k = self.pos_k_linear(pos_embedding).view(B, L, self.num_heads, + -1).transpose(1, 
2) + abs_pos_bias = torch.matmul(pos_q, pos_k.transpose(2, 3)) + + all_hidden_states = [] if output_hidden_states else None + all_attentions = [] if output_attentions else None + + for idx, layer in enumerate(self.layers): + if output_hidden_states: + all_hidden_states.append(x) + + self_attn_bias = abs_pos_bias.clone() + # Add decoupled position embedding for input tokens. + token_len = input_ids.size(1) + rel_pos_bias = self.get_rel_pos_bias(input_ids, idx) + self_attn_bias[:, :, -token_len:, -token_len:] += rel_pos_bias + + # Add decoupled position embedding for images + if images is not None: + token_len = image_tokens.size(1) + rel_pos_bias = self.get_image_rel_pos_bias( + image_position_ids, idx) + self_attn_bias[:, :, :token_len, :token_len] += rel_pos_bias + + if has_pads: + attention_mask = _expand_mask(padding_mask, dtype=x.dtype) + else: + attention_mask = None + + out = layer( + x, + attention_mask=attention_mask, + attn_bias=self_attn_bias, + output_attentions=output_attentions) + x = out[0] + + if output_attentions: + all_attentions.append(out[1]) + + if output_hidden_states: + all_hidden_states.append(x) + + if self.final_ln is not None: + x = self.final_ln(x) + + return OFAEncoderOutput( + last_hidden_state=x, # (B, L, C) + padding_mask=padding_mask, # (B, L) + position_embedding=pos_embedding, # (B, L, C) + hidden_states=all_hidden_states, # list of (B, L, C) + attentions=all_attentions, # list of (B, num_heads, L, head_dims) + ) + + def get_image_tokens(self, images, sample_patch_num, images_mask): + image_embedding = self.embed_images(images)[-1] + B, C, H, W = image_embedding.shape + num_patches = H * W + + padding_mask = images.new_zeros((B, num_patches)).bool() + position_col = torch.arange(W).unsqueeze(0) + position_row = torch.arange(H).unsqueeze(1) * self.image_bucket_size + position_idx = (position_col + position_row + 1).view(-1) + position_idx = position_idx.to(images.device).expand(B, num_patches) + + # (B, C, H, W) -> (B, C, H*W) -> (B, H*W, C) + image_embedding = image_embedding.flatten(2).transpose(1, 2) + if sample_patch_num is not None: + patch_orders = torch.stack([ + torch.randperm(num_patches)[:sample_patch_num] + for _ in range(B) + ]) + num_patches = sample_patch_num + image_embedding = image_embedding.gather( + dim=1, index=patch_orders.unsqueeze(2).expand(-1, -1, C)) + padding_mask = padding_mask.gather(1, patch_orders) + position_idx = position_idx.gather(1, patch_orders) + + pos_embedding = self.embed_image_positions(position_idx) + padding_mask[~images_mask] = True + return image_embedding, padding_mask, position_idx, pos_embedding + + def process_embedding(self, + embedding, + pos_embedding=None, + type_tokens=None, + embedding_ln=None): + if self.entangle_position_embedding and pos_embedding is not None: + embedding += pos_embedding + if self.embed_type is not None: + embedding += self.embed_type(type_tokens) + if embedding_ln is not None: + embedding = embedding_ln(embedding) + embedding = self.dropout(embedding) + + return embedding + + def get_rel_pos_bias(self, x, idx): + seq_len = x.size(1) + rp_bucket = self.token_rp_bucket[:seq_len, :seq_len] + values = F.embedding(rp_bucket, + self.token_rel_pos_table_list[idx].weight) + values = values.unsqueeze(0).expand(x.size(0), -1, -1, -1) + values = values.permute([0, 3, 1, 2]) + return values.contiguous() + + def get_image_rel_pos_bias(self, image_position_ids, idx): + bsz, seq_len = image_position_ids.shape + rp_bucket_size = self.image_rp_bucket.size(1) + + rp_bucket = 
self.image_rp_bucket.unsqueeze(0).expand( + bsz, rp_bucket_size, rp_bucket_size).gather( + 1, image_position_ids[:, :, None].expand( + bsz, seq_len, rp_bucket_size)).gather( + 2, image_position_ids[:, None, :].expand( + bsz, seq_len, seq_len)) + values = F.embedding(rp_bucket, + self.image_rel_pos_table_list[idx].weight) + values = values.permute(0, 3, 1, 2) + return values + + +class OFADecoder(BaseModule): + """The decoder module of OFA. + + Args: + embed_tokens (nn.Embedding): The embedding module to embed the + input tokens. + num_layers (int): The number of decoder layers. Defaults to 6. + num_heads (int): The number of heads of attention. Defaults to 12. + dropout_rate (float): The prob of dropout for embedding and + transformer layers. Defaults to 0. + drop_path_rate (float): The prob of droppath for transformer layers. + Defaults to 0. + max_target_positions (int): The maximum length of the input tokens. + Defaults to 1024. + code_image_size (int): The resolution of the generated image in the + image infilling task. Defaults to 128. + token_bucket_size (int): The token bucket size, it's used as the + maximum relative position index in relative position embedding + of input tokens. Defaults to 256. + image_bucket_size (int): The image bucket size, it's used to generate + the image relative position embedding table. It should be larger + than the shape of image feature map. Defaults to 42. + attn_scale_factor (float): The scale factor to calculate qk scale in + attentions. Defaults to 2. + scale_embedding (bool): Whether to scale the embeddings by the square + root of the dimension. Defaults to False. + add_embedding_ln (bool): Whether to add an extra layer norm for token + embeddings. Defaults to True. + add_code_embedding_ln (bool): Whether to add an extra layer norm for + code embeddings. Defaults to True. + pre_norm (bool): Whether to do layer norm before attention and ffn + blocks in transformer layers. Defaults to True. + entangle_position_embedding (bool): Whether to add the position + embedding on the embeddings directly. Defaults to False. + share_input_output_embed (bool): Share the weights of the input token + embedding module and the output projection module. + Defaults to True. + init_cfg (dict, optional): The initialization config. Defaults to None. 
+ """ + + def __init__( + self, + embed_tokens, + num_layers=6, + num_heads=12, + dropout_rate=0., + drop_layer_rate=0., + drop_path_rate=0., + max_target_positions=1024, + code_image_size=128, + token_bucket_size=256, + image_bucket_size=42, + attn_scale_factor=2., + scale_embedding=False, + add_embedding_ln=True, + add_code_embedding_ln=True, + pre_norm=True, + entangle_position_embedding=False, + share_input_output_embed=True, + init_cfg=None, + ): + super().__init__(init_cfg=init_cfg) + self._future_mask = torch.empty(0) + + self.num_layers = num_layers + embedding_dim = embed_tokens.embedding_dim + self.embedding_dim = embedding_dim + self.padding_idx = embed_tokens.padding_idx + self.max_target_positions = max_target_positions + self.num_heads = num_heads + + # Build embedding process components + self.embed_tokens = embed_tokens + self.embedding_scale = math.sqrt( + embedding_dim) if scale_embedding else 1.0 + + if add_embedding_ln: + self.embedding_ln = nn.LayerNorm(embedding_dim) + else: + self.embedding_ln = None + + if add_code_embedding_ln: + self.code_embedding_ln = nn.LayerNorm(embedding_dim) + else: + self.code_embedding_ln = None + + # Build position embedding + self.embed_positions = nn.Embedding(self.max_target_positions + 2, + embedding_dim) + self.pos_ln = nn.LayerNorm(embedding_dim) + self.embed_image_positions = nn.Embedding(image_bucket_size**2 + 1, + embedding_dim) + self.image_pos_ln = nn.LayerNorm(embedding_dim) + + self.pos_scaling = float(embedding_dim / num_heads * + attn_scale_factor)**-0.5 + self.self_pos_q_linear = nn.Linear(embedding_dim, embedding_dim) + self.self_pos_k_linear = nn.Linear(embedding_dim, embedding_dim) + self.cross_pos_q_linear = nn.Linear(embedding_dim, embedding_dim) + self.cross_pos_k_linear = nn.Linear(embedding_dim, embedding_dim) + + self.entangle_position_embedding = entangle_position_embedding + + self.dropout = nn.Dropout( + dropout_rate) if dropout_rate > 0. 
else nn.Identity() + if drop_layer_rate > 0.: + raise NotImplementedError + + # Register token relative position embedding table + self.token_bucket_size = token_bucket_size + token_num_rel_dis = 2 * token_bucket_size - 1 + token_rp_bucket = make_token_bucket_position(token_bucket_size) + self.register_buffer('token_rp_bucket', token_rp_bucket) + self.token_rel_pos_table_list = nn.ModuleList() + + # Register image relative position embedding table + self.image_bucket_size = image_bucket_size + image_num_rel_dis = (2 * image_bucket_size - + 1) * (2 * image_bucket_size - 1) + 3 + image_rp_bucket = make_image_bucket_position(image_bucket_size, + image_num_rel_dis) + self.register_buffer('image_rp_bucket', image_rp_bucket) + self.image_rel_pos_table_list = nn.ModuleList() + + self.window_size = code_image_size // 8 + + position_col = torch.arange(self.window_size).unsqueeze(0) + position_row = torch.arange( + self.window_size).unsqueeze(1) * self.image_bucket_size + image_position_idx = (position_col + position_row + 1) + image_position_idx = torch.cat( + [torch.tensor([0]), image_position_idx.view(-1)]) + image_position_idx = torch.cat( + [image_position_idx, + torch.tensor([1024] * 768)]) + self.register_buffer('image_position_idx', image_position_idx) + + # Build decoder layers + self.layers = nn.ModuleList() + dpr = [x.item() for x in torch.linspace(0, drop_path_rate, num_layers)] + for index in range(self.num_layers): + layer = OFADecoderLayer( + embedding_dim=embedding_dim, + num_heads=num_heads, + dropout_rate=dropout_rate, + drop_path_rate=dpr[index], + scale_factor=attn_scale_factor, + pre_norm=pre_norm, + ) + self.layers.append(layer) + token_pos_table = nn.Embedding(token_num_rel_dis, self.num_heads) + image_pos_table = nn.Embedding(image_num_rel_dis, self.num_heads) + nn.init.constant_(token_pos_table.weight, 0.) + nn.init.constant_(image_pos_table.weight, 0.) 
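            # (Editorial note, not part of the original diff.) Each decoder
            # layer owns its own pair of zero-initialised relative-position
            # bias tables; they are appended to the module lists below and
            # indexed per layer id in ``get_rel_pos_bias`` and
            # ``get_image_rel_pos_bias``.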
+ self.token_rel_pos_table_list.append(token_pos_table) + self.image_rel_pos_table_list.append(image_pos_table) + + if pre_norm: + self.final_ln = nn.LayerNorm(embedding_dim) + else: + self.final_ln = None + + # Build output projection + if share_input_output_embed: + self.output_projection = nn.Linear( + self.embed_tokens.weight.shape[1], + self.embed_tokens.weight.shape[0], + bias=False, + ) + self.output_projection.weight = self.embed_tokens.weight + else: + vocab_size = self.embed_tokens.num_embeddings + self.output_projection = nn.Linear( + embedding_dim, vocab_size, bias=False) + nn.init.normal_( + self.output_projection.weight, + mean=0, + std=embedding_dim**-0.5, + ) + + main_input_name = 'input_ids' + + def forward( + self, + input_ids: torch.Tensor = None, + attention_mask: torch.Tensor = None, + encoder_hidden_states: torch.Tensor = None, + encoder_attention_mask: torch.Tensor = None, + code_masks: Optional[torch.Tensor] = None, + encoder_pos_embedding: Optional[torch.Tensor] = None, + past_key_values: Optional[torch.Tensor] = None, + use_cache: bool = False, + output_attentions: bool = False, + output_hidden_states: bool = False, + ): + + if past_key_values is not None and len(past_key_values) > 0: + B, _, L_past, _ = past_key_values[0][0].shape + L = L_past + 1 + else: + B, L = input_ids.shape + L_past = 0 + + # Embed the token position + target_pos_idx = torch.arange( + L, device=input_ids.device).expand([B, L]).contiguous() + pos_embedding = self.embed_positions(target_pos_idx) + + # Embed the code positions + if code_masks is not None and torch.any(code_masks): + image_position_idx = self.image_position_idx[:input_ids.size(1)] + image_position_idx = image_position_idx.unsqueeze(0).expand(B, L) + pos_embedding[code_masks] = self.embed_image_positions( + image_position_idx)[code_masks] + + # Self-attention position bias (B, num_heads, L_t, L_t) + self_abs_pos_bias = self.get_pos_info(self.pos_ln(pos_embedding)) + if code_masks is not None and torch.any(code_masks): + self_image_abs_pos_bias = self.get_pos_info( + self.image_pos_ln(pos_embedding)) + self_abs_pos_bias[code_masks] = self_image_abs_pos_bias[code_masks] + + # Cross-attention position bias (B, num_heads, L_t, L_s) + cross_abs_pos_bias = self.get_pos_info( + self.pos_ln(pos_embedding), encoder_pos_embedding) + if code_masks is not None and torch.any(code_masks): + cross_image_abs_pos_bias = self.get_pos_info( + self.image_pos_ln(pos_embedding), encoder_pos_embedding) + cross_abs_pos_bias[code_masks] = cross_image_abs_pos_bias[ + code_masks] + + all_prev_output_tokens = input_ids.clone() + if past_key_values is not None and len(past_key_values) > 0: + input_ids = input_ids[:, -1:] + cross_abs_pos_bias = cross_abs_pos_bias[:, :, -1:, :] + pos_embedding = pos_embedding[:, -1:, :] + + # Embed the input tokens + x = self.embed_tokens(input_ids) * self.embedding_scale + + if self.entangle_position_embedding: + x += pos_embedding + + if self.embedding_ln is not None: + if (code_masks is None or not code_masks.any() + or self.code_embedding_ln is None): + x = self.embedding_ln(x) + elif code_masks is not None and code_masks.all(): + x = self.code_embedding_ln(x) + else: + x[~code_masks] = self.embedding_ln(x[~code_masks]) + x[code_masks] = self.code_embedding_ln(x[code_masks]) + + x = self.dropout(x) + + attention_mask = self._prepare_decoder_attention_mask( + attention_mask, input_ids.shape, x.dtype, L_past) + attention_mask = attention_mask.to(x.device) + + # decoder layers + all_hidden_states = [] if output_hidden_states 
else None + all_self_attns = [] if output_attentions else None + all_cross_attentions = [] if ( + output_attentions and encoder_hidden_states is not None) else None + next_decoder_cache = [] if use_cache else None + + for idx, layer in enumerate(self.layers): + # add hidden states from the last decoder layer + if output_hidden_states: + all_hidden_states.append(x) + + if past_key_values is not None and len(past_key_values) > 0: + past_key_value = past_key_values[idx] + else: + past_key_value = None + + self_attn_bias = self_abs_pos_bias.clone() + if code_masks is None or not code_masks.any(): + self_attn_bias += self.get_rel_pos_bias( + all_prev_output_tokens, idx) + elif code_masks is not None and code_masks.all(): + self_attn_bias += self.get_image_rel_pos_bias( + all_prev_output_tokens, idx) + else: + self_attn_bias[~code_masks] += self.get_rel_pos_bias( + all_prev_output_tokens, idx) + self_attn_bias[code_masks] += self.get_image_rel_pos_bias( + all_prev_output_tokens, idx) + + if past_key_value is not None: + self_attn_bias = self_attn_bias[:, :, -1:, :] + + out = layer( + x, + attention_mask=attention_mask, + encoder_hidden_states=encoder_hidden_states, + encoder_attention_mask=encoder_attention_mask, + past_key_value=past_key_value, + output_attentions=output_attentions, + use_cache=use_cache, + self_attn_bias=self_attn_bias, + cross_attn_bias=cross_abs_pos_bias, + ) + x = out.pop(0) + + if output_attentions: + all_self_attns.append(out.pop(0)) + if encoder_hidden_states is not None: + all_cross_attentions.append(out.pop(0)) + + if use_cache: + next_decoder_cache.append(out.pop(0)) + + # add hidden states from the last decoder layer + if output_hidden_states: + all_hidden_states += (x, ) + + if self.final_ln is not None: + x = self.final_ln(x) + + x = self.output_projection(x) + + return BaseModelOutputWithPastAndCrossAttentions( + last_hidden_state=x, + past_key_values=next_decoder_cache, + hidden_states=all_hidden_states, + attentions=all_self_attns, + cross_attentions=all_cross_attentions, + ) + + def _prepare_decoder_attention_mask( + self, + attention_mask, + input_shape, + dtype, + past_key_values_length, + ): + r""" + Create causal mask for unidirectional decoding. 
+ [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len] + """ + combined_attention_mask = None + if input_shape[-1] > 1: + combined_attention_mask = _make_causal_mask( + input_shape, + dtype, + past_key_values_length=past_key_values_length).to( + attention_mask.device) + + if attention_mask is not None: + # (B, L_s) -> (B, 1, L_t, L_s) + expanded_attention_mask = _expand_mask( + attention_mask, dtype, tgt_len=input_shape[-1]) + combined_attention_mask = ( + expanded_attention_mask if combined_attention_mask is None else + expanded_attention_mask + combined_attention_mask) + + return combined_attention_mask + + def get_pos_info(self, pos_embedding, src_pos_embedding=None): + B, tgt_len = pos_embedding.shape[:2] + if src_pos_embedding is not None: + src_len = src_pos_embedding.size(1) + pos_q = self.cross_pos_q_linear(pos_embedding).view( + B, tgt_len, self.num_heads, -1).transpose(1, 2) + pos_q = pos_q * self.pos_scaling + pos_k = self.cross_pos_k_linear(src_pos_embedding).view( + B, src_len, self.num_heads, -1).transpose(1, 2) + else: + pos_q = self.self_pos_q_linear(pos_embedding).view( + B, tgt_len, self.num_heads, -1).transpose(1, 2) + pos_q = pos_q * self.pos_scaling + pos_k = self.self_pos_k_linear(pos_embedding).view( + B, tgt_len, self.num_heads, -1).transpose(1, 2) + + abs_pos_bias = torch.matmul(pos_q, pos_k.transpose(2, 3)) + + return abs_pos_bias + + def get_rel_pos_bias(self, x, idx): + seq_len = x.size(1) + rp_bucket = self.token_rp_bucket[:seq_len, :seq_len] + values = F.embedding(rp_bucket, + self.token_rel_pos_table_list[idx].weight) + values = values.unsqueeze(0).expand(x.size(0), -1, -1, -1) + values = values.permute([0, 3, 1, 2]) + return values.contiguous() + + def get_image_rel_pos_bias(self, image_position_ids, idx): + bsz, seq_len = image_position_ids.shape + rp_bucket_size = self.image_rp_bucket.size(1) + + rp_bucket = self.image_rp_bucket.unsqueeze(0).expand( + bsz, rp_bucket_size, rp_bucket_size).gather( + 1, image_position_ids[:, :, None].expand( + bsz, seq_len, rp_bucket_size)).gather( + 2, image_position_ids[:, None, :].expand( + bsz, seq_len, seq_len)) + values = F.embedding(rp_bucket, + self.image_rel_pos_table_list[idx].weight) + values = values.permute(0, 3, 1, 2) + return values + + +class OFAEncoderDecoder(BaseModule, GenerationMixin): + """The OFA main architecture with an encoder and a decoder. + + Args: + encoder_cfg (dict): The config of the encoder, accept the keyword + arguments of :class:`OFAEncoder`. + decoder_cfg (dict): The config of the decoder, accept the keyword + arguments of :class:`OFADecoder`. + padding_idx (int): The index of the padding token. + vocab_size (int): The size of the vocabulary. + embedding_dim (int): The embedding dimensions of both the encoder + and the decoder. + generation_cfg (dict): The extra generation config, accept the keyword + arguments of :class:`~transformers.GenerationConfig`. + Defaults to an empty dict. + init_cfg (dict, optional): The initialization config. Defaults to None. 
+ """ + + def __init__( + self, + encoder_cfg, + decoder_cfg, + padding_idx, + vocab_size, + embedding_dim, + generation_cfg=dict(), + init_cfg=None, + ): + super().__init__(init_cfg=init_cfg) + + self.padding_idx = padding_idx + self.vocab_size = vocab_size + self.embedding_dim = embedding_dim + embed_tokens = nn.Embedding(vocab_size, embedding_dim, padding_idx) + + self.encoder = OFAEncoder(embed_tokens, **encoder_cfg) + self.decoder = OFADecoder(embed_tokens, **decoder_cfg) + + self.config = PretrainedConfig( + vocab_size=vocab_size, + embedding_dim=embedding_dim, + padding_idx=padding_idx, + bos_token_id=0, + decoder_start_token_id=0, + pad_token_id=1, + eos_token_id=2, + forced_eos_token_id=2, + use_cache=False, + is_encoder_decoder=True, + ) + self.config.update(generation_cfg) + + self.generation_config = GenerationConfig.from_model_config( + self.config) + + @property + def device(self): + return next(self.parameters()).device + + def can_generate(self): + return True + + def get_encoder(self): + return self.encoder + + def get_decoder(self): + return self.decoder + + def max_decoder_positions(self): + """Maximum length supported by the decoder.""" + return self.decoder.max_positions() + + def get_normalized_probs(self, net_output, log_probs: bool, sample=None): + """Get normalized probabilities (or log probs) from a net's output.""" + return self.get_normalized_probs_scriptable(net_output, log_probs, + sample) + + def get_normalized_probs_scriptable( + self, + net_output, + log_probs: bool, + sample=None, + ): + """Scriptable helper function for get_normalized_probs in. + + ~BaseFairseqModel. + """ + if hasattr(self, 'decoder'): + return self.decoder.get_normalized_probs(net_output, log_probs, + sample) + elif torch.is_tensor(net_output): + # syntactic sugar for simple models which don't have a decoder + # (e.g., the classification tutorial) + logits = net_output.float() + if log_probs: + return F.log_softmax(logits, dim=-1) + else: + return F.softmax(logits, dim=-1) + raise NotImplementedError + + main_input_name = 'input_ids' + + def forward(self, + input_ids=None, + images=None, + images_mask=None, + sample_patch_num=None, + decoder_input_ids=None, + code_masks=None, + attention_mask=None, + encoder_outputs=None, + past_key_values=None, + use_cache=False, + output_attentions=False, + output_hidden_states=False, + constrain_fn=None, + return_dict=False): + """Forword the module. + + Args: + input_ids (torch.Tensor): The indices of the input tokens in the + vocabulary, and padding will be ignored by default. The indices + can be obtained using :class:`OFATokenizer`. + The shape is (B, L). + images (torch.Tensor): The input images. The shape is (B, 3, H, W). + images_mask (torch.Tensor): The mask of all available images. The + shape is (B, ). + sample_patch_num (int): The number of patches to sample for the + images. Defaults to None, which means to use all patches. + decoder_input_ids (torch.Tensor): The indices of the input tokens + for the decoder. + code_masks (torch.Tensor): The mask of all samples for image + generation. The shape is (B, ). + attention_mask (torch.Tensor): The attention mask for decoding. + The shape is (B, L). + encoder_outputs (OFAEncoderOutput): The encoder outputs with hidden + states, positional embeddings, and padding masks. + past_key_values (Tuple[Tuple[torch.Tensor]]): If use cache, the + parameter is a tuple of length ``num_layers``. 
Every item is + also a tuple with four tensors, two for the key and value of + self-attention, two for the key and value of cross-attention. + use_cache (bool): Whether to use cache for faster inference. + Defaults to False. + output_attentions (bool): Whether to output attention weights. + Defaults to False. + output_hidden_states (bool): Whether to output hidden states. + Defaults to False. + constrain_fn (Callable, optional): The function to constrain the + output logits. Defaults to None. + return_dict (bool): Not used, it's only for compat with the + interface of the ``generate`` of ``transformers``. + + Returns: + Seq2SeqLMOutput: + + - logits (``torch.Tensor``): The last decoder hidden states. + The shape is (B, L, C). + - past_key_values (``Tuple[Tuple[torch.Tensor]]``): The past keys + and values for faster inference. + - decoder_hidden_states (``Tuple[torch.Tensor]``): the decoder + hidden states of all layers. + - decoder_attentions (``Tuple[torch.Tensor]``): The self-attention + weights of all layers in the decoder. + - cross_attentions (``Tuple[torch.Tensor]``): The cross-attention + weights of all layers in the decoder. + - encoder_last_hidden_state (``torch.Tensor``): The last encoder + hidden states. + - encoder_hidden_states (``Tuple[torch.Tensor]``): The encoder + hidden states of all layers, including the embeddings. + - encoder_attentions (``Tuple[torch.Tensor]``): The self-attention + weights of all layers in the encoder. + """ + + if encoder_outputs is None: + encoder_outputs = self.encoder( + input_ids=input_ids, + images=images, + images_mask=images_mask, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + sample_patch_num=sample_patch_num, + ) + + if decoder_input_ids.eq(self.padding_idx).any(): + attention_mask = decoder_input_ids.eq(self.padding_idx) + + encoder_hidden_states = encoder_outputs.last_hidden_state + encoder_attention_mask = _expand_mask(encoder_outputs.padding_mask, + encoder_hidden_states.dtype, + decoder_input_ids.shape[-1]) + src_pos_embed = encoder_outputs.position_embedding + + decoder_outputs = self.decoder( + input_ids=decoder_input_ids, + attention_mask=attention_mask, + encoder_hidden_states=encoder_hidden_states, + encoder_attention_mask=encoder_attention_mask, + code_masks=code_masks, + encoder_pos_embedding=src_pos_embed, + past_key_values=past_key_values, + use_cache=use_cache, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + ) + + # The constrain operation for fine-tuned model in OFA is applied + # before log_softmax, therefore we cannot use + # `prefix_allowed_tokens_fn` to implement it. 
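        # (Editorial note, not part of the original diff.) ``constrain_fn``
        # is expected to be the ``apply_constraint`` partial assembled in
        # ``OFA.predict`` (ofa.py); presumably it masks logits that fall
        # outside the answer Trie or the per-sample decoder prompts, so only
        # admissible tokens survive beam search.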
+ if constrain_fn is not None: + logits = constrain_fn(decoder_input_ids, + decoder_outputs.last_hidden_state) + else: + logits = decoder_outputs.last_hidden_state + + return Seq2SeqLMOutput( + logits=logits, + past_key_values=decoder_outputs.past_key_values, + decoder_hidden_states=decoder_outputs.hidden_states, + decoder_attentions=decoder_outputs.attentions, + cross_attentions=decoder_outputs.cross_attentions, + encoder_last_hidden_state=encoder_outputs.last_hidden_state, + encoder_hidden_states=encoder_outputs.hidden_states, + encoder_attentions=encoder_outputs.attentions, + ) + + def prepare_inputs_for_generation(self, + decoder_input_ids=None, + past=None, + attention_mask=None, + code_masks=None, + use_cache=False, + encoder_outputs=None, + constrain_fn=None, + **kwargs): + # if attention_mask is None: + attention_mask = decoder_input_ids.new_zeros(decoder_input_ids.shape) + + # cut decoder_input_ids if past is used + if past is not None: + decoder_input_ids = decoder_input_ids[:, -1:] + + return { + 'input_ids': None, + 'images': None, + 'images_mask': None, + 'sample_patch_num': None, + 'attention_mask': attention_mask, + 'encoder_outputs': encoder_outputs, + 'past_key_values': past, + 'decoder_input_ids': decoder_input_ids, + 'code_masks': code_masks, + 'use_cache': use_cache, + 'constrain_fn': constrain_fn, + } + + def _prepare_encoder_decoder_kwargs_for_generation( + self, + inputs_tensor: torch.Tensor, + model_kwargs, + model_input_name: Optional[str] = None): + # 1. get encoder + encoder = self.get_encoder() + + # 2. prepare encoder args and encoder kwargs from model kwargs + irrelevant_prefix = [ + 'decoder_', 'cross_attn', 'use_cache', 'attention_mask', + 'constrain_fn' + ] + encoder_kwargs = { + argument: value + for argument, value in model_kwargs.items() + if not any(argument.startswith(p) for p in irrelevant_prefix) + } + + if encoder_kwargs.get('images_mask') is None: + encoder_kwargs['images_mask'] = torch.tensor([True] * + inputs_tensor.size(0)) + + # 3. 
make sure that encoder returns `ModelOutput` + model_input_name = model_input_name or self.main_input_name + encoder_kwargs[model_input_name] = inputs_tensor + model_kwargs['encoder_outputs']: ModelOutput = encoder( + **encoder_kwargs) + model_kwargs['attention_mask'] = None + + return model_kwargs + + @staticmethod + def _reorder_cache(past, beam_idx): + reordered_past = () + for layer_past in past: + reordered_past += (tuple( + past_state.index_select(0, beam_idx) + for past_state in layer_past), ) + return reordered_past + + @staticmethod + def _expand_inputs_for_generation( + input_ids: torch.LongTensor, + expand_size: int = 1, + is_encoder_decoder: bool = False, + attention_mask: Optional[torch.LongTensor] = None, + encoder_outputs: Optional[ModelOutput] = None, + **model_kwargs, + ): + expanded_return_idx = ( + torch.arange(input_ids.shape[0]).view(-1, 1).repeat( + 1, expand_size).view(-1).to(input_ids.device)) + input_ids = input_ids.index_select(0, expanded_return_idx) + + if attention_mask is not None: + model_kwargs['attention_mask'] = attention_mask.index_select( + 0, expanded_return_idx) + + if is_encoder_decoder: + if encoder_outputs is None: + raise ValueError('If `is_encoder_decoder` is True, make ' + 'sure that `encoder_outputs` is defined.') + encoder_outputs['last_hidden_state'] = encoder_outputs.\ + last_hidden_state.index_select(0, expanded_return_idx) + encoder_outputs['position_embedding'] = encoder_outputs.\ + position_embedding.index_select(0, expanded_return_idx) + encoder_outputs['padding_mask'] = encoder_outputs.\ + padding_mask.index_select(0, expanded_return_idx) + model_kwargs['encoder_outputs'] = encoder_outputs + return input_ids, model_kwargs diff --git a/mmpretrain/models/necks/__init__.py b/mmpretrain/models/necks/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6f9371c500212da67fc6de9fc276a5263e6d62cf --- /dev/null +++ b/mmpretrain/models/necks/__init__.py @@ -0,0 +1,33 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from .beitv2_neck import BEiTV2Neck +from .cae_neck import CAENeck +from .densecl_neck import DenseCLNeck +from .gap import GlobalAveragePooling +from .gem import GeneralizedMeanPooling +from .hr_fuse import HRFuseScales +from .linear_neck import LinearNeck +from .mae_neck import ClsBatchNormNeck, MAEPretrainDecoder +from .milan_neck import MILANPretrainDecoder +from .mixmim_neck import MixMIMPretrainDecoder +from .mocov2_neck import MoCoV2Neck +from .nonlinear_neck import NonLinearNeck +from .simmim_neck import SimMIMLinearDecoder +from .swav_neck import SwAVNeck + +__all__ = [ + 'GlobalAveragePooling', + 'GeneralizedMeanPooling', + 'HRFuseScales', + 'LinearNeck', + 'BEiTV2Neck', + 'CAENeck', + 'DenseCLNeck', + 'MAEPretrainDecoder', + 'ClsBatchNormNeck', + 'MILANPretrainDecoder', + 'MixMIMPretrainDecoder', + 'MoCoV2Neck', + 'NonLinearNeck', + 'SimMIMLinearDecoder', + 'SwAVNeck', +] diff --git a/mmpretrain/models/necks/beitv2_neck.py b/mmpretrain/models/necks/beitv2_neck.py new file mode 100644 index 0000000000000000000000000000000000000000..745e3879f5e3a4b9269687797728354cb6cf7d4e --- /dev/null +++ b/mmpretrain/models/necks/beitv2_neck.py @@ -0,0 +1,153 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
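# ---------------------------------------------------------------------------
# Editorial usage sketch, not part of the diff: every neck exported in
# mmpretrain/models/necks/__init__.py can be imported directly (or built from
# the MODELS registry). The snippet drives the ``BEiTV2Neck`` defined below
# with its defaults; run it standalone after applying this diff. The shapes
# (batch of 2, 196 patch tokens + 1 cls token, 768-dim 'base' arch) are
# illustrative assumptions, not values prescribed by the repository.
# ---------------------------------------------------------------------------
import torch

from mmpretrain.models.necks import BEiTV2Neck

neck = BEiTV2Neck()  # defaults: 'base' arch, 2 patch-aggregation layers
early_states = torch.rand(2, 197, 768)   # early-layer tokens from backbone
final_states = torch.rand(2, 197, 768)   # final-layer tokens from backbone
x, x_cls_pt = neck((early_states, final_states), rel_pos_bias=None)
assert x.shape == x_cls_pt.shape == (2, 196, 768)  # cls token removed
# ---------------------------------------------------------------------------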
+import math +from typing import List, Optional, Tuple, Union + +import numpy as np +import torch +import torch.nn as nn +from mmcv.cnn import build_norm_layer +from mmengine.model import BaseModule + +from mmpretrain.models.backbones.beit import BEiTTransformerEncoderLayer +from mmpretrain.registry import MODELS + + +@MODELS.register_module() +class BEiTV2Neck(BaseModule): + """Neck for BEiTV2 Pre-training. + + This module construct the decoder for the final prediction. + + Args: + num_layers (int): Number of encoder layers of neck. Defaults to 2. + early_layers (int): The layer index of the early output from the + backbone. Defaults to 9. + backbone_arch (str): Vision Transformer architecture. Defaults to base. + drop_rate (float): Probability of an element to be zeroed. + Defaults to 0. + drop_path_rate (float): stochastic depth rate. Defaults to 0. + layer_scale_init_value (float): The initialization value for the + learnable scaling of attention and FFN. Defaults to 0.1. + use_rel_pos_bias (bool): Whether to use unique relative position bias, + if False, use shared relative position bias defined in backbone. + norm_cfg (dict): Config dict for normalization layer. + Defaults to ``dict(type='LN')``. + init_cfg (dict, optional): Initialization config dict. + Defaults to None. + """ + arch_zoo = { + **dict.fromkeys( + ['b', 'base'], { + 'embed_dims': 768, + 'depth': 12, + 'num_heads': 12, + 'feedforward_channels': 3072, + }), + **dict.fromkeys( + ['l', 'large'], { + 'embed_dims': 1024, + 'depth': 24, + 'num_heads': 16, + 'feedforward_channels': 4096, + }), + } + + def __init__( + self, + num_layers: int = 2, + early_layers: int = 9, + backbone_arch: str = 'base', + drop_rate: float = 0., + drop_path_rate: float = 0., + layer_scale_init_value: float = 0.1, + use_rel_pos_bias: bool = False, + norm_cfg: dict = dict(type='LN', eps=1e-6), + init_cfg: Optional[Union[dict, List[dict]]] = dict( + type='TruncNormal', layer='Linear', std=0.02, bias=0) + ) -> None: + super().__init__(init_cfg=init_cfg) + + if isinstance(backbone_arch, str): + backbone_arch = backbone_arch.lower() + assert backbone_arch in set(self.arch_zoo), \ + (f'Arch {backbone_arch} is not in default archs ' + f'{set(self.arch_zoo)}') + self.arch_settings = self.arch_zoo[backbone_arch] + else: + essential_keys = { + 'embed_dims', 'num_layers', 'num_heads', 'feedforward_channels' + } + assert isinstance(backbone_arch, dict) and essential_keys <= set( + backbone_arch + ), f'Custom arch needs a dict with keys {essential_keys}' + self.arch_settings = backbone_arch + + # stochastic depth decay rule + self.early_layers = early_layers + depth = self.arch_settings['depth'] + dpr = np.linspace(0, drop_path_rate, + max(depth, early_layers + num_layers)) + + self.patch_aggregation = nn.ModuleList() + for i in range(early_layers, early_layers + num_layers): + _layer_cfg = dict( + embed_dims=self.arch_settings['embed_dims'], + num_heads=self.arch_settings['num_heads'], + feedforward_channels=self. 
+ arch_settings['feedforward_channels'], + drop_rate=drop_rate, + drop_path_rate=dpr[i], + norm_cfg=norm_cfg, + layer_scale_init_value=layer_scale_init_value, + window_size=None, + use_rel_pos_bias=use_rel_pos_bias) + self.patch_aggregation.append( + BEiTTransformerEncoderLayer(**_layer_cfg)) + + self.rescale_patch_aggregation_init_weight() + + embed_dims = self.arch_settings['embed_dims'] + _, norm = build_norm_layer(norm_cfg, embed_dims) + self.add_module('norm', norm) + + def rescale_patch_aggregation_init_weight(self): + """Rescale the initialized weights.""" + + def rescale(param, layer_id): + param.div_(math.sqrt(2.0 * layer_id)) + + for layer_id, layer in enumerate(self.patch_aggregation): + rescale(layer.attn.proj.weight.data, + self.early_layers + layer_id + 1) + rescale(layer.ffn.layers[1].weight.data, + self.early_layers + layer_id + 1) + + def forward(self, inputs: Tuple[torch.Tensor], rel_pos_bias: torch.Tensor, + **kwargs) -> Tuple[torch.Tensor, torch.Tensor]: + """Get the latent prediction and final prediction. + + Args: + x (Tuple[torch.Tensor]): Features of tokens. + rel_pos_bias (torch.Tensor): Shared relative position bias table. + + Returns: + Tuple[torch.Tensor, torch.Tensor]: + - ``x``: The final layer features from backbone, which are normed + in ``BEiTV2Neck``. + - ``x_cls_pt``: The early state features from backbone, which are + consist of final layer cls_token and early state patch_tokens + from backbone and sent to PatchAggregation layers in the neck. + """ + + early_states, x = inputs[0], inputs[1] + x_cls_pt = torch.cat([x[:, [0]], early_states[:, 1:]], dim=1) + for layer in self.patch_aggregation: + x_cls_pt = layer(x_cls_pt, rel_pos_bias=rel_pos_bias) + + # shared norm + x, x_cls_pt = self.norm(x), self.norm(x_cls_pt) + + # remove cls_token + x = x[:, 1:] + x_cls_pt = x_cls_pt[:, 1:] + return x, x_cls_pt diff --git a/mmpretrain/models/necks/cae_neck.py b/mmpretrain/models/necks/cae_neck.py new file mode 100644 index 0000000000000000000000000000000000000000..2cd2e9271efe247c940a6867b3d89c6cff0e74f0 --- /dev/null +++ b/mmpretrain/models/necks/cae_neck.py @@ -0,0 +1,273 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Tuple + +import torch +import torch.nn as nn +from mmcv.cnn import build_norm_layer +from mmcv.cnn.bricks import DropPath +from mmcv.cnn.bricks.transformer import FFN +from mmengine.model import BaseModule +from mmengine.model.weight_init import trunc_normal_ + +from mmpretrain.models.backbones.beit import BEiTTransformerEncoderLayer +from mmpretrain.registry import MODELS +from ..utils import CrossMultiheadAttention + + +class CAETransformerRegressorLayer(BaseModule): + """Transformer layer for the regressor of CAE. + + This module is different from conventional transformer encoder layer, for + its queries are the masked tokens, but its keys and values are the + concatenation of the masked and unmasked tokens. + + Args: + embed_dims (int): The feature dimension. + num_heads (int): The number of heads in multi-head attention. + feedforward_channels (int): The hidden dimension of FFNs. + Defaults: 1024. + num_fcs (int, optional): The number of fully-connected layers in + FFNs. Default: 2. + qkv_bias (bool): If True, add a learnable bias to q, k, v. + Defaults to True. + qk_scale (float, optional): Override default qk scale of + ``head_dim ** -0.5`` if set. Defaults to None. + drop_rate (float): The dropout rate. Defaults to 0.0. + attn_drop_rate (float): The drop out rate for attention output weights. + Defaults to 0. 
+ drop_path_rate (float): Stochastic depth rate. Defaults to 0. + layer_scale_init_value (float): The init value of gamma. + Defaults to 0.0. + act_cfg (dict): The activation config for FFNs. + Defaluts to ``dict(type='GELU')``. + norm_cfg (dict): Config dict for normalization layer. + Defaults to ``dict(type='LN')``. + """ + + def __init__( + self, + embed_dims: int, + num_heads: int, + feedforward_channels: int, + num_fcs: int = 2, + qkv_bias: bool = False, + qk_scale: float = None, + drop_rate: float = 0., + attn_drop_rate: float = 0., + drop_path_rate: float = 0., + layer_scale_init_value: float = 0.0, + act_cfg: dict = dict(type='GELU'), + norm_cfg: dict = dict(type='LN', eps=1e-6) + ) -> None: + super().__init__() + + # NOTE: cross attention + _, self.norm1_q_cross = build_norm_layer( + norm_cfg, embed_dims, postfix=2) + _, self.norm1_k_cross = build_norm_layer( + norm_cfg, embed_dims, postfix=2) + _, self.norm1_v_cross = build_norm_layer( + norm_cfg, embed_dims, postfix=2) + _, self.norm2_cross = build_norm_layer(norm_cfg, embed_dims, postfix=2) + self.cross_attn = CrossMultiheadAttention( + embed_dims, + num_heads=num_heads, + qkv_bias=qkv_bias, + qk_scale=qk_scale, + attn_drop=attn_drop_rate, + proj_drop=drop_rate) + + self.ffn = FFN( + embed_dims=embed_dims, + feedforward_channels=feedforward_channels, + num_fcs=num_fcs, + ffn_drop=drop_rate, + dropout_layer=None, + act_cfg=act_cfg, + add_identity=False) + + self.drop_path = DropPath(drop_prob=drop_path_rate) + + if layer_scale_init_value > 0: + self.gamma_1_cross = nn.Parameter( + layer_scale_init_value * torch.ones((embed_dims)), + requires_grad=True) + self.gamma_2_cross = nn.Parameter( + layer_scale_init_value * torch.ones((embed_dims)), + requires_grad=True) + else: + self.gamma_1_cross = nn.Parameter( + torch.ones((embed_dims)), requires_grad=False) + self.gamma_2_cross = nn.Parameter( + torch.ones((embed_dims)), requires_grad=False) + + def forward(self, x_q: torch.Tensor, x_kv: torch.Tensor, + pos_q: torch.Tensor, pos_k: torch.Tensor) -> torch.Tensor: + """Forward function.""" + x = x_q + self.drop_path(self.gamma_1_cross * self.cross_attn( + self.norm1_q_cross(x_q + pos_q), + k=self.norm1_k_cross(x_kv + pos_k), + v=self.norm1_v_cross(x_kv))) + x = self.norm2_cross(x) + x = x + self.drop_path(self.gamma_2_cross * self.ffn(x)) + + return x + + +@MODELS.register_module() +class CAENeck(BaseModule): + """Neck for CAE Pre-training. + + This module construct the latent prediction regressor and the decoder + for the latent prediction and final prediction. + + Args: + num_classes (int): The number of classes for final prediction. Defaults + to 8192. + embed_dims (int): The embed dims of latent feature in regressor and + decoder. Defaults to 768. + regressor_depth (int): The number of regressor blocks. Defaults to 6. + decoder_depth (int): The number of decoder blocks. Defaults to 8. + num_heads (int): The number of head in multi-head attention. Defaults + to 12. + mlp_ratio (int): The expand ratio of latent features in MLP. defaults + to 4. + qkv_bias (bool): Whether or not to use qkv bias. Defaults to True. + qk_scale (float, optional): The scale applied to the results of qk. + Defaults to None. + drop_rate (float): The dropout rate. Defaults to 0. + attn_drop_rate (float): The dropout rate in attention block. Defaults + to 0. + norm_cfg (dict): The config of normalization layer. Defaults to + dict(type='LN', eps=1e-6). + layer_scale_init_value (float, optional): The init value of gamma. + Defaults to None. 
+ mask_tokens_num (int): The number of mask tokens. Defaults to 75. + init_cfg (dict, optional): Initialization config dict. + Defaults to None. + """ + + def __init__(self, + num_classes: int = 8192, + embed_dims: int = 768, + regressor_depth: int = 6, + decoder_depth: int = 8, + num_heads: int = 12, + mlp_ratio: int = 4, + qkv_bias: bool = True, + qk_scale: float = None, + drop_rate: float = 0., + attn_drop_rate: float = 0., + drop_path_rate: float = 0., + norm_cfg: dict = dict(type='LN', eps=1e-6), + layer_scale_init_value: float = None, + mask_tokens_num: int = 75, + init_cfg: dict = None) -> None: + super().__init__(init_cfg=init_cfg) + + self.num_features = self.embed_dim = embed_dims + self.mask_token_num = mask_tokens_num + + # regressor + regressor_drop_path_rates = [ + x.item() + for x in torch.linspace(0, drop_path_rate, regressor_depth) + ] + self.regressors = nn.ModuleList([ + CAETransformerRegressorLayer( + embed_dims=embed_dims, + num_heads=num_heads, + feedforward_channels=mlp_ratio * embed_dims, + qkv_bias=qkv_bias, + qk_scale=qk_scale, + drop_rate=drop_rate, + attn_drop_rate=attn_drop_rate, + drop_path_rate=regressor_drop_path_rates[i], + norm_cfg=norm_cfg, + layer_scale_init_value=layer_scale_init_value) + for i in range(regressor_depth) + ]) + + # decoder + decoder_drop_path_rates = [ + x.item() for x in torch.linspace(0, drop_path_rate, decoder_depth) + ] + self.decoders = nn.ModuleList([ + BEiTTransformerEncoderLayer( + embed_dims=embed_dims, + num_heads=num_heads, + feedforward_channels=mlp_ratio * embed_dims, + layer_scale_init_value=layer_scale_init_value, + window_size=None, + # setting `use_rel_pos_bias` to False ignores the `window_size` + use_rel_pos_bias=False, + drop_rate=drop_rate, + attn_drop_rate=attn_drop_rate, + drop_path_rate=decoder_drop_path_rates[i], + norm_cfg=norm_cfg) for i in range(decoder_depth) + ]) + + _, self.norm_regressor = build_norm_layer( + norm_cfg, embed_dims, postfix=2) + _, self.norm_decoder = build_norm_layer( + norm_cfg, embed_dims, postfix=2) + + self.head = nn.Linear( + embed_dims, num_classes) if num_classes > 0 else nn.Identity() + self.mask_token = nn.Parameter(torch.zeros(1, 1, embed_dims)) + + def init_weights(self) -> None: + """Initialization.""" + super().init_weights() + self.apply(self._init_weights) + trunc_normal_(self.mask_token, std=0.02) + trunc_normal_(self.head.weight, std=0.02) + + def _init_weights(self, m: nn.Module) -> None: + """Initialization.""" + if isinstance(m, nn.Linear): + nn.init.xavier_uniform_(m.weight) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.LayerNorm): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + + def forward( + self, x_unmasked: torch.Tensor, pos_embed_masked: torch.Tensor, + pos_embed_unmasked: torch.Tensor + ) -> Tuple[torch.Tensor, torch.Tensor]: + """Get the latent prediction and final prediction. + + Args: + x_unmasked (torch.Tensor): Features of unmasked tokens. + pos_embed_masked (torch.Tensor): Position embedding of masked + tokens. + pos_embed_unmasked (torch.Tensor): Position embedding of unmasked + tokens. + + Returns: + Tuple[torch.Tensor, torch.Tensor]: + - ``logits``: Final prediction. + - ``latent_pred``: Latent prediction. 
+ """ + x_masked = self.mask_token.expand(x_unmasked.shape[0], + self.mask_token_num, -1) + # regressor + for regressor in self.regressors: + x_masked = regressor( + x_masked, torch.cat([x_unmasked, x_masked], dim=1), + pos_embed_masked, + torch.cat([pos_embed_unmasked, pos_embed_masked], dim=1)) + x_masked = self.norm_regressor(x_masked) + latent_pred = x_masked + + # decoder + x_masked = x_masked + pos_embed_masked + for decoder in self.decoders: + x_masked = decoder(x_masked, rel_pos_bias=None) + x_masked = self.norm_decoder(x_masked) + + logits = self.head(x_masked) + + return logits, latent_pred diff --git a/mmpretrain/models/necks/densecl_neck.py b/mmpretrain/models/necks/densecl_neck.py new file mode 100644 index 0000000000000000000000000000000000000000..bee9a2368d8917ece7b4b8ab8d1398ce951ede24 --- /dev/null +++ b/mmpretrain/models/necks/densecl_neck.py @@ -0,0 +1,71 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import List, Optional, Tuple, Union + +import torch +import torch.nn as nn +from mmengine.model import BaseModule + +from mmpretrain.registry import MODELS + + +@MODELS.register_module() +class DenseCLNeck(BaseModule): + """The non-linear neck of DenseCL. + + Single and dense neck in parallel: fc-relu-fc, conv-relu-conv. + Borrowed from the authors' `code `_. + + Args: + in_channels (int): Number of input channels. + hid_channels (int): Number of hidden channels. + out_channels (int): Number of output channels. + num_grid (int): The grid size of dense features. Defaults to None. + init_cfg (dict or list[dict], optional): Initialization config dict. + Defaults to None. + """ + + def __init__(self, + in_channels: int, + hid_channels: int, + out_channels: int, + num_grid: Optional[int] = None, + init_cfg: Optional[Union[dict, List[dict]]] = None) -> None: + super().__init__(init_cfg) + self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) + self.mlp = nn.Sequential( + nn.Linear(in_channels, hid_channels), nn.ReLU(inplace=True), + nn.Linear(hid_channels, out_channels)) + + self.with_pool = True if num_grid is not None else False + if self.with_pool: + self.pool = nn.AdaptiveAvgPool2d((num_grid, num_grid)) + self.mlp2 = nn.Sequential( + nn.Conv2d(in_channels, hid_channels, 1), nn.ReLU(inplace=True), + nn.Conv2d(hid_channels, out_channels, 1)) + self.avgpool2 = nn.AdaptiveAvgPool2d((1, 1)) + + def forward(self, x: Tuple[torch.Tensor]) -> Tuple[torch.Tensor]: + """Forward function of neck. + + Args: + x (Tuple[torch.Tensor]): feature map of backbone. + + Returns: + Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: + - ``avgpooled_x``: Global feature vectors. + - ``x``: Dense feature vectors. + - ``avgpooled_x2``: Dense feature vectors for queue. + """ + assert len(x) == 1 + x = x[0] + + avgpooled_x = self.avgpool(x) + avgpooled_x = self.mlp(avgpooled_x.view(avgpooled_x.size(0), -1)) + + if self.with_pool: + x = self.pool(x) # sxs + x = self.mlp2(x) # sxs: bxdxsxs + avgpooled_x2 = self.avgpool2(x) # 1x1: bxdx1x1 + x = x.view(x.size(0), x.size(1), -1) # bxdxs^2 + avgpooled_x2 = avgpooled_x2.view(avgpooled_x2.size(0), -1) # bxd + return avgpooled_x, x, avgpooled_x2 diff --git a/mmpretrain/models/necks/gap.py b/mmpretrain/models/necks/gap.py new file mode 100644 index 0000000000000000000000000000000000000000..0877743ad1e5a75976feb14f5d34942c0b7b8ee4 --- /dev/null +++ b/mmpretrain/models/necks/gap.py @@ -0,0 +1,45 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+import torch +import torch.nn as nn + +from mmpretrain.registry import MODELS + + +@MODELS.register_module() +class GlobalAveragePooling(nn.Module): + """Global Average Pooling neck. + + Note that we use `view` to remove extra channel after pooling. We do not + use `squeeze` as it will also remove the batch dimension when the tensor + has a batch dimension of size 1, which can lead to unexpected errors. + + Args: + dim (int): Dimensions of each sample channel, can be one of {1, 2, 3}. + Default: 2 + """ + + def __init__(self, dim=2): + super(GlobalAveragePooling, self).__init__() + assert dim in [1, 2, 3], 'GlobalAveragePooling dim only support ' \ + f'{1, 2, 3}, get {dim} instead.' + if dim == 1: + self.gap = nn.AdaptiveAvgPool1d(1) + elif dim == 2: + self.gap = nn.AdaptiveAvgPool2d((1, 1)) + else: + self.gap = nn.AdaptiveAvgPool3d((1, 1, 1)) + + def init_weights(self): + pass + + def forward(self, inputs): + if isinstance(inputs, tuple): + outs = tuple([self.gap(x) for x in inputs]) + outs = tuple( + [out.view(x.size(0), -1) for out, x in zip(outs, inputs)]) + elif isinstance(inputs, torch.Tensor): + outs = self.gap(inputs) + outs = outs.view(inputs.size(0), -1) + else: + raise TypeError('neck inputs should be tuple or torch.tensor') + return outs diff --git a/mmpretrain/models/necks/gem.py b/mmpretrain/models/necks/gem.py new file mode 100644 index 0000000000000000000000000000000000000000..f5648be86303caa6f2c25786fe8c3058c2f98d7e --- /dev/null +++ b/mmpretrain/models/necks/gem.py @@ -0,0 +1,53 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +from torch import Tensor, nn +from torch.nn import functional as F +from torch.nn.parameter import Parameter + +from mmpretrain.registry import MODELS + + +def gem(x: Tensor, p: Parameter, eps: float = 1e-6, clamp=True) -> Tensor: + if clamp: + x = x.clamp(min=eps) + return F.avg_pool2d(x.pow(p), (x.size(-2), x.size(-1))).pow(1. / p) + + +@MODELS.register_module() +class GeneralizedMeanPooling(nn.Module): + """Generalized Mean Pooling neck. + + Note that we use `view` to remove extra channel after pooling. We do not + use `squeeze` as it will also remove the batch dimension when the tensor + has a batch dimension of size 1, which can lead to unexpected errors. + + Args: + p (float): Parameter value. Defaults to 3. + eps (float): epsilon. Defaults to 1e-6. + clamp (bool): Use clamp before pooling. Defaults to True + p_trainable (bool): Toggle whether Parameter p is trainable or not. + Defaults to True. 
+ """ + + def __init__(self, p=3., eps=1e-6, clamp=True, p_trainable=True): + assert p >= 1, "'p' must be a value greater than 1" + super(GeneralizedMeanPooling, self).__init__() + self.p = Parameter(torch.ones(1) * p, requires_grad=p_trainable) + self.eps = eps + self.clamp = clamp + self.p_trainable = p_trainable + + def forward(self, inputs): + if isinstance(inputs, tuple): + outs = tuple([ + gem(x, p=self.p, eps=self.eps, clamp=self.clamp) + for x in inputs + ]) + outs = tuple( + [out.view(x.size(0), -1) for out, x in zip(outs, inputs)]) + elif isinstance(inputs, torch.Tensor): + outs = gem(inputs, p=self.p, eps=self.eps, clamp=self.clamp) + outs = outs.view(inputs.size(0), -1) + else: + raise TypeError('neck inputs should be tuple or torch.tensor') + return outs diff --git a/mmpretrain/models/necks/hr_fuse.py b/mmpretrain/models/necks/hr_fuse.py new file mode 100644 index 0000000000000000000000000000000000000000..4a97f86f9fb9e4cce89e950e54674d5ec3d9b1f7 --- /dev/null +++ b/mmpretrain/models/necks/hr_fuse.py @@ -0,0 +1,83 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch.nn as nn +from mmcv.cnn.bricks import ConvModule +from mmengine.model import BaseModule + +from mmpretrain.registry import MODELS +from ..backbones.resnet import Bottleneck, ResLayer + + +@MODELS.register_module() +class HRFuseScales(BaseModule): + """Fuse feature map of multiple scales in HRNet. + + Args: + in_channels (list[int]): The input channels of all scales. + out_channels (int): The channels of fused feature map. + Defaults to 2048. + norm_cfg (dict): dictionary to construct norm layers. + Defaults to ``dict(type='BN', momentum=0.1)``. + init_cfg (dict | list[dict], optional): Initialization config dict. + Defaults to ``dict(type='Normal', layer='Linear', std=0.01))``. + """ + + def __init__(self, + in_channels, + out_channels=2048, + norm_cfg=dict(type='BN', momentum=0.1), + init_cfg=dict(type='Normal', layer='Linear', std=0.01)): + super(HRFuseScales, self).__init__(init_cfg=init_cfg) + self.in_channels = in_channels + self.out_channels = out_channels + self.norm_cfg = norm_cfg + + block_type = Bottleneck + out_channels = [128, 256, 512, 1024] + + # Increase the channels on each resolution + # from C, 2C, 4C, 8C to 128, 256, 512, 1024 + increase_layers = [] + for i in range(len(in_channels)): + increase_layers.append( + ResLayer( + block_type, + in_channels=in_channels[i], + out_channels=out_channels[i], + num_blocks=1, + stride=1, + )) + self.increase_layers = nn.ModuleList(increase_layers) + + # Downsample feature maps in each scale. + downsample_layers = [] + for i in range(len(in_channels) - 1): + downsample_layers.append( + ConvModule( + in_channels=out_channels[i], + out_channels=out_channels[i + 1], + kernel_size=3, + stride=2, + padding=1, + norm_cfg=self.norm_cfg, + bias=False, + )) + self.downsample_layers = nn.ModuleList(downsample_layers) + + # The final conv block before final classifier linear layer. 
+ self.final_layer = ConvModule( + in_channels=out_channels[3], + out_channels=self.out_channels, + kernel_size=1, + norm_cfg=self.norm_cfg, + bias=False, + ) + + def forward(self, x): + assert isinstance(x, tuple) and len(x) == len(self.in_channels) + + feat = self.increase_layers[0](x[0]) + for i in range(len(self.downsample_layers)): + feat = self.downsample_layers[i](feat) + \ + self.increase_layers[i + 1](x[i + 1]) + + return (self.final_layer(feat), ) diff --git a/mmpretrain/models/necks/linear_neck.py b/mmpretrain/models/necks/linear_neck.py new file mode 100644 index 0000000000000000000000000000000000000000..bcdbee264325c8db0a054f765651a5dbadc968db --- /dev/null +++ b/mmpretrain/models/necks/linear_neck.py @@ -0,0 +1,88 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import copy +from typing import Optional, Tuple, Union + +import torch +import torch.nn as nn +from mmcv.cnn import build_activation_layer, build_norm_layer +from mmengine.model import BaseModule + +from mmpretrain.registry import MODELS + + +@MODELS.register_module() +class LinearNeck(BaseModule): + """Linear neck with Dimension projection. + + Args: + in_channels (int): Number of channels in the input. + out_channels (int): Number of channels in the output. + gap_dim (int): Dimensions of each sample channel, can be one of + {0, 1, 2, 3}. Defaults to 0. + norm_cfg (dict, optional): dictionary to construct and + config norm layer. Defaults to dict(type='BN1d'). + act_cfg (dict, optional): dictionary to construct and + config activate layer. Defaults to None. + init_cfg (dict, optional): dictionary to initialize weights. + Defaults to None. + """ + + def __init__(self, + in_channels: int, + out_channels: int, + gap_dim: int = 0, + norm_cfg: Optional[dict] = dict(type='BN1d'), + act_cfg: Optional[dict] = None, + init_cfg: Optional[dict] = None): + super().__init__(init_cfg=init_cfg) + + self.in_channels = in_channels + self.out_channels = out_channels + self.norm_cfg = copy.deepcopy(norm_cfg) + self.act_cfg = copy.deepcopy(act_cfg) + + assert gap_dim in [0, 1, 2, 3], 'GlobalAveragePooling dim only ' \ + f'support {0, 1, 2, 3}, get {gap_dim} instead.' + if gap_dim == 0: + self.gap = nn.Identity() + elif gap_dim == 1: + self.gap = nn.AdaptiveAvgPool1d(1) + elif gap_dim == 2: + self.gap = nn.AdaptiveAvgPool2d((1, 1)) + elif gap_dim == 3: + self.gap = nn.AdaptiveAvgPool3d((1, 1, 1)) + + self.fc = nn.Linear(in_features=in_channels, out_features=out_channels) + + if norm_cfg: + self.norm = build_norm_layer(norm_cfg, out_channels)[1] + else: + self.norm = nn.Identity() + + if act_cfg: + self.act = build_activation_layer(act_cfg) + else: + self.act = nn.Identity() + + def forward(self, inputs: Union[Tuple, + torch.Tensor]) -> Tuple[torch.Tensor]: + """forward function. + + Args: + inputs (Union[Tuple, torch.Tensor]): The features extracted from + the backbone. Multiple stage inputs are acceptable but only + the last stage will be used. + + Returns: + Tuple[torch.Tensor]: A tuple of output features. 
+ """ + assert isinstance(inputs, (tuple, torch.Tensor)), ( + 'The inputs of `LinearNeck` must be tuple or `torch.Tensor`, ' + f'but get {type(inputs)}.') + if isinstance(inputs, tuple): + inputs = inputs[-1] + + x = self.gap(inputs) + x = x.view(x.size(0), -1) + out = self.act(self.norm(self.fc(x))) + return (out, ) diff --git a/mmpretrain/models/necks/mae_neck.py b/mmpretrain/models/necks/mae_neck.py new file mode 100644 index 0000000000000000000000000000000000000000..773692dcb3a94d85d2d2085360fd339493a24db3 --- /dev/null +++ b/mmpretrain/models/necks/mae_neck.py @@ -0,0 +1,188 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import List, Optional, Tuple, Union + +import torch +import torch.nn as nn +from mmcv.cnn import build_norm_layer +from mmengine.model import BaseModule + +from mmpretrain.registry import MODELS +from ..backbones.vision_transformer import TransformerEncoderLayer +from ..utils import build_2d_sincos_position_embedding + + +@MODELS.register_module() +class MAEPretrainDecoder(BaseModule): + """Decoder for MAE Pre-training. + + Some of the code is borrowed from `https://github.com/facebookresearch/mae`. # noqa + + Args: + num_patches (int): The number of total patches. Defaults to 196. + patch_size (int): Image patch size. Defaults to 16. + in_chans (int): The channel of input image. Defaults to 3. + embed_dim (int): Encoder's embedding dimension. Defaults to 1024. + decoder_embed_dim (int): Decoder's embedding dimension. + Defaults to 512. + decoder_depth (int): The depth of decoder. Defaults to 8. + decoder_num_heads (int): Number of attention heads of decoder. + Defaults to 16. + mlp_ratio (int): Ratio of mlp hidden dim to decoder's embedding dim. + Defaults to 4. + norm_cfg (dict): Normalization layer. Defaults to LayerNorm. + init_cfg (Union[List[dict], dict], optional): Initialization config + dict. Defaults to None. 
+ + Example: + >>> from mmpretrain.models import MAEPretrainDecoder + >>> import torch + >>> self = MAEPretrainDecoder() + >>> self.eval() + >>> inputs = torch.rand(1, 50, 1024) + >>> ids_restore = torch.arange(0, 196).unsqueeze(0) + >>> level_outputs = self.forward(inputs, ids_restore) + >>> print(tuple(level_outputs.shape)) + (1, 196, 768) + """ + + def __init__(self, + num_patches: int = 196, + patch_size: int = 16, + in_chans: int = 3, + embed_dim: int = 1024, + decoder_embed_dim: int = 512, + decoder_depth: int = 8, + decoder_num_heads: int = 16, + mlp_ratio: int = 4, + norm_cfg: dict = dict(type='LN', eps=1e-6), + predict_feature_dim: Optional[float] = None, + init_cfg: Optional[Union[List[dict], dict]] = None) -> None: + super().__init__(init_cfg=init_cfg) + self.num_patches = num_patches + + # used to convert the dim of features from encoder to the dim + # compatible with that of decoder + self.decoder_embed = nn.Linear(embed_dim, decoder_embed_dim, bias=True) + + self.mask_token = nn.Parameter(torch.zeros(1, 1, decoder_embed_dim)) + + # create new position embedding, different from that in encoder + # and is not learnable + self.decoder_pos_embed = nn.Parameter( + torch.zeros(1, self.num_patches + 1, decoder_embed_dim), + requires_grad=False) + + self.decoder_blocks = nn.ModuleList([ + TransformerEncoderLayer( + decoder_embed_dim, + decoder_num_heads, + int(mlp_ratio * decoder_embed_dim), + qkv_bias=True, + norm_cfg=norm_cfg) for _ in range(decoder_depth) + ]) + + self.decoder_norm_name, decoder_norm = build_norm_layer( + norm_cfg, decoder_embed_dim, postfix=1) + self.add_module(self.decoder_norm_name, decoder_norm) + + # Used to map features to pixels + if predict_feature_dim is None: + predict_feature_dim = patch_size**2 * in_chans + self.decoder_pred = nn.Linear( + decoder_embed_dim, predict_feature_dim, bias=True) + + def init_weights(self) -> None: + """Initialize position embedding and mask token of MAE decoder.""" + super().init_weights() + + decoder_pos_embed = build_2d_sincos_position_embedding( + int(self.num_patches**.5), + self.decoder_pos_embed.shape[-1], + cls_token=True) + self.decoder_pos_embed.data.copy_(decoder_pos_embed.float()) + + torch.nn.init.normal_(self.mask_token, std=.02) + + @property + def decoder_norm(self): + """The normalization layer of decoder.""" + return getattr(self, self.decoder_norm_name) + + def forward(self, x: torch.Tensor, + ids_restore: torch.Tensor) -> torch.Tensor: + """The forward function. + + The process computes the visible patches' features vectors and the mask + tokens to output feature vectors, which will be used for + reconstruction. + + Args: + x (torch.Tensor): hidden features, which is of shape + B x (L * mask_ratio) x C. + ids_restore (torch.Tensor): ids to restore original image. + + Returns: + torch.Tensor: The reconstructed feature vectors, which is of + shape B x (num_patches) x C. 
+ """ + # embed tokens + x = self.decoder_embed(x) + + # append mask tokens to sequence + mask_tokens = self.mask_token.repeat( + x.shape[0], ids_restore.shape[1] + 1 - x.shape[1], 1) + x_ = torch.cat([x[:, 1:, :], mask_tokens], dim=1) + x_ = torch.gather( + x_, + dim=1, + index=ids_restore.unsqueeze(-1).repeat(1, 1, x.shape[2])) + x = torch.cat([x[:, :1, :], x_], dim=1) + + # add pos embed + x = x + self.decoder_pos_embed + + # apply Transformer blocks + for blk in self.decoder_blocks: + x = blk(x) + x = self.decoder_norm(x) + + # predictor projection + x = self.decoder_pred(x) + + # remove cls token + x = x[:, 1:, :] + + return x + + +@MODELS.register_module() +class ClsBatchNormNeck(BaseModule): + """Normalize cls token across batch before head. + + This module is proposed by MAE, when running linear probing. + + Args: + input_features (int): The dimension of features. + affine (bool): a boolean value that when set to ``True``, this module + has learnable affine parameters. Defaults to False. + eps (float): a value added to the denominator for numerical stability. + Defaults to 1e-6. + init_cfg (Dict or List[Dict], optional): Config dict for weight + initialization. Defaults to None. + """ + + def __init__(self, + input_features: int, + affine: bool = False, + eps: float = 1e-6, + init_cfg: Optional[Union[dict, List[dict]]] = None) -> None: + super().__init__(init_cfg) + self.bn = nn.BatchNorm1d(input_features, affine=affine, eps=eps) + + def forward( + self, + inputs: Tuple[List[torch.Tensor]]) -> Tuple[List[torch.Tensor]]: + """The forward function.""" + # Only apply batch norm to cls_token + inputs = [self.bn(input_) for input_ in inputs] + return tuple(inputs) diff --git a/mmpretrain/models/necks/milan_neck.py b/mmpretrain/models/necks/milan_neck.py new file mode 100644 index 0000000000000000000000000000000000000000..c142d2007e7a1a398f1857177c1fe20066fcd442 --- /dev/null +++ b/mmpretrain/models/necks/milan_neck.py @@ -0,0 +1,222 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import List, Optional, Union + +import torch +from torch import nn + +from mmpretrain.registry import MODELS +from ..backbones.vision_transformer import TransformerEncoderLayer +from ..utils import PromptMultiheadAttention +from .mae_neck import MAEPretrainDecoder + + +class PromptTransformerEncoderLayer(TransformerEncoderLayer): + """Prompt Transformer Encoder Layer for MILAN. + + This module is specific for the prompt encoder in MILAN. It will not update + the visible tokens from the encoder. + + Args: + embed_dims (int): The feature dimension. + num_heads (int): Parallel attention heads. + feedforward_channels (int): The hidden dimension for FFNs. + drop_rate (float): Probability of an element to be zeroed + after the feed forward layer. Defaults to 0.0. + attn_drop_rate (float): The drop out rate for attention layer. + Defaults to 0.0. + drop_path_rate (float): Stochastic depth rate. Defaults to 0.0. + num_fcs (int): The number of fully-connected layers for FFNs. + Defaults to 2. + qkv_bias (bool): Enable bias for qkv if True. Defaults to True. + act_cfg (dict): The activation config for FFNs. + Defaluts to ``dict(type='GELU')``. + norm_cfg (dict): Config dict for normalization layer. + Defaults to ``dict(type='LN')``. + batch_first (bool): Key, Query and Value are shape of + (batch, n, embed_dim) + or (n, batch, embed_dim). Defaults to False. + init_cfg (dict, optional): The Config for initialization. + Defaults to None. 
+ """ + + def __init__(self, + embed_dims: int, + num_heads: int, + feedforward_channels=int, + drop_rate: float = 0., + attn_drop_rate: float = 0., + drop_path_rate: float = 0., + num_fcs: int = 2, + qkv_bias: bool = True, + act_cfg: dict = dict(type='GELU'), + norm_cfg: dict = dict(type='LN'), + init_cfg: Optional[Union[List[dict], dict]] = None) -> None: + super().__init__( + embed_dims=embed_dims, + num_heads=num_heads, + feedforward_channels=feedforward_channels, + drop_rate=drop_rate, + attn_drop_rate=attn_drop_rate, + drop_path_rate=drop_path_rate, + num_fcs=num_fcs, + qkv_bias=qkv_bias, + act_cfg=act_cfg, + norm_cfg=norm_cfg, + init_cfg=init_cfg) + self.attn = PromptMultiheadAttention( + embed_dims=embed_dims, + num_heads=num_heads, + attn_drop=attn_drop_rate, + proj_drop=drop_rate, + dropout_layer=dict(type='DropPath', drop_prob=drop_path_rate), + qkv_bias=qkv_bias) + + def forward(self, x: torch.Tensor, visible_tokens: torch.Tensor, + ids_restore: torch.Tensor) -> torch.Tensor: + """Forward function for `PromptMultiheadAttention`. + + Args: + x (torch.Tensor): Mask token features with shape N x L_m x C. + visible_tokens (torch.Tensor): The visible tokens features from + encoder with shape N x L_v x C. + ids_restore (torch.Tensor): The ids of all tokens in the original + image with shape N x L. + + Returns: + torch Tensor: Output features with shape N x L x C. + """ + x = x + self.attn(self.norm1(x), visible_tokens, ids_restore) + x = self.ffn(self.norm2(x), identity=x) + return x + + +@MODELS.register_module() +class MILANPretrainDecoder(MAEPretrainDecoder): + """Prompt decoder for MILAN. + + This decoder is used in MILAN pretraining, which will not update these + visible tokens from the encoder. + + Args: + num_patches (int): The number of total patches. Defaults to 196. + patch_size (int): Image patch size. Defaults to 16. + in_chans (int): The channel of input image. Defaults to 3. + embed_dim (int): Encoder's embedding dimension. Defaults to 1024. + decoder_embed_dim (int): Decoder's embedding dimension. + Defaults to 512. + decoder_depth (int): The depth of decoder. Defaults to 8. + decoder_num_heads (int): Number of attention heads of decoder. + Defaults to 16. + predict_feature_dim (int): The dimension of the feature to be + predicted. Defaults to 512. + mlp_ratio (int): Ratio of mlp hidden dim to decoder's embedding dim. + Defaults to 4. + norm_cfg (dict): Normalization layer. Defaults to LayerNorm. + init_cfg (Union[List[dict], dict], optional): Initialization config + dict. Defaults to None. 
+ """ + + def __init__(self, + num_patches: int = 196, + patch_size: int = 16, + in_chans: int = 3, + embed_dim: int = 1024, + decoder_embed_dim: int = 512, + decoder_depth: int = 8, + decoder_num_heads: int = 16, + predict_feature_dim: int = 512, + mlp_ratio: int = 4, + norm_cfg: dict = dict(type='LN', eps=1e-6), + init_cfg: Optional[Union[List[dict], dict]] = None) -> None: + super().__init__( + num_patches=num_patches, + patch_size=patch_size, + in_chans=in_chans, + embed_dim=embed_dim, + decoder_embed_dim=decoder_embed_dim, + decoder_depth=decoder_depth, + decoder_num_heads=decoder_num_heads, + mlp_ratio=mlp_ratio, + norm_cfg=norm_cfg, + init_cfg=init_cfg) + + # map the dim of features from decoder to the dim compatible with + # that of CLIP + self.decoder_pred = nn.Linear( + decoder_embed_dim, predict_feature_dim, bias=True) + + # use prompt transformer encoder layer, instead of the conventional + # transformer encoder layer + self.decoder_blocks = nn.ModuleList([ + PromptTransformerEncoderLayer( + decoder_embed_dim, + decoder_num_heads, + int(mlp_ratio * decoder_embed_dim), + qkv_bias=True, + norm_cfg=norm_cfg) for _ in range(decoder_depth) + ]) + + def forward(self, x: torch.Tensor, ids_restore: torch.Tensor, + ids_keep: torch.Tensor, + ids_dump: torch.Tensor) -> torch.Tensor: + """Forward function. + + Args: + x (torch.Tensor): The input features, which is of shape (N, L, C). + ids_restore (torch.Tensor): The indices to restore these tokens + to the original image. + ids_keep (torch.Tensor): The indices of tokens to be kept. + ids_dump (torch.Tensor): The indices of tokens to be masked. + + Returns: + torch.Tensor: The reconstructed features, which is of shape + (N, L, C). + """ + # embed tokens + x = self.decoder_embed(x) + + # append mask tokens to sequence + mask_tokens = self.mask_token.repeat( + x.shape[0], ids_restore.shape[1] + 1 - x.shape[1], 1) + x_ = torch.cat([x[:, 1:, :], mask_tokens], dim=1) + x_ = torch.gather( + x_, + dim=1, + index=ids_restore.unsqueeze(-1).repeat(1, 1, x.shape[2])) + x = torch.cat([x[:, :1, :], x_], dim=1) + + # add pos embed + x = x + self.decoder_pos_embed + + # split mask tokens and visible tokens + visible_tokens = torch.cat([ + x[:, :1, :], + torch.gather( + x[:, 1:, :], + dim=1, + index=ids_keep.unsqueeze(-1).repeat(1, 1, x.shape[-1])) + ], + dim=1) + x = torch.gather( + x[:, 1:, :], + dim=1, + index=ids_dump.unsqueeze(-1).repeat(1, 1, x.shape[-1])) + + for blk in self.decoder_blocks: + x = blk(x, visible_tokens, ids_restore) + + # full sequence recovery + x_ = torch.cat([visible_tokens[:, 1:, :], x], dim=1) + x_ = torch.gather( + x_, + dim=1, + index=ids_restore.unsqueeze(-1).repeat(1, 1, + x.shape[-1])) # unshuffle + x = torch.cat([visible_tokens[:, :1, :], x_], dim=1) + + x = self.decoder_norm(x) + + # predictor projection + x = self.decoder_pred(x) + + return x diff --git a/mmpretrain/models/necks/mixmim_neck.py b/mmpretrain/models/necks/mixmim_neck.py new file mode 100644 index 0000000000000000000000000000000000000000..8d67ee2bd6b48136f2ae6b298e11bd7758fa414b --- /dev/null +++ b/mmpretrain/models/necks/mixmim_neck.py @@ -0,0 +1,111 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import List, Optional, Union + +import torch +import torch.nn as nn + +from mmpretrain.registry import MODELS +from ..utils import build_2d_sincos_position_embedding +from .mae_neck import MAEPretrainDecoder + + +@MODELS.register_module() +class MixMIMPretrainDecoder(MAEPretrainDecoder): + """Decoder for MixMIM Pretraining. 
+ + Some of the code is borrowed from `https://github.com/Sense-X/MixMIM`. # noqa + + Args: + num_patches (int): The number of total patches. Defaults to 196. + patch_size (int): Image patch size. Defaults to 16. + in_chans (int): The channel of input image. Defaults to 3. + embed_dim (int): Encoder's embedding dimension. Defaults to 1024. + encoder_stride (int): The output stride of MixMIM backbone. Defaults + to 32. + decoder_embed_dim (int): Decoder's embedding dimension. + Defaults to 512. + decoder_depth (int): The depth of decoder. Defaults to 8. + decoder_num_heads (int): Number of attention heads of decoder. + Defaults to 16. + mlp_ratio (int): Ratio of mlp hidden dim to decoder's embedding dim. + Defaults to 4. + norm_cfg (dict): Normalization layer. Defaults to LayerNorm. + init_cfg (Union[List[dict], dict], optional): Initialization config + dict. Defaults to None. + """ + + def __init__(self, + num_patches: int = 196, + patch_size: int = 16, + in_chans: int = 3, + embed_dim: int = 1024, + encoder_stride: int = 32, + decoder_embed_dim: int = 512, + decoder_depth: int = 8, + decoder_num_heads: int = 16, + mlp_ratio: int = 4, + norm_cfg: dict = dict(type='LN', eps=1e-6), + init_cfg: Optional[Union[List[dict], dict]] = None) -> None: + + super().__init__( + num_patches=num_patches, + patch_size=patch_size, + in_chans=in_chans, + embed_dim=embed_dim, + decoder_embed_dim=decoder_embed_dim, + decoder_depth=decoder_depth, + decoder_num_heads=decoder_num_heads, + mlp_ratio=mlp_ratio, + norm_cfg=norm_cfg, + init_cfg=init_cfg) + + self.decoder_pos_embed = nn.Parameter( + torch.zeros(1, num_patches, decoder_embed_dim), + requires_grad=False) + self.decoder_pred = nn.Linear(decoder_embed_dim, encoder_stride**2 * 3) + + def init_weights(self) -> None: + """Initialize position embedding and mask token of MixMIM decoder.""" + super(MAEPretrainDecoder, self).init_weights() + + decoder_pos_embed = build_2d_sincos_position_embedding( + int(self.num_patches**.5), + self.decoder_pos_embed.shape[-1], + cls_token=False) + self.decoder_pos_embed.data.copy_(decoder_pos_embed.float()) + + torch.nn.init.normal_(self.mask_token, std=.02) + + def forward(self, x: torch.Tensor, mask: torch.Tensor) -> torch.Tensor: + """Forward function. + + Args: + x (torch.Tensor): The input features, which is of shape (N, L, C). + mask (torch.Tensor): The tensor to indicate which tokens a + re masked. + + Returns: + torch.Tensor: The reconstructed features, which is of shape + (N, L, C). + """ + + x = self.decoder_embed(x) + B, L, C = x.shape + + mask_tokens = self.mask_token.expand(B, L, -1) + x1 = x * (1 - mask) + mask_tokens * mask + x2 = x * mask + mask_tokens * (1 - mask) + x = torch.cat([x1, x2], dim=0) + + # add pos embed + x = x + self.decoder_pos_embed + + # apply Transformer blocks + for idx, blk in enumerate(self.decoder_blocks): + x = blk(x) + x = self.decoder_norm(x) + + # predictor projection + x = self.decoder_pred(x) + + return x diff --git a/mmpretrain/models/necks/mocov2_neck.py b/mmpretrain/models/necks/mocov2_neck.py new file mode 100644 index 0000000000000000000000000000000000000000..9ad9107812eb9aaaaff8cbc1a7d5c3d39e92dfa1 --- /dev/null +++ b/mmpretrain/models/necks/mocov2_neck.py @@ -0,0 +1,52 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
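+# Illustrative usage sketch for the MoCoV2Neck defined below (a reviewer note,
+# not part of the upstream file; it assumes the neck is exported from
+# ``mmpretrain.models.necks``). The neck pools a single backbone feature map
+# and projects it with fc-relu-fc:
+# >>> import torch
+# >>> from mmpretrain.models.necks import MoCoV2Neck
+# >>> neck = MoCoV2Neck(in_channels=2048, hid_channels=2048, out_channels=128)
+# >>> feats = (torch.rand(2, 2048, 7, 7), )
+# >>> out = neck(feats)
+# >>> tuple(out[0].shape)
+# (2, 128)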
+from typing import List, Optional, Tuple, Union + +import torch +import torch.nn as nn +from mmengine.model import BaseModule + +from mmpretrain.registry import MODELS + + +@MODELS.register_module() +class MoCoV2Neck(BaseModule): + """The non-linear neck of MoCo v2: fc-relu-fc. + + Args: + in_channels (int): Number of input channels. + hid_channels (int): Number of hidden channels. + out_channels (int): Number of output channels. + with_avg_pool (bool): Whether to apply the global + average pooling after backbone. Defaults to True. + init_cfg (dict or list[dict], optional): Initialization config dict. + Defaults to None. + """ + + def __init__(self, + in_channels: int, + hid_channels: int, + out_channels: int, + with_avg_pool: bool = True, + init_cfg: Optional[Union[dict, List[dict]]] = None) -> None: + super().__init__(init_cfg) + self.with_avg_pool = with_avg_pool + if with_avg_pool: + self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) + self.mlp = nn.Sequential( + nn.Linear(in_channels, hid_channels), nn.ReLU(inplace=True), + nn.Linear(hid_channels, out_channels)) + + def forward(self, x: Tuple[torch.Tensor]) -> Tuple[torch.Tensor]: + """Forward function. + + Args: + x (Tuple[torch.Tensor]): The feature map of backbone. + + Returns: + Tuple[torch.Tensor]: The output features. + """ + assert len(x) == 1 + x = x[0] + if self.with_avg_pool: + x = self.avgpool(x) + return (self.mlp(x.view(x.size(0), -1)), ) diff --git a/mmpretrain/models/necks/nonlinear_neck.py b/mmpretrain/models/necks/nonlinear_neck.py new file mode 100644 index 0000000000000000000000000000000000000000..ef684d39d1f7f5dc7361ccbf631d3ce712d65ac5 --- /dev/null +++ b/mmpretrain/models/necks/nonlinear_neck.py @@ -0,0 +1,115 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import List, Optional, Tuple, Union + +import torch +import torch.nn as nn +from mmcv.cnn import build_norm_layer +from mmengine.model import BaseModule + +from mmpretrain.registry import MODELS + + +@MODELS.register_module() +class NonLinearNeck(BaseModule): + """The non-linear neck. + + Structure: fc-bn-[relu-fc-bn] where the substructure in [] can be repeated. + For the default setting, the repeated time is 1. + The neck can be used in many algorithms, e.g., SimCLR, BYOL, SimSiam. + + Args: + in_channels (int): Number of input channels. + hid_channels (int): Number of hidden channels. + out_channels (int): Number of output channels. + num_layers (int): Number of fc layers. Defaults to 2. + with_bias (bool): Whether to use bias in fc layers (except for the + last). Defaults to False. + with_last_bn (bool): Whether to add the last BN layer. + Defaults to True. + with_last_bn_affine (bool): Whether to have learnable affine parameters + in the last BN layer (set False for SimSiam). Defaults to True. + with_last_bias (bool): Whether to use bias in the last fc layer. + Defaults to False. + with_avg_pool (bool): Whether to apply the global average pooling + after backbone. Defaults to True. + norm_cfg (dict): Dictionary to construct and config norm layer. + Defaults to dict(type='SyncBN'). + init_cfg (dict or list[dict], optional): Initialization config dict. 
+ """ + + def __init__( + self, + in_channels: int, + hid_channels: int, + out_channels: int, + num_layers: int = 2, + with_bias: bool = False, + with_last_bn: bool = True, + with_last_bn_affine: bool = True, + with_last_bias: bool = False, + with_avg_pool: bool = True, + norm_cfg: dict = dict(type='SyncBN'), + init_cfg: Optional[Union[dict, List[dict]]] = [ + dict(type='Constant', val=1, layer=['_BatchNorm', 'GroupNorm']) + ] + ) -> None: + super(NonLinearNeck, self).__init__(init_cfg) + self.with_avg_pool = with_avg_pool + if with_avg_pool: + self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) + self.relu = nn.ReLU(inplace=True) + self.fc0 = nn.Linear(in_channels, hid_channels, bias=with_bias) + self.bn0 = build_norm_layer(norm_cfg, hid_channels)[1] + + self.fc_names = [] + self.bn_names = [] + for i in range(1, num_layers): + this_channels = out_channels if i == num_layers - 1 \ + else hid_channels + if i != num_layers - 1: + self.add_module( + f'fc{i}', + nn.Linear(hid_channels, this_channels, bias=with_bias)) + self.add_module(f'bn{i}', + build_norm_layer(norm_cfg, this_channels)[1]) + self.bn_names.append(f'bn{i}') + else: + self.add_module( + f'fc{i}', + nn.Linear( + hid_channels, this_channels, bias=with_last_bias)) + if with_last_bn: + self.add_module( + f'bn{i}', + build_norm_layer( + dict(**norm_cfg, affine=with_last_bn_affine), + this_channels)[1]) + self.bn_names.append(f'bn{i}') + else: + self.bn_names.append(None) + self.fc_names.append(f'fc{i}') + + def forward(self, x: Tuple[torch.Tensor]) -> Tuple[torch.Tensor]: + """Forward function. + + Args: + x (Tuple[torch.Tensor]): The feature map of backbone. + + Returns: + Tuple[torch.Tensor]: The output features. + """ + assert len(x) == 1 + x = x[0] + if self.with_avg_pool: + x = self.avgpool(x) + x = x.view(x.size(0), -1) + x = self.fc0(x) + x = self.bn0(x) + for fc_name, bn_name in zip(self.fc_names, self.bn_names): + fc = getattr(self, fc_name) + x = self.relu(x) + x = fc(x) + if bn_name is not None: + bn = getattr(self, bn_name) + x = bn(x) + return (x, ) diff --git a/mmpretrain/models/necks/simmim_neck.py b/mmpretrain/models/necks/simmim_neck.py new file mode 100644 index 0000000000000000000000000000000000000000..cb1e29bcf195ecb800a22a2c43917e62718b5ffe --- /dev/null +++ b/mmpretrain/models/necks/simmim_neck.py @@ -0,0 +1,33 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +import torch.nn as nn +from mmengine.model import BaseModule + +from mmpretrain.registry import MODELS + + +@MODELS.register_module() +class SimMIMLinearDecoder(BaseModule): + """Linear Decoder For SimMIM pretraining. + + This neck reconstructs the original image from the shrunk feature map. + + Args: + in_channels (int): Channel dimension of the feature map. + encoder_stride (int): The total stride of the encoder. + """ + + def __init__(self, in_channels: int, encoder_stride: int) -> None: + super().__init__() + self.decoder = nn.Sequential( + nn.Conv2d( + in_channels=in_channels, + out_channels=encoder_stride**2 * 3, + kernel_size=1), + nn.PixelShuffle(encoder_stride), + ) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + """Forward function.""" + x = self.decoder(x) + return x diff --git a/mmpretrain/models/necks/swav_neck.py b/mmpretrain/models/necks/swav_neck.py new file mode 100644 index 0000000000000000000000000000000000000000..807ae8b9b3155e9dd14ef95fe5fca526919ee11d --- /dev/null +++ b/mmpretrain/models/necks/swav_neck.py @@ -0,0 +1,93 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+from typing import List, Optional, Union + +import torch +import torch.nn as nn +from mmcv.cnn import build_norm_layer +from mmengine.model import BaseModule + +from mmpretrain.registry import MODELS + + +@MODELS.register_module() +class SwAVNeck(BaseModule): + """The non-linear neck of SwAV: fc-bn-relu-fc-normalization. + + Args: + in_channels (int): Number of input channels. + hid_channels (int): Number of hidden channels. + out_channels (int): Number of output channels. + with_avg_pool (bool): Whether to apply the global average pooling after + backbone. Defaults to True. + with_l2norm (bool): whether to normalize the output after projection. + Defaults to True. + norm_cfg (dict): Dictionary to construct and config norm layer. + Defaults to dict(type='SyncBN'). + init_cfg (dict or list[dict], optional): Initialization config dict. + """ + + def __init__( + self, + in_channels: int, + hid_channels: int, + out_channels: int, + with_avg_pool: bool = True, + with_l2norm: bool = True, + norm_cfg: dict = dict(type='SyncBN'), + init_cfg: Optional[Union[dict, List[dict]]] = [ + dict(type='Constant', val=1, layer=['_BatchNorm', 'GroupNorm']) + ] + ) -> None: + super().__init__(init_cfg) + self.with_avg_pool = with_avg_pool + self.with_l2norm = with_l2norm + if with_avg_pool: + self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) + + if out_channels == 0: + self.projection_neck = nn.Identity() + elif hid_channels == 0: + self.projection_neck = nn.Linear(in_channels, out_channels) + else: + self.norm = build_norm_layer(norm_cfg, hid_channels)[1] + self.projection_neck = nn.Sequential( + nn.Linear(in_channels, hid_channels), + self.norm, + nn.ReLU(inplace=True), + nn.Linear(hid_channels, out_channels), + ) + + def forward_projection(self, x: torch.Tensor) -> torch.Tensor: + """Compute projection. + + Args: + x (torch.Tensor): The feature vectors after pooling. + + Returns: + torch.Tensor: The output features with projection or L2-norm. + """ + x = self.projection_neck(x) + if self.with_l2norm: + x = nn.functional.normalize(x, dim=1, p=2) + return x + + def forward(self, x: List[torch.Tensor]) -> torch.Tensor: + """Forward function. + + Args: + x (List[torch.Tensor]): list of feature maps, len(x) according to + len(num_crops). + + Returns: + torch.Tensor: The projection vectors. + """ + avg_out = [] + for _x in x: + _x = _x[0] + if self.with_avg_pool: + _out = self.avgpool(_x) + avg_out.append(_out) + feat_vec = torch.cat(avg_out) # [sum(num_crops) * N, C] + feat_vec = feat_vec.view(feat_vec.size(0), -1) + output = self.forward_projection(feat_vec) + return output diff --git a/mmpretrain/models/retrievers/__init__.py b/mmpretrain/models/retrievers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..593b637d6eb7e44184fdf6ceb70470253639b013 --- /dev/null +++ b/mmpretrain/models/retrievers/__init__.py @@ -0,0 +1,5 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from .base import BaseRetriever +from .image2image import ImageToImageRetriever + +__all__ = ['BaseRetriever', 'ImageToImageRetriever'] diff --git a/mmpretrain/models/retrievers/base.py b/mmpretrain/models/retrievers/base.py new file mode 100644 index 0000000000000000000000000000000000000000..15816798f3fadc612b51634994178eb5f8860fb8 --- /dev/null +++ b/mmpretrain/models/retrievers/base.py @@ -0,0 +1,151 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
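+# Illustrative usage sketch for the ImageToImageRetriever added later in this
+# diff (image2image.py); a reviewer note, not part of the upstream file. The
+# tiny encoder and the random 10-vector prototype below are stand-ins for a
+# real backbone+neck and a real gallery:
+# >>> import torch
+# >>> from mmpretrain.models.retrievers import ImageToImageRetriever
+# >>> encoder = torch.nn.Sequential(
+# ...     torch.nn.Conv2d(3, 8, 3, stride=2),
+# ...     torch.nn.AdaptiveAvgPool2d(1),
+# ...     torch.nn.Flatten())
+# >>> retriever = ImageToImageRetriever(
+# ...     image_encoder=encoder, prototype=torch.rand(10, 8), topk=3)
+# >>> samples = retriever.predict(torch.rand(2, 3, 32, 32))
+# >>> samples[0].pred_label.shape  # indices of the top-3 gallery entries
+# torch.Size([3])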
+from abc import ABCMeta, abstractmethod +from typing import List, Optional, Union + +import torch +from mmengine.model import BaseModel +from mmengine.structures import BaseDataElement +from torch.utils.data import DataLoader + + +class BaseRetriever(BaseModel, metaclass=ABCMeta): + """Base class for retriever. + + Args: + init_cfg (dict, optional): Initialization config dict. + Defaults to None. + data_preprocessor (dict, optional): The config for preprocessing input + data. If None, it will use "BaseDataPreprocessor" as type, see + :class:`mmengine.model.BaseDataPreprocessor` for more details. + Defaults to None. + prototype (Union[DataLoader, dict, str, torch.Tensor]): Database to be + retrieved. The following four types are supported. + + - DataLoader: The original dataloader serves as the prototype. + - dict: The configuration to construct Dataloader. + - str: The path of the saved vector. + - torch.Tensor: The saved tensor whose dimension should be dim. + + Attributes: + prototype (Union[DataLoader, dict, str, torch.Tensor]): Database to be + retrieved. The following four types are supported. + + - DataLoader: The original dataloader serves as the prototype. + - dict: The configuration to construct Dataloader. + - str: The path of the saved vector. + - torch.Tensor: The saved tensor whose dimension should be dim. + + data_preprocessor (:obj:`mmengine.model.BaseDataPreprocessor`): An + extra data pre-processing module, which processes data from + dataloader to the format accepted by :meth:`forward`. + """ + + def __init__( + self, + prototype: Union[DataLoader, dict, str, torch.Tensor] = None, + data_preprocessor: Optional[dict] = None, + init_cfg: Optional[dict] = None, + ): + super(BaseRetriever, self).__init__( + init_cfg=init_cfg, data_preprocessor=data_preprocessor) + self.prototype = prototype + self.prototype_inited = False + + @abstractmethod + def forward(self, + inputs: torch.Tensor, + data_samples: Optional[List[BaseDataElement]] = None, + mode: str = 'loss'): + """The unified entry for a forward process in both training and test. + + The method should accept three modes: "tensor", "predict" and "loss": + + - "tensor": Forward the whole network and return tensor without any + post-processing, same as a common nn.Module. + - "predict": Forward and return the predictions, which are fully + processed to a list of :obj:`DataSample`. + - "loss": Forward and return a dict of losses according to the given + inputs and data samples. + + Note that this method doesn't handle neither back propagation nor + optimizer updating, which are done in the :meth:`train_step`. + + Args: + inputs (torch.Tensor, tuple): The input tensor with shape + (N, C, ...) in general. + data_samples (List[DataSample], optional): The annotation + data of every samples. It's required if ``mode="loss"``. + Defaults to None. + mode (str): Return what kind of value. Defaults to 'tensor'. + + Returns: + The return type depends on ``mode``. + + - If ``mode="tensor"``, return a tensor. + - If ``mode="predict"``, return a list of + :obj:`mmpretrain.structures.DataSample`. + - If ``mode="loss"``, return a dict of tensor. + """ + pass + + def extract_feat(self, inputs: torch.Tensor): + """Extract features from the input tensor with shape (N, C, ...). + + The sub-classes are recommended to implement this method to extract + features from backbone and neck. + + Args: + inputs (Tensor): A batch of inputs. The shape of it should be + ``(num_samples, num_channels, *img_shape)``. 
+ """ + raise NotImplementedError + + def loss(self, inputs: torch.Tensor, + data_samples: List[BaseDataElement]) -> dict: + """Calculate losses from a batch of inputs and data samples. + + Args: + inputs (torch.Tensor): The input tensor with shape + (N, C, ...) in general. + data_samples (List[DataSample]): The annotation data of + every samples. + + Returns: + dict[str, Tensor]: a dictionary of loss components + """ + raise NotImplementedError + + def predict(self, + inputs: tuple, + data_samples: Optional[List[BaseDataElement]] = None, + **kwargs) -> List[BaseDataElement]: + """Predict results from the extracted features. + + Args: + inputs (tuple): The features extracted from the backbone. + data_samples (List[BaseDataElement], optional): The annotation + data of every samples. Defaults to None. + **kwargs: Other keyword arguments accepted by the ``predict`` + method of :attr:`head`. + """ + raise NotImplementedError + + def matching(self, inputs: torch.Tensor): + """Compare the prototype and calculate the similarity. + + Args: + inputs (torch.Tensor): The input tensor with shape (N, C). + """ + raise NotImplementedError + + def prepare_prototype(self): + """Preprocessing the prototype before predict.""" + raise NotImplementedError + + def dump_prototype(self, path): + """Save the features extracted from the prototype to the specific path. + + Args: + path (str): Path to save feature. + """ + raise NotImplementedError diff --git a/mmpretrain/models/retrievers/image2image.py b/mmpretrain/models/retrievers/image2image.py new file mode 100644 index 0000000000000000000000000000000000000000..a00c1dceb102ee692c44090b62dcfa19dc441f3b --- /dev/null +++ b/mmpretrain/models/retrievers/image2image.py @@ -0,0 +1,314 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Callable, List, Optional, Union + +import mmengine.dist as dist +import torch +import torch.nn as nn +from mmengine.runner import Runner +from torch.utils.data import DataLoader + +from mmpretrain.registry import MODELS +from mmpretrain.structures import DataSample +from mmpretrain.utils import track_on_main_process +from .base import BaseRetriever + + +@MODELS.register_module() +class ImageToImageRetriever(BaseRetriever): + """Image To Image Retriever for supervised retrieval task. + + Args: + image_encoder (Union[dict, List[dict]]): Encoder for extracting + features. + prototype (Union[DataLoader, dict, str, torch.Tensor]): Database to be + retrieved. The following four types are supported. + + - DataLoader: The original dataloader serves as the prototype. + - dict: The configuration to construct Dataloader. + - str: The path of the saved vector. + - torch.Tensor: The saved tensor whose dimension should be dim. + + head (dict, optional): The head module to calculate loss from + processed features. See :mod:`mmpretrain.models.heads`. Notice + that if the head is not set, `loss` method cannot be used. + Defaults to None. + similarity_fn (Union[str, Callable]): The way that the similarity + is calculated. If `similarity` is callable, it is used directly + as the measure function. If it is a string, the appropriate + method will be used. The larger the calculated value, the + greater the similarity. Defaults to "cosine_similarity". + train_cfg (dict, optional): The training setting. The acceptable + fields are: + + - augments (List[dict]): The batch augmentation methods to use. + More details can be found in + :mod:`mmpretrain.model.utils.augment`. + + Defaults to None. 
+ data_preprocessor (dict, optional): The config for preprocessing input + data. If None or no specified type, it will use + "ClsDataPreprocessor" as type. See :class:`ClsDataPreprocessor` for + more details. Defaults to None. + topk (int): Return the topk of the retrieval result. `-1` means + return all. Defaults to -1. + init_cfg (dict, optional): the config to control the initialization. + Defaults to None. + """ + + def __init__(self, + image_encoder: Union[dict, List[dict]], + prototype: Union[DataLoader, dict, str, torch.Tensor], + head: Optional[dict] = None, + pretrained: Optional[str] = None, + similarity_fn: Union[str, Callable] = 'cosine_similarity', + train_cfg: Optional[dict] = None, + data_preprocessor: Optional[dict] = None, + topk: int = -1, + init_cfg: Optional[dict] = None): + + if data_preprocessor is None: + data_preprocessor = {} + # The build process is in MMEngine, so we need to add scope here. + data_preprocessor.setdefault('type', 'mmpretrain.ClsDataPreprocessor') + + if train_cfg is not None and 'augments' in train_cfg: + # Set batch augmentations by `train_cfg` + data_preprocessor['batch_augments'] = train_cfg + + super(ImageToImageRetriever, self).__init__( + init_cfg=init_cfg, data_preprocessor=data_preprocessor) + + if not isinstance(image_encoder, nn.Module): + image_encoder = MODELS.build(image_encoder) + if head is not None and not isinstance(head, nn.Module): + head = MODELS.build(head) + + self.image_encoder = image_encoder + self.head = head + + self.similarity = similarity_fn + + assert isinstance(prototype, (str, torch.Tensor, dict, DataLoader)), ( + 'The `prototype` in `ImageToImageRetriever` must be a path, ' + 'a torch.Tensor, a dataloader or a dataloader dict format config.') + self.prototype = prototype + self.prototype_inited = False + self.topk = topk + + @property + def similarity_fn(self): + """Returns a function that calculates the similarity.""" + # If self.similarity_way is callable, return it directly + if isinstance(self.similarity, Callable): + return self.similarity + + if self.similarity == 'cosine_similarity': + # a is a tensor with shape (N, C) + # b is a tensor with shape (M, C) + # "cosine_similarity" will get the matrix of similarity + # with shape (N, M). + # The higher the score is, the more similar is + return lambda a, b: torch.cosine_similarity( + a.unsqueeze(1), b.unsqueeze(0), dim=-1) + else: + raise RuntimeError(f'Invalid function "{self.similarity_fn}".') + + def forward(self, + inputs: torch.Tensor, + data_samples: Optional[List[DataSample]] = None, + mode: str = 'tensor'): + """The unified entry for a forward process in both training and test. + + The method should accept three modes: "tensor", "predict" and "loss": + + - "tensor": Forward the whole network and return tensor without any + post-processing, same as a common nn.Module. + - "predict": Forward and return the predictions, which are fully + processed to a list of :obj:`DataSample`. + - "loss": Forward and return a dict of losses according to the given + inputs and data samples. + + Note that this method doesn't handle neither back propagation nor + optimizer updating, which are done in the :meth:`train_step`. + + Args: + inputs (torch.Tensor, tuple): The input tensor with shape + (N, C, ...) in general. + data_samples (List[DataSample], optional): The annotation + data of every samples. It's required if ``mode="loss"``. + Defaults to None. + mode (str): Return what kind of value. Defaults to 'tensor'. + + Returns: + The return type depends on ``mode``. 
+ + - If ``mode="tensor"``, return a tensor. + - If ``mode="predict"``, return a list of + :obj:`mmpretrain.structures.DataSample`. + - If ``mode="loss"``, return a dict of tensor. + """ + if mode == 'tensor': + return self.extract_feat(inputs) + elif mode == 'loss': + return self.loss(inputs, data_samples) + elif mode == 'predict': + return self.predict(inputs, data_samples) + else: + raise RuntimeError(f'Invalid mode "{mode}".') + + def extract_feat(self, inputs): + """Extract features from the input tensor with shape (N, C, ...). + + Args: + inputs (Tensor): A batch of inputs. The shape of it should be + ``(num_samples, num_channels, *img_shape)``. + Returns: + Tensor: The output of encoder. + """ + + feat = self.image_encoder(inputs) + return feat + + def loss(self, inputs: torch.Tensor, + data_samples: List[DataSample]) -> dict: + """Calculate losses from a batch of inputs and data samples. + + Args: + inputs (torch.Tensor): The input tensor with shape + (N, C, ...) in general. + data_samples (List[DataSample]): The annotation data of + every samples. + + Returns: + dict[str, Tensor]: a dictionary of loss components + """ + feats = self.extract_feat(inputs) + return self.head.loss(feats, data_samples) + + def matching(self, inputs: torch.Tensor): + """Compare the prototype and calculate the similarity. + + Args: + inputs (torch.Tensor): The input tensor with shape (N, C). + Returns: + dict: a dictionary of score and prediction label based on fn. + """ + sim = self.similarity_fn(inputs, self.prototype_vecs) + sorted_sim, indices = torch.sort(sim, descending=True, dim=-1) + predictions = dict( + score=sim, pred_label=indices, pred_score=sorted_sim) + return predictions + + def predict(self, + inputs: tuple, + data_samples: Optional[List[DataSample]] = None, + **kwargs) -> List[DataSample]: + """Predict results from the extracted features. + + Args: + inputs (tuple): The features extracted from the backbone. + data_samples (List[DataSample], optional): The annotation + data of every samples. Defaults to None. + **kwargs: Other keyword arguments accepted by the ``predict`` + method of :attr:`head`. 
+ Returns: + List[DataSample]: the raw data_samples with + the predicted results + """ + if not self.prototype_inited: + self.prepare_prototype() + + feats = self.extract_feat(inputs) + if isinstance(feats, tuple): + feats = feats[-1] + + # Matching of similarity + result = self.matching(feats) + return self._get_predictions(result, data_samples) + + def _get_predictions(self, result, data_samples): + """Post-process the output of retriever.""" + pred_scores = result['score'] + pred_labels = result['pred_label'] + if self.topk != -1: + topk = min(self.topk, pred_scores.size()[-1]) + pred_labels = pred_labels[:, :topk] + + if data_samples is not None: + for data_sample, score, label in zip(data_samples, pred_scores, + pred_labels): + data_sample.set_pred_score(score).set_pred_label(label) + else: + data_samples = [] + for score, label in zip(pred_scores, pred_labels): + data_samples.append( + DataSample().set_pred_score(score).set_pred_label(label)) + return data_samples + + def _get_prototype_vecs_from_dataloader(self, data_loader): + """get prototype_vecs from dataloader.""" + self.eval() + num = len(data_loader.dataset) + + prototype_vecs = None + for data_batch in track_on_main_process(data_loader, + 'Prepare prototype'): + data = self.data_preprocessor(data_batch, False) + feat = self(**data) + if isinstance(feat, tuple): + feat = feat[-1] + + if prototype_vecs is None: + dim = feat.shape[-1] + prototype_vecs = torch.zeros(num, dim) + for i, data_sample in enumerate(data_batch['data_samples']): + sample_idx = data_sample.get('sample_idx') + prototype_vecs[sample_idx] = feat[i] + + assert prototype_vecs is not None + dist.all_reduce(prototype_vecs) + return prototype_vecs + + def _get_prototype_vecs_from_path(self, proto_path): + """get prototype_vecs from prototype path.""" + data = [None] + if dist.is_main_process(): + data[0] = torch.load(proto_path) + dist.broadcast_object_list(data, src=0) + prototype_vecs = data[0] + assert prototype_vecs is not None + return prototype_vecs + + @torch.no_grad() + def prepare_prototype(self): + """Used in meta testing. This function will be called before the meta + testing. Obtain the vector based on the prototype. + + - torch.Tensor: The prototype vector is the prototype + - str: The path of the extracted feature path, parse data structure, + and generate the prototype feature vector set + - Dataloader or config: Extract and save the feature vectors according + to the dataloader + """ + device = next(self.image_encoder.parameters()).device + if isinstance(self.prototype, torch.Tensor): + prototype_vecs = self.prototype + elif isinstance(self.prototype, str): + prototype_vecs = self._get_prototype_vecs_from_path(self.prototype) + elif isinstance(self.prototype, (dict, DataLoader)): + loader = Runner.build_dataloader(self.prototype) + prototype_vecs = self._get_prototype_vecs_from_dataloader(loader) + + self.register_buffer( + 'prototype_vecs', prototype_vecs.to(device), persistent=False) + self.prototype_inited = True + + def dump_prototype(self, path): + """Save the features extracted from the prototype to specific path. + + Args: + path (str): Path to save feature. 
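For intuition, the retrieval step implemented by ``similarity_fn`` and ``matching`` above reduces to a brute-force cosine-similarity lookup against the stored prototype vectors followed by a descending sort. A minimal standalone sketch of that computation (plain PyTorch, hypothetical shapes; not the retriever's public API):

```python
import torch

query = torch.randn(4, 128)       # (N, C) features from the image encoder
prototype = torch.randn(10, 128)  # (M, C) pre-extracted gallery features

# Pairwise cosine similarity via broadcasting -> (N, M); larger means more similar.
sim = torch.cosine_similarity(query.unsqueeze(1), prototype.unsqueeze(0), dim=-1)

# Sort each row in descending order; the indices play the role of `pred_label`.
sorted_sim, indices = torch.sort(sim, descending=True, dim=-1)

topk = 5  # mirrors the `topk != -1` branch that truncates the ranking
print(indices[:, :topk], sorted_sim[:, :topk])
```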
+ """ + if not self.prototype_inited: + self.prepare_prototype() + torch.save(self.prototype_vecs, path) diff --git a/mmpretrain/models/selfsup/__init__.py b/mmpretrain/models/selfsup/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..39640cab495cef9082849c1196edc1510d3312ef --- /dev/null +++ b/mmpretrain/models/selfsup/__init__.py @@ -0,0 +1,50 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from .barlowtwins import BarlowTwins +from .base import BaseSelfSupervisor +from .beit import VQKD, BEiT, BEiTPretrainViT +from .byol import BYOL +from .cae import CAE, CAEPretrainViT, DALLEEncoder +from .densecl import DenseCL +from .eva import EVA +from .mae import MAE, MAEViT +from .maskfeat import HOGGenerator, MaskFeat, MaskFeatViT +from .milan import MILAN, CLIPGenerator, MILANViT +from .mixmim import MixMIM, MixMIMPretrainTransformer +from .moco import MoCo +from .mocov3 import MoCoV3, MoCoV3ViT +from .simclr import SimCLR +from .simmim import SimMIM, SimMIMSwinTransformer +from .simsiam import SimSiam +from .swav import SwAV + +__all__ = [ + 'BaseSelfSupervisor', + 'BEiTPretrainViT', + 'VQKD', + 'CAEPretrainViT', + 'DALLEEncoder', + 'MAEViT', + 'HOGGenerator', + 'MaskFeatViT', + 'CLIPGenerator', + 'MILANViT', + 'MixMIMPretrainTransformer', + 'MoCoV3ViT', + 'SimMIMSwinTransformer', + 'MoCo', + 'MoCoV3', + 'BYOL', + 'SimCLR', + 'SimSiam', + 'BEiT', + 'CAE', + 'MAE', + 'MaskFeat', + 'MILAN', + 'MixMIM', + 'SimMIM', + 'EVA', + 'DenseCL', + 'BarlowTwins', + 'SwAV', +] diff --git a/mmpretrain/models/selfsup/barlowtwins.py b/mmpretrain/models/selfsup/barlowtwins.py new file mode 100644 index 0000000000000000000000000000000000000000..4c75cd0caca6ab2dc4c4a14e365fda5daa9bdb83 --- /dev/null +++ b/mmpretrain/models/selfsup/barlowtwins.py @@ -0,0 +1,42 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Dict, List + +import torch + +from mmpretrain.registry import MODELS +from mmpretrain.structures import DataSample +from .base import BaseSelfSupervisor + + +@MODELS.register_module() +class BarlowTwins(BaseSelfSupervisor): + """BarlowTwins. + + Implementation of `Barlow Twins: Self-Supervised Learning via Redundancy + Reduction `_. + Part of the code is borrowed from: + ``_. + """ + + def loss(self, inputs: List[torch.Tensor], data_samples: List[DataSample], + **kwargs) -> Dict[str, torch.Tensor]: + """The forward function in training. + + Args: + inputs (List[torch.Tensor]): The input images. + data_samples (List[DataSample]): All elements required + during the forward function. + + Returns: + Dict[str, torch.Tensor]: A dictionary of loss components. + """ + assert isinstance(inputs, list) + img_v1 = inputs[0] + img_v2 = inputs[1] + + z1 = self.neck(self.backbone(img_v1))[0] # NxC + z2 = self.neck(self.backbone(img_v2))[0] # NxC + + loss = self.head.loss(z1, z2) + losses = dict(loss=loss) + return losses diff --git a/mmpretrain/models/selfsup/base.py b/mmpretrain/models/selfsup/base.py new file mode 100644 index 0000000000000000000000000000000000000000..1dd6e59aa17bece3b44c1713194b7ab40b99f283 --- /dev/null +++ b/mmpretrain/models/selfsup/base.py @@ -0,0 +1,179 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+from abc import ABCMeta, abstractmethod +from typing import List, Optional, Union + +import torch +from mmengine.model import BaseModel +from torch import nn + +from mmpretrain.registry import MODELS +from mmpretrain.structures import DataSample + + +class BaseSelfSupervisor(BaseModel, metaclass=ABCMeta): + """BaseModel for Self-Supervised Learning. + + All self-supervised algorithms should inherit this module. + + Args: + backbone (dict): The backbone module. See + :mod:`mmpretrain.models.backbones`. + neck (dict, optional): The neck module to process features from + backbone. See :mod:`mmpretrain.models.necks`. Defaults to None. + head (dict, optional): The head module to do prediction and calculate + loss from processed features. See :mod:`mmpretrain.models.heads`. + Notice that if the head is not set, almost all methods cannot be + used except :meth:`extract_feat`. Defaults to None. + target_generator: (dict, optional): The target_generator module to + generate targets for self-supervised learning optimization, such as + HOG, extracted features from other modules(DALL-E, CLIP), etc. + pretrained (str, optional): The pretrained checkpoint path, support + local path and remote path. Defaults to None. + data_preprocessor (Union[dict, nn.Module], optional): The config for + preprocessing input data. If None or no specified type, it will use + "SelfSupDataPreprocessor" as type. + See :class:`SelfSupDataPreprocessor` for more details. + Defaults to None. + init_cfg (dict, optional): the config to control the initialization. + Defaults to None. + """ + + def __init__(self, + backbone: dict, + neck: Optional[dict] = None, + head: Optional[dict] = None, + target_generator: Optional[dict] = None, + pretrained: Optional[str] = None, + data_preprocessor: Optional[Union[dict, nn.Module]] = None, + init_cfg: Optional[dict] = None): + if pretrained is not None: + init_cfg = dict(type='Pretrained', checkpoint=pretrained) + + data_preprocessor = data_preprocessor or {} + if isinstance(data_preprocessor, dict): + data_preprocessor.setdefault('type', 'SelfSupDataPreprocessor') + data_preprocessor = MODELS.build(data_preprocessor) + elif not isinstance(data_preprocessor, nn.Module): + raise TypeError('data_preprocessor should be a `dict` or ' + f'`nn.Module` instance, but got ' + f'{type(data_preprocessor)}') + + super().__init__( + init_cfg=init_cfg, data_preprocessor=data_preprocessor) + + if not isinstance(backbone, nn.Module): + backbone = MODELS.build(backbone) + if neck is not None and not isinstance(neck, nn.Module): + neck = MODELS.build(neck) + if head is not None and not isinstance(head, nn.Module): + head = MODELS.build(head) + if target_generator is not None and not isinstance( + target_generator, nn.Module): + target_generator = MODELS.build(target_generator) + + self.backbone = backbone + self.neck = neck + self.head = head + self.target_generator = target_generator + + @property + def with_neck(self) -> bool: + """Check if the model has a neck module.""" + return hasattr(self, 'neck') and self.neck is not None + + @property + def with_head(self) -> bool: + """Check if the model has a head module.""" + return hasattr(self, 'head') and self.head is not None + + @property + def with_target_generator(self) -> bool: + """Check if the model has a target_generator module.""" + return hasattr( + self, 'target_generator') and self.target_generator is not None + + def forward(self, + inputs: Union[torch.Tensor, List[torch.Tensor]], + data_samples: Optional[List[DataSample]] = None, + mode: str = 
'tensor'):
+        """The unified entry for a forward process in both training and test.
+
+        The method currently accepts two modes: "tensor" and "loss":
+
+        - "tensor": Forward the backbone network and return the feature
+          tensor(s) without any post-processing, same as a common
+          PyTorch Module.
+        - "loss": Forward and return a dict of losses according to the given
+          inputs and data samples.
+
+        Args:
+            inputs (torch.Tensor or List[torch.Tensor]): The input tensor with
+                shape (N, C, ...) in general.
+            data_samples (List[DataSample], optional): The other data of
+                every sample. It's required for some algorithms
+                if ``mode="loss"``. Defaults to None.
+            mode (str): Return what kind of value. Defaults to 'tensor'.
+
+        Returns:
+            The return type depends on ``mode``.
+
+            - If ``mode="tensor"``, return a tensor or a tuple of tensors.
+            - If ``mode="loss"``, return a dict of tensors.
+        """
+        if mode == 'tensor':
+            feats = self.extract_feat(inputs)
+            return feats
+        elif mode == 'loss':
+            return self.loss(inputs, data_samples)
+        else:
+            raise RuntimeError(f'Invalid mode "{mode}".')
+
+    def extract_feat(self, inputs: torch.Tensor):
+        """Extract features from the input tensor with shape (N, C, ...).
+
+        The default behavior is extracting features from the backbone.
+
+        Args:
+            inputs (Tensor): A batch of inputs. The shape of it should be
+                ``(num_samples, num_channels, *img_shape)``.
+
+        Returns:
+            tuple | Tensor: The output feature tensor(s).
+        """
+        x = self.backbone(inputs)
+        return x
+
+    @abstractmethod
+    def loss(self, inputs: torch.Tensor,
+             data_samples: List[DataSample]) -> dict:
+        """Calculate losses from a batch of inputs and data samples.
+
+        This is an abstract method, and subclasses should overwrite it
+        if needed.
+
+        Args:
+            inputs (torch.Tensor): The input tensor with shape
+                (N, C, ...) in general.
+            data_samples (List[DataSample]): The annotation data of
+                every sample.
+
+        Returns:
+            dict[str, Tensor]: A dictionary of loss components.
+        """
+        raise NotImplementedError
+
+    def get_layer_depth(self, param_name: str):
+        """Get the layer-wise depth of a parameter.
+
+        Args:
+            param_name (str): The name of the parameter.
+
+        Returns:
+            Tuple[int, int]: The layer-wise depth and the max depth.
+        """
+        if hasattr(self.backbone, 'get_layer_depth'):
+            return self.backbone.get_layer_depth(param_name, 'backbone.')
+        else:
+            raise NotImplementedError(
+                f"The backbone {type(self.backbone)} doesn't "
+                'support `get_layer_depth` yet.')
diff --git a/mmpretrain/models/selfsup/beit.py b/mmpretrain/models/selfsup/beit.py
new file mode 100644
index 0000000000000000000000000000000000000000..c301f7d5cae07370f26b4cd531190b8c3c90e24b
--- /dev/null
+++ b/mmpretrain/models/selfsup/beit.py
@@ -0,0 +1,357 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import math
+from typing import Dict, List, Optional, Tuple, Union
+
+import torch
+from einops import rearrange
+from mmengine.model import BaseModule
+from mmengine.model.weight_init import trunc_normal_
+from torch import nn
+
+from mmpretrain.models.backbones import BEiTViT
+from mmpretrain.models.utils import NormEMAVectorQuantizer, resize_pos_embed
+from mmpretrain.registry import MODELS
+from mmpretrain.structures import DataSample
+from .base import BaseSelfSupervisor
+
+
+@MODELS.register_module()
+class VQKD(BaseModule):
+    """Vector-Quantized Knowledge Distillation.
+ + The module only contains encoder and VectorQuantizer part + Modified from https://github.com/microsoft/unilm/blob/master/beit2/modeling_vqkd.py + + Args: + encoder_config (dict): The config of encoder. + decoder_config (dict, optional): The config of decoder. Currently, + VQKD only support to build encoder. Defaults to None. + num_embed (int): Number of embedding vectors in the codebook. Defaults + to 8192. + embed_dims (int) : The dimension of embedding vectors in the codebook. + Defaults to 32. + decay (float): The decay parameter of EMA. Defaults to 0.99. + beta (float): The mutiplier for VectorQuantizer loss. Defaults to 1. + quantize_kmeans_init (bool): Whether to use k-means to initialize the + VectorQuantizer. Defaults to True. + init_cfg (dict or List[dict], optional): Initialization config dict. + Defaults to None. + """ # noqa: E501 + + def __init__(self, + encoder_config: dict, + decoder_config: Optional[dict] = None, + num_embed: int = 8192, + embed_dims: int = 32, + decay: float = 0.99, + beta: float = 1.0, + quantize_kmeans_init: bool = True, + init_cfg: Optional[dict] = None) -> None: + super().__init__(init_cfg=init_cfg) + + self.encoder = BEiTViT(**encoder_config) + if decoder_config is not None: + self.decoder = BEiTViT(**decoder_config) + + self.quantize = NormEMAVectorQuantizer( + num_embed=num_embed, + embed_dims=embed_dims, + beta=beta, + decay=decay, + kmeans_init=quantize_kmeans_init, + ) + + # task layer + self.encode_task_layer = nn.Sequential( + nn.Linear(self.encoder.arch_settings['embed_dims'], + self.encoder.arch_settings['embed_dims']), nn.Tanh(), + nn.Linear(self.encoder.arch_settings['embed_dims'], embed_dims)) + + def get_tokens(self, x: torch.Tensor) -> dict: + """Get tokens for beit pre-training.""" + _, embed_ind, _ = self.encode(x) + output = {} + output['token'] = embed_ind.view(x.shape[0], -1) + output['input_img'] = x + + return output + + def encode( + self, x: torch.Tensor + ) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: + """Encode the input images and get corresponding results.""" + encoder_features = self.encoder(x)[0] + B, C, N1, N2 = encoder_features.shape + encoder_features = encoder_features.permute(0, 2, 3, + 1).reshape(B, N1 * N2, C) + + with torch.cuda.amp.autocast(enabled=False): + to_quantizer_features = self.encode_task_layer( + encoder_features.type_as(self.encode_task_layer[-1].weight)) + + N = to_quantizer_features.shape[1] + h, w = int(math.sqrt(N)), int(math.sqrt(N)) + + to_quantizer_features = rearrange( + to_quantizer_features, 'b (h w) c -> b c h w', h=h, + w=w) # reshape for quantizer + quantize, loss, embed_ind = self.quantize(to_quantizer_features) + + return quantize, embed_ind, loss + + def forward(self, x: torch.Tensor) -> torch.Tensor: + """The forward function. + + Currently, only support to get tokens. + """ + return self.get_tokens(x)['token'] + + +@MODELS.register_module() +class BEiTPretrainViT(BEiTViT): + """Vision Transformer for BEiT pre-training. + + Args: + arch (str | dict): Vision Transformer architecture. If use string, + choose from 'small', 'base' and 'large'. If use dict, it should + have below keys: + + - **embed_dims** (int): The dimensions of embedding. + - **num_layers** (int): The number of transformer encoder layers. + - **num_heads** (int): The number of heads in attention modules. + - **feedforward_channels** (int): The hidden dimensions in + feedforward modules. + + Defaults to 'base'. + img_size (int | tuple): The expected input image shape. 
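At its core, the quantizer maps each encoder feature to the index of its nearest codeword; those indices become the discrete targets for BEiT pre-training. A simplified lookup sketch (unit-normalized features and codewords, which is an assumption suggested by the "Norm" in ``NormEMAVectorQuantizer``; the EMA codebook update and straight-through gradient are omitted):

```python
import torch
import torch.nn.functional as F

def quantize_to_tokens(feats: torch.Tensor, codebook: torch.Tensor) -> torch.Tensor:
    """Map features (B, C, H, W) to codebook indices (B, H*W) by nearest neighbour."""
    b, c, h, w = feats.shape
    flat = F.normalize(feats.permute(0, 2, 3, 1).reshape(-1, c), dim=-1)
    codewords = F.normalize(codebook, dim=-1)
    # On the unit sphere, smallest L2 distance == largest cosine similarity.
    ids = (flat @ codewords.t()).argmax(dim=-1)
    return ids.view(b, h * w)

tokens = quantize_to_tokens(torch.randn(2, 32, 14, 14), torch.randn(8192, 32))
```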
Because we
+            support dynamic input shape, just set the argument to the most
+            common input image shape. Defaults to 224.
+        patch_size (int | tuple): The patch size in patch embedding.
+            Defaults to 16.
+        in_channels (int): The number of input channels. Defaults to 3.
+        out_indices (Sequence | int): Output from which stages.
+            Defaults to -1, meaning the last stage.
+        drop_rate (float): Probability of an element to be zeroed.
+            Defaults to 0.
+        drop_path_rate (float): Stochastic depth rate. Defaults to 0.
+        qkv_bias (bool): Whether to add bias for qkv in attention modules.
+            Defaults to True.
+        norm_cfg (dict): Config dict for normalization layer.
+            Defaults to ``dict(type='LN')``.
+        final_norm (bool): Whether to add an additional layer to normalize the
+            final feature map. Defaults to True.
+        out_type (str): The type of output features. Please choose from
+
+            - ``"cls_token"``: The class token tensor with shape (B, C).
+            - ``"featmap"``: The feature map tensor from the patch tokens
+              with shape (B, C, H, W).
+            - ``"avg_featmap"``: The global averaged feature map tensor
+              with shape (B, C).
+            - ``"raw"``: The raw feature tensor including patch tokens and
+              class tokens, with shape (B, L, C).
+
+            It only works without input mask. Defaults to ``"avg_featmap"``.
+        with_cls_token (bool): Whether to concatenate the class token with the
+            image tokens as transformer input. Defaults to True.
+        frozen_stages (int): Stages to be frozen (stop grad and set eval mode).
+            -1 means not freezing any parameters. Defaults to -1.
+        use_abs_pos_emb (bool): Whether or not to use absolute position
+            embedding. Defaults to False.
+        use_rel_pos_bias (bool): Whether or not to use relative position bias.
+            Defaults to False.
+        use_shared_rel_pos_bias (bool): Whether or not to use shared relative
+            position bias. Defaults to True.
+        layer_scale_init_value (float): The initialization value for
+            the learnable scaling of attention and FFN. Defaults to 0.1.
+        interpolate_mode (str): Select the interpolate mode for position
+            embedding vector resize. Defaults to "bicubic".
+        patch_cfg (dict): Configs of patch embedding. Defaults to an empty dict.
+        layer_cfgs (Sequence | dict): Configs of each transformer layer in
+            the encoder. Defaults to an empty dict.
+        init_cfg (dict, optional): Initialization config dict.
+            Defaults to None.
+ """ + + def __init__(self, + arch: str = 'base', + img_size: int = 224, + patch_size: int = 16, + in_channels: int = 3, + out_indices: int = -1, + drop_rate: float = 0, + drop_path_rate: float = 0, + norm_cfg: dict = dict(type='LN', eps=1e-6), + final_norm: bool = True, + out_type: str = 'raw', + frozen_stages: int = -1, + use_abs_pos_emb: bool = False, + use_rel_pos_bias: bool = False, + use_shared_rel_pos_bias: bool = True, + layer_scale_init_value: int = 0.1, + interpolate_mode: str = 'bicubic', + patch_cfg: dict = dict(padding=0), + layer_cfgs: dict = dict(), + init_cfg: Optional[Union[List[dict], dict]] = None) -> None: + super().__init__( + arch=arch, + img_size=img_size, + patch_size=patch_size, + in_channels=in_channels, + out_indices=out_indices, + drop_rate=drop_rate, + drop_path_rate=drop_path_rate, + norm_cfg=norm_cfg, + final_norm=final_norm, + out_type=out_type, + with_cls_token=True, + frozen_stages=frozen_stages, + use_abs_pos_emb=use_abs_pos_emb, + use_shared_rel_pos_bias=use_shared_rel_pos_bias, + use_rel_pos_bias=use_rel_pos_bias, + layer_scale_init_value=layer_scale_init_value, + interpolate_mode=interpolate_mode, + patch_cfg=patch_cfg, + layer_cfgs=layer_cfgs, + init_cfg=init_cfg) + + self.mask_token = nn.Parameter(torch.zeros(1, 1, self.embed_dims)) + + def init_weights(self) -> None: + """Initialize position embedding, patch embedding and cls token.""" + super().init_weights() + + if (isinstance(self.init_cfg, dict) + and self.init_cfg['type'] == 'Pretrained'): + # Suppress default init if use pretrained model. + return + + trunc_normal_(self.cls_token, std=0.02) + trunc_normal_(self.mask_token, std=0.02) + self.rescale_init_weight() + + def rescale_init_weight(self) -> None: + """Rescale the initialized weights.""" + + def rescale(param, layer_id): + param.div_(math.sqrt(2.0 * layer_id)) + + for layer_id, layer in enumerate(self.layers): + rescale(layer.attn.proj.weight.data, layer_id + 1) + rescale(layer.ffn.layers[1].weight.data, layer_id + 1) + + def forward(self, x: torch.Tensor, + mask: Optional[torch.Tensor]) -> Tuple[torch.Tensor]: + """The BEiT style forward function. + + The function supports two kind of forward behaviors. If the ``mask`` is + not ``None``, the forward function will be executed as masked image + modeling pre-training; if the ``mask`` is ``None``, the forward + function will call ``super().forward()``, which extract features from + images without mask. + + Args: + x (torch.Tensor): Input images, which is of shape (B x C x H x W). + mask (torch.Tensor, optional): Mask for input, which is of shape + (B x patch_resolution[0] x patch_resolution[1]). + + Returns: + Tuple[torch.Tensor]: Hidden features. + """ + if mask is None: + return super().forward(x) + + else: + x, patch_resolution = self.patch_embed(x) + + # replace the masked visual tokens by mask_token + B, L, _ = x.shape + mask_token = self.mask_token.expand(B, L, -1) + w = mask.flatten(1).unsqueeze(-1).type_as(mask_token) + x = x * (1. 
- w) + mask_token * w + + # stole cls_tokens impl from Phil Wang, thanks + cls_tokens = self.cls_token.expand(B, -1, -1) + x = torch.cat((cls_tokens, x), dim=1) + if self.pos_embed is not None: + x = x + resize_pos_embed( + self.pos_embed, + self.patch_resolution, + patch_resolution, + mode=self.interpolate_mode, + num_extra_tokens=self.num_extra_tokens) + x = self.drop_after_pos(x) + + self.shared_rel_pos_bias = self.rel_pos_bias().to( + mask.device) if self.rel_pos_bias is not None else None + + outs = [] + for i, layer in enumerate(self.layers): + x = layer(x, rel_pos_bias=self.shared_rel_pos_bias) + + if i == len(self.layers) - 1 and self.final_norm: + x = self.norm1(x) + + if i in self.out_indices: + outs.append(x) + + return tuple(outs) + + +@MODELS.register_module() +class BEiT(BaseSelfSupervisor): + """BEiT v1/v2. + + Implementation of `BEiT: BERT Pre-Training of Image Transformers + `_ and `BEiT v2: Masked Image Modeling + with Vector-Quantized Visual Tokenizers + `_. + """ + + def extract_feat(self, inputs: torch.Tensor): + return self.backbone(inputs, mask=None) + + def loss(self, inputs: List[torch.Tensor], data_samples: List[DataSample], + **kwargs) -> Dict[str, torch.Tensor]: + """The forward function in training. + + Args: + inputs (List[torch.Tensor]): The input images. + data_samples (List[DataSample]): All elements required + during the forward function. + + Returns: + Dict[str, torch.Tensor]: A dictionary of loss components. + """ + mask = torch.stack([data_sample.mask for data_sample in data_samples]) + + img_latent = self.backbone(inputs[0], mask) + + # inputs[1] is the target image + with torch.no_grad(): + target = self.target_generator(inputs[1]) + target = target.detach() + + if self.with_neck: + # BEiT v2 + feats, feats_cls_pt = self.neck( + img_latent, rel_pos_bias=self.backbone.shared_rel_pos_bias) + loss = self.head.loss(feats, feats_cls_pt, target, mask) + else: + # BEiT v1 + loss = self.head.loss(img_latent[0], target, mask) + + if isinstance(loss, torch.Tensor): + losses = dict(loss=loss) + return losses + elif isinstance(loss, Tuple): + # the loss_1 and loss_2 are general reconstruction loss (patch + # feature vectors from last layer of backbone) and early state + # reconstruction loss (patch feature vectors from intermediate + # layer of backbone) + loss_1, loss_2 = loss[0], loss[1] + losses = dict() + # the key with prefix 'loss', like loss_1 and loss_2, will be used + # as the final criterion + losses['loss_1'] = loss_1 + losses['loss_2'] = loss_2 + return losses diff --git a/mmpretrain/models/selfsup/byol.py b/mmpretrain/models/selfsup/byol.py new file mode 100644 index 0000000000000000000000000000000000000000..803e4005da8620b0e5a93fb29cb65e90a78f345f --- /dev/null +++ b/mmpretrain/models/selfsup/byol.py @@ -0,0 +1,89 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Dict, List, Optional, Union + +import torch +import torch.nn as nn + +from mmpretrain.registry import MODELS +from mmpretrain.structures import DataSample +from ..utils import CosineEMA +from .base import BaseSelfSupervisor + + +@MODELS.register_module() +class BYOL(BaseSelfSupervisor): + """BYOL. + + Implementation of `Bootstrap Your Own Latent: A New Approach to + Self-Supervised Learning `_. + + Args: + backbone (dict): Config dict for module of backbone. + neck (dict): Config dict for module of deep features + to compact feature vectors. + head (dict): Config dict for module of head functions. 
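The single line ``x = x * (1. - w) + mask_token * w`` above is where masked image modeling happens: the embeddings of masked patches are swapped for one shared learnable token before the transformer runs. A toy sketch with hypothetical shapes:

```python
import torch

B, L, C = 2, 196, 768
x = torch.randn(B, L, C)               # patch embeddings from the patch_embed layer
mask = torch.rand(B, 14, 14) > 0.6     # boolean mask over the 14x14 patch grid
mask_token = torch.zeros(1, 1, C)      # a learnable nn.Parameter in the real model

w = mask.flatten(1).unsqueeze(-1).type_as(x)   # (B, L, 1); 1 where a patch is masked
x = x * (1. - w) + mask_token * w              # masked positions now hold the mask token
```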
+ base_momentum (float): The base momentum coefficient for the target + network. Defaults to 0.004. + pretrained (str, optional): The pretrained checkpoint path, support + local path and remote path. Defaults to None. + data_preprocessor (dict, optional): The config for preprocessing + input data. If None or no specified type, it will use + "SelfSupDataPreprocessor" as type. + See :class:`SelfSupDataPreprocessor` for more details. + Defaults to None. + init_cfg (Union[List[dict], dict], optional): Config dict for weight + initialization. Defaults to None. + """ + + def __init__(self, + backbone: dict, + neck: dict, + head: dict, + base_momentum: float = 0.004, + pretrained: Optional[str] = None, + data_preprocessor: Optional[dict] = None, + init_cfg: Optional[Union[List[dict], dict]] = None) -> None: + super().__init__( + backbone=backbone, + neck=neck, + head=head, + pretrained=pretrained, + data_preprocessor=data_preprocessor, + init_cfg=init_cfg) + + # create momentum model + self.target_net = CosineEMA( + nn.Sequential(self.backbone, self.neck), momentum=base_momentum) + + def loss(self, inputs: List[torch.Tensor], data_samples: List[DataSample], + **kwargs) -> Dict[str, torch.Tensor]: + """The forward function in training. + + Args: + inputs (List[torch.Tensor]): The input images. + data_samples (List[DataSample]): All elements required + during the forward function. + + Returns: + Dict[str, torch.Tensor]: A dictionary of loss components. + """ + assert isinstance(inputs, list) + img_v1 = inputs[0] + img_v2 = inputs[1] + # compute online features + proj_online_v1 = self.neck(self.backbone(img_v1))[0] + proj_online_v2 = self.neck(self.backbone(img_v2))[0] + # compute target features + with torch.no_grad(): + # update the target net + self.target_net.update_parameters( + nn.Sequential(self.backbone, self.neck)) + + proj_target_v1 = self.target_net(img_v1)[0] + proj_target_v2 = self.target_net(img_v2)[0] + + loss_1 = self.head.loss(proj_online_v1, proj_target_v2) + loss_2 = self.head.loss(proj_online_v2, proj_target_v1) + + losses = dict(loss=2. * (loss_1 + loss_2)) + return losses diff --git a/mmpretrain/models/selfsup/cae.py b/mmpretrain/models/selfsup/cae.py new file mode 100644 index 0000000000000000000000000000000000000000..67ac09188e9bf97cdbea63378aa4facb1e8348ab --- /dev/null +++ b/mmpretrain/models/selfsup/cae.py @@ -0,0 +1,472 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
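The target network in BYOL is never trained by gradients; it trails the online backbone and neck via an exponential moving average. The sketch below shows the general form of that update together with an assumed cosine momentum schedule; the exact convention used by ``CosineEMA`` (including whether ``momentum`` weights the old or the new parameters) should be checked against the utility itself.

```python
import math
import torch

@torch.no_grad()
def ema_update(target: torch.nn.Module, online: torch.nn.Module, m: float) -> None:
    """Move each target parameter a small step towards its online counterpart."""
    for p_t, p_o in zip(target.parameters(), online.parameters()):
        p_t.mul_(1.0 - m).add_(p_o, alpha=m)

def cosine_momentum(base: float, step: int, max_steps: int) -> float:
    """Assumed schedule: start at `base` and decay towards 0 over training."""
    return base * (math.cos(math.pi * step / max_steps) + 1) / 2

online = torch.nn.Linear(8, 8)
target = torch.nn.Linear(8, 8)
target.load_state_dict(online.state_dict())
ema_update(target, online, m=cosine_momentum(0.004, step=10, max_steps=100))
```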
+# Part of code is modified from BEiT +# https://github.com/microsoft/unilm/blob/master/beit/dall_e/encoder.py +import math +from collections import OrderedDict +from functools import partial +from typing import Dict, List, Optional, Union + +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmengine.model import BaseModule +from mmengine.model.weight_init import trunc_normal_ + +from mmpretrain.models.backbones import BEiTViT +from mmpretrain.registry import MODELS +from mmpretrain.structures import DataSample +from ..utils import build_2d_sincos_position_embedding +from .base import BaseSelfSupervisor + + +class Conv2d(nn.Module): + """Rewrite Conv2d module according to DALL-E code.""" + + def __init__(self, + n_in: int, + n_out: int, + kw: int, + use_float16: bool = True, + device: torch.device = torch.device('cpu'), + requires_grad: bool = False) -> None: + super().__init__() + + w = torch.empty((n_out, n_in, kw, kw), + dtype=torch.float32, + device=device, + requires_grad=requires_grad) + w.normal_(std=1 / math.sqrt(n_in * kw**2)) + + b = torch.zeros((n_out, ), + dtype=torch.float32, + device=device, + requires_grad=requires_grad) + self.kw = kw + self.w, self.b = nn.Parameter(w), nn.Parameter(b) + self.use_float16 = use_float16 + + def forward(self, x: torch.Tensor) -> torch.Tensor: + if self.use_float16 and 'cuda' in self.w.device.type: + if x.dtype != torch.float16: + x = x.half() + + w, b = self.w.half(), self.b.half() + else: + if x.dtype != torch.float32: + x = x.float() + + w, b = self.w, self.b + + return F.conv2d(x, w, b, padding=(self.kw - 1) // 2) + + +class EncoderBlock(nn.Module): + """Rewrite EncoderBlock module according to DALL-E code.""" + + def __init__(self, + n_in: int, + n_out: int, + n_layers: int, + device: torch.device = None, + requires_grad: bool = False) -> None: + super().__init__() + self.n_hid = n_out // 4 + self.post_gain = 1 / (n_layers**2) + + make_conv = partial(Conv2d, device=device, requires_grad=requires_grad) + self.id_path = make_conv(n_in, n_out, + 1) if n_in != n_out else nn.Identity() + self.res_path = nn.Sequential( + OrderedDict([ + ('relu_1', nn.ReLU()), + ('conv_1', make_conv(n_in, self.n_hid, 3)), + ('relu_2', nn.ReLU()), + ('conv_2', make_conv(self.n_hid, self.n_hid, 3)), + ('relu_3', nn.ReLU()), + ('conv_3', make_conv(self.n_hid, self.n_hid, 3)), + ('relu_4', nn.ReLU()), + ('conv_4', make_conv(self.n_hid, n_out, 1)), + ])) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + return self.id_path(x) + self.post_gain * self.res_path(x) + + +@MODELS.register_module(name='DALL-E') +class DALLEEncoder(BaseModule): + """DALL-E Encoder for feature extraction. + + Args: + group_count (int): Number of groups in DALL-E encoder. Defaults to 4. + n_hid (int): Dimension of hidden layers. Defaults to 256. + n_blk_per_group (int): Number of blocks per group. Defaults to 2. + input_channels: (int): The channels of input images. Defaults to 3. + vocab_size (int): Vocabulary size, indicating the number of classes. + Defaults to 8192. + device (torch.device): Device of parameters. Defaults to + ``torch.device('cpu')``. + requires_grad (bool): Require gradient or not. Defaults to False. + init_cfg (Union[List[dict], dict], optional): Config dict for weight + initialization. Defaults to None. 
+ """ + + def __init__(self, + group_count: int = 4, + n_hid: int = 256, + n_blk_per_group: int = 2, + input_channels: int = 3, + vocab_size: int = 8192, + device: torch.device = torch.device('cpu'), + requires_grad: bool = False, + init_cfg: Union[dict, List[dict], None] = None): + super().__init__(init_cfg=init_cfg) + self.input_channels = input_channels + + blk_range = range(n_blk_per_group) + n_layers = group_count * n_blk_per_group + make_conv = partial(Conv2d, device=device, requires_grad=requires_grad) + make_blk = partial( + EncoderBlock, + n_layers=n_layers, + device=device, + requires_grad=requires_grad) + + self.blocks = nn.Sequential( + OrderedDict([ + ('input', make_conv(input_channels, 1 * n_hid, 7)), + ('group_1', + nn.Sequential( + OrderedDict([ + *[(f'block_{i + 1}', make_blk(1 * n_hid, 1 * n_hid)) + for i in blk_range], + ('pool', nn.MaxPool2d(kernel_size=2)), + ]))), + ('group_2', + nn.Sequential( + OrderedDict([ + *[(f'block_{i + 1}', + make_blk(1 * n_hid if i == 0 else 2 * n_hid, + 2 * n_hid)) for i in blk_range], + ('pool', nn.MaxPool2d(kernel_size=2)), + ]))), + ('group_3', + nn.Sequential( + OrderedDict([ + *[(f'block_{i + 1}', + make_blk(2 * n_hid if i == 0 else 4 * n_hid, + 4 * n_hid)) for i in blk_range], + ('pool', nn.MaxPool2d(kernel_size=2)), + ]))), + ('group_4', + nn.Sequential( + OrderedDict([ + *[(f'block_{i + 1}', + make_blk(4 * n_hid if i == 0 else 8 * n_hid, + 8 * n_hid)) for i in blk_range], + ]))), + ('output', + nn.Sequential( + OrderedDict([ + ('relu', nn.ReLU()), + ('conv', + make_conv( + 8 * n_hid, vocab_size, 1, use_float16=False)), + ]))), + ])) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + """Forward function of DALL-E encoder. + + Args: + x (torch.Tensor): The input images with shape (B, C, H, W). + + Returns: + torch.Tensor: The output with shape (B, vocab_size, h, w). + """ + x = x.float() + if len(x.shape) != 4: + raise ValueError(f'input shape {x.shape} is not 4d') + if x.shape[1] != self.input_channels: + raise ValueError(f'input has {x.shape[1]} channels but model \ + built for {self.input_channels}') + if x.dtype != torch.float32: + raise ValueError('input must have dtype torch.float32') + + return self.blocks(x) + + +@MODELS.register_module() +class CAEPretrainViT(BEiTViT): + """Vision Transformer for CAE pre-training and the implementation is based + on BEiTViT. + + Args: + arch (str | dict): Vision Transformer architecture. Default: 'b' + img_size (int | tuple): Input image size + patch_size (int | tuple): The patch size + out_indices (Sequence | int): Output from which stages. + Defaults to -1, means the last stage. + drop_rate (float): Probability of an element to be zeroed. + Defaults to 0. + drop_path_rate (float): stochastic depth rate. Defaults to 0. + bias (bool | str): The option to add leanable bias for q, k, v. If bias + is True, it will add leanable bias. If bias is 'qv_bias', it will + only add leanable bias for q, v. If bias is False, it will not add + bias for q, k, v. Default to 'qv_bias'. + norm_cfg (dict): Config dict for normalization layer. + Defaults to ``dict(type='LN')``. + final_norm (bool): Whether to add a additional layer to normalize + final feature map. Defaults to True. + out_type (str): The type of output features. Please choose from + + - ``"cls_token"``: The class token tensor with shape (B, C). + - ``"featmap"``: The feature map tensor from the patch tokens + with shape (B, C, H, W). + - ``"avg_featmap"``: The global averaged feature map tensor + with shape (B, C). 
+ - ``"raw"``: The raw feature tensor includes patch tokens and + class tokens with shape (B, L, C). + + It only works without input mask. Defaults to ``"avg_featmap"``. + interpolate_mode (str): Select the interpolate mode for position + embeding vector resize. Defaults to "bicubic". + layer_scale_init_value (float, optional): The init value of gamma in + BEiTTransformerEncoderLayer. + patch_cfg (dict): Configs of patch embeding. Defaults to an empty dict. + layer_cfgs (Sequence | dict): Configs of each transformer layer in + encoder. Defaults to an empty dict. + init_cfg (dict, optional): Initialization config dict. + Defaults to None. + """ + + def __init__( + self, + arch: str = 'b', + img_size: int = 224, + patch_size: int = 16, + in_channels: int = 3, + out_indices: int = -1, + drop_rate: float = 0, + drop_path_rate: float = 0, + bias: bool = 'qv_bias', + norm_cfg: dict = dict(type='LN', eps=1e-6), + final_norm: bool = True, + out_type: str = 'raw', + frozen_stages: int = -1, + use_abs_pos_emb: bool = True, + use_rel_pos_bias: bool = False, + use_shared_rel_pos_bias: bool = False, + layer_scale_init_value: float = None, + interpolate_mode: str = 'bicubic', + patch_cfg: dict = dict(), + layer_cfgs: dict = dict(), + init_cfg: dict = [ + dict(type='Constant', val=1, layer=['LayerNorm']), + dict(type='TruncNormal', std=0.02, layer=['Conv2d']), + dict(type='Xavier', distribution='uniform', layer=['Linear']) + ] + ) -> None: + super().__init__( + arch=arch, + img_size=img_size, + patch_size=patch_size, + in_channels=in_channels, + out_indices=out_indices, + drop_rate=drop_rate, + drop_path_rate=drop_path_rate, + bias=bias, + norm_cfg=norm_cfg, + final_norm=final_norm, + out_type=out_type, + with_cls_token=True, + frozen_stages=frozen_stages, + use_abs_pos_emb=use_abs_pos_emb, + use_rel_pos_bias=use_rel_pos_bias, + use_shared_rel_pos_bias=use_shared_rel_pos_bias, + layer_scale_init_value=layer_scale_init_value, + interpolate_mode=interpolate_mode, + patch_cfg=patch_cfg, + layer_cfgs=layer_cfgs, + init_cfg=init_cfg) + self.pos_embed.requires_grad = False + self.num_patches = self.patch_resolution[0] * self.patch_resolution[1] + + def init_weights(self) -> None: + """Initialize position embedding, patch embedding and cls token.""" + super().init_weights() + if not (isinstance(self.init_cfg, dict) + and self.init_cfg['type'] == 'Pretrained'): + # initialize position embedding in backbone + pos_embed = build_2d_sincos_position_embedding( + int(self.num_patches**.5), + self.pos_embed.shape[-1], + cls_token=True) + self.pos_embed.data.copy_(pos_embed.float()) + + trunc_normal_(self.cls_token, std=.02) + + def forward(self, x: torch.Tensor, + mask: Optional[torch.Tensor]) -> torch.Tensor: + """Generate features for masked images. + + This function generates mask images and get the hidden features for + visible patches. + + The function supports two kind of forward behaviors. If the ``mask`` is + not ``None``, the forward function will be executed as masked image + modeling pre-training; if the ``mask`` is ``None``, the forward + function will call ``super().forward()``, which extract features from + images without mask. + + Args: + x (torch.Tensor): Input images, which is of shape B x C x H x W. + mask (torch.Tensor, optional): Mask for input, which is of shape + B x L. + + Returns: + torch.Tensor: hidden features. 
+ """ + if mask is None: + return super().forward(x) + + else: + x, _ = self.patch_embed(x) + batch_size, _, dim = x.size() + + cls_tokens = self.cls_token.expand(batch_size, -1, -1) + + # NOTE: unmasked embeddings + x_unmasked = x[~mask].reshape(batch_size, -1, dim) + x_unmasked = torch.cat((cls_tokens, x_unmasked), dim=1) + + pos_embed = self.pos_embed.expand(batch_size, self.num_patches + 1, + dim) + pos_embed_unmasked = pos_embed[:, 1:][~mask].reshape( + batch_size, -1, dim) + pos_embed_unmasked = torch.cat( + (pos_embed[:, :1], pos_embed_unmasked), dim=1) + x_unmasked = x_unmasked + pos_embed_unmasked + + x_unmasked = self.drop_after_pos(x_unmasked) + + for i, layer in enumerate(self.layers): + x_unmasked = layer(x=x_unmasked, rel_pos_bias=None) + + if i == len(self.layers) - 1 and self.final_norm: + x_unmasked = self.norm1(x_unmasked) + + return x_unmasked + + +@MODELS.register_module() +class CAE(BaseSelfSupervisor): + """CAE. + + Implementation of `Context Autoencoder for Self-Supervised Representation + Learning `_. + + Args: + backbone (dict): Config dict for module of backbone. + neck (dict): Config dict for module of neck. + head (dict): Config dict for module of head functions. + target_generator: (dict, optional): The target_generator module to + generate targets for self-supervised learning optimization, such as + HOG, extracted features from other modules(DALL-E, CLIP), etc. + base_momentum (float): The base momentum coefficient for the target + network. Defaults to 0.0. + data_preprocessor (dict, optional): The config for preprocessing + input data. If None or no specified type, it will use + "SelfSupDataPreprocessor" as type. + See :class:`SelfSupDataPreprocessor` for more details. + Defaults to None. + init_cfg (Union[List[dict], dict], optional): Config dict for weight + initialization. Defaults to None. + """ + + def __init__(self, + backbone: dict, + neck: dict, + head: dict, + target_generator: Optional[dict] = None, + base_momentum: float = 0.0, + data_preprocessor: Optional[dict] = None, + init_cfg: Optional[Union[List[dict], dict]] = None) -> None: + super().__init__( + backbone=backbone, + neck=neck, + head=head, + target_generator=target_generator, + data_preprocessor=data_preprocessor, + init_cfg=init_cfg) + + self.momentum = base_momentum + self.teacher = MODELS.build(backbone) + + def init_weights(self) -> None: + """Initialize weights.""" + super().init_weights() + + # init the weights of teacher with those of backbone + for param_backbone, param_teacher in zip(self.backbone.parameters(), + self.teacher.parameters()): + param_teacher.detach() + param_teacher.data.copy_(param_backbone.data) + param_teacher.requires_grad = False + + def momentum_update(self) -> None: + """Momentum update of the teacher network.""" + for param_bacbone, param_teacher in zip(self.backbone.parameters(), + self.teacher.parameters()): + param_teacher.data = param_teacher.data * self.momentum + \ + param_bacbone.data * (1. - self.momentum) + + def extract_feat(self, inputs: torch.Tensor): + return self.backbone(inputs, mask=None) + + def loss(self, inputs: List[torch.Tensor], data_samples: List[DataSample], + **kwargs) -> Dict[str, torch.Tensor]: + """The forward function in training. + + Args: + inputs (List[torch.Tensor]): The input images. + data_samples (List[DataSample]): All elements required + during the forward function. + + Returns: + Dict[str, torch.Tensor]: A dictionary of loss components. 
+ """ + mask = torch.stack([data_sample.mask for data_sample in data_samples]) + mask = mask.flatten(1).to(torch.bool) + + unmasked = self.backbone(inputs[0], mask) + + # get the latent prediction for the masked patches + with torch.no_grad(): + # inputs[0] is the prediction image + latent_target = self.teacher(inputs[0], ~mask) + latent_target = latent_target[:, 1:, :] + self.momentum_update() + + pos_embed = self.backbone.pos_embed.expand(inputs[0].shape[0], -1, -1) + pos_embed_masked = pos_embed[:, + 1:][mask].reshape(inputs[0].shape[0], -1, + pos_embed.shape[-1]) + pos_embed_unmasked = pos_embed[:, 1:][~mask].reshape( + inputs[0].shape[0], -1, pos_embed.shape[-1]) + + # input the unmasked tokens and masked tokens to the decoder + logits, latent_pred = self.neck(unmasked[:, 1:], pos_embed_masked, + pos_embed_unmasked) + + logits = logits.view(-1, logits.shape[-1]) + # inputs[1] is the target image + logits_target = self.target_generator(inputs[1]) + loss_main, loss_align = self.head.loss(logits, logits_target, + latent_pred, latent_target, + mask) + losses = dict() + + losses['loss'] = loss_main + loss_align + losses['main'] = loss_main + losses['align'] = loss_align + return losses diff --git a/mmpretrain/models/selfsup/densecl.py b/mmpretrain/models/selfsup/densecl.py new file mode 100644 index 0000000000000000000000000000000000000000..c969af17fa921a119f6b05b5a319e104f6422494 --- /dev/null +++ b/mmpretrain/models/selfsup/densecl.py @@ -0,0 +1,203 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Dict, List, Optional, Union + +import torch +import torch.nn as nn +from mmengine.dist import all_gather +from mmengine.model import ExponentialMovingAverage + +from mmpretrain.registry import MODELS +from mmpretrain.structures import DataSample +from ..utils import batch_shuffle_ddp, batch_unshuffle_ddp +from .base import BaseSelfSupervisor + + +@MODELS.register_module() +class DenseCL(BaseSelfSupervisor): + """DenseCL. + + Implementation of `Dense Contrastive Learning for Self-Supervised Visual + Pre-Training `_. + Borrowed from the authors' code: ``_. + The loss_lambda warmup is in `engine/hooks/densecl_hook.py`. + + Args: + backbone (dict): Config dict for module of backbone. + neck (dict): Config dict for module of deep features to compact + feature vectors. + head (dict): Config dict for module of head functions. + queue_len (int): Number of negative keys maintained in the queue. + Defaults to 65536. + feat_dim (int): Dimension of compact feature vectors. Defaults to 128. + momentum (float): Momentum coefficient for the momentum-updated + encoder. Defaults to 0.999. + loss_lambda (float): Loss weight for the single and dense contrastive + loss. Defaults to 0.5. + pretrained (str, optional): The pretrained checkpoint path, support + local path and remote path. Defaults to None. + data_preprocessor (dict, optional): The config for preprocessing + input data. If None or no specified type, it will use + "SelfSupDataPreprocessor" as type. + See :class:`SelfSupDataPreprocessor` for more details. + Defaults to None. + init_cfg (Union[List[dict], dict], optional): Config dict for weight + initialization. Defaults to None. 
+ """ + + def __init__(self, + backbone: dict, + neck: dict, + head: dict, + queue_len: int = 65536, + feat_dim: int = 128, + momentum: float = 0.001, + loss_lambda: float = 0.5, + pretrained: Optional[str] = None, + data_preprocessor: Optional[dict] = None, + init_cfg: Optional[Union[List[dict], dict]] = None) -> None: + super().__init__( + backbone=backbone, + neck=neck, + head=head, + pretrained=pretrained, + data_preprocessor=data_preprocessor, + init_cfg=init_cfg) + + # create momentum model + self.encoder_k = ExponentialMovingAverage( + nn.Sequential(self.backbone, self.neck), momentum) + + self.queue_len = queue_len + self.loss_lambda = loss_lambda + + # create the queue + self.register_buffer('queue', torch.randn(feat_dim, queue_len)) + self.queue = nn.functional.normalize(self.queue, dim=0) + self.register_buffer('queue_ptr', torch.zeros(1, dtype=torch.long)) + + # create the second queue for dense output + self.register_buffer('queue2', torch.randn(feat_dim, queue_len)) + self.queue2 = nn.functional.normalize(self.queue2, dim=0) + self.register_buffer('queue2_ptr', torch.zeros(1, dtype=torch.long)) + + @torch.no_grad() + def _dequeue_and_enqueue(self, keys: torch.Tensor) -> None: + """Update queue.""" + # gather keys before updating queue + keys = torch.cat(all_gather(keys), dim=0) + + batch_size = keys.shape[0] + + ptr = int(self.queue_ptr) + assert self.queue_len % batch_size == 0 # for simplicity + + # replace the keys at ptr (dequeue and enqueue) + self.queue[:, ptr:ptr + batch_size] = keys.transpose(0, 1) + ptr = (ptr + batch_size) % self.queue_len # move pointer + + self.queue_ptr[0] = ptr + + @torch.no_grad() + def _dequeue_and_enqueue2(self, keys: torch.Tensor) -> None: + """Update queue2.""" + # gather keys before updating queue + keys = torch.cat(all_gather(keys), dim=0) + + batch_size = keys.shape[0] + + ptr = int(self.queue2_ptr) + assert self.queue_len % batch_size == 0 # for simplicity + + # replace the keys at ptr (dequeue and enqueue) + self.queue2[:, ptr:ptr + batch_size] = keys.transpose(0, 1) + ptr = (ptr + batch_size) % self.queue_len # move pointer + + self.queue2_ptr[0] = ptr + + def loss(self, inputs: List[torch.Tensor], data_samples: List[DataSample], + **kwargs) -> Dict[str, torch.Tensor]: + """The forward function in training. + + Args: + inputs (List[torch.Tensor]): The input images. + data_samples (List[DataSample]): All elements required + during the forward function. + + Returns: + Dict[str, torch.Tensor]: A dictionary of loss components. 
+ """ + assert isinstance(inputs, list) + im_q = inputs[0] + im_k = inputs[1] + # compute query features + q_b = self.backbone(im_q) # backbone features + q, q_grid, q2 = self.neck(q_b) # queries: NxC; NxCxS^2 + q_b = q_b[0] + q_b = q_b.view(q_b.size(0), q_b.size(1), -1) + + q = nn.functional.normalize(q, dim=1) + q2 = nn.functional.normalize(q2, dim=1) + q_grid = nn.functional.normalize(q_grid, dim=1) + q_b = nn.functional.normalize(q_b, dim=1) + + # compute key features + with torch.no_grad(): # no gradient to keys + # update the key encoder + self.encoder_k.update_parameters( + nn.Sequential(self.backbone, self.neck)) + + # shuffle for making use of BN + im_k, idx_unshuffle = batch_shuffle_ddp(im_k) + + k_b = self.encoder_k.module[0](im_k) # backbone features + k, k_grid, k2 = self.encoder_k.module[1](k_b) # keys: NxC; NxCxS^2 + k_b = k_b[0] + k_b = k_b.view(k_b.size(0), k_b.size(1), -1) + + k = nn.functional.normalize(k, dim=1) + k2 = nn.functional.normalize(k2, dim=1) + k_grid = nn.functional.normalize(k_grid, dim=1) + k_b = nn.functional.normalize(k_b, dim=1) + + # undo shuffle + k = batch_unshuffle_ddp(k, idx_unshuffle) + k2 = batch_unshuffle_ddp(k2, idx_unshuffle) + k_grid = batch_unshuffle_ddp(k_grid, idx_unshuffle) + k_b = batch_unshuffle_ddp(k_b, idx_unshuffle) + + # compute logits + # Einstein sum is more intuitive + # positive logits: Nx1 + l_pos = torch.einsum('nc,nc->n', [q, k]).unsqueeze(-1) + # negative logits: NxK + l_neg = torch.einsum('nc,ck->nk', [q, self.queue.clone().detach()]) + + # feat point set sim + backbone_sim_matrix = torch.matmul(q_b.permute(0, 2, 1), k_b) + densecl_sim_ind = backbone_sim_matrix.max(dim=2)[1] # NxS^2 + + indexed_k_grid = torch.gather(k_grid, 2, + densecl_sim_ind.unsqueeze(1).expand( + -1, k_grid.size(1), -1)) # NxCxS^2 + densecl_sim_q = (q_grid * indexed_k_grid).sum(1) # NxS^2 + + # dense positive logits: NS^2X1 + l_pos_dense = densecl_sim_q.view(-1).unsqueeze(-1) + + q_grid = q_grid.permute(0, 2, 1) + q_grid = q_grid.reshape(-1, q_grid.size(2)) + # dense negative logits: NS^2xK + l_neg_dense = torch.einsum( + 'nc,ck->nk', [q_grid, self.queue2.clone().detach()]) + + loss_single = self.head.loss(l_pos, l_neg) + loss_dense = self.head.loss(l_pos_dense, l_neg_dense) + + losses = dict() + losses['loss_single'] = loss_single * (1 - self.loss_lambda) + losses['loss_dense'] = loss_dense * self.loss_lambda + + self._dequeue_and_enqueue(k) + self._dequeue_and_enqueue2(k2) + + return losses diff --git a/mmpretrain/models/selfsup/eva.py b/mmpretrain/models/selfsup/eva.py new file mode 100644 index 0000000000000000000000000000000000000000..30779bec491ae7c95b6540cdc7d71a875da572de --- /dev/null +++ b/mmpretrain/models/selfsup/eva.py @@ -0,0 +1,43 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Dict, List + +import torch + +from mmpretrain.registry import MODELS +from mmpretrain.structures import DataSample +from .base import BaseSelfSupervisor + + +@MODELS.register_module() +class EVA(BaseSelfSupervisor): + """EVA. + + Implementation of `EVA: Exploring the Limits of Masked Visual + Representation Learning at Scale `_. + """ + + def extract_feat(self, inputs: torch.Tensor): + return self.backbone(inputs, mask=None) + + def loss(self, inputs: torch.Tensor, data_samples: List[DataSample], + **kwargs) -> Dict[str, torch.Tensor]: + """The forward function in training. + + Args: + inputs (torch.Tensor): The input images. + data_samples (List[DataSample]): All elements required + during the forward function. 
+ + Returns: + Dict[str, torch.Tensor]: A dictionary of loss components. + """ + + clip_feature, _ = self.target_generator(inputs) + + latent, mask, ids_restore = self.backbone(inputs) + pred = self.neck(latent, ids_restore) + + clip_feature = clip_feature[:, 1:, :] + loss = self.head.loss(pred, clip_feature, mask) + losses = dict(loss=loss) + return losses diff --git a/mmpretrain/models/selfsup/mae.py b/mmpretrain/models/selfsup/mae.py new file mode 100644 index 0000000000000000000000000000000000000000..d631860711ce770695962c79e49b2e6e10033caf --- /dev/null +++ b/mmpretrain/models/selfsup/mae.py @@ -0,0 +1,236 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Dict, List, Optional, Sequence, Tuple, Union + +import torch + +from mmpretrain.models import VisionTransformer +from mmpretrain.registry import MODELS +from mmpretrain.structures import DataSample +from ..utils import build_2d_sincos_position_embedding +from .base import BaseSelfSupervisor + + +@MODELS.register_module() +class MAEViT(VisionTransformer): + """Vision Transformer for MAE pre-training. + + A PyTorch implement of: `An Image is Worth 16x16 Words: Transformers + for Image Recognition at Scale `_. + This module implements the patch masking in MAE and initialize the + position embedding with sine-cosine position embedding. + + Args: + arch (str | dict): Vision Transformer architecture + Default: 'b' + img_size (int | tuple): Input image size + patch_size (int | tuple): The patch size + out_indices (Sequence | int): Output from which stages. + Defaults to -1, means the last stage. + drop_rate (float): Probability of an element to be zeroed. + Defaults to 0. + drop_path_rate (float): stochastic depth rate. Defaults to 0. + norm_cfg (dict): Config dict for normalization layer. + Defaults to ``dict(type='LN')``. + final_norm (bool): Whether to add a additional layer to normalize + final feature map. Defaults to True. + out_type (str): The type of output features. Please choose from + + - ``"cls_token"``: The class token tensor with shape (B, C). + - ``"featmap"``: The feature map tensor from the patch tokens + with shape (B, C, H, W). + - ``"avg_featmap"``: The global averaged feature map tensor + with shape (B, C). + - ``"raw"``: The raw feature tensor includes patch tokens and + class tokens with shape (B, L, C). + + It only works without input mask. Defaults to ``"avg_featmap"``. + interpolate_mode (str): Select the interpolate mode for position + embeding vector resize. Defaults to "bicubic". + patch_cfg (dict): Configs of patch embeding. Defaults to an empty dict. + layer_cfgs (Sequence | dict): Configs of each transformer layer in + encoder. Defaults to an empty dict. + mask_ratio (bool): The ratio of total number of patches to be masked. + Defaults to 0.75. + init_cfg (Union[List[dict], dict], optional): Initialization config + dict. Defaults to None. 
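MAEViT freezes ``pos_embed`` and fills it with a fixed 2D sine-cosine table via ``build_2d_sincos_position_embedding``. A sketch of the standard construction (the real helper also prepends a zero row for the cls token when requested; the temperature of 10000 is the usual default and assumed here):

```python
import torch

def sincos_pos_embed_2d(grid_size: int, embed_dims: int,
                        temperature: float = 10000.) -> torch.Tensor:
    """Return a fixed (1, grid_size**2, embed_dims) position embedding."""
    assert embed_dims % 4 == 0, 'embed_dims must be divisible by 4'
    coords = torch.arange(grid_size, dtype=torch.float32)
    grid_w, grid_h = torch.meshgrid(coords, coords, indexing='ij')
    pos_dim = embed_dims // 4
    omega = 1. / temperature**(torch.arange(pos_dim, dtype=torch.float32) / pos_dim)
    out_w = grid_w.flatten()[:, None] * omega[None, :]   # (L, pos_dim)
    out_h = grid_h.flatten()[:, None] * omega[None, :]
    pos = torch.cat([torch.sin(out_w), torch.cos(out_w),
                     torch.sin(out_h), torch.cos(out_h)], dim=1)
    return pos.unsqueeze(0)

pe = sincos_pos_embed_2d(14, 768)   # 14 x 14 patches for a 224 input with 16 x 16 patches
```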
+ """ + + def __init__(self, + arch: Union[str, dict] = 'b', + img_size: int = 224, + patch_size: int = 16, + out_indices: Union[Sequence, int] = -1, + drop_rate: float = 0, + drop_path_rate: float = 0, + norm_cfg: dict = dict(type='LN', eps=1e-6), + final_norm: bool = True, + out_type: str = 'raw', + interpolate_mode: str = 'bicubic', + patch_cfg: dict = dict(), + layer_cfgs: dict = dict(), + mask_ratio: float = 0.75, + init_cfg: Optional[Union[List[dict], dict]] = None) -> None: + super().__init__( + arch=arch, + img_size=img_size, + patch_size=patch_size, + out_indices=out_indices, + drop_rate=drop_rate, + drop_path_rate=drop_path_rate, + norm_cfg=norm_cfg, + final_norm=final_norm, + out_type=out_type, + with_cls_token=True, + interpolate_mode=interpolate_mode, + patch_cfg=patch_cfg, + layer_cfgs=layer_cfgs, + init_cfg=init_cfg) + + # position embedding is not learnable during pretraining + self.pos_embed.requires_grad = False + self.mask_ratio = mask_ratio + self.num_patches = self.patch_resolution[0] * self.patch_resolution[1] + + def init_weights(self) -> None: + """Initialize position embedding, patch embedding and cls token.""" + super().init_weights() + pos_embed = build_2d_sincos_position_embedding( + int(self.num_patches**.5), + self.pos_embed.shape[-1], + cls_token=True) + self.pos_embed.data.copy_(pos_embed.float()) + + w = self.patch_embed.projection.weight.data + torch.nn.init.xavier_uniform_(w.view([w.shape[0], -1])) + + torch.nn.init.normal_(self.cls_token, std=.02) + + def random_masking( + self, + x: torch.Tensor, + mask_ratio: float = 0.75 + ) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: + """Generate the mask for MAE Pre-training. + + Args: + x (torch.Tensor): Image with data augmentation applied, which is + of shape B x L x C. + mask_ratio (float): The mask ratio of total patches. + Defaults to 0.75. + + Returns: + Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: masked image, mask + and the ids to restore original image. + + - ``x_masked`` (torch.Tensor): masked image. + - ``mask`` (torch.Tensor): mask used to mask image. + - ``ids_restore`` (torch.Tensor): ids to restore original image. + """ + N, L, D = x.shape # batch, length, dim + len_keep = int(L * (1 - mask_ratio)) + + noise = torch.rand(N, L, device=x.device) # noise in [0, 1] + + # sort noise for each sample + ids_shuffle = torch.argsort( + noise, dim=1) # ascend: small is keep, large is remove + ids_restore = torch.argsort(ids_shuffle, dim=1) + + # keep the first subset + ids_keep = ids_shuffle[:, :len_keep] + x_masked = torch.gather( + x, dim=1, index=ids_keep.unsqueeze(-1).repeat(1, 1, D)) + + # generate the binary mask: 0 is keep, 1 is remove + mask = torch.ones([N, L], device=x.device) + mask[:, :len_keep] = 0 + # unshuffle to get the binary mask + mask = torch.gather(mask, dim=1, index=ids_restore) + + return x_masked, mask, ids_restore + + def forward( + self, + x: torch.Tensor, + mask: Optional[bool] = True + ) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: + """Generate features for masked images. + + The function supports two kind of forward behaviors. If the ``mask`` is + ``True``, the function will generate mask to masking some patches + randomly and get the hidden features for visible patches, which means + the function will be executed as masked imagemodeling pre-training; + if the ``mask`` is ``None`` or ``False``, the forward function will + call ``super().forward()``, which extract features from images without + mask. 
+ + + Args: + x (torch.Tensor): Input images, which is of shape B x C x H x W. + mask (bool, optional): To indicate whether the forward function + generating ``mask`` or not. + + Returns: + Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: Hidden features, + mask and the ids to restore original image. + + - ``x`` (torch.Tensor): hidden features, which is of shape + B x (L * mask_ratio) x C. + - ``mask`` (torch.Tensor): mask used to mask image. + - ``ids_restore`` (torch.Tensor): ids to restore original image. + """ + if mask is None or False: + return super().forward(x) + + else: + B = x.shape[0] + x = self.patch_embed(x)[0] + # add pos embed w/o cls token + x = x + self.pos_embed[:, 1:, :] + + # masking: length -> length * mask_ratio + x, mask, ids_restore = self.random_masking(x, self.mask_ratio) + + # append cls token + cls_token = self.cls_token + self.pos_embed[:, :1, :] + cls_tokens = cls_token.expand(B, -1, -1) + x = torch.cat((cls_tokens, x), dim=1) + + for _, layer in enumerate(self.layers): + x = layer(x) + # Use final norm + x = self.norm1(x) + + return (x, mask, ids_restore) + + +@MODELS.register_module() +class MAE(BaseSelfSupervisor): + """MAE. + + Implementation of `Masked Autoencoders Are Scalable Vision Learners + `_. + """ + + def extract_feat(self, inputs: torch.Tensor): + return self.backbone(inputs, mask=None) + + def loss(self, inputs: torch.Tensor, data_samples: List[DataSample], + **kwargs) -> Dict[str, torch.Tensor]: + """The forward function in training. + + Args: + inputs (torch.Tensor): The input images. + data_samples (List[DataSample]): All elements required + during the forward function. + + Returns: + Dict[str, torch.Tensor]: A dictionary of loss components. + """ + # ids_restore: the same as that in original repo, which is used + # to recover the original order of tokens in decoder. + latent, mask, ids_restore = self.backbone(inputs) + pred = self.neck(latent, ids_restore) + loss = self.head.loss(pred, inputs, mask) + losses = dict(loss=loss) + return losses diff --git a/mmpretrain/models/selfsup/maskfeat.py b/mmpretrain/models/selfsup/maskfeat.py new file mode 100644 index 0000000000000000000000000000000000000000..fd9f0b296c44cdffe7f2a40caae04de0104abd60 --- /dev/null +++ b/mmpretrain/models/selfsup/maskfeat.py @@ -0,0 +1,336 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import math +from typing import Dict, List, Optional, Sequence, Union + +import cv2 +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmengine.model import BaseModule + +from mmpretrain.models import VisionTransformer +from mmpretrain.registry import MODELS +from mmpretrain.structures import DataSample +from .base import BaseSelfSupervisor + + +@MODELS.register_module() +class HOGGenerator(BaseModule): + """Generate HOG feature for images. + + This module is used in MaskFeat to generate HOG feature. The code is + modified from file `slowfast/models/operators.py + `_. + Here is the link of `HOG wikipedia + `_. + + Args: + nbins (int): Number of bin. Defaults to 9. + pool (float): Number of cell. Defaults to 8. + gaussian_window (int): Size of gaussian kernel. Defaults to 16. 
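        Example (an illustrative sketch; the output size follows from the
        defaults above -- 9 bins x 3 colour channels x a 2x2 cell unfold --
        for a 224x224 input):

            >>> import torch
            >>> hog_gen = HOGGenerator(nbins=9, pool=8, gaussian_window=16)
            >>> feats = hog_gen(torch.rand(2, 3, 224, 224))
            >>> # feats: (2, 196, 108) -- one 108-d HOG descriptor per
            >>> #   16x16 image patch (the 14x14 token grid of ViT-base)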
+ """ + + def __init__(self, + nbins: int = 9, + pool: int = 8, + gaussian_window: int = 16) -> None: + super().__init__() + self.nbins = nbins + self.pool = pool + self.pi = math.pi + weight_x = torch.FloatTensor([[1, 0, -1], [2, 0, -2], [1, 0, -1]]) + weight_x = weight_x.view(1, 1, 3, 3).repeat(3, 1, 1, 1).contiguous() + weight_y = weight_x.transpose(2, 3).contiguous() + self.register_buffer('weight_x', weight_x) + self.register_buffer('weight_y', weight_y) + + self.gaussian_window = gaussian_window + if gaussian_window: + gaussian_kernel = self.get_gaussian_kernel(gaussian_window, + gaussian_window // 2) + self.register_buffer('gaussian_kernel', gaussian_kernel) + + def get_gaussian_kernel(self, kernlen: int, std: int) -> torch.Tensor: + """Returns a 2D Gaussian kernel array.""" + + def _gaussian_fn(kernlen: int, std: int) -> torch.Tensor: + n = torch.arange(0, kernlen).float() + n -= n.mean() + n /= std + w = torch.exp(-0.5 * n**2) + return w + + kernel_1d = _gaussian_fn(kernlen, std) + kernel_2d = kernel_1d[:, None] * kernel_1d[None, :] + return kernel_2d / kernel_2d.sum() + + def _reshape(self, hog_feat: torch.Tensor) -> torch.Tensor: + """Reshape HOG Features for output.""" + hog_feat = hog_feat.flatten(1, 2) + self.unfold_size = hog_feat.shape[-1] // 14 + hog_feat = hog_feat.permute(0, 2, 3, 1) + hog_feat = hog_feat.unfold(1, self.unfold_size, + self.unfold_size).unfold( + 2, self.unfold_size, self.unfold_size) + hog_feat = hog_feat.flatten(1, 2).flatten(2) + return hog_feat + + @torch.no_grad() + def forward(self, x: torch.Tensor) -> torch.Tensor: + """Generate hog feature for each batch images. + + Args: + x (torch.Tensor): Input images of shape (N, 3, H, W). + + Returns: + torch.Tensor: Hog features. + """ + # input is RGB image with shape [B 3 H W] + self.h, self.w = x.size(-2), x.size(-1) + x = F.pad(x, pad=(1, 1, 1, 1), mode='reflect') + gx_rgb = F.conv2d( + x, self.weight_x, bias=None, stride=1, padding=0, groups=3) + gy_rgb = F.conv2d( + x, self.weight_y, bias=None, stride=1, padding=0, groups=3) + norm_rgb = torch.stack([gx_rgb, gy_rgb], dim=-1).norm(dim=-1) + phase = torch.atan2(gx_rgb, gy_rgb) + phase = phase / self.pi * self.nbins # [-9, 9] + + b, c, h, w = norm_rgb.shape + out = torch.zeros((b, c, self.nbins, h, w), + dtype=torch.float, + device=x.device) + phase = phase.view(b, c, 1, h, w) + norm_rgb = norm_rgb.view(b, c, 1, h, w) + if self.gaussian_window: + if h != self.gaussian_window: + assert h % self.gaussian_window == 0, 'h {} gw {}'.format( + h, self.gaussian_window) + repeat_rate = h // self.gaussian_window + temp_gaussian_kernel = self.gaussian_kernel.repeat( + [repeat_rate, repeat_rate]) + else: + temp_gaussian_kernel = self.gaussian_kernel + norm_rgb *= temp_gaussian_kernel + + out.scatter_add_(2, phase.floor().long() % self.nbins, norm_rgb) + + out = out.unfold(3, self.pool, self.pool) + out = out.unfold(4, self.pool, self.pool) + out = out.sum(dim=[-1, -2]) + + self.out = F.normalize(out, p=2, dim=2) + + return self._reshape(self.out) + + def generate_hog_image(self, hog_out: torch.Tensor) -> np.ndarray: + """Generate HOG image according to HOG features.""" + assert hog_out.size(0) == 1 and hog_out.size(1) == 3, \ + 'Check the input batch size and the channcel number, only support'\ + '"batch_size = 1".' 
+ hog_image = np.zeros([self.h, self.w]) + cell_gradient = np.array(hog_out.mean(dim=1).squeeze().detach().cpu()) + cell_width = self.pool / 2 + max_mag = np.array(cell_gradient).max() + angle_gap = 360 / self.nbins + + for x in range(cell_gradient.shape[1]): + for y in range(cell_gradient.shape[2]): + cell_grad = cell_gradient[:, x, y] + cell_grad /= max_mag + angle = 0 + for magnitude in cell_grad: + angle_radian = math.radians(angle) + x1 = int(x * self.pool + + magnitude * cell_width * math.cos(angle_radian)) + y1 = int(y * self.pool + + magnitude * cell_width * math.sin(angle_radian)) + x2 = int(x * self.pool - + magnitude * cell_width * math.cos(angle_radian)) + y2 = int(y * self.pool - + magnitude * cell_width * math.sin(angle_radian)) + magnitude = 0 if magnitude < 0 else magnitude + cv2.line(hog_image, (y1, x1), (y2, x2), + int(255 * math.sqrt(magnitude))) + angle += angle_gap + return hog_image + + +@MODELS.register_module() +class MaskFeatViT(VisionTransformer): + """Vision Transformer for MaskFeat pre-training. + + A PyTorch implement of: `Masked Feature Prediction for Self-Supervised + Visual Pre-Training `_. + + Args: + arch (str | dict): Vision Transformer architecture + Default: 'b' + img_size (int | tuple): Input image size + patch_size (int | tuple): The patch size + out_indices (Sequence | int): Output from which stages. + Defaults to -1, means the last stage. + drop_rate (float): Probability of an element to be zeroed. + Defaults to 0. + drop_path_rate (float): stochastic depth rate. Defaults to 0. + norm_cfg (dict): Config dict for normalization layer. + Defaults to ``dict(type='LN')``. + final_norm (bool): Whether to add a additional layer to normalize + final feature map. Defaults to True. + out_type (str): The type of output features. Please choose from + + - ``"cls_token"``: The class token tensor with shape (B, C). + - ``"featmap"``: The feature map tensor from the patch tokens + with shape (B, C, H, W). + - ``"avg_featmap"``: The global averaged feature map tensor + with shape (B, C). + - ``"raw"``: The raw feature tensor includes patch tokens and + class tokens with shape (B, L, C). + + It only works without input mask. Defaults to ``"avg_featmap"``. + interpolate_mode (str): Select the interpolate mode for position + embeding vector resize. Defaults to "bicubic". + patch_cfg (dict): Configs of patch embeding. Defaults to an empty dict. + layer_cfgs (Sequence | dict): Configs of each transformer layer in + encoder. Defaults to an empty dict. + init_cfg (dict, optional): Initialization config dict. + Defaults to None. 
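        Example (a minimal illustrative sketch, assuming the default
        ViT-base settings above and a 224x224 input):

            >>> import torch
            >>> vit = MaskFeatViT(arch='b', patch_size=16)
            >>> imgs = torch.rand(2, 3, 224, 224)
            >>> mask = torch.rand(2, 196) > 0.6     # boolean patch mask
            >>> feats = vit(imgs, mask)
            >>> # feats: (2, 197, 768) -- cls token plus all 196 patch
            >>> #   tokens; masked positions were replaced by the
            >>> #   learnable mask token before entering the encoder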
+ """ + + def __init__(self, + arch: Union[str, dict] = 'b', + img_size: int = 224, + patch_size: int = 16, + out_indices: Union[Sequence, int] = -1, + drop_rate: float = 0, + drop_path_rate: float = 0, + norm_cfg: dict = dict(type='LN', eps=1e-6), + final_norm: bool = True, + out_type: str = 'raw', + interpolate_mode: str = 'bicubic', + patch_cfg: dict = dict(), + layer_cfgs: dict = dict(), + init_cfg: Optional[Union[List[dict], dict]] = None) -> None: + super().__init__( + arch=arch, + img_size=img_size, + patch_size=patch_size, + out_indices=out_indices, + drop_rate=drop_rate, + drop_path_rate=drop_path_rate, + norm_cfg=norm_cfg, + final_norm=final_norm, + out_type=out_type, + with_cls_token=True, + interpolate_mode=interpolate_mode, + patch_cfg=patch_cfg, + layer_cfgs=layer_cfgs, + init_cfg=init_cfg) + + self.mask_token = nn.parameter.Parameter( + torch.zeros(1, 1, self.embed_dims), requires_grad=True) + self.num_patches = self.patch_resolution[0] * self.patch_resolution[1] + + def init_weights(self) -> None: + """Initialize position embedding, mask token and cls token.""" + super().init_weights() + if not (isinstance(self.init_cfg, dict) + and self.init_cfg['type'] == 'Pretrained'): + + nn.init.trunc_normal_(self.cls_token, std=.02) + nn.init.trunc_normal_(self.mask_token, std=.02) + nn.init.trunc_normal_(self.pos_embed, std=.02) + + self.apply(self._init_weights) + + def _init_weights(self, m: torch.nn.Module) -> None: + if isinstance(m, (nn.Linear, nn.Conv2d, nn.Conv3d)): + nn.init.trunc_normal_(m.weight, std=0.02) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.LayerNorm): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + + def forward(self, x: torch.Tensor, + mask: Optional[torch.Tensor]) -> torch.Tensor: + """Generate features for masked images. + + The function supports two kind of forward behaviors. If the ``mask`` is + not ``None``, the forward function will be executed as masked image + modeling pre-training; if the ``mask`` is ``None``, the forward + function will call ``super().forward()``, which extract features from + images without mask. + + Args: + x (torch.Tensor): Input images. + mask (torch.Tensor, optional): Input masks. + + Returns: + torch.Tensor: Features with cls_tokens. + """ + if mask is None: + return super().forward(x) + + else: + B = x.shape[0] + x = self.patch_embed(x)[0] + + # masking: length -> length * mask_ratio + B, L, _ = x.shape + mask_tokens = self.mask_token.expand(B, L, -1) + mask = mask.unsqueeze(-1) + x = x * (1 - mask.int()) + mask_tokens * mask + + # append cls token + cls_tokens = self.cls_token.expand(B, -1, -1) + x = torch.cat((cls_tokens, x), dim=1) + x = x + self.pos_embed + x = self.drop_after_pos(x) + + for i, layer in enumerate(self.layers): + x = layer(x) + + if i == len(self.layers) - 1 and self.final_norm: + x = self.norm1(x) + + return x + + +@MODELS.register_module() +class MaskFeat(BaseSelfSupervisor): + """MaskFeat. + + Implementation of `Masked Feature Prediction for Self-Supervised Visual + Pre-Training `_. + """ + + def extract_feat(self, inputs: torch.Tensor): + return self.backbone(inputs, mask=None) + + def loss(self, inputs: torch.Tensor, data_samples: List[DataSample], + **kwargs) -> Dict[str, torch.Tensor]: + """The forward function in training. + + Args: + inputs (torch.Tensor): The input images. + data_samples (List[DataSample]): All elements required + during the forward function. 
+ + Returns: + Dict[str, torch.Tensor]: A dictionary of loss components. + """ + mask = torch.stack([data_sample.mask for data_sample in data_samples]) + mask = mask.flatten(1).bool() + + latent = self.backbone(inputs, mask) + B, L, C = latent.shape + pred = self.neck((latent.view(B * L, C), )) + pred = pred[0].view(B, L, -1) + hog = self.target_generator(inputs) + + # remove cls_token before compute loss + loss = self.head.loss(pred[:, 1:], hog, mask) + losses = dict(loss=loss) + return losses diff --git a/mmpretrain/models/selfsup/milan.py b/mmpretrain/models/selfsup/milan.py new file mode 100644 index 0000000000000000000000000000000000000000..fdf86737af3499e6f6309aa5c5ddadef00f63740 --- /dev/null +++ b/mmpretrain/models/selfsup/milan.py @@ -0,0 +1,202 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Dict, List, Optional, Tuple + +import torch +import torch.nn as nn +from mmengine.runner.checkpoint import _load_checkpoint + +from mmpretrain.registry import MODELS +from mmpretrain.structures import DataSample +from ..utils import build_clip_model +from .base import BaseSelfSupervisor +from .mae import MAEViT + + +@MODELS.register_module() +class CLIPGenerator(nn.Module): + """Get the features and attention from the last layer of CLIP. + + This module is used to generate target features in masked image modeling. + + Args: + tokenizer_path (str): The path of the checkpoint of CLIP. + """ + + def __init__(self, tokenizer_path: str) -> None: + super().__init__() + self.tokenizer_path = tokenizer_path + self.tokenizer = build_clip_model( + _load_checkpoint(self.tokenizer_path), False) + + @torch.no_grad() + def forward(self, x: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]: + """Get the features and attention from the last layer of CLIP. + + Args: + x (torch.Tensor): The input image, which is of shape (N, 3, H, W). + + Returns: + Tuple[torch.Tensor, torch.Tensor]: The features and attention from + the last layer of CLIP, which are of shape (N, L, C) and (N, L, L), + respectively. + """ + # use the visual branch of CLIP to get the features + assert self.tokenizer is not None, 'Please check whether the ' \ + '`self.tokenizer` is initialized correctly.' + + clip_features = self.tokenizer.encode_image(x) + return clip_features + + +@MODELS.register_module() +class MILANViT(MAEViT): + """Vision Transformer for MILAN pre-training. + + Implementation of the encoder for `MILAN: Masked Image Pretraining on + Language Assisted Representation `_. + + This module inherits from MAEViT and only overrides the forward function + and replace random masking with attention masking. + """ + + def attention_masking( + self, x: torch.Tensor, mask_ratio: float, importance: torch.Tensor + ) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]: + """Generate attention mask for MILAN. + + This is what is different from MAEViT, which uses random masking. + Attention masking generates attention mask for MILAN, according to + importance. The higher the importance, the more likely the patch is + kept. + + Args: + x (torch.Tensor): Input images, which is of shape B x L x C. + mask_ratio (float): The ratio of patches to be masked. + importance (torch.Tensor): Importance of each patch, which is of + shape B x L. 
+ + Returns: + Tuple[torch.Tensor, ...]: + + - ``x_masked``: masked image + - ``ids_restore``: the ids to restore original image + - ``ids_keep``: ids of the kept patches + - ``ids_dump``: ids of the removed patches + """ + N, L, D = x.shape # batch, length, dim + len_keep = int(L * (1 - mask_ratio)) + + noise = importance.to(x.device) # large is keep, small is remove + + # sort noise for each sample + ids_shuffle = torch.multinomial(noise, L, replacement=False) + ids_restore = torch.argsort(ids_shuffle, dim=1) + + # keep the first subset + ids_keep = ids_shuffle[:, :len_keep] + ids_dump = ids_shuffle[:, len_keep:] + x_masked = torch.gather( + x, dim=1, index=ids_keep.unsqueeze(-1).repeat(1, 1, D)) + + # generate the binary mask: 0 is keep, 1 is remove + mask = torch.ones([N, L], device=x.device) + mask[:, :len_keep] = 0 + # unshuffle to get the binary mask + mask = torch.gather(mask, dim=1, index=ids_restore) + + return x_masked, ids_restore, ids_keep, ids_dump + + def forward( + self, + x: torch.Tensor, + importance: Optional[torch.Tensor], + ) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]: + """Generate features for masked images. + + The function supports two kind of forward behaviors. If the + ``importance`` is ``None``, the function generates mask and masks some + patches randomly and get the hidden features for visible patches. The + mask is generated by importance. The higher the importance, the more + likely the patch is kept. The importance is calculated by CLIP. + The higher the CLIP score, the more likely the patch is kept. The CLIP + score is calculated by cross attention between the class token and all + other tokens from the last layer. + If the ``importance`` is ``torch.Tensor``, the forward function will + call ``super().forward()``, which extract features from images without + mask. + + Args: + x (torch.Tensor): Input images, which is of shape B x C x H x W. + importance (torch.Tensor, optional): Importance of each patch, + which is of shape B x L. + + Returns: + Tuple[torch.Tensor, ...]: masked image, the ids to restore original + image, ids of the kept patches, ids of the removed patches. + + - ``x`` (torch.Tensor): hidden features, which is of shape + B x (L * mask_ratio) x C. + - ``ids_restore`` (torch.Tensor): ids to restore original image. + - ``ids_keep`` (torch.Tensor): ids of the kept patches. + - ``ids_dump`` (torch.Tensor): ids of the removed patches. + """ + if importance is None: + return super(MAEViT, self).forward(x) + + else: + B = x.shape[0] + x = self.patch_embed(x)[0] + # add pos embed w/o cls token + x = x + self.pos_embed[:, 1:, :] + + # masking: length -> length * mask_ratio + x, ids_restore, ids_keep, ids_dump = self.attention_masking( + x, self.mask_ratio, importance) + + # append cls token + cls_token = self.cls_token + self.pos_embed[:, :1, :] + cls_tokens = cls_token.expand(B, -1, -1) + x = torch.cat((cls_tokens, x), dim=1) + + for _, layer in enumerate(self.layers): + x = layer(x) + # Use final norm + x = self.norm1(x) + + return x, ids_restore, ids_keep, ids_dump + + +@MODELS.register_module() +class MILAN(BaseSelfSupervisor): + """MILAN. + + Implementation of `MILAN: Masked Image Pretraining on Language Assisted + Representation `_. + """ + + def extract_feat(self, inputs: torch.Tensor): + return self.backbone(inputs, importance=None) + + def loss(self, inputs: torch.Tensor, data_samples: List[DataSample], + **kwargs) -> Dict[str, torch.Tensor]: + """The forward function in training. 
+ + Args: + inputs (torch.Tensor): The input images. + data_samples (List[DataSample]): All elements required + during the forward function. + + Returns: + Dict[str, torch.Tensor]: A dictionary of loss components. + """ + # ids_restore: the same as that in original repo, which is used + # to recover the original order of tokens in decoder. + clip_feature, importance = self.target_generator(inputs) + importance = importance[:, 0, 1:] + latent, ids_restore, ids_keep, ids_dump = self.backbone( + inputs, importance) + pred = self.neck(latent, ids_restore, ids_keep, ids_dump) + + loss = self.head.loss(pred, clip_feature) + losses = dict(loss=loss) + return losses diff --git a/mmpretrain/models/selfsup/mixmim.py b/mmpretrain/models/selfsup/mixmim.py new file mode 100644 index 0000000000000000000000000000000000000000..b202f836f64358369276a9b85795fb6eec769fb7 --- /dev/null +++ b/mmpretrain/models/selfsup/mixmim.py @@ -0,0 +1,263 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import random +from typing import Dict, List, Optional, Tuple, Union + +import torch +from torch import nn +from torch.nn import functional as F + +from mmpretrain.models.backbones import MixMIMTransformer +from mmpretrain.registry import MODELS +from mmpretrain.structures import DataSample +from ..utils import build_2d_sincos_position_embedding +from .base import BaseSelfSupervisor + + +@MODELS.register_module() +class MixMIMPretrainTransformer(MixMIMTransformer): + """MixMIM backbone for MixMIM pre-training. + + A PyTorch implement of : ` MixMIM: Mixed and Masked Image + Modeling for Efficient Visual Representation Learning + `_ + + Args: + arch (str | dict): MixMIM architecture. If use string, + choose from 'base','large' and 'huge'. + If use dict, it should have below keys: + + - **embed_dims** (int): The dimensions of embedding. + - **depths** (int): The number of transformer encoder layers. + - **num_heads** (int): The number of heads in attention modules. + + Defaults to 'base'. + mlp_ratio (int): The mlp ratio in FFN. Defaults to 4. + img_size (int | tuple): The expected input image shape. Because we + support dynamic input shape, just set the argument to mlp_ratio + the most common input image shape. Defaults to 224. + patch_size (int | tuple): The patch size in patch embedding. + Defaults to 16. + in_channels (int): The num of input channels. Defaults to 3. + window_size (list): The height and width of the window. + qkv_bias (bool): Whether to add bias for qkv in attention modules. + Defaults to True. + patch_cfg (dict): Extra config dict for patch embedding. + Defaults to an empty dict. + norm_cfg (dict): Config dict for normalization layer. + Defaults to ``dict(type='LN')``. + drop_rate (float): Probability of an element to be zeroed. + Defaults to 0. + drop_path_rate (float): Stochastic depth rate. Defaults to 0. + attn_drop_rate (float): Attention drop rate. Defaults to 0. + use_checkpoint (bool): Whether use the checkpoint to reduce GPU memory + cost. Defaults to False. + mask_ratio (bool): The base ratio of total number of patches to be + masked. Defaults to 0.5. + range_mask_ratio (float): The range of mask ratio. + Defaults to 0. + init_cfg (dict, optional): Initialization config dict. + Defaults to None. 
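        Example (an illustrative sketch, assuming a 224x224 input so the
        last stage works on a 7x7 token grid):

            >>> import torch
            >>> model = MixMIMPretrainTransformer(arch='base', mask_ratio=0.5)
            >>> imgs = torch.rand(2, 3, 224, 224)
            >>> tokens, mask_s4 = model(imgs)
            >>> # tokens: (2, 49, C), where C is the channel width of the
            >>> #   last stage (8x the stem embed_dims of the chosen arch)
            >>> # mask_s4: (1, 49, 1) -- a single mask shared by the batch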
+ """ + + def __init__(self, + arch: Union[str, dict] = 'base', + mlp_ratio: float = 4, + img_size: int = 224, + patch_size: int = 4, + in_channels: int = 3, + window_size: List = [14, 14, 14, 7], + qkv_bias: bool = True, + patch_cfg: dict = dict(), + norm_cfg: dict = dict(type='LN'), + drop_rate: float = 0.0, + drop_path_rate: float = 0.0, + attn_drop_rate: float = 0.0, + use_checkpoint: bool = False, + mask_ratio: float = 0.5, + range_mask_ratio: float = 0.0, + init_cfg: Optional[dict] = None) -> None: + + super().__init__( + arch=arch, + mlp_ratio=mlp_ratio, + img_size=img_size, + patch_size=patch_size, + in_channels=in_channels, + window_size=window_size, + qkv_bias=qkv_bias, + patch_cfg=patch_cfg, + norm_cfg=norm_cfg, + drop_rate=drop_rate, + drop_path_rate=drop_path_rate, + attn_drop_rate=attn_drop_rate, + use_checkpoint=use_checkpoint, + init_cfg=init_cfg) + + self.mask_ratio = mask_ratio + self.range_mask_ratio = range_mask_ratio + + def init_weights(self): + """Initialize position embedding, patch embedding.""" + super(MixMIMTransformer, self).init_weights() + + pos_embed = build_2d_sincos_position_embedding( + int(self.num_patches**.5), + self.absolute_pos_embed.shape[-1], + cls_token=False) + self.absolute_pos_embed.data.copy_(pos_embed.float()) + + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + # we use xavier_uniform following official JAX ViT: + torch.nn.init.xavier_uniform_(m.weight) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.LayerNorm): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + + def random_masking(self, + x: torch.Tensor, + mask_ratio: float = 0.5) -> Tuple[torch.Tensor]: + """Generate the mask for MixMIM Pretraining. + + Args: + x (torch.Tensor): Image with data augmentation applied, which is + of shape B x L x C. + mask_ratio (float): The mask ratio of total patches. + Defaults to 0.5. + + Returns: + Tuple[torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor]: + - mask_s1 (torch.Tensor): mask with stride of + self.encoder_stride // 8. + - mask_s2 (torch.Tensor): mask with stride of + self.encoder_stride // 4. + - mask_s3 (torch.Tensor): mask with stride of + self.encoder_stride // 2. + - mask (torch.Tensor): mask with stride of + self.encoder_stride. 
+ """ + + B, C, H, W = x.shape + out_H = H // self.encoder_stride + out_W = W // self.encoder_stride + s3_H, s3_W = out_H * 2, out_W * 2 + s2_H, s2_W = out_H * 4, out_W * 4 + s1_H, s1_W = out_H * 8, out_W * 8 + + seq_l = out_H * out_W + # use a shared mask for a batch images + mask = torch.zeros([1, 1, seq_l], device=x.device) + + mask_ratio = mask_ratio + random.uniform(0.0, self.range_mask_ratio) + noise = torch.rand(1, 1, seq_l, device=x.device) # noise in [0, 1] + # ascend: small is keep, large is removed + mask_idx = torch.argsort(noise, dim=2)[:, :, :int(seq_l * mask_ratio)] + mask.scatter_(2, mask_idx, 1) + mask = mask.reshape(1, 1, out_H, out_W) + mask_s1 = F.interpolate(mask, size=(s1_H, s1_W), mode='nearest') + mask_s2 = F.interpolate(mask, size=(s2_H, s2_W), mode='nearest') + mask_s3 = F.interpolate(mask, size=(s3_H, s3_W), mode='nearest') + + mask = mask.reshape(1, out_H * out_W, 1).contiguous() + mask_s1 = mask_s1.reshape(1, s1_H * s1_W, 1).contiguous() + mask_s2 = mask_s2.reshape(1, s2_H * s2_W, 1).contiguous() + mask_s3 = mask_s3.reshape(1, s3_H * s3_W, 1).contiguous() + + return mask_s1, mask_s2, mask_s3, mask + + def forward(self, + x: torch.Tensor, + mask: Optional[bool] = True) -> Tuple[torch.Tensor]: + """Generate features for masked images. + + This function generates mask and masks some patches randomly and get + the hidden features for visible patches. + + Args: + x (torch.Tensor): Input images, which is of shape B x C x H x W. + mask (bool, optional): To indicate whether the forward containing + ``mask`` or not. + + Returns: + Tuple[torch.Tensor, torch.Tensor]: + - x (torch.Tensor): hidden features, which is of shape + B x L x C. + - mask_s4 (torch.Tensor): the mask tensor for the last layer. + """ + if mask is None or False: + return super().forward(x) + + else: + mask_s1, mask_s2, mask_s3, mask_s4 = self.random_masking( + x, self.mask_ratio) + + x, _ = self.patch_embed(x) + + x = x * (1. - mask_s1) + x.flip(0) * mask_s1 + x = x + self.absolute_pos_embed + x = self.drop_after_pos(x) + + for idx, layer in enumerate(self.layers): + if idx == 0: + x = layer(x, attn_mask=mask_s1) + elif idx == 1: + x = layer(x, attn_mask=mask_s2) + elif idx == 2: + x = layer(x, attn_mask=mask_s3) + elif idx == 3: + x = layer(x, attn_mask=mask_s4) + + x = self.norm(x) + + return x, mask_s4 + + +@MODELS.register_module() +class MixMIM(BaseSelfSupervisor): + """MixMIM. + + Implementation of `MixMIM: Mixed and Masked Image Modeling for Efficient + Visual Representation Learning. `_. + """ + + def __init__(self, + backbone: dict, + neck: Optional[dict] = None, + head: Optional[dict] = None, + pretrained: Optional[str] = None, + data_preprocessor: Optional[Union[dict, nn.Module]] = None, + init_cfg: Optional[dict] = None): + + head.update(dict(patch_size=neck['encoder_stride'])) + super().__init__( + backbone=backbone, + neck=neck, + head=head, + pretrained=pretrained, + data_preprocessor=data_preprocessor, + init_cfg=init_cfg) + + def extract_feat(self, inputs: torch.Tensor): + return self.backbone(inputs, mask=None) + + def loss(self, inputs: torch.Tensor, data_samples: List[DataSample], + **kwargs) -> Dict[str, torch.Tensor]: + """The forward function in training. + + Args: + inputs (torch.Tensor): The input images. + data_samples (List[DataSample]): All elements required + during the forward function. + + Returns: + Dict[str, torch.Tensor]: A dictionary of loss components. 
+ """ + latent, mask = self.backbone(inputs) + x_rec = self.neck(latent, mask) + loss = self.head.loss(x_rec, inputs, mask) + losses = dict(loss=loss) + return losses diff --git a/mmpretrain/models/selfsup/moco.py b/mmpretrain/models/selfsup/moco.py new file mode 100644 index 0000000000000000000000000000000000000000..7ff4cf8fd6d0d6bca4724965d3b6d09543317748 --- /dev/null +++ b/mmpretrain/models/selfsup/moco.py @@ -0,0 +1,137 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Dict, List, Optional, Union + +import torch +import torch.nn as nn +from mmengine.dist import all_gather +from mmengine.model import ExponentialMovingAverage + +from mmpretrain.registry import MODELS +from mmpretrain.structures import DataSample +from ..utils import batch_shuffle_ddp, batch_unshuffle_ddp +from .base import BaseSelfSupervisor + + +@MODELS.register_module() +class MoCo(BaseSelfSupervisor): + """MoCo. + + Implementation of `Momentum Contrast for Unsupervised Visual + Representation Learning `_. + Part of the code is borrowed from: + ``_. + + Args: + backbone (dict): Config dict for module of backbone. + neck (dict): Config dict for module of deep features to compact feature + vectors. + head (dict): Config dict for module of head functions. + queue_len (int): Number of negative keys maintained in the + queue. Defaults to 65536. + feat_dim (int): Dimension of compact feature vectors. + Defaults to 128. + momentum (float): Momentum coefficient for the momentum-updated + encoder. Defaults to 0.001. + pretrained (str, optional): The pretrained checkpoint path, support + local path and remote path. Defaults to None. + data_preprocessor (dict, optional): The config for preprocessing + input data. If None or no specified type, it will use + "SelfSupDataPreprocessor" as type. + See :class:`SelfSupDataPreprocessor` for more details. + Defaults to None. + init_cfg (Union[List[dict], dict], optional): Config dict for weight + initialization. Defaults to None. + """ + + def __init__(self, + backbone: dict, + neck: dict, + head: dict, + queue_len: int = 65536, + feat_dim: int = 128, + momentum: float = 0.001, + pretrained: Optional[str] = None, + data_preprocessor: Optional[dict] = None, + init_cfg: Optional[Union[List[dict], dict]] = None) -> None: + super().__init__( + backbone=backbone, + neck=neck, + head=head, + pretrained=pretrained, + data_preprocessor=data_preprocessor, + init_cfg=init_cfg) + + # create momentum model + self.encoder_k = ExponentialMovingAverage( + nn.Sequential(self.backbone, self.neck), momentum) + + # create the queue + self.queue_len = queue_len + self.register_buffer('queue', torch.randn(feat_dim, queue_len)) + self.queue = nn.functional.normalize(self.queue, dim=0) + self.register_buffer('queue_ptr', torch.zeros(1, dtype=torch.long)) + + @torch.no_grad() + def _dequeue_and_enqueue(self, keys: torch.Tensor) -> None: + """Update queue.""" + # gather keys before updating queue + keys = torch.cat(all_gather(keys), dim=0) + + batch_size = keys.shape[0] + + ptr = int(self.queue_ptr) + assert self.queue_len % batch_size == 0 # for simplicity + + # replace the keys at ptr (dequeue and enqueue) + self.queue[:, ptr:ptr + batch_size] = keys.transpose(0, 1) + ptr = (ptr + batch_size) % self.queue_len # move pointer + + self.queue_ptr[0] = ptr + + def loss(self, inputs: List[torch.Tensor], data_samples: List[DataSample], + **kwargs) -> Dict[str, torch.Tensor]: + """The forward function in training. + + Args: + inputs (List[torch.Tensor]): The input images. 
+ data_samples (List[DataSample]): All elements required + during the forward function. + + Returns: + Dict[str, torch.Tensor]: A dictionary of loss components. + """ + assert isinstance(inputs, list) + im_q = inputs[0] + im_k = inputs[1] + # compute query features from encoder_q + q = self.neck(self.backbone(im_q))[0] # queries: NxC + q = nn.functional.normalize(q, dim=1) + + # compute key features + with torch.no_grad(): # no gradient to keys + # update the key encoder + self.encoder_k.update_parameters( + nn.Sequential(self.backbone, self.neck)) + + # shuffle for making use of BN + im_k, idx_unshuffle = batch_shuffle_ddp(im_k) + + k = self.encoder_k(im_k)[0] # keys: NxC + k = nn.functional.normalize(k, dim=1) + + # undo shuffle + k = batch_unshuffle_ddp(k, idx_unshuffle) + + # compute logits + # Einstein sum is more intuitive + # positive logits: Nx1 + l_pos = torch.einsum('nc,nc->n', [q, k]).unsqueeze(-1) + # negative logits: NxK + l_neg = torch.einsum('nc,ck->nk', [q, self.queue.clone().detach()]) + + loss = self.head.loss(l_pos, l_neg) + # update the queue + self._dequeue_and_enqueue(k) + + losses = dict(loss=loss) + return losses diff --git a/mmpretrain/models/selfsup/mocov3.py b/mmpretrain/models/selfsup/mocov3.py new file mode 100644 index 0000000000000000000000000000000000000000..61b803387fdc129bc29056ee369fa3ad36c13e07 --- /dev/null +++ b/mmpretrain/models/selfsup/mocov3.py @@ -0,0 +1,215 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import math +from functools import reduce +from operator import mul +from typing import Dict, List, Optional, Union + +import torch +import torch.nn as nn +from torch.nn.modules.batchnorm import _BatchNorm + +from mmpretrain.models.backbones import VisionTransformer +from mmpretrain.models.utils import (build_2d_sincos_position_embedding, + to_2tuple) +from mmpretrain.registry import MODELS +from mmpretrain.structures import DataSample +from ..utils import CosineEMA +from .base import BaseSelfSupervisor + + +@MODELS.register_module() +class MoCoV3ViT(VisionTransformer): + """Vision Transformer for MoCoV3 pre-training. + + A pytorch implement of: `An Images is Worth 16x16 Words: Transformers for + Image Recognition at Scale `_. + + Part of the code is modified from: + ``_. + + Args: + stop_grad_conv1 (bool): whether to stop the gradient of + convolution layer in `PatchEmbed`. Defaults to False. + frozen_stages (int): Stages to be frozen (stop grad and set eval mode). + -1 means not freezing any parameters. Defaults to -1. + norm_eval (bool): Whether to set norm layers to eval mode, namely, + freeze running stats (mean and var). Note: Effect on Batch Norm + and its variants only. Defaults to False. + init_cfg (dict or list[dict], optional): Initialization config dict. + Defaults to None. 
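        Example (a minimal illustrative sketch; ``patch_size`` must be
        passed explicitly because ``__init__`` reads it from ``kwargs``):

            >>> import torch
            >>> # 'mocov3-small' is the extra arch registered in __init__
            >>> # below; remaining kwargs go to VisionTransformer.
            >>> vit = MoCoV3ViT(arch='mocov3-small', img_size=224,
            ...                 patch_size=16, stop_grad_conv1=True)
            >>> out = vit(torch.rand(2, 3, 224, 224))
            >>> # ``out`` follows the usual VisionTransformer output format
            >>> # for the configured ``out_indices``/``out_type``.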
+ """ + + def __init__(self, + stop_grad_conv1: bool = False, + frozen_stages: int = -1, + norm_eval: bool = False, + init_cfg: Optional[Union[dict, List[dict]]] = None, + **kwargs) -> None: + + # add MoCoV3 ViT-small arch + self.arch_zoo.update( + dict.fromkeys( + ['mocov3-s', 'mocov3-small'], { + 'embed_dims': 384, + 'num_layers': 12, + 'num_heads': 12, + 'feedforward_channels': 1536, + })) + + super().__init__(init_cfg=init_cfg, **kwargs) + self.patch_size = kwargs['patch_size'] + self.frozen_stages = frozen_stages + self.norm_eval = norm_eval + self.init_cfg = init_cfg + + if stop_grad_conv1: + self.patch_embed.projection.weight.requires_grad = False + self.patch_embed.projection.bias.requires_grad = False + + self._freeze_stages() + + def init_weights(self) -> None: + """Initialize position embedding, patch embedding, qkv layers and cls + token.""" + super().init_weights() + + if not (isinstance(self.init_cfg, dict) + and self.init_cfg['type'] == 'Pretrained'): + + # Use fixed 2D sin-cos position embedding + pos_emb = build_2d_sincos_position_embedding( + patches_resolution=self.patch_resolution, + embed_dims=self.embed_dims, + cls_token=True) + self.pos_embed.data.copy_(pos_emb) + self.pos_embed.requires_grad = False + + # xavier_uniform initialization for PatchEmbed + val = math.sqrt( + 6. / float(3 * reduce(mul, to_2tuple(self.patch_size), 1) + + self.embed_dims)) + nn.init.uniform_(self.patch_embed.projection.weight, -val, val) + nn.init.zeros_(self.patch_embed.projection.bias) + + # initialization for linear layers + for name, m in self.named_modules(): + if isinstance(m, nn.Linear): + if 'qkv' in name: + # treat the weights of Q, K, V separately + val = math.sqrt( + 6. / + float(m.weight.shape[0] // 3 + m.weight.shape[1])) + nn.init.uniform_(m.weight, -val, val) + else: + nn.init.xavier_uniform_(m.weight) + nn.init.zeros_(m.bias) + nn.init.normal_(self.cls_token, std=1e-6) + + def _freeze_stages(self) -> None: + """Freeze patch_embed layer, some parameters and stages.""" + if self.frozen_stages >= 0: + self.patch_embed.eval() + for param in self.patch_embed.parameters(): + param.requires_grad = False + + self.cls_token.requires_grad = False + self.pos_embed.requires_grad = False + + for i in range(1, self.frozen_stages + 1): + m = self.layers[i - 1] + m.eval() + for param in m.parameters(): + param.requires_grad = False + + if i == (self.num_layers) and self.final_norm: + for param in getattr(self, 'norm1').parameters(): + param.requires_grad = False + + def train(self, mode: bool = True) -> None: + super().train(mode) + self._freeze_stages() + if mode and self.norm_eval: + for m in self.modules(): + # trick: eval have effect on BatchNorm only + if isinstance(m, _BatchNorm): + m.eval() + + +@MODELS.register_module() +class MoCoV3(BaseSelfSupervisor): + """MoCo v3. + + Implementation of `An Empirical Study of Training Self-Supervised Vision + Transformers `_. + + Args: + backbone (dict): Config dict for module of backbone + neck (dict): Config dict for module of deep features to compact feature + vectors. + head (dict): Config dict for module of head functions. + base_momentum (float): Momentum coefficient for the momentum-updated + encoder. Defaults to 0.01. + pretrained (str, optional): The pretrained checkpoint path, support + local path and remote path. Defaults to None. + data_preprocessor (dict, optional): The config for preprocessing + input data. If None or no specified type, it will use + "SelfSupDataPreprocessor" as type. 
+ See :class:`SelfSupDataPreprocessor` for more details. + Defaults to None. + init_cfg (Union[List[dict], dict], optional): Config dict for weight + initialization. Defaults to None. + """ + + def __init__(self, + backbone: dict, + neck: dict, + head: dict, + base_momentum: float = 0.01, + pretrained: Optional[str] = None, + data_preprocessor: Optional[dict] = None, + init_cfg: Optional[Union[List[dict], dict]] = None) -> None: + super().__init__( + backbone=backbone, + neck=neck, + head=head, + pretrained=pretrained, + data_preprocessor=data_preprocessor, + init_cfg=init_cfg) + + # create momentum model + self.momentum_encoder = CosineEMA( + nn.Sequential(self.backbone, self.neck), momentum=base_momentum) + + def loss(self, inputs: List[torch.Tensor], data_samples: List[DataSample], + **kwargs) -> Dict[str, torch.Tensor]: + """The forward function in training. + + Args: + inputs (List[torch.Tensor]): The input images. + data_samples (List[DataSample]): All elements required + during the forward function. + + Returns: + Dict[str, torch.Tensor]: A dictionary of loss components. + """ + assert isinstance(inputs, list) + view_1 = inputs[0] + view_2 = inputs[1] + + # compute query features, [N, C] each + q1 = self.neck(self.backbone(view_1))[0] + q2 = self.neck(self.backbone(view_2))[0] + + # compute key features, [N, C] each, no gradient + with torch.no_grad(): + # update momentum encoder + self.momentum_encoder.update_parameters( + nn.Sequential(self.backbone, self.neck)) + + k1 = self.momentum_encoder(view_1)[0] + k2 = self.momentum_encoder(view_2)[0] + + loss = self.head.loss(q1, k2) + self.head.loss(q2, k1) + + losses = dict(loss=loss) + return losses diff --git a/mmpretrain/models/selfsup/simclr.py b/mmpretrain/models/selfsup/simclr.py new file mode 100644 index 0000000000000000000000000000000000000000..4b19ab4053de21a865fbaf864f654ff3ad8840f1 --- /dev/null +++ b/mmpretrain/models/selfsup/simclr.py @@ -0,0 +1,98 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Any, Dict, List, Tuple + +import torch +from mmengine.dist import all_gather, get_rank + +from mmpretrain.registry import MODELS +from mmpretrain.structures import DataSample +from .base import BaseSelfSupervisor + + +class GatherLayer(torch.autograd.Function): + """Gather tensors from all process, supporting backward propagation.""" + + @staticmethod + def forward(ctx: Any, input: torch.Tensor) -> Tuple[List]: + ctx.save_for_backward(input) + output = all_gather(input) + return tuple(output) + + @staticmethod + def backward(ctx: Any, *grads: torch.Tensor) -> torch.Tensor: + input, = ctx.saved_tensors + grad_out = torch.zeros_like(input) + grad_out[:] = grads[get_rank()] + return grad_out + + +@MODELS.register_module() +class SimCLR(BaseSelfSupervisor): + """SimCLR. + + Implementation of `A Simple Framework for Contrastive Learning of Visual + Representations `_. + """ + + @staticmethod + def _create_buffer( + batch_size: int, device: torch.device + ) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: + """Compute the mask and the index of positive samples. + + Args: + batch_size (int): The batch size. + device (torch.device): The device of backend. + + Returns: + Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: + - The mask for feature selection. + - The index of positive samples. + - The mask of negative samples. 
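            A small worked example (illustrative; ``batch_size=2`` gives
            four views in total):

            >>> import torch
            >>> mask, pos_idx, neg_mask = SimCLR._create_buffer(
            ...     2, torch.device('cpu'))
            >>> # mask: (4, 4) with zeros on the diagonal, used to drop
            >>> #   self-similarities from the 4x4 similarity matrix
            >>> # neg_mask: (4, 3) with exactly one zero per row, marking
            >>> #   each view's positive pair in the reduced matrix
            >>> # pos_idx indexes those positive entries row by row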
+ """ + mask = 1 - torch.eye(batch_size * 2, dtype=torch.uint8).to(device) + pos_idx = ( + torch.arange(batch_size * 2).to(device), + 2 * torch.arange(batch_size, dtype=torch.long).unsqueeze(1).repeat( + 1, 2).view(-1, 1).squeeze().to(device)) + neg_mask = torch.ones((batch_size * 2, batch_size * 2 - 1), + dtype=torch.uint8).to(device) + neg_mask[pos_idx] = 0 + return mask, pos_idx, neg_mask + + def loss(self, inputs: List[torch.Tensor], data_samples: List[DataSample], + **kwargs) -> Dict[str, torch.Tensor]: + """The forward function in training. + + Args: + inputs (List[torch.Tensor]): The input images. + data_samples (List[DataSample]): All elements required + during the forward function. + + Returns: + Dict[str, torch.Tensor]: A dictionary of loss components. + """ + assert isinstance(inputs, list) + inputs = torch.stack(inputs, 1) + inputs = inputs.reshape((inputs.size(0) * 2, inputs.size(2), + inputs.size(3), inputs.size(4))) + x = self.backbone(inputs) + z = self.neck(x)[0] # (2n)xd + + z = z / (torch.norm(z, p=2, dim=1, keepdim=True) + 1e-10) + z = torch.cat(GatherLayer.apply(z), dim=0) # (2N)xd + assert z.size(0) % 2 == 0 + N = z.size(0) // 2 + s = torch.matmul(z, z.permute(1, 0)) # (2N)x(2N) + mask, pos_idx, neg_mask = self._create_buffer(N, s.device) + + # remove diagonal, (2N)x(2N-1) + s = torch.masked_select(s, mask == 1).reshape(s.size(0), -1) + positive = s[pos_idx].unsqueeze(1) # (2N)x1 + + # select negative, (2N)x(2N-2) + negative = torch.masked_select(s, neg_mask == 1).reshape(s.size(0), -1) + + loss = self.head.loss(positive, negative) + losses = dict(loss=loss) + return losses diff --git a/mmpretrain/models/selfsup/simmim.py b/mmpretrain/models/selfsup/simmim.py new file mode 100644 index 0000000000000000000000000000000000000000..7cf9494210c7a9d22853c4138542ba5c77d779f6 --- /dev/null +++ b/mmpretrain/models/selfsup/simmim.py @@ -0,0 +1,194 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Dict, List, Optional, Sequence, Tuple, Union + +import torch +import torch.nn as nn +from mmengine.model.weight_init import trunc_normal_ + +from mmpretrain.models import SwinTransformer +from mmpretrain.registry import MODELS +from mmpretrain.structures import DataSample +from .base import BaseSelfSupervisor + + +@MODELS.register_module() +class SimMIMSwinTransformer(SwinTransformer): + """Swin Transformer for SimMIM pre-training. + + Args: + Args: + arch (str | dict): Swin Transformer architecture + Defaults to 'T'. + img_size (int | tuple): The size of input image. + Defaults to 224. + in_channels (int): The num of input channels. + Defaults to 3. + drop_rate (float): Dropout rate after embedding. + Defaults to 0. + drop_path_rate (float): Stochastic depth rate. + Defaults to 0.1. + out_indices (tuple): Layers to be outputted. Defaults to (3, ). + use_abs_pos_embed (bool): If True, add absolute position embedding to + the patch embedding. Defaults to False. + with_cp (bool): Use checkpoint or not. Using checkpoint + will save some memory while slowing down the training speed. + Defaults to False. + frozen_stages (int): Stages to be frozen (stop grad and set eval mode). + -1 means not freezing any parameters. Defaults to -1. + norm_eval (bool): Whether to set norm layers to eval mode, namely, + freeze running stats (mean and var). Note: Effect on Batch Norm + and its variants only. Defaults to False. + norm_cfg (dict): Config dict for normalization layer at end + of backone. 
Defaults to dict(type='LN') + stage_cfgs (Sequence | dict): Extra config dict for each + stage. Defaults to empty dict. + patch_cfg (dict): Extra config dict for patch embedding. + Defaults to empty dict. + pad_small_map (bool): If True, pad the small feature map to the window + size, which is common used in detection and segmentation. If False, + avoid shifting window and shrink the window size to the size of + feature map, which is common used in classification. + Defaults to False. + init_cfg (dict, optional): The Config for initialization. + Defaults to None. + """ + + def __init__(self, + arch: Union[str, dict] = 'T', + img_size: Union[Tuple[int, int], int] = 224, + in_channels: int = 3, + drop_rate: float = 0., + drop_path_rate: float = 0.1, + out_indices: tuple = (3, ), + use_abs_pos_embed: bool = False, + with_cp: bool = False, + frozen_stages: bool = -1, + norm_eval: bool = False, + norm_cfg: dict = dict(type='LN'), + stage_cfgs: Union[Sequence, dict] = dict(), + patch_cfg: dict = dict(), + pad_small_map: bool = False, + init_cfg: Optional[dict] = None) -> None: + super().__init__( + arch=arch, + img_size=img_size, + in_channels=in_channels, + drop_rate=drop_rate, + drop_path_rate=drop_path_rate, + out_indices=out_indices, + use_abs_pos_embed=use_abs_pos_embed, + with_cp=with_cp, + frozen_stages=frozen_stages, + norm_eval=norm_eval, + norm_cfg=norm_cfg, + stage_cfgs=stage_cfgs, + patch_cfg=patch_cfg, + pad_small_map=pad_small_map, + init_cfg=init_cfg) + + self.mask_token = nn.Parameter(torch.zeros(1, 1, self.embed_dims)) + + def init_weights(self) -> None: + """Initialize weights.""" + super().init_weights() + + if (isinstance(self.init_cfg, dict) + and self.init_cfg['type'] == 'Pretrained'): + # Suppress default init if use pretrained model. + return + + if self.use_abs_pos_embed: + trunc_normal_(self.absolute_pos_embed, std=0.02) + + trunc_normal_(self.mask_token, mean=0, std=.02) + + self.apply(self._init_weights) + + def _init_weights(self, m): + """Initialize weights.""" + if isinstance(m, nn.Linear): + trunc_normal_(m.weight, std=.02) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.LayerNorm): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + + def forward(self, x: torch.Tensor, + mask: Optional[torch.Tensor]) -> Sequence[torch.Tensor]: + """Generate features for masked images. + + The function supports two kind of forward behaviors. If the ``mask`` is + not ``None``, the forward function will be executed as masked image + modeling pre-training; if the ``mask`` is ``None``, the forward + function will call ``super().forward()``, which extract features from + images without mask. + + Args: + x (torch.Tensor): Input images. + mask (torch.Tensor, optional): Masks for images. + + Returns: + tuple: A tuple containing features from multi-stages. + """ + if mask is None: + return super().forward(x) + + else: + x, hw_shape = self.patch_embed(x) + B, L, _ = x.shape + + mask_token = self.mask_token.expand(B, L, -1) + w = mask.flatten(1).unsqueeze(-1).type_as(mask_token) + x = x * (1. 
- w) + mask_token * w + + if self.use_abs_pos_embed: + x = x + self.absolute_pos_embed + + x = self.drop_after_pos(x) + + outs = [] + for i, stage in enumerate(self.stages): + x, hw_shape = stage(x, hw_shape) + if i in self.out_indices: + norm_layer = getattr(self, f'norm{i}') + out = norm_layer(x) + out = out.view(-1, *hw_shape, + stage.out_channels).permute(0, 3, 1, + 2).contiguous() + outs.append(out) + + return tuple(outs) + + +@MODELS.register_module() +class SimMIM(BaseSelfSupervisor): + """SimMIM. + + Implementation of `SimMIM: A Simple Framework for Masked Image Modeling + `_. + """ + + def extract_feat(self, inputs: torch.Tensor): + return self.backbone(inputs, mask=None) + + def loss(self, inputs: torch.Tensor, data_samples: List[DataSample], + **kwargs) -> Dict[str, torch.Tensor]: + """The forward function in training. + + Args: + inputs (List[torch.Tensor]): The input images. + data_samples (List[DataSample]): All elements required + during the forward function. + + Returns: + Dict[str, torch.Tensor]: A dictionary of loss components. + """ + mask = torch.stack([data_sample.mask for data_sample in data_samples]) + + img_latent = self.backbone(inputs, mask) + img_rec = self.neck(img_latent[0]) + loss = self.head.loss(img_rec, inputs, mask) + losses = dict(loss=loss) + + return losses diff --git a/mmpretrain/models/selfsup/simsiam.py b/mmpretrain/models/selfsup/simsiam.py new file mode 100644 index 0000000000000000000000000000000000000000..a502cd770d0b497368dc7fc1d93caac01ec65db1 --- /dev/null +++ b/mmpretrain/models/selfsup/simsiam.py @@ -0,0 +1,43 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Dict, List + +import torch + +from mmpretrain.registry import MODELS +from mmpretrain.structures import DataSample +from .base import BaseSelfSupervisor + + +@MODELS.register_module() +class SimSiam(BaseSelfSupervisor): + """SimSiam. + + Implementation of `Exploring Simple Siamese Representation Learning + `_. The operation of fixing learning rate + of predictor is in `engine/hooks/simsiam_hook.py`. + """ + + def loss(self, inputs: List[torch.Tensor], data_samples: List[DataSample], + **kwargs) -> Dict[str, torch.Tensor]: + """The forward function in training. + + Args: + inputs (List[torch.Tensor]): The input images. + data_samples (List[DataSample]): All elements required + during the forward function. + + Returns: + Dict[str, torch.Tensor]: A dictionary of loss components. + """ + assert isinstance(inputs, list) + img_v1 = inputs[0] + img_v2 = inputs[1] + + z1 = self.neck(self.backbone(img_v1))[0] # NxC + z2 = self.neck(self.backbone(img_v2))[0] # NxC + + loss_1 = self.head.loss(z1, z2) + loss_2 = self.head.loss(z2, z1) + + losses = dict(loss=0.5 * (loss_1 + loss_2)) + return losses diff --git a/mmpretrain/models/selfsup/swav.py b/mmpretrain/models/selfsup/swav.py new file mode 100644 index 0000000000000000000000000000000000000000..efe0eab483319bd2dfde8929a2285e684cd3fc38 --- /dev/null +++ b/mmpretrain/models/selfsup/swav.py @@ -0,0 +1,49 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Dict, List + +import torch + +from mmpretrain.registry import MODELS +from mmpretrain.structures import DataSample +from .base import BaseSelfSupervisor + + +@MODELS.register_module() +class SwAV(BaseSelfSupervisor): + """SwAV. + + Implementation of `Unsupervised Learning of Visual Features by Contrasting + Cluster Assignments `_. + + The queue is built in ``mmpretrain/engine/hooks/swav_hook.py``. 
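        The multi-crop forward in :meth:`loss` groups crops by spatial
        size before running the backbone. A small illustration with the
        common 2x224 + 6x96 multi-crop setting (assumed here, not fixed by
        this class):

            >>> import torch
            >>> sizes = torch.tensor([224, 224, 96, 96, 96, 96, 96, 96])
            >>> counts = torch.unique_consecutive(sizes, return_counts=True)[1]
            >>> idx_crops = torch.cumsum(counts, 0)   # tensor([2, 8])
            >>> # i.e. one backbone pass over the two global crops and one
            >>> # over the six local crops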
+ """ + + def loss(self, inputs: List[torch.Tensor], data_samples: List[DataSample], + **kwargs) -> Dict[str, torch.Tensor]: + """Forward computation during training. + + Args: + inputs (List[torch.Tensor]): The input images. + data_samples (List[DataSample]): All elements required + during the forward function. + + Returns: + Dict[str, torch.Tensor]: A dictionary of loss components. + """ + assert isinstance(inputs, list) + # multi-res forward passes + idx_crops = torch.cumsum( + torch.unique_consecutive( + torch.tensor([input.shape[-1] for input in inputs]), + return_counts=True)[1], 0) + start_idx = 0 + output = [] + for end_idx in idx_crops: + _out = self.backbone(torch.cat(inputs[start_idx:end_idx])) + output.append(_out) + start_idx = end_idx + output = self.neck(output) + + loss = self.head.loss(output) + losses = dict(loss=loss) + return losses diff --git a/mmpretrain/models/tta/__init__.py b/mmpretrain/models/tta/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..568e64ffdc743b4694045f39a46deb5083b2688a --- /dev/null +++ b/mmpretrain/models/tta/__init__.py @@ -0,0 +1,4 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from .score_tta import AverageClsScoreTTA + +__all__ = ['AverageClsScoreTTA'] diff --git a/mmpretrain/models/tta/score_tta.py b/mmpretrain/models/tta/score_tta.py new file mode 100644 index 0000000000000000000000000000000000000000..5b8a0786577c6cdb5076957df0ed60aac9d307cb --- /dev/null +++ b/mmpretrain/models/tta/score_tta.py @@ -0,0 +1,36 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import List + +from mmengine.model import BaseTTAModel + +from mmpretrain.registry import MODELS +from mmpretrain.structures import DataSample + + +@MODELS.register_module() +class AverageClsScoreTTA(BaseTTAModel): + + def merge_preds( + self, + data_samples_list: List[List[DataSample]], + ) -> List[DataSample]: + """Merge predictions of enhanced data to one prediction. + + Args: + data_samples_list (List[List[DataSample]]): List of predictions + of all enhanced data. + + Returns: + List[DataSample]: Merged prediction. + """ + merged_data_samples = [] + for data_samples in data_samples_list: + merged_data_samples.append(self._merge_single_sample(data_samples)) + return merged_data_samples + + def _merge_single_sample(self, data_samples): + merged_data_sample: DataSample = data_samples[0].new() + merged_score = sum(data_sample.pred_score + for data_sample in data_samples) / len(data_samples) + merged_data_sample.set_pred_score(merged_score) + return merged_data_sample diff --git a/mmpretrain/models/utils/__init__.py b/mmpretrain/models/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..4c13dca04f1522c0657a4b78196c0058a9e65842 --- /dev/null +++ b/mmpretrain/models/utils/__init__.py @@ -0,0 +1,92 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+from mmpretrain.utils.dependency import WITH_MULTIMODAL +from .attention import (BEiTAttention, ChannelMultiheadAttention, + CrossMultiheadAttention, LeAttention, + MultiheadAttention, PromptMultiheadAttention, + ShiftWindowMSA, WindowMSA, WindowMSAV2) +from .batch_augments import CutMix, Mixup, RandomBatchAugment, ResizeMix +from .batch_shuffle import batch_shuffle_ddp, batch_unshuffle_ddp +from .channel_shuffle import channel_shuffle +from .clip_generator_helper import QuickGELU, build_clip_model +from .data_preprocessor import (ClsDataPreprocessor, + MultiModalDataPreprocessor, + SelfSupDataPreprocessor, + TwoNormDataPreprocessor, VideoDataPreprocessor) +from .ema import CosineEMA +from .embed import (HybridEmbed, PatchEmbed, PatchMerging, resize_pos_embed, + resize_relative_position_bias_table) +from .helpers import is_tracing, to_2tuple, to_3tuple, to_4tuple, to_ntuple +from .inverted_residual import InvertedResidual +from .layer_scale import LayerScale +from .make_divisible import make_divisible +from .norm import GRN, LayerNorm2d, build_norm_layer +from .position_encoding import (ConditionalPositionEncoding, + PositionEncodingFourier, RotaryEmbeddingFast, + build_2d_sincos_position_embedding) +from .res_layer_extra_norm import ResLayerExtraNorm +from .se_layer import SELayer +from .swiglu_ffn import SwiGLUFFN, SwiGLUFFNFused +from .vector_quantizer import NormEMAVectorQuantizer + +__all__ = [ + 'channel_shuffle', + 'make_divisible', + 'InvertedResidual', + 'SELayer', + 'to_ntuple', + 'to_2tuple', + 'to_3tuple', + 'to_4tuple', + 'PatchEmbed', + 'PatchMerging', + 'HybridEmbed', + 'RandomBatchAugment', + 'ShiftWindowMSA', + 'is_tracing', + 'MultiheadAttention', + 'ConditionalPositionEncoding', + 'resize_pos_embed', + 'resize_relative_position_bias_table', + 'ClsDataPreprocessor', + 'Mixup', + 'CutMix', + 'ResizeMix', + 'BEiTAttention', + 'LayerScale', + 'WindowMSA', + 'WindowMSAV2', + 'ChannelMultiheadAttention', + 'PositionEncodingFourier', + 'LeAttention', + 'GRN', + 'LayerNorm2d', + 'build_norm_layer', + 'CrossMultiheadAttention', + 'build_2d_sincos_position_embedding', + 'PromptMultiheadAttention', + 'NormEMAVectorQuantizer', + 'build_clip_model', + 'batch_shuffle_ddp', + 'batch_unshuffle_ddp', + 'SelfSupDataPreprocessor', + 'TwoNormDataPreprocessor', + 'VideoDataPreprocessor', + 'CosineEMA', + 'ResLayerExtraNorm', + 'MultiModalDataPreprocessor', + 'QuickGELU', + 'SwiGLUFFN', + 'SwiGLUFFNFused', + 'RotaryEmbeddingFast', +] + +if WITH_MULTIMODAL: + from .huggingface import (no_load_hf_pretrained_model, register_hf_model, + register_hf_tokenizer) + from .tokenizer import (Blip2Tokenizer, BlipTokenizer, FullTokenizer, + OFATokenizer) + + __all__.extend([ + 'BlipTokenizer', 'OFATokenizer', 'Blip2Tokenizer', 'register_hf_model', + 'register_hf_tokenizer', 'no_load_hf_pretrained_model', 'FullTokenizer' + ]) diff --git a/mmpretrain/models/utils/attention.py b/mmpretrain/models/utils/attention.py new file mode 100644 index 0000000000000000000000000000000000000000..e92f6054dd83881b508ac5e87d9034cd86b3a36c --- /dev/null +++ b/mmpretrain/models/utils/attention.py @@ -0,0 +1,1129 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+import itertools
+import warnings
+from functools import partial
+from typing import List, Optional, Union
+
+import numpy as np
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+from mmcv.cnn.bricks.drop import build_dropout
+from mmengine.model import BaseModule
+from mmengine.model.weight_init import trunc_normal_
+from mmengine.utils import digit_version
+
+from mmpretrain.registry import MODELS
+from .helpers import to_2tuple
+from .layer_scale import LayerScale
+
+# After PyTorch v1.10.0, calling torch.meshgrid without the ``indexing``
+# argument raises an extra warning. For more details,
+# refer to https://github.com/pytorch/pytorch/issues/50276
+if digit_version(torch.__version__) >= digit_version('1.10.0'):
+    torch_meshgrid = partial(torch.meshgrid, indexing='ij')
+else:
+    torch_meshgrid = torch.meshgrid
+
+
+def scaled_dot_product_attention_pyimpl(query,
+                                        key,
+                                        value,
+                                        attn_mask=None,
+                                        dropout_p=0.,
+                                        scale=None,
+                                        is_causal=False):
+    # ``scale`` is a multiplicative factor on the attention logits, matching
+    # the semantics of ``F.scaled_dot_product_attention`` in PyTorch >= 2.1.
+    scale = scale or query.size(-1)**-0.5
+    if is_causal and attn_mask is None:
+        attn_mask = torch.ones(
+            query.size(-2), key.size(-2), dtype=torch.bool).tril(diagonal=0)
+    if attn_mask is not None and attn_mask.dtype == torch.bool:
+        # Convert the boolean mask into an additive float mask.
+        attn_mask = torch.zeros_like(
+            attn_mask, dtype=query.dtype).masked_fill(~attn_mask,
+                                                      -float('inf'))
+
+    attn_weight = query @ key.transpose(-2, -1) * scale
+    if attn_mask is not None:
+        attn_weight += attn_mask
+    attn_weight = torch.softmax(attn_weight, dim=-1)
+    attn_weight = torch.dropout(attn_weight, dropout_p, True)
+    return attn_weight @ value
+
+
+if digit_version(torch.__version__) >= digit_version('2.0.0'):
+    scaled_dot_product_attention = F.scaled_dot_product_attention
+else:
+    scaled_dot_product_attention = scaled_dot_product_attention_pyimpl
+
+
+class WindowMSA(BaseModule):
+    """Window based multi-head self-attention (W-MSA) module with relative
+    position bias.
+
+    Args:
+        embed_dims (int): Number of input channels.
+        window_size (tuple[int]): The height and width of the window.
+        num_heads (int): Number of attention heads.
+        qkv_bias (bool, optional): If True, add a learnable bias to q, k, v.
+            Defaults to True.
+        qk_scale (float, optional): Override default qk scale of
+            ``head_dim ** -0.5`` if set. Defaults to None.
+        attn_drop (float, optional): Dropout ratio of attention weight.
+            Defaults to 0.
+        proj_drop (float, optional): Dropout ratio of output. Defaults to 0.
+        init_cfg (dict, optional): The extra config for initialization.
+            Defaults to None.
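+
+    A minimal usage sketch (the tensor shapes below are illustrative only):
+
+    Example:
+        >>> import torch
+        >>> attn = WindowMSA(embed_dims=96, window_size=(7, 7), num_heads=3)
+        >>> x = torch.rand(4, 49, 96)  # (num_windows*B, Wh*Ww, C)
+        >>> attn(x).shape
+        torch.Size([4, 49, 96])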
+ """ + + def __init__(self, + embed_dims, + window_size, + num_heads, + qkv_bias=True, + qk_scale=None, + attn_drop=0., + proj_drop=0., + init_cfg=None): + + super().__init__(init_cfg) + self.embed_dims = embed_dims + self.window_size = window_size # Wh, Ww + self.num_heads = num_heads + head_embed_dims = embed_dims // num_heads + self.scale = qk_scale or head_embed_dims**-0.5 + + # define a parameter table of relative position bias + self.relative_position_bias_table = nn.Parameter( + torch.zeros((2 * window_size[0] - 1) * (2 * window_size[1] - 1), + num_heads)) # 2*Wh-1 * 2*Ww-1, nH + + # About 2x faster than original impl + Wh, Ww = self.window_size + rel_index_coords = self.double_step_seq(2 * Ww - 1, Wh, 1, Ww) + rel_position_index = rel_index_coords + rel_index_coords.T + rel_position_index = rel_position_index.flip(1).contiguous() + self.register_buffer('relative_position_index', rel_position_index) + + self.qkv = nn.Linear(embed_dims, embed_dims * 3, bias=qkv_bias) + self.attn_drop = nn.Dropout(attn_drop) + self.proj = nn.Linear(embed_dims, embed_dims) + self.proj_drop = nn.Dropout(proj_drop) + + self.softmax = nn.Softmax(dim=-1) + + def init_weights(self): + super(WindowMSA, self).init_weights() + + trunc_normal_(self.relative_position_bias_table, std=0.02) + + def forward(self, x, mask=None): + """ + Args: + + x (tensor): input features with shape of (num_windows*B, N, C) + mask (tensor, Optional): mask with shape of (num_windows, Wh*Ww, + Wh*Ww), value should be between (-inf, 0]. + """ + B_, N, C = x.shape + qkv = self.qkv(x).reshape(B_, N, 3, self.num_heads, + C // self.num_heads).permute(2, 0, 3, 1, 4) + q, k, v = qkv[0], qkv[1], qkv[ + 2] # make torchscript happy (cannot use tensor as tuple) + + q = q * self.scale + attn = (q @ k.transpose(-2, -1)) + + relative_position_bias = self.relative_position_bias_table[ + self.relative_position_index.view(-1)].view( + self.window_size[0] * self.window_size[1], + self.window_size[0] * self.window_size[1], + -1) # Wh*Ww,Wh*Ww,nH + relative_position_bias = relative_position_bias.permute( + 2, 0, 1).contiguous() # nH, Wh*Ww, Wh*Ww + attn = attn + relative_position_bias.unsqueeze(0) + + if mask is not None: + nW = mask.shape[0] + attn = attn.view(B_ // nW, nW, self.num_heads, N, + N) + mask.unsqueeze(1).unsqueeze(0) + attn = attn.view(-1, self.num_heads, N, N) + attn = self.softmax(attn) + else: + attn = self.softmax(attn) + + attn = self.attn_drop(attn) + + x = (attn @ v).transpose(1, 2).reshape(B_, N, C) + x = self.proj(x) + x = self.proj_drop(x) + return x + + @staticmethod + def double_step_seq(step1, len1, step2, len2): + seq1 = torch.arange(0, step1 * len1, step1) + seq2 = torch.arange(0, step2 * len2, step2) + return (seq1[:, None] + seq2[None, :]).reshape(1, -1) + + +class WindowMSAV2(BaseModule): + """Window based multi-head self-attention (W-MSA) module with relative + position bias. + + Based on implementation on Swin Transformer V2 original repo. Refers to + https://github.com/microsoft/Swin-Transformer/blob/main/models/swin_transformer_v2.py + for more details. + + Args: + embed_dims (int): Number of input channels. + window_size (tuple[int]): The height and width of the window. + num_heads (int): Number of attention heads. + qkv_bias (bool): If True, add a learnable bias to q, k, v. + Defaults to True. + attn_drop (float): Dropout ratio of attention weight. + Defaults to 0. + proj_drop (float): Dropout ratio of output. Defaults to 0. 
+ cpb_mlp_hidden_dims (int): The hidden dimensions of the continuous + relative position bias network. Defaults to 512. + pretrained_window_size (tuple(int)): The height and width of the window + in pre-training. Defaults to (0, 0), which means not load + pretrained model. + init_cfg (dict, optional): The extra config for initialization. + Defaults to None. + """ + + def __init__(self, + embed_dims, + window_size, + num_heads, + qkv_bias=True, + attn_drop=0., + proj_drop=0., + cpb_mlp_hidden_dims=512, + pretrained_window_size=(0, 0), + init_cfg=None): + + super().__init__(init_cfg) + self.embed_dims = embed_dims + self.window_size = window_size # Wh, Ww + self.num_heads = num_heads + + # Use small network for continuous relative position bias + self.cpb_mlp = nn.Sequential( + nn.Linear( + in_features=2, out_features=cpb_mlp_hidden_dims, bias=True), + nn.ReLU(inplace=True), + nn.Linear( + in_features=cpb_mlp_hidden_dims, + out_features=num_heads, + bias=False)) + + # Add learnable scalar for cosine attention + self.logit_scale = nn.Parameter( + torch.log(10 * torch.ones((num_heads, 1, 1))), requires_grad=True) + + # get relative_coords_table + relative_coords_h = torch.arange( + -(self.window_size[0] - 1), + self.window_size[0], + dtype=torch.float32) + relative_coords_w = torch.arange( + -(self.window_size[1] - 1), + self.window_size[1], + dtype=torch.float32) + relative_coords_table = torch.stack( + torch_meshgrid([relative_coords_h, relative_coords_w])).permute( + 1, 2, 0).contiguous().unsqueeze(0) # 1, 2*Wh-1, 2*Ww-1, 2 + if pretrained_window_size[0] > 0: + relative_coords_table[:, :, :, 0] /= ( + pretrained_window_size[0] - 1) + relative_coords_table[:, :, :, 1] /= ( + pretrained_window_size[1] - 1) + else: + relative_coords_table[:, :, :, 0] /= (self.window_size[0] - 1) + relative_coords_table[:, :, :, 1] /= (self.window_size[1] - 1) + relative_coords_table *= 8 # normalize to -8, 8 + relative_coords_table = torch.sign(relative_coords_table) * torch.log2( + torch.abs(relative_coords_table) + 1.0) / np.log2(8) + self.register_buffer('relative_coords_table', relative_coords_table) + + # get pair-wise relative position index + # for each token inside the window + indexes_h = torch.arange(self.window_size[0]) + indexes_w = torch.arange(self.window_size[1]) + coordinates = torch.stack( + torch_meshgrid([indexes_h, indexes_w]), dim=0) # 2, Wh, Ww + coordinates = torch.flatten(coordinates, start_dim=1) # 2, Wh*Ww + # 2, Wh*Ww, Wh*Ww + relative_coordinates = coordinates[:, :, None] - coordinates[:, + None, :] + relative_coordinates = relative_coordinates.permute( + 1, 2, 0).contiguous() # Wh*Ww, Wh*Ww, 2 + + relative_coordinates[:, :, 0] += self.window_size[ + 0] - 1 # shift to start from 0 + relative_coordinates[:, :, 1] += self.window_size[1] - 1 + relative_coordinates[:, :, 0] *= 2 * self.window_size[1] - 1 + relative_position_index = relative_coordinates.sum(-1) # Wh*Ww, Wh*Ww + self.register_buffer('relative_position_index', + relative_position_index) + + self.qkv = nn.Linear(embed_dims, embed_dims * 3, bias=False) + if qkv_bias: + self.q_bias = nn.Parameter(torch.zeros(embed_dims)) + self.v_bias = nn.Parameter(torch.zeros(embed_dims)) + else: + self.q_bias = None + self.v_bias = None + self.attn_drop = nn.Dropout(attn_drop) + self.proj = nn.Linear(embed_dims, embed_dims) + self.proj_drop = nn.Dropout(proj_drop) + + self.softmax = nn.Softmax(dim=-1) + + def forward(self, x, mask=None): + """ + Args: + + x (tensor): input features with shape of (num_windows*B, N, C) + mask (tensor, 
Optional): mask with shape of (num_windows, Wh*Ww, + Wh*Ww), value should be between (-inf, 0]. + """ + B_, N, C = x.shape + qkv_bias = None + if self.q_bias is not None: + qkv_bias = torch.cat( + (self.q_bias, + torch.zeros_like(self.v_bias, + requires_grad=False), self.v_bias)) + qkv = F.linear(input=x, weight=self.qkv.weight, bias=qkv_bias) + qkv = qkv.reshape(B_, N, 3, self.num_heads, + C // self.num_heads).permute(2, 0, 3, 1, 4) + q, k, v = qkv[0], qkv[1], qkv[ + 2] # make torchscript happy (cannot use tensor as tuple) + + # cosine attention + attn = ( + F.normalize(q, dim=-1) @ F.normalize(k, dim=-1).transpose(-2, -1)) + logit_scale = torch.clamp( + self.logit_scale, max=np.log(1. / 0.01)).exp() + attn = attn * logit_scale + + relative_position_bias_table = self.cpb_mlp( + self.relative_coords_table).view(-1, self.num_heads) + relative_position_bias = relative_position_bias_table[ + self.relative_position_index.view(-1)].view( + self.window_size[0] * self.window_size[1], + self.window_size[0] * self.window_size[1], + -1) # Wh*Ww,Wh*Ww,nH + relative_position_bias = relative_position_bias.permute( + 2, 0, 1).contiguous() # nH, Wh*Ww, Wh*Ww + relative_position_bias = 16 * torch.sigmoid(relative_position_bias) + attn = attn + relative_position_bias.unsqueeze(0) + + if mask is not None: + nW = mask.shape[0] + attn = attn.view(B_ // nW, nW, self.num_heads, N, + N) + mask.unsqueeze(1).unsqueeze(0) + attn = attn.view(-1, self.num_heads, N, N) + attn = self.softmax(attn) + else: + attn = self.softmax(attn) + + attn = self.attn_drop(attn) + + x = (attn @ v).transpose(1, 2).reshape(B_, N, C) + x = self.proj(x) + x = self.proj_drop(x) + return x + + +@MODELS.register_module() +class ShiftWindowMSA(BaseModule): + """Shift Window Multihead Self-Attention Module. + + Args: + embed_dims (int): Number of input channels. + num_heads (int): Number of attention heads. + window_size (int): The height and width of the window. + shift_size (int, optional): The shift step of each window towards + right-bottom. If zero, act as regular window-msa. Defaults to 0. + dropout_layer (dict, optional): The dropout_layer used before output. + Defaults to dict(type='DropPath', drop_prob=0.). + pad_small_map (bool): If True, pad the small feature map to the window + size, which is common used in detection and segmentation. If False, + avoid shifting window and shrink the window size to the size of + feature map, which is common used in classification. + Defaults to False. + window_msa (Callable): To build a window multi-head attention module. + Defaults to :class:`WindowMSA`. + init_cfg (dict, optional): The extra config for initialization. + Defaults to None. + **kwargs: Other keyword arguments to build the window multi-head + attention module. + """ + + def __init__(self, + embed_dims, + num_heads, + window_size, + shift_size=0, + dropout_layer=dict(type='DropPath', drop_prob=0.), + pad_small_map=False, + window_msa=WindowMSA, + init_cfg=None, + **kwargs): + super().__init__(init_cfg) + + self.shift_size = shift_size + self.window_size = window_size + assert 0 <= self.shift_size < self.window_size + + self.w_msa = window_msa( + embed_dims=embed_dims, + num_heads=num_heads, + window_size=to_2tuple(self.window_size), + **kwargs, + ) + + self.drop = build_dropout(dropout_layer) + self.pad_small_map = pad_small_map + + def forward(self, query, hw_shape): + B, L, C = query.shape + H, W = hw_shape + assert L == H * W, f"The query length {L} doesn't match the input "\ + f'shape ({H}, {W}).' 
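+        # Recover the 2-D spatial layout (B, H, W, C) from the flattened
+        # token sequence before padding, shifting and window partitioning.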
+ query = query.view(B, H, W, C) + + window_size = self.window_size + shift_size = self.shift_size + + if min(H, W) == window_size: + # If not pad small feature map, avoid shifting when the window size + # is equal to the size of feature map. It's to align with the + # behavior of the original implementation. + shift_size = shift_size if self.pad_small_map else 0 + elif min(H, W) < window_size: + # In the original implementation, the window size will be shrunk + # to the size of feature map. The behavior is different with + # swin-transformer for downstream tasks. To support dynamic input + # shape, we don't allow this feature. + assert self.pad_small_map, \ + f'The input shape ({H}, {W}) is smaller than the window ' \ + f'size ({window_size}). Please set `pad_small_map=True`, or ' \ + 'decrease the `window_size`.' + + pad_r = (window_size - W % window_size) % window_size + pad_b = (window_size - H % window_size) % window_size + query = F.pad(query, (0, 0, 0, pad_r, 0, pad_b)) + + H_pad, W_pad = query.shape[1], query.shape[2] + + # cyclic shift + if shift_size > 0: + query = torch.roll( + query, shifts=(-shift_size, -shift_size), dims=(1, 2)) + + attn_mask = self.get_attn_mask((H_pad, W_pad), + window_size=window_size, + shift_size=shift_size, + device=query.device) + + # nW*B, window_size, window_size, C + query_windows = self.window_partition(query, window_size) + # nW*B, window_size*window_size, C + query_windows = query_windows.view(-1, window_size**2, C) + + # W-MSA/SW-MSA (nW*B, window_size*window_size, C) + attn_windows = self.w_msa(query_windows, mask=attn_mask) + + # merge windows + attn_windows = attn_windows.view(-1, window_size, window_size, C) + + # B H' W' C + shifted_x = self.window_reverse(attn_windows, H_pad, W_pad, + window_size) + # reverse cyclic shift + if self.shift_size > 0: + x = torch.roll( + shifted_x, shifts=(shift_size, shift_size), dims=(1, 2)) + else: + x = shifted_x + + if H != H_pad or W != W_pad: + x = x[:, :H, :W, :].contiguous() + + x = x.view(B, H * W, C) + + x = self.drop(x) + + return x + + @staticmethod + def window_reverse(windows, H, W, window_size): + B = int(windows.shape[0] / (H * W / window_size / window_size)) + x = windows.view(B, H // window_size, W // window_size, window_size, + window_size, -1) + x = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(B, H, W, -1) + return x + + @staticmethod + def window_partition(x, window_size): + B, H, W, C = x.shape + x = x.view(B, H // window_size, window_size, W // window_size, + window_size, C) + windows = x.permute(0, 1, 3, 2, 4, 5).contiguous() + windows = windows.view(-1, window_size, window_size, C) + return windows + + @staticmethod + def get_attn_mask(hw_shape, window_size, shift_size, device=None): + if shift_size > 0: + img_mask = torch.zeros(1, *hw_shape, 1, device=device) + h_slices = (slice(0, -window_size), slice(-window_size, + -shift_size), + slice(-shift_size, None)) + w_slices = (slice(0, -window_size), slice(-window_size, + -shift_size), + slice(-shift_size, None)) + cnt = 0 + for h in h_slices: + for w in w_slices: + img_mask[:, h, w, :] = cnt + cnt += 1 + + # nW, window_size, window_size, 1 + mask_windows = ShiftWindowMSA.window_partition( + img_mask, window_size) + mask_windows = mask_windows.view(-1, window_size * window_size) + attn_mask = mask_windows.unsqueeze(1) - mask_windows.unsqueeze(2) + attn_mask = attn_mask.masked_fill(attn_mask != 0, -100.0) + attn_mask = attn_mask.masked_fill(attn_mask == 0, 0.0) + else: + attn_mask = None + return attn_mask + + +class 
MultiheadAttention(BaseModule): + """Multi-head Attention Module. + + This module implements multi-head attention that supports different input + dims and embed dims. And it also supports a shortcut from ``value``, which + is useful if input dims is not the same with embed dims. + + Args: + embed_dims (int): The embedding dimension. + num_heads (int): Parallel attention heads. + input_dims (int, optional): The input dimension, and if None, + use ``embed_dims``. Defaults to None. + attn_drop (float): Dropout rate of the dropout layer after the + attention calculation of query and key. Defaults to 0. + proj_drop (float): Dropout rate of the dropout layer after the + output projection. Defaults to 0. + dropout_layer (dict): The dropout config before adding the shortcut. + Defaults to ``dict(type='Dropout', drop_prob=0.)``. + qkv_bias (bool): If True, add a learnable bias to q, k, v. + Defaults to True. + qk_scale (float, optional): Override default qk scale of + ``head_dim ** -0.5`` if set. Defaults to None. + proj_bias (bool) If True, add a learnable bias to output projection. + Defaults to True. + v_shortcut (bool): Add a shortcut from value to output. It's usually + used if ``input_dims`` is different from ``embed_dims``. + Defaults to False. + use_layer_scale (bool): Whether to use layer scale. Defaults to False. + layer_scale_init_value (float or torch.Tensor): Init value of layer + scale. Defaults to 0. + init_cfg (dict, optional): The Config for initialization. + Defaults to None. + """ + + def __init__(self, + embed_dims, + num_heads, + input_dims=None, + attn_drop=0., + proj_drop=0., + dropout_layer=dict(type='Dropout', drop_prob=0.), + qkv_bias=True, + qk_scale=None, + proj_bias=True, + v_shortcut=False, + use_layer_scale=False, + layer_scale_init_value=0., + init_cfg=None): + super(MultiheadAttention, self).__init__(init_cfg=init_cfg) + + self.input_dims = input_dims or embed_dims + self.embed_dims = embed_dims + self.num_heads = num_heads + self.v_shortcut = v_shortcut + + self.head_dims = embed_dims // num_heads + if qk_scale is not None: + self.scaled_dot_product_attention = partial( + scaled_dot_product_attention_pyimpl, + scale=self.head_dims**-0.5) + else: + self.scaled_dot_product_attention = scaled_dot_product_attention + + self.qkv = nn.Linear(self.input_dims, embed_dims * 3, bias=qkv_bias) + self.attn_drop = attn_drop + self.proj = nn.Linear(embed_dims, embed_dims, bias=proj_bias) + self.proj_drop = nn.Dropout(proj_drop) + + self.out_drop = build_dropout(dropout_layer) + + if use_layer_scale: + warnings.warn('The `use_layer_scale` in `MultiheadAttention` will ' + 'be deprecated. Please use `layer_scale_init_value` ' + 'to control whether using layer scale or not.') + + if use_layer_scale or (layer_scale_init_value > 0): + layer_scale_init_value = layer_scale_init_value or 1e-5 + self.gamma1 = LayerScale( + embed_dims, layer_scale_init_value=layer_scale_init_value) + else: + self.gamma1 = nn.Identity() + + def forward(self, x): + B, N, _ = x.shape + qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, + self.head_dims).permute(2, 0, 3, 1, 4) + q, k, v = qkv[0], qkv[1], qkv[2] + + attn_drop = self.attn_drop if self.training else 0. 
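+        # Attention dropout is only active in training mode; the attention
+        # itself is computed by ``self.scaled_dot_product_attention``, which
+        # is either the fused PyTorch >= 2.0 kernel or the pure-Python
+        # fallback defined at the top of this file.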
+        x = self.scaled_dot_product_attention(q, k, v, dropout_p=attn_drop)
+        x = x.transpose(1, 2).reshape(B, N, self.embed_dims)
+
+        x = self.proj(x)
+        x = self.out_drop(self.gamma1(self.proj_drop(x)))
+
+        if self.v_shortcut:
+            x = v.squeeze(1) + x
+        return x
+
+
+class BEiTAttention(BaseModule):
+    """Window based multi-head self-attention (W-MSA) module with relative
+    position bias.
+
+    The initial implementation is in MMSegmentation.
+
+    Args:
+        embed_dims (int): Number of input channels.
+        num_heads (int): Number of attention heads.
+        window_size (tuple[int, int]): The height and width of the window.
+        use_rel_pos_bias (bool): Whether to use unique relative position bias,
+            if False, use shared relative position bias defined in backbone.
+        bias (str): The option to add learnable bias for q, k, v. If bias is
+            True, it will add learnable bias. If bias is 'qv_bias', it will
+            only add learnable bias for q, v. If bias is False, it will not
+            add bias for q, k, v. Defaults to 'qv_bias'.
+        qk_scale (float | None, optional): Override default qk scale of
+            head_dim ** -0.5 if set. Default: None.
+        attn_drop_rate (float): Dropout ratio of attention weight.
+            Default: 0.0
+        proj_drop_rate (float): Dropout ratio of output. Default: 0.
+        init_cfg (dict | None, optional): The Config for initialization.
+            Default: None.
+    """
+
+    def __init__(self,
+                 embed_dims,
+                 num_heads,
+                 window_size,
+                 use_rel_pos_bias,
+                 bias='qv_bias',
+                 qk_scale=None,
+                 attn_drop_rate=0.,
+                 proj_drop_rate=0.,
+                 init_cfg=None,
+                 **kwargs):
+        super().__init__(init_cfg=init_cfg)
+        self.embed_dims = embed_dims
+        self.num_heads = num_heads
+        head_embed_dims = embed_dims // num_heads
+        self.bias = bias
+        self.scale = qk_scale or head_embed_dims**-0.5
+
+        qkv_bias = bias
+        if bias == 'qv_bias':
+            self._init_qv_bias()
+            qkv_bias = False
+
+        if window_size is None:
+            assert not use_rel_pos_bias
+        else:
+            assert isinstance(window_size, tuple)
+        self.window_size = window_size
+        self.use_rel_pos_bias = use_rel_pos_bias
+        self._init_rel_pos_embedding()
+
+        self.qkv = nn.Linear(embed_dims, embed_dims * 3, bias=qkv_bias)
+        self.attn_drop = nn.Dropout(attn_drop_rate)
+        self.proj = nn.Linear(embed_dims, embed_dims)
+        self.proj_drop = nn.Dropout(proj_drop_rate)
+
+    def _init_qv_bias(self):
+        self.q_bias = nn.Parameter(torch.zeros(self.embed_dims))
+        self.v_bias = nn.Parameter(torch.zeros(self.embed_dims))
+
+    def _init_rel_pos_embedding(self):
+        if self.use_rel_pos_bias:
+            Wh, Ww = self.window_size
+            # cls to token & token to cls & cls to cls
+            self.num_relative_distance = (2 * Wh - 1) * (2 * Ww - 1) + 3
+            # relative_position_bias_table shape is (2*Wh-1 * 2*Ww-1 + 3, nH)
+            self.relative_position_bias_table = nn.Parameter(
+                torch.zeros(self.num_relative_distance, self.num_heads))
+
+            # get pair-wise relative position index for
+            # each token inside the window
+            coords_h = torch.arange(Wh)
+            coords_w = torch.arange(Ww)
+            # coords shape is (2, Wh, Ww)
+            coords = torch.stack(torch_meshgrid([coords_h, coords_w]))
+            # coords_flatten shape is (2, Wh*Ww)
+            coords_flatten = torch.flatten(coords, 1)
+            relative_coords = (
+                coords_flatten[:, :, None] - coords_flatten[:, None, :])
+            # relative_coords shape is (Wh*Ww, Wh*Ww, 2)
+            relative_coords = relative_coords.permute(1, 2, 0).contiguous()
+            # shift to start from 0
+            relative_coords[:, :, 0] += Wh - 1
+            relative_coords[:, :, 1] += Ww - 1
+            relative_coords[:, :, 0] *= 2 * Ww - 1
+            relative_position_index = torch.zeros(
+                size=(Wh * Ww + 1, ) * 2, dtype=relative_coords.dtype)
+            # relative_position_index shape is (Wh*Ww, Wh*Ww)
+            relative_position_index[1:, 1:] = relative_coords.sum(-1)
+            relative_position_index[0, 0:] = self.num_relative_distance - 3
+            relative_position_index[0:, 0] = self.num_relative_distance - 2
+            relative_position_index[0, 0] = self.num_relative_distance - 1
+
+            self.register_buffer('relative_position_index',
+                                 relative_position_index)
+        else:
+            self.window_size = None
+            self.relative_position_bias_table = None
+            self.relative_position_index = None
+
+    def init_weights(self):
+        super().init_weights()
+        if self.use_rel_pos_bias:
+            trunc_normal_(self.relative_position_bias_table, std=0.02)
+
+    def forward(self, x, rel_pos_bias=None):
+        """
+        Args:
+            x (tensor): input features with shape of (num_windows*B, N, C).
+            rel_pos_bias (tensor): input relative position bias with shape of
+                (num_heads, N, N).
+        """
+        B, N, C = x.shape
+
+        if self.bias == 'qv_bias':
+            k_bias = torch.zeros_like(self.v_bias, requires_grad=False)
+            qkv_bias = torch.cat((self.q_bias, k_bias, self.v_bias))
+            qkv = F.linear(input=x, weight=self.qkv.weight, bias=qkv_bias)
+        else:
+            qkv = self.qkv(x)
+
+        qkv = qkv.reshape(B, N, 3, self.num_heads, -1).permute(2, 0, 3, 1, 4)
+        q, k, v = qkv[0], qkv[1], qkv[2]
+        q = q * self.scale
+        attn = (q @ k.transpose(-2, -1))
+
+        if self.relative_position_bias_table is not None:
+            Wh = self.window_size[0]
+            Ww = self.window_size[1]
+            relative_position_bias = self.relative_position_bias_table[
+                self.relative_position_index.view(-1)].view(
+                    Wh * Ww + 1, Wh * Ww + 1, -1)
+            relative_position_bias = relative_position_bias.permute(
+                2, 0, 1).contiguous()  # nH, Wh*Ww, Wh*Ww
+            attn = attn + relative_position_bias.unsqueeze(0)
+
+        if rel_pos_bias is not None:
+            # use shared relative position bias
+            attn = attn + rel_pos_bias
+
+        attn = attn.softmax(dim=-1)
+        attn = self.attn_drop(attn)
+        x = (attn @ v).transpose(1, 2).reshape(B, N, C)
+        x = self.proj(x)
+        x = self.proj_drop(x)
+        return x
+
+
+class ChannelMultiheadAttention(BaseModule):
+    """Channel Multihead Self-attention Module.
+
+    This module implements channel multi-head attention that supports
+    different input dims and embed dims.
+
+    Args:
+        embed_dims (int): The embedding dimension.
+        num_heads (int): Parallel attention heads.
+        input_dims (int, optional): The input dimension, and if None,
+            use ``embed_dims``. Defaults to None.
+        attn_drop (float): Dropout rate of the dropout layer after the
+            attention calculation of query and key. Defaults to 0.
+        proj_drop (float): Dropout rate of the dropout layer after the
+            output projection. Defaults to 0.
+        dropout_layer (dict): The dropout config before adding the shortcut.
+            Defaults to ``dict(type='Dropout', drop_prob=0.)``.
+        qkv_bias (bool): If True, add a learnable bias to q, k, v.
+            Defaults to False.
+        proj_bias (bool): If True, add a learnable bias to output projection.
+            Defaults to True.
+        qk_scale_type (str): The scale type of qk scale.
+            Defaults to 'learnable'. It can be 'learnable', 'fixed' or 'none'.
+        qk_scale (float, optional): If ``qk_scale_type`` is set to 'none',
+            this should be specified with a valid float number.
+            Defaults to None.
+        v_shortcut (bool): Add a shortcut from value to output. It's usually
+            used if ``input_dims`` is different from ``embed_dims``.
+            Defaults to False.
+        init_cfg (dict, optional): The Config for initialization.
+            Defaults to None.
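+
+    A minimal usage sketch (the tensor shapes below are illustrative only):
+
+    Example:
+        >>> import torch
+        >>> attn = ChannelMultiheadAttention(embed_dims=64, num_heads=8)
+        >>> x = torch.rand(2, 196, 64)  # (B, N, C)
+        >>> attn(x).shape
+        torch.Size([2, 196, 64])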
+ """ + + def __init__(self, + embed_dims, + num_heads=8, + input_dims=None, + attn_drop=0., + proj_drop=0., + dropout_layer=dict(type='Dropout', drop_prob=0.), + qkv_bias=False, + proj_bias=True, + qk_scale_type='learnable', + qk_scale=None, + v_shortcut=False, + init_cfg=None): + super().__init__(init_cfg) + + self.input_dims = input_dims or embed_dims + self.embed_dims = embed_dims + self.num_heads = num_heads + self.v_shortcut = v_shortcut + + self.head_dims = embed_dims // num_heads + if qk_scale_type == 'learnable': + self.scale = nn.Parameter(torch.ones(num_heads, 1, 1)) + elif qk_scale_type == 'fixed': + self.scale = self.head_dims**-0.5 + elif qk_scale_type == 'none': + assert qk_scale is not None + self.scale = qk_scale + + self.qkv = nn.Linear(self.input_dims, embed_dims * 3, bias=qkv_bias) + self.attn_drop = nn.Dropout(attn_drop) + self.proj = nn.Linear(embed_dims, embed_dims, bias=proj_bias) + self.proj_drop = nn.Dropout(proj_drop) + + self.out_drop = build_dropout(dropout_layer) + + def forward(self, x): + B, N, _ = x.shape + qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, + self.head_dims).permute(2, 0, 3, 1, 4) + + q, k, v = [item.transpose(-2, -1) for item in [qkv[0], qkv[1], qkv[2]]] + + q, k = F.normalize(q, dim=-1), F.normalize(k, dim=-1) + + attn = (q @ k.transpose(-2, -1)) * self.scale + attn = attn.softmax(dim=-1) + + x = (attn @ v).permute(0, 3, 1, 2).reshape(B, N, self.embed_dims) + x = self.proj(x) + x = self.out_drop(self.proj_drop(x)) + + if self.v_shortcut: + x = qkv[2].squeeze(1) + x + return x + + +class LeAttention(BaseModule): + """LeViT Attention. Multi-head attention with attention bias, which is + proposed in `LeViT: a Vision Transformer in ConvNet’s Clothing for Faster + Inference`_ + + Args: + dim (int): Number of input channels. + num_heads (int): Number of attention heads. Default: 8. + key_dim (int): Dimension of key. Default: None. + attn_ratio (int): Ratio of attention heads. Default: 8. + resolution (tuple[int]): Input resolution. Default: (16, 16). + init_cfg (dict, optional): The Config for initialization. 
+ """ + + def __init__(self, + dim, + key_dim, + num_heads=8, + attn_ratio=4, + resolution=(14, 14), + init_cfg=None): + super().__init__(init_cfg=init_cfg) + # (h, w) + assert isinstance(resolution, tuple) and len(resolution) == 2 + self.num_heads = num_heads + self.scale = key_dim**-0.5 + self.key_dim = key_dim + self.nh_kd = nh_kd = key_dim * num_heads + self.d = int(attn_ratio * key_dim) + self.dh = int(attn_ratio * key_dim) * num_heads + self.attn_ratio = attn_ratio + h = self.dh + nh_kd * 2 + + self.norm = nn.LayerNorm(dim) + self.qkv = nn.Linear(dim, h) + self.proj = nn.Linear(self.dh, dim) + + points = list( + itertools.product(range(resolution[0]), range(resolution[1]))) + N = len(points) + attention_offsets = {} + idxs = [] + for p1 in points: + for p2 in points: + offset = (abs(p1[0] - p2[0]), abs(p1[1] - p2[1])) + if offset not in attention_offsets: + attention_offsets[offset] = len(attention_offsets) + idxs.append(attention_offsets[offset]) + self.attention_biases = torch.nn.Parameter( + torch.zeros(num_heads, len(attention_offsets))) + self.register_buffer( + 'attention_bias_idxs', + torch.LongTensor(idxs).view(N, N), + persistent=False) + + @torch.no_grad() + def train(self, mode=True): + super().train(mode) + if mode and hasattr(self, 'ab'): + del self.ab + else: + self.ab = self.attention_biases[:, self.attention_bias_idxs] + + def forward(self, x): # x (B,N,C) + B, N, _ = x.shape + + # Normalization + x = self.norm(x) + + qkv = self.qkv(x) + # (B, N, num_heads, d) + q, k, v = qkv.view(B, N, self.num_heads, + -1).split([self.key_dim, self.key_dim, self.d], + dim=3) + # (B, num_heads, N, d) + q = q.permute(0, 2, 1, 3) + k = k.permute(0, 2, 1, 3) + v = v.permute(0, 2, 1, 3) + + attn = ((q @ k.transpose(-2, -1)) * self.scale + + (self.attention_biases[:, self.attention_bias_idxs] + if self.training else self.ab)) + attn = attn.softmax(dim=-1) + x = (attn @ v).transpose(1, 2).reshape(B, N, self.dh) + x = self.proj(x) + return x + + +class CrossMultiheadAttention(BaseModule): + """Cross attention between queries and the union of keys and values. + + This module is different from ``MultiheadAttention``, for the attention + is computed between queries and the union of keys and values. + + Args: + embed_dims (int): The embedding dimension. + num_heads (int): Parallel attention heads. + qkv_bias (bool): If True, add a learnable bias to q, k, v. + Defaults to True. + qk_scale (float, optional): Override default qk scale of + ``head_dim ** -0.5`` if set. Defaults to None. + attn_drop (float): Dropout rate of the dropout layer after the + attention calculation of query and key. Defaults to 0. + proj_drop (float): Dropout rate of the dropout layer after the + output projection. Defaults to 0. + """ + + def __init__(self, + embed_dims: int, + num_heads: int = 8, + qkv_bias: bool = False, + qk_scale: float = None, + attn_drop: float = 0., + proj_drop: float = 0.) 
-> None:
+        super().__init__()
+        self.num_heads = num_heads
+        head_dim = embed_dims // num_heads
+        self.scale = qk_scale or head_dim**-0.5
+
+        self.q = nn.Linear(embed_dims, embed_dims, bias=False)
+        self.k = nn.Linear(embed_dims, embed_dims, bias=False)
+        self.v = nn.Linear(embed_dims, embed_dims, bias=False)
+
+        if qkv_bias:
+            self.q_bias = nn.Parameter(torch.zeros(embed_dims))
+            self.v_bias = nn.Parameter(torch.zeros(embed_dims))
+        else:
+            self.q_bias = None
+            self.k_bias = None
+            self.v_bias = None
+
+        self.attn_drop = nn.Dropout(attn_drop)
+        self.proj = nn.Linear(embed_dims, embed_dims)
+        self.proj_drop = nn.Dropout(proj_drop)
+
+    def forward(self,
+                x: torch.Tensor,
+                k: torch.Tensor = None,
+                v: torch.Tensor = None) -> torch.Tensor:
+        """Forward function."""
+        B, N, _ = x.shape
+
+        N_k = k.shape[1]
+        N_v = v.shape[1]
+
+        q_bias, k_bias, v_bias = None, None, None
+        if self.q_bias is not None:
+            q_bias = self.q_bias
+            k_bias = torch.zeros_like(self.v_bias, requires_grad=False)
+            v_bias = self.v_bias
+
+        q = F.linear(
+            input=x, weight=self.q.weight, bias=q_bias)  # (B, N_q, dim)
+        k = F.linear(
+            input=k, weight=self.k.weight, bias=k_bias)  # (B, N_k, dim)
+        v = F.linear(input=v, weight=self.v.weight, bias=v_bias)
+
+        q = q.reshape(B, N, 1, self.num_heads,
+                      -1).permute(2, 0, 3, 1,
+                                  4).squeeze(0)  # (B, num_heads, N_q, dim)
+        k = k.reshape(B, N_k, 1, self.num_heads,
+                      -1).permute(2, 0, 3, 1,
+                                  4).squeeze(0)  # (B, num_heads, N_k, dim)
+        v = v.reshape(B, N_v, 1, self.num_heads,
+                      -1).permute(2, 0, 3, 1,
+                                  4).squeeze(0)  # (B, num_heads, N_v, dim)
+
+        q = q * self.scale
+        attn = (q @ k.transpose(-2, -1))  # (B, N_head, N_q, N_k)
+
+        attn = attn.softmax(dim=-1)
+        attn = self.attn_drop(attn)
+
+        x = (attn @ v).transpose(1, 2).reshape(B, N, -1)
+        x = self.proj(x)
+        x = self.proj_drop(x)
+
+        return x
+
+
+class PromptMultiheadAttention(MultiheadAttention):
+    """Prompt Multihead Attention for MILAN.
+
+    This module is specific for the prompt encoder in MILAN. It will not
+    update the visible tokens from the encoder.
+
+    Args:
+        embed_dims (int): The embedding dimension.
+        num_heads (int): Parallel attention heads.
+        input_dims (int, optional): The input dimension, and if None,
+            use ``embed_dims``. Defaults to None.
+        attn_drop (float): Dropout rate of the dropout layer after the
+            attention calculation of query and key. Defaults to 0.
+        proj_drop (float): Dropout rate of the dropout layer after the
+            output projection. Defaults to 0.
+        dropout_layer (dict): The dropout config before adding the shortcut.
+            Defaults to ``dict(type='Dropout', drop_prob=0.)``.
+        qkv_bias (bool): If True, add a learnable bias to q, k, v.
+            Defaults to True.
+        qk_scale (float, optional): Override default qk scale of
+            ``head_dim ** -0.5`` if set. Defaults to None.
+        proj_bias (bool): If True, add a learnable bias to output projection.
+            Defaults to True.
+        v_shortcut (bool): Add a shortcut from value to output. It's usually
+            used if ``input_dims`` is different from ``embed_dims``.
+            Defaults to False.
+        use_layer_scale (bool): Whether to use layer scale. Defaults to False.
+        init_cfg (Union[List[dict], dict], optional): The Config for
+            initialization. Defaults to None.
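+
+    A minimal shape sketch (all sizes are illustrative; in MILAN the inputs
+    come from the masked image encoder):
+
+    Example:
+        >>> import torch
+        >>> attn = PromptMultiheadAttention(embed_dims=8, num_heads=2)
+        >>> mask_tokens = torch.rand(2, 4, 8)  # N x L_m x C
+        >>> visible = torch.rand(2, 5, 8)  # N x L_v x C (cls token included)
+        >>> ids_restore = torch.stack([torch.randperm(8) for _ in range(2)])
+        >>> attn(mask_tokens, visible, ids_restore).shape
+        torch.Size([2, 4, 8])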
+ """ + + def __init__(self, + embed_dims: int, + num_heads: int, + input_dims: Optional[int] = None, + attn_drop: float = 0, + proj_drop: float = 0, + dropout_layer: dict = dict(type='Dropout', drop_prob=0.), + qkv_bias: bool = True, + qk_scale: Optional[float] = None, + proj_bias: bool = True, + v_shortcut: bool = False, + use_layer_scale: bool = False, + init_cfg: Optional[Union[List[dict], dict]] = None) -> None: + super().__init__( + embed_dims=embed_dims, + num_heads=num_heads, + input_dims=input_dims, + attn_drop=attn_drop, + proj_drop=proj_drop, + dropout_layer=dropout_layer, + qkv_bias=qkv_bias, + qk_scale=qk_scale, + proj_bias=proj_bias, + v_shortcut=v_shortcut, + use_layer_scale=use_layer_scale, + init_cfg=init_cfg) + # no longer need qkv + del self.qkv + + # to project the mask tokens + self.q = nn.Linear(embed_dims, embed_dims, bias=qkv_bias) + # to project al the tokens + self.kv = nn.Linear(embed_dims, embed_dims * 2, bias=qkv_bias) + + def forward(self, x: torch.Tensor, visible_tokens: torch.Tensor, + ids_restore: torch.Tensor) -> torch.Tensor: + """Forward function for `PromptMultiheadAttention`. + + Args: + x (torch.Tensor): Mask token features with shape N x L_m x C. + visible_tokens (torch.Tensor): The visible tokens features from + encoder with shape N x L_v x C. + ids_restore (torch.Tensor): The ids of all tokens in the original + image with shape N x L. + + Returns: + torch Tensor: Output features with shape N x L x C. + """ + x_ = torch.cat([visible_tokens[:, 1:, :], x], dim=1) + assert x_.shape[1] == ids_restore.shape[1] + x_ = torch.gather( + x_, + dim=1, + index=ids_restore.unsqueeze(-1).repeat(1, 1, x.shape[-1])) + x_ = torch.cat([visible_tokens[:, :1, :], x_], dim=1) + + # full sequence shape + B, _, _ = x_.shape + q = self.q(x).reshape(B, x.shape[1], self.num_heads, + self.head_dims).permute(0, 2, 1, 3) + kv = self.kv(x_).reshape(B, x_.shape[1], 2, self.num_heads, + self.head_dims).permute(2, 0, 3, 1, 4) + k, v = kv[0], kv[1] + + attn_drop = self.attn_drop if self.training else 0. + attn = self.scaled_dot_product_attention(q, k, v, dropout_p=attn_drop) + x = attn.transpose(1, 2).reshape(B, x.shape[1], self.embed_dims) + + x = self.proj(x) + x = self.out_drop(self.gamma1(self.proj_drop(x))) + return x diff --git a/mmpretrain/models/utils/batch_augments/__init__.py b/mmpretrain/models/utils/batch_augments/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..2fbc4e179608767f667ca1075e5134dbecb8c38d --- /dev/null +++ b/mmpretrain/models/utils/batch_augments/__init__.py @@ -0,0 +1,7 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from .cutmix import CutMix +from .mixup import Mixup +from .resizemix import ResizeMix +from .wrapper import RandomBatchAugment + +__all__ = ('RandomBatchAugment', 'CutMix', 'Mixup', 'ResizeMix') diff --git a/mmpretrain/models/utils/batch_augments/cutmix.py b/mmpretrain/models/utils/batch_augments/cutmix.py new file mode 100644 index 0000000000000000000000000000000000000000..665427bf5e2ff3a5ae9d656e7d642db8b72acabb --- /dev/null +++ b/mmpretrain/models/utils/batch_augments/cutmix.py @@ -0,0 +1,157 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import List, Optional, Tuple + +import numpy as np +import torch + +from mmpretrain.registry import BATCH_AUGMENTS +from .mixup import Mixup + + +@BATCH_AUGMENTS.register_module() +class CutMix(Mixup): + r"""CutMix batch agumentation. + + CutMix is a method to improve the network's generalization capability. 
It's + proposed in `CutMix: Regularization Strategy to Train Strong Classifiers + with Localizable Features ` + + With this method, patches are cut and pasted among training images where + the ground truth labels are also mixed proportionally to the area of the + patches. + + Args: + alpha (float): Parameters for Beta distribution to generate the + mixing ratio. It should be a positive number. More details + can be found in :class:`Mixup`. + cutmix_minmax (List[float], optional): The min/max area ratio of the + patches. If not None, the bounding-box of patches is uniform + sampled within this ratio range, and the ``alpha`` will be ignored. + Otherwise, the bounding-box is generated according to the + ``alpha``. Defaults to None. + correct_lam (bool): Whether to apply lambda correction when cutmix bbox + clipped by image borders. Defaults to True. + + .. note :: + If the ``cutmix_minmax`` is None, how to generate the bounding-box of + patches according to the ``alpha``? + + First, generate a :math:`\lambda`, details can be found in + :class:`Mixup`. And then, the area ratio of the bounding-box + is calculated by: + + .. math:: + \text{ratio} = \sqrt{1-\lambda} + """ + + def __init__(self, + alpha: float, + cutmix_minmax: Optional[List[float]] = None, + correct_lam: bool = True): + super().__init__(alpha=alpha) + + self.cutmix_minmax = cutmix_minmax + self.correct_lam = correct_lam + + def rand_bbox_minmax( + self, + img_shape: Tuple[int, int], + count: Optional[int] = None) -> Tuple[int, int, int, int]: + """Min-Max CutMix bounding-box Inspired by Darknet cutmix + implementation. It generates a random rectangular bbox based on min/max + percent values applied to each dimension of the input image. + + Typical defaults for minmax are usually in the .2-.3 for min and + .8-.9 range for max. + + Args: + img_shape (tuple): Image shape as tuple + count (int, optional): Number of bbox to generate. Defaults to None + """ + assert len(self.cutmix_minmax) == 2 + img_h, img_w = img_shape + cut_h = np.random.randint( + int(img_h * self.cutmix_minmax[0]), + int(img_h * self.cutmix_minmax[1]), + size=count) + cut_w = np.random.randint( + int(img_w * self.cutmix_minmax[0]), + int(img_w * self.cutmix_minmax[1]), + size=count) + yl = np.random.randint(0, img_h - cut_h, size=count) + xl = np.random.randint(0, img_w - cut_w, size=count) + yu = yl + cut_h + xu = xl + cut_w + return yl, yu, xl, xu + + def rand_bbox(self, + img_shape: Tuple[int, int], + lam: float, + margin: float = 0., + count: Optional[int] = None) -> Tuple[int, int, int, int]: + """Standard CutMix bounding-box that generates a random square bbox + based on lambda value. This implementation includes support for + enforcing a border margin as percent of bbox dimensions. + + Args: + img_shape (tuple): Image shape as tuple + lam (float): Cutmix lambda value + margin (float): Percentage of bbox dimension to enforce as margin + (reduce amount of box outside image). Defaults to 0. + count (int, optional): Number of bbox to generate. 
Defaults to None
+        """
+        ratio = np.sqrt(1 - lam)
+        img_h, img_w = img_shape
+        cut_h, cut_w = int(img_h * ratio), int(img_w * ratio)
+        margin_y, margin_x = int(margin * cut_h), int(margin * cut_w)
+        cy = np.random.randint(0 + margin_y, img_h - margin_y, size=count)
+        cx = np.random.randint(0 + margin_x, img_w - margin_x, size=count)
+        yl = np.clip(cy - cut_h // 2, 0, img_h)
+        yh = np.clip(cy + cut_h // 2, 0, img_h)
+        xl = np.clip(cx - cut_w // 2, 0, img_w)
+        xh = np.clip(cx + cut_w // 2, 0, img_w)
+        return yl, yh, xl, xh
+
+    def cutmix_bbox_and_lam(self,
+                            img_shape: Tuple[int, int],
+                            lam: float,
+                            count: Optional[int] = None) -> tuple:
+        """Generate bbox and apply lambda correction.
+
+        Args:
+            img_shape (tuple): Image shape as tuple
+            lam (float): Cutmix lambda value
+            count (int, optional): Number of bbox to generate. Defaults to None
+        """
+        if self.cutmix_minmax is not None:
+            yl, yu, xl, xu = self.rand_bbox_minmax(img_shape, count=count)
+        else:
+            yl, yu, xl, xu = self.rand_bbox(img_shape, lam, count=count)
+        if self.correct_lam or self.cutmix_minmax is not None:
+            bbox_area = (yu - yl) * (xu - xl)
+            lam = 1. - bbox_area / float(img_shape[0] * img_shape[1])
+        return (yl, yu, xl, xu), lam
+
+    def mix(self, batch_inputs: torch.Tensor,
+            batch_scores: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]:
+        """Mix the batch inputs and batch one-hot format ground truth.
+
+        Args:
+            batch_inputs (Tensor): A batch of images tensor in the shape of
+                ``(N, C, H, W)``.
+            batch_scores (Tensor): A batch of one-hot format labels in the
+                shape of ``(N, num_classes)``.
+
+        Returns:
+            Tuple[Tensor, Tensor]: The mixed inputs and labels.
+        """
+        lam = np.random.beta(self.alpha, self.alpha)
+        batch_size = batch_inputs.size(0)
+        img_shape = batch_inputs.shape[-2:]
+        index = torch.randperm(batch_size)
+
+        (y1, y2, x1, x2), lam = self.cutmix_bbox_and_lam(img_shape, lam)
+        batch_inputs[:, :, y1:y2, x1:x2] = batch_inputs[index, :, y1:y2, x1:x2]
+        mixed_scores = lam * batch_scores + (1 - lam) * batch_scores[index, :]
+
+        return batch_inputs, mixed_scores
diff --git a/mmpretrain/models/utils/batch_augments/mixup.py b/mmpretrain/models/utils/batch_augments/mixup.py
new file mode 100644
index 0000000000000000000000000000000000000000..bedb2c3e5b6e62595e50f7494eeda7c14827b391
--- /dev/null
+++ b/mmpretrain/models/utils/batch_augments/mixup.py
@@ -0,0 +1,65 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+from typing import Tuple
+
+import numpy as np
+import torch
+
+from mmpretrain.registry import BATCH_AUGMENTS
+
+
+@BATCH_AUGMENTS.register_module()
+class Mixup:
+    r"""Mixup batch augmentation.
+
+    Mixup is a method that reduces the memorization of corrupt labels and
+    increases the robustness to adversarial examples. It's proposed in
+    `mixup: Beyond Empirical Risk Minimization
+    `_
+
+    Args:
+        alpha (float): Parameters for Beta distribution to generate the
+            mixing ratio. It should be a positive number. More details
+            are in the note.
+
+    Note:
+        The :math:`\alpha` (``alpha``) determines a random distribution
+        :math:`Beta(\alpha, \alpha)`. For each batch of data, we sample
+        a mixing ratio (marked as :math:`\lambda`, ``lam``) from the random
+        distribution.
+    """
+
+    def __init__(self, alpha: float):
+        assert isinstance(alpha, float) and alpha > 0
+
+        self.alpha = alpha
+
+    def mix(self, batch_inputs: torch.Tensor,
+            batch_scores: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]:
+        """Mix the batch inputs and batch one-hot format ground truth.
+
+        Args:
+            batch_inputs (Tensor): A batch of images tensor in the shape of
+                ``(N, C, H, W)``.
+            batch_scores (Tensor): A batch of one-hot format labels in the
+                shape of ``(N, num_classes)``.
+
+        Returns:
+            Tuple[Tensor, Tensor]: The mixed inputs and labels.
+        """
+        lam = np.random.beta(self.alpha, self.alpha)
+        batch_size = batch_inputs.size(0)
+        index = torch.randperm(batch_size)
+
+        mixed_inputs = lam * batch_inputs + (1 - lam) * batch_inputs[index, :]
+        mixed_scores = lam * batch_scores + (1 - lam) * batch_scores[index, :]
+
+        return mixed_inputs, mixed_scores
+
+    def __call__(self, batch_inputs: torch.Tensor, batch_score: torch.Tensor):
+        """Mix the batch inputs and batch data samples."""
+        assert batch_score.ndim == 2, \
+            'The input `batch_score` should be a one-hot format tensor, '\
+            'whose shape should be ``(N, num_classes)``.'
+
+        mixed_inputs, mixed_score = self.mix(batch_inputs, batch_score.float())
+        return mixed_inputs, mixed_score
diff --git a/mmpretrain/models/utils/batch_augments/resizemix.py b/mmpretrain/models/utils/batch_augments/resizemix.py
new file mode 100644
index 0000000000000000000000000000000000000000..89cfb72033e75065502a594f17124eb1f471116f
--- /dev/null
+++ b/mmpretrain/models/utils/batch_augments/resizemix.py
@@ -0,0 +1,95 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+from typing import List, Optional, Tuple
+
+import numpy as np
+import torch
+import torch.nn.functional as F
+
+from mmpretrain.registry import BATCH_AUGMENTS
+from .cutmix import CutMix
+
+
+@BATCH_AUGMENTS.register_module()
+class ResizeMix(CutMix):
+    r"""ResizeMix Random Paste layer for a batch of data.
+
+    The ResizeMix will resize an image to a small patch and paste it on
+    another image. It's proposed in `ResizeMix: Mixing Data with Preserved
+    Object Information and True Labels `_
+
+    Args:
+        alpha (float): Parameters for Beta distribution to generate the
+            mixing ratio. It should be a positive number. More details
+            can be found in :class:`Mixup`.
+        lam_min (float): The minimum value of lam. Defaults to 0.1.
+        lam_max (float): The maximum value of lam. Defaults to 0.8.
+        interpolation (str): algorithm used for upsampling:
+            'nearest' | 'linear' | 'bilinear' | 'bicubic' | 'trilinear' |
+            'area'. Defaults to 'bilinear'.
+        prob (float): The probability to execute resizemix. It should be in
+            range [0, 1]. Defaults to 1.0.
+        cutmix_minmax (List[float], optional): The min/max area ratio of the
+            patches. If not None, the bounding-box of patches is uniform
+            sampled within this ratio range, and the ``alpha`` will be ignored.
+            Otherwise, the bounding-box is generated according to the
+            ``alpha``. Defaults to None.
+        correct_lam (bool): Whether to apply lambda correction when cutmix bbox
+            clipped by image borders. Defaults to True.
+        **kwargs: Any other parameters accepted by :class:`CutMix`.
+
+    Note:
+        The :math:`\lambda` (``lam``) is the mixing ratio. It's a random
+        variable which follows :math:`Beta(\alpha, \alpha)` and is mapped
+        to the range [``lam_min``, ``lam_max``].
+
+        .. math::
+            \lambda = Beta(\alpha, \alpha) \cdot
+            (\lambda_{max} - \lambda_{min}) + \lambda_{min}
+
+        And the resize ratio of source images is calculated by :math:`\lambda`:
+
+        .. math::
+            \text{ratio} = \sqrt{1-\lambda}
+    """
+
+    def __init__(self,
+                 alpha: float,
+                 lam_min: float = 0.1,
+                 lam_max: float = 0.8,
+                 interpolation: str = 'bilinear',
+                 cutmix_minmax: Optional[List[float]] = None,
+                 correct_lam: bool = True):
+        super().__init__(
+            alpha=alpha, cutmix_minmax=cutmix_minmax, correct_lam=correct_lam)
+        self.lam_min = lam_min
+        self.lam_max = lam_max
+        self.interpolation = interpolation
+
+    def mix(self, batch_inputs: torch.Tensor,
+            batch_scores: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]:
+        """Mix the batch inputs and batch one-hot format ground truth.
+
+        Args:
+            batch_inputs (Tensor): A batch of images tensor in the shape of
+                ``(N, C, H, W)``.
+            batch_scores (Tensor): A batch of one-hot format labels in the
+                shape of ``(N, num_classes)``.
+
+        Returns:
+            Tuple[Tensor, Tensor]: The mixed inputs and labels.
+        """
+        lam = np.random.beta(self.alpha, self.alpha)
+        lam = lam * (self.lam_max - self.lam_min) + self.lam_min
+        img_shape = batch_inputs.shape[-2:]
+        batch_size = batch_inputs.size(0)
+        index = torch.randperm(batch_size)
+
+        (y1, y2, x1, x2), lam = self.cutmix_bbox_and_lam(img_shape, lam)
+        batch_inputs[:, :, y1:y2, x1:x2] = F.interpolate(
+            batch_inputs[index],
+            size=(y2 - y1, x2 - x1),
+            mode=self.interpolation,
+            align_corners=False)
+        mixed_scores = lam * batch_scores + (1 - lam) * batch_scores[index, :]
+
+        return batch_inputs, mixed_scores
diff --git a/mmpretrain/models/utils/batch_augments/wrapper.py b/mmpretrain/models/utils/batch_augments/wrapper.py
new file mode 100644
index 0000000000000000000000000000000000000000..10e5304c3ca1a42428870ea5a00416007ca2e35c
--- /dev/null
+++ b/mmpretrain/models/utils/batch_augments/wrapper.py
@@ -0,0 +1,74 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+from typing import Callable, Union
+
+import numpy as np
+import torch
+
+from mmpretrain.registry import BATCH_AUGMENTS
+
+
+class RandomBatchAugment:
+    """Randomly choose one batch augmentation to apply.
+
+    Args:
+        augments (Callable | dict | list): configs of batch
+            augmentations.
+        probs (float | List[float] | None): The probabilities of each batch
+            augmentation. If None, choose evenly. Defaults to None.
+
+    Example:
+        >>> import torch
+        >>> import torch.nn.functional as F
+        >>> from mmpretrain.models import RandomBatchAugment
+        >>> augments_cfg = [
+        ...     dict(type='CutMix', alpha=1.),
+        ...     dict(type='Mixup', alpha=1.)
+        ... ]
+        >>> batch_augment = RandomBatchAugment(augments_cfg, probs=[0.5, 0.3])
+        >>> imgs = torch.rand(16, 3, 32, 32)
+        >>> label = F.one_hot(torch.randint(0, 10, (16, )), num_classes=10)
+        >>> imgs, label = batch_augment(imgs, label)
+
+    .. note ::
+
+        To decide which batch augmentation will be used, it picks one of
+        ``augments`` based on the probabilities. In the example above, the
+        probability to use CutMix is 0.5, to use Mixup is 0.3, and to do
+        nothing is 0.2.
+    """
+
+    def __init__(self, augments: Union[Callable, dict, list], probs=None):
+        if not isinstance(augments, (tuple, list)):
+            augments = [augments]
+
+        self.augments = []
+        for aug in augments:
+            if isinstance(aug, dict):
+                self.augments.append(BATCH_AUGMENTS.build(aug))
+            else:
+                self.augments.append(aug)
+
+        if isinstance(probs, float):
+            probs = [probs]
+
+        if probs is not None:
+            assert len(augments) == len(probs), \
+                '``augments`` and ``probs`` must have same lengths. ' \
+                f'Got {len(augments)} vs {len(probs)}.'
+            assert sum(probs) <= 1, \
+                'The total probability of batch augments exceeds 1.'
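+            # The remaining probability mass is reserved for an "identity"
+            # option (``None``), which returns the batch unchanged.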
+ self.augments.append(None) + probs.append(1 - sum(probs)) + + self.probs = probs + + def __call__(self, batch_input: torch.Tensor, batch_score: torch.Tensor): + """Randomly apply batch augmentations to the batch inputs and batch + data samples.""" + aug_index = np.random.choice(len(self.augments), p=self.probs) + aug = self.augments[aug_index] + + if aug is not None: + return aug(batch_input, batch_score) + else: + return batch_input, batch_score.float() diff --git a/mmpretrain/models/utils/batch_shuffle.py b/mmpretrain/models/utils/batch_shuffle.py new file mode 100644 index 0000000000000000000000000000000000000000..a0b03c5fec5f99295daed2872feff73dfc238140 --- /dev/null +++ b/mmpretrain/models/utils/batch_shuffle.py @@ -0,0 +1,66 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Tuple + +import torch +from mmengine.dist import all_gather, broadcast, get_rank + + +@torch.no_grad() +def batch_shuffle_ddp(x: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]: + """Batch shuffle, for making use of BatchNorm. + + Args: + x (torch.Tensor): Data in each GPU. + + Returns: + Tuple[torch.Tensor, torch.Tensor]: Output of shuffle operation. + - x_gather[idx_this]: Shuffled data. + - idx_unshuffle: Index for restoring. + """ + # gather from all gpus + batch_size_this = x.shape[0] + x_gather = torch.cat(all_gather(x), dim=0) + batch_size_all = x_gather.shape[0] + + num_gpus = batch_size_all // batch_size_this + + # random shuffle index + idx_shuffle = torch.randperm(batch_size_all) + + # broadcast to all gpus + broadcast(idx_shuffle, src=0) + + # index for restoring + idx_unshuffle = torch.argsort(idx_shuffle) + + # shuffled index for this gpu + gpu_idx = get_rank() + idx_this = idx_shuffle.view(num_gpus, -1)[gpu_idx] + + return x_gather[idx_this], idx_unshuffle + + +@torch.no_grad() +def batch_unshuffle_ddp(x: torch.Tensor, + idx_unshuffle: torch.Tensor) -> torch.Tensor: + """Undo batch shuffle. + + Args: + x (torch.Tensor): Data in each GPU. + idx_unshuffle (torch.Tensor): Index for restoring. + + Returns: + torch.Tensor: Output of unshuffle operation. + """ + # gather from all gpus + batch_size_this = x.shape[0] + x_gather = torch.cat(all_gather(x), dim=0) + batch_size_all = x_gather.shape[0] + + num_gpus = batch_size_all // batch_size_this + + # restored index for this gpu + gpu_idx = get_rank() + idx_this = idx_unshuffle.view(num_gpus, -1)[gpu_idx] + + return x_gather[idx_this] diff --git a/mmpretrain/models/utils/box_utils.py b/mmpretrain/models/utils/box_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..79db516c990f51a7c952404d932b6de022684fb4 --- /dev/null +++ b/mmpretrain/models/utils/box_utils.py @@ -0,0 +1,56 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +import torchvision.ops.boxes as boxes + + +def box_cxcywh_to_xyxy(x): + x_c, y_c, w, h = x.unbind(-1) + b = [(x_c - 0.5 * w), (y_c - 0.5 * h), (x_c + 0.5 * w), (y_c + 0.5 * h)] + return torch.stack(b, dim=-1) + + +def box_xyxy_to_cxcywh(x): + x0, y0, x1, y1 = x.unbind(-1) + b = [(x0 + x1) / 2.0, (y0 + y1) / 2.0, (x1 - x0), (y1 - y0)] + return torch.stack(b, dim=-1) + + +def box_iou(boxes1, boxes2): + """Return intersection-over-union (Jaccard index) between two sets of + boxes. + + Both sets of boxes are expected to be in ``(x1, y1, x2, y2)`` format with + ``0 <= x1 < x2`` and ``0 <= y1 < y2``. 
+ + Args: + boxes1 (Tensor[N, 4]): first set of boxes + boxes2 (Tensor[M, 4]): second set of boxes + + Returns: + Tensor[N, M]: the NxM matrix containing the pairwise IoU values for + every element in boxes1 and boxes2 + """ + return boxes.box_iou(boxes1, boxes2) + + +def generalized_box_iou(boxes1, boxes2): + """Return generalized intersection-over-union (Jaccard index) between two + sets of boxes. + + Both sets of boxes are expected to be in ``(x1, y1, x2, y2)`` format with + ``0 <= x1 < x2`` and ``0 <= y1 < y2``. + + Args: + boxes1 (Tensor[N, 4]): first set of boxes + boxes2 (Tensor[M, 4]): second set of boxes + + Returns: + Tensor[N, M]: the NxM matrix containing the pairwise generalized IoU + values for every element in boxes1 and boxes2 + """ + # degenerate boxes gives inf / nan results + # so do an early check + assert (boxes1[:, 2:] >= boxes1[:, :2]).all() + assert (boxes2[:, 2:] >= boxes2[:, :2]).all() + + return boxes.generalized_box_iou(boxes1, boxes2) diff --git a/mmpretrain/models/utils/channel_shuffle.py b/mmpretrain/models/utils/channel_shuffle.py new file mode 100644 index 0000000000000000000000000000000000000000..27006a8065db35a14c4207ce6613104374b064ad --- /dev/null +++ b/mmpretrain/models/utils/channel_shuffle.py @@ -0,0 +1,29 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch + + +def channel_shuffle(x, groups): + """Channel Shuffle operation. + + This function enables cross-group information flow for multiple groups + convolution layers. + + Args: + x (Tensor): The input tensor. + groups (int): The number of groups to divide the input tensor + in the channel dimension. + + Returns: + Tensor: The output tensor after channel shuffle operation. + """ + + batch_size, num_channels, height, width = x.size() + assert (num_channels % groups == 0), ('num_channels should be ' + 'divisible by groups') + channels_per_group = num_channels // groups + + x = x.view(batch_size, groups, channels_per_group, height, width) + x = torch.transpose(x, 1, 2).contiguous() + x = x.view(batch_size, -1, height, width) + + return x diff --git a/mmpretrain/models/utils/clip_generator_helper.py b/mmpretrain/models/utils/clip_generator_helper.py new file mode 100644 index 0000000000000000000000000000000000000000..4f67f0ed6976585a20e15787fc6b94c41082d33d --- /dev/null +++ b/mmpretrain/models/utils/clip_generator_helper.py @@ -0,0 +1,394 @@ +# Copyright (c) OpenMMLab. All rights reserved. +# Modified from https://github.com/zejiangh/MILAN +from collections import OrderedDict +from typing import Optional, Tuple, Union + +import numpy as np +import torch +from mmengine.logging import MMLogger +from torch import nn + +from mmpretrain.registry import MODELS + + +class LayerNorm(nn.LayerNorm): + """Subclass torch's LayerNorm to handle fp16.""" + + def forward(self, x: torch.Tensor) -> torch.Tensor: + """Forward function.""" + orig_type = x.dtype + ret = super().forward(x.type(torch.float32)) + return ret.type(orig_type) + + +@MODELS.register_module() +class QuickGELU(nn.Module): + """A faster version of GELU.""" + + def forward(self, x: torch.Tensor) -> torch.Tensor: + """Forward function.""" + return x * torch.sigmoid(1.702 * x) + + +class ResidualAttentionBlock(nn.Module): + """Residual Attention Block (RAB). + + This module implements the same function as the MultiheadAttention, + but with a different interface, which is mainly used + in CLIP. + + Args: + d_model (int): The feature dimension. + n_head (int): The number of attention heads. 
+ attn_mask (torch.Tensor, optional): The attention mask. + Defaults to None. + """ + + def __init__(self, + d_model: int, + n_head: int, + attn_mask: Optional[torch.Tensor] = None, + return_attention: bool = False) -> None: + super().__init__() + + self.attn = nn.MultiheadAttention(d_model, n_head) + self.ln_1 = LayerNorm(d_model) + self.mlp = nn.Sequential( + OrderedDict([('c_fc', nn.Linear(d_model, d_model * 4)), + ('gelu', QuickGELU()), + ('c_proj', nn.Linear(d_model * 4, d_model))])) + self.ln_2 = LayerNorm(d_model) + self.attn_mask = attn_mask + + self.return_attention = return_attention + + def attention(self, x: torch.Tensor) -> torch.Tensor: + """Attention function.""" + self.attn_mask = self.attn_mask.to( + dtype=x.dtype, + device=x.device) if self.attn_mask is not None else None + if self.return_attention: + return self.attn( + x, + x, + x, + need_weights=self.return_attention, + attn_mask=self.attn_mask) + else: + return self.attn( + x, + x, + x, + need_weights=self.return_attention, + attn_mask=self.attn_mask)[0] + + def forward( + self, x: torch.Tensor + ) -> Union[torch.Tensor, Tuple[torch.Tensor, torch.Tensor]]: + """Forward function.""" + if self.return_attention: + x_, attention = self.attention(self.ln_1(x)) + x = x + x_ + x = x + self.mlp(self.ln_2(x)) + return x, attention + else: + x = x + self.attention(self.ln_1(x)) + x = x + self.mlp(self.ln_2(x)) + return x + + +class Transformer(nn.Module): + """Transformer. + + Both visual and text branches use this transformer. + + Args: + width (int): The feature dimension. + layers (int): The number of layers. + heads (int): The number of attention heads. + attn_mask (torch.Tensor, optional): The attention mask. + """ + + def __init__(self, + width: int, + layers: int, + heads: int, + attn_mask: Optional[torch.Tensor] = None) -> None: + super().__init__() + self.width = width + self.layers = layers + self.resblocks = nn.ModuleList() + for _ in range(layers - 1): + self.resblocks.append( + ResidualAttentionBlock(width, heads, attn_mask)) + self.resblocks.append( + ResidualAttentionBlock( + width, heads, attn_mask, return_attention=True)) + + def forward( + self, x: torch.Tensor + ) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: + """Forward function.""" + z = [] + for idx, blk in enumerate(self.resblocks): + if idx < self.layers - 1: + x = blk(x) + z.append(x.permute(1, 0, 2)) + else: + x, attention = blk(x) + z.append(x.permute(1, 0, 2)) + return x, attention, z + + +class VisionTransformer(nn.Module): + """Vision Transformer for CLIP. + + Args: + input_resolution (int): The image size. + patch_size (int): The patch size. + width (int): The feature dimension. + layers (int): The number of layers. + heads (int): The number of attention heads. + out_dim (int): The output dimension. + fineturn (bool): Whether to fineturn the model. + average_target (bool): Whether to average the target. 
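As a quick shape check for the ResidualAttentionBlock above: it follows nn.MultiheadAttention's default (seq_len, batch, dim) layout and preserves the input shape. The sizes below are illustrative, and the import path assumes the module layout in this diff:

```python
import torch
from mmpretrain.models.utils.clip_generator_helper import ResidualAttentionBlock

blk = ResidualAttentionBlock(d_model=512, n_head=8)  # no attention mask
x = torch.randn(197, 2, 512)                         # (seq_len, batch, dim)
print(blk(x).shape)                                  # torch.Size([197, 2, 512])
```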
+ """ + + def __init__(self, + input_resolution: int, + patch_size: int, + width: int, + layers: int, + heads: int, + output_dim: int, + finetune=False, + average_targets: int = 1) -> None: + super().__init__() + self.input_resolution = input_resolution + self.output_dim = output_dim + self.conv1 = nn.Conv2d( + in_channels=3, + out_channels=width, + kernel_size=patch_size, + stride=patch_size, + bias=False) + + scale = width**-0.5 + self.class_embedding = nn.Parameter(scale * torch.randn(width)) + self.positional_embedding = nn.Parameter(scale * torch.randn( + (input_resolution // patch_size)**2 + 1, width)) + self.ln_pre = LayerNorm(width) + + self.transformer = Transformer(width, layers, heads) + + self.finetune = finetune + if finetune is False: + self.ln_post = LayerNorm(width) + self.proj = nn.Parameter(scale * torch.randn(width, output_dim)) + + self.average_targets = average_targets + + def forward(self, x: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]: + """Forward function.""" + x = self.conv1(x) # shape = [*, width, grid, grid] + x = x.reshape(x.shape[0], x.shape[1], + -1) # shape = [*, width, grid ** 2] + x = x.permute(0, 2, 1) # shape = [*, grid ** 2, width] + x = torch.cat([ + self.class_embedding.to(x.dtype) + torch.zeros( + x.shape[0], 1, x.shape[-1], dtype=x.dtype, device=x.device), x + ], + dim=1) # shape = [*, grid ** 2 + 1, width] + x = x + self.positional_embedding.to(x.dtype) + x = self.ln_pre(x) + + x = x.permute(1, 0, 2) # NLD -> LND + x, attention, z = self.transformer(x) + x = x.permute(1, 0, 2) # LND -> NLD + + x = self.ln_post(x) + if self.proj is not None: + x = x @ self.proj + + return x, attention + + +class CLIP(nn.Module): + """CLIP. + + Args: + embed_dim (int): The embedding dimension. + image_resolution (int): The image size. + vision_layers (int): The number of layers in the vision transformer. + vision_width (int): The feature dimension in the vision transformer. + vision_patch_size (int): The patch size in the vision transformer. + context_length (int): The context length. + vocab_size (int): The vocabulary size. + transformer_width (int): The feature dimension in the text transformer. + transformer_heads (int): The number of attention heads in the + text transformer. + transformer_layers (int): The number of layers in the text transformer. + fineturn (bool): Whether to fineturn the model. + average_target (bool): Whether to average the target. 
+ """ + + def __init__( + self, + embed_dim: int, + image_resolution: int, + vision_layers: Union[Tuple[int, int, int, int], int], + vision_width: int, + vision_patch_size: int, + context_length: int, + vocab_size: int, + transformer_width: int, + transformer_heads: int, + transformer_layers: int, + finetune: bool = False, + average_targets: int = 1, + ) -> None: + super().__init__() + + self.context_length = context_length + + vision_heads = vision_width // 64 + self.visual = VisionTransformer( + input_resolution=image_resolution, + patch_size=vision_patch_size, + width=vision_width, + layers=vision_layers, + heads=vision_heads, + output_dim=embed_dim, + finetune=finetune, + average_targets=average_targets, + ) + + self.transformer = Transformer( + width=transformer_width, + layers=transformer_layers, + heads=transformer_heads, + attn_mask=self.build_attention_mask()) + + self.vocab_size = vocab_size + self.token_embedding = nn.Embedding(vocab_size, transformer_width) + self.positional_embedding = nn.Parameter( + torch.empty(self.context_length, transformer_width)) + self.ln_final = LayerNorm(transformer_width) + + self.text_projection = nn.Parameter( + torch.empty(transformer_width, embed_dim)) + self.logit_scale = nn.Parameter(torch.ones([]) * np.log(1 / 0.07)) + + self.initialize_parameters() + + def initialize_parameters(self) -> None: + """Initialize the parameters. + + The pretrained weight will override the initialized parameters by this + function. + """ + nn.init.normal_(self.token_embedding.weight, std=0.02) + nn.init.normal_(self.positional_embedding, std=0.01) + + proj_std = (self.transformer.width**-0.5) * ( + (2 * self.transformer.layers)**-0.5) + attn_std = self.transformer.width**-0.5 + fc_std = (2 * self.transformer.width)**-0.5 + for block in self.transformer.resblocks: + nn.init.normal_(block.attn.in_proj_weight, std=attn_std) + nn.init.normal_(block.attn.out_proj.weight, std=proj_std) + nn.init.normal_(block.mlp.c_fc.weight, std=fc_std) + nn.init.normal_(block.mlp.c_proj.weight, std=proj_std) + + if self.text_projection is not None: + nn.init.normal_( + self.text_projection, std=self.transformer.width**-0.5) + + def build_attention_mask(self) -> torch.Tensor: + """Build the attention mask.""" + # lazily create causal attention mask, with full attention between the + # vision tokens pytorch uses additive attention mask; fill with -inf + mask = torch.empty(self.context_length, self.context_length) + mask.fill_(float('-inf')) + mask.triu_(1) # zero out the lower diagonal + return mask + + @property + def dtype(self) -> torch.dtype: + """Get the dtype.""" + return self.visual.conv1.weight.dtype + + def encode_image(self, + image: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]: + """Encode the image. + + Get the feature and attention mask from the last layer of the visual + branch of CLIP. + + Args: + image (torch.Tensor): The image tensor with shape NCHW. + + Returns: + Tuple[torch.Tensor, torch.Tensor]: The feature and attention mask. + """ + return self.visual(image.type(self.dtype)) + + +def build_clip_model(state_dict: dict, + finetune: bool = False, + average_targets: int = 1) -> nn.Module: + """Build the CLIP model. + + Args: + state_dict (dict): The pretrained state dict. + finetune (bool): Whether to fineturn the model. + average_targets (bool): Whether to average the target. + + Returns: + nn.Module: The CLIP model. 
+ """ + vit = 'visual.proj' in state_dict + + if vit: + vision_width = state_dict['visual.conv1.weight'].shape[0] + vision_layers = len([ + k for k in state_dict.keys() + if k.startswith('visual.') and k.endswith('.attn.in_proj_weight') + ]) + vision_patch_size = state_dict['visual.conv1.weight'].shape[-1] + grid_size = round( + (state_dict['visual.positional_embedding'].shape[0] - 1)**0.5) + image_resolution = vision_patch_size * grid_size + + embed_dim = state_dict['text_projection'].shape[1] + context_length = state_dict['positional_embedding'].shape[0] + vocab_size = state_dict['token_embedding.weight'].shape[0] + transformer_width = state_dict['ln_final.weight'].shape[0] + transformer_heads = transformer_width // 64 + transformer_layers = len( + set( + k.split('.')[2] for k in state_dict + if k.startswith('transformer.resblocks'))) + + model = CLIP( + embed_dim, + image_resolution, + vision_layers, + vision_width, + vision_patch_size, + context_length, + vocab_size, + transformer_width, + transformer_heads, + transformer_layers, + finetune, + average_targets, + ) + + for key in ['input_resolution', 'context_length', 'vocab_size']: + if key in state_dict: + del state_dict[key] + + msg = model.load_state_dict(state_dict, strict=False) + MMLogger.get_current_instance().info(f'Load CLIP model: {msg}') + return model.eval() diff --git a/mmpretrain/models/utils/data_preprocessor.py b/mmpretrain/models/utils/data_preprocessor.py new file mode 100644 index 0000000000000000000000000000000000000000..c407bd4c9361b9fae329854d4a36dab929fef143 --- /dev/null +++ b/mmpretrain/models/utils/data_preprocessor.py @@ -0,0 +1,620 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import math +from numbers import Number +from typing import List, Optional, Sequence, Tuple, Union + +import torch +import torch.nn.functional as F +from mmengine.model import (BaseDataPreprocessor, ImgDataPreprocessor, + stack_batch) + +from mmpretrain.registry import MODELS +from mmpretrain.structures import (DataSample, MultiTaskDataSample, + batch_label_to_onehot, cat_batch_labels, + tensor_split) +from .batch_augments import RandomBatchAugment + + +@MODELS.register_module() +class ClsDataPreprocessor(BaseDataPreprocessor): + """Image pre-processor for classification tasks. + + Comparing with the :class:`mmengine.model.ImgDataPreprocessor`, + + 1. It won't do normalization if ``mean`` is not specified. + 2. It does normalization and color space conversion after stacking batch. + 3. It supports batch augmentations like mixup and cutmix. + + It provides the data pre-processing as follows + + - Collate and move data to the target device. + - Pad inputs to the maximum size of current batch with defined + ``pad_value``. The padding size can be divisible by a defined + ``pad_size_divisor`` + - Stack inputs to batch_inputs. + - Convert inputs from bgr to rgb if the shape of input is (3, H, W). + - Normalize image with defined std and mean. + - Do batch augmentations like Mixup and Cutmix during training. + + Args: + mean (Sequence[Number], optional): The pixel mean of R, G, B channels. + Defaults to None. + std (Sequence[Number], optional): The pixel standard deviation of + R, G, B channels. Defaults to None. + pad_size_divisor (int): The size of padded image should be + divisible by ``pad_size_divisor``. Defaults to 1. + pad_value (Number): The padded pixel value. Defaults to 0. + to_rgb (bool): whether to convert image from BGR to RGB. + Defaults to False. 
+ to_onehot (bool): Whether to generate one-hot format gt-labels and set + to data samples. Defaults to False. + num_classes (int, optional): The number of classes. Defaults to None. + batch_augments (dict, optional): The batch augmentations settings, + including "augments" and "probs". For more details, see + :class:`mmpretrain.models.RandomBatchAugment`. + """ + + def __init__(self, + mean: Sequence[Number] = None, + std: Sequence[Number] = None, + pad_size_divisor: int = 1, + pad_value: Number = 0, + to_rgb: bool = False, + to_onehot: bool = False, + num_classes: Optional[int] = None, + batch_augments: Optional[dict] = None): + super().__init__() + self.pad_size_divisor = pad_size_divisor + self.pad_value = pad_value + self.to_rgb = to_rgb + self.to_onehot = to_onehot + self.num_classes = num_classes + + if mean is not None: + assert std is not None, 'To enable the normalization in ' \ + 'preprocessing, please specify both `mean` and `std`.' + # Enable the normalization in preprocessing. + self._enable_normalize = True + self.register_buffer('mean', + torch.tensor(mean).view(-1, 1, 1), False) + self.register_buffer('std', + torch.tensor(std).view(-1, 1, 1), False) + else: + self._enable_normalize = False + + if batch_augments: + self.batch_augments = RandomBatchAugment(**batch_augments) + if not self.to_onehot: + from mmengine.logging import MMLogger + MMLogger.get_current_instance().info( + 'Because batch augmentations are enabled, the data ' + 'preprocessor automatically enables the `to_onehot` ' + 'option to generate one-hot format labels.') + self.to_onehot = True + else: + self.batch_augments = None + + def forward(self, data: dict, training: bool = False) -> dict: + """Perform normalization, padding, bgr2rgb conversion and batch + augmentation based on ``BaseDataPreprocessor``. + + Args: + data (dict): data sampled from dataloader. + training (bool): Whether to enable training time augmentation. + + Returns: + dict: Data in the same format as the model input. + """ + inputs = self.cast_data(data['inputs']) + + if isinstance(inputs, torch.Tensor): + # The branch if use `default_collate` as the collate_fn in the + # dataloader. + + # ------ To RGB ------ + if self.to_rgb and inputs.size(1) == 3: + inputs = inputs.flip(1) + + # -- Normalization --- + inputs = inputs.float() + if self._enable_normalize: + inputs = (inputs - self.mean) / self.std + + # ------ Padding ----- + if self.pad_size_divisor > 1: + h, w = inputs.shape[-2:] + + target_h = math.ceil( + h / self.pad_size_divisor) * self.pad_size_divisor + target_w = math.ceil( + w / self.pad_size_divisor) * self.pad_size_divisor + pad_h = target_h - h + pad_w = target_w - w + inputs = F.pad(inputs, (0, pad_w, 0, pad_h), 'constant', + self.pad_value) + else: + # The branch if use `pseudo_collate` as the collate_fn in the + # dataloader. 
+ + processed_inputs = [] + for input_ in inputs: + # ------ To RGB ------ + if self.to_rgb and input_.size(0) == 3: + input_ = input_.flip(0) + + # -- Normalization --- + input_ = input_.float() + if self._enable_normalize: + input_ = (input_ - self.mean) / self.std + + processed_inputs.append(input_) + # Combine padding and stack + inputs = stack_batch(processed_inputs, self.pad_size_divisor, + self.pad_value) + + data_samples = data.get('data_samples', None) + sample_item = data_samples[0] if data_samples is not None else None + + if isinstance(sample_item, DataSample): + batch_label = None + batch_score = None + + if 'gt_label' in sample_item: + gt_labels = [sample.gt_label for sample in data_samples] + batch_label, label_indices = cat_batch_labels(gt_labels) + batch_label = batch_label.to(self.device) + if 'gt_score' in sample_item: + gt_scores = [sample.gt_score for sample in data_samples] + batch_score = torch.stack(gt_scores).to(self.device) + elif self.to_onehot and 'gt_label' in sample_item: + assert batch_label is not None, \ + 'Cannot generate onehot format labels because no labels.' + num_classes = self.num_classes or sample_item.get( + 'num_classes') + assert num_classes is not None, \ + 'Cannot generate one-hot format labels because not set ' \ + '`num_classes` in `data_preprocessor`.' + batch_score = batch_label_to_onehot( + batch_label, label_indices, num_classes).to(self.device) + + # ----- Batch Augmentations ---- + if (training and self.batch_augments is not None + and batch_score is not None): + inputs, batch_score = self.batch_augments(inputs, batch_score) + + # ----- scatter labels and scores to data samples --- + if batch_label is not None: + for sample, label in zip( + data_samples, tensor_split(batch_label, + label_indices)): + sample.set_gt_label(label) + if batch_score is not None: + for sample, score in zip(data_samples, batch_score): + sample.set_gt_score(score) + elif isinstance(sample_item, MultiTaskDataSample): + data_samples = self.cast_data(data_samples) + + return {'inputs': inputs, 'data_samples': data_samples} + + +@MODELS.register_module() +class SelfSupDataPreprocessor(ImgDataPreprocessor): + """Image pre-processor for operations, like normalization and bgr to rgb. + + Compared with the :class:`mmengine.ImgDataPreprocessor`, this module + supports ``inputs`` as torch.Tensor or a list of torch.Tensor. + """ + + def __init__(self, + mean: Optional[Sequence[Union[float, int]]] = None, + std: Optional[Sequence[Union[float, int]]] = None, + pad_size_divisor: int = 1, + pad_value: Union[float, int] = 0, + to_rgb: bool = False, + bgr_to_rgb: bool = False, + rgb_to_bgr: bool = False, + non_blocking: Optional[bool] = False): + super().__init__( + mean=mean, + std=std, + pad_size_divisor=pad_size_divisor, + pad_value=pad_value, + bgr_to_rgb=bgr_to_rgb, + rgb_to_bgr=rgb_to_bgr, + non_blocking=non_blocking) + + self._channel_conversion = to_rgb or bgr_to_rgb or rgb_to_bgr + + def forward( + self, + data: dict, + training: bool = False + ) -> Tuple[List[torch.Tensor], Optional[list]]: + """Performs normalization and bgr2rgb conversion based on + ``BaseDataPreprocessor``. + + Args: + data (dict): data sampled from dataloader. + training (bool): Whether to enable training time augmentation. If + subclasses override this method, they can perform different + preprocessing strategies for training and testing based on the + value of ``training``. + Returns: + Tuple[torch.Tensor, Optional[list]]: Data in the same format as the + model input. 
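In configs, ClsDataPreprocessor is typically combined with RandomBatchAugment through the batch_augments field described above. A hedged config-style example; the normalization statistics and the Mixup/CutMix settings are illustrative placeholders:

```python
data_preprocessor = dict(
    type='ClsDataPreprocessor',
    num_classes=1000,
    mean=[123.675, 116.28, 103.53],   # placeholder ImageNet-style statistics
    std=[58.395, 57.12, 57.375],
    to_rgb=True,
    batch_augments=dict(
        augments=[
            dict(type='Mixup', alpha=0.8),
            dict(type='CutMix', alpha=1.0),
        ],
        # the leftover 0.4 probability applies no batch augmentation,
        # matching the `None` branch of RandomBatchAugment above
        probs=[0.3, 0.3],
    ),
)
```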
+ """ + assert isinstance(data, + dict), 'Please use default_collate in dataloader, \ + instead of pseudo_collate.' + + data = [val for _, val in data.items()] + batch_inputs, batch_data_samples = self.cast_data(data) + + # Here is what is different from :class:`mmengine.ImgDataPreprocessor` + # Since there are multiple views for an image for some algorithms, + # e.g. SimCLR, each item in inputs is a list, containing multi-views + # for an image. + if isinstance(batch_inputs, list): + # channel transform + if self._channel_conversion: + batch_inputs = [ + _input[:, [2, 1, 0], ...] for _input in batch_inputs + ] + + # convert to float after channel conversion to ensure efficiency + batch_inputs = [_input.float() for _input in batch_inputs] + + # normalization. + if self._enable_normalize: + batch_inputs = [(_input - self.mean) / self.std + for _input in batch_inputs] + else: + # channel transform + if self._channel_conversion: + batch_inputs = batch_inputs[:, [2, 1, 0], ...] + + # convert to float after channel conversion to ensure efficiency + batch_inputs = batch_inputs.float() + + # normalization. + if self._enable_normalize: + batch_inputs = (batch_inputs - self.mean) / self.std + + return {'inputs': batch_inputs, 'data_samples': batch_data_samples} + + +@MODELS.register_module() +class TwoNormDataPreprocessor(SelfSupDataPreprocessor): + """Image pre-processor for CAE, BEiT v1/v2, etc. + + Compared with the :class:`mmselfsup.SelfSupDataPreprocessor`, this module + will normalize the prediction image and target image with different + normalization parameters. + + Args: + mean (Sequence[float or int], optional): The pixel mean of image + channels. If ``to_rgb=True`` it means the mean value of R, G, B + channels. If the length of `mean` is 1, it means all channels have + the same mean value, or the input is a gray image. If it is not + specified, images will not be normalized. Defaults to None. + std (Sequence[float or int], optional): The pixel standard deviation of + image channels. If ``to_rgb=True`` it means the standard deviation + of R, G, B channels. If the length of `std` is 1, it means all + channels have the same standard deviation, or the input is a gray + image. If it is not specified, images will not be normalized. + Defaults to None. + second_mean (Sequence[float or int], optional): The description is + like ``mean``, it can be customized for targe image. Defaults to + None. + second_std (Sequence[float or int], optional): The description is + like ``std``, it can be customized for targe image. Defaults to + None. + pad_size_divisor (int): The size of padded image should be + divisible by ``pad_size_divisor``. Defaults to 1. + pad_value (float or int): The padded pixel value. Defaults to 0. + to_rgb (bool): whether to convert image from BGR to RGB. + Defaults to False. + non_blocking (bool): Whether block current process when transferring + data to device. Defaults to False. 
+ """ + + def __init__(self, + mean: Optional[Sequence[Union[float, int]]] = None, + std: Optional[Sequence[Union[float, int]]] = None, + second_mean: Sequence[Union[float, int]] = None, + second_std: Sequence[Union[float, int]] = None, + pad_size_divisor: int = 1, + pad_value: Union[float, int] = 0, + to_rgb: bool = False, + non_blocking: Optional[bool] = False): + super().__init__( + mean=mean, + std=std, + pad_size_divisor=pad_size_divisor, + pad_value=pad_value, + to_rgb=to_rgb, + non_blocking=non_blocking) + assert (second_mean is not None) and (second_std is not None), ( + 'mean and std should not be None while using ' + '`TwoNormDataPreprocessor`') + assert len(second_mean) == 3 or len(second_mean) == 1, ( + '`mean` should have 1 or 3 values, to be compatible with ' + f'RGB or gray image, but got {len(second_mean)} values') + assert len(second_std) == 3 or len(second_std) == 1, ( + '`std` should have 1 or 3 values, to be compatible with RGB ' + f'or gray image, but got {len(std)} values') + + self.register_buffer('second_mean', + torch.tensor(second_mean).view(-1, 1, 1), False) + self.register_buffer('second_std', + torch.tensor(second_std).view(-1, 1, 1), False) + + def forward( + self, + data: dict, + training: bool = False + ) -> Tuple[List[torch.Tensor], Optional[list]]: + """Performs normalization and bgr2rgb conversion based on + ``BaseDataPreprocessor``. The ``batch_inputs`` in forward function is a + list. + + Args: + data (dict): data sampled from dataloader. + training (bool): Whether to enable training time augmentation. If + subclasses override this method, they can perform different + preprocessing strategies for training and testing based on the + value of ``training``. + Returns: + Tuple[torch.Tensor, Optional[list]]: Data in the same format as the + model input. + """ + data = [val for _, val in data.items()] + batch_inputs, batch_data_samples = self.cast_data(data) + # channel transform + if self._channel_conversion: + batch_inputs = [ + _input[:, [2, 1, 0], ...] for _input in batch_inputs + ] + + # convert to float after channel conversion to ensure efficiency + batch_inputs = [_input.float() for _input in batch_inputs] + + # Normalization. Here is what is different from + # :class:`mmselfsup.SelfSupDataPreprocessor`. Normalize the target + # image and prediction image with different normalization params + if self._enable_normalize: + batch_inputs = [ + (batch_inputs[0] - self.mean) / self.std, + (batch_inputs[1] - self.second_mean) / self.second_std + ] + + return {'inputs': batch_inputs, 'data_samples': batch_data_samples} + + +@MODELS.register_module() +class VideoDataPreprocessor(BaseDataPreprocessor): + """Video pre-processor for operations, like normalization and bgr to rgb + conversion . + + Compared with the :class:`mmaction.ActionDataPreprocessor`, this module + supports ``inputs`` as torch.Tensor or a list of torch.Tensor. + + Args: + mean (Sequence[float or int, optional): The pixel mean of channels + of images or stacked optical flow. Defaults to None. + std (Sequence[float or int], optional): The pixel standard deviation + of channels of images or stacked optical flow. Defaults to None. + pad_size_divisor (int): The size of padded image should be + divisible by ``pad_size_divisor``. Defaults to 1. + pad_value (float or int): The padded pixel value. Defaults to 0. + to_rgb (bool): Whether to convert image from BGR to RGB. + Defaults to False. + format_shape (str): Format shape of input data. + Defaults to ``'NCHW'``. 
+ """ + + def __init__(self, + mean: Optional[Sequence[Union[float, int]]] = None, + std: Optional[Sequence[Union[float, int]]] = None, + pad_size_divisor: int = 1, + pad_value: Union[float, int] = 0, + to_rgb: bool = False, + format_shape: str = 'NCHW') -> None: + super().__init__() + self.pad_size_divisor = pad_size_divisor + self.pad_value = pad_value + self.to_rgb = to_rgb + self.format_shape = format_shape + + if mean is not None: + assert std is not None, 'To enable the normalization in ' \ + 'preprocessing, please specify both ' \ + '`mean` and `std`.' + # Enable the normalization in preprocessing. + self._enable_normalize = True + if self.format_shape == 'NCHW': + normalizer_shape = (-1, 1, 1) + elif self.format_shape == 'NCTHW': + normalizer_shape = (-1, 1, 1, 1) + else: + raise ValueError(f'Invalid format shape: {format_shape}') + + self.register_buffer( + 'mean', + torch.tensor(mean, dtype=torch.float32).view(normalizer_shape), + False) + self.register_buffer( + 'std', + torch.tensor(std, dtype=torch.float32).view(normalizer_shape), + False) + else: + self._enable_normalize = False + + def forward( + self, + data: dict, + training: bool = False + ) -> Tuple[List[torch.Tensor], Optional[list]]: + """Performs normalization、padding and bgr2rgb conversion based on + ``BaseDataPreprocessor``. + + Args: + data (dict): data sampled from dataloader. + training (bool): Whether to enable training time augmentation. If + subclasses override this method, they can perform different + preprocessing strategies for training and testing based on the + value of ``training``. + Returns: + Tuple[List[torch.Tensor], Optional[list]]: Data in the same format + as the model input. + """ + + data = [val for _, val in data.items()] + batch_inputs, batch_data_samples = self.cast_data(data) + + if isinstance(batch_inputs, list): + # channel transform + if self.to_rgb: + if self.format_shape == 'NCHW': + batch_inputs = [ + _input[..., [2, 1, 0], :, :] for _input in batch_inputs + ] + elif self.format_shape == 'NCTHW': + batch_inputs = [ + _input[..., [2, 1, 0], :, :, :] + for _input in batch_inputs + ] + else: + raise ValueError( + f'Invalid format shape: {self.format_shape}') + + # convert to float after channel conversion to ensure efficiency + batch_inputs = [_input.float() for _input in batch_inputs] + + # normalization + if self._enable_normalize: + batch_inputs = [(_input - self.mean) / self.std + for _input in batch_inputs] + + else: + # channel transform + if self.to_rgb: + if self.format_shape == 'NCHW': + batch_inputs = batch_inputs[..., [2, 1, 0], :, :] + elif self.format_shape == 'NCTHW': + batch_inputs = batch_inputs[..., [2, 1, 0], :, :, :] + else: + raise ValueError( + f'Invalid format shape: {self.format_shape}') + + # convert to float after channel conversion to ensure efficiency + batch_inputs = batch_inputs.float() + + # normalization + if self._enable_normalize: + batch_inputs = (batch_inputs - self.mean) / self.std + + return {'inputs': batch_inputs, 'data_samples': batch_data_samples} + + +@MODELS.register_module() +class MultiModalDataPreprocessor(BaseDataPreprocessor): + """Data pre-processor for image-text multimodality tasks. + + It provides the data pre-processing as follows + + - Collate and move data to the target device. + - Pad inputs to the maximum size of current batch with defined + ``pad_value``. The padding size can be divisible by a defined + ``pad_size_divisor`` + - Stack inputs to batch_inputs. + - Convert inputs from bgr to rgb if the shape of input is (3, H, W). 
+ - Normalize image with defined std and mean. + + Args: + mean (Sequence[Number], optional): The pixel mean of R, G, B channels. + Defaults to None. + std (Sequence[Number], optional): The pixel standard deviation of + R, G, B channels. Defaults to None. + pad_size_divisor (int): The size of padded image should be + divisible by ``pad_size_divisor``. Defaults to 1. + pad_value (Number): The padded pixel value. Defaults to 0. + to_rgb (bool): whether to convert image from BGR to RGB. + Defaults to False. + """ + + def __init__( + self, + mean: Sequence[Number] = None, + std: Sequence[Number] = None, + pad_size_divisor: int = 1, + pad_value: Number = 0, + to_rgb: bool = False, + ): + super().__init__() + self.pad_size_divisor = pad_size_divisor + self.pad_value = pad_value + self.to_rgb = to_rgb + + if mean is not None: + assert std is not None, 'To enable the normalization in ' \ + 'preprocessing, please specify both `mean` and `std`.' + # Enable the normalization in preprocessing. + self._enable_normalize = True + self.register_buffer('mean', + torch.tensor(mean).view(-1, 1, 1), False) + self.register_buffer('std', + torch.tensor(std).view(-1, 1, 1), False) + else: + self._enable_normalize = False + + def forward(self, data: dict, training: bool = False) -> dict: + """Perform normalization, padding, bgr2rgb conversion and batch + augmentation based on ``BaseDataPreprocessor``. + + Args: + data (dict): data sampled from dataloader. + training (bool): Whether to enable training time augmentation. + + Returns: + dict: Data in the same format as the model input. + """ + data = self.cast_data(data) + + imgs = data.get('inputs', None) + + def _process_img(img): + # ------ To RGB ------ + if self.to_rgb and img.size(1) == 3: + img = img.flip(1) + + # -- Normalization --- + img = img.float() + if self._enable_normalize: + img = (img - self.mean) / self.std + + # ------ Padding ----- + if self.pad_size_divisor > 1: + h, w = img.shape[-2:] + + target_h = math.ceil( + h / self.pad_size_divisor) * self.pad_size_divisor + target_w = math.ceil( + w / self.pad_size_divisor) * self.pad_size_divisor + pad_h = target_h - h + pad_w = target_w - w + img = F.pad(img, (0, pad_w, 0, pad_h), 'constant', + self.pad_value) + return img + + if isinstance(imgs, torch.Tensor): + imgs = _process_img(imgs) + elif isinstance(imgs, Sequence): + # B, T, C, H, W + imgs = torch.stack([_process_img(img) for img in imgs], dim=1) + elif imgs is not None: + raise ValueError(f'{type(imgs)} is not supported for imgs inputs.') + + data_samples = data.get('data_samples', None) + + return {'images': imgs, 'data_samples': data_samples} diff --git a/mmpretrain/models/utils/ema.py b/mmpretrain/models/utils/ema.py new file mode 100644 index 0000000000000000000000000000000000000000..63c5006bbb0d9ff967b3cce7d3b5ada0cc683468 --- /dev/null +++ b/mmpretrain/models/utils/ema.py @@ -0,0 +1,87 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from math import cos, pi +from typing import Optional + +import torch +import torch.nn as nn +from mmengine.logging import MessageHub +from mmengine.model import ExponentialMovingAverage + +from mmpretrain.registry import MODELS + + +@MODELS.register_module() +class CosineEMA(ExponentialMovingAverage): + r"""CosineEMA is implemented for updating momentum parameter, used in BYOL, + MoCoV3, etc. + + All parameters are updated by the formula as below: + + .. math:: + + X'_{t+1} = (1 - m) * X'_t + m * X_t + + Where :math:`m` the the momentum parameter. 
And it's updated with cosine + annealing, including momentum adjustment following: + + .. math:: + m = m_{end} + (m_{end} - m_{start}) * (\cos\frac{k\pi}{K} + 1) / 2 + + where :math:`k` is the current step, :math:`K` is the total steps. + + .. note:: + This :attr:`momentum` argument is different from one used in optimizer + classes and the conventional notion of momentum. Mathematically, + :math:`X'_{t}` is the moving average and :math:`X_t` is the new + observed value. The value of momentum is usually a small number, + allowing observed values to slowly update the ema parameters. See also + :external:py:class:`torch.nn.BatchNorm2d`. + + Args: + model (nn.Module): The model to be averaged. + momentum (float): The start momentum value. Defaults to 0.004. + end_momentum (float): The end momentum value for cosine annealing. + Defaults to 0. + interval (int): Interval between two updates. Defaults to 1. + device (torch.device, optional): If provided, the averaged model will + be stored on the :attr:`device`. Defaults to None. + update_buffers (bool): if True, it will compute running averages for + both the parameters and the buffers of the model. Defaults to + False. + """ + + def __init__(self, + model: nn.Module, + momentum: float = 0.004, + end_momentum: float = 0., + interval: int = 1, + device: Optional[torch.device] = None, + update_buffers: bool = False) -> None: + super().__init__( + model=model, + momentum=momentum, + interval=interval, + device=device, + update_buffers=update_buffers) + self.end_momentum = end_momentum + + def avg_func(self, averaged_param: torch.Tensor, + source_param: torch.Tensor, steps: int) -> None: + """Compute the moving average of the parameters using the cosine + momentum strategy. + + Args: + averaged_param (Tensor): The averaged parameters. + source_param (Tensor): The source parameters. + steps (int): The number of times the parameters have been + updated. + + Returns: + Tensor: The averaged parameters. + """ + message_hub = MessageHub.get_current_instance() + max_iters = message_hub.get_info('max_iters') + cosine_annealing = (cos(pi * steps / float(max_iters)) + 1) / 2 + momentum = self.end_momentum - (self.end_momentum - + self.momentum) * cosine_annealing + averaged_param.mul_(1 - momentum).add_(source_param, alpha=momentum) diff --git a/mmpretrain/models/utils/embed.py b/mmpretrain/models/utils/embed.py new file mode 100644 index 0000000000000000000000000000000000000000..8299f9a06789768b26ea58260a2984024fbf801d --- /dev/null +++ b/mmpretrain/models/utils/embed.py @@ -0,0 +1,423 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import warnings +from typing import Sequence + +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import build_conv_layer, build_norm_layer +from mmcv.cnn.bricks.transformer import AdaptivePadding +from mmengine.model import BaseModule + +from .helpers import to_2tuple + + +def resize_pos_embed(pos_embed, + src_shape, + dst_shape, + mode='bicubic', + num_extra_tokens=1): + """Resize pos_embed weights. + + Args: + pos_embed (torch.Tensor): Position embedding weights with shape + [1, L, C]. + src_shape (tuple): The resolution of downsampled origin training + image, in format (H, W). + dst_shape (tuple): The resolution of downsampled new training + image, in format (H, W). + mode (str): Algorithm used for upsampling. Choose one from 'nearest', + 'linear', 'bilinear', 'bicubic' and 'trilinear'. + Defaults to 'bicubic'. 
+ num_extra_tokens (int): The number of extra tokens, such as cls_token. + Defaults to 1. + + Returns: + torch.Tensor: The resized pos_embed of shape [1, L_new, C] + """ + if src_shape[0] == dst_shape[0] and src_shape[1] == dst_shape[1]: + return pos_embed + assert pos_embed.ndim == 3, 'shape of pos_embed must be [1, L, C]' + _, L, C = pos_embed.shape + src_h, src_w = src_shape + assert L == src_h * src_w + num_extra_tokens, \ + f"The length of `pos_embed` ({L}) doesn't match the expected " \ + f'shape ({src_h}*{src_w}+{num_extra_tokens}). Please check the' \ + '`img_size` argument.' + extra_tokens = pos_embed[:, :num_extra_tokens] + + src_weight = pos_embed[:, num_extra_tokens:] + src_weight = src_weight.reshape(1, src_h, src_w, C).permute(0, 3, 1, 2) + + # The cubic interpolate algorithm only accepts float32 + dst_weight = F.interpolate( + src_weight.float(), size=dst_shape, align_corners=False, mode=mode) + dst_weight = torch.flatten(dst_weight, 2).transpose(1, 2) + dst_weight = dst_weight.to(src_weight.dtype) + + return torch.cat((extra_tokens, dst_weight), dim=1) + + +def resize_relative_position_bias_table(src_shape, dst_shape, table, num_head): + """Resize relative position bias table. + + Args: + src_shape (int): The resolution of downsampled origin training + image, in format (H, W). + dst_shape (int): The resolution of downsampled new training + image, in format (H, W). + table (tensor): The relative position bias of the pretrained model. + num_head (int): Number of attention heads. + + Returns: + torch.Tensor: The resized relative position bias table. + """ + from scipy import interpolate + + def geometric_progression(a, r, n): + return a * (1.0 - r**n) / (1.0 - r) + + left, right = 1.01, 1.5 + while right - left > 1e-6: + q = (left + right) / 2.0 + gp = geometric_progression(1, q, src_shape // 2) + if gp > dst_shape // 2: + right = q + else: + left = q + + dis = [] + cur = 1 + for i in range(src_shape // 2): + dis.append(cur) + cur += q**(i + 1) + + r_ids = [-_ for _ in reversed(dis)] + + x = r_ids + [0] + dis + y = r_ids + [0] + dis + + t = dst_shape // 2.0 + dx = np.arange(-t, t + 0.1, 1.0) + dy = np.arange(-t, t + 0.1, 1.0) + + all_rel_pos_bias = [] + + for i in range(num_head): + z = table[:, i].view(src_shape, src_shape).float().numpy() + f_cubic = interpolate.interp2d(x, y, z, kind='cubic') + all_rel_pos_bias.append( + torch.Tensor(f_cubic(dx, + dy)).contiguous().view(-1, + 1).to(table.device)) + new_rel_pos_bias = torch.cat(all_rel_pos_bias, dim=-1) + return new_rel_pos_bias + + +class PatchEmbed(BaseModule): + """Image to Patch Embedding. + + We use a conv layer to implement PatchEmbed. + + Args: + img_size (int | tuple): The size of input image. Default: 224 + in_channels (int): The num of input channels. Default: 3 + embed_dims (int): The dimensions of embedding. Default: 768 + norm_cfg (dict, optional): Config dict for normalization layer. + Default: None + conv_cfg (dict, optional): The config dict for conv layers. + Default: None + init_cfg (`mmcv.ConfigDict`, optional): The Config for initialization. + Default: None + """ + + def __init__(self, + img_size=224, + in_channels=3, + embed_dims=768, + norm_cfg=None, + conv_cfg=None, + init_cfg=None): + super(PatchEmbed, self).__init__(init_cfg) + warnings.warn('The `PatchEmbed` in mmpretrain will be deprecated. ' + 'Please use `mmcv.cnn.bricks.transformer.PatchEmbed`. 
' + "It's more general and supports dynamic input shape") + + if isinstance(img_size, int): + img_size = to_2tuple(img_size) + elif isinstance(img_size, tuple): + if len(img_size) == 1: + img_size = to_2tuple(img_size[0]) + assert len(img_size) == 2, \ + f'The size of image should have length 1 or 2, ' \ + f'but got {len(img_size)}' + + self.img_size = img_size + self.embed_dims = embed_dims + + # Use conv layer to embed + conv_cfg = conv_cfg or dict() + _conv_cfg = dict( + type='Conv2d', kernel_size=16, stride=16, padding=0, dilation=1) + _conv_cfg.update(conv_cfg) + self.projection = build_conv_layer(_conv_cfg, in_channels, embed_dims) + + # Calculate how many patches a input image is splited to. + h_out, w_out = [(self.img_size[i] + 2 * self.projection.padding[i] - + self.projection.dilation[i] * + (self.projection.kernel_size[i] - 1) - 1) // + self.projection.stride[i] + 1 for i in range(2)] + + self.patches_resolution = (h_out, w_out) + self.num_patches = h_out * w_out + + if norm_cfg is not None: + self.norm = build_norm_layer(norm_cfg, embed_dims)[1] + else: + self.norm = None + + def forward(self, x): + B, C, H, W = x.shape + assert H == self.img_size[0] and W == self.img_size[1], \ + f"Input image size ({H}*{W}) doesn't " \ + f'match model ({self.img_size[0]}*{self.img_size[1]}).' + # The output size is (B, N, D), where N=H*W/P/P, D is embid_dim + x = self.projection(x).flatten(2).transpose(1, 2) + + if self.norm is not None: + x = self.norm(x) + + return x + + +# Modified from pytorch-image-models +class HybridEmbed(BaseModule): + """CNN Feature Map Embedding. + + Extract feature map from CNN, flatten, + project to embedding dim. + + Args: + backbone (nn.Module): CNN backbone + img_size (int | tuple): The size of input image. Default: 224 + feature_size (int | tuple, optional): Size of feature map extracted by + CNN backbone. Default: None + in_channels (int): The num of input channels. Default: 3 + embed_dims (int): The dimensions of embedding. Default: 768 + conv_cfg (dict, optional): The config dict for conv layers. + Default: None. + init_cfg (`mmcv.ConfigDict`, optional): The Config for initialization. + Default: None. + """ + + def __init__(self, + backbone, + img_size=224, + feature_size=None, + in_channels=3, + embed_dims=768, + conv_cfg=None, + init_cfg=None): + super(HybridEmbed, self).__init__(init_cfg) + assert isinstance(backbone, nn.Module) + if isinstance(img_size, int): + img_size = to_2tuple(img_size) + elif isinstance(img_size, tuple): + if len(img_size) == 1: + img_size = to_2tuple(img_size[0]) + assert len(img_size) == 2, \ + f'The size of image should have length 1 or 2, ' \ + f'but got {len(img_size)}' + + self.img_size = img_size + self.backbone = backbone + if feature_size is None: + with torch.no_grad(): + # FIXME this is hacky, but most reliable way of + # determining the exact dim of the output feature + # map for all networks, the feature metadata has + # reliable channel and stride info, but using + # stride to calc feature dim requires info about padding of + # each stage that isn't captured. 
+ training = backbone.training + if training: + backbone.eval() + o = self.backbone( + torch.zeros(1, in_channels, img_size[0], img_size[1])) + if isinstance(o, (list, tuple)): + # last feature if backbone outputs list/tuple of features + o = o[-1] + feature_size = o.shape[-2:] + feature_dim = o.shape[1] + backbone.train(training) + else: + feature_size = to_2tuple(feature_size) + if hasattr(self.backbone, 'feature_info'): + feature_dim = self.backbone.feature_info.channels()[-1] + else: + feature_dim = self.backbone.num_features + self.num_patches = feature_size[0] * feature_size[1] + + # Use conv layer to embed + conv_cfg = conv_cfg or dict() + _conv_cfg = dict( + type='Conv2d', kernel_size=1, stride=1, padding=0, dilation=1) + _conv_cfg.update(conv_cfg) + self.projection = build_conv_layer(_conv_cfg, feature_dim, embed_dims) + + def forward(self, x): + x = self.backbone(x) + if isinstance(x, (list, tuple)): + # last feature if backbone outputs list/tuple of features + x = x[-1] + x = self.projection(x).flatten(2).transpose(1, 2) + return x + + +class PatchMerging(BaseModule): + """Merge patch feature map. + + Modified from mmcv, and this module supports specifying whether to use + post-norm. + + This layer groups feature map by kernel_size, and applies norm and linear + layers to the grouped feature map ((used in Swin Transformer)). Our + implementation uses :class:`torch.nn.Unfold` to merge patches, which is + about 25% faster than the original implementation. However, we need to + modify pretrained models for compatibility. + + Args: + in_channels (int): The num of input channels. To gets fully covered + by filter and stride you specified. + out_channels (int): The num of output channels. + kernel_size (int | tuple, optional): the kernel size in the unfold + layer. Defaults to 2. + stride (int | tuple, optional): the stride of the sliding blocks in the + unfold layer. Defaults to None, which means to be set as + ``kernel_size``. + padding (int | tuple | string ): The padding length of + embedding conv. When it is a string, it means the mode + of adaptive padding, support "same" and "corner" now. + Defaults to "corner". + dilation (int | tuple, optional): dilation parameter in the unfold + layer. Defaults to 1. + bias (bool, optional): Whether to add bias in linear layer or not. + Defaults to False. + norm_cfg (dict, optional): Config dict for normalization layer. + Defaults to ``dict(type='LN')``. + use_post_norm (bool): Whether to use post normalization here. + Defaults to False. + init_cfg (dict, optional): The extra config for initialization. + Defaults to None. 
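A shape sketch for the PatchMerging module documented above (its forward is implemented just below): 2x2 neighbouring tokens are gathered by nn.Unfold, normalized and projected, halving the spatial resolution. Sizes are illustrative and the import path assumes the layout in this diff:

```python
import torch
from mmpretrain.models.utils.embed import PatchMerging

merge = PatchMerging(in_channels=96, out_channels=192)  # Swin-style 2x2 merge
x = torch.randn(2, 56 * 56, 96)                         # (B, H*W, C)
out, out_size = merge(x, input_size=(56, 56))
print(out.shape, out_size)  # torch.Size([2, 784, 192]) (28, 28)
```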
+ """ + + def __init__(self, + in_channels, + out_channels, + kernel_size=2, + stride=None, + padding='corner', + dilation=1, + bias=False, + norm_cfg=dict(type='LN'), + use_post_norm=False, + init_cfg=None): + super().__init__(init_cfg=init_cfg) + self.in_channels = in_channels + self.out_channels = out_channels + self.use_post_norm = use_post_norm + + if stride: + stride = stride + else: + stride = kernel_size + + kernel_size = to_2tuple(kernel_size) + stride = to_2tuple(stride) + dilation = to_2tuple(dilation) + + if isinstance(padding, str): + self.adaptive_padding = AdaptivePadding( + kernel_size=kernel_size, + stride=stride, + dilation=dilation, + padding=padding) + # disable the padding of unfold + padding = 0 + else: + self.adaptive_padding = None + + padding = to_2tuple(padding) + self.sampler = nn.Unfold( + kernel_size=kernel_size, + dilation=dilation, + padding=padding, + stride=stride) + + sample_dim = kernel_size[0] * kernel_size[1] * in_channels + + self.reduction = nn.Linear(sample_dim, out_channels, bias=bias) + + if norm_cfg is not None: + # build pre or post norm layer based on different channels + if self.use_post_norm: + self.norm = build_norm_layer(norm_cfg, out_channels)[1] + else: + self.norm = build_norm_layer(norm_cfg, sample_dim)[1] + else: + self.norm = None + + def forward(self, x, input_size): + """ + Args: + x (Tensor): Has shape (B, H*W, C_in). + input_size (tuple[int]): The spatial shape of x, arrange as (H, W). + Default: None. + + Returns: + tuple: Contains merged results and its spatial shape. + + - x (Tensor): Has shape (B, Merged_H * Merged_W, C_out) + - out_size (tuple[int]): Spatial shape of x, arrange as + (Merged_H, Merged_W). + """ + B, L, C = x.shape + assert isinstance(input_size, Sequence), f'Expect ' \ + f'input_size is ' \ + f'`Sequence` ' \ + f'but get {input_size}' + + H, W = input_size + assert L == H * W, 'input feature has wrong size' + + x = x.view(B, H, W, C).permute([0, 3, 1, 2]) # B, C, H, W + + if self.adaptive_padding: + x = self.adaptive_padding(x) + H, W = x.shape[-2:] + + # Use nn.Unfold to merge patch. About 25% faster than original method, + # but need to modify pretrained model for compatibility + # if kernel_size=2 and stride=2, x should has shape (B, 4*C, H/2*W/2) + x = self.sampler(x) + + out_h = (H + 2 * self.sampler.padding[0] - self.sampler.dilation[0] * + (self.sampler.kernel_size[0] - 1) - + 1) // self.sampler.stride[0] + 1 + out_w = (W + 2 * self.sampler.padding[1] - self.sampler.dilation[1] * + (self.sampler.kernel_size[1] - 1) - + 1) // self.sampler.stride[1] + 1 + + output_size = (out_h, out_w) + x = x.transpose(1, 2) # B, H/2*W/2, 4*C + + if self.use_post_norm: + # use post-norm here + x = self.reduction(x) + x = self.norm(x) if self.norm else x + else: + x = self.norm(x) if self.norm else x + x = self.reduction(x) + + return x, output_size diff --git a/mmpretrain/models/utils/helpers.py b/mmpretrain/models/utils/helpers.py new file mode 100644 index 0000000000000000000000000000000000000000..971f45054e5edac15c71aa64ddd26164bf404d22 --- /dev/null +++ b/mmpretrain/models/utils/helpers.py @@ -0,0 +1,53 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+import collections.abc +import warnings +from itertools import repeat + +import torch +from mmengine.utils import digit_version + + +def is_tracing() -> bool: + """Determine whether the model is called during the tracing of code with + ``torch.jit.trace``.""" + if digit_version(torch.__version__) >= digit_version('1.6.0'): + on_trace = torch.jit.is_tracing() + # In PyTorch 1.6, torch.jit.is_tracing has a bug. + # Refers to https://github.com/pytorch/pytorch/issues/42448 + if isinstance(on_trace, bool): + return on_trace + else: + return torch._C._is_tracing() + else: + warnings.warn( + 'torch.jit.is_tracing is only supported after v1.6.0. ' + 'Therefore is_tracing returns False automatically. Please ' + 'set on_trace manually if you are using trace.', UserWarning) + return False + + +# From PyTorch internals +def _ntuple(n): + """A `to_tuple` function generator. + + It returns a function, this function will repeat the input to a tuple of + length ``n`` if the input is not an Iterable object, otherwise, return the + input directly. + + Args: + n (int): The number of the target length. + """ + + def parse(x): + if isinstance(x, collections.abc.Iterable): + return x + return tuple(repeat(x, n)) + + return parse + + +to_1tuple = _ntuple(1) +to_2tuple = _ntuple(2) +to_3tuple = _ntuple(3) +to_4tuple = _ntuple(4) +to_ntuple = _ntuple diff --git a/mmpretrain/models/utils/huggingface.py b/mmpretrain/models/utils/huggingface.py new file mode 100644 index 0000000000000000000000000000000000000000..e527315b26e5d3f34c10d22e75d47b4050de4748 --- /dev/null +++ b/mmpretrain/models/utils/huggingface.py @@ -0,0 +1,98 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import contextlib +from typing import Optional + +import transformers +from mmengine.registry import Registry +from transformers import AutoConfig, PreTrainedModel +from transformers.models.auto.auto_factory import _BaseAutoModelClass + +from mmpretrain.registry import MODELS, TOKENIZER + + +def register_hf_tokenizer( + cls: Optional[type] = None, + registry: Registry = TOKENIZER, +): + """Register HuggingFace-style PreTrainedTokenizerBase class.""" + if cls is None: + + # use it as a decorator: @register_hf_tokenizer() + def _register(cls): + register_hf_tokenizer(cls=cls) + return cls + + return _register + + def from_pretrained(**kwargs): + if ('pretrained_model_name_or_path' not in kwargs + and 'name_or_path' not in kwargs): + raise TypeError( + f'{cls.__name__}.from_pretrained() missing required ' + "argument 'pretrained_model_name_or_path' or 'name_or_path'.") + # `pretrained_model_name_or_path` is too long for config, + # add an alias name `name_or_path` here. 
+ name_or_path = kwargs.pop('pretrained_model_name_or_path', + kwargs.pop('name_or_path')) + return cls.from_pretrained(name_or_path, **kwargs) + + registry._register_module(module=from_pretrained, module_name=cls.__name__) + return cls + + +_load_hf_pretrained_model = True + + +@contextlib.contextmanager +def no_load_hf_pretrained_model(): + global _load_hf_pretrained_model + _load_hf_pretrained_model = False + yield + _load_hf_pretrained_model = True + + +def register_hf_model( + cls: Optional[type] = None, + registry: Registry = MODELS, +): + """Register HuggingFace-style PreTrainedModel class.""" + if cls is None: + + # use it as a decorator: @register_hf_tokenizer() + def _register(cls): + register_hf_model(cls=cls) + return cls + + return _register + + if issubclass(cls, _BaseAutoModelClass): + get_config = AutoConfig.from_pretrained + from_config = cls.from_config + elif issubclass(cls, PreTrainedModel): + get_config = cls.config_class.from_pretrained + from_config = cls + else: + raise TypeError('Not auto model nor pretrained model of huggingface.') + + def build(**kwargs): + if ('pretrained_model_name_or_path' not in kwargs + and 'name_or_path' not in kwargs): + raise TypeError( + f'{cls.__name__} missing required argument ' + '`pretrained_model_name_or_path` or `name_or_path`.') + # `pretrained_model_name_or_path` is too long for config, + # add an alias name `name_or_path` here. + name_or_path = kwargs.pop('pretrained_model_name_or_path', + kwargs.pop('name_or_path')) + + if kwargs.pop('load_pretrained', True) and _load_hf_pretrained_model: + return cls.from_pretrained(name_or_path, **kwargs) + else: + cfg = get_config(name_or_path, **kwargs) + return from_config(cfg) + + registry._register_module(module=build, module_name=cls.__name__) + return cls + + +register_hf_model(transformers.AutoModelForCausalLM) diff --git a/mmpretrain/models/utils/inverted_residual.py b/mmpretrain/models/utils/inverted_residual.py new file mode 100644 index 0000000000000000000000000000000000000000..8387b21251aacff8efcb1b048e37ecdfa1299b2b --- /dev/null +++ b/mmpretrain/models/utils/inverted_residual.py @@ -0,0 +1,125 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch.nn as nn +import torch.utils.checkpoint as cp +from mmcv.cnn import ConvModule +from mmcv.cnn.bricks import DropPath +from mmengine.model import BaseModule + +from .se_layer import SELayer + + +class InvertedResidual(BaseModule): + """Inverted Residual Block. + + Args: + in_channels (int): The input channels of this module. + out_channels (int): The output channels of this module. + mid_channels (int): The input channels of the depthwise convolution. + kernel_size (int): The kernel size of the depthwise convolution. + Defaults to 3. + stride (int): The stride of the depthwise convolution. Defaults to 1. + se_cfg (dict, optional): Config dict for se layer. Defaults to None, + which means no se layer. + conv_cfg (dict): Config dict for convolution layer. Defaults to None, + which means using conv2d. + norm_cfg (dict): Config dict for normalization layer. + Defaults to ``dict(type='BN')``. + act_cfg (dict): Config dict for activation layer. + Defaults to ``dict(type='ReLU')``. + drop_path_rate (float): stochastic depth rate. Defaults to 0. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. Defaults to False. + init_cfg (dict | list[dict], optional): Initialization config dict. 
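A usage sketch for the InvertedResidual block documented above (implemented just below). The se_cfg keys follow the SELayer interface assumed from MobileNetV3-style configs, since that module is not shown in this diff, and all channel numbers are illustrative:

```python
import torch
from mmpretrain.models.utils.inverted_residual import InvertedResidual

block = InvertedResidual(
    in_channels=32,
    out_channels=32,      # stride 1 + matching channels -> residual shortcut
    mid_channels=128,     # 1x1 expand -> 3x3 depthwise -> SE -> 1x1 linear
    kernel_size=3,
    stride=1,
    se_cfg=dict(channels=128, ratio=4))
print(block(torch.randn(2, 32, 56, 56)).shape)  # torch.Size([2, 32, 56, 56])
```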
+ """ + + def __init__(self, + in_channels, + out_channels, + mid_channels, + kernel_size=3, + stride=1, + se_cfg=None, + conv_cfg=None, + norm_cfg=dict(type='BN'), + act_cfg=dict(type='ReLU'), + drop_path_rate=0., + with_cp=False, + init_cfg=None): + super(InvertedResidual, self).__init__(init_cfg) + self.with_res_shortcut = (stride == 1 and in_channels == out_channels) + assert stride in [1, 2] + self.with_cp = with_cp + self.drop_path = DropPath( + drop_path_rate) if drop_path_rate > 0 else nn.Identity() + self.with_se = se_cfg is not None + self.with_expand_conv = (mid_channels != in_channels) + + if self.with_se: + assert isinstance(se_cfg, dict) + + if self.with_expand_conv: + self.expand_conv = ConvModule( + in_channels=in_channels, + out_channels=mid_channels, + kernel_size=1, + stride=1, + padding=0, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg) + self.depthwise_conv = ConvModule( + in_channels=mid_channels, + out_channels=mid_channels, + kernel_size=kernel_size, + stride=stride, + padding=kernel_size // 2, + groups=mid_channels, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg) + if self.with_se: + self.se = SELayer(**se_cfg) + self.linear_conv = ConvModule( + in_channels=mid_channels, + out_channels=out_channels, + kernel_size=1, + stride=1, + padding=0, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=None) + + def forward(self, x): + """Forward function. + + Args: + x (torch.Tensor): The input tensor. + + Returns: + torch.Tensor: The output tensor. + """ + + def _inner_forward(x): + out = x + + if self.with_expand_conv: + out = self.expand_conv(out) + + out = self.depthwise_conv(out) + + if self.with_se: + out = self.se(out) + + out = self.linear_conv(out) + + if self.with_res_shortcut: + return x + self.drop_path(out) + else: + return out + + if self.with_cp and x.requires_grad: + out = cp.checkpoint(_inner_forward, x) + else: + out = _inner_forward(x) + + return out diff --git a/mmpretrain/models/utils/layer_scale.py b/mmpretrain/models/utils/layer_scale.py new file mode 100644 index 0000000000000000000000000000000000000000..bb480a15ce35570a5fcfe060c25ef676730430a7 --- /dev/null +++ b/mmpretrain/models/utils/layer_scale.py @@ -0,0 +1,40 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Union + +import torch +import torch.nn as nn + + +class LayerScale(nn.Module): + """LayerScale layer. + + Args: + dim (int): Dimension of input features. + layer_scale_init_value (float or torch.Tensor): Init value of layer + scale. Defaults to 1e-5. + inplace (bool): inplace: can optionally do the + operation in-place. Defaults to False. + data_format (str): The input data format, could be 'channels_last' + or 'channels_first', representing (B, C, H, W) and + (B, N, C) format data respectively. Defaults to 'channels_last'. + """ + + def __init__(self, + dim: int, + layer_scale_init_value: Union[float, torch.Tensor] = 1e-5, + inplace: bool = False, + data_format: str = 'channels_last'): + super().__init__() + assert data_format in ('channels_last', 'channels_first'), \ + "'data_format' could only be channels_last or channels_first." 
+ self.inplace = inplace + self.data_format = data_format + self.weight = nn.Parameter(torch.ones(dim) * layer_scale_init_value) + + def forward(self, x): + if self.data_format == 'channels_first': + if self.inplace: + return x.mul_(self.weight.view(-1, 1, 1)) + else: + return x * self.weight.view(-1, 1, 1) + return x.mul_(self.weight) if self.inplace else x * self.weight diff --git a/mmpretrain/models/utils/make_divisible.py b/mmpretrain/models/utils/make_divisible.py new file mode 100644 index 0000000000000000000000000000000000000000..1ec74689e37d4a9d605a595adb0cca1da88aa19a --- /dev/null +++ b/mmpretrain/models/utils/make_divisible.py @@ -0,0 +1,25 @@ +# Copyright (c) OpenMMLab. All rights reserved. +def make_divisible(value, divisor, min_value=None, min_ratio=0.9): + """Make divisible function. + + This function rounds the channel number down to the nearest value that can + be divisible by the divisor. + + Args: + value (int): The original channel number. + divisor (int): The divisor to fully divide the channel number. + min_value (int, optional): The minimum value of the output channel. + Default: None, means that the minimum value equal to the divisor. + min_ratio (float): The minimum ratio of the rounded channel + number to the original channel number. Default: 0.9. + Returns: + int: The modified output channel number + """ + + if min_value is None: + min_value = divisor + new_value = max(min_value, int(value + divisor / 2) // divisor * divisor) + # Make sure that round down does not go down by more than (1-min_ratio). + if new_value < min_ratio * value: + new_value += divisor + return new_value diff --git a/mmpretrain/models/utils/norm.py b/mmpretrain/models/utils/norm.py new file mode 100644 index 0000000000000000000000000000000000000000..8b890a0c6ec654f00e4bb4cd148158eaeba7599d --- /dev/null +++ b/mmpretrain/models/utils/norm.py @@ -0,0 +1,133 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +import torch.nn as nn +import torch.nn.functional as F + +from mmpretrain.registry import MODELS + + +@MODELS.register_module() +class GRN(nn.Module): + """Global Response Normalization Module. + + Come from `ConvNeXt V2: Co-designing and Scaling ConvNets with Masked + Autoencoders `_ + + Args: + in_channels (int): The number of channels of the input tensor. + eps (float): a value added to the denominator for numerical stability. + Defaults to 1e-6. + """ + + def __init__(self, in_channels, eps=1e-6): + super().__init__() + self.in_channels = in_channels + self.gamma = nn.Parameter(torch.zeros(in_channels)) + self.beta = nn.Parameter(torch.zeros(in_channels)) + self.eps = eps + + def forward(self, x: torch.Tensor, data_format='channel_first'): + """Forward method. + + Args: + x (torch.Tensor): The input tensor. + data_format (str): The format of the input tensor. If + ``"channel_first"``, the shape of the input tensor should be + (B, C, H, W). If ``"channel_last"``, the shape of the input + tensor should be (B, H, W, C). Defaults to "channel_first". 
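+
+        Example (illustrative; GRN is shape-preserving)::
+
+            >>> import torch
+            >>> grn = GRN(in_channels=64)
+            >>> grn(torch.rand(2, 64, 7, 7)).shape
+            torch.Size([2, 64, 7, 7])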
+ """ + if data_format == 'channel_last': + gx = torch.norm(x, p=2, dim=(1, 2), keepdim=True) + nx = gx / (gx.mean(dim=-1, keepdim=True) + self.eps) + x = self.gamma * (x * nx) + self.beta + x + elif data_format == 'channel_first': + gx = torch.norm(x, p=2, dim=(2, 3), keepdim=True) + nx = gx / (gx.mean(dim=1, keepdim=True) + self.eps) + x = self.gamma.view(1, -1, 1, 1) * (x * nx) + self.beta.view( + 1, -1, 1, 1) + x + return x + + +@MODELS.register_module('LN2d') +class LayerNorm2d(nn.LayerNorm): + """LayerNorm on channels for 2d images. + + Args: + num_channels (int): The number of channels of the input tensor. + eps (float): a value added to the denominator for numerical stability. + Defaults to 1e-5. + elementwise_affine (bool): a boolean value that when set to ``True``, + this module has learnable per-element affine parameters initialized + to ones (for weights) and zeros (for biases). Defaults to True. + """ + + def __init__(self, num_channels: int, **kwargs) -> None: + super().__init__(num_channels, **kwargs) + self.num_channels = self.normalized_shape[0] + + def forward(self, x, data_format='channel_first'): + """Forward method. + + Args: + x (torch.Tensor): The input tensor. + data_format (str): The format of the input tensor. If + ``"channel_first"``, the shape of the input tensor should be + (B, C, H, W). If ``"channel_last"``, the shape of the input + tensor should be (B, H, W, C). Defaults to "channel_first". + """ + assert x.dim() == 4, 'LayerNorm2d only supports inputs with shape ' \ + f'(N, C, H, W), but got tensor with shape {x.shape}' + if data_format == 'channel_last': + x = F.layer_norm(x, self.normalized_shape, self.weight, self.bias, + self.eps) + elif data_format == 'channel_first': + x = x.permute(0, 2, 3, 1) + x = F.layer_norm(x, self.normalized_shape, self.weight, self.bias, + self.eps) + # If the output is discontiguous, it may cause some unexpected + # problem in the downstream tasks + x = x.permute(0, 3, 1, 2).contiguous() + return x + + +def build_norm_layer(cfg: dict, num_features: int) -> nn.Module: + """Build normalization layer. + + Args: + cfg (dict): The norm layer config, which should contain: + + - type (str): Layer type. + - layer args: Args needed to instantiate a norm layer. + + num_features (int): Number of input channels. + + Returns: + nn.Module: The created norm layer. + """ + if not isinstance(cfg, dict): + raise TypeError('cfg must be a dict') + if 'type' not in cfg: + raise KeyError('the cfg dict must contain the key "type"') + cfg_ = cfg.copy() + + layer_type = cfg_.pop('type') + norm_layer = MODELS.get(layer_type) + if norm_layer is None: + raise KeyError(f'Cannot find {layer_type} in registry under scope ' + f'name {MODELS.scope}') + + requires_grad = cfg_.pop('requires_grad', True) + cfg_.setdefault('eps', 1e-5) + + if layer_type != 'GN': + layer = norm_layer(num_features, **cfg_) + else: + layer = norm_layer(num_channels=num_features, **cfg_) + + if layer_type == 'SyncBN' and hasattr(layer, '_specify_ddp_gpu_num'): + layer._specify_ddp_gpu_num(1) + + for param in layer.parameters(): + param.requires_grad = requires_grad + + return layer diff --git a/mmpretrain/models/utils/position_encoding.py b/mmpretrain/models/utils/position_encoding.py new file mode 100644 index 0000000000000000000000000000000000000000..07a3c486a25a84633d7e50463dd8b09f1c222837 --- /dev/null +++ b/mmpretrain/models/utils/position_encoding.py @@ -0,0 +1,247 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+import math +from functools import partial +from typing import Optional, Sequence, Union + +import torch +import torch.nn as nn +from mmengine.model import BaseModule +from mmengine.utils import digit_version + +from ..utils import to_2tuple + +# After pytorch v1.10.0, use torch.meshgrid without indexing +# will raise extra warning. For more details, +# refers to https://github.com/pytorch/pytorch/issues/50276 +if digit_version(torch.__version__) >= digit_version('1.10.0'): + torch_meshgrid = partial(torch.meshgrid, indexing='ij') +else: + torch_meshgrid = torch.meshgrid + + +class ConditionalPositionEncoding(BaseModule): + """The Conditional Position Encoding (CPE) module. + + The CPE is the implementation of 'Conditional Positional Encodings + for Vision Transformers '_. + + Args: + in_channels (int): Number of input channels. + embed_dims (int): The feature dimension. Default: 768. + stride (int): Stride of conv layer. Default: 1. + """ + + def __init__(self, in_channels, embed_dims=768, stride=1, init_cfg=None): + super(ConditionalPositionEncoding, self).__init__(init_cfg=init_cfg) + self.proj = nn.Conv2d( + in_channels, + embed_dims, + kernel_size=3, + stride=stride, + padding=1, + bias=True, + groups=embed_dims) + self.stride = stride + + def forward(self, x, hw_shape): + B, N, C = x.shape + H, W = hw_shape + feat_token = x + # convert (B, N, C) to (B, C, H, W) + cnn_feat = feat_token.transpose(1, 2).view(B, C, H, W).contiguous() + if self.stride == 1: + x = self.proj(cnn_feat) + cnn_feat + else: + x = self.proj(cnn_feat) + x = x.flatten(2).transpose(1, 2) + return x + + +class PositionEncodingFourier(BaseModule): + """The Position Encoding Fourier (PEF) module. + + The PEF is adopted from EdgeNeXt '_. + Args: + in_channels (int): Number of input channels. + Default: 32 + embed_dims (int): The feature dimension. + Default: 768. + temperature (int): Temperature. + Default: 10000. + dtype (torch.dtype): The data type. + Default: torch.float32. + init_cfg (dict): The config dict for initializing the module. + Default: None. 
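+
+    Example (an illustrative shape sketch; the batch and spatial sizes are
+    arbitrary)::
+
+        >>> import torch
+        >>> pe = PositionEncodingFourier(in_channels=32, embed_dims=768)
+        >>> pe((2, 7, 7)).shape
+        torch.Size([2, 768, 7, 7])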
+ """ + + def __init__(self, + in_channels=32, + embed_dims=768, + temperature=10000, + dtype=torch.float32, + init_cfg=None): + super(PositionEncodingFourier, self).__init__(init_cfg=init_cfg) + self.proj = nn.Conv2d(in_channels * 2, embed_dims, kernel_size=1) + self.scale = 2 * math.pi + self.in_channels = in_channels + self.embed_dims = embed_dims + self.dtype = dtype + + if digit_version(torch.__version__) < digit_version('1.8.0'): + floor_div = torch.floor_divide + else: + floor_div = partial(torch.div, rounding_mode='floor') + dim_t = torch.arange(in_channels, dtype=self.dtype) + self.dim_t = temperature**(2 * floor_div(dim_t, 2) / in_channels) + + def forward(self, bhw_shape): + B, H, W = bhw_shape + mask = torch.zeros(B, H, W).bool().to(self.proj.weight.device) + not_mask = ~mask + eps = 1e-6 + y_embed = not_mask.cumsum(1, dtype=self.dtype) + x_embed = not_mask.cumsum(2, dtype=self.dtype) + y_embed = y_embed / (y_embed[:, -1:, :] + eps) * self.scale + x_embed = x_embed / (x_embed[:, :, -1:] + eps) * self.scale + + dim_t = self.dim_t.to(mask.device) + pos_x = x_embed[:, :, :, None] / dim_t + pos_y = y_embed[:, :, :, None] / dim_t + pos_x = torch.stack( + (pos_x[:, :, :, 0::2].sin(), pos_x[:, :, :, 1::2].cos()), + dim=4).flatten(3) + pos_y = torch.stack( + (pos_y[:, :, :, 0::2].sin(), pos_y[:, :, :, 1::2].cos()), + dim=4).flatten(3) + + pos = torch.cat((pos_y, pos_x), dim=3).permute(0, 3, 1, 2) + pos = self.proj(pos) + + return pos + + +def build_2d_sincos_position_embedding( + patches_resolution: Union[int, Sequence[int]], + embed_dims: int, + temperature: Optional[int] = 10000., + cls_token: Optional[bool] = False) -> torch.Tensor: + """The function is to build position embedding for model to obtain the + position information of the image patches. + + Args: + patches_resolution (Union[int, Sequence[int]]): The resolution of each + patch. + embed_dims (int): The dimension of the embedding vector. + temperature (int, optional): The temperature parameter. Defaults to + 10000. + cls_token (bool, optional): Whether to concatenate class token. + Defaults to False. + + Returns: + torch.Tensor: The position embedding vector. + """ + + if isinstance(patches_resolution, int): + patches_resolution = (patches_resolution, patches_resolution) + + h, w = patches_resolution + grid_w = torch.arange(w, dtype=torch.float32) + grid_h = torch.arange(h, dtype=torch.float32) + grid_w, grid_h = torch_meshgrid(grid_w, grid_h) + assert embed_dims % 4 == 0, \ + 'Embed dimension must be divisible by 4.' + pos_dim = embed_dims // 4 + + omega = torch.arange(pos_dim, dtype=torch.float32) / pos_dim + omega = 1. / (temperature**omega) + out_w = torch.einsum('m,d->md', [grid_w.flatten(), omega]) + out_h = torch.einsum('m,d->md', [grid_h.flatten(), omega]) + + pos_emb = torch.cat( + [ + torch.sin(out_w), + torch.cos(out_w), + torch.sin(out_h), + torch.cos(out_h) + ], + dim=1, + )[None, :, :] + + if cls_token: + cls_token_pe = torch.zeros([1, 1, embed_dims], dtype=torch.float32) + pos_emb = torch.cat([cls_token_pe, pos_emb], dim=1) + + return pos_emb + + +class RotaryEmbeddingFast(BaseModule): + """Implements 2D rotary embedding (RoPE) for image tokens. Position + encoding is implemented with sin and cos functions, + + .. math:: + Pos_{cos} = cos(\frac{t}{\theta^{\frac{2i}{d}}} \\ + Pos_{sin} = sin(\frac{t}{\theta^{\frac{2i}{d}}} + Args: + embed_dims (int): The feature dimension for each head. + patch_resolution (int | tuple): The resolution of the + image, in format (H, W). 
+ theta (float): The hyperparameter for position coding. + Defaults to 10000. + init_cfg (dict, optional): Initialization config dict. + Defaults to None. + """ + + def __init__(self, + embed_dims, + patch_resolution, + theta=10000., + init_cfg=None): + super(RotaryEmbeddingFast, self).__init__(init_cfg=init_cfg) + + self.half_dim = embed_dims // 2 + self.patch_resolution = to_2tuple(patch_resolution) + self.theta = theta + + freqs_cos, freqs_sin = self.compute_position_embedding() + self.register_buffer('freqs_cos', freqs_cos) + self.register_buffer('freqs_sin', freqs_sin) + + def compute_position_embedding(self): + frequency = self.theta**( + torch.arange(0, self.half_dim, 2).float() / self.half_dim) + frequency = 1. / frequency + + h, w = self.patch_resolution + th = torch.arange(h) / h * self.half_dim + tw = torch.arange(w) / w * self.half_dim + + position_h = (th[:, None] @ frequency[None, :]).repeat(1, 2) + position_w = (tw[:, None] @ frequency[None, :]).repeat(1, 2) + + height = position_h[:, None, :].expand(h, w, self.half_dim) + width = position_w[None, :, :].expand(h, w, self.half_dim) + position = torch.cat((height, width), dim=-1) + + freqs_cos = position.cos().view(-1, position.shape[-1]) + freqs_sin = position.sin().view(-1, position.shape[-1]) + + return freqs_cos, freqs_sin + + def forward(self, x, patch_resolution): + # Check whether the patch resolution is the predefined size + patch_resolution = to_2tuple(patch_resolution) + if patch_resolution != self.patch_resolution: + self.patch_resolution = patch_resolution + freqs_cos, freqs_sin = self.compute_position_embedding() + self.register_buffer('freqs_cos', freqs_cos.to(x.device)) + self.register_buffer('freqs_sin', freqs_sin.to(x.device)) + + batch, num_heads, num_patches, dim = x.shape + + inputs = x + x = x.reshape(batch, num_heads, num_patches, -1, 2) + x1, x2 = x.unbind(dim=-1) + x = torch.stack((-x2, x1), dim=-1) + x = x.reshape(batch, num_heads, num_patches, dim) + + return inputs * self.freqs_cos + x * self.freqs_sin diff --git a/mmpretrain/models/utils/res_layer_extra_norm.py b/mmpretrain/models/utils/res_layer_extra_norm.py new file mode 100644 index 0000000000000000000000000000000000000000..37e387ba9795ec528bd210dab75bd05abdc0addf --- /dev/null +++ b/mmpretrain/models/utils/res_layer_extra_norm.py @@ -0,0 +1,31 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from .norm import build_norm_layer + +try: + from mmdet.models.backbones import ResNet + from mmdet.models.roi_heads.shared_heads.res_layer import ResLayer + from mmdet.registry import MODELS + + @MODELS.register_module() + class ResLayerExtraNorm(ResLayer): + """Add extra norm to original ``ResLayer``.""" + + def __init__(self, *args, **kwargs): + super(ResLayerExtraNorm, self).__init__(*args, **kwargs) + + block = ResNet.arch_settings[kwargs['depth']][0] + self.add_module( + 'norm', + build_norm_layer(self.norm_cfg, + 64 * 2**self.stage * block.expansion)) + + def forward(self, x): + """Forward function.""" + res_layer = getattr(self, f'layer{self.stage + 1}') + norm = getattr(self, 'norm') + x = res_layer(x) + out = norm(x) + return out + +except ImportError: + ResLayerExtraNorm = None diff --git a/mmpretrain/models/utils/se_layer.py b/mmpretrain/models/utils/se_layer.py new file mode 100644 index 0000000000000000000000000000000000000000..20290171008c2fd6f7a9e14e444f23b8375abe22 --- /dev/null +++ b/mmpretrain/models/utils/se_layer.py @@ -0,0 +1,80 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+import torch.nn as nn +from mmcv.cnn import ConvModule +from mmengine.model import BaseModule +from mmengine.utils import is_tuple_of + +from .make_divisible import make_divisible + + +class SELayer(BaseModule): + """Squeeze-and-Excitation Module. + + Args: + channels (int): The input (and output) channels of the SE layer. + squeeze_channels (None or int): The intermediate channel number of + SElayer. Default: None, means the value of ``squeeze_channels`` + is ``make_divisible(channels // ratio, divisor)``. + ratio (int): Squeeze ratio in SELayer, the intermediate channel will + be ``make_divisible(channels // ratio, divisor)``. Only used when + ``squeeze_channels`` is None. Default: 16. + divisor(int): The divisor to true divide the channel number. Only + used when ``squeeze_channels`` is None. Default: 8. + conv_cfg (None or dict): Config dict for convolution layer. Default: + None, which means using conv2d. + return_weight(bool): Whether to return the weight. Default: False. + act_cfg (dict or Sequence[dict]): Config dict for activation layer. + If act_cfg is a dict, two activation layers will be configurated + by this dict. If act_cfg is a sequence of dicts, the first + activation layer will be configurated by the first dict and the + second activation layer will be configurated by the second dict. + Default: (dict(type='ReLU'), dict(type='Sigmoid')) + """ + + def __init__(self, + channels, + squeeze_channels=None, + ratio=16, + divisor=8, + bias='auto', + conv_cfg=None, + act_cfg=(dict(type='ReLU'), dict(type='Sigmoid')), + return_weight=False, + init_cfg=None): + super(SELayer, self).__init__(init_cfg) + if isinstance(act_cfg, dict): + act_cfg = (act_cfg, act_cfg) + assert len(act_cfg) == 2 + assert is_tuple_of(act_cfg, dict) + self.global_avgpool = nn.AdaptiveAvgPool2d(1) + if squeeze_channels is None: + squeeze_channels = make_divisible(channels // ratio, divisor) + assert isinstance(squeeze_channels, int) and squeeze_channels > 0, \ + '"squeeze_channels" should be a positive integer, but get ' + \ + f'{squeeze_channels} instead.' + self.return_weight = return_weight + self.conv1 = ConvModule( + in_channels=channels, + out_channels=squeeze_channels, + kernel_size=1, + stride=1, + bias=bias, + conv_cfg=conv_cfg, + act_cfg=act_cfg[0]) + self.conv2 = ConvModule( + in_channels=squeeze_channels, + out_channels=channels, + kernel_size=1, + stride=1, + bias=bias, + conv_cfg=conv_cfg, + act_cfg=act_cfg[1]) + + def forward(self, x): + out = self.global_avgpool(x) + out = self.conv1(out) + out = self.conv2(out) + if self.return_weight: + return out + else: + return x * out diff --git a/mmpretrain/models/utils/swiglu_ffn.py b/mmpretrain/models/utils/swiglu_ffn.py new file mode 100644 index 0000000000000000000000000000000000000000..20b4591f4f09ae185dd28e432dff7919d98d3a50 --- /dev/null +++ b/mmpretrain/models/utils/swiglu_ffn.py @@ -0,0 +1,98 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Optional + +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn.bricks.drop import build_dropout + +from .layer_scale import LayerScale +from .norm import build_norm_layer + + +class SwiGLUFFN(nn.Module): + """SwiGLU FFN layer. 
+ + Modified from https://github.com/facebookresearch/dinov2/blob/main/dinov2/layers/swiglu_ffn.py + """ # noqa + + def __init__( + self, + embed_dims: int, + feedforward_channels: Optional[int] = None, + out_dims: Optional[int] = None, + layer_scale_init_value: float = 0., + bias: bool = True, + dropout_layer: Optional[dict] = None, + norm_cfg: Optional[dict] = None, + add_identity: bool = True, + ) -> None: + super().__init__() + self.embed_dims = embed_dims + self.out_dims = out_dims or embed_dims + hidden_dims = feedforward_channels or embed_dims + + self.w12 = nn.Linear(self.embed_dims, 2 * hidden_dims, bias=bias) + + if norm_cfg is not None: + self.norm = build_norm_layer(norm_cfg, hidden_dims) + else: + self.norm = nn.Identity() + + self.w3 = nn.Linear(hidden_dims, self.out_dims, bias=bias) + + if layer_scale_init_value > 0: + self.gamma2 = LayerScale( + dim=embed_dims, layer_scale_init_value=layer_scale_init_value) + else: + self.gamma2 = nn.Identity() + + self.dropout_layer = build_dropout( + dropout_layer) if dropout_layer else torch.nn.Identity() + self.add_identity = add_identity + + def forward(self, + x: torch.Tensor, + identity: Optional[torch.Tensor] = None) -> torch.Tensor: + x12 = self.w12(x) + x1, x2 = x12.chunk(2, dim=-1) + hidden = F.silu(x1) * x2 + hidden = self.norm(hidden) + out = self.w3(hidden) + out = self.gamma2(out) + out = self.dropout_layer(out) + + if self.out_dims != self.embed_dims or not self.add_identity: + # due to the dimension inconsistence or user setting + # not to apply residual operation + return out + + if identity is None: + identity = x + return identity + out + + +class SwiGLUFFNFused(SwiGLUFFN): + """SwiGLU FFN layer with fusing. + + Modified from https://github.com/facebookresearch/dinov2/blob/main/dinov2/layers/swiglu_ffn.py + """ # noqa + + def __init__( + self, + embed_dims: int, + feedforward_channels: Optional[int] = None, + out_dims: Optional[int] = None, + layer_scale_init_value: float = 0., + bias: bool = True, + ) -> None: + out_dims = out_dims or embed_dims + feedforward_channels = feedforward_channels or embed_dims + feedforward_channels = (int(feedforward_channels * 2 / 3) + 7) // 8 * 8 + super().__init__( + embed_dims=embed_dims, + feedforward_channels=feedforward_channels, + out_dims=out_dims, + layer_scale_init_value=layer_scale_init_value, + bias=bias, + ) diff --git a/mmpretrain/models/utils/tokenizer.py b/mmpretrain/models/utils/tokenizer.py new file mode 100644 index 0000000000000000000000000000000000000000..5b8a324bad00ff03a9ce24dc4cff222e379f1520 --- /dev/null +++ b/mmpretrain/models/utils/tokenizer.py @@ -0,0 +1,187 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+import collections +import os + +from mmengine.fileio import list_from_file +from transformers import (AutoTokenizer, BartTokenizer, BasicTokenizer, + BertTokenizer, BertTokenizerFast, LlamaTokenizer, + WordpieceTokenizer) + +from mmpretrain.registry import TOKENIZER +from .huggingface import register_hf_tokenizer + +register_hf_tokenizer(AutoTokenizer) +register_hf_tokenizer(LlamaTokenizer) + + +@register_hf_tokenizer() +class BlipTokenizer(BertTokenizerFast): + """"BlipTokenizer inherit BertTokenizerFast (fast, Rust-based).""" + + @classmethod + def from_pretrained( + cls, + pretrained_model_name_or_path, + *init_inputs, + **kwargs, + ): + os.environ['TOKENIZERS_PARALLELISM'] = 'true' + + tokenizer = super().from_pretrained( + pretrained_model_name_or_path, + *init_inputs, + **kwargs, + ) + tokenizer.add_special_tokens({'bos_token': '[DEC]'}) + tokenizer.add_special_tokens({'additional_special_tokens': ['[ENC]']}) + return tokenizer + + +@register_hf_tokenizer() +class Blip2Tokenizer(BertTokenizer): + + @classmethod + def from_pretrained( + cls, + pretrained_model_name_or_path, + *init_inputs, + **kwargs, + ): + tokenizer = super().from_pretrained( + pretrained_model_name_or_path, + *init_inputs, + **kwargs, + ) + tokenizer.add_special_tokens({'bos_token': '[DEC]'}) + return tokenizer + + +@register_hf_tokenizer() +class OFATokenizer(BartTokenizer): + + vocab_files_names = { + 'vocab_file': 'vocab.json', + 'merges_file': 'merges.txt' + } + + pretrained_vocab_files_map = { + 'vocab_file': { + 'OFA-Sys/OFA-tiny': + 'https://huggingface.co/OFA-Sys/OFA-tiny/blob/main/vocab.json', + 'OFA-Sys/OFA-medium': + 'https://huggingface.co/OFA-Sys/OFA-medium/blob/main/vocab.json', + 'OFA-Sys/OFA-base': + 'https://huggingface.co/OFA-Sys/OFA-base/blob/main/vocab.json', + 'OFA-Sys/OFA-large': + 'https://huggingface.co/OFA-Sys/OFA-large/blob/main/vocab.json', + }, + 'merges_file': { + 'OFA-Sys/OFA-tiny': + 'https://huggingface.co/OFA-Sys/OFA-tiny/blob/main/merges.txt', + 'OFA-Sys/OFA-medium': + 'https://huggingface.co/OFA-Sys/OFA-medium/blob/main/merges.txt', + 'OFA-Sys/OFA-base': + 'https://huggingface.co/OFA-Sys/OFA-base/blob/main/merges.txt', + 'OFA-Sys/OFA-large': + 'https://huggingface.co/OFA-Sys/OFA-large/blob/main/merges.txt', + }, + } + + max_model_input_sizes = { + 'OFA-Sys/OFA-tiny': 1024, + 'OFA-Sys/OFA-medium': 1024, + 'OFA-Sys/OFA-base': 1024, + 'OFA-Sys/OFA-large': 1024, + } + + @classmethod + def from_pretrained( + cls, + pretrained_model_name_or_path, + *init_inputs, + **kwargs, + ): + num_bins = kwargs.pop('num_bins', 1000) + tokenizer = super().from_pretrained( + pretrained_model_name_or_path, + *init_inputs, + **kwargs, + ) + length = len(tokenizer) + tokenizer.add_tokens([''.format(i) for i in range(8192)]) + tokenizer.code_offset = length + tokenizer.add_tokens([''.format(i) for i in range(num_bins)]) + tokenizer.bin_offset = length + 8192 + tokenizer.num_bins = num_bins + return tokenizer + + +@TOKENIZER.register_module() +class FullTokenizer(BertTokenizer): + """Runs end-to-end tokenziation.""" + + def __init__(self, vocab_file, do_lower_case=True): + self.vocab = self.load_vocab(vocab_file) + self.inv_vocab = {v: k for k, v in self.vocab.items()} + self.basic_tokenizer = BasicTokenizer(do_lower_case=do_lower_case) + self.wordpiece_tokenizer = WordpieceTokenizer( + vocab=self.vocab, unk_token='[UNK]', max_input_chars_per_word=200) + + def load_vocab(self, vocab_file): + """Loads a vocabulary file into a dictionary.""" + vocab = collections.OrderedDict() + index = 0 + vocab_list = 
list_from_file(vocab_file) + for token in vocab_list: + if not token: + break + token = token.strip() + vocab[token] = index + index += 1 + return vocab + + def tokenize(self, text): + split_tokens = [] + for token in self.basic_tokenizer.tokenize(text): + for sub_token in self.wordpiece_tokenizer.tokenize(token): + split_tokens.append(sub_token) + + return split_tokens + + def convert_by_vocab(self, vocab, items): + """Converts a sequence of [tokens|ids] using the vocab.""" + output = [] + for item in items: + output.append(vocab[item]) + return output + + def convert_tokens_to_ids(self, tokens): + return self.convert_by_vocab(self.vocab, tokens) + + def convert_ids_to_tokens(self, ids): + return self.convert_by_vocab(self.inv_vocab, ids) + + @staticmethod + def convert_tokens_to_string(tokens, clean_up_tokenization_spaces=True): + """Converts a sequence of tokens (string) in a single string.""" + + def clean_up_tokenization(out_string): + """Clean up a list of simple English tokenization artifacts like + spaces before punctuations and abbreviated forms.""" + out_string = ( + out_string.replace(' .', '.').replace(' ?', '?').replace( + ' !', '!').replace(' ,', ',').replace(" ' ", "'").replace( + " n't", "n't").replace(" 'm", "'m").replace( + " 's", "'s").replace(" 've", + "'ve").replace(" 're", "'re")) + return out_string + + text = ' '.join(tokens).replace(' ##', '').strip() + if clean_up_tokenization_spaces: + clean_text = clean_up_tokenization(text) + return clean_text + else: + return text + + def vocab_size(self): + return len(self.vocab) diff --git a/mmpretrain/models/utils/vector_quantizer.py b/mmpretrain/models/utils/vector_quantizer.py new file mode 100644 index 0000000000000000000000000000000000000000..7c2ea89339e190d0d19bf5c89b60c1d4bab8fad5 --- /dev/null +++ b/mmpretrain/models/utils/vector_quantizer.py @@ -0,0 +1,232 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+# Copyright (c) 2022 Microsoft +# Modified from +# https://github.com/microsoft/unilm/blob/master/beit2/norm_ema_quantizer.py +from typing import Optional, Tuple + +import torch +import torch.nn as nn +import torch.nn.functional as F +from einops import rearrange, repeat +from mmengine.dist import all_reduce + + +def ema_inplace(moving_avg: torch.Tensor, new: torch.Tensor, + decay: torch.Tensor) -> None: + """Update moving average.""" + moving_avg.data.mul_(decay).add_(new, alpha=(1 - decay)) + + +def norm_ema_inplace(moving_avg: torch.Tensor, new: torch.Tensor, + decay: torch.Tensor) -> None: + """Update moving average with norm data.""" + moving_avg.data.mul_(decay).add_(new, alpha=(1 - decay)) + moving_avg.data.copy_(F.normalize(moving_avg.data, p=2, dim=-1)) + + +def sample_vectors(samples: torch.Tensor, num: int) -> torch.Tensor: + """Sample vectors according to the given number.""" + num_samples, device = samples.shape[0], samples.device + + if num_samples >= num: + indices = torch.randperm(num_samples, device=device)[:num] + else: + indices = torch.randint(0, num_samples, (num, ), device=device) + + return samples[indices] + + +def kmeans(samples: torch.Tensor, + num_clusters: int, + num_iters: int = 10, + use_cosine_sim: bool = False) -> Tuple[torch.Tensor, torch.Tensor]: + """Run k-means algorithm.""" + dim, dtype, _ = samples.shape[-1], samples.dtype, samples.device + + means = sample_vectors(samples, num_clusters) + + for _ in range(num_iters): + if use_cosine_sim: + dists = samples @ means.t() + else: + diffs = rearrange(samples, 'n d -> n () d') \ + - rearrange(means, 'c d -> () c d') + dists = -(diffs**2).sum(dim=-1) + + buckets = dists.max(dim=-1).indices + bins = torch.bincount(buckets, minlength=num_clusters) + zero_mask = bins == 0 + bins_min_clamped = bins.masked_fill(zero_mask, 1) + + new_means = buckets.new_zeros(num_clusters, dim, dtype=dtype) + new_means.scatter_add_(0, repeat(buckets, 'n -> n d', d=dim), samples) + new_means = new_means / bins_min_clamped[..., None] + + if use_cosine_sim: + new_means = F.normalize(new_means, p=2, dim=-1) + + means = torch.where(zero_mask[..., None], means, new_means) + + return means, bins + + +class EmbeddingEMA(nn.Module): + """The codebook of embedding vectors. + + Args: + num_tokens (int): Number of embedding vectors in the codebook. + codebook_dim (int) : The dimension of embedding vectors in the + codebook. + kmeans_init (bool): Whether to use k-means to initialize the + VectorQuantizer. Defaults to True. + codebook_init_path (str): The initialization checkpoint for codebook. + Defaults to None. 
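+
+    Example (illustrative lookup only; ``kmeans_init=False`` keeps the
+    randomly initialised codebook, so no k-means step is triggered)::
+
+        >>> import torch
+        >>> codebook = EmbeddingEMA(
+        ...     num_tokens=8192, codebook_dim=32, kmeans_init=False)
+        >>> codebook(torch.tensor([0, 1, 2])).shape
+        torch.Size([3, 32])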
+ """ + + def __init__(self, + num_tokens: int, + codebook_dim: int, + kmeans_init: bool = True, + codebook_init_path: Optional[str] = None): + super().__init__() + self.num_tokens = num_tokens + self.codebook_dim = codebook_dim + if codebook_init_path is None: + if not kmeans_init: + weight = torch.randn(num_tokens, codebook_dim) + weight = F.normalize(weight, p=2, dim=-1) + else: + weight = torch.zeros(num_tokens, codebook_dim) + self.register_buffer('initted', torch.Tensor([not kmeans_init])) + else: + print(f'load init codebook weight from {codebook_init_path}') + codebook_ckpt_weight = torch.load( + codebook_init_path, map_location='cpu') + weight = codebook_ckpt_weight.clone() + self.register_buffer('initted', torch.Tensor([True])) + + self.weight = nn.Parameter(weight, requires_grad=False) + self.update = True + + @torch.jit.ignore + def init_embed_(self, data: torch.Tensor) -> None: + """Initialize embedding vectors of codebook.""" + if self.initted: + return + print('Performing K-means init for codebook') + embed, _ = kmeans(data, self.num_tokens, 10, use_cosine_sim=True) + self.weight.data.copy_(embed) + self.initted.data.copy_(torch.Tensor([True])) + + def forward(self, embed_id: torch.Tensor) -> torch.Tensor: + """Get embedding vectors.""" + return F.embedding(embed_id, self.weight) + + +class NormEMAVectorQuantizer(nn.Module): + """Normed EMA vector quantizer module. + + Args: + num_embed (int): Number of embedding vectors in the codebook. Defaults + to 8192. + embed_dims (int) : The dimension of embedding vectors in the codebook. + Defaults to 32. + beta (float): The mutiplier for VectorQuantizer embedding loss. + Defaults to 1. + decay (float): The decay parameter of EMA. Defaults to 0.99. + statistic_code_usage (bool): Whether to use cluster_size to record + statistic. Defaults to True. + kmeans_init (bool): Whether to use k-means to initialize the + VectorQuantizer. Defaults to True. + codebook_init_path (str): The initialization checkpoint for codebook. + Defaults to None. 
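+
+    Example (a minimal sketch of the quantization round trip; all sizes are
+    illustrative and ``kmeans_init=False`` avoids the k-means warm start)::
+
+        >>> import torch
+        >>> quantizer = NormEMAVectorQuantizer(
+        ...     num_embed=8192, embed_dims=32, beta=1.0, kmeans_init=False)
+        >>> z = torch.rand(2, 32, 8, 8)
+        >>> z_q, loss, indices = quantizer(z)
+        >>> z_q.shape, indices.shape
+        (torch.Size([2, 32, 8, 8]), torch.Size([128]))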
+ """ + + def __init__(self, + num_embed: int, + embed_dims: int, + beta: float, + decay: float = 0.99, + statistic_code_usage: bool = True, + kmeans_init: bool = True, + codebook_init_path: Optional[str] = None) -> None: + super().__init__() + self.codebook_dim = embed_dims + self.num_tokens = num_embed + self.beta = beta + self.decay = decay + + # learnable = True if orthogonal_reg_weight > 0 else False + self.embedding = EmbeddingEMA( + num_tokens=self.num_tokens, + codebook_dim=self.codebook_dim, + kmeans_init=kmeans_init, + codebook_init_path=codebook_init_path) + + self.statistic_code_usage = statistic_code_usage + if statistic_code_usage: + self.register_buffer('cluster_size', torch.zeros(num_embed)) + + def reset_cluster_size(self, device): + + if self.statistic_code_usage: + self.register_buffer('cluster_size', torch.zeros(self.num_tokens)) + self.cluster_size = self.cluster_size.to(device) + + def forward(self, z): + """Forward function.""" + # reshape z -> (batch, height, width, channel) + z = rearrange(z, 'b c h w -> b h w c') + z = F.normalize(z, p=2, dim=-1) + z_flattened = z.reshape(-1, self.codebook_dim) + + self.embedding.init_embed_(z_flattened) + + # 'n d -> d n' + d = z_flattened.pow(2).sum(dim=1, keepdim=True) + \ + self.embedding.weight.pow(2).sum(dim=1) - 2 * \ + torch.einsum('bd,nd->bn', z_flattened, self.embedding.weight) + + encoding_indices = torch.argmin(d, dim=1) + + z_q = self.embedding(encoding_indices).view(z.shape) + + encodings = F.one_hot(encoding_indices, self.num_tokens).type(z.dtype) + + if not self.training: + with torch.no_grad(): + cluster_size = encodings.sum(0) + all_reduce(cluster_size) + ema_inplace(self.cluster_size, cluster_size, self.decay) + + if self.training and self.embedding.update: + # update cluster size with EMA + bins = encodings.sum(0) + all_reduce(bins) + ema_inplace(self.cluster_size, bins, self.decay) + + zero_mask = (bins == 0) + bins = bins.masked_fill(zero_mask, 1.) + + embed_sum = z_flattened.t() @ encodings + all_reduce(embed_sum) + + embed_normalized = (embed_sum / bins.unsqueeze(0)).t() + embed_normalized = F.normalize(embed_normalized, p=2, dim=-1) + embed_normalized = torch.where(zero_mask[..., None], + self.embedding.weight, + embed_normalized) + + # Update embedding vectors with EMA + norm_ema_inplace(self.embedding.weight, embed_normalized, + self.decay) + + # compute loss for embedding + loss = self.beta * F.mse_loss(z_q.detach(), z) + + # preserve gradients + z_q = z + (z_q - z).detach() + + # reshape back to match original input shape + z_q = rearrange(z_q, 'b h w c -> b c h w') + return z_q, loss, encoding_indices diff --git a/mmpretrain/registry.py b/mmpretrain/registry.py new file mode 100644 index 0000000000000000000000000000000000000000..cac2bdad725b9adf5c345d58e5e4a0320b3ddcd4 --- /dev/null +++ b/mmpretrain/registry.py @@ -0,0 +1,195 @@ +# Copyright (c) OpenMMLab. All rights reserved. +"""MMPretrain provides 21 registry nodes to support using modules across +projects. Each node is a child of the root registry in MMEngine. + +More details can be found at +https://mmengine.readthedocs.io/en/latest/tutorials/registry.html. 
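+
+Example (an illustrative build call; it relies on the lazy import through the
+``locations`` field and assumes the ``ResNet`` backbone is registered under
+``mmpretrain.models``):
+
+.. code-block:: python
+
+    from mmpretrain.registry import MODELS
+
+    backbone = MODELS.build(dict(type='ResNet', depth=18))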
+""" + +from mmengine.registry import DATA_SAMPLERS as MMENGINE_DATA_SAMPLERS +from mmengine.registry import DATASETS as MMENGINE_DATASETS +from mmengine.registry import EVALUATOR as MMENGINE_EVALUATOR +from mmengine.registry import HOOKS as MMENGINE_HOOKS +from mmengine.registry import LOG_PROCESSORS as MMENGINE_LOG_PROCESSORS +from mmengine.registry import LOOPS as MMENGINE_LOOPS +from mmengine.registry import METRICS as MMENGINE_METRICS +from mmengine.registry import MODEL_WRAPPERS as MMENGINE_MODEL_WRAPPERS +from mmengine.registry import MODELS as MMENGINE_MODELS +from mmengine.registry import \ + OPTIM_WRAPPER_CONSTRUCTORS as MMENGINE_OPTIM_WRAPPER_CONSTRUCTORS +from mmengine.registry import OPTIM_WRAPPERS as MMENGINE_OPTIM_WRAPPERS +from mmengine.registry import OPTIMIZERS as MMENGINE_OPTIMIZERS +from mmengine.registry import PARAM_SCHEDULERS as MMENGINE_PARAM_SCHEDULERS +from mmengine.registry import \ + RUNNER_CONSTRUCTORS as MMENGINE_RUNNER_CONSTRUCTORS +from mmengine.registry import RUNNERS as MMENGINE_RUNNERS +from mmengine.registry import TASK_UTILS as MMENGINE_TASK_UTILS +from mmengine.registry import TRANSFORMS as MMENGINE_TRANSFORMS +from mmengine.registry import VISBACKENDS as MMENGINE_VISBACKENDS +from mmengine.registry import VISUALIZERS as MMENGINE_VISUALIZERS +from mmengine.registry import \ + WEIGHT_INITIALIZERS as MMENGINE_WEIGHT_INITIALIZERS +from mmengine.registry import Registry + +__all__ = [ + 'RUNNERS', 'RUNNER_CONSTRUCTORS', 'LOOPS', 'HOOKS', 'LOG_PROCESSORS', + 'OPTIMIZERS', 'OPTIM_WRAPPERS', 'OPTIM_WRAPPER_CONSTRUCTORS', + 'PARAM_SCHEDULERS', 'DATASETS', 'DATA_SAMPLERS', 'TRANSFORMS', 'MODELS', + 'MODEL_WRAPPERS', 'WEIGHT_INITIALIZERS', 'BATCH_AUGMENTS', 'TASK_UTILS', + 'METRICS', 'EVALUATORS', 'VISUALIZERS', 'VISBACKENDS' +] + +####################################################################### +# mmpretrain.engine # +####################################################################### + +# Runners like `EpochBasedRunner` and `IterBasedRunner` +RUNNERS = Registry( + 'runner', + parent=MMENGINE_RUNNERS, + locations=['mmpretrain.engine'], +) +# Runner constructors that define how to initialize runners +RUNNER_CONSTRUCTORS = Registry( + 'runner constructor', + parent=MMENGINE_RUNNER_CONSTRUCTORS, + locations=['mmpretrain.engine'], +) +# Loops which define the training or test process, like `EpochBasedTrainLoop` +LOOPS = Registry( + 'loop', + parent=MMENGINE_LOOPS, + locations=['mmpretrain.engine'], +) +# Hooks to add additional functions during running, like `CheckpointHook` +HOOKS = Registry( + 'hook', + parent=MMENGINE_HOOKS, + locations=['mmpretrain.engine'], +) +# Log processors to process the scalar log data. +LOG_PROCESSORS = Registry( + 'log processor', + parent=MMENGINE_LOG_PROCESSORS, + locations=['mmpretrain.engine'], +) +# Optimizers to optimize the model weights, like `SGD` and `Adam`. +OPTIMIZERS = Registry( + 'optimizer', + parent=MMENGINE_OPTIMIZERS, + locations=['mmpretrain.engine'], +) +# Optimizer wrappers to enhance the optimization process. +OPTIM_WRAPPERS = Registry( + 'optimizer_wrapper', + parent=MMENGINE_OPTIM_WRAPPERS, + locations=['mmpretrain.engine'], +) +# Optimizer constructors to customize the hyperparameters of optimizers. +OPTIM_WRAPPER_CONSTRUCTORS = Registry( + 'optimizer wrapper constructor', + parent=MMENGINE_OPTIM_WRAPPER_CONSTRUCTORS, + locations=['mmpretrain.engine'], +) +# Parameter schedulers to dynamically adjust optimization parameters. 
+PARAM_SCHEDULERS = Registry( + 'parameter scheduler', + parent=MMENGINE_PARAM_SCHEDULERS, + locations=['mmpretrain.engine'], +) + +####################################################################### +# mmpretrain.datasets # +####################################################################### + +# Datasets like `ImageNet` and `CIFAR10`. +DATASETS = Registry( + 'dataset', + parent=MMENGINE_DATASETS, + locations=['mmpretrain.datasets'], +) +# Samplers to sample the dataset. +DATA_SAMPLERS = Registry( + 'data sampler', + parent=MMENGINE_DATA_SAMPLERS, + locations=['mmpretrain.datasets'], +) +# Transforms to process the samples from the dataset. +TRANSFORMS = Registry( + 'transform', + parent=MMENGINE_TRANSFORMS, + locations=['mmpretrain.datasets'], +) + +####################################################################### +# mmpretrain.models # +####################################################################### + +# Neural network modules inheriting `nn.Module`. +MODELS = Registry( + 'model', + parent=MMENGINE_MODELS, + locations=['mmpretrain.models'], +) +# Model wrappers like 'MMDistributedDataParallel' +MODEL_WRAPPERS = Registry( + 'model_wrapper', + parent=MMENGINE_MODEL_WRAPPERS, + locations=['mmpretrain.models'], +) +# Weight initialization methods like uniform, xavier. +WEIGHT_INITIALIZERS = Registry( + 'weight initializer', + parent=MMENGINE_WEIGHT_INITIALIZERS, + locations=['mmpretrain.models'], +) +# Batch augmentations like `Mixup` and `CutMix`. +BATCH_AUGMENTS = Registry( + 'batch augment', + locations=['mmpretrain.models'], +) +# Task-specific modules like anchor generators and box coders +TASK_UTILS = Registry( + 'task util', + parent=MMENGINE_TASK_UTILS, + locations=['mmpretrain.models'], +) +# Tokenizer to encode sequence +TOKENIZER = Registry( + 'tokenizer', + locations=['mmpretrain.models'], +) + +####################################################################### +# mmpretrain.evaluation # +####################################################################### + +# Metrics to evaluate the model prediction results. +METRICS = Registry( + 'metric', + parent=MMENGINE_METRICS, + locations=['mmpretrain.evaluation'], +) +# Evaluators to define the evaluation process. +EVALUATORS = Registry( + 'evaluator', + parent=MMENGINE_EVALUATOR, + locations=['mmpretrain.evaluation'], +) + +####################################################################### +# mmpretrain.visualization # +####################################################################### + +# Visualizers to display task-specific results. +VISUALIZERS = Registry( + 'visualizer', + parent=MMENGINE_VISUALIZERS, + locations=['mmpretrain.visualization'], +) +# Backends to save the visualization results, like TensorBoard, WandB. +VISBACKENDS = Registry( + 'vis_backend', + parent=MMENGINE_VISBACKENDS, + locations=['mmpretrain.visualization'], +) diff --git a/mmpretrain/structures/__init__.py b/mmpretrain/structures/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e7de863087d9d07800ff119d3c8b941059ef3886 --- /dev/null +++ b/mmpretrain/structures/__init__.py @@ -0,0 +1,10 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+from .data_sample import DataSample +from .multi_task_data_sample import MultiTaskDataSample +from .utils import (batch_label_to_onehot, cat_batch_labels, format_label, + format_score, label_to_onehot, tensor_split) + +__all__ = [ + 'DataSample', 'batch_label_to_onehot', 'cat_batch_labels', 'tensor_split', + 'MultiTaskDataSample', 'label_to_onehot', 'format_label', 'format_score' +] diff --git a/mmpretrain/structures/data_sample.py b/mmpretrain/structures/data_sample.py new file mode 100644 index 0000000000000000000000000000000000000000..ce588b8ba13811afdb2bb3300d42f221a6f2df7f --- /dev/null +++ b/mmpretrain/structures/data_sample.py @@ -0,0 +1,167 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from multiprocessing.reduction import ForkingPickler +from typing import Union + +import numpy as np +import torch +from mmengine.structures import BaseDataElement + +from .utils import LABEL_TYPE, SCORE_TYPE, format_label, format_score + + +class DataSample(BaseDataElement): + """A general data structure interface. + + It's used as the interface between different components. + + The following fields are convention names in MMPretrain, and we will set or + get these fields in data transforms, models, and metrics if needed. You can + also set any new fields for your need. + + Meta fields: + img_shape (Tuple): The shape of the corresponding input image. + ori_shape (Tuple): The original shape of the corresponding image. + sample_idx (int): The index of the sample in the dataset. + num_classes (int): The number of all categories. + + Data fields: + gt_label (tensor): The ground truth label. + gt_score (tensor): The ground truth score. + pred_label (tensor): The predicted label. + pred_score (tensor): The predicted score. + mask (tensor): The mask used in masked image modeling. + + Examples: + >>> import torch + >>> from mmpretrain.structures import DataSample + >>> + >>> img_meta = dict(img_shape=(960, 720), num_classes=5) + >>> data_sample = DataSample(metainfo=img_meta) + >>> data_sample.set_gt_label(3) + >>> print(data_sample) + + >>> + >>> # For multi-label data + >>> data_sample = DataSample().set_gt_label([0, 1, 4]) + >>> print(data_sample) + + >>> + >>> # Set one-hot format score + >>> data_sample = DataSample().set_pred_score([0.1, 0.1, 0.6, 0.1]) + >>> print(data_sample) + + >>> + >>> # Set custom field + >>> data_sample = DataSample() + >>> data_sample.my_field = [1, 2, 3] + >>> print(data_sample) + + >>> print(data_sample.my_field) + [1, 2, 3] + """ + + def set_gt_label(self, value: LABEL_TYPE) -> 'DataSample': + """Set ``gt_label``.""" + self.set_field(format_label(value), 'gt_label', dtype=torch.Tensor) + return self + + def set_gt_score(self, value: SCORE_TYPE) -> 'DataSample': + """Set ``gt_score``.""" + score = format_score(value) + self.set_field(score, 'gt_score', dtype=torch.Tensor) + if hasattr(self, 'num_classes'): + assert len(score) == self.num_classes, \ + f'The length of score {len(score)} should be '\ + f'equal to the num_classes {self.num_classes}.' 
+ else: + self.set_field( + name='num_classes', value=len(score), field_type='metainfo') + return self + + def set_pred_label(self, value: LABEL_TYPE) -> 'DataSample': + """Set ``pred_label``.""" + self.set_field(format_label(value), 'pred_label', dtype=torch.Tensor) + return self + + def set_pred_score(self, value: SCORE_TYPE): + """Set ``pred_label``.""" + score = format_score(value) + self.set_field(score, 'pred_score', dtype=torch.Tensor) + if hasattr(self, 'num_classes'): + assert len(score) == self.num_classes, \ + f'The length of score {len(score)} should be '\ + f'equal to the num_classes {self.num_classes}.' + else: + self.set_field( + name='num_classes', value=len(score), field_type='metainfo') + return self + + def set_mask(self, value: Union[torch.Tensor, np.ndarray]): + if isinstance(value, np.ndarray): + value = torch.from_numpy(value) + elif not isinstance(value, torch.Tensor): + raise TypeError(f'Invalid mask type {type(value)}') + self.set_field(value, 'mask', dtype=torch.Tensor) + return self + + def __repr__(self) -> str: + """Represent the object.""" + + def dump_items(items, prefix=''): + return '\n'.join(f'{prefix}{k}: {v}' for k, v in items) + + repr_ = '' + if len(self._metainfo_fields) > 0: + repr_ += '\n\nMETA INFORMATION\n' + repr_ += dump_items(self.metainfo_items(), prefix=' ' * 4) + if len(self._data_fields) > 0: + repr_ += '\n\nDATA FIELDS\n' + repr_ += dump_items(self.items(), prefix=' ' * 4) + + repr_ = f'<{self.__class__.__name__}({repr_}\n\n) at {hex(id(self))}>' + return repr_ + + +def _reduce_datasample(data_sample): + """reduce DataSample.""" + attr_dict = data_sample.__dict__ + convert_keys = [] + for k, v in attr_dict.items(): + if isinstance(v, torch.Tensor): + attr_dict[k] = v.numpy() + convert_keys.append(k) + return _rebuild_datasample, (attr_dict, convert_keys) + + +def _rebuild_datasample(attr_dict, convert_keys): + """rebuild DataSample.""" + data_sample = DataSample() + for k in convert_keys: + attr_dict[k] = torch.from_numpy(attr_dict[k]) + data_sample.__dict__ = attr_dict + return data_sample + + +# Due to the multi-processing strategy of PyTorch, DataSample may consume many +# file descriptors because it contains multiple tensors. Here we overwrite the +# reduce function of DataSample in ForkingPickler and convert these tensors to +# np.ndarray during pickling. It may slightly influence the performance of +# dataloader. +ForkingPickler.register(DataSample, _reduce_datasample) diff --git a/mmpretrain/structures/multi_task_data_sample.py b/mmpretrain/structures/multi_task_data_sample.py new file mode 100644 index 0000000000000000000000000000000000000000..f00993861bfb4f35fb7d145198f81c5e9f0a5993 --- /dev/null +++ b/mmpretrain/structures/multi_task_data_sample.py @@ -0,0 +1,10 @@ +# Copyright (c) OpenMMLab. All rights reserved. + +from mmengine.structures import BaseDataElement + + +class MultiTaskDataSample(BaseDataElement): + + @property + def tasks(self): + return self._data_fields diff --git a/mmpretrain/structures/utils.py b/mmpretrain/structures/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..a4f9e95ef6fd557b9d0bdf5f017a7b73ba250453 --- /dev/null +++ b/mmpretrain/structures/utils.py @@ -0,0 +1,153 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+from typing import List, Sequence, Union + +import numpy as np +import torch +import torch.nn.functional as F +from mmengine.utils import is_str + +if hasattr(torch, 'tensor_split'): + tensor_split = torch.tensor_split +else: + # A simple implementation of `tensor_split`. + def tensor_split(input: torch.Tensor, indices: list): + outs = [] + for start, end in zip([0] + indices, indices + [input.size(0)]): + outs.append(input[start:end]) + return outs + + +LABEL_TYPE = Union[torch.Tensor, np.ndarray, Sequence, int] +SCORE_TYPE = Union[torch.Tensor, np.ndarray, Sequence] + + +def format_label(value: LABEL_TYPE) -> torch.Tensor: + """Convert various python types to label-format tensor. + + Supported types are: :class:`numpy.ndarray`, :class:`torch.Tensor`, + :class:`Sequence`, :class:`int`. + + Args: + value (torch.Tensor | numpy.ndarray | Sequence | int): Label value. + + Returns: + :obj:`torch.Tensor`: The foramtted label tensor. + """ + + # Handle single number + if isinstance(value, (torch.Tensor, np.ndarray)) and value.ndim == 0: + value = int(value.item()) + + if isinstance(value, np.ndarray): + value = torch.from_numpy(value).to(torch.long) + elif isinstance(value, Sequence) and not is_str(value): + value = torch.tensor(value).to(torch.long) + elif isinstance(value, int): + value = torch.LongTensor([value]) + elif not isinstance(value, torch.Tensor): + raise TypeError(f'Type {type(value)} is not an available label type.') + assert value.ndim == 1, \ + f'The dims of value should be 1, but got {value.ndim}.' + + return value + + +def format_score(value: SCORE_TYPE) -> torch.Tensor: + """Convert various python types to score-format tensor. + + Supported types are: :class:`numpy.ndarray`, :class:`torch.Tensor`, + :class:`Sequence`. + + Args: + value (torch.Tensor | numpy.ndarray | Sequence): Score values. + + Returns: + :obj:`torch.Tensor`: The foramtted score tensor. + """ + + if isinstance(value, np.ndarray): + value = torch.from_numpy(value).float() + elif isinstance(value, Sequence) and not is_str(value): + value = torch.tensor(value).float() + elif not isinstance(value, torch.Tensor): + raise TypeError(f'Type {type(value)} is not an available label type.') + assert value.ndim == 1, \ + f'The dims of value should be 1, but got {value.ndim}.' + + return value + + +def cat_batch_labels(elements: List[torch.Tensor]): + """Concat a batch of label tensor to one tensor. + + Args: + elements (List[tensor]): A batch of labels. + + Returns: + Tuple[torch.Tensor, List[int]]: The first item is the concated label + tensor, and the second item is the split indices of every sample. + """ + labels = [] + splits = [0] + for element in elements: + labels.append(element) + splits.append(splits[-1] + element.size(0)) + batch_label = torch.cat(labels) + return batch_label, splits[1:-1] + + +def batch_label_to_onehot(batch_label, split_indices, num_classes): + """Convert a concated label tensor to onehot format. + + Args: + batch_label (torch.Tensor): A concated label tensor from multiple + samples. + split_indices (List[int]): The split indices of every sample. + num_classes (int): The number of classes. + + Returns: + torch.Tensor: The onehot format label tensor. + + Examples: + >>> import torch + >>> from mmpretrain.structures import batch_label_to_onehot + >>> # Assume a concated label from 3 samples. 
+ >>> # label 1: [0, 1], label 2: [0, 2, 4], label 3: [3, 1] + >>> batch_label = torch.tensor([0, 1, 0, 2, 4, 3, 1]) + >>> split_indices = [2, 5] + >>> batch_label_to_onehot(batch_label, split_indices, num_classes=5) + tensor([[1, 1, 0, 0, 0], + [1, 0, 1, 0, 1], + [0, 1, 0, 1, 0]]) + """ + sparse_onehot_list = F.one_hot(batch_label, num_classes) + onehot_list = [ + sparse_onehot.sum(0) + for sparse_onehot in tensor_split(sparse_onehot_list, split_indices) + ] + return torch.stack(onehot_list) + + +def label_to_onehot(label: LABEL_TYPE, num_classes: int): + """Convert a label to onehot format tensor. + + Args: + label (LABEL_TYPE): Label value. + num_classes (int): The number of classes. + + Returns: + torch.Tensor: The onehot format label tensor. + + Examples: + >>> import torch + >>> from mmpretrain.structures import label_to_onehot + >>> # Single-label + >>> label_to_onehot(1, num_classes=5) + tensor([0, 1, 0, 0, 0]) + >>> # Multi-label + >>> label_to_onehot([0, 2, 3], num_classes=5) + tensor([1, 0, 1, 1, 0]) + """ + label = format_label(label) + sparse_onehot = F.one_hot(label, num_classes) + return sparse_onehot.sum(0) diff --git a/mmpretrain/utils/__init__.py b/mmpretrain/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..991e3217d2f1e5926028e6c9c79e450e30404a33 --- /dev/null +++ b/mmpretrain/utils/__init__.py @@ -0,0 +1,12 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from .analyze import load_json_log +from .collect_env import collect_env +from .dependency import require +from .misc import get_ori_model +from .progress import track, track_on_main_process +from .setup_env import register_all_modules + +__all__ = [ + 'collect_env', 'register_all_modules', 'track_on_main_process', + 'load_json_log', 'get_ori_model', 'track', 'require' +] diff --git a/mmpretrain/utils/analyze.py b/mmpretrain/utils/analyze.py new file mode 100644 index 0000000000000000000000000000000000000000..a933591618951e1e49558f4f5cbbdf9c49a76bfe --- /dev/null +++ b/mmpretrain/utils/analyze.py @@ -0,0 +1,43 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import json + + +def load_json_log(json_log): + """load and convert json_logs to log_dicts. + + Args: + json_log (str): The path of the json log file. + + Returns: + dict: The result dict contains two items, "train" and "val", for + the training log and validate log. + + Example: + An example output: + + .. code-block:: python + + { + 'train': [ + {"lr": 0.1, "time": 0.02, "epoch": 1, "step": 100}, + {"lr": 0.1, "time": 0.02, "epoch": 1, "step": 200}, + {"lr": 0.1, "time": 0.02, "epoch": 1, "step": 300}, + ... + ] + 'val': [ + {"accuracy/top1": 32.1, "step": 1}, + {"accuracy/top1": 50.2, "step": 2}, + {"accuracy/top1": 60.3, "step": 2}, + ... + ] + } + """ + log_dict = dict(train=[], val=[]) + with open(json_log, 'r') as log_file: + for line in log_file: + log = json.loads(line.strip()) + # A hack trick to determine whether the line is training log. + mode = 'train' if 'lr' in log else 'val' + log_dict[mode].append(log) + + return log_dict diff --git a/mmpretrain/utils/collect_env.py b/mmpretrain/utils/collect_env.py new file mode 100644 index 0000000000000000000000000000000000000000..988451ec530e8d21ec3d5a087a3bb7f7b66fd223 --- /dev/null +++ b/mmpretrain/utils/collect_env.py @@ -0,0 +1,16 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+import mmcv +from mmengine.utils import get_git_hash +from mmengine.utils.dl_utils import collect_env as collect_base_env + +import mmpretrain + + +def collect_env(with_torch_comiling_info=False): + """Collect the information of the running environments.""" + env_info = collect_base_env() + env_info['MMCV'] = mmcv.__version__ + if not with_torch_comiling_info: + env_info.pop('PyTorch compiling details') + env_info['MMPreTrain'] = mmpretrain.__version__ + '+' + get_git_hash()[:7] + return env_info diff --git a/mmpretrain/utils/dependency.py b/mmpretrain/utils/dependency.py new file mode 100644 index 0000000000000000000000000000000000000000..0e3d8ae5df7a6968f26e0563e80a7d37a2e2cd68 --- /dev/null +++ b/mmpretrain/utils/dependency.py @@ -0,0 +1,82 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import re +from functools import wraps +from inspect import isfunction + +from importlib_metadata import PackageNotFoundError, distribution +from mmengine.utils import digit_version + + +def satisfy_requirement(dep): + pat = '(' + '|'.join(['>=', '==', '>']) + ')' + parts = re.split(pat, dep, maxsplit=1) + parts = [p.strip() for p in parts] + package = parts[0] + if len(parts) > 1: + op, version = parts[1:] + op = { + '>=': '__ge__', + '==': '__eq__', + '>': '__gt__', + '<': '__lt__', + '<=': '__le__' + }[op] + else: + op, version = None, None + + try: + dist = distribution(package) + if op is None or getattr(digit_version(dist.version), op)( + digit_version(version)): + return True + except PackageNotFoundError: + pass + + return False + + +def require(dep, install=None): + """A wrapper of function for extra package requirements. + + Args: + dep (str): The dependency package name, like ``transformers`` + or ``transformers>=4.28.0``. + install (str, optional): The installation command hint. Defaults + to None, which means to use "pip install dep". + """ + + def wrapper(fn): + assert isfunction(fn) + + @wraps(fn) + def ask_install(*args, **kwargs): + name = fn.__qualname__.replace('.__init__', '') + ins = install or f'pip install "{dep}"' + raise ImportError( + f'{name} requires {dep}, please install it by `{ins}`.') + + if satisfy_requirement(dep): + fn._verify_require = getattr(fn, '_verify_require', lambda: None) + return fn + + ask_install._verify_require = ask_install + return ask_install + + return wrapper + + +WITH_MULTIMODAL = all( + satisfy_requirement(item) + for item in ['pycocotools', 'transformers>=4.28.0']) + + +def register_multimodal_placeholder(names, registry): + for name in names: + + def ask_install(*args, **kwargs): + raise ImportError( + f'{name} requires extra multi-modal dependencies, please ' + 'install it by `pip install "mmpretrain[multimodal]"` ' + 'or `pip install -e ".[multimodal]"`.') + + registry.register_module(name=name, module=ask_install) diff --git a/mmpretrain/utils/misc.py b/mmpretrain/utils/misc.py new file mode 100644 index 0000000000000000000000000000000000000000..cc532679943689233be76e9a8f74da8ed822443e --- /dev/null +++ b/mmpretrain/utils/misc.py @@ -0,0 +1,18 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch.nn as nn +from mmengine.model import is_model_wrapper + + +def get_ori_model(model: nn.Module) -> nn.Module: + """Get original model if the input model is a model wrapper. + + Args: + model (nn.Module): A model may be a model wrapper. + + Returns: + nn.Module: The model without model wrapper. 
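+
+    Example (illustrative; a plain module is not a wrapper and is returned
+    unchanged)::
+
+        >>> import torch.nn as nn
+        >>> get_ori_model(nn.Linear(3, 4))
+        Linear(in_features=3, out_features=4, bias=True)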
+ """ + if is_model_wrapper(model): + return model.module + else: + return model diff --git a/mmpretrain/utils/progress.py b/mmpretrain/utils/progress.py new file mode 100644 index 0000000000000000000000000000000000000000..b23f976a42fc3a2f6e38f025f01041deb5608405 --- /dev/null +++ b/mmpretrain/utils/progress.py @@ -0,0 +1,40 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Optional + +import mmengine.dist as dist +import rich.progress as progress +from rich.live import Live + +disable_progress_bar = False +global_progress = progress.Progress( + '{task.description}', + progress.BarColumn(), + progress.TaskProgressColumn(show_speed=True), + progress.TimeRemainingColumn(), +) +global_live = Live(global_progress, refresh_per_second=10) + + +def track(sequence, description: str = '', total: Optional[float] = None): + if disable_progress_bar: + yield from sequence + else: + global_live.start() + task_id = global_progress.add_task(description, total=total) + task = global_progress._tasks[task_id] + try: + yield from global_progress.track(sequence, task_id=task_id) + finally: + if task.total is None: + global_progress.update(task_id, total=task.completed) + if all(task.finished for task in global_progress.tasks): + global_live.stop() + for task_id in global_progress.task_ids: + global_progress.remove_task(task_id) + + +def track_on_main_process(sequence, description='', total=None): + if not dist.is_main_process() or disable_progress_bar: + yield from sequence + else: + yield from track(sequence, total=total, description=description) diff --git a/mmpretrain/utils/setup_env.py b/mmpretrain/utils/setup_env.py new file mode 100644 index 0000000000000000000000000000000000000000..1b57b848c98a75c7a1b5854c800ecc2dd5da6df8 --- /dev/null +++ b/mmpretrain/utils/setup_env.py @@ -0,0 +1,41 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import datetime +import warnings + +from mmengine import DefaultScope + + +def register_all_modules(init_default_scope: bool = True) -> None: + """Register all modules in mmpretrain into the registries. + + Args: + init_default_scope (bool): Whether initialize the mmpretrain default + scope. If True, the global default scope will be set to + `mmpretrain`, and all registries will build modules from + mmpretrain's registry node. To understand more about the registry, + please refer to + https://github.com/open-mmlab/mmengine/blob/main/docs/en/tutorials/registry.md + Defaults to True. + """ # noqa: E501 + import mmpretrain.datasets # noqa: F401,F403 + import mmpretrain.engine # noqa: F401,F403 + import mmpretrain.evaluation # noqa: F401,F403 + import mmpretrain.models # noqa: F401,F403 + import mmpretrain.structures # noqa: F401,F403 + import mmpretrain.visualization # noqa: F401,F403 + + if not init_default_scope: + return + + current_scope = DefaultScope.get_current_instance() + if current_scope is None: + DefaultScope.get_instance('mmpretrain', scope_name='mmpretrain') + elif current_scope.scope_name != 'mmpretrain': + warnings.warn( + f'The current default scope "{current_scope.scope_name}" ' + 'is not "mmpretrain", `register_all_modules` will force ' + 'the current default scope to be "mmpretrain". 
If this is ' + 'not expected, please set `init_default_scope=False`.') + # avoid name conflict + new_instance_name = f'mmpretrain-{datetime.datetime.now()}' + DefaultScope.get_instance(new_instance_name, scope_name='mmpretrain') diff --git a/mmpretrain/version.py b/mmpretrain/version.py new file mode 100644 index 0000000000000000000000000000000000000000..1d684c9c1abc37bee3d6c5d68aea0237f6ccd6d5 --- /dev/null +++ b/mmpretrain/version.py @@ -0,0 +1,28 @@ +# Copyright (c) OpenMMLab. All rights reserved + +__version__ = '1.0.0rc8' + + +def parse_version_info(version_str): + """Parse a version string into a tuple. + + Args: + version_str (str): The version string. + Returns: + tuple[int | str]: The version info, e.g., "1.3.0" is parsed into + (1, 3, 0), and "2.0.0rc1" is parsed into (2, 0, 0, 'rc1'). + """ + version_info = [] + for x in version_str.split('.'): + if x.isdigit(): + version_info.append(int(x)) + elif x.find('rc') != -1: + patch_version = x.split('rc') + version_info.append(int(patch_version[0])) + version_info.append(f'rc{patch_version[1]}') + return tuple(version_info) + + +version_info = parse_version_info(__version__) + +__all__ = ['__version__', 'version_info', 'parse_version_info'] diff --git a/mmpretrain/visualization/__init__.py b/mmpretrain/visualization/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..0dbeecfb070193f479b248dca3e98311577410a1 --- /dev/null +++ b/mmpretrain/visualization/__init__.py @@ -0,0 +1,5 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from .utils import create_figure, get_adaptive_scale +from .visualizer import UniversalVisualizer + +__all__ = ['UniversalVisualizer', 'get_adaptive_scale', 'create_figure'] diff --git a/mmpretrain/visualization/utils.py b/mmpretrain/visualization/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..91a1d81f1449dfbfb7ff5198eb6dc25a6386ed48 --- /dev/null +++ b/mmpretrain/visualization/utils.py @@ -0,0 +1,60 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import TYPE_CHECKING, Tuple + +if TYPE_CHECKING: + from matplotlib.figure import Figure + + +def get_adaptive_scale(img_shape: Tuple[int, int], + min_scale: float = 0.3, + max_scale: float = 3.0) -> float: + """Get adaptive scale according to image shape. + + The target scale depends on the the short edge length of the image. If the + short edge length equals 224, the output is 1.0. And output linear scales + according the short edge length. + + You can also specify the minimum scale and the maximum scale to limit the + linear scale. + + Args: + img_shape (Tuple[int, int]): The shape of the canvas image. + min_size (int): The minimum scale. Defaults to 0.3. + max_size (int): The maximum scale. Defaults to 3.0. + + Returns: + int: The adaptive scale. + """ + short_edge_length = min(img_shape) + scale = short_edge_length / 224. + return min(max(scale, min_scale), max_scale) + + +def create_figure(*args, margin=False, **kwargs) -> 'Figure': + """Create a independent figure. + + Different from the :func:`plt.figure`, the figure from this function won't + be managed by matplotlib. And it has + :obj:`matplotlib.backends.backend_agg.FigureCanvasAgg`, and therefore, you + can use the ``canvas`` attribute to get access the drawn image. + + Args: + *args: All positional arguments of :class:`matplotlib.figure.Figure`. + margin: Whether to reserve the white edges of the figure. + Defaults to False. + **kwargs: All keyword arguments of :class:`matplotlib.figure.Figure`. 
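The progress helpers and `register_all_modules` above are mainly useful in ad-hoc scripts that run outside the standard MMEngine runners. A minimal sketch; the iterable and the per-item work are placeholders:

from mmpretrain.utils import register_all_modules, track

# Populate mmpretrain's registries and set the default scope, as the
# register_all_modules docstring above describes.
register_all_modules(init_default_scope=True)

# track() wraps any iterable with the shared rich progress bar; it degrades
# to a plain loop when disable_progress_bar is set, and track_on_main_process
# additionally skips the bar on non-main ranks.
squares = [x * x for x in track(range(1000), description='demo', total=1000)]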
+ + Return: + matplotlib.figure.Figure: The created figure. + """ + from matplotlib.backends.backend_agg import FigureCanvasAgg + from matplotlib.figure import Figure + + figure = Figure(*args, **kwargs) + FigureCanvasAgg(figure) + + if not margin: + # remove white edges by set subplot margin + figure.subplots_adjust(left=0, right=1, bottom=0, top=1) + + return figure diff --git a/mmpretrain/visualization/visualizer.py b/mmpretrain/visualization/visualizer.py new file mode 100644 index 0000000000000000000000000000000000000000..5d18ca87f6bc246b4defe17281ae87c4464e1b89 --- /dev/null +++ b/mmpretrain/visualization/visualizer.py @@ -0,0 +1,777 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from typing import Optional, Sequence, Tuple, Union + +import mmcv +import numpy as np +import torch +import torch.nn.functional as F +from mmengine.dataset import BaseDataset +from mmengine.dist import master_only +from mmengine.visualization import Visualizer +from mmengine.visualization.utils import img_from_canvas + +from mmpretrain.registry import VISUALIZERS +from mmpretrain.structures import DataSample +from .utils import create_figure, get_adaptive_scale + + +@VISUALIZERS.register_module() +class UniversalVisualizer(Visualizer): + """Universal Visualizer for multiple tasks. + + Args: + name (str): Name of the instance. Defaults to 'visualizer'. + image (np.ndarray, optional): the origin image to draw. The format + should be RGB. Defaults to None. + vis_backends (list, optional): Visual backend config list. + Defaults to None. + save_dir (str, optional): Save file dir for all storage backends. + If it is None, the backend storage will not save any data. + fig_save_cfg (dict): Keyword parameters of figure for saving. + Defaults to empty dict. + fig_show_cfg (dict): Keyword parameters of figure for showing. + Defaults to empty dict. + """ + DEFAULT_TEXT_CFG = { + 'family': 'monospace', + 'color': 'white', + 'bbox': dict(facecolor='black', alpha=0.5, boxstyle='Round'), + 'verticalalignment': 'top', + 'horizontalalignment': 'left', + } + + @master_only + def visualize_cls(self, + image: np.ndarray, + data_sample: DataSample, + classes: Optional[Sequence[str]] = None, + draw_gt: bool = True, + draw_pred: bool = True, + draw_score: bool = True, + resize: Optional[int] = None, + rescale_factor: Optional[float] = None, + text_cfg: dict = dict(), + show: bool = False, + wait_time: float = 0, + out_file: Optional[str] = None, + name: str = '', + step: int = 0) -> None: + """Visualize image classification result. + + This method will draw an text box on the input image to visualize the + information about image classification, like the ground-truth label and + prediction label. + + Args: + image (np.ndarray): The image to draw. The format should be RGB. + data_sample (:obj:`DataSample`): The annotation of the image. + classes (Sequence[str], optional): The categories names. + Defaults to None. + draw_gt (bool): Whether to draw ground-truth labels. + Defaults to True. + draw_pred (bool): Whether to draw prediction labels. + Defaults to True. + draw_score (bool): Whether to draw the prediction scores + of prediction categories. Defaults to True. + resize (int, optional): Resize the short edge of the image to the + specified length before visualization. Defaults to None. + rescale_factor (float, optional): Rescale the image by the rescale + factor before visualization. Defaults to None. + text_cfg (dict): Extra text setting, which accepts + arguments of :meth:`mmengine.Visualizer.draw_texts`. 
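Because `create_figure` binds the figure to an Agg canvas instead of registering it with pyplot, the drawn content can be grabbed back as an array via mmengine's `img_from_canvas`. A small sketch with random data, which also shows the clamped scaling `get_adaptive_scale` applies when the visualizer sizes its text:

import numpy as np
from mmengine.visualization.utils import img_from_canvas
from mmpretrain.visualization import create_figure, get_adaptive_scale

figure = create_figure(figsize=(4, 4))
ax = figure.add_subplot()
ax.imshow(np.random.rand(64, 64, 3))
rendered = img_from_canvas(figure.canvas)  # H x W x 3 uint8 RGB array

get_adaptive_scale((224, 448))   # 1.0: the short edge is exactly 224
get_adaptive_scale((64, 64))     # 0.3: 64 / 224 clamped up to min_scale
get_adaptive_scale((2048, 900))  # 3.0: 900 / 224 clamped down to max_scale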
+ Defaults to an empty dict. + show (bool): Whether to display the drawn image in a window, please + confirm your are able to access the graphical interface. + Defaults to False. + wait_time (float): The display time (s). Defaults to 0, which means + "forever". + out_file (str, optional): Extra path to save the visualization + result. If specified, the visualizer will only save the result + image to the out_file and ignore its storage backends. + Defaults to None. + name (str): The image identifier. It's useful when using the + storage backends of the visualizer to save or display the + image. Defaults to an empty string. + step (int): The global step value. It's useful to record a + series of visualization results for the same image with the + storage backends. Defaults to 0. + + Returns: + np.ndarray: The visualization image. + """ + if self.dataset_meta is not None: + classes = classes or self.dataset_meta.get('classes', None) + + if resize is not None: + h, w = image.shape[:2] + if w < h: + image = mmcv.imresize(image, (resize, resize * h // w)) + else: + image = mmcv.imresize(image, (resize * w // h, resize)) + elif rescale_factor is not None: + image = mmcv.imrescale(image, rescale_factor) + + texts = [] + self.set_image(image) + + if draw_gt and 'gt_label' in data_sample: + idx = data_sample.gt_label.tolist() + class_labels = [''] * len(idx) + if classes is not None: + class_labels = [f' ({classes[i]})' for i in idx] + labels = [str(idx[i]) + class_labels[i] for i in range(len(idx))] + prefix = 'Ground truth: ' + texts.append(prefix + ('\n' + ' ' * len(prefix)).join(labels)) + + if draw_pred and 'pred_label' in data_sample: + idx = data_sample.pred_label.tolist() + score_labels = [''] * len(idx) + class_labels = [''] * len(idx) + if draw_score and 'pred_score' in data_sample: + score_labels = [ + f', {data_sample.pred_score[i].item():.2f}' for i in idx + ] + + if classes is not None: + class_labels = [f' ({classes[i]})' for i in idx] + + labels = [ + str(idx[i]) + score_labels[i] + class_labels[i] + for i in range(len(idx)) + ] + prefix = 'Prediction: ' + texts.append(prefix + ('\n' + ' ' * len(prefix)).join(labels)) + + img_scale = get_adaptive_scale(image.shape[:2]) + text_cfg = { + 'size': int(img_scale * 7), + **self.DEFAULT_TEXT_CFG, + **text_cfg, + } + self.ax_save.text( + img_scale * 5, + img_scale * 5, + '\n'.join(texts), + **text_cfg, + ) + drawn_img = self.get_image() + + if show: + self.show(drawn_img, win_name=name, wait_time=wait_time) + + if out_file is not None: + # save the image to the target file instead of vis_backends + mmcv.imwrite(drawn_img[..., ::-1], out_file) + else: + self.add_image(name, drawn_img, step=step) + + return drawn_img + + @master_only + def visualize_image_retrieval(self, + image: np.ndarray, + data_sample: DataSample, + prototype_dataset: BaseDataset, + topk: int = 1, + draw_score: bool = True, + resize: Optional[int] = None, + text_cfg: dict = dict(), + show: bool = False, + wait_time: float = 0, + out_file: Optional[str] = None, + name: Optional[str] = '', + step: int = 0) -> None: + """Visualize image retrieval result. + + This method will draw the input image and the images retrieved from the + prototype dataset. + + Args: + image (np.ndarray): The image to draw. The format should be RGB. + data_sample (:obj:`DataSample`): The annotation of the image. + prototype_dataset (:obj:`BaseDataset`): The prototype dataset. + It should have `get_data_info` method and return a dict + includes `img_path`. 
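A usage sketch for `visualize_cls` above, assuming `DataSample` exposes the `set_gt_label` / `set_pred_label` / `set_pred_score` setters used elsewhere in mmpretrain; the random image and class names are placeholders:

import numpy as np
import torch
from mmpretrain.structures import DataSample
from mmpretrain.visualization import UniversalVisualizer

vis = UniversalVisualizer()
image = np.random.randint(0, 256, (224, 224, 3), dtype=np.uint8)

data_sample = DataSample()
data_sample.set_gt_label(1)
data_sample.set_pred_label(2)
data_sample.set_pred_score(torch.tensor([0.1, 0.2, 0.6, 0.1]))

# Writes the annotated image to cls_result.png instead of a vis backend.
vis.visualize_cls(image, data_sample,
                  classes=['cat', 'dog', 'fox', 'bird'],
                  out_file='cls_result.png')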
+ draw_score (bool): Whether to draw the match scores of the + retrieved images. Defaults to True. + resize (int, optional): Resize the long edge of the image to the + specified length before visualization. Defaults to None. + text_cfg (dict): Extra text setting, which accepts arguments of + :func:`plt.text`. Defaults to an empty dict. + show (bool): Whether to display the drawn image in a window, please + confirm your are able to access the graphical interface. + Defaults to False. + wait_time (float): The display time (s). Defaults to 0, which means + "forever". + out_file (str, optional): Extra path to save the visualization + result. If specified, the visualizer will only save the result + image to the out_file and ignore its storage backends. + Defaults to None. + name (str): The image identifier. It's useful when using the + storage backends of the visualizer to save or display the + image. Defaults to an empty string. + step (int): The global step value. It's useful to record a + series of visualization results for the same image with the + storage backends. Defaults to 0. + + Returns: + np.ndarray: The visualization image. + """ + text_cfg = {**self.DEFAULT_TEXT_CFG, **text_cfg} + if resize is not None: + image = mmcv.imrescale(image, (resize, resize)) + + match_scores, indices = torch.topk(data_sample.pred_score, k=topk) + + figure = create_figure(margin=True) + gs = figure.add_gridspec(2, topk) + query_plot = figure.add_subplot(gs[0, :]) + query_plot.axis(False) + query_plot.imshow(image) + + for k, (score, sample_idx) in enumerate(zip(match_scores, indices)): + sample = prototype_dataset.get_data_info(sample_idx.item()) + value_image = mmcv.imread(sample['img_path'])[..., ::-1] + value_plot = figure.add_subplot(gs[1, k]) + value_plot.axis(False) + value_plot.imshow(value_image) + if draw_score: + value_plot.text( + 5, + 5, + f'{score:.2f}', + **text_cfg, + ) + drawn_img = img_from_canvas(figure.canvas) + self.set_image(drawn_img) + + if show: + self.show(drawn_img, win_name=name, wait_time=wait_time) + + if out_file is not None: + # save the image to the target file instead of vis_backends + mmcv.imwrite(drawn_img[..., ::-1], out_file) + else: + self.add_image(name, drawn_img, step=step) + + return drawn_img + + def add_mask_to_image( + self, + image: np.ndarray, + data_sample: DataSample, + resize: Union[int, Tuple[int]] = 224, + color: Union[str, Tuple[int]] = 'black', + alpha: Union[int, float] = 0.8, + ) -> np.ndarray: + if isinstance(resize, int): + resize = (resize, resize) + + image = mmcv.imresize(image, resize) + self.set_image(image) + + if isinstance(data_sample.mask, np.ndarray): + data_sample.mask = torch.tensor(data_sample.mask) + mask = data_sample.mask.float()[None, None, ...] + mask_ = F.interpolate(mask, image.shape[:2], mode='nearest')[0, 0] + + self.draw_binary_masks(mask_.bool(), colors=color, alphas=alpha) + + drawn_img = self.get_image() + return drawn_img + + @master_only + def visualize_masked_image(self, + image: np.ndarray, + data_sample: DataSample, + resize: Union[int, Tuple[int]] = 224, + color: Union[str, Tuple[int]] = 'black', + alpha: Union[int, float] = 0.8, + show: bool = False, + wait_time: float = 0, + out_file: Optional[str] = None, + name: str = '', + step: int = 0) -> None: + """Visualize masked image. + + This method will draw an image with binary mask. + + Args: + image (np.ndarray): The image to draw. The format should be RGB. + data_sample (:obj:`DataSample`): The annotation of the image. 
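A sketch for the `add_mask_to_image` / `visualize_masked_image` pair defined above. The random 14x14 mask stands in for the patch mask a masked-image-modeling pipeline would attach to the sample; it is simply stored on the `mask` field that the method reads:

import numpy as np
from mmpretrain.structures import DataSample
from mmpretrain.visualization import UniversalVisualizer

vis = UniversalVisualizer()
image = np.random.randint(0, 256, (224, 224, 3), dtype=np.uint8)

data_sample = DataSample()
data_sample.mask = np.random.rand(14, 14) > 0.5  # placeholder patch mask

# The mask is upsampled to the resized image and drawn as a binary overlay.
vis.visualize_masked_image(image, data_sample, out_file='masked.png')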
+ resize (int | Tuple[int]): Resize the input image to the specified + shape. Defaults to 224. + color (str | Tuple[int]): The color of the binary mask. + Defaults to "black". + alpha (int | float): The transparency of the mask. Defaults to 0.8. + show (bool): Whether to display the drawn image in a window, please + confirm your are able to access the graphical interface. + Defaults to False. + wait_time (float): The display time (s). Defaults to 0, which means + "forever". + out_file (str, optional): Extra path to save the visualization + result. If specified, the visualizer will only save the result + image to the out_file and ignore its storage backends. + Defaults to None. + name (str): The image identifier. It's useful when using the + storage backends of the visualizer to save or display the + image. Defaults to an empty string. + step (int): The global step value. It's useful to record a + series of visualization results for the same image with the + storage backends. Defaults to 0. + + Returns: + np.ndarray: The visualization image. + """ + drawn_img = self.add_mask_to_image( + image=image, + data_sample=data_sample, + resize=resize, + color=color, + alpha=alpha) + + if show: + self.show(drawn_img, win_name=name, wait_time=wait_time) + + if out_file is not None: + # save the image to the target file instead of vis_backends + mmcv.imwrite(drawn_img[..., ::-1], out_file) + else: + self.add_image(name, drawn_img, step=step) + + return drawn_img + + @master_only + def visualize_image_caption(self, + image: np.ndarray, + data_sample: DataSample, + resize: Optional[int] = None, + text_cfg: dict = dict(), + show: bool = False, + wait_time: float = 0, + out_file: Optional[str] = None, + name: Optional[str] = '', + step: int = 0) -> None: + """Visualize image caption result. + + This method will draw the input image and the images caption. + + Args: + image (np.ndarray): The image to draw. The format should be RGB. + data_sample (:obj:`DataSample`): The annotation of the image. + resize (int, optional): Resize the long edge of the image to the + specified length before visualization. Defaults to None. + text_cfg (dict): Extra text setting, which accepts arguments of + :func:`plt.text`. Defaults to an empty dict. + show (bool): Whether to display the drawn image in a window, please + confirm your are able to access the graphical interface. + Defaults to False. + wait_time (float): The display time (s). Defaults to 0, which means + "forever". + out_file (str, optional): Extra path to save the visualization + result. If specified, the visualizer will only save the result + image to the out_file and ignore its storage backends. + Defaults to None. + name (str): The image identifier. It's useful when using the + storage backends of the visualizer to save or display the + image. Defaults to an empty string. + step (int): The global step value. It's useful to record a + series of visualization results for the same image with the + storage backends. Defaults to 0. + + Returns: + np.ndarray: The visualization image. 
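A sketch for `visualize_image_caption` above. The caption is read from the sample's `pred_caption` field, which a captioning model's predict step would normally fill in; here it is set by hand:

import numpy as np
from mmpretrain.structures import DataSample
from mmpretrain.visualization import UniversalVisualizer

vis = UniversalVisualizer()
image = np.random.randint(0, 256, (224, 224, 3), dtype=np.uint8)

data_sample = DataSample()
data_sample.pred_caption = 'a building seen from above'  # placeholder text

vis.visualize_image_caption(image, data_sample, out_file='caption.png')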
+ """ + text_cfg = {**self.DEFAULT_TEXT_CFG, **text_cfg} + + if resize is not None: + h, w = image.shape[:2] + if w < h: + image = mmcv.imresize(image, (resize, resize * h // w)) + else: + image = mmcv.imresize(image, (resize * w // h, resize)) + + self.set_image(image) + + img_scale = get_adaptive_scale(image.shape[:2]) + text_cfg = { + 'size': int(img_scale * 7), + **self.DEFAULT_TEXT_CFG, + **text_cfg, + } + self.ax_save.text( + img_scale * 5, + img_scale * 5, + data_sample.get('pred_caption'), + wrap=True, + **text_cfg, + ) + drawn_img = self.get_image() + + if show: + self.show(drawn_img, win_name=name, wait_time=wait_time) + + if out_file is not None: + # save the image to the target file instead of vis_backends + mmcv.imwrite(drawn_img[..., ::-1], out_file) + else: + self.add_image(name, drawn_img, step=step) + + return drawn_img + + @master_only + def visualize_vqa(self, + image: np.ndarray, + data_sample: DataSample, + resize: Optional[int] = None, + text_cfg: dict = dict(), + show: bool = False, + wait_time: float = 0, + out_file: Optional[str] = None, + name: Optional[str] = '', + step: int = 0) -> None: + """Visualize visual question answering result. + + This method will draw the input image, question and answer. + + Args: + image (np.ndarray): The image to draw. The format should be RGB. + data_sample (:obj:`DataSample`): The annotation of the image. + resize (int, optional): Resize the long edge of the image to the + specified length before visualization. Defaults to None. + text_cfg (dict): Extra text setting, which accepts arguments of + :func:`plt.text`. Defaults to an empty dict. + show (bool): Whether to display the drawn image in a window, please + confirm your are able to access the graphical interface. + Defaults to False. + wait_time (float): The display time (s). Defaults to 0, which means + "forever". + out_file (str, optional): Extra path to save the visualization + result. If specified, the visualizer will only save the result + image to the out_file and ignore its storage backends. + Defaults to None. + name (str): The image identifier. It's useful when using the + storage backends of the visualizer to save or display the + image. Defaults to an empty string. + step (int): The global step value. It's useful to record a + series of visualization results for the same image with the + storage backends. Defaults to 0. + + Returns: + np.ndarray: The visualization image. 
+ """ + text_cfg = {**self.DEFAULT_TEXT_CFG, **text_cfg} + + if resize is not None: + h, w = image.shape[:2] + if w < h: + image = mmcv.imresize(image, (resize, resize * h // w)) + else: + image = mmcv.imresize(image, (resize * w // h, resize)) + + self.set_image(image) + + img_scale = get_adaptive_scale(image.shape[:2]) + text_cfg = { + 'size': int(img_scale * 7), + **self.DEFAULT_TEXT_CFG, + **text_cfg, + } + text = (f'Q: {data_sample.get("question")}\n' + f'A: {data_sample.get("pred_answer")}') + self.ax_save.text( + img_scale * 5, + img_scale * 5, + text, + wrap=True, + **text_cfg, + ) + drawn_img = self.get_image() + + if show: + self.show(drawn_img, win_name=name, wait_time=wait_time) + + if out_file is not None: + # save the image to the target file instead of vis_backends + mmcv.imwrite(drawn_img[..., ::-1], out_file) + else: + self.add_image(name, drawn_img, step=step) + + return drawn_img + + @master_only + def visualize_visual_grounding(self, + image: np.ndarray, + data_sample: DataSample, + resize: Optional[int] = None, + text_cfg: dict = dict(), + show: bool = False, + wait_time: float = 0, + out_file: Optional[str] = None, + name: Optional[str] = '', + line_width: Union[int, float] = 3, + bbox_color: Union[str, tuple] = 'green', + step: int = 0) -> None: + """Visualize visual grounding result. + + This method will draw the input image, bbox and the object. + + Args: + image (np.ndarray): The image to draw. The format should be RGB. + data_sample (:obj:`DataSample`): The annotation of the image. + resize (int, optional): Resize the long edge of the image to the + specified length before visualization. Defaults to None. + text_cfg (dict): Extra text setting, which accepts arguments of + :func:`plt.text`. Defaults to an empty dict. + show (bool): Whether to display the drawn image in a window, please + confirm your are able to access the graphical interface. + Defaults to False. + wait_time (float): The display time (s). Defaults to 0, which means + "forever". + out_file (str, optional): Extra path to save the visualization + result. If specified, the visualizer will only save the result + image to the out_file and ignore its storage backends. + Defaults to None. + name (str): The image identifier. It's useful when using the + storage backends of the visualizer to save or display the + image. Defaults to an empty string. + step (int): The global step value. It's useful to record a + series of visualization results for the same image with the + storage backends. Defaults to 0. + + Returns: + np.ndarray: The visualization image. + """ + text_cfg = {**self.DEFAULT_TEXT_CFG, **text_cfg} + + gt_bboxes = data_sample.get('gt_bboxes') + pred_bboxes = data_sample.get('pred_bboxes') + if resize is not None: + h, w = image.shape[:2] + if w < h: + image, w_scale, h_scale = mmcv.imresize( + image, (resize, resize * h // w), return_scale=True) + else: + image, w_scale, h_scale = mmcv.imresize( + image, (resize * w // h, resize), return_scale=True) + pred_bboxes[:, ::2] *= w_scale + pred_bboxes[:, 1::2] *= h_scale + if gt_bboxes is not None: + gt_bboxes[:, ::2] *= w_scale + gt_bboxes[:, 1::2] *= h_scale + + self.set_image(image) + # Avoid the line-width limit in the base classes. 
+ self._default_font_size = 1e3 + self.draw_bboxes( + pred_bboxes, line_widths=line_width, edge_colors=bbox_color) + if gt_bboxes is not None: + self.draw_bboxes( + gt_bboxes, line_widths=line_width, edge_colors='blue') + + img_scale = get_adaptive_scale(image.shape[:2]) + text_cfg = { + 'size': int(img_scale * 7), + **self.DEFAULT_TEXT_CFG, + **text_cfg, + } + + text_positions = pred_bboxes[:, :2] + line_width + for i in range(pred_bboxes.size(0)): + self.ax_save.text( + text_positions[i, 0] + line_width, + text_positions[i, 1] + line_width, + data_sample.get('text'), + **text_cfg, + ) + drawn_img = self.get_image() + + if show: + self.show(drawn_img, win_name=name, wait_time=wait_time) + + if out_file is not None: + # save the image to the target file instead of vis_backends + mmcv.imwrite(drawn_img[..., ::-1], out_file) + else: + self.add_image(name, drawn_img, step=step) + + return drawn_img + + @master_only + def visualize_t2i_retrieval(self, + text: str, + data_sample: DataSample, + prototype_dataset: BaseDataset, + topk: int = 1, + draw_score: bool = True, + text_cfg: dict = dict(), + fig_cfg: dict = dict(), + show: bool = False, + wait_time: float = 0, + out_file: Optional[str] = None, + name: Optional[str] = '', + step: int = 0) -> None: + """Visualize Text-To-Image retrieval result. + + This method will draw the input text and the images retrieved from the + prototype dataset. + + Args: + image (np.ndarray): The image to draw. The format should be RGB. + data_sample (:obj:`DataSample`): The annotation of the image. + prototype_dataset (:obj:`BaseDataset`): The prototype dataset. + It should have `get_data_info` method and return a dict + includes `img_path`. + topk (int): To visualize the topk matching items. Defaults to 1. + draw_score (bool): Whether to draw the match scores of the + retrieved images. Defaults to True. + text_cfg (dict): Extra text setting, which accepts arguments of + :func:`plt.text`. Defaults to an empty dict. + fig_cfg (dict): Extra figure setting, which accepts arguments of + :func:`plt.Figure`. Defaults to an empty dict. + show (bool): Whether to display the drawn image in a window, please + confirm your are able to access the graphical interface. + Defaults to False. + wait_time (float): The display time (s). Defaults to 0, which means + "forever". + out_file (str, optional): Extra path to save the visualization + result. If specified, the visualizer will only save the result + image to the out_file and ignore its storage backends. + Defaults to None. + name (str): The image identifier. It's useful when using the + storage backends of the visualizer to save or display the + image. Defaults to an empty string. + step (int): The global step value. It's useful to record a + series of visualization results for the same image with the + storage backends. Defaults to 0. + + Returns: + np.ndarray: The visualization image. 
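A sketch for `visualize_visual_grounding` above. The predicted box is a float tensor in (x1, y1, x2, y2) order, matching the column slicing the method applies when `resize` rescales coordinates; the box values and grounding phrase are placeholders:

import numpy as np
import torch
from mmpretrain.structures import DataSample
from mmpretrain.visualization import UniversalVisualizer

vis = UniversalVisualizer()
image = np.random.randint(0, 256, (448, 448, 3), dtype=np.uint8)

data_sample = DataSample()
data_sample.text = 'the building on the left'
data_sample.pred_bboxes = torch.tensor([[32., 48., 200., 220.]])

vis.visualize_visual_grounding(image, data_sample, out_file='grounding.png')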
+ """ + text_cfg = {**self.DEFAULT_TEXT_CFG, **text_cfg} + + match_scores, indices = torch.topk(data_sample.pred_score, k=topk) + + figure = create_figure(margin=True, **fig_cfg) + figure.suptitle(text) + gs = figure.add_gridspec(1, topk) + + for k, (score, sample_idx) in enumerate(zip(match_scores, indices)): + sample = prototype_dataset.get_data_info(sample_idx.item()) + value_image = mmcv.imread(sample['img_path'])[..., ::-1] + value_plot = figure.add_subplot(gs[0, k]) + value_plot.axis(False) + value_plot.imshow(value_image) + if draw_score: + value_plot.text( + 5, + 5, + f'{score:.2f}', + **text_cfg, + ) + drawn_img = img_from_canvas(figure.canvas) + self.set_image(drawn_img) + + if show: + self.show(drawn_img, win_name=name, wait_time=wait_time) + + if out_file is not None: + # save the image to the target file instead of vis_backends + mmcv.imwrite(drawn_img[..., ::-1], out_file) + else: + self.add_image(name, drawn_img, step=step) + + return drawn_img + + @master_only + def visualize_i2t_retrieval(self, + image: np.ndarray, + data_sample: DataSample, + prototype_dataset: Sequence[str], + topk: int = 1, + draw_score: bool = True, + resize: Optional[int] = None, + text_cfg: dict = dict(), + show: bool = False, + wait_time: float = 0, + out_file: Optional[str] = None, + name: str = '', + step: int = 0) -> None: + """Visualize Image-To-Text retrieval result. + + This method will draw the input image and the texts retrieved from the + prototype dataset. + + Args: + image (np.ndarray): The image to draw. The format should be RGB. + data_sample (:obj:`DataSample`): The annotation of the image. + prototype_dataset (Sequence[str]): The prototype dataset. + It should be a list of texts. + topk (int): To visualize the topk matching items. Defaults to 1. + draw_score (bool): Whether to draw the prediction scores + of prediction categories. Defaults to True. + resize (int, optional): Resize the short edge of the image to the + specified length before visualization. Defaults to None. + text_cfg (dict): Extra text setting, which accepts + arguments of :meth:`mmengine.Visualizer.draw_texts`. + Defaults to an empty dict. + show (bool): Whether to display the drawn image in a window, please + confirm your are able to access the graphical interface. + Defaults to False. + wait_time (float): The display time (s). Defaults to 0, which means + "forever". + out_file (str, optional): Extra path to save the visualization + result. If specified, the visualizer will only save the result + image to the out_file and ignore its storage backends. + Defaults to None. + name (str): The image identifier. It's useful when using the + storage backends of the visualizer to save or display the + image. Defaults to an empty string. + step (int): The global step value. It's useful to record a + series of visualization results for the same image with the + storage backends. Defaults to 0. + + Returns: + np.ndarray: The visualization image. 
+ """ + if resize is not None: + h, w = image.shape[:2] + if w < h: + image = mmcv.imresize(image, (resize, resize * h // w)) + else: + image = mmcv.imresize(image, (resize * w // h, resize)) + + self.set_image(image) + + match_scores, indices = torch.topk(data_sample.pred_score, k=topk) + texts = [] + for score, sample_idx in zip(match_scores, indices): + text = prototype_dataset[sample_idx.item()] + if draw_score: + text = f'{score:.2f} ' + text + texts.append(text) + + img_scale = get_adaptive_scale(image.shape[:2]) + text_cfg = { + 'size': int(img_scale * 7), + **self.DEFAULT_TEXT_CFG, + **text_cfg, + } + self.ax_save.text( + img_scale * 5, + img_scale * 5, + '\n'.join(texts), + **text_cfg, + ) + drawn_img = self.get_image() + + if show: + self.show(drawn_img, win_name=name, wait_time=wait_time) + + if out_file is not None: + # save the image to the target file instead of vis_backends + mmcv.imwrite(drawn_img[..., ::-1], out_file) + else: + self.add_image(name, drawn_img, step=step) + + return drawn_img